package ru.job4j.array;
/**
* Search.
*
* @author Sergey Indyukov (onl.ont@mail.ru)
* @version $Id$
* @since 0.1
*/
public class Search {
    /**
     * Checks whether {@code origin} contains {@code sub} as a substring,
     * comparing the underlying char arrays directly.
     */
    public boolean contains(String origin, String sub) {
        char[] orig = origin.toCharArray();
        char[] s = sub.toCharArray();
        boolean result = false;
        // Try every start position in origin that leaves enough room for sub.
        for (int i = 0; !result && i + s.length <= orig.length; i++) {
            int matched = 0;
            // Count consecutive matching characters starting at position i.
            while (matched < s.length && orig[i + matched] == s[matched]) {
                matched++;
            }
            if (matched == s.length) {
                result = true;
            }
        }
        return result;
    }
}
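// Usage sketch (hypothetical values):
//   new Search().contains("Hello world", "world"); // true
//   new Search().contains("Hello world", "word");  // false
//   new Search().contains("abc", "");              // true: the empty string matches at index 0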
|
package dk.aau.sw402F15.tests.typechecker;
import dk.aau.sw402F15.ScopeChecker.ScopeChecker;
import dk.aau.sw402F15.TypeChecker.Exceptions.*;
import dk.aau.sw402F15.TypeChecker.TypeChecker;
import dk.aau.sw402F15.parser.lexer.Lexer;
import dk.aau.sw402F15.parser.lexer.LexerException;
import dk.aau.sw402F15.parser.node.Start;
import dk.aau.sw402F15.parser.parser.Parser;
import dk.aau.sw402F15.parser.parser.ParserException;
import org.junit.Test;
import java.io.IOException;
import java.io.PushbackReader;
import java.io.StringReader;
public class TypeCheckerTests {
@Test
public void checkIntDeclaration(){
checkCode("int i = 0;");
}
@Test
public void checkBoolDeclaration() {
checkCode("bool b = true;");
}
@Test
public void checkDecimalDeclaration() {
checkCode("float b = 1.1;");
}
@Test(expected = IllegalAssignmentException.class)
public void checkIntDeclarationSetToBool() {
checkCode("int i = true;");
}
@Test(expected = IllegalAssignmentException.class)
public void checkIntDeclarationSetToDecimal() {
checkCode("int i = 1.1;");
}
@Test(expected = IllegalAssignmentException.class)
public void checkBoolDeclarationSetToInt() {
checkCode("bool b = 1;");
}
@Test(expected = IllegalAssignmentException.class)
public void checkBoolDeclarationSetToDecimal() {
checkCode("bool b = 1.1;");
}
@Test(expected = IllegalAssignmentException.class)
public void checkDecimalSetToBool(){
checkCode("float f = true;");
}
@Test(expected = IllegalAssignmentException.class)
public void checkDecimalSetToInt(){
checkCode("float f = 1;");
}
@Test
public void checkIntIntComparisonGreater(){
checkCode("bool b = 1 > 2;");
}
@Test
public void checkIntIntComparisonGreaterOrEqual(){
checkCode("bool b = 1 >= 2;");
}
@Test
public void checkIntIntComparisonLess(){
checkCode("bool b = 1 < 2;");
}
@Test
public void checkIntIntComparisonLessOrEqual(){
checkCode("bool b = 1 <= 2;");
}
@Test(expected = IllegalComparisonException.class)
public void checkIntDecimalComparisonGreater(){
checkCode("bool b = 1 > 1.1;");
}
@Test(expected = IllegalComparisonException.class)
public void checkIntDecimalComparisonGreaterOrEqual(){
checkCode("bool b = 1 >= 1.1;");
}
@Test(expected = IllegalComparisonException.class)
public void checkIntDecimalComparisonLess(){
checkCode("bool b = 1 < 1.1;");
}
@Test(expected = IllegalComparisonException.class)
public void checkIntDecimalComparisonLessOrEqual(){
checkCode("bool b = 1 <= 1.1;");
}
@Test(expected = IllegalComparisonException.class)
public void checkIntBoolComparisonGreater(){
checkCode("bool b = 1 > true;");
}
@Test(expected = IllegalComparisonException.class)
public void checkIntBoolComparisonGreaterOrEqual(){
checkCode("bool b = 1 >= true;");
}
@Test(expected = IllegalComparisonException.class)
public void checkIntBoolComparisonLess(){
checkCode("bool b = 1 < true;");
}
@Test(expected = IllegalComparisonException.class)
public void checkIntBoolComparisonLessOrEqual(){
checkCode("bool b = 1 <= true;");
}
@Test(expected = IllegalAssignmentException.class)
public void checkAssignBoolExprToInt() {
checkCode("int i = 7 < 8;");
}
@Test(expected = IllegalReturnTypeException.class)
public void checkReturnInVoidFunction() {
checkCode("void func(){return 2;}");
}
@Test
public void checkNoReturnInVoidFunction() {
checkCode("void func(){}");
}
@Test(expected = MissingReturnStatementException.class)
public void checkMissingReturnStatement() {
checkCode("int func(){}");
}
@Test
public void checkNoMissingReturnStatement() {
checkCode("int func(){return 2;}");
}
@Test(expected = WrongParameterException.class)
public void checkWrongParameterInFunction() {
checkCode("int func(int k, int p){ func(2.2, 2); return k + p; } ");
}
@Test
public void checkNoWrongParameterInFunction() {
checkCode("int func(int k, int p){ func(2, 2); return k + p; } ");
}
@Test(expected = RedefinitionOfReadOnly.class)
public void checkAssignmentOfConst() {
    checkCode("const int i = 7; int func() { i = 2; } ");
}
@Test(expected = RedefinitionOfReadOnly.class)
public void checkAssignmentOfConstInFunction() {
    checkCode("int func(){const int i = 7; i = 2; return 5;} ");
}
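// Shared helper: lexes and parses the snippet, applies the scope checker, then runs the type
// checker over the resulting symbol table. Checker failures propagate as exceptions, which the
// @Test(expected = ...) annotations above assert on.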
private void checkCode(String code) {
Start node = null;
try {
    node = getParser(code).parse();
} catch (ParserException | LexerException | IOException e) {
    e.printStackTrace();
    throw new RuntimeException(e);
}
// Apply scopechecker
ScopeChecker scopeChecker = new ScopeChecker();
node.apply(scopeChecker);
// Apply typechecker
node.apply(new TypeChecker(scopeChecker.getSymbolTable()));
}
private Parser getParser(String code) {
return new Parser(new Lexer(new PushbackReader(new StringReader(code), 1024)));
}
}
|
package com.facebook.yoga;
import com.facebook.proguard.annotations.DoNotStrip;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nullable;
@DoNotStrip
public abstract class YogaNodeJNIBase extends YogaNode implements Cloneable {
/* These flags need to be kept in sync with YGJNI.cpp */
private static final byte MARGIN = 1;
private static final byte PADDING = 2;
private static final byte BORDER = 4;
private static final byte DOES_LEGACY_STRETCH_BEHAVIOUR = 8;
private static final byte HAS_NEW_LAYOUT = 16;
private static final byte LAYOUT_EDGE_SET_FLAG_INDEX = 0;
private static final byte LAYOUT_WIDTH_INDEX = 1;
private static final byte LAYOUT_HEIGHT_INDEX = 2;
private static final byte LAYOUT_LEFT_INDEX = 3;
private static final byte LAYOUT_TOP_INDEX = 4;
private static final byte LAYOUT_DIRECTION_INDEX = 5;
private static final byte LAYOUT_MARGIN_START_INDEX = 6;
private static final byte LAYOUT_PADDING_START_INDEX = 10;
private static final byte LAYOUT_BORDER_START_INDEX = 14;
@Nullable private YogaNodeJNIBase mOwner;
@Nullable private List<YogaNodeJNIBase> mChildren;
@Nullable private YogaMeasureFunction mMeasureFunction;
@Nullable private YogaBaselineFunction mBaselineFunction;
protected long mNativePointer;
@Nullable private Object mData;
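// Layout results written back by the native side: index 0 holds the edge-set flag bits,
// followed by width, height, left, top and direction; margin, padding and border values are
// appended only when the corresponding flag bit is set.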
@DoNotStrip
private @Nullable float[] arr = null;
@DoNotStrip
private int mLayoutDirection = 0;
private boolean mHasNewLayout = true;
private YogaNodeJNIBase(long nativePointer) {
if (nativePointer == 0) {
throw new IllegalStateException("Failed to allocate native memory");
}
mNativePointer = nativePointer;
}
YogaNodeJNIBase() {
this(YogaNative.jni_YGNodeNewJNI());
}
YogaNodeJNIBase(YogaConfig config) {
this(YogaNative.jni_YGNodeNewWithConfigJNI(((YogaConfigJNIBase)config).mNativePointer));
}
public void reset() {
mMeasureFunction = null;
mBaselineFunction = null;
mData = null;
arr = null;
mHasNewLayout = true;
mLayoutDirection = 0;
YogaNative.jni_YGNodeResetJNI(mNativePointer);
}
public int getChildCount() {
return mChildren == null ? 0 : mChildren.size();
}
public YogaNodeJNIBase getChildAt(int i) {
if (mChildren == null) {
throw new IllegalStateException("YogaNode does not have children");
}
return mChildren.get(i);
}
public void addChildAt(YogaNode c, int i) {
YogaNodeJNIBase child = (YogaNodeJNIBase) c;
if (child.mOwner != null) {
throw new IllegalStateException("Child already has a parent, it must be removed first.");
}
if (mChildren == null) {
mChildren = new ArrayList<>(4);
}
mChildren.add(i, child);
child.mOwner = this;
YogaNative.jni_YGNodeInsertChildJNI(mNativePointer, child.mNativePointer, i);
}
public void setIsReferenceBaseline(boolean isReferenceBaseline) {
YogaNative.jni_YGNodeSetIsReferenceBaselineJNI(mNativePointer, isReferenceBaseline);
}
public boolean isReferenceBaseline() {
return YogaNative.jni_YGNodeIsReferenceBaselineJNI(mNativePointer);
}
@Override
public YogaNodeJNIBase cloneWithoutChildren() {
try {
YogaNodeJNIBase clonedYogaNode = (YogaNodeJNIBase) super.clone();
long clonedNativePointer = YogaNative.jni_YGNodeCloneJNI(mNativePointer);
clonedYogaNode.mOwner = null;
clonedYogaNode.mNativePointer = clonedNativePointer;
clonedYogaNode.clearChildren();
return clonedYogaNode;
} catch (CloneNotSupportedException ex) {
// This class implements Cloneable, so this should not happen
throw new RuntimeException(ex);
}
}
private void clearChildren() {
mChildren = null;
YogaNative.jni_YGNodeClearChildrenJNI(mNativePointer);
}
public YogaNodeJNIBase removeChildAt(int i) {
if (mChildren == null) {
throw new IllegalStateException(
"Trying to remove a child of a YogaNode that does not have children");
}
final YogaNodeJNIBase child = mChildren.remove(i);
child.mOwner = null;
YogaNative.jni_YGNodeRemoveChildJNI(mNativePointer, child.mNativePointer);
return child;
}
/**
* The owner is used to identify the YogaTree that a {@link YogaNode} belongs to.
* This method will return the parent of the {@link YogaNode} when the
* {@link YogaNode} only belongs to one YogaTree or null when the
* {@link YogaNode} is shared between two or more YogaTrees.
*
* @return the {@link YogaNode} that owns this {@link YogaNode}.
*/
@Nullable
public YogaNodeJNIBase getOwner() {
return mOwner;
}
/** @deprecated Use #getOwner() instead. This will be removed in the next version. */
@Deprecated
@Nullable
public YogaNodeJNIBase getParent() {
return getOwner();
}
public int indexOf(YogaNode child) {
return mChildren == null ? -1 : mChildren.indexOf(child);
}
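// Flattens this node's subtree breadth-first into parallel arrays of nodes and native
// pointers, then hands both to a single JNI call so the native side can write layout
// results back into each node's 'arr'.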
public void calculateLayout(float width, float height) {
long[] nativePointers = null;
YogaNodeJNIBase[] nodes = null;
ArrayList<YogaNodeJNIBase> n = new ArrayList<>();
n.add(this);
for (int i = 0; i < n.size(); ++i) {
List<YogaNodeJNIBase> children = n.get(i).mChildren;
if (children != null) {
n.addAll(children);
}
}
nodes = n.toArray(new YogaNodeJNIBase[n.size()]);
nativePointers = new long[nodes.length];
for (int i = 0; i < nodes.length; ++i) {
nativePointers[i] = nodes[i].mNativePointer;
}
YogaNative.jni_YGNodeCalculateLayoutJNI(mNativePointer, width, height, nativePointers, nodes);
}
public void dirty() {
YogaNative.jni_YGNodeMarkDirtyJNI(mNativePointer);
}
public void dirtyAllDescendants() {
YogaNative.jni_YGNodeMarkDirtyAndPropogateToDescendantsJNI(mNativePointer);
}
public boolean isDirty() {
return YogaNative.jni_YGNodeIsDirtyJNI(mNativePointer);
}
@Override
public void copyStyle(YogaNode srcNode) {
YogaNative.jni_YGNodeCopyStyleJNI(mNativePointer, ((YogaNodeJNIBase) srcNode).mNativePointer);
}
public YogaDirection getStyleDirection() {
return YogaDirection.fromInt(YogaNative.jni_YGNodeStyleGetDirectionJNI(mNativePointer));
}
public void setDirection(YogaDirection direction) {
YogaNative.jni_YGNodeStyleSetDirectionJNI(mNativePointer, direction.intValue());
}
public YogaFlexDirection getFlexDirection() {
return YogaFlexDirection.fromInt(YogaNative.jni_YGNodeStyleGetFlexDirectionJNI(mNativePointer));
}
public void setFlexDirection(YogaFlexDirection flexDirection) {
YogaNative.jni_YGNodeStyleSetFlexDirectionJNI(mNativePointer, flexDirection.intValue());
}
public YogaJustify getJustifyContent() {
return YogaJustify.fromInt(YogaNative.jni_YGNodeStyleGetJustifyContentJNI(mNativePointer));
}
public void setJustifyContent(YogaJustify justifyContent) {
YogaNative.jni_YGNodeStyleSetJustifyContentJNI(mNativePointer, justifyContent.intValue());
}
public YogaAlign getAlignItems() {
return YogaAlign.fromInt(YogaNative.jni_YGNodeStyleGetAlignItemsJNI(mNativePointer));
}
public void setAlignItems(YogaAlign alignItems) {
YogaNative.jni_YGNodeStyleSetAlignItemsJNI(mNativePointer, alignItems.intValue());
}
public YogaAlign getAlignSelf() {
return YogaAlign.fromInt(YogaNative.jni_YGNodeStyleGetAlignSelfJNI(mNativePointer));
}
public void setAlignSelf(YogaAlign alignSelf) {
YogaNative.jni_YGNodeStyleSetAlignSelfJNI(mNativePointer, alignSelf.intValue());
}
public YogaAlign getAlignContent() {
return YogaAlign.fromInt(YogaNative.jni_YGNodeStyleGetAlignContentJNI(mNativePointer));
}
public void setAlignContent(YogaAlign alignContent) {
YogaNative.jni_YGNodeStyleSetAlignContentJNI(mNativePointer, alignContent.intValue());
}
public YogaPositionType getPositionType() {
return YogaPositionType.fromInt(YogaNative.jni_YGNodeStyleGetPositionTypeJNI(mNativePointer));
}
public void setPositionType(YogaPositionType positionType) {
YogaNative.jni_YGNodeStyleSetPositionTypeJNI(mNativePointer, positionType.intValue());
}
public YogaWrap getWrap() {
return YogaWrap.fromInt(YogaNative.jni_YGNodeStyleGetFlexWrapJNI(mNativePointer));
}
public void setWrap(YogaWrap flexWrap) {
YogaNative.jni_YGNodeStyleSetFlexWrapJNI(mNativePointer, flexWrap.intValue());
}
public YogaOverflow getOverflow() {
return YogaOverflow.fromInt(YogaNative.jni_YGNodeStyleGetOverflowJNI(mNativePointer));
}
public void setOverflow(YogaOverflow overflow) {
YogaNative.jni_YGNodeStyleSetOverflowJNI(mNativePointer, overflow.intValue());
}
public YogaDisplay getDisplay() {
return YogaDisplay.fromInt(YogaNative.jni_YGNodeStyleGetDisplayJNI(mNativePointer));
}
public void setDisplay(YogaDisplay display) {
YogaNative.jni_YGNodeStyleSetDisplayJNI(mNativePointer, display.intValue());
}
public float getFlex() {
return YogaNative.jni_YGNodeStyleGetFlexJNI(mNativePointer);
}
public void setFlex(float flex) {
YogaNative.jni_YGNodeStyleSetFlexJNI(mNativePointer, flex);
}
public float getFlexGrow() {
return YogaNative.jni_YGNodeStyleGetFlexGrowJNI(mNativePointer);
}
public void setFlexGrow(float flexGrow) {
YogaNative.jni_YGNodeStyleSetFlexGrowJNI(mNativePointer, flexGrow);
}
public float getFlexShrink() {
return YogaNative.jni_YGNodeStyleGetFlexShrinkJNI(mNativePointer);
}
public void setFlexShrink(float flexShrink) {
YogaNative.jni_YGNodeStyleSetFlexShrinkJNI(mNativePointer, flexShrink);
}
public YogaValue getFlexBasis() {
return valueFromLong(YogaNative.jni_YGNodeStyleGetFlexBasisJNI(mNativePointer));
}
public void setFlexBasis(float flexBasis) {
YogaNative.jni_YGNodeStyleSetFlexBasisJNI(mNativePointer, flexBasis);
}
public void setFlexBasisPercent(float percent) {
YogaNative.jni_YGNodeStyleSetFlexBasisPercentJNI(mNativePointer, percent);
}
public void setFlexBasisAuto() {
YogaNative.jni_YGNodeStyleSetFlexBasisAutoJNI(mNativePointer);
}
public YogaValue getMargin(YogaEdge edge) {
return valueFromLong(YogaNative.jni_YGNodeStyleGetMarginJNI(mNativePointer, edge.intValue()));
}
public void setMargin(YogaEdge edge, float margin) {
YogaNative.jni_YGNodeStyleSetMarginJNI(mNativePointer, edge.intValue(), margin);
}
public void setMarginPercent(YogaEdge edge, float percent) {
YogaNative.jni_YGNodeStyleSetMarginPercentJNI(mNativePointer, edge.intValue(), percent);
}
public void setMarginAuto(YogaEdge edge) {
YogaNative.jni_YGNodeStyleSetMarginAutoJNI(mNativePointer, edge.intValue());
}
public YogaValue getPadding(YogaEdge edge) {
return valueFromLong(YogaNative.jni_YGNodeStyleGetPaddingJNI(mNativePointer, edge.intValue()));
}
public void setPadding(YogaEdge edge, float padding) {
YogaNative.jni_YGNodeStyleSetPaddingJNI(mNativePointer, edge.intValue(), padding);
}
public void setPaddingPercent(YogaEdge edge, float percent) {
YogaNative.jni_YGNodeStyleSetPaddingPercentJNI(mNativePointer, edge.intValue(), percent);
}
public float getBorder(YogaEdge edge) {
return YogaNative.jni_YGNodeStyleGetBorderJNI(mNativePointer, edge.intValue());
}
public void setBorder(YogaEdge edge, float border) {
YogaNative.jni_YGNodeStyleSetBorderJNI(mNativePointer, edge.intValue(), border);
}
public YogaValue getPosition(YogaEdge edge) {
return valueFromLong(YogaNative.jni_YGNodeStyleGetPositionJNI(mNativePointer, edge.intValue()));
}
public void setPosition(YogaEdge edge, float position) {
YogaNative.jni_YGNodeStyleSetPositionJNI(mNativePointer, edge.intValue(), position);
}
public void setPositionPercent(YogaEdge edge, float percent) {
YogaNative.jni_YGNodeStyleSetPositionPercentJNI(mNativePointer, edge.intValue(), percent);
}
public YogaValue getWidth() {
return valueFromLong(YogaNative.jni_YGNodeStyleGetWidthJNI(mNativePointer));
}
public void setWidth(float width) {
YogaNative.jni_YGNodeStyleSetWidthJNI(mNativePointer, width);
}
public void setWidthPercent(float percent) {
YogaNative.jni_YGNodeStyleSetWidthPercentJNI(mNativePointer, percent);
}
public void setWidthAuto() {
YogaNative.jni_YGNodeStyleSetWidthAutoJNI(mNativePointer);
}
public YogaValue getHeight() {
return valueFromLong(YogaNative.jni_YGNodeStyleGetHeightJNI(mNativePointer));
}
public void setHeight(float height) {
YogaNative.jni_YGNodeStyleSetHeightJNI(mNativePointer, height);
}
public void setHeightPercent(float percent) {
YogaNative.jni_YGNodeStyleSetHeightPercentJNI(mNativePointer, percent);
}
public void setHeightAuto() {
YogaNative.jni_YGNodeStyleSetHeightAutoJNI(mNativePointer);
}
public YogaValue getMinWidth() {
return valueFromLong(YogaNative.jni_YGNodeStyleGetMinWidthJNI(mNativePointer));
}
public void setMinWidth(float minWidth) {
YogaNative.jni_YGNodeStyleSetMinWidthJNI(mNativePointer, minWidth);
}
public void setMinWidthPercent(float percent) {
YogaNative.jni_YGNodeStyleSetMinWidthPercentJNI(mNativePointer, percent);
}
public YogaValue getMinHeight() {
return valueFromLong(YogaNative.jni_YGNodeStyleGetMinHeightJNI(mNativePointer));
}
public void setMinHeight(float minHeight) {
YogaNative.jni_YGNodeStyleSetMinHeightJNI(mNativePointer, minHeight);
}
public void setMinHeightPercent(float percent) {
YogaNative.jni_YGNodeStyleSetMinHeightPercentJNI(mNativePointer, percent);
}
public YogaValue getMaxWidth() {
return valueFromLong(YogaNative.jni_YGNodeStyleGetMaxWidthJNI(mNativePointer));
}
public void setMaxWidth(float maxWidth) {
YogaNative.jni_YGNodeStyleSetMaxWidthJNI(mNativePointer, maxWidth);
}
public void setMaxWidthPercent(float percent) {
YogaNative.jni_YGNodeStyleSetMaxWidthPercentJNI(mNativePointer, percent);
}
public YogaValue getMaxHeight() {
return valueFromLong(YogaNative.jni_YGNodeStyleGetMaxHeightJNI(mNativePointer));
}
public void setMaxHeight(float maxheight) {
YogaNative.jni_YGNodeStyleSetMaxHeightJNI(mNativePointer, maxheight);
}
public void setMaxHeightPercent(float percent) {
YogaNative.jni_YGNodeStyleSetMaxHeightPercentJNI(mNativePointer, percent);
}
public float getAspectRatio() {
return YogaNative.jni_YGNodeStyleGetAspectRatioJNI(mNativePointer);
}
public void setAspectRatio(float aspectRatio) {
YogaNative.jni_YGNodeStyleSetAspectRatioJNI(mNativePointer, aspectRatio);
}
public void setMeasureFunction(YogaMeasureFunction measureFunction) {
mMeasureFunction = measureFunction;
YogaNative.jni_YGNodeSetHasMeasureFuncJNI(mNativePointer, measureFunction != null);
}
// Implementation Note: Why this method needs to stay final
// We cache the jmethodid for this method in Yoga code. This means that even if a subclass
// were to override measure, we'd still call this implementation from layout code since the
// overriding method will have a different jmethodid. This is final to prevent that mistake.
@DoNotStrip
public final long measure(float width, int widthMode, float height, int heightMode) {
if (!isMeasureDefined()) {
throw new RuntimeException("Measure function isn't defined!");
}
return mMeasureFunction.measure(
this,
width,
YogaMeasureMode.fromInt(widthMode),
height,
YogaMeasureMode.fromInt(heightMode));
}
public void setBaselineFunction(YogaBaselineFunction baselineFunction) {
mBaselineFunction = baselineFunction;
YogaNative.jni_YGNodeSetHasBaselineFuncJNI(mNativePointer, baselineFunction != null);
}
@DoNotStrip
public final float baseline(float width, float height) {
return mBaselineFunction.baseline(this, width, height);
}
public boolean isMeasureDefined() {
return mMeasureFunction != null;
}
@Override
public boolean isBaselineDefined() {
return mBaselineFunction != null;
}
public void setData(Object data) {
mData = data;
}
@Override
public @Nullable Object getData() {
return mData;
}
/**
* Use the set logger (defaults to adb log) to print out the styles, children, and computed
* layout of the tree rooted at this node.
*/
public void print() {
YogaNative.jni_YGNodePrintJNI(mNativePointer);
}
/**
* Replaces the child at the given childIndex with the supplied newNode.
* This is different from calling removeChildAt and addChildAt because this method ONLY replaces
* the child in the mChildren data structure. @DoNotStrip: called from JNI.
*
* @return the nativePointer of the newNode {@link YogaNode}
*/
@DoNotStrip
private final long replaceChild(YogaNodeJNIBase newNode, int childIndex) {
if (mChildren == null) {
throw new IllegalStateException("Cannot replace child. YogaNode does not have children");
}
mChildren.remove(childIndex);
mChildren.add(childIndex, newNode);
newNode.mOwner = this;
return newNode.mNativePointer;
}
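// The native side packs a YogaValue into a single long: the low 32 bits hold the float bits
// of the value and the high 32 bits hold the unit ordinal.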
private static YogaValue valueFromLong(long raw) {
return new YogaValue(Float.intBitsToFloat((int) raw), (int) (raw >> 32));
}
@Override
public float getLayoutX() {
return arr != null ? arr[LAYOUT_LEFT_INDEX] : 0;
}
@Override
public float getLayoutY() {
return arr != null ? arr[LAYOUT_TOP_INDEX] : 0;
}
@Override
public float getLayoutWidth() {
return arr != null ? arr[LAYOUT_WIDTH_INDEX] : 0;
}
@Override
public float getLayoutHeight() {
return arr != null ? arr[LAYOUT_HEIGHT_INDEX] : 0;
}
public boolean getDoesLegacyStretchFlagAffectsLayout() {
return arr != null && (((int) arr[LAYOUT_EDGE_SET_FLAG_INDEX] & DOES_LEGACY_STRETCH_BEHAVIOUR) == DOES_LEGACY_STRETCH_BEHAVIOUR);
}
@Override
public float getLayoutMargin(YogaEdge edge) {
if (arr != null && ((int) arr[LAYOUT_EDGE_SET_FLAG_INDEX] & MARGIN) == MARGIN) {
switch (edge) {
case LEFT:
return arr[LAYOUT_MARGIN_START_INDEX];
case TOP:
return arr[LAYOUT_MARGIN_START_INDEX + 1];
case RIGHT:
return arr[LAYOUT_MARGIN_START_INDEX + 2];
case BOTTOM:
return arr[LAYOUT_MARGIN_START_INDEX + 3];
case START:
return getLayoutDirection() == YogaDirection.RTL ? arr[LAYOUT_MARGIN_START_INDEX + 2] : arr[LAYOUT_MARGIN_START_INDEX];
case END:
return getLayoutDirection() == YogaDirection.RTL ? arr[LAYOUT_MARGIN_START_INDEX] : arr[LAYOUT_MARGIN_START_INDEX + 2];
default:
throw new IllegalArgumentException("Cannot get layout margins of multi-edge shorthands");
}
} else {
return 0;
}
}
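// Margin, padding and border values are appended to 'arr' only when their flag bit is set, so
// the padding and border start indices below shift down by 4 for each group that is absent.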
@Override
public float getLayoutPadding(YogaEdge edge) {
if (arr != null && ((int) arr[LAYOUT_EDGE_SET_FLAG_INDEX] & PADDING) == PADDING) {
int paddingStartIndex = LAYOUT_PADDING_START_INDEX - ((((int)arr[LAYOUT_EDGE_SET_FLAG_INDEX] & MARGIN) == MARGIN) ? 0 : 4);
switch (edge) {
case LEFT:
return arr[paddingStartIndex];
case TOP:
return arr[paddingStartIndex + 1];
case RIGHT:
return arr[paddingStartIndex + 2];
case BOTTOM:
return arr[paddingStartIndex + 3];
case START:
return getLayoutDirection() == YogaDirection.RTL ? arr[paddingStartIndex + 2] : arr[paddingStartIndex];
case END:
return getLayoutDirection() == YogaDirection.RTL ? arr[paddingStartIndex] : arr[paddingStartIndex + 2];
default:
throw new IllegalArgumentException("Cannot get layout paddings of multi-edge shorthands");
}
} else {
return 0;
}
}
@Override
public float getLayoutBorder(YogaEdge edge) {
if (arr != null && ((int) arr[LAYOUT_EDGE_SET_FLAG_INDEX] & BORDER) == BORDER) {
int borderStartIndex = LAYOUT_BORDER_START_INDEX - ((((int) arr[LAYOUT_EDGE_SET_FLAG_INDEX] & MARGIN) == MARGIN) ? 0 : 4) - ((((int) arr[LAYOUT_EDGE_SET_FLAG_INDEX] & PADDING) == PADDING) ? 0 : 4);
switch (edge) {
case LEFT:
return arr[borderStartIndex];
case TOP:
return arr[borderStartIndex + 1];
case RIGHT:
return arr[borderStartIndex + 2];
case BOTTOM:
return arr[borderStartIndex + 3];
case START:
return getLayoutDirection() == YogaDirection.RTL ? arr[borderStartIndex + 2] : arr[borderStartIndex];
case END:
return getLayoutDirection() == YogaDirection.RTL ? arr[borderStartIndex] : arr[borderStartIndex + 2];
default:
throw new IllegalArgumentException("Cannot get layout border of multi-edge shorthands");
}
} else {
return 0;
}
}
@Override
public YogaDirection getLayoutDirection() {
return YogaDirection.fromInt(arr != null ? (int) arr[LAYOUT_DIRECTION_INDEX] : mLayoutDirection);
}
@Override
public boolean hasNewLayout() {
if (arr != null) {
return (((int) arr[LAYOUT_EDGE_SET_FLAG_INDEX]) & HAS_NEW_LAYOUT) == HAS_NEW_LAYOUT;
} else {
return mHasNewLayout;
}
}
@Override
public void markLayoutSeen() {
if (arr != null) {
arr[LAYOUT_EDGE_SET_FLAG_INDEX] = ((int) arr[LAYOUT_EDGE_SET_FLAG_INDEX]) & ~(HAS_NEW_LAYOUT);
}
mHasNewLayout = false;
}
}
|
package water;
import water.util.Log;
import java.util.Arrays;
/** Lockable Keys - Keys locked during long running {@link Job}s, to prevent
* overwriting in-use keys. E.g. model-building: expected to read-lock input
* {@link water.fvec.Frame}s, and write-lock the output {@link hex.Model}.
* Parser should write-lock the output Frame, to guard against double-parsing.
* This is a simple cooperative distributed locking scheme. Because we need
* <em>distributed</em> locking, the normal Java locks do not work. Because
* we cannot enforce good behavior, this is a <em>cooperative</em> scheme
* only.
*
* Supports: <ul>
* <li>lock-and-delete-old-and-update (for new Keys)</li>
* <li>lock-and-delete (for removing old Keys)</li>
* <li>unlock</li>
* </ul>
*
* @author <a href="mailto:cliffc@0xdata.com"></a>
* @version 1.0
*/
public abstract class Lockable<T extends Lockable<T>> extends Keyed<T> {
/** List of Job Keys locking this Key.
* <ul>
* <li>The write-locker job is in {@code _lockers[0]}; it may be null.</li>
* <li>Read -locker jobs are in {@code _lockers[1+]}.</li>
* <li>Unlocked has _lockers equal to null.</li>
* <li>Only 1 situation will be true at a time; atomically updated.</li>
* <li>Transient, because this data is only valid on the master node.</li>
* </ul>
*/
public transient Key _lockers[];
/** Create a Lockable object, if it has a {@link Key}. */
public Lockable( Key key ) { super(key); }
// Atomic create+overwrite of prior key.
// If prior key exists, block until acquire a write-lock.
// Then call remove, removing all of a prior key.
// Then replace this object as the new Lockable, still write-locked.
// "locker" can be null, meaning the special no-Job locker; for use by expected-fast operations
// Example: write-lock & remove an old Frame, and replace with a new locked Frame
// Local-Node Master-Node
// (1) new,old -->write_lock(job)--> old
// (2) new,old.waiting... new,old+job-locked atomic xtn loop
// (3) old.remove onSuccess
// (4) new <--update success <-- new+job-locked
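// Typical lifecycle sketch (MyModel stands in for a concrete Lockable subclass):
//   MyModel m = ...;
//   m.delete_and_lock(jobKey); // write-lock the key and remove any prior value
//   ...long-running work, optionally calling m.update(jobKey) to freshen while still locked...
//   m.unlock(jobKey);          // publish the final value and release the lock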
/** Write-lock {@code this._key} by {@code job_key}.
* Throws IAE if the Key is already locked.
* @return the old POJO mapped to this Key, generally for deletion. */
public Lockable write_lock( Key job_key ) {
Log.debug("write-lock "+_key+" by job "+job_key);
return ((PriorWriteLock)new PriorWriteLock(job_key).invoke(_key))._old;
}
/** Write-lock {@code this._key} by {@code job_key}, and delete any prior mapping.
* Throws IAE if the Key is already locked.
* @return self, locked by job_key */
public T delete_and_lock( Key job_key ) {
Lockable old = write_lock(job_key);
if( old != null ) {
Log.debug("lock-then-clear "+_key+" by job "+job_key);
old.remove_impl(new Futures()).blockForPending();
}
return (T)this;
}
/** Write-lock key and delete; blocking.
* Throws IAE if the key is already locked.
*/
public static void delete( Key key ) {
Value val = DKV.get(key);
if( val==null ) return;
((Lockable)val.get()).delete();
}
/** Write-lock 'this' and delete; blocking.
* Throws IAE if the _key is already locked.
*/
public void delete( ) { delete(null,new Futures()).blockForPending(); }
/** Write-lock 'this' and delete.
* Throws IAE if the _key is already locked.
*/
public Futures delete( Key job_key, Futures fs ) {
if( _key != null ) {
Log.debug("lock-then-delete "+_key+" by job "+job_key);
new PriorWriteLock(job_key).invoke(_key);
}
return remove(fs);
}
// Obtain the write-lock on _key, which may already exist, using the current 'this'.
private class PriorWriteLock extends TAtomic<Lockable> {
private final Key _job_key; // Job doing the locking
private Lockable _old; // Return the old thing, for deleting later
private PriorWriteLock( Key job_key ) { _job_key = job_key; }
@Override public Lockable atomic(Lockable old) {
_old = old;
if( old != null ) { // Prior Lockable exists?
assert !old.is_wlocked(_job_key) : "Key "+_key+" already locked; lks="+Arrays.toString(old._lockers); // No double locking by same job
if( old.is_locked(_job_key) ) // read-locked by self? (double-write-lock checked above)
old.set_unlocked(old._lockers,_job_key); // Remove read-lock; will atomically upgrade to write-lock
if( !old.is_unlocked() ) // Blocking for some other Job to finish???
throw new IllegalArgumentException(old.getClass()+" "+_key+" is already in use. Unable to use it now. Consider using a different destination name.");
}
// Update & set the new value
set_write_lock(_job_key);
return Lockable.this;
}
}
/** Atomically get a read-lock on Key k, preventing future deletes or updates */
public static void read_lock( Key k, Key job_key ) {
Value val = DKV.get(k);
if( val.isLockable() )
((Lockable)val.get()).read_lock(job_key); // Lockable being locked
}
/** Atomically get a read-lock on this, preventing future deletes or updates */
public void read_lock( Key job_key ) {
if( _key != null ) {
Log.debug("shared-read-lock "+_key+" by job "+job_key);
new ReadLock(job_key).invoke(_key);
}
}
// Obtain read-lock
static private class ReadLock extends TAtomic<Lockable> {
final Key _job_key; // Job doing the unlocking
ReadLock( Key job_key ) { _job_key = job_key; }
@Override public Lockable atomic(Lockable old) {
if( old == null ) throw new IllegalArgumentException("Nothing to lock!");
if( old.is_wlocked() )
throw new IllegalArgumentException( old.getClass()+" "+_key+" is being created; Unable to read it now.");
old.set_read_lock(_job_key);
return old;
}
}
/** Atomically set a new version of self, without changing the locking. Typically used
* to upgrade a write-locked Model to a newer version with more training iterations. */
public T update( Key job_key ) {
Log.debug("update write-locked "+_key+" by job "+job_key);
new Update(job_key).invoke(_key);
return (T)this; // Flow-coding
}
// Freshen 'this' and leave locked
private class Update extends TAtomic<Lockable> {
final Key _job_key; // Job doing the unlocking
Update( Key job_key ) { _job_key = job_key; }
@Override public Lockable atomic(Lockable old) {
assert old != null : "Cannot update - Lockable is null!";
assert old.is_wlocked() : "Cannot update - Lockable is not write-locked!";
_lockers = old._lockers; // Keep lock state
return Lockable.this; // Freshen this
}
}
/** Atomically set a new version of self and unlock. */
public void unlock( Key job_key ) {
if( _key != null ) {
Log.debug("unlock "+_key+" by job "+job_key);
new Unlock(job_key).invoke(_key);
}
}
// Freshen 'this' and unlock
private class Unlock extends TAtomic<Lockable> {
final Key _job_key; // Job doing the unlocking
Unlock( Key job_key ) { _job_key = job_key; }
@Override public Lockable atomic(Lockable old) {
assert old != null : "Trying to unlock null!";
assert old.is_locked(_job_key) : "Can't unlock: Not locked!";
set_unlocked(old._lockers,_job_key);
return Lockable.this;
}
}
// Accessors for locking state. Minimal self-checking; primitive results.
private boolean is_locked(Key job_key) {
if( _lockers==null ) return false;
for( int i=(_lockers.length==1?0:1); i<_lockers.length; i++ ) {
Key k = _lockers[i];
if( job_key==k || (job_key != null && k != null && job_key.equals(k)) ) return true;
}
return false;
}
private boolean is_wlocked() { return _lockers!=null && _lockers.length==1; }
private boolean is_wlocked(Key job_key) { return is_wlocked() && (_lockers[0] == job_key || (_lockers[0] != null && _lockers[0].equals(job_key))); }
private boolean is_unlocked() { return _lockers== null; }
private void set_write_lock( Key job_key ) {
_lockers=new Key[]{job_key};
assert is_locked(job_key);
}
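// Readers are appended after slot 0; when the lockers array is freshly created, slot 0 is left
// null to mark the absence of a write-locker.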
private void set_read_lock(Key job_key) {
assert !is_locked(job_key); // no double locking
assert !is_wlocked(); // not write locked
_lockers = _lockers == null ? new Key[2] : Arrays.copyOf(_lockers,_lockers.length+1);
_lockers[_lockers.length-1] = job_key;
assert is_locked(job_key);
}
private void set_unlocked(Key lks[], Key job_key) {
if( lks.length==1 ) { // Is write-locked?
assert job_key==lks[0] || job_key.equals(lks[0]);
_lockers = null; // Then unlocked
} else if( lks.length==2 ) { // One reader
assert lks[0]==null; // Not write-locked
assert lks[1]==job_key || (job_key != null && job_key.equals(lks[1]));
_lockers = null; // So unlocked
} else { // Else one of many readers
assert lks.length>2;
_lockers = Arrays.copyOf(lks,lks.length-1);
int j=1; // Skip the initial null slot
for( int i=1; i<lks.length; i++ )
if(job_key != null && !job_key.equals(lks[i]) || (job_key == null && lks[i] != null)){
_lockers[j++] = lks[i];
}
assert j==lks.length-1; // Was locked exactly once
}
assert !is_locked(job_key);
}
/** Force-unlock (break a lock); useful in some debug situations. */
//public static void unlock_lockable(final Key lockable, final Key job) {
// new DTask.DKeyTask<DTask.DKeyTask,Lockable>(null,lockable){
// @Override public void map(Lockable l) { l.unlock(job);}
// }.invokeTask();
/** Force-unlock (break a lock) all lockers; useful in some debug situations. */
public void unlock_all() {
if( _key != null )
for (Key k : _lockers) new UnlockSafe(k).invoke(_key);
}
private class UnlockSafe extends TAtomic<Lockable> {
final Key _job_key; // potential job doing the unlocking
UnlockSafe( Key job_key ) { _job_key = job_key; }
@Override public Lockable atomic(Lockable old) {
if (old.is_locked(_job_key))
set_unlocked(old._lockers,_job_key);
return Lockable.this;
}
}
/** Pretty string when locking fails */
//protected abstract String errStr();
}
|
package org.jfree.chart.plot;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.geom.Arc2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.Line2D;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.text.DecimalFormat;
import java.text.Format;
import org.jfree.chart.entity.EntityCollection;
import org.jfree.chart.entity.PieSectionEntity;
import org.jfree.chart.event.PlotChangeEvent;
import org.jfree.chart.labels.PieToolTipGenerator;
import org.jfree.chart.urls.PieURLGenerator;
import org.jfree.chart.util.LineUtilities;
import org.jfree.chart.util.ParamChecks;
import org.jfree.data.general.PieDataset;
import org.jfree.io.SerialUtilities;
import org.jfree.text.TextUtilities;
import org.jfree.ui.RectangleInsets;
import org.jfree.ui.TextAnchor;
import org.jfree.util.ObjectUtilities;
import org.jfree.util.PaintUtilities;
import org.jfree.util.Rotation;
import org.jfree.util.ShapeUtilities;
import org.jfree.util.UnitType;
/**
* A customised pie plot that leaves a hole in the middle.
*/
public class RingPlot extends PiePlot implements Cloneable, Serializable {
/** For serialization. */
private static final long serialVersionUID = 1556064784129676620L;
/** The center text mode. */
private CenterTextMode centerTextMode = CenterTextMode.NONE;
/**
* Text to display in the middle of the chart (used for
* CenterTextMode.FIXED).
*/
private String centerText;
/**
* The formatter used when displaying the first data value from the
* dataset (CenterTextMode.VALUE).
*/
private Format centerTextFormatter = new DecimalFormat("0.00");
/** The font used to display the center text. */
private Font centerTextFont;
/** The color used to display the center text. */
private Color centerTextColor;
/**
* A flag that controls whether or not separators are drawn between the
* sections of the chart.
*/
private boolean separatorsVisible;
/** The stroke used to draw separators. */
private transient Stroke separatorStroke;
/** The paint used to draw separators. */
private transient Paint separatorPaint;
/**
* The length of the inner separator extension (as a percentage of the
* depth of the sections).
*/
private double innerSeparatorExtension;
/**
* The length of the outer separator extension (as a percentage of the
* depth of the sections).
*/
private double outerSeparatorExtension;
/**
* The depth of the section as a percentage of the diameter.
*/
private double sectionDepth;
/**
* Creates a new plot with a <code>null</code> dataset.
*/
public RingPlot() {
this(null);
}
/**
* Creates a new plot for the specified dataset.
*
* @param dataset the dataset (<code>null</code> permitted).
*/
public RingPlot(PieDataset dataset) {
super(dataset);
this.centerTextMode = CenterTextMode.NONE;
this.centerText = null;
this.centerTextFormatter = new DecimalFormat("0.00");
this.centerTextFont = DEFAULT_LABEL_FONT;
this.centerTextColor = Color.BLACK;
this.separatorsVisible = true;
this.separatorStroke = new BasicStroke(0.5f);
this.separatorPaint = Color.gray;
this.innerSeparatorExtension = 0.20;
this.outerSeparatorExtension = 0.20;
this.sectionDepth = 0.20;
}
/**
* Returns the mode for displaying text in the center of the plot. The
* default value is {@link CenterTextMode#NONE}, so no text is
* displayed by default.
*
* @return The mode (never <code>null</code>).
*
* @since 1.0.18
*/
public CenterTextMode getCenterTextMode() {
return this.centerTextMode;
}
/**
* Sets the mode for displaying text in the center of the plot and sends
* a change event to all registered listeners. For
* {@link CenterTextMode#FIXED}, the display text will come from the
* <code>centerText</code> attribute (see {@link #getCenterText()}).
* For {@link CenterTextMode#VALUE}, the center text will be the value from
* the first section in the dataset.
*
* @param mode the mode (<code>null</code> not permitted).
*
* @since 1.0.18
*/
public void setCenterTextMode(CenterTextMode mode) {
ParamChecks.nullNotPermitted(mode, "mode");
this.centerTextMode = mode;
fireChangeEvent();
}
/**
* Returns the text to display in the center of the plot when the mode
* is {@link CenterTextMode#FIXED}.
*
* @return The text (possibly <code>null</code>).
*
* @since 1.0.18
*/
public String getCenterText() {
return this.centerText;
}
/**
* Sets the text to display in the center of the plot and sends a
* change event to all registered listeners. If the text is set to
* <code>null</code>, no text will be displayed.
*
* @param text the text (<code>null</code> permitted).
*
* @since 1.0.18
*/
public void setCenterText(String text) {
this.centerText = text;
fireChangeEvent();
}
/**
* Returns the formatter used to format the center text value for the mode
* {@link CenterTextMode#VALUE}. The default value is
* <code>DecimalFormat("0.00");</code>.
*
* @return The formatter (never <code>null</code>).
*
* @since 1.0.18
*/
public Format getCenterTextFormatter() {
return this.centerTextFormatter;
}
/**
* Sets the formatter used to format the center text value and sends a
* change event to all registered listeners.
*
* @param formatter the formatter (<code>null</code> not permitted).
*
* @since 1.0.18
*/
public void setCenterTextFormatter(Format formatter) {
    ParamChecks.nullNotPermitted(formatter, "formatter");
    this.centerTextFormatter = formatter;
    fireChangeEvent();
}
/**
* Returns the font used to display the center text. The default value
* is {@link PiePlot#DEFAULT_LABEL_FONT}.
*
* @return The font (never <code>null</code>).
*
* @since 1.0.18
*/
public Font getCenterTextFont() {
return this.centerTextFont;
}
/**
* Sets the font used to display the center text and sends a change event
* to all registered listeners.
*
* @param font the font (<code>null</code> not permitted).
*
* @since 1.0.18
*/
public void setCenterTextFont(Font font) {
ParamChecks.nullNotPermitted(font, "font");
this.centerTextFont = font;
fireChangeEvent();
}
/**
* Returns the color for the center text. The default value is
* <code>Color.BLACK</code>.
*
* @return The color (never <code>null</code>).
*
* @since 1.0.18
*/
public Color getCenterTextColor() {
return this.centerTextColor;
}
/**
* Sets the color for the center text and sends a change event to all
* registered listeners.
*
* @param color the color (<code>null</code> not permitted).
*
* @since 1.0.18
*/
public void setCenterTextColor(Color color) {
ParamChecks.nullNotPermitted(color, "color");
this.centerTextColor = color;
fireChangeEvent();
}
/**
* Returns a flag that indicates whether or not separators are drawn between
* the sections in the chart.
*
* @return A boolean.
*
* @see #setSeparatorsVisible(boolean)
*/
public boolean getSeparatorsVisible() {
return this.separatorsVisible;
}
/**
* Sets the flag that controls whether or not separators are drawn between
* the sections in the chart, and sends a {@link PlotChangeEvent} to all
* registered listeners.
*
* @param visible the flag.
*
* @see #getSeparatorsVisible()
*/
public void setSeparatorsVisible(boolean visible) {
this.separatorsVisible = visible;
fireChangeEvent();
}
/**
* Returns the separator stroke.
*
* @return The stroke (never <code>null</code>).
*
* @see #setSeparatorStroke(Stroke)
*/
public Stroke getSeparatorStroke() {
return this.separatorStroke;
}
/**
* Sets the stroke used to draw the separator between sections and sends
* a {@link PlotChangeEvent} to all registered listeners.
*
* @param stroke the stroke (<code>null</code> not permitted).
*
* @see #getSeparatorStroke()
*/
public void setSeparatorStroke(Stroke stroke) {
ParamChecks.nullNotPermitted(stroke, "stroke");
this.separatorStroke = stroke;
fireChangeEvent();
}
/**
* Returns the separator paint.
*
* @return The paint (never <code>null</code>).
*
* @see #setSeparatorPaint(Paint)
*/
public Paint getSeparatorPaint() {
return this.separatorPaint;
}
/**
* Sets the paint used to draw the separator between sections and sends a
* {@link PlotChangeEvent} to all registered listeners.
*
* @param paint the paint (<code>null</code> not permitted).
*
* @see #getSeparatorPaint()
*/
public void setSeparatorPaint(Paint paint) {
ParamChecks.nullNotPermitted(paint, "paint");
this.separatorPaint = paint;
fireChangeEvent();
}
/**
* Returns the length of the inner extension of the separator line that
* is drawn between sections, expressed as a percentage of the depth of
* the section.
*
* @return The inner separator extension (as a percentage).
*
* @see #setInnerSeparatorExtension(double)
*/
public double getInnerSeparatorExtension() {
return this.innerSeparatorExtension;
}
/**
* Sets the length of the inner extension of the separator line that is
* drawn between sections, as a percentage of the depth of the
* sections, and sends a {@link PlotChangeEvent} to all registered
* listeners.
*
* @param percent the percentage.
*
* @see #getInnerSeparatorExtension()
* @see #setOuterSeparatorExtension(double)
*/
public void setInnerSeparatorExtension(double percent) {
this.innerSeparatorExtension = percent;
fireChangeEvent();
}
/**
* Returns the length of the outer extension of the separator line that
* is drawn between sections, expressed as a percentage of the depth of
* the section.
*
* @return The outer separator extension (as a percentage).
*
* @see #setOuterSeparatorExtension(double)
*/
public double getOuterSeparatorExtension() {
return this.outerSeparatorExtension;
}
/**
* Sets the length of the outer extension of the separator line that is
* drawn between sections, as a percentage of the depth of the
* sections, and sends a {@link PlotChangeEvent} to all registered
* listeners.
*
* @param percent the percentage.
*
* @see #getOuterSeparatorExtension()
*/
public void setOuterSeparatorExtension(double percent) {
this.outerSeparatorExtension = percent;
fireChangeEvent();
}
/**
* Returns the depth of each section, expressed as a percentage of the
* plot radius.
*
* @return The depth of each section.
*
* @see #setSectionDepth(double)
* @since 1.0.3
*/
public double getSectionDepth() {
return this.sectionDepth;
}
/**
* Sets the section depth and sends a {@link PlotChangeEvent} to all
* registered listeners. The section depth is given as a percentage of the
* plot radius; specifying 1.0 results in a straightforward pie chart.
*
* @param sectionDepth the section depth.
*
* @see #getSectionDepth()
* @since 1.0.3
*/
public void setSectionDepth(double sectionDepth) {
this.sectionDepth = sectionDepth;
fireChangeEvent();
}
/**
* Initialises the plot state (which will store the total of all dataset
* values, among other things). This method is called once at the
* beginning of each drawing.
*
* @param g2 the graphics device.
* @param plotArea the plot area (<code>null</code> not permitted).
* @param plot the plot.
* @param index the secondary index (<code>null</code> for primary
* renderer).
* @param info collects chart rendering information for return to caller.
*
* @return A state object (maintains state information relevant to one
* chart drawing).
*/
@Override
public PiePlotState initialise(Graphics2D g2, Rectangle2D plotArea,
PiePlot plot, Integer index, PlotRenderingInfo info) {
PiePlotState state = super.initialise(g2, plotArea, plot, index, info);
state.setPassesRequired(3);
return state;
}
/**
* Draws a single data item.
*
* @param g2 the graphics device (<code>null</code> not permitted).
* @param section the section index.
* @param dataArea the data plot area.
* @param state state information for one chart.
* @param currentPass the current pass index.
*/
@Override
protected void drawItem(Graphics2D g2, int section, Rectangle2D dataArea,
PiePlotState state, int currentPass) {
PieDataset dataset = getDataset();
Number n = dataset.getValue(section);
if (n == null) {
return;
}
double value = n.doubleValue();
double angle1 = 0.0;
double angle2 = 0.0;
Rotation direction = getDirection();
if (direction == Rotation.CLOCKWISE) {
angle1 = state.getLatestAngle();
angle2 = angle1 - value / state.getTotal() * 360.0;
}
else if (direction == Rotation.ANTICLOCKWISE) {
angle1 = state.getLatestAngle();
angle2 = angle1 + value / state.getTotal() * 360.0;
}
else {
throw new IllegalStateException("Rotation type not recognised.");
}
double angle = (angle2 - angle1);
if (Math.abs(angle) > getMinimumArcAngleToDraw()) {
Comparable key = getSectionKey(section);
double ep = 0.0;
double mep = getMaximumExplodePercent();
if (mep > 0.0) {
ep = getExplodePercent(key) / mep;
}
Rectangle2D arcBounds = getArcBounds(state.getPieArea(),
state.getExplodedPieArea(), angle1, angle, ep);
Arc2D.Double arc = new Arc2D.Double(arcBounds, angle1, angle,
Arc2D.OPEN);
// create the bounds for the inner arc
double depth = this.sectionDepth / 2.0;
RectangleInsets s = new RectangleInsets(UnitType.RELATIVE,
depth, depth, depth, depth);
Rectangle2D innerArcBounds = new Rectangle2D.Double();
innerArcBounds.setRect(arcBounds);
s.trim(innerArcBounds);
// calculate inner arc in reverse direction, for later
// GeneralPath construction
Arc2D.Double arc2 = new Arc2D.Double(innerArcBounds, angle1
+ angle, -angle, Arc2D.OPEN);
GeneralPath path = new GeneralPath();
path.moveTo((float) arc.getStartPoint().getX(),
(float) arc.getStartPoint().getY());
path.append(arc.getPathIterator(null), false);
path.append(arc2.getPathIterator(null), true);
path.closePath();
Line2D separator = new Line2D.Double(arc2.getEndPoint(),
arc.getStartPoint());
if (currentPass == 0) {
Paint shadowPaint = getShadowPaint();
double shadowXOffset = getShadowXOffset();
double shadowYOffset = getShadowYOffset();
if (shadowPaint != null && getShadowGenerator() == null) {
Shape shadowArc = ShapeUtilities.createTranslatedShape(
path, (float) shadowXOffset, (float) shadowYOffset);
g2.setPaint(shadowPaint);
g2.fill(shadowArc);
}
}
else if (currentPass == 1) {
Paint paint = lookupSectionPaint(key);
g2.setPaint(paint);
g2.fill(path);
Paint outlinePaint = lookupSectionOutlinePaint(key);
Stroke outlineStroke = lookupSectionOutlineStroke(key);
if (getSectionOutlinesVisible() && outlinePaint != null
&& outlineStroke != null) {
g2.setPaint(outlinePaint);
g2.setStroke(outlineStroke);
g2.draw(path);
}
if (section == 0) {
String nstr = null;
if (this.centerTextMode.equals(CenterTextMode.VALUE)) {
nstr = this.centerTextFormatter.format(n);
} else if (this.centerTextMode.equals(CenterTextMode.FIXED)) {
nstr = this.centerText;
}
if (nstr != null) {
g2.setFont(this.centerTextFont);
g2.setPaint(this.centerTextColor);
TextUtilities.drawAlignedString(nstr, g2,
(float) dataArea.getCenterX(),
(float) dataArea.getCenterY(),
TextAnchor.CENTER);
}
}
// add an entity for the pie section
if (state.getInfo() != null) {
EntityCollection entities = state.getEntityCollection();
if (entities != null) {
String tip = null;
PieToolTipGenerator toolTipGenerator
= getToolTipGenerator();
if (toolTipGenerator != null) {
tip = toolTipGenerator.generateToolTip(dataset,
key);
}
String url = null;
PieURLGenerator urlGenerator = getURLGenerator();
if (urlGenerator != null) {
url = urlGenerator.generateURL(dataset, key,
getPieIndex());
}
PieSectionEntity entity = new PieSectionEntity(path,
dataset, getPieIndex(), section, key, tip,
url);
entities.add(entity);
}
}
}
else if (currentPass == 2) {
if (this.separatorsVisible) {
Line2D extendedSeparator = LineUtilities.extendLine(
separator, this.innerSeparatorExtension,
this.outerSeparatorExtension);
g2.setStroke(this.separatorStroke);
g2.setPaint(this.separatorPaint);
g2.draw(extendedSeparator);
}
}
}
state.setLatestAngle(angle2);
}
/**
* This method overrides the default value for cases where the ring plot
* is very thin. This fixes bug 2121818.
*
* @return The label link depth, as a percentage of the plot's radius.
*/
@Override
protected double getLabelLinkDepth() {
return Math.min(super.getLabelLinkDepth(), getSectionDepth() / 2);
}
/**
* Tests this plot for equality with an arbitrary object.
*
* @param obj the object to test against (<code>null</code> permitted).
*
* @return A boolean.
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof RingPlot)) {
return false;
}
RingPlot that = (RingPlot) obj;
if (!this.centerTextMode.equals(that.centerTextMode)) {
return false;
}
if (!ObjectUtilities.equal(this.centerText, that.centerText)) {
return false;
}
if (!this.centerTextFormatter.equals(that.centerTextFormatter)) {
return false;
}
if (!this.centerTextFont.equals(that.centerTextFont)) {
return false;
}
if (!this.centerTextColor.equals(that.centerTextColor)) {
return false;
}
if (this.separatorsVisible != that.separatorsVisible) {
return false;
}
if (!ObjectUtilities.equal(this.separatorStroke,
that.separatorStroke)) {
return false;
}
if (!PaintUtilities.equal(this.separatorPaint, that.separatorPaint)) {
return false;
}
if (this.innerSeparatorExtension != that.innerSeparatorExtension) {
return false;
}
if (this.outerSeparatorExtension != that.outerSeparatorExtension) {
return false;
}
if (this.sectionDepth != that.sectionDepth) {
return false;
}
return super.equals(obj);
}
/**
* Provides serialization support.
*
* @param stream the output stream.
*
* @throws IOException if there is an I/O error.
*/
private void writeObject(ObjectOutputStream stream) throws IOException {
stream.defaultWriteObject();
SerialUtilities.writeStroke(this.separatorStroke, stream);
SerialUtilities.writePaint(this.separatorPaint, stream);
}
/**
* Provides serialization support.
*
* @param stream the input stream.
*
* @throws IOException if there is an I/O error.
* @throws ClassNotFoundException if there is a classpath problem.
*/
private void readObject(ObjectInputStream stream)
throws IOException, ClassNotFoundException {
stream.defaultReadObject();
this.separatorStroke = SerialUtilities.readStroke(stream);
this.separatorPaint = SerialUtilities.readPaint(stream);
}
}
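// Usage sketch (assumes the standard JFreeChart companions DefaultPieDataset and JFreeChart):
//   DefaultPieDataset dataset = new DefaultPieDataset();
//   dataset.setValue("Used", 65.0);
//   dataset.setValue("Free", 35.0);
//   RingPlot plot = new RingPlot(dataset);
//   plot.setCenterTextMode(CenterTextMode.VALUE); // show the first section's formatted value in the hole
//   plot.setSectionDepth(0.35);                   // ring thickness as a fraction of the radius
//   JFreeChart chart = new JFreeChart("Disk usage", plot);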
|
package com.worth.ifs.application;
import com.worth.ifs.application.constant.ApplicationStatusConstants;
import com.worth.ifs.application.domain.Question;
import com.worth.ifs.application.domain.Section;
import com.worth.ifs.application.form.ApplicationForm;
import com.worth.ifs.application.resource.ApplicationResource;
import com.worth.ifs.application.resource.QuestionStatusResource;
import com.worth.ifs.competition.domain.Competition;
import com.worth.ifs.form.domain.FormInputResponse;
import com.worth.ifs.profiling.ProfileExecution;
import com.worth.ifs.user.domain.Organisation;
import com.worth.ifs.user.domain.User;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
/**
* This controller will handle all requests that are related to the application overview.
* Application overview is the page that contains the most basic information about the current application and
* the basic information about the competition the application is related to.
*/
@Controller
@RequestMapping("/application")
public class ApplicationController extends AbstractApplicationController {
private final Log log = LogFactory.getLog(getClass());
private boolean selectFirstSectionIfNoneCurrentlySelected = false;
public static String redirectToApplication(ApplicationResource application){
return "redirect:/application/"+application.getId();
}
@ProfileExecution
@RequestMapping("/{applicationId}")
public String applicationDetails(ApplicationForm form, Model model, @PathVariable("applicationId") final Long applicationId,
HttpServletRequest request){
User user = userAuthenticationService.getAuthenticatedUser(request);
ApplicationResource application = applicationService.getById(applicationId);
Competition competition = competitionService.getById(application.getCompetition());
addApplicationAndSections(application, competition, user.getId(), Optional.empty(), Optional.empty(), model, form);
return "application-details";
}
@ProfileExecution
@RequestMapping("/{applicationId}/section/{sectionId}")
public String applicationDetailsOpenSection(ApplicationForm form, Model model,
@PathVariable("applicationId") final Long applicationId,
@PathVariable("sectionId") final Long sectionId,
HttpServletRequest request){
User user = userAuthenticationService.getAuthenticatedUser(request);
ApplicationResource application = applicationService.getById(applicationId);
Section section = sectionService.getById(sectionId);
Competition competition = competitionService.getById(application.getCompetition());
addApplicationAndSections(application, competition, user.getId(), Optional.ofNullable(section), Optional.empty(), model, form);
addOrganisationAndUserFinanceDetails(application, user.getId(), model, form);
return "application-details";
}
@ProfileExecution
@RequestMapping(value = "/{applicationId}/summary", method = RequestMethod.GET)
public String applicationSummary(@ModelAttribute("form") ApplicationForm form, Model model, @PathVariable("applicationId") final Long applicationId,
HttpServletRequest request) {
List<FormInputResponse> responses = formInputResponseService.getByApplication(applicationId);
model.addAttribute("incompletedSections", sectionService.getInCompleted(applicationId));
model.addAttribute("responses", formInputResponseService.mapFormInputResponsesToFormInput(responses));
User user = userAuthenticationService.getAuthenticatedUser(request);
ApplicationResource application = applicationService.getById(applicationId);
Competition competition = competitionService.getById(application.getCompetition());
addApplicationAndSections(application, competition, user.getId(), Optional.empty(), Optional.empty(), model, form);
addOrganisationAndUserFinanceDetails(application, user.getId(), model, form);
return "application-summary";
}
@ProfileExecution
@RequestMapping(value = "/{applicationId}/summary", method = RequestMethod.POST)
public String applicationSummarySubmit(@RequestParam("mark_as_complete") Long markQuestionCompleteId, Model model, @PathVariable("applicationId") final Long applicationId,
HttpServletRequest request) {
User user = userAuthenticationService.getAuthenticatedUser(request);
if(markQuestionCompleteId!=null) {
questionService.markAsComplete(markQuestionCompleteId, applicationId, user.getId());
}
return "redirect:/application/" + applicationId + "/summary";
}
@ProfileExecution
@RequestMapping("/{applicationId}/confirm-submit")
public String applicationConfirmSubmit(ApplicationForm form, Model model, @PathVariable("applicationId") final Long applicationId,
HttpServletRequest request){
User user = userAuthenticationService.getAuthenticatedUser(request);
ApplicationResource application = applicationService.getById(applicationId);
Competition competition = competitionService.getById(application.getCompetition());
addApplicationAndSections(application, competition, user.getId(), Optional.empty(), Optional.empty(), model, form);
return "application-confirm-submit";
}
@RequestMapping("/{applicationId}/submit")
public String applicationSubmit(ApplicationForm form, Model model, @PathVariable("applicationId") final Long applicationId,
HttpServletRequest request){
User user = userAuthenticationService.getAuthenticatedUser(request);
applicationService.updateStatus(applicationId, ApplicationStatusConstants.SUBMITTED.getId());
ApplicationResource application = applicationService.getById(applicationId);
Competition competition = competitionService.getById(application.getCompetition());
addApplicationAndSections(application, competition, user.getId(), Optional.empty(), Optional.empty(), model, form);
return "application-submitted";
}
@ProfileExecution
@RequestMapping("/{applicationId}/track")
public String applicationTrack(ApplicationForm form, Model model, @PathVariable("applicationId") final Long applicationId,
HttpServletRequest request){
User user = userAuthenticationService.getAuthenticatedUser(request);
ApplicationResource application = applicationService.getById(applicationId);
Competition competition = competitionService.getById(application.getCompetition());
addApplicationAndSections(application, competition, user.getId(), Optional.empty(), Optional.empty(), model, form);
return "application-track";
}
@ProfileExecution
@RequestMapping("/create/{competitionId}")
public String applicationCreatePage(Model model, @PathVariable("competitionId") final Long competitionId, HttpServletRequest request){
return "application-create";
}
@ProfileExecution
@RequestMapping(value = "/create/{competitionId}", method = RequestMethod.POST)
public String applicationCreate(Model model,
@PathVariable("competitionId") final Long competitionId,
@RequestParam(value = "application_name", required = true) String applicationName,
HttpServletRequest request){
Long userId = userAuthenticationService.getAuthenticatedUser(request).getId();
String applicationNameWithoutWhiteSpace= applicationName.replaceAll("\\s","");
if(applicationNameWithoutWhiteSpace.length() > 0) {
ApplicationResource application = applicationService.createApplication(competitionId, userId, applicationName);
return "redirect:/application/"+application.getId();
}
else {
model.addAttribute("applicationNameEmpty", true);
return "application-create";
}
}
@ProfileExecution
@RequestMapping(value = "/create-confirm-competition")
public String competitionCreateApplication(Model model, HttpServletRequest request){
return "application-create-confirm-competition";
}
/**
* This method handles the post request when the user clicks the input[type=submit] button.
* This is also used when the user clicks the 'mark-as-complete' button or reassigns a question to another user.
*/
@ProfileExecution
@RequestMapping(value = "/{applicationId}/section/{sectionId}", params= {"singleFragment=true"}, method = RequestMethod.POST)
public String assignQuestionAndReturnSectionFragmentIndividualSection(ApplicationForm form, Model model,
@PathVariable("applicationId") final Long applicationId,
@RequestParam("sectionId") final Long sectionId,
HttpServletRequest request, HttpServletResponse response){
return doAssignQuestionAndReturnSectionFragment(model, applicationId, sectionId, request, response, form);
}
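// Illustrative only: the kind of request these fragment endpoints serve (the URL shape
// follows the mappings above; the numeric ids are made up), e.g.
//   POST /application/42/section/7?singleFragment=true
// They return the "application/single-section-details" fragment so the client can refresh
// just the affected section instead of reloading the whole page.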
/**
* This method handles the post request when the user clicks the input[type=submit] button.
* This is also used when the user clicks the 'mark-as-complete' button or reassigns a question to another user.
*/
@ProfileExecution
@RequestMapping(value = "/{applicationId}", params = {"singleFragment=true"}, method = RequestMethod.POST)
public String assignQuestionAndReturnSectionFragment(ApplicationForm form, Model model,
@PathVariable("applicationId") final Long applicationId,
@RequestParam("sectionId") final Long sectionId,
HttpServletRequest request, HttpServletResponse response){
return doAssignQuestionAndReturnSectionFragment(model, applicationId, sectionId, request, response, form);
}
private String doAssignQuestionAndReturnSectionFragment(Model model, @PathVariable("applicationId") Long applicationId, @RequestParam("sectionId") Long sectionId, HttpServletRequest request, HttpServletResponse response, ApplicationForm form) {
doAssignQuestion(applicationId, request, response);
ApplicationResource application = applicationService.getById(applicationId);
User user = userAuthenticationService.getAuthenticatedUser(request);
Competition competition = competitionService.getById(application.getCompetition());
Optional<Section> currentSection = getSection(competition.getSections(), Optional.of(sectionId), true);
//super.addApplicationAndSectionsAndFinanceDetails(applicationId, user.getId(), currentSection, Optional.empty(), model, form, selectFirstSectionIfNoneCurrentlySelected);
super.addApplicationAndSections(application, competition, user.getId(), Optional.empty(), Optional.empty(), model, form);
super.addOrganisationAndUserFinanceDetails(application, user.getId(), model, form);
Long questionId = extractQuestionProcessRoleIdFromAssignSubmit(request);
Question question = currentSection.get().getQuestions().stream().filter(q -> q.getId().equals(questionId)).findFirst().get();
model.addAttribute("question", question);
Organisation userOrganisation = organisationService.getUserOrganisation(application, user.getId()).get();
Map<Long, QuestionStatusResource> questionAssignees = questionService.getQuestionStatusesForApplicationAndOrganisation(applicationId, userOrganisation.getId());
QuestionStatusResource questionAssignee = questionAssignees.get(questionId);
model.addAttribute("questionAssignee", questionAssignee);
model.addAttribute("currentUser", user);
model.addAttribute("section", currentSection.get());
return "application/single-section-details";
}
/**
* Assigns a question to a user.
*
* @param model the model for the section view
* @param applicationId the application in which the question is assigned
* @param sectionId the section to redirect back to
* @param request the request carrying the assignment parameters
* @param response the response used to set the flash message cookie
* @return a redirect to the section page of the application
*/
@ProfileExecution
@RequestMapping(value = "/{applicationId}/section/{sectionId}", method = RequestMethod.POST)
public String assignQuestion(Model model,
@PathVariable("applicationId") final Long applicationId,
@PathVariable("sectionId") final Long sectionId,
HttpServletRequest request,
HttpServletResponse response){
doAssignQuestion(applicationId, request, response);
return "redirect:/application/" + applicationId + "/section/" +sectionId;
}
private void doAssignQuestion(@PathVariable("applicationId") Long applicationId, HttpServletRequest request, HttpServletResponse response) {
assignQuestion(request, applicationId);
cookieFlashMessageFilter.setFlashMessage(response, "assignedQuestion");
}
}
|
package com.alexaut.kroniax.game;
import com.alexaut.kroniax.Application;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
public class GameHUD {
private float mTime;
private int mTries;
private BitmapFont mFont;
private OrthographicCamera mCamera;
public GameHUD(Application app) {
mTime = 0;
mTries = 0;
mFont = app.getGuiSkin().getFont("default-font");
mCamera = new OrthographicCamera(1280, 720);
mCamera.position.set(640, 360, 0);
mCamera.update();
}
public void addDeath() {
mTries++;
}
public void update(float deltaTime) {
mTime += deltaTime;
}
public void render(SpriteBatch spriteBatch) {
spriteBatch.setProjectionMatrix(mCamera.combined);
spriteBatch.enableBlending();
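// (int)(mTime * 10.f) / 10.f truncates the elapsed time to one decimal place, e.g. 12.37 -> 12.3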
mFont.draw(spriteBatch, "Time: " + (int)(mTime * 10.f) / 10.f, 5, 30);
mFont.draw(spriteBatch, "Deaths: " + mTries, 5, 60);
spriteBatch.disableBlending();
}
}
|
package hudson.tasks;
import hudson.Launcher;
import hudson.model.Action;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Project;
import hudson.tasks.junit.JUnitResultArchiver;
import java.util.List;
import java.util.ArrayList;
import java.util.Collection;
import java.io.IOException;
/**
* One step of the whole build process.
*
* @author Kohsuke Kawaguchi
*/
public interface BuildStep {
/**
* Runs before the build begins.
*
* @return
* true if the build can continue, false if there was an error
* and the build needs to be aborted.
*/
boolean prebuild( Build build, BuildListener listener );
/**
* Runs the step over the given build and reports the progress to the listener.
*
* @return
* true if the build can continue, false if there was an error
* and the build needs to be aborted.
*
* @throws InterruptedException
* If the build is interrupted by the user (in an attempt to abort the build.)
* Normally the {@link BuildStep} implementations may simply forward the exception
* it got from its lower-level functions.
* @throws IOException
* If the implementation wants to abort the processing when an {@link IOException}
* happens, it can simply propagate the exception to the caller. This will cause
* the build to fail, with the default error message.
* Implementations are encouraged to catch {@link IOException} themselves and
* provide a better error message where they can, so that users get a clearer
* picture of why the build failed.
*/
boolean perform(Build build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException;
/**
* Returns an action object if this {@link BuildStep} has an action
* to contribute to a {@link Project}.
*
* <p>
* {@link Project} calls this method for every {@link BuildStep} that
* it owns when the rendering is requested.
*
* @param project
* {@link Project} that owns this build step,
* since a {@link BuildStep} object doesn't usually keep this "parent" pointer.
*
* @return
* null if there's no action to be contributed.
*/
Action getProjectAction(Project project);
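// A minimal sketch of a builder implementing this contract (illustrative only; it is not
// one of the descriptors registered in BUILDERS below, and it assumes BuildListener
// exposes a getLogger() print stream):
//
// class EchoBuildStep implements BuildStep {
// public boolean prebuild(Build build, BuildListener listener) {
// return true; // nothing to prepare, let the build continue
// }
// public boolean perform(Build build, Launcher launcher, BuildListener listener) {
// listener.getLogger().println("echo step ran");
// return true; // report success so the build continues
// }
// public Action getProjectAction(Project project) {
// return null; // contributes no project-level action
// }
// }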
/**
* List of all installed builders.
*
* Builders are invoked to perform the build itself.
*/
public static final List<Descriptor<Builder>> BUILDERS = Descriptor.toList(
Shell.DESCRIPTOR,
BatchFile.DESCRIPTOR,
Ant.DESCRIPTOR,
Maven.DESCRIPTOR
);
/**
* List of all installed publishers.
*
* Publishers are invoked after the build is completed, normally to perform
* some post-actions on build results, such as sending notifications, collecting
* results, etc.
*
* @see PublisherList#addNotifier(Descriptor)
* @see PublisherList#addRecorder(Descriptor)
*/
public static final PublisherList PUBLISHERS = new PublisherList(Descriptor.toList(
ArtifactArchiver.DESCRIPTOR,
Fingerprinter.DESCRIPTOR,
JavadocArchiver.DESCRIPTOR,
JUnitResultArchiver.DescriptorImpl.DESCRIPTOR,
BuildTrigger.DESCRIPTOR,
Mailer.DESCRIPTOR
));
/**
* List of publisher descriptor.
*/
public static final class PublisherList extends ArrayList<Descriptor<Publisher>> {
public PublisherList(Collection<? extends Descriptor<Publisher>> c) {
super(c);
}
/**
* Adds a new publisher descriptor, which (generally speaking)
* shouldn't alter the build result, but just report the build result
* by some means, such as e-mail, IRC, etc.
*
* <p>
* This method adds the descriptor after all the "recorders".
*
* @see #addRecorder(Descriptor)
*/
public void addNotifier( Descriptor<Publisher> d ) {
add(d);
}
/**
* Adds a new publisher descriptor, which (generally speaking)
* alters the build result based on some artifacts of the build.
*
* <p>
* This method adds the descriptor before all the "notifiers".
*
* @see #addNotifier(Descriptor)
*/
public void addRecorder( Descriptor<Publisher> d ) {
int idx = super.indexOf(Mailer.DESCRIPTOR);
add(idx,d);
}
}
}
|
// Triple Play - utilities for use in PlayN-based games
package tripleplay.ui;
import playn.core.Pointer;
import playn.core.Pointer.Event;
import playn.core.Sound;
import pythagoras.f.IDimension;
import pythagoras.f.Point;
import react.Signal;
import react.Slot;
import react.Value;
/**
* Controls the behavior of a widget (how it responds to pointer events).
*/
public abstract class Behavior<T extends Element<T>> implements Pointer.Listener {
/** Implements button-like behavior: selects the element when the pointer is in bounds, and
* deselects on release. This is a pretty common case and inherited by {@link Click}. */
public static class Select<T extends Element<T>> extends Behavior<T> {
public Select (T owner) {
super(owner);
}
@Override protected void onPress (Pointer.Event event) {
updateSelected(true);
}
@Override protected void onHover (Pointer.Event event, boolean inBounds) {
updateSelected(inBounds);
}
@Override protected boolean onRelease (Pointer.Event event) {
// it's a click if we ended in bounds
return updateSelected(false);
}
@Override protected void onCancel (Pointer.Event event) {
updateSelected(false);
}
@Override protected void onClick (Pointer.Event event) {
// nothing by default, subclasses wire this up as needed
}
}
/** A behavior that ignores everything. This allows subclasses to easily implement a single
* {@code onX} method. */
public static class Ignore<T extends Element<T>> extends Behavior<T> {
public Ignore (T owner) { super(owner); }
@Override protected void onPress (Pointer.Event event) {}
@Override protected void onHover (Pointer.Event event, boolean inBounds) {}
@Override protected boolean onRelease (Pointer.Event event) { return false; }
@Override protected void onCancel (Pointer.Event event) {}
@Override protected void onClick (Pointer.Event event) {}
}
/** Implements clicking behavior. */
public static class Click<T extends Element<T>> extends Select<T> {
/** A delay (in milliseconds) during which the owner will remain unclickable after it has
* been clicked. This ensures that users don't hammer away at a widget, triggering
* multiple responses (which code rarely protects against). Inherited. */
public static Style<Integer> DEBOUNCE_DELAY = Style.newStyle(true, 500);
/** A signal emitted with our owner when clicked. */
public Signal<T> clicked = Signal.create();
public Click (T owner) {
super(owner);
}
/** Triggers a click. */
public void click () {
soundAction();
clicked.emit(_owner); // emit a click event
}
@Override public void layout () {
super.layout();
_debounceDelay = resolveStyle(DEBOUNCE_DELAY);
}
@Override protected void onPress (Pointer.Event event) {
// ignore press events if we're still in our debounce interval
if (event.time() - _lastClickStamp > _debounceDelay) super.onPress(event);
}
@Override protected void onClick (Pointer.Event event) {
_lastClickStamp = event.time();
click();
}
protected int _debounceDelay;
protected double _lastClickStamp;
}
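// Usage sketch (illustrative; "MyButton" is a hypothetical element type that installs
// this behavior as its pointer listener):
//   Behavior.Click<MyButton> click = new Behavior.Click<MyButton>(button);
//   click.clicked.connect(new Slot<MyButton>() {
//       @Override public void onEmit (MyButton b) { /* react to the click here */ }
//   });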
/** Implements toggling behavior. */
public static class Toggle<T extends Element<T>> extends Behavior<T> {
/** A signal emitted with our owner when clicked. */
public final Signal<T> clicked = Signal.create();
/** Indicates whether our owner is selected. It may be listened to, and updated. */
public final Value<Boolean> selected = Value.create(false);
public Toggle (T owner) {
super(owner);
selected.connect(selectedDidChange());
}
/** Triggers a click. */
public void click () {
soundAction();
clicked.emit(_owner); // emit a click event
}
@Override protected void onPress (Pointer.Event event) {
_anchorState = _owner.isSelected();
selected.update(!_anchorState);
}
@Override protected void onHover (Pointer.Event event, boolean inBounds) {
selected.update(inBounds ? !_anchorState : _anchorState);
}
@Override protected boolean onRelease (Pointer.Event event) {
return _anchorState != _owner.isSelected();
}
@Override protected void onCancel (Pointer.Event event) {
selected.update(_anchorState);
}
@Override protected void onClick (Pointer.Event event) {
click();
}
protected boolean _anchorState;
}
/**
* Tracks the pressed position as an anchor and delegates to subclasses to update state based
* on anchor and drag position.
*/
public static abstract class Track<T extends Element<T>> extends Ignore<T>
{
/** A distance, in event coordinates, used to decide if tracking should be temporarily
* cancelled. If the pointer is hovered more than this distance outside of the owner's
* bounds, the tracking will revert to the anchor position, just like when the pointer is
* cancelled. A null value indicates that the tracking will be unconfined in this way.
* TODO: default to 35 if no Slider uses are relying on lack of hover limit. */
public static Style<Float> HOVER_LIMIT = Style.newStyle(true, (Float)null);
/** Holds the necessary data for the currently active press. {@code Track} subclasses can
* derive if more transient information is needed. */
public class State {
/** Time the press started. */
public final double pressTime;
/** The press and drag positions. */
public final Point press, drag;
/** Creates a new tracking state with the given starting press event. */
public State (Pointer.Event event) {
pressTime = event.time();
toPoint(event, press = new Point());
drag = new Point(press);
}
/** Updates the state to the current event value and calls {@link Track#onTrack}. */
public void update (Pointer.Event event) {
boolean cancel = false;
if (_hoverLimit != null) {
float lim = _hoverLimit, lx = event.localX(), ly = event.localY();
IDimension size = _owner.size();
cancel = lx + lim < 0 || ly + lim < 0 ||
lx - lim >= size.width() || ly - lim >= size.height();
}
toPoint(event, drag);
onTrack(press, cancel ? press : drag);
}
}
protected Track (T owner) {
super(owner);
}
/**
* Called when the pointer is dragged. After cancel or if the pointer goes outside the
* hover limit, drag will be equal to anchor.
* @param anchor the pointer position when initially pressed
* @param drag the current pointer position
*/
abstract protected void onTrack (Point anchor, Point drag);
/**
* Creates the state instance for the given press. Subclasses may return an instance
* of a derived {@code State} if more information is needed during tracking.
*/
protected State createState (Pointer.Event press) {
return new State(press);
}
/**
* Converts an event to coordinates consumed by {@link #onTrack(Point, Point)}. By
* default, simply uses the local x, y.
*/
protected void toPoint (Pointer.Event event, Point dest) {
dest.set(event.localX(), event.localY());
}
@Override protected void onPress (Event event) {
_state = createState(event);
}
@Override protected void onHover (Event event, boolean inBounds) {
if (_state != null) _state.update(event);
}
@Override protected boolean onRelease (Event event) {
_state = null;
return false;
}
@Override protected void onCancel (Event event) {
// track to the press position to cancel
if (_state != null) onTrack(_state.press, _state.press);
_state = null;
}
@Override public void layout () {
super.layout();
_hoverLimit = resolveStyle(HOVER_LIMIT);
}
protected State _state;
protected Float _hoverLimit;
}
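// A minimal Track subclass sketch (illustrative; "MySlider" is a hypothetical element type):
//   Behavior.Track<MySlider> track = new Behavior.Track<MySlider>(slider) {
//       @Override protected void onTrack (Point anchor, Point drag) {
//           float dx = drag.x - anchor.x; // horizontal offset since the press
//           // map dx onto the owner's state here
//       }
//   };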
/** A click behavior that captures the pointer and optionally issues clicks based on some time
* based function. */
public static abstract class Capturing<T extends Element<T>> extends Click<T>
implements Interface.Task
{
protected Capturing (T owner) {
super(owner);
}
@Override protected void onPress (Event event) {
super.onPress(event);
event.capture();
_task = _owner.root().iface().addTask(this);
}
@Override protected void onCancel (Event event) {
super.onCancel(event);
cancelTask();
}
@Override protected boolean onRelease (Event event) {
super.onRelease(event);
cancelTask();
return false;
}
/** Cancels the time-based task. This is called automatically by the pointer release
* and cancel events. */
protected void cancelTask () {
if (_task == null) return;
_task.remove();
_task = null;
}
protected Interface.TaskHandle _task;
}
/** Captures the pointer and dispatches one click on press, a second after an initial delay
* and at regular intervals after that. */
public static class RapidFire<T extends Element<T>> extends Capturing<T>
{
/** Milliseconds after the first click that the second click is dispatched. */
public static final Style<Integer> INITIAL_DELAY = Style.newStyle(true, 200);
/** Milliseconds between repeated click dispatches. */
public static final Style<Integer> REPEAT_DELAY = Style.newStyle(true, 75);
/** Creates a new rapid fire behavior for the given owner. */
public RapidFire (T owner) {
super(owner);
}
@Override protected void onPress (Event event) {
super.onPress(event);
_timeInBounds = 0;
click();
}
@Override protected void onHover (Event event, boolean inBounds) {
super.onHover(event, inBounds);
if (!inBounds) _timeInBounds = -1;
}
@Override public void update (int delta) {
if (_timeInBounds < 0) return;
int was = _timeInBounds;
_timeInBounds += delta;
int limit = was < _initDelay ? _initDelay :
_initDelay + _repDelay * ((was - _initDelay) / _repDelay + 1);
if (was < limit && _timeInBounds >= limit) click();
}
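// With the defaults above (INITIAL_DELAY = 200, REPEAT_DELAY = 75) and the pointer held in
// bounds, clicks are dispatched at press time and then roughly 200ms, 275ms, 350ms, ... later.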
@Override public void layout () {
super.layout();
_initDelay = _owner.resolveStyle(INITIAL_DELAY);
_repDelay = _owner.resolveStyle(REPEAT_DELAY);
}
protected int _initDelay, _repDelay, _timeInBounds;
}
public Behavior (T owner) {
_owner = owner;
}
@Override public void onPointerStart (Pointer.Event event) {
if (_owner.isEnabled()) onPress(event);
}
@Override public void onPointerDrag (Pointer.Event event) {
if (_owner.isEnabled()) onHover(event, _owner.contains(event.localX(), event.localY()));
}
@Override public void onPointerEnd (Pointer.Event event) {
if (onRelease(event)) onClick(event);
}
@Override public void onPointerCancel (Pointer.Event event) {
onCancel(event);
}
/** Called when our owner is laid out. If the behavior needs to resolve configuration via
* styles, this is where it should do it. */
public void layout () {
_actionSound = resolveStyle(Style.ACTION_SOUND);
}
/** Emits the action sound for our owner, if one is configured. */
public void soundAction () {
if (_actionSound != null) _actionSound.play();
}
/** Resolves the value for the supplied style via our owner. */
protected <V> V resolveStyle (Style<V> style) {
return Styles.resolveStyle(_owner, style);
}
/** Returns the {@link Root} to which our owning element is added, or null. */
protected Root root () {
return _owner.root();
}
/** Called when the pointer is pressed down on our element. */
protected abstract void onPress (Pointer.Event event);
/** Called as the user drags the pointer around after pressing. Derived classes map this onto
* the widget state, such as updating selectedness. */
protected abstract void onHover (Pointer.Event event, boolean inBounds);
/** Called when the pointer is released after having been pressed on this widget. This should
* return true if the gesture is considered a click, in which case {@link #onClick} will
* be called automatically. */
protected abstract boolean onRelease (Pointer.Event event);
/** Called when the interaction is canceled after having been pressed on this widget. This
* should not result in a call to {@link #onClick}. */
protected abstract void onCancel (Pointer.Event event);
/** Called when the pointer is released and the subclass decides that it is a click, i.e.
* returns true from {@link #onRelease(Pointer.Event)}. */
protected abstract void onClick (Pointer.Event event);
/** Updates the selected state of our owner, invalidating if selectedness changes.
* @return true if the owner was selected on entry. */
protected boolean updateSelected (boolean selected) {
boolean wasSelected = _owner.isSelected();
if (selected != wasSelected) {
_owner.set(Element.Flag.SELECTED, selected);
_owner.invalidate();
}
return wasSelected;
}
/** Slot for calling {@link #updateSelected(boolean)}. */
protected Slot<Boolean> selectedDidChange () {
return new Slot<Boolean>() {
@Override public void onEmit (Boolean selected) {
updateSelected(selected);
}
};
}
protected final T _owner;
protected Sound _actionSound;
}
|
package com.exedio.cope;
import com.exedio.cope.testmodel.AttributeItem;
import com.exedio.cope.testmodel.PlusItem;
import com.exedio.dsmf.CheckConstraint;
import com.exedio.dsmf.Column;
import com.exedio.dsmf.Constraint;
import com.exedio.dsmf.ForeignKeyConstraint;
import com.exedio.dsmf.PrimaryKeyConstraint;
import com.exedio.dsmf.Schema;
import com.exedio.dsmf.UniqueConstraint;
public class SchemaTest extends TestmodelTest
{
private static final String TABLE1X = "PlusItemX";
private static final String COLUMN1X = "num2X";
public static final Class CHECK = CheckConstraint.class;
public static final Class PK = PrimaryKeyConstraint.class;
public static final Class FK = ForeignKeyConstraint.class;
public static final Class UNIQUE = UniqueConstraint.class;
public void testSchema()
{
if(postgresql) return;
final String TABLE1 = PlusItem.TYPE.getTableName();
final String COLUMN1 = PlusItem.num2.getColumnName();
assertEquals("PlusItem", TABLE1);
assertEquals("num2", COLUMN1);
final String column1Type;
{
final Schema schema = model.getVerifiedSchema();
final com.exedio.dsmf.Table table = schema.getTable(TABLE1);
assertNotNull(table);
assertEquals(true, table.required());
assertEquals(true, table.exists());
assertEquals(null, table.getError());
assertEquals(Schema.Color.OK, table.getParticularColor());
final Column column = table.getColumn(COLUMN1);
assertEquals(true, column.required());
assertEquals(true, column.exists());
assertEquals(null, column.getError());
assertEquals(Schema.Color.OK, column.getParticularColor());
column1Type = column.getType();
assertNotNull(column1Type);
column.renameTo(COLUMN1X);
}
// OK without verify
{
final Schema schema = model.getSchema();
final com.exedio.dsmf.Table table = schema.getTable(TABLE1);
assertNotNull(table);
assertEquals(true, table.required());
assertEquals(false, table.exists());
final Column column = table.getColumn(COLUMN1);
assertEquals(true, column.required());
assertEquals(false, column.exists());
assertEquals(column1Type, column.getType());
}
// COLUMN RENAMED
{
final Schema schema = model.getVerifiedSchema();
final com.exedio.dsmf.Table table = schema.getTable(TABLE1);
assertNotNull(table);
assertEquals(true, table.required());
assertEquals(true, table.exists());
assertEquals(null, table.getError());
assertEquals(Schema.Color.OK, table.getParticularColor());
{
final Column column = table.getColumn(COLUMN1);
assertEquals(true, column.required());
assertEquals(false, column.exists());
assertEquals("missing", column.getError());
assertEquals(Schema.Color.ERROR, column.getParticularColor());
assertEquals(column1Type, column.getType());
}
{
final Column columnX = table.getColumn(COLUMN1X);
assertEquals(false, columnX.required());
assertEquals(true, columnX.exists());
assertEquals("not used", columnX.getError());
assertEquals(Schema.Color.WARNING, columnX.getParticularColor());
assertEquals(column1Type, columnX.getType());
columnX.renameTo(COLUMN1);
}
}
{
final Schema schema = model.getVerifiedSchema();
final com.exedio.dsmf.Table table = schema.getTable(TABLE1);
assertNotNull(table);
assertEquals(true, table.required());
assertEquals(true, table.exists());
assertEquals(null, table.getError());
assertEquals(Schema.Color.OK, table.getParticularColor());
final Constraint constraint = table.getConstraint("PlusItem_num2_Ck");
if(model.supportsCheckConstraints())
constraint.drop();
final Column column = table.getColumn(COLUMN1);
assertEquals(true, column.required());
assertEquals(true, column.exists());
assertEquals(null, column.getError());
assertEquals(Schema.Color.OK, column.getParticularColor());
assertEquals(column1Type, column.getType());
column.drop();
}
// COLUMN DROPPED
{
final Schema schema = model.getVerifiedSchema();
final com.exedio.dsmf.Table table = schema.getTable(TABLE1);
assertNotNull(table);
assertEquals(true, table.required());
assertEquals(true, table.exists());
assertEquals(null, table.getError());
assertEquals(Schema.Color.OK, table.getParticularColor());
final Column column = table.getColumn(COLUMN1);
assertEquals(true, column.required());
assertEquals(false, column.exists());
assertEquals("missing", column.getError());
assertEquals(Schema.Color.ERROR, column.getParticularColor());
assertEquals(column1Type, column.getType());
column.create();
}
{
final Schema schema = model.getVerifiedSchema();
final com.exedio.dsmf.Table table = schema.getTable(TABLE1);
assertNotNull(table);
assertEquals(true, table.required());
assertEquals(true, table.exists());
assertEquals(null, table.getError());
assertEquals(Schema.Color.OK, table.getParticularColor());
final Column column = table.getColumn(COLUMN1);
assertEquals(true, column.required());
assertEquals(true, column.exists());
assertEquals(null, column.getError());
assertEquals(Schema.Color.OK, column.getParticularColor());
assertEquals(column1Type, column.getType());
table.renameTo(TABLE1X);
}
// TABLE RENAMED
{
final Schema schema = model.getVerifiedSchema();
{
final com.exedio.dsmf.Table table = schema.getTable(TABLE1);
assertNotNull(table);
assertEquals(true, table.required());
assertEquals(false, table.exists());
assertEquals("missing", table.getError());
assertEquals(Schema.Color.ERROR, table.getParticularColor());
final Column column = table.getColumn(COLUMN1);
assertEquals(true, column.required());
assertEquals(false, column.exists());
assertEquals("missing", column.getError());
assertEquals(Schema.Color.ERROR, column.getParticularColor());
assertEquals(column1Type, column.getType());
}
{
final com.exedio.dsmf.Table tableX = schema.getTable(TABLE1X);
assertNotNull(tableX);
assertEquals(false, tableX.required());
assertEquals(true, tableX.exists());
assertEquals("not used", tableX.getError());
assertEquals(Schema.Color.WARNING, tableX.getParticularColor());
final Column column = tableX.getColumn(COLUMN1);
assertEquals(false, column.required());
assertEquals(true, column.exists());
assertEquals("not used", column.getError());
assertEquals(Schema.Color.WARNING, column.getParticularColor());
assertEquals(column1Type, column.getType());
tableX.renameTo(TABLE1);
}
}
{
final Schema schema = model.getVerifiedSchema();
final com.exedio.dsmf.Table table = schema.getTable(TABLE1);
assertNotNull(table);
assertEquals(true, table.required());
assertEquals(true, table.exists());
assertEquals(null, table.getError());
assertEquals(Schema.Color.OK, table.getParticularColor());
final Column column = table.getColumn(COLUMN1);
assertEquals(true, column.required());
assertEquals(true, column.exists());
assertEquals(null, column.getError());
assertEquals(Schema.Color.OK, column.getParticularColor());
assertEquals(column1Type, column.getType());
table.drop();
}
// TABLE DROPPED
{
final Schema schema = model.getVerifiedSchema();
{
final com.exedio.dsmf.Table table = schema.getTable(TABLE1);
assertNotNull(table);
assertEquals(true, table.required());
assertEquals(false, table.exists());
assertEquals("missing", table.getError());
assertEquals(Schema.Color.ERROR, table.getParticularColor());
final Column column = table.getColumn(COLUMN1);
assertEquals(true, column.required());
assertEquals(false, column.exists());
assertEquals("missing", column.getError());
assertEquals(Schema.Color.ERROR, column.getParticularColor());
assertEquals(column1Type, column.getType());
table.create();
}
}
{
final Schema schema = model.getVerifiedSchema();
final com.exedio.dsmf.Table table = schema.getTable(TABLE1);
assertNotNull(table);
assertEquals(true, table.required());
assertEquals(true, table.exists());
assertEquals(null, table.getError());
assertEquals(Schema.Color.OK, table.getParticularColor());
final Column column = table.getColumn(COLUMN1);
assertEquals(true, column.required());
assertEquals(true, column.exists());
assertEquals(null, column.getError());
assertEquals(Schema.Color.OK, column.getParticularColor());
assertEquals(column1Type, column.getType());
}
{
assertEquals(!mysql, model.supportsCheckConstraints());
final Schema schema = model.getVerifiedSchema();
final com.exedio.dsmf.Table attributeItem = schema.getTable(AttributeItem.TYPE.getTableName());
assertNotNull(attributeItem);
assertEquals(null, attributeItem.getError());
assertEquals(Schema.Color.OK, attributeItem.getParticularColor());
assertCheckConstraint(attributeItem, "AttrItem_somNotNullStr_Ck", "("+p(AttributeItem.someNotNullString)+" IS NOT NULL) AND ("+LENGTH(AttributeItem.someNotNullString)+"<="+StringField.DEFAULT_LENGTH+")");
assertCheckConstraint(attributeItem, "AttribuItem_someBoolea_Ck", "("+p(AttributeItem.someBoolean)+" IN (0,1)) OR ("+p(AttributeItem.someBoolean)+" IS NULL)");
assertCheckConstraint(attributeItem, "AttrItem_somNotNullBoo_Ck", "("+p(AttributeItem.someNotNullBoolean)+" IS NOT NULL) AND ("+p(AttributeItem.someNotNullBoolean)+" IN (0,1))");
assertCheckConstraint(attributeItem, "AttributeItem_someEnum_Ck", "("+p(AttributeItem.someEnum)+" IN (10,20,30)) OR ("+p(AttributeItem.someEnum)+" IS NULL)");
assertCheckConstraint(attributeItem, "AttrItem_somNotNullEnu_Ck", "("+p(AttributeItem.someNotNullEnum)+" IS NOT NULL) AND ("+p(AttributeItem.someNotNullEnum)+" IN (10,20,30))");
assertCheckConstraint(attributeItem, "AttrItem_somDataConTyp_Ck", "(("+LENGTH(AttributeItem.someData.getContentType())+">=1) AND ("+LENGTH(AttributeItem.someData.getContentType())+"<=61)) OR ("+p(AttributeItem.someData.getContentType())+" IS NULL)");
assertPkConstraint(attributeItem, "AttributeItem_Pk", null, AttributeItem.TYPE.getPrimaryKeyColumnName());
assertFkConstraint(attributeItem, "AttributeItem_someItem_Fk", "someItem", "EmptyItem", AttributeItem.TYPE.getPrimaryKeyColumnName());
final com.exedio.dsmf.Table uniqueItem = schema.getTable("UNIQUE_ITEMS");
assertNotNull(uniqueItem);
assertEquals(null, uniqueItem.getError());
assertEquals(Schema.Color.OK, uniqueItem.getParticularColor());
assertUniqueConstraint(uniqueItem, "IX_ITEMWSU_US", "("+p("UNIQUE_STRING")+")");
final com.exedio.dsmf.Table doubleUniqueItem = schema.getTable("ItemWithDoubleUnique");
assertNotNull(doubleUniqueItem);
assertEquals(null, doubleUniqueItem.getError());
assertEquals(Schema.Color.OK, doubleUniqueItem.getParticularColor());
assertUniqueConstraint(doubleUniqueItem, "ItemWithDoubUni_doUni_Unq", "("+p("string")+","+p("integer")+")");
final com.exedio.dsmf.Table stringItem = schema.getTable("STRINGITEMS");
assertNotNull(stringItem);
assertEquals(null, stringItem.getError());
assertEquals(Schema.Color.OK, stringItem.getParticularColor());
final Column min4Max8 = stringItem.getColumn("MIN4_MAX8");
assertEquals(null, min4Max8.getError());
assertEquals(Schema.Color.OK, min4Max8.getParticularColor());
final String string8;
if(hsqldb)
string8 = "varchar(8)";
else if(mysql)
string8 = "varchar(8) character set utf8 binary";
else
{
if(model.getProperties().getDatabaseCustomProperty("varchar")!=null)
string8 = "VARCHAR2(8)";
else
string8 = "NVARCHAR2(8)";
}
assertEquals(string8, min4Max8.getType());
assertCheckConstraint(stringItem, "STRINGITEMS_MIN_4_Ck", "(("+LENGTH("MIN_4")+">=4) AND ("+LENGTH("MIN_4")+"<="+StringField.DEFAULT_LENGTH+")) OR ("+p("MIN_4")+" IS NULL)");
assertCheckConstraint(stringItem, "STRINGITEMS_MAX_4_Ck", "("+LENGTH("MAX_4")+"<=4) OR ("+p("MAX_4")+" IS NULL)");
assertCheckConstraint(stringItem, "STRINGITEMS_MIN4_MAX8_Ck", "(("+LENGTH("MIN4_MAX8")+">=4) AND ("+LENGTH("MIN4_MAX8")+"<=8)) OR ("+p("MIN4_MAX8")+" IS NULL)");
assertCheckConstraint(stringItem, "STRINGITEMS_EXACT_6_Ck", "("+LENGTH("EXACT_6")+"=6) OR ("+p("EXACT_6")+" IS NULL)");
}
}
private CheckConstraint assertCheckConstraint(final com.exedio.dsmf.Table table, final String constraintName, final String requiredCondition)
{
return
(CheckConstraint)assertConstraint(table, CHECK, constraintName, requiredCondition);
}
private void assertPkConstraint(final com.exedio.dsmf.Table table, final String constraintName, final String requiredCondition, final String primaryKeyColumn)
{
final PrimaryKeyConstraint constraint =
(PrimaryKeyConstraint)assertConstraint(table, PK, constraintName, requiredCondition);
assertEquals(primaryKeyColumn, constraint.getPrimaryKeyColumn());
}
private void assertFkConstraint(final com.exedio.dsmf.Table table, final String constraintName, final String foreignKeyColumn, final String targetTable, final String targetColumn)
{
final ForeignKeyConstraint constraint =
(ForeignKeyConstraint)assertConstraint(table, FK, constraintName, null);
assertEquals(foreignKeyColumn, constraint.getForeignKeyColumn());
assertEquals(targetTable, constraint.getTargetTable());
assertEquals(targetColumn, constraint.getTargetColumn());
}
private void assertUniqueConstraint(final com.exedio.dsmf.Table table, final String constraintName, final String clause)
{
final UniqueConstraint constraint =
(UniqueConstraint)assertConstraint(table, UNIQUE, constraintName, clause);
assertEquals(clause, constraint.getClause());
}
private Constraint assertConstraint(final com.exedio.dsmf.Table table, final Class constraintType, final String constraintName, final String requiredCondition)
{
final Constraint constraint = table.getConstraint(constraintName);
final boolean expectedSupported = model.supportsCheckConstraints() || constraintType!=CHECK;
assertNotNull("no such constraint "+constraintName+", but has "+table.getConstraints(), constraint);
assertEquals(constraintName, constraintType, constraint.getClass());
assertEquals(constraintName, requiredCondition, constraint.getRequiredCondition());
assertEquals(expectedSupported, constraint.isSupported());
assertEquals(constraintName, expectedSupported ? null : "not supported", constraint.getError());
assertEquals(constraintName, Schema.Color.OK, constraint.getParticularColor());
return constraint;
}
private final String p(final Type type)
{
return p(type.getTableName());
}
private final String p(final Field attribute)
{
return p(attribute.getColumnName());
}
private final String p(final String name)
{
return model.getDatabase().getDriver().protectName(name);
}
private final String LENGTH(final FunctionField f)
{
return model.getDatabase().dialect.stringLength + '(' + p(f) + ')';
}
private final String LENGTH(final String f)
{
return model.getDatabase().dialect.stringLength + '(' + p(f) + ')';
}
}
|
package oop.project.timestamper;
import processing.core.PApplet;
public class Sketch extends PApplet {
float dayWidth;
float dayHeight;
Year year;
public void settings() {
fullScreen();
}
public void setup() {
dayHeight = height;
dayWidth = width / 5;
year = new Year(this, dayWidth, dayHeight);
}
float currX = 0;
float last_mouseX;
public void draw() {
background(0);
pushMatrix();
translate(currX, 0);
year.display();
popMatrix();
if (mousePressed) {
ellipse(mouseX, mouseY, 50, 50);
// scroll in the direction the mouse moved since the last frame
if (last_mouseX > mouseX) {
currX -= 10;
} else if (last_mouseX < mouseX) {
currX += 10;
}
}
last_mouseX = mouseX; // remember the pointer position for the next frame
}
}
|
package natlab.toolkits.DependenceAnalysis;
import java.io.File;
import org.w3c.dom.*;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import java.sql.Timestamp;
import java.util.*;
public class HeuristicEngineDriver {
private Hashtable<String,LinkedList> loopTable=new Hashtable<String,LinkedList>();
private String fileName;
private String dirName;
private Hashtable<String,LinkedList<PredictedData>> table;
//private static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss";
public HeuristicEngineDriver(String fName){
StringTokenizer st = new StringTokenizer(fName,".");
fileName=st.nextToken();
fileName=fileName+".xml";
//System.out.println(fileName);
}
private void populateProfiledData(Element nestedElement){
float lNo=Float.parseFloat(nestedElement.getAttribute("Number"));
ProfiledData inData=new ProfiledData();
inData.setLoopNo(lNo); //setting loopNo in inData.
NodeList lowerBoundList = nestedElement.getElementsByTagName("LowerBound");
Element lowerBoundElement = (Element)lowerBoundList.item(0);
// System.out.println("IIdsf"+lowerBoundElement.getNodeName());
NodeList variableNameList = lowerBoundElement.getElementsByTagName("VariableName");
Element variableNameElement = (Element)variableNameList.item(0);
NodeList textFNList = variableNameElement.getChildNodes(); //Setting the loopVariableName
String lVName=((Node)textFNList.item(0)).getNodeValue().trim();
//System.out.println("Variable Name : " +lVName);
//((Node)textFNList.item(0)).getNodeValue().trim());
inData.setLVName(lVName);
NodeList rangeList = lowerBoundElement.getElementsByTagName("Range");
Element rangeElement = (Element)rangeList.item(0);
//System.out.println("Range is:::"+rangeElement.getNodeName());
NodeList startList = rangeElement.getElementsByTagName("start");
Element startElement = (Element)startList.item(0);
NodeList textLNList = startElement.getChildNodes();
//System.out.println("start : " +
// ((Node)textLNList.item(0)).getNodeValue().trim());
int lbStart=Integer.parseInt(((Node)textLNList.item(0)).getNodeValue()); //setting start of lowerBound.
ProfiledLowerBound lBound=inData.getNewLBound();
lBound.setStart(lbStart);
NodeList endList = rangeElement.getElementsByTagName("end");
Element endElement = (Element)endList.item(0);
NodeList textLNList1 = endElement.getChildNodes();
//System.out.println("end : " +
// ((Node)textLNList1.item(0)).getNodeValue().trim());
int lbEnd=Integer.parseInt(((Node)textLNList1.item(0)).getNodeValue()); //setting end of lowerBound.
lBound.setEnd(lbEnd);
inData.setLBound(lBound);
//............For Upper Bound................//
NodeList upperBoundList = nestedElement.getElementsByTagName("UpperBound");
Element upperBoundElement = (Element)upperBoundList.item(0);
//System.out.println("IIdsfuuuuuuu"+upperBoundElement.getNodeName());
NodeList uvariableNameList = upperBoundElement.getElementsByTagName("VariableName");
Element uvariableNameElement = (Element)uvariableNameList.item(0);
NodeList utextFNList = uvariableNameElement.getChildNodes();
// System.out.println("Variable Name : " +
// ((Node)utextFNList.item(0)).getNodeValue().trim());
NodeList urangeList = upperBoundElement.getElementsByTagName("Range");
Element urangeElement = (Element)urangeList.item(0);
// System.out.println("Range is:::"+urangeElement.getNodeName());
NodeList ustartList = urangeElement.getElementsByTagName("start");
Element ustartElement = (Element)ustartList.item(0);
NodeList utextLNList = ustartElement.getChildNodes();
// System.out.println("start : " +
// ((Node)utextLNList.item(0)).getNodeValue().trim());
int ubStart=Integer.parseInt(((Node)utextLNList.item(0)).getNodeValue());
UpperBound uBound=inData.getNewUBound();
uBound.setStart(ubStart); //setting the upper bound..
NodeList uendList = urangeElement.getElementsByTagName("end");
Element uendElement = (Element)uendList.item(0);
NodeList utextLNList1 = uendElement.getChildNodes();
//System.out.println("end : " +
// ((Node)utextLNList1.item(0)).getNodeValue().trim());
int ubEnd=Integer.parseInt(((Node)utextLNList1.item(0)).getNodeValue());
uBound.setEnd(ubEnd); //setting the upper bound..
inData.setUBound(uBound);
//.......For LoopIncrement Factor...........//
NodeList lifList = nestedElement.getElementsByTagName("LoopIncrementFactor");
Element lifElement = (Element)lifList.item(0);
//System.out.println("IIdsfuuuuuuulif"+lifElement.getNodeName());
NodeList lifrangeList = lifElement.getElementsByTagName("Range");
Element lifrangeElement = (Element)lifrangeList.item(0);
// System.out.println("Range is:::"+lifrangeElement.getNodeName());
NodeList lifstartList = lifrangeElement.getElementsByTagName("start");
Element lifstartElement = (Element)lifstartList.item(0);
NodeList liftextLNList = lifstartElement.getChildNodes();
//System.out.println("start : " +
// ((Node)liftextLNList.item(0)).getNodeValue().trim());
int lifStart=Integer.parseInt(((Node)liftextLNList.item(0)).getNodeValue());
ProfiledLIF lif=inData.getNewLoopIncFac();
lif.setStart(lifStart);
NodeList lifendList = lifrangeElement.getElementsByTagName("end");
Element lifendElement = (Element)lifendList.item(0);
NodeList liftextLNList1 = lifendElement.getChildNodes();
//System.out.println("end : " +
// ((Node)liftextLNList1.item(0)).getNodeValue().trim());
int lifEnd=Integer.parseInt(((Node)liftextLNList1.item(0)).getNodeValue());
lif.setEnd(lifEnd);
inData.setLoopIncFac(lif);
if(loopTable.containsKey(nestedElement.getAttribute("Number"))){ //get the already existing linked list for the key and add the value to it
//this is to insert data from the different runs of the same loop into the same list.
LinkedList<ProfiledData> tList=(LinkedList<ProfiledData>)loopTable.get(nestedElement.getAttribute("Number"));
tList.add(inData);
}
else{// create a new linked list for this key and insert it in the hashtable.
LinkedList<ProfiledData> list=new LinkedList<ProfiledData>();
list.add(inData);
//loopTable.put(nestedElement.getAttribute("LoopNumber"),list);
loopTable.put(nestedElement.getAttribute("Number"),list);
}
}//end of populateProfiledData function.
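// Illustrative shape of the profiled-data XML consumed above (reconstructed from the tag
// names parsed here; the values are made up):
//   <LoopNo Number="1.0">
//     <LowerBound><VariableName>i</VariableName><Range><start>1</start><end>1</end></Range></LowerBound>
//     <UpperBound><VariableName>i</VariableName><Range><start>10</start><end>100</end></Range></UpperBound>
//     <LoopIncrementFactor><Range><start>1</start><end>1</end></Range></LoopIncrementFactor>
//   </LoopNo>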
public void parseXmlFile(){
//StringTokenizer st = new StringTokenizer(fileName,".");
//dirName=st.nextToken();
try{
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
Document doc = docBuilder.parse (new File(dirName+"/"+fileName));
ProfiledData inData;//=null;
Element lElement;
// normalize text representation
doc.getDocumentElement ().normalize ();
NodeList loopList = doc.getElementsByTagName("LoopNo");
for(int s=0; s<loopList.getLength() ; s++){
Element nestedElement = (Element)loopList.item(s);
NodeList nodeList=nestedElement.getElementsByTagName("NestedLoop");
if(nodeList.getLength()==0){
lElement=nestedElement;
populateProfiledData(lElement);
}
else{
for(int j=0;j<nodeList.getLength();j++){
lElement=(Element)nodeList.item(j);
populateProfiledData(lElement);
}//end of for
}//end of else
}//end of for
}catch (SAXParseException err) {
System.out.println ("** Parsing error" + ", line "
+ err.getLineNumber () + ", uri " + err.getSystemId ());
System.out.println(" " + err.getMessage ());
}catch (SAXException e) {
Exception x = e.getException ();
((x == null) ? e : x).printStackTrace ();
}catch (Throwable t) {
t.printStackTrace ();
}
callIntroSort();
HeuristicEngine hEngine=new HeuristicEngine(loopTable);
table=hEngine.computeRegionDivisors();
//System.out.println("table size is"+table.size());
//Set s=table.entrySet();
//Iterator it=s.iterator();
//System.out.println(table.size());
//while(it.hasNext()){
//PredictedData pData=(PredictedData)it.next();
// System.out.println(pData.getLoopNo());
writeToXMLFile(table);
}
public Hashtable<String, LinkedList<PredictedData>> getTable() {
return table;
}
/*
* TODO: needs fixing; IntroSort is not fully implemented yet.
*/
private void callIntroSort(){
Set s=loopTable.keySet();
Iterator it=s.iterator();
while(it.hasNext()){
LinkedList l=(LinkedList)loopTable.get(it.next());
Sorter b= new Sorter(l, new ComparableComparator());
try{
b.IntroSort(0, l.size());
}catch(Exception e){ /* sorting failures are ignored */ }
//for(int i=0;i<l.size();i++){
//System.out.print(((UpperBound)((ProfiledData)l.get(i)).getUBound()).getEnd()+" ");
//System.out.println(" ");
}
}
public void writeToXMLFile(Hashtable pTable){
File file = new File(dirName+ "/" + "RangeData" + fileName);
try
{ boolean exists = file.exists();
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
Document document;
//Element rootElement=null;
//Node rElement=null;
Collection c=table.values();
Iterator it=c.iterator();
if(!exists){ // the output file does not exist yet, so start a fresh document
document = documentBuilder.newDocument();
}
else{ // the file exists, so parse it and append this run to it
document = documentBuilder.parse(file);
document.normalizeDocument();
}
Element tsElement = document.createElement("RunNo"); // creates a element
Date d=new Date();
Timestamp ts=new Timestamp(d.getTime());
tsElement.setAttribute(new String("TimeStamp"), ts.toString());
while(it.hasNext()){
LinkedList<PredictedData> tList=(LinkedList<PredictedData>)it.next();
for(int i=0;i<tList.size();i++){
PredictedData pData=(PredictedData)tList.get(i);
Element loopElement = document.createElement("LoopNo"); //create another element
loopElement.setAttribute(new String("LoopNumber"), Float.toString(pData.getLoopNo()));
loopElement.setAttribute(new String("LoopVariableName"), pData.getLVName());
Element predictedLBElement = document.createElement("PredictedLowerBound"); //create another element
predictedLBElement.setAttribute(new String("Value"), Integer.toString(pData.getLowerBound()));
loopElement.appendChild(predictedLBElement);
Element predictedLIFElement = document.createElement("PredictedLoopIncFactor"); //create another element
predictedLIFElement.setAttribute(new String("Value"), Integer.toString(pData.getLoopIncFactor()));
loopElement.appendChild(predictedLIFElement);
Element predictedUBElement = document.createElement("PredictedUpperBound"); //create another element
predictedUBElement.setAttribute(new String("Value"), Integer.toString(pData.getUpperBound()));
loopElement.appendChild(predictedUBElement);
tsElement.appendChild(loopElement); // add element1 under rootElement
}//end of for
}//end of while
if(!exists){
Element rootElement = document.createElement("HD"); // creates a element
rootElement.appendChild(tsElement);
document.appendChild(rootElement); // add the rootElement to the document
}
else{
Node rElement = document.getDocumentElement();
rElement.appendChild(tsElement);
}
TransformerFactory transformerFactory = TransformerFactory.newInstance();
Transformer transformer = transformerFactory.newTransformer();
DOMSource source = new DOMSource(document);
StreamResult result = new StreamResult(file);
transformer.transform(source, result);
}//end of try
catch (Exception e) {e.printStackTrace();}//end of catch
}//end of function call
public String getDirName() {
return dirName;
}
public void setDirName(String dirName) {
this.dirName = dirName;
}
}
|
package natlab.toolkits.rewrite.simplification;
import java.util.*;
import ast.*;
import natlab.toolkits.rewrite.*;
import natlab.toolkits.analysis.varorfun.*;
/**
* Simplifies for statements. Reduces them so that there are only
* simple range for loops.
*
* @author Jesse Doherty
*/
public class ForSimplification extends AbstractSimplification
{
public ForSimplification( ASTNode tree,
AbstractLocalRewrite callback,
VFStructuralForwardAnalysis kind )
{
super( tree, callback, kind );
}
public Set<AbstractSimplification> getDependencies()
{
return new HashSet<AbstractSimplification>();
}
public void caseForStmt( ForStmt node )
{
AssignStmt assignStmt = node.getAssignStmt();
Expr iterableExpr = assignStmt.getRHS();
if( !(iterableExpr instanceof RangeExpr) ){
// TODO: the actual rewrite is still unimplemented; these temporaries are
// placeholders for the values needed to turn this loop into a simple range loop.
LinkedList<AssignStmt> newStmts = new LinkedList<AssignStmt>();
TempFactory t1Fact = TempFactory.genFreshTempFactory();
TempFactory t2Fact = TempFactory.genFreshTempFactory();
TempFactory t3Fact = TempFactory.genFreshTempFactory();
}
}
}
|
package opjj.hw5;
public final class EncodedMessages {
public static final String MESSAGE_1 = "'Purjudpplqj lv olnh vha: rqh plvwdnh dqg brx duh surylglqj vxssruw iru d olihwlph.' -- Mlfkdho Slqc";
public static final String MESSAGE_2 = "'Aalpnh rdst ph xu iwt vjn lwd tcsh je bpxcipxcxcv ndjg rdst xh kxdatci ehnrwdepiw lwd zcdlh lwtgt ndj axkt.' -- Mpgixc Gdasxcv";
public static void main(String[] args) {
CaesarCipher cipher = new CaesarCipher();
// change shift to decode messages; consider using for loop
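// One possible brute-force sketch (uses the same CaesarCipher#decode(String, int) call as below):
//   for (int shift = 0; shift < 26; shift++) {
//       System.out.println(shift + ": " + cipher.decode(MESSAGE_1, shift));
//   }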
int shift1 = 0;
int shift2 = 0;
String m1 = cipher.decode(MESSAGE_1, shift1);
String m2 = cipher.decode(MESSAGE_2, shift2);
System.out.println(m1);
System.out.println(m2);
}
}
|
interface A {
int X = 1; // implicitly public static final
default void f() {} // implicitly public
}
interface B {
int X = 2; // implicitly public static final
default void f() {} // implicitly public
}
// class C implements A, B { // WILL NOT COMPILE DUE TO f()
// public static void main(String[] args) {
// System.out.println(X); // WILL NOT COMPILE (X ambiguous)
// }
// }
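// A resolution sketch (standard Java rules; this class is not part of the original snippet):
// class C implements A, B {
// @Override
// public void f() { A.super.f(); } // must override f() and may delegate explicitly
// public static void main(String[] args) {
// System.out.println(A.X + " " + B.X); // qualify X to resolve the ambiguity
// }
// }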
|
package Lev17.Task1714;
public class Beach implements Comparable<Beach>{
private String name;
private float distance;
private int quality;
public Beach(String name, float distance, int quality) {
this.name = name;
this.distance = distance;
this.quality = quality;
}
public synchronized String getName() {
return name;
}
public synchronized void setName(String name) {
this.name = name;
}
public synchronized float getDistance() {
return distance;
}
public synchronized void setDistance(float distance) {
this.distance = distance;
}
public synchronized int getQuality() {
return quality;
}
public synchronized void setQuality(int quality) {
this.quality = quality;
}
public static void main(String[] args) {
}
@Override
public synchronized int compareTo(Beach o) {
// Order beaches by quality (assumed intent; the original had no return on the other path and would not compile)
return Integer.compare(this.quality, o.getQuality());
}
}
|
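/*
 * Usage sketch for the Comparable implementation above: build a few beaches and
 * sort them with Collections.sort, which relies on Beach.compareTo (ordering by
 * quality, as assumed in the method above). The sample data is made up.
 */
package Lev17.Task1714;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class BeachSortSketch {
    public static void main(String[] args) {
        List<Beach> beaches = new ArrayList<>();
        beaches.add(new Beach("North", 3.5f, 7));
        beaches.add(new Beach("South", 1.2f, 9));
        beaches.add(new Beach("West", 2.0f, 5));
        Collections.sort(beaches); // uses Beach.compareTo
        for (Beach b : beaches) {
            System.out.println(b.getName() + " quality=" + b.getQuality());
        }
    }
}
|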
/* -*- mode: Java; c-basic-offset: 2; -*- */
/**
* LZX Attributes
*/
package org.openlaszlo.compiler;
import org.openlaszlo.xml.internal.Schema.Type;
import org.openlaszlo.xml.internal.XMLUtils;
import org.jdom.Element;
/** Contains information about an attribute of a laszlo viewsystem class.
*/
public class AttributeSpec {
/** The source Element from which this attribute was parsed */
Element source = null;
/** The attribute name */
public String name;
/** The default value */
public String defaultValue = null;
/** The setter function */
String setter;
/** The type of the attribute value*/
public Type type;
/** Is this attribute required to instantiate an instance of this class? */
public boolean required = false;
/** When does the initial value for this attribute get evaluated? */
String when = NodeModel.WHEN_IMMEDIATELY;
String allocation = NodeModel.ALLOCATION_INSTANCE;
/** If this is a method, the arglist */
public String arglist = null;
/** Can this attribute be overridden without a warning? value is null, 'true' or 'false' */
String isfinal = null;
/** Is this attribute equivalent to element content of a given type? */
int contentType = NO_CONTENT;
/** Element content types: */
static final int NO_CONTENT = 0;
static final int TEXT_CONTENT = 1;
static final int HTML_CONTENT = 2;
private String typeToLZX() {
switch (contentType) {
case TEXT_CONTENT:
return "text";
case HTML_CONTENT:
return "html";
default:
return type.toString();
}
}
public String toLZX(String indent, ClassModel superclass) {
AttributeSpec superSpec = superclass.getAttribute(name, allocation);
if (ViewSchema.METHOD_TYPE.equals(type)) {
return indent + " <method name='" + name + "'" +
(((arglist == null) || "".equals(arglist))?"":(" args='" + XMLUtils.escapeXml(arglist) +"'")) +
" />";
}
if (superSpec == null) {
if (ViewSchema.EVENT_HANDLER_TYPE.equals(type)) {
return indent + "<event name='" + name + "' />";
}
return indent + "<attribute name='" + name + "'" +
((defaultValue != null)?(" value='" + XMLUtils.escapeXml(defaultValue) + "'"):"") +
((type != null)?(" type='" + typeToLZX() + "'"):"") +
((!NodeModel.WHEN_IMMEDIATELY.equals(when))?(" when='" + when + "'"):"") +
(required?(" required='true'"):"") +
" />";
} else if (! ViewSchema.EVENT_HANDLER_TYPE.equals(type)) {
String attrs = "";
if (defaultValue != null &&
(! defaultValue.equals(superSpec.defaultValue))) {
attrs += " value='" + XMLUtils.escapeXml(defaultValue) + "'";
}
if (type != null &&
(! type.equals(superclass.getAttributeType(name, allocation)))) {
attrs += " type='" + typeToLZX() + "'";
}
if (when != null &&
(! when.equals(superSpec.when))) {
attrs += " when='" + when + "'";
}
if (required != superSpec.required) {
attrs += " required='" + required + "'";
}
if (attrs.length() > 0) {
return indent + "<attribute name='" + name + "'" + attrs + " />";
}
}
return null;
}
public String toString() {
if (ViewSchema.METHOD_TYPE.equals(type)) {
return "[AttributeSpec: method name='" + name + "'" + (((arglist == null) || "".equals(arglist))?"":(" args='" + arglist +"'")) + " isfinal="+(isfinal == null ? "null" : ("'"+isfinal+"'"))+ " allocation="+allocation+"]";
}
if (ViewSchema.EVENT_HANDLER_TYPE.equals(type)) {
return "[AttributeSpec: event name='" + name + "' ]";
}
return "[AttributeSpec: attribute name='" + name + "'" +
((defaultValue != null)?(" value='" + defaultValue + "'"):"") +
((type != null)?(" type='" + typeToLZX() + "'"):"") +
((!NodeModel.WHEN_IMMEDIATELY.equals(when))?(" when='" + when + "'"):"") +
(required?(" required='true'"):"") +
" allocation="+allocation+
" ";
}
AttributeSpec (String name, Type type, String defaultValue, String setter, Element source) {
this.source = source;
this.name = name;
this.type = type;
this.defaultValue = defaultValue;
this.setter = setter;
this.when = XMLUtils.getAttributeValue(source, "when", NodeModel.WHEN_IMMEDIATELY);
}
AttributeSpec (String name, Type type, String defaultValue, String setter, boolean required, Element source) {
this.source = source;
this.name = name;
this.type = type;
this.defaultValue = defaultValue;
this.setter = setter;
this.required = required;
this.when = XMLUtils.getAttributeValue(source, "when", NodeModel.WHEN_IMMEDIATELY);
}
public AttributeSpec (String name, Type type, String defaultValue, String setter) {
this.name = name;
this.type = type;
this.defaultValue = defaultValue;
this.setter = setter;
}
AttributeSpec (String name, Type type, String defaultValue, String setter, boolean required) {
this.name = name;
this.type = type;
this.defaultValue = defaultValue;
this.setter = setter;
this.required = required;
}
}
|
package io.appium.android.bootstrap.handler;
import com.android.uiautomator.core.UiDevice;
import com.android.uiautomator.core.UiObjectNotFoundException;
import io.appium.android.bootstrap.*;
import org.json.JSONException;
import java.util.Hashtable;
/**
* This handler is used to set text in elements that support it.
*
*/
public class SetText extends CommandHandler {
/*
* @param command The {@link AndroidCommand} used for this handler.
*
* @return {@link AndroidCommandResult}
*
* @throws JSONException
*
* @see io.appium.android.bootstrap.CommandHandler#execute(io.appium.android.
* bootstrap.AndroidCommand)
*/
@Override
public AndroidCommandResult execute(final AndroidCommand command)
throws JSONException {
if (command.isElementCommand()) {
// Only makes sense on an element
try {
final Hashtable<String, Object> params = command.params();
final AndroidElement el = command.getElement();
boolean replace = Boolean.parseBoolean(params.get("replace").toString());
String text = params.get("text").toString();
boolean pressEnter = false;
if (text.endsWith("\\n")) {
pressEnter = true;
text = text.replace("\\n", "");
Logger.debug("Will press enter after setting text");
}
boolean unicodeKeyboard = false;
if (params.get("unicodeKeyboard") != null) {
unicodeKeyboard = Boolean.parseBoolean(params.get("unicodeKeyboard").toString());
}
String currText = el.getText();
new Clear().execute(command);
if (!el.getText().isEmpty()) {
// clear could have failed, or we could have a hint in the field
// we'll assume it is the latter
Logger.debug("Text not cleared. Assuming remainder is hint text.");
currText = "";
}
if (!replace) {
text = currText + text;
}
final boolean result = el.setText(text, unicodeKeyboard);
if (!result) {
return getErrorResult("el.setText() failed!");
}
if (pressEnter) {
final UiDevice d = UiDevice.getInstance();
d.pressEnter();
}
return getSuccessResult(result);
} catch (final UiObjectNotFoundException e) {
return new AndroidCommandResult(WDStatus.NO_SUCH_ELEMENT,
e.getMessage());
} catch (final Exception e) { // handle NullPointerException
return getErrorResult("Unknown error");
}
} else {
return getErrorResult("Unable to set text without an element.");
}
}
}
|
package net.yadaframework.web;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.annotation.Resource;
import javax.mail.MessagingException;
import javax.mail.internet.MimeMessage;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.MessageSource;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.mail.javamail.MimeMessageHelper;
import org.springframework.stereotype.Service;
import org.thymeleaf.context.Context;
import org.thymeleaf.spring4.SpringTemplateEngine;
import net.yadaframework.components.YadaUtil;
import net.yadaframework.core.YadaConfiguration;
import net.yadaframework.core.YadaConstants;
import net.yadaframework.exceptions.InternalException;
@Service
public class YadaEmailService {
private Logger log = LoggerFactory.getLogger(YadaEmailService.class);
@Autowired private YadaConfiguration config;
@Autowired private JavaMailSender mailSender;
@Resource private SpringTemplateEngine emailTemplateEngine;
@Autowired private MessageSource messageSource;
@Autowired private ServletContext servletContext;
@Autowired private ApplicationContext applicationContext;
@Autowired private YadaWebUtil yadaWebUtil;
@Autowired private YadaUtil yadaUtil;
/**
* Convert a site-relative link to absolute, because in emails we can't use @{}.
* Example: th:href="${beans.yadaEmailService.buildLink('/read/234')}"
* @param relativeLink
* @return absolute link
*/
public String buildLink(String relativeLink) {
String myServerAddress = config.getServerAddress();
String relative = StringUtils.prependIfMissing(relativeLink, "/");
return myServerAddress + relative;
}
public boolean sendSupportRequest(String username, String supportRequest, HttpServletRequest request, Locale locale) {
final String emailName = "supportRequest";
final String[] toEmail = config.getSupportRequestRecipients();
final String[] subjectParams = {username};
String clientIp = yadaWebUtil.getClientIp(request);
String userAgent = request.getHeader("user-agent");
final Map<String, Object> templateParams = new HashMap<String, Object>();
templateParams.put("username", username);
templateParams.put("supportRequest", supportRequest);
templateParams.put("clientIp", clientIp);
templateParams.put("userAgent", userAgent);
Map<String, String> inlineResources = new HashMap<String, String>();
inlineResources.put("logosmall", config.getEmailLogoImage());
return sendHtmlEmail(toEmail, emailName, subjectParams, templateParams, inlineResources, locale, true);
}
public boolean sendHtmlEmail(String[] toEmail, String emailName, Object[] subjectParams, Map<String, Object> templateParams, Map<String, String> inlineResources, Locale locale, boolean addTimestamp) {
return sendHtmlEmail(toEmail, null, emailName, subjectParams, templateParams, inlineResources, locale, addTimestamp);
}
public boolean sendHtmlEmail(String[] toEmail, String replyTo, String emailName, Object[] subjectParams, Map<String, Object> templateParams, Map<String, String> inlineResources, Locale locale, boolean addTimestamp) {
return sendHtmlEmail(config.getEmailFrom(), toEmail, replyTo, emailName, subjectParams, templateParams, inlineResources, null, locale, addTimestamp);
}
public boolean sendHtmlEmail(String fromEmail, String[] toEmail, String replyTo, String emailName, Object[] subjectParams, Map<String, Object> templateParams, Map<String, String> inlineResources, Map<String, File> attachments, Locale locale, boolean addTimestamp) {
YadaEmailParam yadaEmailParam = new YadaEmailParam();
yadaEmailParam.fromEmail = fromEmail;
yadaEmailParam.toEmail = toEmail;
yadaEmailParam.replyTo = replyTo;
yadaEmailParam.emailName = emailName;
yadaEmailParam.subjectParams = subjectParams;
yadaEmailParam.templateParams = templateParams;
yadaEmailParam.inlineResources = inlineResources;
yadaEmailParam.attachments = attachments;
yadaEmailParam.locale = locale;
yadaEmailParam.addTimestamp = addTimestamp;
return sendHtmlEmail(yadaEmailParam);
}
public boolean sendHtmlEmail(YadaEmailParam yadaEmailParam) {
String fromEmail = yadaEmailParam.fromEmail;
String[] toEmail = yadaEmailParam.toEmail;
String replyTo = yadaEmailParam.replyTo;
String emailName = yadaEmailParam.emailName;
Object[] subjectParams = yadaEmailParam.subjectParams;
Map<String, Object> templateParams = yadaEmailParam.templateParams;
Map<String, String> inlineResources = yadaEmailParam.inlineResources;
Map<String, File> attachments = yadaEmailParam.attachments;
Locale locale = yadaEmailParam.locale;
boolean addTimestamp = yadaEmailParam.addTimestamp;
final String emailTemplate = getMailTemplateFile(emailName, locale);
final String subject = messageSource.getMessage("email.subject." + emailName, subjectParams, subjectParams[0].toString(), locale);
// String myServerAddress = yadaWebUtil.getWebappAddress(request);
// final WebContext ctx = new WebContext(request, response, servletContext, locale);
// Using Context instead of WebContext, we can't access WebContent files and can't use @{somelink}
final Context ctx = new Context(locale);
// FIXME if @config is used we get EL1057E:(pos 1): No bean resolver registered in the context to resolve access to bean 'config'
// It is not clear how to register a bean resolver inside ctx, so "config" is used instead of "@config"
ctx.setVariable("config", config); // "config" can be used in the templates (but not @config!)
if (templateParams!=null) {
for (Entry<String, Object> entry : templateParams.entrySet()) {
ctx.setVariable(entry.getKey(), entry.getValue());
}
}
// ctx.setVariable("beans", new Beans(applicationContext)); // So I can use "beans.myBean" in the template (workaround for the missing "@myBean" support)
final String body = this.emailTemplateEngine.process("/" + YadaConstants.EMAIL_TEMPLATES_FOLDER + "/" + emailTemplate, ctx);
YadaEmailContent ec = new YadaEmailContent();
ec.from = fromEmail!=null?fromEmail:config.getEmailFrom();
if (replyTo!=null) {
ec.replyTo = replyTo;
}
ec.to = toEmail;
ec.subject = subject + (addTimestamp?" (" + timestamp(locale) +")":"");
ec.body = body;
ec.html = true;
if (inlineResources!=null) {
ec.inlineResourceIds = new String[inlineResources.size()];
ec.inlineResources = new org.springframework.core.io.Resource[inlineResources.size()];
int i=0;
for (Entry<String, String> entry : inlineResources.entrySet()) {
ec.inlineResourceIds[i] = entry.getKey();
// Must support fully qualified URLs, e.g. "file:C:/test.dat".
// Must support classpath pseudo-URLs, e.g. "classpath:test.dat".
// Should support relative file paths, e.g. "WEB-INF/test.dat". (This will be implementation-specific, typically provided by an ApplicationContext implementation.)
DefaultResourceLoader defaultResourceLoader = new DefaultResourceLoader();
ec.inlineResources[i] = defaultResourceLoader.getResource(entry.getValue());
if (!ec.inlineResources[i].exists()) {
log.error("Invalid resource: " + entry.getValue());
}
// ec.inlineResources[i] = new ServletContextResourceLoader(servletContext).getResource(entry.getValue());
i++;
}
}
if (attachments!=null) {
Set<String> keySet = attachments.keySet();
int size = keySet.size();
ec.attachedFilenames = new String[size];
ec.attachedFiles = new File[size];
int i=0;
for (String filename : attachments.keySet()) {
ec.attachedFilenames[i] = filename;
ec.attachedFiles[i] = attachments.get(filename);
i++;
}
}
return sendEmail(ec);
}
/**
* Given a template name without extension, returns its localized full name.
* For example "saluti" becomes "saluti_it" if that file exists, otherwise it stays "saluti".
* If the template does not exist, an InternalException is thrown.
* @param templateNameNoHtml
* @param locale
* @return
*/
private String getMailTemplateFile(String templateNameNoHtml, Locale locale) {
// String base = "/WEB-INF/emailTemplates/";
String prefix = templateNameNoHtml; // emailChange
String languagePart = "_" + locale.getLanguage();
String suffix = ".html";
String filename = prefix + languagePart + suffix; // emailChange_it.html
// TODO check if the / before filename is still needed
ClassPathResource classPathResource = new ClassPathResource(YadaConstants.EMAIL_TEMPLATES_PREFIX + "/" + YadaConstants.EMAIL_TEMPLATES_FOLDER + "/" + filename);
if (classPathResource.exists()) {
return prefix + languagePart;
}
filename = prefix + suffix; // emailChange.html
classPathResource = new ClassPathResource(YadaConstants.EMAIL_TEMPLATES_PREFIX + "/" + YadaConstants.EMAIL_TEMPLATES_FOLDER + "/" + filename);
if (classPathResource.exists()) {
return prefix;
}
throw new InternalException("Email template not found: " + templateNameNoHtml);
}
/**
* Send an email by specifying the content directly, without a template
* @param from
* @param to
* @param replyTo
* @param cc
* @param bcc
* @param subject
* @param body
* @param html
* @param inlineFile
* @param inlineFilename
* @return
*/
public boolean sendEmail(String from, String to, String replyTo, String cc, String bcc, String subject, String body, boolean html, File inlineFile, String inlineFilename) {
YadaEmailContent content = new YadaEmailContent();
content.from = from;
content.replyTo = replyTo;
content.to = new String[]{to};
content.cc = new String[]{cc};
content.bcc = new String[]{bcc};
content.subject = subject;
content.body = body;
content.html = html;
content.inlineFiles = new File[] {inlineFile};
content.inlineFileIds = new String[] {inlineFilename};
return sendEmail(content);
}
public boolean sendEmail(YadaEmailContent yadaEmailContent) {
try {
MimeMessage msg = createMimeMessage(yadaEmailContent);
if (msg!=null) {
log.debug("Sending email to '{}'...", Arrays.asList(yadaEmailContent.to));
mailSender.send(msg);
log.debug("Email sent to '{}'", Arrays.asList(yadaEmailContent.to));
return true;
}
} catch (Exception e) {
log.error("Error while sending email message to '{}'", Arrays.asList(yadaEmailContent.to), e);
}
return false;
}
// Not used yet
public boolean sendEmailBatch(List<YadaEmailContent> yadaEmailContents) {
boolean result = true;
List<MimeMessage> messageList = new ArrayList<MimeMessage>();
for (YadaEmailContent yadaEmailContent : yadaEmailContents) {
try {
MimeMessage mimeMessage = createMimeMessage(yadaEmailContent);
messageList.add(mimeMessage);
} catch (Exception e) {
result = false;
log.error("Error while creating batch email message to {} (ignored)", yadaEmailContent.to, e);
}
}
if (messageList.size()>0) {
mailSender.send(messageList.toArray(new MimeMessage[messageList.size()])); // Batch
}
return result;
}
private String[] purifyRecipients(String[] addresses, YadaEmailContent yadaEmailContent) {
List<String> validEmail = config.getValidDestinationEmails();
if (validEmail!=null && !validEmail.isEmpty()) {
List<String> recipients = new ArrayList<String>(Arrays.asList(addresses));
// Keep only the valid recipients
if (recipients.retainAll(validEmail)) {
// Some were removed; log the removed ones
List<String> invalidEmails = new ArrayList<String>(Arrays.asList(addresses));
invalidEmails.removeAll(validEmail);
for (String address : invalidEmails) {
log.warn("Unauthorized email address (not in <validEmail>). Skipping message for '{}' from='{}' subject='{}' body='{}'",
new Object[]{address, yadaEmailContent.from, yadaEmailContent.subject, yadaEmailContent.body});
}
}
return recipients.toArray(new String[0]);
} else {
return addresses;
}
}
private MimeMessage createMimeMessage(YadaEmailContent yadaEmailContent) throws MessagingException {
if (!config.isEmailEnabled()) {
log.warn("Emails not enabled. Skipping message from='{}' to='{}' cc='{}' bcc='{}' subject='{}' body='{}'",
new Object[]{yadaEmailContent.from, yadaEmailContent.to, yadaEmailContent.cc, yadaEmailContent.bcc, yadaEmailContent.subject, yadaEmailContent.body});
return null;
}
int totRecipients = 0;
if (yadaEmailContent.to!=null) {
yadaEmailContent.to = purifyRecipients(yadaEmailContent.to, yadaEmailContent);
totRecipients += yadaEmailContent.to.length;
}
if (yadaEmailContent.cc!=null) {
yadaEmailContent.cc = purifyRecipients(yadaEmailContent.cc, yadaEmailContent);
totRecipients += yadaEmailContent.cc.length;
}
if (yadaEmailContent.bcc!=null) {
yadaEmailContent.bcc = purifyRecipients(yadaEmailContent.bcc, yadaEmailContent);
totRecipients += yadaEmailContent.bcc.length;
}
if ( totRecipients == 0) {
return null;
}
if (!config.isProductionEnvironment()) {
String env = config.getApplicationEnvironment();
yadaEmailContent.subject = "[" + (StringUtils.isBlank(env)?"TEST":env.toUpperCase()) + "] " + yadaEmailContent.subject;
// "<h3>This email has been sent during a system test, please ignore it, thank you.</h3>" + emailContent.body;
}
MimeMessage msg = mailSender.createMimeMessage();
MimeMessageHelper helper = new MimeMessageHelper(msg, true, "UTF-8"); // true = multipart
helper.setFrom(yadaEmailContent.from);
if(yadaEmailContent.replyTo!=null) {
helper.setReplyTo(yadaEmailContent.replyTo);
}
if(yadaEmailContent.to!=null) {
helper.setTo(yadaEmailContent.to);
}
if(yadaEmailContent.cc!=null) {
helper.setCc(yadaEmailContent.cc);
}
if(yadaEmailContent.bcc!=null) {
helper.setBcc(yadaEmailContent.bcc);
}
helper.setSubject(yadaEmailContent.subject);
helper.setText(yadaEmailContent.body, yadaEmailContent.html); // true = html
if (yadaEmailContent.inlineFiles!=null && yadaEmailContent.inlineFileIds!=null && yadaEmailContent.inlineFiles.length==yadaEmailContent.inlineFileIds.length) {
for (int i = 0; i < yadaEmailContent.inlineFiles.length; i++) {
File file = yadaEmailContent.inlineFiles[i];
String fileId = yadaEmailContent.inlineFileIds[i];
helper.addInline(fileId, file);
}
}
if (yadaEmailContent.inlineResources!=null && yadaEmailContent.inlineResourceIds!=null && yadaEmailContent.inlineResources.length==yadaEmailContent.inlineResourceIds.length) {
for (int i = 0; i < yadaEmailContent.inlineResources.length; i++) {
org.springframework.core.io.Resource resource = yadaEmailContent.inlineResources[i];
String resourceId = yadaEmailContent.inlineResourceIds[i];
helper.addInline(resourceId, resource);
}
}
if (yadaEmailContent.attachedFiles!=null && yadaEmailContent.attachedFilenames!=null && yadaEmailContent.attachedFiles.length==yadaEmailContent.attachedFilenames.length) {
for (int i = 0; i < yadaEmailContent.attachedFiles.length; i++) {
File file = yadaEmailContent.attachedFiles[i];
String filename = yadaEmailContent.attachedFilenames[i];
helper.addAttachment(filename, file);
// helper.addInline(filename, file);
}
}
log.info("Sending email to={}, from={}, replyTo={}, cc={}, bcc={}, subject={}", new Object[] {yadaEmailContent.to, yadaEmailContent.from, yadaEmailContent.replyTo, yadaEmailContent.cc, yadaEmailContent.bcc, yadaEmailContent.subject});
log.debug("Email body = {}", yadaEmailContent.body);
return msg;
}
public String timestamp(Locale locale) {
return DateFormatUtils.format(new Date(), "yyyy-MM-dd@HH:mm", locale);
}
public String timestamp() {
return DateFormatUtils.ISO_DATETIME_FORMAT.format(new Date());
}
}
|
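/*
 * Usage sketch for the service above: a Spring component sending a templated email
 * through sendHtmlEmail(toEmail, emailName, subjectParams, templateParams,
 * inlineResources, locale, addTimestamp). The template name "welcome", the recipient
 * address and the "email.subject.welcome" message key are hypothetical; the template
 * must exist under the configured email templates folder for this to work.
 */
package net.yadaframework.web;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class WelcomeEmailSender {
    @Autowired private YadaEmailService yadaEmailService;

    public boolean sendWelcome(String toAddress, String username, Locale locale) {
        Map<String, Object> templateParams = new HashMap<>();
        templateParams.put("username", username); // available as ${username} in the template
        return yadaEmailService.sendHtmlEmail(
            new String[] { toAddress },  // recipients
            "welcome",                   // template name, resolved by getMailTemplateFile()
            new Object[] { username },   // parameters for the "email.subject.welcome" message key
            templateParams,
            null,                        // no inline resources
            locale,
            false);                      // no timestamp appended to the subject
    }
}
|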
package com.jaychang.srv.decoration;
import android.annotation.SuppressLint;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import com.jaychang.srv.SimpleRecyclerView;
import static android.support.v7.widget.RecyclerView.NO_POSITION;
@SuppressWarnings("unchecked")
public class SectionHeaderItemDecoration extends RecyclerView.ItemDecoration {
private SectionHeaderProvider provider;
private SimpleRecyclerView simpleRecyclerView;
private LinearLayoutManager layoutManager;
private int sectionHeight;
private boolean isHeaderOverlapped;
private int firstHeaderTop;
private int secondHeaderTop;
private boolean isClipToPadding;
private Class clazz;
public SectionHeaderItemDecoration(Class clazz, SectionHeaderProvider provider) {
this.clazz = clazz;
this.provider = provider;
}
@SuppressLint("NewApi")
@Override
public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) {
// init
if (simpleRecyclerView == null) {
simpleRecyclerView = ((SimpleRecyclerView) parent);
}
if (layoutManager == null) {
layoutManager = (LinearLayoutManager) parent.getLayoutManager();
}
isClipToPadding = parent.getClipToPadding();
int position = parent.getChildAdapterPosition(view);
if (position == NO_POSITION || !isSectionType(position)) {
return;
}
if (sectionHeight == 0) {
View sectionHeader = getAndMeasureSectionHeader(parent, position);
sectionHeight = sectionHeader.getMeasuredHeight();
}
if (!isSameSection(position)) {
outRect.top = sectionHeight + provider.getSectionHeaderMarginTop(getItem(position), position);
} else {
outRect.top = 0;
}
}
// draw section header
@SuppressLint("NewApi")
@Override
public void onDraw(Canvas canvas, RecyclerView parent, RecyclerView.State state) {
int topPadding = isClipToPadding ? parent.getPaddingTop() : 0;
int left = parent.getPaddingLeft();
int right = parent.getWidth() - parent.getPaddingRight();
boolean isFirst = false;
for (int i = 0; i < parent.getChildCount(); i++) {
View view = parent.getChildAt(i);
int position = parent.getChildAdapterPosition(view);
if (position != NO_POSITION && !isSameSection(position)) {
if (!isSectionType(position)) {
continue;
}
View sectionHeader = getAndMeasureSectionHeader(parent, position);
int top = view.getTop() - sectionHeight;
int bottom = view.getTop();
boolean isHeaderExit = top <= topPadding;
if (!isFirst) {
firstHeaderTop = top;
}
if (!isFirst && position != 0) {
secondHeaderTop = top;
if (isHeaderExit) {
isHeaderOverlapped = false;
} else {
isHeaderOverlapped = secondHeaderTop <= sectionHeight + topPadding;
}
}
isFirst = true;
sectionHeader.layout(left, top, right, bottom);
canvas.save();
if (isClipToPadding && isHeaderExit) {
canvas.clipRect(left, topPadding, right, bottom);
}
canvas.translate(left, top);
sectionHeader.draw(canvas);
canvas.restore();
}
}
}
// draw sticky section header
@SuppressLint("NewApi")
@Override
public void onDrawOver(Canvas canvas, RecyclerView parent, RecyclerView.State state) {
if (!provider.isSticky()) {
return;
}
int position = layoutManager.findFirstVisibleItemPosition();
if (position == NO_POSITION) {
return;
}
if (!isSectionType(position)) {
return;
}
int topPadding = isClipToPadding ? parent.getPaddingTop() : 0;
int left = parent.getPaddingLeft();
int right = parent.getWidth() - parent.getPaddingRight();
int top = topPadding;
int bottom = top + sectionHeight;
// if android:clipToPadding="false", the first header can be scrolled up until it reaches the top.
if (!isClipToPadding && position == 0) {
top = firstHeaderTop > 0 ? firstHeaderTop : 0;
bottom = top + sectionHeight;
}
if (isHeaderOverlapped) {
top = top - topPadding - (sectionHeight - secondHeaderTop);
bottom = top + sectionHeight;
}
boolean isHeaderExit = top <= topPadding;
if (isHeaderExit) {
isHeaderOverlapped = false;
}
View sectionHeader = getAndMeasureSectionHeader(parent, position);
sectionHeader.layout(left, top, right, bottom);
canvas.save();
if (isClipToPadding && isHeaderExit) {
canvas.clipRect(left, topPadding, right, bottom);
}
canvas.translate(left, top);
sectionHeader.draw(canvas);
canvas.restore();
}
private View getAndMeasureSectionHeader(RecyclerView parent, int position) {
View sectionHeader = provider.getSectionHeaderView(getItem(position), position);
int widthSpec = View.MeasureSpec.makeMeasureSpec(parent.getWidth(), View.MeasureSpec.UNSPECIFIED);
int heightSpec = View.MeasureSpec.makeMeasureSpec(parent.getHeight(), View.MeasureSpec.UNSPECIFIED);
sectionHeader.measure(widthSpec, heightSpec);
return sectionHeader;
}
private boolean isSameSection(int position) {
if (position == 0) {
return false;
}
return isSectionType(position) && isSectionType(position - 1) &&
provider.isSameSection(getItem(position), getItem(position - 1));
}
private boolean isSectionType(int position) {
Class<?> aClass = getItem(position).getClass();
// handle realm proxy class
if (aClass.getName().endsWith("Proxy")) {
aClass = aClass.getSuperclass();
}
return clazz.getCanonicalName().equals(aClass.getCanonicalName());
}
private Object getItem(int position) {
return simpleRecyclerView.getCell(position).getItem();
}
}
|
package com.mattunderscore.trees.linked.tree;
import com.mattunderscore.trees.construction.BottomUpTreeBuilder;
import com.mattunderscore.trees.mutable.MutableNode;
import com.mattunderscore.trees.spi.EmptyTreeConstructor;
import com.mattunderscore.trees.spi.TreeConstructor;
import com.mattunderscore.trees.tree.Node;
import org.junit.Before;
import org.junit.Test;
import java.util.Iterator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
public final class LinkedTreeTest {
private LinkedTree<String> tree;
@Before
public void setUp() {
final TreeConstructor<String, LinkedTree<String>> constructor = new LinkedTree.Constructor<>();
tree = constructor.build(
"a",
new LinkedTree[] {
constructor.build(
"b",
new LinkedTree[] {}),
constructor.build(
"c",
new LinkedTree[] {})});
}
@Test
public void structure() {
assertFalse(tree.isEmpty());
assertNotNull(tree.getRoot());
assertFalse(tree.getRoot().isLeaf());
assertEquals(2, tree.getRoot().getNumberOfChildren());
final Iterator<? extends Node<String>> iterator = tree.childIterator();
final Node<String> child0 = iterator.next();
final Node<String> child1 = iterator.next();
assertTrue(child0.isLeaf());
assertTrue(child1.isLeaf());
assertEquals(String.class, child0.getElementClass());
assertEquals("c", child1.getElement());
}
@Test
public void add() {
assertNotNull(tree.getRoot());
final Node<String> newNode = tree.getRoot().addChild("d");
assertEquals(3, tree.getRoot().getNumberOfChildren());
final Iterator<? extends Node<String>> iterator = tree.childIterator();
final Node<String> child0 = iterator.next();
final Node<String> child1 = iterator.next();
final Node<String> child2 = iterator.next();
assertEquals("d", child2.getElement());
}
@Test
public void remove() {
assertNotNull(tree.getRoot());
assertEquals(2, tree.getRoot().getNumberOfChildren());
final Iterator<? extends MutableNode<String>> iterator0 = tree.childIterator();
final MutableNode<String> child0 = iterator0.next();
final MutableNode<String> child1 = iterator0.next();
assertEquals("b", child0.getElement());
assertEquals("c", child1.getElement());
assertFalse(iterator0.hasNext());
assertTrue(tree.getRoot().removeChild(child0));
final Iterator<? extends MutableNode<String>> iterator1 = tree.childIterator();
final MutableNode<String> child2 = iterator1.next();
assertEquals("c", child2.getElement());
assertFalse(iterator1.hasNext());
}
@Test(expected = IndexOutOfBoundsException.class)
public void get() {
assertNotNull(tree.getRoot());
final LinkedTree<String> node0 = tree.getRoot().getChild(0);
final LinkedTree<String> node1 = tree.getRoot().getChild(1);
assertEquals("b", node0.getElement());
assertEquals("c", node1.getElement());
tree.getRoot().getChild(2);
}
@Test
public void set() {
assertNotNull(tree.getRoot());
tree.getRoot().setChild(2, "d");
assertEquals(3, tree.getRoot().getNumberOfChildren());
final Iterator<LinkedTree<String>> iterator = tree.childIterator();
assertEquals("b", iterator.next().getElement());
assertEquals("c", iterator.next().getElement());
assertEquals("d", iterator.next().getElement());
assertFalse(iterator.hasNext());
}
@Test
public void setWithNulls() {
assertNotNull(tree.getRoot());
tree.getRoot().setChild(3, "d");
assertEquals(4, tree.getRoot().getNumberOfChildren());
final Iterator<LinkedTree<String>> iterator = tree.childIterator();
assertEquals("b", iterator.next().getElement());
assertEquals("c", iterator.next().getElement());
assertEquals("d", iterator.next().getElement());
assertFalse(iterator.hasNext());
final Iterator<LinkedTree<String>> structuralIterator = tree.childStructuralIterator();
assertEquals("b", structuralIterator.next().getElement());
assertEquals("c", structuralIterator.next().getElement());
assertNull(structuralIterator.next());
assertEquals("d", structuralIterator.next().getElement());
assertFalse(structuralIterator.hasNext());
}
@Test
public void emptyConstructor() {
final EmptyTreeConstructor<String, LinkedTree<String>> constructor = new LinkedTree.EmptyConstructor<>();
final LinkedTree<String> emptyTree = constructor.build();
assertTrue(emptyTree.isEmpty());
assertNull(emptyTree.getRoot());
assertEquals(LinkedTree.class, constructor.forClass());
}
@Test
public void nodeToTreeConverterTest0() {
final LinkedTree.NodeConverter<String> converter = new LinkedTree.NodeConverter<>();
final LinkedTree<String> newTree = converter.treeFromRootNode(tree);
assertFalse(newTree.isEmpty());
assertNotNull(newTree.getRoot());
assertFalse(newTree.getRoot().isLeaf());
assertEquals(2, newTree.getRoot().getNumberOfChildren());
final Iterator<? extends Node<String>> iterator = newTree.childIterator();
final Node<String> child0 = iterator.next();
final Node<String> child1 = iterator.next();
assertTrue(child0.isLeaf());
assertTrue(child1.isLeaf());
assertEquals(String.class, child0.getElementClass());
assertEquals("c", child1.getElement());
assertEquals(LinkedTree.class, converter.forClass());
}
@Test
public void nodeToTreeConverterTest1() {
final LinkedTree.NodeConverter<String> converter = new LinkedTree.NodeConverter<>();
final LinkedTree<String> newTree = converter.treeFromRootNode(tree.getChild(1));
assertFalse(newTree.isEmpty());
assertNotNull(newTree.getRoot());
assertTrue(newTree.getRoot().isLeaf());
assertEquals("c", newTree.getRoot().getElement());
assertEquals(LinkedTree.class, converter.forClass());
}
@Test
public void converter() {
final LinkedTree.Converter<String> converter = new LinkedTree.Converter<>();
final LinkedTree<String> newTree = converter.build(tree);
assertFalse(newTree.isEmpty());
assertNotNull(newTree.getRoot());
assertFalse(newTree.getRoot().isLeaf());
assertEquals(2, newTree.getRoot().getNumberOfChildren());
final Iterator<? extends Node<String>> iterator = newTree.childIterator();
final Node<String> child0 = iterator.next();
final Node<String> child1 = iterator.next();
assertTrue(child0.isLeaf());
assertTrue(child1.isLeaf());
assertEquals(String.class, child0.getElementClass());
assertEquals("c", child1.getElement());
assertEquals(LinkedTree.class, converter.forClass());
}
}
|
package me.id.webverifylib;
import android.graphics.drawable.ColorDrawable;
import android.os.Build;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.text.Html;
import android.webkit.CookieManager;
import android.webkit.WebView;
public class WebViewActivity extends ActionBarActivity
{
IDmeWebVerify iDmeWebVerify;
private WebView webView;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_web_view);
getSupportActionBar().setTitle(Html.fromHtml("<font color='#2fc073'>Verify With ID.me </font>"));
getSupportActionBar().setBackgroundDrawable(new ColorDrawable(0xff2e3d50));
String scope = getIntent().getStringExtra("scope");
String url = getIntent().getStringExtra("URL");
String clientId = getIntent().getStringExtra("clientID");
String redirectUri = getIntent().getStringExtra("redirectURI");
boolean returnProperties = getIntent().getBooleanExtra("returnProperties", true);
iDmeWebVerify = new IDmeWebVerify(clientId, redirectUri, scope, this, returnProperties);
webView = (WebView) findViewById(R.id.webView);
webView.setWebViewClient(iDmeWebVerify.getWebViewClient());
webView.loadUrl(url);
}
@Override
public void onDestroy()
{
super.onDestroy();
CookieManager cookieManager = CookieManager.getInstance();
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
{
cookieManager.removeAllCookie();
}
else
{
cookieManager.removeAllCookies(null);
}
webView.clearCache(true);
webView.clearHistory();
webView.clearFormData();
webView.destroy();
}
}
|
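/*
 * Sketch of how a caller might start the activity above. The extra keys match the
 * getIntent().getStringExtra(...) calls in WebViewActivity; the concrete values
 * (scope, URL, client id, redirect URI) are placeholders only.
 */
package me.id.webverifylib;
import android.app.Activity;
import android.content.Intent;
public final class WebViewLauncher {
    private WebViewLauncher() {
    }

    public static void launch(Activity caller) {
        Intent intent = new Intent(caller, WebViewActivity.class);
        intent.putExtra("scope", "some-scope");
        intent.putExtra("URL", "https://example.com/oauth/authorize");
        intent.putExtra("clientID", "your-client-id");
        intent.putExtra("redirectURI", "yourapp://callback");
        intent.putExtra("returnProperties", true);
        caller.startActivity(intent);
    }
}
|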
package org.modeshape.repository;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import net.jcip.annotations.ThreadSafe;
import org.modeshape.common.collection.Problems;
import org.modeshape.common.collection.SimpleProblems;
import org.modeshape.common.util.CheckArg;
import org.modeshape.common.util.Logger;
import org.modeshape.common.util.Reflection;
import org.modeshape.graph.ExecutionContext;
import org.modeshape.graph.Graph;
import org.modeshape.graph.JcrLexicon;
import org.modeshape.graph.Location;
import org.modeshape.graph.Node;
import org.modeshape.graph.Subgraph;
import org.modeshape.graph.connector.RepositorySource;
import org.modeshape.graph.observe.Changes;
import org.modeshape.graph.observe.NetChangeObserver;
import org.modeshape.graph.observe.Observer;
import org.modeshape.graph.property.Name;
import org.modeshape.graph.property.Path;
import org.modeshape.graph.property.PathFactory;
import org.modeshape.graph.property.PathNotFoundException;
import org.modeshape.graph.property.Property;
import org.modeshape.graph.property.PropertyType;
import org.modeshape.graph.property.ValueFactories;
import org.modeshape.graph.property.ValueFactory;
import org.modeshape.graph.request.ReadBranchRequest;
import org.modeshape.repository.service.AbstractServiceAdministrator;
import org.modeshape.repository.service.AdministeredService;
import org.modeshape.repository.service.ServiceAdministrator;
/**
* A service that manages the {@link RepositorySource}s defined within a configuration repository.
*/
@ThreadSafe
public class RepositoryService implements AdministeredService, Observer {
/**
* The administrative component for this service.
*
* @author Randall Hauch
*/
protected class Administrator extends AbstractServiceAdministrator {
protected Administrator() {
super(RepositoryI18n.federationServiceName, State.PAUSED);
}
/**
* {@inheritDoc}
*/
@Override
protected boolean doCheckIsTerminated() {
return true;
}
/**
* {@inheritDoc}
*/
@Override
protected void doStart( State fromState ) {
super.doStart(fromState);
startService();
}
/**
* {@inheritDoc}
*
* @see org.modeshape.repository.service.AbstractServiceAdministrator#doShutdown(org.modeshape.repository.service.ServiceAdministrator.State)
*/
@Override
protected void doShutdown( State fromState ) {
super.doShutdown(fromState);
shutdownService();
}
/**
* {@inheritDoc}
*
* @see org.modeshape.repository.service.ServiceAdministrator#awaitTermination(long, java.util.concurrent.TimeUnit)
*/
public boolean awaitTermination( long timeout,
TimeUnit unit ) {
return true;
}
}
private final ExecutionContext context;
private final RepositoryLibrary sources;
private final String configurationSourceName;
private final String configurationWorkspaceName;
private final Path pathToConfigurationRoot;
private final ConfigurationChangeObserver configurationChangeObserver;
private final Administrator administrator = new Administrator();
private final AtomicBoolean started = new AtomicBoolean(false);
/** The problem sink used when encountering problems while starting repositories */
private final Problems problems;
public RepositoryService( RepositorySource configurationSource,
String configurationWorkspaceName,
Path pathToConfigurationRoot,
ExecutionContext context,
Problems problems ) {
CheckArg.isNotNull(configurationSource, "configurationSource");
CheckArg.isNotNull(context, "context");
PathFactory pathFactory = context.getValueFactories().getPathFactory();
if (pathToConfigurationRoot == null) pathToConfigurationRoot = pathFactory.create("/dna:system");
if (problems == null) problems = new SimpleProblems();
Path sourcesPath = pathFactory.create(pathToConfigurationRoot, ModeShapeLexicon.SOURCES);
this.sources = new RepositoryLibrary(configurationSource, configurationWorkspaceName, sourcesPath, context);
this.sources.addSource(configurationSource);
this.pathToConfigurationRoot = pathToConfigurationRoot;
this.configurationSourceName = configurationSource.getName();
this.configurationWorkspaceName = configurationWorkspaceName;
this.context = context;
this.problems = problems;
this.configurationChangeObserver = new ConfigurationChangeObserver();
}
/**
* {@inheritDoc}
*/
public final ServiceAdministrator getAdministrator() {
return this.administrator;
}
/**
* @return configurationSourceName
*/
public final String getConfigurationSourceName() {
return configurationSourceName;
}
/**
* @return configurationWorkspaceName
*/
public final String getConfigurationWorkspaceName() {
return configurationWorkspaceName;
}
/**
* Get the library of {@link RepositorySource} instances used by this service.
*
* @return the RepositorySource library; never null
*/
public final RepositoryLibrary getRepositoryLibrary() {
return sources;
}
/**
* @return pathToConfigurationRoot
*/
protected final Path getPathToConfigurationRoot() {
return pathToConfigurationRoot;
}
/**
* @return the execution context
*/
public final ExecutionContext getExecutionEnvironment() {
return context;
}
public String getJndiName() {
// TODO
return null;
}
protected synchronized void startService() {
if (this.started.get() == false) {
// Read the configuration ...
// Read the configuration and repository source nodes (children under "/dna:sources") ...
Graph graph = Graph.create(getConfigurationSourceName(), sources, context);
Path pathToSourcesNode = context.getValueFactories().getPathFactory().create(pathToConfigurationRoot,
ModeShapeLexicon.SOURCES);
try {
String workspaceName = getConfigurationWorkspaceName();
if (workspaceName != null) graph.useWorkspace(workspaceName);
Subgraph sourcesGraph = graph.getSubgraphOfDepth(ReadBranchRequest.NO_MAXIMUM_DEPTH).at(pathToSourcesNode);
// Iterate over each of the children, and create the RepositorySource ...
for (Location location : sourcesGraph.getRoot().getChildren()) {
sources.addSource(createRepositorySource(sourcesGraph, location, problems));
}
} catch (PathNotFoundException e) {
// No sources were found, and this is okay!
} catch (Throwable err) {
throw new ModeShapeConfigurationException(RepositoryI18n.errorStartingRepositoryService.text(), err);
}
this.started.set(true);
}
}
protected synchronized void shutdownService() {
// Close the repository library ...
this.sources.getAdministrator().shutdown();
}
/**
* Instantiate the {@link RepositorySource} described by the supplied properties.
*
* @param subgraph the subgraph containing the configuration information for this {@link RepositorySource}
* @param location the location of the properties to apply to the new {@link RepositorySource}
* @param problems the problems container in which any problems should be reported; never null
* @return the repository source instance, or null if it could not be created
*/
protected RepositorySource createRepositorySource( Subgraph subgraph,
Location location,
Problems problems ) {
return (RepositorySource)createInstanceFromProperties(subgraph, location, problems, true);
}
/**
* Instantiate the {@link Object} described by the supplied properties.
*
* @param subgraph the subgraph containing the configuration information for this instance
* @param location the location of the properties to apply to the new instance
* @param problems the problems container in which any problems should be reported; never null
* @param mustHaveClassName indicates that the properties must include a class name; if true a problem will be added for
* instances that do not have a class name specified
* @return the instance, or null if it could not be created
*/
protected Object createInstanceFromProperties( Subgraph subgraph,
Location location,
Problems problems,
boolean mustHaveClassName ) {
ValueFactories valueFactories = context.getValueFactories();
ValueFactory<String> stringFactory = valueFactories.getStringFactory();
Node node = subgraph.getNode(location);
assert location.hasPath();
Path path = node.getLocation().getPath();
Map<Name, Property> properties = node.getPropertiesByName();
// Get the classname and classpath ...
Property classnameProperty = properties.get(ModeShapeLexicon.CLASSNAME);
Property classpathProperty = properties.get(ModeShapeLexicon.CLASSPATH);
if (classnameProperty == null) {
if (mustHaveClassName) {
problems.addError(RepositoryI18n.requiredPropertyIsMissingFromNode, ModeShapeLexicon.CLASSNAME, path);
}
return null;
}
// If the classpath property is null or empty, the default classpath will be used
if (problems.hasErrors()) return null;
// Create the instance ...
String classname = stringFactory.create(classnameProperty.getValues().next());
String[] classpath = classpathProperty == null ? new String[] {} : stringFactory.create(classpathProperty.getValuesAsArray());
ClassLoader classLoader = context.getClassLoader(classpath);
Object instance = null;
try {
Class<?> sourceClass = classLoader.loadClass(classname);
instance = sourceClass.newInstance();
} catch (ClassNotFoundException err) {
problems.addError(err, RepositoryI18n.unableToLoadClassUsingClasspath, classname, classpath);
} catch (IllegalAccessException err) {
problems.addError(err, RepositoryI18n.unableToAccessClassUsingClasspath, classname, classpath);
} catch (Throwable err) {
problems.addError(err, RepositoryI18n.unableToInstantiateClassUsingClasspath, classname, classpath);
}
if (instance == null) return null;
// We need to set the name using the local name of the node...
Property nameProperty = context.getPropertyFactory().create(JcrLexicon.NAME,
path.getLastSegment().getName().getLocalName());
properties.put(JcrLexicon.NAME, nameProperty);
// Attempt to set the configuration information as bean properties,
// if they exist on the object and are not already set to some value ...
setBeanPropertyIfExistsAndNotSet(instance, "configurationSourceName", getConfigurationSourceName());
setBeanPropertyIfExistsAndNotSet(instance, "configurationWorkspaceName", getConfigurationWorkspaceName());
setBeanPropertyIfExistsAndNotSet(instance, "configurationPath", stringFactory.create(path));
// Now set all the properties that we can, ignoring any property that doesn't fit the pattern ...
Reflection reflection = new Reflection(instance.getClass());
for (Map.Entry<Name, Property> entry : properties.entrySet()) {
Name propertyName = entry.getKey();
Property property = entry.getValue();
String javaPropertyName = propertyName.getLocalName();
if (property.isEmpty()) continue;
Object value = null;
Method setter = null;
try {
setter = reflection.findFirstMethod("set" + javaPropertyName, false);
if (setter == null) continue;
// Determine the type of the one parameter ...
Class<?>[] parameterTypes = setter.getParameterTypes();
if (parameterTypes.length != 1) continue; // not a valid JavaBean property
Class<?> paramType = parameterTypes[0];
PropertyType allowedType = PropertyType.discoverType(paramType);
if (allowedType == null) continue; // assume not a JavaBean property with usable type
ValueFactory<?> factory = context.getValueFactories().getValueFactory(allowedType);
if (paramType.isArray()) {
if (paramType.getComponentType().isArray()) continue; // array of array, which we don't do
Object[] values = factory.create(property.getValuesAsArray());
// Convert to an array of primitives if that's what the signature requires ...
Class<?> componentType = paramType.getComponentType();
if (Integer.TYPE.equals(componentType)) {
int[] primitiveValues = new int[values.length];
for (int i = 0; i != values.length; ++i) {
primitiveValues[i] = ((Long)values[i]).intValue();
}
value = primitiveValues;
} else if (Short.TYPE.equals(componentType)) {
short[] primitiveValues = new short[values.length];
for (int i = 0; i != values.length; ++i) {
primitiveValues[i] = ((Long)values[i]).shortValue();
}
value = primitiveValues;
} else if (Long.TYPE.equals(componentType)) {
long[] primitiveValues = new long[values.length];
for (int i = 0; i != values.length; ++i) {
primitiveValues[i] = ((Long)values[i]).longValue();
}
value = primitiveValues;
} else if (Double.TYPE.equals(componentType)) {
double[] primitiveValues = new double[values.length];
for (int i = 0; i != values.length; ++i) {
primitiveValues[i] = ((Double)values[i]).doubleValue();
}
value = primitiveValues;
} else if (Float.TYPE.equals(componentType)) {
float[] primitiveValues = new float[values.length];
for (int i = 0; i != values.length; ++i) {
primitiveValues[i] = ((Double)values[i]).floatValue();
}
value = primitiveValues;
} else if (Boolean.TYPE.equals(componentType)) {
boolean[] primitiveValues = new boolean[values.length];
for (int i = 0; i != values.length; ++i) {
primitiveValues[i] = ((Boolean)values[i]).booleanValue();
}
value = primitiveValues;
} else {
value = values;
}
} else {
value = factory.create(property.getFirstValue());
// Convert to the correct primitive, if needed ...
if (Integer.TYPE.equals(paramType)) {
value = new Integer(((Long)value).intValue());
} else if (Short.TYPE.equals(paramType)) {
value = new Short(((Long)value).shortValue());
} else if (Float.TYPE.equals(paramType)) {
value = new Float(((Double)value).floatValue());
}
}
// Invoke the method ...
String msg = "Setting property {0} to {1} on source at {2} in configuration repository {3} in workspace {4}";
Logger.getLogger(getClass()).trace(msg,
javaPropertyName,
value,
path,
configurationSourceName,
configurationWorkspaceName);
setter.invoke(instance, value);
} catch (SecurityException err) {
Logger.getLogger(getClass()).debug(err, "Error invoking {0}.{1}", instance.getClass(), setter);
} catch (IllegalArgumentException err) {
// Do nothing ... assume not a JavaBean property (but log)
String msg = "Invalid argument invoking {0} with parameter {1} on source at {2} in configuration repository {3} in workspace {4}";
Logger.getLogger(getClass()).debug(err,
msg,
setter,
value,
path,
configurationSourceName,
configurationWorkspaceName);
} catch (IllegalAccessException err) {
Logger.getLogger(getClass()).debug(err, "Error invoking {0}.{1}", instance.getClass(), setter);
} catch (InvocationTargetException err) {
// Do nothing ... assume not a JavaBean property (but log)
String msg = "Error invoking {0} with parameter {1} on source at {2} in configuration repository {3} in workspace {4}";
Logger.getLogger(getClass()).debug(err.getTargetException(),
msg,
setter,
value,
path,
configurationSourceName,
configurationWorkspaceName);
}
}
// Check for nested instances in the configuration
for (Location childLocation : node.getChildren()) {
assert childLocation.hasPath();
Path childPath = childLocation.getPath();
Name childName = childPath.getLastSegment().getName();
Object value = createInstanceFromProperties(subgraph, childLocation, problems, false);
if (problems.hasErrors()) {
return null;
}
String javaPropertyName = childName.getLocalName();
Method setter = reflection.findFirstMethod("set" + javaPropertyName, false);
if (setter == null) continue;
try {
// Invoke the method ...
String msg = "Setting property {0} to {1} on object at {2} in configuration repository {3} in workspace {4}";
Logger.getLogger(getClass()).trace(msg,
javaPropertyName,
value,
childPath,
configurationSourceName,
configurationWorkspaceName);
setter.invoke(instance, value);
} catch (SecurityException err) {
Logger.getLogger(getClass()).debug(err, "Error invoking {0}.{1}", instance.getClass(), setter);
} catch (IllegalArgumentException err) {
// Do nothing ... assume not a JavaBean property (but log)
String msg = "Invalid argument invoking {0} with parameter {1} on object at {2} in configuration repository {3} in workspace {4}";
Logger.getLogger(getClass()).debug(err,
msg,
setter,
value,
childPath,
configurationSourceName,
configurationWorkspaceName);
} catch (IllegalAccessException err) {
Logger.getLogger(getClass()).debug(err, "Error invoking {0}.{1}", instance.getClass(), setter);
} catch (InvocationTargetException err) {
// Do nothing ... assume not a JavaBean property (but log)
String msg = "Error invoking {0} with parameter {1} on source at {2} in configuration repository {3} in workspace {4}";
Logger.getLogger(getClass()).debug(err.getTargetException(),
msg,
setter,
value,
childPath,
configurationSourceName,
configurationWorkspaceName);
}
}
return instance;
}
protected boolean setBeanPropertyIfExistsAndNotSet( Object target,
String propertyName,
Object value ) {
Reflection reflection = new Reflection(target.getClass());
try {
if (reflection.invokeGetterMethodOnTarget(propertyName, target) == null) {
reflection.invokeSetterMethodOnTarget(propertyName, target, value);
return true;
}
return false;
} catch (Exception e) {
// Log that the property was not found ...
Logger.getLogger(getClass())
.debug("Unknown property '{0}' on '{1}' class", propertyName, target.getClass().getName());
return false;
}
}
/**
* {@inheritDoc}
*/
@Override
public boolean equals( Object obj ) {
if (obj == this) return true;
return false;
}
/**
* {@inheritDoc}
*
* @see org.modeshape.graph.observe.Observer#notify(org.modeshape.graph.observe.Changes)
*/
public void notify( Changes changes ) {
// Forward the changes to the net change observer ...
this.configurationChangeObserver.notify(changes);
}
protected class ConfigurationChangeObserver extends NetChangeObserver {
/**
* {@inheritDoc}
*
* @see org.modeshape.graph.observe.NetChangeObserver#notify(org.modeshape.graph.observe.NetChangeObserver.NetChanges)
*/
@Override
protected void notify( NetChanges netChanges ) {
if (!getConfigurationSourceName().equals(netChanges.getSourceName())) return;
for (NetChange change : netChanges.getNetChanges()) {
if (!getConfigurationWorkspaceName().equals(change.getRepositoryWorkspaceName())) return;
Path changedPath = change.getPath();
Path configPath = getPathToConfigurationRoot();
if (!changedPath.isAtOrBelow(getPathToConfigurationRoot())) return;
boolean changedNodeIsPotentiallySource = configPath.size() + 1 == changedPath.size();
// At this point, we know that something inside the configuration changed, so figure out what happened ...
if (changedNodeIsPotentiallySource && change.includes(ChangeType.NODE_REMOVED)) {
// Then potentially a source with the supplied name has been removed ...
String sourceName = changedPath.getLastSegment().getName().getLocalName();
getRepositoryLibrary().removeSource(sourceName);
} else {
// The add/change/remove is either at or below a source, so try to create a new source for it ...
Path sourcePath = changedNodeIsPotentiallySource ? changedPath : changedPath.subpath(0, configPath.size() + 1);
Problems problems = new SimpleProblems();
// Now read the node and create the source ...
Graph graph = Graph.create(getConfigurationSourceName(), getRepositoryLibrary(), getExecutionEnvironment());
try {
String workspaceName = getConfigurationWorkspaceName();
if (workspaceName != null) graph.useWorkspace(workspaceName);
Subgraph subgraph = graph.getSubgraphOfDepth(ReadBranchRequest.NO_MAXIMUM_DEPTH).at(sourcePath);
RepositorySource source = createRepositorySource(subgraph, Location.create(sourcePath), problems);
if (source != null) {
// It was the config for a source, so try to add or replace an existing source ...
getRepositoryLibrary().addSource(source, true);
}
} catch (PathNotFoundException e) {
// No source was found, and this is okay (since it may have just been deleted)...
String sourceName = changedPath.getLastSegment().getName().getLocalName();
getRepositoryLibrary().removeSource(sourceName);
}
}
}
}
}
}
|
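/*
 * Self-contained sketch of the reflection pattern used by createInstanceFromProperties
 * above: find a single-argument "setX" method by name and invoke it with the property
 * value. The SampleSource bean and the property map are hypothetical; the real code
 * resolves names, types and values through the ModeShape graph API.
 */
package sketches;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
public class BeanPropertySketch {
    public static class SampleSource {
        private String name;
        private int retryLimit;
        public void setName(String name) { this.name = name; }
        public void setRetryLimit(int retryLimit) { this.retryLimit = retryLimit; }
        @Override public String toString() { return "SampleSource[name=" + name + ", retryLimit=" + retryLimit + "]"; }
    }

    public static void main(String[] args) throws Exception {
        Map<String, Object> properties = new HashMap<>();
        properties.put("name", "my-source");
        properties.put("retryLimit", 3);

        SampleSource instance = new SampleSource();
        for (Map.Entry<String, Object> entry : properties.entrySet()) {
            // Build the setter name from the property name ("name" -> "setName")
            String setterName = "set" + Character.toUpperCase(entry.getKey().charAt(0)) + entry.getKey().substring(1);
            for (Method method : instance.getClass().getMethods()) {
                if (method.getName().equals(setterName) && method.getParameterTypes().length == 1) {
                    method.invoke(instance, entry.getValue()); // invoke the first matching single-argument setter
                    break;
                }
            }
        }
        System.out.println(instance);
    }
}
|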
package net.zomis.spring.games.generic;
import net.zomis.spring.games.messages.GameMoveResult;
import java.util.function.Function;
public class GroovyGameHelper implements GameHelper<Object, Object> {
public Function<Object, Object> constructor;
@Override
public Object constructGame(Object configuration) {
return constructor.apply(configuration);
}
@Override
public void addPlayer(Object playerConfig) {
}
@Override
public Object start() {
return null;
}
@Override
public GameMoveResult performAction(int playerIndex, Object action) {
return null;
}
@Override
public Object gameDetails(Object game) {
return null;
}
}
|
package org.motechproject.ivr.kookoo;
import org.apache.commons.lang.StringUtils;
import org.motechproject.server.service.ivr.CallDirection;
import org.motechproject.util.Cookies;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.HashMap;
import java.util.Map;
public class KooKooIVRContext {
private KookooRequest kooKooRequest;
private HttpServletRequest request;
private Cookies cookies;
private static final String CURRENT_DECISION_TREE_POSITION = "current_decision_tree_position";
public static final String PREFERRED_LANGUAGE_CODE = "preferred_lang_code";
private static final String CALL_DETAIL_RECORD_ID = "call_detail_record_id";
public static final String TREE_NAME_KEY = "tree_name";
public static final String EXTERNAL_ID = "external_id";
public static final String POUND_SYMBOL = "%23";
public static final String CALL_ID = "call_id";
protected KooKooIVRContext() {
}
public KooKooIVRContext(KookooRequest kooKooRequest, HttpServletRequest request, HttpServletResponse response) {
this.kooKooRequest = kooKooRequest;
this.request = request;
cookies = new Cookies(request, response);
}
public HttpServletRequest httpRequest() {
return request;
}
public String userInput() {
return StringUtils.remove(kooKooRequest.getData(), POUND_SYMBOL);
}
public String currentTreePosition() {
String currentPosition = cookies.getValue(CURRENT_DECISION_TREE_POSITION);
return currentPosition == null ? "" : currentPosition;
}
public void currentDecisionTreePath(String path) {
cookies.add(CURRENT_DECISION_TREE_POSITION, path);
}
public String callId() {
String callId = cookies.getValue(CALL_ID);
return callId == null ? kooKooRequest.getSid() : callId;
}
public void callId(String callid) {
cookies.add(CALL_ID, callid);
}
public String preferredLanguage() {
return cookies.getValue(PREFERRED_LANGUAGE_CODE);
}
public void preferredLanguage(String languageCode) {
cookies.add(PREFERRED_LANGUAGE_CODE, languageCode);
}
public void callDetailRecordId(String kooKooCallDetailRecordId) {
cookies.add(CALL_DETAIL_RECORD_ID, kooKooCallDetailRecordId);
}
public String callDetailRecordId() {
return cookies.getValue(CALL_DETAIL_RECORD_ID);
}
public void treeName(String treeName) {
request.setAttribute(TREE_NAME_KEY, treeName);
}
public String treeName() {
return (String) request.getAttribute(TREE_NAME_KEY);
}
public KookooRequest kooKooRequest() {
return kooKooRequest;
}
public Cookies cookies() {
return cookies;
}
public String externalId() {
return (String) request.getSession().getAttribute(EXTERNAL_ID);
}
public void invalidateSession() {
request.getSession().invalidate();
}
public String ivrEvent() {
return kooKooRequest.getEvent();
}
public String callerId() {
return kooKooRequest.getCid();
}
public CallDirection callDirection() {
return kooKooRequest.getCallDirection();
}
public void initialize() {
callId(kooKooRequest.getSid());
}
public void setDefaults() {
kooKooRequest.setDefaults();
}
public String allCookies() {
return cookies.toString();
}
}
|
import java.util.Random;
import org.junit.Test;
import static org.junit.Assert.*;
public class ProgramChecker {
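/**
* Blum-style program checker for a graph isomorphism decider: a claimed isomorphism is
* verified edge by edge, otherwise the decider is challenged k times with randomly
* permuted copies of the adjacency matrices.
*/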
public boolean Blum(Graph G1, Graph G2, BitMatrix A1, BitMatrix A2, int k){
int n= G1.V.length;
Map map= Graph.areIsomorphic(G1,G2);
if (map != null && map.length == n){
if (Graph.checkAllEdges(G1, G2, map)){
return true;
}
else{
return false;
}
}
else{
Random random= new Random();
for (int i= 0; i < k; i++){
int coin= random.nextInt(2);
if (coin == 0){
BitMatrix permMatrix= PermMatrix.makeRandom(n);
BitMatrix permMatrixT= PermMatrix.transpose(permMatrix);
A2= PermMatrix.multiply(permMatrix, A2);
A2= PermMatrix.multiply(A2, permMatrixT);
G2= new Graph(A2);
map= Graph.areIsomorphic(G1, G2);
if (map != null && map.length != n){
return false;
}
}
if (coin == 1){
BitMatrix permMatrix= PermMatrix.makeRandom(n);
BitMatrix permMatrixT= PermMatrix.transpose(permMatrix);
A1= PermMatrix.multiply(permMatrix, A1);
A1= PermMatrix.multiply(A1, permMatrixT);
G1= new Graph(A1);
map= Graph.areIsomorphic(G1, G2);
if (map != null && map.length != n){
return false;
}
}
}
return true;
}
}
@Test
public void ProgramCheckerTest() throws Exception {
int n= 128;
int t= 10;
int p= 1;
int q= 2;
int k= 100;
/*Test isomorphic graphs*/
for (int i= 0; i < t; i++){
BitMatrix adjMatrix= AdjMatrix.makeRandom(n, p, q);
BitMatrix permMatrix= PermMatrix.makeRandom(n);
BitMatrix permMatrixT= PermMatrix.transpose(permMatrix);
BitMatrix adjMatrixPerm= PermMatrix.multiply(permMatrix, adjMatrix);
adjMatrixPerm= PermMatrix.multiply(adjMatrixPerm, permMatrixT);
Graph G1= new Graph(adjMatrix);
Graph G2= new Graph(adjMatrixPerm);
if (!Blum(G1, G2, adjMatrix, adjMatrixPerm, k)){
System.out.println("Program check failed on isomorphic graphs!");
fail();
}
System.out.println("Test " + i +" is finished!");
}
/*Test non-isomorphic graphs*/
for (int i= 0; i < t; i++){
BitMatrix adjMatrix= AdjMatrix.makeRandom(n);
BitMatrix adjMatrix2= AdjMatrix.makeRandom(n);
Graph G1= new Graph(adjMatrix);
Graph G2= new Graph(adjMatrix2);
if (!Blum(G1, G2, adjMatrix, adjMatrix2, k)){
System.out.println("Program check failed on non-isomorphic graphs!");
fail();
}
System.out.println("Test " + i +" is finished!");
}
}
}
|
package org.innovateuk.ifs.config.cache;
import io.lettuce.core.ClientOptions;
import io.lettuce.core.ClientOptions.DisconnectedBehavior;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.cache.CacheProperties;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.data.redis.RedisProperties;
import org.springframework.cache.Cache;
import org.springframework.cache.annotation.CachingConfigurerSupport;
import org.springframework.cache.interceptor.CacheErrorHandler;
import org.springframework.cache.interceptor.SimpleCacheErrorHandler;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.connection.RedisClusterConfiguration;
import org.springframework.data.redis.connection.RedisPassword;
import org.springframework.data.redis.connection.RedisStandaloneConfiguration;
import org.springframework.data.redis.connection.lettuce.LettuceClientConfiguration;
import org.springframework.data.redis.connection.lettuce.LettuceClientConfiguration.LettuceClientConfigurationBuilder;
import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory;
import org.springframework.data.redis.serializer.RedisSerializationContext;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import java.time.Duration;
import static java.time.temporal.ChronoUnit.SECONDS;
@Configuration
public class CacheConfiguration extends CachingConfigurerSupport {
private static final Log LOG = LogFactory.getLog(CacheConfiguration.class);
@Value("${ifs.data.service.cache.ttl.seconds}")
private int ttlSeconds;
@Value("${ifs.data.service.redis.cluster}")
private boolean cluster;
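/**
* Builds the Lettuce connection factory, rejecting commands while disconnected and
* choosing cluster or standalone mode depending on the configured cluster flag.
*/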
@Bean
@ConditionalOnProperty(value = "spring.cache.type", havingValue = "redis")
public LettuceConnectionFactory redisConnectionFactory(RedisProperties redisProperties) {
LettuceClientConfigurationBuilder builder = LettuceClientConfiguration.builder()
.clientOptions(ClientOptions.builder()
.disconnectedBehavior(DisconnectedBehavior.REJECT_COMMANDS)
.build());
if (redisProperties.isSsl()) {
builder.useSsl();
}
if (cluster) {
return clusterConfiguration(redisProperties, builder);
} else {
return standaloneConfiguration(redisProperties, builder);
}
}
private LettuceConnectionFactory clusterConfiguration(RedisProperties redisProperties, LettuceClientConfigurationBuilder builder) {
RedisClusterConfiguration clusterConfiguration = new RedisClusterConfiguration();
clusterConfiguration.clusterNode(redisProperties.getHost(), redisProperties.getPort());
clusterConfiguration.setPassword(RedisPassword.of(redisProperties.getPassword()));
clusterConfiguration.setMaxRedirects(3);
return new LettuceConnectionFactory(clusterConfiguration, builder.build());
}
private LettuceConnectionFactory standaloneConfiguration(RedisProperties redisProperties, LettuceClientConfigurationBuilder builder) {
RedisStandaloneConfiguration config = new RedisStandaloneConfiguration(redisProperties.getHost(), redisProperties.getPort());
config.setPassword(RedisPassword.of(redisProperties.getPassword()));
return new LettuceConnectionFactory(config, builder.build());
}
@Bean
public ServiceResultWrappingSerializer serviceResultWrappingSerializer() {
return new ServiceResultWrappingSerializer();
}
@Bean
public RedisCacheConfiguration redisCacheConfiguration(ServiceResultWrappingSerializer serviceResultWrappingSerializer, CacheProperties cacheProperties) {
return RedisCacheConfiguration.defaultCacheConfig()
.serializeKeysWith(
RedisSerializationContext.SerializationPair.fromSerializer(
new StringRedisSerializer()
)
)
.serializeValuesWith(
RedisSerializationContext.SerializationPair.fromSerializer(
serviceResultWrappingSerializer
)
)
.entryTtl(Duration.of(ttlSeconds, SECONDS))
.prefixKeysWith(cacheProperties.getRedis().getKeyPrefix());
}
@Override
public CacheErrorHandler errorHandler() {
return new SimpleCacheErrorHandler() {
@Override
public void handleCacheGetError(RuntimeException exception, Cache cache, Object key) {
LOG.debug("Failed to get cache item with key " + key.toString(), exception);
}
@Override
public void handleCachePutError(RuntimeException exception, Cache cache, Object key, Object value) {
LOG.error("Failed to put cache item with key " + key.toString(), exception);
}
@Override
public void handleCacheEvictError(RuntimeException exception, Cache cache, Object key) {
LOG.error("Failed to evict cache item with key " + key.toString(), exception);
}
};
}
}
|
package org.ow2.chameleon.fuchsia.bluetooth.importer;
import org.apache.felix.ipojo.*;
import org.apache.felix.ipojo.annotations.*;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.ow2.chameleon.fuchsia.core.component.AbstractImporterComponent;
import org.ow2.chameleon.fuchsia.core.declaration.ImportDeclaration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import static org.apache.felix.ipojo.Factory.INSTANCE_NAME_PROPERTY;
import static org.ow2.chameleon.fuchsia.core.declaration.Constants.PROTOCOL_NAME;
@Component(name = "BluetoothImporter-Factory")
@Provides(specifications = org.ow2.chameleon.fuchsia.core.component.ImporterService.class)
// FIXME ADD LOCKS !!
public class BluetoothImporter extends AbstractImporterComponent {
// FIXME scope metadata
@ServiceProperty(name = TARGET_FILTER_PROPERTY, value = "(&(" + PROTOCOL_NAME + "=bluetooth)(scope=generic))")
private String filter;
@ServiceProperty(name = INSTANCE_NAME_PROPERTY)
private String name;
private final BundleContext m_bundleContext;
/**
* logger
*/
private final Logger logger = LoggerFactory.getLogger(this.getClass());
private final Map<String, Factory> bluetoothProxiesFactories;
private final Set<ImportDeclaration> unresolvedImportDeclarations;
private final Map<ImportDeclaration, ComponentInstance> resolvedImportDeclarations;
/**
* Constructor in order to have the bundle context injected
*
* @param bundleContext
*/
public BluetoothImporter(BundleContext bundleContext) {
m_bundleContext = bundleContext;
bluetoothProxiesFactories = new HashMap<String, Factory>();
unresolvedImportDeclarations = new HashSet<ImportDeclaration>();
resolvedImportDeclarations = new HashMap<ImportDeclaration, ComponentInstance>();
}
@Override
@Invalidate
protected void stop() {
logger.info("Stop Dynamo Fuchsia Importer");
super.stop();
}
@Override
@Validate
protected void start() {
logger.info("Start Dynamo Fuchsia Importer");
super.start();
}
/**
* Called when an import declaration matches the LDAP filter
*
* @param importDeclaration : the matching import declaration
*/
@Override
protected void useImportDeclaration(ImportDeclaration importDeclaration) {
logger.warn("useImportDeclaration called for : " + importDeclaration.toString());
String fn = (String) importDeclaration.getMetadata().get("bluetooth.device.friendlyname");
Factory factory = bluetoothProxiesFactories.get(fn);
if (factory != null) {
ComponentInstance proxy = createProxy(importDeclaration, factory);
resolvedImportDeclarations.put(importDeclaration, proxy);
} else {
unresolvedImportDeclarations.add(importDeclaration);
}
}
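/**
* Creates an iPOJO proxy instance from the given factory, passing the import declaration
* metadata as the instance configuration; returns null if instantiation fails.
*/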
private ComponentInstance createProxy(ImportDeclaration importDeclaration, Factory f) {
logger.warn("CreateProxy called for : " + importDeclaration.toString());
ComponentInstance ci = null;
Dictionary conf = new Hashtable();
conf.put("metadata", importDeclaration.getMetadata());
if (f != null) {
try {
ci = f.createComponentInstance(conf);
} catch (UnacceptableConfiguration unacceptableConfiguration) {
logger.error("Cannot create instance of Factory " + " : ", unacceptableConfiguration);
} catch (MissingHandlerException e) {
logger.error("Cannot create instance of Factory " + " : ", e);
} catch (ConfigurationException e) {
logger.error("Cannot create instance of Factory " + " : ", e);
}
}
return ci;
}
/**
* Called when an import declaration leaves the OSGi registry
*
* @param importDeclaration : the leaving import declaration
*/
@Override
protected void denyImportDeclaration(ImportDeclaration importDeclaration) {
logger.debug("Bluetooth Importer destroy a proxy for " + importDeclaration);
// FIXME : destroy proxy
}
@Bind(aggregate = true, optional = true, filter = "(protocol=bluetooth)")
private void bindBluetoothProxyFactories(Factory f, ServiceReference<Factory> sr) {
logger.warn("Found one factory : " + f.getName());
String friendlyName = (String) sr.getProperty("device_name");
bluetoothProxiesFactories.put(friendlyName, f);
ImportDeclaration iDec = null;
Iterator<ImportDeclaration> iterator = unresolvedImportDeclarations.iterator();
while (iterator.hasNext()) {
iDec = iterator.next();
// FIXME remove magic string
String fn = (String) iDec.getMetadata().get("bluetooth.device.friendlyname");
if (fn.startsWith(friendlyName)) {
ComponentInstance proxy = createProxy(iDec, f);
iterator.remove();
resolvedImportDeclarations.put(iDec, proxy);
}
}
}
@Unbind
private void unbindBluetoothProxyFactories(Factory f, ServiceReference<Factory> sr) {
bluetoothProxiesFactories.remove((String) sr.getProperty("device_name"));
// FIXME destroy proxy
}
public List<String> getConfigPrefix() {
List<String> l = new ArrayList<String>();
l.add("bluetooth,*");
return l;
}
public String getName() {
return name;
}
}
|
package org.caleydo.view.pathway;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import javax.media.opengl.GL2;
import org.caleydo.core.data.collection.dimension.DataRepresentation;
import org.caleydo.core.data.container.Average;
import org.caleydo.core.data.container.ContainerStatistics;
import org.caleydo.core.data.selection.EventBasedSelectionManager;
import org.caleydo.core.data.selection.SelectionManager;
import org.caleydo.core.data.selection.SelectionType;
import org.caleydo.core.data.virtualarray.DimensionVirtualArray;
import org.caleydo.core.data.virtualarray.RecordVirtualArray;
import org.caleydo.core.data.virtualarray.VirtualArray;
import org.caleydo.core.id.IDMappingManager;
import org.caleydo.core.manager.GeneralManager;
import org.caleydo.core.util.base.IUniqueObject;
import org.caleydo.core.util.collection.Pair;
import org.caleydo.core.util.mapping.color.ColorMapper;
import org.caleydo.core.view.opengl.camera.ViewFrustum;
import org.caleydo.core.view.opengl.canvas.EDetailLevel;
import org.caleydo.core.view.opengl.picking.PickingType;
import org.caleydo.datadomain.genetic.GeneticDataDomain;
import org.caleydo.datadomain.pathway.graph.PathwayGraph;
import org.caleydo.datadomain.pathway.graph.item.edge.PathwayRelationEdgeRep;
import org.caleydo.datadomain.pathway.graph.item.vertex.EPathwayVertexShape;
import org.caleydo.datadomain.pathway.graph.item.vertex.EPathwayVertexType;
import org.caleydo.datadomain.pathway.graph.item.vertex.PathwayVertex;
import org.caleydo.datadomain.pathway.graph.item.vertex.PathwayVertexRep;
import org.caleydo.datadomain.pathway.manager.PathwayItemManager;
/**
* OpenGL2 pathway manager.
*
* @author Marc Streit
*/
public class GLPathwayContentCreator {
private GeneralManager generalManager;
private static final float Z_OFFSET = 0.01f;
private GLPathway glPathwayView;
private int enzymeNodeDisplayListId = -1;
private int compoundNodeDisplayListId = -1;
private int framedEnzymeNodeDisplayListId = -1;
private int framedCompoundNodeDisplayListId = -1;
private int upscaledFilledEnzymeNodeDisplayListId = -1;
private int upscaledFramedEnzymeNodeDisplayListID = -1;
private boolean enableEdgeRendering = false;
private boolean enableGeneMapping = true;
private HashMap<PathwayGraph, Integer> hashPathway2VerticesDisplayListId;
private HashMap<PathwayGraph, Integer> hashPathway2EdgesDisplayListId;
private ColorMapper colorMapper;
private SelectionManager internalSelectionManager;
private ArrayList<Integer> selectedEdgeRepId;
private IDMappingManager idMappingManager;
private PathwayItemManager pathwayItemManager;
private GeneticDataDomain geneticDataDomain;
private DataRepresentation dimensionDataRepresentation = DataRepresentation.NORMALIZED;
private VirtualArray<?, ?, ?> selectedSamplesVA;
/**
* Constructor.
*/
public GLPathwayContentCreator(ViewFrustum viewFrustum, GLPathway glPathwayView) {
this.generalManager = GeneralManager.get();
this.glPathwayView = glPathwayView;
idMappingManager = glPathwayView.getPathwayDataDomain().getGeneIDMappingManager();
colorMapper = glPathwayView.getDataDomain().getColorMapper();
hashPathway2VerticesDisplayListId = new HashMap<PathwayGraph, Integer>();
hashPathway2EdgesDisplayListId = new HashMap<PathwayGraph, Integer>();
selectedEdgeRepId = new ArrayList<Integer>();
pathwayItemManager = PathwayItemManager.get();
geneticDataDomain = (GeneticDataDomain) glPathwayView.getDataDomain();
}
public void init(final GL2 gl, SelectionManager geneSelectionManager) {
buildEnzymeNodeDisplayList(gl);
buildCompoundNodeDisplayList(gl);
buildFramedEnzymeNodeDisplayList(gl);
buildFramedCompoundNodeDisplayList(gl);
buildUpscaledEnzymeNodeDisplayList(gl);
buildUpscaledFramedEnzymeNodeDisplayList(gl);
this.internalSelectionManager = geneSelectionManager;
}
public void buildPathwayDisplayList(final GL2 gl, final IUniqueObject containingView,
final PathwayGraph pathway) {
if (pathway == null)
return;
int iVerticesDisplayListId = -1;
int edgesDisplayListId = -1;
if (hashPathway2VerticesDisplayListId.containsKey(pathway)) {
// Replace current display list if a display list exists
iVerticesDisplayListId = hashPathway2VerticesDisplayListId.get(pathway);
}
else {
// Creating vertex display list for pathways
iVerticesDisplayListId = gl.glGenLists(1);
hashPathway2VerticesDisplayListId.put(pathway, iVerticesDisplayListId);
}
createSelectedSamplesVA();
gl.glNewList(iVerticesDisplayListId, GL2.GL_COMPILE);
extractVertices(gl, containingView, pathway);
gl.glEndList();
if (hashPathway2EdgesDisplayListId.containsKey(pathway)) {
// Replace current display list if a display list exists
edgesDisplayListId = hashPathway2EdgesDisplayListId.get(pathway);
}
else {
// Creating edge display list for pathways
edgesDisplayListId = gl.glGenLists(1);
hashPathway2EdgesDisplayListId.put(pathway, edgesDisplayListId);
}
gl.glNewList(edgesDisplayListId, GL2.GL_COMPILE);
extractEdges(gl, pathway);
gl.glEndList();
}
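/**
* Builds the virtual array of samples used for computing expression averages, based on
* the current sample selection of the view.
*/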
private void createSelectedSamplesVA() {
Set<Integer> selectedSamples = glPathwayView.getSampleSelectionManager().getElements(
SelectionType.SELECTION);
List<Integer> selectedSamplesArray = new ArrayList<Integer>();
// Only add selected samples for single pathway
if (!glPathwayView.isRenderedRemote()) {
selectedSamplesArray.addAll(selectedSamples);
}
else {
// if no sample is currently selected, we add all samples for
// calculating the average
if (selectedSamplesArray.size() == 0) {
if (!geneticDataDomain.isGeneRecord())
selectedSamplesArray.addAll(glPathwayView.getDataContainer()
.getRecordPerspective().getVirtualArray().getIDs());
else
selectedSamplesArray.addAll(glPathwayView.getDataContainer()
.getDimensionPerspective().getVirtualArray().getIDs());
}
}
if (!geneticDataDomain.isGeneRecord())
selectedSamplesVA = new RecordVirtualArray(glPathwayView
.getSampleSelectionManager().getIDType(), selectedSamplesArray);
else
selectedSamplesVA = new DimensionVirtualArray(glPathwayView
.getSampleSelectionManager().getIDType(), selectedSamplesArray);
}
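/**
* Propagates the given selection type to all other vertex representations that share a
* pathway vertex with an already selected element.
*/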
public void performIdenticalNodeHighlighting(SelectionType selectionType) {
if (internalSelectionManager == null)
return;
selectedEdgeRepId.clear();
ArrayList<Integer> iAlTmpSelectedGraphItemIds = new ArrayList<Integer>();
Set<Integer> tmpItemIDs;
tmpItemIDs = internalSelectionManager.getElements(selectionType);
if (tmpItemIDs != null) {
iAlTmpSelectedGraphItemIds.addAll(tmpItemIDs);
}
if (iAlTmpSelectedGraphItemIds.size() == 0)
return;
// Copy selection IDs to array list object
for (Integer graphItemID : iAlTmpSelectedGraphItemIds) {
for (PathwayVertex vertex : pathwayItemManager.getPathwayVertexRep(graphItemID)
.getPathwayVertices()) {
for (PathwayVertexRep vertexRep : vertex.getPathwayVertexReps()) {
if (tmpItemIDs.contains(vertexRep.getID())) {
continue;
}
internalSelectionManager.addToType(selectionType, vertexRep.getID());
}
}
}
}
private void buildEnzymeNodeDisplayList(final GL2 gl) {
enzymeNodeDisplayListId = gl.glGenLists(1);
float nodeWidth = PathwayRenderStyle.ENZYME_NODE_WIDTH;
float nodeHeight = PathwayRenderStyle.ENZYME_NODE_HEIGHT;
gl.glNewList(enzymeNodeDisplayListId, GL2.GL_COMPILE);
fillNodeDisplayList(gl, nodeWidth + 0.002f, nodeHeight);
gl.glEndList();
}
private void buildUpscaledEnzymeNodeDisplayList(final GL2 gl) {
upscaledFilledEnzymeNodeDisplayListId = gl.glGenLists(1);
float nodeWidth = PathwayRenderStyle.ENZYME_NODE_WIDTH;
float nodeHeight = PathwayRenderStyle.ENZYME_NODE_HEIGHT;
float scaleFactor = 3;
nodeWidth *= scaleFactor;
nodeHeight *= scaleFactor;
gl.glNewList(upscaledFilledEnzymeNodeDisplayListId, GL2.GL_COMPILE);
fillNodeDisplayList(gl, nodeWidth, nodeHeight);
gl.glEndList();
}
protected void buildUpscaledFramedEnzymeNodeDisplayList(final GL2 gl) {
upscaledFramedEnzymeNodeDisplayListID = gl.glGenLists(1);
float fNodeWidth = PathwayRenderStyle.ENZYME_NODE_WIDTH;
float fNodeHeight = PathwayRenderStyle.ENZYME_NODE_HEIGHT;
float scaleFactor = 1.4f;
fNodeWidth *= scaleFactor;
fNodeHeight *= scaleFactor;
gl.glNewList(upscaledFramedEnzymeNodeDisplayListID, GL2.GL_COMPILE);
fillNodeDisplayListFrame(gl, fNodeWidth, fNodeHeight);
gl.glEndList();
}
protected void buildFramedEnzymeNodeDisplayList(final GL2 gl) {
framedEnzymeNodeDisplayListId = gl.glGenLists(1);
float fNodeWidth = PathwayRenderStyle.ENZYME_NODE_WIDTH;
float fNodeHeight = PathwayRenderStyle.ENZYME_NODE_HEIGHT;
gl.glNewList(framedEnzymeNodeDisplayListId, GL2.GL_COMPILE);
fillNodeDisplayListFrame(gl, fNodeWidth + 0.02f, fNodeHeight);
gl.glEndList();
}
protected void buildCompoundNodeDisplayList(final GL2 gl) {
// Creating display list for node cube objects
compoundNodeDisplayListId = gl.glGenLists(1);
float nodeWidth = PathwayRenderStyle.COMPOUND_NODE_WIDTH;
float nodeHeight = PathwayRenderStyle.COMPOUND_NODE_HEIGHT;
gl.glNewList(compoundNodeDisplayListId, GL2.GL_COMPILE);
fillNodeDisplayList(gl, nodeWidth, nodeHeight);
gl.glEndList();
}
protected void buildFramedCompoundNodeDisplayList(final GL2 gl) {
// Creating display list for node cube objects
framedCompoundNodeDisplayListId = gl.glGenLists(1);
float nodeWidth = PathwayRenderStyle.COMPOUND_NODE_WIDTH;
float nodeHeight = PathwayRenderStyle.COMPOUND_NODE_HEIGHT;
gl.glNewList(framedCompoundNodeDisplayListId, GL2.GL_COMPILE);
fillNodeDisplayListFrame(gl, nodeWidth, nodeHeight);
gl.glEndList();
}
private void fillNodeDisplayList(final GL2 gl, float nodeWidth, float nodeHeight) {
gl.glBegin(GL2.GL_QUADS);
gl.glNormal3f(0.0f, 0.0f, 1.0f);
gl.glVertex3f(0, 0, Z_OFFSET);
gl.glVertex3f(nodeWidth, 0, Z_OFFSET);
gl.glVertex3f(nodeWidth, -nodeHeight, Z_OFFSET);
gl.glVertex3f(0, -nodeHeight, Z_OFFSET);
gl.glEnd();
}
protected void fillNodeDisplayListFrame(final GL2 gl, float nodeWidth, float nodeHeight) {
gl.glLineWidth(3);
gl.glBegin(GL2.GL_LINE_LOOP);
gl.glVertex3f(0, 0, Z_OFFSET + 0.03f);
gl.glVertex3f(nodeWidth, 0, Z_OFFSET + 0.03f);
gl.glVertex3f(nodeWidth, -nodeHeight, Z_OFFSET + 0.03f);
gl.glVertex3f(0, -nodeHeight, Z_OFFSET + 0.03f);
gl.glEnd();
}
private void extractVertices(final GL2 gl, final IUniqueObject containingView,
PathwayGraph pathwayToExtract) {
for (PathwayVertexRep vertexRep : pathwayToExtract.vertexSet()) {
if (vertexRep == null) {
continue;
}
createVertex(gl, containingView, vertexRep, pathwayToExtract);
}
}
private void extractEdges(final GL2 gl, PathwayGraph pathwayToExtract) {
// while (pathwayToExtract.edgeSet()) {
// edgeRep = edgeIterator.next();
// if (edgeRep != null) {
// if (enableEdgeRendering) {
// createEdge(gl, edgeRep, pathwayToExtract);
// // Render edge if it is contained in the minimum spanning tree
// // of the neighborhoods
// else if (selectedEdgeRepId.contains(edgeRep.getID())) {
// createEdge(gl, edgeRep, pathwayToExtract);
}
private void createVertex(final GL2 gl, final IUniqueObject containingView,
PathwayVertexRep vertexRep, PathwayGraph containingPathway) {
float[] tmpNodeColor = null;
gl.glPushName(generalManager
.getViewManager()
.getPickingManager()
.getPickingID(containingView.getID(),
PickingType.PATHWAY_ELEMENT_SELECTION.name(), vertexRep.getID()));
EPathwayVertexShape shape = vertexRep.getShapeType();
if (shape.equals(EPathwayVertexShape.poly))
renderPolyVertex(gl, vertexRep);
float canvasXPos = vertexRep.getCenterX() * PathwayRenderStyle.SCALING_FACTOR_X;
float canvasYPos = vertexRep.getCenterY() * PathwayRenderStyle.SCALING_FACTOR_Y;
float nodeWidth = vertexRep.getWidth() * PathwayRenderStyle.SCALING_FACTOR_X;
float nodeHeight = vertexRep.getHeight() * PathwayRenderStyle.SCALING_FACTOR_Y;
gl.glTranslatef(canvasXPos, -canvasYPos, 0);
EPathwayVertexType vertexType = vertexRep.getType();
switch (vertexType) {
// Pathway link
case map:
// Ignore KEGG title node
if (vertexRep.getName().contains("TITLE")) {
gl.glTranslatef(-canvasXPos, canvasYPos, 0);
gl.glPopName();
return;
}
tmpNodeColor = new float[] { 0f, 0f, 0f, 0.25f };
gl.glColor4fv(tmpNodeColor, 0);
fillNodeDisplayList(gl, nodeWidth, nodeHeight);
// Handle selection highlighting of element
if (internalSelectionManager.checkStatus(SelectionType.SELECTION,
vertexRep.getID())) {
tmpNodeColor = SelectionType.SELECTION.getColor();
gl.glColor4fv(tmpNodeColor, 0);
fillNodeDisplayListFrame(gl, nodeWidth, nodeHeight);
}
else if (internalSelectionManager.checkStatus(SelectionType.MOUSE_OVER,
vertexRep.getID())) {
tmpNodeColor = SelectionType.MOUSE_OVER.getColor();
gl.glColor4fv(tmpNodeColor, 0);
fillNodeDisplayListFrame(gl, nodeWidth, nodeHeight);
}
break;
case compound:
EventBasedSelectionManager metabolicSelectionManager = glPathwayView
.getMetaboliteSelectionManager();
// Handle selection highlighting of element
if (internalSelectionManager.checkStatus(SelectionType.SELECTION,
vertexRep.getID())
|| metabolicSelectionManager.checkStatus(SelectionType.SELECTION,
vertexRep.getName().hashCode())) {
tmpNodeColor = SelectionType.SELECTION.getColor();
gl.glColor4fv(tmpNodeColor, 0);
gl.glCallList(framedCompoundNodeDisplayListId);
}
else if (internalSelectionManager.checkStatus(SelectionType.MOUSE_OVER,
vertexRep.getID())
|| metabolicSelectionManager.checkStatus(SelectionType.MOUSE_OVER,
vertexRep.getName().hashCode())) {
tmpNodeColor = SelectionType.MOUSE_OVER.getColor();
gl.glColor4fv(tmpNodeColor, 0);
gl.glCallList(framedCompoundNodeDisplayListId);
}
tmpNodeColor = PathwayRenderStyle.COMPOUND_NODE_COLOR;
gl.glColor4fv(tmpNodeColor, 0);
gl.glCallList(compoundNodeDisplayListId);
break;
case group:
// gl.glColor4f(1, 1, 0, 1);
// fillNodeDisplayList(gl, nodeWidth, nodeHeight);
break;
case gene:
case enzyme:
// new KEGG data assigns enzymes without a mapping to "undefined",
// which we represent as "other"
case other:
gl.glLineWidth(1);
if (enableGeneMapping) {
Average average = getExpressionAverage(vertexRep);
if (average != null)
tmpNodeColor = colorMapper.getColor((float) average
.getArithmeticMean());
if (tmpNodeColor != null) {
gl.glColor4f(tmpNodeColor[0], tmpNodeColor[1], tmpNodeColor[2], 0.7f);
if (glPathwayView.getDetailLevel() == EDetailLevel.HIGH) {
// gl.glEnable(GL2.GL_BLEND);
gl.glBlendFunc(GL2.GL_SRC_ALPHA, GL2.GL_ONE_MINUS_SRC_ALPHA);
gl.glCallList(enzymeNodeDisplayListId);
// gl.glEnable(GL2.GL_DEPTH_TEST);
// max std dev is 0.5, so scale it up to a visible bar height
Float stdDev = PathwayRenderStyle.ENZYME_NODE_HEIGHT
* (float) average.getStandardDeviation() * 5.0f;
float x = PathwayRenderStyle.ENZYME_NODE_WIDTH + 0.000f;
float y = -PathwayRenderStyle.ENZYME_NODE_HEIGHT + 0.002f;
if (!stdDev.isNaN()) {
// opaque background
gl.glColor4f(1, 1, 1, 1f);
gl.glBegin(GL2.GL_QUADS);
gl.glVertex3f(x, y - .001f, Z_OFFSET);
gl.glVertex3f(x + PathwayRenderStyle.STD_DEV_BAR_WIDTH,
y - .001f, Z_OFFSET);
gl.glVertex3f(x + PathwayRenderStyle.STD_DEV_BAR_WIDTH,
0 + .001f, Z_OFFSET);
gl.glVertex3f(x, 0 + 0.001f, Z_OFFSET);
gl.glEnd();
gl.glColor4fv(PathwayRenderStyle.STD_DEV_COLOR, 0);
gl.glBegin(GL2.GL_QUADS);
gl.glVertex3f(x, y, Z_OFFSET + 0.01f);
gl.glVertex3f(x + PathwayRenderStyle.STD_DEV_BAR_WIDTH, y,
Z_OFFSET + 0.01f);
gl.glVertex3f(x + PathwayRenderStyle.STD_DEV_BAR_WIDTH, y
+ stdDev, Z_OFFSET + 0.01f);
gl.glVertex3f(x, y + stdDev, Z_OFFSET + 0.01f);
gl.glEnd();
// frame
gl.glColor4f(0, 0, 0, 1f);
gl.glBegin(GL2.GL_LINE_LOOP);
gl.glVertex3f(x, y - .001f, Z_OFFSET + 0.02f);
gl.glVertex3f(x + PathwayRenderStyle.STD_DEV_BAR_WIDTH,
y - .001f, Z_OFFSET + 0.02f);
gl.glVertex3f(x + PathwayRenderStyle.STD_DEV_BAR_WIDTH,
0 + .001f, Z_OFFSET + 0.02f);
gl.glVertex3f(x, 0 + 0.001f, Z_OFFSET + 0.02f);
gl.glEnd();
}
// Handle selection highlighting of element
if (internalSelectionManager.checkStatus(SelectionType.SELECTION,
vertexRep.getID())) {
tmpNodeColor = SelectionType.SELECTION.getColor();
gl.glColor4fv(tmpNodeColor, 0);
gl.glCallList(framedEnzymeNodeDisplayListId);
}
else if (internalSelectionManager.checkStatus(
SelectionType.MOUSE_OVER, vertexRep.getID())) {
tmpNodeColor = SelectionType.MOUSE_OVER.getColor();
gl.glColor4fv(tmpNodeColor, 0);
gl.glCallList(framedEnzymeNodeDisplayListId);
}
}
else {
// Upscaled version of pathway node needed for e.g.
// StratomeX
gl.glCallList(upscaledFilledEnzymeNodeDisplayListId);
// Handle selection highlighting of element
if (internalSelectionManager.checkStatus(SelectionType.SELECTION,
vertexRep.getID())) {
tmpNodeColor = SelectionType.SELECTION.getColor();
gl.glColor4fv(tmpNodeColor, 0);
gl.glCallList(upscaledFilledEnzymeNodeDisplayListId);
}
else if (internalSelectionManager.checkStatus(
SelectionType.MOUSE_OVER, vertexRep.getID())) {
tmpNodeColor = SelectionType.MOUSE_OVER.getColor();
gl.glColor4fv(tmpNodeColor, 0);
gl.glCallList(upscaledFilledEnzymeNodeDisplayListId);
}
}
}
else {
// render a black glyph in the corner of the rectangle
// to indicate that we have no mapping or no data
// transparent node for picking
gl.glColor4f(0, 0, 0, 0);
gl.glCallList(enzymeNodeDisplayListId);
tmpNodeColor = PathwayRenderStyle.ENZYME_NODE_COLOR;
gl.glColor4f(tmpNodeColor[0], tmpNodeColor[1], tmpNodeColor[2], 0.7f);
gl.glCallList(compoundNodeDisplayListId);
// Handle selection highlighting of element
if (internalSelectionManager.checkStatus(SelectionType.SELECTION,
vertexRep.getID())) {
tmpNodeColor = SelectionType.SELECTION.getColor();
gl.glColor4fv(tmpNodeColor, 0);
gl.glCallList(framedEnzymeNodeDisplayListId);
}
else if (internalSelectionManager.checkStatus(
SelectionType.MOUSE_OVER, vertexRep.getID())) {
tmpNodeColor = SelectionType.MOUSE_OVER.getColor();
gl.glColor4fv(tmpNodeColor, 0);
gl.glCallList(framedEnzymeNodeDisplayListId);
}
}
}
else {
// Handle selection highlighting of element
if (internalSelectionManager.checkStatus(SelectionType.SELECTION,
vertexRep.getID())) {
tmpNodeColor = SelectionType.SELECTION.getColor();
}
else if (internalSelectionManager.checkStatus(SelectionType.MOUSE_OVER,
vertexRep.getID())) {
tmpNodeColor = SelectionType.MOUSE_OVER.getColor();
}
else if (internalSelectionManager.checkStatus(SelectionType.NORMAL,
vertexRep.getID())) {
tmpNodeColor = PathwayRenderStyle.ENZYME_NODE_COLOR;
}
else {
tmpNodeColor = new float[] { 0, 0, 0, 0 };
}
gl.glColor4fv(tmpNodeColor, 0);
gl.glCallList(framedEnzymeNodeDisplayListId);
if (!internalSelectionManager.checkStatus(SelectionType.DESELECTED,
vertexRep.getID())) {
// Transparent node for picking
gl.glColor4f(0, 0, 0, 0);
gl.glCallList(enzymeNodeDisplayListId);
}
}
break;
}
gl.glTranslatef(-canvasXPos, canvasYPos, 0);
gl.glPopName();
}
private void renderPolyVertex(GL2 gl, PathwayVertexRep vertexRep) {
float[] tmpNodeColor = null;
ArrayList<Pair<Short, Short>> coords = vertexRep.getCoords();
gl.glLineWidth(3);
if (enableGeneMapping) {
Average average = getExpressionAverage(vertexRep);
if (average != null)
tmpNodeColor = colorMapper.getColor((float) average.getArithmeticMean());
gl.glLineWidth(4);
if (tmpNodeColor != null) {
gl.glColor3fv(tmpNodeColor, 0);
if (glPathwayView.getDetailLevel() == EDetailLevel.HIGH) {
gl.glBegin(GL2.GL_LINE_STRIP);
for (int pointIndex = 0; pointIndex < coords.size(); pointIndex++) {
gl.glVertex3f(coords.get(pointIndex).getFirst()
* PathwayRenderStyle.SCALING_FACTOR_X, -coords.get(pointIndex)
.getSecond() * PathwayRenderStyle.SCALING_FACTOR_Y, Z_OFFSET);
}
gl.glEnd();
// Transparent node for picking
gl.glColor4f(0, 0, 0, 0);
gl.glBegin(GL2.GL_POLYGON);
for (int pointIndex = 0; pointIndex < coords.size(); pointIndex++) {
gl.glVertex3f(coords.get(pointIndex).getFirst()
* PathwayRenderStyle.SCALING_FACTOR_X, -coords.get(pointIndex)
.getSecond() * PathwayRenderStyle.SCALING_FACTOR_Y, Z_OFFSET);
}
gl.glEnd();
}
else {
gl.glBegin(GL2.GL_POLYGON);
for (int pointIndex = 0; pointIndex < coords.size(); pointIndex++) {
gl.glVertex3f(coords.get(pointIndex).getFirst()
* PathwayRenderStyle.SCALING_FACTOR_X, -coords.get(pointIndex)
.getSecond() * PathwayRenderStyle.SCALING_FACTOR_Y, Z_OFFSET);
}
gl.glEnd();
// Handle selection highlighting of element
if (internalSelectionManager.checkStatus(SelectionType.SELECTION,
vertexRep.getID())) {
tmpNodeColor = SelectionType.SELECTION.getColor();
gl.glLineWidth(3);
gl.glColor4fv(tmpNodeColor, 0);
gl.glBegin(GL2.GL_LINE_STRIP);
for (int pointIndex = 0; pointIndex < coords.size(); pointIndex++) {
gl.glVertex3f(coords.get(pointIndex).getFirst()
* PathwayRenderStyle.SCALING_FACTOR_X,
-coords.get(pointIndex).getSecond()
* PathwayRenderStyle.SCALING_FACTOR_Y, Z_OFFSET);
}
gl.glEnd();
}
else if (internalSelectionManager.checkStatus(SelectionType.MOUSE_OVER,
vertexRep.getID())) {
tmpNodeColor = SelectionType.MOUSE_OVER.getColor();
gl.glLineWidth(3);
gl.glColor4fv(tmpNodeColor, 0);
gl.glBegin(GL2.GL_LINE_STRIP);
for (int pointIndex = 0; pointIndex < coords.size(); pointIndex++) {
gl.glVertex3f(coords.get(pointIndex).getFirst()
* PathwayRenderStyle.SCALING_FACTOR_X,
-coords.get(pointIndex).getSecond()
* PathwayRenderStyle.SCALING_FACTOR_Y, Z_OFFSET);
}
gl.glEnd();
}
}
}
}
else {
// Handle selection highlighting of element
if (internalSelectionManager.checkStatus(SelectionType.SELECTION,
vertexRep.getID())) {
tmpNodeColor = SelectionType.SELECTION.getColor();
}
else if (internalSelectionManager.checkStatus(SelectionType.MOUSE_OVER,
vertexRep.getID())) {
tmpNodeColor = SelectionType.MOUSE_OVER.getColor();
}
// else if (internalSelectionManager.checkStatus(
// SelectionType.NORMAL, vertexRep.getID())) {
// tmpNodeColor = PathwayRenderStyle.ENZYME_NODE_COLOR;
else {
tmpNodeColor = PathwayRenderStyle.ENZYME_NODE_COLOR;
// tmpNodeColor = new float[] { 0, 0, 0, 0 };
}
gl.glColor4fv(tmpNodeColor, 0);
gl.glLineWidth(3);
gl.glBegin(GL2.GL_LINE_STRIP);
for (int pointIndex = 0; pointIndex < coords.size(); pointIndex++) {
gl.glVertex3f(coords.get(pointIndex).getFirst()
* PathwayRenderStyle.SCALING_FACTOR_X, -coords.get(pointIndex)
.getSecond() * PathwayRenderStyle.SCALING_FACTOR_Y, Z_OFFSET);
}
gl.glEnd();
if (!internalSelectionManager.checkStatus(SelectionType.DESELECTED,
vertexRep.getID())) {
// Transparent node for picking
gl.glColor4f(0, 0, 0, 0);
gl.glBegin(GL2.GL_POLYGON);
for (int pointIndex = 0; pointIndex < coords.size(); pointIndex++) {
gl.glVertex3f(coords.get(pointIndex).getFirst()
* PathwayRenderStyle.SCALING_FACTOR_X, -coords.get(pointIndex)
.getSecond() * PathwayRenderStyle.SCALING_FACTOR_Y, Z_OFFSET);
}
gl.glEnd();
}
}
}
private void createEdge(final GL2 gl, PathwayRelationEdgeRep edgeRep,
PathwayGraph containingPathway) {
// List<IGraphItem> listGraphItemsIn = edgeRep
// .getAllItemsByProp(EGraphItemProperty.INCOMING);
// List<IGraphItem> listGraphItemsOut = edgeRep
// .getAllItemsByProp(EGraphItemProperty.OUTGOING);
// if (listGraphItemsIn.isEmpty() || listGraphItemsOut.isEmpty())
// return;
// float[] tmpColor;
// float fReactionLineOffset = 0;
// // Check if edge is a reaction
// if (edgeRep instanceof PathwayReactionEdgeGraphItemRep) {
// tmpColor = PathwayRenderStyle.REACTION_EDGE_COLOR;
// fReactionLineOffset = 0.01f;
// // Check if edge is a relation
// else if (edgeRep instanceof PathwayRelationEdgeGraphItemRep) {
// tmpColor = PathwayRenderStyle.RELATION_EDGE_COLOR;
// } else {
// tmpColor = new float[] { 0, 0, 0, 0 };
// gl.glLineWidth(4);
// gl.glColor4fv(tmpColor, 0);
// gl.glBegin(GL2.GL_LINES);
// Iterator<IGraphItem> iterSourceGraphItem =
// listGraphItemsIn.iterator();
// Iterator<IGraphItem> iterTargetGraphItem =
// listGraphItemsOut.iterator();
// PathwayVertexGraphItemRep tmpSourceGraphItem;
// PathwayVertexGraphItemRep tmpTargetGraphItem;
// while (iterSourceGraphItem.hasNext()) {
// tmpSourceGraphItem = (PathwayVertexGraphItemRep)
// iterSourceGraphItem.next();
// while (iterTargetGraphItem.hasNext()) {
// tmpTargetGraphItem = (PathwayVertexGraphItemRep) iterTargetGraphItem
// .next();
// gl.glVertex3f(tmpSourceGraphItem.getXOrigin()
// * PathwayRenderStyle.SCALING_FACTOR_X + fReactionLineOffset,
// -tmpSourceGraphItem.getYOrigin()
// * PathwayRenderStyle.SCALING_FACTOR_Y
// + fReactionLineOffset, 0.02f);
// gl.glVertex3f(tmpTargetGraphItem.getXOrigin()
// * PathwayRenderStyle.SCALING_FACTOR_X + fReactionLineOffset,
// -tmpTargetGraphItem.getYOrigin()
// * PathwayRenderStyle.SCALING_FACTOR_Y
// + fReactionLineOffset, 0.02f);
// gl.glEnd();
}
public void renderPathway(final GL2 gl, final PathwayGraph pathway, boolean bRenderLabels) {
if (enableEdgeRendering || !selectedEdgeRepId.isEmpty()) {
int tmpEdgesDisplayListID = hashPathway2EdgesDisplayListId.get(pathway);
gl.glCallList(tmpEdgesDisplayListID);
}
Integer tmpVerticesDisplayListID = hashPathway2VerticesDisplayListId.get(pathway);
if (tmpVerticesDisplayListID != null) {
gl.glCallList(tmpVerticesDisplayListID);
// if (bRenderLabels && bEnableAnnotation)
// renderLabels(gl, iPathwayID);
}
}
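/**
* Returns the average expression value of the selected samples for the first expression
* index mapped to the vertex' David ID, or null if no mapping or selection exists.
*/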
private Average getExpressionAverage(PathwayVertexRep vertexRep) {
if (selectedSamplesVA == null)
return null;
int davidID = pathwayItemManager.getDavidIdByPathwayVertex((PathwayVertex) vertexRep
.getPathwayVertices().get(0));
if (davidID == -1 || davidID == 0)
return null;
else {
Set<Integer> expressionIndices = idMappingManager.<Integer, Integer> getIDAsSet(
glPathwayView.getPathwayDataDomain().getDavidIDType(), glPathwayView
.getGeneSelectionManager().getIDType(), davidID);
if (expressionIndices == null)
return null;
// FIXME multi mappings not properly handled - only the first is
// taken
for (Integer expressionIndex : expressionIndices) {
Average average = ContainerStatistics.calculateAverage(selectedSamplesVA,
geneticDataDomain.getTable(), expressionIndex);
return average;
}
}
return null;
}
public void enableEdgeRendering(final boolean bEnableEdgeRendering) {
this.enableEdgeRendering = bEnableEdgeRendering;
}
public void enableGeneMapping(final boolean bEnableGeneMapping) {
this.enableGeneMapping = bEnableGeneMapping;
}
public void enableNeighborhood(final boolean bEnableNeighborhood) {
}
public void switchDataRepresentation() {
if (dimensionDataRepresentation.equals(DataRepresentation.NORMALIZED)) {
if (!geneticDataDomain.getTable().containsFoldChangeRepresentation())
geneticDataDomain.getTable().createFoldChangeRepresentation();
dimensionDataRepresentation = DataRepresentation.FOLD_CHANGE_NORMALIZED;
}
else
dimensionDataRepresentation = DataRepresentation.NORMALIZED;
}
}
|
package org.eclipse.jetty.spdy.client;
import java.io.IOException;
import java.nio.channels.SocketChannel;
import java.util.List;
import java.util.concurrent.Executor;
import javax.net.ssl.SSLEngine;
import org.eclipse.jetty.io.AbstractConnection;
import org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.io.EndPoint;
import org.eclipse.jetty.io.RuntimeIOException;
import org.eclipse.jetty.io.ssl.SslConnection.DecryptedEndPoint;
import org.eclipse.jetty.npn.NextProtoNego;
import org.eclipse.jetty.util.BufferUtil;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
public class NextProtoNegoClientConnection extends AbstractConnection implements NextProtoNego.ClientProvider
{
private final Logger LOG = Log.getLogger(getClass());
private final SocketChannel channel;
private final Object attachment;
private final SPDYClient client;
private final SSLEngine engine;
private volatile boolean completed;
public NextProtoNegoClientConnection(SocketChannel channel, DecryptedEndPoint endPoint, Object attachment, Executor executor, SPDYClient client)
{
super(endPoint, executor);
this.channel = channel;
this.attachment = attachment;
this.client = client;
this.engine = endPoint.getSslConnection().getSSLEngine();
NextProtoNego.put(engine, this);
}
@Override
public void onOpen()
{
super.onOpen();
try
{
getEndPoint().flush(BufferUtil.EMPTY_BUFFER);
if (completed)
replaceConnection();
else
fillInterested();
}
catch(IOException e)
{
throw new RuntimeIOException(e);
}
}
@Override
public void onFillable()
{
while (true)
{
int filled = fill();
if (filled == 0 && !completed)
fillInterested();
if (filled <= 0 || completed)
break;
}
if (completed)
replaceConnection();
}
private int fill()
{
try
{
return getEndPoint().fill(BufferUtil.EMPTY_BUFFER);
}
catch (IOException x)
{
LOG.debug(x);
getEndPoint().close();
return -1;
}
}
@Override
public boolean supports()
{
return true;
}
@Override
public void unsupported()
{
NextProtoNego.remove(engine);
completed = true;
}
@Override
public String selectProtocol(List<String> protocols)
{
NextProtoNego.remove(engine);
completed = true;
String protocol = client.selectProtocol(protocols);
return protocol;
}
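/**
* Replaces this NPN negotiation connection on the endpoint with the connection created
* for the negotiated protocol.
*/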
private void replaceConnection()
{
EndPoint endPoint = getEndPoint();
Connection connection = client.getConnectionFactory().newConnection(channel, endPoint, attachment);
endPoint.getConnection().onClose();
endPoint.setConnection(connection);
connection.onOpen();
}
}
|
package org.jlib.container.sequence.index;
import org.jlib.container.sequence.ReplaceSequenceIteratorState;
import org.jlib.container.sequence.Sequence;
/**
* {@link IndexSequenceIteratorState} of a {@link ReplaceIndexSequence}.
*
* @param <Element>
* type of elements held in the {@link Sequence}
*
* @author Igor Akkerman
*/
public interface ReplaceIndexSequenceIteratorState<Element>
extends IndexSequenceIteratorState<Element>, ReplaceSequenceIteratorState<Element> {
@Override
public ReplaceIndexSequenceIteratorState<Element> getNextState();
@Override
public ReplaceIndexSequenceIteratorState<Element> getPreviousState();
@Override
public ReplaceIndexSequenceIteratorState<Element> getReplacedState();
}
|
import java.lang.reflect.*;
import java.io.*;
import java.net.*;
import java.util.*;
import org.xbill.DNS.*;
import org.xbill.DNS.utils.*;
/** @author Brian Wellington <bwelling@xbill.org> */
public class jnamed {
Hashtable caches;
Hashtable znames;
Hashtable TSIGs;
public
jnamed(String conffile) throws IOException {
FileInputStream fs;
boolean started = false;
try {
fs = new FileInputStream(conffile);
}
catch (Exception e) {
System.out.println("Cannot open " + conffile);
return;
}
caches = new Hashtable();
znames = new Hashtable();
TSIGs = new Hashtable();
BufferedReader br = new BufferedReader(new InputStreamReader(fs));
String line = null;
while ((line = br.readLine()) != null) {
StringTokenizer st = new StringTokenizer(line);
if (!st.hasMoreTokens())
continue;
String keyword = st.nextToken();
if (!st.hasMoreTokens()) {
System.out.println("Invalid line: " + line);
continue;
}
if (keyword.charAt(0) == '#')
continue;
if (keyword.equals("primary"))
addPrimaryZone(st.nextToken());
if (keyword.equals("secondary"))
addSecondaryZone(st.nextToken(), st.nextToken());
else if (keyword.equals("cache")) {
Cache cache = new Cache(st.nextToken());
caches.put(new Short(DClass.IN), cache);
}
else if (keyword.equals("key"))
addTSIG(st.nextToken(), st.nextToken());
else if (keyword.equals("port")) {
short port = Short.parseShort(st.nextToken());
addUDP(port);
addTCP(port);
started = true;
}
}
if (!started) {
addUDP((short) 53);
addTCP((short) 53);
}
System.out.println("running");
}
public void
addPrimaryZone(String zonefile) throws IOException {
Cache cache = getCache(DClass.IN);
Zone newzone = new Zone(zonefile, cache);
znames.put(newzone.getOrigin(), newzone);
/*System.out.println("Adding zone named <" + newzone.getOrigin() + ">");*/
}
public void
addSecondaryZone(String zone, String remote) throws IOException {
Cache cache = getCache(DClass.IN);
Name zname = new Name(zone);
Zone newzone = new Zone(zname, DClass.IN, remote, cache);
znames.put(zname, newzone);
/*System.out.println("Adding zone named <" + zname + ">");*/
}
public void
addTSIG(String name, String key) {
TSIGs.put(new Name(name), base64.fromString(key));
}
public Cache
getCache(short dclass) {
Cache c = (Cache) caches.get(new Short(dclass));
if (c == null) {
c = new Cache(dclass);
caches.put(new Short(dclass), c);
}
return c;
}
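/*
* Returns the most specific zone containing the given name, stripping leading labels
* until a configured zone matches, or null if none does.
*/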
public Zone
findBestZone(Name name) {
Zone foundzone = null;
foundzone = (Zone) znames.get(name);
if (foundzone != null)
return foundzone;
int labels = name.labels();
for (int i = 1; i < labels; i++) {
Name tname = new Name(name, i);
foundzone = (Zone) znames.get(tname);
if (foundzone != null)
return foundzone;
}
return null;
}
public RRset
findExactMatch(Name name, short type, short dclass, boolean glue) {
Zone zone = findBestZone(name);
if (zone != null)
return zone.findExactMatch(name, type);
else {
RRset [] rrsets;
Cache cache = getCache(dclass);
if (glue)
rrsets = cache.findAnyRecords(name, type);
else
rrsets = cache.findRecords(name, type);
if (rrsets == null)
return null;
else
return rrsets[0]; /* not quite right */
}
}
void
addRRset(Name name, Message response, RRset rrset, byte section,
boolean sigonly)
{
Enumeration e;
for (byte s = 1; s < section; s++)
if (response.findRRset(name, rrset.getType(), s))
return;
if (!sigonly) {
e = rrset.rrs();
while (e.hasMoreElements()) {
Record r = (Record) e.nextElement();
if (!name.isWild() && r.getName().isWild())
r = r.withName(name);
response.addRecord(r, section);
}
}
e = rrset.sigs();
while (e.hasMoreElements()) {
Record r = (Record) e.nextElement();
if (!name.isWild() && r.getName().isWild())
r = r.withName(name);
response.addRecord(r, section);
}
}
private void
addSOA(Message response, Zone zone) {
response.addRecord(zone.getSOA(), Section.AUTHORITY);
}
private void
addNS(Message response, Zone zone) {
RRset nsRecords = zone.getNS();
Enumeration e = nsRecords.rrs();
while (e.hasMoreElements()) {
Record r = (Record) e.nextElement();
response.addRecord(r, Section.AUTHORITY);
}
}
private void
addCacheNS(Message response, Cache cache, Name name) {
SetResponse sr = cache.lookupRecords(name, Type.NS, Credibility.HINT);
if (!sr.isDelegation())
return;
RRset nsRecords = sr.getNS();
Enumeration e = nsRecords.rrs();
while (e.hasMoreElements()) {
Record r = (Record) e.nextElement();
response.addRecord(r, Section.AUTHORITY);
}
}
private void
addGlue(Message response, Name name) {
RRset a = findExactMatch(name, Type.A, DClass.IN, true);
if (a == null)
return;
if (response.findRRset(name, Type.A))
return;
Enumeration e = a.rrs();
while (e.hasMoreElements()) {
Record r = (Record) e.nextElement();
response.addRecord(r, Section.ADDITIONAL);
}
e = a.sigs();
while (e.hasMoreElements()) {
Record r = (Record) e.nextElement();
response.addRecord(r, Section.ADDITIONAL);
}
}
private void
addAdditional2(Message response, int section) {
Enumeration e = response.getSection(section);
while (e.hasMoreElements()) {
Record r = (Record) e.nextElement();
Name glueName = null;
switch (r.getType()) {
case Type.MX:
glueName = ((MXRecord)r).getTarget();
break;
case Type.NS:
glueName = ((NSRecord)r).getTarget();
break;
case Type.KX:
glueName = ((KXRecord)r).getTarget();
break;
case Type.NAPTR:
glueName = ((NAPTRRecord)r).getReplacement();
break;
case Type.SRV:
glueName = ((SRVRecord)r).getTarget();
break;
default:
break;
}
if (glueName != null)
addGlue(response, glueName);
}
}
void
addAdditional(Message response) {
addAdditional2(response, Section.ANSWER);
addAdditional2(response, Section.AUTHORITY);
}
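/*
* Looks up the query in the best matching zone or in the cache and fills the answer and
* authority sections, following CNAME and DNAME chains; returns the response code.
*/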
byte
addAnswer(Message response, Name name, short type, short dclass, int iterations)
{
SetResponse sr;
boolean sigonly;
byte rcode = Rcode.NOERROR;
if (iterations > 6)
return Rcode.SERVFAIL;
if (type == Type.SIG) {
type = Type.ANY;
sigonly = true;
}
else
sigonly = false;
Zone zone = findBestZone(name);
if (zone != null)
sr = zone.findRecords(name, type);
else {
Cache cache = getCache(dclass);
sr = cache.lookupRecords(name, type,
Credibility.NONAUTH_ANSWER);
}
if (sr.isUnknown()) {
addCacheNS(response, getCache(dclass), name);
}
if (sr.isNXDOMAIN()) {
response.getHeader().setRcode(Rcode.NXDOMAIN);
if (zone != null) {
addSOA(response, zone);
if (iterations == 0)
response.getHeader().setFlag(Flags.AA);
}
rcode = Rcode.NXDOMAIN;
}
else if (sr.isNXRRSET()) {
if (zone != null) {
addSOA(response, zone);
if (iterations == 0)
response.getHeader().setFlag(Flags.AA);
}
}
else if (sr.isDelegation()) {
RRset nsRecords = sr.getNS();
addRRset(nsRecords.getName(), response, nsRecords,
Section.AUTHORITY, false);
}
else if (sr.isCNAME()) {
RRset rrset = new RRset();
CNAMERecord cname = sr.getCNAME();
rrset.addRR(cname);
addRRset(name, response, rrset, Section.ANSWER, false);
if (zone != null && iterations == 0)
response.getHeader().setFlag(Flags.AA);
if (name.equals(cname.getTarget()))
return Rcode.NOERROR;
rcode = addAnswer(response, cname.getTarget(),
type, dclass, iterations + 1);
}
else if (sr.isDNAME()) {
RRset rrset = new RRset();
DNAMERecord dname = sr.getDNAME();
rrset.addRR(dname);
addRRset(name, response, rrset, Section.ANSWER, false);
Name newname = name.fromDNAME(dname);
if (newname == null)
return Rcode.SERVFAIL;
try {
rrset = new RRset();
rrset.addRR(new CNAMERecord(name, dclass, 0, newname));
addRRset(name, response, rrset, Section.ANSWER, false);
}
catch (IOException e) {}
if (zone != null && iterations == 0)
response.getHeader().setFlag(Flags.AA);
if (dname.getName().equals(dname.getTarget()))
return Rcode.NOERROR;
rcode = addAnswer(response, newname, type, dclass,
iterations + 1);
}
else if (sr.isSuccessful()) {
RRset [] rrsets = sr.answers();
for (int i = 0; i < rrsets.length; i++)
addRRset(name, response, rrsets[i],
Section.ANSWER, sigonly);
if (zone != null) {
addNS(response, zone);
if (iterations == 0)
response.getHeader().setFlag(Flags.AA);
}
else
addCacheNS(response, getCache(dclass), name);
}
return rcode;
}
TSIG
findTSIG(Name name) {
byte [] key = (byte []) TSIGs.get(name);
if (key != null)
return new TSIG(name, key);
else
return null;
}
Message
doAXFR(Name name, Message query, Socket s) {
Zone zone = (Zone) znames.get(name);
if (zone == null) {
/* System.out.println("no zone " + name + " to AXFR");*/
return errorMessage(query, Rcode.REFUSED);
}
Enumeration e = zone.AXFR();
try {
DataOutputStream dataOut;
dataOut = new DataOutputStream(s.getOutputStream());
while (e.hasMoreElements()) {
RRset rrset = (RRset) e.nextElement();
Message response = new Message();
addRRset(rrset.getName(), response, rrset,
Section.ANSWER, false);
byte [] out = response.toWire();
dataOut.writeShort(out.length);
dataOut.write(out);
}
}
catch (IOException ex) {
System.out.println("AXFR failed");
}
try {
s.close();
}
catch (IOException ex) {
}
return null;
}
/*
* Note: a null return value means that the caller doesn't need to do
* anything. Currently this only happens if this is an AXFR request over
* TCP.
*/
Message
generateReply(Message query, byte [] in, Socket s) {
boolean badversion;
int maxLength;
boolean sigonly;
SetResponse sr;
if (query.getHeader().getOpcode() != Opcode.QUERY)
return errorMessage(query, Rcode.NOTIMPL);
Record queryRecord = query.getQuestion();
TSIGRecord queryTSIG = query.getTSIG();
TSIG tsig = null;
if (queryTSIG != null) {
tsig = findTSIG(queryTSIG.getName());
if (!tsig.verify(query, in, null))
return formerrMessage(in);
}
OPTRecord queryOPT = query.getOPT();
if (queryOPT != null && queryOPT.getVersion() > 0)
badversion = true;
if (s != null)
maxLength = 65535;
else if (queryOPT != null)
maxLength = queryOPT.getPayloadSize();
else
maxLength = 512;
Message response = new Message();
response.getHeader().setID(query.getHeader().getID());
response.getHeader().setFlag(Flags.QR);
if (query.getHeader().getFlag(Flags.RD))
response.getHeader().setFlag(Flags.RD);
response.addRecord(queryRecord, Section.QUESTION);
Name name = queryRecord.getName();
short type = queryRecord.getType();
short dclass = queryRecord.getDClass();
if (type == Type.AXFR && s != null)
return doAXFR(name, query, s);
if (!Type.isRR(type) && type != Type.ANY)
return errorMessage(query, Rcode.NOTIMPL);
byte rcode = addAnswer(response, name, type, dclass, 0);
if (rcode != Rcode.NOERROR && rcode != Rcode.NXDOMAIN)
return errorMessage(query, rcode);
addAdditional(response);
if (queryTSIG != null) {
try {
if (tsig != null)
tsig.apply(response, queryTSIG);
}
catch (IOException e) {
}
}
try {
response.freeze();
byte [] out = response.toWire();
if (out.length > maxLength) {
response.thaw();
truncate(response, out.length, maxLength);
if (tsig != null)
tsig.apply(response, queryTSIG);
}
}
catch (IOException e) {
}
return response;
}
public int
truncateSection(Message in, int maxLength, int length, int section) {
int removed = 0;
Record [] records = in.getSectionArray(section);
for (int i = records.length - 1; i >= 0; i--) {
Record r = records[i];
removed += r.getWireLength();
length -= r.getWireLength();
in.removeRecord(r, section);
if (length > maxLength)
continue;
else {
for (int j = i - 1; j >= 0; j--) {
Record r2 = records[j];
if (!r.getName().equals(r2.getName()) ||
r.getType() != r2.getType() ||
r.getDClass() != r2.getDClass())
break;
removed += r2.getWireLength();
length -= r2.getWireLength();
in.removeRecord(r2, section);
}
return removed;
}
}
return removed;
}
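/*
* Truncates the message to fit within maxLength, removing additional, authority and
* answer records in that order and setting the TC flag once answer or authority data
* has to be dropped.
*/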
public void
truncate(Message in, int length, int maxLength) {
TSIGRecord tsig = in.getTSIG();
if (tsig != null)
maxLength -= tsig.getWireLength();
length -= truncateSection(in, maxLength, length, Section.ADDITIONAL);
if (length < maxLength)
return;
in.getHeader().setFlag(Flags.TC);
if (tsig != null) {
in.removeAllRecords(Section.ANSWER);
in.removeAllRecords(Section.AUTHORITY);
return;
}
length -= truncateSection(in, maxLength, length, Section.AUTHORITY);
if (length < maxLength)
return;
length -= truncateSection(in, maxLength, length, Section.ANSWER);
}
public Message
formerrMessage(byte [] in) {
Header header;
try {
header = new Header(new DataByteInputStream(in));
}
catch (IOException e) {
header = new Header(0);
}
Message response = new Message();
response.setHeader(header);
for (int i = 0; i < 4; i++)
response.removeAllRecords(i);
header.setRcode(Rcode.FORMERR);
return response;
}
public Message
errorMessage(Message query, short rcode) {
Header header = query.getHeader();
Message response = new Message();
response.setHeader(header);
for (int i = 0; i < 4; i++)
response.removeAllRecords(i);
if (rcode == Rcode.SERVFAIL)
response.addRecord(query.getQuestion(), Section.QUESTION);
header.setRcode(rcode);
return response;
}
public void
serveTCP(short port) {
try {
ServerSocket sock = new ServerSocket(port);
while (true) {
Socket s = sock.accept();
int inLength;
DataInputStream dataIn;
DataOutputStream dataOut;
byte [] in;
try {
InputStream is = s.getInputStream();
dataIn = new DataInputStream(is);
inLength = dataIn.readUnsignedShort();
in = new byte[inLength];
dataIn.readFully(in);
}
catch (InterruptedIOException e) {
s.close();
continue;
}
Message query, response;
try {
query = new Message(in);
response = generateReply(query, in, s);
if (response == null)
continue;
}
catch (IOException e) {
response = formerrMessage(in);
}
byte [] out = response.toWire();
dataOut = new DataOutputStream(s.getOutputStream());
dataOut.writeShort(out.length);
dataOut.write(out);
s.close();
}
}
catch (IOException e) {
System.out.println("serveTCP: " + e);
}
}
public void
serveUDP(short port) {
try {
DatagramSocket sock = new DatagramSocket(port);
while (true) {
short udpLength = 512;
byte [] in = new byte[udpLength];
DatagramPacket dp = new DatagramPacket(in, in.length);
try {
sock.receive(dp);
}
catch (InterruptedIOException e) {
continue;
}
Message query, response;
try {
query = new Message(in);
response = generateReply(query, in, null);
if (response == null)
continue;
}
catch (IOException e) {
response = formerrMessage(in);
}
byte [] out = response.toWire();
dp = new DatagramPacket(out, out.length,
dp.getAddress(), dp.getPort());
sock.send(dp);
}
}
catch (IOException e) {
System.out.println("serveUDP: " + e);
}
}
public void
addTCP(final short port) {
Thread t;
t = new Thread(new Runnable() {public void run() {serveTCP(port);}});
t.start();
}
public void
addUDP(final short port) {
Thread t;
t = new Thread(new Runnable() {public void run() {serveUDP(port);}});
t.start();
}
public static void main(String [] args) {
if (args.length > 1) {
System.out.println("usage: jnamed [conf]");
System.exit(0);
}
jnamed s;
try {
String conf;
if (args.length == 1)
conf = args[0];
else
conf = "jnamed.conf";
s = new jnamed(conf);
}
catch (IOException e) {
System.out.println(e);
}
}
}
|
package org.webdsl.tools;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.hibernate.dialect.Dialect;
import org.hibernate.tool.hbm2ddl.DatabaseMetadata;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.webdsl.WebDSLEntity;
public final class Utils {
public static Object[] concatArrays(Object[] ar1, Object[] ar2) {
List<Object> thelist = new ArrayList<Object>();
for(Object o : ar1)
thelist.add(o);
for(Object o : ar2)
thelist.add(o);
return thelist.toArray();
}
/*public static boolean equal(int a, int b) {
return a == b;
}
public static boolean equal(double a, double b) {
return a == b;
}
public static boolean equal(int a, Integer b) {
return b.equals(a);
}
public static boolean equal(double a, Double b) {
return b.equals(a);
}*/
public static boolean equal(Object a, Object b) {
if(a == null && b == null) {
return true;
} else if( (a == null && b != null) || (a != null && b == null) ) {
return false;
}
if(a instanceof Long && b instanceof Integer) {
return ((Long) a).longValue() == ((Integer) b).intValue();
}
if(a instanceof Integer && b instanceof Long) {
return ((Long) b).longValue() == ((Integer) a).intValue();
}
return a.equals(b);
}
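// Usage sketch (added comment, not in the original file): unlike plain equals(), this helper
// treats two nulls as equal and a Long and an Integer holding the same value as equal:
//   Utils.equal(null, null)                              -> true
//   Utils.equal(Long.valueOf(42L), Integer.valueOf(42))  -> true
//   Long.valueOf(42L).equals(Integer.valueOf(42))        -> false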
public static Object cast(Object e2 , Class<?> t) {
return (e2 instanceof org.hibernate.proxy.HibernateProxy)?
t.cast( ((org.hibernate.proxy.HibernateProxy) e2).getHibernateLazyInitializer().getImplementation())
: t.cast(e2);
}
public static boolean isInstance(Object o, Class<?> c) {
//org.hibernate.Hibernate.initialize(o);
if(o instanceof WebDSLEntity) {
return ((WebDSLEntity)o).isInstance(c);
} else {
return c.isInstance(o);
}
}
public static String encodeIdList(Collection<?> c){
String res = "";
for(Object obj: c) {
WebDSLEntity e = (WebDSLEntity) obj;
res+=e.getId()+",";
}
return res.substring(0, Math.max(0,res.length()-1));
}
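// Example (illustrative comment): for entities whose getId() values are "a", "b" and "c"
// this produces "a,b,c" -- the ids joined by commas with no trailing separator; an empty
// collection produces "".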
/*public <T> T deproxy(Object maybeProxy, Class<T> baseClass) throws ClassCastException {
if (maybeProxy instanceof org.hibernate.proxy.HibernateProxy) {
return baseClass.cast(((org.hibernate.proxy.HibernateProxy) maybeProxy).getHibernateLazyInitializer().getImplementation());
}
return baseClass.cast(maybeProxy);
} */
public static boolean containsDigit(String s){
for(char c : s.toCharArray()){
if(Character.isDigit(c)){
return true;
}
}
return false;
}
public static boolean containsLowerCase(String s){
for(char c : s.toCharArray()){
if(Character.isLowerCase(c)){
return true;
}
}
return false;
}
public static boolean containsUpperCase(String s){
for(char c : s.toCharArray()){
if(Character.isUpperCase(c)){
return true;
}
}
return false;
}
private static java.util.regex.Pattern cleanUrlPattern = java.util.regex.Pattern.compile("[a-zA-Z0-9-]*");
public static boolean isCleanUrl(String s){
return cleanUrlPattern.matcher(s).matches();
}
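// Example (illustrative comment): the pattern accepts only letters, digits and hyphens, so
// "my-page-2" passes while "my page" and "page#1" do not; note that the empty string also
// matches, because the pattern uses '*' rather than '+'.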
public static String secretDigest(String s){
org.jasypt.util.password.StrongPasswordEncryptor temp = new org.jasypt.util.password.StrongPasswordEncryptor();
return temp.encryptPassword(s);
}
public static boolean secretCheck(String s1,String s2){
org.jasypt.util.password.StrongPasswordEncryptor temp = new org.jasypt.util.password.StrongPasswordEncryptor();
return temp.checkPassword(s2,s1);
}
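// Usage sketch (added comment, not in the original file): these helpers wrap jasypt's
// StrongPasswordEncryptor, so each digest is salted and differs between calls, but a stored
// digest can still be verified against the plain text:
//   String digest = Utils.secretDigest("hunter2");
//   boolean ok = Utils.secretCheck(digest, "hunter2");   // true: first arg is the stored
//                                                        // digest, second the candidate
//   boolean bad = Utils.secretCheck(digest, "wrong");    // false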
//@TODO: there are several issues with primitive types in the generated code; it would be better if they were always boxed
public static boolean isNullAutoBox(Object o){ return o == null; }
public static String escapeHtml(String s){
return org.apache.commons.lang3.StringEscapeUtils.escapeHtml4(s);
}
public static String escapeHtml(Object o){ // covers primitive types due to autoboxing
return o == null ? "" : org.apache.commons.lang3.StringEscapeUtils.escapeHtml4(o.toString());
}
public static String showAttributeEscapeHtml(String s1, Object s2){
return " " + escapeHtml(s1) + "=\"" + ("onkeyup".equals(s1)?"onkeyupdelay(function(){" + escapeHtml(s2) + "});":escapeHtml(s2)) + "\"";
}
// An alternative implementation of FieldInterceptorImpl.readObject / AbstractFieldInterceptor.intercept that supports initializing a single lazy property
public static Object readLazyProperty(org.webdsl.WebDSLEntity entity, org.hibernate.bytecode.javassist.FieldHandler fieldHandler, String fieldName, Object value) {
if(fieldHandler == null) return value;
org.hibernate.intercept.javassist.FieldInterceptorImpl fieldInterceptor = (org.hibernate.intercept.javassist.FieldInterceptorImpl)fieldHandler;
org.hibernate.engine.SessionImplementor session = fieldInterceptor.getSession();
if ( session == null ) {
throw new org.hibernate.LazyInitializationException( "entity with lazy properties is not associated with a session" );
}
else if ( !session.isOpen() || !session.isConnected() ) {
throw new org.hibernate.LazyInitializationException( "session is not connected" );
}
final org.hibernate.engine.EntityEntry entry = session.getPersistenceContext().getEntry( entity );
if ( entry == null ) {
throw new org.hibernate.HibernateException( "entity is not associated with the session: " + entity.getId() );
}
final Object[] snapshot = entry.getLoadedState();
org.hibernate.engine.SessionFactoryImplementor factory = session.getFactory();
org.hibernate.persister.entity.EntityPersister persister = factory.getEntityPersister(fieldInterceptor.getEntityName());
org.hibernate.type.Type type = persister.getPropertyType(fieldName);
int propertyIndex = persister.getEntityMetamodel().getPropertyIndex(fieldName);
// Here we initialize the value from the persistence context or from the database
Object propValue = null;
try{
propValue = type.nullSafeGet(null, (String)null, session, entity);
}
catch(java.sql.SQLException sqle) {
throw org.hibernate.exception.JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not initialize lazy property: " +
org.hibernate.pretty.MessageHelper.infoString( persister, entity.getId(), factory ),
null
);
}
// Here we do the same as AbstractEntityPersister.initializeLazyProperty(String,Object,SessionImplementor,Object[],int,Object)
persister.setPropertyValue(entity, propertyIndex, propValue, session.getEntityMode());
if(snapshot != null) {
snapshot[ propertyIndex ] = type.deepCopy( propValue, session.getEntityMode(), factory );
}
return propValue;
// An earlier implementation
/*org.hibernate.type.EntityType type = (org.hibernate.type.EntityType)persister.getPropertyType(fieldName);
org.hibernate.engine.EntityUniqueKey euk = new org.hibernate.engine.EntityUniqueKey(
type.getAssociatedEntityName(),
type.getRHSUniqueKeyPropertyName(),
entity.getId(),
persister.getIdentifierType(),
session.getEntityMode(),
factory);
Object result = session.getPersistenceContext().getEntity(euk);
if(result == null) {
persister.getPropertyType(fieldName).nullSafeGet(null, null, session, entity);
}
else {
// here we do the same as AbstractEntityPersister.initializeLazyProperty(String,Object,SessionImplementor,Object[],int,Object)
persister.setPropertyValue(entity, ((org.hibernate.persister.entity.AbstractEntityPersister)persister).getPropertyIndex(fieldName), result, session.getEntityMode());
fieldInterceptor.getUninitializedFields().remove(fieldName);
return result;
}*/
//return fieldHandler.readObject(entity, fieldName, value);
}
public static void handleSchemaCreateUpdate(SessionFactory sessionFactory, Configuration annotationConfiguration) throws java.sql.SQLException {
//database schema create/update
String dbmode = utils.BuildProperties.getDbMode();
if("update".equals(dbmode) || "create-drop".equals(dbmode)){
Dialect dialect = Dialect.getDialect(annotationConfiguration.getProperties());
Session session = sessionFactory.openSession();
DatabaseMetadata meta = new DatabaseMetadata(session.connection(), dialect);
StringBuffer sb;
if("create-drop".equals(dbmode)){
String[] dropscript = annotationConfiguration.generateDropSchemaScript(dialect);
if(dropscript.length>0){ org.webdsl.logging.Logger.info("=== dbmode=create-drop - Logging drop table SQL statements ==="); }
else{ org.webdsl.logging.Logger.info("=== dbmode=create-drop - No drop table SQL statements were generated. ==="); }
sb = new StringBuffer("\n");
for(String s : Arrays.asList(dropscript)){
sb.append(s);
sb.append("\n");
}
org.webdsl.logging.Logger.info(sb);
String[] createscript = annotationConfiguration.generateSchemaCreationScript(dialect);
if(createscript.length>0){ org.webdsl.logging.Logger.info("=== dbmode=create-drop - Logging create table SQL statements ==="); }
else{ org.webdsl.logging.Logger.info("=== dbmode=create-drop - No create table SQL statements were generated. ==="); }
sb = new StringBuffer("\n");
for(String s : Arrays.asList(createscript)){
sb.append(s);
sb.append("\n");
}
org.webdsl.logging.Logger.info(sb);
org.webdsl.logging.Logger.info("=== dbmode=create-drop - Running database schema drop and create ===");
boolean script = true;
boolean doUpdate = true;
new SchemaExport( annotationConfiguration ).create( script, doUpdate );
org.webdsl.logging.Logger.info("=== dbmode=create-drop - Finished database schema drop and create ===");
}
else if("update".equals(dbmode)){
String[] updatescript = annotationConfiguration.generateSchemaUpdateScript(dialect, meta);
if(updatescript.length>0){
sb = new StringBuffer("=== dbmode=update - Logging update table SQL statements ===\n\n");
for(String s : Arrays.asList(updatescript)){
sb.append(s);
sb.append("\n");
}
org.webdsl.logging.Logger.info(sb);
org.webdsl.logging.Logger.info("=== dbmode=update - Running database schema update ===");
boolean script = true;
boolean doUpdate = true;
new SchemaUpdate( annotationConfiguration ).execute( script, doUpdate );
org.webdsl.logging.Logger.info("=== dbmode=update - Finished database schema update ===");
}
else{ org.webdsl.logging.Logger.info("=== dbmode=update - No update table SQL statements were generated. Schema update will be skipped. ==="); }
}
session.close();
}
else{
org.webdsl.logging.Logger.info("=== application.ini contains setting 'dbmode="+dbmode+"', only 'update' or 'create-drop' will trigger database schema updates ===");
}
}
}
|
package org.languagetool.rules.spelling.morfologik;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jetbrains.annotations.Nullable;
import org.languagetool.AnalyzedSentence;
import org.languagetool.AnalyzedTokenReadings;
import org.languagetool.JLanguageTool;
import org.languagetool.Language;
import org.languagetool.UserConfig;
import org.languagetool.languagemodel.LanguageModel;
import org.languagetool.rules.Categories;
import org.languagetool.rules.ITSIssueType;
import org.languagetool.rules.RuleMatch;
import org.languagetool.rules.spelling.SpellingCheckRule;
import org.languagetool.rules.spelling.suggestions.SuggestionsChanges;
import org.languagetool.rules.spelling.suggestions.SuggestionsOrderer;
import org.languagetool.rules.spelling.suggestions.SuggestionsOrdererFeatureExtractor;
import org.languagetool.rules.spelling.suggestions.XGBoostSuggestionsOrderer;
import org.languagetool.tools.Tools;
public abstract class MorfologikSpellerRule extends SpellingCheckRule {
protected MorfologikMultiSpeller speller1;
protected MorfologikMultiSpeller speller2;
protected MorfologikMultiSpeller speller3;
protected Locale conversionLocale;
private final SuggestionsOrderer suggestionsOrderer;
private final boolean runningExperiment;
private boolean ignoreTaggedWords = false;
private boolean checkCompound = false;
private Pattern compoundRegex = Pattern.compile("-");
private final UserConfig userConfig;
/**
* Get the filename, e.g., <tt>/resource/pl/spelling.dict</tt>.
*/
public abstract String getFileName();
@Override
public abstract String getId();
public MorfologikSpellerRule(ResourceBundle messages, Language language) throws IOException {
this(messages, language, null);
}
public MorfologikSpellerRule(ResourceBundle messages, Language language, UserConfig userConfig) throws IOException {
this(messages, language, userConfig, Collections.emptyList());
}
public MorfologikSpellerRule(ResourceBundle messages, Language language, UserConfig userConfig, List<Language> altLanguages) throws IOException {
this(messages, language, userConfig, altLanguages, null);
}
public MorfologikSpellerRule(ResourceBundle messages, Language language, UserConfig userConfig,
List<Language> altLanguages, LanguageModel languageModel) throws IOException {
super(messages, language, userConfig, altLanguages, languageModel);
this.userConfig = userConfig;
super.setCategory(Categories.TYPOS.getCategory(messages));
this.conversionLocale = conversionLocale != null ? conversionLocale : Locale.getDefault();
init();
setLocQualityIssueType(ITSIssueType.Misspelling);
if (SuggestionsChanges.isRunningExperiment("NewSuggestionsOrderer")) {
suggestionsOrderer = new SuggestionsOrdererFeatureExtractor(language, this.languageModel);
runningExperiment = true;
} else {
runningExperiment = false;
suggestionsOrderer = new XGBoostSuggestionsOrderer(language, languageModel);
}
}
@Override
public String getDescription() {
return messages.getString("desc_spelling");
}
public void setLocale(Locale locale) {
conversionLocale = locale;
}
/**
* Skip words that are known in the POS tagging dictionary, assuming they
* cannot be incorrect.
*/
public void setIgnoreTaggedWords() {
ignoreTaggedWords = true;
}
@Override
public RuleMatch[] match(AnalyzedSentence sentence) throws IOException {
List<RuleMatch> ruleMatches = new ArrayList<>();
AnalyzedTokenReadings[] tokens = getSentenceWithImmunization(sentence).getTokensWithoutWhitespace();
//lazy init
if (speller1 == null) {
String binaryDict = null;
if (JLanguageTool.getDataBroker().resourceExists(getFileName()) || Paths.get(getFileName()).toFile().exists()) {
binaryDict = getFileName();
}
if (binaryDict != null) {
initSpeller(binaryDict);
} else {
// should not happen, as we only configure this rule (or rather its subclasses)
// when we have the resources:
return toRuleMatchArray(ruleMatches);
}
}
int idx = -1;
for (AnalyzedTokenReadings token : tokens) {
idx++;
if (canBeIgnored(tokens, idx, token)) {
continue;
}
int startPos = token.getStartPos();
// if we use token.getToken() we'll get ignored characters inside, and the speller will choke on them
String word = token.getAnalyzedToken(0).getToken();
int newRuleIdx = ruleMatches.size();
if (tokenizingPattern() == null) {
ruleMatches.addAll(getRuleMatches(word, startPos, sentence, ruleMatches, idx, tokens));
} else {
int index = 0;
Matcher m = tokenizingPattern().matcher(word);
while (m.find()) {
String match = word.subSequence(index, m.start()).toString();
ruleMatches.addAll(getRuleMatches(match, startPos + index, sentence, ruleMatches, idx, tokens));
index = m.end();
}
if (index == 0) { // tokenizing char not found
ruleMatches.addAll(getRuleMatches(word, startPos, sentence, ruleMatches, idx, tokens));
} else {
ruleMatches.addAll(getRuleMatches(word.subSequence(index, word.length()).toString(), startPos + index, sentence, ruleMatches, idx, tokens));
}
}
if (ruleMatches.size() > newRuleIdx) {
// matches added for current token - need to adjust for hidden characters
int hiddenCharOffset = token.getToken().length() - word.length();
if (hiddenCharOffset > 0) {
for (int i = newRuleIdx; i < ruleMatches.size(); i++) {
RuleMatch ruleMatch = ruleMatches.get(i);
if( token.getEndPos() < ruleMatch.getToPos() ) // done by multi-token speller, no need to adjust
continue;
ruleMatch.setOffsetPosition(ruleMatch.getFromPos(), ruleMatch.getToPos()+hiddenCharOffset);
}
}
}
}
return toRuleMatchArray(ruleMatches);
}
private void initSpeller(String binaryDict) throws IOException {
String plainTextDict = null;
String languageVariantPlainTextDict = null;
if (getSpellingFileName() != null && JLanguageTool.getDataBroker().resourceExists(getSpellingFileName())) {
plainTextDict = getSpellingFileName();
}
if (getLanguageVariantSpellingFileName() != null && JLanguageTool.getDataBroker().resourceExists(getLanguageVariantSpellingFileName())) {
languageVariantPlainTextDict = getLanguageVariantSpellingFileName();
}
speller1 = new MorfologikMultiSpeller(binaryDict, plainTextDict, languageVariantPlainTextDict, userConfig, 1);
speller2 = new MorfologikMultiSpeller(binaryDict, plainTextDict, languageVariantPlainTextDict, userConfig, 2);
speller3 = new MorfologikMultiSpeller(binaryDict, plainTextDict, languageVariantPlainTextDict, userConfig, 3);
setConvertsCase(speller1.convertsCase());
}
private boolean canBeIgnored(AnalyzedTokenReadings[] tokens, int idx, AnalyzedTokenReadings token) throws IOException {
return token.isSentenceStart() ||
token.isImmunized() ||
token.isIgnoredBySpeller() ||
isUrl(token.getToken()) ||
isEMail(token.getToken()) ||
(ignoreTaggedWords && token.isTagged() && !isProhibited(token.getToken())) ||
ignoreToken(tokens, idx);
}
/**
* @return true if the word is misspelled
* @since 2.4
*/
protected boolean isMisspelled(MorfologikMultiSpeller speller, String word) {
if (!speller.isMisspelled(word)) {
return false;
}
if (checkCompound && compoundRegex.matcher(word).find()) {
String[] words = compoundRegex.split(word);
for (String singleWord: words) {
if (speller.isMisspelled(singleWord)) {
return true;
}
}
return false;
}
return true;
}
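// Illustrative note (added comment): with checkCompound enabled and the default compoundRegex
// "-", a token like "well-knwon" is reported because one of its parts is misspelled, while
// "well-known" is accepted even if the hyphenated form itself is missing from the dictionary,
// assuming both parts are known to the speller.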
protected int getFrequency(MorfologikMultiSpeller speller, String word) {
return speller.getFrequency(word);
}
protected List<RuleMatch> getRuleMatches(String word, int startPos, AnalyzedSentence sentence, List<RuleMatch> ruleMatchesSoFar, int idx, AnalyzedTokenReadings[] tokens) throws IOException {
List<RuleMatch> ruleMatches = new ArrayList<>();
RuleMatch ruleMatch = null;
if (isMisspelled(speller1, word) || isProhibited(word)) {
if (ruleMatchesSoFar.size() > 0 && ruleMatchesSoFar.get(ruleMatchesSoFar.size() - 1).getToPos() > startPos) {
return ruleMatches; // the current word is already dealt with in the previous match, so do nothing
}
if (idx > 0) {
String prevWord = tokens[idx-1].getToken();
if (prevWord.length() > 0 && !prevWord.matches(".*\\d.*")) {
int prevStartPos = tokens[idx - 1].getStartPos();
// "thanky ou" -> "thank you"
String sugg1a = prevWord.substring(0, prevWord.length() - 1);
String sugg1b = prevWord.substring(prevWord.length() - 1) + word;
if (sugg1a.length() > 1 && sugg1b.length() > 2 && !isMisspelled(speller1, sugg1a) && !isMisspelled(speller1, sugg1b) &&
getFrequency(speller1, sugg1a) + getFrequency(speller1, sugg1b) > getFrequency(speller1, prevWord)) {
ruleMatch = createWrongSplitMatch(sentence, ruleMatchesSoFar, startPos, word, sugg1a, sugg1b, prevStartPos);
}
// "than kyou" -> "thank you" ; but not "She awaked" -> "Shea waked"
String sugg2a = prevWord + word.substring(0, 1);
String sugg2b = word.substring(1);
if (sugg2a.length() > 1 && sugg2b.length() > 2 && !isMisspelled(speller1, sugg2a) && !isMisspelled(speller1, sugg2b)) {
if (ruleMatch == null) {
if (getFrequency(speller1, sugg2a) + getFrequency(speller1, sugg2b) > getFrequency(speller1, prevWord)) {
ruleMatch = createWrongSplitMatch(sentence, ruleMatchesSoFar, startPos, word, sugg2a, sugg2b, prevStartPos);
}
} else {
ruleMatch.addSuggestedReplacement((sugg2a + " " + sugg2b).trim());
}
}
// "g oing-> "going"
String sugg = prevWord + word;
if (word.equals(word.toLowerCase()) && !isMisspelled(speller1, sugg)) {
if (ruleMatch == null) {
if (getFrequency(speller1, sugg) >= getFrequency(speller1, prevWord)) {
ruleMatch = new RuleMatch(this, sentence, prevStartPos, startPos + word.length(),
messages.getString("spelling"), messages.getString("desc_spelling_short"));
ruleMatch.setSuggestedReplacement(sugg);
}
} else {
ruleMatch.addSuggestedReplacement(sugg);
}
}
}
}
if (ruleMatch != null) {
ruleMatches.add(ruleMatch);
return ruleMatches;
}
// the same with the next word
if (idx < tokens.length - 1) {
String nextWord = tokens[idx + 1].getToken();
if (nextWord.length() > 0 && !nextWord.matches(".*\\d.*")) {
int nextStartPos = tokens[idx + 1].getStartPos();
String sugg1a = word.substring(0, word.length() - 1);
String sugg1b = word.substring(word.length() - 1) + nextWord;
if (sugg1a.length() > 1 && sugg1b.length() > 2 && !isMisspelled(speller1, sugg1a) && !isMisspelled(speller1, sugg1b)) {
if (getFrequency(speller1, sugg1a) + getFrequency(speller1, sugg1b) > getFrequency(speller1, nextWord)) {
ruleMatch = createWrongSplitMatch(sentence, ruleMatchesSoFar, nextStartPos, nextWord, sugg1a, sugg1b, startPos);
}
}
String sugg2a = word + nextWord.substring(0, 1);
String sugg2b = nextWord.substring(1);
if (sugg2a.length() > 1 && sugg2b.length() > 2 && !isMisspelled(speller1, sugg2a) && !isMisspelled(speller1, sugg2b)) {
if (ruleMatch == null) {
if (getFrequency(speller1, sugg2a) + getFrequency(speller1, sugg2b) > getFrequency(speller1, nextWord)) {
ruleMatch = createWrongSplitMatch(sentence, ruleMatchesSoFar, nextStartPos, nextWord, sugg2a, sugg2b, startPos);
}
} else {
ruleMatch.addSuggestedReplacement((sugg2a + " " + sugg2b).trim());
}
}
String sugg = word + nextWord;
if (nextWord.equals(nextWord.toLowerCase()) && !isMisspelled(speller1, sugg)) {
if (ruleMatch == null) {
if (getFrequency(speller1, sugg) >= getFrequency(speller1, nextWord)) {
ruleMatch = new RuleMatch(this, sentence, startPos, nextStartPos + nextWord.length(),
messages.getString("spelling"), messages.getString("desc_spelling_short"));
ruleMatch.setSuggestedReplacement(sugg);
}
} else {
ruleMatch.addSuggestedReplacement(sugg);
}
}
}
}
if (ruleMatch != null) {
ruleMatches.add(ruleMatch);
return ruleMatches;
}
Language acceptingLanguage = acceptedInAlternativeLanguage(word);
if (acceptingLanguage != null) {
// e.g. "Der Typ ist in UK echt famous" -> could be German 'famos'
ruleMatch = new RuleMatch(this, sentence, startPos, startPos + word.length(),
Tools.i18n(messages, "accepted_in_alt_language", word, messages.getString(acceptingLanguage.getShortCode())));
ruleMatch.setType(RuleMatch.Type.Hint);
} else {
ruleMatch = new RuleMatch(this, sentence, startPos, startPos + word.length(), messages.getString("spelling"),
messages.getString("desc_spelling_short"));
}
boolean fullResults = SuggestionsChanges.getInstance() != null &&
SuggestionsChanges.getInstance().getCurrentExperiment() != null &&
(boolean) SuggestionsChanges.getInstance().getCurrentExperiment()
.parameters.getOrDefault("fullSuggestionCandidates", Boolean.FALSE);
if (userConfig == null || userConfig.getMaxSpellingSuggestions() == 0 || ruleMatchesSoFar.size() <= userConfig.getMaxSpellingSuggestions()) {
List<String> defaultSuggestions = speller1.getSuggestionsFromDefaultDicts(word);
List<String> userSuggestions = speller1.getSuggestionsFromUserDicts(word);
//System.out.println("speller1: " + suggestions);
if (word.length() >= 3 && (fullResults || defaultSuggestions.isEmpty())) {
// speller1 uses a maximum edit distance of 1, so it won't find suggestions for "garentee", "greatful", etc.
//System.out.println("speller2: " + speller2.getSuggestions(word));
defaultSuggestions.addAll(speller2.getSuggestionsFromDefaultDicts(word));
userSuggestions.addAll(speller2.getSuggestionsFromUserDicts(word));
if (word.length() >= 5 && (fullResults || defaultSuggestions.isEmpty())) {
//System.out.println("speller3: " + speller3.getSuggestions(word));
defaultSuggestions.addAll(speller3.getSuggestionsFromDefaultDicts(word));
userSuggestions.addAll(speller3.getSuggestionsFromUserDicts(word));
}
}
//System.out.println("getAdditionalTopSuggestions(suggestions, word): " + getAdditionalTopSuggestions(suggestions, word));
defaultSuggestions.addAll(0, getAdditionalTopSuggestions(defaultSuggestions, word));
//System.out.println("getAdditionalSuggestions(suggestions, word): " + getAdditionalSuggestions(suggestions, word));
defaultSuggestions.addAll(getAdditionalSuggestions(defaultSuggestions, word));
if (!(defaultSuggestions.isEmpty() && userSuggestions.isEmpty())) {
filterSuggestions(defaultSuggestions);
filterDupes(userSuggestions);
defaultSuggestions = orderSuggestions(defaultSuggestions, word);
// use suggestionsOrderer only with A/B testing or manually enabled experiments
if (runningExperiment) {
addSuggestionsToRuleMatch(word,
userSuggestions, defaultSuggestions, suggestionsOrderer, ruleMatch);
} else if (userConfig != null && userConfig.getAbTest() != null &&
userConfig.getAbTest().equals("SuggestionsRanker") &&
suggestionsOrderer.isMlAvailable() && userConfig.getTextSessionId() != null) {
boolean testingA = userConfig.getTextSessionId() % 2 == 0;
if (testingA) {
addSuggestionsToRuleMatch(word, userSuggestions, defaultSuggestions, null, ruleMatch);
} else {
addSuggestionsToRuleMatch(word, userSuggestions, defaultSuggestions, suggestionsOrderer, ruleMatch);
}
} else {
addSuggestionsToRuleMatch(word, userSuggestions, defaultSuggestions, null, ruleMatch);
}
}
} else {
// limited to save CPU
ruleMatch.setSuggestedReplacement(messages.getString("too_many_errors"));
}
ruleMatches.add(ruleMatch);
}
return ruleMatches;
}
/**
* Get the regular expression pattern used to tokenize
* the words as in the source dictionary. For example,
* it may contain a hyphen, if the words with hyphens are
* not included in the dictionary.
* @return A compiled {@link Pattern} that is used to tokenize words or {@code null}.
*/
@Nullable
public Pattern tokenizingPattern() {
return null;
}
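// Sketch (added comment, not part of the original class): a subclass whose dictionary stores
// only unhyphenated forms could override this as, for example,
//   @Override
//   public Pattern tokenizingPattern() {
//       return Pattern.compile("-");
//   }
// so that match() checks "foo-bar" as the two words "foo" and "bar".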
protected List<String> orderSuggestions(List<String> suggestions, String word) {
return suggestions;
}
private List<String> orderSuggestions(List<String> suggestions, String word, AnalyzedSentence sentence, int startPos) {
List<String> orderedSuggestions;
if (userConfig != null && userConfig.getAbTest() != null && userConfig.getAbTest().equals("SuggestionsOrderer") &&
suggestionsOrderer.isMlAvailable() && userConfig.getTextSessionId() != null) {
boolean logGroup = Math.random() < 0.01;
if (logGroup) {
System.out.print("Running A/B-Test for SuggestionsOrderer ->");
}
if (userConfig.getTextSessionId() % 2 == 0) {
if (logGroup) {
System.out.println("in group A (using new ordering)");
}
orderedSuggestions = suggestionsOrderer.orderSuggestionsUsingModel(suggestions, word, sentence, startPos);
} else {
if (logGroup) {
System.out.println("in group B (using old ordering)");
}
orderedSuggestions = orderSuggestions(suggestions, word);
}
} else {
if (suggestionsOrderer.isMlAvailable()) {
orderedSuggestions = suggestionsOrderer.orderSuggestionsUsingModel(suggestions, word, sentence, startPos);
} else {
orderedSuggestions = orderSuggestions(suggestions, word);
}
}
return orderedSuggestions;
}
/**
* @param checkCompound If true and the word is not in the dictionary
* it will be split (see {@link #setCompoundRegex(String)})
* and each component will be checked separately
* @since 2.4
*/
protected void setCheckCompound(boolean checkCompound) {
this.checkCompound = checkCompound;
}
/**
* @param compoundRegex see {@link #setCheckCompound(boolean)}
* @since 2.4
*/
protected void setCompoundRegex(String compoundRegex) {
this.compoundRegex = Pattern.compile(compoundRegex);
}
/**
* Checks whether a given String consists only of surrogate pairs.
* @param word to be checked
* @since 4.2
*/
protected boolean isSurrogatePairCombination (String word) {
if (word.length() > 1 && word.length() % 2 == 0 && word.codePointCount(0, word.length()) != word.length()) {
// some symbols, such as emojis, are encoded as a surrogate pair and therefore have a string length of 2
boolean isSurrogatePairCombination = true;
for (int i = 0; i < word.length() && isSurrogatePairCombination; i += 2) {
isSurrogatePairCombination &= Character.isSurrogatePair(word.charAt(i), word.charAt(i + 1));
}
return isSurrogatePairCombination;
}
return false;
}
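// Example (illustrative comment): an emoji outside the Basic Multilingual Plane is stored as
// one surrogate pair, so a token consisting of a single such emoji has length() 2 but
// codePointCount(0, 2) of 1, and this method returns true; an even-length ASCII word returns
// false because its code point count equals its length.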
/**
* Ignore surrogate pairs (emojis)
* @since 4.3
* @see org.languagetool.rules.spelling.SpellingCheckRule#ignoreWord(java.lang.String)
*/
@Override
protected boolean ignoreWord(String word) throws IOException {
return super.ignoreWord(word) || isSurrogatePairCombination(word);
}
}
|
package com.wirelust.personalapi.api.helpers;
import java.util.Date;
import com.wirelust.personalapi.api.v1.representations.AccountType;
import com.wirelust.personalapi.data.model.Account;
public class AccountHelper {
private AccountHelper() {
// class is static only
}
public static AccountType toRepresentation(Account account) {
return toRepresentation(account, false);
}
public static AccountType toExtendedRepresentation(Account account) {
return toRepresentation(account, true);
}
private static AccountType toRepresentation(Account account, boolean withExtended) {
if (account == null) {
return null;
}
AccountType at = new AccountType();
at.setId(account.getId());
at.setUsername(account.getUsername());
at.setRealName(account.getFullName());
at.setAvatar(account.getAvatar());
if (withExtended) {
at.setBackground(account.getBackground());
at.setBio(account.getBio());
at.setDateCreated(account.getDateCreated() == null ? null : new Date(account.getDateCreated().getTime()));
at.setDateModified(account.getDateModified() == null ? null
: new Date(account.getDateModified().getTime()));
at.setDateLogin(account.getDateLogin() == null ? null : new Date(account.getDateLogin().getTime()));
at.setLocation(account.getLocation());
at.setTimezone(account.getTimezone());
at.setWebsite(account.getWebsite());
at.setFollowersCount(account.getFollowersCount());
at.setFollowingCount(account.getFollowingCount());
at.setPublicVideoCount(account.getPublicVideoCount());
at.setTotalVideoCount(account.getTotalVideoCount());
}
return at;
}
}
|
package jkind.util;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;
import jkind.JKindException;
/**
* An arbitrary sized fractional value
*
* Stored as <code>numerator</code> / <code>denominator</code> where the
* fraction is in reduced form and <code>denominator</code> is always positive
*/
public class BigFraction implements Comparable<BigFraction> {
public static final BigFraction ZERO = new BigFraction(BigInteger.ZERO);
public static final BigFraction ONE = new BigFraction(BigInteger.ONE);
// The numerator and denominator are always stored in reduced form with the
// denominator always positive
final private BigInteger num;
final private BigInteger denom;
public BigFraction(BigInteger num, BigInteger denom) {
if (num == null || denom == null) {
throw new NullPointerException();
}
if (denom.equals(BigInteger.ZERO)) {
throw new ArithmeticException("Divide by zero");
}
BigInteger gcd = num.gcd(denom);
if (denom.compareTo(BigInteger.ZERO) > 0) {
this.num = num.divide(gcd);
this.denom = denom.divide(gcd);
} else {
this.num = num.negate().divide(gcd);
this.denom = denom.negate().divide(gcd);
}
}
public BigFraction(BigInteger num) {
this(num, BigInteger.ONE);
}
public static BigFraction valueOf(BigDecimal value) {
if (value.scale() >= 0) {
return new BigFraction(value.unscaledValue(), BigInteger.valueOf(10).pow(value.scale()));
} else {
return new BigFraction(value.unscaledValue().multiply(
BigInteger.valueOf(10).pow(-value.scale())));
}
}
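// Worked examples (added comment): new BigDecimal("2.50") has unscaled value 250 and scale 2,
// so valueOf gives 250/100, which reduces to 5/2; new BigDecimal("2.5E+2") has unscaled value
// 25 and scale -1, so valueOf gives the integer fraction 250/1.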
public BigInteger getNumerator() {
return num;
}
public BigInteger getDenominator() {
return denom;
}
public BigFraction add(BigFraction val) {
return new BigFraction(num.multiply(val.denom).add(val.num.multiply(denom)),
denom.multiply(val.denom));
}
public BigFraction add(BigInteger val) {
return add(new BigFraction(val));
}
public BigFraction subtract(BigFraction val) {
return new BigFraction(num.multiply(val.denom).subtract(val.num.multiply(denom)),
denom.multiply(val.denom));
}
public BigFraction subtract(BigInteger val) {
return subtract(new BigFraction(val));
}
public BigFraction multiply(BigFraction val) {
return new BigFraction(num.multiply(val.num), denom.multiply(val.denom));
}
public BigFraction multiply(BigInteger val) {
return multiply(new BigFraction(val));
}
public BigFraction divide(BigFraction val) {
return new BigFraction(num.multiply(val.denom), denom.multiply(val.num));
}
public BigFraction divide(BigInteger val) {
return divide(new BigFraction(val));
}
public BigFraction negate() {
return new BigFraction(num.negate(), denom);
}
public int signum() {
return num.signum();
}
public double doubleValue() {
double result = num.doubleValue() / denom.doubleValue();
if (Double.isFinite(result)) {
return result;
} else {
BigDecimal numDec = new BigDecimal(num);
BigDecimal denomDec = new BigDecimal(denom);
return numDec.divide(denomDec, MathContext.DECIMAL64).doubleValue();
}
}
public BigInteger floor() {
BigInteger[] divAndRem = num.divideAndRemainder(denom);
if (num.signum() >= 0 || divAndRem[1].equals(BigInteger.ZERO)) {
return divAndRem[0];
} else {
return divAndRem[0].subtract(BigInteger.ONE);
}
}
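// Worked examples (added comment): floor rounds toward negative infinity, so 7/2 gives 3,
// -7/2 gives -4, and an exact integer such as -6/2 (stored in reduced form as -3/1) gives -3.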
public BigDecimal toBigDecimal(int scale) {
BigDecimal decNum = new BigDecimal(num).setScale(scale);
BigDecimal decDenom = new BigDecimal(denom);
return decNum.divide(decDenom, BigDecimal.ROUND_DOWN);
}
public String toTruncatedDecimal(int scale, String suffix) {
if (scale <= 0) {
throw new JKindException("Scale must be positive");
}
BigDecimal dec = toBigDecimal(scale);
if (this.equals(BigFraction.valueOf(dec))) {
return Util.removeTrailingZeros(dec.toPlainString());
} else {
return dec.toPlainString() + suffix;
}
}
@Override
public int compareTo(BigFraction other) {
return num.multiply(other.denom).compareTo(other.num.multiply(denom));
}
@Override
public String toString() {
if (denom.equals(BigInteger.ONE)) {
return num.toString();
} else {
return num + "/" + denom;
}
}
@Override
public int hashCode() {
return num.hashCode() + denom.hashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof BigFraction)) {
return false;
}
BigFraction other = (BigFraction) obj;
return num.equals(other.num) && denom.equals(other.denom);
}
}
|
package org.languagetool.rules.spelling.morfologik;
import org.languagetool.AnalyzedSentence;
import org.languagetool.AnalyzedTokenReadings;
import org.languagetool.JLanguageTool;
import org.languagetool.Language;
import org.languagetool.rules.Category;
import org.languagetool.rules.RuleMatch;
import org.languagetool.rules.spelling.SpellingCheckRule;
import org.languagetool.AnalyzedToken;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.text.Normalizer;
import java.text.Normalizer.Form;
public abstract class MorfologikSpellerRule extends SpellingCheckRule {
protected MorfologikSpeller speller;
protected Locale conversionLocale;
private boolean ignoreTaggedWords = false;
/**
* Get the filename, e.g., <tt>/resource/pl/spelling.dict</tt>.
*/
public abstract String getFileName();
public MorfologikSpellerRule(ResourceBundle messages, Language language) throws IOException {
super(messages, language);
super.setCategory(new Category(messages.getString("category_typo")));
this.conversionLocale = conversionLocale != null ? conversionLocale : Locale.getDefault();
init();
}
@Override
public abstract String getId();
@Override
public String getDescription() {
return messages.getString("desc_spelling");
}
public void setLocale(Locale locale) {
conversionLocale = locale;
}
/**
* Skip words that are known in the POS tagging dictionary, assuming they
* cannot be incorrect.
*/
public void setIgnoreTaggedWords() {
ignoreTaggedWords = true;
}
@Override
public RuleMatch[] match(AnalyzedSentence text) throws IOException {
final List<RuleMatch> ruleMatches = new ArrayList<RuleMatch>();
final AnalyzedTokenReadings[] tokens = text.getTokensWithoutWhitespace();
//lazy init
if (speller == null) {
if (JLanguageTool.getDataBroker().resourceExists(getFileName())) {
speller = new MorfologikSpeller(getFileName(), conversionLocale);
} else {
// should not happen, as we only configure this rule (or rather its subclasses)
// when we have the resources:
return toRuleMatchArray(ruleMatches);
}
}
skip:
for (AnalyzedTokenReadings token : tokens) {
if (isUrl(token.getToken())) {
continue;
}
final String word = token.getToken();
if (ignoreWord(word) || token.isImmunized()) {
continue;
}
if (ignoreTaggedWords) {
for (AnalyzedToken at : token.getReadings()) {
if (!at.hasNoTag()) {
continue skip; // if it HAS a POS tag then it is a known word.
}
}
}
if (tokenizingPattern() == null) {
ruleMatches.addAll(getRuleMatch(word, token.getStartPos()));
} else {
int index = 0;
final Matcher m = tokenizingPattern().matcher(word);
while (m.find()) {
final String match = word.subSequence(index, m.start()).toString();
ruleMatches.addAll(getRuleMatch(match, token.getStartPos() + index));
index = m.end();
}
if (index == 0) { // tokenizing char not found
ruleMatches.addAll(getRuleMatch(word, token.getStartPos()));
} else {
ruleMatches.addAll(getRuleMatch(word.subSequence(
index, word.length()).toString(), token.getStartPos() + index));
}
}
}
return toRuleMatchArray(ruleMatches);
}
protected boolean isMisspelled(MorfologikSpeller speller, String word) {
return speller.isMisspelled(word);
}
private List<RuleMatch> getRuleMatch(final String word, final int startPos) {
final List<RuleMatch> ruleMatches = new ArrayList<RuleMatch>();
if (isMisspelled(speller, word)) {
final RuleMatch ruleMatch = new RuleMatch(this, startPos, startPos
+ word.length(), messages.getString("spelling"),
messages.getString("desc_spelling_short"));
// If the lower-case form of the word is not misspelled, return it as the only suggestion
if (!isMisspelled(speller, word.toLowerCase(conversionLocale))) {
List<String> suggestion = Arrays.asList(word.toLowerCase(conversionLocale));
ruleMatch.setSuggestedReplacements(suggestion);
ruleMatches.add(ruleMatch);
return ruleMatches;
}
List<String> suggestions = speller.getSuggestions(word);
suggestions = getAdditionalSuggestions(suggestions, word);
if (!suggestions.isEmpty()) {
ruleMatch.setSuggestedReplacements(orderSuggestions(suggestions,word));
}
ruleMatches.add(ruleMatch);
}
return ruleMatches;
}
/**
* Get the regular expression pattern used to tokenize
* the words as in the source dictionary. For example,
* it may contain a hyphen, if the words with hyphens are
* not included in the dictionary.
* @return A compiled {@link Pattern} that is used to tokenize words or null.
*/
public Pattern tokenizingPattern() {
return null;
}
/**
* Remove all diacritical marks from a String
*/
protected static String removeAccents(String text) {
return text == null ? null
: Normalizer.normalize(text, Form.NFD)
.replaceAll("\\p{InCombiningDiacriticalMarks}+", "");
}
protected List<String> getAdditionalSuggestions(List<String> suggestions, String word) {
return suggestions;
}
protected List<String> orderSuggestions(List<String> suggestions, String word) {
return suggestions;
}
}
|
package jkind.analysis;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import jkind.ExitCodes;
import jkind.Output;
import jkind.SolverOption;
import jkind.analysis.evaluation.DivisionChecker;
import jkind.lustre.Constant;
import jkind.lustre.EnumType;
import jkind.lustre.Equation;
import jkind.lustre.Expr;
import jkind.lustre.IdExpr;
import jkind.lustre.NamedType;
import jkind.lustre.Node;
import jkind.lustre.Program;
import jkind.lustre.TypeDef;
import jkind.lustre.VarDecl;
import jkind.util.Util;
public class StaticAnalyzer {
public static void check(Program program, SolverOption solver) {
checkErrors(program, solver);
checkSolverLimitations(program, solver);
checkWarnings(program, solver);
}
private static void checkErrors(Program program, SolverOption solver) {
boolean valid = true;
valid = valid && hasMainNode(program);
valid = valid && typesUnique(program);
valid = valid && TypesDefined.check(program);
valid = valid && TypeDependencyChecker.check(program);
valid = valid && enumsAndConstantsUnique(program);
valid = valid && ConstantDependencyChecker.check(program);
valid = valid && nodesUnique(program);
valid = valid && variablesUnique(program);
valid = valid && TypeChecker.check(program);
valid = valid && SubrangesNonempty.check(program);
valid = valid && ArraysNonempty.check(program);
valid = valid && constantsConstant(program);
valid = valid && DivisionChecker.check(program);
valid = valid && NodeDependencyChecker.check(program);
valid = valid && assignmentsSound(program);
valid = valid && ConstantArrayAccessBounded.check(program);
valid = valid && propertiesUnique(program);
valid = valid && propertiesExist(program);
valid = valid && propertiesBoolean(program);
if (solver != SolverOption.Z3) {
valid = valid && LinearChecker.check(program, Level.ERROR);
}
if (!valid) {
System.exit(ExitCodes.STATIC_ANALYSIS_ERROR);
}
}
private static void checkSolverLimitations(Program program, SolverOption solver) {
if (solver == SolverOption.YICES2) {
if (!Yices2FeatureChecker.check(program)) {
System.exit(ExitCodes.UNSUPPORTED_FEATURE);
}
} else if (solver == SolverOption.MATHSAT) {
if (!MathSatFeatureChecker.check(program)) {
System.exit(ExitCodes.UNSUPPORTED_FEATURE);
}
}
}
private static void checkWarnings(Program program, SolverOption solver) {
warnUnusedAsserts(program);
warnAlgebraicLoops(program);
WarnUnguardedPreVisitor.check(program);
if (solver == SolverOption.Z3) {
LinearChecker.check(program, Level.WARNING);
}
}
private static boolean hasMainNode(Program program) {
if (program.getMainNode() == null) {
Output.error("no main node");
return false;
}
return true;
}
private static boolean typesUnique(Program program) {
boolean unique = true;
Set<String> seen = new HashSet<>();
for (TypeDef def : program.types) {
if (!seen.add(def.id)) {
Output.error(def.location, "type " + def.id + " already defined");
unique = false;
}
}
return unique;
}
private static boolean enumsAndConstantsUnique(Program program) {
boolean unique = true;
Set<String> seen = new HashSet<>();
for (EnumType et : Util.getEnumTypes(program.types)) {
for (String value : et.values) {
if (!seen.add(value)) {
Output.error(et.location, value + " defined multiple times");
unique = false;
}
}
}
for (Constant c : program.constants) {
if (!seen.add(c.id)) {
Output.error(c.location, c.id + " defined multiple times");
unique = false;
}
}
for (Node node : program.nodes) {
for (VarDecl vd : Util.getVarDecls(node)) {
if (seen.contains(vd.id)) {
Output.error(vd.location, vd.id + " already defined globally");
unique = false;
}
}
}
return unique;
}
private static boolean nodesUnique(Program program) {
boolean unique = true;
Set<String> seen = new HashSet<>();
for (Node node : program.nodes) {
if (!seen.add(node.id)) {
Output.error(node.location, "node " + node.id + " already defined");
unique = false;
}
}
return unique;
}
private static boolean variablesUnique(Program program) {
boolean unique = true;
for (Node node : program.nodes) {
unique = variablesUnique(node) && unique;
}
return unique;
}
private static boolean variablesUnique(Node node) {
boolean unique = true;
Set<String> seen = new HashSet<>();
for (VarDecl decl : Util.getVarDecls(node)) {
if (!seen.add(decl.id)) {
Output.error(decl.location, "variable " + decl.id + " already declared");
unique = false;
}
}
return unique;
}
private static boolean constantsConstant(Program program) {
boolean constant = true;
ConstantAnalyzer constantAnalyzer = new ConstantAnalyzer(program);
for (Constant c : program.constants) {
if (!c.expr.accept(constantAnalyzer)) {
Output.error(c.location, "constant " + c.id + " does not have a constant value");
constant = false;
}
}
return constant;
}
private static boolean assignmentsSound(Program program) {
boolean sound = true;
for (Node node : program.nodes) {
sound = assignmentsSound(node) && sound;
}
return sound;
}
private static boolean assignmentsSound(Node node) {
Set<String> toAssign = new HashSet<>();
toAssign.addAll(Util.getIds(node.outputs));
toAssign.addAll(Util.getIds(node.locals));
Set<String> assigned = new HashSet<>();
boolean sound = true;
for (Equation eq : node.equations) {
for (IdExpr idExpr : eq.lhs) {
if (toAssign.contains(idExpr.id)) {
toAssign.remove(idExpr.id);
assigned.add(idExpr.id);
} else if (assigned.contains(idExpr.id)) {
Output.error(idExpr.location, "variable '" + idExpr.id
+ "' cannot be reassigned");
sound = false;
} else {
Output.error(idExpr.location, "variable '" + idExpr.id + "' cannot be assigned");
sound = false;
}
}
}
if (!toAssign.isEmpty()) {
Output.error("in node '" + node.id + "' variables must be assigned: " + toAssign);
sound = false;
}
return sound;
}
private static boolean propertiesUnique(Program program) {
boolean unique = true;
for (Node node : program.nodes) {
Set<String> seen = new HashSet<>();
for (String prop : node.properties) {
if (!seen.add(prop)) {
Output.error("in node '" + node.id + "' property '" + prop
+ "' declared multiple times");
unique = false;
}
}
}
return unique;
}
private static boolean propertiesExist(Program program) {
boolean exist = true;
for (Node node : program.nodes) {
Set<String> variables = new HashSet<>(Util.getIds(Util.getVarDecls(node)));
for (String prop : node.properties) {
if (!variables.contains(prop)) {
Output.error("in node '" + node.id + "' property '" + prop + "' does not exist");
exist = false;
}
}
}
return exist;
}
private static boolean propertiesBoolean(Program program) {
boolean allBoolean = true;
for (Node node : program.nodes) {
Set<String> booleans = getBooleans(node);
for (String prop : node.properties) {
if (!booleans.contains(prop)) {
Output.error("in node '" + node.id + "' property '" + prop
+ "' does not have type bool");
allBoolean = false;
}
}
}
return allBoolean;
}
private static Set<String> getBooleans(Node node) {
Set<String> booleans = new HashSet<>();
for (VarDecl varDecl : Util.getVarDecls(node)) {
if (varDecl.type == NamedType.BOOL) {
booleans.add(varDecl.id);
}
}
return booleans;
}
private static void warnUnusedAsserts(Program program) {
for (Node node : program.nodes) {
if (node.id.equals(program.main)) {
continue;
}
for (Expr expr : node.assertions) {
Output.warning(expr.location, "assertion in subnode ignored");
}
}
}
private static void warnAlgebraicLoops(Program program) {
for (Node node : program.nodes) {
Map<String, Set<String>> directDepends = new HashMap<>();
for (Equation eq : node.equations) {
Set<String> set = CurrIdExtractorVisitor.getCurrIds(eq.expr);
for (IdExpr idExpr : eq.lhs) {
directDepends.put(idExpr.id, set);
}
}
Set<String> covered = new HashSet<>();
for (Equation eq : node.equations) {
List<String> stack = new ArrayList<>();
for (IdExpr idExpr : eq.lhs) {
checkAlgebraicLoops(node.id, idExpr.id, stack, covered, directDepends);
}
}
}
}
private static boolean checkAlgebraicLoops(String node, String id, List<String> stack,
Set<String> covered, Map<String, Set<String>> directDepends) {
if (stack.contains(id)) {
StringBuilder text = new StringBuilder();
text.append("in node '" + node + "' possible algebraic loop: ");
for (String s : stack.subList(stack.indexOf(id), stack.size())) {
text.append(s + " -> ");
}
text.append(id);
Output.warning(text.toString());
return true;
}
if (!covered.add(id)) {
return false;
}
if (directDepends.containsKey(id)) {
stack.add(id);
for (String next : directDepends.get(id)) {
if (checkAlgebraicLoops(node, next, stack, covered, directDepends)) {
return true;
}
}
stack.remove(stack.size() - 1);
}
return false;
}
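// Illustrative note (added comment): for equations like "x = y + 1; y = x - 1;" the
// directDepends map holds x -> {y} and y -> {x}, so the depth-first walk finds x already on
// the stack and warns "in node '...' possible algebraic loop: x -> y -> x"; definitions
// guarded by a delay (e.g. "x = 0 -> pre y;") should not warn, since the name
// CurrIdExtractorVisitor suggests only current, non-pre identifiers are collected.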
}
|
package com.exedio.cope;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
public class DataTest extends AbstractLibTest
{
public DataTest()
{
super(Main.dataModel);
}
private DataItem item;
// TODO rename by length
private final byte[] data = new byte[]{-86,122,-8,23};
private final byte[] data2 = new byte[]{-97,35,-126,86,19,-8};
private final byte[] data8 = new byte[]{-54,104,-63,23,19,-45,71,-23};
private final byte[] data10 = new byte[]{-97,19,-8,35,-126,-86,122,86,19,-8};
private final byte[] data11 = new byte[]{22,-97,19,-8,35,-126,-86,122,86,19,-8};
private final byte[] dataEmpty = new byte[]{};
private byte[] dataBig;
public void setUp() throws Exception
{
super.setUp();
final int dataFileLength = data8.length;
final int dataBigLength = (1024*1024) + 77;
dataBig = new byte[dataBigLength];
for(int i = 0; i<dataBigLength; i++)
dataBig[i] = data8[i % dataFileLength];
deleteOnTearDown(item = new DataItem());
}
public void tearDown() throws Exception
{
// release memory
dataBig = null;
super.tearDown();
}
private void assertIt(final byte[] expectedData) throws MandatoryViolationException, IOException
{
assertIt(expectedData, item);
}
private void assertIt(final byte[] expectedData, final DataItem item)
throws MandatoryViolationException, IOException
{
assertIt(expectedData, item, oracle, model);
}
private static final void assertIt(final byte[] expectedData, final DataItem item, final boolean oracle, final Model model)
throws MandatoryViolationException, IOException
{
if(expectedData!=null && !(oracle && !model.getProperties().hasDatadirPath() && expectedData.length==0))
{
assertTrue(!item.isDataNull());
assertEquals(expectedData.length, item.getDataLength());
assertData(expectedData, item.getData());
final ByteArrayOutputStream tempStream = new ByteArrayOutputStream();
item.getData(tempStream);
assertData(expectedData, tempStream.toByteArray());
final File tempFile = File.createTempFile("cope-DataTest.", ".tmp");
assertTrue(tempFile.delete());
assertFalse(tempFile.exists());
item.getData(tempFile);
assertTrue(tempFile.exists());
assertEqualContent(expectedData, tempFile);
}
else
{
assertTrue(item.isDataNull());
assertEquals(-1, item.getDataLength());
assertEquals(null, item.getData());
final ByteArrayOutputStream tempStream = new ByteArrayOutputStream();
item.getData(tempStream);
assertEquals(0, tempStream.toByteArray().length);
final File tempFile = File.createTempFile("cope-DataTest.", ".tmp");
assertTrue(tempFile.delete());
assertFalse(tempFile.exists());
item.getData(tempFile);
assertFalse(tempFile.exists());
}
}
public void testData() throws MandatoryViolationException, IOException
{
assertEquals(10, data10.length);
assertEquals(11, data11.length);
// test model
assertEquals(item.TYPE, item.data.getType());
assertEquals("data", item.data.getName());
assertEquals(false, item.data.isMandatory());
assertEqualsUnmodifiable(list(), item.data.getPatterns());
assertEquals(item.data.DEFAULT_LENGTH, item.data.getMaximumLength());
assertEquals(item.TYPE, item.data10.getType());
assertEquals("data10", item.data10.getName());
assertEquals(false, item.data10.isMandatory());
assertEqualsUnmodifiable(list(), item.data10.getPatterns());
assertEquals(10, item.data10.getMaximumLength());
try
{
new DataAttribute(Item.OPTIONAL).lengthMax(0);
fail();
}
catch(RuntimeException e)
{
assertEquals("maximum length must be greater zero, but was 0.", e.getMessage());
}
try
{
new DataAttribute(Item.OPTIONAL).lengthMax(-10);
fail();
}
catch(RuntimeException e)
{
assertEquals("maximum length must be greater zero, but was -10.", e.getMessage());
}
// test data
assertIt(null);
// set byte[]
item.setData(data);
assertIt(data);
item.setData(data2);
assertIt(data2);
item.setData(dataEmpty);
assertIt(dataEmpty);
item.setData(dataBig);
assertIt(dataBig);
item.setData((byte[])null);
assertIt(null);
// set InputStream
item.setData(stream(data));
assertStreamClosed();
assertIt(data);
item.setData(stream(data2));
assertStreamClosed();
assertIt(data2);
item.setData(stream(dataEmpty));
assertStreamClosed();
assertIt(dataEmpty);
item.setData(stream(dataBig));
assertStreamClosed();
assertIt(dataBig);
item.setData((InputStream)null);
assertIt(null);
// set File
item.setData(file(data8));
assertIt(data8);
item.setData(file(dataEmpty));
assertIt(dataEmpty);
item.setData(file(dataBig));
assertIt(dataBig);
item.setData((File)null);
assertIt(null);
try
{
item.getData((OutputStream)null);
fail();
}
catch(NullPointerException e)
{
assertEquals(null, e.getMessage());
}
try
{
item.getData((File)null);
fail();
}
catch(NullPointerException e)
{
assertEquals(null, e.getMessage());
}
final DataSubItem subItem = new DataSubItem();
deleteOnTearDown(subItem);
subItem.setData(stream(data));
assertStreamClosed();
assertIt(data, subItem);
assertEquals(data.length, subItem.getDataLength());
// test maximum length
item.setData10(data10);
assertData(data10, item.getData10());
try
{
item.setData10(data11);
fail();
}
catch(DataLengthViolationException e)
{
assertEquals(item, e.getItem());
assertEquals(item.data10, e.getDataAttribute());
assertEquals(11, e.getLength());
assertEquals("length violation on DataItem.0, 11 bytes is too long for DataItem#data10", e.getMessage());
}
assertData(data10, item.getData10());
try
{
item.setData10(stream(data11));
fail();
}
catch(DataLengthViolationException e)
{
assertEquals(item, e.getItem());
assertEquals(item.data10, e.getDataAttribute());
assertEquals(-1, e.getLength());
assertEquals(e.getMessage(), "length violation on DataItem.0, is too long for DataItem#data10", e.getMessage());
}
if(model.getProperties().hasDatadirPath()) // TODO should not be needed
item.setData10(data10);
assertData(data10, item.getData10());
try
{
item.setData10(file(data11));
fail();
}
catch(DataLengthViolationException e)
{
assertEquals(item, e.getItem());
assertEquals(item.data10, e.getDataAttribute());
assertEquals(11, e.getLength());
assertEquals("length violation on DataItem.0, 11 bytes is too long for DataItem#data10", e.getMessage());
}
assertData(data10, item.getData10());
}
}
|
package org.cytoscape.prefuse.layouts.internal;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.cytoscape.model.CyNode;
import org.cytoscape.view.layout.AbstractParallelPartitionLayoutTask;
import org.cytoscape.view.layout.LayoutEdge;
import org.cytoscape.view.layout.LayoutNode;
import org.cytoscape.view.layout.LayoutPartition;
import org.cytoscape.view.model.CyNetworkView;
import org.cytoscape.view.model.View;
import org.cytoscape.work.undo.UndoSupport;
import prefuse.util.force.DragForce;
import prefuse.util.force.ForceItem;
import prefuse.util.force.ForceSimulator;
import prefuse.util.force.NBodyForce;
import prefuse.util.force.SpringForce;
import prefuse.util.force.StateMonitor;
public class ForceDirectedLayoutTask extends AbstractParallelPartitionLayoutTask {
// private ForceSimulator m_fsim;
private ForceDirectedLayout.Integrators integrator;
private final ForceDirectedLayoutContext context;
private final StateMonitor monitor;
/**
* Creates a new ForceDirectedLayout object.
*/
public ForceDirectedLayoutTask(
String displayName,
CyNetworkView networkView,
Set<View<CyNode>> nodesToLayOut,
ForceDirectedLayoutContext context,
ForceDirectedLayout.Integrators integrator,
String attrName,
UndoSupport undo
) {
super(displayName, context.singlePartition, networkView, nodesToLayOut, attrName, undo);
this.context = context;
this.integrator = integrator;
edgeWeighter = context.edgeWeighter;
edgeWeighter.setWeightAttribute(layoutAttribute);
monitor = new StateMonitor();
// m_fsim = new ForceSimulator(monitor);
// m_fsim.addForce(new NBodyForce(monitor));
// m_fsim.addForce(new SpringForce());
// m_fsim.addForce(new DragForce());
}
@Override
public String toString() {
return ForceDirectedLayout.ALGORITHM_DISPLAY_NAME;
}
@Override
public void layoutPartition(LayoutPartition part) {
// if (taskMonitor != null)
// taskMonitor.setStatusMessage("Partition " + part.getPartitionNumber() + ": Initializing...");
// Use a fresh, local simulator for each partition (the shared field above is commented out).
ForceSimulator m_fsim = new ForceSimulator(monitor);
m_fsim.addForce(new NBodyForce(monitor));
m_fsim.addForce(new SpringForce());
m_fsim.addForce(new DragForce());
// Calculate our edge weights
part.calculateEdgeWeights();
List<LayoutNode> nodeList = part.getNodeList();
List<LayoutEdge> edgeList = part.getEdgeList();
if (context.isDeterministic) {
Collections.sort(nodeList);
Collections.sort(edgeList);
}
Map<LayoutNode,ForceItem> forceItems = new HashMap<>();
// initialize nodes
for (LayoutNode ln : nodeList) {
if (cancelled)
return;
ForceItem fitem = forceItems.get(ln);
if (fitem == null) {
fitem = new ForceItem();
forceItems.put(ln, fitem);
}
fitem.mass = getMassValue(ln);
fitem.location[0] = 0f;
fitem.location[1] = 0f;
m_fsim.addItem(fitem);
}
// initialize edges
for (LayoutEdge e : edgeList) {
if (cancelled)
return;
LayoutNode n1 = e.getSource();
ForceItem f1 = forceItems.get(n1);
LayoutNode n2 = e.getTarget();
ForceItem f2 = forceItems.get(n2);
if (f1 == null || f2 == null)
continue;
m_fsim.addSpring(f1, f2, getSpringCoefficient(e), getSpringLength(e));
}
// perform layout
long timestep = 1000L;
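// The step size is annealed: timestep starts at 1000 and is repeatedly scaled by
// (1 - i/numIterations), so later iterations take ever smaller steps; the "+ 50"
// below keeps a minimum step so the simulator always advances.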
for (int i = 0; i < context.numIterations; i++) {
if (cancelled)
return;
// if (taskMonitor != null)
// taskMonitor.setStatusMessage(
// "Partition " + part.getPartitionNumber() + ": Iteration " + (i + 1)
// + " of " + context.numIterations + "...");
timestep *= (1.0 - i / (double) context.numIterations);
long step = timestep + 50;
m_fsim.runSimulator(step);
// setTaskStatus((int) (((double) i / (double) context.numIterations) * 90. + 5));
}
// update positions
part.resetNodes(); // reset the nodes so we get the new average location
for (LayoutNode ln : part.getNodeList()) {
if (cancelled)
return;
if (!ln.isLocked()) {
ForceItem fitem = forceItems.get(ln);
ln.setX(fitem.location[0]);
ln.setY(fitem.location[1]);
part.moveNodeToLocation(ln);
}
}
}
@Override
public void cancel() {
super.cancel();
monitor.cancel();
}
/**
* Get the mass value associated with the given node. Subclasses should
* override this method to perform custom mass assignment.
* @param n the node for which to compute the mass value
* @return the mass value for the node. This implementation returns the
* default node mass configured in the layout context.
*/
protected float getMassValue(LayoutNode n) {
return (float)context.defaultNodeMass;
}
/**
* Get the spring length for the given edge. Subclasses should
* override this method to perform custom spring length assignment.
* @param e the edge for which to compute the spring length
* @return the spring length for the edge. This implementation returns the
* context's default spring length, divided by the edge weight when a
* non-zero weight is present.
*/
protected float getSpringLength(LayoutEdge e) {
double weight = e.getWeight();
if (weight == 0.0)
return (float)(context.defaultSpringLength);
return (float)(context.defaultSpringLength/weight);
}
/**
* Get the spring coefficient for the given edge, which controls the
* tension or strength of the spring. Subclasses should
* override this method to perform custom spring tension assignment.
* @param e the edge for which to compute the spring coefficient.
* @return the spring coefficient for the edge. This implementation returns
* the default spring coefficient configured in the layout context.
*/
protected float getSpringCoefficient(LayoutEdge e) {
return (float)context.defaultSpringCoefficient;
}
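// Illustrative sketch, not part of the original class: a subclass could tune the
// physics by overriding the hooks above. The getNeighbors() call on LayoutNode is an
// assumption used only for illustration.
//
// @Override
// protected float getMassValue(LayoutNode n) {
// // give highly connected nodes more mass so hubs stay near the centre
// return 1.0f + n.getNeighbors().size();
// }
//
// @Override
// protected float getSpringCoefficient(LayoutEdge e) {
// // stiffer springs for heavier edges
// return (float) (1.0e-4 * Math.max(1.0, e.getWeight()));
// }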
}
|
package org.libreplan.web.planner.allocation;
import static org.libreplan.web.I18nHelper._;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.WeakHashMap;
import java.util.concurrent.Callable;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.Validate;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.joda.time.Period;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.libreplan.business.planner.entities.AggregateOfResourceAllocations;
import org.libreplan.business.planner.entities.AssignmentFunction;
import org.libreplan.business.planner.entities.AssignmentFunction.AssignmentFunctionName;
import org.libreplan.business.planner.entities.CalculatedValue;
import org.libreplan.business.planner.entities.GenericResourceAllocation;
import org.libreplan.business.planner.entities.ManualFunction;
import org.libreplan.business.planner.entities.ResourceAllocation;
import org.libreplan.business.planner.entities.SigmoidFunction;
import org.libreplan.business.planner.entities.SpecificResourceAllocation;
import org.libreplan.business.planner.entities.StretchesFunctionTypeEnum;
import org.libreplan.business.planner.entities.Task;
import org.libreplan.business.planner.entities.TaskElement;
import org.libreplan.business.resources.entities.Criterion;
import org.libreplan.business.workingday.EffortDuration;
import org.libreplan.web.common.EffortDurationBox;
import org.libreplan.web.common.IMessagesForUser;
import org.libreplan.web.common.MessagesForUser;
import org.libreplan.web.common.OnlyOneVisible;
import org.libreplan.web.common.Util;
import org.libreplan.web.planner.allocation.streches.StrechesFunctionConfiguration;
import org.zkoss.ganttz.timetracker.ICellForDetailItemRenderer;
import org.zkoss.ganttz.timetracker.IConvertibleToColumn;
import org.zkoss.ganttz.timetracker.PairOfLists;
import org.zkoss.ganttz.timetracker.TimeTrackedTable;
import org.zkoss.ganttz.timetracker.TimeTrackedTableWithLeftPane;
import org.zkoss.ganttz.timetracker.TimeTracker;
import org.zkoss.ganttz.timetracker.TimeTracker.IDetailItemFilter;
import org.zkoss.ganttz.timetracker.TimeTrackerComponentWithoutColumns;
import org.zkoss.ganttz.timetracker.zoom.DetailItem;
import org.zkoss.ganttz.timetracker.zoom.IZoomLevelChangedListener;
import org.zkoss.ganttz.timetracker.zoom.ZoomLevel;
import org.zkoss.ganttz.util.Interval;
import org.zkoss.zk.ui.Component;
import org.zkoss.zk.ui.WrongValueException;
import org.zkoss.zk.ui.event.Event;
import org.zkoss.zk.ui.event.EventListener;
import org.zkoss.zk.ui.event.Events;
import org.zkoss.zk.ui.util.Clients;
import org.zkoss.zk.ui.util.GenericForwardComposer;
import org.zkoss.zul.Button;
import org.zkoss.zul.Div;
import org.zkoss.zul.Grid;
import org.zkoss.zul.Hbox;
import org.zkoss.zul.Label;
import org.zkoss.zul.LayoutRegion;
import org.zkoss.zul.ListModel;
import org.zkoss.zul.Listbox;
import org.zkoss.zul.Listcell;
import org.zkoss.zul.Listitem;
import org.zkoss.zul.Messagebox;
import org.zkoss.zul.SimpleListModel;
import org.zkoss.zul.api.Column;
public class AdvancedAllocationController extends GenericForwardComposer {
public static class AllocationInput {
private final AggregateOfResourceAllocations aggregate;
private final IAdvanceAllocationResultReceiver resultReceiver;
private final TaskElement task;
public AllocationInput(AggregateOfResourceAllocations aggregate,
TaskElement task,
IAdvanceAllocationResultReceiver resultReceiver) {
Validate.notNull(aggregate);
Validate.notNull(resultReceiver);
Validate.notNull(task);
this.aggregate = aggregate;
this.task = task;
this.resultReceiver = resultReceiver;
}
List<ResourceAllocation<?>> getAllocationsSortedByStartDate() {
return getAggregate().getAllocationsSortedByStartDate();
}
EffortDuration getTotalEffort() {
return getAggregate().getTotalEffort();
}
AggregateOfResourceAllocations getAggregate() {
return aggregate;
}
String getTaskName() {
return task.getName();
}
IAdvanceAllocationResultReceiver getResultReceiver() {
return resultReceiver;
}
Interval calculateInterval() {
List<ResourceAllocation<?>> all = getAllocationsSortedByStartDate();
if (all.isEmpty()) {
return new Interval(task.getStartDate(), task
.getEndDate());
} else {
LocalDate start = min(all.get(0)
.getStartConsideringAssignments(), all.get(0)
.getStartDate());
LocalDate taskEndDate = LocalDate.fromDateFields(task
.getEndDate());
LocalDate end = max(getEnd(all), taskEndDate);
return new Interval(asDate(start), asDate(end));
}
}
private LocalDate min(LocalDate... dates) {
return Collections.min(Arrays.asList(dates), null);
}
private LocalDate max(LocalDate... dates) {
return Collections.max(Arrays.asList(dates), null);
}
private static LocalDate getEnd(List<ResourceAllocation<?>> all) {
ArrayList<ResourceAllocation<?>> reversed = reverse(all);
LocalDate end = reversed.get(0).getEndDate();
ListIterator<ResourceAllocation<?>> listIterator = reversed
.listIterator(1);
while (listIterator.hasNext()) {
ResourceAllocation<?> current = listIterator.next();
if (current.getEndDate().compareTo(end) >= 0) {
end = current.getEndDate();
} else {
return end;
}
}
return end;
}
private static ArrayList<ResourceAllocation<?>> reverse(
List<ResourceAllocation<?>> all) {
ArrayList<ResourceAllocation<?>> reversed = new ArrayList<ResourceAllocation<?>>(
all);
Collections.reverse(reversed);
return reversed;
}
private static Date asDate(LocalDate start) {
return start.toDateMidnight().toDate();
}
}
public interface IAdvanceAllocationResultReceiver {
public Restriction createRestriction();
public void accepted(AggregateOfResourceAllocations modifiedAllocations);
public void cancel();
}
public interface IBack {
public void goBack();
boolean isAdvanceAssignmentOfSingleTask();
}
public abstract static class Restriction {
public interface IRestrictionSource {
EffortDuration getTotalEffort();
LocalDate getStart();
LocalDate getEnd();
CalculatedValue getCalculatedValue();
}
public static Restriction build(IRestrictionSource restrictionSource) {
switch (restrictionSource.getCalculatedValue()) {
case END_DATE:
return Restriction.emptyRestriction();
case NUMBER_OF_HOURS:
return Restriction.onlyAssignOnInterval(restrictionSource
.getStart(), restrictionSource.getEnd());
case RESOURCES_PER_DAY:
return Restriction.emptyRestriction();
default:
throw new RuntimeException("unhandled case: "
+ restrictionSource.getCalculatedValue());
}
}
private static Restriction emptyRestriction() {
return new NoRestriction();
}
private static Restriction onlyAssignOnInterval(LocalDate start,
LocalDate end){
return new OnlyOnIntervalRestriction(start, end);
}
abstract LocalDate limitStartDate(LocalDate startDate);
abstract LocalDate limitEndDate(LocalDate localDate);
abstract boolean isDisabledEditionOn(DetailItem item);
public abstract boolean isInvalidTotalEffort(EffortDuration totalEffort);
public abstract void showInvalidEffort(IMessagesForUser messages,
EffortDuration totalEffort);
public abstract void markInvalidEffort(Row groupingRow,
EffortDuration currentEffort);
}
private static class OnlyOnIntervalRestriction extends Restriction {
private final LocalDate start;
private final LocalDate end;
private OnlyOnIntervalRestriction(LocalDate start, LocalDate end) {
super();
this.start = start;
this.end = end;
}
private org.joda.time.Interval intervalAllowed() {
return new org.joda.time.Interval(start.toDateTimeAtStartOfDay(),
end.toDateTimeAtStartOfDay());
}
@Override
boolean isDisabledEditionOn(DetailItem item) {
return !intervalAllowed().overlaps(
new org.joda.time.Interval(item.getStartDate(), item
.getEndDate()));
}
@Override
public boolean isInvalidTotalEffort(EffortDuration totalEffort) {
return false;
}
@Override
LocalDate limitEndDate(LocalDate argEnd) {
return end.compareTo(argEnd) < 0 ? end : argEnd;
}
@Override
LocalDate limitStartDate(LocalDate argStart) {
return start.compareTo(argStart) > 0 ? start : argStart;
}
@Override
public void showInvalidEffort(IMessagesForUser messages,
EffortDuration totalEffort) {
throw new UnsupportedOperationException();
}
@Override
public void markInvalidEffort(Row groupingRow,
EffortDuration currentEffort) {
throw new UnsupportedOperationException();
}
}
private static class NoRestriction extends Restriction {
@Override
boolean isDisabledEditionOn(DetailItem item) {
return false;
}
@Override
public boolean isInvalidTotalEffort(EffortDuration totalEffort) {
return false;
}
@Override
LocalDate limitEndDate(LocalDate endDate) {
return endDate;
}
@Override
LocalDate limitStartDate(LocalDate startDate) {
return startDate;
}
@Override
public void markInvalidEffort(Row groupingRow,
EffortDuration currentEffort) {
throw new UnsupportedOperationException();
}
@Override
public void showInvalidEffort(IMessagesForUser messages,
EffortDuration totalEffort) {
throw new UnsupportedOperationException();
}
}
private static final int VERTICAL_MAX_ELEMENTS = 25;
private IMessagesForUser messages;
private Component insertionPointTimetracker;
private Div insertionPointLeftPanel;
private LayoutRegion insertionPointRightPanel;
private Button paginationDownButton;
private Button paginationUpButton;
private Button verticalPaginationUpButton;
private Button verticalPaginationDownButton;
private TimeTracker timeTracker;
private PaginatorFilter paginatorFilter;
private Listbox advancedAllocationZoomLevel;
private TimeTrackerComponentWithoutColumns timeTrackerComponent;
private Grid leftPane;
private TimeTrackedTable<Row> table;
private IBack back;
private List<AllocationInput> allocationInputs;
private Component associatedComponent;
private Listbox advancedAllocationHorizontalPagination;
private Listbox advancedAllocationVerticalPagination;
private boolean fixedZoomByUser = false;
private ZoomLevel zoomLevel;
public AdvancedAllocationController(IBack back,
List<AllocationInput> allocationInputs) {
setInputData(back, allocationInputs);
}
private void setInputData(IBack back, List<AllocationInput> allocationInputs) {
Validate.notNull(back);
Validate.noNullElements(allocationInputs);
this.back = back;
this.allocationInputs = allocationInputs;
}
public void reset(IBack back, List<AllocationInput> allocationInputs) {
rowsCached = null;
setInputData(back, allocationInputs);
loadAndInitializeComponents();
}
@Override
public void doAfterCompose(Component comp) throws Exception {
super.doAfterCompose(comp);
normalLayout = comp.getFellow("normalLayout");
noDataLayout = comp.getFellow("noDataLayout");
onlyOneVisible = new OnlyOneVisible(normalLayout, noDataLayout);
this.associatedComponent = comp;
loadAndInitializeComponents();
Clients.evalJavaScript("ADVANCE_ALLOCATIONS.listenToScroll();");
}
private void loadAndInitializeComponents() {
messages = new MessagesForUser(associatedComponent
.getFellow("messages"));
if (allocationInputs.isEmpty()) {
onlyOneVisible.showOnly(noDataLayout);
} else {
onlyOneVisible.showOnly(normalLayout);
createComponents();
insertComponentsInLayout();
timeTrackerComponent.afterCompose();
table.afterCompose();
}
}
private class PaginatorFilter implements IDetailItemFilter {
private DateTime intervalStart;
private DateTime intervalEnd;
private DateTime paginatorStart;
private DateTime paginatorEnd;
private ZoomLevel zoomLevel = ZoomLevel.DETAIL_ONE;
@Override
public Interval getCurrentPaginationInterval() {
return new Interval(intervalStart.toDate(), intervalEnd.toDate());
}
private Period intervalIncrease() {
switch (zoomLevel) {
case DETAIL_ONE:
return Period.years(5);
case DETAIL_TWO:
return Period.years(5);
case DETAIL_THREE:
return Period.years(2);
case DETAIL_FOUR:
return Period.months(6);
case DETAIL_FIVE:
return Period.weeks(6);
}
return Period.years(5);
}
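// Each horizontal page therefore spans one of the fixed periods above; for example,
// at ZoomLevel.DETAIL_FOUR a page covers six months, so a 20-month plan yields two
// six-month pages plus a final page of roughly eight months, because a remainder
// shorter than one extra period is merged into the last page (see
// populateHorizontalListbox below).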
public void populateHorizontalListbox() {
advancedAllocationHorizontalPagination.getItems().clear();
DateTimeFormatter df = DateTimeFormat.forPattern("dd/MMM/yyyy");
if (intervalStart != null) {
DateTime itemStart = intervalStart;
DateTime itemEnd = intervalStart.plus(intervalIncrease());
while (intervalEnd.isAfter(itemStart)) {
if (intervalEnd.isBefore(itemEnd)
|| !intervalEnd.isAfter(itemEnd
.plus(intervalIncrease()))) {
itemEnd = intervalEnd;
}
Listitem item = new Listitem(df.print(itemStart) + " - "
+ df.print(itemEnd.minusDays(1)));
advancedAllocationHorizontalPagination.appendChild(item);
itemStart = itemEnd;
itemEnd = itemEnd.plus(intervalIncrease());
}
}
advancedAllocationHorizontalPagination
.setDisabled(advancedAllocationHorizontalPagination
.getItems().size() < 2);
advancedAllocationHorizontalPagination.setSelectedIndex(0);
}
public void goToHorizontalPage(int interval) {
if (interval >= 0) {
paginatorStart = intervalStart;
for (int i = 0; i < interval; i++) {
paginatorStart = paginatorStart.plus(intervalIncrease());
}
paginatorEnd = paginatorStart.plus(intervalIncrease());
// Avoid reduced intervals
if (!intervalEnd.isAfter(paginatorEnd.plus(intervalIncrease()))) {
paginatorEnd = intervalEnd;
}
updatePaginationButtons();
}
}
@Override
public Collection<DetailItem> selectsFirstLevel(
Collection<DetailItem> firstLevelDetails) {
ArrayList<DetailItem> result = new ArrayList<DetailItem>();
for (DetailItem each : firstLevelDetails) {
if ((each.getStartDate() == null)
|| !(each.getStartDate().isBefore(paginatorStart))
&& (each.getStartDate().isBefore(paginatorEnd))) {
result.add(each);
}
}
return result;
}
@Override
public Collection<DetailItem> selectsSecondLevel(
Collection<DetailItem> secondLevelDetails) {
ArrayList<DetailItem> result = new ArrayList<DetailItem>();
for (DetailItem each : secondLevelDetails) {
if ((each.getStartDate() == null)
|| !(each.getStartDate().isBefore(paginatorStart))
&& (each.getStartDate().isBefore(paginatorEnd))) {
result.add(each);
}
}
return result;
}
public void next() {
paginatorStart = paginatorStart.plus(intervalIncrease());
paginatorEnd = paginatorEnd.plus(intervalIncrease());
// Avoid reduced last intervals
if (!intervalEnd.isAfter(paginatorEnd.plus(intervalIncrease()))) {
paginatorEnd = paginatorEnd.plus(intervalIncrease());
}
updatePaginationButtons();
}
public void previous() {
paginatorStart = paginatorStart.minus(intervalIncrease());
paginatorEnd = paginatorEnd.minus(intervalIncrease());
updatePaginationButtons();
}
private void updatePaginationButtons() {
paginationDownButton.setDisabled(isFirstPage());
paginationUpButton.setDisabled(isLastPage());
}
public boolean isFirstPage() {
return !(paginatorStart.isAfter(intervalStart));
}
public boolean isLastPage() {
return ((paginatorEnd.isAfter(intervalEnd)) || (paginatorEnd
.isEqual(intervalEnd)));
}
public void setZoomLevel(ZoomLevel detailLevel) {
zoomLevel = detailLevel;
}
public void setInterval(Interval realInterval) {
intervalStart = realInterval.getStart().toDateTimeAtStartOfDay();
intervalEnd = realInterval.getFinish().toDateTimeAtStartOfDay();
paginatorStart = intervalStart;
paginatorEnd = intervalStart.plus(intervalIncrease());
if ((paginatorEnd.plus(intervalIncrease()).isAfter(intervalEnd))) {
paginatorEnd = intervalEnd;
}
updatePaginationButtons();
}
@Override
public void resetInterval() {
setInterval(timeTracker.getRealInterval());
}
}
private void createComponents() {
timeTracker = new TimeTracker(addMarginTointerval(), self);
paginatorFilter = new PaginatorFilter();
if (fixedZoomByUser && (zoomLevel != null)) {
timeTracker.setZoomLevel(zoomLevel);
}
paginatorFilter.setZoomLevel(timeTracker.getDetailLevel());
paginatorFilter.setInterval(timeTracker.getRealInterval());
paginationUpButton.setDisabled(isLastPage());
advancedAllocationZoomLevel.setSelectedIndex(timeTracker
.getDetailLevel().ordinal());
timeTracker.setFilter(paginatorFilter);
timeTracker.addZoomListener(new IZoomLevelChangedListener() {
@Override
public void zoomLevelChanged(ZoomLevel detailLevel) {
fixedZoomByUser = true;
zoomLevel = detailLevel;
paginatorFilter.setZoomLevel(detailLevel);
paginatorFilter.setInterval(timeTracker.getRealInterval());
timeTracker.setFilter(paginatorFilter);
populateHorizontalListbox();
Clients.evalJavaScript("ADVANCE_ALLOCATIONS.listenToScroll();");
}
});
timeTrackerComponent = new TimeTrackerComponentWithoutColumns(
timeTracker, "timetrackerheader");
timeTrackedTableWithLeftPane = new TimeTrackedTableWithLeftPane<Row, Row>(
getDataSource(), getColumnsForLeft(), getLeftRenderer(),
getRightRenderer(), timeTracker);
table = timeTrackedTableWithLeftPane.getRightPane();
table.setSclass("timeTrackedTableWithLeftPane");
leftPane = timeTrackedTableWithLeftPane.getLeftPane();
leftPane.setFixedLayout(true);
Clients.evalJavaScript("ADVANCE_ALLOCATIONS.listenToScroll();");
populateHorizontalListbox();
}
public void paginationDown() {
paginatorFilter.previous();
reloadComponent();
advancedAllocationHorizontalPagination
.setSelectedIndex(advancedAllocationHorizontalPagination
.getSelectedIndex() - 1);
}
public void paginationUp() {
paginatorFilter.next();
reloadComponent();
advancedAllocationHorizontalPagination.setSelectedIndex(Math.max(0,
advancedAllocationHorizontalPagination.getSelectedIndex()) + 1);
}
public void goToSelectedHorizontalPage() {
paginatorFilter
.goToHorizontalPage(advancedAllocationHorizontalPagination
.getSelectedIndex());
reloadComponent();
}
private void populateHorizontalListbox() {
advancedAllocationHorizontalPagination.setVisible(true);
paginatorFilter.populateHorizontalListbox();
}
private void reloadComponent() {
timeTrackedTableWithLeftPane.reload();
timeTrackerComponent.recreate();
// Reattach listener for zoomLevel changes. May be optimized
timeTracker.addZoomListener(new IZoomLevelChangedListener() {
@Override
public void zoomLevelChanged(ZoomLevel detailLevel) {
paginatorFilter.setZoomLevel(detailLevel);
paginatorFilter.setInterval(timeTracker.getRealInterval());
timeTracker.setFilter(paginatorFilter);
populateHorizontalListbox();
Clients.evalJavaScript("ADVANCE_ALLOCATIONS.listenToScroll();");
}
});
Clients.evalJavaScript("ADVANCE_ALLOCATIONS.listenToScroll();");
}
public boolean isFirstPage() {
return paginatorFilter.isFirstPage();
}
public boolean isLastPage() {
return paginatorFilter.isLastPage();
}
private void insertComponentsInLayout() {
insertionPointRightPanel.getChildren().clear();
insertionPointRightPanel.appendChild(table);
insertionPointLeftPanel.getChildren().clear();
insertionPointLeftPanel.appendChild(leftPane);
insertionPointTimetracker.getChildren().clear();
insertionPointTimetracker.appendChild(timeTrackerComponent);
}
public void onClick$acceptButton() {
for (AllocationInput allocationInput : allocationInputs) {
EffortDuration totalEffort = allocationInput.getTotalEffort();
Restriction restriction = allocationInput.getResultReceiver()
.createRestriction();
if (restriction.isInvalidTotalEffort(totalEffort)) {
Row groupingRow = groupingRows.get(allocationInput);
restriction.markInvalidEffort(groupingRow, totalEffort);
}
}
back.goBack();
for (AllocationInput allocationInput : allocationInputs) {
allocationInput.getResultReceiver().accepted(allocationInput
.getAggregate());
}
}
public void onClick$saveButton() {
for (AllocationInput allocationInput : allocationInputs) {
EffortDuration totalEffort = allocationInput.getTotalEffort();
Restriction restriction = allocationInput.getResultReceiver()
.createRestriction();
if (restriction.isInvalidTotalEffort(totalEffort)) {
Row groupingRow = groupingRows.get(allocationInput);
restriction.markInvalidEffort(groupingRow, totalEffort);
}
}
for (AllocationInput allocationInput : allocationInputs) {
allocationInput.getResultReceiver().accepted(
allocationInput.getAggregate());
}
try {
Messagebox.show(_("Changes applied"), _("Information"),
Messagebox.OK, Messagebox.INFORMATION);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
public void onClick$cancelButton() {
back.goBack();
for (AllocationInput allocationInput : allocationInputs) {
allocationInput.getResultReceiver().cancel();
}
}
public ListModel getZoomLevels() {
ZoomLevel[] selectableZoomlevels = { ZoomLevel.DETAIL_ONE,
ZoomLevel.DETAIL_TWO, ZoomLevel.DETAIL_THREE,
ZoomLevel.DETAIL_FOUR, ZoomLevel.DETAIL_FIVE };
return new SimpleListModel(selectableZoomlevels);
}
public void setZoomLevel(final ZoomLevel zoomLevel) {
timeTracker.setZoomLevel(zoomLevel);
}
public void onClick$zoomIncrease() {
timeTracker.zoomIncrease();
}
public void onClick$zoomDecrease() {
timeTracker.zoomDecrease();
}
private List<Row> rowsCached = null;
private Map<AllocationInput, Row> groupingRows = new HashMap<AllocationInput, Row>();
private OnlyOneVisible onlyOneVisible;
private Component normalLayout;
private Component noDataLayout;
private TimeTrackedTableWithLeftPane<Row, Row> timeTrackedTableWithLeftPane;
private int verticalIndex = 0;
private List<Integer> verticalPaginationIndexes;
private int verticalPage;
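// getRows() builds one grouping row per task (level 0) followed by that task's
// generic and specific allocation rows; the same list backs both the left pane and
// the time-tracked table, and filterRows() applies the vertical pagination window.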
private List<Row> getRows() {
if (rowsCached != null) {
return filterRows(rowsCached);
}
rowsCached = new ArrayList<Row>();
int position = 1;
for (AllocationInput allocationInput : allocationInputs) {
if (!allocationInput.getAggregate()
.getAllocationsSortedByStartDate().isEmpty()) {
Row groupingRow = buildGroupingRow(allocationInput);
groupingRow.setDescription(position + " " + allocationInput.getTaskName());
groupingRows.put(allocationInput, groupingRow);
rowsCached.add(groupingRow);
List<Row> genericRows = genericRows(allocationInput);
groupingRow.listenTo(genericRows);
rowsCached.addAll(genericRows);
List<Row> specificRows = specificRows(allocationInput);
groupingRow.listenTo(specificRows);
rowsCached.addAll(specificRows);
position++;
}
}
populateVerticalListbox();
return filterRows(rowsCached);
}
private List<Row> filterRows(List<Row> rows) {
verticalPaginationUpButton.setDisabled(verticalIndex <= 0);
verticalPaginationDownButton
.setDisabled((verticalIndex + VERTICAL_MAX_ELEMENTS) >= rows
.size());
if(advancedAllocationVerticalPagination.getChildren().size() >= 2) {
advancedAllocationVerticalPagination.setDisabled(false);
advancedAllocationVerticalPagination.setSelectedIndex(
verticalPage);
}
else {
advancedAllocationVerticalPagination.setDisabled(true);
}
return rows.subList(verticalIndex,
verticalPage + 1 < verticalPaginationIndexes.size() ?
verticalPaginationIndexes.get(verticalPage + 1).intValue() :
rows.size());
}
public void verticalPagedown() {
verticalPage++;
verticalIndex = verticalPaginationIndexes.get(verticalPage);
timeTrackedTableWithLeftPane.reload();
}
public void setVerticalPagedownButtonDisabled(boolean disabled) {
verticalPaginationUpButton.setDisabled(disabled);
}
public void verticalPageup() {
verticalPage--;
verticalIndex = verticalPaginationIndexes.get(verticalPage);
timeTrackedTableWithLeftPane.reload();
}
public void goToSelectedVerticalPage() {
verticalPage = advancedAllocationVerticalPagination.
getSelectedIndex();
verticalIndex = verticalPaginationIndexes.get(verticalPage);
timeTrackedTableWithLeftPane.reload();
}
public void populateVerticalListbox() {
if (rowsCached != null) {
verticalPaginationIndexes = new ArrayList<Integer>();
advancedAllocationVerticalPagination.getChildren().clear();
for(int i=0; i<rowsCached.size(); i=
correctVerticalPageDownPosition(i+VERTICAL_MAX_ELEMENTS)) {
int endPosition = correctVerticalPageUpPosition(Math.min(
rowsCached.size(), i+VERTICAL_MAX_ELEMENTS) - 1);
String label = rowsCached.get(i).getDescription() + " - " +
rowsCached.get(endPosition).getDescription();
Listitem item = new Listitem();
item.appendChild(new Listcell(label));
advancedAllocationVerticalPagination.appendChild(item);
verticalPaginationIndexes.add(i);
}
if (!rowsCached.isEmpty()) {
advancedAllocationVerticalPagination.setSelectedIndex(0);
}
}
}
private int correctVerticalPageUpPosition(int position) {
int correctedPosition = position;
//moves the pointer up until it finds the previous grouping row
//or the beginning of the list
while(correctedPosition > 0 &&
!rowsCached.get(correctedPosition).isGroupingRow()) {
correctedPosition--;
}
return correctedPosition;
}
private int correctVerticalPageDownPosition(int position) {
int correctedPosition = position;
//moves the pointer down until it finds the next grouping row
//or the end of the list
while(correctedPosition < rowsCached.size() &&
!rowsCached.get(correctedPosition).isGroupingRow()) {
correctedPosition++;
}
return correctedPosition;
}
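// Example of the corrections above: with VERTICAL_MAX_ELEMENTS = 25, if position 25
// falls in the middle of a task's detail rows, correctVerticalPageDownPosition pushes
// the next page start forward to the following grouping row, while
// correctVerticalPageUpPosition pulls the page label's end row back to the previous
// grouping row, so a task's rows are never split across vertical pages.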
private List<Row> specificRows(AllocationInput allocationInput) {
List<Row> result = new ArrayList<Row>();
for (SpecificResourceAllocation specificResourceAllocation : allocationInput.getAggregate()
.getSpecificAllocations()) {
result.add(createSpecificRow(specificResourceAllocation,
allocationInput.getResultReceiver().createRestriction(), allocationInput.task));
}
return result;
}
private Row createSpecificRow(
SpecificResourceAllocation specificResourceAllocation,
Restriction restriction, TaskElement task) {
return Row.createRow(messages, restriction,
specificResourceAllocation.getResource()
.getName(), 1, Arrays
.asList(specificResourceAllocation), specificResourceAllocation
.getResource().getShortDescription(),
specificResourceAllocation.getResource().isLimitingResource(), task);
}
private List<Row> genericRows(AllocationInput allocationInput) {
List<Row> result = new ArrayList<Row>();
for (GenericResourceAllocation genericResourceAllocation : allocationInput.getAggregate()
.getGenericAllocations()) {
result.add(buildGenericRow(genericResourceAllocation,
allocationInput.getResultReceiver().createRestriction(), allocationInput.task));
}
return result;
}
private Row buildGenericRow(
GenericResourceAllocation genericResourceAllocation,
Restriction restriction, TaskElement task) {
return Row.createRow(messages, restriction, Criterion
.getCaptionFor(genericResourceAllocation.getCriterions()), 1, Arrays
.asList(genericResourceAllocation), genericResourceAllocation
.isLimiting(), task);
}
private Row buildGroupingRow(AllocationInput allocationInput) {
Restriction restriction = allocationInput.getResultReceiver()
.createRestriction();
String taskName = allocationInput.getTaskName();
Row groupingRow = Row.createRow(messages, restriction, taskName, 0,
allocationInput.getAllocationsSortedByStartDate(), false, allocationInput.task);
return groupingRow;
}
private ICellForDetailItemRenderer<ColumnOnRow, Row> getLeftRenderer() {
return new ICellForDetailItemRenderer<ColumnOnRow, Row>() {
@Override
public Component cellFor(ColumnOnRow column, Row row) {
return column.cellFor(row);
}
};
}
private List<ColumnOnRow> getColumnsForLeft() {
List<ColumnOnRow> result = new ArrayList<ColumnOnRow>();
result.add(new ColumnOnRow(_("Name")) {
@Override
public Component cellFor(Row row) {
return row.getNameLabel();
}
});
result.add(new ColumnOnRow(_("Efforts"), "50px") {
@Override
public Component cellFor(Row row) {
return row.getAllEffort();
}
});
result.add(new ColumnOnRow(_("Function"), "130px") {
@Override
public Component cellFor(Row row) {
return row.getFunction();
}
});
return result;
}
private Callable<PairOfLists<Row, Row>> getDataSource() {
return new Callable<PairOfLists<Row, Row>>() {
@Override
public PairOfLists<Row, Row> call() {
List<Row> rows = getRows();
return new PairOfLists<Row, Row>(rows, rows);
}
};
}
private ICellForDetailItemRenderer<DetailItem, Row> getRightRenderer() {
return new ICellForDetailItemRenderer<DetailItem, Row>() {
@Override
public Component cellFor(DetailItem item, Row data) {
return data.effortOnInterval(item);
}
};
}
private Interval intervalFromData() {
Interval result = null;
for (AllocationInput each : allocationInputs) {
Interval intervalForInput = each.calculateInterval();
result = result == null ? intervalForInput : result
.coalesce(intervalForInput);
}
return result;
}
private Interval addMarginTointerval() {
Interval interval = intervalFromData();
// No global margin is added by default
return interval;
}
public boolean isAdvancedAllocationOfSingleTask() {
return back.isAdvanceAssignmentOfSingleTask();
}
}
abstract class ColumnOnRow implements IConvertibleToColumn {
private final String columnName;
private String width = null;
ColumnOnRow(String columnName) {
this.columnName = columnName;
}
ColumnOnRow(String columnName, String width) {
this.columnName = columnName;
this.width = width;
}
public abstract Component cellFor(Row row);
@Override
public Column toColumn() {
Column column = new org.zkoss.zul.Column();
column.setLabel(_(columnName));
column.setSclass(columnName.toLowerCase());
if (width != null) {
column.setWidth(width);
}
return column;
}
public String getName() {
return columnName;
}
}
interface CellChangedListener {
public void changeOn(DetailItem detailItem);
public void changeOnGlobal();
}
class Row {
static Row createRow(IMessagesForUser messages,
AdvancedAllocationController.Restriction restriction,
String name, int level,
List<? extends ResourceAllocation<?>> allocations,
String description, boolean limiting, TaskElement task) {
Row newRow = new Row(messages, restriction, name, level, allocations,
limiting, task);
newRow.setDescription(description);
return newRow;
}
static Row createRow(IMessagesForUser messages,
AdvancedAllocationController.Restriction restriction, String name,
int level, List<? extends ResourceAllocation<?>> allocations,
boolean limiting, TaskElement task) {
return new Row(messages, restriction, name, level, allocations,
limiting, task);
}
public void markErrorOnTotal(String message) {
throw new WrongValueException(allEffortInput, message);
}
private EffortDurationBox allEffortInput;
private Label nameLabel;
private List<CellChangedListener> listeners = new ArrayList<CellChangedListener>();
private Map<DetailItem, Component> componentsByDetailItem = new WeakHashMap<DetailItem, Component>();
private String name;
private String description;
private int level;
private final AggregateOfResourceAllocations aggregate;
private final AdvancedAllocationController.Restriction restriction;
private final IMessagesForUser messages;
private final String functionName;
private TaskElement task;
void listenTo(Collection<Row> rows) {
for (Row row : rows) {
listenTo(row);
}
}
void listenTo(Row row) {
row.add(new CellChangedListener() {
@Override
public void changeOnGlobal() {
reloadAllEffort();
reloadEffortsSameRowForDetailItems();
}
@Override
public void changeOn(DetailItem detailItem) {
Component component = componentsByDetailItem.get(detailItem);
if (component == null) {
return;
}
reloadEffortOnInterval(component, detailItem);
reloadAllEffort();
}
});
}
void add(CellChangedListener listener) {
listeners.add(listener);
}
private void fireCellChanged(DetailItem detailItem) {
for (CellChangedListener cellChangedListener : listeners) {
cellChangedListener.changeOn(detailItem);
}
}
private void fireCellChanged() {
for (CellChangedListener cellChangedListener : listeners) {
cellChangedListener.changeOnGlobal();
}
}
Component getAllEffort() {
if (allEffortInput == null) {
allEffortInput = buildSumAllEffort();
reloadAllEffort();
addListenerIfNeeded(allEffortInput);
}
return allEffortInput;
}
private EffortDurationBox buildSumAllEffort() {
EffortDurationBox box = (isGroupingRow() || isLimiting) ? EffortDurationBox
.notEditable() : new EffortDurationBox();
box.setWidth("40px");
return box;
}
private void addListenerIfNeeded(Component allEffortComponent) {
if (isGroupingRow() || isLimiting) {
return;
}
final EffortDurationBox effortDurationBox = (EffortDurationBox) allEffortComponent;
effortDurationBox.addEventListener(Events.ON_CHANGE,
new EventListener() {
@Override
public void onEvent(Event event) {
EffortDuration value = effortDurationBox
.getEffortDurationValue();
ResourceAllocation<?> resourceAllocation = getAllocation();
resourceAllocation
.withPreviousAssociatedResources()
.onIntervalWithinTask(
resourceAllocation.getStartDate(),
resourceAllocation.getEndDate())
.allocate(value);
AssignmentFunction assignmentFunction = resourceAllocation.getAssignmentFunction();
if (assignmentFunction != null) {
assignmentFunction.applyTo(resourceAllocation);
}
fireCellChanged();
reloadEffortsSameRowForDetailItems();
reloadAllEffort();
}
});
}
private void reloadEffortsSameRowForDetailItems() {
for (Entry<DetailItem, Component> entry : componentsByDetailItem
.entrySet()) {
reloadEffortOnInterval(entry.getValue(), entry.getKey());
}
}
private void reloadAllEffort() {
if (allEffortInput == null) {
return;
}
EffortDuration allEffort = aggregate.getTotalEffort();
allEffortInput.setValue(allEffort);
Clients.closeErrorBox(allEffortInput);
if (isLimiting) {
allEffortInput.setDisabled(true);
}
if (restriction.isInvalidTotalEffort(allEffort)) {
restriction.showInvalidEffort(messages, allEffort);
}
}
private Hbox hboxAssigmentFunctionsCombo = null;
Component getFunction() {
if (isGroupingRow()) {
return new Label();
} else if (isLimiting) {
return new Label(_("Limiting assignment"));
} else {
if (hboxAssigmentFunctionsCombo == null) {
initializeAssigmentFunctionsCombo();
}
return hboxAssigmentFunctionsCombo;
}
}
private AssignmentFunctionListbox assignmentFunctionsCombo = null;
private Button assignmentFunctionsConfigureButton = null;
private void initializeAssigmentFunctionsCombo() {
hboxAssigmentFunctionsCombo = new Hbox();
assignmentFunctionsCombo = new AssignmentFunctionListbox(
functions, getAllocation().getAssignmentFunction());
hboxAssigmentFunctionsCombo.appendChild(assignmentFunctionsCombo);
assignmentFunctionsConfigureButton = getAssignmentFunctionsConfigureButton(assignmentFunctionsCombo);
hboxAssigmentFunctionsCombo.appendChild(assignmentFunctionsConfigureButton);
}
class AssignmentFunctionListbox extends Listbox {
private Listitem previousListitem;
public AssignmentFunctionListbox(IAssignmentFunctionConfiguration[] functions,
AssignmentFunction initialValue) {
for (IAssignmentFunctionConfiguration each : functions) {
Listitem listitem = listItem(each);
this.appendChild(listitem);
if (each.isTargetedTo(initialValue)) {
selectItemAndSavePreviousValue(listitem);
}
}
this.addEventListener(Events.ON_SELECT, onSelectListbox());
this.setMold("select");
this.setStyle("font-size: 10px");
}
private void selectItemAndSavePreviousValue(Listitem listitem) {
setSelectedItem(listitem);
previousListitem = listitem;
}
private Listitem listItem(
IAssignmentFunctionConfiguration assignmentFunction) {
Listitem listitem = new Listitem(_(assignmentFunction.getName()));
listitem.setValue(assignmentFunction);
return listitem;
}
private EventListener onSelectListbox() {
return new EventListener() {
@Override
public void onEvent(Event event) throws Exception {
IAssignmentFunctionConfiguration function = (IAssignmentFunctionConfiguration) getSelectedItem()
.getValue();
// Cannot apply function if task contains consolidated day assignments
final ResourceAllocation<?> resourceAllocation = getAllocation();
if (function.isSigmoid()
&& !resourceAllocation
.getConsolidatedAssignments().isEmpty()) {
showCannotApplySigmoidFunction();
setSelectedItem(getPreviousListitem());
return;
}
// User didn't accept
if (showConfirmChangeFunctionDialog() != Messagebox.YES) {
setSelectedItem(getPreviousListitem());
return;
}
// Apply assignment function
if (function != null) {
setPreviousListitem(getSelectedItem());
function.applyOn(resourceAllocation);
updateAssignmentFunctionsConfigureButton(
assignmentFunctionsConfigureButton,
function.isConfigurable());
}
}
};
}
private Listitem getPreviousListitem() {
return previousListitem;
}
private void setPreviousListitem(Listitem previousListitem) {
this.previousListitem = previousListitem;
}
private void showCannotApplySigmoidFunction() {
try {
Messagebox
.show(_("Task contains consolidated progress. Cannot apply sigmoid function."),
_("Error"), Messagebox.OK, Messagebox.ERROR);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
private int showConfirmChangeFunctionDialog()
throws InterruptedException {
return Messagebox
.show(_("You are going to change the assignment function. Are you sure?"),
_("Confirm change"),
Messagebox.YES | Messagebox.NO, Messagebox.QUESTION);
}
private void setSelectedFunction(String functionName) {
List<Listitem> children = getChildren();
for (Listitem item : children) {
IAssignmentFunctionConfiguration function = (IAssignmentFunctionConfiguration) item
.getValue();
if (function.getName().equals(functionName)) {
setSelectedItem(item);
}
}
}
}
private IAssignmentFunctionConfiguration flat = new IAssignmentFunctionConfiguration() {
@Override
public void goToConfigure() {
throw new UnsupportedOperationException(
"Flat allocation is not configurable");
}
@Override
public String getName() {
return AssignmentFunctionName.FLAT.toString();
}
@Override
public boolean isTargetedTo(AssignmentFunction function) {
return function == null;
}
@Override
public void applyOn(
ResourceAllocation<?> resourceAllocation) {
resourceAllocation.setAssignmentFunctionWithoutApply(null);
resourceAllocation
.withPreviousAssociatedResources()
.onIntervalWithinTask(resourceAllocation.getStartDate(),
resourceAllocation.getEndDate())
.allocate(allEffortInput.getEffortDurationValue());
reloadEfforts();
}
private void reloadEfforts() {
reloadEffortsSameRowForDetailItems();
reloadAllEffort();
fireCellChanged();
}
@Override
public boolean isSigmoid() {
return false;
}
@Override
public boolean isConfigurable() {
return false;
}
};
private IAssignmentFunctionConfiguration manualFunction = new IAssignmentFunctionConfiguration() {
@Override
public void goToConfigure() {
throw new UnsupportedOperationException(
"Manual allocation is not configurable");
}
@Override
public String getName() {
return AssignmentFunctionName.MANUAL.toString();
}
@Override
public boolean isTargetedTo(AssignmentFunction function) {
return function instanceof ManualFunction;
}
@Override
public void applyOn(ResourceAllocation<?> resourceAllocation) {
resourceAllocation.setAssignmentFunctionAndApplyIfNotFlat(ManualFunction.create());
}
@Override
public boolean isSigmoid() {
return false;
}
@Override
public boolean isConfigurable() {
return false;
}
};
private abstract class CommonStrechesConfiguration extends
StrechesFunctionConfiguration {
@Override
protected void assignmentFunctionChanged() {
reloadEffortsSameRowForDetailItems();
reloadAllEffort();
fireCellChanged();
}
@Override
protected ResourceAllocation<?> getAllocation() {
return Row.this.getAllocation();
}
@Override
protected Component getParentOnWhichOpenWindow() {
return allEffortInput.getParent();
}
}
private IAssignmentFunctionConfiguration defaultStrechesFunction = new CommonStrechesConfiguration() {
@Override
protected String getTitle() {
return _("Stretches list");
}
@Override
protected boolean getChartsEnabled() {
return true;
}
@Override
protected StretchesFunctionTypeEnum getType() {
return StretchesFunctionTypeEnum.STRETCHES;
}
@Override
public String getName() {
return AssignmentFunctionName.STRETCHES.toString();
}
};
private IAssignmentFunctionConfiguration strechesWithInterpolation = new CommonStrechesConfiguration() {
@Override
protected String getTitle() {
return _("Stretches with Interpolation");
}
@Override
protected boolean getChartsEnabled() {
return false;
}
@Override
protected StretchesFunctionTypeEnum getType() {
return StretchesFunctionTypeEnum.INTERPOLATED;
}
@Override
public String getName() {
return AssignmentFunctionName.INTERPOLATION.toString();
}
};
private IAssignmentFunctionConfiguration sigmoidFunction = new IAssignmentFunctionConfiguration() {
@Override
public void goToConfigure() {
throw new UnsupportedOperationException(
"Sigmoid function is not configurable");
}
@Override
public String getName() {
return AssignmentFunctionName.SIGMOID.toString();
}
@Override
public boolean isTargetedTo(AssignmentFunction function) {
return function instanceof SigmoidFunction;
}
@Override
public void applyOn(
ResourceAllocation<?> resourceAllocation) {
resourceAllocation.setAssignmentFunctionAndApplyIfNotFlat(SigmoidFunction.create());
reloadEfforts();
}
private void reloadEfforts() {
reloadEffortsSameRowForDetailItems();
reloadAllEffort();
fireCellChanged();
}
@Override
public boolean isSigmoid() {
return true;
}
@Override
public boolean isConfigurable() {
return false;
}
};
private IAssignmentFunctionConfiguration[] functions = {
flat,
manualFunction,
defaultStrechesFunction,
strechesWithInterpolation,
sigmoidFunction
};
private boolean isLimiting;
private Button getAssignmentFunctionsConfigureButton(
final Listbox assignmentFunctionsListbox) {
Button button = Util.createEditButton(new EventListener() {
@Override
public void onEvent(Event event) {
IAssignmentFunctionConfiguration configuration = (IAssignmentFunctionConfiguration) assignmentFunctionsListbox
.getSelectedItem().getValue();
configuration.goToConfigure();
}
});
IAssignmentFunctionConfiguration configuration = (IAssignmentFunctionConfiguration) assignmentFunctionsListbox
.getSelectedItem().getValue();
updateAssignmentFunctionsConfigureButton(button,
configuration.isConfigurable());
return button;
}
private void updateAssignmentFunctionsConfigureButton(Button button,
boolean configurable) {
if (configurable) {
button.setTooltiptext(_("Configure"));
button.setDisabled(false);
} else {
button.setTooltiptext(_("Not configurable"));
button.setDisabled(true);
}
}
Component getNameLabel() {
if (nameLabel == null) {
nameLabel = new Label();
nameLabel.setValue(name);
if (!StringUtils.isBlank(description)) {
nameLabel.setTooltiptext(description);
} else {
nameLabel.setTooltiptext(name);
}
nameLabel.setSclass("level" + level);
}
return nameLabel;
}
private Row(IMessagesForUser messages,
AdvancedAllocationController.Restriction restriction, String name,
int level, List<? extends ResourceAllocation<?>> allocations,
boolean limiting, TaskElement task) {
this.messages = messages;
this.restriction = restriction;
this.name = name;
this.level = level;
this.isLimiting = limiting;
this.task = task;
this.aggregate = AggregateOfResourceAllocations
.createFromSatisfied(new ArrayList<ResourceAllocation<?>>(allocations));
this.functionName = getAssignmentFunctionName(allocations);
}
private String getAssignmentFunctionName(
List<? extends ResourceAllocation<?>> allocations) {
AssignmentFunction function = getAssignmentFunction(allocations);
return (function != null) ? function.getName()
: AssignmentFunctionName.FLAT.toString();
}
private AssignmentFunction getAssignmentFunction(
List<? extends ResourceAllocation<?>> allocations) {
if (allocations != null && !allocations.isEmpty()) {
ResourceAllocation<?> allocation = allocations.iterator().next();
return allocation.getAssignmentFunction();
}
return null;
}
private EffortDuration getEffortForDetailItem(DetailItem item) {
DateTime startDate = item.getStartDate();
DateTime endDate = item.getEndDate();
return this.aggregate.effortBetween(startDate.toLocalDate(), endDate
.toLocalDate());
}
Component effortOnInterval(DetailItem item) {
Component result = cannotBeEdited(item) ? new Label()
: disableIfNeeded(item, new EffortDurationBox());
reloadEffortOnInterval(result, item);
componentsByDetailItem.put(item, result);
addListenerIfNeeded(item, result);
return result;
}
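// A cell is rendered as a read-only Label for grouping rows, for detail items that do
// not intersect the task, and for intervals before the latest consolidation; all other
// cells get an editable EffortDurationBox, possibly disabled by the current Restriction.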
private boolean cannotBeEdited(DetailItem item) {
return isGroupingRow() || doesNotIntersectWithTask(item)
|| isBeforeLatestConsolidation(item);
}
private EffortDurationBox disableIfNeeded(DetailItem item,
EffortDurationBox effortDurationBox) {
effortDurationBox.setDisabled(restriction.isDisabledEditionOn(item));
return effortDurationBox;
}
private void addListenerIfNeeded(final DetailItem item,
final Component component) {
if (cannotBeEdited(item)) {
return;
}
final EffortDurationBox effortBox = (EffortDurationBox) component;
component.addEventListener(Events.ON_CHANGE, new EventListener() {
@Override
public void onEvent(Event event) {
EffortDuration value = effortBox.getEffortDurationValue();
LocalDate startDate = restriction.limitStartDate(item
.getStartDate().toLocalDate());
LocalDate endDate = restriction.limitEndDate(item.getEndDate()
.toLocalDate());
changeAssignmentFunctionToManual();
getAllocation().withPreviousAssociatedResources()
.onIntervalWithinTask(startDate, endDate)
.allocate(value);
fireCellChanged(item);
effortBox.setRawValue(getEffortForDetailItem(item));
reloadAllEffort();
}
});
}
private void changeAssignmentFunctionToManual() {
assignmentFunctionsCombo
.setSelectedFunction(AssignmentFunctionName.MANUAL.toString());
ResourceAllocation<?> allocation = getAllocation();
if (!(allocation.getAssignmentFunction() instanceof ManualFunction)) {
allocation.setAssignmentFunctionAndApplyIfNotFlat(ManualFunction.create());
}
}
private void reloadEffortOnInterval(Component component, DetailItem item) {
if (cannotBeEdited(item)) {
Label label = (Label) component;
label.setValue(getEffortForDetailItem(item).toFormattedString());
label.setClass(getLabelClassFor(item));
} else {
EffortDurationBox effortDurationBox = (EffortDurationBox) component;
effortDurationBox.setValue(getEffortForDetailItem(item));
if (isLimiting) {
effortDurationBox.setDisabled(true);
effortDurationBox.setSclass(" limiting");
}
}
}
private String getLabelClassFor(DetailItem item) {
if (isGroupingRow()) {
return "calculated-hours";
}
if (doesNotIntersectWithTask(item)) {
return "unmodifiable-hours";
}
if (isBeforeLatestConsolidation(item)) {
return "consolidated-hours";
}
return "";
}
private boolean doesNotIntersectWithTask(DetailItem item) {
return isBeforeTaskStartDate(item) || isAfterTaskEndDate(item);
}
private boolean isBeforeTaskStartDate(DetailItem item) {
return task.getIntraDayStartDate().compareTo(
item.getEndDate().toLocalDate()) >= 0;
}
private boolean isAfterTaskEndDate(DetailItem item) {
return task.getIntraDayEndDate().compareTo(
item.getStartDate().toLocalDate()) <= 0;
}
private boolean isBeforeLatestConsolidation(DetailItem item) {
if(!((Task)task).hasConsolidations()) {
return false;
}
LocalDate d = ((Task) task).getFirstDayNotConsolidated().getDate();
DateTime firstDayNotConsolidated =
new DateTime(d.getYear(), d.getMonthOfYear(),
d.getDayOfMonth(), 0, 0, 0, 0);
return item.getStartDate().compareTo(firstDayNotConsolidated) < 0;
}
private ResourceAllocation<?> getAllocation() {
if (isGroupingRow()) {
throw new IllegalStateException("is grouping row");
}
return aggregate.getAllocationsSortedByStartDate().get(0);
}
public boolean isGroupingRow() {
return level == 0;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
|
package uk.ac.ebi.biosamples.utils;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AdaptiveThreadPoolExecutor extends ThreadPoolExecutor {
private Logger log = LoggerFactory.getLogger(this.getClass());
private AtomicInteger completedJobs = new AtomicInteger(0);
private AdaptiveThreadPoolExecutor(int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit,
BlockingQueue<Runnable> workQueue, RejectedExecutionHandler rejectedExecutionHandler) {
super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, rejectedExecutionHandler);
}
@Override
protected void afterExecute(Runnable r, Throwable t) {
super.afterExecute(r, t);
if (t != null) return;
completedJobs.incrementAndGet();
}
/**
* By default, creates a pool with a queue size of 1000 that
* re-evaluates whether to increase or decrease the thread count every 60 seconds
* and does not guarantee that jobs are distributed fairly among threads.
* @return a monitored thread pool with the default settings
*/
public static AdaptiveThreadPoolExecutor create() {
return create(1000,60000,false);
}
public static AdaptiveThreadPoolExecutor create(int maxQueueSize, int pollInterval, boolean fairness) {
//default to the number of processors
int corePoolSize = Runtime.getRuntime().availableProcessors();
int maximumPoolSize = corePoolSize;
//keep alive is not relevant, since core == maximum
long keepAliveTime = 1;
TimeUnit unit = TimeUnit.DAYS;
// a queue constructed with fairness set to true grants threads access
// in FIFO order.
// Fairness generally decreases throughput but reduces variability and
// avoids starvation.
BlockingQueue<Runnable> workQueue = new ArrayBlockingQueue<>(maxQueueSize, fairness);
// A handler for rejected tasks that runs the rejected task directly in
// the calling thread of the execute method,
// unless the executor has been shut down, in which case the task is
// discarded.
RejectedExecutionHandler rejectedExecutionHandler = new ThreadPoolExecutor.CallerRunsPolicy();
AdaptiveThreadPoolExecutor threadPool = new AdaptiveThreadPoolExecutor(corePoolSize, maximumPoolSize,
keepAliveTime, unit, workQueue, rejectedExecutionHandler);
Thread monitorThread = new Thread(new PoolMonitor(threadPool, pollInterval));
monitorThread.setDaemon(true);
monitorThread.start();
return threadPool;
}
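// Typical usage, as an illustrative sketch (queue size, poll interval and the jobs
// collection are assumptions, not values from this project):
//
// AdaptiveThreadPoolExecutor pool = AdaptiveThreadPoolExecutor.create(10000, 30000, false);
// for (Runnable job : jobs) {
// pool.submit(job);
// }
// pool.shutdown();
// pool.awaitTermination(1, TimeUnit.HOURS); // throws InterruptedException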
private static class PoolMonitor implements Runnable {
private Logger log = LoggerFactory.getLogger(this.getClass());
private final AdaptiveThreadPoolExecutor pool;
private final int pollInterval;
private final Map<Integer, Double> threadsScores = new HashMap<>();
private final Map<Integer, Long> threadsTime = new HashMap<>();
private final double margin = 1.1;
public PoolMonitor(AdaptiveThreadPoolExecutor pool, int pollInterval) {
this.pool = pool;
this.pollInterval = pollInterval;
}
@Override
public void run() {
long lastStep = System.nanoTime();
while (!pool.isTerminated()) {
//wait for it to do stuff
try {
Thread.sleep(pollInterval);
} catch (InterruptedException e) {
if (Thread.interrupted()) {// Clears interrupted status!
throw new RuntimeException(e);
}
}
//test the number of jobs done
//get number of threads they were done with
long now = System.nanoTime();
long interval = now-lastStep;
lastStep = now;
int currentThreads = pool.getMaximumPoolSize();
int doneJobs = pool.completedJobs.getAndSet(0);
//number of jobs per sec per thread
double score = (((double)doneJobs)*1000000000.0d)/(interval*currentThreads);
log.info("Completed "+doneJobs+" in "+interval+"ns using "+currentThreads+" threads : score = "+score);
//store the result of this score
threadsScores.put(currentThreads, score);
threadsTime.put(currentThreads, now);
//remove any scores that are too old
Iterator<Integer> iterator = threadsTime.keySet().iterator();
while (iterator.hasNext()) {
int testThreads = iterator.next();
long testTime = threadsTime.get(testThreads);
//more than 10 polls ago? (pollInterval is in ms, timestamps in ns)
if (testTime + (pollInterval * 1_000_000L * 10) < now) {
//too old score, remove it
log.info("Remove out-of-date score for "+testThreads);
iterator.remove(); //removes the stale entry from threadsTime
threadsScores.remove(testThreads);
}
}
//work out what the best number of threads is
double bestScore = margin*score;
int bestThreads = currentThreads;
for (int testThreads : threadsScores.keySet()) {
double testScore = threadsScores.get(testThreads);
if (testScore > bestScore) {
bestScore = testScore;
bestThreads = testThreads;
}
}
log.info("Best scoring number of threads is "+bestThreads+" with "+bestScore);
//if we are more than margin below the best, change to the best
if (margin*score < bestScore) {
log.info("Adjusting to use "+(bestThreads)+" threads");
pool.setCorePoolSize(bestThreads);
pool.setMaximumPoolSize(bestThreads);
} else {
//experiment to see whether we might do better by increasing or decreasing the threads
if (!threadsScores.containsKey(currentThreads+1) || threadsScores.get(currentThreads+1) > margin*score) {
//increase the number of threads
log.info("Adjusting to use "+(currentThreads+1)+" threads");
pool.setCorePoolSize(currentThreads+1);
pool.setMaximumPoolSize(currentThreads+1);
} else if (currentThreads > 1 && (!threadsScores.containsKey(currentThreads-1) || threadsScores.get(currentThreads-1) > margin*score)) {
//decrease the number of threads
//only decrease threads if there are at least 2 (so we don't drop to zero!)
log.info("Adjusting to use "+(currentThreads-1)+" threads");
pool.setCorePoolSize(currentThreads-1);
pool.setMaximumPoolSize(currentThreads-1);
}
}
}
}
}
}
|
package com.intellij.ide.plugins;
import com.intellij.execution.process.ProcessIOExecutorService;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.icons.AllIcons;
import com.intellij.ide.CopyProvider;
import com.intellij.ide.DataManager;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.plugins.marketplace.MarketplaceRequests;
import com.intellij.ide.plugins.newui.*;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationInfo;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupListener;
import com.intellij.openapi.ui.popup.LightweightWindowEvent;
import com.intellij.openapi.updateSettings.impl.UpdateChecker;
import com.intellij.openapi.updateSettings.impl.UpdateSettings;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.ThrowableNotNullFunction;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.util.text.StringUtilRt;
import com.intellij.ui.*;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.components.JBTextField;
import com.intellij.ui.components.fields.ExtendableTextComponent;
import com.intellij.ui.components.labels.LinkLabel;
import com.intellij.ui.components.labels.LinkListener;
import com.intellij.ui.popup.PopupFactoryImpl;
import com.intellij.ui.popup.list.PopupListElementRenderer;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.net.HttpConfigurable;
import com.intellij.util.ui.*;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;
import java.awt.*;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* @author Alexander Lobas
*/
public class PluginManagerConfigurable
implements SearchableConfigurable, Configurable.NoScroll, Configurable.NoMargin, Configurable.TopComponentProvider {
private static final Logger LOG = Logger.getInstance(PluginManagerConfigurable.class);
public static final String ID = "preferences.pluginManager";
public static final String SELECTION_TAB_KEY = "PluginConfigurable.selectionTab";
@SuppressWarnings("UseJBColor") public static final Color MAIN_BG_COLOR =
JBColor.namedColor("Plugins.background", new JBColor(() -> JBColor.isBright() ? UIUtil.getListBackground() : new Color(0x313335)));
public static final Color SEARCH_BG_COLOR = JBColor.namedColor("Plugins.SearchField.background", MAIN_BG_COLOR);
public static final Color SEARCH_FIELD_BORDER_COLOR =
JBColor.namedColor("Plugins.SearchField.borderColor", new JBColor(0xC5C5C5, 0x515151));
private static final int MARKETPLACE_TAB = 0;
private static final int INSTALLED_TAB = 1;
public static final int ITEMS_PER_GROUP = 9;
public static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("MMM dd, yyyy");
// Download-count formats with K/M suffixes (exact patterns assumed)
private static final DecimalFormat K_FORMAT = new DecimalFormat("###.#K");
private static final DecimalFormat M_FORMAT = new DecimalFormat("###.#M");
private TabbedPaneHeaderComponent myTabHeaderComponent;
private MultiPanel myCardPanel;
private PluginsTab myMarketplaceTab;
private PluginsTab myInstalledTab;
private PluginsGroupComponentWithProgress myMarketplacePanel;
private PluginsGroupComponent myInstalledPanel;
private Runnable myMarketplaceRunnable;
private SearchResultPanel myMarketplaceSearchPanel;
private SearchResultPanel myInstalledSearchPanel;
private final LinkLabel<Object> myUpdateAll = new LinkLabel<>(IdeBundle.message("plugin.manager.update.all"), null);
private final JLabel myUpdateCounter = new CountComponent();
private final CountIcon myCountIcon = new CountIcon();
private final MyPluginModel myPluginModel = new MyPluginModel();
private PluginUpdatesService myPluginUpdatesService;
private List<String> myTagsSorted;
private List<String> myVendorsSorted;
private DefaultActionGroup myMarketplaceSortByGroup;
private Consumer<MarketplaceSortByAction> myMarketplaceSortByCallback;
private LinkComponent myMarketplaceSortByAction;
private DefaultActionGroup myInstalledSearchGroup;
private Consumer<InstalledSearchOptionAction> myInstalledSearchCallback;
private boolean myInstalledSearchSetState = true;
private Collection<IdeaPluginDescriptor> myInitUpdates;
public PluginManagerConfigurable() {
}
/**
* @deprecated use {@link PluginManagerConfigurable}
*/
@Deprecated
public PluginManagerConfigurable(PluginManagerUISettings uiSettings) {
}
@NotNull
@Override
public String getId() {
return ID;
}
@Override
public String getDisplayName() {
return IdeBundle.message("title.plugins");
}
@NotNull
@Override
public Component getCenterComponent(@NotNull TopComponentController controller) {
myPluginModel.setTopController(controller);
return myTabHeaderComponent;
}
@NotNull
public JComponent getTopComponent() {
myPluginModel.setTopController(TopComponentController.EMPTY);
return myTabHeaderComponent;
}
@Nullable
@Override
public JComponent createComponent() {
myTabHeaderComponent = new TabbedPaneHeaderComponent(createGearActions(), index -> {
myCardPanel.select(index, true);
storeSelectionTab(index);
String query = (index == MARKETPLACE_TAB ? myInstalledTab : myMarketplaceTab).getSearchQuery();
(index == MARKETPLACE_TAB ? myMarketplaceTab : myInstalledTab).setSearchQuery(query);
});
myUpdateAll.setVisible(false);
myUpdateCounter.setVisible(false);
myTabHeaderComponent.addTab(IdeBundle.message("plugin.manager.tab.marketplace"), null);
myTabHeaderComponent.addTab(IdeBundle.message("plugin.manager.tab.installed"), myCountIcon);
Consumer<Integer> callback = countValue -> {
int count = countValue == null ? 0 : countValue;
String text = String.valueOf(count);
boolean visible = count > 0;
myUpdateAll.setEnabled(true);
myUpdateAll.setVisible(visible);
myUpdateCounter.setText(text);
myUpdateCounter.setVisible(visible);
myCountIcon.setText(text);
myTabHeaderComponent.update();
};
if (myInitUpdates != null) {
callback.accept(myInitUpdates.size());
}
myPluginUpdatesService = PluginUpdatesService.connectConfigurable(callback);
myPluginModel.setPluginUpdatesService(myPluginUpdatesService);
boolean selectInstalledTab = !ContainerUtil.isEmpty(myInitUpdates);
createMarketplaceTab();
createInstalledTab();
myCardPanel = new MultiPanel() {
@Override
protected JComponent create(Integer key) {
if (key == MARKETPLACE_TAB) {
return myMarketplaceTab.createPanel();
}
if (key == INSTALLED_TAB) {
return myInstalledTab.createPanel();
}
return super.create(key);
}
};
myCardPanel.setMinimumSize(new JBDimension(580, 380));
myCardPanel.setPreferredSize(new JBDimension(800, 600));
myTabHeaderComponent.setListener();
int selectionTab = selectInstalledTab ? INSTALLED_TAB : getStoredSelectionTab();
myTabHeaderComponent.setSelection(selectionTab);
myCardPanel.select(selectionTab, true);
if (selectInstalledTab) {
myInstalledTab.setSearchQuery("/outdated");
}
return myCardPanel;
}
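/**
* Builds the gear-menu actions: plugin repository settings, HTTP proxy settings,
* install-from-disk, and bulk enable/disable of all downloaded plugins.
*/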
@NotNull
private DefaultActionGroup createGearActions() {
DefaultActionGroup actions = new DefaultActionGroup();
actions.add(new DumbAwareAction(IdeBundle.message("plugin.manager.repositories")) {
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
if (ShowSettingsUtil.getInstance().editConfigurable(myCardPanel, new PluginHostsConfigurable())) {
resetPanels();
}
}
});
actions.add(new DumbAwareAction(IdeBundle.message("button.http.proxy.settings")) {
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
if (HttpConfigurable.editConfigurable(myCardPanel)) {
resetPanels();
}
}
});
actions.addSeparator();
actions.add(new InstallFromDiskAction());
actions.addSeparator();
actions.add(new ChangePluginStateAction(false));
actions.add(new ChangePluginStateAction(true));
return actions;
}
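/**
* Shows the given action group as a popup whose top-right corner is aligned with
* the bottom-right corner of the anchor component.
*/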
private static void showRightBottomPopup(@NotNull Component component, @NotNull @Nls String title, @NotNull ActionGroup group) {
DefaultActionGroup actions = new GroupByActionGroup();
actions.addSeparator(" " + title);
actions.addAll(group);
DataContext context = DataManager.getInstance().getDataContext(component);
JBPopup popup = new PopupFactoryImpl.ActionGroupPopup(null, actions, context, false, false, false, true, null, -1, null, null) {
@Override
protected ListCellRenderer getListElementRenderer() {
return new PopupListElementRenderer(this) {
@Override
protected SeparatorWithText createSeparator() {
return new SeparatorWithText() {
{
setTextForeground(JBColor.BLACK);
setCaptionCentered(false);
}
@Override
protected void paintLine(Graphics g, int x, int y, int width) {
}
};
}
@Override
protected void setSeparatorFont(Font font) {
mySeparatorComponent.setFont(font);
}
@Override
protected Border getDefaultItemComponentBorder() {
return new EmptyBorder(JBInsets.create(UIUtil.getListCellVPadding(), 15));
}
};
}
};
popup.addListener(new JBPopupListener() {
@Override
public void beforeShown(@NotNull LightweightWindowEvent event) {
Point location = component.getLocationOnScreen();
Dimension size = popup.getSize();
popup.setLocation(new Point(location.x + component.getWidth() - size.width, location.y + component.getHeight()));
}
});
popup.show(component);
}
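/**
* Clears cached repository data (including the tag and vendor caches) and recalculates updates;
* the Marketplace panel is reloaded immediately when visible, otherwise the next time it is shown.
*/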
private void resetPanels() {
CustomPluginRepositoryService.getInstance().clearCache();
myTagsSorted = null;
myVendorsSorted = null;
myPluginUpdatesService.recalculateUpdates();
if (myMarketplacePanel == null) {
return;
}
int selectionTab = myTabHeaderComponent.getSelectionTab();
if (selectionTab == MARKETPLACE_TAB) {
myMarketplaceRunnable.run();
}
else {
myMarketplacePanel.setVisibleRunnable(myMarketplaceRunnable);
}
}
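// The selected tab index is persisted via PropertiesComponent; out-of-range values fall back to the Marketplace tab.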
private static int getStoredSelectionTab() {
int value = PropertiesComponent.getInstance().getInt(SELECTION_TAB_KEY, MARKETPLACE_TAB);
return value < MARKETPLACE_TAB || value > INSTALLED_TAB ? MARKETPLACE_TAB : value;
}
private static void storeSelectionTab(int value) {
PropertiesComponent.getInstance().setValue(SELECTION_TAB_KEY, value, MARKETPLACE_TAB);
}
private void createMarketplaceTab() {
myMarketplaceTab = new PluginsTab() {
@Override
protected void createSearchTextField(int flyDelay) {
super.createSearchTextField(250);
mySearchTextField.setHistoryPropertyName("MarketplacePluginsSearchHistory");
}
@NotNull
@Override
protected PluginDetailsPageComponent createDetailsPanel(@NotNull LinkListener<Object> searchListener) {
PluginDetailsPageComponent detailPanel = new PluginDetailsPageComponent(myPluginModel, searchListener, true);
myPluginModel.addDetailPanel(detailPanel);
return detailPanel;
}
@NotNull
@Override
protected JComponent createPluginsPanel(@NotNull Consumer<? super PluginsGroupComponent> selectionListener) {
MultiSelectionEventHandler eventHandler = new MultiSelectionEventHandler();
myMarketplacePanel =
new PluginsGroupComponentWithProgress(new PluginListLayout(), eventHandler,
d -> new ListPluginComponent(myPluginModel, d, mySearchListener, true));
myMarketplacePanel.setSelectionListener(selectionListener);
registerCopyProvider(myMarketplacePanel);
//noinspection ConstantConditions
((SearchUpDownPopupController)myMarketplaceSearchPanel.controller).setEventHandler(eventHandler);
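// Background loader: fetches the Featured / New and Updated / Top Downloads / Top Rated groups from the
// Marketplace plus one group per custom plugin repository, then populates the panel on the EDT.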
Runnable runnable = () -> {
List<PluginsGroup> groups = new ArrayList<>();
try {
Map<String, List<IdeaPluginDescriptor>> customRepositoriesMap = CustomPluginRepositoryService.getInstance().getCustomRepositoryPluginMap();
try {
addGroupViaLightDescriptor(groups, IdeBundle.message("plugins.configurable.featured"), "is_featured_search=true",
"/sortBy:featured");
addGroupViaLightDescriptor(groups, IdeBundle.message("plugins.configurable.new.and.updated"), "orderBy=update+date",
"/sortBy:updated");
addGroupViaLightDescriptor(groups, IdeBundle.message("plugins.configurable.top.downloads"), "orderBy=downloads",
"/sortBy:downloads");
addGroupViaLightDescriptor(groups, IdeBundle.message("plugins.configurable.top.rated"), "orderBy=rating", "/sortBy:rating");
}
catch (IOException e) {
LOG.info("Main plugin repository is not available ('" + e.getMessage() + "'). Please check your network settings.");
}
for (String host : UpdateSettings.getInstance().getPluginHosts()) {
List<IdeaPluginDescriptor> allDescriptors = customRepositoriesMap.get(host);
if (allDescriptors != null) {
addGroup(groups, IdeBundle.message("plugins.configurable.repository.0", host), "/repository:\"" + host + "\"",
descriptors -> {
int allSize = allDescriptors.size();
descriptors.addAll(ContainerUtil.getFirstItems(allDescriptors, ITEMS_PER_GROUP));
PluginsGroup.sortByName(descriptors);
return allSize > ITEMS_PER_GROUP;
});
}
}
}
catch (IOException e) {
LOG.info(e);
}
finally {
ApplicationManager.getApplication().invokeLater(() -> {
myMarketplacePanel.stopLoading();
try {
PluginLogo.startBatchMode();
for (PluginsGroup group : groups) {
myMarketplacePanel.addGroup(group);
}
}
finally {
PluginLogo.endBatchMode();
}
myMarketplacePanel.doLayout();
myMarketplacePanel.initialSelection();
}, ModalityState.any());
}
};
myMarketplaceRunnable = () -> {
myMarketplacePanel.clear();
myMarketplacePanel.startLoading();
ApplicationManager.getApplication().executeOnPooledThread(runnable);
};
myMarketplacePanel.getEmptyText().setText(IdeBundle.message("plugins.configurable.marketplace.plugins.not.loaded"))
.appendSecondaryText(IdeBundle.message("message.check.the.internet.connection.and") + " ", StatusText.DEFAULT_ATTRIBUTES, null)
.appendSecondaryText(IdeBundle.message("message.link.refresh"), SimpleTextAttributes.LINK_PLAIN_ATTRIBUTES,
e -> myMarketplaceRunnable.run());
ApplicationManager.getApplication().executeOnPooledThread(runnable);
return createScrollPane(myMarketplacePanel, false);
}
@Override
protected void updateMainSelection(@NotNull Consumer<? super PluginsGroupComponent> selectionListener) {
selectionListener.accept(myMarketplacePanel);
}
@NotNull
@Override
protected SearchResultPanel createSearchPanel(@NotNull Consumer<? super PluginsGroupComponent> selectionListener) {
SearchUpDownPopupController marketplaceController = new SearchUpDownPopupController(mySearchTextField) {
@NotNull
@Override
protected List<String> getAttributes() {
List<String> attributes = new ArrayList<>();
attributes.add(SearchWords.TAG.getValue());
attributes.add(SearchWords.SORT_BY.getValue());
attributes.add(SearchWords.ORGANIZATION.getValue());
if (!UpdateSettings.getInstance().getPluginHosts().isEmpty()) {
attributes.add(SearchWords.REPOSITORY.getValue());
}
return attributes;
}
@Nullable
@Override
protected List<String> getValues(@NotNull String attribute) {
SearchWords word = SearchWords.find(attribute);
if (word == null) return null;
switch (word) {
case TAG:
if (ContainerUtil.isEmpty(myTagsSorted)) { // XXX
Set<String> allTags = new HashSet<>();
for (IdeaPluginDescriptor descriptor : CustomPluginRepositoryService.getInstance().getCustomRepositoryPlugins()) {
if (descriptor instanceof PluginNode) {
List<String> tags = ((PluginNode)descriptor).getTags();
if (!ContainerUtil.isEmpty(tags)) {
allTags.addAll(tags);
}
}
}
try {
ProcessIOExecutorService.INSTANCE.submit(() -> {
allTags.addAll(MarketplaceRequests.getInstance().getAllPluginsTags());
}).get();
}
catch (InterruptedException | ExecutionException e) {
LOG.error("Error while getting tags from marketplace", e);
}
myTagsSorted = ContainerUtil.sorted(allTags, String::compareToIgnoreCase);
}
return myTagsSorted;
case SORT_BY:
return Arrays.asList("downloads", "name", "rating", "updated");
case ORGANIZATION:
if (ContainerUtil.isEmpty(myVendorsSorted)) { // XXX
LinkedHashSet<String> vendors = new LinkedHashSet<>();
try {
ProcessIOExecutorService.INSTANCE.submit(() -> {
vendors.addAll(MarketplaceRequests.getInstance().getAllPluginsVendors());
}).get();
}
catch (InterruptedException | ExecutionException e) {
LOG.error("Error while getting vendors from marketplace", e);
}
myVendorsSorted = new ArrayList<>(vendors);
}
return myVendorsSorted;
case REPOSITORY:
return UpdateSettings.getInstance().getPluginHosts();
}
return null;
}
@Override
protected void showPopupForQuery() {
showSearchPanel(mySearchTextField.getText());
}
@Override
protected void handleEnter() {
if (!mySearchTextField.getText().isEmpty()) {
handleTrigger("marketplace.suggest.popup.enter");
}
}
@Override
protected void handlePopupListFirstSelection() {
handleTrigger("marketplace.suggest.popup.select");
}
private void handleTrigger(@NonNls String key) {
if (myPopup != null && myPopup.type == SearchPopup.Type.SearchQuery) {
FeatureUsageTracker.getInstance().triggerFeatureUsed(key);
}
}
};
myMarketplaceSortByGroup = new DefaultActionGroup();
for (SortBySearchOption option : SortBySearchOption.values()) {
myMarketplaceSortByGroup.addAction(new MarketplaceSortByAction(option));
}
myMarketplaceSortByAction = new LinkComponent() {
@Override
protected boolean isInClickableArea(Point pt) {
return true;
}
};
myMarketplaceSortByAction.setIcon(new Icon() {
@Override
public void paintIcon(Component c, Graphics g, int x, int y) {
getIcon().paintIcon(c, g, x, y + 1);
}
@Override
public int getIconWidth() {
return getIcon().getIconWidth();
}
@Override
public int getIconHeight() {
return getIcon().getIconHeight();
}
@NotNull
private Icon getIcon() {
return AllIcons.General.ButtonDropTriangle;
}
}); // TODO: icon
myMarketplaceSortByAction.setPaintUnderline(false);
myMarketplaceSortByAction.setIconTextGap(JBUIScale.scale(4));
myMarketplaceSortByAction.setHorizontalTextPosition(SwingConstants.LEFT);
myMarketplaceSortByAction.setForeground(PluginsGroupComponent.SECTION_HEADER_FOREGROUND);
//noinspection unchecked
myMarketplaceSortByAction.setListener(
(component, __) -> showRightBottomPopup(component.getParent().getParent(), IdeBundle.message("plugins.configurable.sort.by"),
myMarketplaceSortByGroup), null);
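// Keeps the sort-by options mutually exclusive: selecting one clears the previous choice, deselecting a
// non-Relevance option falls back to Relevance, and the search query is rewritten with the matching /sortBy: word.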
myMarketplaceSortByCallback = updateAction -> {
MarketplaceSortByAction removeAction = null;
MarketplaceSortByAction addAction = null;
if (updateAction.myState) {
for (AnAction action : myMarketplaceSortByGroup.getChildren(null)) {
MarketplaceSortByAction sortByAction = (MarketplaceSortByAction)action;
if (sortByAction != updateAction && sortByAction.myState) {
sortByAction.myState = false;
removeAction = sortByAction;
break;
}
}
addAction = updateAction;
}
else {
if (updateAction.myOption == SortBySearchOption.Relevance) {
updateAction.myState = true;
return;
}
for (AnAction action : myMarketplaceSortByGroup.getChildren(null)) {
MarketplaceSortByAction sortByAction = (MarketplaceSortByAction)action;
if (sortByAction.myOption == SortBySearchOption.Relevance) {
sortByAction.myState = true;
break;
}
}
removeAction = updateAction;
}
List<String> queries = new ArrayList<>();
new SearchQueryParser.Marketplace(mySearchTextField.getText()) {
@Override
protected void addToSearchQuery(@NotNull String query) {
queries.add(query);
}
@Override
protected void handleAttribute(@NotNull String name, @NotNull String value) {
queries.add(name + SearchQueryParser.wrapAttribute(value));
}
};
if (removeAction != null) {
String query = removeAction.getQuery();
if (query != null) {
queries.remove(query);
}
}
if (addAction != null) {
String query = addAction.getQuery();
if (query != null) {
queries.add(query);
}
}
String query = StringUtil.join(queries, " ");
mySearchTextField.setTextIgnoreEvents(query);
if (query.isEmpty()) {
myMarketplaceTab.hideSearchPanel();
}
else {
myMarketplaceTab.showSearchPanel(query);
}
};
MultiSelectionEventHandler eventHandler = new MultiSelectionEventHandler();
marketplaceController.setSearchResultEventHandler(eventHandler);
PluginsGroupComponentWithProgress panel =
new PluginsGroupComponentWithProgress(new PluginListLayout(), eventHandler,
descriptor -> new ListPluginComponent(myPluginModel, descriptor, mySearchListener, true));
panel.setSelectionListener(selectionListener);
registerCopyProvider(panel);
myMarketplaceSearchPanel =
new SearchResultPanel(marketplaceController, panel, 0, 0) {
@Override
protected void handleQuery(@NotNull String query, @NotNull PluginsGroup result) {
try {
Map<String, List<IdeaPluginDescriptor>> customRepositoriesMap = CustomPluginRepositoryService.getInstance().getCustomRepositoryPluginMap();
SearchQueryParser.Marketplace parser = new SearchQueryParser.Marketplace(query);
if (!parser.repositories.isEmpty()) {
for (String repository : parser.repositories) {
List<IdeaPluginDescriptor> descriptors = customRepositoriesMap.get(repository);
if (descriptors == null) {
continue;
}
if (parser.searchQuery == null) {
result.descriptors.addAll(descriptors);
}
else {
for (IdeaPluginDescriptor descriptor : descriptors) {
if (StringUtil.containsIgnoreCase(descriptor.getName(), parser.searchQuery)) {
result.descriptors.add(descriptor);
}
}
}
}
ContainerUtil.removeDuplicates(result.descriptors);
result.sortByName();
return;
}
List<PluginNode> pluginsFromMarketplace = MarketplaceRequests.getInstance().searchPlugins(parser.getUrlQuery(), 10000);
List<IdeaPluginDescriptor> plugins = UpdateChecker.mergePluginsFromRepositories(
pluginsFromMarketplace,
ContainerUtil.flatten(customRepositoriesMap.values()), false
); // compare plugin versions between marketplace & custom repositories
result.descriptors.addAll(0, plugins);
if (parser.searchQuery != null) {
List<IdeaPluginDescriptor> descriptors = new ArrayList<>();
for (Entry<String, List<IdeaPluginDescriptor>> entry : customRepositoriesMap.entrySet()) {
for (IdeaPluginDescriptor descriptor : entry.getValue()) {
if (StringUtil.containsIgnoreCase(descriptor.getName(), parser.searchQuery)) {
descriptors.add(descriptor);
}
}
}
result.descriptors.addAll(0, descriptors);
}
ContainerUtil.removeDuplicates(result.descriptors);
if (!result.descriptors.isEmpty()) {
String title = "Sort By";
for (AnAction action : myMarketplaceSortByGroup.getChildren(null)) {
MarketplaceSortByAction sortByAction = (MarketplaceSortByAction)action;
sortByAction.setState(parser);
if (sortByAction.myState) {
title = "Sort By: " + sortByAction.myOption.name();
}
}
myMarketplaceSortByAction.setText(title);
result.addRightAction(myMarketplaceSortByAction);
}
}
catch (IOException e) {
LOG.info(e);
ApplicationManager.getApplication().invokeLater(
() -> myPanel.getEmptyText()
.setText(IdeBundle.message("plugins.configurable.search.result.not.loaded"))
.appendSecondaryText(
IdeBundle.message("plugins.configurable.check.internet"), StatusText.DEFAULT_ATTRIBUTES, null), ModalityState.any()
);
}
}
};
return myMarketplaceSearchPanel;
}
};
}
private void createInstalledTab() {
myInstalledSearchGroup = new DefaultActionGroup();
for (InstalledSearchOption option : InstalledSearchOption.values()) {
myInstalledSearchGroup.add(new InstalledSearchOptionAction(option));
}
myInstalledTab = new PluginsTab() {
@Override
protected void createSearchTextField(int flyDelay) {
super.createSearchTextField(flyDelay);
JBTextField textField = mySearchTextField.getTextEditor();
textField.putClientProperty("search.extension", ExtendableTextComponent.Extension
.create(AllIcons.Actions.More, AllIcons.Actions.More, IdeBundle.message("plugins.configurable.search.options"), // TODO: icon
() -> showRightBottomPopup(textField, IdeBundle.message("plugins.configurable.show"), myInstalledSearchGroup)));
textField.putClientProperty("JTextField.variant", null);
textField.putClientProperty("JTextField.variant", "search");
mySearchTextField.setHistoryPropertyName("InstalledPluginsSearchHistory");
}
@NotNull
@Override
protected PluginDetailsPageComponent createDetailsPanel(@NotNull LinkListener<Object> searchListener) {
PluginDetailsPageComponent detailPanel = new PluginDetailsPageComponent(myPluginModel, searchListener, false);
myPluginModel.addDetailPanel(detailPanel);
return detailPanel;
}
@NotNull
@Override
protected JComponent createPluginsPanel(@NotNull Consumer<? super PluginsGroupComponent> selectionListener) {
MultiSelectionEventHandler eventHandler = new MultiSelectionEventHandler();
myInstalledPanel =
new PluginsGroupComponent(new PluginListLayout(), eventHandler,
descriptor -> new ListPluginComponent(myPluginModel, descriptor, mySearchListener, false));
myInstalledPanel.setSelectionListener(selectionListener);
registerCopyProvider(myInstalledPanel);
//noinspection ConstantConditions
((SearchUpDownPopupController)myInstalledSearchPanel.controller).setEventHandler(eventHandler);
try {
PluginLogo.startBatchMode();
PluginsGroup installing = new PluginsGroup(IdeBundle.message("plugins.configurable.installing"));
installing.descriptors.addAll(MyPluginModel.getInstallingPlugins());
if (!installing.descriptors.isEmpty()) {
installing.sortByName();
installing.titleWithCount();
myInstalledPanel.addGroup(installing);
}
PluginsGroup downloaded = new PluginsGroup(IdeBundle.message("plugins.configurable.downloaded"));
downloaded.descriptors.addAll(InstalledPluginsState.getInstance().getInstalledPlugins());
Map<String, List<IdeaPluginDescriptor>> bundledGroups = new HashMap<>();
ApplicationInfoEx appInfo = ApplicationInfoEx.getInstanceEx();
int downloadedEnabled = 0;
boolean hideImplDetails = PluginManager.getInstance().hideImplementationDetails();
String otherCategoryTitle = IdeBundle.message("plugins.configurable.other.bundled");
for (IdeaPluginDescriptor descriptor : PluginManagerCore.getPlugins()) {
if (!appInfo.isEssentialPlugin(descriptor.getPluginId())) {
if (descriptor.isBundled()) {
if (hideImplDetails && descriptor.isImplementationDetail()) {
continue;
}
String category = StringUtil.defaultIfEmpty(descriptor.getCategory(), otherCategoryTitle);
List<IdeaPluginDescriptor> groupDescriptors = bundledGroups.get(category);
if (groupDescriptors == null) {
bundledGroups.put(category, groupDescriptors = new ArrayList<>());
}
groupDescriptors.add(descriptor);
}
else {
downloaded.descriptors.add(descriptor);
if (descriptor.isEnabled()) {
downloadedEnabled++;
}
}
}
}
if (!downloaded.descriptors.isEmpty()) {
myUpdateAll.setListener(new LinkListener<Object>() {
@Override
public void linkSelected(LinkLabel<Object> aSource, Object aLinkData) {
myUpdateAll.setEnabled(false);
for (UIPluginGroup group : myInstalledPanel.getGroups()) {
for (ListPluginComponent plugin : group.plugins) {
plugin.updatePlugin();
}
}
}
}, null);
downloaded.addRightAction(myUpdateAll);
downloaded.addRightAction(myUpdateCounter);
downloaded.sortByName();
downloaded.titleWithCount(downloadedEnabled);
myInstalledPanel.addGroup(downloaded);
myPluginModel.addEnabledGroup(downloaded);
}
myPluginModel.setDownloadedGroup(myInstalledPanel, downloaded, installing);
List<PluginsGroup> groups = new ArrayList<>();
for (Entry<String, List<IdeaPluginDescriptor>> entry : bundledGroups.entrySet()) {
PluginsGroup group = new PluginsGroup(entry.getKey()) {
@Override
public void titleWithCount(int enabled) {
rightAction.setText(enabled == 0 ? IdeBundle.message("plugins.configurable.enable.all")
: IdeBundle.message("plugins.configurable.disable.all"));
}
};
group.descriptors.addAll(entry.getValue());
group.sortByName();
group.rightAction = new LinkLabel<>("", null, (__, ___) -> myPluginModel
.changeEnableDisable(ContainerUtil.toArray(group.descriptors, IdeaPluginDescriptor[]::new),
group.rightAction.getText().startsWith("Enable")));
group.titleWithEnabled(myPluginModel);
groups.add(group);
}
ContainerUtil.sort(groups, (o1, o2) -> StringUtil.compare(o1.title, o2.title, true));
PluginsGroup otherGroup = ContainerUtil.find(groups, group -> group.title.equals(otherCategoryTitle));
if (otherGroup != null) {
groups.remove(otherGroup);
groups.add(otherGroup);
}
for (PluginsGroup group : groups) {
myInstalledPanel.addGroup(group);
myPluginModel.addEnabledGroup(group);
}
myPluginUpdatesService.connectInstalled(updates -> {
if (ContainerUtil.isEmpty(updates)) {
clearUpdates(myInstalledPanel);
clearUpdates(myInstalledSearchPanel.getPanel());
}
else {
applyUpdates(myInstalledPanel, updates);
applyUpdates(myInstalledSearchPanel.getPanel(), updates);
}
selectionListener.accept(myInstalledPanel);
});
}
finally {
PluginLogo.endBatchMode();
}
if (myInitUpdates != null) {
applyUpdates(myInstalledPanel, myInitUpdates);
}
return createScrollPane(myInstalledPanel, true);
}
@Override
protected void updateMainSelection(@NotNull Consumer<? super PluginsGroupComponent> selectionListener) {
selectionListener.accept(myInstalledPanel);
}
@Override
public void hideSearchPanel() {
super.hideSearchPanel();
if (myInstalledSearchSetState) {
for (AnAction action : myInstalledSearchGroup.getChildren(null)) {
((InstalledSearchOptionAction)action).setState(null);
}
}
myPluginModel.setInvalidFixCallback(null);
}
@NotNull
@Override
protected SearchResultPanel createSearchPanel(@NotNull Consumer<? super PluginsGroupComponent> selectionListener) {
SearchUpDownPopupController installedController = new SearchUpDownPopupController(mySearchTextField) {
@NotNull
@Override
@NonNls
protected List<String> getAttributes() {
return Arrays
.asList(
"/downloaded",
"/outdated",
"/enabled",
"/disabled",
"/invalid",
"/bundled",
SearchWords.ORGANIZATION.getValue(),
SearchWords.TAG.getValue()
);
}
@Nullable
@Override
protected List<String> getValues(@NotNull String attribute) {
if (SearchWords.ORGANIZATION.getValue().equals(attribute)) {
return myPluginModel.getVendors();
}
if (SearchWords.TAG.getValue().equals(attribute)) {
return myPluginModel.getTags();
}
return null;
}
@Override
protected void showPopupForQuery() {
showSearchPanel(mySearchTextField.getText());
}
};
MultiSelectionEventHandler eventHandler = new MultiSelectionEventHandler();
installedController.setSearchResultEventHandler(eventHandler);
PluginsGroupComponent panel =
new PluginsGroupComponent(new PluginListLayout(), eventHandler,
descriptor -> new ListPluginComponent(myPluginModel, descriptor, mySearchListener, false));
panel.setSelectionListener(selectionListener);
registerCopyProvider(panel);
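// Rewrites the installed-tab search query when a filter option is toggled: the options are mutually exclusive,
// and the text field is updated without firing search events while myInstalledSearchSetState is cleared.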
myInstalledSearchCallback = updateAction -> {
List<String> queries = new ArrayList<>();
new SearchQueryParser.Installed(mySearchTextField.getText()) {
@Override
protected void addToSearchQuery(@NotNull String query) {
queries.add(query);
}
@Override
protected void handleAttribute(@NotNull String name, @NotNull String value) {
if (!updateAction.myState) {
queries.add(name + (value.isEmpty() ? "" : SearchQueryParser.wrapAttribute(value)));
}
}
};
if (updateAction.myState) {
for (AnAction action : myInstalledSearchGroup.getChildren(null)) {
if (action != updateAction) {
((InstalledSearchOptionAction)action).myState = false;
}
}
queries.add(updateAction.getQuery());
}
else {
queries.remove(updateAction.getQuery());
}
try {
myInstalledSearchSetState = false;
String query = StringUtil.join(queries, " ");
mySearchTextField.setTextIgnoreEvents(query);
if (query.isEmpty()) {
myInstalledTab.hideSearchPanel();
}
else {
myInstalledTab.showSearchPanel(query);
}
}
finally {
myInstalledSearchSetState = true;
}
};
myInstalledSearchPanel = new SearchResultPanel(installedController, panel, 0, 0) {
@Override
protected void setEmptyText(@NotNull String query) {
myPanel.getEmptyText().setText(IdeBundle.message("plugins.configurable.nothing.found"));
if (query.contains("/downloaded") || query.contains("/outdated") ||
query.contains("/enabled") || query.contains("/disabled") ||
query.contains("/invalid") || query.contains("/bundled")) {
return;
}
myPanel.getEmptyText().appendSecondaryText(IdeBundle.message("plugins.configurable.search.in.marketplace"),
SimpleTextAttributes.LINK_PLAIN_ATTRIBUTES,
e -> myTabHeaderComponent.setSelectionWithEvents(MARKETPLACE_TAB));
}
@Override
protected void handleQuery(@NotNull String query, @NotNull PluginsGroup result) {
myPluginModel.setInvalidFixCallback(null);
SearchQueryParser.Installed parser = new SearchQueryParser.Installed(query);
if (myInstalledSearchSetState) {
for (AnAction action : myInstalledSearchGroup.getChildren(null)) {
((InstalledSearchOptionAction)action).setState(parser);
}
}
List<IdeaPluginDescriptor> descriptors = myPluginModel.getInstalledDescriptors();
if (!parser.vendors.isEmpty()) {
for (Iterator<IdeaPluginDescriptor> I = descriptors.iterator(); I.hasNext(); ) {
if (!MyPluginModel.isVendor(I.next(), parser.vendors)) {
I.remove();
}
}
}
if (!parser.tags.isEmpty()) {
for (Iterator<IdeaPluginDescriptor> I = descriptors.iterator(); I.hasNext(); ) {
if (!ContainerUtil.intersects(getTags(I.next()), parser.tags)) {
I.remove();
}
}
}
for (Iterator<IdeaPluginDescriptor> I = descriptors.iterator(); I.hasNext(); ) {
IdeaPluginDescriptor descriptor = I.next();
if (parser.attributes) {
if (parser.enabled && (!myPluginModel.isEnabled(descriptor) || myPluginModel.hasErrors(descriptor))) {
I.remove();
continue;
}
if (parser.disabled && (myPluginModel.isEnabled(descriptor) || myPluginModel.hasErrors(descriptor))) {
I.remove();
continue;
}
if (parser.bundled && !descriptor.isBundled()) {
I.remove();
continue;
}
if (parser.downloaded && descriptor.isBundled()) {
I.remove();
continue;
}
if (parser.invalid && !myPluginModel.hasErrors(descriptor)) {
I.remove();
continue;
}
if (parser.needUpdate && !PluginUpdatesService.isNeedUpdate(descriptor)) {
I.remove();
continue;
}
}
if (parser.searchQuery != null && !containsQuery(descriptor, parser.searchQuery)) {
I.remove();
}
}
result.descriptors.addAll(descriptors);
if (!result.descriptors.isEmpty()) {
if (parser.invalid) {
myPluginModel.setInvalidFixCallback(() -> {
PluginsGroup group = myInstalledSearchPanel.getGroup();
if (group.ui == null) {
myPluginModel.setInvalidFixCallback(null);
return;
}
PluginsGroupComponent resultPanel = myInstalledSearchPanel.getPanel();
for (IdeaPluginDescriptor descriptor : new ArrayList<>(group.descriptors)) {
if (!myPluginModel.hasErrors(descriptor)) {
resultPanel.removeFromGroup(group, descriptor);
}
}
group.titleWithCount();
myInstalledSearchPanel.fullRepaint();
if (group.descriptors.isEmpty()) {
myPluginModel.setInvalidFixCallback(null);
myInstalledSearchPanel.removeGroup();
}
});
}
else if (parser.needUpdate) {
result.rightAction = new LinkLabel<>(IdeBundle.message("plugin.manager.update.all"), null, (__, ___) -> {
result.rightAction.setEnabled(false);
for (ListPluginComponent plugin : result.ui.plugins) {
plugin.updatePlugin();
}
});
}
Collection<IdeaPluginDescriptor> updates = myInitUpdates == null ? PluginUpdatesService.getUpdates() : myInitUpdates;
myInitUpdates = null;
if (!ContainerUtil.isEmpty(updates)) {
myPostFillGroupCallback = () -> {
applyUpdates(myPanel, updates);
selectionListener.accept(myInstalledPanel);
};
}
}
}
};
return myInstalledSearchPanel;
}
};
myPluginModel.setCancelInstallCallback(descriptor -> {
PluginsGroup group = myInstalledSearchPanel.getGroup();
if (group.ui != null && group.ui.findComponent(descriptor) != null) {
myInstalledSearchPanel.getPanel().removeFromGroup(group, descriptor);
group.titleWithCount();
myInstalledSearchPanel.fullRepaint();
if (group.descriptors.isEmpty()) {
myInstalledSearchPanel.removeGroup();
}
}
});
}
private static boolean containsQuery(IdeaPluginDescriptor descriptor, String searchQuery) {
if (StringUtil.containsIgnoreCase(descriptor.getName(), searchQuery)) return true;
String description = descriptor.getDescription();
return description != null && StringUtil.containsIgnoreCase(description, searchQuery);
}
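// Clears or applies the given update descriptors on every plugin row currently shown in the panel.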
private static void clearUpdates(@NotNull PluginsGroupComponent panel) {
for (UIPluginGroup group : panel.getGroups()) {
for (ListPluginComponent plugin : group.plugins) {
plugin.setUpdateDescriptor(null);
}
}
}
private static void applyUpdates(@NotNull PluginsGroupComponent panel, @NotNull Collection<IdeaPluginDescriptor> updates) {
for (IdeaPluginDescriptor descriptor : updates) {
for (UIPluginGroup group : panel.getGroups()) {
ListPluginComponent component = group.findComponent(descriptor);
if (component != null) {
component.setUpdateDescriptor(descriptor);
break;
}
}
}
}
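/**
* Registers a copy provider that copies the selected plugins as "Name (version)" lines.
*/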
public static void registerCopyProvider(@NotNull PluginsGroupComponent component) {
CopyProvider copyProvider = new CopyProvider() {
@Override
public void performCopy(@NotNull DataContext dataContext) {
StringBuilder result = new StringBuilder();
for (ListPluginComponent pluginComponent : component.getSelection()) {
result.append(pluginComponent.myPlugin.getName()).append(" (").append(pluginComponent.myPlugin.getVersion()).append(")\n");
}
CopyPasteManager.getInstance().setContents(new TextTransferable(result.substring(0, result.length() - 1)));
}
@Override
public boolean isCopyEnabled(@NotNull DataContext dataContext) {
return !component.getSelection().isEmpty();
}
@Override
public boolean isCopyVisible(@NotNull DataContext dataContext) {
return true;
}
};
DataManager.registerDataProvider(component, dataId -> PlatformDataKeys.COPY_PROVIDER.is(dataId) ? copyProvider : null);
}
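/**
* Returns the display tags for a plugin; Paid/Trial/Purchased tags are derived from the product code
* and licensing state, and the EAP and Paid tags are moved to the front of the list.
*/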
@NotNull
public static List<String> getTags(@NotNull IdeaPluginDescriptor plugin) {
List<String> tags = null;
String productCode = plugin.getProductCode();
if (plugin instanceof PluginNode) {
tags = ((PluginNode)plugin).getTags();
if (productCode != null) {
if (LicensePanel.isEA2Product(productCode)) {
if (tags != null && tags.contains(Tags.Paid.name())) {
tags = new ArrayList<>(tags);
tags.remove(Tags.Paid.name());
}
}
else if (tags == null) {
return Collections.singletonList(Tags.Paid.name());
}
else if (!tags.contains(Tags.Paid.name())) {
tags = new ArrayList<>(tags);
tags.add(Tags.Paid.name());
}
}
}
else if (productCode != null && !plugin.isBundled() && !LicensePanel.isEA2Product(productCode)) {
LicensingFacade instance = LicensingFacade.getInstance();
if (instance != null) {
String stamp = instance.getConfirmationStamp(productCode);
if (stamp != null) {
return Collections.singletonList(stamp.startsWith("eval:") ? Tags.Trial.name() : Tags.Purchased.name());
}
}
return Collections.singletonList(Tags.Paid.name());
}
if (ContainerUtil.isEmpty(tags)) {
return Collections.emptyList();
}
if (tags.size() > 1) {
tags = new ArrayList<>(tags);
if (tags.remove(Tags.EAP.name())) {
tags.add(0, Tags.EAP.name());
}
if (tags.remove(Tags.Paid.name())) {
tags.add(0, Tags.Paid.name());
}
}
return tags;
}
@NotNull
public static <T extends Component> T setTinyFont(@NotNull T component) {
return SystemInfo.isMac ? RelativeFont.TINY.install(component) : component;
}
public static int offset5() {
return JBUIScale.scale(5);
}
@Nullable
public static synchronized String getDownloads(@NotNull IdeaPluginDescriptor plugin) {
String downloads = null;
if (plugin instanceof PluginNode) {
downloads = ((PluginNode)plugin).getDownloads();
}
return getFormatLength(downloads);
}
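// Formats a numeric string with a K/M suffix for values above 1000, e.g. "1234" -> "1.2K"
// (assuming the number patterns declared above); returns null when the input is not numeric.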
@Nullable
static synchronized String getFormatLength(@Nullable String len) {
if (!StringUtil.isEmptyOrSpaces(len)) {
try {
long value = Long.parseLong(len);
if (value > 1000) {
return value < 1000000 ? K_FORMAT.format(value / 1000D) : M_FORMAT.format(value / 1000000D);
}
return Long.toString(value);
}
catch (NumberFormatException ignore) {
}
}
return null;
}
@Nullable
public static synchronized String getLastUpdatedDate(@NotNull IdeaPluginDescriptor plugin) {
long date = 0;
if (plugin instanceof PluginNode) {
date = ((PluginNode)plugin).getDate();
}
return date > 0 && date != Long.MAX_VALUE ? DATE_FORMAT.format(new Date(date)) : null;
}
@Nullable
public static String getRating(@NotNull IdeaPluginDescriptor plugin) {
String rating = null;
if (plugin instanceof PluginNode) {
rating = ((PluginNode)plugin).getRating();
}
if (rating != null) {
try {
if (Double.valueOf(rating) > 0) {
return StringUtil.trimEnd(rating, ".0");
}
}
catch (NumberFormatException ignore) {
}
}
return null;
}
@Nullable
public static synchronized String getSize(@NotNull IdeaPluginDescriptor plugin) {
String size = null;
if (plugin instanceof PluginNode) {
size = ((PluginNode)plugin).getSize();
}
if (!StringUtil.isEmptyOrSpaces(size)) {
try {
return StringUtilRt.formatFileSize(Long.parseLong(size)).toUpperCase(Locale.ENGLISH);
}
catch (NumberFormatException ignore) {
}
}
return null;
}
@NotNull
public static String getVersion(@NotNull IdeaPluginDescriptor oldPlugin, @NotNull IdeaPluginDescriptor newPlugin) {
return StringUtil.defaultIfEmpty(oldPlugin.getVersion(), "unknown") +
" " + UIUtil.rightArrow() + " " +
StringUtil.defaultIfEmpty(newPlugin.getVersion(), "unknown");
}
@Messages.YesNoResult
public static int showRestartDialog() {
return showRestartDialog(IdeBundle.message("update.notifications.title"));
}
@Messages.YesNoResult
public static int showRestartDialog(@NotNull @NlsContexts.DialogTitle String title) {
return showRestartDialog(title, action -> IdeBundle
.message("ide.restart.required.message", action, ApplicationNamesInfo.getInstance().getFullProductName()));
}
@Messages.YesNoResult
public static int showRestartDialog(@NotNull @NlsContexts.DialogTitle String title, @NotNull Function<? super String, @Nls String> message) {
String action =
IdeBundle.message(ApplicationManager.getApplication().isRestartCapable() ? "ide.restart.action" : "ide.shutdown.action");
return Messages
.showYesNoDialog(message.apply(action), title, action, IdeBundle.message("ide.notnow.action"), Messages.getQuestionIcon());
}
public static void shutdownOrRestartApp() {
shutdownOrRestartApp(IdeBundle.message("update.notifications.title"));
}
public static void shutdownOrRestartApp(@NotNull @NlsContexts.DialogTitle String title) {
if (showRestartDialog(title) == Messages.YES) {
ApplicationManagerEx.getApplicationEx().restart(true);
}
}
public static void shutdownOrRestartAppAfterInstall(@NotNull String plugin) {
String title = IdeBundle.message("update.notifications.title");
Function<String, String> message = action -> IdeBundle
.message("plugin.installed.ide.restart.required.message", plugin, action, ApplicationNamesInfo.getInstance().getFullProductName());
if (showRestartDialog(title, message) == Messages.YES) {
ApplicationManagerEx.getApplicationEx().restart(true);
}
}
public static void showPluginConfigurable(@Nullable Project project, IdeaPluginDescriptor @NotNull ... descriptors) {
PluginManagerConfigurable configurable = new PluginManagerConfigurable();
ShowSettingsUtil.getInstance().editConfigurable(project, configurable, () -> configurable.select(descriptors));
}
public static void showPluginConfigurable(@Nullable Project project, @NotNull Collection<IdeaPluginDescriptor> updates) {
PluginManagerConfigurable configurable = new PluginManagerConfigurable();
configurable.setInitUpdates(updates);
ShowSettingsUtil.getInstance().editConfigurable(project, configurable);
}
private enum SortBySearchOption {
Downloads, Name, Rating, Relevance, Updated
}
private final class MarketplaceSortByAction extends ToggleAction implements DumbAware {
private final SortBySearchOption myOption;
private boolean myState;
private MarketplaceSortByAction(@NotNull SortBySearchOption option) {
super(option.name());
myOption = option;
}
@Override
public boolean isSelected(@NotNull AnActionEvent e) {
return myState;
}
@Override
public void setSelected(@NotNull AnActionEvent e, boolean state) {
myState = state;
myMarketplaceSortByCallback.accept(this);
}
public void setState(@NotNull SearchQueryParser.Marketplace parser) {
if (myOption == SortBySearchOption.Relevance) {
myState = parser.sortBy == null;
getTemplatePresentation().setVisible(
parser.sortBy == null || !parser.tags.isEmpty() || !parser.vendors.isEmpty() || parser.searchQuery != null
);
}
else {
myState = parser.sortBy != null && myOption.name().equalsIgnoreCase(parser.sortBy);
}
}
@Nullable
public String getQuery() {
switch (myOption) {
case Downloads:
return "/sortBy:downloads";
case Name:
return "/sortBy:name";
case Rating:
return "/sortBy:rating";
case Updated:
return "/sortBy:updated";
case Relevance:
default:
return null;
}
}
}
private enum InstalledSearchOption {
Downloaded(IdeBundle.messagePointer("plugins.configurable.InstalledSearchOption.Downloaded")),
NeedUpdate(IdeBundle.messagePointer("plugins.configurable.InstalledSearchOption.NeedUpdate")),
Enabled(IdeBundle.messagePointer("plugins.configurable.InstalledSearchOption.Enabled")),
Disabled(IdeBundle.messagePointer("plugins.configurable.InstalledSearchOption.Disabled")),
Invalid(IdeBundle.messagePointer("plugins.configurable.InstalledSearchOption.Invalid")),
Bundled(IdeBundle.messagePointer("plugins.configurable.InstalledSearchOption.Bundled"));
private final Supplier<@Nls String> myPresentableNameSupplier;
InstalledSearchOption(Supplier<@Nls String> name) {myPresentableNameSupplier = name;}
}
private final class InstalledSearchOptionAction extends ToggleAction implements DumbAware {
private final InstalledSearchOption myOption;
private boolean myState;
private InstalledSearchOptionAction(@NotNull InstalledSearchOption option) {
super(option.myPresentableNameSupplier);
myOption = option;
}
@Override
public boolean isSelected(@NotNull AnActionEvent e) {
return myState;
}
@Override
public void setSelected(@NotNull AnActionEvent e, boolean state) {
myState = state;
myInstalledSearchCallback.accept(this);
}
public void setState(@Nullable SearchQueryParser.Installed parser) {
if (parser == null) {
myState = false;
return;
}
switch (myOption) {
case Enabled:
myState = parser.enabled;
break;
case Disabled:
myState = parser.disabled;
break;
case Downloaded:
myState = parser.downloaded;
break;
case Bundled:
myState = parser.bundled;
break;
case Invalid:
myState = parser.invalid;
break;
case NeedUpdate:
myState = parser.needUpdate;
break;
}
}
@NotNull
public String getQuery() {
return myOption == InstalledSearchOption.NeedUpdate ? "/outdated" : "/" + StringUtil.decapitalize(myOption.name());
}
}
private static class GroupByActionGroup extends DefaultActionGroup implements CheckedActionGroup {
}
private final class ChangePluginStateAction extends DumbAwareAction {
private final boolean myEnable;
private ChangePluginStateAction(boolean enable) {
super(enable ? IdeBundle.message("plugins.configurable.enable.all.downloaded")
: IdeBundle.message("plugins.configurable.disable.all.downloaded"));
myEnable = enable;
}
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
IdeaPluginDescriptor[] descriptors;
PluginsGroup group = myPluginModel.getDownloadedGroup();
if (group == null || group.ui == null) {
ApplicationInfoImpl appInfo = (ApplicationInfoImpl)ApplicationInfo.getInstance();
List<IdeaPluginDescriptor> descriptorList = new ArrayList<>();
for (IdeaPluginDescriptor descriptor : PluginManagerCore.getPlugins()) {
if (!appInfo.isEssentialPlugin(descriptor.getPluginId()) &&
!descriptor.isBundled() && descriptor.isEnabled() != myEnable) {
descriptorList.add(descriptor);
}
}
descriptors = descriptorList.toArray(new IdeaPluginDescriptor[0]);
}
else {
descriptors = group.ui.plugins.stream().filter(component -> myPluginModel.isEnabled(component.myPlugin) != myEnable)
.map(component -> component.myPlugin).toArray(IdeaPluginDescriptor[]::new);
}
if (descriptors.length > 0) {
myPluginModel.changeEnableDisable(descriptors, myEnable);
}
}
}
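/**
* Wraps the panel in a borderless scroll pane (vertical scrollbar as needed, horizontal never)
* and optionally selects the first item.
*/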
@NotNull
public static JComponent createScrollPane(@NotNull PluginsGroupComponent panel, boolean initSelection) {
JBScrollPane pane =
new JBScrollPane(panel, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
pane.setBorder(JBUI.Borders.empty());
if (initSelection) {
panel.initialSelection();
}
return pane;
}
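// Builds a Marketplace group via the given function; the function fills the descriptor list and
// returns true when a "Show All" link pointing to showAllQuery should be added.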
private void addGroup(
@NotNull List<? super PluginsGroup> groups,
@NotNull @Nls String name,
@NotNull String showAllQuery,
@NotNull ThrowableNotNullFunction<? super List<IdeaPluginDescriptor>, Boolean, ? extends IOException> function
)
throws IOException {
PluginsGroup group = new PluginsGroup(name);
if (Boolean.TRUE.equals(function.fun(group.descriptors))) {
group.rightAction =
new LinkLabel<>(IdeBundle.message("plugins.configurable.show.all"), null, myMarketplaceTab.mySearchListener, showAllQuery);
group.rightAction.setBorder(JBUI.Borders.emptyRight(5));
}
if (!group.descriptors.isEmpty()) {
groups.add(group);
}
}
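// Fetches plugins for the given Marketplace query, keeps the first ITEMS_PER_GROUP of them,
// and reports whether enough results came back to warrant a "Show All" link.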
private void addGroupViaLightDescriptor(
@NotNull List<? super PluginsGroup> groups,
@NotNull @Nls String name,
@NotNull @NonNls String query,
@NotNull @NonNls String showAllQuery
) throws IOException {
addGroup(groups, name, showAllQuery, descriptors -> {
List<PluginNode> pluginNodes = MarketplaceRequests.getInstance().searchPlugins(query, ITEMS_PER_GROUP * 2);
descriptors.addAll(ContainerUtil.getFirstItems(pluginNodes, ITEMS_PER_GROUP));
return pluginNodes.size() >= ITEMS_PER_GROUP;
});
}
@Override
@NotNull
public String getHelpTopic() {
return ID;
}
@Override
public void disposeUIResources() {
if (myPluginModel.toBackground()) {
InstallPluginInfo.showRestart();
InstalledPluginsState.getInstance().clearShutdownCallback();
}
myMarketplaceTab.dispose();
myInstalledTab.dispose();
if (myMarketplacePanel != null) {
myMarketplacePanel.dispose();
}
if (myMarketplaceSearchPanel != null) {
myMarketplaceSearchPanel.dispose();
}
myPluginUpdatesService.dispose();
PluginPriceService.cancel();
InstalledPluginsState.getInstance().runShutdownCallback();
InstalledPluginsState.getInstance().resetChangesAppliedWithoutRestart();
}
@Override
public void cancel() {
myPluginModel.removePluginsOnCancel(myCardPanel);
}
@Override
public boolean isModified() {
return myPluginModel.isModified();
}
@Override
public void apply() throws ConfigurationException {
if (myPluginModel.apply(myCardPanel)) return;
if (myPluginModel.createShutdownCallback) {
InstalledPluginsState.getInstance()
.setShutdownCallback(() -> ApplicationManager.getApplication().invokeLater(() -> shutdownOrRestartApp()));
}
}
@Override
public void reset() {
myPluginModel.removePluginsOnCancel(myCardPanel);
}
@NotNull
public MyPluginModel getPluginModel() {
return myPluginModel;
}
public void setInitUpdates(@NotNull Collection<IdeaPluginDescriptor> initUpdates) {
myInitUpdates = initUpdates;
}
public void select(IdeaPluginDescriptor @NotNull ... descriptors) {
if (myTabHeaderComponent.getSelectionTab() != INSTALLED_TAB) {
myTabHeaderComponent.setSelectionWithEvents(INSTALLED_TAB);
}
if (descriptors.length == 0) {
return;
}
List<ListPluginComponent> components = new ArrayList<>();
for (IdeaPluginDescriptor descriptor : descriptors) {
for (UIPluginGroup group : myInstalledPanel.getGroups()) {
ListPluginComponent component = group.findComponent(descriptor);
if (component != null) {
components.add(component);
break;
}
}
}
if (!components.isEmpty()) {
myInstalledPanel.setSelection(components);
}
}
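// Routes a settings search option to the right tab: options starting with SearchWords.TAG open the
// Marketplace tab, everything else goes to the Installed tab.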
@Nullable
@Override
public Runnable enableSearch(String option) {
if (StringUtil.isEmpty(option) && (myTabHeaderComponent.getSelectionTab() == MARKETPLACE_TAB || myInstalledSearchPanel.isEmpty())) {
return null;
}
return () -> {
boolean marketplace = option != null && option.startsWith(SearchWords.TAG.getValue());
int tabIndex = marketplace ? MARKETPLACE_TAB : INSTALLED_TAB;
if (myTabHeaderComponent.getSelectionTab() != tabIndex) {
myTabHeaderComponent.setSelectionWithEvents(tabIndex);
}
PluginsTab tab = marketplace ? myMarketplaceTab : myInstalledTab;
tab.clearSearchPanel(option);
if (!StringUtil.isEmpty(option)) {
tab.showSearchPanel(option);
}
};
}
private final class InstallFromDiskAction extends DumbAwareAction {
private InstallFromDiskAction() {super(IdeBundle.messagePointer("action.InstallFromDiskAction.text"));}
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
PluginInstaller.chooseAndInstall(myPluginModel, myCardPanel, callbackData -> {
myPluginModel.pluginInstalledFromDisk(callbackData);
boolean select = myInstalledPanel == null;
if (myTabHeaderComponent.getSelectionTab() != INSTALLED_TAB) {
myTabHeaderComponent.setSelectionWithEvents(INSTALLED_TAB);
}
myInstalledTab.clearSearchPanel("");
if (select) {
for (UIPluginGroup group : myInstalledPanel.getGroups()) {
ListPluginComponent component = group.findComponent(callbackData.getPluginDescriptor());
if (component != null) {
myInstalledPanel.setSelection(component);
break;
}
}
}
});
}
}
}
|
package it.unibz.inf.ontop.spec.mapping.parser.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import it.unibz.inf.ontop.exception.MinorOntopInternalBugException;
import it.unibz.inf.ontop.injection.OntopMappingSettings;
import it.unibz.inf.ontop.spec.mapping.TargetAtom;
import it.unibz.inf.ontop.spec.mapping.TargetAtomFactory;
import it.unibz.inf.ontop.model.term.*;
import it.unibz.inf.ontop.model.type.TypeFactory;
import it.unibz.inf.ontop.model.vocabulary.RDFS;
import it.unibz.inf.ontop.model.vocabulary.XSD;
import it.unibz.inf.ontop.spec.mapping.parser.impl.TurtleOBDAParser.*;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.apache.commons.rdf.api.IRI;
import org.apache.commons.rdf.api.RDF;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
/**
* Stateful visitor: see currentSubject and currentGraph.
*/
public abstract class AbstractTurtleOBDAVisitor extends TurtleOBDABaseVisitor implements TurtleOBDAVisitor {
// Column placeholder pattern
private static final Pattern varPattern = Pattern.compile("\\{([^}]+)}");
private static final Pattern constantBnodePattern = Pattern.compile("^_:(.*)");
@Override
public Object visitLiteral(LiteralContext ctx) {
return super.visitLiteral(ctx);
}
protected abstract boolean validateAttributeName(String value);
/**
* Map of directives
*/
private final HashMap<String, String> directives = new HashMap<>();
/**
* The current graph
*/
private ImmutableTerm currentGraph;
/**
* The current subject term
*/
private ImmutableTerm currentSubject;
protected String error = "";
private final TermFactory termFactory;
private final RDF rdfFactory;
private final TypeFactory typeFactory;
private final TargetAtomFactory targetAtomFactory;
private final OntopMappingSettings settings;
protected AbstractTurtleOBDAVisitor(TermFactory termFactory, TypeFactory typeFactory,
TargetAtomFactory targetAtomFactory, RDF rdfFactory,
OntopMappingSettings settings) {
this.typeFactory = typeFactory;
this.targetAtomFactory = targetAtomFactory;
this.rdfFactory = rdfFactory;
this.termFactory = termFactory;
this.settings = settings;
}
public String getError() {
return error;
}
private String removeBrackets(String text) {
return text.substring(1, text.length() - 1);
}
private ImmutableTerm typeTerm(String text, IRI datatype) {
return termFactory.getRDFLiteralConstant(text, datatype);
}
protected ImmutableTerm constructIRI(String text) {
return constructBnodeOrIRI(text,
col -> termFactory.getIRIFunctionalTerm(col, true),
termFactory::getIRIFunctionalTerm,
false);
}
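/**
* Builds an IRI or bnode term from a template string: a single fixed token becomes a constant IRI,
* a single {column} token becomes a functional term over that column, and mixed templates become a
* template functional term whose "{}" placeholders are filled by string casts of the columns.
*/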
protected ImmutableTerm constructBnodeOrIRI(String text,
Function<Variable, ImmutableFunctionalTerm> columnFct,
BiFunction<String, ImmutableList<ImmutableTerm>, ImmutableFunctionalTerm> templateFct,
boolean isBnode) {
final String PLACEHOLDER = "{}";
List<FormatString> tokens = parseIRIOrBnode(text, isBnode);
int size = tokens.size();
if (size == 1) {
FormatString token = tokens.get(0);
if (token instanceof FixedString) {
return termFactory.getConstantIRI(rdfFactory.createIRI(token.toString()));
} else if (token instanceof ColumnString) {
// the IRI string is coming from the DB (no escaping needed)
Variable column = termFactory.getVariable(token.toString());
return columnFct.apply(column);
}
throw new MinorOntopInternalBugException("Unexpected token: " + token);
} else {
StringBuilder sb = new StringBuilder();
List<ImmutableTerm> terms = new ArrayList<>();
for (FormatString token : tokens) {
if (token instanceof FixedString) { // if part of URI template
sb.append(token.toString());
} else if (token instanceof ColumnString) {
sb.append(PLACEHOLDER);
Variable column = termFactory.getVariable(token.toString());
terms.add(termFactory.getPartiallyDefinedToStringCast(column));
}
}
String iriTemplate = sb.toString(); // complete IRI template
return templateFct.apply(iriTemplate, ImmutableList.copyOf(terms));
}
}
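// Splits the template into fixed and {column} tokens; for bnode templates the leading "_:" prefix
// is stripped from the first fixed token.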
private List<FormatString> parseIRIOrBnode(String text, boolean isBnode) {
List<FormatString> toReturn = new ArrayList<>();
Matcher m = varPattern.matcher(text);
int i = 0;
while (i < text.length()) {
if (m.find(i)) {
if (m.start() != i) {
String subString = text.substring(i, m.start());
toReturn.add(new FixedString(
// Remove the prefix _:
(isBnode && (i == 0)) ? subString.substring(2) : subString));
}
String value = m.group(1);
if (validateAttributeName(value)) {
toReturn.add(new ColumnString(value));
i = m.end();
}
} else {
toReturn.add(new FixedString(text.substring(i)));
break;
}
}
return toReturn;
}
private ImmutableTerm constructConstantBNode(String text) {
Matcher m = constantBnodePattern.matcher(text);
// group(1) is only accessible after a successful match
if (!m.matches())
throw new MinorOntopInternalBugException("Not a constant bnode: " + text);
return termFactory.getConstantBNode(m.group(1));
}
private ImmutableTerm constructBnodeFunction(String text) {
return constructBnodeOrIRI(text,
col -> termFactory.getBnodeFunctionalTerm(col, true),
termFactory::getBnodeFunctionalTerm,
true);
}
private interface FormatString {
int index();
String toString();
}
private class FixedString implements FormatString {
private String s;
FixedString(String s) {
this.s = s;
}
@Override
public int index() {
return -1;
} // flag code for fixed string
@Override
public String toString() {
return s;
}
}
private class ColumnString implements FormatString {
private String s;
ColumnString(String s) {
this.s = s;
}
@Override
public int index() {
return 0;
} // flag code for column string
@Override
public String toString() {
return s;
}
}
//distinguishes an escaped curly bracket "\{" from an unescaped one "{"
//and returns the index of the first unescaped "{" (-1 if none)
private int getIndexOfCurlyB(String str) {
int i;
int j;
i = str.indexOf("{");
j = str.indexOf("\\{");
while ((i - 1 == j) && (j != -1)) {
i = str.indexOf("{", i + 1);
j = str.indexOf("\\{", j + 1);
}
return i;
}
//for concatenation templates: parses the literal and adds its constant parts
//and {column} placeholders to the terms list
private List<ImmutableTerm> addToTermsList(String str) {
ArrayList<ImmutableTerm> terms = new ArrayList<>();
int i, j;
String st;
str = str.substring(1, str.length() - 1);
while (str.contains("{")) {
i = getIndexOfCurlyB(str);
if (i > 0) {
st = str.substring(0, i);
st = st.replace("\\\\", "");
terms.add(termFactory.getDBStringConstant(st));
str = str.substring(str.indexOf("{", i), str.length());
} else if (i == 0) {
j = str.indexOf("}");
terms.add(termFactory.getVariable(str.substring(1, j)));
str = str.substring(j + 1, str.length());
} else {
break;
}
}
if (!str.equals("")) {
str = str.replace("\\\\", "");
terms.add(termFactory.getDBStringConstant(str));
}
return terms;
}
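    // Illustrative sketch (added comment; the literal is hypothetical): for the quoted literal
    // "ID-{person_id}-{name}", addToTermsList strips the surrounding quotes and produces
    // [DBStringConstant("ID-"), Variable("person_id"), DBStringConstant("-"), Variable("name")],
    // which getNestedConcat below turns into a single CONCAT functional term.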
    // Builds the concat term for the parsed literal (or returns a single term when there is nothing to concatenate)
private ImmutableTerm getNestedConcat(String str) {
List<ImmutableTerm> terms;
terms = addToTermsList(str);
if (terms.size() == 1) {
return terms.get(0);
}
return termFactory.getNullRejectingDBConcatFunctionalTerm(ImmutableList.copyOf(terms));
}
private String concatPrefix(String prefixedName) {
String[] tokens = prefixedName.split(":", 2);
String uri = directives.get(tokens[0]); // the first token is the prefix
return uri + tokens[1]; // the second token is the local name
}
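    // Illustrative example (added comment; the prefix mapping is hypothetical): with the directive
    // @prefix ex: <http://example.org/> , concatPrefix("ex:Person") splits on the first ":"
    // and returns "http://example.org/Person".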
@Override
public ImmutableList<TargetAtom> visitParse(ParseContext ctx) {
ctx.directiveStatement().forEach(this::visit);
ImmutableList<TargetAtom> tripleAtoms = ctx.triplesStatement().stream()
.flatMap(this::visitTriplesStatement)
.collect(ImmutableCollectors.toList());
ImmutableList<TargetAtom> quadAtoms = ctx.quadsStatement().stream()
.flatMap(this::visitQuadsStatement)
.collect(ImmutableCollectors.toList());
return ImmutableList.copyOf(Iterables.concat(tripleAtoms, quadAtoms));
}
@Override
public Void visitDirectiveStatement(DirectiveStatementContext ctx) {
visit(ctx.directive());
return null;
}
@Override
public Void visitDirective(DirectiveContext ctx) {
visit(ctx.prefixID());
return null;
}
@Override
public Stream<TargetAtom> visitTriplesStatement(TriplesStatementContext ctx) {
TriplesContext triples = ctx.triples();
if (triples != null) {
return visitTriples(triples);
} else {
return Stream.empty();
}
}
@Override
public Stream<TargetAtom> visitQuadsStatement(QuadsStatementContext ctx) {
this.currentGraph = visitGraph(ctx.graph());
return ctx.triplesStatement().stream().flatMap(this::visitTriplesStatement);
}
@Override
public Void visitPrefixID(PrefixIDContext ctx) {
String iriref = removeBrackets(ctx.IRIREF().getText());
String ns = ctx.PNAME_NS().getText();
directives.put(ns.substring(0, ns.length() - 1), iriref); // remove the end colon
return null;
}
@Override
public Object visitBase(BaseContext ctx) {
String iriRef = removeBrackets(ctx.IRIREF().getText());
directives.put("", iriRef);
return null;
}
@Override
public Stream<TargetAtom> visitTriples(TriplesContext ctx) {
currentSubject = visitSubject(ctx.subject());
return visitPredicateObjectList(ctx.predicateObjectList());
}
@Override
public Stream<TargetAtom> visitPredicateObjectList(PredicateObjectListContext ctx) {
return ctx.predicateObject().stream()
.flatMap(this::visitPredicateObject);
}
@Override
public Stream<TargetAtom> visitPredicateObject(PredicateObjectContext ctx) {
Stream<TargetAtom> result = visitObjectList(ctx.objectList()).map(object ->
currentGraph == null
? targetAtomFactory.getTripleTargetAtom(currentSubject, visitVerb(ctx.verb()), object)
: targetAtomFactory.getQuadTargetAtom(currentSubject, visitVerb(ctx.verb()), object, currentGraph));
return result;
}
@Override
public ImmutableTerm visitVerb(VerbContext ctx) {
ResourceContext rc = ctx.resource();
if (rc != null) {
return visitResource(rc);
}
return termFactory.getConstantIRI(it.unibz.inf.ontop.model.vocabulary.RDF.TYPE);
}
@Override
public Stream<ImmutableTerm> visitObjectList(ObjectListContext ctx) {
return ctx.object().stream()
.map(this::visitObject);
}
@Override
public ImmutableTerm visitSubject(SubjectContext ctx) {
ResourceContext rc = ctx.resource();
if (rc != null) {
return visitResource(rc);
}
VariableContext vc = ctx.variable();
if (vc != null) {
return termFactory.getIRIFunctionalTerm(visitVariable(vc), true);
}
BlankContext bc = ctx.blank();
if (bc != null) {
return visitBlank(bc);
}
return null;
}
@Override
public ImmutableTerm visitGraph(GraphContext ctx) {
if (ctx == null) return null;
ResourceContext rc = ctx.resource();
if (rc != null) {
return visitResource(rc);
}
VariableContext vc = ctx.variable();
if (vc != null) {
return termFactory.getIRIFunctionalTerm(visitVariable(vc), true);
}
BlankContext bc = ctx.blank();
if (bc != null) {
return visitBlank(bc);
}
return null;
}
@Override
public ImmutableTerm visitObject(ObjectContext ctx) {
ImmutableTerm term = (ImmutableTerm) visit(ctx.children.iterator().next());
return (term instanceof Variable)
? termFactory.getRDFLiteralFunctionalTerm(
termFactory.getPartiallyDefinedToStringCast((Variable) term),
// We give the abstract datatype RDFS.LITERAL when it is not determined yet
                // --> The concrete datatype will be inferred afterwards
RDFS.LITERAL)
: term;
}
@Override
public ImmutableTerm visitResource(ResourceContext ctx) {
if (ctx.iriExt() != null) {
return visitIriExt(ctx.iriExt());
}
return constructIRI(this.visitIri(ctx.iri()).getIRIString());
}
public ImmutableTerm visitIriExt(IriExtContext ctx) {
if (ctx.IRIREF_EXT() != null) {
return constructIRI(removeBrackets(ctx.IRIREF_EXT().getText()));
}
return constructIRI(concatPrefix(ctx.PREFIXED_NAME_EXT().getText()));
}
@Override
public ImmutableFunctionalTerm visitVariableLiteral_1(VariableLiteral_1Context ctx) {
ImmutableFunctionalTerm lexicalTerm = termFactory.getPartiallyDefinedToStringCast(
visitVariable(ctx.variable()));
return termFactory.getRDFLiteralFunctionalTerm(lexicalTerm, visitLanguageTag(ctx.languageTag()));
}
@Override
public ImmutableFunctionalTerm visitVariableLiteral_2(VariableLiteral_2Context ctx) {
ImmutableFunctionalTerm lexicalTerm = termFactory.getPartiallyDefinedToStringCast(
visitVariable(ctx.variable()));
IRI iri = visitIri(ctx.iri());
if ((!settings.areAbstractDatatypesToleratedInMapping())
&& typeFactory.getDatatype(iri).isAbstract())
// TODO: throw a better exception (invalid input)
throw new IllegalArgumentException("The datatype of a literal must not be abstract: "
+ iri + "\nSet the property "
+ OntopMappingSettings.TOLERATE_ABSTRACT_DATATYPE + " to true to tolerate them.");
return termFactory.getRDFLiteralFunctionalTerm(lexicalTerm, iri);
}
@Override
public IRI visitIri(IriContext ctx) {
TerminalNode token = ctx.PREFIXED_NAME();
return rdfFactory.createIRI(
token != null
? concatPrefix(token.getText())
: removeBrackets(ctx.IRIREF().getText()));
}
@Override
public Variable visitVariable(VariableContext ctx) {
String variableName = removeBrackets(ctx.STRING_WITH_CURLY_BRACKET().getText());
validateAttributeName(variableName);
return termFactory.getVariable(variableName);
}
@Override
public ImmutableTerm visitBlank(BlankContext ctx) {
if (ctx.BLANK_NODE_FUNCTION() != null) {
return constructBnodeFunction(ctx.BLANK_NODE_FUNCTION().getText());
}
if (ctx.BLANK_NODE_LABEL() != null) {
return constructConstantBNode(ctx.BLANK_NODE_LABEL().getText());
}
throw new IllegalArgumentException("Anonymous blank nodes not supported yet in mapping targets");
}
@Override
public String visitLanguageTag(LanguageTagContext ctx) {
return ctx.LANGTAG().getText().substring(1).toLowerCase();
}
@Override
public ImmutableTerm visitUntypedStringLiteral(UntypedStringLiteralContext ctx) {
LitStringContext lsc = ctx.litString();
ImmutableTerm literal = visitLitString(lsc);
LanguageTagContext lc = ctx.languageTag();
if (lc != null) {
return termFactory.getRDFLiteralFunctionalTerm(literal, visitLanguageTag(lc));
}
return termFactory.getRDFLiteralFunctionalTerm(literal, XSD.STRING)
.simplify();
}
@Override
public ImmutableTerm visitLitString(LitStringContext ctx) {
String str = ctx.STRING_LITERAL_QUOTE().getText();
if (str.contains("{")) {
return getNestedConcat(str);
}
return termFactory.getDBStringConstant(str.substring(1, str.length() - 1)); // without the double quotes
}
@Override
public ImmutableTerm visitTypedLiteral(TypedLiteralContext ctx) {
ImmutableTerm stringValue = visitLitString(ctx.litString());
IRI iriRef = visitIri(ctx.iri());
return termFactory.getRDFLiteralFunctionalTerm(stringValue, iriRef)
.simplify();
}
@Override
public ImmutableTerm visitUntypedNumericLiteral(UntypedNumericLiteralContext ctx) {
return (ImmutableTerm) visitChildren(ctx);
}
@Override
public ImmutableTerm visitUntypedBooleanLiteral(UntypedBooleanLiteralContext ctx) {
return typeTerm(ctx.BOOLEAN_LITERAL().getText(), XSD.BOOLEAN);
}
@Override
public ImmutableTerm visitNumericUnsigned(NumericUnsignedContext ctx) {
TerminalNode token = ctx.INTEGER();
if (token != null) {
return typeTerm(token.getText(), XSD.INTEGER);
}
token = ctx.DOUBLE();
if (token != null) {
return typeTerm(token.getText(), XSD.DOUBLE);
}
return typeTerm(ctx.DECIMAL().getText(), XSD.DECIMAL);
}
@Override
public ImmutableTerm visitNumericPositive(NumericPositiveContext ctx) {
TerminalNode token = ctx.INTEGER_POSITIVE();
if (token != null) {
return typeTerm(token.getText(), XSD.INTEGER);
}
token = ctx.DOUBLE_POSITIVE();
if (token != null) {
return typeTerm(token.getText(), XSD.DOUBLE);
}
return typeTerm(ctx.DECIMAL_POSITIVE().getText(), XSD.DECIMAL);
}
@Override
public ImmutableTerm visitNumericNegative(NumericNegativeContext ctx) {
TerminalNode token = ctx.INTEGER_NEGATIVE();
if (token != null) {
return typeTerm(token.getText(), XSD.INTEGER);
}
token = ctx.DOUBLE_NEGATIVE();
if (token != null) {
return typeTerm(token.getText(), XSD.DOUBLE);
}
return typeTerm(ctx.DECIMAL_NEGATIVE().getText(), XSD.DECIMAL);
}
}
|
package org.jenkins.tools.test;
import hudson.Functions;
import hudson.maven.MavenEmbedderException;
import hudson.model.UpdateSite;
import hudson.model.UpdateSite.Plugin;
import hudson.util.VersionNumber;
import java.io.BufferedReader;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.scm.ScmFileSet;
import org.apache.maven.scm.ScmTag;
import org.apache.maven.scm.command.checkout.CheckOutScmResult;
import org.apache.maven.scm.manager.ScmManager;
import org.apache.maven.scm.repository.ScmRepository;
import org.codehaus.plexus.PlexusContainerException;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.io.RawInputStreamFacade;
import org.jenkins.tools.test.exception.PluginSourcesUnavailableException;
import org.jenkins.tools.test.exception.PomExecutionException;
import org.jenkins.tools.test.exception.PomTransformationException;
import org.jenkins.tools.test.model.MavenCoordinates;
import org.jenkins.tools.test.model.MavenPom;
import org.jenkins.tools.test.model.PluginCompatReport;
import org.jenkins.tools.test.model.PluginCompatResult;
import org.jenkins.tools.test.model.PluginCompatTesterConfig;
import org.jenkins.tools.test.model.hook.PluginCompatTesterHookBeforeCompile;
import org.jenkins.tools.test.model.hook.PluginCompatTesterHooks;
import org.jenkins.tools.test.model.PluginInfos;
import org.jenkins.tools.test.model.PluginRemoting;
import org.jenkins.tools.test.model.PomData;
import org.jenkins.tools.test.model.TestExecutionResult;
import org.jenkins.tools.test.model.TestStatus;
import org.springframework.core.io.ClassPathResource;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jenkins.tools.test.maven.ExternalMavenRunner;
import org.jenkins.tools.test.maven.InternalMavenRunner;
import org.jenkins.tools.test.maven.MavenRunner;
/**
* Frontend for plugin compatibility tests
* @author Frederic Camblor, Olivier Lamy
*/
public class PluginCompatTester {
private static final String DEFAULT_SOURCE_ID = "default";
    /** Matches the jenkins-core JAR entry inside a Jenkins WAR. */
public static final String JENKINS_CORE_FILE_REGEX = "WEB-INF/lib/jenkins-core-([0-9.]+(?:-[0-9.]+)?(?:-(?i)([a-z]+)(-)?([0-9.]+)?)?(?:-SNAPSHOT)?)[.]jar";
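    // Illustrative note (added comment; the entry name is hypothetical): a WAR entry such as
    // "WEB-INF/lib/jenkins-core-2.60.3.jar" matches the regex above, with group 1 capturing
    // the core version "2.60.3"; scanWAR relies on that group to record the core version.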
private PluginCompatTesterConfig config;
private final MavenRunner runner;
public PluginCompatTester(PluginCompatTesterConfig config){
this.config = config;
runner = config.getExternalMaven() == null ? new InternalMavenRunner() : new ExternalMavenRunner(config.getExternalMaven());
}
private SortedSet<MavenCoordinates> generateCoreCoordinatesToTest(UpdateSite.Data data, PluginCompatReport previousReport){
SortedSet<MavenCoordinates> coreCoordinatesToTest = null;
// If parent GroupId/Artifact are not null, this will be fast : we will only test
// against 1 core coordinate
if(config.getParentGroupId() != null && config.getParentArtifactId() != null){
coreCoordinatesToTest = new TreeSet<MavenCoordinates>();
// If coreVersion is not provided in PluginCompatTesterConfig, let's use latest core
// version used in update center
String coreVersion = config.getParentVersion()==null?data.core.version:config.getParentVersion();
MavenCoordinates coreArtifact = new MavenCoordinates(config.getParentGroupId(), config.getParentArtifactId(), coreVersion);
coreCoordinatesToTest.add(coreArtifact);
// If parent groupId/artifactId are null, we'll test against every already recorded
// cores
} else if(config.getParentGroupId() == null && config.getParentArtifactId() == null){
coreCoordinatesToTest = previousReport.getTestedCoreCoordinates();
} else {
throw new IllegalStateException("config.parentGroupId and config.parentArtifactId should either be both null or both filled\n" +
"config.parentGroupId="+String.valueOf(config.getParentGroupId())+", config.parentArtifactId="+String.valueOf(config.getParentArtifactId()));
}
return coreCoordinatesToTest;
}
public PluginCompatReport testPlugins()
throws PlexusContainerException, IOException, MavenEmbedderException
{
PluginCompatTesterHooks pcth = new PluginCompatTesterHooks(config.getHookPrefixes());
// Providing XSL Stylesheet along xml report file
if(config.reportFile != null){
if(config.isProvideXslReport()){
File xslFilePath = PluginCompatReport.getXslFilepath(config.reportFile);
FileUtils.copyStreamToFile(new RawInputStreamFacade(getXslTransformerResource().getInputStream()), xslFilePath);
}
}
DataImporter dataImporter = null;
if(config.getGaeBaseUrl() != null && config.getGaeSecurityToken() != null){
dataImporter = new DataImporter(config.getGaeBaseUrl(), config.getGaeSecurityToken());
}
HashMap<String,String> pluginGroupIds = new HashMap<String, String>(); // Used to track real plugin groupIds from WARs
UpdateSite.Data data = config.getWar() == null ? extractUpdateCenterData() : scanWAR(config.getWar(), pluginGroupIds);
PluginCompatReport report = PluginCompatReport.fromXml(config.reportFile);
SortedSet<MavenCoordinates> testedCores = config.getWar() == null ? generateCoreCoordinatesToTest(data, report) : coreVersionFromWAR(data);
MavenRunner.Config mconfig = new MavenRunner.Config();
mconfig.userSettingsFile = config.getM2SettingsFile();
// TODO REMOVE
mconfig.userProperties.put( "failIfNoTests", "false" );
mconfig.userProperties.put( "argLine", "-XX:MaxPermSize=128m" );
String mavenPropertiesFilePath = this.config.getMavenPropertiesFile();
if ( StringUtils.isNotBlank( mavenPropertiesFilePath )) {
File file = new File (mavenPropertiesFilePath);
if (file.exists()) {
FileInputStream fileInputStream = null;
try {
fileInputStream = new FileInputStream( file );
Properties properties = new Properties( );
properties.load( fileInputStream );
for (Map.Entry<Object,Object> entry : properties.entrySet()) {
mconfig.userProperties.put((String) entry.getKey(), (String) entry.getValue());
}
} finally {
IOUtils.closeQuietly( fileInputStream );
}
} else {
System.out.println("File " + mavenPropertiesFilePath + " not exists" );
}
}
SCMManagerFactory.getInstance().start();
for(MavenCoordinates coreCoordinates : testedCores){
System.out.println("Starting plugin tests on core coordinates : "+coreCoordinates.toString());
for (Plugin plugin : data.plugins.values()) {
if(config.getIncludePlugins()==null || config.getIncludePlugins().contains(plugin.name.toLowerCase())){
PluginInfos pluginInfos = new PluginInfos(plugin.name, plugin.version, plugin.url);
if(config.getExcludePlugins()!=null && config.getExcludePlugins().contains(plugin.name.toLowerCase())){
System.out.println("Plugin "+plugin.name+" is in excluded plugins => test skipped !");
continue;
}
String errorMessage = null;
TestStatus status = null;
MavenCoordinates actualCoreCoordinates = coreCoordinates;
PluginRemoting remote;
if (localCheckoutProvided() && onlyOnePluginIncluded()) {
remote = new PluginRemoting(new File(config.getLocalCheckoutDir(), "pom.xml"));
} else {
remote = new PluginRemoting(plugin.url);
}
PomData pomData;
try {
pomData = remote.retrievePomData();
System.out.println("detected parent POM " + pomData.parent.toGAV());
if ((pomData.parent.groupId.equals(PluginCompatTesterConfig.DEFAULT_PARENT_GROUP)
&& pomData.parent.artifactId.equals(PluginCompatTesterConfig.DEFAULT_PARENT_ARTIFACT)
|| pomData.parent.groupId.equals("org.jvnet.hudson.plugins"))
&& coreCoordinates.version.matches("1[.][0-9]+[.][0-9]+")
&& new VersionNumber(coreCoordinates.version).compareTo(new VersionNumber("1.485")) < 0) { // TODO unless 1.480.3+
System.out.println("Cannot test against " + coreCoordinates.version + " due to lack of deployed POM for " + coreCoordinates.toGAV());
actualCoreCoordinates = new MavenCoordinates(coreCoordinates.groupId, coreCoordinates.artifactId, coreCoordinates.version.replaceFirst("[.][0-9]+$", ""));
}
} catch (Throwable t) {
status = TestStatus.INTERNAL_ERROR;
errorMessage = t.getMessage();
pomData = null;
}
if(!config.isSkipTestCache() && report.isCompatTestResultAlreadyInCache(pluginInfos, actualCoreCoordinates, config.getTestCacheTimeout(), config.getCacheThresholStatus())){
System.out.println("Cache activated for plugin "+pluginInfos.pluginName+" => test skipped !");
continue; // Don't do anything : we are in the cached interval ! :-)
}
List<String> warningMessages = new ArrayList<String>();
if (errorMessage == null) {
try {
TestExecutionResult result = testPluginAgainst(actualCoreCoordinates, plugin, mconfig, pomData, data.plugins, pluginGroupIds, pcth);
// If no PomExecutionException, everything went well...
status = TestStatus.SUCCESS;
warningMessages.addAll(result.pomWarningMessages);
} catch (PomExecutionException e) {
if(!e.succeededPluginArtifactIds.contains("maven-compiler-plugin")){
status = TestStatus.COMPILATION_ERROR;
} else if(!e.succeededPluginArtifactIds.contains("maven-surefire-plugin")){
status = TestStatus.TEST_FAILURES;
} else { // Can this really happen ???
status = TestStatus.SUCCESS;
}
errorMessage = e.getErrorMessage();
warningMessages.addAll(e.getPomWarningMessages());
} catch (Error e){
// Rethrow the error ... something is wrong !
throw e;
} catch (Throwable t){
status = TestStatus.INTERNAL_ERROR;
errorMessage = t.getMessage();
}
}
File buildLogFile = createBuildLogFile(config.reportFile, plugin.name, plugin.version, actualCoreCoordinates);
String buildLogFilePath = "";
if(buildLogFile.exists()){
buildLogFilePath = createBuildLogFilePathFor(pluginInfos.pluginName, pluginInfos.pluginVersion, actualCoreCoordinates);
}
PluginCompatResult result = new PluginCompatResult(actualCoreCoordinates, status, errorMessage, warningMessages, buildLogFilePath);
report.add(pluginInfos, result);
// Adding result to GAE
if(dataImporter != null){
dataImporter.importPluginCompatResult(result, pluginInfos, config.reportFile.getParentFile());
// TODO: import log files
}
if(config.reportFile != null){
if(!config.reportFile.exists()){
FileUtils.fileWrite(config.reportFile.getAbsolutePath(), "");
}
report.save(config.reportFile);
}
} else {
System.out.println("Plugin "+plugin.name+" not in included plugins => test skipped !");
}
}
}
// Generating HTML report if needed
if(config.reportFile != null){
if(config.isGenerateHtmlReport()){
generateHtmlReportFile();
}
}
return report;
}
private void generateHtmlReportFile() throws IOException {
Source xmlSource = new StreamSource(config.reportFile);
Source xsltSource = new StreamSource(getXslTransformerResource().getInputStream());
Result result = new StreamResult(PluginCompatReport.getHtmlFilepath(config.reportFile));
TransformerFactory factory = TransformerFactory.newInstance();
Transformer transformer = null;
try {
transformer = factory.newTransformer(xsltSource);
transformer.transform(xmlSource, result);
} catch (TransformerException e) {
throw new RuntimeException(e);
}
}
private static ClassPathResource getXslTransformerResource(){
return new ClassPathResource("resultToReport.xsl");
}
private static File createBuildLogFile(File reportFile, String pluginName, String pluginVersion, MavenCoordinates coreCoords){
return new File(reportFile.getParentFile().getAbsolutePath()
+"/"+createBuildLogFilePathFor(pluginName, pluginVersion, coreCoords));
}
private static String createBuildLogFilePathFor(String pluginName, String pluginVersion, MavenCoordinates coreCoords){
return String.format("logs/%s/v%s_against_%s_%s_%s.log", pluginName, pluginVersion, coreCoords.groupId, coreCoords.artifactId, coreCoords.version);
}
private TestExecutionResult testPluginAgainst(MavenCoordinates coreCoordinates, Plugin plugin, MavenRunner.Config mconfig, PomData pomData, Map<String,Plugin> otherPlugins, Map<String, String> pluginGroupIds, PluginCompatTesterHooks pcth)
throws PluginSourcesUnavailableException, PomTransformationException, PomExecutionException, IOException
{
System.out.println(String.format("%n%n%n%n%n"));
System.out.println(String.format("
System.out.println(String.format("
System.out.println(String.format("##%n## Starting to test plugin %s v%s%n## against %s%n##", plugin.name, plugin.version, coreCoordinates));
System.out.println(String.format("
System.out.println(String.format("
System.out.println(String.format("%n%n%n%n%n"));
File pluginCheckoutDir = new File(config.workDirectory.getAbsolutePath()+"/"+plugin.name+"/");
try {
// Run any precheckout hooks
Map<String, Object> beforeCheckout = new HashMap<String, Object>();
beforeCheckout.put("pluginName", plugin.name);
beforeCheckout.put("plugin", plugin);
beforeCheckout.put("pomData", pomData);
beforeCheckout.put("config", config);
beforeCheckout.put("runCheckout", true);
beforeCheckout = pcth.runBeforeCheckout(beforeCheckout);
if(beforeCheckout.get("executionResult") != null) { // Check if the hook returned a result
return (TestExecutionResult)beforeCheckout.get("executionResult");
} else if((boolean)beforeCheckout.get("runCheckout")) {
if(beforeCheckout.get("checkoutDir") != null){
pluginCheckoutDir = (File)beforeCheckout.get("checkoutDir");
}
if(pluginCheckoutDir.exists()){
System.out.println("Deleting working directory "+pluginCheckoutDir.getAbsolutePath());
FileUtils.deleteDirectory(pluginCheckoutDir);
}
pluginCheckoutDir.mkdir();
System.out.println("Created plugin checkout dir : "+pluginCheckoutDir.getAbsolutePath());
if (localCheckoutProvided()) {
if (!onlyOnePluginIncluded()) {
throw new RuntimeException("You specified a local clone but did not choose only one plugin to execute PCT against it");
}
                    // Note: even up-to-date versions of org.apache.commons.io.FileUtils seem to not handle
                    // symbolic links properly, so the Plexus FileUtils is used for the copy below
FileUtils.copyDirectoryStructure(config.getLocalCheckoutDir(), pluginCheckoutDir);
} else {
// These hooks could redirect the SCM, skip checkout (if multiple plugins use the same preloaded repo)
System.out.println("Checking out from SCM connection URL : " + pomData.getConnectionUrl() + " (" + plugin.name + "-" + plugin.version + ")");
ScmManager scmManager = SCMManagerFactory.getInstance().createScmManager();
ScmRepository repository = scmManager.makeScmRepository(pomData.getConnectionUrl());
CheckOutScmResult result = scmManager.checkOut(repository, new ScmFileSet(pluginCheckoutDir), new ScmTag(plugin.name + "-" + plugin.version));
if (!result.isSuccess()) {
throw new RuntimeException(result.getProviderMessage() + " || " + result.getCommandOutput());
}
}
} else {
// If the plugin exists in a different directory (multimodule plugins)
if (beforeCheckout.get("pluginDir") != null) {
pluginCheckoutDir = (File)beforeCheckout.get("checkoutDir");
}
System.out.println("The plugin has already been checked out, likely due to a multimodule situation. Continue.");
}
} catch (ComponentLookupException e) {
System.err.println("Error : " + e.getMessage());
throw new PluginSourcesUnavailableException("Problem while creating ScmManager !", e);
} catch (Exception e) {
System.err.println("Error : " + e.getMessage());
throw new PluginSourcesUnavailableException("Problem while checking out plugin sources!", e);
}
File buildLogFile = createBuildLogFile(config.reportFile, plugin.name, plugin.version, coreCoordinates);
FileUtils.forceMkdir(buildLogFile.getParentFile()); // Creating log directory
FileUtils.fileWrite(buildLogFile.getAbsolutePath(), ""); // Creating log file
        // Run the BeforeCompileHooks
Map<String, Object> beforeCompile = new HashMap<String, Object>();
beforeCompile.put("pluginName", plugin.name);
beforeCompile.put("plugin", plugin);
beforeCompile.put("pluginDir", pluginCheckoutDir);
beforeCompile.put("pomData", pomData);
beforeCompile.put("config", config);
beforeCompile.put("core", coreCoordinates);
Map<String, Object> hookInfo = pcth.runBeforeCompilation(beforeCompile);
boolean ranCompile = hookInfo.containsKey(PluginCompatTesterHookBeforeCompile.OVERRIDE_DEFAULT_COMPILE) ? (boolean) hookInfo.get(PluginCompatTesterHookBeforeCompile.OVERRIDE_DEFAULT_COMPILE) : false;
try {
// First build against the original POM.
// This defends against source incompatibilities (which we do not care about for this purpose);
// and ensures that we are testing a plugin binary as close as possible to what was actually released.
// We also skip potential javadoc execution to avoid general test failure.
if (!ranCompile) {
runner.run(mconfig, pluginCheckoutDir, buildLogFile, "clean", "process-test-classes", "-Dmaven.javadoc.skip");
}
ranCompile = true;
// Then transform the POM and run tests against that.
// You might think that it would suffice to run e.g.
// (2.15+ required for ${maven.test.dependency.excludes} and ${maven.test.additionalClasspath} to be honored from CLI)
// but it does not work; there are lots of linkage errors as some things are expected to be in the test classpath which are not.
            // Much simpler to use the parent POM to set up the test classpath.
MavenPom pom = new MavenPom(pluginCheckoutDir);
try {
addSplitPluginDependencies(plugin.name, mconfig, pluginCheckoutDir, pom, otherPlugins, pluginGroupIds, coreCoordinates.version);
} catch (Exception x) {
x.printStackTrace();
pomData.getWarningMessages().add(Functions.printThrowable(x));
// but continue
}
List<String> args = new ArrayList<String>();
args.add("--define=maven.test.redirectTestOutputToFile=false");
args.add("--define=concurrency=1");
args.add("hpi:resolve-test-dependencies");
args.add("hpi:test-hpl");
args.add("surefire:test");
// Run preexecution hooks
Map<String, Object> forExecutionHooks = new HashMap<String, Object>();
forExecutionHooks.put("pluginName", plugin.name);
forExecutionHooks.put("args", args);
forExecutionHooks.put("pomData", pomData);
forExecutionHooks.put("pom", pom);
forExecutionHooks.put("coreCoordinates", coreCoordinates);
forExecutionHooks.put("config", config);
pcth.runBeforeExecution(forExecutionHooks);
runner.run(mconfig, pluginCheckoutDir, buildLogFile, ((List<String>)forExecutionHooks.get("args")).toArray(new String[args.size()]));
return new TestExecutionResult(((PomData)forExecutionHooks.get("pomData")).getWarningMessages());
}catch(PomExecutionException e){
PomExecutionException e2 = new PomExecutionException(e);
e2.getPomWarningMessages().addAll(pomData.getWarningMessages());
if (ranCompile) {
// So the status is considered to be TEST_FAILURES not COMPILATION_ERROR:
e2.succeededPluginArtifactIds.add("maven-compiler-plugin");
}
throw e2;
}
}
private boolean localCheckoutProvided() {
return config.getLocalCheckoutDir() != null && config.getLocalCheckoutDir().exists();
}
private boolean onlyOnePluginIncluded() {
return config.getIncludePlugins() != null && config.getIncludePlugins().size() == 1;
}
private UpdateSite.Data extractUpdateCenterData(){
URL url = null;
String jsonp = null;
try {
url = new URL(config.updateCenterUrl);
jsonp = IOUtils.toString(url.openStream());
}catch(IOException e){
throw new RuntimeException("Invalid update center url : "+config.updateCenterUrl, e);
}
String json = jsonp.substring(jsonp.indexOf('(')+1,jsonp.lastIndexOf(')'));
UpdateSite us = new UpdateSite(DEFAULT_SOURCE_ID, url.toExternalForm());
return newUpdateSiteData(us, JSONObject.fromObject(json));
}
/**
* Scans through a WAR file, accumulating plugin information
* @param war WAR to scan
     * @param pluginGroupIds map from plugin name to groupId when set in the manifest; MUTATED DURING EXECUTION
* @return Update center data
* @throws IOException
*/
private UpdateSite.Data scanWAR(File war, Map<String, String> pluginGroupIds) throws IOException {
JSONObject top = new JSONObject();
top.put("id", DEFAULT_SOURCE_ID);
JSONObject plugins = new JSONObject();
JarFile jf = new JarFile(war);
if (pluginGroupIds == null) {
pluginGroupIds = new HashMap<String, String>();
}
try {
Enumeration<JarEntry> entries = jf.entries();
while (entries.hasMoreElements()) {
JarEntry entry = entries.nextElement();
String name = entry.getName();
Matcher m = Pattern.compile(JENKINS_CORE_FILE_REGEX).matcher(name);
if (m.matches()) {
if (top.has("core")) {
throw new IOException(">1 jenkins-core.jar in " + war);
}
top.put("core", new JSONObject().accumulate("name", "core").accumulate("version", m.group(1)).accumulate("url", ""));
}
m = Pattern.compile("WEB-INF/(?:optional-)?plugins/([^/.]+)[.][hj]pi").matcher(name);
if (m.matches()) {
JSONObject plugin = new JSONObject().accumulate("url", "");
InputStream is = jf.getInputStream(entry);
try {
JarInputStream jis = new JarInputStream(is);
try {
Manifest manifest = jis.getManifest();
String shortName = manifest.getMainAttributes().getValue("Short-Name");
if (shortName == null) {
shortName = manifest.getMainAttributes().getValue("Extension-Name");
if (shortName == null) {
shortName = m.group(1);
}
}
plugin.put("name", shortName);
pluginGroupIds.put(shortName, manifest.getMainAttributes().getValue("Group-Id"));
String version = manifest.getMainAttributes().getValue("Plugin-Version");
// Remove extra build information from the version number
final Matcher matcher = Pattern.compile("^(.+-SNAPSHOT)(.+)$").matcher(version);
if (matcher.matches()) {
version = matcher.group(1);
}
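                        // Illustrative note (added comment; the version string is hypothetical): a manifest
                        // value such as "1.5-SNAPSHOT (private-2015-01-01-user)" is reduced by the regex
                        // above to "1.5-SNAPSHOT" before being stored in the update-center JSON.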
plugin.put("version", version);
plugin.put("url", "jar:" + war.toURI() + "!/" + name);
JSONArray dependenciesA = new JSONArray();
String dependencies = manifest.getMainAttributes().getValue("Plugin-Dependencies");
if (dependencies != null) {
// e.g. matrix-auth:1.0.2;resolution:=optional,credentials:1.8.3;resolution:=optional
for (String pair : dependencies.split(",")) {
boolean optional = pair.endsWith("resolution:=optional");
String[] nameVer = pair.replace(";resolution:=optional", "").split(":");
assert nameVer.length == 2;
dependenciesA.add(new JSONObject().accumulate("name", nameVer[0]).accumulate("version", nameVer[1]).accumulate("optional", String.valueOf(optional)));
}
}
plugin.accumulate("dependencies", dependenciesA);
plugins.put(shortName, plugin);
} finally {
jis.close();
}
} finally {
is.close();
}
}
}
} finally {
jf.close();
}
top.put("plugins", plugins);
if (!top.has("core")) {
throw new IOException("no jenkins-core.jar in " + war);
}
System.out.println("Scanned contents of " + war + ": " + top);
return newUpdateSiteData(new UpdateSite(DEFAULT_SOURCE_ID, null), top);
}
private SortedSet<MavenCoordinates> coreVersionFromWAR(UpdateSite.Data data) {
SortedSet<MavenCoordinates> result = new TreeSet<MavenCoordinates>();
result.add(new MavenCoordinates(PluginCompatTesterConfig.DEFAULT_PARENT_GROUP, PluginCompatTesterConfig.DEFAULT_PARENT_ARTIFACT, data.core.version));
return result;
}
private UpdateSite.Data newUpdateSiteData(UpdateSite us, JSONObject jsonO) throws RuntimeException {
try {
Constructor<UpdateSite.Data> dataConstructor = UpdateSite.Data.class.getDeclaredConstructor(UpdateSite.class, JSONObject.class);
dataConstructor.setAccessible(true);
return dataConstructor.newInstance(us, jsonO);
}catch(Exception e){
throw new RuntimeException("UpdateSite.Data instanciation problems", e);
}
}
private void addSplitPluginDependencies(String thisPlugin, MavenRunner.Config mconfig, File pluginCheckoutDir, MavenPom pom, Map<String,Plugin> otherPlugins, Map<String, String> pluginGroupIds, String coreVersion) throws PomExecutionException, IOException {
File tmp = File.createTempFile("dependencies", ".log");
VersionNumber coreDep = null;
Map<String,VersionNumber> pluginDeps = new HashMap<String,VersionNumber>();
Map<String,VersionNumber> pluginDepsTest = new HashMap<String,VersionNumber>();
try {
runner.run(mconfig, pluginCheckoutDir, tmp, "dependency:resolve");
Reader r = new FileReader(tmp);
try {
BufferedReader br = new BufferedReader(r);
Pattern p = Pattern.compile("\\[INFO\\] ([^:]+):([^:]+):([a-z-]+):(([^:]+):)?([^:]+):(provided|compile|runtime|system)");
Pattern p2 = Pattern.compile("\\[INFO\\] ([^:]+):([^:]+):([a-z-]+):(([^:]+):)?([^:]+):(test)");
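                // Illustrative note (added comment; the artifact is hypothetical): a resolved-dependency line like
                //   [INFO] org.jenkins-ci.plugins:credentials:jar:2.1.18:compile
                // matches pattern p (group 1 = groupId, group 2 = artifactId, group 6 = version); the same
                // coordinates with scope "test" would match p2 instead.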
String line;
while ((line = br.readLine()) != null) {
Matcher m = p.matcher(line);
Matcher m2 = p2.matcher(line);
String groupId;
String artifactId;
VersionNumber version;
if (!m.matches() && !m2.matches()) {
continue;
} else if (m.matches()) {
groupId = m.group(1);
artifactId = m.group(2);
try {
version = new VersionNumber(m.group(6));
} catch (IllegalArgumentException x) {
// OK, some other kind of dep, just ignore
continue;
}
} else { //m2.matches()
groupId = m2.group(1);
artifactId = m2.group(2);
try {
version = new VersionNumber(m2.group(6));
} catch (IllegalArgumentException x) {
// OK, some other kind of dep, just ignore
continue;
}
}
if (groupId.equals("org.jenkins-ci.main") && artifactId.equals("jenkins-core")) {
coreDep = version;
} else if (groupId.equals("org.jenkins-ci.plugins")) {
if(m2.matches()) {
pluginDepsTest.put(artifactId, version);
} else {
pluginDeps.put(artifactId, version);
}
} else if (groupId.equals("org.jenkins-ci.main") && artifactId.equals("maven-plugin")) {
if(m2.matches()) {
pluginDepsTest.put(artifactId, version);
} else {
pluginDeps.put(artifactId, version);
}
} else if (groupId.equals(pluginGroupIds.get(artifactId))) {
if(m2.matches()) {
pluginDepsTest.put(artifactId, version);
} else {
pluginDeps.put(artifactId, version);
}
}
}
} finally {
r.close();
}
} finally {
tmp.delete();
}
System.out.println("Analysis: coreDep=" + coreDep + " pluginDeps=" + pluginDeps + " pluginDepsTest=" + pluginDepsTest);
if (coreDep != null) {
// Synchronize with ClassicPluginStrategy.DETACHED_LIST:
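            // Added note, inferred from the parsing loop below: each entry is a colon-separated triple
            // "pluginId:core-version-where-detached:minimum-plugin-version"; pieces[1] is the split point
            // compared against the core dependency and pieces[2] the declared minimum plugin version.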
String[] splits = {
"maven-plugin:1.296:1.296",
"subversion:1.310:1.0",
"cvs:1.340:0.1",
"ant:1.430.*:1.0",
"javadoc:1.430.*:1.0",
"external-monitor-job:1.467.*:1.0",
"ldap:1.467.*:1.0",
"pam-auth:1.467.*:1.0",
"mailer:1.493.*:1.2",
"matrix-auth:1.535.*:1.0.2",
"windows-slaves:1.547.*:1.0",
"antisamy-markup-formatter:1.553.*:1.0",
"matrix-project:1.561.*:1.0",
"junit:1.577.*:1.0",
"bouncycastle-api:2.16.*:2.16.0",
"command-launcher:2.86.*:1.0",
};
// Synchronize with ClassicPluginStrategy.BREAK_CYCLES:
String[] exceptions = {
"script-security/matrix-auth",
"script-security/windows-slaves",
"script-security/antisamy-markup-formatter",
"script-security/matrix-project",
"script-security/bouncycastle-api",
"script-security/command-launcher",
"credentials/matrix-auth",
"credentials/windows-slaves"
};
Map<String,VersionNumber> toAdd = new HashMap<String,VersionNumber>();
Map<String,VersionNumber> toReplace = new HashMap<String,VersionNumber>();
Map<String,VersionNumber> toAddTest = new HashMap<String,VersionNumber>();
Map<String,VersionNumber> toReplaceTest = new HashMap<String,VersionNumber>();
for (String split : splits) {
String[] pieces = split.split(":");
String plugin = pieces[0];
if (Arrays.asList(exceptions).contains(thisPlugin + "/" + plugin)) {
System.out.println("Skipping implicit dep " + thisPlugin + " → " + plugin);
continue;
}
VersionNumber splitPoint = new VersionNumber(pieces[1].replace(".*", ""));
VersionNumber declaredMinimum = new VersionNumber(pieces[2]);
if (coreDep.compareTo(splitPoint) < 0 && new VersionNumber(coreVersion).compareTo(splitPoint) >=0 && !pluginDeps.containsKey(plugin)) {
Plugin bundledP = otherPlugins.get(plugin);
if (bundledP != null) {
VersionNumber bundledV;
try {
bundledV = new VersionNumber(bundledP.version);
} catch (NumberFormatException x) { // TODO apparently this does not handle `1.0-beta-1` and the like?!
System.out.println("Skipping unparseable dep on " + bundledP.name + ": " + bundledP.version);
continue;
}
if (bundledV.isNewerThan(declaredMinimum)) {
toAdd.put(plugin, bundledV);
continue;
}
}
toAdd.put(plugin, declaredMinimum);
}
}
List<String> convertFromTestDep = new ArrayList<String>();
checkDefinedDeps(pluginDeps, toAdd, toReplace, otherPlugins, new ArrayList<>(pluginDepsTest.keySet()), convertFromTestDep);
pluginDepsTest.putAll(difference(pluginDepsTest, toAdd));
pluginDepsTest.putAll(difference(pluginDepsTest, toReplace));
checkDefinedDeps(pluginDepsTest, toAddTest, toReplaceTest, otherPlugins);
// Could contain transitive dependencies which were part of the plugin's dependencies or to be added
toAddTest = difference(pluginDeps, toAddTest);
toAddTest = difference(toAdd, toAddTest);
if (!toAdd.isEmpty() || !toReplace.isEmpty() || !toAddTest.isEmpty() || !toReplaceTest.isEmpty()) {
System.out.println("Adding/replacing plugin dependencies for compatibility: " + toAdd + " " + toReplace + "\nFor test: " + toAddTest + " " + toReplaceTest);
pom.addDependencies(toAdd, toReplace, toAddTest, toReplaceTest, coreDep, pluginGroupIds, convertFromTestDep);
}
}
}
private void checkDefinedDeps(Map<String,VersionNumber> pluginList, Map<String,VersionNumber> adding, Map<String,VersionNumber> replacing, Map<String,Plugin> otherPlugins) {
checkDefinedDeps(pluginList, adding, replacing, otherPlugins, new ArrayList<String>(), null);
}
private void checkDefinedDeps(Map<String,VersionNumber> pluginList, Map<String,VersionNumber> adding, Map<String,VersionNumber> replacing, Map<String,Plugin> otherPlugins, List<String> inTest, List<String> toConvertFromTest) {
for (Map.Entry<String,VersionNumber> pluginDep : pluginList.entrySet()) {
String plugin = pluginDep.getKey();
Plugin bundledP = otherPlugins.get(plugin);
if (bundledP != null) {
VersionNumber bundledV = new VersionNumber(bundledP.version);
if (bundledV.isNewerThan(pluginDep.getValue())) {
assert !adding.containsKey(plugin);
replacing.put(plugin, bundledV);
}
// Also check any dependencies, so if we are upgrading cloudbees-folder, we also add an explicit dep on a bundled credentials.
for (Map.Entry<String,String> dependency : bundledP.dependencies.entrySet()) {
String depPlugin = dependency.getKey();
if (pluginList.containsKey(depPlugin)) {
continue; // already handled
}
Plugin depBundledP = otherPlugins.get(depPlugin);
if (depBundledP != null) {
updateAllDependents(plugin, depBundledP, pluginList, adding, replacing, otherPlugins, inTest, toConvertFromTest);
}
}
}
}
}
/**
* Search the dependents of a given plugin to determine if we need to use the bundled version.
     * This helps in cases where tests fail due to insufficiently new versions, as well as more
     * accurately representing the totality of upgraded plugins for provided WAR files.
*/
private void updateAllDependents(String parent, Plugin dependent, Map<String,VersionNumber> pluginList, Map<String,VersionNumber> adding, Map<String,VersionNumber> replacing, Map<String,Plugin> otherPlugins, List<String> inTest, List<String> toConvertFromTest) {
// Check if this exists with an undesired scope
String pluginName = dependent.name;
if (inTest.contains(pluginName)) {
// This is now required in the compile scope. For example: copyartifact's dependency matrix-project requires junit
System.out.println("Converting " + pluginName + " from the test scope since it was a dependency of " + parent);
toConvertFromTest.add(pluginName);
replacing.put(pluginName, new VersionNumber(dependent.version));
} else {
System.out.println("Adding " + pluginName + " since it was a dependency of " + parent);
adding.put(pluginName, new VersionNumber(dependent.version));
}
// Also check any dependencies
for (Map.Entry<String,String> dependency : dependent.dependencies.entrySet()) {
String depPlugin = dependency.getKey();
if (pluginList.containsKey(depPlugin)) {
continue; // already handled
}
Plugin depBundledP = otherPlugins.get(depPlugin);
if (depBundledP != null) {
updateAllDependents(pluginName, depBundledP, pluginList, adding, replacing, otherPlugins, inTest, toConvertFromTest);
}
}
}
/**
* Finds the difference of the given maps.
     * In set theory: toAdd - base
*
* @param base the left map; all returned items are not in this map
* @param toAdd the right map; all returned items are found in this map
*/
private Map<String, VersionNumber> difference(Map<String, VersionNumber> base, Map<String, VersionNumber> toAdd) {
Map<String, VersionNumber> diff = new HashMap<String, VersionNumber>();
for (Map.Entry<String,VersionNumber> adding : toAdd.entrySet()) {
if (!base.containsKey(adding.getKey())) {
diff.put(adding.getKey(), adding.getValue());
}
}
return diff;
}
}
|
package org.codehaus.modello.plugin.xdoc;
import org.codehaus.modello.ModelloException;
import org.codehaus.modello.ModelloRuntimeException;
import org.codehaus.modello.model.Model;
import org.codehaus.modello.model.ModelAssociation;
import org.codehaus.modello.model.ModelClass;
import org.codehaus.modello.model.ModelField;
import org.codehaus.modello.plugin.AbstractModelloGenerator;
import org.codehaus.modello.plugin.model.ModelClassMetadata;
import org.codehaus.modello.plugins.xml.XmlFieldMetadata;
import org.codehaus.plexus.util.xml.PrettyPrintXMLWriter;
import org.codehaus.plexus.util.xml.XMLWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.Set;
/**
* @author <a href="mailto:jason@modello.org">Jason van Zyl</a>
* @author <a href="mailto:emmanuel@venisse.net">Emmanuel Venisse</a>
* @version $Id$
*/
public class XdocGenerator
extends AbstractModelloGenerator
{
public void generate( Model model, Properties parameters )
throws ModelloException
{
initialize( model, parameters );
try
{
generateXdoc();
}
catch ( IOException ex )
{
throw new ModelloException( "Exception while generating XDoc.", ex );
}
}
private void generateXdoc()
throws IOException
{
Model objectModel = getModel();
String directory = getOutputDirectory().getAbsolutePath();
if ( isPackageWithVersion() )
{
directory += "/" + getGeneratedVersion();
}
File f = new File( directory, objectModel.getId() + ".xml" );
if ( !f.getParentFile().exists() )
{
f.getParentFile().mkdirs();
}
FileWriter writer = new FileWriter( f );
XMLWriter w = new PrettyPrintXMLWriter( writer );
writer.write( "<?xml version=\"1.0\"?>\n" );
w.startElement( "document" );
w.startElement( "properties" );
w.startElement( "title" );
w.writeText( objectModel.getName() );
w.endElement();
w.endElement();
// Body
w.startElement( "body" );
// Descriptor with links
w.startElement( "section" );
w.addAttribute( "name", objectModel.getName() );
w.startElement( "p" );
if ( objectModel.getDescription() != null )
{
w.writeMarkup( objectModel.getDescription() );
}
else
{
w.writeText( "No description." );
}
w.endElement();
w.startElement( "source" );
StringBuffer sb = new StringBuffer();
ModelClass root = objectModel.getClass( objectModel.getRoot( getGeneratedVersion() ), getGeneratedVersion() );
sb.append( getModelClassDescriptor( objectModel, root, null, 0 ) );
w.writeMarkup( "\n" + sb );
w.endElement();
// Element descriptors
// Traverse from root so "abstract" models aren't included
writeElementDescriptor( w, objectModel, root, null, new HashSet() );
w.endElement();
w.endElement();
w.endElement();
writer.flush();
writer.close();
}
private void writeElementDescriptor( XMLWriter w, Model objectModel, ModelClass modelClass, ModelField field,
Set written )
{
written.add( modelClass );
ModelClassMetadata metadata = (ModelClassMetadata) modelClass.getMetadata( ModelClassMetadata.ID );
String tagName;
if ( metadata == null || metadata.getTagName() == null )
{
if ( field == null )
{
tagName = uncapitalise( modelClass.getName() );
}
else
{
tagName = field.getName();
if ( field instanceof ModelAssociation )
{
ModelAssociation a = (ModelAssociation) field;
if ( ModelAssociation.MANY_MULTIPLICITY.equals( a.getMultiplicity() ) )
{
tagName = singular( tagName );
}
}
}
}
else
{
tagName = metadata.getTagName();
}
if ( field != null )
{
XmlFieldMetadata fieldMetadata = (XmlFieldMetadata) field.getMetadata( XmlFieldMetadata.ID );
if ( fieldMetadata != null )
{
if ( fieldMetadata.getAssociationTagName() != null )
{
tagName = fieldMetadata.getAssociationTagName();
}
else
{
if ( fieldMetadata.getTagName() != null )
{
tagName = fieldMetadata.getTagName();
}
}
}
}
w.startElement( "a" );
w.addAttribute( "name", "class_" + tagName );
w.endElement();
w.startElement( "subsection" );
w.addAttribute( "name", tagName );
w.startElement( "p" );
if ( modelClass.getDescription() != null )
{
w.writeMarkup( modelClass.getDescription() );
}
else
{
w.writeMarkup( "No description." );
}
w.endElement();
w.startElement( "table" );
w.startElement( "tr" );
w.startElement( "th" );
w.writeText( "Element" );
w.endElement();
w.startElement( "th" );
w.writeText( "Description" );
w.endElement();
w.endElement();
List fields = getFieldsForClass( objectModel, modelClass );
for ( Iterator j = fields.iterator(); j.hasNext(); )
{
ModelField f = (ModelField) j.next();
XmlFieldMetadata fieldMetadata = (XmlFieldMetadata) f.getMetadata( XmlFieldMetadata.ID );
w.startElement( "tr" );
w.startElement( "td" );
w.startElement( "code" );
boolean flatAssociation = f instanceof ModelAssociation
&& isClassInModel( ( (ModelAssociation) f ).getTo(), objectModel )
&& XmlFieldMetadata.LIST_STYLE_FLAT.equals( fieldMetadata.getListStyle() );
if ( flatAssociation )
{
ModelAssociation association = (ModelAssociation) f;
ModelClass associationModelClass = objectModel.getClass( association.getTo(), getGeneratedVersion() );
w.writeText( uncapitalise( associationModelClass.getName() ) );
}
else
{
w.writeText( f.getName() );
}
w.endElement();
w.endElement();
w.startElement( "td" );
if ( flatAssociation )
{
w.writeMarkup( "<b>List</b> " );
}
if ( f.getDescription() != null )
{
w.writeMarkup( f.getDescription() );
}
else
{
w.writeText( "No description." );
}
// Write the default value, if it exists.
// But only for fields that are not a ModelAssociation
if ( f.getDefaultValue() != null && !( f instanceof ModelAssociation ) )
{
w.writeText( " The default value is " );
w.startElement( "code" );
w.writeText( f.getDefaultValue() );
w.endElement();
w.writeText( "." );
}
w.endElement();
w.endElement();
}
w.endElement();
w.endElement();
for ( Iterator iter = fields.iterator(); iter.hasNext(); )
{
ModelField f = (ModelField) iter.next();
if ( f instanceof ModelAssociation && isClassInModel( ( (ModelAssociation) f ).getTo(), objectModel ) )
{
ModelAssociation association = (ModelAssociation) f;
ModelClass fieldModelClass = objectModel.getClass( association.getTo(), getGeneratedVersion() );
                if ( !written.contains( fieldModelClass ) )
{
writeElementDescriptor( w, objectModel, fieldModelClass, f, written );
}
}
}
}
private List getFieldsForClass( Model objectModel, ModelClass modelClass )
{
List fields = new ArrayList();
while ( modelClass != null )
{
fields.addAll( modelClass.getFields( getGeneratedVersion() ) );
String superClass = modelClass.getSuperClass();
if ( superClass != null )
{
modelClass = objectModel.getClass( superClass, getGeneratedVersion() );
}
else
{
modelClass = null;
}
}
return fields;
}
/**
* Return the child attribute fields of this class.
* @param objectModel global object model
* @param modelClass current class
* @return the list of attribute fields of this class
*/
private List getAttributeFieldsForClass( Model objectModel, ModelClass modelClass )
{
List attributeFields = new ArrayList();
while ( modelClass != null )
{
List allFields = modelClass.getFields( getGeneratedVersion() );
Iterator allFieldsIt = allFields.iterator();
while ( allFieldsIt.hasNext() )
{
ModelField field = (ModelField) allFieldsIt.next();
XmlFieldMetadata fieldMetadata = (XmlFieldMetadata) field.getMetadata( XmlFieldMetadata.ID );
if ( fieldMetadata.isAttribute() )
{
attributeFields.add( field );
}
}
String superClass = modelClass.getSuperClass();
if ( superClass != null )
{
modelClass = objectModel.getClass( superClass, getGeneratedVersion() );
}
else
{
modelClass = null;
}
}
return attributeFields;
}
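    // Added explanatory note (not in the original source): getModelClassDescriptor renders a nested,
    // link-annotated sketch of the element structure, e.g. <<a href="#class_model">model</a> name=..>
    // with child tags indented two spaces per depth level; the result is embedded in the <source>
    // block written by generateXdoc() above.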
private String getModelClassDescriptor( Model objectModel, ModelClass modelClass, ModelField field, int depth )
throws ModelloRuntimeException
{
StringBuffer sb = new StringBuffer();
for ( int i = 0; i < depth; i++ )
{
sb.append( " " );
}
ModelClassMetadata metadata = (ModelClassMetadata) modelClass.getMetadata( ModelClassMetadata.ID );
String tagName;
if ( metadata == null || metadata.getTagName() == null )
{
if ( field == null )
{
tagName = uncapitalise( modelClass.getName() );
}
else
{
tagName = field.getName();
if ( field instanceof ModelAssociation )
{
ModelAssociation a = (ModelAssociation) field;
if ( ModelAssociation.MANY_MULTIPLICITY.equals( a.getMultiplicity() ) )
{
tagName = singular( tagName );
}
}
}
}
else
{
tagName = metadata.getTagName();
}
if ( field != null )
{
XmlFieldMetadata fieldMetadata = (XmlFieldMetadata) field.getMetadata( XmlFieldMetadata.ID );
if ( fieldMetadata != null )
{
if ( fieldMetadata.getAssociationTagName() != null )
{
tagName = fieldMetadata.getAssociationTagName();
}
else
{
if ( fieldMetadata.getTagName() != null )
{
tagName = fieldMetadata.getTagName();
}
}
}
}
sb.append( "<<a href=\"#class_" ).append( tagName ).append( "\">" ).append( tagName );
sb.append( "</a>" );
List fields = getFieldsForClass( objectModel, modelClass );
List attributeFields = getAttributeFieldsForClass( objectModel, modelClass );
if ( attributeFields.size() > 0 )
{
for ( Iterator iter = attributeFields.iterator(); iter.hasNext(); )
{
ModelField f = (ModelField) iter.next();
sb.append( " " );
sb.append( uncapitalise( f.getName() ) ).append( "=.." );
}
sb.append( " " );
fields.removeAll( attributeFields );
}
if ( fields.size() > 0 )
{
sb.append( ">\n" );
for ( Iterator iter = fields.iterator(); iter.hasNext(); )
{
ModelField f = (ModelField) iter.next();
XmlFieldMetadata fieldMetadata = (XmlFieldMetadata) f.getMetadata( XmlFieldMetadata.ID );
ModelClass fieldModelClass;
if ( f instanceof ModelAssociation && isClassInModel( ( (ModelAssociation) f ).getTo(), objectModel ) )
{
ModelAssociation association = (ModelAssociation) f;
if ( XmlFieldMetadata.LIST_STYLE_FLAT.equals( fieldMetadata.getListStyle() ) )
{
fieldModelClass = objectModel.getClass( association.getTo(), getGeneratedVersion() );
sb.append( getModelClassDescriptor( objectModel, fieldModelClass, f, depth + 1 ) );
}
else
{
if ( ModelAssociation.MANY_MULTIPLICITY.equals( association.getMultiplicity() ) )
{
depth++;
for ( int i = 0; i < depth; i++ )
{
sb.append( " " );
}
sb.append( "<" ).append( uncapitalise( association.getName() ) ).append( ">\n" );
}
fieldModelClass = objectModel.getClass( association.getTo(), getGeneratedVersion() );
sb.append( getModelClassDescriptor( objectModel, fieldModelClass, f, depth + 1 ) );
if ( ModelAssociation.MANY_MULTIPLICITY.equals( association.getMultiplicity() ) )
{
for ( int i = 0; i < depth; i++ )
{
sb.append( " " );
}
sb.append( "</" ).append( uncapitalise( association.getName() ) ).append( ">\n" );
                            depth--;
}
}
}
else
{
for ( int i = 0; i < depth + 1; i++ )
{
sb.append( " " );
}
sb.append( "<" ).append( uncapitalise( f.getName() ) ).append( "/>\n" );
}
}
for ( int i = 0; i < depth; i++ )
{
sb.append( " " );
}
sb.append( "</" ).append( tagName ).append( ">\n" );
}
else
{
sb.append( "/>\n" );
}
return sb.toString();
}
}
|
package com.phonegap;
import android.provider.Contacts.ContactMethods;
import android.provider.Contacts.People;
import android.util.Log;
import android.webkit.WebView;
import android.app.Activity;
import android.content.ContentResolver;
import android.net.Uri;
import android.database.Cursor;
import android.database.sqlite.SQLiteException;
@SuppressWarnings("deprecation")
public class ContactManager {
public class ContactTriplet
{
public String name = "";
public String email = "";
public String phone = "";
}
private static final String LOG_TAG = "Contact Query";
Activity mApp;
WebView mView;
Uri mPeople = android.provider.Contacts.People.CONTENT_URI;
Uri mPhone = android.provider.Contacts.Phones.CONTENT_URI;
Uri mEmail = android.provider.Contacts.ContactMethods.CONTENT_URI;
ContactManager(Activity app, WebView view)
{
mApp = app;
mView = view;
}
    // This is to add backwards compatibility to the OLD Contacts API
public void getContactsAndSendBack()
{
String[] projection = new String[] {
People._ID,
People.NAME,
People.NUMBER,
People.PRIMARY_EMAIL_ID
};
try{
Cursor myCursor = mApp.managedQuery(mPeople, projection,
null, null , People.NAME + " ASC");
processResults(myCursor, true);
}
catch (SQLiteException ex)
{
Log.d(LOG_TAG, ex.getMessage());
}
}
public void search(String name, String npa, String email)
{
if (email.length() > 0)
searchByEmail(email);
else
searchPeople(name, npa);
}
private void searchByEmail(String email)
{
String[] projection = new String[] {
ContactMethods._ID,
ContactMethods.DATA,
ContactMethods.KIND,
ContactMethods.PERSON_ID
};
String[] variables = new String[] {
email
};
try{
Cursor myCursor = mApp.managedQuery(mEmail, projection,
"contact_methods." + ContactMethods.DATA + " = ?" + "AND contact_methods.kind = 1", variables , ContactMethods.DATA + " ASC");
getMethodData(myCursor);
}
catch (SQLiteException ex)
{
Log.d(LOG_TAG, ex.getMessage());
}
}
private void searchPeople(String name, String number)
{
String conditions = "";
if (name.length() == 0)
{
name = "%";
conditions += People.NAME + " LIKE ? AND ";
}
else
{
conditions += People.NAME + " = ? AND ";
}
if (number.length() == 0)
number = "%";
else
{
number = number.replace('+', '%');
number = number.replace('.', '%');
number = number.replace('-', '%');
}
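        // Illustrative note (added comment; the number is hypothetical): an input such as
        // "+1-555.0100" becomes "%1%555%0100", so the LIKE clause below matches the number
        // regardless of the separators stored in the contacts provider.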
conditions += People.NUMBER + " LIKE ? ";
String[] projection = new String[] {
People._ID,
People.NAME,
People.NUMBER,
People.PRIMARY_EMAIL_ID
};
String[] variables = new String[] {
name, number
};
try{
Cursor myCursor = mApp.managedQuery(mPeople, projection,
conditions, variables , People.NAME + " ASC");
processResults(myCursor, false);
}
catch (SQLiteException ex)
{
Log.d(LOG_TAG, ex.getMessage());
}
}
private void processResults(Cursor cur, boolean all){
if (cur.moveToFirst()) {
String name;
String phoneNumber;
String email_id;
String email;
int nameColumn = cur.getColumnIndex(People.NAME);
int phoneColumn = cur.getColumnIndex(People.NUMBER);
int emailIdColumn = cur.getColumnIndex(People.PRIMARY_EMAIL_ID);
do {
// Get the field values
name = cur.getString(nameColumn);
phoneNumber = cur.getString(phoneColumn);
email_id = cur.getString(emailIdColumn);
if (email_id != null && email_id.length() > 0)
email = getEmail(email_id);
else
email = "";
// Code for backwards compatibility with the OLD Contacts API
if (all)
mView.loadUrl("javascript:navigator.ContactManager.droidAddContact('" + name + "','" + phoneNumber + "','" + email +"')");
else
mView.loadUrl("javascript:navigator.Contacts.droidFoundContact('" + name + "','" + phoneNumber + "','" + email +"')");
} while (cur.moveToNext());
if (all)
mView.loadUrl("javascript:navigator.ContactManager.droidDone()");
else
mView.loadUrl("javascript:navigator.Contacts.droidDone();");
}
else
{
if(all)
mView.loadUrl("javascript:navigator.ContactManager.fail()");
else
mView.loadUrl("javascript:navigator.Contacts.fail('None found!')");
}
}
private void getMethodData(Cursor cur)
{
ContactTriplet data = new ContactTriplet();
String id;
String email;
if (cur.moveToFirst()) {
int idColumn = cur.getColumnIndex(ContactMethods._ID);
int emailColumn = cur.getColumnIndex(ContactMethods.DATA);
do {
// Get the field values
id = cur.getString(idColumn);
email = cur.getString(emailColumn);
data = getContactData(id);
if(data != null)
{
data.email = email;
mView.loadUrl("javascript:navigator.Contacts.droidFoundContact('" + data.name + "','" + data.phone + "','" + data.email +"')");
}
} while (cur.moveToNext());
mView.loadUrl("javascript:navigator.Contacts.droidDoneContacts();");
}
}
private ContactTriplet getContactData(String id) {
ContactTriplet data = null;
String[] projection = new String[] {
People._ID,
People.NAME,
People.NUMBER,
People.PRIMARY_EMAIL_ID
};
String[] variables = new String[] {
id
};
try{
Cursor myCursor = mApp.managedQuery(mPeople, projection,
People.PRIMARY_EMAIL_ID + " = ?", variables , People.NAME + " ASC");
data = getTriplet(myCursor);
}
catch (SQLiteException ex)
{
Log.d(LOG_TAG, ex.getMessage());
}
return data;
}
private ContactTriplet getTriplet(Cursor cur) {
ContactTriplet data = new ContactTriplet();
if (cur.moveToFirst()) {
int nameColumn = cur.getColumnIndex(People.NAME);
int numberColumn = cur.getColumnIndex(People.NUMBER);
do {
data.name = cur.getString(nameColumn);
data.phone = cur.getString(numberColumn);
} while (cur.moveToNext());
}
return data;
}
private String getEmailColumnData(Cursor cur)
{
String email = "";
if (cur != null && cur.moveToFirst()) {
int emailColumn = cur.getColumnIndex(ContactMethods.DATA);
do {
// Get the field values
email = cur.getString(emailColumn);
} while (cur.moveToNext());
}
return email;
}
private String getEmail(String id)
{
String email = "";
String[] projection = new String[] {
ContactMethods._ID,
ContactMethods.DATA,
ContactMethods.KIND
};
String[] variables = new String[] {
id
};
try
{
Cursor myCursor = mApp.managedQuery(mEmail, projection,
"contact_methods." + ContactMethods._ID + " = ?" + " AND contact_methods.kind = 1", variables , ContactMethods.DATA + " ASC");
email = getEmailColumnData(myCursor);
}
catch (SQLiteException ex)
{
Log.d(LOG_TAG, ex.getMessage());
}
return email;
}
}
|
package org.codehaus.modello.plugin.xdoc;
import org.codehaus.modello.ModelloException;
import org.codehaus.modello.ModelloRuntimeException;
import org.codehaus.modello.model.Model;
import org.codehaus.modello.model.ModelAssociation;
import org.codehaus.modello.model.ModelClass;
import org.codehaus.modello.model.ModelField;
import org.codehaus.modello.plugin.AbstractModelloGenerator;
import org.codehaus.modello.plugin.model.ModelClassMetadata;
import org.codehaus.plexus.util.xml.PrettyPrintXMLWriter;
import org.codehaus.plexus.util.xml.XMLWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
import java.util.HashSet;
/**
* @author <a href="mailto:jason@modello.org">Jason van Zyl</a>
* @author <a href="mailto:emmanuel@venisse.net">Emmanuel Venisse</a>
* @version $Id$
*/
public class XdocGenerator
extends AbstractModelloGenerator
{
public void generate( Model model, Properties parameters )
throws ModelloException
{
initialize( model, parameters );
try
{
generateXdoc();
}
catch ( IOException ex )
{
throw new ModelloException( "Exception while generating XDoc.", ex );
}
}
private void generateXdoc()
throws IOException
{
Model objectModel = getModel();
String directory = getOutputDirectory().getAbsolutePath();
if ( isPackageWithVersion() )
{
directory += "/" + getGeneratedVersion();
}
File f = new File( directory, objectModel.getId() + ".xml" );
if ( !f.getParentFile().exists() )
{
f.getParentFile().mkdirs();
}
FileWriter writer = new FileWriter( f );
XMLWriter w = new PrettyPrintXMLWriter( writer );
writer.write( "<?xml version=\"1.0\"?>\n" );
w.startElement( "document" );
w.startElement( "properties" );
w.startElement( "title" );
w.writeText( objectModel.getName() );
w.endElement();
w.endElement();
// Body
w.startElement( "body" );
// Descriptor with links
w.startElement( "section" );
w.addAttribute( "name", objectModel.getName() );
w.startElement( "p" );
w.writeMarkup( objectModel.getDescription() );
w.endElement();
w.startElement( "source" );
StringBuffer sb = new StringBuffer();
ModelClass root = objectModel.getClass( objectModel.getRoot( getGeneratedVersion() ), getGeneratedVersion() );
sb.append( getModelClassDescriptor( objectModel, root, 0 ) );
w.writeMarkup( "\n" + sb.toString() );
w.endElement();
// Element descriptors
// Traverse from root so "abstract" models aren't included
writeElementDescriptor( w, objectModel, root, new HashSet() );
w.endElement();
w.endElement();
w.endElement();
writer.flush();
writer.close();
}
private void writeElementDescriptor( XMLWriter w, Model objectModel, ModelClass modelClass, Set written )
{
written.add( modelClass );
ModelClassMetadata metadata = (ModelClassMetadata) modelClass.getMetadata( ModelClassMetadata.ID );
String tagName;
if ( metadata == null || metadata.getTagName() == null )
{
tagName = uncapitalise( modelClass.getName() );
}
else
{
tagName = metadata.getTagName();
}
w.startElement( "a" );
w.addAttribute( "name", "class_" + modelClass.getName() );
w.endElement();
w.startElement( "subsection" );
w.addAttribute( "name", tagName );
if ( modelClass.getDescription() != null )
{
w.startElement( "p" );
w.writeMarkup( modelClass.getDescription() );
w.endElement();
}
w.startElement( "table" );
w.startElement( "tr" );
w.startElement( "th" );
w.writeText( "Element" );
w.endElement();
w.startElement( "th" );
w.writeText( "Description" );
w.endElement();
w.endElement();
for ( Iterator j = modelClass.getFields( getGeneratedVersion() ).iterator(); j.hasNext(); )
{
ModelField field = (ModelField) j.next();
w.startElement( "tr" );
w.startElement( "td" );
w.startElement( "code" );
w.writeText( field.getName() );
w.endElement();
w.endElement();
w.startElement( "td" );
if ( field.getDescription() != null )
{
w.writeMarkup( field.getDescription() );
}
else
{
w.writeText( "No description." );
}
w.endElement();
w.endElement();
}
w.endElement();
w.endElement();
for ( Iterator iter = modelClass.getFields( getGeneratedVersion() ).iterator(); iter.hasNext(); )
{
ModelField field = (ModelField) iter.next();
if ( field instanceof ModelAssociation &&
isClassInModel( ( (ModelAssociation) field ).getTo(), objectModel ) )
{
ModelAssociation association = (ModelAssociation) field;
ModelClass fieldModelClass = objectModel.getClass( association.getTo(), getGeneratedVersion() );
if ( !written.contains( fieldModelClass ) )
{
writeElementDescriptor( w, objectModel, fieldModelClass, written );
}
}
}
}
private String getModelClassDescriptor( Model objectModel, ModelClass modelClass, int depth )
throws ModelloRuntimeException
{
StringBuffer sb = new StringBuffer();
for ( int i = 0; i < depth; i++ )
{
sb.append( " " );
}
ModelClassMetadata metadata = (ModelClassMetadata) modelClass.getMetadata( ModelClassMetadata.ID );
String tagName;
if ( metadata == null || metadata.getTagName() == null )
{
tagName = uncapitalise( modelClass.getName() );
}
else
{
tagName = metadata.getTagName();
}
sb.append( "<<a href=\"#class_" + modelClass.getName() + "\">" + tagName );
if ( modelClass.getFields( getGeneratedVersion() ).size() > 0 )
{
sb.append( "</a>>\n" );
for ( Iterator iter = modelClass.getFields( getGeneratedVersion() ).iterator(); iter.hasNext(); )
{
ModelField field = (ModelField) iter.next();
ModelClass fieldModelClass = null;
if ( field instanceof ModelAssociation &&
isClassInModel( ( (ModelAssociation) field ).getTo(), objectModel ) )
{
ModelAssociation association = (ModelAssociation) field;
if ( ModelAssociation.MANY_MULTIPLICITY.equals( association.getMultiplicity() ) )
{
depth++;
for ( int i = 0; i < depth; i++ )
{
sb.append( " " );
}
sb.append( "<" + uncapitalise( association.getName() ) + ">\n" );
}
fieldModelClass = objectModel.getClass( association.getTo(), getGeneratedVersion() );
sb.append( getModelClassDescriptor( objectModel, fieldModelClass, depth + 1 ) );
if ( ModelAssociation.MANY_MULTIPLICITY.equals( association.getMultiplicity() ) )
{
for ( int i = 0; i < depth; i++ )
{
sb.append( " " );
}
sb.append( "</" + uncapitalise( association.getName() ) + ">\n" );
depth--;
}
}
else
{
for ( int i = 0; i < depth + 1; i++ )
{
sb.append( " " );
}
sb.append( "<" + uncapitalise( field.getName() ) + "/>\n" );
}
}
for ( int i = 0; i < depth; i++ )
{
sb.append( " " );
}
sb.append( "</" + tagName + ">\n" );
}
else
{
sb.append( "</a>/>\n" );
}
return sb.toString();
}
}
|
package com.sometrik.framework;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import com.android.trivialdrivesample.util.IabHelper;
import com.android.trivialdrivesample.util.IabHelper.IabAsyncInProgressException;
import com.android.trivialdrivesample.util.IabResult;
import com.android.trivialdrivesample.util.Inventory;
import com.android.trivialdrivesample.util.Purchase;
import android.app.ActionBar;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnCancelListener;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Typeface;
import android.text.Editable;
import android.text.Html;
import android.text.InputType;
import android.text.TextWatcher;
import android.text.method.LinkMovementMethod;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.Window;
import android.view.inputmethod.EditorInfo;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.PopupMenu;
import android.widget.PopupMenu.OnMenuItemClickListener;
import android.widget.PopupWindow;
public class NativeCommand {
private int internalId = 0;
private int childInternalId = 0;
private int value = 0;
private int flags = 0;
private String textValue = "";
private String textValue2 = "";
private CommandType command;
private String key;
private FrameWork frame;
private ArrayList<PopupMenu> menuList = new ArrayList<PopupMenu>();
private int rowNumber = -1;
private int columnNumber = -1;
private final int FLAG_PADDING_LEFT = 1;
private final int FLAG_PADDING_RIGHT = 2;
private final int FLAG_PADDING_TOP = 4;
private final int FLAG_PADDING_BOTTOM = 8;
private final int FLAG_PASSWORD = 16;
private final int FLAG_NUMERIC = 32;
private final int FLAG_HYPERLINK = 64;
private final int FLAG_USE_PURCHASES_API = 128;
public enum CommandType {
CREATE_PLATFORM,
CREATE_APPLICATION,
CREATE_BASICVIEW,
CREATE_FORMVIEW,
CREATE_OPENGL_VIEW,
CREATE_TEXTFIELD, // For viewing single value
CREATE_TEXTVIEW, // For viewing multiline text
CREATE_LISTVIEW, // For viewing lists
CREATE_GRIDVIEW, // For viewing tables
CREATE_BUTTON,
CREATE_SWITCH,
CREATE_PICKER, // called Spinner in Android
CREATE_LINEAR_LAYOUT,
CREATE_TABLE_LAYOUT,
CREATE_AUTO_COLUMN_LAYOUT,
CREATE_HEADING_TEXT,
CREATE_TEXT,
CREATE_DIALOG, // For future
CREATE_IMAGEVIEW,
CREATE_ACTION_SHEET,
CREATE_CHECKBOX,
CREATE_RADIO_GROUP,
CREATE_SEPARATOR,
CREATE_SLIDER,
CREATE_ACTIONBAR,
DELETE_ELEMENT,
SHOW_MESSAGE_DIALOG,
SHOW_INPUT_DIALOG,
SHOW_ACTION_SHEET,
LAUNCH_BROWSER,
POST_NOTIFICATION,
HISTORY_GO_BACK,
HISTORY_GO_FORWARD,
CLEAR, // Clears the contents of GridView
SET_INT_VALUE, // Sets value of radio groups, checkboxes and pickers
SET_TEXT_VALUE, // Sets value of textfields, labels and images
SET_INT_DATA,
SET_TEXT_DATA, // Sets the cell value of GridView
SET_LABEL, // Sets label for buttons and checkboxes
SET_ENABLED,
SET_READONLY,
SET_VISIBILITY,
SET_SHAPE, // Specifies the number of rows and columns in a GridView
SET_STYLE,
SET_ERROR,
FLUSH_VIEW, // Flushes GridView content
UPDATE_PREFERENCE,
ADD_OPTION,
ADD_COLUMN,
QUIT_APP,
// Timers
CREATE_TIMER,
// In-app purchases
LIST_PRODUCTS,
BUY_PRODUCT,
LIST_PURCHASES,
CONSUME_PURCHASE
}
public NativeCommand(FrameWork frame, int messageTypeId, int internalId, int childInternalId, int value, byte[] textValue, byte[] textValue2, int flags, int rowNumber, int columnNumber){
this.frame = frame;
command = CommandType.values()[messageTypeId];
this.internalId = internalId;
this.childInternalId = childInternalId;
this.value = value;
this.flags = flags;
this.rowNumber = rowNumber;
this.columnNumber = columnNumber;
if (textValue != null) {
this.textValue = new String(textValue, frame.getCharset());
}
if (textValue2 != null) {
this.textValue2 = new String(textValue2, frame.getCharset());
}
}
public NativeCommand(FrameWork frame, int messageTypeId, int internalId, int childInternalId, int value, byte[] textValue, byte[] textValue2, int flags){
this.frame = frame;
command = CommandType.values()[messageTypeId];
this.internalId = internalId;
this.childInternalId = childInternalId;
this.value = value;
this.flags = flags;
if (textValue != null) {
this.textValue = new String(textValue, frame.getCharset());
}
if (textValue2 != null) {
this.textValue2 = new String(textValue2, frame.getCharset());
}
}
public void apply(NativeCommandHandler view) {
System.out.println("Processing message " + command + " id: " + internalId + " Child id: " + getChildInternalId());
switch (command) {
case CREATE_FORMVIEW:
FWScrollView scrollView = new FWScrollView(frame, textValue);
scrollView.setId(getChildInternalId());
scrollView.setPadding(10, 10, 10, 10);
FrameWork.addToViewList(scrollView);
if (view == null){
System.out.println("view was null");
if (frame.getCurrentViewId() == 0){
scrollView.setValue(1);
}
} else {
view.addChild(scrollView);
}
break;
case CREATE_BASICVIEW:
case CREATE_LINEAR_LAYOUT:
FWLayout layout = createLinearLayout();
view.addChild(layout);
break;
case CREATE_AUTO_COLUMN_LAYOUT:{
FWAuto auto = new FWAuto(frame);
auto.setId(getChildInternalId());
FrameWork.addToViewList(auto);
view.addChild(auto);
}
break;
case CREATE_TABLE_LAYOUT:
FWTable table = createTableLayout(false);
view.addChild(table);
break;
case CREATE_BUTTON:
FWButton button = createButton();
view.addChild(button);
break;
case CREATE_PICKER:
FWPicker picker = createSpinner();
view.addChild(picker);
break;
case CREATE_SWITCH:
FWSwitch click = createSwitch();
view.addChild(click);
break;
case CLEAR:
//FWList clears list on 0
view.setValue(0);
break;
case CREATE_GRIDVIEW:
//TODO: still a debug implementation; replace with the real GridView handling
// FWLayout debugList = createDebugResultsScreen();
FWList debugList = new FWList(frame, new FWAdapter(frame, null));
debugList.setId(childInternalId);
FrameWork.addToViewList(debugList);
view.addChild(debugList);
break;
case CREATE_TIMER:
Timer timer = new Timer();
timer.schedule((new TimerTask(){
@Override
public void run() {
FrameWork.timerEvent(System.currentTimeMillis() / 1000, internalId, childInternalId);
}
}), value, value);
break;
case CREATE_CHECKBOX:
FWCheckBox checkBox = createCheckBox();
FrameWork.addToViewList(checkBox);
view.addChild(checkBox);
break;
case CREATE_OPENGL_VIEW:
frame.createNativeOpenGLView(childInternalId);
break;
case CREATE_TEXTVIEW:
FWEditText editTextView = createBigEditText();
view.addChild(editTextView);
break;
case CREATE_TEXTFIELD:
FWEditText editText = createEditText();
view.addChild(editText);
break;
case CREATE_RADIO_GROUP:
FWRadioGroup radioGroup = new FWRadioGroup(frame);
radioGroup.setId(childInternalId);
break;
case CREATE_HEADING_TEXT:
FWTextView headingText = createTextView(true);
view.addChild(headingText);
break;
case CREATE_TEXT:
FWTextView textView = createTextView(false);
view.addChild(textView);
break;
case CREATE_IMAGEVIEW:
ImageView imageView = createImageView();
view.addChild(imageView);
break;
case ADD_OPTION:
// Forward Command to FWPicker
view.addOption(getValue(), getTextValue());
break;
case ADD_COLUMN:
view.addOption(getValue(), getTextValue());
break;
case POST_NOTIFICATION:
frame.createNotification(getTextValue(), getTextValue2());
break;
case CREATE_APPLICATION:
frame.setAppId(getInternalId());
frame.setSharedPreferences(textValue);
if (isSet(FLAG_USE_PURCHASES_API)) {
System.out.println("Initializing purchaseHelper");
frame.initializePurchaseHelper(textValue2, new IabHelper.OnIabSetupFinishedListener() {
@Override
public void onIabSetupFinished(IabResult result) {
if (result.isSuccess()) {
System.out.println("PurchaseHelper successfully setup");
sendInventory(frame.getPurchaseHelperInventory());
} else {
System.out.println("PurchaseHelper failed to setup");
}
}
});
}
break;
case SET_INT_VALUE:
view.setValue(getValue());
break;
case SET_TEXT_VALUE:
view.setValue(textValue);
break;
case SET_TEXT_DATA:
view.addData(rowNumber, columnNumber, textValue);
break;
case SET_VISIBILITY:
if (value == 0){
view.setViewVisibility(false);
} else {
view.setViewVisibility(true);
}
break;
case SET_ENABLED:
view.setViewEnabled(value != 0);
break;
case SET_STYLE:
view.setStyle(textValue, textValue2);
break;
case SET_ERROR:
view.setError(value != 0, textValue);
break;
case LAUNCH_BROWSER:
frame.launchBrowser(getTextValue());
break;
case SHOW_MESSAGE_DIALOG:
showMessageDialog(textValue, textValue2);
break;
case SHOW_INPUT_DIALOG:
showInputDialog(textValue, textValue2);
break;
case CREATE_DIALOG:
FWPopupView popView = createDialogView();
FrameWork.addToViewList(popView);
break;
case CREATE_ACTION_SHEET:
createActionSheet();
break;
case CREATE_ACTIONBAR:
//TODO not everything is set
frame.requestWindowFeature(Window.FEATURE_ACTION_BAR);
ActionBar ab = frame.getActionBar();
ab.setTitle(textValue);
break;
case QUIT_APP:
// TODO
frame.finish();
break;
case UPDATE_PREFERENCE:
//Currently stores a String value under the given String key.
frame.getPreferencesEditor().putString(textValue, textValue2);
frame.getPreferencesEditor().apply();
break;
case DELETE_ELEMENT:
deleteElement(view, childInternalId);
break;
case BUY_PRODUCT:
try {
launchPurchase(textValue);
} catch (IabAsyncInProgressException e) {
e.printStackTrace();
System.out.println("Error on launchPurchase with message: " + e.getMessage());
}
break;
default:
System.out.println("Message couldn't be handled");
break;
}
}
private void createActionSheet(){
PopupMenu menu = new PopupMenu(frame, null);
menu.setOnMenuItemClickListener(new OnMenuItemClickListener(){
@Override
public boolean onMenuItemClick(MenuItem item) {
return false;
}
});
menuList.add(menu);
}
private FWTable createTableLayout(boolean autoSize){
FWTable table = new FWTable(frame);
table.setId(getChildInternalId());
if (autoSize){
table.setAutoSize(true);
} else {
Log.d("table", "ALERT " + value);
table.setColumnCount(value);
}
table.setStretchAllColumns(true);
table.setShrinkAllColumns(true);
FrameWork.addToViewList(table);
return table;
}
private FWSwitch createSwitch() {
FWSwitch click = new FWSwitch(frame);
click.setId(childInternalId);
if (!textValue.isEmpty()) {
click.setTextOn(textValue);
}
if (!textValue2.isEmpty()) {
click.setTextOff(textValue2);
}
click.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
frame.intChangedEvent(System.currentTimeMillis() / 1000.0, buttonView.getId(), isChecked ? 1 : 0);
}
});
FrameWork.addToViewList(click);
return click;
}
private void deleteElement(NativeCommandHandler parent, int childId) {
FrameWork.views.remove(childInternalId);
if (parent instanceof ViewGroup) {
ViewGroup group = (ViewGroup) parent;
int childCount = group.getChildCount();
for (int i = 0; i < childCount; i++) {
View view = group.getChildAt(i);
if (view.getId() == childInternalId) {
((ViewGroup) parent).removeViewAt(i);
break;
}
}
} else {
System.out.println("Deletion parent was not an instance of ViewGroup");
}
}
private ImageView createImageView() {
ImageView imageView = new ImageView(frame);
imageView.setId(childInternalId);
try {
InputStream is = frame.getAssets().open(textValue);
Bitmap bitmap = BitmapFactory.decodeStream(is);
imageView.setImageBitmap(bitmap);
return imageView;
} catch (IOException e) {
e.printStackTrace();
System.out.println("error loading asset file to imageView");
System.exit(1);
}
return null;
}
private FWLayout createLinearLayout() {
FWLayout layout = new FWLayout(frame);
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
// params.weight = 1.0f;
// params.gravity = Gravity.FILL;
// layout.setBaselineAligned(false);
layout.setLayoutParams(params);
layout.setId(getChildInternalId());
FrameWork.addToViewList(layout);
if (getValue() == 2) {
layout.setOrientation(LinearLayout.HORIZONTAL);
} else {
layout.setOrientation(LinearLayout.VERTICAL);
}
return layout;
}
private FWButton createButton() {
FWButton button = new FWButton(frame);
button.setId(getChildInternalId());
button.setText(getTextValue());
ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
button.setLayoutParams(params);
button.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
System.out.println("Java: my button was clicked with id " + getChildInternalId());
if (!FrameWork.transitionAnimation) {
frame.intChangedEvent(System.currentTimeMillis() / 1000.0, getChildInternalId(), 1);
}
}
});
FrameWork.addToViewList(button);
return button;
}
private FWEditText createEditText(){
final FWEditText editText = new FWEditText(frame);
editText.setId(getChildInternalId());
editText.setText(getTextValue());
editText.setMinWidth(80);
editText.setSingleLine();
editText.setImeOptions(EditorInfo.IME_ACTION_DONE);
if (isSet(FLAG_PASSWORD) && isSet(FLAG_NUMERIC)){
editText.setInputType(InputType.TYPE_NUMBER_VARIATION_PASSWORD);
} else if (isSet(FLAG_PASSWORD)) {
editText.setInputType(InputType.TYPE_TEXT_VARIATION_PASSWORD);
} else if (isSet(FLAG_NUMERIC)){
editText.setInputType(InputType.TYPE_CLASS_NUMBER);
}
editText.addTextChangedListener(new TextWatcher() {
public void afterTextChanged(Editable editable) {
String inputText = editable.toString();
byte[] b = inputText.getBytes(frame.getCharset());
frame.textChangedEvent(System.currentTimeMillis() / 1000.0, getChildInternalId(), b);
}
public void beforeTextChanged(CharSequence s, int start, int count, int after) {}
public void onTextChanged(CharSequence s, int start, int before, int count) {}
});
FrameWork.addToViewList(editText);
return editText;
}
private FWEditText createBigEditText() {
final FWEditText editText = new FWEditText(frame);
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
editText.setMinLines(4);
editText.setLayoutParams(params);
editText.setId(getChildInternalId());
editText.setText(getTextValue());
editText.setVerticalScrollBarEnabled(true);
editText.setMovementMethod(new ScrollingMovementMethod());
editText.addDelayedChangeListener(getChildInternalId());
FrameWork.addToViewList(editText);
return editText;
}
private FWPicker createSpinner(){
FWPicker picker = new FWPicker(frame);
picker.setId(getChildInternalId());
FrameWork.addToViewList(picker);
return picker;
}
private FWCheckBox createCheckBox() {
FWCheckBox checkBox = new FWCheckBox(frame);
checkBox.setPadding(0, 0, 10, 0);
checkBox.setId(childInternalId);
if (!textValue.isEmpty()) {
checkBox.setText(textValue);
}
checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton box, boolean isChecked) {
frame.intChangedEvent(System.currentTimeMillis() / 1000.0, childInternalId, isChecked ? 1 : 0);
}
});
return checkBox;
}
private FWTextView createTextView(boolean bolded) {
FWTextView textView = new FWTextView(frame);
textView.setId(getChildInternalId());
if (bolded) {
textView.setTypeface(null, Typeface.BOLD);
}
if (isSet(FLAG_HYPERLINK)) {
textView.setMovementMethod(LinkMovementMethod.getInstance());
String text = "<a href='" + textValue2 + "'>" + textValue + "</a>";
textView.setText(Html.fromHtml(text));
} else {
textView.setText(textValue);
}
FrameWork.addToViewList(textView);
return textView;
}
// Create dialog with user text input
private void showInputDialog(String title, String message) {
System.out.println("Creating input dialog");
AlertDialog.Builder builder;
builder = new AlertDialog.Builder(frame);
// Building an alert
builder.setTitle(title);
builder.setMessage(message);
builder.setCancelable(true);
final EditText input = new EditText(frame);
input.setInputType(InputType.TYPE_CLASS_TEXT);
builder.setView(input);
builder.setOnCancelListener(new OnCancelListener(){
@Override
public void onCancel(DialogInterface arg0) {
frame.endModal(System.currentTimeMillis() / 1000.0, 0, null);
}
});
// Negative button listener
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
frame.endModal(System.currentTimeMillis() / 1000.0, 0, null);
dialog.dismiss();
}
});
// Positive button listener
builder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
String inputText = String.valueOf(input.getText());
byte[] b = inputText.getBytes(frame.getCharset());
frame.endModal(System.currentTimeMillis() / 1000.0, 1, b);
dialog.dismiss();
}
});
// Create and show the alert
AlertDialog alert = builder.create();
alert.show();
}
private FWPopupView createDialogView(){
FWPopupView window = new FWPopupView(frame, childInternalId);
return window;
}
// create Message dialog
private void showMessageDialog(String title, String message) {
System.out.println("creating message dialog");
AlertDialog.Builder builder;
builder = new AlertDialog.Builder(frame);
// Building an alert
builder.setTitle(title);
builder.setMessage(message);
builder.setCancelable(true);
builder.setOnCancelListener(new OnCancelListener(){
@Override
public void onCancel(DialogInterface arg0) {
frame.endModal(System.currentTimeMillis() / 1000.0, 0, null);
}
});
// Positive button listener
builder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
frame.endModal(System.currentTimeMillis() / 1000.0, 1, null);
dialog.dismiss();
}
});
// Create and show the alert
AlertDialog alert = builder.create();
alert.show();
System.out.println("message dialog created");
}
private void launchPurchase(final String productId) throws IabAsyncInProgressException {
// Sku = product id from google account
frame.getPurchaseHelper().launchPurchaseFlow(frame, productId, IabHelper.ITEM_TYPE_INAPP, null, 1, new IabHelper.OnIabPurchaseFinishedListener() {
@Override
public void onIabPurchaseFinished(IabResult result, Purchase info) {
if (result.isSuccess()) {
System.out.println("Purchase of product id " + productId + " completed");
FrameWork.onPurchaseEvent(System.currentTimeMillis() / 1000.0, frame.getAppId(), info.getSku(), true, info.getPurchaseTime() / 1000.0);
// TODO
} else {
System.out.println("Purchase of product id " + productId + " failed");
// TODO
}
}
}, "");
}
private void sendInventory(Inventory inventory) {
List<Purchase> purchaseList = inventory.getAllPurchases();
System.out.println("getting purchase history. Purchase list size: " + purchaseList.size());
for (Purchase purchase : inventory.getAllPurchases()) {
FrameWork.onPurchaseEvent(System.currentTimeMillis() / 1000.0, frame.getAppId(), purchase.getSku(), false, purchase.getPurchaseTime() / 1000.0);
}
}
private boolean isSet(int flag) {
return (flags & flag) != 0;
}
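// Example (the flag constants come from the fields above; the combined value 48 is an
// assumption for illustration only): with flags == FLAG_PASSWORD | FLAG_NUMERIC (48),
// isSet(FLAG_PASSWORD) and isSet(FLAG_NUMERIC) return true, isSet(FLAG_HYPERLINK) returns false.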
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public int getInternalId() {
return internalId;
}
public int getChildInternalId() {
return childInternalId;
}
public String getTextValue() {
return textValue;
}
public String getTextValue2() {
return textValue2;
}
public CommandType getCommand() {
return command;
}
public int getValue() {
return value;
}
}
|
package org.codehaus.modello.plugin.xdoc;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import org.codehaus.modello.ModelloException;
import org.codehaus.modello.ModelloParameterConstants;
import org.codehaus.modello.ModelloRuntimeException;
import org.codehaus.modello.model.Model;
import org.codehaus.modello.model.ModelAssociation;
import org.codehaus.modello.model.ModelClass;
import org.codehaus.modello.model.ModelDefault;
import org.codehaus.modello.model.ModelField;
import org.codehaus.modello.model.Version;
import org.codehaus.modello.model.VersionRange;
import org.codehaus.modello.plugin.xdoc.metadata.XdocFieldMetadata;
import org.codehaus.modello.plugins.xml.AbstractXmlGenerator;
import org.codehaus.modello.plugins.xml.metadata.XmlAssociationMetadata;
import org.codehaus.modello.plugins.xml.metadata.XmlClassMetadata;
import org.codehaus.modello.plugins.xml.metadata.XmlFieldMetadata;
import org.codehaus.plexus.util.WriterFactory;
import org.codehaus.plexus.util.xml.PrettyPrintXMLWriter;
import org.codehaus.plexus.util.xml.XMLWriter;
/**
* @author <a href="mailto:jason@modello.org">Jason van Zyl</a>
* @author <a href="mailto:emmanuel@venisse.net">Emmanuel Venisse</a>
* @version $Id$
*/
public class XdocGenerator
extends AbstractXmlGenerator
{
private static final VersionRange DEFAULT_VERSION_RANGE = new VersionRange( "0.0.0+" );
private Version firstVersion = DEFAULT_VERSION_RANGE.getFromVersion();
private Version version = DEFAULT_VERSION_RANGE.getFromVersion();
public void generate( Model model, Properties parameters )
throws ModelloException
{
initialize( model, parameters );
if ( parameters.getProperty( ModelloParameterConstants.FIRST_VERSION ) != null )
{
firstVersion = new Version( parameters.getProperty( ModelloParameterConstants.FIRST_VERSION ) );
}
if ( parameters.getProperty( ModelloParameterConstants.VERSION ) != null )
{
version = new Version( parameters.getProperty( ModelloParameterConstants.VERSION ) );
}
try
{
generateXdoc( parameters );
}
catch ( IOException ex )
{
throw new ModelloException( "Exception while generating XDoc.", ex );
}
}
private void generateXdoc( Properties parameters )
throws IOException
{
Model objectModel = getModel();
File directory = getOutputDirectory();
if ( isPackageWithVersion() )
{
directory = new File( directory, getGeneratedVersion().toString() );
}
if ( !directory.exists() )
{
directory.mkdirs();
}
// we assume parameters not null
String xdocFileName = parameters.getProperty( ModelloParameterConstants.OUTPUT_XDOC_FILE_NAME );
File f = new File( directory, objectModel.getId() + ".xml" );
if ( xdocFileName != null )
{
f = new File( directory, xdocFileName );
}
Writer writer = WriterFactory.newXmlWriter( f );
XMLWriter w = new PrettyPrintXMLWriter( writer );
writer.write( "<?xml version=\"1.0\"?>\n" );
w.startElement( "document" );
w.startElement( "properties" );
w.startElement( "title" );
w.writeText( objectModel.getName() );
w.endElement();
w.endElement();
// Body
w.startElement( "body" );
w.startElement( "section" );
w.addAttribute( "name", objectModel.getName() );
w.startElement( "p" );
if ( objectModel.getDescription() != null )
{
w.writeMarkup( objectModel.getDescription() );
}
else
{
w.writeText( "No description." );
}
w.endElement();
// XML representation of the model with links
w.startElement( "source" );
ModelClass root = objectModel.getClass( objectModel.getRoot( getGeneratedVersion() ), getGeneratedVersion() );
w.writeMarkup( "\n" + getXmlDescriptor( root, null, 0 ) );
w.endElement();
// Element descriptors
// Traverse from root so "abstract" models aren't included
writeElementDescriptor( w, root, null, new HashSet() );
w.endElement();
w.endElement();
w.endElement();
writer.flush();
writer.close();
}
private void writeElementDescriptor( XMLWriter w, ModelClass modelClass, ModelAssociation association,
Set written )
{
writeElementDescriptor( w, modelClass, association, written, true );
}
private void writeElementDescriptor( XMLWriter w, ModelClass modelClass, ModelAssociation association,
Set written, boolean recursive )
{
written.add( modelClass );
String tagName = resolveTagName( modelClass, association );
w.startElement( "a" );
w.addAttribute( "name", "class_" + tagName );
w.endElement();
w.startElement( "subsection" );
w.addAttribute( "name", tagName );
w.startElement( "p" );
if ( modelClass.getDescription() != null )
{
w.writeMarkup( modelClass.getDescription() );
}
else
{
w.writeMarkup( "No description." );
}
w.endElement();
ModelField contentField = getContentField( getFieldsForClass( modelClass ) );
if (contentField != null)
{
w.startElement( "p" );
w.startElement( "b" );
w.writeText( "Element Content: " );
if ( contentField.getDescription() != null )
{
w.writeMarkup( contentField.getDescription() );
}
else
{
w.writeMarkup( "No description." );
}
w.endElement();
w.endElement();
}
List attributeFields = new ArrayList( getAttributeFieldsForClass( modelClass ) );
List elementFields = new ArrayList( getFieldsForClass( modelClass ) );
elementFields.removeAll( attributeFields );
generateFieldsTable( w, elementFields, true );
generateFieldsTable( w, attributeFields, false );
w.endElement();
for ( Iterator iter = getFieldsForClass( modelClass ).iterator(); iter.hasNext(); )
{
ModelField f = (ModelField) iter.next();
if ( isInnerAssociation( f ) && recursive )
{
ModelAssociation assoc = (ModelAssociation) f;
ModelClass fieldModelClass = getModel().getClass( assoc.getTo(), getGeneratedVersion() );
if ( !written.contains( f.getName() ) )
{
if ( ( modelClass.getName().equals( fieldModelClass.getName() ) )
&& ( modelClass.getPackageName().equals( fieldModelClass.getPackageName() ) ) )
{
writeElementDescriptor( w, fieldModelClass, assoc, written, false );
}
else
{
writeElementDescriptor( w, fieldModelClass, assoc, written );
}
}
}
}
}
private void generateFieldsTable( XMLWriter w, List fields, boolean elementFields )
{
if ( fields == null || fields.isEmpty() )
{
// skip empty table
return;
}
// skip if only one field and Content type
if ( fields.size() == 1 )
{
if ( "Content".equals( (( ModelField ) fields.get( 0 )).getType() ) )
{
return;
}
}
w.startElement( "table" );
w.startElement( "tr" );
w.startElement( "th" );
w.writeText( elementFields ? "Element" : "Attribute" );
w.endElement();
w.startElement( "th" );
w.writeText( "Type" );
w.endElement();
w.startElement( "th" );
w.writeText( "Description" );
w.endElement();
boolean showSinceColumn = version.greaterThan( firstVersion );
if ( showSinceColumn )
{
w.startElement( "th" );
w.writeText( "Since" );
w.endElement();
}
w.endElement();
for ( Iterator j = fields.iterator(); j.hasNext(); )
{
ModelField f = (ModelField) j.next();
if ( "Content".equals( f.getType() ) )
{
continue;
}
XmlFieldMetadata xmlFieldMetadata = (XmlFieldMetadata) f.getMetadata( XmlFieldMetadata.ID );
w.startElement( "tr" );
// Element/Attribute column
w.startElement( "td" );
w.startElement( "code" );
boolean flatAssociation = false;
if ( isInnerAssociation( f ) )
{
ModelAssociation assoc = (ModelAssociation) f;
ModelClass associationModelClass = getModel().getClass( assoc.getTo(), getGeneratedVersion() );
w.startElement( "a" );
w.addAttribute( "href", "#class_" + resolveTagName( associationModelClass, assoc ) );
if ( flatAssociation )
{
if ( xmlFieldMetadata.getTagName() != null )
{
w.writeText( uncapitalise( xmlFieldMetadata.getTagName() ) );
}
else
{
w.writeText( uncapitalise( associationModelClass.getName() ) );
}
}
else
{
w.writeText( f.getName() );
}
w.endElement();
}
else
{
w.writeText( resolveTagName( f, xmlFieldMetadata ) );
}
w.endElement(); // code
w.endElement();
// Type column
w.startElement( "td" );
w.startElement( "code" );
if ( f instanceof ModelAssociation )
{
ModelAssociation assoc = (ModelAssociation) f;
if ( assoc.isOneMultiplicity() )
{
w.writeText( assoc.getTo() );
}
else
{
w.writeText( assoc.getType().substring( "java.util.".length() ) );
if ( assoc.isGenericType() )
{
w.writeText( "<" + assoc.getTo() + ">" );
}
}
}
else
{
w.writeText( f.getType() );
}
w.endElement(); // code
w.endElement();
// Description column
w.startElement( "td" );
if ( flatAssociation )
{
w.writeMarkup( "<b>List</b> " );
}
if ( f.getDescription() != null )
{
w.writeMarkup( f.getDescription() );
}
else
{
w.writeText( "No description." );
}
// Write the default value, if it exists.
// But only for fields that are not a ModelAssociation
if ( f.getDefaultValue() != null && !( f instanceof ModelAssociation ) )
{
w.writeText( " The default value is " );
w.startElement( "code" );
w.writeText( f.getDefaultValue() );
w.endElement();
w.writeText( "." );
}
w.endElement();
// Since column
if ( showSinceColumn )
{
w.startElement( "td" );
if ( f.getVersionRange() != null )
{
Version fromVersion = f.getVersionRange().getFromVersion();
if ( fromVersion != null && fromVersion.greaterThan( firstVersion ) )
{
w.writeMarkup( fromVersion.toString() );
}
}
w.endElement();
}
w.endElement();
}
w.endElement();
}
private String getXmlDescriptor( ModelClass modelClass, ModelAssociation association, int depth )
{
return getXmlDescriptor( modelClass, association, depth, true );
}
/**
* Build the pretty tree describing the XML representation of the model. This method is recursive.
* @param modelClass the class for which the model is being printed
* @param association the association we are coming from (can be <code>null</code>)
* @param depth how deep we currently are (used for spacer indentation)
* @param recursive whether we are still in recursive mode or not
* @return the String representing the tree model
* @throws ModelloRuntimeException
*/
private String getXmlDescriptor( ModelClass modelClass, ModelAssociation association, int depth,
boolean recursive )
throws ModelloRuntimeException
{
StringBuffer sb = new StringBuffer();
appendSpacer( sb, depth );
String tagName = resolveTagName( modelClass, association );
sb.append( "<<a href=\"#class_" ).append( tagName ).append( "\">" ).append( tagName ).append( "</a>" );
List fields = getFieldsForClass( modelClass );
List attributeFields = getAttributeFieldsForClass( modelClass );
if ( attributeFields.size() > 0 )
{
for ( Iterator iter = attributeFields.iterator(); iter.hasNext(); )
{
ModelField f = (ModelField) iter.next();
XmlFieldMetadata xmlFieldMetadata = (XmlFieldMetadata) f.getMetadata( XmlFieldMetadata.ID );
sb.append( ' ' );
sb.append( resolveTagName( f, xmlFieldMetadata ) ).append( "=.." );
}
sb.append( ' ' );
fields.removeAll( attributeFields );
}
if ( fields.size() > 0 )
{
sb.append( ">\n" );
for ( Iterator iter = fields.iterator(); iter.hasNext(); )
{
ModelField f = (ModelField) iter.next();
XmlFieldMetadata xmlFieldMetadata = (XmlFieldMetadata) f.getMetadata( XmlFieldMetadata.ID );
XdocFieldMetadata xdocFieldMetadata = (XdocFieldMetadata) f.getMetadata( XdocFieldMetadata.ID );
if ( XdocFieldMetadata.BLANK.equals( xdocFieldMetadata.getSeparator() ) )
{
sb.append( '\n' );
}
if ( isInnerAssociation( f ) && recursive )
{
ModelAssociation assoc = (ModelAssociation) f;
boolean wrappedItems = false;
if ( assoc.isManyMultiplicity() )
{
XmlAssociationMetadata xmlAssociationMetadata =
(XmlAssociationMetadata) assoc.getAssociationMetadata( XmlAssociationMetadata.ID );
wrappedItems = xmlAssociationMetadata.isWrappedItems();
}
if ( wrappedItems )
{
depth++;
appendSpacer( sb, depth );
sb.append( "<" ).append( uncapitalise( assoc.getName() ) ).append( ">\n" );
}
ModelClass fieldModelClass = getModel().getClass( assoc.getTo(), getGeneratedVersion() );
if ( ( modelClass.getName().equals( fieldModelClass.getName() ) )
&& ( modelClass.getPackageName().equals( fieldModelClass.getPackageName() ) ) )
{
sb.append( getXmlDescriptor( fieldModelClass, assoc, depth + 1, false ) );
}
else
{
sb.append( getXmlDescriptor( fieldModelClass, assoc, depth + 1 ) );
}
if ( wrappedItems )
{
appendSpacer( sb, depth );
sb.append( "</" ).append( uncapitalise( assoc.getName() ) ).append( ">\n" );
depth--;
}
}
else if ( ModelDefault.PROPERTIES.equals( f.getType() ) )
{
String fieldTagName = resolveTagName( f, xmlFieldMetadata );
ModelAssociation assoc = (ModelAssociation) f;
XmlAssociationMetadata xmlAssociationMetadata =
(XmlAssociationMetadata) assoc.getAssociationMetadata( XmlAssociationMetadata.ID );
appendSpacer( sb, depth + 1 );
sb.append( "<" ).append( fieldTagName ).append( ">\n" );
if ( xmlAssociationMetadata.isMapExplode() )
{
appendSpacer( sb, depth + 2 );
sb.append( "<key/>\n" );
appendSpacer( sb, depth + 2 );
sb.append( "<value/>\n" );
}
else
{
appendSpacer( sb, depth + 2 );
sb.append( "<<i>key</i>><i>value</i></<i>key</i>>\n" );
}
appendSpacer( sb, depth + 1 );
sb.append( "<" ).append( fieldTagName ).append( "/>\n" );
}
else
{
appendSpacer( sb, depth + 1 );
sb.append( "<" ).append( resolveTagName( f, xmlFieldMetadata ) ).append( "/>\n" );
}
}
appendSpacer( sb, depth );
sb.append( "</" ).append( tagName ).append( ">\n" );
}
else
{
sb.append( "/>\n" );
}
return sb.toString();
}
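/*
 * Illustrative output shape for a hypothetical two-class model (the element names below
 * are assumptions, not taken from a real Modello descriptor); in the actual output each
 * opening tag is additionally wrapped in an <a href="#class_..."> anchor:
 *
 *   <model>
 *     <id/>
 *     <classes>
 *       <class>
 *         <name/>
 *       </class>
 *     </classes>
 *   </model>
 */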
/**
* Compute the tagName of a given class, living inside an association.
* @param modelClass the class whose tag name is being looked up
* @param association the association where this class is used
* @return the tag name to use
* @todo refactor to use resolveTagName helpers instead
*/
private String resolveTagName( ModelClass modelClass, ModelAssociation association )
{
XmlClassMetadata xmlClassMetadata = (XmlClassMetadata) modelClass.getMetadata( XmlClassMetadata.ID );
String tagName;
if ( xmlClassMetadata == null || xmlClassMetadata.getTagName() == null )
{
if ( association == null )
{
tagName = uncapitalise( modelClass.getName() );
}
else
{
tagName = association.getName();
if ( association.isManyMultiplicity() )
{
tagName = singular( tagName );
}
}
}
else
{
tagName = xmlClassMetadata.getTagName();
}
if ( association != null )
{
XmlFieldMetadata xmlFieldMetadata = (XmlFieldMetadata) association.getMetadata( XmlFieldMetadata.ID );
XmlAssociationMetadata xmlAssociationMetadata =
(XmlAssociationMetadata) association.getAssociationMetadata( XmlAssociationMetadata.ID );
if ( xmlFieldMetadata != null )
{
if ( xmlAssociationMetadata.getTagName() != null )
{
tagName = xmlAssociationMetadata.getTagName();
}
else if ( xmlFieldMetadata.getTagName() != null )
{
tagName = xmlFieldMetadata.getTagName();
}
}
}
return tagName;
}
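/*
 * Hedged example (hypothetical model names): for a class "Dependency" reached through a
 * many-multiplicity association named "dependencies", with no tag name overrides in the
 * XML metadata, the resolved tag name is the singular form of the association name,
 * i.e. "dependency".
 */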
/**
* Appends the required spacers to the given StringBuffer.
* @param sb where to append the spacers
* @param depth the depth of spacers to generate
*/
private static void appendSpacer( StringBuffer sb, int depth )
{
for ( int i = 0; i < depth; i++ )
{
sb.append( " " );
}
}
}
|
package org.gem.calc;
import java.io.File;
import java.rmi.RemoteException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.collections.Closure;
import org.opensha.commons.data.Site;
import org.opensha.commons.data.function.ArbitrarilyDiscretizedFunc;
import org.opensha.commons.data.function.DiscretizedFuncAPI;
import org.opensha.commons.geo.Location;
import org.opensha.commons.geo.LocationList;
import org.opensha.commons.geo.LocationUtils;
import org.opensha.commons.param.DoubleParameter;
import org.opensha.sha.calc.HazardCurveCalculator;
import org.opensha.sha.earthquake.EqkRupForecastAPI;
import org.opensha.sha.earthquake.ProbEqkRupture;
import org.opensha.sha.earthquake.ProbEqkSource;
import org.opensha.sha.earthquake.rupForecastImpl.GEM1.GEM1ERF;
import org.opensha.sha.imr.ScalarIntensityMeasureRelationshipAPI;
import org.opensha.sha.imr.param.OtherParams.StdDevTypeParam;
import org.opensha.sha.imr.param.SiteParams.DepthTo2pt5kmPerSecParam;
import org.opensha.sha.imr.param.SiteParams.Vs30_Param;
import org.opensha.sha.util.TectonicRegionType;
import static org.apache.commons.collections.CollectionUtils.forAllDo;
import org.gem.calc.DisaggregationResult;
import org.gem.hdf5.HDF5Util;
public class DisaggregationCalculator {
/**
* Dataset for the full disagg matrix (for HDF5 output).
*/
public static final String FULLDISAGGMATRIX = "fulldisaggmatrix";
private final Double[] latBinLims;
private final Double[] lonBinLims;
private final Double[] magBinLims;
private final Double[] epsilonBinLims;
private static final TectonicRegionType[] tectonicRegionTypes = TectonicRegionType.values();
/**
* Dimensions for matrices produced by this calculator, based on the length
* of the bin limits passed to the constructor.
*/
private final long[] dims;
/**
* Used for checking that bin edge lists are not null.
*/
private static final Closure notNull = new Closure()
{
public void execute(Object o)
{
if (o == null)
{
throw new IllegalArgumentException("Bin edges should not be null");
}
}
};
/**
* Used for checking that bin edge lists have a length greater than or equal
* to 2.
*/
private static final Closure lenGE2 = new Closure()
{
public void execute(Object o)
{
if (o instanceof Object[])
{
Object[] oArray = (Object[]) o;
if (oArray.length < 2)
{
throw new IllegalArgumentException("Bin edge arrays must have a length >= 2");
}
}
}
};
private static final Closure isSorted = new Closure()
{
public void execute(Object o) {
if (o instanceof Object[])
{
Object[] oArray = (Object[]) o;
Object[] sorted = Arrays.copyOf(oArray, oArray.length);
Arrays.sort(sorted);
if (!Arrays.equals(sorted, oArray))
{
throw new IllegalArgumentException("Bin edge arrays must be arranged in ascending order");
}
}
}
};
public DisaggregationCalculator(
Double[] latBinEdges,
Double[] lonBinEdges,
Double[] magBinEdges,
Double[] epsilonBinEdges)
{
List binEdges = Arrays.asList(latBinEdges, lonBinEdges, magBinEdges,
epsilonBinEdges);
// Validation for the bin edges:
forAllDo(binEdges, notNull);
forAllDo(binEdges, lenGE2);
forAllDo(binEdges, isSorted);
this.latBinLims = latBinEdges;
this.lonBinLims = lonBinEdges;
this.magBinLims = magBinEdges;
this.epsilonBinLims = epsilonBinEdges;
this.dims = new long[5];
this.dims[0] = this.latBinLims.length - 1;
this.dims[1] = this.lonBinLims.length - 1;
this.dims[2] = this.magBinLims.length - 1;
this.dims[3] = this.epsilonBinLims.length - 1;
this.dims[4] = tectonicRegionTypes.length;
}
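// Illustrative note (the edge counts are assumptions): given 4 latitude edges, 4 longitude
// edges, 3 magnitude edges and 3 epsilon edges, dims becomes
// {3, 3, 2, 2, tectonicRegionTypes.length}.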
/**
* Compute the full disaggregation matrix and write it to an HDF5 file.
*
* The result is a DisaggregationResult object, containing the GMV, the full
* 5D matrix, and the absolute path of the HDF5 file.
*
* @param path directory where the matrix should be written to
* @throws Exception
*/
public DisaggregationResult computeAndWriteMatrix(
double lat,
double lon,
GEM1ERF erf,
Map<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI> imrMap,
double poe,
List<Double> imls,
double vs30Value,
double depthTo2pt5KMPS,
String path) throws Exception
{
DisaggregationResult daResult = computeMatrix(lat, lon, erf, imrMap,
poe, imls, vs30Value, depthTo2pt5KMPS);
String fileName = UUID.randomUUID().toString() + ".h5";
String fullPath = new File(path, fileName).getAbsolutePath();
HDF5Util.writeMatrix(fullPath, FULLDISAGGMATRIX, dims, daResult.getMatrix());
daResult.setMatrixPath(fullPath);
return daResult;
}
/**
* Simplified computeMatrix method for convenient calls from the Python
* code.
*/
public DisaggregationResult computeMatrix(
double lat,
double lon,
GEM1ERF erf,
Map<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI> imrMap,
double poe,
List<Double> imls,
double vs30Value,
double depthTo2pt5KMPS)
{
Site site = new Site(new Location(lat, lon));
site.addParameter(new DoubleParameter(Vs30_Param.NAME, vs30Value));
site.addParameter(new DoubleParameter(DepthTo2pt5kmPerSecParam.NAME, depthTo2pt5KMPS));
DiscretizedFuncAPI hazardCurve = new ArbitrarilyDiscretizedFunc();
// initialize the hazard curve with the number of points == the number of IMLs
for (double d : imls)
{
hazardCurve.set(d, 0.0);
}
try
{
HazardCurveCalculator hcc = new HazardCurveCalculator();
hcc.getHazardCurve(hazardCurve, site, imrMap, erf);
}
catch (RemoteException e)
{
throw new RuntimeException(e);
}
double minMag = (Double) erf.getParameter(GEM1ERF.MIN_MAG_NAME).getValue();
return computeMatrix(site, erf, imrMap, poe, hazardCurve, minMag);
}
public DisaggregationResult computeMatrix(
Site site,
EqkRupForecastAPI erf,
Map<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI> imrMap,
double poe,
DiscretizedFuncAPI hazardCurve,
double minMag) // or just pass a List<Double> of IML values and compute the curve inside here?
{
assertPoissonian(erf);
assertNonZeroStdDev(imrMap);
double disaggMatrix[][][][][] =
new double[(int) dims[0]]
[(int) dims[1]]
[(int) dims[2]]
[(int) dims[3]]
[(int) dims[4]];
// value by which to normalize the final matrix
double totalAnnualRate = 0.0;
double gmv = getGMV(hazardCurve, poe);
for (int srcCnt = 0; srcCnt < erf.getNumSources(); srcCnt++)
{
ProbEqkSource source = erf.getSource(srcCnt);
double totProb = source.computeTotalProbAbove(minMag);
double totRate = -Math.log(1 - totProb);
TectonicRegionType trt = source.getTectonicRegionType();
ScalarIntensityMeasureRelationshipAPI imr = imrMap.get(trt);
imr.setSite(site);
imr.setIntensityMeasureLevel(gmv);
for(int rupCnt = 0; rupCnt < source.getNumRuptures(); rupCnt++)
{
ProbEqkRupture rupture = source.getRupture(rupCnt);
imr.setEqkRupture(rupture);
Location location = closestLocation(rupture.getRuptureSurface().getLocationList(), site.getLocation());
double lat, lon, mag, epsilon;
lat = location.getLatitude();
lon = location.getLongitude();
mag = rupture.getMag();
epsilon = imr.getEpsilon();
if (!allInRange(lat, lon, mag, epsilon))
{
// one or more of the parameters is out of range;
// skip this rupture
continue;
}
int[] binIndices = getBinIndices(lat, lon, mag, epsilon, trt);
double annualRate = totRate
* imr.getExceedProbability()
* rupture.getProbability();
disaggMatrix[binIndices[0]][binIndices[1]][binIndices[2]][binIndices[3]][binIndices[4]] += annualRate;
totalAnnualRate += annualRate;
} // end rupture loop
} // end source loop
disaggMatrix = normalize(disaggMatrix, totalAnnualRate);
DisaggregationResult daResult = new DisaggregationResult();
daResult.setGMV(gmv);
daResult.setMatrix(disaggMatrix);
return daResult;
}
public boolean allInRange(
double lat, double lon, double mag, double epsilon)
{
return inRange(this.latBinLims, lat)
&& inRange(this.lonBinLims, lon)
&& inRange(this.magBinLims, mag)
&& inRange(this.epsilonBinLims, epsilon);
}
public static void assertPoissonian(EqkRupForecastAPI erf)
{
for (int i = 0; i < erf.getSourceList().size(); i++)
{
ProbEqkSource source = erf.getSource(i);
if (!source.isPoissonianSource()) {
throw new RuntimeException(
"Sources must be Poissonian. (Non-Poissonian sources are not currently supported.)");
}
}
}
public static void assertNonZeroStdDev(
Map<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI> imrMap)
{
for (ScalarIntensityMeasureRelationshipAPI imr : imrMap.values())
{
String stdDevType =
(String) imr.getParameter(StdDevTypeParam.NAME).getValue();
if (stdDevType.equalsIgnoreCase(StdDevTypeParam.STD_DEV_TYPE_NONE))
{
throw new RuntimeException(
"Attenuation relationship must have a non-zero standard deviation.");
}
}
}
public static boolean inRange(Double[] bins, Double value)
{
return value >= bins[0] && value < bins[bins.length - 1];
}
/**
* Figure out which bins each input parameter fits into. The returned array
* of indices represents the 5-dimensional coordinates in the disaggregation
* matrix.
* @param lat
* @param lon
* @param mag
* @param epsilon
* @param trt
*/
public int[] getBinIndices(
double lat, double lon, double mag,
double epsilon, TectonicRegionType trt)
{
int[] result = new int[5];
result[0] = digitize(this.latBinLims, lat);
result[1] = digitize(this.lonBinLims, lon);
result[2] = digitize(this.magBinLims, mag);
result[3] = digitize(this.epsilonBinLims, epsilon);
result[4] = Arrays.asList(TectonicRegionType.values()).indexOf(trt);
return result;
}
public static int digitize(Double[] bins, Double value)
{
for (int i = 0; i < bins.length - 1; i++)
{
if (value >= bins[i] && value < bins[i + 1])
{
return i;
}
}
throw new IllegalArgumentException(
"Value '" + value + "' is outside the expected range");
}
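// Minimal usage sketch (the bin edges and the test value are assumptions; this helper is
// not part of the original calculator, it only illustrates digitize()):
private static int exampleDigitize()
{
Double[] magnitudeEdges = new Double[] { 5.0, 6.0, 7.0, 8.0 };
// 6.3 falls between the edges 6.0 and 7.0, so the returned bin index is 1
return digitize(magnitudeEdges, 6.3);
}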
/**
* Given a LocationList and a Location target, get the Location in the
* LocationList which is closest to the target Location.
* @param list
* @param target
* @return closest Location (in the input LocationList) to the target
*/
public static Location closestLocation(LocationList list, Location target)
{
Location closest = null;
double minDistance = Double.MAX_VALUE;
for (Location loc : list)
{
double horzDist = LocationUtils.horzDistance(loc, target);
double vertDist = LocationUtils.vertDistance(loc, target);
double distance = Math.sqrt(Math.pow(horzDist, 2) + Math.pow(vertDist, 2));
if (distance < minDistance)
{
minDistance = distance;
closest = loc;
}
}
return closest;
}
/**
* Extract a GMV (Ground Motion Value) for a given curve and PoE
* (Probability of Exceedance) value.
*
* IML (Intensity Measure Level) values make up the X-axis of the curve.
* IMLs are arranged in ascending order. The lower the IML value, the
* higher the PoE value (Y value) on the curve. Thus, it is assumed that
* hazard curves will always have a negative slope.
*
* If the input poe value is > the max Y value in the curve, clamp to the
* curve and return the X value corresponding to the max Y value (the first
* Y value).
* If the input poe value is < the min Y value in the curve, clamp to the
* curve and return the X value corresponding to the min Y value (the last
* Y value).
* Otherwise, interpolate an X value in the curve given the input PoE.
* @param hazardCurve
* @param poe Probability of Exceedance value
* @return GMV corresponding to the input poe
*/
public static Double getGMV(DiscretizedFuncAPI hazardCurve, double poe)
{
if (poe > hazardCurve.getY(0))
{
return hazardCurve.getX(0);
}
else if (poe < hazardCurve.getY(hazardCurve.getNum() - 1))
{
return hazardCurve.getX(hazardCurve.getNum() - 1);
}
else
{
return hazardCurve.getFirstInterpolatedX(poe);
}
}
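// Illustrative sketch (the curve points and PoE are assumptions; this helper is not part
// of the original calculator). It builds a tiny three-point hazard curve and reads back
// the ground motion value for a probability of exceedance of 0.5.
private static Double exampleGetGMV()
{
DiscretizedFuncAPI curve = new ArbitrarilyDiscretizedFunc();
curve.set(0.1, 0.9); // low IML, high PoE
curve.set(0.5, 0.5);
curve.set(1.0, 0.01); // high IML, low PoE
// 0.5 lies exactly on the curve, so interpolation returns the matching IML, 0.5
return getGMV(curve, 0.5);
}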
/**
* Normalize a 5D matrix by the given value.
* @param matrix
* @param normFactor
*/
public static double[][][][][] normalize(double[][][][][] matrix, double normFactor)
{
for (int i = 0; i < matrix.length; i++)
{
for (int j = 0; j < matrix[i].length; j++)
{
for (int k = 0; k < matrix[i][j].length; k++)
{
for (int l = 0; l < matrix[i][j][k].length; l++)
{
for (int m = 0; m < matrix[i][j][k][l].length; m++)
{
matrix[i][j][k][l][m] /= normFactor;
}
}
}
}
}
return matrix;
}
}
|
package com.aerospike.client;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import com.aerospike.client.Info.NameValueParser;
import com.aerospike.client.cluster.Cluster;
import com.aerospike.client.cluster.Node;
import com.aerospike.client.command.BatchExecutor;
import com.aerospike.client.command.Command;
import com.aerospike.client.command.FieldType;
import com.aerospike.client.command.ScanCommand;
import com.aerospike.client.command.ScanExecutor;
import com.aerospike.client.command.SingleCommand;
import com.aerospike.client.policy.ClientPolicy;
import com.aerospike.client.policy.Policy;
import com.aerospike.client.policy.QueryPolicy;
import com.aerospike.client.policy.ScanPolicy;
import com.aerospike.client.policy.WritePolicy;
import com.aerospike.client.query.IndexType;
import com.aerospike.client.query.QueryExecutor;
import com.aerospike.client.query.RecordSet;
import com.aerospike.client.query.Statement;
import com.aerospike.client.util.MsgPack;
import com.aerospike.client.util.Util;
/**
* Instantiate an <code>AerospikeClient</code> object to access an Aerospike
* database cluster and perform database operations.
* <p>
* This client is thread-safe. One client instance should be used per cluster.
* Multiple threads should share this cluster instance.
* <p>
* Your application uses this class API to perform database operations such as
* writing and reading records, and selecting sets of records. Write operations
* include specialized functionality such as append/prepend and arithmetic
* addition.
* <p>
* Each record may have multiple bins, unless the Aerospike server nodes are
* configured as "single-bin". In "multi-bin" mode, partial records may be
* written or read by specifying the relevant subset of bins.
*/
public class AerospikeClient {
// Member variables.
private Cluster cluster;
// Constructors
/**
* Initialize Aerospike client.
* If the host connection succeeds, the client will:
* <p>
* - Add host to the cluster map <br>
* - Request host's list of other nodes in cluster <br>
* - Add these nodes to cluster map <br>
* <p>
* If the connection succeeds, the client is ready to process database requests.
* If the connection fails, the cluster will remain in a disconnected state
* until the server is activated.
*
* @param hostname host name
* @param port host port
* @throws AerospikeException if host connection fails
*/
public AerospikeClient(String hostname, int port) throws AerospikeException {
this(new ClientPolicy(), new Host(hostname, port));
}
/**
* Initialize Aerospike client.
* The client policy is used to set defaults and size internal data structures.
* If the host connection succeeds, the client will:
* <p>
* - Add host to the cluster map <br>
* - Request host's list of other nodes in cluster <br>
* - Add these nodes to cluster map <br>
* <p>
* If the connection succeeds, the client is ready to process database requests.
* If the connection fails and the policy's failIfNotConnected is true, a connection
* exception will be thrown. Otherwise, the cluster will remain in a disconnected state
* until the server is activated.
*
* @param policy client configuration parameters, pass in null for defaults
* @param hostname host name
* @param port host port
* @throws AerospikeException if host connection fails
*/
public AerospikeClient(ClientPolicy policy, String hostname, int port) throws AerospikeException {
this(policy, new Host(hostname, port));
}
/**
* Initialize Aerospike client with suitable hosts to seed the cluster map.
* The client policy is used to set defaults and size internal data structures.
* For each host connection that succeeds, the client will:
* <p>
* - Add host to the cluster map <br>
* - Request host's list of other nodes in cluster <br>
* - Add these nodes to cluster map <br>
* <p>
* In most cases, only one host is necessary to seed the cluster. The remaining hosts
* are added as future seeds in case of a complete network failure.
* <p>
* If one connection succeeds, the client is ready to process database requests.
* If all connections fail and the policy's failIfNotConnected is true, a connection
* exception will be thrown. Otherwise, the cluster will remain in a disconnected state
* until the server is activated.
*
* @param policy client configuration parameters, pass in null for defaults
* @param hosts array of potential hosts to seed the cluster
* @throws AerospikeException if all host connections fail
*/
public AerospikeClient(ClientPolicy policy, Host... hosts) throws AerospikeException {
if (policy == null) {
policy = new ClientPolicy();
}
cluster = new Cluster(policy, hosts);
if (policy.failIfNotConnected && ! cluster.isConnected()) {
throw new AerospikeException.Connection("Failed to connect to host(s): " + Arrays.toString(hosts));
}
}
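/*
 * Minimal connection sketch (assumes a reachable node at localhost:3000; host and
 * port are illustrative only):
 *
 *   AerospikeClient client = new AerospikeClient("localhost", 3000);
 *   try {
 *       if (client.isConnected()) {
 *           // perform database operations
 *       }
 *   }
 *   finally {
 *       client.close();
 *   }
 */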
// Compatibility Layer Initialization
/**
* Compatibility layer constructor. Do not use.
*/
protected AerospikeClient() {
}
/**
* Compatibility layer host initialization. Do not use.
*/
protected final void addServer(String hostname, int port) throws AerospikeException {
Host[] hosts = new Host[] {new Host(hostname, port)};
// If cluster has already been initialized, add hosts to existing cluster.
if (cluster != null) {
cluster.addSeeds(hosts);
return;
}
cluster = new Cluster(new ClientPolicy(), hosts);
}
// Cluster Connection Management
/**
* Close all client connections to database server nodes.
*/
public final void close() {
cluster.close();
}
/**
* Determine if we are ready to talk to the database server cluster.
*
* @return <code>true</code> if cluster is ready,
* <code>false</code> if cluster is not ready
*/
public final boolean isConnected() {
return cluster.isConnected();
}
/**
* Return array of active server nodes in the cluster.
*
* @return array of active nodes
*/
public final Node[] getNodes() {
return cluster.getNodes();
}
/**
* Return list of active server node names in the cluster.
*
* @return list of active node names
*/
public final List<String> getNodeNames() {
Node[] nodes = cluster.getNodes();
ArrayList<String> names = new ArrayList<String>(nodes.length);
for (Node node : nodes) {
names.add(node.getName());
}
return names;
}
// Write Record Operations
/**
* Write record bin(s).
* The policy specifies the transaction timeout, record expiration and how the transaction is
* handled when the record already exists.
*
* @param policy write configuration parameters, pass in null for defaults
* @param key unique record identifier
* @param bins array of bin name/value pairs
* @throws AerospikeException if write fails
*/
public final void put(WritePolicy policy, Key key, Bin... bins) throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
command.write(policy, Operation.Type.WRITE, bins);
}
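/*
 * Usage sketch for put(); namespace, set, key and bin names are illustrative only.
 *
 *   Key key = new Key("test", "demo", "user1");
 *   Bin name = new Bin("name", "Alice");
 *   Bin age = new Bin("age", 31);
 *   // a null write policy uses defaults
 *   client.put(null, key, name, age);
 */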
// String Operations
/**
* Append bin string values to existing record bin values.
* The policy specifies the transaction timeout, record expiration and how the transaction is
* handled when the record already exists.
* This call only works for string values.
*
* @param policy write configuration parameters, pass in null for defaults
* @param key unique record identifier
* @param bins array of bin name/value pairs
* @throws AerospikeException if append fails
*/
public final void append(WritePolicy policy, Key key, Bin... bins) throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
command.write(policy, Operation.Type.APPEND, bins);
}
/**
* Prepend bin string values to existing record bin values.
* The policy specifies the transaction timeout, record expiration and how the transaction is
* handled when the record already exists.
* This call works only for string values.
*
* @param policy write configuration parameters, pass in null for defaults
* @param key unique record identifier
* @param bins array of bin name/value pairs
* @throws AerospikeException if prepend fails
*/
public final void prepend(WritePolicy policy, Key key, Bin... bins) throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
command.write(policy, Operation.Type.PREPEND, bins);
}
// Arithmetic Operations
/**
* Add integer bin values to existing record bin values.
* The policy specifies the transaction timeout, record expiration and how the transaction is
* handled when the record already exists.
* This call only works for integer values.
*
* @param policy write configuration parameters, pass in null for defaults
* @param key unique record identifier
* @param bins array of bin name/value pairs
* @throws AerospikeException if add fails
*/
public final void add(WritePolicy policy, Key key, Bin... bins) throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
command.write(policy, Operation.Type.ADD, bins);
}
// Delete Operations
/**
* Delete record for specified key.
* The policy specifies the transaction timeout.
*
* @param policy delete configuration parameters, pass in null for defaults
* @param key unique record identifier
* @return whether record existed on server before deletion
* @throws AerospikeException if delete fails
*/
public final boolean delete(WritePolicy policy, Key key) throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
command.setWrite(Command.INFO2_WRITE | Command.INFO2_DELETE);
command.begin();
command.writeHeader(policy, 0);
command.writeKey();
command.execute(policy);
return command.getResultCode() == ResultCode.OK;
}
// Touch Operations
/**
* Create record if it does not already exist. If the record exists, the record's
* time to expiration will be reset to the policy's expiration.
*
* @param policy write configuration parameters, pass in null for defaults
* @param key unique record identifier
* @throws AerospikeException if touch fails
*/
public final void touch(WritePolicy policy, Key key) throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
command.setWrite(Command.INFO2_WRITE);
command.estimateOperationSize();
command.begin();
command.writeHeader(policy, 1);
command.writeKey();
command.writeOperation(Operation.Type.TOUCH);
command.execute(policy);
}
// Existence-Check Operations
/**
* Determine if a record key exists.
* The policy can be used to specify timeouts.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param key unique record identifier
* @return whether record exists or not
* @throws AerospikeException if command fails
*/
public final boolean exists(Policy policy, Key key) throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
command.setRead(Command.INFO1_READ | Command.INFO1_NOBINDATA);
command.begin();
command.writeHeader(0);
command.writeKey();
command.execute(policy);
return command.getResultCode() == ResultCode.OK;
}
/**
* Check if multiple record keys exist in one batch call.
* The returned boolean array is in positional order with the original key array order.
* The policy can be used to specify timeouts.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param keys array of unique record identifiers
* @return array of existence results, in positional order with the original key array
* @throws AerospikeException if command fails
*/
public final boolean[] exists(Policy policy, Key[] keys) throws AerospikeException {
boolean[] existsArray = new boolean[keys.length];
BatchExecutor.executeBatch(cluster, policy, keys, existsArray, null, null, Command.INFO1_READ | Command.INFO1_NOBINDATA);
return existsArray;
}
// Read Record Operations
/**
* Read entire record for specified key.
* The policy can be used to specify timeouts.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param key unique record identifier
* @return if found, return record instance. If not found, return null.
* @throws AerospikeException if read fails
*/
public final Record get(Policy policy, Key key) throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
command.setRead(Command.INFO1_READ | Command.INFO1_GET_ALL);
command.begin();
command.writeHeader(0);
command.writeKey();
command.execute(policy);
return command.getRecord();
}
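/*
 * Usage sketch for get(); the record's bins map holds the stored values.
 * Names are illustrative only.
 *
 *   Record record = client.get(null, new Key("test", "demo", "user1"));
 *   if (record != null) {
 *       Object name = record.bins.get("name");
 *   }
 */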
/**
* Read record header and bins for specified key.
* The policy can be used to specify timeouts.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param key unique record identifier
* @param binNames bins to retrieve
* @return if found, return record instance. If not found, return null.
* @throws AerospikeException if read fails
*/
public final Record get(Policy policy, Key key, String... binNames) throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
command.setRead(Command.INFO1_READ);
for (String binName : binNames) {
command.estimateOperationSize(binName);
}
command.begin();
command.writeHeader(binNames.length);
command.writeKey();
for (String binName : binNames) {
command.writeOperation(binName, Operation.Type.READ);
}
command.execute(policy);
return command.getRecord();
}
/**
* Read record generation and expiration only for specified key. Bins are not read.
* The policy can be used to specify timeouts.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param key unique record identifier
* @return if found, return record instance. If not found, return null.
* @throws AerospikeException if read fails
*/
public final Record getHeader(Policy policy, Key key) throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
// The server does not currently return record header data with INFO1_NOBINDATA attribute set.
// The workaround is to request a non-existent bin.
// TODO: Fix this on server.
//command.setRead(Command.INFO1_READ | Command.INFO1_NOBINDATA);
command.setRead(Command.INFO1_READ);
command.estimateOperationSize((String)null);
command.begin();
command.writeHeader(0);
command.writeKey();
command.writeOperation((String)null, Operation.Type.READ);
command.execute(policy);
return command.getRecord();
}
// Batch Read Operations
/**
* Read multiple records for specified keys in one batch call.
* The returned records are in positional order with the original key array order.
* If a key is not found, the positional record will be null.
* The policy can be used to specify timeouts.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param keys array of unique record identifiers
* @return array of records
* @throws AerospikeException if read fails
*/
public final Record[] get(Policy policy, Key[] keys) throws AerospikeException {
Record[] records = new Record[keys.length];
BatchExecutor.executeBatch(cluster, policy, keys, null, records, null, Command.INFO1_READ | Command.INFO1_GET_ALL);
return records;
}
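/*
 * Batch read sketch: results are positional with the key array, and missing keys
 * yield null entries. Key values are illustrative only.
 *
 *   Key[] keys = new Key[] {
 *       new Key("test", "demo", "user1"),
 *       new Key("test", "demo", "user2")
 *   };
 *   Record[] records = client.get(null, keys);
 *   for (int i = 0; i < records.length; i++) {
 *       boolean found = (records[i] != null);
 *   }
 */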
/**
* Read multiple record headers and bins for specified keys in one batch call.
* The returned records are in positional order with the original key array order.
* If a key is not found, the positional record will be null.
* The policy can be used to specify timeouts.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param keys array of unique record identifiers
* @param binNames array of bins to retrieve
* @return array of records
* @throws AerospikeException if read fails
*/
public final Record[] get(Policy policy, Key[] keys, String... binNames)
throws AerospikeException {
Record[] records = new Record[keys.length];
// Create lookup table for bin name filtering.
HashSet<String> names = new HashSet<String>(binNames.length);
for (String binName : binNames) {
names.add(binName);
}
BatchExecutor.executeBatch(cluster, policy, keys, null, records, names, Command.INFO1_READ);
return records;
}
/**
* Read multiple record header data for specified keys in one batch call.
* The returned records are in positional order with the original key array order.
* If a key is not found, the positional record will be null.
* The policy can be used to specify timeouts.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param keys array of unique record identifiers
* @return array of records
* @throws AerospikeException if read fails
*/
public final Record[] getHeader(Policy policy, Key[] keys) throws AerospikeException {
Record[] records = new Record[keys.length];
BatchExecutor.executeBatch(cluster, policy, keys, null, records, null, Command.INFO1_READ | Command.INFO1_NOBINDATA);
return records;
}
// Generic Database Operations
/**
* Perform multiple read/write operations on a single key in one batch call.
* An example would be to add an integer value to an existing record and then
* read the result, all in one database call.
*
* @param policy write configuration parameters, pass in null for defaults
* @param key unique record identifier
* @param operations database operations to perform
* @return record if there is a read in the operations list
* @throws AerospikeException if command fails
*/
public final Record operate(WritePolicy policy, Key key, Operation... operations)
throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
int readAttr = 0;
int writeAttr = 0;
boolean readHeader = false;
for (Operation operation : operations) {
switch (operation.type) {
case READ:
readAttr |= Command.INFO1_READ;
// Read all bins if no bin is specified.
if (operation.binName == null) {
readAttr |= Command.INFO1_GET_ALL;
}
break;
case READ_HEADER:
// The server does not currently return record header data with INFO1_NOBINDATA attribute set.
// The workaround is to request a non-existent bin.
// TODO: Fix this on server.
//readAttr |= Command.INFO1_READ | Command.INFO1_NOBINDATA;
readAttr |= Command.INFO1_READ;
readHeader = true;
break;
default:
writeAttr = Command.INFO2_WRITE;
break;
}
command.estimateOperationSize(operation);
}
command.setRead(readAttr);
command.setWrite(writeAttr);
command.begin();
if (writeAttr != 0) {
command.writeHeader(policy, operations.length);
}
else {
command.writeHeader(operations.length);
}
command.writeKey();
for (Operation operation : operations) {
command.writeOperation(operation);
}
if (readHeader) {
command.writeOperation((String)null, Operation.Type.READ);
}
command.execute(policy);
return command.getRecord();
}
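/*
 * Sketch of a combined write/read transaction with operate(). This assumes the
 * Operation.add() and Operation.get() factory methods of this client version;
 * bin names are illustrative only.
 *
 *   Record result = client.operate(null, key,
 *       Operation.add(new Bin("count", 1)),   // arithmetic write
 *       Operation.get("count"));              // read back the updated value
 */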
// Scan Operations
/**
* Read all records in specified namespace and set. If the policy's
* <code>concurrentNodes</code> is specified, each server node will be read in
* parallel. Otherwise, server nodes are read in series.
* <p>
* This call will block until the scan is complete - callbacks are made
* within the scope of this call.
*
* @param policy scan configuration parameters, pass in null for defaults
* @param namespace namespace - equivalent to database name
* @param setName optional set name - equivalent to database table
* @param callback read callback method - called with record data
* @throws AerospikeException if scan fails
*/
public final void scanAll(ScanPolicy policy, String namespace, String setName, ScanCallback callback)
throws AerospikeException {
if (policy == null) {
policy = new ScanPolicy();
}
// Retry policy must be one-shot for scans.
policy.maxRetries = 0;
Node[] nodes = cluster.getNodes();
if (policy.concurrentNodes) {
ScanExecutor executor = new ScanExecutor(policy, namespace, setName, callback);
executor.scanParallel(nodes);
}
else {
for (Node node : nodes) {
scanNode(policy, node, namespace, setName, callback);
}
}
}
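/*
 * Scan sketch. The callback is invoked for every record within the scope of this
 * call; the scanCallback(Key, Record) signature is assumed from the ScanCallback
 * interface of this client version. Namespace and set names are illustrative only.
 *
 *   client.scanAll(null, "test", "demo", new ScanCallback() {
 *       public void scanCallback(Key key, Record record) throws AerospikeException {
 *           // process one record
 *       }
 *   });
 */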
/**
* Read all records in specified namespace and set for one node only.
* The node is specified by name.
* <p>
* This call will block until the scan is complete - callbacks are made
* within the scope of this call.
*
* @param policy scan configuration parameters, pass in null for defaults
* @param nodeName server node name
* @param namespace namespace - equivalent to database name
* @param setName optional set name - equivalent to database table
* @param callback read callback method - called with record data
* @throws AerospikeException if scan fails
*/
public final void scanNode(ScanPolicy policy, String nodeName, String namespace, String setName, ScanCallback callback)
throws AerospikeException {
if (policy == null) {
policy = new ScanPolicy();
}
// Retry policy must be one-shot for scans.
policy.maxRetries = 0;
Node node = cluster.getNode(nodeName);
scanNode(policy, node, namespace, setName, callback);
}
/**
* Read all records in specified namespace and set for one node only.
* <p>
* This call will block until the scan is complete - callbacks are made
* within the scope of this call.
*
* @param policy scan configuration parameters, pass in null for defaults
* @param node server node
* @param namespace namespace - equivalent to database name
* @param setName optional set name - equivalent to database table
* @param callback read callback method - called with record data
* @throws AerospikeException if transaction fails
*/
private final void scanNode(ScanPolicy policy, Node node, String namespace, String setName, ScanCallback callback)
throws AerospikeException {
ScanCommand command = new ScanCommand(node, callback);
command.scan(policy, namespace, setName);
}
// User defined functions (Supported by 3.0 servers only)
/**
* Register package containing user defined functions with server.
* This method is only supported by Aerospike 3.0 servers.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param clientPath path of client file containing user defined functions
* @param serverPath path to store user defined functions on the server
* @param language language of user defined functions
* @throws AerospikeException if register fails
*/
public final void register(Policy policy, String clientPath, String serverPath, Language language)
throws AerospikeException {
String content = Util.readFileEncodeBase64(clientPath);
StringBuilder sb = new StringBuilder(serverPath.length() + content.length() + 100);
sb.append("udf-put:filename=");
sb.append(serverPath);
sb.append(";content=");
sb.append(content);
sb.append(";content-len=");
sb.append(content.length());
sb.append(";udf-type=");
sb.append(language.id);
sb.append(";");
// Send command to all nodes.
String command = sb.toString();
Node[] nodes = cluster.getNodes();
int timeout = (policy == null)? 0 : policy.timeout;
for (Node node : nodes) {
Info info = new Info(node.getConnection(timeout), command);
NameValueParser parser = info.getNameValueParser();
while (parser.next()) {
String name = parser.getName();
if (name.equals("error")) {
throw new AerospikeException(serverPath + " registration failed: " + parser.getValue());
}
}
}
}
/**
* Execute user defined function on server and return results.
* The function operates on a single record.
* This method is only supported by Aerospike 3.0 servers.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param key unique record identifier
* @param fileName server file where user defined function resides
* @param functionName user defined function
* @param args arguments passed in to user defined function
* @return return value of user defined function
* @throws AerospikeException if transaction fails
*/
public final Object execute(Policy policy, Key key, String fileName, String functionName, Value... args)
throws AerospikeException {
SingleCommand command = new SingleCommand(cluster, key);
command.setWrite(Command.INFO2_WRITE);
byte[] argBytes = MsgPack.pack(args);
command.estimateUdfSize(fileName, functionName, argBytes);
command.begin();
command.writeHeader(0);
command.writeKey();
command.writeField(fileName, FieldType.UDF_FILENAME);
command.writeField(functionName, FieldType.UDF_FUNCTION);
command.writeField(argBytes, FieldType.UDF_ARGLIST);
command.execute(policy);
Record record = command.getRecord();
if (record == null || record.bins == null) {
return null;
}
Map<String,Object> map = record.bins;
Object obj = map.get("SUCCESS");
if (obj != null) {
return obj;
}
// User defined functions don't have to return a value.
if (map.containsKey("SUCCESS")) {
return null;
}
obj = map.get("FAILURE");
if (obj != null) {
throw new AerospikeException(obj.toString());
}
throw new AerospikeException("Invalid UDF return value");
}
// Query functions (Supported by 3.0 servers only)
/**
* Execute query and return results.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param statement database query command
* @return collection of query results
* @throws AerospikeException if query fails
*/
public final RecordSet query(QueryPolicy policy, Statement statement)
throws AerospikeException {
if (policy == null) {
policy = new QueryPolicy();
}
// Retry policy must be one-shot for queries.
policy.maxRetries = 0;
return new QueryExecutor(policy, statement, cluster.getNodes());
}
/**
* Create secondary index.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param namespace namespace - equivalent to database name
* @param setName optional set name - equivalent to database table
* @param indexName name of secondary index
* @param binName bin name that data is indexed on
* @param indexType type of secondary index
* @throws AerospikeException if index create fails
*/
public final void createIndex(
Policy policy,
String namespace,
String setName,
String indexName,
String binName,
IndexType indexType
) throws AerospikeException {
StringBuilder sb = new StringBuilder(500);
sb.append("sindex-create:ns=");
sb.append(namespace);
if (setName != null && setName.length() > 0) {
sb.append(";set=");
sb.append(setName);
}
sb.append(";indexname=");
sb.append(indexName);
sb.append(";numbins=1");
sb.append(";indexdata=");
sb.append(binName);
sb.append(",");
sb.append(indexType);
sb.append(";priority=normal");
// Send command to all nodes.
String command = sb.toString();
Node[] nodes = cluster.getNodes();
int timeout = (policy == null)? 0 : policy.timeout;
for (Node node : nodes) {
Info info = new Info(node.getConnection(timeout), command);
String response = info.getValue();
// Command is successful if OK or index already exists.
if (! response.equalsIgnoreCase("OK") && ! response.equals("FAIL:208:ERR FOUND") ) {
throw new AerospikeException("Create index failed: " + response);
}
}
}
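/*
 * Index creation sketch (names are illustrative only; IndexType is assumed to
 * provide a NUMERIC value in this client version):
 *
 *   client.createIndex(null, "test", "demo", "idx_demo_age", "age", IndexType.NUMERIC);
 */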
/**
* Delete secondary index.
*
* @param policy generic configuration parameters, pass in null for defaults
* @param namespace namespace - equivalent to database name
* @param setName optional set name - equivalent to database table
* @param indexName name of secondary index
* @throws AerospikeException if index drop fails
*/
public final void dropIndex(
Policy policy,
String namespace,
String setName,
String indexName
) throws AerospikeException {
StringBuilder sb = new StringBuilder(500);
sb.append("sindex-delete:ns=");
sb.append(namespace);
if (setName != null && setName.length() > 0) {
sb.append(";set=");
sb.append(setName);
}
sb.append(";indexname=");
sb.append(indexName);
// Send command to all nodes.
String command = sb.toString();
Node[] nodes = cluster.getNodes();
int timeout = (policy == null)? 0 : policy.timeout;
for (Node node : nodes) {
Info info = new Info(node.getConnection(timeout), command);
String response = info.getValue();
// Command is successful if ok or index did not previously exist.
if (! response.equalsIgnoreCase("ok") && ! response.equals("FAIL:202:NO INDEX") ) {
throw new AerospikeException("Drop index failed: " + response);
}
}
}
}
|
package com.ibm.streamsx.topology;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import com.ibm.streamsx.topology.builder.BInputPort;
import com.ibm.streamsx.topology.builder.BOperatorInvocation;
import com.ibm.streamsx.topology.builder.BOutput;
import com.ibm.streamsx.topology.context.Placeable;
import com.ibm.streamsx.topology.function.BiFunction;
import com.ibm.streamsx.topology.function.Consumer;
import com.ibm.streamsx.topology.function.Function;
import com.ibm.streamsx.topology.function.Predicate;
import com.ibm.streamsx.topology.function.Supplier;
import com.ibm.streamsx.topology.function.ToIntFunction;
import com.ibm.streamsx.topology.function.UnaryOperator;
import com.ibm.streamsx.topology.spl.SPLStream;
/**
* A {@code TStream} is a declaration of a continuous sequence of tuples. A
* connected topology of streams and functional transformations is built using
* {@link Topology}. <BR>
* Generic methods on this interface provide the ability to
* {@link #filter(Predicate) filter}, {@link #transform(Function)
* transform} or {@link #sink(Consumer) sink} this declared stream using a
* function. <BR>
* Utility methods in the {@code com.ibm.streamsx.topology.streams} package
* provide specific source streams, or transformations on streams with specific
* types.
* <P>
* {@code TStream} implements {@link Placeable} to allow placement
* directives against the processing that produced this stream.
* For example, calling a {@code Placeable} method on the stream
* returned from {@link #filter(Predicate)} will apply to the
* container that is executing the {@code Predicate} passed into {@code filter()}.
* </P>
*
* @param <T>
* Tuple type, any instance of {@code T} at runtime must be
* serializable.
*/
public interface TStream<T> extends TopologyElement, Placeable<TStream<T>> {
/**
* Enumeration for routing tuples to parallel channels.
* @see TStream#parallel(Supplier, Routing)
*/
public enum Routing {
/**
* Tuples will be routed to parallel channels such that an even
* distribution is maintained.
*/
ROUND_ROBIN,
/**
* Tuples will be consistently routed to the same channel based upon
* their key. The key is obtained through:
* <UL>
* <LI>A function called against each tuple when using {@link TStream#parallel(Supplier, Function)}</LI>
* <LI>The {@link com.ibm.streamsx.topology.logic.Logic#identity() identity function} when using {@link TStream#parallel(Supplier, Routing)}</LI>
* </UL>
* The key for a {@code t} is the return from {@code keyer.apply(t)}.
* <BR>
* Any two tuples {@code t1} and {@code t2} will appear on
* the same channel if for their keys {@code k1} and {@code k2}
* {@code k1.equals(k2)} is true.
* <BR>
* If {@code k1} and {@code k2} are not equal then there is
* no guarantee about which channels {@code t1} and {@code t2}
* will appear on, they may end up on the same or different channels.
* <BR>
* The assumption is made that
* the key classes correctly implement the contract for {@code equals} and
* {@code hashCode()}.
*/
KEY_PARTITIONED,
/**
* Tuples will be consistently routed to the same channel based upon
* their {@code hashCode()}.
*/
HASH_PARTITIONED
};
/**
* Declare a new stream that filters tuples from this stream. Each tuple
* {@code t} on this stream will appear in the returned stream if
* {@link Predicate#test(Object) filter.test(t)} returns {@code true}. If
* {@code filter.test(t)} returns {@code false} then {@code t} will not
* appear in the returned stream.
* <P>
* Example of filtering out all empty strings from stream {@code s} of type
* {@code String}
*
* <pre>
* <code>
* TStream<String> s = ...
* TStream<String> filtered = s.filter(new Predicate<String>() {
* @Override
* public boolean test(String t) {
* return !t.isEmpty();
* }} );
* </code>
* </pre>
*
* </P>
*
* @param filter
* Filtering logic to be executed against each tuple.
* @return Filtered stream
* @see #split(int, ToIntFunction)
*/
TStream<T> filter(Predicate<T> filter);
List<TStream<T>> split(int n, ToIntFunction<T> splitter);
/**
* Declare a new stream that transforms each tuple from this stream into one
* (or zero) tuple of a different type {@code U}. For each tuple {@code t}
* on this stream, the returned stream will contain a tuple that is the
* result of {@code transformer.apply(t)} when the return is not {@code null}.
* If {@code transformer.apply(t)} returns {@code null} then no tuple
* is submitted to the returned stream for {@code t}.
*
* <P>
* Example of transforming a stream containing numeric values as
* {@code String} objects into a stream of {@code Double} values.
*
* <pre>
* <code>
* TStream<String> strings = ...
* TStream<Double> doubles = strings.transform(new Function<String, Double>() {
* @Override
* public Double apply(String v) {
* return Double.valueOf(v);
* }});
* </code>
* </pre>
*
* </P>
* @param transformer
* Transformation logic to be executed against each tuple.
* @return Stream that will contain tuples of type {@code U} transformed from this
* stream's tuples.
*/
<U> TStream<U> transform(Function<T, U> transformer);
/**
* Declare a new stream that modifies each tuple from this stream into one
* (or zero) tuple of the same type {@code T}. For each tuple {@code t}
* on this stream, the returned stream will contain a tuple that is the
* result of {@code modifier.apply(t)} when the return is not {@code null}.
* The function may return the same reference as its input {@code t} or
* a different object of the same type.
* If {@code modifier.apply(t)} returns {@code null} then no tuple
* is submitted to the returned stream for {@code t}.
*
* <P>
* Example of modifying a stream of {@code String} values by adding the suffix '{@code extra}'.
*
* <pre>
* <code>
* TStream<String> strings = ...
* TStream<String> modifiedStrings = strings.modify(new UnaryOperator<String>() {
* @Override
* public String apply(String tuple) {
* return tuple.concat("extra");
* }});
* </code>
* </pre>
*
* </P>
* <P>
* This method is equivalent to
* {@code transform(Function<T,T> modifier)}.
* </P>
*
* @param modifier
* Modifier logic to be executed against each tuple.
* @return Stream that will contain tuples of type {@code T} modified from this
* stream's tuples.
*/
TStream<T> modify(UnaryOperator<T> modifier);
/**
* Declare a new stream that transforms tuples from this stream into one or
* more (or zero) tuples of a different type {@code U}. For each tuple
* {@code t} on this stream, the returned stream will contain all non-null tuples in
* the {@code Iterator<U>} that is the result of {@code transformer.apply(t)}.
* Tuples will be added to the returned stream in the order the iterator
* returns them.
*
* <BR>
* If the return is null or an empty iterator then no tuples are added to
* the returned stream for input tuple {@code t}.
* <P>
* Example of transforming a stream containing lines of text into a stream
* of words split out from each line. The order of the words in the stream
* will match the order of the words in the lines.
*
* <pre>
* <code>
* TStream<String> lines = ...
* TStream<String> words = lines.multiTransform(new Function<String, Iterable<String>>() {
* @Override
* public Iterable<String> apply(String tuple) {
* return Arrays.asList(tuple.split(" "));
* }});
* </code>
* </pre>
*
* </P>
*
* @param transformer
* Transformation logic to be executed against each tuple.
* @return Stream that will contain tuples of type {@code U} transformed from this
* stream's tuples.
*/
<U> TStream<U> multiTransform(Function<T, Iterable<U>> transformer);
/**
* Sink (terminate) this stream. For each tuple {@code t} on this stream
* {@link Consumer#accept(Object) sinker.accept(t)} will be called. This is
* typically used to send information to external systems, such as databases
* or dashboards.
* <P>
* Example of terminating a stream of {@code String} tuples by printing them
* to {@code System.out}.
*
* <pre>
* <code>
* TStream<String> values = ...
* values.sink(new Consumer<String>() {
*
* @Override
* public void accept(String tuple) {
* System.out.println(tuple);
*
* }
* });
* </code>
* </pre>
*
* </P>
*
* @param sinker
* Logic to be executed against each tuple on this stream.
* @return the sink element
*/
TSink sink(Consumer<T> sinker);
/**
* Create a stream that is a union of this stream and {@code other} stream
* of the same type {@code T}. Any tuple on this stream or {@code other}
* will appear on the returned stream. <BR>
* No ordering of tuples across this stream and {@code other} is defined,
* thus the return stream is unordered.
* <BR>
* If {@code other} is this stream or keyed version of this stream
* then {@code this} is returned as a stream cannot be unioned with itself.
*
* @param other
* Stream to union with this stream.
* @return Stream that will contain tuples from this stream and
* {@code other}.
*/
TStream<T> union(TStream<T> other);
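/*
 * Union sketch (stream names are illustrative only):
 *
 *   TStream<String> alerts = ...
 *   TStream<String> warnings = ...
 *   // Tuples from either input appear on the combined stream, in no defined order.
 *   TStream<String> combined = alerts.union(warnings);
 */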
/**
* Create a stream that is a union of this stream and {@code others} streams
* of the same type {@code T}. Any tuple on this stream or any of
* {@code others} will appear on the returned stream. <BR>
* No ordering of tuples across this stream and {@code others} is defined,
* thus the return stream is unordered. <BR>
* If others does not contain any streams then {@code this} is returned.
* <BR>
* A stream or a keyed version of a stream cannot be unioned with itself,
* so any stream that is represented multiple times in {@code others}
* or this stream will be reduced to a single copy of itself.
* <BR>
* In the case that no stream is to be unioned with this stream
* then {@code this} is returned (for example, {@code others}
* is empty or only contains the same logical stream as {@code this}).
*
* @param others
* Streams to union with this stream.
* @return Stream containing tuples from this stream and {@code others}.
*/
TStream<T> union(Set<TStream<T>> others);
/**
* Print each tuple on {@code System.out}. For each tuple {@code t} on this
* stream {@code System.out.println(t.toString())} will be called.
*/
TSink print();
/**
* Class of the tuples on this stream, if known.
* Will be the same as {@link #getTupleType()}
* if it is a {@code Class} object.
* @return Class of the tuple on this stream, {@code null}
* if {@link #getTupleType()} is not a {@code Class} object.
*/
Class<T> getTupleClass();
/**
* Type of the tuples on this stream.
* Can be null if no type knowledge can be determined.
*
* @return Type of the tuples on this stream,
* {@code null} if no type knowledge could be determined
*/
Type getTupleType();
/**
* Join this stream with a window of type {@code U}. For each tuple on this
* stream, it is joined with the contents of {@code window}. Each tuple is
* passed into {@code joiner} and the return value is submitted to the
* returned stream. If the call returns null then no tuple is submitted.
*
* @param window Window to join this stream with.
* @param joiner Join function.
* @return A stream that is the results of joining this stream with
* {@code window}.
*/
<J, U> TStream<J> join(TWindow<U,?> window,
BiFunction<T, List<U>, J> joiner);
/**
* Join this stream with a partitioned window of type {@code U} with key type {@code K}.
* For each tuple on this stream, it is joined with the contents of {@code window}
* for the key {@code keyer.apply(tuple)}. Each tuple is
* passed into {@code joiner} and the return value is submitted to the
* returned stream. If the call returns null then no tuple is submitted.
*
* @param window Window to join this stream with.
* @param keyer Key function for this stream to match the window's key.
* @param joiner Join function.
* @return A stream that is the results of joining this stream with
* {@code window}.
*/
<J, U, K> TStream<J> join(TWindow<U,K> window,
Function<T,K> keyer,
BiFunction<T, List<U>, J> joiner);
/**
* Join this stream with the last tuple seen on a stream of type {@code U}
* with partitioning.
* For each tuple on this
* stream, it is joined with the last tuple seen on {@code other}
* with a matching key (of type {@code K}).
* <BR>
* Each tuple {@code t} on this stream will match the last tuple
* {@code u} on {@code other} if
* {@code keyer.apply(t).equals(otherKeyer.apply(u))}
* is true.
* <BR>
* The assumption is made that
* the key classes correctly implement the contract for {@code equals} and
* {@code hashCode()}.
* <P>Each tuple is
* passed into {@code joiner} and the return value is submitted to the
* returned stream. If the call returns null then no tuple is submitted.
* </P>
* @param keyer Key function for this stream
* @param other Stream to join with.
* @param otherKeyer Key function for {@code other}
* @param joiner Join function.
* @return A stream that is the results of joining this stream with
* {@code other}.
*/
<J,U,K> TStream<J> joinLast(
Function<T,K> keyer,
TStream<U> other,
Function<U,K> otherKeyer,
BiFunction<T, U, J> joiner);
/**
* Join this stream with the last tuple seen on a stream of type {@code U}.
* For each tuple on this
* stream, it is joined with the last tuple seen on {@code other}. Each tuple is
* passed into {@code joiner} and the return value is submitted to the
* returned stream. If the call returns null then no tuple is submitted.
* <BR>
* This is a simplified version of
* <BR>
* {@code this.join(other.last(), new BiFunction<T,List<U>,J>() ...) }
* <BR>
* where instead the window contents are passed as a single tuple of type {@code U}
* rather than a list containing one tuple. If no tuple has been seen on {@code other}
* then {@code null} will be passed as the second argument to {@code joiner}.
*
* @param other Stream to join with.
* @param joiner Join function.
* @return A stream that is the results of joining this stream with
* {@code other}.
*/
<J,U> TStream<J> joinLast(
TStream<U> other,
BiFunction<T, U, J> joiner);
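/*
 * joinLast() sketch: enrich each tuple with the most recent tuple seen on another
 * stream. Types and names are illustrative only.
 *
 *   TStream<String> requests = ...
 *   TStream<Integer> latestConfig = ...
 *   TStream<String> tagged = requests.joinLast(latestConfig,
 *       new BiFunction<String, Integer, String>() {
 *           public String apply(String request, Integer config) {
 *               // config is null until a tuple has been seen on latestConfig
 *               return request + ":" + config;
 *           }
 *       });
 */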
/**
* Declare a {@link TWindow} that continually represents the last {@code time} seconds
* of tuples (in the given time {@code unit}) on this stream.
* If no tuples have been seen on the stream in the last {@code time} seconds
* then the window will be empty.
* <BR>
* The window has a single partition that always contains the
* last {@code time} seconds of tuples seen on this stream.
* <BR>
* A key based partitioned window can be created from the returned window
* using {@link TWindow#key(Function)} or {@link TWindow#key()}.
* When the window is partitioned each partition independently maintains the last {@code time}
* seconds of tuples for each key seen on this stream.
*
* @param time Time size of the window
* @param unit Unit for {@code time}
* @return Window on this stream representing the last {@code time} seconds.
*/
TWindow<T,Object> last(long time, TimeUnit unit);
/**
* Declare a {@link TWindow} that continually represents the last {@code count} tuples
* seen on this stream. If the stream has not yet seen {@code count}
* tuples then it will contain all of the tuples seen on the stream,
* which will be less than {@code count}. If no tuples have been
* seen on the stream then the window will be empty.
* <BR>
* The window has a single partition that always contains the
* last {@code count} tuples seen on this stream.
* <BR>
* A key based partitioned window can be created from the returned window
* using {@link TWindow#key(Function)} or {@link TWindow#key()}.
* When the window is partitioned each partition independently maintains the
* last {@code count} tuples for each key seen on this stream.
*
* @param count Tuple size of the window
* @return Window on this stream representing the last {@code count} tuples.
*/
TWindow<T,Object> last(int count);
/**
* Declare a {@link TWindow} that continually represents the last tuple on this stream.
* If no tuples have been seen on the stream then the window will be empty.
* <BR>
* The window has a single partition that always contains the last tuple seen
* on this stream.
* <BR>
* A key based partitioned window can be created from the returned window
* using {@link TWindow#key(Function)} or {@link TWindow#key()}.
* When the window is partitioned each partition independently maintains the
* last tuple for each key seen on this stream.
*
* @return Window on this stream representing the last tuple.
*/
TWindow<T,Object> last();
/**
* Declare a {@link TWindow} on this stream that has the same configuration
* as another window.
* <BR>
* The window has a single partition.
* <BR>
* A key based partitioned window can be created from the returned window
* using {@link TWindow#key(Function)} or {@link TWindow#key()}.
*
* @param configWindow
* Window to copy the configuration from.
* @return Window on this stream with the same configuration as {@code configWindow}.
*/
TWindow<T,Object> window(TWindow<?,?> configWindow);
/**
* Publish tuples from this stream for consumption by other IBM Streams applications.
*
* Applications consume published streams using:
* <UL>
* <LI>
* {@link Topology#subscribe(String, Class)} for Java Streams applications.</LI>
* <LI>
* {@code com.ibm.streamsx.topology.topic::Subscribe} operator for SPL
* Streams applications.</LI>
* </UL>
* <BR>
* A subscriber matches to a publisher if:
* <UL>
* <LI>
* The topic is an exact match, and:</LI>
* <LI>
* For JSON streams ({@code TStream<JSONObject>}) the subscription is to
* a JSON stream.
* </LI>
* <LI>
* For Java streams ({@code TStream<T>}) the declared Java type ({@code T}
* ) of the stream is an exact match.</LI>
* <LI>
* For {@link SPLStream SPL streams} the {@link SPLStream#getSchema() SPL
* schema} is an exact match.</LI>
* </UL>
*
* @see Topology#subscribe(String, Class)
* @see com.ibm.streamsx.topology.spl.SPLStreams#subscribe(TopologyElement, String, com.ibm.streams.operator.StreamSchema)
*/
void publish(String topic);
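/*
 * Publish/subscribe sketch between two applications (the topic name and the
 * {@code otherTopology} reference are illustrative only):
 *
 *   // publishing application
 *   TStream<String> readings = ...
 *   readings.publish("engine/readings");
 *
 *   // subscribing application
 *   TStream<String> remote = otherTopology.subscribe("engine/readings", String.class);
 */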
/**
* Parallelizes the stream into a fixed
* number of parallel channels using round-robin distribution.
* <BR>
* Tuples are routed to the parallel channels in a
* {@link Routing#ROUND_ROBIN round-robin fashion}.
* <BR>
* Subsequent transformations on the returned stream will be executed
* in {@code width} channels until {@link #endParallel()} is called or
* the stream terminates.
* <br>
* See {@link #parallel(Supplier, Routing)} for more information.
* @param width
* The degree of parallelism in the parallel region.
* @return A reference to a stream for which subsequent transformations will be
* executed in parallel using {@code width} channels.
*/
TStream<T> parallel(int width);
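/*
 * Parallel region sketch: the filter below runs on four channels and the region is
 * closed with endParallel(). The predicate is illustrative only.
 *
 *   TStream<String> lines = ...
 *   TStream<String> kept = lines.parallel(4)
 *       .filter(new Predicate<String>() {
 *           public boolean test(String t) {
 *               return !t.isEmpty();
 *           }
 *       })
 *       .endParallel();
 */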
/**
* Parallelizes the stream into {@code width} parallel channels.
* Same as {@link #parallel(int)} except the {@code width} is
* specified with a {@code Supplier<Integer>} such as one created
* by {@link Topology#createSubmissionParameter(String, Class)}.
*
* @param width
* The degree of parallelism in the parallel region.
* @return A reference to a stream for which subsequent transformations will be
* executed in parallel using {@code width} channels.
*/
TStream<T> parallel(Supplier<Integer> width);
TStream<T> parallel(Supplier<Integer> width, Routing routing);
/**
* Parallelizes the stream into a number of parallel channels
* using key based distribution.
* <BR>
* For each tuple {@code t} {@code keyer.apply(t)} is called
* and then the tuples are routed
* so that all tuples with the
* {@link Routing#KEY_PARTITIONED same key are sent to the same channel}.
*
* @param width The degree of parallelism.
* @param keyer Function to obtain the key from each tuple.
* @return A reference to a stream with {@code width} channels
* at the beginning of the parallel region.
*
* @see Routing#KEY_PARTITIONED
* @see #parallel(Supplier, Routing)
*/
TStream<T> parallel(Supplier<Integer> width, Function<T,?> keyer);
/**
* Ends a parallel region by merging the channels into a single stream.
*
* @return A stream for which subsequent transformations are no longer parallelized.
* @see #parallel(int)
* @see #parallel(Supplier, Routing)
* @see #parallel(Supplier, Function)
*/
TStream<T> endParallel();
TStream<T> sample(double fraction);
/**
* Return a stream whose immediate subsequent processing will execute
* in a separate operating system process from this stream's processing.
* <BR>
* For the following Topology:
* <pre><code>
* -->transform1-->.isolate()-->transform2-->transform3-->.isolate()-->sink
* </code></pre>
* It is guaranteed that:
* <UL>
* <LI>{@code transform1} and {@code transform2} will execute in separate processes.</LI>
* <LI>{@code transform3} and {@code sink} will execute in separate processes. </LI>
* </UL>
* If multiple transformations ({@code t1, t2, t3}) are applied to a stream returned from {@code isolate()}
* then it is guaranteed that each of them will execute in a separate operating
* system process from this stream, but no guarantees are made about where {@code t1, t2, t3}
* are placed in relationship to each other.
* <br>
* Only applies for distributed contexts.
* @return A stream that runs in a separate process from this stream.
*/
TStream<T> isolate();
TStream<T> lowLatency();
TStream<T> endLowLatency();
/**
* Throttle a stream by ensuring any tuple is submitted with at least
* {@code delay} from the previous tuple.
*
* @param delay
* Minimum time between submission of consecutive tuples.
* @param unit
* Unit of {@code delay}.
* @return Stream containing all tuples on this stream, but throttled.
*/
TStream<T> throttle(long delay, TimeUnit unit);
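/*
 * Throttle sketch: ensure at least 100 milliseconds between submitted tuples.
 * The {@code stream} reference is illustrative only.
 *
 *   TStream<String> paced = stream.throttle(100, TimeUnit.MILLISECONDS);
 */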
/**
* Return a strongly typed reference to this stream.
* If this stream is already strongly typed as containing tuples
* of type {@code tupleTypeClass} then {@code this} is returned.
* @param tupleTypeClass Class type for the tuples.
* @return A stream with the same contents as this stream but strongly typed as
* containing tuples of type {@code tupleTypeClass}.
*/
TStream<T> asType(Class<T> tupleTypeClass);
/**
* Return a keyed stream that contains the same tuples as this stream.
* A keyed stream is a stream where each tuple has an inherent
* key, defined by {@code keyFunction}.
* <P>
* A keyed stream provides control over the behavior of
* downstream processing of tuples with {@link #parallel(int) parallel streams} and
* {@link TWindow windows}.
* <BR>
* With parallel streams all tuples that have the same key
* will be processed by the same channel.
* <BR>
* With windows all tuples that have the same key will
* be processed as an independent window. For example,
* with a window created using {@link #last(int) last(3)}
* then each key has its own window containing the last
* three tuples with the same key.
* </P>
* @param keyFunction Function that gets the key from a tuple.
* The key function must be stateless.
* @return Keyed stream containing tuples from this stream.
*
* @see TKeyedStream
* @see TWindow
*
* @param <K> Type of the key.
*/
// <K> TKeyedStream<T,K> key(Function<T,K> keyFunction);
/**
* Return a keyed stream that contains the same tuples as this stream.
* The key of each tuple is the tuple itself.
* <BR>
* For example, a {@code TStream<String> strings} may be keyed using
* {@code strings.key()} and thus when made {@link #parallel(int) parallel}
* all {@code String} objects with the same value will be sent to the
* same channel.
* @return this.
*
* @see #key(Function)
*/
// TKeyedStream<T,T> key();
/**
* Internal method.
* <BR>
* <B><I>Not intended to be called by applications, may be removed at any time.</I></B>
*/
BOutput output();
/**
* Internal method.
* <BR>
* <B><I>Not intended to be called by applications, may be removed at any time.</I></B>
* <BR>
* Connect this stream to a downstream operator. If input is null then a new
* input port will be created, otherwise it will be used to connect to this
* stream. Returns input or the new port if input was null.
*/
BInputPort connectTo(BOperatorInvocation receivingBop, boolean functional, BInputPort input);
}
|
package fi.aalto.tripchain;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Date;
import java.util.UUID;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONException;
import org.json.JSONObject;
import fi.aalto.tripchain.route.ActivityReceiver;
import fi.aalto.tripchain.route.LocationListener;
import fi.aalto.tripchain.route.Route;
import android.app.Notification;
import android.app.Service;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.IBinder;
import android.os.RemoteException;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
public class BackgroundService extends Service {
private final static String TAG = BackgroundService.class.getSimpleName();
private ActivityReceiver activityReceiver;
private LocationListener locationListener;
private Route route;
private Handler handler;
private volatile boolean recording = false;
private long timestamp;
private SharedPreferences preferences;
public synchronized Route getRoute() {
return this.route;
}
@Override
public void onCreate() {
Log.d(TAG, "onCreate");
this.handler = new Handler();
preferences = getSharedPreferences(Configuration.SHARED_PREFERENCES, MODE_MULTI_PROCESS);
}
private void postTrip(JSONObject trip) throws ClientProtocolException, IOException {
HttpClient client = new DefaultHttpClient();
HttpPost httpPost = new HttpPost("http://tripchaingame.herokuapp.com/api/trip.json");
httpPost.addHeader("Content-Type", "application/json");
httpPost.setEntity(new StringEntity(trip.toString()));
HttpResponse response = client.execute(httpPost);
Log.d(TAG, "post status: " + response.getStatusLine());
}
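/*
 * The JSON body assembled in stop() and sent through postTrip() has this shape
 * (field values are illustrative only):
 *
 *   {
 *     "clientId": "<login_id from shared preferences>",
 *     "trip": <route.toJson() output>,
 *     "startedAt": 1400000000000
 *   }
 */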
public void stop() {
Log.d(TAG, "Stopping!");
this.activityReceiver.stop();
this.locationListener.stop();
this.recording = false;
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
JSONObject trip = new JSONObject();
trip.put("clientId", preferences.getString("login_id", null));
trip.put("trip", route.toJson());
trip.put("startedAt", timestamp);
postTrip(trip);
} catch (Exception e) {
Log.d(TAG, "Failed to post trip", e);
}
stopForeground(true);
return null;
}
}.execute();
}
public void start() {
Log.d(TAG, "Starting!");
NotificationCompat.Builder mBuilder =
new NotificationCompat.Builder(this)
.setSmallIcon(R.drawable.ic_launcher)
.setContentTitle("Tripchain")
.setContentText("Recording route");
startForeground(12345, mBuilder.build());
this.timestamp = System.currentTimeMillis();
this.recording = true;
this.route = new Route();
this.activityReceiver = new ActivityReceiver(this);
this.locationListener = new LocationListener(this);
}
@Override
public void onDestroy() {
super.onDestroy();
Log.d(TAG, "onDestroy");
}
@Override
public IBinder onBind(Intent arg0) {
return mBinder;
}
private final ServiceConnectionApi.Stub mBinder = new ServiceConnectionApi.Stub() {
@Override
public void stop() throws RemoteException {
handler.post(new Runnable() {
@Override
public void run() {
BackgroundService.this.stop();
}
});
}
@Override
public void start() throws RemoteException {
handler.post(new Runnable() {
@Override
public void run() {
BackgroundService.this.start();
}
});
}
@Override
public boolean recording() throws RemoteException {
return BackgroundService.this.recording;
}
};
}
|
package ui;
import java.awt.BorderLayout;
import java.awt.EventQueue;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Panel;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
import java.util.StringTokenizer;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
import javax.swing.border.EmptyBorder;
import javax.swing.filechooser.FileFilter;
import java.awt.Color;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
public class LabelingFrame extends JFrame implements ActionListener {
private static final long serialVersionUID = -2225603632488216748L;
private JPanel contentPane;
List<Tweet> tweets;
//Fields holding the two types of labels for labeling
private JTextField firstLabelField;
private JTextField secondLabelField;
//Field to hold the tweet text
private JTextArea tweetArea;
JLabel tweetTitlePanel;
//List iterator for moving through tweets
private ListIterator<Tweet> tweetIterator;
//All the labelled tweets so far
private Set<Tweet> labelledTweets;
//Current tweet we are looking at
private Tweet currentTweet;
//Dummy tweet for beginning and ending
private Tweet defaultTweet;
JButton openFile, prev, next, delete, finish, firstLabel, secondLabel, check;
private JFileChooser fc;
private int curTweet, totalTweets;
/**
* Launch the application.
*/
public static void main(String[] args) {
EventQueue.invokeLater(new Runnable() {
public void run() {
try {
LabelingFrame frame = new LabelingFrame();
frame.setVisible(true);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
/**
* Create the frame.
*/
public LabelingFrame() {
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
setBounds(100, 100, 500, 350);
contentPane = new JPanel();
contentPane.setBackground(new Color(0, 0, 0));
contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
contentPane.setLayout(new BorderLayout(0, 0));
setContentPane(contentPane);
Panel buttonPanel = new Panel();
buttonPanel.setBackground(new Color(0, 0, 0));
contentPane.add(buttonPanel, BorderLayout.SOUTH);
openFile = new JButton("Open File");
openFile.setFont(new Font("Verdana", Font.PLAIN, 11));
openFile.setBackground(new Color(0, 0, 0));
openFile.setForeground(new Color(148, 0, 211));
openFile.addActionListener(this);
buttonPanel.add(openFile);
prev = new JButton("Previous");
prev.setFont(new Font("Verdana", Font.PLAIN, 11));
prev.setForeground(new Color(148, 0, 211));
prev.setBackground(new Color(0, 0, 0));
prev.addActionListener(this);
buttonPanel.add(prev);
next = new JButton("Next");
next.setFont(new Font("Verdana", Font.PLAIN, 11));
next.setForeground(new Color(148, 0, 211));
next.setBackground(new Color(0, 0, 0));
next.addActionListener(this);
buttonPanel.add(next);
// delete = new JButton("Delete");
// delete.addActionListener(this);
// buttonPanel.add(delete);
finish = new JButton("Finish");
finish.setFont(new Font("Verdana", Font.PLAIN, 11));
finish.setBackground(new Color(0, 0, 0));
finish.setForeground(new Color(148, 0, 211));
finish.addActionListener(this);
buttonPanel.add(finish);
// check = new JButton("Check Finished");
// check.addActionListener(this);
// buttonPanel.add(check);
JLabel Title = new JLabel("Labler");
Title.setForeground(new Color(148, 0, 211));
Title.setBackground(new Color(0, 0, 0));
Title.setFont(new Font("Verdana", Font.PLAIN, 32));
Title.setHorizontalAlignment(SwingConstants.CENTER);
contentPane.add(Title, BorderLayout.NORTH);
Panel labelPanel = new Panel();
contentPane.add(labelPanel, BorderLayout.CENTER);
labelPanel.setLayout(new BorderLayout(0, 0));
tweetArea = new JTextArea();
tweetArea.setText("Sample");
tweetArea.setFont(new Font("Verdana", Font.PLAIN, 20));
tweetArea.setForeground(new Color(148, 0, 211));
tweetArea.setBackground(Color.BLACK);
tweetArea.setLineWrap(true);
tweetArea.setWrapStyleWord(true);
tweetArea.setEditable(false);
labelPanel.add(tweetArea);
Panel labelOptionsPanel = new Panel();
labelOptionsPanel.setBackground(new Color(0, 0, 0));
labelPanel.add(labelOptionsPanel, BorderLayout.SOUTH);
firstLabel = new JButton("Label as First");
firstLabel.setFont(new Font("Verdana", Font.PLAIN, 11));
firstLabel.setBackground(new Color(0, 0, 0));
firstLabel.setForeground(new Color(148, 0, 211));
firstLabel.addActionListener(this);
labelOptionsPanel.add(firstLabel);
secondLabel = new JButton("Label as Second");
secondLabel.setFont(new Font("Verdana", Font.PLAIN, 11));
secondLabel.setForeground(new Color(148, 0, 211));
secondLabel.setBackground(new Color(0, 0, 0));
secondLabel.addActionListener(this);
labelOptionsPanel.add(secondLabel);
Panel setLabelsPanel = new Panel();
setLabelsPanel.setBackground(new Color(0, 0, 0));
labelPanel.add(setLabelsPanel, BorderLayout.EAST);
GridBagLayout gbl_setLabelsPanel = new GridBagLayout();
gbl_setLabelsPanel.columnWidths = new int[] {30, 30, 30};
gbl_setLabelsPanel.rowHeights = new int[] {30, 30, 30};
gbl_setLabelsPanel.columnWeights = new double[]{0.0, 1.0, 0.0};
gbl_setLabelsPanel.rowWeights = new double[]{0.0, 0.0, 0.0};
setLabelsPanel.setLayout(gbl_setLabelsPanel);
JLabel lblLabels = new JLabel("Labels");
lblLabels.setFont(new Font("Verdana", Font.PLAIN, 11));
lblLabels.setForeground(new Color(148, 0, 211));
lblLabels.setBackground(new Color(0, 0, 0));
GridBagConstraints gbc_lblLabels = new GridBagConstraints();
gbc_lblLabels.insets = new Insets(0, 0, 5, 5);
gbc_lblLabels.gridx = 1;
gbc_lblLabels.gridy = 0;
setLabelsPanel.add(lblLabels, gbc_lblLabels);
JLabel lblFirst = new JLabel("First: ");
lblFirst.setFont(new Font("Verdana", Font.PLAIN, 11));
lblFirst.setForeground(new Color(148, 0, 211));
GridBagConstraints gbc_lblFirst = new GridBagConstraints();
gbc_lblFirst.anchor = GridBagConstraints.EAST;
gbc_lblFirst.insets = new Insets(0, 0, 5, 5);
gbc_lblFirst.gridx = 0;
gbc_lblFirst.gridy = 1;
setLabelsPanel.add(lblFirst, gbc_lblFirst);
firstLabelField = new JTextField();
firstLabelField.setFont(new Font("Verdana", Font.PLAIN, 11));
firstLabelField.setForeground(new Color(148, 0, 211));
firstLabelField.setBackground(new Color(0, 0, 0));
firstLabelField.setText("<Enter text>");
GridBagConstraints gbc_firstLabelField = new GridBagConstraints();
gbc_firstLabelField.insets = new Insets(0, 0, 5, 5);
gbc_firstLabelField.fill = GridBagConstraints.HORIZONTAL;
gbc_firstLabelField.gridx = 1;
gbc_firstLabelField.gridy = 1;
setLabelsPanel.add(firstLabelField, gbc_firstLabelField);
firstLabelField.setColumns(10);
JLabel lblNewLabel_1 = new JLabel("Second:");
lblNewLabel_1.setFont(new Font("Verdana", Font.PLAIN, 11));
lblNewLabel_1.setForeground(new Color(148, 0, 211));
GridBagConstraints gbc_lblNewLabel_1 = new GridBagConstraints();
gbc_lblNewLabel_1.anchor = GridBagConstraints.EAST;
gbc_lblNewLabel_1.insets = new Insets(0, 0, 0, 5);
gbc_lblNewLabel_1.gridx = 0;
gbc_lblNewLabel_1.gridy = 2;
setLabelsPanel.add(lblNewLabel_1, gbc_lblNewLabel_1);
secondLabelField = new JTextField();
secondLabelField.setFont(new Font("Verdana", Font.PLAIN, 11));
secondLabelField.setForeground(new Color(148, 0, 211));
secondLabelField.setBackground(new Color(0, 0, 0));
secondLabelField.setText("<Enter text>");
GridBagConstraints gbc_secondLabelField = new GridBagConstraints();
gbc_secondLabelField.insets = new Insets(0, 0, 0, 5);
gbc_secondLabelField.fill = GridBagConstraints.HORIZONTAL;
gbc_secondLabelField.gridx = 1;
gbc_secondLabelField.gridy = 2;
setLabelsPanel.add(secondLabelField, gbc_secondLabelField);
secondLabelField.setColumns(10);
tweetTitlePanel = new JLabel("Tweet");
tweetTitlePanel.setFont(new Font("Verdana", Font.PLAIN, 15));
tweetTitlePanel.setForeground(new Color(148, 0, 211));
tweetTitlePanel.setBackground(new Color(0, 0, 0));
tweetTitlePanel.setHorizontalAlignment(SwingConstants.LEFT);
labelPanel.add(tweetTitlePanel, BorderLayout.NORTH);
fc = new JFileChooser();
fc.setFileFilter(new TweetFilter());
labelledTweets = new HashSet<Tweet>();
defaultTweet = new Tweet(0, "No more tweets!");
currentTweet = defaultTweet;
}
@Override
public void actionPerformed(ActionEvent e) {
if (e.getSource() == openFile) {
this.loadUnLabeledFile();
if (tweetIterator != null && tweetIterator.hasNext()) {
currentTweet = tweetIterator.next();
this.updateUi(currentTweet);
}
}
else if (e.getSource() == prev) {
if (tweetIterator != null && tweetIterator.hasPrevious()) {
currentTweet = tweetIterator.previous();
--curTweet;
this.updateUi(currentTweet);
}
else {
JOptionPane.showMessageDialog(this, "You can't do that!", "User Error",
JOptionPane.ERROR_MESSAGE);
}
}
else if (e.getSource() == next) {
if (tweetIterator != null && tweetIterator.hasNext()) {
currentTweet = tweetIterator.next();
++curTweet;
this.updateUi(currentTweet);
}
}
else if (e.getSource() == delete) {
try {
if (tweetIterator != null) {
tweetIterator.remove();
currentTweet = new Tweet(0, "Freshly Deleted!");
updateUi(currentTweet);
}
else {
JOptionPane.showMessageDialog(this, "You can't do that!", "User Error",
JOptionPane.ERROR_MESSAGE);
}
}
catch (IllegalStateException ise) {
JOptionPane.showMessageDialog(this, "You can't do that!", "User Error",
JOptionPane.ERROR_MESSAGE);
}
}
else if (e.getSource() == finish) {
this.saveAndQuit();
}
else if (e.getSource() == firstLabel || e.getSource() == secondLabel) {
if (currentTweet != null && !firstLabelField.getText().equals("<Enter text>")
&& !secondLabelField.getText().equals("<Enter text>")) {
currentTweet.label = (e.getSource() == firstLabel)
? firstLabelField.getText(): secondLabelField.getText();
boolean moveTwice = (labelledTweets.contains(currentTweet));
if(currentTweet != defaultTweet) {
labelledTweets.add(currentTweet);
System.out.println("Labelled tweet: " + currentTweet);
}
if (tweetIterator.hasNext()) {
currentTweet = tweetIterator.next();
++curTweet;
}
else {
JOptionPane.showMessageDialog(this, "That's all of them!", "Info",
JOptionPane.INFORMATION_MESSAGE);
currentTweet = defaultTweet;
}
//If we re-labelled a tweet we had already seen (i.e. we moved backwards through the list), skip one extra tweet to return to the unlabelled portion
if (moveTwice && tweetIterator.hasNext()) {
currentTweet = tweetIterator.next();
}
updateUi(currentTweet);
}
else {
JOptionPane.showMessageDialog(this, "You need to set the labels!", "User Error",
JOptionPane.ERROR_MESSAGE);
}
}
else if (e.getSource() == check) {
Set<Tweet> labelled = this.loadLabelledFile();
if (labelled != null) {
List<Tweet> lk = new LinkedList<Tweet>(labelled);
List<Tweet> toRemove = new ArrayList<Tweet>();
labelledTweets.removeAll(labelled);
int removed = 0;
while (tweetIterator.hasNext()) {
Tweet cur = tweetIterator.next();
// if (labelled.contains(cur)) {
// tweetIterator.remove();
// ++removed;
for (int i = 0; i < lk.size(); ++i) {
if (cur.tweetEquals(lk.get(i))) {
//tweetIterator.remove();
toRemove.add(cur);
++removed;
break;
}
}
//Fast break if we found everything already
if (removed == labelled.size()) {
break;
}
}
tweets.removeAll(toRemove);
tweetIterator = tweets.listIterator();
if (tweetIterator.hasNext()) {
currentTweet = tweetIterator.next();
}
else {
currentTweet = defaultTweet;
}
JOptionPane.showMessageDialog(this, "Removed: " + removed +
((removed != 1) ? " Tweets," : " Tweet,") + " already classified", "Info",
JOptionPane.INFORMATION_MESSAGE);
}
}
updateUi(currentTweet);
}
private void saveAndQuit() {
int returnVal = fc.showSaveDialog(this);
if (returnVal == JFileChooser.APPROVE_OPTION) {
if (labelledTweets.size() != 0) {
this.saveTweets(fc.getSelectedFile());
}
System.exit(0);
}
}
private void saveTweets(File f) {
BufferedWriter bw;
try {
bw = new BufferedWriter(new FileWriter(f));
for (Tweet t: labelledTweets) {
bw.write(t.toString());
bw.newLine();
}
bw.flush();
bw.close();
} catch (IOException e) {
JOptionPane.showMessageDialog(this, "Unable to save file :(", "IO Error",
JOptionPane.ERROR_MESSAGE);
}
}
private void updateUi(Tweet newTweet) {
tweetArea.setText(newTweet.text);
tweetTitlePanel.setText("Tweet number " + curTweet + " of " + totalTweets);
String ftext = firstLabelField.getText().equals("<Enter text>") ? "first" : firstLabelField.getText();
String stext = secondLabelField.getText().equals("<Enter text>") ? "second" : secondLabelField.getText();
firstLabel.setText("Label as: " + (ftext));
secondLabel.setText("Label as: " + stext);
}
public Set<Tweet> loadLabelledFile() {
int returnVal = fc.showOpenDialog(this);
if (returnVal == JFileChooser.APPROVE_OPTION) {
File file = fc.getSelectedFile();
String line = "";
try {
BufferedReader br = new BufferedReader(new FileReader(file));
Set<Tweet> tweets = new HashSet<Tweet>();
while ((line = br.readLine()) != null) {
Tweet n = makeLabelledTweet(line);
//Tweet n = makeTweet(line);
if (n != null) {
tweets.add(n);
}
}
br.close();
return tweets;
} catch (IOException e) {
JOptionPane.showMessageDialog(this, "Unable to open file", "IO Error",
JOptionPane.ERROR_MESSAGE);
}
}
return null;
}
public void loadUnLabeledFile() {
int returnVal = fc.showOpenDialog(this);
if (returnVal == JFileChooser.APPROVE_OPTION) {
File file = fc.getSelectedFile();
String line = "";
int failedTweets = 0;
try {
BufferedReader br = new BufferedReader(new FileReader(file));
tweets = new LinkedList<Tweet>();
while ((line = br.readLine()) != null) {
Tweet n = makeTweet(line);
if (n != null) {
tweets.add(n);
}
else {
++failedTweets;
}
}
br.close();
JOptionPane.showMessageDialog(this, "Successfully read: " + tweets.size() +
((tweets.size() != 1) ? " Tweets. " : " Tweet. ") + "Failed to read " + failedTweets +
((failedTweets != 1) ? " Tweets." : " Tweet"), "IO Error",
JOptionPane.INFORMATION_MESSAGE);
tweetIterator = tweets.listIterator();
totalTweets = tweets.size();
curTweet = 1;
} catch (IOException e) {
JOptionPane.showMessageDialog(this, "Unable to open file", "IO Error",
JOptionPane.ERROR_MESSAGE);
}
}
}
private Tweet makeTweet(String tweetText) {
try {
Tweet newTweet = new Tweet(tweetText);
return newTweet;
}
catch (NumberFormatException e) {
return null;
}
}
private Tweet makeLabelledTweet(String tweetText) {
int firstSpace = tweetText.indexOf(" ");
if (firstSpace == -1) {
return null;
}
String label = tweetText.substring(0, firstSpace);
Tweet t = makeTweet(tweetText.substring(firstSpace + 1));
if (t != null) {
t.label = label;
}
return t;
}
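// Expected line formats (derived from Tweet(String) below and Tweet.toString()):
//   unlabelled input:  <id> <tweet text...>          e.g. 12345 just setting up my twttr
//   labelled output:   <label> <id> <tweet text...>  e.g. positive 12345 just setting up my twttr
// The example id, text and label shown above are hypothetical.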
private class Tweet {
public long id;
public String text;
public String label;
public Tweet(long id, String text) {
this.id = id;
this.text = text;
this.label = "";
}
public Tweet(String tweet) {
StringTokenizer st = new StringTokenizer(tweet);
id = Long.parseLong(st.nextToken());
StringBuilder sb = new StringBuilder();
while (st.hasMoreTokens()) {
sb.append(st.nextToken());
sb.append(" ");
}
text = sb.toString();
}
@Override
public String toString() {
return label.toLowerCase() + " " + id + " " + text;
}
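// hashCode() below mixes the id with the whitespace-separated tokens of the text, and
// tweetEquals() compares token by token, so tweets differing only in label or spacing
// hash identically. equals(Object) is not overridden, so HashSet membership and
// removeAll() still fall back to reference identity.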
@Override
public int hashCode() {
long hash = 7;
hash = hash * 31 + id;
//hash = hash * 31 + text.hashCode();
for (String s: text.split(" ")) {
hash = hash * 31 + s.hashCode();
}
return (int) hash;
}
public boolean tweetEquals(Tweet other) {
if (this.id == other.id) {
StringTokenizer me = new StringTokenizer(text);
StringTokenizer them = new StringTokenizer(other.text);
while (me.hasMoreTokens() && them.hasMoreElements()) {
if (!me.nextToken().equals(them.nextToken())) {
return false;
}
}
return true;
}
else {
return false;
}
}
}
private class TweetFilter extends FileFilter {
@Override
public boolean accept(File f) {
String name = f.getName().toLowerCase();
return f.isDirectory() || name.endsWith(".txt") || name.endsWith(".dan")
|| name.endsWith(".brandon") || name.endsWith(".curtis");
}
@Override
public String getDescription() {
return "Allows files with txt, dan, brandon, or curtis extensions";
}
}
}
|
package ti.modules.titanium.bump;
import org.appcelerator.titanium.TiContext;
import org.appcelerator.titanium.TiDict;
import org.appcelerator.titanium.TiModule;
import org.appcelerator.titanium.util.Log;
import org.appcelerator.titanium.util.TiActivityResultHandler;
import org.appcelerator.titanium.util.TiActivitySupport;
import org.appcelerator.titanium.util.TiConvert;
import android.app.Activity;
import android.content.Intent;
import android.os.Handler;
import com.bumptech.bumpapi.BumpAPI;
import com.bumptech.bumpapi.BumpAPIListener;
import com.bumptech.bumpapi.BumpConnectFailedReason;
import com.bumptech.bumpapi.BumpConnection;
import com.bumptech.bumpapi.BumpDisconnectReason;
import com.bumptech.bumpapi.BumpResources;
public class BumpModule extends TiModule implements TiActivityResultHandler, BumpAPIListener {
private static final String LCAT = "BumpModule";
private static final boolean DBG = true;
private BumpConnection conn;
private String apiKey = null;
private String username = null;
private String bumpMessage = null;
private final Handler baseHandler = new Handler();
public BumpModule(TiContext context) {
super(context);
// Setup ourselves as the listener for the result of the Activity
}
public void sendMessage(String message) {
if (null != this.conn) {
try {
byte[] chunk = message.getBytes("UTF-8");
this.conn.send(chunk);
} catch (Exception e) {
Log.e(LCAT, "Error Sending data to other party. "+e.getMessage());
}
} else {
TiDict eventArgs = new TiDict();
eventArgs.put("message", "Not Connected");
this.fireEvent("error", eventArgs);
Log.i(LCAT, "Not connected");
}
}
protected void connectBump() {
Activity activity = getTiContext().getTiApp().getCurrentActivity();
TiActivitySupport activitySupport = (TiActivitySupport) activity;
final int resultCode = activitySupport.getUniqueResultCode();
try {
// Work around for the way they implement resource management
BumpResources bp = new BumpResources(this.getTiContext());
if (DBG) {
Log.d(LCAT, "Bump Connect Called - setting up Intent");
}
Intent bump = new Intent(activity, BumpAPI.class);
bump.putExtra(BumpAPI.EXTRA_API_KEY, apiKey);
// Set some extra args if they are defined
if (null != username) {
Log.d(LCAT, "Setting Bump Username: "+username);
bump.putExtra(BumpAPI.EXTRA_USER_NAME, username);
}
if (null != bumpMessage) {
Log.d(LCAT, "Setting Bump message: "+bumpMessage);
bump.putExtra(BumpAPI.EXTRA_ACTION_MSG, bumpMessage);
}
activitySupport.launchActivityForResult(bump, resultCode, this);
if (DBG) {
Log.d(LCAT, "Launched Bump Activity");
}
// Bubble up the event
TiDict eventData = new TiDict();
this.fireEvent("ready", eventData);
} catch (Exception e) {
Log.e(LCAT, "--- Exception: "+e.toString());
}
}
public void connect(TiDict props) {
// Process the args to the method
if (props.containsKey("apikey")) {
apiKey = TiConvert.toString(props.getString("apikey"));
} else {
Log.e(LCAT, "Invalid argument - apikey is required");
}
if (props.containsKey("username")) {
username = TiConvert.toString(props.getString("username"));
}
if (props.containsKey("message")) {
bumpMessage = TiConvert.toString(props.getString("message"));
}
// A little extra debugging
if (DBG) {
Log.d(LCAT, "Bump Connect arguments:");
Log.d(LCAT, "apikey: "+apiKey);
if (null != username) {
Log.d(LCAT, "username: "+username);
} else {
Log.d(LCAT, "username not passed");
}
if (null != bumpMessage) {
Log.d(LCAT, "message: "+bumpMessage);
} else {
Log.d(LCAT, "No bump message passed");
}
}
// Call the master connect
this.connectBump();
}
@Override
public void onResult(Activity activity, int requestCode, int resultCode, Intent data) {
if (DBG) {
Log.d(LCAT, "Activity onResult with Result: "+resultCode);
}
if (resultCode == Activity.RESULT_OK) {
// Bump connected successfully, set its listener
try {
this.conn = (BumpConnection) data.getParcelableExtra(BumpAPI.EXTRA_CONNECTION);
conn.setListener(this, baseHandler);
// Fan out the event to the app
TiDict eventData = new TiDict();
eventData.put("username", conn.getOtherUserName());
this.fireEvent("connected", eventData);
if (DBG) {
Log.i(LCAT, "
}
} catch (Exception e) {
Log.e(LCAT, "
}
} else {
// Failed to connect, obtain the reason
if (DBG) {
Log.d(LCAT, "onConnect Fail");
}
try {
BumpConnectFailedReason reason = (BumpConnectFailedReason) data.getSerializableExtra(BumpAPI.EXTRA_REASON);
TiDict eventData = new TiDict();
eventData.put("message", reason.toString());
if (reason == BumpConnectFailedReason.FAIL_USER_CANCELED) {
this.fireEvent("cancel", eventData);
} else {
// Notify the app about the failure
this.fireEvent("error", eventData);
}
Log.e(LCAT, "
} catch (Exception e) {
// TODO: handle exception
Log.e(LCAT, "
}
}
}
@Override
public void onStop() {
if (conn != null) {
conn.disconnect();
conn = null;
}
super.onStop();
if (DBG) {
Log.i(LCAT, "--- onStop ");
}
}
@Override
public void onResume() {
super.onResume();
if (DBG) {
Log.i(LCAT, "--- onResume ");
}
}
@Override
public void onPause() {
//super.onPause();
if (DBG) {
Log.i(LCAT, "--- onPause ");
}
}
@Override
public void onError(Activity activity, int requestCode, Exception e) {
if (DBG) {
Log.e(LCAT, "--- onError "+e.getMessage());
}
}
@Override
public void bumpDataReceived(byte[] chunk) {
try {
String data = new String(chunk, "UTF-8");
if (DBG) {
Log.d(LCAT,"Received Data from other party: "+data);
}
if (DBG) {
dataReceived(conn.getOtherUserName() + " said: " + data);
} else {
dataReceived(data);
}
} catch (Exception e) {
Log.e(LCAT, "Failed to parse incoming data");
}
}
@Override
public void bumpDisconnect(BumpDisconnectReason reason) {
String disconnectDueTo = null;
switch (reason) {
case END_OTHER_USER_QUIT:
disconnectDueTo = "END_OTHER_USER_QUIT";
if (DBG) {
dataReceived("
}
break;
case END_OTHER_USER_LOST:
disconnectDueTo = "END_OTHER_USER_LOST";
if (DBG) {
dataReceived("
}
break;
default:
disconnectDueTo = "UNKNOWN";
break;
}
// Float the event to the app
TiDict eventData = new TiDict();
eventData.put("message", disconnectDueTo);
this.fireEvent("disconnect", eventData);
}
public String dataReceived(String data) {
// Float up the event to the app
TiDict eventData = new TiDict();
eventData.put("data", data);
this.fireEvent("data",eventData);
Log.e(LCAT, "Data: "+data);
return data;
}
}
|
package org.jetel.graph;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.Future;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.commons.io.filefilter.AbstractFileFilter;
import org.apache.commons.io.filefilter.IOFileFilter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jetel.graph.runtime.EngineInitializer;
import org.jetel.graph.runtime.GraphRuntimeContext;
import org.jetel.main.runGraph;
import org.jetel.test.CloverTestCase;
import org.jetel.util.file.FileUtils;
import org.jetel.util.string.StringUtils;
public class ResetTest extends CloverTestCase {
private final static String SCENARIOS_RELATIVE_PATH = "../cloveretl.test.scenarios/";
private final static String[] EXAMPLE_PATH = {
"../cloveretl.examples/SimpleExamples/",
"../cloveretl.examples/AdvancedExamples/",
"../cloveretl.examples/CTL1FunctionsTutorial/",
"../cloveretl.examples/CTL2FunctionsTutorial/",
"../cloveretl.examples/DataProfiling/",
"../cloveretl.examples/DataSampling/",
"../cloveretl.examples/ExtExamples/",
"../cloveretl.examples/RealWorldExamples/",
"../cloveretl.examples.community/RealWorldExamples/",
"../cloveretl.examples/WebSiteExamples/",
"../cloveretl.examples.community/WebSiteExamples/",
"../cloveretl.test.scenarios/",
"../cloveretl.examples.commercial/",
"../cloveretl.examples/CompanyTransactionsTutorial/"
};
private final static String[] NEEDS_SCENARIOS_CONNECTION = {
"graphRevenues.grf",
"graphDBExecuteMsSql.grf",
"graphDBExecuteMySql.grf",
"graphDBExecuteOracle.grf",
"graphDBExecutePostgre.grf",
"graphDBExecuteSybase.grf",
"graphInfobrightDataWriterRemote.grf",
"graphLdapReaderWriter.grf"
};
private final static String[] NEEDS_SCENARIOS_LIB = {
"graphDBExecuteOracle.grf",
"graphDBExecuteSybase.grf",
"graphLdapReaderWriter.grf"
};
private final static String GRAPHS_DIR = "graph";
private final static String TRANS_DIR = "trans";
private final static String[] OUT_DIRS = {"data-out/", "data-tmp/", "seq/"};
private final String basePath;
private final File graphFile;
private final boolean batchMode;
private boolean cleanUp = true;
private static Log logger = LogFactory.getLog(ResetTest.class);
public static Test suite() {
final TestSuite suite = new TestSuite();
for (int i = 0; i < EXAMPLE_PATH.length; i++) {
logger.info("Testing graphs in " + EXAMPLE_PATH[i]);
final File graphsDir = new File(EXAMPLE_PATH[i], GRAPHS_DIR);
if(!graphsDir.exists()){
throw new IllegalStateException("Graphs directory " + graphsDir.getAbsolutePath() +" not found");
}
IOFileFilter fileFilter = new AbstractFileFilter() {
@Override
public boolean accept(File file) {
return file.getName().endsWith(".grf")
&& !file.getName().startsWith("TPCH")// ok, performance tests - last very long
&& !file.getName().contains("Performance")// ok, performance tests - last very long
&& !file.getName().equals("graphJoinData.grf") // ok, uses class file that is not created
&& !file.getName().equals("graphJoinHash.grf") // ok, uses class file that is not created
&& !file.getName().equals("graphOrdersReformat.grf") // ok, uses class file that is not created
&& !file.getName().equals("graphDataGeneratorExt.grf") // ok, uses class file that is not created
&& !file.getName().equals("graphApproximativeJoin.grf") // ok, uses class file that is not created
&& !file.getName().equals("graphDBJoin.grf") // ok, uses class file that is not created
&& !file.getName().equals("conversionNum2num.grf") // ok, should fail
&& !file.getName().equals("outPortWriting.grf") // ok, should fail
&& !file.getName().equals("graphDb2Load.grf") // ok, can only work with db2 client
&& !file.getName().equals("graphMsSqlDataWriter.grf") // ok, can only work with MsSql client
&& !file.getName().equals("graphMysqlDataWriter.grf") // ok, can only work with MySql client
&& !file.getName().equals("graphOracleDataWriter.grf") // ok, can only work with Oracle client
&& !file.getName().equals("graphPostgreSqlDataWriter.grf") // ok, can only work with postgre client
&& !file.getName().equals("graphInformixDataWriter.grf") // ok, can only work with informix server
&& !file.getName().equals("graphInfobrightDataWriter.grf") // ok, can only work with infobright server
&& !file.getName().equals("graphSystemExecuteWin.grf") // ok, graph for Windows
&& !file.getName().equals("graphLdapReader_Uninett.grf") // ok, invalid server
&& !file.getName().equals("graphSequenceChecker.grf") // ok, is to fail
&& !file.getName().equals("FixedData.grf") // ok, is to fail
&& !file.getName().equals("xpathReaderStates.grf") // ok, is to fail
&& !file.getName().equals("graphDataPolicy.grf") // ok, is to fail
&& !file.getName().equals("conversionDecimal2integer.grf") // ok, is to fail
&& !file.getName().equals("conversionDecimal2long.grf") // ok, is to fail
&& !file.getName().equals("conversionDouble2integer.grf") // ok, is to fail
&& !file.getName().equals("conversionDouble2long.grf") // ok, is to fail
&& !file.getName().equals("conversionLong2integer.grf") // ok, is to fail
&& !file.getName().equals("nativeSortTestGraph.grf") // ok, invalid paths
&& !file.getName().equals("mountainsInformix.grf") // see issue 2550
&& !file.getName().equals("SystemExecuteWin_EchoFromFile.grf") // graph for windows
&& !file.getName().equals("XLSEncryptedFail.grf") // ok, is to fail
&& !file.getName().equals("XLSXEncryptedFail.grf") // ok, is to fail
&& !file.getName().equals("XLSInvalidFile.grf") // ok, is to fail
&& !file.getName().equals("XLSReaderOrderMappingFail.grf") // ok, is to fail
&& !file.getName().equals("XLSXReaderOrderMappingFail.grf") // ok, is to fail
&& !file.getName().equals("XLSWildcardStrict.grf") // ok, is to fail
&& !file.getName().equals("XLSXWildcardStrict.grf") // ok, is to fail
&& !file.getName().equals("XLSWildcardControlled1.grf") // ok, is to fail
&& !file.getName().equals("XLSXWildcardControlled1.grf") // ok, is to fail
&& !file.getName().equals("XLSWildcardControlled7.grf") // ok, is to fail
&& !file.getName().equals("XLSXWildcardControlled7.grf") // ok, is to fail
&& !file.getName().equals("SSWRITER_MultilineInsertIntoTemplate.grf") // uses graph parameter definition from after-commit.ts
&& !file.getName().equals("SSWRITER_FormatInMetadata.grf") // uses graph parameter definition from after-commit.ts
&& !file.getName().equals("WSC_NamespaceBindingsDefined.grf") // ok, is to fail
&& !file.getName().equals("FailingGraph.grf") // ok, is to fail
&& !file.getName().equals("RunGraph_FailWhenUnderlyingGraphFails.grf") // probably should fail, recheck after added to after-commit.ts
&& !file.getName().equals("DataIntersection_order_check_A.grf") // ok, is to fail
&& !file.getName().equals("DataIntersection_order_check_B.grf") // ok, is to fail
&& !file.getName().equals("UDR_Logging_SFTP_CL1469.grf") // ok, is to fail
&& !file.getName().startsWith("AddressDoctor") //wrong path to db file, try to fix when AD installed on jenkins machines
&& !file.getName().equals("EmailReader_Local.grf") // remove after CL-2167 solved
&& !file.getName().equals("EmailReader_Server.grf") // remove after CLD-3437 solved (or mail.javlin.eu has valid certificate)
&& !file.getName().contains("firebird") // remove after CL-2170 solved
&& !file.getName().startsWith("ListOfRecords_Functions_02_") // remove after CL-2173 solved
&& !file.getName().equals("UDR_FileURL_OneZipMultipleFilesUnspecified.grf") // remove after CL-2174 solved
&& !file.getName().equals("UDR_FileURL_OneZipOneFileUnspecified.grf") // remove after CL-2174 solved
&& !file.getName().startsWith("MapOfRecords_Functions_01_Compiled_") // remove after CL-2175 solved
&& !file.getName().startsWith("MapOfRecords_Functions_01_Interpreted_") // remove after CL-2176 solved
&& !file.getName().equals("manyRecords.grf") // remove after CL-1292 implemented
&& !file.getName().equals("packedDecimal.grf") // remove after CL-1811 solved
&& !file.getName().equals("SimpleZipWrite.grf") // used by ArchiveFlushTest.java, doesn't make sense to run it separately
&& !file.getName().equals("XMLExtract_TKLK_003_Back.grf") // needs output from XMLWriter_LKTW_003.grf
&& !file.getName().equals("SQLDataParser_precision_CL2187.grf") // ok, is to fail
&& !file.getName().equals("incrementalReadingDB_explicitMapping.grf") // remove after CL-2239 solved
&& !file.getName().equals("HTTPConnector_get_bodyparams.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_get_error_unknownhost.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_get_error_unknownprotocol.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_get_inputfield.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_get_inputfileURL.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_get_requestcontent.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_post_error_unknownhost.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_post_error_unknownprotocol.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_inputmapping_null_values.grf") // ok, is to fail
&& !file.getName().equals("HttpConnector_errHandlingNoRedir.grf") // ok, is to fail
&& !file.getName().equals("XMLExtract_fileURL_not_xml.grf") // ok, is to fail
&& !file.getName().equals("XMLExtract_charset_invalid.grf") // ok, is to fail
&& !file.getName().equals("XMLExtract_mappingURL_missing.grf") // ok, is to fail
&& !file.getName().equals("XMLExtract_fileURL_not_exists.grf") // ok, is to fail
&& !file.getName().equals("XMLExtract_charset_not_default_fail.grf") // ok, is to fail
&& !file.getName().equals("RunGraph_differentOutputMetadataFail.grf") // ok, is to fail
&& !file.getName().equals("LUTPersistent_wrong_metadata.grf") // ok, is to fail
&& !file.getName().equals("UDW_nonExistingDir_fail_CL-2478.grf") // ok, is to fail
&& !file.getName().equals("CTL_lookup_put_fail.grf") // ok, is to fail
&& !file.getName().equals("SystemExecute_printBatchFile.grf") // ok, is to fail
&& !file.getName().equals("JoinMergeIssue_FailWhenMasterUnsorted.grf") // ok, is to fail
&& !file.getName().equals("UDW_remoteZipPartitioning_fail_CL-2564.grf") // ok, is to fail
&& !file.getName().equals("checkConfigTest.grf") // ok, is to fail
&& !file.getName().equals("DebuggingGraph.grf") // ok, is to fail
&& !file.getName().equals("CompanyChecks.grf") // an example that needs embedded derby
&& !file.getName().equals("DatabaseAccess.grf") // an example that needs embedded derby
&& !file.getName().equals("graphDatabaseAccess.grf") // an example that needs embedded derby
&& !file.getName().startsWith("Proxy_") // allowed to run only on virt-cyan as proxy tests
&& !file.getName().equals("SandboxOperationHandlerTest.grf") // runs only on server
&& !file.getName().equals("DenormalizerWithoutInputFile.grf") // probably subgraph not supposed to be executed separately
&& !file.getName().equals("SimpleSequence_longValue.grf") // needs the sequence to be reset on start
&& !file.getName().equals("DBLookupTable_negativeResponse_noCache.grf") // remove after CLO-715 solved
&& !file.getName().equals("BeanWriterReader_employees.grf") // remove after CL-2474 solved
&& !file.getName().equals("EmptyGraph.grf"); // ok, is to fail
}
};
IOFileFilter dirFilter = new AbstractFileFilter() {
@Override
public boolean accept(File file) {
return file.isDirectory() && file.getName().equals("hadoop");
}
};
@SuppressWarnings("unchecked")
Collection<File> filesCollection = org.apache.commons.io.FileUtils.listFiles(graphsDir, fileFilter, dirFilter);
File[] graphFiles = filesCollection.toArray(new File[0]);
Arrays.sort(graphFiles);
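// Each graph is scheduled twice: once in non-batch mode and once in batch mode.
// The cleanup flag is passed only with the batch run of the last graph in the directory.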
for(int j = 0; j < graphFiles.length; j++){
suite.addTest(new ResetTest(EXAMPLE_PATH[i], graphFiles[j], false, false));
suite.addTest(new ResetTest(EXAMPLE_PATH[i], graphFiles[j], true, j == graphFiles.length - 1));
}
}
return suite;
}
@Override
protected void setUp() throws Exception {
super.setUp();
initEngine();
}
protected static String getTestName(String basePath, File graphFile, boolean batchMode) {
final StringBuilder ret = new StringBuilder();
final String n = graphFile.getName();
int lastDot = n.lastIndexOf('.');
if (lastDot == -1) {
ret.append(n);
} else {
ret.append(n.substring(0, lastDot));
}
if (batchMode) {
ret.append("-batch");
} else {
ret.append("-nobatch");
}
return ret.toString();
}
protected ResetTest(String basePath, File graphFile, boolean batchMode, boolean cleanup) {
super(getTestName(basePath, graphFile, batchMode));
this.basePath = basePath;
this.graphFile = graphFile;
this.batchMode = batchMode;
this.cleanUp = cleanup;
}
@Override
protected void runTest() throws Throwable {
final String baseAbsolutePath = new File(basePath).getAbsolutePath().replace('\\', '/');
logger.info("Project dir: " + baseAbsolutePath);
logger.info("Analyzing graph " + graphFile.getPath());
logger.info("Batch mode: " + batchMode);
final GraphRuntimeContext runtimeContext = new GraphRuntimeContext();
runtimeContext.setUseJMX(false);
runtimeContext.setContextURL(FileUtils.getFileURL(FileUtils.appendSlash(baseAbsolutePath))); // context URL should be absolute
// absolute path in PROJECT parameter is required for graphs using Derby database
runtimeContext.addAdditionalProperty("PROJECT", baseAbsolutePath);
if (StringUtils.findString(graphFile.getName(), NEEDS_SCENARIOS_CONNECTION) != -1) {
final String connDir = new File(SCENARIOS_RELATIVE_PATH + "conn").getAbsolutePath();
runtimeContext.addAdditionalProperty("CONN_DIR", connDir);
logger.info("CONN_DIR set to " + connDir);
}
if (StringUtils.findString(graphFile.getName(), NEEDS_SCENARIOS_LIB) != -1) {// set LIB_DIR to jdbc drivers directory
final String libDir = new File(SCENARIOS_RELATIVE_PATH + "lib").getAbsolutePath();
runtimeContext.addAdditionalProperty("LIB_DIR", libDir);
logger.info("LIB_DIR set to " + libDir);
}
// for scenarios graphs, add the TRANS dir to the classpath
if (basePath.contains("cloveretl.test.scenarios")) {
runtimeContext.setRuntimeClassPath(new URL[] {FileUtils.getFileURL(FileUtils.appendSlash(baseAbsolutePath) + TRANS_DIR + "/")});
runtimeContext.setCompileClassPath(runtimeContext.getRuntimeClassPath());
}
runtimeContext.setBatchMode(batchMode);
final TransformationGraph graph = TransformationGraphXMLReaderWriter.loadGraph(new FileInputStream(graphFile), runtimeContext);
try {
graph.setDebugMode(false);
EngineInitializer.initGraph(graph);
for (int i = 0; i < 3; i++) {
final Future<Result> futureResult = runGraph.executeGraph(graph, runtimeContext);
Result result = futureResult.get();
switch (result) {
case FINISHED_OK:
// everything O.K.
logger.info("Execution of graph successful !");
break;
case ABORTED:
// execution was ABORTED !!
logger.info("Execution of graph failed !");
fail("Execution of graph failed !");
break;
default:
logger.info("Execution of graph failed !");
fail("Execution of graph failed !");
}
}
} catch (Throwable e) {
throw new IllegalStateException("Error executing grap " + graphFile, e);
} finally {
if (cleanUp) {
cleanupData();
}
logger.info("Transformation graph is freeing.\n");
graph.free();
}
}
private void cleanupData() {
for (String outDir : OUT_DIRS) {
File outDirFile = new File(basePath, outDir);
File[] files = outDirFile.listFiles(new FileFilter() {
@Override
public boolean accept(File f) {
return f.isFile();
}
});
if (files == null) {
// Output directory may not exist for this example project; nothing to clean up.
continue;
}
for (int i = 0; i < files.length; i++) {
final boolean deleted = files[i].delete();
if (deleted) {
logger.info("Cleanup: deleted file " + files[i].getAbsolutePath());
} else {
logger.info("Cleanup: failed to delete file " + files[i].getAbsolutePath());
}
}
}
}
}
|
package net.fortuna.ical4j.model;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Iterator;
import java.util.TimeZone;
import junit.framework.TestCase;
import net.fortuna.ical4j.model.parameter.Value;
import net.fortuna.ical4j.model.property.DtEnd;
import net.fortuna.ical4j.model.property.DtStart;
import net.fortuna.ical4j.util.TimeZones;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* @author Ben Fortuna
*/
public class RecurTest extends TestCase {
private static Log log = LogFactory.getLog(RecurTest.class);
private TimeZone originalDefault;
/* (non-Javadoc)
* @see junit.framework.TestCase#setUp()
*/
protected void setUp() throws Exception {
originalDefault = TimeZone.getDefault();
}
/* (non-Javadoc)
* @see junit.framework.TestCase#tearDown()
*/
protected void tearDown() throws Exception {
TimeZone.setDefault(originalDefault);
}
public void testGetDates() {
Recur recur = new Recur(Recur.DAILY, 10);
recur.setInterval(2);
log.info(recur);
Calendar cal = Calendar.getInstance();
Date start = new Date(cal.getTime().getTime());
cal.add(Calendar.DAY_OF_WEEK_IN_MONTH, 10);
Date end = new Date(cal.getTime().getTime());
log.info(recur.getDates(start, end, Value.DATE_TIME));
recur.setUntil(new Date(cal.getTime().getTime()));
log.info(recur);
log.info(recur.getDates(start, end, Value.DATE_TIME));
recur.setFrequency(Recur.WEEKLY);
recur.getDayList().add(WeekDay.MO);
log.info(recur);
DateList dates = recur.getDates(start, end, Value.DATE);
log.info(dates);
assertTrue("Date list exceeds COUNT limit", dates.size() <= 10);
}
/**
* Test BYDAY rules.
*/
public void testGetDatesByDay() {
Recur recur = new Recur(Recur.DAILY, 10);
recur.setInterval(1);
recur.getDayList().add(WeekDay.MO);
recur.getDayList().add(WeekDay.TU);
recur.getDayList().add(WeekDay.WE);
recur.getDayList().add(WeekDay.TH);
recur.getDayList().add(WeekDay.FR);
log.info(recur);
Calendar cal = Calendar.getInstance();
Date start = new Date(cal.getTime().getTime());
cal.add(Calendar.DAY_OF_WEEK_IN_MONTH, 10);
Date end = new Date(cal.getTime().getTime());
DateList dates = recur.getDates(start, end, Value.DATE_TIME);
log.info(dates);
assertTrue("Date list exceeds COUNT limit", dates.size() <= 10);
}
/**
* Test BYDAY recurrence rules.
*/
public void testGetDatesByDay2() throws ParseException {
String rrule = "FREQ=MONTHLY;WKST=SU;INTERVAL=2;BYDAY=5TU";
Recur recur = new Recur(rrule);
Calendar cal = Calendar.getInstance();
cal.clear(Calendar.SECOND);
java.util.Date start = cal.getTime();
cal.add(Calendar.YEAR, 2);
java.util.Date end = cal.getTime();
DateList recurrences = recur.getDates(new Date(start), new Date(end), Value.DATE);
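// With BYDAY=5TU every recurrence should fall in the fifth week of its month,
// which the assertion below checks via Calendar.WEEK_OF_MONTH.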
for (Iterator i = recurrences.iterator(); i.hasNext();) {
Date recurrence = (Date) i.next();
cal.setTime(recurrence);
assertEquals(5, cal.get(Calendar.WEEK_OF_MONTH));
}
}
public void testGetDatesWithBase() {
/*
* Here is an example of evaluating multiple BYxxx rule parts.
*
* DTSTART;TZID=US-Eastern:19970105T083000
* RRULE:FREQ=YEARLY;INTERVAL=2;BYMONTH=1;BYDAY=SU;BYHOUR=8,9;
* BYMINUTE=30
*/
Calendar testCal = Calendar.getInstance();
testCal.set(Calendar.YEAR, 1997);
testCal.set(Calendar.MONTH, 1);
testCal.set(Calendar.DAY_OF_MONTH, 5);
testCal.set(Calendar.HOUR, 8);
testCal.set(Calendar.MINUTE, 30);
testCal.set(Calendar.SECOND, 0);
Recur recur = new Recur(Recur.YEARLY, -1);
recur.setInterval(2);
recur.getMonthList().add(new Integer(1));
recur.getDayList().add(WeekDay.SU);
recur.getHourList().add(new Integer(8));
recur.getHourList().add(new Integer(9));
recur.getMinuteList().add(new Integer(30));
Calendar cal = Calendar.getInstance();
Date start = new DateTime(cal.getTime().getTime());
cal.add(Calendar.YEAR, 2);
Date end = new DateTime(cal.getTime().getTime());
log.info(recur);
DateList dates = recur.getDates(new DateTime(testCal.getTime()), start, end, Value.DATE_TIME);
log.info(dates);
}
/*
public void testSublistNegative() {
List list = new LinkedList();
list.add("1");
list.add("2");
list.add("3");
assertSublistEquals(list, list, 0);
assertSublistEquals(asList("3"), list, -1);
assertSublistEquals(asList("2"), list, -2);
assertSublistEquals(asList("1"), list, -3);
assertSublistEquals(list, list, -4);
}
public void testSublistPositive() {
List list = new LinkedList();
list.add("1");
list.add("2");
list.add("3");
assertSublistEquals(list, list, 0);
assertSublistEquals(asList("1"), list, 1);
assertSublistEquals(asList("2"), list, 2);
assertSublistEquals(asList("3"), list, 3);
assertSublistEquals(list, list, 4);
}
private void assertSublistEquals(List expected, List list, int offset) {
List sublist = new LinkedList();
Recur.sublist(list, offset, sublist);
assertEquals(expected, sublist);
}
private List asList(Object o) {
List list = new LinkedList();
list.add(o);
return list;
}
public void testSetPosNegative() throws Exception {
Date[] dates = new Date[] { new Date(1), new Date(2), new Date(3) };
Date[] expected = new Date[] { new Date(3), new Date(2) };
assertSetPosApplied(expected, dates, "BYSETPOS=-1,-2");
}
public void testSetPosPositve() throws Exception {
Date[] dates = new Date[] { new Date(1), new Date(2), new Date(3) };
Date[] expected = new Date[] { new Date(2), new Date(3) };
assertSetPosApplied(expected, dates, "BYSETPOS=2,3");
}
public void testSetPosOutOfBounds() throws Exception {
Date[] dates = new Date[] { new Date(1) };
Date[] expected = new Date[] {};
assertSetPosApplied(expected, dates, "BYSETPOS=-2,2");
}
private void assertSetPosApplied(Date[] expected, Date[] dates, String rule)
throws Exception {
Recur recur = new Recur(rule);
DateList expectedList = asDateList(expected);
assertEquals(expectedList, recur.applySetPosRules(asDateList(dates)));
}
*/
private DateList asDateList(Date[] dates) {
DateList dateList = new DateList(Value.DATE);
dateList.addAll(Arrays.asList(dates));
return dateList;
}
/**
* This test creates a rule outside of the specified boundaries to
* confirm that the returned date list is empty.
* <pre>
* Weekly on Tuesday and Thursday for 5 weeks:
*
* DTSTART;TZID=US-Eastern:19970902T090000
* RRULE:FREQ=WEEKLY;UNTIL=19971007T000000Z;WKST=SU;BYDAY=TU,TH
* or
*
* RRULE:FREQ=WEEKLY;COUNT=10;WKST=SU;BYDAY=TU,TH
*
* ==> (1997 9:00 AM EDT)September 2,4,9,11,16,18,23,25,30;October 2
* </pre>
*/
public final void testBoundaryProcessing() {
Recur recur = new Recur(Recur.WEEKLY, 10);
recur.getDayList().add(WeekDay.TU);
recur.getDayList().add(WeekDay.TH);
log.info(recur);
Calendar cal = Calendar.getInstance();
cal.set(Calendar.YEAR, 1997);
cal.set(Calendar.MONTH, Calendar.SEPTEMBER);
cal.set(Calendar.DAY_OF_MONTH, 2);
cal.set(Calendar.HOUR_OF_DAY, 9);
cal.clear(Calendar.MINUTE);
cal.clear(Calendar.SECOND);
Date seed = new DateTime(cal.getTime());
cal = Calendar.getInstance();
Date start = new DateTime(cal.getTime());
cal.add(Calendar.YEAR, 2);
Date end = new DateTime(cal.getTime());
DateList dates = recur.getDates(seed, start, end, Value.DATE_TIME);
log.info(dates);
assertTrue(dates.isEmpty());
}
/**
* This test confirms SETPOS rules are working correctly.
* <pre>
* The BYSETPOS rule part specifies a COMMA character (US-ASCII decimal
* 44) separated list of values which corresponds to the nth occurrence
* within the set of events specified by the rule. Valid values are 1 to
* 366 or -366 to -1. It MUST only be used in conjunction with another
* BYxxx rule part. For example "the last work day of the month" could
* be represented as:
*
* RRULE:FREQ=MONTHLY;BYDAY=MO,TU,WE,TH,FR;BYSETPOS=-1
* </pre>
*/
public final void testSetPosProcessing() {
Recur recur = new Recur(Recur.MONTHLY, -1);
recur.getDayList().add(WeekDay.MO);
recur.getDayList().add(WeekDay.TU);
recur.getDayList().add(WeekDay.WE);
recur.getDayList().add(WeekDay.TH);
recur.getDayList().add(WeekDay.FR);
recur.getSetPosList().add(new Integer(-1));
log.info(recur);
Calendar cal = Calendar.getInstance();
Date start = new DateTime(cal.getTime());
cal.add(Calendar.YEAR, 2);
Date end = new DateTime(cal.getTime());
DateList dates = recur.getDates(start, end, Value.DATE_TIME);
log.info(dates);
}
public void testMgmill2001() {
Calendar cal = Calendar.getInstance();
cal.set(Calendar.DAY_OF_MONTH, 11);
cal.set(Calendar.MONTH, 0);
cal.set(Calendar.YEAR, 2005);
java.util.Date eventStart = cal.getTime();
cal.set(Calendar.DAY_OF_MONTH, 1);
java.util.Date rangeStart = cal.getTime();
cal.set(Calendar.YEAR, 2009);
java.util.Date rangeEnd = cal.getTime();
// FREQ=MONTHLY;INTERVAL=1;COUNT=4;BYMONTHDAY=2
Recur recur = new Recur(Recur.MONTHLY, 4);
recur.setInterval(1);
recur.getMonthDayList().add(new Integer(2));
getDates(rangeStart, rangeEnd, eventStart, recur);
// FREQ=MONTHLY;INTERVAL=2;COUNT=4;BYDAY=2MO
recur = new Recur(Recur.MONTHLY, 4);
recur.setInterval(2);
recur.getDayList().add(new WeekDay(WeekDay.MO, 2));
getDates(rangeStart, rangeEnd, eventStart, recur);
// FREQ=YEARLY;COUNT=4;BYMONTH=2;BYMONTHDAY=3
recur = new Recur(Recur.YEARLY, 4);
recur.getMonthList().add(new Integer(2));
recur.getMonthDayList().add(new Integer(3));
getDates(rangeStart, rangeEnd, eventStart, recur);
// FREQ=YEARLY;COUNT=4;BYMONTH=2;BYDAY=2SU
recur = new Recur(Recur.YEARLY, 4);
recur.getMonthList().add(new Integer(2));
recur.getDayList().add(new WeekDay(WeekDay.SU, 2));
getDates(rangeStart, rangeEnd, eventStart, recur);
}
private void getDates(java.util.Date startRange, java.util.Date endRange, java.util.Date eventStart, Recur recur) {
net.fortuna.ical4j.model.Date start = new net.fortuna.ical4j.model.Date(startRange);
net.fortuna.ical4j.model.Date end = new net.fortuna.ical4j.model.Date(endRange);
net.fortuna.ical4j.model.Date seed = new net.fortuna.ical4j.model.Date(eventStart);
DateList dates = recur.getDates(seed, start, end, Value.DATE);
for (int i=0; i<dates.size(); i++) {
System.out.println("date_" + i + " = " + dates.get(i).toString());
}
}
/**
* @throws ParseException
*/
public final void testRecurGetDates() throws ParseException {
Recur recur = new Recur("FREQ=WEEKLY;INTERVAL=1;BYDAY=SA");
Date start = new Date("20050101Z");
Date end = new Date("20060101Z");
DateList list = recur.getDates(start, end, null);
for (int i = 0; i < list.size(); i++) {
Date date = (Date) list.get(i);
// Calendar calendar = Dates.getCalendarInstance(date);
Calendar calendar = Calendar.getInstance(); //TimeZone.getTimeZone("Etc/UTC"));
calendar.setTime(date);
assertEquals(Calendar.SATURDAY, calendar.get(Calendar.DAY_OF_WEEK));
}
}
public void testGetDatesRalph() throws ParseException {
Recur recur = new Recur("FREQ=WEEKLY;WKST=MO;INTERVAL=1;UNTIL=20051003T000000Z;BYDAY=MO,WE");
Calendar queryStartDate = new GregorianCalendar(TimeZone.getTimeZone(TimeZones.UTC_ID));
queryStartDate.set(2005, Calendar.SEPTEMBER, 3, 0, 0, 0);
Calendar queryEndDate = new GregorianCalendar(TimeZone.getTimeZone(TimeZones.UTC_ID));
queryEndDate.set(2005, Calendar.OCTOBER, 31, 23, 59, 0);
DateList dateList = recur.getDates(new DateTime(queryStartDate.getTime()),
new DateTime(queryStartDate.getTime()), new DateTime(queryEndDate.getTime()), Value.DATE_TIME);
log.info(dateList);
}
/**
* Test ordering of returned dates.
* @throws ParseException
*/
public void testDateOrdering() throws ParseException {
String s1 = "FREQ=WEEKLY;COUNT=75;INTERVAL=2;BYDAY=SU,MO,TU;WKST=SU";
Recur rec = new Recur(s1);
Date d1 = new Date();
Calendar cal = Calendar.getInstance();
cal.add(Calendar.YEAR,1);
Date d2 = new Date(cal.getTimeInMillis());
DateList dl1 = rec.getDates(d1,d2, Value.DATE_TIME);
Date prev = null;
Date event = null;
for(int i=0; i<dl1.size(); i++) {
prev = event;
event = (Date) dl1.get(i);
log.info("Occurence "+i+" at "+event);
assertTrue(prev == null || !prev.after(event));
}
}
/**
* @throws ParseException
*/
public void testMonthByDay() throws ParseException {
String rrule = "FREQ=MONTHLY;UNTIL=20061220T000000;INTERVAL=1;BYDAY=3WE";
Recur recur = new Recur(rrule);
Calendar cal = Calendar.getInstance();
Date start = new Date(cal.getTime());
cal.add(Calendar.YEAR, 1);
DateList recurrences = recur.getDates(start, new Date(cal.getTime()), Value.DATE);
}
/**
* @throws ParseException
*/
public void testAlternateTimeZone() throws ParseException {
String rrule = "FREQ=WEEKLY;BYDAY=WE;BYHOUR=12;BYMINUTE=0";
Recur recur = new Recur(rrule);
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
TimeZoneRegistry tzreg = TimeZoneRegistryFactory.getInstance().createRegistry();
Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("America/Los_Angeles"));
cal.clear(Calendar.SECOND);
java.util.Date start = cal.getTime();
DtStart dtStart = new DtStart(new DateTime(start));
dtStart.setTimeZone(tzreg.getTimeZone("America/Los_Angeles"));
cal.add(Calendar.MONTH, 2);
java.util.Date end = cal.getTime();
DtEnd dtEnd = new DtEnd(new DateTime(end));
DateList recurrences = recur.getDates(dtStart.getDate(), dtEnd.getDate(), Value.DATE_TIME);
for (Iterator i = recurrences.iterator(); i.hasNext();) {
DateTime recurrence = (DateTime) i.next();
assertEquals(tzreg.getTimeZone("America/Los_Angeles"), recurrence.getTimeZone());
}
}
/**
* @throws ParseException
*/
public void testFriday13Recur() throws ParseException {
String rrule = "FREQ=MONTHLY;BYDAY=FR;BYMONTHDAY=13";
Recur recur = new Recur(rrule);
Calendar cal = Calendar.getInstance();
cal.clear(Calendar.SECOND);
cal.set(1997, 0, 1);
java.util.Date start = cal.getTime();
cal.set(2000, 0, 1);
java.util.Date end = cal.getTime();
DateList recurrences = recur.getDates(new Date(start), new Date(end), Value.DATE);
for (Iterator i = recurrences.iterator(); i.hasNext();) {
Date recurrence = (Date) i.next();
cal.setTime(recurrence);
assertEquals(13, cal.get(Calendar.DAY_OF_MONTH));
assertEquals(Calendar.FRIDAY, cal.get(Calendar.DAY_OF_WEEK));
}
}
}
|
package co.vandenham.telegram.botapi.types;
import com.google.gson.annotations.SerializedName;
public class Contact {
@SerializedName("phone_number")
private String phoneNumber;
@SerializedName("first_name")
private String firstName;
@SerializedName("last_name")
private String lastName;
@SerializedName("user_id")
private int userId;
/**
* @return Contact's phone number
*/
public String getPhoneNumber() {
return phoneNumber;
}
/**
* @return Contact's first name
*/
public String getFirstName() {
return firstName;
}
/**
* <i>Optional.</i>
*
* @return Contact's last name
*/
public String getLastName() {
return lastName;
}
/**
* <i>Optional.</i>
*
* @return Contact's user identifier in Telegram
*/
public int getUserId() {
return userId;
}
}
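// Hedged usage sketch (not part of the original API): shows how the @SerializedName
// mappings above behave when a Contact is deserialized with Gson. The JSON snippet,
// the helper class and its method name are hypothetical.
class ContactGsonExample {
static Contact fromJson() {
String json = "{\"phone_number\":\"+10000000000\",\"first_name\":\"Ada\",\"user_id\":42}";
// Gson maps the snake_case JSON keys onto the annotated fields of Contact.
return new com.google.gson.Gson().fromJson(json, Contact.class);
}
}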
|
package net.kyori.adventure.audience;
/* package */ final class EmptyAudience implements Audience {
/* package */ static final EmptyAudience INSTANCE = new EmptyAudience();
@Override
public boolean equals(Object obj) {
return this == obj;
}
@Override
public int hashCode() {
return 0;
}
@Override
public String toString() {
return "EmptyAudience";
}
}
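// Hedged usage sketch (not part of the original file): EmptyAudience backs the no-op
// audience. The accessor name Audience.empty() is assumed from the public Adventure API;
// if the factory method differs, substitute the actual one.
/* package */ final class EmptyAudienceUsageSketch {
static Audience noOpAudience() {
// A shared audience that silently ignores anything forwarded to it.
return Audience.empty();
}
}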
|
package org.asynchttpclient.providers;
import static org.asynchttpclient.util.MiscUtil.isNonEmpty;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.List;
import org.asynchttpclient.Cookie;
import org.asynchttpclient.FluentCaseInsensitiveStringsMap;
import org.asynchttpclient.HttpResponseBodyPart;
import org.asynchttpclient.HttpResponseHeaders;
import org.asynchttpclient.HttpResponseStatus;
import org.asynchttpclient.Response;
import org.asynchttpclient.util.AsyncHttpProviderUtils;
public abstract class ResponseBase implements Response {
protected final static String DEFAULT_CHARSET = "ISO-8859-1";
protected final List<HttpResponseBodyPart> bodyParts;
protected final HttpResponseHeaders headers;
protected final HttpResponseStatus status;
private List<Cookie> cookies;
protected ResponseBase(HttpResponseStatus status, HttpResponseHeaders headers, List<HttpResponseBodyPart> bodyParts) {
this.bodyParts = bodyParts;
this.headers = headers;
this.status = status;
}
protected abstract List<Cookie> buildCookies();
protected String calculateCharset(String charset) {
if (charset == null) {
String contentType = getContentType();
if (contentType != null)
charset = AsyncHttpProviderUtils.parseCharset(contentType); // parseCharset can return null
}
return charset != null ? charset : DEFAULT_CHARSET;
}
@Override
public final int getStatusCode() {
return status.getStatusCode();
}
@Override
public final String getStatusText() {
return status.getStatusText();
}
@Override
public final URI getUri() {
return status.getUri();
}
@Override
public final String getContentType() {
return headers != null ? getHeader("Content-Type") : null;
}
@Override
public final String getHeader(String name) {
return headers != null ? getHeaders().getFirstValue(name) : null;
}
@Override
public final List<String> getHeaders(String name) {
return headers != null ? getHeaders().get(name) : null;
}
@Override
public final FluentCaseInsensitiveStringsMap getHeaders() {
return headers != null ? headers.getHeaders() : new FluentCaseInsensitiveStringsMap();
}
@Override
public final boolean isRedirected() {
switch (status.getStatusCode()) {
case 301:
case 302:
case 303:
case 307:
case 308:
return true;
default:
return false;
}
}
@Override
public byte[] getResponseBodyAsBytes() throws IOException {
return AsyncHttpProviderUtils.contentToBytes(bodyParts);
}
public ByteBuffer getResponseBodyAsByteBuffer() throws IOException {
return ByteBuffer.wrap(getResponseBodyAsBytes());
}
@Override
public String getResponseBody() throws IOException {
return getResponseBody(DEFAULT_CHARSET);
}
public String getResponseBody(String charset) throws IOException {
return AsyncHttpProviderUtils.contentToString(bodyParts, calculateCharset(charset));
}
@Override
public InputStream getResponseBodyAsStream() throws IOException {
return AsyncHttpProviderUtils.contentAsStream(bodyParts);
}
@Override
public List<Cookie> getCookies() {
if (headers == null) {
return Collections.emptyList();
}
if (cookies == null) {
cookies = buildCookies();
}
return cookies;
}
@Override
public boolean hasResponseStatus() {
return status != null;
}
@Override
public boolean hasResponseHeaders() {
return headers != null && isNonEmpty(headers.getHeaders());
}
@Override
public boolean hasResponseBody() {
return isNonEmpty(bodyParts);
}
}
|
package org.geotools.data.ingres;
import java.io.IOException;
import org.geotools.data.jdbc.FilterToSQL;
import org.geotools.filter.FilterCapabilities;
import org.geotools.jdbc.JDBCDataStore;
import org.geotools.jdbc.PreparedFilterToSQL;
import org.geotools.jdbc.SQLDialect;
import org.opengis.filter.expression.Literal;
import org.opengis.filter.expression.PropertyName;
import org.opengis.filter.spatial.BBOX;
import org.opengis.filter.spatial.Beyond;
import org.opengis.filter.spatial.BinarySpatialOperator;
import org.opengis.filter.spatial.Contains;
import org.opengis.filter.spatial.Crosses;
import org.opengis.filter.spatial.DWithin;
import org.opengis.filter.spatial.Disjoint;
import org.opengis.filter.spatial.DistanceBufferOperator;
import org.opengis.filter.spatial.Equals;
import org.opengis.filter.spatial.Intersects;
import org.opengis.filter.spatial.Overlaps;
import org.opengis.filter.spatial.Touches;
import org.opengis.filter.spatial.Within;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.LinearRing;
/**
*
*
* @source $URL$
*/
public class IngresFilterToSQL extends PreparedFilterToSQL {
boolean looseBBOXEnabled;
public IngresFilterToSQL(IngresDialect dialect) {
super(dialect);
}
public boolean isLooseBBOXEnabled() {
return looseBBOXEnabled;
}
public void setLooseBBOXEnabled(boolean looseBBOXEnabled) {
this.looseBBOXEnabled = looseBBOXEnabled;
}
@Override
protected void visitLiteralGeometry(Literal expression) throws IOException {
// evaluate the literal and store it for later
Geometry geom = (Geometry) evaluateLiteral(expression, Geometry.class);
if ( geom instanceof LinearRing ) {
//ingres does not handle linear rings, convert to just a line string
geom = geom.getFactory().createLineString(((LinearRing) geom).getCoordinateSequence());
}
out.write("ST_GeomFromText('");
out.write(geom.toText());
if(currentSRID == null && currentGeometry != null) {
// if we don't know at all, use the srid of the geometry we're comparing against
// (much slower since that has to be extracted record by record as opposed to
// being a constant)
out.write("', ST_SRID(\"" + currentGeometry.getLocalName() + "\"))");
} else {
out.write("', " + currentSRID + ")");
}
}
@Override
protected FilterCapabilities createFilterCapabilities() {
FilterCapabilities caps = new FilterCapabilities();
caps.addAll(SQLDialect.BASE_DBMS_CAPABILITIES);
// adding the spatial filters support
// caps.addType(BBOX.class);
// caps.addType(Contains.class);
// caps.addType(Within.class);
caps.addType(Crosses.class);
caps.addType(Disjoint.class);
caps.addType(Equals.class);
caps.addType(Intersects.class);
caps.addType(Overlaps.class);
caps.addType(Touches.class);
caps.addType(Beyond.class);
return caps;
}
protected Object visitBinarySpatialOperator(BinarySpatialOperator filter,
PropertyName property, Literal geometry, boolean swapped,
Object extraData) {
try {
if (filter instanceof DistanceBufferOperator) {
visitDistanceSpatialOperator((DistanceBufferOperator) filter,
property, geometry, swapped, extraData);
} else {
visitComparisonSpatialOperator(filter, property, geometry,
swapped, extraData);
}
} catch (IOException e) {
throw new RuntimeException(IO_ERROR, e);
}
return extraData;
}
void visitDistanceSpatialOperator(DistanceBufferOperator filter,
PropertyName property, Literal geometry, boolean swapped,
Object extraData) throws IOException {
if ((filter instanceof DWithin && !swapped)
|| (filter instanceof Beyond && swapped)) {
out.write("ST_DWithin(");
property.accept(this, extraData);
out.write(",");
geometry.accept(this, extraData);
out.write(",");
out.write(Double.toString(filter.getDistance()));
out.write(")");
}
if ((filter instanceof DWithin && swapped)
|| (filter instanceof Beyond && !swapped)) {
out.write("ST_Distance(");
property.accept(this, extraData);
out.write(",");
geometry.accept(this, extraData);
out.write(") > ");
out.write(Double.toString(filter.getDistance()));
}
}
void visitComparisonSpatialOperator(BinarySpatialOperator filter,
PropertyName property, Literal geometry, boolean swapped, Object extraData)
throws IOException {
String closingParenthesis = ")";
// if(!(filter instanceof Disjoint)) {
// out.write("ST_Disjoint ");
if (filter instanceof Equals) {
out.write("ST_Equals");
} else if (filter instanceof Disjoint) {
out.write("NOT (ST_Intersects");
closingParenthesis += ")";
} else if (filter instanceof Intersects || filter instanceof BBOX) {
out.write("ST_Intersects");
} else if (filter instanceof Crosses) {
out.write("ST_Crosses");
} else if (filter instanceof Within) {
if(swapped)
out.write("ST_Within");
else
out.write("ST_Contains");
} else if (filter instanceof Contains) {
if(swapped)
out.write("ST_Contains");
else
out.write("ST_Within");
} else if (filter instanceof Overlaps) {
out.write("ST_Overlaps");
} else if (filter instanceof Touches) {
out.write("ST_Touches");
} else {
throw new RuntimeException("Unsupported filter type " + filter.getClass());
}
out.write("(");
property.accept(this, extraData);
out.write(", ");
geometry.accept(this, extraData);
if(currentSRID == null && currentGeometry != null) {
// if we don't know at all, use the srid of the geometry we're comparing against
// (much slower since that has to be extracted record by record as opposed to
// being a constant)
out.write(", ST_SRID(\"" + currentGeometry.getLocalName() + "\"))");
} else {
out.write(", " + currentSRID + ")");
}
out.write(closingParenthesis);
out.write(" = 1");
}
}
|
package org.nuxeo.drive.service.impl;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.nuxeo.drive.adapter.FileSystemItem;
import org.nuxeo.drive.service.FileSystemChangeFinder;
import org.nuxeo.drive.service.NuxeoDriveEvents;
import org.nuxeo.drive.service.NuxeoDriveManager;
import org.nuxeo.drive.service.SynchronizationRoots;
import org.nuxeo.drive.service.TooManyChangesException;
import org.nuxeo.ecm.core.api.ClientException;
import org.nuxeo.ecm.core.api.CoreSession;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.DocumentRef;
import org.nuxeo.ecm.core.api.DocumentSecurityException;
import org.nuxeo.ecm.core.api.IdRef;
import org.nuxeo.ecm.platform.audit.api.AuditReader;
import org.nuxeo.ecm.platform.audit.api.ExtendedInfo;
import org.nuxeo.ecm.platform.audit.api.LogEntry;
import org.nuxeo.runtime.api.Framework;
/**
* Implementation of {@link FileSystemChangeFinder} using the
* {@link AuditReader}.
*
* @author Antoine Taillefer
*/
public class AuditChangeFinder implements FileSystemChangeFinder {
private static final long serialVersionUID = 1963018967324857522L;
private static final Log log = LogFactory.getLog(AuditChangeFinder.class);
@Override
public List<FileSystemItemChange> getFileSystemChanges(CoreSession session,
Set<IdRef> lastActiveRootRefs, SynchronizationRoots activeRoots,
long lastSuccessfulSyncDate, long syncDate, int limit)
throws ClientException, TooManyChangesException {
String principalName = session.getPrincipal().getName();
List<FileSystemItemChange> changes = new ArrayList<FileSystemItemChange>();
// Note: lastActiveRootRefs is not used here. We could remove it from the public API
// and from the client as well, but it might be useful for optimizing future
// alternative implementations of the FileSystemChangeFinder component, so it is
// probably better to keep it in the public API for now.
// Find changes from the log under active roots or events that are
// linked to the un-registration or deletion of formerly synchronized
// roots
List<LogEntry> entries = queryAuditEntries(session, activeRoots,
lastSuccessfulSyncDate, syncDate, limit);
// First pass over the entries: check that there was no root
// registration / un-registration during that period.
for (LogEntry entry : entries) {
if (NuxeoDriveEvents.EVENT_CATEGORY.equals(entry.getCategory())) {
// This is a root registration event for the current user:
// the list of active roots has changed and the cache might
// need to be invalidated, so make sure the query is re-run
// against the actual active roots.
log.debug(String.format(
"Detected sync root change for user '%s' in audit log:"
+ " invalidating the root cache and refetching the changes.",
principalName));
NuxeoDriveManager driveManager = Framework.getLocalService(NuxeoDriveManager.class);
driveManager.invalidateSynchronizationRootsCache(principalName);
Map<String, SynchronizationRoots> synchronizationRoots = driveManager.getSynchronizationRoots(session.getPrincipal());
activeRoots = synchronizationRoots.get(session.getRepositoryName());
entries = queryAuditEntries(session, activeRoots,
lastSuccessfulSyncDate, syncDate, limit);
break;
}
}
if (entries.size() >= limit) {
throw new TooManyChangesException(
"Too many changes found in the audit logs.");
}
for (LogEntry entry : entries) {
ExtendedInfo fsIdInfo = entry.getExtendedInfos().get(
"fileSystemItemId");
if (fsIdInfo != null) {
// This document has been deleted; only the file system
// item id and name are known.
String fsId = fsIdInfo.getValue(String.class);
String fsName = entry.getExtendedInfos().get(
"fileSystemItemName").getValue(String.class);
FileSystemItemChange change = new FileSystemItemChange(
entry.getEventId(), entry.getEventDate().getTime(),
entry.getRepositoryId(), entry.getDocUUID(), fsId,
fsName);
changes.add(change);
} else {
DocumentRef docRef = new IdRef(entry.getDocUUID());
if (!session.exists(docRef)) {
// deleted documents are mapped to deleted file
// system items in a separate event: no need to try
// to propagate this event.
// TODO: find a consistent way to map ACL removals to
// file system deletion changes
continue;
}
DocumentModel doc = session.getDocument(docRef);
// TODO: check the facet, last root change and list of roots
// to have a special handling for the roots.
FileSystemItem fsItem = doc.getAdapter(FileSystemItem.class);
if (fsItem != null) {
FileSystemItemChange change = new FileSystemItemChange(
entry.getEventId(), entry.getEventDate().getTime(),
entry.getRepositoryId(), entry.getDocUUID(), fsItem);
changes.add(change);
}
// non-adaptable documents are ignored
}
}
return changes;
}
@SuppressWarnings("unchecked")
protected List<LogEntry> queryAuditEntries(CoreSession session,
SynchronizationRoots activeRoots, long lastSuccessfulSyncDate,
long syncDate, int limit) {
AuditReader auditService = Framework.getLocalService(AuditReader.class);
// Set fixed query parameters
Map<String, Object> params = new HashMap<String, Object>();
params.put("repositoryId", session.getRepositoryName());
// Build query and set dynamic parameters
StringBuilder auditQuerySb = new StringBuilder(
"from LogEntry log where ");
auditQuerySb.append("log.repositoryId = :repositoryId");
auditQuerySb.append(" and ");
auditQuerySb.append("(");
if (!activeRoots.paths.isEmpty()) {
// detect changes under the currently active roots for the
// current user
auditQuerySb.append("(");
auditQuerySb.append("log.category = 'eventDocumentCategory'");
auditQuerySb.append(" and (log.eventId = 'documentCreated' or log.eventId = 'documentModified' or log.eventId = 'documentMoved')");
auditQuerySb.append(" or ");
auditQuerySb.append("log.category = 'eventLifeCycleCategory'");
auditQuerySb.append(" and log.eventId = 'lifecycle_transition_event' and log.docLifeCycle != 'deleted' ");
auditQuerySb.append(") and (");
auditQuerySb.append(getCurrentRootFilteringClause(
activeRoots.paths, params));
auditQuerySb.append(") or ");
}
// detect any root (un-)registration changes for the roots previously
// seen by the current user
auditQuerySb.append("log.category = '");
auditQuerySb.append(NuxeoDriveEvents.EVENT_CATEGORY);
auditQuerySb.append("') and (");
auditQuerySb.append(getJPADateClause(lastSuccessfulSyncDate, syncDate,
params));
auditQuerySb.append(") order by log.repositoryId asc, log.eventDate desc");
String auditQuery = auditQuerySb.toString();
if (log.isDebugEnabled()) {
log.debug("Querying audit log: " + auditQuery + " with params: "
+ params);
}
List<LogEntry> entries = (List<LogEntry>) auditService.nativeQuery(
auditQuery, params, 1, limit);
// Post-filter the output to remove (un)registrations that are
// unrelated to the current user.
List<LogEntry> postFilteredEntries = new ArrayList<LogEntry>();
String principalName = session.getPrincipal().getName();
for (LogEntry entry : entries) {
ExtendedInfo impactedUserInfo = entry.getExtendedInfos().get(
"impactedUserName");
if (impactedUserInfo != null
&& !principalName.equals(impactedUserInfo.getValue(String.class))) {
// ignore events that only impact other users
continue;
}
if (log.isDebugEnabled()) {
log.debug(String.format("Change detected at %s: %s on %s", entry.getEventDate(),
entry.getEventId(), entry.getDocPath()));
}
postFilteredEntries.add(entry);
}
return postFilteredEntries;
}
protected String getCurrentRootFilteringClause(Set<String> rootPaths,
Map<String, Object> params) {
StringBuilder rootPathClause = new StringBuilder();
int rootPathCount = 0;
for (String rootPath : rootPaths) {
rootPathCount++;
String rootPathParam = "rootPath" + rootPathCount;
if (rootPathClause.length() > 0) {
rootPathClause.append(" or ");
}
rootPathClause.append(String.format("log.docPath like :%s",
rootPathParam));
params.put(rootPathParam, rootPath + '%');
}
return rootPathClause.toString();
}
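// Illustrative only: with two active roots the clause built above has the form
//   log.docPath like :rootPath1 or log.docPath like :rootPath2
// and the params map is filled with rootPath1 = "/default-domain/ws1%" and
// rootPath2 = "/default-domain/ws2%" (the trailing '%' is appended by the method;
// the example paths themselves are invented).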
protected String getJPADateClause(long lastSuccessfulSyncDate,
long syncDate, Map<String, Object> params) {
params.put("lastSuccessfulSyncDate", new Date(lastSuccessfulSyncDate));
params.put("syncDate", new Date(syncDate));
return "log.eventDate >= :lastSuccessfulSyncDate and log.eventDate < :syncDate";
}
protected boolean adaptDocument(FileSystemItemChange change,
CoreSession session, SynchronizationRoots synchronizationRoots)
throws ClientException {
IdRef ref = new IdRef(change.getDocUuid());
try {
DocumentModel doc = session.getDocument(ref);
// TODO: check the facet, last root change and list of roots to have
// a special handling for the roots.
FileSystemItem fsItem = doc.getAdapter(FileSystemItem.class);
if (fsItem == null) {
return false;
}
change.setFileSystemItem(fsItem);
return true;
} catch (DocumentSecurityException e) {
// This event matches a document that is not visible by the
// current user, skip it.
// TODO: figure out how to detect ACL removals and map them
// to file system deletion changes
return false;
}
}
}
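// Hedged illustration, not part of the class above: for a single active root, the query string
// assembled by queryAuditEntries has roughly this shape. The NuxeoDriveEvents.EVENT_CATEGORY
// value is substituted at runtime, and :repositoryId, :rootPath1, :lastSuccessfulSyncDate and
// :syncDate are bound through the params map:
//
//   from LogEntry log where log.repositoryId = :repositoryId and (
//     (log.category = 'eventDocumentCategory'
//        and (log.eventId = 'documentCreated' or log.eventId = 'documentModified'
//             or log.eventId = 'documentMoved')
//      or log.category = 'eventLifeCycleCategory'
//        and log.eventId = 'lifecycle_transition_event' and log.docLifeCycle != 'deleted')
//     and (log.docPath like :rootPath1)
//     or log.category = '<EVENT_CATEGORY>')
//   and (log.eventDate >= :lastSuccessfulSyncDate and log.eventDate < :syncDate)
//   order by log.repositoryId asc, log.eventDate desc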
|
package Frontend;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import Utils.Printer;
import VQVAD.VQVADPipeline;
import edu.cmu.sphinx.frontend.Data;
import edu.cmu.sphinx.frontend.DataEndSignal;
import edu.cmu.sphinx.frontend.DataProcessor;
import edu.cmu.sphinx.frontend.DoubleData;
import edu.cmu.sphinx.frontend.FrontEnd;
import edu.cmu.sphinx.frontend.endpoint.SpeechEndSignal;
import edu.cmu.sphinx.frontend.endpoint.SpeechMarker;
import edu.cmu.sphinx.frontend.endpoint.SpeechStartSignal;
import edu.cmu.sphinx.frontend.util.AudioFileDataSource;
public class VQVoiceActivityDetector extends AudioInputStream {
private final String TAG="VQVoiceActivityDetector";
private final FrontEnd frontend;
protected float sampleRate = 8000;
@Override
public AudioFormat getFormat() {
float sampleRate = this.sampleRate;
int sampleSizeInBits = 16;
int channels = 1;
boolean signed = true;
boolean bigEndian = false;
AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits,
channels, signed, bigEndian);
return format;
}
private int i = 0;
private boolean speechEnd = false;
boolean started = false;
/**
* Creates a new voice activity detector reading from an audio resource.
* @param path URL of the audio resource to read from
* @param sampleRate sample rate of the input audio in Hz
* @param AisName name of the audio source, e.g. LocalMicrophone or SocketMicrophone
* @throws MalformedURLException
*/
public VQVoiceActivityDetector(URL path, float sampleRate, String AisName) throws MalformedURLException {
super(null,new AudioFormat(sampleRate, 16, 1, true, false), 0);
this.sampleRate = sampleRate;
// audio source reading 3200 bytes per read
AudioFileDataSource audioDataSource = new AudioFileDataSource(3200, null);
audioDataSource.setAudioFile(path, AisName);
frontend = setupFrontend(audioDataSource);
}
public VQVoiceActivityDetector(AudioInputStream ais, String AisName) {
super(null,new AudioFormat(ais.getFormat().getSampleRate(), 16, 1, true, false), 0);
this.sampleRate = ais.getFormat().getSampleRate();
// audio source reading 3200 bytes per read
AudioFileDataSource audioDataSource = new AudioFileDataSource(3200, null);
audioDataSource.setInputStream(ais, AisName);
frontend = setupFrontend(audioDataSource);
}
public VQVoiceActivityDetector(AudioFileDataSource audioDataSource, String AisName) {
super(null,new AudioFormat(audioDataSource.getSampleRate(), 16, 1, true, false), 0);
frontend = setupFrontend(audioDataSource);
}
protected FrontEnd setupFrontend(AudioFileDataSource audioDataSource) {
ArrayList<DataProcessor> pipeline = new ArrayList<DataProcessor>();
// VQVAD pipeline
pipeline.add(new VQVADPipeline(audioDataSource));
// Mark speech start/end
pipeline.add(new SpeechMarker(200, 400, 100, 30, 100, 15.0));
return new FrontEnd(pipeline);
}
@Override
public int read(byte[] buf, int off, int len) {
Printer.printWithTimeF(TAG, "reading");
Data d=null;
System.out.println(buf.length);
//if (len == 0) return -1;
//if still in speech get data from frontend
if(!speechEnd)
{
d = frontend.getData();
} else {
speechEnd = false;
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
int framesize = -1;
Printer.logLevel = Printer.FINE;
//do this while data from frontend is not null
while (d != null) {
Printer.printWithTimeF(TAG, d.getClass().getName()+" "+i);
i++;
if (!started && d instanceof SpeechStartSignal) {
started = true;
}
//check if data is DoubleData which means audio data
if (started && d instanceof DoubleData) {
//convert frame data back to raw data
DoubleData dd = (DoubleData) d;
double[] values = dd.getValues();
if (framesize == -1)
framesize = values.length * 2;
for (double value : values) {
try {
short be = (short) value;
dos.writeByte(be & 0xFF);
dos.writeByte((be >> 8) & 0xFF);
} catch (IOException e) {
e.printStackTrace();
}
}
//read new data from frontend if frame size is not exceeded
if (baos.size() + framesize <= len) {
d = frontend.getData();
} else {
d = null;
}
} else if (started && d instanceof SpeechEndSignal) {
// stop pulling if the end of speech is reached
speechEnd = true;
break;
} else if (d instanceof DataEndSignal) {
speechEnd = true;
break;
} else {
// get more data from the frontend if the current data is neither audio data nor an end signal
d = frontend.getData();
}
}
// System.out.println("baos length: " + baos.size());
// System.out.println("offset: " + off + " buf length: " + len);
// write the converted data to the output buffer
System.arraycopy(baos.toByteArray(), 0, buf, off, baos.size());
return baos.size();
}
}
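// Hedged usage sketch, not part of the detector above: wraps an existing audio stream and drains
// the speech-only bytes it produces. The source stream, the name "demoSource" and the buffer size
// are invented for illustration; any AudioInputStream such as a LocalMicrophone or SocketMicrophone
// should work as input.
class VQVoiceActivityDetectorDemo {
static int drainSpeech(AudioInputStream source) {
VQVoiceActivityDetector vad = new VQVoiceActivityDetector(source, "demoSource");
// 3200 bytes matches the read size the detector uses internally
byte[] buffer = new byte[3200];
int total = 0;
int read;
while ((read = vad.read(buffer, 0, buffer.length)) > 0) {
// the speech frames in buffer[0..read) would be consumed here
total += read;
}
return total;
}
}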
|
package org.eclipse.mylar.internal.tasklist.ui.views;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IMenuListener;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.IToolBarManager;
import org.eclipse.jface.action.MenuManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.jface.viewers.IStructuredContentProvider;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.jface.viewers.ViewerSorter;
import org.eclipse.mylar.internal.tasklist.ui.actions.AddRepositoryAction;
import org.eclipse.mylar.internal.tasklist.ui.actions.DeleteTaskRepositoryAction;
import org.eclipse.mylar.internal.tasklist.ui.actions.EditRepositoryPropertiesAction;
import org.eclipse.mylar.provisional.tasklist.ITaskRepositoryListener;
import org.eclipse.mylar.provisional.tasklist.MylarTaskListPlugin;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.ui.IActionBars;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.IWorkbenchActionConstants;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.part.ViewPart;
/**
* @author Mik Kersten
*/
public class TaskRepositoriesView extends ViewPart {
public static final String ID = "org.eclipse.mylar.tasklist.repositories";
public static final String NAME = "Task Repositories View";
private TableViewer viewer;
private Action addRepositoryAction = new AddRepositoryAction();
private Action deleteRepositoryAction = new DeleteTaskRepositoryAction(this);
private Action repositoryPropertiesAction = new EditRepositoryPropertiesAction(this);
private final ITaskRepositoryListener REPOSITORY_LISTENER = new ITaskRepositoryListener() {
public void repositorySetUpdated() {
refresh();
}
};
class ViewContentProvider implements IStructuredContentProvider {
public void inputChanged(Viewer v, Object oldInput, Object newInput) {
}
public void dispose() {
}
public Object[] getElements(Object parent) {
return MylarTaskListPlugin.getRepositoryManager().getAllRepositories().toArray();
}
}
public TaskRepositoriesView() {
MylarTaskListPlugin.getRepositoryManager().addListener(REPOSITORY_LISTENER);
}
public static TaskRepositoriesView getFromActivePerspective() {
IWorkbenchPage activePage = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage();
if (activePage == null)
return null;
IViewPart view = activePage.findView(ID);
if (view instanceof TaskRepositoriesView)
return (TaskRepositoriesView) view;
return null;
}
public void createPartControl(Composite parent) {
viewer = new TableViewer(parent, SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL);
viewer.setContentProvider(new ViewContentProvider());
viewer.setLabelProvider(new TaskRepositoryLabelProvider());
viewer.setSorter(new ViewerSorter());
viewer.setInput(getViewSite());
hookContextMenu();
contributeToActionBars();
}
private void hookContextMenu() {
MenuManager menuMgr = new MenuManager("#PopupMenu");
menuMgr.setRemoveAllWhenShown(true);
menuMgr.addMenuListener(new IMenuListener() {
public void menuAboutToShow(IMenuManager manager) {
TaskRepositoriesView.this.fillContextMenu(manager);
}
});
Menu menu = menuMgr.createContextMenu(viewer.getControl());
viewer.getControl().setMenu(menu);
getSite().registerContextMenu(menuMgr, viewer);
}
private void contributeToActionBars() {
IActionBars bars = getViewSite().getActionBars();
fillLocalPullDown(bars.getMenuManager());
fillLocalToolBar(bars.getToolBarManager());
}
private void fillLocalPullDown(IMenuManager manager) {
manager.add(addRepositoryAction);
}
private void fillContextMenu(IMenuManager manager) {
manager.add(addRepositoryAction);
manager.add(deleteRepositoryAction);
manager.add(new Separator());
manager.add(repositoryPropertiesAction);
manager.add(new Separator(IWorkbenchActionConstants.MB_ADDITIONS));
}
private void fillLocalToolBar(IToolBarManager manager) {
manager.add(addRepositoryAction);
}
/**
* Passing the focus request to the viewer's control.
*/
public void setFocus() {
viewer.getControl().setFocus();
}
public void refresh() {
if (viewer != null && !viewer.getControl().isDisposed()) {
viewer.refresh();
}
}
public TableViewer getViewer() {
return viewer;
}
}
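// Hedged sketch, not part of the view above: how other UI code could locate the open view instance
// and ask it to refresh. Purely illustrative; the view normally refreshes itself through its
// ITaskRepositoryListener when the repository set changes.
class TaskRepositoriesViewRefreshSample {
static void refreshIfOpen() {
TaskRepositoriesView view = TaskRepositoriesView.getFromActivePerspective();
if (view != null) {
view.refresh();
}
}
}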
|
package org.eclipse.mylar.tasks.ui.wizards;
import java.nio.charset.Charset;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.eclipse.jface.layout.GridDataFactory;
import org.eclipse.jface.preference.StringFieldEditor;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.mylar.context.core.MylarStatusHandler;
import org.eclipse.mylar.tasks.core.AbstractRepositoryConnector;
import org.eclipse.mylar.tasks.core.TaskRepository;
import org.eclipse.mylar.tasks.ui.AbstractRepositoryConnectorUi;
import org.eclipse.mylar.tasks.ui.TasksUiPlugin;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.forms.events.ExpansionAdapter;
import org.eclipse.ui.forms.events.ExpansionEvent;
import org.eclipse.ui.forms.widgets.ExpandableComposite;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.Section;
/**
* @author Mik Kersten
* @author Rob Elves
*/
public abstract class AbstractRepositorySettingsPage extends WizardPage {
protected static final String LABEL_REPOSITORY_LABEL = "Label: ";
protected static final String LABEL_SERVER = "Server: ";
protected static final String LABEL_USER = "User ID: ";
protected static final String LABEL_PASSWORD = "Password: ";
protected static final String URL_PREFIX_HTTPS = "https:
protected static final String URL_PREFIX_HTTP = "http:
protected AbstractRepositoryConnector connector;
protected StringFieldEditor repositoryLabelEditor;
protected Combo serverUrlCombo;
private String serverVersion = TaskRepository.NO_VERSION_SPECIFIED;
protected StringFieldEditor repositoryUserNameEditor;
protected StringFieldEditor repositoryPasswordEditor;
protected StringFieldEditor httpAuthUserNameEditor;
protected StringFieldEditor httpAuthPasswordEditor;
protected StringFieldEditor proxyHostnameEditor;
protected StringFieldEditor proxyPortEditor;
protected StringFieldEditor proxyUserNameEditor;
protected StringFieldEditor proxyPasswordEditor;
protected TaskRepository repository;
private Button validateServerButton;
private Combo otherEncodingCombo;
private Button defaultEncoding;
// private Combo timeZonesCombo;
protected Button anonymousButton;
private String oldUsername;
private String oldPassword;
private String oldHttpAuthUserId;
private String oldHttpAuthPassword;
private boolean needsAnonymousLogin;
private boolean needsTimeZone;
private boolean needsEncoding;
private boolean needsHttpAuth;
private Composite container;
private Composite httpAuthComp;
private Composite proxyAuthComp;
private ExpandableComposite httpAuthExpComposite;
private ExpandableComposite proxyExpComposite;
private Set<String> repositoryUrls;
private String originalUrl;
private Button otherEncoding;
private Button httpAuthButton;
private boolean needsProxy;
private Button systemProxyButton;
private String oldProxyUsername = "";
private String oldProxyPassword = "";
// private Button proxyAuthButton;
private String oldProxyHostname = "";
private String oldProxyPort = "";
private Button proxyAuthButton;
private FormToolkit toolkit = new FormToolkit(Display.getCurrent());
public AbstractRepositorySettingsPage(String title, String description, AbstractRepositoryConnectorUi repositoryUi) {
super(title);
super.setTitle(title);
super.setDescription(description);
AbstractRepositoryConnector connector = TasksUiPlugin.getRepositoryManager().getRepositoryConnector(
repositoryUi.getRepositoryType());
this.connector = connector;
setNeedsAnonymousLogin(false);
setNeedsEncoding(true);
setNeedsTimeZone(true);
setNeedsProxy(true);
}
public void createControl(Composite parent) {
container = new Composite(parent, SWT.NULL);
FillLayout layout = new FillLayout();
container.setLayout(layout);
new Label(container, SWT.NONE).setText(LABEL_SERVER);
serverUrlCombo = new Combo(container, SWT.DROP_DOWN);
serverUrlCombo.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
isValidUrl(serverUrlCombo.getText());
if (getWizard() != null) {
getWizard().getContainer().updateButtons();
}
}
});
serverUrlCombo.addSelectionListener(new SelectionListener() {
public void widgetDefaultSelected(SelectionEvent e) {
// ignore
}
public void widgetSelected(SelectionEvent e) {
isValidUrl(serverUrlCombo.getText());
if (getWizard() != null) {
getWizard().getContainer().updateButtons();
}
}
});
GridDataFactory.fillDefaults().hint(300, SWT.DEFAULT).grab(true, false).applyTo(serverUrlCombo);
repositoryLabelEditor = new StringFieldEditor("", LABEL_REPOSITORY_LABEL, StringFieldEditor.UNLIMITED,
container) {
@Override
protected boolean doCheckState() {
return true;
// return isValidUrl(getStringValue());
}
@Override
protected void valueChanged() {
super.valueChanged();
if (getWizard() != null) {
getWizard().getContainer().updateButtons();
}
}
};
// repositoryLabelEditor.setErrorMessage("error");
if (needsAnonymousLogin()) {
anonymousButton = new Button(container, SWT.CHECK);
GridDataFactory.fillDefaults().span(2, SWT.DEFAULT).applyTo(anonymousButton);
anonymousButton.setText("Anonymous Access");
anonymousButton.addSelectionListener(new SelectionListener() {
public void widgetSelected(SelectionEvent e) {
setAnonymous(anonymousButton.getSelection());
}
public void widgetDefaultSelected(SelectionEvent e) {
// ignore
}
});
// Label anonymousLabel = new Label(container, SWT.NONE);
// anonymousLabel.setText("");
}
repositoryUserNameEditor = new StringFieldEditor("", LABEL_USER, StringFieldEditor.UNLIMITED, container);
repositoryPasswordEditor = new RepositoryStringFieldEditor("", LABEL_PASSWORD, StringFieldEditor.UNLIMITED,
container);
if (repository != null) {
originalUrl = repository.getUrl();
oldUsername = repository.getUserName();
oldPassword = repository.getPassword();
if (repository.hasProperty(TaskRepository.AUTH_HTTP_USERNAME)
&& repository.hasProperty(TaskRepository.AUTH_HTTP_PASSWORD)) {
oldHttpAuthUserId = repository.getProperty(TaskRepository.AUTH_HTTP_USERNAME);
oldHttpAuthPassword = repository.getProperty(TaskRepository.AUTH_HTTP_PASSWORD);
} else {
oldHttpAuthPassword = "";
oldHttpAuthUserId = "";
}
oldProxyHostname = repository.getProperty(TaskRepository.PROXY_HOSTNAME);
oldProxyPort = repository.getProperty(TaskRepository.PROXY_PORT);
if (oldProxyHostname == null)
oldProxyHostname = "";
if (oldProxyPort == null)
oldProxyPort = "";
oldProxyUsername = repository.getProxyUsername();
oldProxyPassword = repository.getProxyPassword();
if (oldProxyUsername == null)
oldProxyUsername = "";
if (oldProxyPassword == null)
oldProxyPassword = "";
try {
String repositoryLabel = repository.getRepositoryLabel();
if (repositoryLabel != null && repositoryLabel.length() > 0) {
// repositoryLabelCombo.add(repositoryLabel);
// repositoryLabelCombo.select(0);
repositoryLabelEditor.setStringValue(repositoryLabel);
}
serverUrlCombo.setText(repository.getUrl());
repositoryUserNameEditor.setStringValue(repository.getUserName());
repositoryPasswordEditor.setStringValue(repository.getPassword());
} catch (Throwable t) {
MylarStatusHandler.fail(t, "could not set field value for: " + repository, false);
}
} else {
oldUsername = "";
oldPassword = "";
oldHttpAuthPassword = "";
oldHttpAuthUserId = "";
}
// bug 131656: must set echo char after setting value on Mac
((RepositoryStringFieldEditor) repositoryPasswordEditor).getTextControl().setEchoChar('*');
if (needsAnonymousLogin()) {
// do this after username and password widgets have been initialized
if (repository != null) {
setAnonymous(isAnonymousAccess());
}
}
// TODO: put this back if we can't get the info from all connectors
// if (needsTimeZone()) {
// Label timeZoneLabel = new Label(container, SWT.NONE);
// timeZoneLabel.setText("Repository time zone: ");
// timeZonesCombo = new Combo(container, SWT.READ_ONLY);
// String[] timeZoneIds = TimeZone.getAvailableIDs();
// Arrays.sort(timeZoneIds);
// for (String zone : timeZoneIds) {
// timeZonesCombo.add(zone);
// boolean setZone = false;
// if (repository != null) {
// if (timeZonesCombo.indexOf(repository.getTimeZoneId()) > -1) {
// timeZonesCombo.select(timeZonesCombo.indexOf(repository.getTimeZoneId()));
// setZone = true;
// if (!setZone) {
// timeZonesCombo.select(timeZonesCombo.indexOf(TimeZone.getDefault().getID()));
createAdditionalControls(container);
if (needsEncoding()) {
Label encodingLabel = new Label(container, SWT.HORIZONTAL);
encodingLabel.setText("Character Encoding:");
GridDataFactory.fillDefaults().align(SWT.TOP, SWT.DEFAULT).applyTo(encodingLabel);
Composite encodingContainer = new Composite(container, SWT.NONE);
GridLayout gridLayout = new GridLayout(2, false);
gridLayout.marginWidth = 0;
gridLayout.marginHeight = 0;
encodingContainer.setLayout(gridLayout);
defaultEncoding = new Button(encodingContainer, SWT.RADIO);
defaultEncoding.setLayoutData(new GridData(SWT.LEFT, SWT.CENTER, false, false, 2, 1));
defaultEncoding.setText("Default (" + TaskRepository.DEFAULT_CHARACTER_ENCODING + ")");
defaultEncoding.setSelection(true);
otherEncoding = new Button(encodingContainer, SWT.RADIO);
otherEncoding.setText("Other:");
otherEncodingCombo = new Combo(encodingContainer, SWT.READ_ONLY);
for (String encoding : Charset.availableCharsets().keySet()) {
if (!encoding.equals(TaskRepository.DEFAULT_CHARACTER_ENCODING)) {
otherEncodingCombo.add(encoding);
}
}
setDefaultEncoding();
otherEncoding.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (otherEncoding.getSelection()) {
defaultEncoding.setSelection(false);
otherEncodingCombo.setEnabled(true);
} else {
defaultEncoding.setSelection(true);
otherEncodingCombo.setEnabled(false);
}
}
});
if (repository != null) {
try {
String repositoryEncoding = repository.getCharacterEncoding();
if (repositoryEncoding != null) {
// !repositoryEncoding.equals(defaultEncoding))
if (otherEncodingCombo.getItemCount() > 0
&& otherEncodingCombo.indexOf(repositoryEncoding) > -1) {
otherEncodingCombo.setEnabled(true);
otherEncoding.setSelection(true);
defaultEncoding.setSelection(false);
otherEncodingCombo.select(otherEncodingCombo.indexOf(repositoryEncoding));
} else {
setDefaultEncoding();
}
}
} catch (Throwable t) {
MylarStatusHandler.fail(t, "could not set field value for: " + repository, false);
}
}
}
if (needsHttpAuth()) {
httpAuthExpComposite = toolkit.createExpandableComposite(container, Section.COMPACT | Section.TWISTIE
| Section.TITLE_BAR);
httpAuthExpComposite.clientVerticalSpacing = 0;
GridData gridData_2 = new GridData(SWT.FILL, SWT.FILL, true, false);
gridData_2.horizontalIndent = -5;
httpAuthExpComposite.setLayoutData(gridData_2);
httpAuthExpComposite.setFont(container.getFont());
httpAuthExpComposite.setBackground(container.getBackground());
httpAuthExpComposite.setText("Http Authentication");
httpAuthExpComposite.addExpansionListener(new ExpansionAdapter() {
@Override
public void expansionStateChanged(ExpansionEvent e) {
getControl().getShell().pack();
}
});
GridDataFactory.fillDefaults().span(2, SWT.DEFAULT).applyTo(httpAuthExpComposite);
httpAuthComp = toolkit.createComposite(httpAuthExpComposite, SWT.NONE);
GridLayout gridLayout2 = new GridLayout();
gridLayout2.numColumns = 2;
gridLayout2.verticalSpacing = 0;
httpAuthComp.setLayout(gridLayout2);
httpAuthComp.setBackground(container.getBackground());
httpAuthExpComposite.setClient(httpAuthComp);
httpAuthButton = new Button(httpAuthComp, SWT.CHECK);
GridDataFactory.fillDefaults().align(SWT.TOP, SWT.DEFAULT).span(2, SWT.DEFAULT).applyTo(httpAuthButton);
httpAuthButton.setText("Enabled");
httpAuthButton.addSelectionListener(new SelectionListener() {
public void widgetSelected(SelectionEvent e) {
setHttpAuth(httpAuthButton.getSelection());
}
public void widgetDefaultSelected(SelectionEvent e) {
// ignore
}
});
httpAuthUserNameEditor = new StringFieldEditor("", "User ID: ", StringFieldEditor.UNLIMITED, httpAuthComp) {
@Override
protected boolean doCheckState() {
return true;
}
@Override
protected void valueChanged() {
super.valueChanged();
if (getWizard() != null) {
getWizard().getContainer().updateButtons();
}
}
};
httpAuthPasswordEditor = new RepositoryStringFieldEditor("", "Password: ", StringFieldEditor.UNLIMITED,
httpAuthComp);
((RepositoryStringFieldEditor) httpAuthPasswordEditor).getTextControl().setEchoChar('*');
// httpAuthGroup.setEnabled(httpAuthButton.getSelection());
httpAuthUserNameEditor.setEnabled(httpAuthButton.getSelection(), httpAuthComp);
httpAuthPasswordEditor.setEnabled(httpAuthButton.getSelection(), httpAuthComp);
setHttpAuth(oldHttpAuthPassword != null && oldHttpAuthUserId != null && !oldHttpAuthPassword.equals("")
&& !oldHttpAuthUserId.equals(""));
httpAuthExpComposite.setExpanded(httpAuthButton.getSelection());
}
if (needsProxy()) {
addProxySection();
}
validateServerButton = new Button(container, SWT.PUSH);
GridDataFactory.swtDefaults().span(2, SWT.DEFAULT).grab(false, false).applyTo(validateServerButton);
validateServerButton.setText("Validate Settings");
validateServerButton.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
validateSettings();
}
});
setControl(container);
}
private void addProxySection() {
proxyExpComposite = toolkit.createExpandableComposite(container, Section.COMPACT | Section.TWISTIE
| Section.TITLE_BAR);
proxyExpComposite.clientVerticalSpacing = 0;
GridData gridData_2 = new GridData(SWT.FILL, SWT.FILL, true, false);
gridData_2.horizontalIndent = -5;
proxyExpComposite.setLayoutData(gridData_2);
proxyExpComposite.setFont(container.getFont());
proxyExpComposite.setBackground(container.getBackground());
proxyExpComposite.setText("Proxy Server Configuration");
proxyExpComposite.addExpansionListener(new ExpansionAdapter() {
@Override
public void expansionStateChanged(ExpansionEvent e) {
getControl().getShell().pack();
}
});
GridDataFactory.fillDefaults().span(2, SWT.DEFAULT).applyTo(proxyExpComposite);
proxyAuthComp = toolkit.createComposite(proxyExpComposite, SWT.NONE);
GridLayout gridLayout2 = new GridLayout();
gridLayout2.numColumns = 2;
gridLayout2.verticalSpacing = 0;
proxyAuthComp.setLayout(gridLayout2);
proxyAuthComp.setBackground(container.getBackground());
proxyExpComposite.setClient(proxyAuthComp);
systemProxyButton = new Button(proxyAuthComp, SWT.CHECK);
GridDataFactory.fillDefaults().align(SWT.TOP, SWT.DEFAULT).span(2, SWT.DEFAULT).applyTo(systemProxyButton);
systemProxyButton.setText("Use Install/Update settings (default)");
systemProxyButton.addSelectionListener(new SelectionListener() {
public void widgetSelected(SelectionEvent e) {
setUseDefaultProxy(systemProxyButton.getSelection());
}
public void widgetDefaultSelected(SelectionEvent e) {
// ignore
}
});
proxyHostnameEditor = new StringFieldEditor("", "Proxy host address: ", StringFieldEditor.UNLIMITED,
proxyAuthComp) {
@Override
protected boolean doCheckState() {
return true;
}
@Override
protected void valueChanged() {
super.valueChanged();
if (getWizard() != null) {
getWizard().getContainer().updateButtons();
}
}
};
proxyHostnameEditor.setStringValue(oldProxyHostname);
proxyPortEditor = new RepositoryStringFieldEditor("", "Proxy host port: ", StringFieldEditor.UNLIMITED,
proxyAuthComp);
proxyPortEditor.setStringValue(oldProxyPort);
proxyHostnameEditor.setEnabled(systemProxyButton.getSelection(), proxyAuthComp);
proxyPortEditor.setEnabled(systemProxyButton.getSelection(), proxyAuthComp);
proxyAuthButton = new Button(proxyAuthComp, SWT.CHECK);
GridDataFactory.fillDefaults().align(SWT.TOP, SWT.DEFAULT).span(2, SWT.DEFAULT).applyTo(proxyAuthButton);
proxyAuthButton.setText("Enable proxy authentication");
proxyAuthButton.addSelectionListener(new SelectionListener() {
public void widgetSelected(SelectionEvent e) {
setProxyAuth(proxyAuthButton.getSelection());
}
public void widgetDefaultSelected(SelectionEvent e) {
// ignore
}
});
proxyUserNameEditor = new StringFieldEditor("", "User ID: ", StringFieldEditor.UNLIMITED, proxyAuthComp) {
@Override
protected boolean doCheckState() {
return true;
}
@Override
protected void valueChanged() {
super.valueChanged();
if (getWizard() != null) {
getWizard().getContainer().updateButtons();
}
}
};
proxyPasswordEditor = new RepositoryStringFieldEditor("", "Password: ", StringFieldEditor.UNLIMITED,
proxyAuthComp);
((RepositoryStringFieldEditor) proxyPasswordEditor).getTextControl().setEchoChar('*');
// proxyPasswordEditor.setEnabled(httpAuthButton.getSelection(),
// advancedComp);
// ((StringFieldEditor)
// httpAuthPasswordEditor).setEnabled(httpAuthButton.getSelection(),
// advancedComp);
setProxyAuth(oldProxyUsername != null && oldProxyPassword != null && !oldProxyUsername.equals("")
&& !oldProxyPassword.equals(""));
setUseDefaultProxy(repository != null ? repository.useDefaultProxy() : true);
proxyExpComposite.setExpanded(!systemProxyButton.getSelection());
}
protected void setEncoding(String encoding) {
if (encoding.equals(TaskRepository.DEFAULT_CHARACTER_ENCODING)) {
setDefaultEncoding();
} else {
if (otherEncodingCombo.indexOf(encoding) != -1) {
defaultEncoding.setSelection(false);
otherEncodingCombo.setEnabled(true);
otherEncoding.setSelection(true);
otherEncodingCombo.select(otherEncodingCombo.indexOf(encoding));
} else {
setDefaultEncoding();
}
}
}
private void setDefaultEncoding() {
defaultEncoding.setSelection(true);
otherEncoding.setSelection(false);
otherEncodingCombo.setEnabled(false);
if (otherEncodingCombo.getItemCount() > 0) {
otherEncodingCombo.select(0);
}
}
public void setAnonymous(boolean selected) {
if (!needsAnonymousLogin) {
return;
}
anonymousButton.setSelection(selected);
if (selected) {
oldUsername = repositoryUserNameEditor.getStringValue();
oldPassword = (repositoryPasswordEditor).getStringValue();
repositoryUserNameEditor.setStringValue("");
(repositoryPasswordEditor).setStringValue("");
} else {
repositoryUserNameEditor.setStringValue(oldUsername);
(repositoryPasswordEditor).setStringValue(oldPassword);
}
repositoryUserNameEditor.setEnabled(!selected, container);
(repositoryPasswordEditor).setEnabled(!selected, container);
}
public void setHttpAuth(boolean selected) {
if (!needsHttpAuth) {
return;
}
httpAuthButton.setSelection(selected);
if (!selected) {
oldHttpAuthUserId = httpAuthUserNameEditor.getStringValue();
oldHttpAuthPassword = (httpAuthPasswordEditor).getStringValue();
httpAuthUserNameEditor.setStringValue(null);
(httpAuthPasswordEditor).setStringValue(null);
} else {
httpAuthUserNameEditor.setStringValue(oldHttpAuthUserId);
(httpAuthPasswordEditor).setStringValue(oldHttpAuthPassword);
}
httpAuthUserNameEditor.setEnabled(selected, httpAuthComp);
(httpAuthPasswordEditor).setEnabled(selected, httpAuthComp);
}
public void setUseDefaultProxy(boolean selected) {
if (!needsProxy) {
return;
}
systemProxyButton.setSelection(selected);
if (selected) {
oldProxyHostname = proxyHostnameEditor.getStringValue();
oldProxyPort = proxyPortEditor.getStringValue();
// proxyHostnameEditor.setStringValue(null);
// proxyPortEditor.setStringValue(null);
} else {
proxyHostnameEditor.setStringValue(oldProxyHostname);
proxyPortEditor.setStringValue(oldProxyPort);
}
proxyHostnameEditor.setEnabled(!selected, proxyAuthComp);
proxyPortEditor.setEnabled(!selected, proxyAuthComp);
proxyAuthButton.setEnabled(!selected);
setProxyAuth(proxyAuthButton.getSelection());
}
public void setProxyAuth(boolean selected) {
proxyAuthButton.setSelection(selected);
proxyAuthButton.setEnabled(!systemProxyButton.getSelection());
if (!selected) {
oldProxyUsername = proxyUserNameEditor.getStringValue();
oldProxyPassword = (proxyPasswordEditor).getStringValue();
proxyUserNameEditor.setStringValue(null);
(proxyPasswordEditor).setStringValue(null);
} else {
proxyUserNameEditor.setStringValue(oldProxyUsername);
proxyPasswordEditor.setStringValue(oldProxyPassword);
}
proxyUserNameEditor.setEnabled(selected && !systemProxyButton.getSelection(), proxyAuthComp);
proxyPasswordEditor.setEnabled(selected && !systemProxyButton.getSelection(), proxyAuthComp);
}
protected abstract void createAdditionalControls(Composite parent);
protected abstract void validateSettings();
protected abstract boolean isValidUrl(String name);
/* Public for testing. */
public static String stripSlashes(String url) {
StringBuilder sb = new StringBuilder(url.trim());
while (sb.length() > 0 && sb.charAt(sb.length() - 1) == '/') {
sb.deleteCharAt(sb.length() - 1);
}
return sb.toString();
}
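// Illustrative example (not in the original): stripSlashes("https://bugs.example.org//")
// returns "https://bugs.example.org" since the trailing slashes are removed.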
public String getRepositoryLabel() {
return repositoryLabelEditor.getStringValue();
}
public String getServerUrl() {
return stripSlashes(serverUrlCombo.getText());
}
public String getUserName() {
return repositoryUserNameEditor.getStringValue();
}
public String getPassword() {
return repositoryPasswordEditor.getStringValue();
}
public String getHttpAuthUserId() {
if (needsHttpAuth()) {
return httpAuthUserNameEditor.getStringValue();
} else {
return "";
}
}
public String getHttpAuthPassword() {
if (needsHttpAuth()) {
return httpAuthPasswordEditor.getStringValue();
} else {
return "";
}
}
public String getProxyHostname() {
if (needsProxy()) {
return proxyHostnameEditor.getStringValue();
} else {
return "";
}
}
public String getProxyPort() {
if (needsProxy()) {
return proxyPortEditor.getStringValue();
} else {
return "";
}
}
public Boolean getUseDefaultProxy() {
if (needsProxy()) {
return systemProxyButton.getSelection();
} else {
return true;
}
}
public String getProxyUsername() {
if (needsProxy()) {
return proxyUserNameEditor.getStringValue();
} else {
return "";
}
}
public String getProxyPassword() {
if (needsProxy()) {
return proxyPasswordEditor.getStringValue();
} else {
return "";
}
}
public void init(IWorkbench workbench) {
// ignore
}
public boolean isAnonymousAccess() {
return "".equals(getUserName()) && "".equals(getPassword());
}
/**
* Exposes StringFieldEditor.refreshValidState()
*
* TODO: is there a better way?
*/
private static class RepositoryStringFieldEditor extends StringFieldEditor {
public RepositoryStringFieldEditor(String name, String labelText, int style, Composite parent) {
super(name, labelText, style, parent);
}
@Override
public void refreshValidState() {
try {
super.refreshValidState();
} catch (Exception e) {
MylarStatusHandler.log(e, "problem refreshing password field");
}
}
@Override
public Text getTextControl() {
return super.getTextControl();
}
}
@Override
public boolean isPageComplete() {
boolean isComplete = false;
String url = getServerUrl();
isComplete = isUniqueUrl(url) && isValidUrl(url);
return isComplete;
}
protected boolean isUniqueUrl(String urlString) {
if (!urlString.equals(originalUrl)) {
if (repositoryUrls == null) {
List<TaskRepository> repositories = TasksUiPlugin.getRepositoryManager().getAllRepositories();
repositoryUrls = new HashSet<String>(repositories.size());
for (TaskRepository repository : repositories) {
repositoryUrls.add(repository.getUrl());
}
}
if (repositoryUrls.contains(urlString)) {
setErrorMessage("Repository already exists.");
return false;
}
}
setErrorMessage(null);
return true;
}
public void setRepository(TaskRepository repository) {
this.repository = repository;
}
public void setVersion(String previousVersion) {
if (previousVersion == null) {
serverVersion = TaskRepository.NO_VERSION_SPECIFIED;
} else {
serverVersion = previousVersion;
}
}
public String getVersion() {
return serverVersion;
}
public TaskRepository getRepository() {
return repository;
}
public String getCharacterEncoding() {
if (defaultEncoding == null) {
return null;
}
if (defaultEncoding.getSelection()) {
return TaskRepository.DEFAULT_CHARACTER_ENCODING;
} else {
if (otherEncodingCombo.getSelectionIndex() > -1) {
return otherEncodingCombo.getItem(otherEncodingCombo.getSelectionIndex());
} else {
return TaskRepository.DEFAULT_CHARACTER_ENCODING;
}
}
}
// public String getTimeZoneId() {
// return (timeZonesCombo != null) ?
// timeZonesCombo.getItem(timeZonesCombo.getSelectionIndex()) : null;
public TaskRepository createTaskRepository() {
// TaskRepository repository = new
// TaskRepository(connector.getRepositoryType(), getServerUrl(),
// getVersion(),
// getCharacterEncoding(), getTimeZoneId());
TaskRepository repository = new TaskRepository(connector.getRepositoryType(), getServerUrl(), getVersion(),
getCharacterEncoding(), "");
repository.setRepositoryLabel(getRepositoryLabel());
repository.setAuthenticationCredentials(getUserName(), getPassword());
repository.setProperty(TaskRepository.AUTH_HTTP_USERNAME, getHttpAuthUserId());
repository.setProperty(TaskRepository.AUTH_HTTP_PASSWORD, getHttpAuthPassword());
repository.setProperty(TaskRepository.PROXY_USEDEFAULT, String.valueOf(getUseDefaultProxy()));
repository.setProperty(TaskRepository.PROXY_HOSTNAME, getProxyHostname());
repository.setProperty(TaskRepository.PROXY_PORT, getProxyPort());
if (getProxyUsername() != null && getProxyPassword() != null && getProxyUsername().length() > 0
&& getProxyPassword().length() > 0) {
repository.setProxyAuthenticationCredentials(getProxyUsername(), getProxyPassword());
}
// repository.setProperty(TaskRepository.PROXY_USERNAME,
// getProxyUsername());
// repository.setProperty(TaskRepository.PROXY_PASSWORD,
// getProxyPassword());
// repository.setProperty(TaskRepository.PROXY_USERNAME,
// getHttpAuthUserId());
// repository.setProperty(TaskRepository.PROXY_PASSWORD,
// getHttpAuthPassword());
return repository;
}
public AbstractRepositoryConnector getConnector() {
return connector;
}
public boolean needsEncoding() {
return needsEncoding;
}
public boolean needsTimeZone() {
return needsTimeZone;
}
public boolean needsAnonymousLogin() {
return needsAnonymousLogin;
}
public void setNeedsEncoding(boolean needsEncoding) {
this.needsEncoding = needsEncoding;
}
public void setNeedsTimeZone(boolean needsTimeZone) {
this.needsTimeZone = needsTimeZone;
}
public boolean needsHttpAuth() {
return this.needsHttpAuth;
}
public void setNeedsHttpAuth(boolean needsHttpAuth) {
this.needsHttpAuth = needsHttpAuth;
}
public void setNeedsProxy(boolean needsProxy) {
this.needsProxy = needsProxy;
}
public boolean needsProxy() {
return this.needsProxy;
}
public void setNeedsAnonymousLogin(boolean needsAnonymousLogin) {
this.needsAnonymousLogin = needsAnonymousLogin;
}
public void updateProperties(TaskRepository repository) {
// none
}
/** for testing */
public void setUrl(String url) {
serverUrlCombo.setText(url);
}
/** for testing */
public void setUserId(String id) {
repositoryUserNameEditor.setStringValue(id);
}
/** for testing */
public void setPassword(String pass) {
repositoryPasswordEditor.setStringValue(pass);
}
}
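// Hedged sketch, not part of Mylar: a minimal concrete subclass showing which hooks a
// connector-specific settings page has to provide. The class name, labels and behavior are
// invented for illustration; a real page would contact its repository in validateSettings().
class ExampleRepositorySettingsPage extends AbstractRepositorySettingsPage {
public ExampleRepositorySettingsPage(AbstractRepositoryConnectorUi connectorUi) {
super("Example Repository Settings", "Enter the repository location and credentials.", connectorUi);
// optional sections are toggled through the setNeedsXxx() flags
setNeedsHttpAuth(true);
}
@Override
protected void createAdditionalControls(Composite parent) {
// no connector-specific fields in this sketch
}
@Override
protected boolean isValidUrl(String url) {
return url.startsWith(URL_PREFIX_HTTP) || url.startsWith(URL_PREFIX_HTTPS);
}
@Override
protected void validateSettings() {
// a real connector would validate credentials against the server here; this sketch only re-checks the URL
isValidUrl(getServerUrl());
}
}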
|
package org.osgi.test.cases.http.whiteboard.junit;
import java.io.IOException;
import java.util.Arrays;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.servlet.AsyncContext;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.RequestDispatcher;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.http.runtime.dto.FilterDTO;
import org.osgi.service.http.runtime.dto.RequestInfoDTO;
import org.osgi.service.http.whiteboard.HttpWhiteboardConstants;
import org.osgi.test.cases.http.whiteboard.junit.mock.MockFilter;
import org.osgi.test.cases.http.whiteboard.junit.mock.MockServlet;
public class FilterTestCase extends BaseHttpWhiteboardTestCase {
public void test_table_140_5_HTTP_WHITEBOARD_FILTER_ASYNC_SUPPORTED_validate() throws Exception {
BundleContext context = getContext();
Dictionary<String, Object> properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_ASYNC_SUPPORTED, "true");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_PATTERN, "/a");
ServiceRegistration<Filter> sr = context.registerService(Filter.class, new MockFilter(), properties);
serviceRegistrations.add(sr);
FilterDTO filterDTO = getFilterDTOByName(HttpWhiteboardConstants.HTTP_WHITEBOARD_DEFAULT_CONTEXT_NAME, "a");
assertNotNull(filterDTO);
assertTrue(filterDTO.asyncSupported);
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_ASYNC_SUPPORTED, "false");
sr.setProperties(properties);
filterDTO = getFilterDTOByName(HttpWhiteboardConstants.HTTP_WHITEBOARD_DEFAULT_CONTEXT_NAME, "a");
assertNotNull(filterDTO);
assertFalse(filterDTO.asyncSupported);
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_ASYNC_SUPPORTED, 234L);
sr.setProperties(properties);
filterDTO = getFilterDTOByName(HttpWhiteboardConstants.HTTP_WHITEBOARD_DEFAULT_CONTEXT_NAME, "a");
assertNotNull(filterDTO);
assertFalse(filterDTO.asyncSupported);
properties.remove(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_ASYNC_SUPPORTED);
sr.setProperties(properties);
filterDTO = getFilterDTOByName(HttpWhiteboardConstants.HTTP_WHITEBOARD_DEFAULT_CONTEXT_NAME, "a");
assertNotNull(filterDTO);
assertFalse(filterDTO.asyncSupported);
}
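// As exercised above: the string "true" turns asyncSupported on; "false", a value of another
// type (234L), or removing the property all leave the filter registered with asyncSupported=false.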
public void test_table_140_5_HTTP_WHITEBOARD_FILTER_DISPATCHER_async() throws Exception {
BundleContext context = getContext();
Dictionary<String, Object> properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_ASYNC_SUPPORTED, "true");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_DISPATCHER, "ASYNC");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_NAME, "b");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_PATTERN, "/b");
ServiceRegistration<?> srA = context.registerService(Filter.class, new MockFilter().around("b"), properties);
serviceRegistrations.add(srA);
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "b");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/b");
ServiceRegistration<?> srB = context.registerService(Servlet.class, new MockServlet().content("a"), properties);
serviceRegistrations.add(srB);
final AtomicBoolean invoked = new AtomicBoolean(false);
MockServlet mockServlet = new MockServlet() {
final ExecutorService executor = Executors.newCachedThreadPool();
protected void service(HttpServletRequest req, HttpServletResponse resp) throws IOException {
doGetAsync(req.startAsync());
}
private void doGetAsync(final AsyncContext asyncContext) {
executor.submit(new Callable<Void>() {
public Void call() throws Exception {
try {
invoked.set(true);
asyncContext.dispatch("/b");
} finally {
asyncContext.complete();
}
return null;
}
});
}
};
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_ASYNC_SUPPORTED, "true");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/a");
ServiceRegistration<?> srC = context.registerService(Servlet.class, mockServlet, properties);
serviceRegistrations.add(srC);
RequestInfoDTO requestInfoDTO = calculateRequestInfoDTO("/a");
assertNotNull(requestInfoDTO);
assertEquals(0, requestInfoDTO.filterDTOs.length);
assertEquals(srC.getReference().getProperty(Constants.SERVICE_ID), requestInfoDTO.servletDTO.serviceId);
assertEquals("bab", request("a"));
assertTrue(invoked.get());
}
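// In the test above, servlet "a" starts an async cycle and dispatches to "/b"; because the
// filter at "/b" declares DISPATCHER=ASYNC it wraps only that dispatched request, which is
// why the response reads "bab" while the RequestInfoDTO for "/a" lists no filters.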
public void test_table_140_5_HTTP_WHITEBOARD_FILTER_DISPATCHER_request() throws Exception {
BundleContext context = getContext();
Dictionary<String, Object> properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_PATTERN, "/a");
ServiceRegistration<?> srA = context.registerService(Filter.class, new MockFilter().around("b"), properties);
serviceRegistrations.add(srA);
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/a");
ServiceRegistration<?> srB = context.registerService(Servlet.class, new MockServlet().content("a"), properties);
serviceRegistrations.add(srB);
RequestInfoDTO requestInfoDTO = calculateRequestInfoDTO("/a");
assertNotNull(requestInfoDTO);
assertEquals(1, requestInfoDTO.filterDTOs.length);
FilterDTO filterDTO = requestInfoDTO.filterDTOs[0];
assertEquals(srA.getReference().getProperty(Constants.SERVICE_ID), filterDTO.serviceId);
assertEquals(1, filterDTO.dispatcher.length);
assertEquals("REQUEST", filterDTO.dispatcher[0]);
assertEquals("bab", request("a"));
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_DISPATCHER, "REQUEST");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_PATTERN, "/a");
srA.setProperties(properties);
assertEquals("bab", request("a"));
}
public void test_table_140_5_HTTP_WHITEBOARD_FILTER_DISPATCHER_include() throws Exception {
BundleContext context = getContext();
Dictionary<String, Object> properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_DISPATCHER, "INCLUDE");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_PATTERN, "/a");
ServiceRegistration<?> srA = context.registerService(Filter.class, new MockFilter().around("b"), properties);
serviceRegistrations.add(srA);
FilterDTO filterDTO = getFilterDTOByName(HttpWhiteboardConstants.HTTP_WHITEBOARD_DEFAULT_CONTEXT_NAME, "a");
assertNotNull(filterDTO);
assertEquals(1, filterDTO.dispatcher.length);
assertEquals("INCLUDE", filterDTO.dispatcher[0]);
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/a");
ServiceRegistration<?> srB = context.registerService(Servlet.class, new MockServlet().content("a"), properties);
serviceRegistrations.add(srB);
RequestInfoDTO requestInfoDTO = calculateRequestInfoDTO("/a");
assertNotNull(requestInfoDTO);
assertEquals(0, requestInfoDTO.filterDTOs.length);
assertEquals("a", request("a"));
MockServlet mockServlet = new MockServlet() {
@Override
protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
RequestDispatcher requestDispatcher = request.getRequestDispatcher("/a");
requestDispatcher.include(request, response);
}
};
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "b");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/b");
ServiceRegistration<?> srC = context.registerService(Servlet.class, mockServlet, properties);
serviceRegistrations.add(srC);
assertEquals("bab", request("b"));
}
public void test_table_140_5_HTTP_WHITEBOARD_FILTER_DISPATCHER_forward() throws Exception {
BundleContext context = getContext();
Dictionary<String, Object> properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_DISPATCHER, "FORWARD");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_PATTERN, "/a");
ServiceRegistration<?> srA = context.registerService(Filter.class, new MockFilter().around("b"), properties);
serviceRegistrations.add(srA);
FilterDTO filterDTO = getFilterDTOByName(HttpWhiteboardConstants.HTTP_WHITEBOARD_DEFAULT_CONTEXT_NAME, "a");
assertNotNull(filterDTO);
assertEquals(1, filterDTO.dispatcher.length);
assertEquals("FORWARD", filterDTO.dispatcher[0]);
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/a");
ServiceRegistration<?> srB = context.registerService(Servlet.class, new MockServlet().content("a"), properties);
serviceRegistrations.add(srB);
RequestInfoDTO requestInfoDTO = calculateRequestInfoDTO("/a");
assertNotNull(requestInfoDTO);
assertEquals(0, requestInfoDTO.filterDTOs.length);
assertEquals("a", request("a"));
MockServlet mockServlet = new MockServlet() {
@Override
protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
RequestDispatcher requestDispatcher = request.getRequestDispatcher("/a");
requestDispatcher.forward(request, response);
}
};
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "b");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/b");
ServiceRegistration<?> srC = context.registerService(Servlet.class, mockServlet, properties);
serviceRegistrations.add(srC);
assertEquals("bab", request("b"));
}
public void test_table_140_5_HTTP_WHITEBOARD_FILTER_DISPATCHER_error() throws Exception {
BundleContext context = getContext();
Dictionary<String, Object> properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_DISPATCHER, "ERROR");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_SERVLET, "a");
ServiceRegistration<?> srA = context.registerService(Filter.class, new MockFilter().around("b"), properties);
serviceRegistrations.add(srA);
FilterDTO filterDTO = getFilterDTOByName(HttpWhiteboardConstants.HTTP_WHITEBOARD_DEFAULT_CONTEXT_NAME, "a");
assertNotNull(filterDTO);
assertEquals(1, filterDTO.dispatcher.length);
assertEquals("ERROR", filterDTO.dispatcher[0]);
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_ERROR_PAGE, "4xx");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/a");
ServiceRegistration<?> srB = context.registerService(Servlet.class, new MockServlet().content("a"), properties);
serviceRegistrations.add(srB);
RequestInfoDTO requestInfoDTO = calculateRequestInfoDTO("/a");
assertNotNull(requestInfoDTO);
assertEquals(0, requestInfoDTO.filterDTOs.length);
assertEquals("a", request("a"));
MockServlet mockServlet = new MockServlet() {
@Override
protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
response.sendError(HttpServletResponse.SC_NOT_FOUND);
}
};
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "b");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/b");
ServiceRegistration<?> srC = context.registerService(Servlet.class, mockServlet, properties);
serviceRegistrations.add(srC);
assertEquals("bab", request("b"));
}
public void test_table_140_5_HTTP_WHITEBOARD_FILTER_DISPATCHER_multiple() throws Exception {
BundleContext context = getContext();
Dictionary<String, Object> properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_DISPATCHER, new String[] {"REQUEST", "ERROR"});
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_SERVLET, "a");
ServiceRegistration<?> srA = context.registerService(Filter.class, new MockFilter().around("b"), properties);
serviceRegistrations.add(srA);
FilterDTO filterDTO = getFilterDTOByName(HttpWhiteboardConstants.HTTP_WHITEBOARD_DEFAULT_CONTEXT_NAME, "a");
assertNotNull(filterDTO);
assertEquals(2, filterDTO.dispatcher.length);
Arrays.sort(filterDTO.dispatcher);
assertTrue(Arrays.binarySearch(filterDTO.dispatcher, "ERROR") >= 0);
assertTrue(Arrays.binarySearch(filterDTO.dispatcher, "REQUEST") >= 0);
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_ERROR_PAGE, "4xx");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/a");
ServiceRegistration<?> srB = context.registerService(Servlet.class, new MockServlet().content("a"), properties);
serviceRegistrations.add(srB);
RequestInfoDTO requestInfoDTO = calculateRequestInfoDTO("/a");
assertNotNull(requestInfoDTO);
assertEquals(1, requestInfoDTO.filterDTOs.length);
assertEquals("bab", request("a"));
MockServlet mockServlet = new MockServlet() {
@Override
protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
response.sendError(HttpServletResponse.SC_NOT_FOUND);
}
};
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "b");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/b");
ServiceRegistration<?> srC = context.registerService(Servlet.class, mockServlet, properties);
serviceRegistrations.add(srC);
assertEquals("bab", request("b"));
}
public void test_table_140_5_HTTP_WHITEBOARD_FILTER_NAME() throws Exception {
BundleContext context = getContext();
Dictionary<String, Object> properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/a");
serviceRegistrations.add(context.registerService(Servlet.class, new MockServlet().content("a"), properties));
MockFilter mockFilter = new MockFilter() {
@Override
public void init(FilterConfig config) throws ServletException {
super.init(config);
this.config = config;
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
response.getWriter().write(config.getFilterName());
chain.doFilter(request, response);
}
private FilterConfig config;
};
properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_PATTERN, "/a");
ServiceRegistration<?> srA = context.registerService(Filter.class, mockFilter, properties);
serviceRegistrations.add(srA);
RequestInfoDTO requestInfoDTO = calculateRequestInfoDTO("/a");
assertNotNull(requestInfoDTO);
assertEquals(1, requestInfoDTO.filterDTOs.length);
assertEquals(mockFilter.getClass().getName(), requestInfoDTO.filterDTOs[0].name);
assertEquals(mockFilter.getClass().getName() + "a", request("a"));
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_FILTER_NAME, "b");
srA.setProperties(properties);
requestInfoDTO = calculateRequestInfoDTO("/a");
assertNotNull(requestInfoDTO);
assertEquals(1, requestInfoDTO.filterDTOs.length);
assertEquals("b", requestInfoDTO.filterDTOs[0].name);
assertEquals("ba", request("a"));
}
public void test_table_140_5_HTTP_WHITEBOARD_SERVLET_PATTERN() throws Exception {
BundleContext context = getContext();
Dictionary<String, Object> properties = new Hashtable<String, Object>();
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "a");
properties.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, new String[] {"", "/"});
serviceRegistrations.add(context.registerService(Servlet.class, new MockServlet().content("a"), properties));
properties = new Hashtable<String, Object>();
|
package net.sf.taverna.biocatalogue.ui;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import javax.swing.BorderFactory;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.LayoutFocusTraversalPolicy;
import net.sf.taverna.biocatalogue.model.connectivity.BioCatalogueClient;
import net.sf.taverna.biocatalogue.ui.search_results.SearchResultsMainPanel;
import net.sf.taverna.t2.ui.perspectives.biocatalogue.MainComponent;
import net.sf.taverna.t2.ui.perspectives.biocatalogue.MainComponentFactory;
import org.apache.log4j.Logger;
/**
*
* @author Sergejs Aleksejevs
*/
@SuppressWarnings("serial")
public class BioCatalogueExplorationTab extends JPanel implements HasDefaultFocusCapability
{
private final MainComponent pluginPerspectiveMainComponent;
private final BioCatalogueClient client;
private final Logger logger;
// COMPONENTS
private BioCatalogueExplorationTab thisPanel;
private SearchOptionsPanel searchOptionsPanel;
private SearchResultsMainPanel tabbedSearchResultsPanel;
public BioCatalogueExplorationTab()
{
this.thisPanel = this;
this.pluginPerspectiveMainComponent = MainComponentFactory.getSharedInstance();
this.client = BioCatalogueClient.getInstance();
this.logger = Logger.getLogger(this.getClass());
initialiseUI();
// this is to make sure that the search field gets focused when this tab is opened
// -- this is a workaround for a bug in the JVM
setFocusCycleRoot(true);
setFocusTraversalPolicy(new LayoutFocusTraversalPolicy() {
public Component getDefaultComponent(Container cont) {
return (thisPanel.getDefaultComponent());
}
});
}
private void initialiseUI()
{
this.tabbedSearchResultsPanel = new SearchResultsMainPanel();
this.searchOptionsPanel = new SearchOptionsPanel(tabbedSearchResultsPanel);
this.setLayout(new GridBagLayout());
GridBagConstraints c = new GridBagConstraints();
c.gridx = 0;
c.gridy = 0;
c.weightx = 0.0;
c.anchor = GridBagConstraints.WEST;
c.insets = new Insets(3,10,3,10);
String baseString = "<html><b>Using service catalogue at </b>" + client.getBaseURL() + "</html>";
this.add(new JLabel(baseString), c);
c.gridx = 1;
c.gridy = 0;
c.weightx = 0.1;
c.fill = GridBagConstraints.NONE;
c.anchor = GridBagConstraints.EAST;
this.add(searchOptionsPanel, c);
c.insets = new Insets(0,0,0,0);
c.gridy++;
c.gridx = 0;
c.gridwidth = 2;
c.weightx = c.weighty = 1.0;
c.fill = GridBagConstraints.BOTH;
c.anchor = GridBagConstraints.CENTER;
this.add(tabbedSearchResultsPanel, c);
this.setBorder(BorderFactory.createEmptyBorder(20, 10, 10, 10));
}
public SearchResultsMainPanel getTabbedSearchResultsPanel() {
return tabbedSearchResultsPanel;
}
public void focusDefaultComponent() {
this.searchOptionsPanel.focusDefaultComponent();
}
public Component getDefaultComponent() {
return (this.searchOptionsPanel.getDefaultComponent());
}
public static void main(String[] args) {
JFrame f = new JFrame();
f.getContentPane().add(new BioCatalogueExplorationTab());
f.setSize(1000, 800);
f.setLocationRelativeTo(null);
f.setVisible(true);
}
}
|
package org.opendaylight.nic.pipeline_manager;
import com.google.common.base.Optional;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
import org.opendaylight.controller.md.sal.binding.api.DataObjectModification;
import org.opendaylight.controller.md.sal.binding.api.DataTreeChangeListener;
import org.opendaylight.controller.md.sal.binding.api.DataTreeIdentifier;
import org.opendaylight.controller.md.sal.binding.api.DataTreeModification;
import org.opendaylight.controller.md.sal.binding.api.ReadTransaction;
import org.opendaylight.controller.md.sal.binding.api.WriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.openflowplugin.openflow.md.core.sal.convertor.match.MatchConvertorImpl;
import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.list.Action;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowId;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.TableKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.FlowBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.FlowKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.flow.Instructions;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.flow.InstructionsBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.flow.Match;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.flow.MatchBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.instruction.ApplyActionsCase;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.instruction.GoToTableCaseBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.instruction.WriteActionsCase;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.instruction.go.to.table._case.GoToTableBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.list.Instruction;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.list.InstructionBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeId;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.openflow.oxm.rev150225.MatchField;
import org.opendaylight.yang.gen.v1.urn.opendaylight.openflow.oxm.rev150225.match.entries.grouping.MatchEntry;
import org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.set.field.match.SetFieldMatch;
import org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.NextTableMiss;
import org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.features.TableFeatures;
import org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.features.table.features.table.properties.TableFeatureProperties;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PipelineManagerProviderImpl implements DataTreeChangeListener<Node>, PipelineManager {
private static final Logger LOG = LoggerFactory.getLogger(PipelineManagerProviderImpl.class);
private final DataBroker dataBroker;
private final ListenerRegistration<?> nodeListener;
public PipelineManagerProviderImpl(final DataBroker dataBroker) {
LOG.info("\nPipeline Manager service Initiated");
this.dataBroker = dataBroker;
final InstanceIdentifier<Node> nodeIdentifier = InstanceIdentifier.create(Nodes.class).child(Node.class);
nodeListener = dataBroker.registerDataTreeChangeListener(new DataTreeIdentifier<>(
LogicalDatastoreType.OPERATIONAL, nodeIdentifier), this);
LOG.info("new Pipeline Manager created: {}", this);
}
@Override
public void onDataTreeChanged(Collection<DataTreeModification<Node>> changes) {
for (DataTreeModification<Node> change: changes) {
final DataObjectModification<Node> rootNode = change.getRootNode();
switch (rootNode.getModificationType()) {
case WRITE:
if (rootNode.getDataBefore() == null) {
createPipeline(rootNode.getDataAfter());
}
break;
default:
break;
}
}
}
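/**
* Builds a default pipeline for a newly discovered node: for each table, in id order, installs a
* priority-0, empty-match flow that jumps to the lowest-numbered table advertised in that table's
* next-tables-miss feature, so unmatched packets fall through the pipeline. Stops at the first
* table that advertises no next-tables-miss entries.
*/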
private void createPipeline(Node node) {
List<Table> tableList = getTableList(node);
for (Table table : tableList) {
List<Short> nextIds = getNextTablesMiss(node.getId(), table.getId());
if (nextIds.isEmpty()) {
break;
}
Short nextId = Collections.min(nextIds);
Short currentId = table.getId();
addFlowGoto(node, currentId, nextId);
}
}
private void addFlowGoto(Node node, Short currentId, Short nextId) {
FlowBuilder flowBuilder = new FlowBuilder();
flowBuilder.setTableId(currentId);
flowBuilder.setHardTimeout(0);
flowBuilder.setPriority(0);
flowBuilder.setMatch(new MatchBuilder().build());
flowBuilder.setInstructions(
new InstructionsBuilder().setInstruction(Collections.singletonList(
new InstructionBuilder().setInstruction(
new GoToTableCaseBuilder().setGoToTable(
new GoToTableBuilder().setTableId(nextId).build()
).build()
).setOrder(0).build()
)).build());
String flowIdStr = "PipelineManager";
final FlowId flowId = new FlowId(flowIdStr);
final FlowKey key = new FlowKey(flowId);
flowBuilder.setKey(key);
InstanceIdentifier<Flow> flowIID = InstanceIdentifier.builder(Nodes.class)
.child(Node.class, new NodeKey(node.getId())).augmentation(FlowCapableNode.class)
.child(Table.class, new TableKey(flowBuilder.getTableId())).child(Flow.class, flowBuilder.getKey())
.build();
WriteTransaction transaction = dataBroker.newWriteOnlyTransaction();
transaction.put(LogicalDatastoreType.CONFIGURATION, flowIID, flowBuilder.build(), true);
transaction.submit();
}
private List<TableFeatureProperties> getTableFeatureProperties(final NodeId nodeId, final Short tableId) {
Node node = getDataObject(dataBroker.newReadOnlyTransaction(),
InstanceIdentifier.create(Nodes.class).child(Node.class, new NodeKey(nodeId)));
if (node == null) {
return Collections.emptyList();
}
FlowCapableNode flowCapableNode = node.getAugmentation(FlowCapableNode.class);
List<TableFeatures> features = flowCapableNode.getTableFeatures();
if (features == null || features.isEmpty()) {
return Collections.emptyList();
}
return features.get(tableId).getTableProperties().getTableFeatureProperties();
}
private List<Short> getNextTablesMiss(final NodeId nodeId, final Short tableId) {
for (TableFeatureProperties tableFeatureProperties : getTableFeatureProperties(nodeId, tableId)) {
if (tableFeatureProperties.getTableFeaturePropType() instanceof NextTableMiss) {
NextTableMiss nextTableMiss = (NextTableMiss) tableFeatureProperties.getTableFeaturePropType();
return nextTableMiss.getTablesMiss().getTableIds();
}
}
return Collections.emptyList();
}
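/**
* Assigns the flow to the first table, in id order, whose table features support the flow's match
* fields and instructions. Returns false if no table on the node can accommodate the flow.
*/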
@Override
public boolean setTableId(NodeId nodeId, FlowBuilder flowBuilder) {
List<Table> tableList = getTableList(nodeId);
for (Table table : tableList) {
List<TableFeatureProperties> tableFeaturePropertiesList = getTableFeatureProperties(nodeId, table.getId());
if (isFlowSupported(tableFeaturePropertiesList, flowBuilder)) {
flowBuilder.setTableId(table.getId());
return true;
}
}
return false;
}
/* InventoryDataServiceUtil.getDataObject() */
private static <T extends DataObject> T getDataObject(final ReadTransaction readOnlyTransaction, final InstanceIdentifier<T> identifier) {
Optional<T> optionalData = null;
try {
optionalData = readOnlyTransaction.read(LogicalDatastoreType.OPERATIONAL, identifier).get();
if (optionalData.isPresent()) {
return optionalData.get();
}
} catch (ExecutionException | InterruptedException e) {
LOG.error("Read transaction for identifier {} failed.", identifier, e);
}
return null;
}
private List<Table> getTableList(NodeId nodeId) {
Node node = PipelineManagerProviderImpl.getDataObject(dataBroker.newReadOnlyTransaction(),
InstanceIdentifier.create(Nodes.class).child(Node.class, new NodeKey(nodeId)));
return getTableList(node);
}
private List<Table> getTableList(Node node) {
FlowCapableNode flowCapableNode = node.getAugmentation(FlowCapableNode.class);
List<Table> tableList = flowCapableNode.getTable();
Collections.sort(tableList, (o1, o2) -> o1.getId().compareTo(o2.getId()));
return tableList;
}
private boolean isFlowSupported(List<TableFeatureProperties> tableFeaturePropertiesList, FlowBuilder flowBuilder) {
Instructions flowBuilderInstructions = flowBuilder.getInstructions();
if (flowBuilderInstructions == null) {
return false;
}
List<SetFieldMatch> matchList = getMatchList(tableFeaturePropertiesList);
return isMatchSupported(matchList, flowBuilder.getMatch())
&& isInstructionsSupported(tableFeaturePropertiesList, flowBuilderInstructions.getInstruction());
}
private boolean isInstructionsSupported(List<TableFeatureProperties> tableFeaturePropertiesList, List<Instruction> instructions) {
for (Instruction instruction : instructions) {
if (!isInstructionSupported(tableFeaturePropertiesList, instruction)) {
return false;
}
}
return true;
}
private boolean isInstructionSupported(List<TableFeatureProperties> tableFeaturePropertiesList, Instruction instruction) {
List<Instruction> supportedInstructions = getInstructionList(tableFeaturePropertiesList);
for (Instruction supportedInstructionProxy : supportedInstructions) {
org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.Instruction supportedInstruction = supportedInstructionProxy.getInstruction();
if (instruction.getInstruction().getImplementedInterface().equals(supportedInstruction.getImplementedInterface())) {
if (instruction.getInstruction() instanceof ApplyActionsCase) {
ApplyActionsCase applyActionsCase = (ApplyActionsCase) instruction.getInstruction();
List<Action> supportedApplyActions = getApplyActionList(tableFeaturePropertiesList);
for (Action action : applyActionsCase.getApplyActions().getAction()) {
if (!isActionSupported(supportedApplyActions, action)) {
return false;
}
}
}
if (instruction.getInstruction() instanceof WriteActionsCase) {
WriteActionsCase writeActionsCase = (WriteActionsCase) instruction.getInstruction();
List<Action> supportedWriteActions = getWriteActionList(tableFeaturePropertiesList);
for (Action action : writeActionsCase.getWriteActions().getAction()) {
if (!isActionSupported(supportedWriteActions, action)) {
return false;
}
}
}
return true;
}
}
return false;
}
private boolean isActionSupported(List<Action> supportedApplyActions, Action action) {
for (Action supportedApplyAction : supportedApplyActions) {
if (supportedApplyAction.getAction().getImplementedInterface().equals(action.getAction().getImplementedInterface())) {
return true;
}
}
return false;
}
private boolean isFieldSupported(Class<? extends MatchField> field, List<SetFieldMatch> supportedFields) {
for (SetFieldMatch supportedField : supportedFields) {
if (isFieldMatch(field, supportedField.getMatchType())) {
return true;
}
}
return false;
}
private boolean isFieldMatch(Class<? extends MatchField> field, Class<? extends org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.MatchField> matchType) {
return field.getSimpleName().equals(matchType.getSimpleName());
}
private boolean isMatchSupported(List<SetFieldMatch> supportedMatchList, Match match) {
MatchConvertorImpl matchConvertor = new MatchConvertorImpl();
List<MatchEntry> matchEntryList = matchConvertor.convert(match, null);
for (MatchEntry matchEntry : matchEntryList) {
if (!isFieldSupported(matchEntry.getOxmMatchField(), supportedMatchList)) {
return false;
}
}
return true;
}
private List<SetFieldMatch> getMatchList(List<TableFeatureProperties> tableFeaturePropertiesList) {
for (TableFeatureProperties tableFeatureProperties : tableFeaturePropertiesList) {
if (tableFeatureProperties.getTableFeaturePropType() instanceof org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.Match) {
org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.Match match = (org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.Match) tableFeatureProperties.getTableFeaturePropType();
return match.getMatchSetfield().getSetFieldMatch();
}
}
return Collections.emptyList();
}
private List<Instruction> getInstructionList(List<TableFeatureProperties> tableFeaturePropertiesList) {
for (TableFeatureProperties tableFeatureProperties : tableFeaturePropertiesList) {
if (tableFeatureProperties.getTableFeaturePropType() instanceof org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.Instructions) {
org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.Instructions instructions = (org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.Instructions) tableFeatureProperties.getTableFeaturePropType();
return instructions.getInstructions().getInstruction();
}
}
return Collections.emptyList();
}
private List<Action> getApplyActionList(List<TableFeatureProperties> tableFeaturePropertiesList) {
for (TableFeatureProperties tableFeatureProperties : tableFeaturePropertiesList) {
if (tableFeatureProperties.getTableFeaturePropType() instanceof org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.ApplyActions) {
org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.ApplyActions actions = (org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.ApplyActions) tableFeatureProperties.getTableFeaturePropType();
return actions.getApplyActions().getAction();
}
}
return Collections.emptyList();
}
private List<Action> getWriteActionList(List<TableFeatureProperties> tableFeaturePropertiesList) {
for (TableFeatureProperties tableFeatureProperties : tableFeaturePropertiesList) {
if (tableFeatureProperties.getTableFeaturePropType() instanceof org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.WriteActions) {
org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.WriteActions actions = (org.opendaylight.yang.gen.v1.urn.opendaylight.table.types.rev131026.table.feature.prop.type.table.feature.prop.type.WriteActions) tableFeatureProperties.getTableFeaturePropType();
return actions.getWriteActions().getAction();
}
}
return Collections.emptyList();
}
@Override
public void stop() {
nodeListener.close();
LOG.info("Pipeline Manager destroyed: {}", this);
}
}
|
package com.intellij.codeInsight.quickfix;
import com.intellij.codeInsight.daemon.QuickFixActionRegistrar;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.project.DumbService;
import com.intellij.psi.PsiReference;
import com.intellij.util.ReflectionUtil;
import org.jetbrains.annotations.NotNull;
/**
* Register implementation of this class as 'com.intellij.codeInsight.unresolvedReferenceQuickFixProvider' extension to provide additional
* quick fixes for 'Unresolved reference' problems.
*
* @param <T> type of element you want to register quick fixes for; for example, in Java language it may be {@link com.intellij.psi.PsiJavaCodeReferenceElement}
*/
public abstract class UnresolvedReferenceQuickFixProvider<T extends PsiReference> {
public static <T extends PsiReference> void registerReferenceFixes(@NotNull T ref, @NotNull QuickFixActionRegistrar registrar) {
final boolean dumb = DumbService.getInstance(ref.getElement().getProject()).isDumb();
Class<? extends PsiReference> referenceClass = ref.getClass();
for (UnresolvedReferenceQuickFixProvider<?> each : EXTENSION_NAME.getExtensionList()) {
if (dumb && !DumbService.isDumbAware(each)) {
continue;
}
if (ReflectionUtil.isAssignable(each.getReferenceClass(), referenceClass)) {
//noinspection unchecked
((UnresolvedReferenceQuickFixProvider<T>)each).registerFixes(ref, registrar);
}
}
}
private static final ExtensionPointName<UnresolvedReferenceQuickFixProvider<?>> EXTENSION_NAME = ExtensionPointName.create("com.intellij.codeInsight.unresolvedReferenceQuickFixProvider");
public abstract void registerFixes(@NotNull T ref, @NotNull QuickFixActionRegistrar registrar);
@NotNull
public abstract Class<T> getReferenceClass();
}
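/*
* A minimal sketch of a provider implementation (illustrative only; the element type comes from the
* Javadoc above, and MyAddDependencyFix stands in for a hypothetical IntentionAction):
*
* public class JavaReferenceQuickFixProvider extends UnresolvedReferenceQuickFixProvider<PsiJavaCodeReferenceElement> {
*     @Override
*     public void registerFixes(@NotNull PsiJavaCodeReferenceElement ref, @NotNull QuickFixActionRegistrar registrar) {
*         registrar.register(new MyAddDependencyFix(ref)); // hypothetical fix
*     }
*
*     @NotNull
*     @Override
*     public Class<PsiJavaCodeReferenceElement> getReferenceClass() {
*         return PsiJavaCodeReferenceElement.class;
*     }
* }
*
* The implementation is then registered under the
* 'com.intellij.codeInsight.unresolvedReferenceQuickFixProvider' extension point, as described above.
*/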
|
package org.jkiss.dbeaver.ui.controls.resultset;
import org.eclipse.swt.widgets.Control;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.data.DBDAttributeBindingMeta;
import org.jkiss.dbeaver.model.data.DBDDataReceiver;
import org.jkiss.dbeaver.model.exec.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Data pump for SQL queries
*/
class ResultSetDataReceiver implements DBDDataReceiver {
static final Log log = Log.getLog(ResultSetDataReceiver.class);
private ResultSetViewer resultSetViewer;
private int columnsCount;
private DBDAttributeBindingMeta[] metaColumns;
private List<Object[]> rows = new ArrayList<>();
private boolean hasMoreData;
private boolean nextSegmentRead;
private long offset;
private long maxRows;
private Map<DBCAttributeMetaData, List<Exception>> errors = new HashMap<>();
ResultSetDataReceiver(ResultSetViewer resultSetViewer)
{
this.resultSetViewer = resultSetViewer;
}
boolean isHasMoreData() {
return hasMoreData;
}
void setHasMoreData(boolean hasMoreData) {
this.hasMoreData = hasMoreData;
}
void setNextSegmentRead(boolean nextSegmentRead) {
this.nextSegmentRead = nextSegmentRead;
}
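// Receiver lifecycle: fetchStart() reads result-set metadata and pushes it to the viewer on the
// first segment, fetchRow() accumulates fetched value objects per row, and fetchEnd() binds
// attribute data and hands the collected rows to the viewer on the UI thread.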
@Override
public void fetchStart(DBCSession session, final DBCResultSet resultSet, long offset, long maxRows)
throws DBCException
{
this.rows.clear();
this.offset = offset;
this.maxRows = maxRows;
if (!nextSegmentRead) {
// Get columns metadata
DBCResultSetMetaData metaData = resultSet.getMeta();
List<DBCAttributeMetaData> rsAttributes = metaData.getAttributes();
columnsCount = rsAttributes.size();
// Extract column info
metaColumns = new DBDAttributeBindingMeta[columnsCount];
for (int i = 0; i < columnsCount; i++) {
metaColumns[i] = DBUtils.getAttributeBinding(session, rsAttributes.get(i));
}
resultSetViewer.setMetaData(metaColumns);
}
}
@Override
public void fetchRow(DBCSession session, DBCResultSet resultSet)
throws DBCException
{
Object[] row = new Object[columnsCount];
for (int i = 0; i < columnsCount; i++) {
try {
row[i] = metaColumns[i].getValueHandler().fetchValueObject(
session,
resultSet,
metaColumns[i].getAttribute(),
metaColumns[i].getOrdinalPosition());
}
catch (Exception e) {
// Do not report the same error multiple times.
// Many errors can occur during result set fetch;
// we report each distinct error only once.
List<Exception> errorList = errors.get(metaColumns[i].getMetaAttribute());
if (errorList == null) {
errorList = new ArrayList<>();
errors.put(metaColumns[i].getMetaAttribute(), errorList);
}
if (!errorList.contains(e)) {
log.warn("Can't read column '" + metaColumns[i].getName() + "' value", e);
errorList.add(e);
}
}
}
rows.add(row);
}
@Override
public void fetchEnd(DBCSession session, final DBCResultSet resultSet)
throws DBCException
{
if (!nextSegmentRead) {
// Read locators' metadata
ResultSetUtils.bindAttributes(session, resultSet, metaColumns, rows);
}
final List<Object[]> tmpRows = rows;
final boolean nextSegmentRead = this.nextSegmentRead;
runInUI(new Runnable() {
@Override
public void run() {
// Push data into viewer
if (!nextSegmentRead) {
resultSetViewer.updatePresentation(resultSet);
resultSetViewer.setData(tmpRows);
} else {
resultSetViewer.appendData(tmpRows);
}
// Check for more data
hasMoreData = maxRows > 0 && tmpRows.size() >= maxRows;
}
});
}
@Override
public void close()
{
nextSegmentRead = false;
errors.clear();
rows = new ArrayList<>();
}
private void runInUI(Runnable runnable) {
Control control = resultSetViewer.getControl();
if (!control.isDisposed()) {
control.getDisplay().asyncExec(runnable);
}
}
}
|
package org.jkiss.dbeaver.ext.postgresql.model;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.ext.postgresql.PostgreUtils;
import org.jkiss.dbeaver.model.DBPHiddenObject;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils;
import org.jkiss.dbeaver.model.impl.jdbc.struct.JDBCTableIndex;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.struct.rdb.DBSIndexType;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
/**
* PostgreIndex
*/
public class PostgreIndex extends JDBCTableIndex<PostgreSchema, PostgreTableBase> implements DBPHiddenObject
{
private boolean isUnique;
private boolean isPrimary; // Primary index - implicit
private boolean isExclusion;
private boolean isImmediate;
private boolean isClustered;
private boolean isValid;
private boolean isCheckXMin;
private boolean isReady;
private String description;
private List<PostgreIndexColumn> columns = new ArrayList<>();
private long amId;
public PostgreIndex(PostgreTableBase parent, String indexName, ResultSet dbResult) {
super(
parent.getContainer(),
parent,
indexName,
DBSIndexType.UNKNOWN,
true);
this.isUnique = JDBCUtils.safeGetBoolean(dbResult, "indisunique");
this.isPrimary = JDBCUtils.safeGetBoolean(dbResult, "indisprimary");
this.isExclusion = JDBCUtils.safeGetBoolean(dbResult, "indisexclusion");
this.isImmediate = JDBCUtils.safeGetBoolean(dbResult, "indimmediate");
this.isClustered = JDBCUtils.safeGetBoolean(dbResult, "indisclustered");
this.isValid = JDBCUtils.safeGetBoolean(dbResult, "indisvalid");
this.isCheckXMin = JDBCUtils.safeGetBoolean(dbResult, "indcheckxmin");
this.isReady = JDBCUtils.safeGetBoolean(dbResult, "indisready");
this.description = JDBCUtils.safeGetString(dbResult, "description");
this.amId = JDBCUtils.safeGetLong(dbResult, "relam");
}
public PostgreIndex(PostgreTableBase parent, String name, DBSIndexType indexType) {
super(parent.getContainer(), parent, name, indexType, false);
}
@NotNull
@Override
public PostgreDataSource getDataSource()
{
return getTable().getDataSource();
}
@Override
@Property(viewable = true, order = 5)
public boolean isUnique()
{
return isUnique;
}
@Property(viewable = false, order = 20)
public boolean isPrimary() {
return isPrimary;
}
@Property(viewable = false, order = 21)
public boolean isExclusion() {
return isExclusion;
}
@Property(viewable = false, order = 22)
public boolean isImmediate() {
return isImmediate;
}
@Property(viewable = false, order = 23)
public boolean isClustered() {
return isClustered;
}
@Property(viewable = false, order = 24)
public boolean isValid() {
return isValid;
}
@Property(viewable = false, order = 25)
public boolean isCheckXMin() {
return isCheckXMin;
}
@Property(viewable = false, order = 26)
public boolean isReady() {
return isReady;
}
public DBSIndexType getIndexType()
{
return super.getIndexType();
}
@Nullable
@Override
@Property(viewable = true, order = 100)
public String getDescription()
{
return description;
}
@Nullable
@Property(viewable = true, order = 30)
public PostgreAccessMethod getAccessMethod(DBRProgressMonitor monitor) throws DBException {
if (amId <= 0) {
return null;
}
return PostgreUtils.getObjectById(monitor, getTable().getDatabase().accessMethodCache, getTable().getDatabase(), amId);
}
@Override
public List<PostgreIndexColumn> getAttributeReferences(DBRProgressMonitor monitor)
{
return columns;
}
public PostgreIndexColumn getColumn(String columnName)
{
return DBUtils.findObject(columns, columnName);
}
void setColumns(List<PostgreIndexColumn> columns)
{
this.columns = columns;
}
public void addColumn(PostgreIndexColumn column)
{
if (columns == null) {
columns = new ArrayList<>();
}
columns.add(column);
}
@NotNull
@Override
public String getFullQualifiedName()
{
return DBUtils.getFullQualifiedName(getDataSource(),
getTable().getContainer(),
this);
}
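// Primary-key indexes are created implicitly (see the isPrimary field above), so they are
// reported as hidden objects.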
@Override
public boolean isHidden() {
return isPrimary;
}
}
|
import java.net.URLClassLoader;
import java.net.URL;
import java.lang.reflect.Method;
import java.io.IOException;
import java.io.InputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.util.Arrays;
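/**
* Launcher for a self-executing war: locates the war that contains this class, extracts the
* bundled WEB-INF/winstone.jar to a temporary file, and reflectively invokes winstone.Launcher
* against the war with a temporary webroot. A shutdown hook removes the temporary webroot on exit.
* Typically run as "java -jar application.war [winstone options]", assuming this class is the
* war's Main-Class.
*/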
public class Main implements Runnable {
public static final String MAIN = "/" + Main.class.getName().replace('.', '/') + ".class";
public static final String WINSTONE_JAR = "/WEB-INF/winstone.jar";
private String[] args;
private String path, warfile;
private boolean debug;
private File webroot;
public Main(String[] args) throws Exception {
this.args = args;
URL mainClass = getClass().getResource(MAIN);
this.path = mainClass.toURI().getSchemeSpecificPart();
this.warfile = mainClass.getFile().replace("!" + MAIN, "").replace("file:", "");
this.debug = isDebug();
this.webroot = File.createTempFile("winstone", "webroot");
this.webroot.delete();
this.webroot.mkdirs();
this.webroot = new File(this.webroot, new File(warfile).getName());
Runtime.getRuntime().addShutdownHook(new Thread(this));
}
private URL extractWinstone() throws Exception {
InputStream jarStream = new URL("jar:" + path.replace(MAIN, WINSTONE_JAR)).openStream();
File jarFile = File.createTempFile("winstone", ".jar");
jarFile.deleteOnExit();
FileOutputStream outStream = new FileOutputStream(jarFile);
try {
byte[] buf = new byte[4096];
int bytesRead = 0;
while ((bytesRead = jarStream.read(buf)) != -1) {
outStream.write(buf, 0, bytesRead);
}
} finally {
jarStream.close();
outStream.close();
}
debug("winstone.jar extracted to " + jarFile.getPath());
return jarFile.toURI().toURL();
}
private void launchWinstone(URL jar) throws Exception {
URLClassLoader loader = new URLClassLoader(new URL[] {jar});
Class klass = Class.forName("winstone.Launcher", true, loader);
Method main = klass.getDeclaredMethod("main", new Class[] {String[].class});
String[] newargs = new String[args.length + 3];
newargs[0] = "--warfile=" + warfile;
newargs[1] = "--webroot=" + webroot;
newargs[2] = "--directoryListings=false";
System.arraycopy(args, 0, newargs, 3, args.length);
debug("invoking Winstone with: " + Arrays.deepToString(newargs));
main.invoke(null, new Object[] {newargs});
}
private void start() throws Exception {
URL u = extractWinstone();
launchWinstone(u);
}
private void debug(String msg) {
if (debug) {
System.out.println(msg);
}
}
private void delete(File f) {
if (f.isDirectory()) {
File[] children = f.listFiles();
for (int i = 0; i < children.length; i++) {
delete(children[i]);
}
}
f.delete();
}
public void run() {
delete(webroot.getParentFile());
}
public static void main(String[] args) {
try {
new Main(args).start();
} catch (Exception e) {
System.err.println("error: " + e.toString());
if (isDebug()) {
e.printStackTrace();
}
System.exit(1);
}
}
private static boolean isDebug() {
return System.getProperty("warbler.debug") != null;
}
}
|
package com.campmongoose.serversaturday.spigot.menu.anvil;
import com.campmongoose.serversaturday.common.Reference.MenuText;
import com.campmongoose.serversaturday.spigot.SpigotServerSaturday;
import com.campmongoose.serversaturday.spigot.menu.AbstractSpigotChestMenu;
import java.util.function.BiFunction;
import javax.annotation.Nonnull;
import net.wesjd.anvilgui.AnvilGUI;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
public abstract class SSAnvilGUI extends AnvilGUI {
@Nonnull
private final AbstractSpigotChestMenu prevMenu;
public SSAnvilGUI(@Nonnull Player player, @Nonnull AbstractSpigotChestMenu prevMenu, BiFunction<Player, String, String> biFunction) {
super(SpigotServerSaturday.instance(), player, MenuText.RENAME_ME, biFunction);
this.prevMenu = prevMenu;
}
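// Reopens the previous chest menu one tick after this anvil GUI is closed; the delayed task is
// presumably used to avoid opening an inventory while the close is still being processed.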
@Override
public void closeInventory() {
super.closeInventory();
Bukkit.getScheduler().scheduleSyncDelayedTask(SpigotServerSaturday.instance(), prevMenu::open);
}
}
|
package org.sagebionetworks.schema.worker;
import org.apache.commons.io.IOUtils;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mockito;
import org.sagebionetworks.AsynchronousJobWorkerHelper;
import org.sagebionetworks.repo.manager.EntityManager;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.manager.dataaccess.AccessRequirementManager;
import org.sagebionetworks.repo.manager.schema.JsonSchemaManager;
import org.sagebionetworks.repo.manager.schema.JsonSchemaValidationManager;
import org.sagebionetworks.repo.manager.schema.JsonSubject;
import org.sagebionetworks.repo.manager.schema.SynapseSchemaBootstrap;
import org.sagebionetworks.repo.model.AccessRequirement;
import org.sagebionetworks.repo.model.AccessRequirementDAO;
import org.sagebionetworks.repo.model.AsynchJobFailedException;
import org.sagebionetworks.repo.model.AuthorizationConstants.BOOTSTRAP_PRINCIPAL;
import org.sagebionetworks.repo.model.FileEntity;
import org.sagebionetworks.repo.model.Folder;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.RestrictableObjectDescriptor;
import org.sagebionetworks.repo.model.RestrictableObjectType;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.annotation.v2.Annotations;
import org.sagebionetworks.repo.model.annotation.v2.AnnotationsV2TestUtils;
import org.sagebionetworks.repo.model.annotation.v2.AnnotationsValueType;
import org.sagebionetworks.repo.model.entity.BindSchemaToEntityRequest;
import org.sagebionetworks.repo.model.file.S3FileHandle;
import org.sagebionetworks.repo.model.helper.FileHandleObjectHelper;
import org.sagebionetworks.repo.model.helper.TermsOfUseAccessRequirementObjectHelper;
import org.sagebionetworks.repo.model.jdo.KeyFactory;
import org.sagebionetworks.repo.model.schema.CreateOrganizationRequest;
import org.sagebionetworks.repo.model.schema.CreateSchemaRequest;
import org.sagebionetworks.repo.model.schema.CreateSchemaResponse;
import org.sagebionetworks.repo.model.schema.GetValidationSchemaRequest;
import org.sagebionetworks.repo.model.schema.GetValidationSchemaResponse;
import org.sagebionetworks.repo.model.schema.JsonSchema;
import org.sagebionetworks.repo.model.schema.JsonSchemaConstants;
import org.sagebionetworks.repo.model.schema.ObjectType;
import org.sagebionetworks.repo.model.schema.Organization;
import org.sagebionetworks.repo.model.schema.Type;
import org.sagebionetworks.repo.model.schema.ValidationResults;
import org.sagebionetworks.repo.model.table.AnnotationType;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.model.table.ColumnType;
import org.sagebionetworks.repo.model.table.ObjectAnnotationDTO;
import org.sagebionetworks.repo.model.table.ViewColumnModelRequest;
import org.sagebionetworks.repo.model.table.ViewColumnModelResponse;
import org.sagebionetworks.repo.model.table.ViewEntityType;
import org.sagebionetworks.repo.model.table.ViewScope;
import org.sagebionetworks.repo.model.table.ViewTypeMask;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.schema.adapter.JSONEntity;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.schema.adapter.org.json.EntityFactory;
import org.sagebionetworks.util.Pair;
import org.sagebionetworks.util.TimeUtils;
import org.sagebionetworks.workers.util.aws.message.RecoverableMessageException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import java.io.InputStream;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.when;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(locations = { "classpath:test-context.xml" })
public class JsonSchemaWorkerIntegrationTest {
public static final long MAX_WAIT_MS = 1000 * 80;
@Autowired
private AsynchronousJobWorkerHelper asynchronousJobWorkerHelper;
@Autowired
private JsonSchemaManager jsonSchemaManager;
@Autowired
private SynapseSchemaBootstrap schemaBootstrap;
@Autowired
private UserManager userManager;
@Autowired
private JsonSchemaValidationManager jsonSchemaValidationManager;
@Autowired
private EntityManager entityManager;
@Autowired
private VelocityEngine velocityEngine;
@Autowired
private FileHandleObjectHelper fileHandleObjectHelper;
@Autowired
private TermsOfUseAccessRequirementObjectHelper termsOfUseAccessRequirementObjectHelper;
@Autowired
private AccessRequirementManager accessRequirementManager;
UserInfo adminUserInfo;
String organizationName;
String schemaName;
String semanticVersion;
JsonSchema basicSchema;
Organization organization;
String projectId;
@BeforeEach
public void before() {
jsonSchemaManager.truncateAll();
adminUserInfo = userManager.getUserInfo(BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId());
organizationName = "my.org.net";
schemaName = "some.schema";
semanticVersion = "1.1.1";
CreateOrganizationRequest createOrgRequest = new CreateOrganizationRequest();
createOrgRequest.setOrganizationName(organizationName);
organization = jsonSchemaManager.createOrganziation(adminUserInfo, createOrgRequest);
basicSchema = new JsonSchema();
basicSchema.set$id(organizationName + JsonSchemaConstants.PATH_DELIMITER + schemaName
+ JsonSchemaConstants.VERSION_PRFIX + semanticVersion);
basicSchema.setDescription("basic schema for integration test");
}
@AfterEach
public void after() {
fileHandleObjectHelper.truncateAll();
jsonSchemaManager.truncateAll();
if (projectId != null) {
entityManager.deleteEntity(adminUserInfo, projectId);
}
termsOfUseAccessRequirementObjectHelper.truncateAll();
}
@Test
public void testCreateSchema() throws InterruptedException, AssertionError, AsynchJobFailedException {
CreateSchemaRequest request = new CreateSchemaRequest();
request.setSchema(basicSchema);
asynchronousJobWorkerHelper.assertJobResponse(adminUserInfo, request, (CreateSchemaResponse response) -> {
assertNotNull(response);
assertNotNull(response.getNewVersionInfo());
assertEquals(adminUserInfo.getId().toString(), response.getNewVersionInfo().getCreatedBy());
assertEquals(semanticVersion, response.getNewVersionInfo().getSemanticVersion());
}, MAX_WAIT_MS);
jsonSchemaManager.deleteSchemaById(adminUserInfo, basicSchema.get$id());
assertThrows(NotFoundException.class, () -> {
jsonSchemaManager.deleteSchemaById(adminUserInfo, basicSchema.get$id());
});
}
@Test
public void testCreateSchemaCycle() throws InterruptedException, AssertionError, AsynchJobFailedException {
// one
JsonSchema one = createSchema(organizationName, "one");
one.setDescription("no cycle yet");
CreateSchemaRequest request = new CreateSchemaRequest();
request.setSchema(one);
asynchronousJobWorkerHelper.assertJobResponse(adminUserInfo, request, (CreateSchemaResponse response) -> {
assertNotNull(response);
}, MAX_WAIT_MS);
// two
JsonSchema refToOne = create$RefSchema(one);
JsonSchema two = createSchema(organizationName, "two");
two.setDescription("depends on one");
two.setItems(refToOne);
request = new CreateSchemaRequest();
request.setSchema(two);
asynchronousJobWorkerHelper.assertJobResponse(adminUserInfo, request, (CreateSchemaResponse response) -> {
assertNotNull(response);
}, MAX_WAIT_MS);
// update one to depend on two
one.setItems(create$RefSchema(two));
one.setDescription("now has a cycle");
CreateSchemaRequest cycleRequest = new CreateSchemaRequest();
cycleRequest.setSchema(one);
String message = assertThrows(IllegalArgumentException.class, () -> {
// call under test
asynchronousJobWorkerHelper.assertJobResponse(adminUserInfo, cycleRequest,
(CreateSchemaResponse response) -> {
fail("Should have not receive a response");
}, MAX_WAIT_MS);
}).getMessage();
assertEquals("Schema $id: 'my.org.net-one' has a circular dependency", message);
}
public CreateSchemaResponse registerSchema(JsonSchema schema) throws Exception {
CreateSchemaRequest request = new CreateSchemaRequest();
request.setSchema(schema);
System.out.println("Creating schema: '" + schema.get$id() + "'");
return asynchronousJobWorkerHelper
.assertJobResponse(adminUserInfo, request, (CreateSchemaResponse response) -> {
assertNotNull(response);
System.out.println(response.getNewVersionInfo());
}, MAX_WAIT_MS).getResponse();
}
public JsonSchema getSchemaFromClasspath(String name) throws Exception {
String json = loadStringFromClasspath(name);
return EntityFactory.createEntityFromJSONString(json, JsonSchema.class);
}
/**
* Load a JSON schema from the classpath and apply the provided velocity context before parsing.
* @param name
* @param context
* @return
* @throws Exception
*/
public JsonSchema getSchemaTemplateFromClasspath(String name, String schemaId, VelocityContext context) throws Exception {
Template template = velocityEngine.getTemplate(name);
StringWriter writer = new StringWriter();
template.merge(context, writer);
String json = writer.toString();
JsonSchema schema = EntityFactory.createEntityFromJSONString(json, JsonSchema.class);
schema.set$schema("http://json-schema.org/draft-07/schema
schema.set$id(schemaId);
return schema;
}
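/*
* Hypothetical usage sketch (the template name, schema $id, and context key below are examples,
* not fixtures used by this test):
*
* VelocityContext context = new VelocityContext();
* context.put("maxSizeMB", 50);
* JsonSchema schema = getSchemaTemplateFromClasspath("schema/SizeLimited.json.vm",
*         "my.organization-size.Limited", context);
* registerSchema(schema);
*/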
/**
* Load the file contents from the classpath.
*
* @param name
* @return
* @throws Exception
*/
public String loadStringFromClasspath(String name) throws Exception {
try (InputStream in = JsonSchemaWorkerIntegrationTest.class.getClassLoader().getResourceAsStream(name);) {
if (in == null) {
throw new IllegalArgumentException("Cannot find: '" + name + "' on the classpath");
}
return IOUtils.toString(in, "UTF-8");
}
}
@Test
public void testMainUseCase() throws Exception {
bootstrapAndCreateOrganization();
String[] schemasToRegister = { "pets/PetType.json", "pets/Pet.json", "pets/CatBreed.json", "pets/DogBreed.json",
"pets/Cat.json", "pets/Dog.json", "pets/PetPhoto.json" };
for (String fileName : schemasToRegister) {
JsonSchema schema = getSchemaFromClasspath(fileName);
registerSchema(schema);
}
JsonSchema validationSchema = jsonSchemaManager.getValidationSchema("my.organization-pets.PetPhoto");
assertNotNull(schemaBootstrap);
printJson(validationSchema);
assertNotNull(validationSchema.getDefinitions());
assertTrue(validationSchema.getDefinitions().containsKey("my.organization-pets.PetType-1.0.1"));
assertTrue(validationSchema.getDefinitions().containsKey("my.organization-pets.Pet"));
assertTrue(validationSchema.getDefinitions().containsKey("my.organization-pets.Pet-1.0.3"));
assertTrue(validationSchema.getDefinitions().containsKey("my.organization-pets.dog.Breed"));
assertTrue(validationSchema.getDefinitions().containsKey("my.organization-pets.cat.Breed"));
assertTrue(validationSchema.getDefinitions().containsKey("my.organization-pets.cat.Cat"));
assertTrue(validationSchema.getDefinitions().containsKey("my.organization-pets.dog.Dog"));
assertTrue(validationSchema.getDefinitions().containsKey("org.sagebionetworks-repo.model.Entity-1.0.0"));
assertTrue(validationSchema.getDefinitions().containsKey("org.sagebionetworks-repo.model.Versionable-1.0.0"));
assertTrue(validationSchema.getDefinitions()
.containsKey("org.sagebionetworks-repo.model.VersionableEntity-1.0.0"));
assertTrue(validationSchema.getDefinitions().containsKey("org.sagebionetworks-repo.model.FileEntity-1.0.0"));
String validCatJsonString = loadStringFromClasspath("pets/ValidCat.json");
JSONObject validCat = new JSONObject(validCatJsonString);
JsonSubject mockSubject = Mockito.mock(JsonSubject.class);
when(mockSubject.toJson()).thenReturn(validCat);
// this schema should be valid
ValidationResults result = jsonSchemaValidationManager.validate(validationSchema, mockSubject);
assertNotNull(result);
assertTrue(result.getIsValid());
// Changing the petType to dog should cause a schema violation.
validCat.put("petType", "dog");
result = jsonSchemaValidationManager.validate(validationSchema, mockSubject);
assertNotNull(result);
assertFalse(result.getIsValid());
assertEquals("#: 0 subschemas matched instead of one", result.getValidationErrorMessage());
printJson(result);
}
void bootstrapAndCreateOrganization() throws RecoverableMessageException, InterruptedException {
jsonSchemaManager.truncateAll();
schemaBootstrap.bootstrapSynapseSchemas();
CreateOrganizationRequest createOrgRequest = new CreateOrganizationRequest();
createOrgRequest.setOrganizationName("my.organization");
organization = jsonSchemaManager.createOrganziation(adminUserInfo, createOrgRequest);
}
@Test
public void testGetValidationSchemaWorker() throws AssertionError, AsynchJobFailedException {
CreateSchemaRequest createRequest = new CreateSchemaRequest();
createRequest.setSchema(basicSchema);
// First create the schema.
asynchronousJobWorkerHelper.assertJobResponse(adminUserInfo, createRequest, (CreateSchemaResponse response) -> {
assertNotNull(response);
assertNotNull(response.getNewVersionInfo());
assertEquals(adminUserInfo.getId().toString(), response.getNewVersionInfo().getCreatedBy());
assertEquals(semanticVersion, response.getNewVersionInfo().getSemanticVersion());
}, MAX_WAIT_MS);
GetValidationSchemaRequest getRequest = new GetValidationSchemaRequest();
getRequest.set$id(basicSchema.get$id());
// Get the validation schema for this schema
asynchronousJobWorkerHelper.assertJobResponse(adminUserInfo, getRequest,
(GetValidationSchemaResponse response) -> {
assertNotNull(response);
assertNotNull(response.getValidationSchema());
// the absolute $id should be returned: PLFM-6515
basicSchema.set$id(JsonSchemaManager.createAbsolute$id("my.org.net-some.schema-1.1.1"));
assertEquals(basicSchema, response.getValidationSchema());
}, MAX_WAIT_MS);
}
@Test
public void testEntitySchemaValidation() throws Exception {
bootstrapAndCreateOrganization();
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
Project project = entityManager.getEntity(adminUserInfo, projectId, Project.class);
// create the schema
String fileName = "schema/SimpleFolder.json";
JsonSchema schema = getSchemaFromClasspath(fileName);
CreateSchemaResponse createResponse = registerSchema(schema);
String schema$id = createResponse.getNewVersionInfo().get$id();
// bind the schema to the project
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(schema$id);
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest);
// add a folder to the project
Folder folder = new Folder();
folder.setParentId(project.getId());
String folderId = entityManager.createEntity(adminUserInfo, folder, null);
JSONObject folderJson = entityManager.getEntityJson(folderId, false);
// Add the foo annotation to the folder
folderJson.put("foo", "bar");
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
Folder resultFolder = entityManager.getEntity(adminUserInfo, folderId, Folder.class);
// wait for the folder to be valid.
waitForValidationResults(adminUserInfo, folderId, (ValidationResults t) -> {
assertNotNull(t);
assertTrue(t.getIsValid());
assertEquals(JsonSchemaManager.createAbsolute$id(schema$id), t.getSchema$id());
assertEquals(resultFolder.getId(), t.getObjectId());
assertEquals(ObjectType.entity, t.getObjectType());
assertEquals(resultFolder.getEtag(), t.getObjectEtag());
});
// Removing the binding from the container should trigger removal of the results
// for the child.
entityManager.clearBoundSchema(adminUserInfo, projectId);
waitForValidationResultsToBeNotFound(adminUserInfo, folderId);
}
@Test
public void testCreateWithDryRun() throws Exception {
CreateSchemaRequest createRequest = new CreateSchemaRequest();
createRequest.setSchema(basicSchema);
createRequest.setDryRun(true);
// First create the schema.
asynchronousJobWorkerHelper.assertJobResponse(adminUserInfo, createRequest, (CreateSchemaResponse response) -> {
assertNotNull(response);
assertNotNull(response.getNewVersionInfo());
assertEquals(adminUserInfo.getId().toString(), response.getNewVersionInfo().getCreatedBy());
assertEquals(semanticVersion, response.getNewVersionInfo().getSemanticVersion());
}, MAX_WAIT_MS);
// the schema should not exist
assertThrows(NotFoundException.class, () -> {
jsonSchemaManager.getSchema(basicSchema.get$id(), true);
});
}
@Test
public void testEntitySchemaValidationWithBoolean() throws Exception {
bootstrapAndCreateOrganization();
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
Project project = entityManager.getEntity(adminUserInfo, projectId, Project.class);
// create the schema
String fileName = "schema/FolderWithBoolean.json";
JsonSchema schema = getSchemaFromClasspath(fileName);
CreateSchemaResponse createResponse = registerSchema(schema);
String schema$id = createResponse.getNewVersionInfo().get$id();
// bind the schema to the project
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(schema$id);
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest);
// add a folder to the project
Folder folder = new Folder();
folder.setParentId(project.getId());
String folderId = entityManager.createEntity(adminUserInfo, folder, null);
JSONObject folderJson = entityManager.getEntityJson(folderId, false);
// Add the hasBoolean annotation to the folder
folderJson.put("hasBoolean", true);
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
Folder resultFolder = entityManager.getEntity(adminUserInfo, folderId, Folder.class);
// wait for the folder to be valid.
waitForValidationResults(adminUserInfo, folderId, (ValidationResults t) -> {
assertNotNull(t);
assertTrue(t.getIsValid());
assertEquals(JsonSchemaManager.createAbsolute$id(schema$id), t.getSchema$id());
assertEquals(resultFolder.getId(), t.getObjectId());
assertEquals(ObjectType.entity, t.getObjectType());
assertEquals(resultFolder.getEtag(), t.getObjectEtag());
});
// Removing the binding from the container should trigger removal of the results
// for the child.
entityManager.clearBoundSchema(adminUserInfo, projectId);
waitForValidationResultsToBeNotFound(adminUserInfo, folderId);
}
@Test
public void testNoSemanticVersionSchemaRevalidationWithSchemaChange() throws Exception {
// PLFM-6757
bootstrapAndCreateOrganization();
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
Project project = entityManager.getEntity(adminUserInfo, projectId, Project.class);
// create the schema with no semantic version
String fileName = "schema/FolderWithBoolean.json";
JsonSchema schema1 = getSchemaFromClasspath(fileName);
CreateSchemaResponse createResponse = registerSchema(schema1);
String schema$id1 = createResponse.getNewVersionInfo().get$id();
// bind the schema to the project
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(schema$id1);
/*
* Note that we prohibit notification messages from being sent on the bind because
* multiple messages sent in this setup will trigger validation, and the validation
* triggered by this particular bind will be delayed and can cause this test
* to pass when it is not supposed to pass.
*/
boolean sendNotificationMessages = false;
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest, sendNotificationMessages);
// add a folder to the project
Folder folder = new Folder();
folder.setParentId(project.getId());
String folderId = entityManager.createEntity(adminUserInfo, folder, null);
JSONObject folderJson = entityManager.getEntityJson(folderId, false);
// Add the hasBoolean annotation to the folder
folderJson.put("hasBoolean", true);
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
Folder resultFolder = entityManager.getEntity(adminUserInfo, folderId, Folder.class);
// wait for the folder to be valid against the schema
waitForValidationResults(adminUserInfo, folderId, (ValidationResults t) -> {
assertNotNull(t);
assertTrue(t.getIsValid());
assertEquals(JsonSchemaManager.createAbsolute$id(schema$id1), t.getSchema$id());
assertEquals(resultFolder.getId(), t.getObjectId());
assertEquals(ObjectType.entity, t.getObjectType());
assertEquals(resultFolder.getEtag(), t.getObjectEtag());
});
/*
* Wait 5 seconds for the possibility of lingering validation work to finish up.
* Note that if we comment out the solution to PLFM-6757
* then this test consistently fails on timeout from the 2nd wait. In other words,
* any lingering revalidation work in-progress or to-be-in-progress before this,
* is unlikely to be the reason this test will pass.
*/
Thread.sleep(5000);
// Revalidation step. Replace the schema with the same schema + an additional required field.
JsonSchema schema2 = getSchemaFromClasspath(fileName);
schema2.setRequired(Arrays.asList("requiredField"));
CreateSchemaResponse createResponse2 = registerSchema(schema2);
String schema$id2 = createResponse2.getNewVersionInfo().get$id();
// Should be the same schema being referenced
assertEquals(schema$id1, schema$id2);
// wait for the folder to be invalid against the schema
waitForValidationResults(adminUserInfo, folderId, (ValidationResults t) -> {
assertNotNull(t);
assertFalse(t.getIsValid());
assertEquals(JsonSchemaManager.createAbsolute$id(schema$id2), t.getSchema$id());
assertEquals(resultFolder.getId(), t.getObjectId());
assertEquals(ObjectType.entity, t.getObjectType());
assertEquals(resultFolder.getEtag(), t.getObjectEtag());
});
// clean up
entityManager.clearBoundSchema(adminUserInfo, projectId);
waitForValidationResultsToBeNotFound(adminUserInfo, folderId);
}
@Test
public void testGetEntityJsonWithBoundJsonSchema() throws Exception {
// PLFM-6811
// this test helps demonstrate how annotations are driven by the JsonSchema
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
Project project = entityManager.getEntity(adminUserInfo, projectId, Project.class);
// build properties with fooKey single
Map<String, JsonSchema> properties = new HashMap<>();
JsonSchema fooType = new JsonSchema();
fooType.setType(Type.string);
properties.put("fooKey", fooType);
basicSchema.setProperties(properties);
// build a subschema in an allOf with barKey array
List<JsonSchema> allOfSchemas = new LinkedList<>();
Map<String, JsonSchema> properties2 = new HashMap<>();
JsonSchema subSchema = new JsonSchema();
JsonSchema typeSchemaArray2 = new JsonSchema();
JsonSchema itemsSchemaArray2 = new JsonSchema();
itemsSchemaArray2.setType(Type.string);
typeSchemaArray2.setType(Type.array);
typeSchemaArray2.setItems(itemsSchemaArray2);
properties2.put("barKey", typeSchemaArray2);
subSchema.setProperties(properties2);
allOfSchemas.add(subSchema);
basicSchema.setAllOf(allOfSchemas);
/*
* this is the basicSchema
* {
* "properties":
* {
* "fooKey": { "type": "string" }
* },
* "allOf": [ { "properties": { "barKey": { "type": "array", "items": { "type": "number" } } } } ]
* }
*/
// create the schema
CreateSchemaResponse createResponse = registerSchema(basicSchema);
String schema$id = createResponse.getNewVersionInfo().get$id();
// bind the schema to the project
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(schema$id);
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest);
// add a folder to the project
Folder folder = new Folder();
folder.setParentId(project.getId());
String folderId = entityManager.createEntity(adminUserInfo, folder, null);
JSONObject folderJson = entityManager.getEntityJson(folderId, false);
// Add 3 single element array annotations to the folder
folderJson.put("fooKey", Arrays.asList("foo"));
folderJson.put("barKey", Arrays.asList("bar"));
folderJson.put("bazKey", Arrays.asList("baz"));
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
waitForValidationResults(adminUserInfo, folderId, (ValidationResults t) -> {
assertNotNull(t);
assertTrue(t.getIsValid());
});
// call under test; the getJSONArray call below will throw an org.json.JSONException
// if barKey is not a JSONArray
folderJson = entityManager.getEntityJson(folderId, false);
// barKey is defined as an array in a sub-schema, so it stays an array
assertEquals("bar", folderJson.getJSONArray("barKey").getString(0));
// bazKey is not defined in the schema and has a single value, so it is returned as a single value
assertEquals("baz", folderJson.getString("bazKey"));
// fooKey is defined as a single value in the schema, so it becomes a single value
assertEquals("foo", folderJson.getString("fooKey"));
// clean up
entityManager.clearBoundSchema(adminUserInfo, projectId);
waitForValidationResultsToBeNotFound(adminUserInfo, folderId);
}
@Test
public void testGetEntityJsonWithBoundSchemaContainingReference() throws Exception {
// PLFM-6934
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
Project project = entityManager.getEntity(adminUserInfo, projectId, Project.class);
// child schema: a string type constrained by an enum
JsonSchema child = new JsonSchema();
child.set$id(organizationName + JsonSchemaConstants.PATH_DELIMITER + "child");
child.setType(Type.string);
child.set_enum(Arrays.asList("Alabama", "Alaska"));
// reference to child schema
JsonSchema refToChild = new JsonSchema();
refToChild.set$ref(child.get$id());
// parent contains a reference to the child
JsonSchema parent = new JsonSchema();
parent.set$id(organizationName + JsonSchemaConstants.PATH_DELIMITER + "parent");
Map<String, JsonSchema> parentProps = new HashMap<>();
parentProps.put("state", refToChild);
parent.setProperties(parentProps);
parent.setRequired(Arrays.asList("state"));
// create the schemas
registerSchema(child);
CreateSchemaResponse createResponse = registerSchema(parent);
String schema$id = createResponse.getNewVersionInfo().get$id();
// bind the schema to the project
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(schema$id);
boolean sendNotificationMessages = false;
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest, sendNotificationMessages);
// add a folder to the project
Folder folder = new Folder();
folder.setParentId(project.getId());
String folderId = entityManager.createEntity(adminUserInfo, folder, null);
JSONObject folderJson = entityManager.getEntityJson(folderId, false);
folderJson.put("state", Arrays.asList("Alabama"));
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
// wait till it is valid
waitForValidationResults(adminUserInfo, folderId, (ValidationResults t) -> {
assertNotNull(t);
assertTrue(t.getIsValid());
});
// call under test
// should not be an array
folderJson = entityManager.getEntityJson(folderId, false);
assertEquals("Alabama", folderJson.getString("state"));
// clean up
entityManager.clearBoundSchema(adminUserInfo, projectId);
waitForValidationResultsToBeNotFound(adminUserInfo, folderId);
}
@Test
public void testGetEntityJsonWithDerivedAnnotations() throws Exception {
bootstrapAndCreateOrganization();
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
Project project = entityManager.getEntity(adminUserInfo, projectId, Project.class);
// create the schema
String fileName = "schema/DerivedConditionalConst.json";
JsonSchema schema = getSchemaFromClasspath(fileName);
CreateSchemaResponse createResponse = registerSchema(schema);
String schema$id = createResponse.getNewVersionInfo().get$id();
// bind the schema to the project
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(schema$id);
bindRequest.setEnableDerivedAnnotations(true);
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest);
// add a folder to the project
Folder folder = new Folder();
folder.setParentId(project.getId());
String folderId = entityManager.createEntity(adminUserInfo, folder, null);
JSONObject folderJson = entityManager.getEntityJson(folderId, false);
folderJson.put("someBoolean", true);
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
waitForValidationResults(adminUserInfo, folderId, (ValidationResults t) -> {
assertNotNull(t);
assertTrue(t.getIsValid());
});
folderJson = entityManager.getEntityJson(folderId, true);
assertEquals(true, folderJson.getBoolean("someBoolean"));
assertEquals(456, folderJson.getLong("unconditionalDefault"));
assertEquals("someBoolean was true", folderJson.getString("someConditional"));
assertEquals(999, folderJson.getLong("conditionalLong"));
}
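/*
 * Illustrative sketch only: the assertions above and in the replication tests below suggest that
 * schema/DerivedConditionalConst.json has roughly this shape (the actual file on the classpath may differ):
 *
 * {
 *   "properties": {
 *     "someBoolean": { "type": "boolean" },
 *     "unconditionalDefault": { "default": 456 }
 *   },
 *   "if":   { "properties": { "someBoolean": { "const": true } } },
 *   "then": { "properties": {
 *     "someConditional": { "const": "someBoolean was true" },
 *     "conditionalLong": { "const": 999 }
 *   } },
 *   "else": { "properties": { "someConditional": { "const": "someBoolean was false" } } }
 * }
 */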
@Test
public void testValidationSchemaIndexWithReindexingAndRevalidation() throws Exception {
// PLFM-6870: Validate that when a schema is changed, the validation schema index is re-indexed for
// all schemas that reference the changed schema, as well as the schemas that reference those schemas,
// and that this also triggers revalidation of entities
// child schema, key property of type string
JsonSchema child = new JsonSchema();
child.set$id(organizationName + JsonSchemaConstants.PATH_DELIMITER + "child");
Map<String, JsonSchema> properties = new HashMap<>();
JsonSchema typeSchema = new JsonSchema();
typeSchema.setType(Type.string);
properties.put("key", typeSchema);
child.setProperties(properties);
// reference to child schema
JsonSchema refToChild = new JsonSchema();
refToChild.set$ref(child.get$id());
// parent contains a reference to the child
JsonSchema parent = new JsonSchema();
parent.set$id(organizationName + JsonSchemaConstants.PATH_DELIMITER + "parent");
parent.setAllOf(Arrays.asList(refToChild));
// create project
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
Project project = entityManager.getEntity(adminUserInfo, projectId, Project.class);
// create child schema
CreateSchemaResponse createResponse = registerSchema(child);
// create parent schema
createResponse = registerSchema(parent);
// bind parent schema to the project
String parentSchema$id = createResponse.getNewVersionInfo().get$id();
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(parentSchema$id);
// we want validation to be triggered by the annotation update below, so do not send notifications on bind
boolean sendNotificationMessages = false;
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest, sendNotificationMessages);
// add a folder to the project
Folder folder = new Folder();
folder.setParentId(project.getId());
String folderId = entityManager.createEntity(adminUserInfo, folder, null);
JSONObject folderJson = entityManager.getEntityJson(folderId, false);
// Add a single-element array annotation to the folder that is valid against the schema
folderJson.put("key", Arrays.asList("foo"));
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
// wait till it is valid
waitForValidationResults(adminUserInfo, folderId, (ValidationResults t) -> {
assertNotNull(t);
assertTrue(t.getIsValid());
});
// change child schema and register the new schema
typeSchema.setType(Type.number);
properties.put("key", typeSchema);
child.setProperties(properties);
createResponse = registerSchema(child);
// wait till it is not valid
waitForValidationResults(adminUserInfo, folderId, (ValidationResults t) -> {
assertNotNull(t);
assertFalse(t.getIsValid());
});
// clean up
entityManager.clearBoundSchema(adminUserInfo, projectId);
waitForValidationResultsToBeNotFound(adminUserInfo, folderId);
}
@Test
public void testDerivedAnnotationsAndReplication() throws Exception {
bootstrapAndCreateOrganization();
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
Project project = entityManager.getEntity(adminUserInfo, projectId, Project.class);
// add a folder to the project
Folder folder = new Folder();
folder.setParentId(project.getId());
String folderId = entityManager.createEntity(adminUserInfo, folder, null);
JSONObject folderJson = entityManager.getEntityJson(folderId, false);
folderJson.put("myAnnotation", "myAnnotationValue");
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
asynchronousJobWorkerHelper.waitForReplicationIndexData(folderId, data-> {
assertEquals(List.of(
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "myAnnotation", AnnotationType.STRING, List.of("myAnnotationValue"))
),
data.getAnnotations());
}, MAX_WAIT_MS);
// create the schema
String fileName = "schema/DerivedConditionalConst.json";
JsonSchema schema = getSchemaFromClasspath(fileName);
CreateSchemaResponse createResponse = registerSchema(schema);
String schema$id = createResponse.getNewVersionInfo().get$id();
// Now bind the schema to the project; this enables derived annotations from the schema, which will eventually be replicated
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(schema$id);
bindRequest.setEnableDerivedAnnotations(true);
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest);
asynchronousJobWorkerHelper.waitForReplicationIndexData(folderId, data-> {
assertEquals(List.of(
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "conditionalLong", AnnotationType.LONG, List.of("999"), true),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "myAnnotation", AnnotationType.STRING, List.of("myAnnotationValue"), false),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "someConditional", AnnotationType.STRING, List.of("someBoolean was true"), true),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "unconditionalDefault", AnnotationType.LONG, List.of("456"), true)
),
data.getAnnotations());
}, MAX_WAIT_MS);
// Now we put an explicit property that should be replicated as well
folderJson.put("someBoolean", false);
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
asynchronousJobWorkerHelper.waitForReplicationIndexData(folderId, data-> {
assertEquals(List.of(
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "myAnnotation", AnnotationType.STRING, List.of("myAnnotationValue"), false),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "someBoolean", AnnotationType.BOOLEAN, List.of("false"), false),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "someConditional", AnnotationType.STRING, List.of("someBoolean was false"), true),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "unconditionalDefault", AnnotationType.LONG, List.of("456"), true)
),
data.getAnnotations());
}, MAX_WAIT_MS);
// Now switch the boolean
folderJson.put("someBoolean", true);
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
asynchronousJobWorkerHelper.waitForReplicationIndexData(folderId, data-> {
assertEquals(List.of(
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "conditionalLong", AnnotationType.LONG, List.of("999"), true),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "myAnnotation", AnnotationType.STRING, List.of("myAnnotationValue"), false),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "someBoolean", AnnotationType.BOOLEAN, List.of("true"), false),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "someConditional", AnnotationType.STRING, List.of("someBoolean was true"), true),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "unconditionalDefault", AnnotationType.LONG, List.of("456"), true)
),
data.getAnnotations());
}, MAX_WAIT_MS);
}
@Test
public void testDerivedAnnotationsAndViewColumnModelRequest() throws Exception {
bootstrapAndCreateOrganization();
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
Project project = entityManager.getEntity(adminUserInfo, projectId, Project.class);
// create the schema
String fileName = "schema/DerivedConditionalConst.json";
JsonSchema schema = getSchemaFromClasspath(fileName);
CreateSchemaResponse createResponse = registerSchema(schema);
String schema$id = createResponse.getNewVersionInfo().get$id();
// Now bind the schema to the project; this enables derived annotations from the schema, which will eventually be replicated
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(schema$id);
bindRequest.setEnableDerivedAnnotations(true);
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest);
// add a folder to the project
Folder folder = new Folder();
folder.setParentId(project.getId());
String folderId = entityManager.createEntity(adminUserInfo, folder, null);
JSONObject folderJson = entityManager.getEntityJson(folderId, false);
folderJson.put("myAnnotation", "myAnnotationValue");
folderJson = entityManager.updateEntityJson(adminUserInfo, folderId, folderJson);
asynchronousJobWorkerHelper.waitForReplicationIndexData(folderId, data-> {
assertEquals(List.of(
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "conditionalLong", AnnotationType.LONG, List.of("999"), true),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "myAnnotation", AnnotationType.STRING, List.of("myAnnotationValue"), false),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "someConditional", AnnotationType.STRING, List.of("someBoolean was true"), true),
new ObjectAnnotationDTO(KeyFactory.stringToKey(folderId), 1L, "unconditionalDefault", AnnotationType.LONG, List.of("456"), true)
),
data.getAnnotations());
}, MAX_WAIT_MS);
ViewScope viewScope = new ViewScope()
.setViewEntityType(ViewEntityType.entityview)
.setScope(List.of(projectId))
.setViewTypeMask(ViewTypeMask.Folder.getMask());
ViewColumnModelRequest request = new ViewColumnModelRequest()
.setViewScope(viewScope);
asynchronousJobWorkerHelper.assertJobResponse(adminUserInfo, request, (ViewColumnModelResponse response) -> {
List<ColumnModel> expected = List.of(
new ColumnModel().setName("myAnnotation").setColumnType(ColumnType.STRING).setMaximumSize(17L)
);
assertEquals(expected, response.getResults());
}, MAX_WAIT_MS);
// now ask to include the derived annotations
request.setIncludeDerivedAnnotations(true);
asynchronousJobWorkerHelper.assertJobResponse(adminUserInfo, request, (ViewColumnModelResponse response) -> {
List<ColumnModel> expected = List.of(
new ColumnModel().setName("conditionalLong").setColumnType(ColumnType.INTEGER),
new ColumnModel().setName("myAnnotation").setColumnType(ColumnType.STRING).setMaximumSize(17L),
new ColumnModel().setName("someConditional").setColumnType(ColumnType.STRING).setMaximumSize(20L),
new ColumnModel().setName("unconditionalDefault").setColumnType(ColumnType.INTEGER)
);
assertEquals(expected, response.getResults());
}, MAX_WAIT_MS);
}
@Test
public void testDerivedAnnotationWithAccessRequirementIdBinding() throws Exception {
bootstrapAndCreateOrganization();
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
AccessRequirement ar1 = termsOfUseAccessRequirementObjectHelper.create((a)->{a.setName("one");});
AccessRequirement ar2 = termsOfUseAccessRequirementObjectHelper.create((a)->{a.setName("two");});
AccessRequirement ar3 = termsOfUseAccessRequirementObjectHelper.create((a)->{a.setName("three");});
// create the schema.
// DerivedWithAccessRequirementIds.json.vtp (.vtp stands for Velocity template) is not a JSON file; it is a Velocity template used to generate the JSON.
String fileName = "schema/DerivedWithAccessRequirementIds.json.vtp";
JsonSchema schema = getSchemaTemplateFromClasspath(fileName, "my.organization-DerivedWithAccessRequirementIds",
new VelocityContext(Map.of("arOne", ar1.getId(), "arTwo", ar2.getId(), "arThree", ar3.getId())));
CreateSchemaResponse createResponse = registerSchema(schema);
String schema$id = createResponse.getNewVersionInfo().get$id();
// Now bind the schema to the project; this enables derived annotations from
// the schema, which will eventually be replicated
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(schema$id);
bindRequest.setEnableDerivedAnnotations(true);
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest);
// one
FileEntity fileOne = createFileWithAnnotations(projectId, "one", (c)->{
c.put("someBoolean", true);
});
waitForValidationResults(adminUserInfo, fileOne.getId(), (ValidationResults t) -> {
assertNotNull(t);
assertTrue(t.getIsValid());
});
boolean includeDerivedAnnotations = true;
Annotations annotations = entityManager.getAnnotations(adminUserInfo, fileOne.getId(),
includeDerivedAnnotations);
Annotations expected = new Annotations();
expected.setId(annotations.getId());
expected.setEtag(annotations.getEtag());
AnnotationsV2TestUtils.putAnnotations(expected, "someBoolean", "true", AnnotationsValueType.BOOLEAN);
AnnotationsV2TestUtils.putAnnotations(expected, "_accessRequirementIds",
List.of(ar1.getId().toString(), ar2.getId().toString()), AnnotationsValueType.LONG);
assertEquals(expected, annotations);
Long limit = 50L;
Long offset = 0L;
// validate that the ARs are bound to the entity.
Set<Long> boundArIds = accessRequirementManager.getAccessRequirementsForSubject(adminUserInfo,
new RestrictableObjectDescriptor().setId(fileOne.getId()).setType(RestrictableObjectType.ENTITY), limit,
offset).stream().map(AccessRequirement::getId).collect(Collectors.toSet());
assertEquals(Set.of(ar1.getId(), ar2.getId()), boundArIds);
// two
FileEntity fileTwo = createFileWithAnnotations(projectId, "two", (c)->{
c.put("someBoolean", false);
});
waitForValidationResults(adminUserInfo, fileTwo.getId(), (ValidationResults t) -> {
assertNotNull(t);
assertTrue(t.getIsValid());
});
annotations = entityManager.getAnnotations(adminUserInfo, fileTwo.getId(), includeDerivedAnnotations);
expected = new Annotations();
expected.setId(annotations.getId());
expected.setEtag(annotations.getEtag());
AnnotationsV2TestUtils.putAnnotations(expected, "someBoolean", "false", AnnotationsValueType.BOOLEAN);
AnnotationsV2TestUtils.putAnnotations(expected, "_accessRequirementIds",
List.of(ar2.getId().toString(), ar3.getId().toString()), AnnotationsValueType.LONG);
assertEquals(expected, annotations);
// validate that the ARs are bound to the entity.
boundArIds = accessRequirementManager.getAccessRequirementsForSubject(adminUserInfo,
new RestrictableObjectDescriptor().setId(fileTwo.getId()).setType(RestrictableObjectType.ENTITY), limit,
offset).stream().map(AccessRequirement::getId).collect(Collectors.toSet());
assertEquals(Set.of(ar2.getId(), ar3.getId()), boundArIds);
}
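/*
 * Illustrative sketch only: the assertions above suggest that the Velocity template produces a schema
 * whose derived "_accessRequirementIds" annotation is conditional on "someBoolean": when someBoolean is
 * true the derived value is [arOne, arTwo]; when it is false the derived value is [arTwo, arThree].
 * That derived annotation is what drives the access requirement bindings validated above.
 */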
/**
* If a file is invalid against a schema that includes access requirement ids, it should get bound
* to the bootstrapped invalid-annotations access requirement (PLFM-7412).
* @throws Exception
*/
@Test
public void testDerivedAnnotationWithAccessRequirementIdBindingAndInvalidObject() throws Exception {
bootstrapAndCreateOrganization();
String projectId = entityManager.createEntity(adminUserInfo, new Project(), null);
// create the schema
String fileName = "schema/RequiredWithAccessRequirementIds.json";
JsonSchema schema = getSchemaFromClasspath(fileName);
CreateSchemaResponse createResponse = registerSchema(schema);
String schema$id = createResponse.getNewVersionInfo().get$id();
// Now bind the schema to the project; this enables derived annotations from
// the schema, which will eventually be replicated
BindSchemaToEntityRequest bindRequest = new BindSchemaToEntityRequest();
bindRequest.setEntityId(projectId);
bindRequest.setSchema$id(schema$id);
bindRequest.setEnableDerivedAnnotations(true);
entityManager.bindSchemaToEntity(adminUserInfo, bindRequest);
// one
FileEntity fileOne = createFileWithAnnotations(projectId, "one", (c)->{
c.put("someBoolean", "not a boolean");
});
waitForValidationResults(adminUserInfo, fileOne.getId(), (ValidationResults t) -> {
assertNotNull(t);
assertFalse(t.getIsValid());
});
Long limit = 50L;
Long offset = 0L;
// the invalid annotations lock should be bound to the entity.
Set<Long> boundArIds = accessRequirementManager.getAccessRequirementsForSubject(adminUserInfo,
new RestrictableObjectDescriptor().setId(fileOne.getId()).setType(RestrictableObjectType.ENTITY), limit,
offset).stream().map(AccessRequirement::getId).collect(Collectors.toSet());
assertEquals(Set.of(AccessRequirementDAO.INVALID_ANNOTATIONS_LOCK_ID), boundArIds);
}
/**
* Helper to create a FileEntity with the annotations defined by the provided consumer.
* @param parentId
* @param name
* @param consumer
* @return
*/
public FileEntity createFileWithAnnotations(String parentId, String name, Consumer<JSONObject> consumer) {
S3FileHandle fileHandle = fileHandleObjectHelper.createS3(h->{h.setFileName("theFile.txt");});
FileEntity file = new FileEntity().setName(name).setDataFileHandleId(fileHandle.getId()).setParentId(parentId);
String id = entityManager.createEntity(adminUserInfo, file, null);
JSONObject fileJson = entityManager.getEntityJson(id, false);
consumer.accept(fileJson);
fileJson = entityManager.updateEntityJson(adminUserInfo, id, fileJson);
return entityManager.getEntity(adminUserInfo, id, FileEntity.class);
}
/**
* Wait for the entity's validation results to satisfy the given consumer's assertions.
*
* @param user
* @param entityId
* @param consumer assertions applied to each fetched ValidationResults; retried until they pass or the wait times out
* @return the ValidationResults that satisfied the consumer
*/
public ValidationResults waitForValidationResults(UserInfo user, String entityId,
Consumer<ValidationResults> consumer) {
try {
return TimeUtils.waitFor(MAX_WAIT_MS, 1000L, () -> {
try {
ValidationResults validationResults = entityManager.getEntityValidationResults(user, entityId);
consumer.accept(validationResults);
return new Pair<>(Boolean.TRUE, validationResults);
} catch (Throwable e) {
System.out.println("Waiting for expected ValidationResults..." + e.getMessage());
return new Pair<>(Boolean.FALSE, null);
}
});
} catch (Exception e) {
throw new RuntimeException(e);
}
}
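/*
 * Typical usage (taken from the tests above):
 *
 *   waitForValidationResults(adminUserInfo, folderId, (ValidationResults t) -> {
 *     assertNotNull(t);
 *     assertTrue(t.getIsValid());
 *   });
 *
 * The consumer's assertions are retried once per second until they pass or MAX_WAIT_MS elapses.
 */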
/**
* Wait for the validation results to be not found.
*
* @param user
* @param entityId
*/
public void waitForValidationResultsToBeNotFound(UserInfo user, String entityId) {
try {
TimeUtils.waitFor(MAX_WAIT_MS, 1000L, () -> {
try {
ValidationResults validationResults = entityManager.getEntityValidationResults(user, entityId);
System.out.println("Waiting for expected ValidationResults to be removed...");
return new Pair<>(Boolean.FALSE, null);
} catch (NotFoundException e) {
return new Pair<>(Boolean.TRUE, null);
}
});
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public void printJson(JSONEntity entity) throws JSONException, JSONObjectAdapterException {
JSONObject object = new JSONObject(EntityFactory.createJSONStringForEntity(entity));
System.out.println(object.toString(5));
}
/**
* Helper to create a schema whose $id is built from the given organization name and schema name.
*
* @param organizationName
* @param schemaName
* @return
*/
public JsonSchema createSchema(String organizationName, String schemaName) {
JsonSchema schema = new JsonSchema();
schema.set$id(organizationName + JsonSchemaConstants.PATH_DELIMITER + schemaName);
return schema;
}
/**
* Helper to create a $ref to the given schema
*
* @param toRef
* @return
*/
public JsonSchema create$RefSchema(JsonSchema toRef) {
JsonSchema schema = new JsonSchema();
schema.set$ref(toRef.get$id());
return schema;
}
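/*
 * Example usage (mirrors the reference tests above): build a parent schema that points at a child
 * schema via $ref. Map.of is used here only for brevity in the sketch.
 *
 *   JsonSchema child = createSchema(organizationName, "child");
 *   child.setType(Type.string);
 *   JsonSchema parent = createSchema(organizationName, "parent");
 *   parent.setProperties(Map.of("state", create$RefSchema(child)));
 */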
}
|
package som.vm;
import static som.vm.Symbols.symbolFor;
import static som.vm.constants.Classes.arrayClass;
import static som.vm.constants.Classes.booleanClass;
import static som.vm.constants.Classes.classClass;
import static som.vm.constants.Classes.doubleClass;
import static som.vm.constants.Classes.integerClass;
import static som.vm.constants.Classes.metaclassClass;
import static som.vm.constants.Classes.methodClass;
import static som.vm.constants.Classes.nilClass;
import static som.vm.constants.Classes.objectClass;
import static som.vm.constants.Classes.primitiveClass;
import static som.vm.constants.Classes.stringClass;
import static som.vm.constants.Classes.symbolClass;
import static som.vm.constants.ThreadClasses.conditionClass;
import static som.vm.constants.ThreadClasses.delayClass;
import static som.vm.constants.ThreadClasses.mutexClass;
import static som.vm.constants.ThreadClasses.threadClass;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.StringTokenizer;
import som.compiler.Disassembler;
import som.interpreter.Invokable;
import som.interpreter.TruffleCompiler;
import som.vm.constants.Blocks;
import som.vm.constants.Globals;
import som.vm.constants.Nil;
import som.vmobjects.SArray;
import som.vmobjects.SBlock;
import som.vmobjects.SClass;
import som.vmobjects.SInvokable;
import som.vmobjects.SInvokable.SMethod;
import som.vmobjects.SInvokable.SPrimitive;
import som.vmobjects.SObject;
import som.vmobjects.SSymbol;
import com.oracle.truffle.api.CompilerAsserts;
import com.oracle.truffle.api.CompilerDirectives.CompilationFinal;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.Truffle;
import com.oracle.truffle.api.TruffleRuntime;
import com.oracle.truffle.api.frame.MaterializedFrame;
public final class Universe {
/**
* Associations are handles for globals with a fixed
* SSymbol and a mutable value.
*/
public static final class Association {
private final SSymbol key;
@CompilationFinal private Object value;
public Association(final SSymbol key, final Object value) {
this.key = key;
this.value = value;
}
public SSymbol getKey() {
return key;
}
public Object getValue() {
return value;
}
public void setValue(final Object value) {
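// value is @CompilationFinal, so compiled code may have constant-folded the old value;
// deoptimize and invalidate that code before writing the new value.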
TruffleCompiler.transferToInterpreterAndInvalidate("Changed global");
this.value = value;
}
}
public static void main(final String[] arguments) {
Universe u = current();
try {
u.interpret(arguments);
u.exit(0);
} catch (IllegalStateException e) {
errorExit(e.getMessage());
}
}
public Object interpret(String[] arguments) {
// Check for command line switches
arguments = handleArguments(arguments);
// Initialize the known universe
return execute(arguments);
}
private Universe() {
this.truffleRuntime = Truffle.getRuntime();
this.globals = new HashMap<SSymbol, Association>();
this.avoidExit = false;
this.alreadyInitialized = false;
this.lastExitCode = 0;
this.blockClasses = new SClass[4];
}
public TruffleRuntime getTruffleRuntime() {
return truffleRuntime;
}
public void exit(final int errorCode) {
TruffleCompiler.transferToInterpreter("exit");
// Exit from the Java system
if (!avoidExit) {
System.exit(errorCode);
} else {
lastExitCode = errorCode;
}
}
public int lastExitCode() {
return lastExitCode;
}
public static void errorExit(final String message) {
TruffleCompiler.transferToInterpreter("errorExit");
errorPrintln("Runtime Error: " + message);
current().exit(1);
}
@TruffleBoundary
public String[] handleArguments(String[] arguments) {
boolean gotClasspath = false;
String[] remainingArgs = new String[arguments.length];
int cnt = 0;
for (int i = 0; i < arguments.length; i++) {
if (arguments[i].equals("-cp")) {
if (i + 1 >= arguments.length) {
printUsageAndExit();
}
setupClassPath(arguments[i + 1]);
// Checkstyle: stop
++i; // skip class path
// Checkstyle: resume
gotClasspath = true;
} else if (arguments[i].equals("-d")) {
printAST = true;
} else {
remainingArgs[cnt++] = arguments[i];
}
}
if (!gotClasspath) {
// Get the default class path of the appropriate size
classPath = setupDefaultClassPath(0);
}
// Copy the remaining elements from the original array into the new
// array
arguments = new String[cnt];
System.arraycopy(remainingArgs, 0, arguments, 0, cnt);
// check remaining args for class paths, and strip file extension
for (int i = 0; i < arguments.length; i++) {
String[] split = getPathClassExt(arguments[i]);
if (!("".equals(split[0]))) { // there was a path
String[] tmp = new String[classPath.length + 1];
System.arraycopy(classPath, 0, tmp, 1, classPath.length);
tmp[0] = split[0];
classPath = tmp;
}
arguments[i] = split[1];
}
return arguments;
}
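/*
 * Illustrative example (not part of the original code): given the arguments
 *   {"-cp", "core-lib" + File.pathSeparator + "examples", "-d", "tests/Test.som", "extraArg"}
 * this method sets up the class path from "-cp", enables AST printing for "-d", prepends
 * "tests" (the directory of the .som file) to the class path, and returns {"Test", "extraArg"}.
 */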
@TruffleBoundary
// take argument of the form "../foo/Test.som" and return
// "../foo", "Test", "som"
private String[] getPathClassExt(final String arg) {
File file = new File(arg);
String path = file.getParent();
StringTokenizer tokenizer = new StringTokenizer(file.getName(), ".");
if (tokenizer.countTokens() > 2) {
errorPrintln("Class with . in its name?");
exit(1);
}
String[] result = new String[3];
result[0] = (path == null) ? "" : path;
result[1] = tokenizer.nextToken();
result[2] = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : "";
return result;
}
@TruffleBoundary
public void setupClassPath(final String cp) {
// Create a new tokenizer to split up the string of directories
StringTokenizer tokenizer = new StringTokenizer(cp, File.pathSeparator);
// Get the default class path of the appropriate size
classPath = setupDefaultClassPath(tokenizer.countTokens());
// Get the directories and put them into the class path array
for (int i = 0; tokenizer.hasMoreTokens(); i++) {
classPath[i] = tokenizer.nextToken();
}
}
@TruffleBoundary
private String[] setupDefaultClassPath(final int directories) {
// Get the default system class path
String systemClassPath = System.getProperty("system.class.path");
// Compute the number of defaults
int defaults = (systemClassPath != null) ? 2 : 1;
// Allocate an array with room for the directories and the defaults
String[] result = new String[directories + defaults];
// Insert the system class path into the defaults section
if (systemClassPath != null) {
result[directories] = systemClassPath;
}
// Insert the current directory into the defaults section
result[directories + defaults - 1] = ".";
// Return the class path
return result;
}
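/*
 * Illustrative example: for directories == 2 with the "system.class.path" property set, this returns
 * an array of length 4 laid out as { <dir slot>, <dir slot>, <system.class.path>, "." }; the directory
 * slots are filled in afterwards by setupClassPath(). Without the property the layout is
 * { <dir slot>, <dir slot>, "." }.
 */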
private void printUsageAndExit() {
// Print the usage
println("Usage: som [-options] [args...] ");
println(" ");
println("where options include: ");
println(" -cp <directories separated by " + File.pathSeparator + ">");
println(" set search path for application classes");
println(" -d enable disassembling");
// Exit
System.exit(0);
}
/**
* Start interpretation by sending the selector to the given class. This is
* mostly meant for testing currently.
*
* @param className
* @param selector
* @return
*/
public Object interpret(final String className, final String selector) {
initializeObjectSystem();
SClass clazz = loadClass(symbolFor(className));
// Lookup the invokable for the given selector on the class's class (i.e., a class-side method)
SInvokable invokable = clazz.getSOMClass().lookupInvokable(
symbolFor(selector));
return invokable.invoke(clazz);
}
private Object execute(final String[] arguments) {
initializeObjectSystem();
// Start the shell if no filename is given
if (arguments.length == 0) {
Shell shell = new Shell(this);
return shell.start();
}
// Lookup the initialize invokable on the system class
SInvokable initialize = systemClass.
lookupInvokable(symbolFor("initialize:"));
return initialize.invoke(new Object[] {systemObject,
SArray.create(arguments)});
}
protected void initializeObjectSystem() {
CompilerAsserts.neverPartOfCompilation();
if (alreadyInitialized) {
return;
} else {
alreadyInitialized = true;
}
// Allocate the nil object
SObject nilObject = Nil.nilObject;
// Setup the class reference for the nil object
nilObject.setClass(nilClass);
// Initialize the system classes.
initializeSystemClass(objectClass, null, "Object");
initializeSystemClass(classClass, objectClass, "Class");
initializeSystemClass(metaclassClass, classClass, "Metaclass");
initializeSystemClass(nilClass, objectClass, "Nil");
initializeSystemClass(arrayClass, objectClass, "Array");
initializeSystemClass(methodClass, objectClass, "Method");
initializeSystemClass(stringClass, objectClass, "String");
initializeSystemClass(symbolClass, stringClass, "Symbol");
initializeSystemClass(integerClass, objectClass, "Integer");
initializeSystemClass(primitiveClass, objectClass, "Primitive");
initializeSystemClass(doubleClass, objectClass, "Double");
initializeSystemClass(booleanClass, objectClass, "Boolean");
// Thread support classes
initializeSystemClass(conditionClass, objectClass, "Condition");
initializeSystemClass(delayClass, objectClass, "Delay");
initializeSystemClass(mutexClass, objectClass, "Mutex");
initializeSystemClass(threadClass, objectClass, "Thread");
trueClass = newSystemClass();
falseClass = newSystemClass();
initializeSystemClass(trueClass, booleanClass, "True");
initializeSystemClass(falseClass, booleanClass, "False");
// Load methods and fields into the system classes
loadSystemClass(objectClass);
loadSystemClass(classClass);
loadSystemClass(metaclassClass);
loadSystemClass(nilClass);
loadSystemClass(arrayClass);
loadSystemClass(methodClass);
loadSystemClass(symbolClass);
loadSystemClass(integerClass);
loadSystemClass(primitiveClass);
loadSystemClass(stringClass);
loadSystemClass(doubleClass);
loadSystemClass(booleanClass);
loadSystemClass(trueClass);
loadSystemClass(falseClass);
loadSystemClass(conditionClass);
loadSystemClass(delayClass);
loadSystemClass(mutexClass);
loadSystemClass(threadClass);
// Load the generic block class
blockClasses[0] = loadClass(symbolFor("Block"));
// Setup the true and false objects
trueObject = newInstance(trueClass);
falseObject = newInstance(falseClass);
// Load the system class and create an instance of it
systemClass = loadClass(symbolFor("System"));
systemObject = newInstance(systemClass);
// Put special objects into the dictionary of globals
setGlobal("nil", nilObject);
setGlobal("true", trueObject);
setGlobal("false", falseObject);
setGlobal("system", systemObject);
// Load the remaining block classes
loadBlockClass(1);
loadBlockClass(2);
loadBlockClass(3);
if (Globals.trueObject != trueObject) {
errorExit("Initialization went wrong for class Globals");
}
if (Blocks.blockClass1 != blockClasses[1]) {
errorExit("Initialization went wrong for class Blocks");
}
objectSystemInitialized = true;
}
public static SBlock newBlock(final SMethod method, final MaterializedFrame context) {
return SBlock.create(method, context);
}
@TruffleBoundary
public static SClass newClass(final SClass classClass) {
return new SClass(classClass);
}
@TruffleBoundary
public static SInvokable newMethod(final SSymbol signature,
final Invokable truffleInvokable, final boolean isPrimitive,
final SMethod[] embeddedBlocks) {
if (isPrimitive) {
return new SPrimitive(signature, truffleInvokable);
} else {
return new SMethod(signature, truffleInvokable, embeddedBlocks);
}
}
public static SObject newInstance(final SClass instanceClass) {
return SObject.create(instanceClass);
}
@TruffleBoundary
public static SClass newMetaclassClass() {
// Allocate the metaclass classes
SClass result = new SClass(0);
result.setClass(new SClass(0));
// Setup the metaclass hierarchy
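// The class of "Metaclass class" is set to Metaclass itself, closing the classic metaclass loop.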
result.getSOMClass().setClass(result);
return result;
}
@TruffleBoundary
public static SClass newSystemClass() {
// Allocate the new system class
SClass systemClass = new SClass(0);
// Setup the metaclass hierarchy
systemClass.setClass(new SClass(0));
systemClass.getSOMClass().setClass(metaclassClass);
// Return the freshly allocated system class
return systemClass;
}
private void initializeSystemClass(final SClass systemClass, final SClass superClass,
final String name) {
// Initialize the superclass hierarchy
if (superClass != null) {
systemClass.setSuperClass(superClass);
systemClass.getSOMClass().setSuperClass(superClass.getSOMClass());
} else {
systemClass.getSOMClass().setSuperClass(classClass);
}
// Initialize the array of instance fields
systemClass.setInstanceFields(SArray.create(new Object[0]));
systemClass.getSOMClass().setInstanceFields(SArray.create(new Object[0]));
// Initialize the array of instance invokables
systemClass.setInstanceInvokables(SArray.create(new Object[0]));
systemClass.getSOMClass().setInstanceInvokables(SArray.create(new Object[0]));
// Initialize the name of the system class
systemClass.setName(symbolFor(name));
systemClass.getSOMClass().setName(symbolFor(name + " class"));
// Insert the system class into the dictionary of globals
setGlobal(systemClass.getName(), systemClass);
}
@TruffleBoundary
public boolean hasGlobal(final SSymbol name) {
return globals.containsKey(name);
}
@TruffleBoundary
public Object getGlobal(final SSymbol name) {
Association assoc = globals.get(name);
if (assoc == null) {
return null;
}
return assoc.getValue();
}
@TruffleBoundary
public Association getGlobalsAssociation(final SSymbol name) {
return globals.get(name);
}
public void setGlobal(final String name, final Object value) {
setGlobal(symbolFor(name), value);
}
@TruffleBoundary
public void setGlobal(final SSymbol name, final Object value) {
Association assoc = globals.get(name);
if (assoc == null) {
assoc = new Association(name, value);
globals.put(name, assoc);
} else {
assoc.setValue(value);
}
}
public SClass getBlockClass(final int numberOfArguments) {
SClass result = blockClasses[numberOfArguments];
assert result != null || numberOfArguments == 0;
return result;
}
private void loadBlockClass(final int numberOfArguments) {
// Compute the name of the block class with the given number of
// arguments
SSymbol name = symbolFor("Block" + numberOfArguments);
assert getGlobal(name) == null;
// Get the block class for blocks with the given number of arguments
SClass result = loadClass(name, null);
// Add the appropriate value primitive to the block class
result.addInstancePrimitive(SBlock.getEvaluationPrimitive(
numberOfArguments, this, result), true);
// Insert the block class into the dictionary of globals
setGlobal(name, result);
blockClasses[numberOfArguments] = result;
}
@TruffleBoundary
public SClass loadClass(final SSymbol name) {
// Check if the requested class is already in the dictionary of globals
SClass result = (SClass) getGlobal(name);
if (result != null) { return result; }
result = loadClass(name, null);
loadPrimitives(result, false);
setGlobal(name, result);
return result;
}
private void loadPrimitives(final SClass result, final boolean isSystemClass) {
if (result == null) { return; }
// Load primitives if class defines them, or try to load optional
// primitives defined for system classes.
if (result.hasPrimitives() || isSystemClass) {
result.loadPrimitives(!isSystemClass);
}
}
@TruffleBoundary
private void loadSystemClass(final SClass systemClass) {
// Load the system class
SClass result = loadClass(systemClass.getName(), systemClass);
if (result == null) {
throw new IllegalStateException(systemClass.getName().getString()
+ " class could not be loaded. "
+ "It is likely that the class path has not been initialized properly. "
+ "Please set system property 'system.class.path' or "
+ "pass the '-cp' command-line parameter.");
}
loadPrimitives(result, true);
}
@TruffleBoundary
private SClass loadClass(final SSymbol name, final SClass systemClass) {
throw new NotYetImplementedException();
// // Try loading the class from all different paths
// for (String cpEntry : classPath) {
// try {
// // Load the class from a file and return the loaded class
// SClass result = som.compiler.SourcecodeCompiler.compileModule(cpEntry,
// name.getString(), systemClass, this);
// if (printAST) {
// Disassembler.dump(result.getSOMClass());
// Disassembler.dump(result);
// }
// return result;
// } catch (IOException e) {
// // Continue trying different paths
// }
// }
// // The class could not be found.
// return null;
}
@TruffleBoundary
public SClass loadShellClass(final String stmt) throws IOException {
// Load the class from a stream and return the loaded class
SClass result = som.compiler.SourcecodeCompiler.compileClass(stmt, null,
this);
if (printAST) { Disassembler.dump(result); }
return result;
}
public void setAvoidExit(final boolean value) {
avoidExit = value;
}
@TruffleBoundary
public static void errorPrint(final String msg) {
// Checkstyle: stop
System.err.print(msg);
// Checkstyle: resume
}
@TruffleBoundary
public static void errorPrintln(final String msg) {
// Checkstyle: stop
System.err.println(msg);
// Checkstyle: resume
}
@TruffleBoundary
public static void errorPrintln() {
// Checkstyle: stop
System.err.println();
// Checkstyle: resume
}
@TruffleBoundary
public static void print(final String msg) {
// Checkstyle: stop
System.out.print(msg);
// Checkstyle: resume
}
@TruffleBoundary
public static void println(final String msg) {
// Checkstyle: stop
System.out.println(msg);
// Checkstyle: resume
}
@TruffleBoundary
public static void println() {
// Checkstyle: stop
System.out.println();
// Checkstyle: resume
}
public SObject getTrueObject() { return trueObject; }
public SObject getFalseObject() { return falseObject; }
public SObject getSystemObject() { return systemObject; }
public SClass getTrueClass() { return trueClass; }
public SClass getFalseClass() { return falseClass; }
public SClass getSystemClass() { return systemClass; }
@CompilationFinal private SObject trueObject;
@CompilationFinal private SObject falseObject;
@CompilationFinal private SObject systemObject;
@CompilationFinal private SClass trueClass;
@CompilationFinal private SClass falseClass;
@CompilationFinal private SClass systemClass;
private final HashMap<SSymbol, Association> globals;
private String[] classPath;
@CompilationFinal private boolean printAST;
private final TruffleRuntime truffleRuntime;
// TODO: this is not how it is supposed to be... it is just a hack to cope
// with the use of system.exit in SOM to enable testing
@CompilationFinal private boolean avoidExit;
private int lastExitCode;
// Optimizations
private final SClass[] blockClasses;
// Latest instance
// WARNING: this is problematic with multiple interpreters in the same VM...
@CompilationFinal private static Universe current;
@CompilationFinal private boolean alreadyInitialized;
@CompilationFinal private boolean objectSystemInitialized = false;
public boolean isObjectSystemInitialized() {
return objectSystemInitialized;
}
public static Universe current() {
if (current == null) {
current = new Universe();
}
return current;
}
}
|
package com.opengamma.livedata.server;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.fudgemsg.FudgeMsg;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.Lifecycle;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalScheme;
import com.opengamma.livedata.LiveDataSpecification;
import com.opengamma.livedata.LiveDataValueUpdateBean;
import com.opengamma.livedata.entitlement.LiveDataEntitlementChecker;
import com.opengamma.livedata.entitlement.PermissiveLiveDataEntitlementChecker;
import com.opengamma.livedata.msg.LiveDataSubscriptionRequest;
import com.opengamma.livedata.msg.LiveDataSubscriptionResponse;
import com.opengamma.livedata.msg.LiveDataSubscriptionResponseMsg;
import com.opengamma.livedata.msg.LiveDataSubscriptionResult;
import com.opengamma.livedata.msg.SubscriptionType;
import com.opengamma.livedata.normalization.StandardRules;
import com.opengamma.livedata.resolver.DistributionSpecificationResolver;
import com.opengamma.livedata.resolver.NaiveDistributionSpecificationResolver;
import com.opengamma.livedata.server.distribution.EmptyMarketDataSenderFactory;
import com.opengamma.livedata.server.distribution.MarketDataDistributor;
import com.opengamma.livedata.server.distribution.MarketDataSenderFactory;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.PerformanceCounter;
import com.opengamma.util.PublicAPI;
/**
* The base class from which most OpenGamma Live Data feed servers should be
* derived. It handles the most common cases of distributed contract management.
*
*/
@PublicAPI
public abstract class AbstractLiveDataServer implements Lifecycle {
private static final Logger s_logger = LoggerFactory
.getLogger(AbstractLiveDataServer.class);
private volatile MarketDataSenderFactory _marketDataSenderFactory = new EmptyMarketDataSenderFactory();
private final Collection<SubscriptionListener> _subscriptionListeners = new CopyOnWriteArrayList<SubscriptionListener>();
/** Access controlled via _subscriptionLock */
private final Set<Subscription> _currentlyActiveSubscriptions = new HashSet<Subscription>();
/** _Write_ access controlled via _subscriptionLock */
private final Map<String, Subscription> _securityUniqueId2Subscription = new ConcurrentHashMap<String, Subscription>();
/** Access controlled via _subscriptionLock */
private final Map<LiveDataSpecification, MarketDataDistributor> _fullyQualifiedSpec2Distributor = new HashMap<LiveDataSpecification, MarketDataDistributor>();
private final AtomicLong _numMarketDataUpdatesReceived = new AtomicLong(0);
private final PerformanceCounter _performanceCounter;
private final Lock _subscriptionLock = new ReentrantLock();
private DistributionSpecificationResolver _distributionSpecificationResolver = new NaiveDistributionSpecificationResolver();
private LiveDataEntitlementChecker _entitlementChecker = new PermissiveLiveDataEntitlementChecker();
private volatile ConnectionStatus _connectionStatus = ConnectionStatus.NOT_CONNECTED;
protected AbstractLiveDataServer() {
this(true);
}
/**
* You may wish to disable performance counting if you expect a high rate of messages, or if you process messages on several threads.
* @param isPerformanceCountingEnabled Whether to track the message rate here. See getNumLiveDataUpdatesSentPerSecondOverLastMinute
*/
protected AbstractLiveDataServer(boolean isPerformanceCountingEnabled) {
_performanceCounter = isPerformanceCountingEnabled ? new PerformanceCounter(60) : null;
}
/**
* @return the distributionSpecificationResolver
*/
public DistributionSpecificationResolver getDistributionSpecificationResolver() {
return _distributionSpecificationResolver;
}
/**
* @param distributionSpecificationResolver
* the distributionSpecificationResolver to set
*/
public void setDistributionSpecificationResolver(
DistributionSpecificationResolver distributionSpecificationResolver) {
_distributionSpecificationResolver = distributionSpecificationResolver;
}
public MarketDataSenderFactory getMarketDataSenderFactory() {
return _marketDataSenderFactory;
}
public void setMarketDataSenderFactory(MarketDataSenderFactory marketDataSenderFactory) {
_marketDataSenderFactory = marketDataSenderFactory;
}
public void addSubscriptionListener(SubscriptionListener subscriptionListener) {
ArgumentChecker.notNull(subscriptionListener, "Subscription Listener");
_subscriptionListeners.add(subscriptionListener);
}
public void setSubscriptionListeners(
Collection<SubscriptionListener> subscriptionListeners) {
_subscriptionListeners.clear();
for (SubscriptionListener subscriptionListener : subscriptionListeners) {
addSubscriptionListener(subscriptionListener);
}
}
/**
* @return the entitlementChecker
*/
public LiveDataEntitlementChecker getEntitlementChecker() {
return _entitlementChecker;
}
/**
* @param entitlementChecker
* the entitlementChecker to set
*/
public void setEntitlementChecker(
LiveDataEntitlementChecker entitlementChecker) {
_entitlementChecker = entitlementChecker;
}
public String getDefaultNormalizationRuleSetId() {
return StandardRules.getOpenGammaRuleSetId();
}
/**
* Subscribes to the given ticker(s) using the underlying market
* data provider.
* <p>
* The return value is a map from unique ID to subscription handle.
* The map must contain an entry for each <code>uniqueId</code>.
* Failure to subscribe to any <code>uniqueId</code> should result in an exception being thrown.
*
* @param uniqueIds A collection of unique IDs. Not null. May be empty.
* @return Subscription handles corresponding to the unique IDs.
* @throws RuntimeException If subscribing to any unique IDs failed.
*/
protected abstract Map<String, Object> doSubscribe(Collection<String> uniqueIds);
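/*
 * A minimal sketch of a possible implementation (hypothetical; a real server would call into its
 * market data API and return provider-specific handles):
 *
 *   @Override
 *   protected Map<String, Object> doSubscribe(Collection<String> uniqueIds) {
 *     Map<String, Object> handles = new HashMap<String, Object>();
 *     for (String uniqueId : uniqueIds) {
 *       handles.put(uniqueId, uniqueId); // reuse the ID itself as the subscription handle
 *     }
 *     return handles;
 *   }
 */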
/**
* Unsubscribes from the given ticker(s) using the underlying market
* data provider.
*
* @param subscriptionHandles
* Subscription handle(s) returned by {@link #doSubscribe(Collection)}.
* Not null. May be empty.
*/
protected abstract void doUnsubscribe(Collection<Object> subscriptionHandles);
/**
* Returns an image (i.e., all fields) from the underlying market data provider.
*
* @param uniqueIds Not null. May be empty.
* @return A map from unique ID to the snapshot (all fields) for that ID.
* The map must contain an entry for each <code>uniqueId</code>.
* Failure to snapshot any <code>uniqueId</code> should result in an exception being thrown.
* @throws RuntimeException If the snapshot could not be obtained.
*/
protected abstract Map<String, FudgeMsg> doSnapshot(Collection<String> uniqueIds);
/**
* @return Identification domain that uniquely identifies securities for this
* type of server.
*/
protected abstract ExternalScheme getUniqueIdDomain();
/**
* Connects to the underlying market data provider.
* You can rely on the fact that this method is only
* called when getConnectionStatus() == ConnectionStatus.NOT_CONNECTED.
*/
protected abstract void doConnect();
/**
* Disconnects from the underlying market data provider.
* You can rely on the fact that this method is only
* called when getConnectionStatus() == ConnectionStatus.CONNECTED.
*/
protected abstract void doDisconnect();
/**
* In some cases, the underlying market data API may not, when a subscription is created,
* return a full image of all fields. If so, we need to get the full image explicitly.
*
* @param subscription The subscription currently being created
* @return true if a snapshot should be made when a new subscription is created, false otherwise.
*/
protected abstract boolean snapshotOnSubscriptionStartRequired(Subscription subscription);
/**
* In some cases a subscription with no data may indicate that a snapshot will have no data.
*
* @param distributor The currently active distributor for the security being snapshotted
* @return true if an empty subscription indicates that the snapshot result would be empty
*/
protected boolean canSatisfySnapshotFromEmptySubscription(MarketDataDistributor distributor) {
//NOTE simon 28/11/2011: Only in the case of requiring a snapshot is it safe to use an empty snapshot from a subscription, since in the other case we may still be waiting for values
return snapshotOnSubscriptionStartRequired(distributor.getSubscription());
}
/**
* Is the server connected to the underlying market data API?
*/
public enum ConnectionStatus {
/** Connection active */
CONNECTED,
/** Connection not active */
NOT_CONNECTED
}
public ConnectionStatus getConnectionStatus() {
return _connectionStatus;
}
public void setConnectionStatus(ConnectionStatus connectionStatus) {
_connectionStatus = connectionStatus;
s_logger.info("Connection status changed to " + connectionStatus);
if (connectionStatus == ConnectionStatus.NOT_CONNECTED) {
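// Drop the provider subscription handles; reestablishSubscriptions() can repopulate them after reconnecting.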
for (Subscription subscription : getSubscriptions()) {
subscription.setHandle(null);
}
}
}
void reestablishSubscriptions() {
_subscriptionLock.lock();
try {
Set<String> securities = _securityUniqueId2Subscription.keySet();
try {
Map<String, Object> subscriptions = doSubscribe(securities);
for (Entry<String, Object> entry : subscriptions.entrySet()) {
Subscription subscription = _securityUniqueId2Subscription.get(entry.getKey());
subscription.setHandle(entry.getValue());
}
} catch (RuntimeException e) {
s_logger.error("Could not reestablish subscription to {}", new Object[] {securities}, e);
}
} finally {
_subscriptionLock.unlock();
}
}
protected void verifyConnectionOk() {
if (getConnectionStatus() == ConnectionStatus.NOT_CONNECTED) {
throw new IllegalStateException("Connection to market data API down");
}
}
@Override
public synchronized boolean isRunning() {
return getConnectionStatus() == ConnectionStatus.CONNECTED;
}
@Override
public synchronized void start() {
if (getConnectionStatus() == ConnectionStatus.NOT_CONNECTED) {
connect();
}
}
@Override
public synchronized void stop() {
if (getConnectionStatus() == ConnectionStatus.CONNECTED) {
disconnect();
}
}
public synchronized void connect() {
if (getConnectionStatus() != ConnectionStatus.NOT_CONNECTED) {
throw new IllegalStateException("Can only connect if not connected");
}
doConnect();
setConnectionStatus(ConnectionStatus.CONNECTED);
}
public synchronized void disconnect() {
if (getConnectionStatus() != ConnectionStatus.CONNECTED) {
throw new IllegalStateException("Can only disconnect if connected");
}
doDisconnect();
setConnectionStatus(ConnectionStatus.NOT_CONNECTED);
}
/**
* @param securityUniqueId Security unique ID
* @return A {@code LiveDataSpecification} with default normalization
* rule used.
*/
public LiveDataSpecification getLiveDataSpecification(String securityUniqueId) {
LiveDataSpecification liveDataSpecification = new LiveDataSpecification(
getDefaultNormalizationRuleSetId(),
ExternalId.of(getUniqueIdDomain(), securityUniqueId));
return liveDataSpecification;
}
/**
* Subscribes to the market data and creates a default distributor.
*
* @param securityUniqueId Security unique ID
* @return Whether the subscription succeeded or failed
* @see #getDefaultNormalizationRuleSetId()
*/
public LiveDataSubscriptionResponse subscribe(String securityUniqueId) {
return subscribe(securityUniqueId, false);
}
/**
* Subscribes to the market data and creates a default distributor.
*
* @param securityUniqueId Security unique ID
* @param persistent See {@link MarketDataDistributor#isPersistent()}
* @return Whether the subscription succeeded or failed
* @see #getDefaultNormalizationRuleSetId()
*/
public LiveDataSubscriptionResponse subscribe(String securityUniqueId, boolean persistent) {
LiveDataSpecification liveDataSpecification = getLiveDataSpecification(securityUniqueId);
return subscribe(liveDataSpecification, persistent);
}
public LiveDataSubscriptionResponse subscribe(LiveDataSpecification liveDataSpecificationFromClient,
boolean persistent) {
Collection<LiveDataSubscriptionResponse> results = subscribe(
Collections.singleton(liveDataSpecificationFromClient),
persistent);
if (results == null || results.size() != 1) {
return getErrorResponse(
liveDataSpecificationFromClient,
LiveDataSubscriptionResult.INTERNAL_ERROR,
"subscribe() did not fulfill its contract to populate map for each live data spec");
}
LiveDataSubscriptionResponse result = results.iterator().next();
if (!liveDataSpecificationFromClient.equals(result.getRequestedSpecification())) {
return getErrorResponse(
liveDataSpecificationFromClient,
LiveDataSubscriptionResult.INTERNAL_ERROR,
"Expected a subscription result for " + liveDataSpecificationFromClient + " but received one for " + result.getRequestedSpecification());
}
return result;
}
public Collection<LiveDataSubscriptionResponse> subscribe(
Collection<LiveDataSpecification> liveDataSpecificationsFromClient, boolean persistent) {
ArgumentChecker.notNull(liveDataSpecificationsFromClient, "Subscriptions to be created");
s_logger.info("Subscribe requested for {}, persistent = {}", liveDataSpecificationsFromClient, persistent);
verifyConnectionOk();
Collection<LiveDataSubscriptionResponse> responses = new ArrayList<LiveDataSubscriptionResponse>();
Map<String, Subscription> securityUniqueId2NewSubscription = new HashMap<String, Subscription>();
Map<String, LiveDataSpecification> securityUniqueId2SpecFromClient = new HashMap<String, LiveDataSpecification>();
_subscriptionLock.lock();
try {
Map<LiveDataSpecification, DistributionSpecification> distrSpecs = getDistributionSpecificationResolver().resolve(liveDataSpecificationsFromClient);
for (LiveDataSpecification specFromClient : liveDataSpecificationsFromClient) {
// this is the only place where subscribe() can 'partially' fail
DistributionSpecification distributionSpec = distrSpecs.get(specFromClient);
if (distributionSpec == null) {
s_logger.info("Unable to work out distribution spec for specification " + specFromClient);
responses.add(getErrorResponse(specFromClient, LiveDataSubscriptionResult.NOT_PRESENT, "Unable to work out distribution spec"));
continue;
}
LiveDataSpecification fullyQualifiedSpec = distributionSpec.getFullyQualifiedLiveDataSpecification();
Subscription subscription = getSubscription(fullyQualifiedSpec);
if (subscription != null) {
s_logger.info("Already subscribed to {}", fullyQualifiedSpec);
subscription.createDistributor(distributionSpec, persistent);
responses.add(getSubscriptionResponse(specFromClient, distributionSpec));
} else {
String securityUniqueId = fullyQualifiedSpec.getIdentifier(getUniqueIdDomain());
if (securityUniqueId == null) {
responses.add(getErrorResponse(specFromClient, LiveDataSubscriptionResult.INTERNAL_ERROR,
"Qualified spec " + fullyQualifiedSpec + " does not contain ID of domain " + getUniqueIdDomain()));
continue;
}
subscription = new Subscription(securityUniqueId, getMarketDataSenderFactory());
subscription.createDistributor(distributionSpec, persistent);
securityUniqueId2NewSubscription.put(subscription.getSecurityUniqueId(), subscription);
securityUniqueId2SpecFromClient.put(subscription.getSecurityUniqueId(), specFromClient);
}
}
//Allow checks here, before we do the snapshot or the subscribe
checkSubscribe(securityUniqueId2NewSubscription.keySet());
// In some cases, the underlying market data API may not, when the subscription is started,
// return a full image of all fields. If so, we need to get the full image explicitly.
Collection<String> newSubscriptionsForWhichSnapshotIsRequired = new ArrayList<String>();
for (Subscription subscription : securityUniqueId2NewSubscription.values()) {
if (snapshotOnSubscriptionStartRequired(subscription)) {
newSubscriptionsForWhichSnapshotIsRequired.add(subscription.getSecurityUniqueId());
}
}
s_logger.info("Subscription snapshot required for {}", newSubscriptionsForWhichSnapshotIsRequired);
Map<String, FudgeMsg> snapshots = doSnapshot(newSubscriptionsForWhichSnapshotIsRequired);
for (Map.Entry<String, FudgeMsg> snapshot : snapshots.entrySet()) {
Subscription subscription = securityUniqueId2NewSubscription.get(snapshot.getKey());
subscription.initialSnapshotReceived(snapshot.getValue());
}
// Setup the subscriptions in the underlying data provider.
for (Subscription subscription : securityUniqueId2NewSubscription.values()) {
// this is necessary so we don't lose any updates immediately after doSubscribe(). See AbstractLiveDataServer#liveDataReceived()
// and how it calls AbstractLiveDataServer#getSubscription()
_securityUniqueId2Subscription.put(subscription.getSecurityUniqueId(), subscription);
}
s_logger.info("Creating underlying market data API subscription to {}", securityUniqueId2NewSubscription.keySet());
Map<String, Object> subscriptionHandles = doSubscribe(securityUniqueId2NewSubscription.keySet());
// Set up data structures
for (Map.Entry<String, Object> subscriptionHandle : subscriptionHandles.entrySet()) {
String securityUniqueId = subscriptionHandle.getKey();
Object handle = subscriptionHandle.getValue();
LiveDataSpecification specFromClient = securityUniqueId2SpecFromClient.get(securityUniqueId);
Subscription subscription = securityUniqueId2NewSubscription.get(securityUniqueId);
subscription.setHandle(handle);
for (SubscriptionListener listener : _subscriptionListeners) {
try {
listener.subscribed(subscription);
} catch (RuntimeException e) {
s_logger.error("Listener " + listener + " subscribe failed", e);
}
}
_currentlyActiveSubscriptions.add(subscription);
if (subscription.getDistributionSpecifications().size() != 1) {
responses.add(getErrorResponse(specFromClient, LiveDataSubscriptionResult.INTERNAL_ERROR,
"The subscription should only have 1 distribution specification at the moment: " + subscription));
continue;
}
for (MarketDataDistributor distributor : subscription.getDistributors()) {
_fullyQualifiedSpec2Distributor.put(distributor.getFullyQualifiedLiveDataSpecification(),
distributor);
responses.add(getSubscriptionResponse(specFromClient, distributor.getDistributionSpec()));
}
s_logger.info("Created {}", subscription);
}
} catch (RuntimeException e) {
s_logger.info("Unexpected exception thrown when subscribing. Cleaning up.");
for (Subscription subscription : securityUniqueId2NewSubscription.values()) {
_securityUniqueId2Subscription.remove(subscription.getSecurityUniqueId());
for (MarketDataDistributor distributor : subscription.getDistributors()) {
_fullyQualifiedSpec2Distributor.remove(distributor.getFullyQualifiedLiveDataSpecification());
}
}
_currentlyActiveSubscriptions.removeAll(securityUniqueId2NewSubscription.values());
throw e;
} finally {
_subscriptionLock.unlock();
}
return responses;
}
/**
* Check that a subscription request is valid.
* Will be called before any snapshot or subscribe requests for the keys
* @param uniqueIds The unique ids for which a subscribe is being requested
*/
protected void checkSubscribe(Set<String> uniqueIds) {
//Do nothing by default
}
/**
* Returns a snapshot of the requested market data.
* If the server already subscribes to the market data,
* the last known value from that subscription is used.
* Otherwise a snapshot is requested from the underlying market data API.
*
* @param liveDataSpecificationsFromClient What snapshot(s) are being requested. Not empty
* @return Responses to snapshot requests. Some, or even all, of them might be failures.
* @throws RuntimeException If no snapshot could be obtained due to unexpected error.
*/
public Collection<LiveDataSubscriptionResponse> snapshot(Collection<LiveDataSpecification> liveDataSpecificationsFromClient) {
ArgumentChecker.notNull(liveDataSpecificationsFromClient, "Snapshots to be obtained");
s_logger.info("Snapshot requested for {}", liveDataSpecificationsFromClient);
verifyConnectionOk();
Collection<LiveDataSubscriptionResponse> responses = new ArrayList<LiveDataSubscriptionResponse>();
Collection<String> snapshotsToActuallyDo = new ArrayList<String>();
Map<String, LiveDataSpecification> securityUniqueId2LiveDataSpecificationFromClient = new HashMap<String, LiveDataSpecification>();
Map<LiveDataSpecification, DistributionSpecification> resolved = getDistributionSpecificationResolver().resolve(liveDataSpecificationsFromClient);
for (LiveDataSpecification liveDataSpecificationFromClient : liveDataSpecificationsFromClient) {
DistributionSpecification distributionSpec = resolved.get(liveDataSpecificationFromClient);
LiveDataSpecification fullyQualifiedSpec = distributionSpec.getFullyQualifiedLiveDataSpecification();
MarketDataDistributor currentlyActiveDistributor = getMarketDataDistributor(distributionSpec);
if (currentlyActiveDistributor != null) {
if (currentlyActiveDistributor.getSnapshot() != null) {
//NOTE simon 28/11/2011: We presume that all the fields were provided in one go, all or nothing.
s_logger.debug("Able to satisfy {} from existing LKV", liveDataSpecificationFromClient);
LiveDataValueUpdateBean snapshot = currentlyActiveDistributor.getSnapshot();
responses.add(getSnapshotResponse(liveDataSpecificationFromClient, snapshot));
continue;
} else if (canSatisfySnapshotFromEmptySubscription(currentlyActiveDistributor)) {
//BBG-91 - don't requery when an existing subscription indicates that the snapshot will fail
s_logger.debug("Able to satisfy failed snapshot {} from existing LKV", liveDataSpecificationFromClient);
responses.add(getErrorResponse(liveDataSpecificationFromClient, LiveDataSubscriptionResult.INTERNAL_ERROR,
"Existing subscription for " + currentlyActiveDistributor.getDistributionSpec().getMarketDataId()
+ " failed to retrieve a snapshot. Perhaps requeried fields are unavailable."));
continue;
} else {
s_logger.debug("Can't use existing subscription to satisfy {} from existing LKV", liveDataSpecificationFromClient);
}
}
String securityUniqueId = fullyQualifiedSpec.getIdentifier(getUniqueIdDomain());
if (securityUniqueId == null) {
responses.add(getErrorResponse(
liveDataSpecificationFromClient,
LiveDataSubscriptionResult.INTERNAL_ERROR,
"Qualified spec " + fullyQualifiedSpec + " does not contain ID of domain " + getUniqueIdDomain()));
continue;
}
snapshotsToActuallyDo.add(securityUniqueId);
securityUniqueId2LiveDataSpecificationFromClient.put(securityUniqueId, liveDataSpecificationFromClient);
}
s_logger.debug("Need to actually snapshot {}", snapshotsToActuallyDo);
Map<String, FudgeMsg> snapshots = doSnapshot(snapshotsToActuallyDo);
for (Map.Entry<String, FudgeMsg> snapshotEntry : snapshots.entrySet()) {
String securityUniqueId = snapshotEntry.getKey();
FudgeMsg msg = snapshotEntry.getValue();
LiveDataSpecification liveDataSpecFromClient = securityUniqueId2LiveDataSpecificationFromClient.get(securityUniqueId);
DistributionSpecification distributionSpec = getDistributionSpecificationResolver()
.resolve(liveDataSpecFromClient);
FudgeMsg normalizedMsg = distributionSpec.getNormalizedMessage(msg, securityUniqueId);
if (normalizedMsg == null) {
responses.add(getErrorResponse(
liveDataSpecFromClient,
LiveDataSubscriptionResult.INTERNAL_ERROR,
"When snapshot for " + securityUniqueId + " was run through normalization, the message disappeared. This" +
" indicates there are buggy normalization rules in place, or that buggy (or unexpected) data was" +
" received from the underlying market data API. Check your normalization rules. Raw, unnormalized msg = "
+ msg));
continue;
}
LiveDataValueUpdateBean snapshot = new LiveDataValueUpdateBean(0, distributionSpec.getFullyQualifiedLiveDataSpecification(), normalizedMsg);
responses.add(getSnapshotResponse(liveDataSpecFromClient, snapshot));
}
return responses;
}
/**
* If you want to force a snapshot - i.e., always request a snapshot from the underlying API -
* you can use this method.
*
* @param securityUniqueId Security unique ID
* @return The snapshot
*/
public FudgeMsg doSnapshot(String securityUniqueId) {
Map<String, FudgeMsg> snapshots = doSnapshot(Collections.singleton(securityUniqueId));
FudgeMsg snapshot = snapshots.get(securityUniqueId);
if (snapshot == null) {
throw new OpenGammaRuntimeException("doSnapshot() did not fulfill its contract to populate map for each unique ID");
}
return snapshot;
}
/**
* Processes a market data subscription request by going through the steps of
* resolution, entitlement check, and subscription.
*
* @param subscriptionRequest Request from client telling what to subscribe to
* @return LiveDataSubscriptionResponseMsg Sent back to the client of this server
*/
public LiveDataSubscriptionResponseMsg subscriptionRequestMade(
LiveDataSubscriptionRequest subscriptionRequest) {
try {
return subscriptionRequestMadeImpl(subscriptionRequest);
} catch (Exception e) {
s_logger.error("Failed to subscribe to " + subscriptionRequest, e);
ArrayList<LiveDataSubscriptionResponse> responses = new ArrayList<LiveDataSubscriptionResponse>();
for (LiveDataSpecification requestedSpecification : subscriptionRequest.getSpecifications()) {
responses.add(getErrorResponse(
requestedSpecification,
LiveDataSubscriptionResult.INTERNAL_ERROR,
e.getMessage()));
}
return new LiveDataSubscriptionResponseMsg(subscriptionRequest
.getUser(), responses);
}
}
protected LiveDataSubscriptionResponseMsg subscriptionRequestMadeImpl(
LiveDataSubscriptionRequest subscriptionRequest) {
boolean persistent = subscriptionRequest.getType().equals(SubscriptionType.PERSISTENT);
ArrayList<LiveDataSubscriptionResponse> responses = new ArrayList<LiveDataSubscriptionResponse>();
ArrayList<LiveDataSpecification> snapshots = new ArrayList<LiveDataSpecification>();
ArrayList<LiveDataSpecification> subscriptions = new ArrayList<LiveDataSpecification>();
Map<LiveDataSpecification, DistributionSpecification> distributionSpecifications = getDistributionSpecificationResolver().resolve(subscriptionRequest.getSpecifications());
ArrayList<LiveDataSpecification> distributable = new ArrayList<LiveDataSpecification>();
for (LiveDataSpecification requestedSpecification : subscriptionRequest
.getSpecifications()) {
try {
// Check that this spec can be found
DistributionSpecification spec = distributionSpecifications.get(requestedSpecification);
if (spec == null) {
responses.add(new LiveDataSubscriptionResponse(requestedSpecification,
LiveDataSubscriptionResult.NOT_PRESENT, "Could not build distribution specification for "
+ requestedSpecification, null, null, null));
} else {
distributable.add(requestedSpecification);
}
} catch (Exception e) {
s_logger.error("Failed to subscribe to " + requestedSpecification, e);
responses.add(new LiveDataSubscriptionResponse(requestedSpecification,
LiveDataSubscriptionResult.INTERNAL_ERROR,
e.getMessage(),
null,
null,
null));
}
}
Map<LiveDataSpecification, Boolean> entitled = getEntitlementChecker().isEntitled(subscriptionRequest.getUser(), distributable);
for (Entry<LiveDataSpecification, Boolean> entry : entitled.entrySet()) {
LiveDataSpecification requestedSpecification = entry.getKey();
try {
Boolean entitlement = entry.getValue();
if (!entitlement) {
String msg = subscriptionRequest.getUser() + " is not entitled to " + requestedSpecification;
s_logger.info(msg);
responses.add(new LiveDataSubscriptionResponse(
requestedSpecification,
LiveDataSubscriptionResult.NOT_AUTHORIZED,
msg,
null,
null,
null));
continue;
}
// Pass to the right bucket by type
if (subscriptionRequest.getType() == SubscriptionType.SNAPSHOT) {
snapshots.add(requestedSpecification);
} else {
subscriptions.add(requestedSpecification);
}
} catch (Exception e) {
s_logger.error("Failed to subscribe to " + requestedSpecification, e);
responses.add(new LiveDataSubscriptionResponse(requestedSpecification,
LiveDataSubscriptionResult.INTERNAL_ERROR,
e.getMessage(),
null,
null,
null));
}
}
if (!snapshots.isEmpty()) {
try {
responses.addAll(snapshot(snapshots));
} catch (Exception e) {
for (LiveDataSpecification requestedSpecification : snapshots) {
responses.add(getErrorResponse(
requestedSpecification,
LiveDataSubscriptionResult.INTERNAL_ERROR,
e.getMessage()));
}
}
}
if (!subscriptions.isEmpty()) {
try {
responses.addAll(subscribe(subscriptions, persistent));
} catch (Exception e) {
for (LiveDataSpecification requestedSpecification : subscriptions) {
responses.add(getErrorResponse(
requestedSpecification,
LiveDataSubscriptionResult.INTERNAL_ERROR,
e.getMessage()));
}
}
}
return new LiveDataSubscriptionResponseMsg(subscriptionRequest
.getUser(), responses);
}
/**
* Unsubscribes from market data. All distributors related to that
* subscription will be stopped.
*
* @param securityUniqueId Security unique ID
* @return true if a market data subscription was actually removed. false
* otherwise.
*/
public boolean unsubscribe(String securityUniqueId) {
Subscription sub = getSubscription(securityUniqueId);
if (sub == null) {
return false;
}
return unsubscribe(sub);
}
/**
* Unsubscribes from market data. All distributors related to that
* subscription will be stopped.
*
* @param subscription What to unsubscribe from
* @return true if a market data subscription was actually removed. false
* otherwise.
*/
public boolean unsubscribe(Subscription subscription) {
ArgumentChecker.notNull(subscription, "Subscription");
verifyConnectionOk();
boolean actuallyUnsubscribed = false;
_subscriptionLock.lock();
try {
if (isSubscribedTo(subscription)) {
s_logger.info("Unsubscribing from {}", subscription);
actuallyUnsubscribed = true;
Object subscriptionHandle = subscription.getHandle();
if (subscriptionHandle != null) {
doUnsubscribe(Collections.singleton(subscriptionHandle)); // todo, optimize to use batch
}
_currentlyActiveSubscriptions.remove(subscription);
_securityUniqueId2Subscription.remove(subscription
.getSecurityUniqueId());
for (MarketDataDistributor distributor : subscription.getDistributors()) {
_fullyQualifiedSpec2Distributor.remove(distributor.getFullyQualifiedLiveDataSpecification());
}
subscription.removeAllDistributors();
for (SubscriptionListener listener : _subscriptionListeners) {
try {
listener.unsubscribed(subscription);
} catch (RuntimeException e) {
s_logger.error("Listener unsubscribe failed", e);
}
}
s_logger.info("Unsubscribed from {}", subscription);
} else {
s_logger.warn("Received unsubscription request for non-active subscription: {}", subscription);
}
} finally {
_subscriptionLock.unlock();
}
return actuallyUnsubscribed;
}
/**
* Stops a market data distributor. If the distributor is
* persistent, this call will be a no-op. If you want
* to stop a persistent distributor, make it non-persistent first.
* <p>
* If the subscription to which the distributor belongs no longer
* has any active distributors after this, that subscription will be deleted.
*
* @param distributor The distributor to stop
* @return true if a distributor was actually stopped. false
* otherwise.
*/
public boolean stopDistributor(MarketDataDistributor distributor) {
ArgumentChecker.notNull(distributor, "Distributor");
_subscriptionLock.lock();
try {
MarketDataDistributor realDistributor = getMarketDataDistributor(distributor.getDistributionSpec());
if (realDistributor != distributor) {
return false;
}
if (distributor.isPersistent()) {
return false;
}
distributor.getSubscription().removeDistributor(distributor);
_fullyQualifiedSpec2Distributor.remove(distributor.getFullyQualifiedLiveDataSpecification());
if (distributor.getSubscription().getDistributors().isEmpty()) {
unsubscribe(distributor.getSubscription());
}
} finally {
_subscriptionLock.unlock();
}
return true;
}
public boolean isSubscribedTo(String securityUniqueId) {
return _securityUniqueId2Subscription.containsKey(securityUniqueId);
}
public boolean isSubscribedTo(LiveDataSpecification fullyQualifiedSpec) {
_subscriptionLock.lock();
try {
return _fullyQualifiedSpec2Distributor.containsKey(fullyQualifiedSpec);
} finally {
_subscriptionLock.unlock();
}
}
public boolean isSubscribedTo(Subscription subscription) {
return getSubscriptions().contains(subscription);
}
public void liveDataReceived(String securityUniqueId,
FudgeMsg liveDataFields) {
s_logger.debug("Live data received: {}", liveDataFields);
_numMarketDataUpdatesReceived.incrementAndGet();
if (_performanceCounter != null) {
_performanceCounter.hit();
}
Subscription subscription = getSubscription(securityUniqueId);
if (subscription == null) {
s_logger.warn("Got data for invalid security unique ID {}", securityUniqueId);
return;
}
subscription.liveDataReceived(liveDataFields);
}
public Set<String> getActiveDistributionSpecs() {
Set<String> subscriptions = new HashSet<String>();
for (Subscription subscription : getSubscriptions()) {
for (DistributionSpecification distributionSpec : subscription.getDistributionSpecifications()) {
subscriptions.add(distributionSpec.toString());
}
}
return subscriptions;
}
public Set<String> getActiveSubscriptionIds() {
Set<String> subscriptions = new HashSet<String>();
for (Subscription subscription : getSubscriptions()) {
subscriptions.add(subscription.getSecurityUniqueId());
}
return subscriptions;
}
public int getNumActiveSubscriptions() {
return getSubscriptions().size();
}
public long getNumMarketDataUpdatesReceived() {
return _numMarketDataUpdatesReceived.get();
}
/**
* @return The approximate rate of live data updates received, or -1 if tracking is disabled
*/
public double getNumLiveDataUpdatesSentPerSecondOverLastMinute() {
return _performanceCounter == null ? -1.0 : _performanceCounter.getHitsPerSecond();
}
public Set<Subscription> getSubscriptions() {
_subscriptionLock.lock();
try {
return new HashSet<Subscription>(_currentlyActiveSubscriptions);
} finally {
_subscriptionLock.unlock();
}
}
public Subscription getSubscription(LiveDataSpecification fullyQualifiedSpec) {
MarketDataDistributor distributor = getMarketDataDistributor(fullyQualifiedSpec);
if (distributor == null) {
return null;
}
return distributor.getSubscription();
}
public Subscription getSubscription(String securityUniqueId) {
//NOTE: don't need lock here, map is safe, and this operation isn't really atomic anyway
return _securityUniqueId2Subscription.get(securityUniqueId);
}
public MarketDataDistributor getMarketDataDistributor(DistributionSpecification distributionSpec) {
Subscription subscription = getSubscription(distributionSpec.getFullyQualifiedLiveDataSpecification());
if (subscription == null) {
return null;
}
return subscription.getMarketDataDistributor(distributionSpec);
}
public Map<LiveDataSpecification, MarketDataDistributor> getMarketDataDistributors(Collection<LiveDataSpecification> fullyQualifiedSpecs) {
//NOTE: this is not much (if any) faster here, but for subclasses it can be
_subscriptionLock.lock();
try {
HashMap<LiveDataSpecification, MarketDataDistributor> hashMap = new HashMap<LiveDataSpecification, MarketDataDistributor>();
for (LiveDataSpecification liveDataSpecification : fullyQualifiedSpecs) {
hashMap.put(liveDataSpecification, _fullyQualifiedSpec2Distributor.get(liveDataSpecification));
}
return hashMap;
} finally {
_subscriptionLock.unlock();
}
}
public MarketDataDistributor getMarketDataDistributor(LiveDataSpecification fullyQualifiedSpec) {
_subscriptionLock.lock();
try {
return _fullyQualifiedSpec2Distributor.get(fullyQualifiedSpec);
} finally {
_subscriptionLock.unlock();
}
}
/**
* This method is mainly useful in tests.
*
* @param securityUniqueId Security unique ID
* @return The only market data distributor associated with the
* security unique ID.
* @throws OpenGammaRuntimeException If there is no distributor
* associated with the given {@code securityUniqueId}, or
* if there is more than 1 such distributor.
*/
public MarketDataDistributor getMarketDataDistributor(String securityUniqueId) {
Subscription sub = getSubscription(securityUniqueId);
if (sub == null) {
throw new OpenGammaRuntimeException("Subscription " + securityUniqueId + " not found");
}
Collection<MarketDataDistributor> distributors = sub.getDistributors();
if (distributors.size() != 1) {
throw new OpenGammaRuntimeException(distributors.size() + " distributors found for subscription " + securityUniqueId);
}
return distributors.iterator().next();
}
protected LiveDataSubscriptionResponse getErrorResponse(LiveDataSpecification liveDataSpecificationFromClient,
LiveDataSubscriptionResult result, String message) {
return new LiveDataSubscriptionResponse(liveDataSpecificationFromClient,
result,
message,
null,
null,
null);
}
protected LiveDataSubscriptionResponse getSnapshotResponse(LiveDataSpecification liveDataSpecificationFromClient, LiveDataValueUpdateBean snapshot) {
return new LiveDataSubscriptionResponse(
liveDataSpecificationFromClient,
LiveDataSubscriptionResult.SUCCESS,
null,
snapshot.getSpecification(),
null,
snapshot);
}
protected LiveDataSubscriptionResponse getSubscriptionResponse(LiveDataSpecification liveDataSpecificationFromClient, DistributionSpecification distributionSpec) {
return new LiveDataSubscriptionResponse(
liveDataSpecificationFromClient,
LiveDataSubscriptionResult.SUCCESS,
null,
distributionSpec.getFullyQualifiedLiveDataSpecification(),
distributionSpec.getJmsTopic(),
null);
}
}
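// ---------------------------------------------------------------------------------
// Illustrative sketch, not part of the original source: a minimal flavour of the
// hooks documented above, backed by a hypothetical in-memory map of last known
// values. The names ExampleInMemoryLiveDataServer, _feedValues and the "EXAMPLE"
// scheme are made up for this example. The class is kept abstract because the
// remaining abstract members (doSubscribe/doUnsubscribe, normalization and sender
// wiring) and any superclass constructor arguments are deliberately not shown.
abstract class ExampleInMemoryLiveDataServer extends AbstractLiveDataServer {
  private final Map<String, FudgeMsg> _feedValues = new HashMap<String, FudgeMsg>();
  @Override
  protected Map<String, FudgeMsg> doSnapshot(Collection<String> uniqueIds) {
    Map<String, FudgeMsg> snapshots = new HashMap<String, FudgeMsg>();
    for (String uniqueId : uniqueIds) {
      FudgeMsg msg = _feedValues.get(uniqueId);
      if (msg == null) {
        // doSnapshot() must populate an entry per unique ID, so fail loudly instead
        throw new OpenGammaRuntimeException("No data available for " + uniqueId);
      }
      snapshots.put(uniqueId, msg);
    }
    return snapshots;
  }
  @Override
  protected ExternalScheme getUniqueIdDomain() {
    return ExternalScheme.of("EXAMPLE"); // hypothetical scheme name for this sketch
  }
  @Override
  protected void doConnect() {
    // nothing to establish for an in-memory feed
  }
  @Override
  protected void doDisconnect() {
    // nothing to tear down for an in-memory feed
  }
  @Override
  protected boolean snapshotOnSubscriptionStartRequired(Subscription subscription) {
    return true; // the in-memory feed never pushes an initial full image on its own
  }
}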
|
package uk.ac.ox.zoo.seeg.abraid.mp.dataacquisition.qc;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import uk.ac.ox.zoo.seeg.abraid.mp.common.domain.Location;
import uk.ac.ox.zoo.seeg.abraid.mp.common.domain.LocationPrecision;
import uk.ac.ox.zoo.seeg.abraid.mp.dataacquisition.AbstractDataAcquisitionSpringIntegrationTests;
import static org.fest.assertions.api.Assertions.assertThat;
public class QCManagerIntegrationTest extends AbstractDataAcquisitionSpringIntegrationTests {
@Autowired
private QCManager qcManager;
@Test
public void stage1AndStage2NotRunWhenLocationPrecisionIsCountryAndStage3Passes() {
// Arrange
int japanId = 156;
Location location = new Location("Japan", 138.47861, 36.09854, LocationPrecision.COUNTRY, japanId);
// Act
boolean hasPassedQc = qcManager.performQC(location);
// Assert
assertThat(hasPassedQc).isTrue();
assertThat(location.getAdminUnitQCGaulCode()).isNull();
assertThat(location.getQcMessage()).isEqualTo("QC stage 1 passed: location not an ADMIN1 or ADMIN2. QC " +
"stage 2 passed: location is a country. QC stage 3 passed: location already within HealthMap " +
"country.");
}
@Test
public void stage1NotRunWhenLocationPrecisionIsPreciseAndStage2Fails() {
// Arrange
Location location = new Location("Somewhere in the North Sea", 3.524163, 56.051420, LocationPrecision.PRECISE);
// Act
boolean hasPassedQc = qcManager.performQC(location);
// Assert
assertThat(hasPassedQc).isFalse();
assertThat(location.getAdminUnitQCGaulCode()).isNull();
assertThat(location.getQcMessage()).isEqualTo("QC stage 1 passed: location not an ADMIN1 or ADMIN2. QC stage " +
"2 failed: location too distant from land (closest point is (4.916590,53.291620) at distance " +
"320.061km).");
}
@Test
public void passesStage1AndStage2AndStage3() {
// Arrange
int mexicoId = 14;
Location location = new Location("Estado de México, Mexico", -99.4922, 19.3318, LocationPrecision.ADMIN1,
mexicoId);
// Act
boolean hasPassedQc = qcManager.performQC(location);
// Assert
assertThat(hasPassedQc).isTrue();
assertThat(location.getAdminUnitQCGaulCode()).isEqualTo(1006355);
assertThat(location.getQcMessage()).isEqualTo("QC stage 1 passed: closest distance is 10.92% of the square " +
"root of the area. QC stage 2 passed: location already within land. QC stage 3 passed: location " +
"already within HealthMap country.");
}
@Test
public void failsStage1() {
// Arrange
int vietnamId = 152;
Location location = new Location("Huyện Cai Lậy, Tiền Giang, Vietnam", 108.69807, 7.90055,
LocationPrecision.ADMIN2, vietnamId);
// Act
boolean hasPassedQc = qcManager.performQC(location);
// Assert
assertThat(hasPassedQc).isFalse();
assertThat(location.getAdminUnitQCGaulCode()).isNull();
assertThat(location.getQcMessage()).isEqualTo("QC stage 1 failed: closest distance is 2841.01% of the square " +
"root of the area (GAUL code 1002305: \"Con Dao\").");
}
@Test
public void passesStage1ButFailsStage2() {
// Arrange
int indonesiaId = 184;
Location location = new Location("Central Sulawesi, Indonesia", 121, -1, LocationPrecision.ADMIN1, indonesiaId);
// Act
boolean hasPassedQc = qcManager.performQC(location);
// Assert
assertThat(hasPassedQc).isFalse();
assertThat(location.getAdminUnitQCGaulCode()).isEqualTo(1013690);
assertThat(location.getQcMessage()).isEqualTo("QC stage 1 passed: closest distance is 9.01% of the square " +
"root of the area. QC stage 2 failed: location too distant from land (closest point is " +
"(121.208210,-1.166690) at distance 29.610km).");
}
@Test
public void passesStage2BySnappingAPointInALake() {
// Arrange
int indonesiaId = 184;
Location location = new Location("Central Sulawesi, Indonesia", 116.367, -0.270, LocationPrecision.PRECISE,
indonesiaId);
// Act
boolean hasPassedQc = qcManager.performQC(location);
// Assert
assertThat(hasPassedQc).isTrue();
assertThat(location.getQcMessage()).isEqualTo("QC stage 1 passed: location not an ADMIN1 or ADMIN2. " +
"QC stage 2 passed: location (116.367000,-0.270000) snapped to land (distance 2.209km). " +
"QC stage 3 passed: no country geometries associated with this location.");
}
@Test
public void passesStage1AndStage2ButFailsStage3() {
// Arrange
int usId = 106;
Location location = new Location("Door County, Wisconsin, United States", -87.3001, 44.91666,
LocationPrecision.ADMIN2, usId);
// Act
boolean hasPassedQc = qcManager.performQC(location);
// Assert
assertThat(hasPassedQc).isFalse();
assertThat(location.getAdminUnitQCGaulCode()).isEqualTo(31738);
assertThat(location.getQcMessage()).isEqualTo("QC stage 1 passed: closest distance is 8.76% of the square " +
"root of the area. QC stage 2 passed: location already within land. QC stage 3 failed: location " +
"too distant from HealthMap country (closest point is (-87.344990,44.814350) at distance 11.910km).");
}
@Test
public void passesStage3IfNoHealthMapCountrySpecified() {
// Arrange
Location location = new Location("Door County, Wisconsin, United States", -87.3001, 44.91666,
LocationPrecision.ADMIN2);
// Act
boolean hasPassedQc = qcManager.performQC(location);
// Assert
assertThat(hasPassedQc).isTrue();
assertThat(location.getAdminUnitQCGaulCode()).isEqualTo(31738);
assertThat(location.getQcMessage()).isEqualTo("QC stage 1 passed: closest distance is 8.76% of the square " +
"root of the area. QC stage 2 passed: location already within land. QC stage 3 passed: no country " +
"geometries associated with this location.");
}
@Test
public void passesStage3IfHealthMapCountryHasNoGeometries() {
// Arrange
int maldivesId = 143;
Location location = new Location("Maldives", 73.46564, 5.84270, LocationPrecision.COUNTRY, maldivesId);
// Act
boolean hasPassedQc = qcManager.performQC(location);
// Assert
assertThat(hasPassedQc).isTrue();
assertThat(location.getAdminUnitQCGaulCode()).isNull();
assertThat(location.getQcMessage()).isEqualTo("QC stage 1 passed: location not an ADMIN1 or ADMIN2. QC " +
"stage 2 passed: location is a country. QC stage 3 passed: no country geometries associated with " +
"this location.");
}
}
|
// NIOFileHandle.java
package loci.common;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
public class NIOFileHandle extends AbstractNIOHandle {
//-- Constants --
/** Default NIO buffer size to facilitate buffered I/O. */
protected static final int DEFAULT_BUFFER_SIZE = 1048576;
// -- Fields --
/** The random access file object backing this FileHandle. */
protected RandomAccessFile raf;
/** The file channel backed by the random access file. */
protected FileChannel channel;
/** The absolute position within the file. */
protected long position = 0;
/** The absolute position of the start of the buffer. */
protected long bufferStartPosition = 0;
/** The buffer size. */
protected int bufferSize;
/** The buffer itself. */
protected ByteBuffer buffer;
/** Whether or not the file is opened read/write. */
protected boolean isReadWrite = false;
/** The default map mode for the file. */
protected FileChannel.MapMode mapMode = FileChannel.MapMode.READ_ONLY;
/** The buffer's byte ordering. */
protected ByteOrder order;
// -- Constructors --
/**
* Creates a random access file stream to read from, and
* optionally to write to, the file specified by the File argument.
*/
public NIOFileHandle(File file, String mode, int bufferSize)
throws IOException {
this.bufferSize = bufferSize;
validateMode(mode);
if (mode.equals("rw")) {
isReadWrite = true;
mapMode = FileChannel.MapMode.READ_WRITE;
}
raf = new RandomAccessFile(file, mode);
channel = raf.getChannel();
buffer(position, 0);
}
/**
* Creates a random access file stream to read from, and
* optionally to write to, the file specified by the File argument.
*/
public NIOFileHandle(File file, String mode) throws IOException {
this(file, mode, DEFAULT_BUFFER_SIZE);
}
/**
* Creates a random access file stream to read from, and
* optionally to write to, a file with the specified name.
*/
public NIOFileHandle(String name, String mode) throws IOException {
this(new File(name), mode);
}
// -- FileHandle and Channel API methods --
/** Gets the random access file object backing this FileHandle. */
public RandomAccessFile getRandomAccessFile() { return raf; }
/** Gets the FileChannel from this FileHandle. */
public FileChannel getFileChannel() { return channel; }
/** Gets the current buffer size. */
public int getBufferSize() { return bufferSize; }
// -- AbstractNIOHandle API methods --
/* @see AbstractNIOHandle.setLength(long) */
public void setLength(long length) throws IOException {
raf.seek(length - 1);
raf.write((byte) 0);
buffer = null;
}
// -- IRandomAccess API methods --
/* @see IRandomAccess.close() */
public void close() throws IOException {
raf.close();
}
/* @see IRandomAccess.getFilePointer() */
public long getFilePointer() {
return position;
}
/* @see IRandomAccess.length() */
public long length() throws IOException {
return raf.length();
}
/* @see IRandomAccess.getOrder() */
public ByteOrder getOrder() {
return buffer.order();
}
/* @see IRandomAccess.setOrder(ByteOrder) */
public void setOrder(ByteOrder order) {
this.order = order;
buffer.order(order);
}
/* @see IRandomAccess.read(byte[]) */
public int read(byte[] b) throws IOException {
return read(ByteBuffer.wrap(b));
}
/* @see IRandomAccess.read(byte[], int, int) */
public int read(byte[] b, int off, int len) throws IOException {
return read(ByteBuffer.wrap(b), off, len);
}
/* @see IRandomAccess.read(ByteBuffer) */
public int read(ByteBuffer buf) throws IOException {
return read(buf, 0, buf.capacity());
}
/* @see IRandomAccess.read(ByteBuffer, int, int) */
public int read(ByteBuffer buf, int off, int len) throws IOException {
buf.position(off);
buf.limit(off + len);
channel.position(position);
int readLength = channel.read(buf);
if (readLength == -1) {
// The NIO channel returns -1 when no bytes can be read at the end of the
// file; report that as zero bytes read and leave the position unchanged.
return 0;
}
buffer(position + readLength, 0);
return readLength;
}
/* @see IRandomAccess.seek(long) */
public void seek(long pos) throws IOException {
buffer(pos, 0);
}
/* @see java.io.DataInput.readBoolean() */
public boolean readBoolean() throws IOException {
return readByte() == 1;
}
/* @see java.io.DataInput.readByte() */
public byte readByte() throws IOException {
buffer(position, 1);
position += 1;
try {
return buffer.get();
} catch (BufferUnderflowException e) {
EOFException eof = new EOFException(EOF_ERROR_MSG);
eof.initCause(e);
throw eof;
}
}
/* @see java.io.DataInput.readChar() */
public char readChar() throws IOException {
buffer(position, 2);
position += 2;
try {
return buffer.getChar();
} catch (BufferUnderflowException e) {
EOFException eof = new EOFException(EOF_ERROR_MSG);
eof.initCause(e);
throw eof;
}
}
/* @see java.io.DataInput.readDouble() */
public double readDouble() throws IOException {
buffer(position, 8);
position += 8;
try {
return buffer.getDouble();
} catch (BufferUnderflowException e) {
EOFException eof = new EOFException(EOF_ERROR_MSG);
eof.initCause(e);
throw eof;
}
}
/* @see java.io.DataInput.readFloat() */
public float readFloat() throws IOException {
buffer(position, 4);
position += 4;
try {
return buffer.getFloat();
} catch (BufferUnderflowException e) {
EOFException eof = new EOFException(EOF_ERROR_MSG);
eof.initCause(e);
throw eof;
}
}
/* @see java.io.DataInput.readFully(byte[]) */
public void readFully(byte[] b) throws IOException {
read(b);
}
/* @see java.io.DataInput.readFully(byte[], int, int) */
public void readFully(byte[] b, int off, int len) throws IOException {
read(b, off, len);
}
/* @see java.io.DataInput.readInt() */
public int readInt() throws IOException {
buffer(position, 4);
position += 4;
try {
return buffer.getInt();
} catch (BufferUnderflowException e) {
EOFException eof = new EOFException(EOF_ERROR_MSG);
eof.initCause(e);
throw eof;
}
}
/* @see java.io.DataInput.readLine() */
public String readLine() throws IOException {
raf.seek(position);
String line = raf.readLine();
buffer(raf.getFilePointer(), 0);
return line;
}
/* @see java.io.DataInput.readLong() */
public long readLong() throws IOException {
buffer(position, 8);
position += 8;
try {
return buffer.getLong();
} catch (BufferUnderflowException e) {
EOFException eof = new EOFException(EOF_ERROR_MSG);
eof.initCause(e);
throw eof;
}
}
/* @see java.io.DataInput.readShort() */
public short readShort() throws IOException {
buffer(position, 2);
position += 2;
try {
return buffer.getShort();
} catch (BufferUnderflowException e) {
EOFException eof = new EOFException(EOF_ERROR_MSG);
eof.initCause(e);
throw eof;
}
}
/* @see java.io.DataInput.readUnsignedByte() */
public int readUnsignedByte() throws IOException {
return (int) (readByte() & 0xFF);
}
/* @see java.io.DataInput.readUnsignedShort() */
public int readUnsignedShort() throws IOException {
return (int) readShort() & 0xFFFF;
}
/* @see java.io.DataInput.readUTF() */
public String readUTF() throws IOException {
raf.seek(position);
String utf8 = raf.readUTF();
buffer(raf.getFilePointer(), 0);
return utf8;
}
/* @see java.io.DataInput.skipBytes(int) */
public int skipBytes(int n) throws IOException {
if (n < 1) {
return 0;
}
long oldPosition = position;
// Clamp the skip so the position never moves past the end of the file.
long newPosition = Math.min(oldPosition + n, length());
buffer(newPosition, 0);
return (int) (position - oldPosition);
}
// -- DataOutput API methods --
/* @see java.io.DataOutput.write(byte[]) */
public void write(byte[] b) throws IOException {
write(ByteBuffer.wrap(b));
}
/* @see java.io.DataOutput.write(byte[], int, int) */
public void write(byte[] b, int off, int len) throws IOException {
write(ByteBuffer.wrap(b), off, len);
}
/* @see IRandomAccess.write(ByteBuffer) */
public void write(ByteBuffer buf) throws IOException {
write(buf, 0, buf.capacity());
}
/* @see IRandomAccess.write(ByteBuffer, int, int) */
public void write(ByteBuffer buf, int off, int len) throws IOException {
validateLength(len);
buf.position(off);
buf.limit(off + len);
channel.position(position);
buffer(position + channel.write(buf), 0);
}
/* @see java.io.DataOutput.write(int b) */
public void write(int b) throws IOException {
writeByte(b);
}
/* @see java.io.DataOutput.writeBoolean(boolean) */
public void writeBoolean(boolean v) throws IOException {
writeByte(v ? 1 : 0);
}
/* @see java.io.DataOutput.writeByte(int) */
public void writeByte(int v) throws IOException {
validateLength(1);
buffer(position, 1);
buffer.put((byte) v);
position += 1;
}
/* @see java.io.DataOutput.writeBytes(String) */
public void writeBytes(String s) throws IOException {
write(s.getBytes());
}
/* @see java.io.DataOutput.writeChar(int) */
public void writeChar(int v) throws IOException {
validateLength(2);
buffer(position, 2);
buffer.putChar((char) v);
position += 2;
}
/* @see java.io.DataOutput.writeChars(String) */
public void writeChars(String s) throws IOException {
write(s.getBytes("UTF-16BE"));
}
/* @see java.io.DataOutput.writeDouble(double) */
public void writeDouble(double v) throws IOException {
validateLength(8);
buffer(position, 8);
buffer.putDouble(v);
position += 8;
}
/* @see java.io.DataOutput.writeFloat(float) */
public void writeFloat(float v) throws IOException {
validateLength(4);
buffer(position, 4);
buffer.putFloat(v);
position += 4;
}
/* @see java.io.DataOutput.writeInt(int) */
public void writeInt(int v) throws IOException {
validateLength(4);
buffer(position, 4);
buffer.putInt(v);
position += 4;
}
/* @see java.io.DataOutput.writeLong(long) */
public void writeLong(long v) throws IOException {
validateLength(8);
buffer(position, 8);
buffer.putLong(v);
position += 8;
}
/* @see java.io.DataOutput.writeShort(int) */
public void writeShort(int v) throws IOException {
validateLength(2);
buffer(position, 2);
buffer.putShort((short) v);
position += 2;
}
/* @see java.io.DataOutput.writeUTF(String) */
public void writeUTF(String str) throws IOException {
// NB: writeUTF writes a two-byte length prefix followed by the UTF-8 encoding, so the number of bytes written is greater than the length of the string
int strlen = str.getBytes("UTF-8").length + 2;
validateLength(strlen);
buffer(position, strlen);
raf.seek(position);
raf.writeUTF(str);
position += strlen;
}
/**
* Aligns the NIO buffer, maps it if it is not currently mapped, and sets all
* relevant positions and offsets.
* @param offset The location within the file to read from.
* @param size The requested read length.
* @throws IOException If there is an issue mapping, aligning or allocating
* the buffer.
*/
private void buffer(long offset, int size) throws IOException {
position = offset;
long newPosition = offset + size;
if (newPosition < bufferStartPosition
|| newPosition > bufferStartPosition + bufferSize
|| buffer == null) {
bufferStartPosition = Math.min(offset, length() - 1);
long newSize = Math.min(length() - bufferStartPosition, bufferSize);
if (newSize < size && newSize == bufferSize) newSize = size;
if (newSize + bufferStartPosition > length()) {
newSize = length() - bufferStartPosition;
}
offset = bufferStartPosition;
ByteOrder byteOrder = buffer == null ? order : getOrder();
try {
buffer = channel.map(mapMode, bufferStartPosition, newSize);
} catch (IOException e) {
// Mapping can fail when previously mapped buffers have not yet been
// released; forcing garbage collection and finalization before retrying
// works around this. This is not a problem with JDK 1.6 and higher but
// can be a problem with earlier JVMs.
System.gc();
System.runFinalization();
buffer = channel.map(mapMode, bufferStartPosition, newSize);
}
if (byteOrder != null) setOrder(byteOrder);
//System.gc();
//System.runFinalization();
}
buffer.position((int) (offset - bufferStartPosition));
}
}
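// ---------------------------------------------------------------------------------
// Illustrative usage sketch, not part of the original source: round-trips a few
// primitives through a temporary file to exercise the buffered write/seek/read cycle
// above. The file is pre-sized with a plain RandomAccessFile because the constructor
// maps an initial buffer and assumes a non-empty file.
class NIOFileHandleDemo {
  public static void main(String[] args) throws IOException {
    File tmp = File.createTempFile("niofilehandle", ".bin");
    tmp.deleteOnExit();
    RandomAccessFile seed = new RandomAccessFile(tmp, "rw");
    seed.setLength(16); // pre-size so the initial mapping has bytes to cover
    seed.close();
    NIOFileHandle handle = new NIOFileHandle(tmp, "rw");
    try {
      handle.setOrder(ByteOrder.LITTLE_ENDIAN); // ordering applies to the mapped buffer
      handle.writeInt(42);
      handle.writeDouble(3.25);
      handle.seek(0); // rewind and read the values back through the same buffer
      System.out.println(handle.readInt());    // 42
      System.out.println(handle.readDouble()); // 3.25
    } finally {
      handle.close();
    }
  }
}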
|
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.Interval;
import java.util.*;
switch (_input.LA(1) /* next symbol */) { //*** https://www.antlr.org/api/Java/org/antlr/v4/runtime/IntStream.html#LA(int)
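// Illustrative sketch, not part of the original fragment: a switch like the one above
// typically appears inside a generated ANTLR parser rule, where _input is the parser's
// TokenStream. A standalone equivalent of that lookahead dispatch is shown below; only
// the EOF constant is used because the real token type constants live in the generated
// lexer.
class LookaheadDemo {
  static String classifyNext(TokenStream input) {
    int next = input.LA(1); // peek at the next token type without consuming it
    switch (next) {
      case Token.EOF:
        return "end of input";
      default:
        return "token type " + next;
    }
  }
}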
|
package io.quantumdb.core.planner;
import java.util.LinkedHashMap;
import lombok.Data;
@Data
public class MigratorFunction {
public enum Stage {
INITIAL, CONSECUTIVE
}
private final String name;
private final LinkedHashMap<String, String> parameters;
private final String createStatement;
private final String dropStatement;
}
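// Illustrative usage sketch, not part of the original source: Lombok's @Data on the
// final fields above generates a constructor taking all four values plus getters,
// equals/hashCode and toString. The function name, parameter and SQL strings below
// are placeholders.
class MigratorFunctionDemo {
  static MigratorFunction example() {
    LinkedHashMap<String, String> parameters = new LinkedHashMap<String, String>();
    parameters.put("batch_size", "bigint"); // insertion order is preserved
    MigratorFunction fn = new MigratorFunction(
        "migrate_users_v1",
        parameters,
        "CREATE FUNCTION migrate_users_v1(batch_size bigint) ...",
        "DROP FUNCTION migrate_users_v1(bigint)");
    System.out.println(fn.getName() + " -> " + fn.getParameters().keySet());
    return fn;
  }
}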
|
package net.runelite.client.plugins.specialcounter;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.ToString;
@AllArgsConstructor
@Getter
@ToString
enum Boss
{
ABYSSAL_SIRE("Abyssal sire", 1.25d),
CALLISTO("Callisto", 1.225d),
CERBERUS("Cerberus", 1.15d),
CHAOS_ELEMENTAL("Chaos elemental", 1.075d),
CORPOREAL_BEAST("Corporeal Beast", 1.55d),
GENERAL_GRAARDOR("General Graardor", 1.325d),
GIANT_MOLE("Giant Mole", 1.075d),
KALPHITE_QUEEN("Kalphite Queen", 1.05d),
KING_BLACK_DRAGON("King Black Dragon", 1.075d),
KRIL_TSUTSAROTH("K'ril Tsutsaroth", 1.375d),
VENENATIS("Venenatis", 1.4d),
VETION("Vet'ion", 1.225d);
private final String name;
private final double modifier; // Some NPCs have a modifier to the experience a player receives.
public static Boss getBoss(String name)
{
for (Boss boss : values())
{
if (boss.getName().equals(name))
{
return boss;
}
}
return null;
}
}
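// Illustrative usage sketch, not part of the original source: looking up a boss by its
// in-game NPC name and scaling a value by its modifier (per the field comment above,
// some NPCs modify the experience a player receives).
class BossLookupDemo {
  public static void main(String[] args) {
    Boss boss = Boss.getBoss("Cerberus"); // matched against the in-game NPC name
    if (boss != null) {
      double adjusted = 100 * boss.getModifier(); // scale a base value by the modifier
      System.out.println(boss + " -> " + adjusted);
    }
    System.out.println(Boss.getBoss("Goblin")); // null: not a tracked boss
  }
}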
|
package net.runelite.client.plugins.twitch;
import com.google.common.base.Strings;
import com.google.inject.Provides;
import java.time.temporal.ChronoUnit;
import java.util.Map;
import javax.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import net.runelite.api.ChatMessageType;
import net.runelite.api.Client;
import net.runelite.api.GameState;
import net.runelite.client.events.ConfigChanged;
import net.runelite.client.chat.ChatColorType;
import net.runelite.client.chat.ChatMessageBuilder;
import net.runelite.client.chat.ChatMessageManager;
import net.runelite.client.chat.ChatboxInputListener;
import net.runelite.client.chat.CommandManager;
import net.runelite.client.chat.QueuedMessage;
import net.runelite.client.config.ConfigManager;
import net.runelite.client.eventbus.Subscribe;
import net.runelite.client.events.ChatboxInput;
import net.runelite.client.events.PrivateMessageInput;
import net.runelite.client.plugins.Plugin;
import net.runelite.client.plugins.PluginDescriptor;
import net.runelite.client.plugins.twitch.irc.TwitchIRCClient;
import net.runelite.client.plugins.twitch.irc.TwitchListener;
import net.runelite.client.task.Schedule;
@PluginDescriptor(
name = "Twitch",
description = "Integrates Twitch chat",
enabledByDefault = false
)
@Slf4j
public class TwitchPlugin extends Plugin implements TwitchListener, ChatboxInputListener
{
@Inject
private TwitchConfig twitchConfig;
@Inject
private Client client;
@Inject
private ChatMessageManager chatMessageManager;
@Inject
private CommandManager commandManager;
private TwitchIRCClient twitchIRCClient;
@Override
protected void startUp()
{
connect();
commandManager.register(this);
}
@Override
protected void shutDown()
{
if (twitchIRCClient != null)
{
twitchIRCClient.close();
twitchIRCClient = null;
}
commandManager.unregister(this);
}
@Provides
TwitchConfig provideConfig(ConfigManager configManager)
{
return configManager.getConfig(TwitchConfig.class);
}
private synchronized void connect()
{
if (twitchIRCClient != null)
{
log.debug("Terminating Twitch client {}", twitchIRCClient);
twitchIRCClient.close();
twitchIRCClient = null;
}
if (!Strings.isNullOrEmpty(twitchConfig.username())
&& !Strings.isNullOrEmpty(twitchConfig.oauthToken())
&& !Strings.isNullOrEmpty(twitchConfig.channel()))
{
String channel = twitchConfig.channel().toLowerCase();
if (!channel.startsWith("
{
channel = "#" + channel;
}
log.debug("Connecting to Twitch as {}", twitchConfig.username());
twitchIRCClient = new TwitchIRCClient(
this,
twitchConfig.username(),
twitchConfig.oauthToken(),
channel
);
twitchIRCClient.start();
}
}
@Schedule(period = 30, unit = ChronoUnit.SECONDS, asynchronous = true)
public void checkClient()
{
if (twitchIRCClient != null)
{
if (twitchIRCClient.isConnected())
{
twitchIRCClient.pingCheck();
}
if (!twitchIRCClient.isConnected())
{
log.debug("Reconnecting...");
connect();
}
}
}
@Subscribe
public void onConfigChanged(ConfigChanged configChanged)
{
if (!configChanged.getGroup().equals("twitch"))
{
return;
}
connect();
}
private void addChatMessage(String sender, String message)
{
String chatMessage = new ChatMessageBuilder()
.append(ChatColorType.NORMAL)
.append(message)
.build();
chatMessageManager.queue(QueuedMessage.builder()
.type(ChatMessageType.FRIENDSCHAT)
.sender("Twitch")
.name(sender)
.runeLiteFormattedMessage(chatMessage)
.timestamp((int) (System.currentTimeMillis() / 1000))
.build());
}
@Override
public void privmsg(Map<String, String> tags, String message)
{
if (client.getGameState() != GameState.LOGGED_IN)
{
return;
}
String displayName = tags.get("display-name");
addChatMessage(displayName, message);
}
@Override
public void roomstate(Map<String, String> tags)
{
log.debug("Room state: {}", tags);
}
@Override
public void usernotice(Map<String, String> tags, String message)
{
log.debug("Usernotice tags: {} message: {}", tags, message);
if (client.getGameState() != GameState.LOGGED_IN)
{
return;
}
String sysmsg = tags.get("system-msg");
addChatMessage("[System]", sysmsg);
}
@Override
public boolean onChatboxInput(ChatboxInput chatboxInput)
{
String message = chatboxInput.getValue();
if (message.startsWith("
{
message = message.substring(2);
if (message.isEmpty() || twitchIRCClient == null)
{
return true;
}
twitchIRCClient.privmsg(message);
addChatMessage(twitchConfig.username(), message);
return true;
}
return false;
}
@Override
public boolean onPrivateMessageInput(PrivateMessageInput privateMessageInput)
{
return false;
}
}
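// Illustrative sketch, not part of the original plugin: the plugin's two pieces of
// string handling as standalone helpers. Channels are lowercased and normalized to
// start with '#', and chatbox input starting with "//" is forwarded to Twitch with
// the prefix stripped.
class TwitchSyntaxDemo {
  static String normalizeChannel(String channel) {
    channel = channel.toLowerCase();
    return channel.startsWith("#") ? channel : "#" + channel;
  }
  static String extractTwitchMessage(String chatboxInput) {
    // returns null when the input is not addressed to Twitch
    return chatboxInput.startsWith("//") ? chatboxInput.substring(2) : null;
  }
  public static void main(String[] args) {
    System.out.println(normalizeChannel("RuneLite"));    // #runelite
    System.out.println(extractTwitchMessage("//hello")); // hello
  }
}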
|
package com.sg.java8.training.optional;
import java.util.Optional;
/**
* A few {@link java.util.Optional} usage samples
*/
public class OptionalMain {
public static void main(String[] args) {
final String nullable = "8"; // in a real use-case, it would be a value obtained from some other processing
final int parsedValue = Optional.ofNullable(nullable)
.map(Integer::parseInt)
.orElse(0);
final int imperativeValue;
if (nullable != null) {
imperativeValue = Integer.parseInt(nullable);
} else {
imperativeValue = 0;
}
// recommended way of using Optional
final int parsedPositiveValue = Optional.ofNullable(nullable)
.map(Integer::parseInt)
.orElseThrow(() -> new IllegalArgumentException("Cannot parse"));
final String nullableValue = System.currentTimeMillis() % 2 == 0 ? null : "some value";
final Optional<String> optionalString = Optional.ofNullable(nullableValue);
// two ways of using the optional wrapped value
// 1: use the .isPresent & .get methods
if (optionalString.isPresent()) {
final String wrappedValue = optionalString.get();
// use the unwrapped value
}
// 2: apply a Consumer on the wrapped value
optionalString.ifPresent(value -> System.out.println(value));
Optional<Integer> integerOptional = Optional.empty();
System.out.println(integerOptional.get()); // throws a NoSuchElementException
}
}
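// Illustrative addendum, not part of the original sample: orElse always evaluates its
// argument, while orElseGet only invokes the supplier when the Optional is empty.
// The difference matters when computing the default is expensive.
class OptionalDefaultsDemo {
  public static void main(String[] args) {
    Optional<String> present = Optional.of("42");
    // expensiveDefault() runs even though a value is present:
    String eager = present.orElse(expensiveDefault());
    // the supplier is skipped entirely here:
    String lazy = present.orElseGet(OptionalDefaultsDemo::expensiveDefault);
    System.out.println(eager + " " + lazy);
  }
  private static String expensiveDefault() {
    System.out.println("computing default...");
    return "0";
  }
}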
|
package org.jboss.forge.addon.scaffold;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.forge.addon.javaee.servlet.ServletFacet_3_1;
import org.jboss.forge.addon.parser.java.facets.JavaSourceFacet;
import org.jboss.forge.addon.projects.Project;
import org.jboss.forge.addon.projects.ProjectFactory;
import org.jboss.forge.addon.scaffold.faces.FacesScaffoldProvider;
import org.jboss.forge.addon.shell.test.ShellTest;
import org.jboss.forge.addon.ui.result.CompositeResult;
import org.jboss.forge.addon.ui.result.Failed;
import org.jboss.forge.addon.ui.result.Result;
import org.jboss.forge.arquillian.AddonDependencies;
import org.jboss.forge.arquillian.AddonDependency;
import org.jboss.forge.arquillian.archive.AddonArchive;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
*
* Test class for {@link FacesScaffoldProvider}
*
* @author <a href="ggastald@redhat.com">George Gastaldi</a>
*/
@RunWith(Arquillian.class)
public class FacesScaffoldTest
{
@Deployment
@AddonDependencies({
@AddonDependency(name = "org.jboss.forge.addon:scaffold-faces"),
@AddonDependency(name = "org.jboss.forge.addon:maven"),
@AddonDependency(name = "org.jboss.forge.addon:shell-test-harness"),
@AddonDependency(name = "org.jboss.forge.furnace.container:cdi")
})
public static AddonArchive getDeployment()
{
AddonArchive archive = ShrinkWrap
.create(AddonArchive.class)
.addClass(ProjectHelper.class)
.addBeansXML();
return archive;
}
@Inject
ProjectFactory projectFactory;
@Inject
ShellTest shellTest;
@Test
public void testScaffoldSetup() throws Exception
{
shellTest.execute("project-new --named demo" + System.currentTimeMillis(), 5, TimeUnit.SECONDS);
shellTest.execute("jpa-new-entity --named Customer", 5, TimeUnit.SECONDS);
shellTest.execute("jpa-new-field --named firstName", 5, TimeUnit.SECONDS);
Result result = shellTest.execute("scaffold-setup", 5, TimeUnit.SECONDS);
Assert.assertThat(result, is(instanceOf(CompositeResult.class)));
}
@Test
public void shouldCreateOneErrorPageForEachErrorCode() throws Exception
{
shellTest.execute("project-new --named demo" + System.currentTimeMillis(), 5, TimeUnit.SECONDS);
shellTest.execute("servlet-setup --servletVersion 3.1", 5, TimeUnit.SECONDS);
shellTest.execute("jpa-new-entity --named Customer", 5, TimeUnit.SECONDS);
shellTest.execute("jpa-new-field --named firstName", 5, TimeUnit.SECONDS);
shellTest.execute("jpa-new-entity --named Publisher", 5, TimeUnit.SECONDS);
shellTest.execute("jpa-new-field --named firstName", 5, TimeUnit.SECONDS);
Assert.assertThat(shellTest.execute("scaffold-setup", 5, TimeUnit.SECONDS), not(instanceOf(Failed.class)));
Project project = projectFactory.findProject(shellTest.getShell().getCurrentResource());
Assert.assertTrue(project.hasFacet(ServletFacet_3_1.class));
ServletFacet_3_1 servletFacet = project.getFacet(ServletFacet_3_1.class);
Assert.assertNotNull(servletFacet.getConfig());
String entityPackageName = project.getFacet(JavaSourceFacet.class).getBasePackage() + ".model";
Result scaffoldGenerate1 = shellTest
.execute(("scaffold-generate --webRoot /admin --targets " + entityPackageName + ".Customer"), 10,
TimeUnit.SECONDS);
Assert.assertThat(scaffoldGenerate1, not(instanceOf(Failed.class)));
Assert.assertEquals(2, servletFacet.getConfig().getAllErrorPage().size());
Result scaffoldGenerate2 = shellTest
.execute(("scaffold-generate --webRoot /admin --targets " + entityPackageName + ".Publisher"), 10,
TimeUnit.SECONDS);
Assert.assertThat(scaffoldGenerate2, not(instanceOf(Failed.class)));
Assert.assertEquals(2, servletFacet.getConfig().getAllErrorPage().size());
}
}
|
/*
* $Id: LockssTestCase.java,v 1.19 2003-01-15 18:16:57 tal Exp $
*/
package org.lockss.test;
import java.util.*;
import java.io.*;
import java.net.*;
import org.lockss.util.*;
import junit.framework.TestCase;
import junit.framework.TestResult;
public class LockssTestCase extends TestCase {
/** Timeout duration for timeouts that are expected to time out. Setting
* this higher makes normal tests take longer, setting it too low might
* cause failing tests to erroneously succeed on slow or busy
* machines. */
public static int TIMEOUT_SHOULD = 300;
/** Timeout duration for timeouts that are expected not to time out.
* This should be set high to ensure catching failures. */
public static final int DEFAULT_TIMEOUT_SHOULDNT = 2000;
public static int TIMEOUT_SHOULDNT = DEFAULT_TIMEOUT_SHOULDNT;
List tmpDirs;
List doLaters;
public LockssTestCase(String msg) {
super(msg);
Integer timeout = Integer.getInteger("org.lockss.test.timeout.shouldnt");
if (timeout != null) {
TIMEOUT_SHOULDNT = timeout.intValue();
}
}
/** Create and return the name of a temp dir. The dir is created within
* the default temp file dir.
* It will be deleted following the test, by tearDown(). (So if you
* override tearDown(), be sure to call <code>super.tearDown()</code>.)
* @return The newly created directory
*/
public File getTempDir() throws IOException {
File tmpdir = FileUtil.createTempDir("locksstest", null);
if (tmpdir != null) {
if (tmpDirs == null) {
tmpDirs = new LinkedList();
}
tmpDirs.add(tmpdir);
}
return tmpdir;
}
/** Remove any temp dirs, cancel any outstanding {@link DoLater}s */
public void tearDown() throws Exception {
if (tmpDirs != null) {
for (ListIterator iter = tmpDirs.listIterator(); iter.hasNext(); ) {
File dir = (File)iter.next();
if (FileUtil.delTree(dir)) {
iter.remove();
}
}
}
if (doLaters != null) {
List copy = new ArrayList(doLaters);
for (Iterator iter = copy.iterator(); iter.hasNext(); ) {
DoLater doer = (DoLater)iter.next();
doer.cancel();
}
// do NOT set doLaters to null here. It may be referenced by
// exiting DoLaters. It won't hurt anything because the next test
// will create a new instance of the test case, and get a different
// doLaters list
}
super.tearDown();
}
/** Asserts that two Maps are equal (contain the same mappings).
* If they are not an AssertionFailedError is thrown. */
static public void assertEqual(String message, Map expected, Map actual) {
if (expected == null && actual == null) {
return;
}
if (expected != null && expected.equals(actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that two Maps are equal (contain the same mappings).
* If they are not an AssertionFailedError is thrown. */
static public void assertEqual(Map expected, Map actual) {
assertEqual(null, expected, actual);
}
/** Asserts that two collections are isomorphic. If they are not
* an AssertionFailedError is thrown. */
static public void assertIsomorphic(String message,
Collection expected, Collection actual) {
if (CollectionUtil.isIsomorphic(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that two collections are isomorphic. If they are not
* an AssertionFailedError is thrown. */
static public void assertIsomorphic(Collection expected, Collection actual) {
assertIsomorphic(null, expected, actual);
}
/** Asserts that the array is isomorphic with the collection. If not
* an AssertionFailedError is thrown. */
static public void assertIsomorphic(String message,
Object expected[], Collection actual) {
if (CollectionUtil.isIsomorphic(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that the array is isomorphic with the collection. If not
* an AssertionFailedError is thrown. */
static public void assertIsomorphic(Object expected[], Collection actual) {
assertIsomorphic(null, expected, actual);
}
/** Asserts that the array is isomorphic with the collection behind the
* iterator. If not an AssertionFailedError is thrown. */
static public void assertIsomorphic(String message,
Object expected[], Iterator actual) {
if (CollectionUtil.isIsomorphic(new ArrayIterator(expected), actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that the array is isomorphic with the collection behind the
* iterator. If not an AssertionFailedError is thrown. */
static public void assertIsomorphic(Object expected[], Iterator actual) {
assertIsomorphic(null, expected, actual);
}
/** Asserts that the two boolean arrays have equal contents */
public static void assertEquals(boolean[] expected, boolean[] actual) {
assertEquals(null, expected, actual);
}
/** Asserts that the two boolean arrays have equal contents */
public static void assertEquals(String message,
boolean[] expected, boolean[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that the two byte arrays have equal contents */
public static void assertEquals(byte[] expected, byte[] actual) {
assertEquals(null, expected, actual);
}
/** Asserts that the two byte arrays have equal contents */
public static void assertEquals(String message,
byte[] expected, byte[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that the two char arrays have equal contents */
public static void assertEquals(char[] expected, char[] actual) {
assertEquals(null, expected, actual);
}
/** Asserts that the two char arrays have equal contents */
public static void assertEquals(String message,
char[] expected, char[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that the two double arrays have equal contents */
public static void assertEquals(double[] expected, double[] actual) {
assertEquals(null, expected, actual);
}
/** Asserts that the two double arrays have equal contents */
public static void assertEquals(String message,
double[] expected, double[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that the two float arrays have equal contents */
public static void assertEquals(float[] expected, float[] actual) {
assertEquals(null, expected, actual);
}
/** Asserts that the two float arrays have equal contents */
public static void assertEquals(String message,
float[] expected, float[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that the two int arrays have equal contents */
public static void assertEquals(int[] expected, int[] actual) {
assertEquals(null, expected, actual);
}
/** Asserts that the two int arrays have equal contents */
public static void assertEquals(String message,
int[] expected, int[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that the two short arrays have equal contents */
public static void assertEquals(short[] expected, short[] actual) {
assertEquals(null, expected, actual);
}
/** Asserts that the two short arrays have equal contents */
public static void assertEquals(String message,
short[] expected, short[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that the two long arrays have equal contents */
public static void assertEquals(long[] expected, long[] actual) {
assertEquals(null, expected, actual);
}
/** Asserts that the two long arrays have equal contents */
public static void assertEquals(String message,
long[] expected, long[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/** Asserts that the two Object arrays have equal contents */
public static void assertEquals(Object[] expected, Object[] actual) {
assertEquals(null, expected, actual);
}
/** Asserts that the two Object arrays have equal contents */
public static void assertEquals(String message,
Object[] expected, Object[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
// tk do a better job of printing collections
static private void failNotEquals(String message,
Object expected, Object actual) {
String formatted= "";
if (message != null)
formatted= message+" ";
fail(formatted+"expected:<"+expected+"> but was:<"+actual+">");
}
static private void failNotEquals(String message,
int[] expected, int[] actual) {
String formatted= "";
if (message != null)
formatted= message+" ";
fail(formatted+"expected:<"+arrayString(expected)+
"> but was:<"+arrayString(actual)+">");
}
static protected Object[] objArray(int[] a) {
Object[] o = new Object[a.length];
for (int ix = 0; ix < a.length; ix++) {
o[ix] = new Integer(a[ix]);
}
return o;
}
static protected String arrayString(int[] a) {
return StringUtil.separatedString(objArray(a), ", ");
}
static private void failNotEquals(String message,
byte[] expected, byte[] actual) {
String formatted= "";
if (message != null)
formatted= message+" ";
fail(formatted+"expected:<"+ByteArray.toHexString(expected)+
"> but was:<"+ByteArray.toHexString(actual)+">");
// fail(formatted+"expected:<"+arrayString(expected)+
// "> but was:<"+arrayString(actual)+">");
}
static protected Object[] objArray(byte[] a) {
Object[] o = new Object[a.length];
for (int ix = 0; ix < a.length; ix++) {
o[ix] = new Integer(a[ix]);
}
return o;
}
static protected String arrayString(byte[] a) {
return StringUtil.separatedString(objArray(a), ", ");
}
static private void failNotEquals(String message,
Object[] expected, Object actual) {
failNotEquals(message,
"[" + StringUtil.separatedString(expected, ", ") + "]",
actual);
}
/** Asserts that the two DatagramPackets have equal contents */
public static void assertEquals(DatagramPacket expected,
DatagramPacket actual) {
assertEquals(expected.getAddress(), actual.getAddress());
assertEquals(expected.getPort(), actual.getPort());
assertEquals(expected.getLength(), actual.getLength());
assertEquals(expected.getOffset(), actual.getOffset());
assertTrue(Arrays.equals(expected.getData(), actual.getData()));
}
/** Abstraction to do something in another thread, after a delay,
* unless cancelled. If the scheduled activity is still pending when the
* test completes, it is cancelled by tearDown().
* <br>For one-off use:<pre>
* final Object obj = ...;
* DoLater doer = new DoLater(1000) {
* protected void doit() {
* obj.method(...);
* }
* };
* doer.start();</pre>
*
* Or, for convenient repeated use of a particular delayed operation,
* define a class that extends <code>DoLater</code>,
* with a constructor that calls
* <code>super(wait)</code> and stores any other necessary args into
* instance vars, and a <code>doit()</code> method that does whatever needs
* to be done. And a convenience method to create and start it.
* For example, <code>Interrupter</code> is defined as:<pre>
* public class Interrupter extends DoLater {
* private Thread thread;
* Interrupter(long waitMs, Thread thread) {
* super(waitMs);
* this.thread = thread;
* }
*
* protected void doit() {
* thread.interrupt();
* }
* }
*
* public Interrupter interruptMeIn(long ms) {
* Interrupter i = new Interrupter(ms, Thread.currentThread());
* i.start();
* return i;
* }</pre>
*
* Then, to protect a test with a timeout:<pre>
* Interrupter intr = null;
* try {
* intr = interruptMeIn(1000);
* // perform a test that should complete in less than one second
* intr.cancel();
* } finally {
* if (intr.did()) {
* fail("operation failed to complete in one second");
* }
* }</pre>
* The <code>cancel()</code> ensures that the interrupt will not
* happen after the try block completes. (This is not necessary at the
* end of a test case, as any pending interrupters will be cancelled
* by tearDown.)
*/
protected abstract class DoLater extends Thread {
private long wait;
private boolean want = true;
private boolean did = false;
protected DoLater(long waitMs) {
wait = waitMs;
}
/** Must override this to perform desired action */
protected abstract void doit();
/** Return true iff action was taken */
public boolean did() {
return did;
}
/** Cancel the action iff it hasn't already started. If it has started,
* wait until it completes. (Thus when <code>cancel()</code> returns, it
* is safe to destroy any environment on which the action relies.)
*/
public synchronized void cancel() {
if (want) {
want = false;
this.interrupt();
}
}
public final void run() {
try {
synchronized (LockssTestCase.this) {
if (doLaters == null) {
doLaters = Collections.synchronizedList(new LinkedList());
}
}
doLaters.add(this);
if (wait != 0) {
TimerUtil.sleep(wait);
}
synchronized (this) {
if (want) {
want = false;
did = true;
doit();
}
}
} catch (InterruptedException e) {
// exit thread
} finally {
doLaters.remove(this);
}
}
}
/** Interrupter interrupts a thread in a while */
public class Interrupter extends DoLater {
private Thread thread;
private boolean threadDump = false;
Interrupter(long waitMs, Thread thread) {
super(waitMs);
this.thread = thread;
}
/** Interrupt the thread */
protected void doit() {
if (threadDump) {
try {
DebugUtils.getInstance().threadDump();
} catch (Exception e) {
}
}
thread.interrupt();
}
/** Request a thread dump before the thread is interrupted */
public void setThreadDump() {
threadDump = true;
}
}
/** Interrupt current thread in a while */
public Interrupter interruptMeIn(long ms) {
Interrupter i = new Interrupter(ms, Thread.currentThread());
i.start();
return i;
}
/** Interrupt current thread in a while, first printing a thread dump */
public Interrupter interruptMeIn(long ms, boolean threadDump) {
Interrupter i = new Interrupter(ms, Thread.currentThread());
if (threadDump) {
i.setThreadDump();
}
i.start();
return i;
}
}
|
/*
* $Id: LockssTestCase.java,v 1.60 2004-12-14 22:02:50 troberts Exp $
*/
package org.lockss.test;
import java.util.*;
import java.io.*;
import java.net.*;
import org.lockss.util.*;
import org.lockss.config.ConfigManager;
import org.lockss.daemon.*;
import junit.framework.TestCase;
import junit.framework.TestResult;
import org.apache.oro.text.regex.*;
public class LockssTestCase extends TestCase {
protected static Logger log =
Logger.getLoggerWithInitialLevel("LockssTest",
Logger.getInitialDefaultLevel());
/** Timeout duration for timeouts that are expected to time out. Setting
* this higher makes normal tests take longer, setting it too low might
* cause failing tests to erroneously succeed on slow or busy
* machines. */
public static int TIMEOUT_SHOULD = 300;
/** Timeout duration for timeouts that are expected not to time out.
* This should be set high to ensure catching failures. */
public static final int DEFAULT_TIMEOUT_SHOULDNT = 2000;
public static int TIMEOUT_SHOULDNT = DEFAULT_TIMEOUT_SHOULDNT;
private MockLockssDaemon mockDaemon = null;
List tmpDirs;
List doLaters = null;
public LockssTestCase(String msg) {
this();
setName(msg);
}
public LockssTestCase() {
super();
Integer timeout = Integer.getInteger("org.lockss.test.timeout.shouldnt");
if (timeout != null) {
TIMEOUT_SHOULDNT = timeout.intValue();
}
}
/**
* Create and return the name of a temp dir. The dir is created within
* the default temp file dir.
* It will be deleted following the test, by tearDown(). (So if you
* override tearDown(), be sure to call <code>super.tearDown()</code>.)
* @return The newly created directory
* @throws IOException
*/
public File getTempDir() throws IOException {
File tmpdir = FileTestUtil.createTempDir("locksstest", null);
if (tmpdir != null) {
if (tmpDirs == null) {
tmpDirs = new LinkedList();
}
tmpDirs.add(tmpdir);
}
return tmpdir;
}
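/*
 * Illustrative subclass sketch (not part of the original file; class and
 * field names are assumptions): a test that works in a temp dir created by
 * getTempDir() and overrides tearDown(), remembering to call
 * super.tearDown() so the directory is deleted.
 *
 *   public class TestUsesTempDir extends LockssTestCase {
 *     private File dir;
 *     public void setUp() throws Exception {
 *       super.setUp();
 *       dir = getTempDir();
 *     }
 *     public void tearDown() throws Exception {
 *       // test-specific cleanup here
 *       super.tearDown();   // removes dir
 *     }
 *   }
 */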
/**
* Return the MockLockssDaemon instance for this testcase. All test code
* should use this method rather than creating a MockLockssDaemon.
*/
public MockLockssDaemon getMockLockssDaemon() {
return mockDaemon;
}
/** Create a fresh config manager, MockLockssDaemon */
protected void setUp() throws Exception {
// Logger.resetLogs();
mockDaemon = new MockLockssDaemon();
super.setUp();
disableThreadWatchdog();
ConfigManager.makeConfigManager();
}
/**
* Remove any temp dirs, cancel any outstanding {@link
* org.lockss.test.LockssTestCase.DoLater}s
* @throws Exception
*/
protected void tearDown() throws Exception {
if (doLaters != null) {
List copy;
synchronized (this) {
copy = new ArrayList(doLaters);
}
for (Iterator iter = copy.iterator(); iter.hasNext(); ) {
DoLater doer = (DoLater)iter.next();
doer.cancel();
}
// do NOT set doLaters to null here. It may be referenced by
// exiting DoLaters. It won't hurt anything because the next test
// will create a new instance of the test case, and get a different
// doLaters list
}
// XXX this should be folded into LockssDaemon shutdown
ConfigManager cfg = ConfigManager.getConfigManager();
if (cfg != null) {
cfg.stopService();
}
TimerQueue.stopTimerQueue();
// delete temp dirs
if (tmpDirs != null && !isKeepTempFiles()) {
for (ListIterator iter = tmpDirs.listIterator(); iter.hasNext(); ) {
File dir = (File)iter.next();
if (FileTestUtil.delTree(dir)) {
log.debug2("deltree(" + dir + ") = true");
iter.remove();
} else {
log.debug2("deltree(" + dir + ") = false");
}
}
}
super.tearDown();
if (Boolean.getBoolean("org.lockss.test.threadDump")) {
DebugUtils.getInstance().threadDump();
TimerUtil.guaranteedSleep(1000);
}
enableThreadWatchdog();
}
public static boolean isKeepTempFiles() {
return Boolean.getBoolean("org.lockss.keepTempFiles");
}
protected void disableThreadWatchdog() {
LockssThread.disableWatchdog(true);
}
protected void enableThreadWatchdog() {
LockssThread.disableWatchdog(false);
}
double successRate;
int successMaxRepetitions;
int successMaxFailures;
/** Causes the current test case to be repeated if it fails, ultimately
* succeeding if the success rate is sufficiently high. If a test is
* repeated, a message will be written to System.err. Repetitions are
* not reflected in test statistics.
* @param rate the minimum success rate between 0 and 1 (successes /
* attempts) necessary for the test ultimately to succeed.
* @param maxRepetitions the maximum number of times the test will be
* repeated in an attempt to achieve the specified success rate.
* @see #successRateSetUp()
* @see #successRateTearDown()
*/
protected void assertSuccessRate(double rate, int maxRepetitions) {
if (successMaxFailures == 0) {
successRate = rate;
successMaxRepetitions = maxRepetitions;
successMaxFailures = maxRepetitions - ((int)(rate * maxRepetitions));
}
}
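/*
 * Illustrative usage sketch (not from the original sources; the helper
 * performTimingSensitiveOperation() is an assumed name): a nondeterministic
 * test declared acceptable if it passes at least 80% of up to 5 repetitions.
 *
 *   public void testOccasionallyFlakyOperation() throws Exception {
 *     assertSuccessRate(0.8, 5);
 *     assertTrue(performTimingSensitiveOperation());
 *   }
 */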
/**
* Runs the bare test sequence, repeating if necessary to achieve the
* specified success rate.
* @see #assertSuccessRate
* @exception Throwable if any exception is thrown
*/
public void runBare() throws Throwable {
int rpt = 0;
int failures = 0;
successRateSetUp();
try {
while (true) {
try {
// log the name of the test case (testFoo() method)
log.debug("Testcase " + getName());
super.runBare();
} catch (Throwable e) {
if (++failures > successMaxFailures) {
rpt++;
throw e;
}
}
if (++rpt >= successMaxRepetitions) {
break;
}
if ((((double)(rpt - failures)) / ((double)rpt)) > successRate) {
break;
}
}
} finally {
if (successMaxFailures > 0 && failures > 0) {
System.err.println(getName() + " failed " + failures +
" of " + rpt + " tries, " +
((failures > successMaxFailures) ? "not " : "") +
"achieving a " + successRate + " success rate.");
}
successRateTearDown();
}
}
/** Called once (before setUp()) before a set of repetitions of a test
* case that uses assertSuccessRate(). (setUp() is called before each
* repetition.) */
protected void successRateSetUp() {
successMaxFailures = 0;
}
/** Called once (after tearDown()) after a set of repetitions of a test
* case that uses assertSuccessRate(). (tearDown() is called after each
* repetition.) */
protected void successRateTearDown() {
}
/**
* Asserts that two Maps are equal (contain the same mappings).
* If they are not an AssertionFailedError is thrown.
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
static public void assertEqual(String message, Map expected, Map actual) {
if (expected == null && actual == null) {
return;
}
if (expected != null && expected.equals(actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that an int is positive
*/
static public void assertPositive(int value) {
assertPositive(null, value);
}
static public void assertPositive(String msg, int value) {
StringBuffer sb = new StringBuffer();
if (msg != null) {
sb.append(msg);
sb.append(" ");
}
sb.append("Expected a positive value but got ");
sb.append(value);
assertTrue(sb.toString(), value>0);
}
/**
* Asserts that an int is negative
*/
static public void assertNegative(int value) {
assertNegative(null, value);
}
static public void assertNegative(String msg, int value) {
StringBuffer sb = new StringBuffer();
if (msg != null) {
sb.append(msg);
sb.append(" ");
}
sb.append("Expected a positive value but got ");
sb.append(value);
assertTrue(sb.toString(), value<0);
}
/**
* Asserts that c1.compareTo(c2) > 0 and c2.compareTo(c1) < 0
*/
static public void assertGreaterThan(Comparable c1, Comparable c2) {
assertPositive(c1.compareTo(c2));
assertNegative(c2.compareTo(c1));
}
/**
* Asserts that two Maps are equal (contain the same mappings).
* If they are not an AssertionFailedError is thrown.
* @param expected the expected value
* @param actual the actual value
*/
static public void assertEqual(Map expected, Map actual) {
assertEqual(null, expected, actual);
}
/**
* Asserts that two collections are isomorphic. If they are not
* an AssertionFailedError is thrown.
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
static public void assertIsomorphic(String message,
Collection expected, Collection actual) {
if (CollectionUtil.isIsomorphic(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that two collections are isomorphic. If they are not
* an AssertionFailedError is thrown.
* @param expected the expected value
* @param actual the actual value
*/
static public void assertIsomorphic(Collection expected, Collection actual) {
assertIsomorphic(null, expected, actual);
}
/**
* Asserts that the array is isomorphic with the collection. If not
* an AssertionFailedError is thrown.
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
static public void assertIsomorphic(String message,
Object expected[], Collection actual) {
if (CollectionUtil.isIsomorphic(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the array is isomorphic with the collection. If not
* an AssertionFailedError is thrown.
* @param expected the expected value
* @param actual the actual value
*/
static public void assertIsomorphic(Object expected[], Collection actual) {
assertIsomorphic(null, expected, actual);
}
/**
* Asserts that the array is isomorphic with the collection behind the
* iterator. If not an AssertionFailedError is thrown.
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
static public void assertIsomorphic(String message,
Object expected[], Iterator actual) {
if (CollectionUtil.isIsomorphic(new ArrayIterator(expected), actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the array is isomorphic with the collection behind the
* iterator. If not an AssertionFailedError is thrown.
* @param expected the expected value
* @param actual the actual value
*/
static public void assertIsomorphic(Object expected[], Iterator actual) {
assertIsomorphic(null, expected, actual);
}
/**
* Asserts that the two boolean arrays have equal contents
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(boolean[] expected, boolean[] actual) {
assertEquals(null, expected, actual);
}
/**
* Asserts that the two boolean arrays have equal contents
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(String message,
boolean[] expected, boolean[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the two byte arrays have equal contents
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(byte[] expected, byte[] actual) {
assertEquals(null, expected, actual);
}
/**
* Asserts that the two byte arrays have equal contents
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(String message,
byte[] expected, byte[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the two char arrays have equal contents
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(char[] expected, char[] actual) {
assertEquals(null, expected, actual);
}
/**
* Asserts that the two char arrays have equal contents
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(String message,
char[] expected, char[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the two double arrays have equal contents
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(double[] expected, double[] actual) {
assertEquals(null, expected, actual);
}
/**
* Asserts that the two double arrays have equal contents
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(String message,
double[] expected, double[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the two float arrays have equal contents
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(float[] expected, float[] actual) {
assertEquals(null, expected, actual);
}
/**
* Asserts that the two float arrays have equal contents
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(String message,
float[] expected, float[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the two int arrays have equal contents
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(int[] expected, int[] actual) {
assertEquals(null, expected, actual);
}
/**
* Asserts that the two int arrays have equal contents
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(String message,
int[] expected, int[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the two short arrays have equal contents
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(short[] expected, short[] actual) {
assertEquals(null, expected, actual);
}
/**
* Asserts that the two short arrays have equal contents
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(String message,
short[] expected, short[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the two long arrays have equal contents
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(long[] expected, long[] actual) {
assertEquals(null, expected, actual);
}
/**
* Asserts that the two long arrays have equal contents
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(String message,
long[] expected, long[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the two Object arrays have equal contents
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(Object[] expected, Object[] actual) {
assertEquals(null, expected, actual);
}
/**
* Asserts that the two Object arrays have equal contents
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(String message,
Object[] expected, Object[] actual) {
if (Arrays.equals(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that the two URLs are equal
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(URL expected, URL actual) {
assertEquals(null, expected, actual);
}
/**
* Asserts that the two URLs are equal
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(String message,
URL expected, URL actual) {
if (UrlUtil.equalUrls(expected, actual)) {
return;
}
failNotEquals(message, expected, actual);
}
/**
* Asserts that two objects are not equal. If they are equal,
* an AssertionFailedError is thrown with the given message.
* @param message the message to give on failure
* @param expected the expected value
* @param actual the actual value
*/
public static void assertNotEquals(String message,
Object expected, Object actual) {
if ((expected == null && actual == null) ||
(expected != null && expected.equals(actual))) {
failEquals(message, expected, actual);
}
}
/**
* Asserts that two objects are not equal. If they are equal,
* an AssertionFailedError is thrown.
* @param expected the expected value
* @param actual the actual value
*/
public static void assertNotEquals(Object expected, Object actual) {
assertNotEquals(null, expected, actual);
}
public static void assertNotEquals(long expected, long actual) {
assertNotEquals(null, expected, actual);
}
public static void assertNotEquals(String message,
long expected, long actual) {
assertNotEquals(message, new Long(expected), new Long(actual));
}
public static void assertNotEquals(int expected, int actual) {
assertNotEquals(null, expected, actual);
}
public static void assertNotEquals(String message,
int expected, int actual) {
assertNotEquals(message, new Integer(expected), new Integer(actual));
}
public static void assertNotEquals(short expected, short actual) {
assertNotEquals(null, expected, actual);
}
public static void assertNotEquals(String message,
short expected, short actual) {
assertNotEquals(message, new Short(expected), new Short(actual));
}
public static void assertNotEquals(byte expected, byte actual) {
assertNotEquals(null, expected, actual);
}
public static void assertNotEquals(String message,
byte expected, byte actual) {
assertNotEquals(message, new Byte(expected), new Byte(actual));
}
public static void assertNotEquals(char expected, char actual) {
assertNotEquals(null, expected, actual);
}
public static void assertNotEquals(String message,
char expected, char actual) {
assertNotEquals(message, new Character(expected), new Character(actual));
}
public static void assertNotEquals(boolean expected, boolean actual) {
assertNotEquals(null, expected, actual);
}
public static void assertNotEquals(String message,
boolean expected, boolean actual) {
assertNotEquals(message, new Boolean(expected), new Boolean(actual));
}
public static void assertNotEquals(double expected, double actual,
double delta) {
assertNotEquals(null, expected, actual, delta);
}
public static void assertNotEquals(String message, double expected,
double actual, double delta) {
// handle infinity specially, since subtracting two infinite
// values gives NaN and the following test fails
if (Double.isInfinite(expected)) {
if (expected == actual){
failEquals(message, new Double(expected), new Double(actual));
}
} else if ((Math.abs(expected-actual) <= delta)) {
// Because comparison with NaN always returns false
failEquals(message, new Double(expected), new Double(actual));
}
}
public static void assertNotEquals(float expected, float actual,
float delta) {
assertNotEquals(null, expected, actual, delta);
}
public static void assertNotEquals(String message, float expected,
float actual, float delta) {
// handle infinity specially, since subtracting two infinite
// values gives NaN and the following test fails
if (Double.isInfinite(expected)) {
if (expected == actual){
failEquals(message, new Float(expected), new Float(actual));
}
} else if ((Math.abs(expected-actual) <= delta)) {
// Because comparison with NaN always returns false
failEquals(message, new Float(expected), new Float(actual));
}
}
public static void assertEmpty(Collection coll) {
assertEmpty(null, coll);
}
public static void assertEmpty(String message, Collection coll) {
if (coll.size() > 0) {
StringBuffer sb = new StringBuffer();
if (message != null) {
sb.append(message);
sb.append(" ");
}
sb.append("Expected empty Collection, but containted ");
sb.append(coll);
fail(sb.toString());
}
}
public static void assertEmpty(Map map) {
assertEmpty(null, map);
}
public static void assertEmpty(String message, Map map) {
if (map.size() > 0) {
StringBuffer sb = new StringBuffer();
if (message != null) {
sb.append(message);
sb.append(" ");
}
sb.append("Expected empty Map, but contained ");
sb.append(map);
fail(sb.toString());
}
}
public static void assertContainsAll(Collection coll,
Object[] elements) {
for (int i = 0; i < elements.length; i++) {
assertContains(coll, elements[i]);
}
}
public static void assertContains(Collection coll, Object element) {
assertContains(null, coll, element);
}
public static void assertContains(String msg, Collection coll,
Object element) {
if (!coll.contains(element)) {
StringBuffer sb = new StringBuffer();
if (msg != null) {
sb.append(msg);
sb.append(" ");
}
sb.append("Collection doesn't contain expected element: ");
sb.append(element);
fail(sb.toString());
}
}
public static void assertDoesNotContain(Collection coll, Object element) {
assertDoesNotContain(null, coll, element);
}
public static void assertDoesNotContain(String msg, Collection coll,
Object element) {
if (coll.contains(element)) {
StringBuffer sb = new StringBuffer();
if (msg != null) {
sb.append(msg);
sb.append(" ");
}
sb.append("Collection contains unexpected element: ");
sb.append(element);
fail(sb.toString());
}
}
private static void failEquals(String message,
Object expected, Object actual) {
StringBuffer sb = new StringBuffer();
if (message != null) {
sb.append(message);
sb.append(" ");
}
sb.append("expected not equals, but both were ");
sb.append(expected);
fail(sb.toString());
}
// tk do a better job of printing collections
static protected void failNotEquals(String message,
Object expected, Object actual) {
String formatted= "";
if (message != null)
formatted= message+" ";
fail(formatted+"expected:<"+expected+"> but was:<"+actual+">");
}
static protected void failNotEquals(String message,
int[] expected, int[] actual) {
String formatted= "";
if (message != null)
formatted= message+" ";
fail(formatted+"expected:<"+arrayString(expected)+
"> but was:<"+arrayString(actual)+">");
}
static protected Object[] objArray(int[] a) {
Object[] o = new Object[a.length];
for (int ix = 0; ix < a.length; ix++) {
o[ix] = new Integer(a[ix]);
}
return o;
}
static protected String arrayString(int[] a) {
return StringUtil.separatedString(objArray(a), ", ");
}
static private void failNotEquals(String message,
long[] expected, long[] actual) {
String formatted= "";
if (message != null)
formatted= message+" ";
fail(formatted+"expected:<"+arrayString(expected)+
"> but was:<"+arrayString(actual)+">");
}
static protected Object[] objArray(long[] a) {
Object[] o = new Object[a.length];
for (int ix = 0; ix < a.length; ix++) {
o[ix] = new Long(a[ix]);
}
return o;
}
static protected String arrayString(long[] a) {
return StringUtil.separatedString(objArray(a), ", ");
}
static private void failNotEquals(String message,
byte[] expected, byte[] actual) {
String formatted= "";
if (message != null)
formatted= message+" ";
fail(formatted+"expected:<"+ByteArray.toHexString(expected)+
"> but was:<"+ByteArray.toHexString(actual)+">");
// fail(formatted+"expected:<"+arrayString(expected)+
// "> but was:<"+arrayString(actual)+">");
}
static protected Object[] objArray(byte[] a) {
Object[] o = new Object[a.length];
for (int ix = 0; ix < a.length; ix++) {
o[ix] = new Integer(a[ix]);
}
return o;
}
static protected String arrayString(byte[] a) {
return StringUtil.separatedString(objArray(a), ", ");
}
static private void failNotEquals(String message,
Object[] expected, Object actual) {
failNotEquals(message,
"[" + StringUtil.separatedString(expected, ", ") + "]",
actual);
}
/**
* Asserts that the two DatagramPackets have equal contents
* @param expected the expected value
* @param actual the actual value
*/
public static void assertEquals(DatagramPacket expected,
DatagramPacket actual) {
assertEquals(expected.getAddress(), actual.getAddress());
assertEquals(expected.getPort(), actual.getPort());
assertEquals(expected.getLength(), actual.getLength());
assertEquals(expected.getOffset(), actual.getOffset());
assertTrue(Arrays.equals(expected.getData(), actual.getData()));
}
/**
* Asserts that two collections have all the same elements of the same
* cardinality; tries to give useful output if it fails
*/
public static void assertSameElements(Collection expected,
Collection actual) {
assertTrue("Expected "+expected+" but was "+actual,
org.apache.commons.collections.
CollectionUtils.isEqualCollection(expected, actual));
}
/**
* Asserts that a string matches the content of a reader
*/
public static void assertReaderMatchesString(String expected, Reader reader)
throws IOException{
int len = expected.length() * 2;
char[] ca = new char[len];
StringBuffer actual = new StringBuffer(expected.length());
int n;
while ((n = reader.read(ca)) != -1) {
actual.append(ca, 0, n);
}
assertEquals(expected, actual.toString());
}
/**
* Asserts that a string matches the content of a reader. Old,
* character-at-a-time version. Worth using in tests as well, because
* reading one character at a time may exercise different behavior in the
* stream under test.
*/
public static void assertReaderMatchesStringSlow(String expected,
Reader reader)
throws IOException{
StringBuffer actual = new StringBuffer(expected.length());
int kar;
while ((kar = reader.read()) != -1) {
actual.append((char)kar);
}
assertEquals(expected, actual.toString());
}
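/*
 * Illustrative usage sketch (not from the original sources): both reader
 * assertions compare a Reader's full content against an expected string;
 * the slow variant reads one character at a time.
 *
 *   String expected = "hello reader";
 *   assertReaderMatchesString(expected, new StringReader(expected));
 *   assertReaderMatchesStringSlow(expected, new StringReader(expected));
 */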
/** Convenience method to compile an RE */
protected static Pattern compileRe(String re) {
return RegexpUtil.uncheckedCompile(re);
}
/** Convenience method to match an RE */
protected static boolean isMatchRe(String s, Pattern re) {
return RegexpUtil.getMatcher().contains(s, re);
}
/** Convenience method to compile and match an RE */
protected static boolean isMatchRe(String s, String re) {
return isMatchRe(s, RegexpUtil.uncheckedCompile(re));
}
/**
* Asserts that a string matches a regular expression. The match is
* unanchored; use "^...$" to ensure that the entire string is matched.
*/
public static void assertMatchesRE(String regexp, String string) {
assertMatchesRE(null, regexp, string);
}
/**
* Asserts that a string matches a regular expression. The match is
* unanchored; use "^...$" to ensure that the entire string is matched.
*/
public static void assertMatchesRE(String msg,
String regexp, String string) {
if (msg == null) {
msg = "String \"" + string + "\" does not match RE: " + regexp;
}
assertTrue(msg, isMatchRe(string, regexp));
}
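/*
 * Illustrative usage sketch (not from the original sources): the match is
 * unanchored, so anchor the expression when the whole string must match.
 *
 *   assertMatchesRE("ab+c", "xxabbbcyy");        // substring match succeeds
 *   assertMatchesRE("^ab+c$", "abbbc");          // anchored, whole string
 *   assertNotMatchesRE("^ab+c$", "xxabbbcyy");   // anchored match fails
 */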
/**
* Asserts that a string does not match a regular expression
*/
public static void assertNotMatchesRE(String regexp, String string) {
assertNotMatchesRE(null, regexp, string);
}
/**
* Asserts that a string does not match a regular expression
*/
public static void assertNotMatchesRE(String msg,
String regexp, String string) {
if (msg == null) {
msg = "String \"" + string + "\" should not match RE: " + regexp;
}
assertFalse(msg, isMatchRe(string, regexp));
}
/** Assert that a collection cannot be modified, <i>ie</i>, that all of
* the following methods, plus the collection's iterator().remove()
* method, throw UnsupportedOperationException: add(), addAll(), clear(),
* remove(), removeAll(), retainAll() */
public void assertUnmodifiable(Collection coll) {
List list = ListUtil.list("bar");
try {
coll.add("foo");
fail("add() didn't throw");
} catch (UnsupportedOperationException e) {
}
try {
coll.addAll(list);
fail("addAll() didn't throw");
} catch (UnsupportedOperationException e) {
}
try {
coll.clear();
fail("clear() didn't throw");
} catch (UnsupportedOperationException e) {
}
try {
coll.remove("foo");
fail("remove() didn't throw");
} catch (UnsupportedOperationException e) {
}
try {
coll.removeAll(list);
fail("removeAll() didn't throw");
} catch (UnsupportedOperationException e) {
}
try {
coll.retainAll(list);
fail("retainAll() didn't throw");
} catch (UnsupportedOperationException e) {
}
Iterator iter = coll.iterator();
iter.next();
try {
iter.remove();
fail("iterator().remove() didn't throw");
} catch (UnsupportedOperationException e) {
}
}
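/*
 * Illustrative usage sketch (not from the original sources): a wrapped
 * collection should reject every mutator checked above.
 *
 *   assertUnmodifiable(Collections.unmodifiableList(ListUtil.list("foo")));
 */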
/** Abstraction to do something in another thread, after a delay,
* unless cancelled. If the scheduled activity is still pending when the
* test completes, it is cancelled by tearDown().
* <br>For one-off use:<pre>
* final Object obj = ...;
* DoLater doer = new DoLater(1000) {
* protected void doit() {
* obj.method(...);
* }
* };
* doer.start();</pre>
*
* Or, for convenient repeated use of a particular delayed operation,
* define a class that extends <code>DoLater</code>,
* with a constructor that calls
* <code>super(wait)</code> and stores any other necessary args into
* instance vars, and a <code>doit()</code> method that does whatever needs
* to be done. And a convenience method to create and start it.
* For example, <code>Interrupter</code> is defined as:<pre>
* public class Interrupter extends DoLater {
* private Thread thread;
* Interrupter(long waitMs, Thread thread) {
* super(waitMs);
* this.thread = thread;
* }
*
* protected void doit() {
* thread.interrupt();
* }
* }
*
* public Interrupter interruptMeIn(long ms) {
* Interrupter i = new Interrupter(ms, Thread.currentThread());
* i.start();
* return i;
* }</pre>
*
* Then, to protect a test with a timeout:<pre>
* Interrupter intr = null;
* try {
* intr = interruptMeIn(1000);
* // perform a test that should complete in less than one second
* intr.cancel();
* } finally {
* if (intr.did()) {
* fail("operation failed to complete in one second");
* }
* }</pre>
* The <code>cancel()</code> ensures that the interrupt will not
* happen after the try block completes. (This is not necessary at the
* end of a test case, as any pending interrupters will be cancelled
* by tearDown.)
*/
protected abstract class DoLater extends Thread {
private long wait;
private boolean want = true;
private boolean did = false;
protected DoLater(long waitMs) {
wait = waitMs;
}
/** Must override this to perform desired action */
protected abstract void doit();
/**
* Return true iff action was taken
* @return true iff taken
*/
public boolean did() {
return did;
}
/** Cancel the action iff it hasn't already started. If it has started,
* wait until it completes. (Thus when <code>cancel()</code> returns, it
* is safe to destroy any environment on which the action relies.)
*/
public synchronized void cancel() {
if (want) {
want = false;
this.interrupt();
}
}
public final void run() {
try {
synchronized (LockssTestCase.this) {
if (doLaters == null) {
doLaters = new LinkedList();
}
doLaters.add(this);
}
if (wait != 0) {
TimerUtil.sleep(wait);
}
synchronized (this) {
if (want) {
want = false;
did = true;
doit();
}
}
} catch (InterruptedException e) {
// exit thread
} finally {
synchronized (LockssTestCase.this) {
doLaters.remove(this);
}
}
}
}
/** Interrupter interrupts a thread in a while */
public class Interrupter extends DoLater {
private Thread thread;
private boolean threadDump = false;
Interrupter(long waitMs, Thread thread) {
super(waitMs);
setPriority(thread.getPriority() + 1);
this.thread = thread;
}
/** Interrupt the thread */
protected void doit() {
log.debug("Interrupting");
if (threadDump) {
try {
DebugUtils.getInstance().threadDump();
TimerUtil.guaranteedSleep(1000);
} catch (Exception e) {
}
}
thread.interrupt();
}
/** Request a thread dump before the thread is interrupted */
public void setThreadDump() {
threadDump = true;
}
}
/**
* Interrupt current thread in a while
* @param ms interval to wait before interrupting
* @return an Interrupter
*/
public Interrupter interruptMeIn(long ms) {
Interrupter i = new Interrupter(ms, Thread.currentThread());
i.start();
return i;
}
/**
* Interrupt current thread in a while, first printing a thread dump
* @param ms interval to wait before interrupting
* @param threadDump true if thread dump wanted
* @return an Interrupter
*/
public Interrupter interruptMeIn(long ms, boolean threadDump) {
Interrupter i = new Interrupter(ms, Thread.currentThread());
if (threadDump) {
i.setThreadDump();
}
i.start();
return i;
}
}
|
// This file is part of Serleena.
// Nicola Mometto, Filippo Sestini, Tobia Tesan, Sebastiano Valle.
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
package com.kyloth.serleena.sensors;
import android.content.Context;
import android.location.Location;
import com.kyloth.serleena.common.GeoPoint;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
* Concrete implementation of ILocationReachedManager.
*
* @author Filippo Sestini <sestini.filippo@gmail.com>
* @version 1.0.0
*/
class LocationReachedManager implements ILocationReachedManager {
private static final int LOCATION_RADIUS = 15;
// Assumed value: the update interval (in seconds) passed to the background
// location manager is not defined in this excerpt.
private static final int LOCATION_UPDATE_INTERVAL = 60;
private final Map<ILocationReachedObserver, GeoPoint> observers;
private final IBackgroundLocationManager bkgrLocMan;
/**
* Creates a LocationReachedManager object.
*
* The object relies on other device sensing resources, supplied to it as
* constructor parameters.
*
* @param bkgrLocMan Manager providing notifications about the user's location.
*/
public LocationReachedManager(IBackgroundLocationManager bkgrLocMan) {
if (bkgrLocMan == null)
throw new IllegalArgumentException("Illegal location manager");
this.bkgrLocMan = bkgrLocMan;
this.observers = new HashMap<>();
}
@Override
public synchronized void attachObserver(ILocationReachedObserver observer,
GeoPoint location)
throws IllegalArgumentException {
if (observer == null)
throw new IllegalArgumentException("Illegal null observer");
if (location == null)
throw new IllegalArgumentException("Illegal null location");
observers.put(observer, location);
if (observers.size() == 1)
bkgrLocMan.attachObserver(this, LOCATION_UPDATE_INTERVAL);
}
@Override
public synchronized void detachObserver(ILocationReachedObserver observer)
throws IllegalArgumentException {
if (observer == null)
throw new IllegalArgumentException("Illegal null observer");
if (observers.containsKey(observer)) {
observers.remove(observer);
if (observers.size() == 0)
bkgrLocMan.detachObserver(this);
}
}
}
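/*
 * Illustrative usage sketch (not from the original sources; the observer
 * variable and the GeoPoint coordinate constructor are assumptions): a
 * client registers to be notified when the user reaches a given location,
 * and detaches when it no longer cares.
 *
 *   ILocationReachedManager manager = new LocationReachedManager(bkgrLocMan);
 *   manager.attachObserver(checkpointObserver, new GeoPoint(45.276, 11.651));
 *   // ...
 *   manager.detachObserver(checkpointObserver);
 */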
|
package de.qaware.chronix.shared.ServerConfig;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.qaware.chronix.shared.dockerUtil.DockerBuildOptions;
import de.qaware.chronix.shared.dockerUtil.DockerRunOptions;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
@XmlRootElement
public class ServerConfigRecord {
private String serverAddress;
private LinkedList<DockerRunOptions> tsdbRunRecords;
private LinkedList<DockerBuildOptions> tsdbBuildRecords;
private LinkedList<String> externalTimeSeriesDataBaseImplementations;
private Map<String, String> tsdbDockerFilesDirectoryMap;
public ServerConfigRecord(){}
public ServerConfigRecord(String serverAddress){
this.serverAddress = serverAddress;
this.tsdbRunRecords = new LinkedList<>();
this.tsdbBuildRecords = new LinkedList<>();
this.externalTimeSeriesDataBaseImplementations = new LinkedList<>();
this.tsdbDockerFilesDirectoryMap = new HashMap<>();
}
public ServerConfigRecord(ServerConfigRecord otherRecord){
this.serverAddress = otherRecord.getServerAddress();
this.tsdbRunRecords = new LinkedList<>(otherRecord.getTsdbRunRecords());
this.tsdbBuildRecords = new LinkedList<>(otherRecord.getTsdbBuildRecords());
this.externalTimeSeriesDataBaseImplementations = new LinkedList<>(otherRecord.getExternalTimeSeriesDataBaseImplementations());
this.tsdbDockerFilesDirectoryMap = new HashMap<>(otherRecord.getTsdbDockerFilesDirectoryMap());
}
//setter
public void setServerAddress(String serverAddress) {
this.serverAddress = serverAddress;
}
public void setTsdbRunRecords(LinkedList<DockerRunOptions> tsdbRunRecords) {
this.tsdbRunRecords = tsdbRunRecords;
}
public void setTsdbBuildRecords(LinkedList<DockerBuildOptions> tsdbBuildRecords) {
this.tsdbBuildRecords = tsdbBuildRecords;
}
public void setExternalTimeSeriesDataBaseImplementations(LinkedList<String> externalTimeSeriesDataBaseImplementations) {
this.externalTimeSeriesDataBaseImplementations = externalTimeSeriesDataBaseImplementations;
}
public void setTsdbDockerFilesDirectoryMap(Map<String, String> tsdbDockerFilesDirectoryMap) {
this.tsdbDockerFilesDirectoryMap = tsdbDockerFilesDirectoryMap;
}
//getter
public String getServerAddress() {
return serverAddress;
}
public LinkedList<DockerRunOptions> getTsdbRunRecords() {
return tsdbRunRecords;
}
public LinkedList<DockerBuildOptions> getTsdbBuildRecords() {
return tsdbBuildRecords;
}
public LinkedList<String> getExternalTimeSeriesDataBaseImplementations() {
return externalTimeSeriesDataBaseImplementations;
}
public Map<String, String> getTsdbDockerFilesDirectoryMap() {
return tsdbDockerFilesDirectoryMap;
}
@JsonIgnore
@Override
public boolean equals(Object o){
return o instanceof ServerConfigRecord
&& ((ServerConfigRecord) o).getServerAddress().equals(serverAddress);
}
@JsonIgnore
@Override
public int hashCode(){
return serverAddress.hashCode();
}
}
|
package org.semagrow.connector.sparql.selector;
import org.semagrow.art.Loggable;
import org.semagrow.selector.Site;
import org.semagrow.selector.SourceMetadata;
import org.semagrow.selector.SourceSelector;
import org.semagrow.selector.SourceSelectorWrapper;
import org.semagrow.connector.sparql.SPARQLSite;
import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.Dataset;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.algebra.StatementPattern;
import org.eclipse.rdf4j.query.algebra.TupleExpr;
import org.eclipse.rdf4j.query.algebra.helpers.StatementPatternCollector;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.semagrow.connector.sparql.execution.SPARQLRepository;
import java.net.URL;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.*;
/**
* ASK Source Selector.
*
* <p>Implementation of SourceSelector that tries to execute ASK queries to identify
* the data sources that hold triples that match the given triple patterns. This class
* extends SourceSelectorWrapper, and thus relies on a wrapped SourceSelector that
* provides the initial list of candidate data sources.
*
* <p>Note that if any exceptions are thrown when connecting to the remote data sources,
* this SourceSelector simply returns "true" (matching triples exist). This avoids
* rejecting data sources that hold relevant triples because of transient errors.
*
* @author Antonios Troumpoukis
* @author Stasinos Konstantopoulos
*/
public class AskSourceSelector extends SourceSelectorWrapper implements SourceSelector
{
private org.slf4j.Logger logger =
org.slf4j.LoggerFactory.getLogger( AskSourceSelector.class );
static private ExecutorService executor;
public AskSourceSelector( SourceSelector selector ) {
super( selector );
executor = Executors.newCachedThreadPool();
}
/*
* SourceSelector IMPLEMENTATION
*/
@Override
public Collection<SourceMetadata> getSources(StatementPattern pattern, Dataset dataset, BindingSet bindings )
{
Collection<SourceMetadata> list = super.getSources( pattern, dataset, bindings );
return restrictSourceList( pattern, list );
}
private Collection<SourceMetadata> getSources( Iterable<StatementPattern> patterns, Dataset dataset, BindingSet bindings )
{
Collection<SourceMetadata> list = new LinkedList<SourceMetadata>();
for( StatementPattern p : patterns ) {
list.addAll( this.getSources(p, dataset, bindings) );
}
return list;
}
@Override
public Collection<SourceMetadata> getSources( TupleExpr expr, Dataset dataset, BindingSet bindings )
{
if( expr instanceof StatementPattern ) {
return getSources((StatementPattern)expr, dataset, bindings);
}
Collection<StatementPattern> patterns = StatementPatternCollector.process( expr );
return getSources( patterns, dataset, bindings );
}
/*
* PRIVATE HELPERS
*/
/**
* This method returns a list of {@link SourceMetadata} objects that refer to data sources that
* contain at least one triple that matches {@code pattern}. The input {@code list} of candidate
* data sources is not modified.
* <p>
* This method is the entry point to the specific functionality of this class, and all the methods
* above that implement the SourceSelector interface must use this method. This allows all performance
* related logging to be implemented here.
* @param pattern The triple pattern that guides data source selection
* @param list The list of candidate data sources
* @return The subset of the data sources in list that contain triples matching the pattern
*/
@Loggable
private Collection<SourceMetadata> restrictSourceList( StatementPattern pattern, Collection<SourceMetadata> list )
{
Collection<SourceMetadata> restrictedList = new LinkedList<SourceMetadata>();
Collection<Callable<SourceMetadata>> todo = new LinkedList<Callable<SourceMetadata>>();
for(SourceMetadata metadata : list) {
Collection<Site> sources = metadata.getSites();
SourceMetadata m = metadata;
Callable<SourceMetadata> f = () -> {
if (sources.iterator().next() instanceof SPARQLSite) {
boolean ask = askPattern(pattern, ((SPARQLSite) sources.iterator().next()).getURL(), true);
return ask ? m : null;
}
else {
return m;
}
};
todo.add(f);
}
try {
List<Future<SourceMetadata>> list1 = executor.invokeAll(todo);
for (Future<SourceMetadata> fut : list1) {
if (fut.isDone()) {
SourceMetadata m = null;
try {
m = fut.get();
} catch (ExecutionException e) {
logger.info( "AskSourceSelector Future execution", e);
}
if (m != null) {
restrictedList.add(m);
}
}
}
} catch (InterruptedException e) {
logger.info( "AskSourceSelector interrupted", e);
}
return restrictedList;
}
/**
* This method checks whether a SPARQL endpoint serves at least one triple that matches {@code pattern}.
* Not all endpoint implementations support ASK queries. If the ASK query fails, this method can
* fall back to querying {@code SELECT * WHERE { pattern } LIMIT 1}. Note that this can be very slow
* for some endpoint implementations, unfortunately usually those that do not support ASK in the
* first place.
* @param pattern The triple pattern to check
* @param source The URL of the SPARQL endpoint
* @param allow_select If true, the method is allowed to fall back to SELECT
* @return false if it has been established that {@code source} does not contain any matching triples, true otherwise
*/
private boolean askPattern(StatementPattern pattern, URL source, boolean allow_select )
{
boolean retv;
Value s = pattern.getSubjectVar().getValue();
Value p = pattern.getPredicateVar().getValue();
Value o = pattern.getObjectVar().getValue();
Value c = pattern.getContextVar() != null ? pattern.getContextVar().getValue() : null;
Repository rep;
retv = true;
rep = new SPARQLRepository( source.toString() );
RepositoryConnection conn = null;
try {
rep.initialize();
conn = rep.getConnection();
}
catch( org.eclipse.rdf4j.repository.RepositoryException ex ) {
// Failed to contact source
// Log a warning and reply "true" just in case this is a transient failure
logger.warn( "Failed to contact source to ASK about pattern {}. Exception: {}",
pattern.toString(), ex.getMessage() );
}
if( conn!= null ) {
try {
retv = (c != null && c instanceof IRI) ?
conn.hasStatement( (Resource)s, (IRI)p, o, true, (IRI) c ) :
conn.hasStatement( (Resource)s, (IRI)p, o, true);
allow_select = false; // No need to use this any more
}
catch( org.eclipse.rdf4j.repository.RepositoryException ex ) {
// Failed to contact source
// Log a warning and reply "true" just in case this is a transient failure
logger.warn( "Failed to contact source to ASK about pattern {}. Exception: {}",
pattern.toString(), ex.getMessage() );
}
}
if( allow_select && (conn!=null) ) {
String qs = "SELECT * WHERE { ?S ?P ?O } LIMIT 1";
if (c != null)
qs = "SELECT * WHERE { GRAPH ?C { ?S ?P ?O } } LIMIT 1";
TupleQuery q;
try {
q = conn.prepareTupleQuery( org.eclipse.rdf4j.query.QueryLanguage.SPARQL, qs );
if( s != null ) { q.setBinding( "S", s ); }
if( p != null ) { q.setBinding( "P", p ); }
if( o != null ) { q.setBinding( "O", o ); }
if (c != null ) { q.setBinding( "C", c ); }
}
catch( org.eclipse.rdf4j.query.MalformedQueryException ex ) {
throw new AssertionError();
// ASSERTION ERROR: This can never happen
}
catch( org.eclipse.rdf4j.repository.RepositoryException ex ) {
// Failed to contact source
// Log a warning and reply "true" just in case this is a transient failure
logger.warn( "Failed to contact source to ASK about pattern {}. Exception: {}",
pattern.toString(), ex.getMessage() );
q = null;
}
try {
retv = q.evaluate().hasNext();
}
catch( org.eclipse.rdf4j.query.QueryEvaluationException ex ) {
// Failed to contact source
// Log a warning and reply "true" just in case this is a transient failure
logger.warn( "Failed to contact source to execute query {}. Exception: {}",
q.toString(), ex.getMessage() );
}
catch( NullPointerException ex ) { /* NOOP: q was not prepared above */ }
}
try { conn.close(); }
catch( NullPointerException ex ) { /* NOOP: the connection failed to open above */ }
catch( org.eclipse.rdf4j.repository.RepositoryException ex ) { }
return retv;
}
}
|
package com.yahoo.squidb.sample.modules;
import android.content.Context;
import android.net.Uri;
import com.yahoo.squidb.data.AbstractModel;
import com.yahoo.squidb.data.SquidDatabase;
import com.yahoo.squidb.data.UriNotifier;
import com.yahoo.squidb.sample.HelloSquiDBApplication;
import com.yahoo.squidb.sample.TaskListActivity;
import com.yahoo.squidb.sample.database.TasksDatabase;
import com.yahoo.squidb.sample.models.Tag;
import com.yahoo.squidb.sample.models.Task;
import com.yahoo.squidb.sql.SqlTable;
import java.util.Set;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
@Module(injects = {TaskListActivity.class})
public class HelloSquiDBModule {
private Context applicationContext;
HelloSquiDBModule(HelloSquiDBApplication app) {
this.applicationContext = app;
}
@Provides
@ForApplicaton
Context provideContext() {
return applicationContext;
}
@Provides
@Singleton
// We want the database instance to be a singleton
TasksDatabase provideTasksDatabase(@ForApplicaton Context context) {
TasksDatabase database = new TasksDatabase(context);
// Setting up a UriNotifier will send ContentObserver notifications to Uris on table writes
database.registerDataChangedNotifier(new UriNotifier(Task.TABLE, Tag.TABLE) {
@Override
protected boolean accumulateNotificationObjects(Set<Uri> uris, SqlTable<?> table, SquidDatabase database,
DBOperation operation, AbstractModel modelValues, long rowId) {
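// Writes to either the Task or Tag table notify observers of Task.CONTENT_URI so the task list can refresh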
return uris.add(Task.CONTENT_URI);
}
});
return database;
}
}
|
package com.insightfullogic.honest_profiler.delivery.javafx.profile;
import com.insightfullogic.honest_profiler.core.ProfileListener;
import com.insightfullogic.honest_profiler.core.collector.Profile;
import com.insightfullogic.honest_profiler.core.filters.ProfileFilter;
import javafx.application.Platform;
import org.slf4j.Logger;
public class CachingProfileListener implements ProfileListener {
private final Logger logger;
private final FlatViewModel flatModel;
private final TreeViewModel treeModel;
private final TraceCountViewModel countModel;
private final ProfileFilter profileFilter;
private Profile lastProfile;
public CachingProfileListener(
final Logger logger,
final FlatViewModel flatModel,
final TreeViewModel treeModel,
final TraceCountViewModel countModel,
final ProfileFilter profileFilter) {
this.logger = logger;
this.flatModel = flatModel;
this.treeModel = treeModel;
this.countModel = countModel;
this.profileFilter = profileFilter;
}
@Override
public void accept(Profile profile) {
lastProfile = profile;
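// Apply any active filters to the profile before handing it to the view models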
profileFilter.accept(profile);
// All UI updates must go through here.
onFxThread(() -> {
flatModel.accept(profile);
treeModel.accept(profile);
countModel.accept(profile);
});
}
public void reflushLastProfile() {
if (lastProfile != null) {
accept(lastProfile);
}
}
// ViewModel instances can happily update the UI
// without worrying about threading implications
private void onFxThread(final Runnable block) {
try {
if (Platform.isFxApplicationThread()) {
block.run();
} else {
Platform.runLater(block);
}
} catch (Throwable t) {
logger.error(t.getMessage(), t);
}
}
}
|
package de.fernunihagen.dna.scalephant.distribution.membership;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import de.fernunihagen.dna.scalephant.ScalephantService;
import de.fernunihagen.dna.scalephant.distribution.membership.event.DistributedInstanceAddEvent;
import de.fernunihagen.dna.scalephant.distribution.membership.event.DistributedInstanceChangedEvent;
import de.fernunihagen.dna.scalephant.distribution.membership.event.DistributedInstanceDeleteEvent;
import de.fernunihagen.dna.scalephant.distribution.membership.event.DistributedInstanceEvent;
import de.fernunihagen.dna.scalephant.distribution.membership.event.DistributedInstanceEventCallback;
import de.fernunihagen.dna.scalephant.distribution.membership.event.DistributedInstanceState;
import de.fernunihagen.dna.scalephant.network.client.ScalephantClient;
public class MembershipConnectionService implements ScalephantService, DistributedInstanceEventCallback {
/**
* The server connections
*/
protected final Map<InetSocketAddress, ScalephantClient> serverConnections;
/**
* The known instances
*/
protected final Map<InetSocketAddress, DistributedInstance> knownInstances;
/**
* The blacklisted instances; no connection will be created to these systems
*/
protected Set<InetSocketAddress> blacklist = new HashSet<>();
protected static MembershipConnectionService instance = null;
/**
* The Logger
*/
private final static Logger logger = LoggerFactory.getLogger(MembershipConnectionService.class);
private MembershipConnectionService() {
final HashMap<InetSocketAddress, ScalephantClient> connectionMap = new HashMap<InetSocketAddress, ScalephantClient>();
serverConnections = Collections.synchronizedMap(connectionMap);
final HashMap<InetSocketAddress, DistributedInstance> instanceMap = new HashMap<InetSocketAddress, DistributedInstance>();
knownInstances = Collections.synchronizedMap(instanceMap);
}
/**
* Get the instance of the membership connection service
* @return The singleton instance of the membership connection service
*/
public static synchronized MembershipConnectionService getInstance() {
if(instance == null) {
instance = new MembershipConnectionService();
}
return instance;
}
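// Typical usage (illustrative sketch; the caller and the instance variable are hypothetical):
//   MembershipConnectionService.getInstance().init();
//   ScalephantClient client = MembershipConnectionService.getInstance().getConnectionForInstance(instance);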
@Override
protected Object clone() throws CloneNotSupportedException {
throw new CloneNotSupportedException("Unable to clone a singleton");
}
/**
* Add a system to the blacklist
* @param distributedInstance
*/
public void addSystemToBlacklist(final DistributedInstance distributedInstance) {
blacklist.add(distributedInstance.getInetSocketAddress());
}
/**
* Init the subsystem
*/
@Override
public void init() {
DistributedInstanceManager.getInstance().registerListener(this);
// Create connections to existing instances
final List<DistributedInstance> instances = DistributedInstanceManager.getInstance().getInstances();
if(instances.isEmpty()) {
logger.warn("The list of instances is empty");
}
for(final DistributedInstance distributedInstance : instances) {
createOrTerminateConnection(distributedInstance);
}
}
/**
* Shutdown the subsystem
*/
@Override
public void shutdown() {
DistributedInstanceManager.getInstance().removeListener(this);
// Close all connections
synchronized (serverConnections) {
for(final InetSocketAddress instance : serverConnections.keySet()) {
final ScalephantClient client = serverConnections.get(instance);
logger.info("Disconnecting from: " + instance);
client.disconnect();
}
serverConnections.clear();
knownInstances.clear();
}
}
/**
* Get the name for the subsystem
*/
@Override
public String getServicename() {
return "Mambership Connection Service";
}
/**
* Create or terminate the connection to a scalephant system, depending on its state
* @param distributedInstance
*/
protected synchronized void createOrTerminateConnection(final DistributedInstance distributedInstance) {
// Create only connections to readonly or readwrite systems
if(distributedInstance.getState() == DistributedInstanceState.UNKNOWN) {
terminateConnection(distributedInstance);
} else {
createConnection(distributedInstance);
}
}
/**
* Create a new connection to the given instance
* @param distributedInstance
*/
protected void createConnection(final DistributedInstance distributedInstance) {
if(serverConnections.containsKey(distributedInstance.getInetSocketAddress())) {
logger.info("We already have a connection to: " + distributedInstance);
return;
}
if(blacklist.contains(distributedInstance.getInetSocketAddress())) {
logger.info("Not creating a connection to the blacklisted sysetm: " + distributedInstance);
return;
}
logger.info("Opening connection to instance: " + distributedInstance);
final ScalephantClient client = new ScalephantClient(distributedInstance.getInetSocketAddress());
final boolean result = client.connect();
if(! result) {
logger.info("Unable to open connection to: " + distributedInstance);
} else {
logger.info("Connection successfully established: " + distributedInstance);
serverConnections.put(distributedInstance.getInetSocketAddress(), client);
knownInstances.put(distributedInstance.getInetSocketAddress(), distributedInstance);
}
}
/**
* Terminate the connection to a missing scalephant system
* @param distributedInstance
*/
protected synchronized void terminateConnection(final DistributedInstance distributedInstance) {
if(! serverConnections.containsKey(distributedInstance.getInetSocketAddress())) {
return;
}
logger.info("Closing connection to dead instance: " + distributedInstance);
knownInstances.remove(distributedInstance.getInetSocketAddress());
final ScalephantClient client = serverConnections.remove(distributedInstance.getInetSocketAddress());
client.disconnect();
}
/**
* Handle membership events
*/
@Override
public void distributedInstanceEvent(final DistributedInstanceEvent event) {
if(event instanceof DistributedInstanceAddEvent) {
createOrTerminateConnection(event.getInstance());
} else if(event instanceof DistributedInstanceChangedEvent) {
createOrTerminateConnection(event.getInstance());
} else if(event instanceof DistributedInstanceDeleteEvent) {
terminateConnection(event.getInstance());
} else {
logger.warn("Unknown event: " + event);
}
}
/**
* Get the connection for the instance
* @param instance
* @return The connection for the given instance, or null if there is no open connection
*/
public ScalephantClient getConnectionForInstance(final DistributedInstance instance) {
return serverConnections.get(instance.getInetSocketAddress());
}
/**
* Return the number of connections
* @return The number of open connections
*/
public int getNumberOfConnections() {
return serverConnections.size();
}
/**
* Get all connections
* @return A list with all open connections
*/
public List<ScalephantClient> getAllConnections() {
return new ArrayList<ScalephantClient>(serverConnections.values());
}
/**
* Get a list with all distributed instances we have connections to
* @return A list with all distributed instances we have open connections to
*/
public List<DistributedInstance> getAllInstances() {
return new ArrayList<DistributedInstance>(knownInstances.values());
}
}
|
package org.objectweb.proactive.core.component.controller;
import org.objectweb.proactive.annotation.PublicAPI;
/**
* The priority controller interface. This controller manages the priority of the methods exposed by the
* component interfaces. It is an optional controller. If it is added in the definition of the
* membrane, the default component activity will be
* {@link org.objectweb.proactive.core.component.body.ComponentActivityPriority ComponentActivityPriority},
* otherwise it will be
* {@link org.objectweb.proactive.core.component.body.ComponentActivity ComponentActivity}.
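* <p>
* Illustrative sketch (the interface and method names below are hypothetical):
* <pre>{@code
* PriorityController pc = ...; // obtained through the component's membrane
* // Give the non-functional "getState" request precedence over functional requests
* pc.setPriority("monitor-controller", "getState", RequestPriority.NF2);
* }</pre>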
*
* @author The ProActive Team
*/
@PublicAPI
//@snippet-start prioritycontroller
public interface PriorityController {
/**
* All the possible kinds of priority for a request on a component.
*
*/
public enum RequestPriority {
/**
* Functional priority
*/
F,
/**
* Non-Functional priority
*/
NF1,
/**
* Non-Functional priority higher than Functional priority (F)
*/
NF2,
/**
* Non-Functional priority higher than Functional priority (F) and the Non-Functional priorities
* NF1 and NF2
*/
NF3;
}
/**
* Set priority of all methods named 'methodName' in the interface 'interfaceName' to
* 'priority'.
*
* @param interfaceName
* Name of the component interface providing the method
* @param methodName
* Name of the method on which set the priority
* @param priority
* The priority
*/
public void setPriority(String interfaceName, String methodName, RequestPriority priority);
/**
* Set priority of the method named 'methodName' with the signature defined by 'parametersTypes'
* in the interface 'interfaceName' to 'priority'.
*
* @param interfaceName
* Name of the component interface providing the method
* @param methodName
* Name of the method on which set the priority
* @param parametersTypes
* The types of the method's parameters
* @param priority
* The priority
*/
public void setPriority(String interfaceName, String methodName, Class<?>[] parametersTypes,
RequestPriority priority);
/**
* Get the priority for a given method.
*
* @param interfaceName
* Name of the component interface
* @param methodName
* Name of the method
* @param parametersTypes
* The types of the method's parameters
* @return The priority of the given method
*/
public RequestPriority getPriority(String interfaceName, String methodName, Class<?>[] parametersTypes);
}
//@snippet-end prioritycontroller
|
package ca.corefacility.bioinformatics.irida.ria.integration.projects;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.support.AnnotationConfigContextLoader;
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import ca.corefacility.bioinformatics.irida.config.IridaApiPropertyPlaceholderConfig;
import ca.corefacility.bioinformatics.irida.config.data.IridaApiJdbcDataSourceConfig;
import ca.corefacility.bioinformatics.irida.ria.integration.pages.LoginPage;
import ca.corefacility.bioinformatics.irida.ria.integration.pages.projects.ProjectSamplesPage;
import ca.corefacility.bioinformatics.irida.ria.integration.utilities.TestUtilities;
import com.github.springtestdbunit.DbUnitTestExecutionListener;
import com.github.springtestdbunit.annotation.DatabaseSetup;
import com.github.springtestdbunit.annotation.DatabaseTearDown;
import com.google.common.collect.ImmutableList;
/**
* <p> Integration test to ensure that the Project Samples Page behaves as expected. </p>
*
* @author Josh Adam <josh.adam@phac-aspc.gc.ca>
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(loader = AnnotationConfigContextLoader.class, classes = { IridaApiJdbcDataSourceConfig.class,
IridaApiPropertyPlaceholderConfig.class })
@TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, DbUnitTestExecutionListener.class })
@ActiveProfiles("it")
@DatabaseSetup("/ca/corefacility/bioinformatics/irida/ria/web/projects/ProjectSamplesView.xml")
@DatabaseTearDown("classpath:/ca/corefacility/bioinformatics/irida/test/integration/TableReset.xml")
public class ProjectSamplesPageIT {
private static final Logger logger = LoggerFactory.getLogger(ProjectSamplesPageIT.class);
private WebDriver driver;
private ProjectSamplesPage page;
@Before
public void setUp() {
driver = TestUtilities.setDriverDefaults(new ChromeDriver());
this.page = new ProjectSamplesPage(driver);
}
@After
public void destroy() {
driver.quit();
}
@Test
public void testInitialPageSetUp() {
logger.info("Testing page set up for: Project Samples");
LoginPage.loginAsAdmin(driver);
page.goToPage();
assertTrue(page.getTitle().contains("Samples"));
assertEquals(10, page.getNumberOfSamplesDisplayed());
}
@Test
public void testPaging() {
logger.info("Testing paging for: Project Samples");
LoginPage.loginAsAdmin(driver);
page.goToPage();
// Initial setup
assertFalse(page.isFirstButtonEnabled());
assertFalse(page.isPreviousButtonEnabled());
assertTrue(page.isNextButtonEnabled());
assertTrue(page.isLastButtonEnabled());
assertEquals(1, page.getGetSelectedPageNumber());
// Second Page
page.selectPage(2);
assertEquals(2, page.getGetSelectedPageNumber());
assertTrue(page.isFirstButtonEnabled());
assertTrue(page.isPreviousButtonEnabled());
assertTrue(page.isNextButtonEnabled());
assertTrue(page.isLastButtonEnabled());
assertEquals(10, page.getNumberOfSamplesDisplayed());
// Third Page (1 element)
page.selectPage(3);
assertTrue(page.isFirstButtonEnabled());
assertTrue(page.isPreviousButtonEnabled());
assertFalse(page.isNextButtonEnabled());
assertFalse(page.isLastButtonEnabled());
assertEquals(3, page.getGetSelectedPageNumber());
assertEquals(1, page.getNumberOfSamplesDisplayed());
// Previous Button
page.clickPreviousPageButton();
assertEquals(2, page.getGetSelectedPageNumber());
page.clickPreviousPageButton();
assertEquals(1, page.getGetSelectedPageNumber());
// Next Button
page.clickNextPageButton();
assertEquals(2, page.getGetSelectedPageNumber());
page.clickNextPageButton();
assertEquals(3, page.getGetSelectedPageNumber());
// First and Last page buttons
page.clickFirstPageButton();
assertEquals(1, page.getGetSelectedPageNumber());
assertFalse(page.isFirstButtonEnabled());
page.clickLastPageButton();
assertEquals(3, page.getGetSelectedPageNumber());
assertFalse(page.isLastButtonEnabled());
assertTrue(page.isFirstButtonEnabled());
assertEquals(1, page.getNumberOfSamplesDisplayed());
}
@Test
public void testSelectSamples() {
logger.info("Testing selecting samples for: Project Samples");
LoginPage.loginAsAdmin(driver);
page.goToPage();
assertEquals(0, page.getNumberOfSamplesSelected());
selectFirstThreeSamples();
assertEquals(3, page.getNumberOfSamplesSelected());
page.selectSampleByRow(1);
assertEquals(2, page.getNumberOfSamplesSelected());
}
@Test
public void testPagingWithSelectingSamples() {
logger.info("Testing paging with selecting samples for: Project Samples");
List<Integer> page1 = ImmutableList.of(0, 1, 6);
LoginPage.loginAsAdmin(driver);
page.goToPage();
assertEquals(0, page.getNumberOfSamplesSelected());
page1.forEach(page::selectSampleByRow);
assertEquals(3, page.getNumberOfSamplesSelected());
assertTrue(page.isRowSelected(6));
// Let's go to the second page
page.clickNextPageButton();
for (int row : page1) {
assertFalse(page.isRowSelected(row));
}
assertEquals(0, page.getNumberOfSamplesSelected());
page.selectSampleByRow(2);
// Let's jump around a little
jumpAroundLists();
// Make sure samples are still selected on the first page
page.selectPage(1);
for (int row : page1) {
assertTrue(page.isRowSelected(row));
}
assertEquals(3, page.getNumberOfSamplesSelected());
// Deselect first page samples
page1.forEach(page::selectSampleByRow);
assertEquals(0, page.getNumberOfSamplesSelected());
jumpAroundLists();
page.selectPage(1);
assertEquals(0, page.getNumberOfSamplesSelected());
}
@Test
public void testSelectedSampleCount() {
LoginPage.loginAsAdmin(driver);
page.goToPage();
assertEquals(0, page.getTotalSelectedSamplesCount());
page.selectSampleByRow(0);
page.selectSampleByRow(1);
assertEquals(2, page.getTotalSelectedSamplesCount());
page.clickNextPageButton();
assertEquals(2, page.getTotalSelectedSamplesCount());
page.selectSampleByRow(5);
assertEquals(3, page.getTotalSelectedSamplesCount());
page.clickLastPageButton();
assertEquals(3, page.getTotalSelectedSamplesCount());
page.selectSampleByRow(0);
assertEquals(4, page.getTotalSelectedSamplesCount());
page.selectSampleByRow(0);
assertEquals(3, page.getTotalSelectedSamplesCount());
page.clickFirstPageButton();
assertEquals(3, page.getTotalSelectedSamplesCount());
page.selectSampleByRow(0);
page.selectSampleByRow(1);
assertEquals(1, page.getTotalSelectedSamplesCount());
page.clickLastPageButton();
assertEquals(1, page.getTotalSelectedSamplesCount());
}
@Test
public void testDefaultMerge() {
LoginPage.loginAsAdmin(driver);
page.goToPage();
assertEquals(0, page.getTotalSelectedSamplesCount());
assertFalse(page.isBtnEnabled("samplesOptionsBtn"));
page.selectSampleByRow(0);
page.selectSampleByRow(1);
assertEquals(2, page.getTotalSelectedSamplesCount());
assertTrue(page.isBtnEnabled("samplesOptionsBtn"));
page.clickBtn("samplesOptionsBtn");
page.clickBtn("mergeBtn");
assertTrue(page.isItemVisible("merge-samples-modal"));
page.clickBtn("confirmMergeBtn");
assertTrue(page.checkSuccessNotification());
assertEquals(0, page.getTotalSelectedSamplesCount());
}
@Test
public void testRenameMerge() {
LoginPage.loginAsAdmin(driver);
page.goToPage();
assertEquals(0, page.getTotalSelectedSamplesCount());
assertFalse(page.isBtnEnabled("samplesOptionsBtn"));
page.selectSampleByRow(0);
page.selectSampleByRow(1);
assertEquals(2, page.getTotalSelectedSamplesCount());
page.clickBtn("samplesOptionsBtn");
page.clickBtn("mergeBtn");
assertTrue(page.isItemVisible("merge-samples-modal"));
// Try entering a name that is too short
assertTrue(page.isBtnEnabled("confirmMergeBtn"));
page.enterNewMergeSampleName("HI");
assertTrue(page.isItemVisible("merge-length-error"));
assertFalse(page.isBtnEnabled("confirmMergeBtn"));
// Try entering a name with spaces
page.enterNewMergeSampleName("HI BOB I AM WRONG");
assertTrue(page.isItemVisible("merge-format-error"));
assertFalse(page.isBtnEnabled("confirmMergeBtn"));
// Try to enter a proper name
String oriName = page.getSampleNameByRow(0);
String newLongName = "LONGERNAME";
page.enterNewMergeSampleName(newLongName);
assertFalse(page.isItemVisible("merge-length-error"));
assertFalse(page.isItemVisible("merge-format-error"));
assertTrue(page.isBtnEnabled("confirmMergeBtn"));
page.clickBtn("confirmMergeBtn");
assertTrue(page.checkSuccessNotification());
String updatedName = page.getSampleNameByRow(0);
assertFalse(oriName.equals(updatedName));
assertTrue(updatedName.equals(newLongName));
}
@Test
public void testProjectUserCannotCopyOrMoveFilesToAnotherProject() {
LoginPage.loginAsUser(driver);
page.goToPage();
assertFalse(page.isElementOnScreen("copyBtn"));
assertFalse(page.isElementOnScreen("moveBtn"));
}
@Test
public void testCopySamplesAsManagerToManagedProject() {
LoginPage.login(driver, "project1Manager", "Password1");
// Make sure the project to copy to is empty to begin with
page.goToPage("2");
assertEquals(0, page.getNumberOfSamplesDisplayed());
page.goToPage();
assertTrue(page.isElementOnScreen("copyBtn"));
assertTrue(page.isElementOnScreen("moveBtn"));
// Should be able to copy files to a project that they are a manager of.
selectFirstThreeSamples();
page.clickBtn("samplesOptionsBtn");
page.clickBtn("copyBtn");
assertTrue(page.isItemVisible("copy-samples-modal"));
page.selectProjectByName("2", "confirm-copy-samples");
assertTrue(page.isBtnEnabled("confirm-copy-samples"));
page.clickBtn("confirm-copy-samples");
page.checkSuccessNotification();
// Check to make sure the samples were copied there
page.goToPage("2");
assertEquals(3, page.getNumberOfSamplesDisplayed());
}
@Test
public void testMoveSamplesAsManagerToManagedProject() {
LoginPage.login(driver, "project1Manager", "Password1");
// Make sure the project to move to is empty to begin with
page.goToPage("2");
assertEquals(0, page.getNumberOfSamplesDisplayed());
page.goToPage();
// Should be able to move files to a project that they are a manager of.
selectFirstThreeSamples();
page.clickBtn("samplesOptionsBtn");
page.clickBtn("moveBtn");
assertTrue(page.isItemVisible("move-samples-modal"));
page.selectProjectByName("2", "confirm-move-samples");
assertTrue(page.isBtnEnabled("confirm-move-samples"));
page.clickBtn("confirm-move-samples");
page.checkSuccessNotification();
// Check to make sure the samples were moved there
page.goToPage("2");
assertEquals(3, page.getNumberOfSamplesDisplayed());
}
@Test
public void testCopySamplesAsManagerToUnmanagedProject() {
LoginPage.login(driver, "project1Manager", "Password1");
page.goToPage();
assertFalse(page.isBtnEnabled("samplesOptionsBtn"));
// Should not be able to copy files to a project that they are not a manager of.
selectFirstThreeSamples();
page.clickBtn("samplesOptionsBtn");
page.clickBtn("copyBtn");
assertTrue(page.isItemVisible("copy-samples-modal"));
page.selectProjectByName("3", "confirm-copy-samples");
assertFalse("Since the project does not exist in the list, they cannot copy files to it.",
page.isBtnEnabled("confirm-copy-samples"));
}
@Test
public void testAdminCopyFromAnyProjectToAnyProject() {
LoginPage.loginAsAdmin(driver);
page.goToPage();
selectFirstThreeSamples();
//Admin is not on project5
page.clickBtn("samplesOptionsBtn");
page.clickBtn("copyBtn");
assertTrue(page.isItemVisible("copy-samples-modal"));
page.selectProjectByName("5", "confirm-copy-samples");
assertTrue(page.isBtnEnabled("confirm-copy-samples"));
page.clickBtn("confirm-copy-samples");
page.checkSuccessNotification();
// Check to make sure the samples were copied there
page.goToPage("5");
assertEquals(3, page.getNumberOfSamplesDisplayed());
}
@Test
public void testMultiSelection() {
LoginPage.loginAsAdmin(driver);
page.goToPage();
// Test selecting a page
assertEquals(0, page.getTotalNumberOfSamplesSelected());
page.clickBtn("selectBtn");
page.clickBtn("selectPageBtn");
assertEquals(10, page.getTotalNumberOfSamplesSelected());
// Test clearing the selections
page.clickBtn("selectBtn");
page.clickBtn("selectNoneBtn");
assertEquals(0, page.getTotalNumberOfSamplesSelected());
// Test select all
page.clickBtn("selectBtn");
page.clickBtn("selectAllBtn");
assertEquals(21, page.getTotalNumberOfSamplesSelected());
// Test clearing again
page.clickBtn("selectBtn");
page.clickBtn("selectNoneBtn");
assertEquals(0, page.getTotalNumberOfSamplesSelected());
// Select random samples on one page and then all on the second
selectFirstThreeSamples();
assertEquals(3, page.getTotalNumberOfSamplesSelected());
page.clickNextPageButton();
page.clickBtn("selectBtn");
page.clickBtn("selectPageBtn");
assertEquals(13, page.getTotalNumberOfSamplesSelected());
page.clickBtn("selectBtn");
page.clickBtn("selectAllBtn");
assertEquals(21, page.getTotalNumberOfSamplesSelected());
}
@Test
public void testExportLinker() {
LoginPage.loginAsAdmin(driver);
page.goToPage();
assertFalse(page.isBtnEnabled("exportOptionsBtn"));
page.selectSampleByRow(0);
assertTrue(page.isBtnEnabled("exportOptionsBtn"));
page.clickBtn("exportOptionsBtn");
page.clickBtn("exportLinkerBtn");
assertTrue(page.isItemVisible("linker-modal"));
assertEquals(1, getSampleFlagCount(page.getLinkerScriptText()));
page.clickBtn("linkerCloseBtn");
// Select all samples
page.clickBtn("selectBtn");
page.clickBtn("selectAllBtn");
page.clickBtn("exportOptionsBtn");
page.clickBtn("exportLinkerBtn");
assertEquals(0, getSampleFlagCount(page.getLinkerScriptText()));
page.clickBtn("linkerCloseBtn");
page.selectSampleByRow(0);
int selectedCount = page.getTotalSelectedSamplesCount();
page.clickBtn("exportOptionsBtn");
page.clickBtn("exportLinkerBtn");
String command = page.getLinkerScriptText();
assertEquals(selectedCount, getSampleFlagCount(command));
}
@Test
public void testTableSorts() {
LoginPage.loginAsAdmin(driver);
page.goToPage();
// Page should be sorted by creation date first
assertTrue(page.isTableSortedAscByCreationDate());
page.sortTableByCreatedDate();
assertFalse(page.isTableSortedAscByCreationDate());
assertTrue(page.isTableSortedDescByCreationDate());
// Sort by name
page.sortTableByName();
assertTrue(page.isTableSortedDescBySampleName());
page.sortTableByName();
assertFalse(page.isTableSortedDescBySampleName());
assertTrue(page.isTableSortedAscBySampleName());
}
@Test
public void testSampleFilter() {
LoginPage.loginAsAdmin(driver);
page.goToPage();
// Filter by name
page.filterByName("ple1");
assertEquals(1, page.getFilteredSampleCount());
page.filterByName("5");
assertEquals(17, page.getFilteredSampleCount());
page.filterByName(" ");
// Filter by organism
page.filterByOrganism("coli");
assertEquals(3, page.getFilteredSampleCount());
page.filterByOrganism("Listeria");
assertEquals(2, page.getFilteredSampleCount());
}
@Test
@Ignore
public void testGalaxyErrorMessages() {
LoginPage.loginAsAdmin(driver);
page.goToPage();
// Select samples
page.clickBtn("selectBtn");
page.clickBtn("selectAllBtn");
// Click send to Galaxy button
page.clickBtn("exportOptionsBtn");
page.clickBtn("exportGalaxyBtn");
assertTrue(page.isGalaxyModalOpen());
assertTrue(page.isGalaxySubmitBtnEnabled());
// Try empty email
page.updateGalaxyEmail("");
assertTrue(page.isEmailErrorRequiredMessageDisplayed());
assertFalse(page.isGalaxySubmitBtnEnabled());
// Try a bad email
page.updateGalaxyEmail("aaaa");
assertTrue(page.isEmailErrorFormatMessageDisaplayed());
assertFalse(page.isGalaxySubmitBtnEnabled());
// Send good email
page.updateGalaxyEmail("test@test.com");
assertTrue(page.isGalaxySubmitBtnEnabled());
// Remove the data library name
page.updateGalaxyDataLibraryInput("");
assertTrue(page.isLibraryErrorRequiredMessageDisplayed());
assertFalse(page.isGalaxySubmitBtnEnabled());
// Send good library name
page.updateGalaxyDataLibraryInput("TestLibrary");
assertTrue(page.isGalaxySubmitBtnEnabled());
}
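// Counts the "-s" (sample) flags in the linker command text produced by the export linker modal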
private int getSampleFlagCount(String command) {
Pattern pattern = Pattern.compile("-s");
Matcher matcher = pattern.matcher(command);
int count = 0;
while (matcher.find()) {
count++;
}
return count;
}
private void selectFirstThreeSamples() {
page.selectSampleByRow(0);
page.selectSampleByRow(1);
page.selectSampleByRow(2);
}
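// Jumps back and forth between pages; used by the selection tests to check that selections survive paging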
private void jumpAroundLists() {
page.selectPage(1);
page.selectPage(3);
page.selectPage(2);
page.selectPage(1);
page.selectPage(2);
}
}
|
package com.grayben.riskExtractor.htmlScorer.nodeVisitor.setup.annotation;
import com.grayben.riskExtractor.htmlScorer.partScorers.Scorer;
import org.jsoup.nodes.Attributes;
import org.jsoup.nodes.Element;
import org.jsoup.parser.Tag;
import java.util.*;
public class AnnotatedElement extends Element {
private Map<String, Integer> scores;
public Map<String, Integer> getScores() {
return Collections.unmodifiableMap(scores);
}
public AnnotatedElement(Element element, Map<String, Integer> scores) {
this(element.tag(), element.baseUri(), element.attributes(), scores);
}
public AnnotatedElement(Tag tag, String baseUri, Attributes attributes, Map<String, Integer> scores) {
super(tag, baseUri, attributes);
this.scores = scores;
}
public AnnotatedElement(Tag tag, String baseUri, Map<String, Integer> scores) {
super(tag, baseUri);
this.scores = scores;
}
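// Helper used in test setup: assembles the supplied elements into a randomly shaped tree,
// annotating every node with the cumulative scores accumulated along its path from the root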
public static class TreeAssembler {
//input fields
private final List<Element> elementsToAttach;
private final Set<? extends Scorer<Element>> elementScorers;
//output fields
private AnnotatedElement rootAnnotation;
public AnnotatedElement getRootAnnotation() {
return rootAnnotation;
}
//internal working fields
private Random random;
private Element currentElement;
private HashMap<String, Integer> currentIsolatedScores;
private AnnotatedElement parentAnnotation;
private HashMap<String, Integer> parentCumulativeScores;
private AnnotatedElement childAnnotation;
private HashMap<String, Integer> childCumulativeScores;
public TreeAssembler(
final List<Element> elementsToAttach,
final Set<? extends Scorer<Element>> elementScorers
) {
validateInitParams(elementsToAttach, elementScorers);
this.elementsToAttach = elementsToAttach;
this.elementScorers = elementScorers;
this.random = new Random(System.currentTimeMillis());
initialiseMaps();
//setup the first parent/child AnnotatedElement pair
plantSeedling();
assembleInTree();
}
private void validateInitParams(
List<Element> elements,
Set<? extends Scorer<Element>> elementScorers
) {
if (elements == null) {
throw new NullPointerException(
"The input elements list was null"
);
}
if (elements.size() < 2) {
throw new IllegalArgumentException(
"The input element list did not have at least 2 elements"
);
}
if (elementScorers == null) {
throw new NullPointerException(
"The input element scorers set was null"
);
}
if (elementScorers.isEmpty()) {
throw new IllegalArgumentException(
"The input element scorers set was empty"
);
}
}
private void initialiseMaps() {
//make sure all the fields are instantiated, contain keys and default values
currentIsolatedScores = new HashMap<>();
parentCumulativeScores = new HashMap<>();
childCumulativeScores = new HashMap<>();
for (Scorer<Element> scorer : this.elementScorers) {
currentIsolatedScores.put(scorer.getScoreLabel(), Scorer.DEFAULT_SCORE);
parentCumulativeScores.put(scorer.getScoreLabel(), Scorer.DEFAULT_SCORE);
childCumulativeScores.put(scorer.getScoreLabel(), Scorer.DEFAULT_SCORE);
}
}
private void plantSeedling() {
//parent
currentElement = elementsToAttach.remove(0);
currentIsolatedScores = isolatedScore(currentElement);
parentCumulativeScores = copyScores(currentIsolatedScores);
parentAnnotation = new AnnotatedElement(currentElement, parentCumulativeScores);
parentAnnotation.text(currentElement.ownText());
//child
currentElement = elementsToAttach.remove(0);
currentIsolatedScores = isolatedScore(currentElement);
childCumulativeScores = addScores(parentCumulativeScores, currentIsolatedScores);
childAnnotation = new AnnotatedElement(currentElement, childCumulativeScores);
childAnnotation.text(currentElement.ownText());
parentAnnotation.appendChild(childAnnotation);
//set root of hierarchy to be built
rootAnnotation = parentAnnotation;
}
private AnnotatedElement assembleInTree() {
while (!elementsToAttach.isEmpty()) {
currentElement = elementsToAttach.remove(0);
currentIsolatedScores = isolatedScore(currentElement);
//25% chance of moving down the tree
if (random.nextInt() % 4 == 0) {
parentCumulativeScores = childCumulativeScores;
childCumulativeScores = null;
parentAnnotation = childAnnotation;
childAnnotation = null;
}
childCumulativeScores = addScores(parentCumulativeScores, currentIsolatedScores);
childAnnotation = new AnnotatedElement(currentElement, childCumulativeScores);
childAnnotation.text(currentElement.ownText());
//50/50 chance: whether to append or prepend sibling
if(random.nextInt() % 2 == 0)
parentAnnotation.prependChild(childAnnotation);
else
parentAnnotation.appendChild(childAnnotation);
}
return rootAnnotation;
}
private HashMap<String, Integer> copyScores(HashMap<String, Integer> source) {
return new HashMap<>(source);
}
private HashMap<String, Integer> isolatedScore(Element element) {
HashMap<String, Integer> scores = new HashMap<>();
for (Scorer<Element> scorer : elementScorers) {
scores.put(scorer.getScoreLabel(), scorer.score(element));
}
return scores;
}
private HashMap<String, Integer> addScores(Map<String, Integer> map1, Map<String, Integer> map2) {
assert map1.keySet().equals(map2.keySet());
HashMap<String, Integer> mapSum = new HashMap<>();
for (Map.Entry<String, Integer> entry1 : map1.entrySet()) {
String key = entry1.getKey();
Integer score1 = entry1.getValue();
Integer score2 = map2.get(key);
Integer scoreSum = score1 + score2;
mapSum.put(key, scoreSum);
}
return mapSum;
}
}
}
|
// $Id: whichjava.java,v 1.3 2000-07-25 19:50:51 d3j191 Exp $
public class whichjava implements Runnable {
public static void main(String[] args) {
String version = System.getProperty("java.version");
System.out.println("The Java Version in your path is " + version);
if (version.indexOf("1.2") >= 0) { System.exit(0); }
if (version.indexOf("1.3") >= 0) { System.exit(0); }
System.out.println(" This code requires Java 1.2 or greater");
System.exit(1);
}
public void init() {}
public void start() {}
public void run() {}
}
|
package com.systematic.trading.strategy;
import com.systematic.trading.model.EquityClass;
import com.systematic.trading.strategy.Strategy;
import com.systematic.trading.strategy.StrategyFactory;
import com.systematic.trading.strategy.confirmation.Confirmation;
import com.systematic.trading.strategy.entry.Entry;
import com.systematic.trading.strategy.entry.TradingStrategyConfirmationEntry;
import com.systematic.trading.strategy.entry.TradingStrategyIndicatorEntry;
import com.systematic.trading.strategy.entry.TradingStrategyOperatorEntry;
import com.systematic.trading.strategy.entry.TradingStrategyPeriodicEntry;
import com.systematic.trading.strategy.entry.size.EntrySize;
import com.systematic.trading.strategy.exit.Exit;
import com.systematic.trading.strategy.exit.TradingStrategyExit;
import com.systematic.trading.strategy.exit.size.ExitSize;
import com.systematic.trading.strategy.indicator.Indicator;
import com.systematic.trading.strategy.operator.Operator;
import com.systematic.trading.strategy.operator.TradingStrategyAndOperator;
import com.systematic.trading.strategy.operator.TradingStrategyOrOperator;
import com.systematic.trading.strategy.periodic.Periodic;
/**
* Implementation of a TradingStrategyExpressionLanguage using a facade to aggregate specialist factories.
*
* @author CJ Hare
*/
public class TradingStrategyFactory implements StrategyFactory {
@Override
public Strategy strategy( final Entry entry, final EntrySize entryPositionSizing, final Exit exit,
final ExitSize exitPositionSizing, final EquityClass type, final int scale ) {
return new TradingStrategy(entry, entryPositionSizing, exit, exitPositionSizing, type, scale);
}
@Override
public Entry entry( final Entry leftEntry, final Operator operator, final Entry rightEntry ) {
return new TradingStrategyOperatorEntry(leftEntry, operator, rightEntry);
}
@Override
public Entry entry( final Entry anchor, final Confirmation confirmBy, final Entry confirmation ) {
return new TradingStrategyConfirmationEntry(anchor, confirmBy, confirmation);
}
@Override
public Entry entry( final Indicator indicator ) {
return new TradingStrategyIndicatorEntry(indicator);
}
@Override
public Entry entry( final Periodic periodic ) {
return new TradingStrategyPeriodicEntry(periodic);
}
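// Illustrative sketch (the indicator and periodic instances are hypothetical): entries compose, e.g.
//   Entry combined = entry(entry(rsiIndicator), operator(Operator.Selection.AND), entry(monthlyPeriodic));
// so that both signals are required before an entry is triggered.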
@Override
public Exit exit() {
return new TradingStrategyExit();
}
@Override
public Operator operator( final Operator.Selection operator ) {
switch (operator) {
case OR:
return new TradingStrategyOrOperator();
case AND:
default:
return new TradingStrategyAndOperator();
}
}
}
|
package org.sagebionetworks.bridge.stormpath;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.commons.httpclient.auth.AuthScope.ANY;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.SortedMap;
import javax.annotation.Resource;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.SimpleHttpConnectionManager;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthPolicy;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.sagebionetworks.bridge.config.BridgeConfig;
import org.sagebionetworks.bridge.config.BridgeConfigFactory;
import org.sagebionetworks.bridge.crypto.Encryptor;
import org.sagebionetworks.bridge.dao.AccountDao;
import org.sagebionetworks.bridge.exceptions.BadRequestException;
import org.sagebionetworks.bridge.exceptions.BridgeServiceException;
import org.sagebionetworks.bridge.exceptions.EntityAlreadyExistsException;
import org.sagebionetworks.bridge.exceptions.EntityNotFoundException;
import org.sagebionetworks.bridge.exceptions.ServiceUnavailableException;
import org.sagebionetworks.bridge.json.BridgeObjectMapper;
import org.sagebionetworks.bridge.models.Email;
import org.sagebionetworks.bridge.models.EmailVerification;
import org.sagebionetworks.bridge.models.PasswordReset;
import org.sagebionetworks.bridge.models.SignIn;
import org.sagebionetworks.bridge.models.SignUp;
import org.sagebionetworks.bridge.models.accounts.Account;
import org.sagebionetworks.bridge.models.studies.Study;
import org.sagebionetworks.bridge.models.studies.StudyIdentifier;
import org.sagebionetworks.bridge.services.StudyService;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.stormpath.sdk.account.AccountList;
import com.stormpath.sdk.account.Accounts;
import com.stormpath.sdk.application.Application;
import com.stormpath.sdk.authc.AuthenticationResult;
import com.stormpath.sdk.authc.UsernamePasswordRequest;
import com.stormpath.sdk.client.Client;
import com.stormpath.sdk.directory.Directory;
import com.stormpath.sdk.group.Group;
import com.stormpath.sdk.group.GroupMembership;
import com.stormpath.sdk.impl.resource.AbstractResource;
import com.stormpath.sdk.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component("stormpathAccountDao")
public class StormpathAccountDao implements AccountDao {
private static Logger logger = LoggerFactory.getLogger(StormpathAccountDao.class);
private Application application;
private Client client;
private StudyService studyService;
private SortedMap<Integer,Encryptor> encryptors = Maps.newTreeMap();
@Resource(name = "stormpathApplication")
public void setStormpathApplication(Application application) {
this.application = application;
}
@Resource(name = "stormpathClient")
public void setStormpathClient(Client client) {
this.client = client;
}
@Autowired
public void setStudyService(StudyService studyService) {
this.studyService = studyService;
}
@Resource(name="encryptorList")
public void setEncryptors(List<Encryptor> list) {
for (Encryptor encryptor : list) {
encryptors.put(encryptor.getVersion(), encryptor);
}
}
@Override
public Iterator<Account> getAllAccounts() {
Iterator<Account> combinedIterator = null;
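// Lazily chain the per-study account iterators so accounts are streamed one study at a time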
for (Study study : studyService.getStudies()) {
Iterator<Account> studyIterator = getStudyAccounts(study);
if (combinedIterator == null) {
combinedIterator = studyIterator;
} else {
combinedIterator = Iterators.concat(combinedIterator, studyIterator);
}
}
return combinedIterator;
}
@Override
public Iterator<Account> getStudyAccounts(Study study) {
checkNotNull(study);
Directory directory = client.getResource(study.getStormpathHref(), Directory.class);
return new StormpathAccountIterator(study, encryptors, directory.getAccounts().iterator());
}
@Override
public Account verifyEmail(StudyIdentifier study, EmailVerification verification) {
checkNotNull(study);
checkNotNull(verification);
try {
com.stormpath.sdk.account.Account acct = client.verifyAccountEmail(verification.getSptoken());
return (acct == null) ? null : new StormpathAccount(study, acct, encryptors);
} catch(ResourceException e) {
rethrowResourceException(e, null);
}
return null;
}
@Override
public void resendEmailVerificationToken(StudyIdentifier studyIdentifier, Email email) {
checkNotNull(email);
// This is painful, it's not in the Java SDK. I hope we can come back to this when it's in their SDK
// and move it over.
SimpleHttpConnectionManager manager = new SimpleHttpConnectionManager();
int status = 202; // The Stormpath resend method returns 202 "Accepted" when successful
byte[] responseBody = new byte[0];
try {
BridgeConfig config = BridgeConfigFactory.getConfig();
String bodyJson = "{\"login\":\""+email.getEmail()+"\"}";
HttpClient client = new HttpClient(manager);
PostMethod post = new PostMethod(this.application.getHref() + "/verificationEmails");
post.setRequestHeader("Accept", "application/json");
post.setRequestHeader("Content-Type", "application/json");
post.setRequestHeader("Bridge-Study", studyIdentifier.getIdentifier());
post.setRequestEntity(new StringRequestEntity(bodyJson, "application/json", "UTF-8"));
UsernamePasswordCredentials creds = new UsernamePasswordCredentials(
config.getStormpathId().trim(), config.getStormpathSecret().trim());
client.getState().setCredentials(ANY, creds);
client.getParams().setParameter(AuthPolicy.AUTH_SCHEME_PRIORITY, Lists.newArrayList(AuthPolicy.DIGEST));
client.getParams().setAuthenticationPreemptive(true);
status = client.executeMethod(post);
responseBody = post.getResponseBody();
} catch(ResourceException e) {
rethrowResourceException(e, null);
} catch(Throwable throwable) {
throw new BridgeServiceException(throwable);
} finally {
manager.shutdown();
}
// If it *wasn't* a 202, then there should be a JSON message included with the response...
if (status != 202) {
// One common response, that the email no longer exists, we have mapped to a 404, so do that
// here as well. Otherwise we treat it on the API side as a 503 error, a service unavailable problem.
JsonNode node = null;
try {
node = BridgeObjectMapper.get().readTree(responseBody);
} catch(IOException e) {
throw new BridgeServiceException(e);
}
String message = node.get("message").asText();
if (message.contains("does not match a known resource")) {
throw new EntityNotFoundException(Account.class);
}
throw new ServiceUnavailableException(message);
}
}
@Override
public void requestResetPassword(Study study, Email email) {
checkNotNull(study);
checkNotNull(email);
try {
Directory directory = client.getResource(study.getStormpathHref(), Directory.class);
application.sendPasswordResetEmail(email.getEmail(), directory);
} catch (ResourceException e) {
rethrowResourceException(e, null);
}
}
@Override
public void resetPassword(PasswordReset passwordReset) {
checkNotNull(passwordReset);
try {
com.stormpath.sdk.account.Account account = application.verifyPasswordResetToken(passwordReset.getSptoken());
account.setPassword(passwordReset.getPassword());
account.save();
} catch (ResourceException e) {
rethrowResourceException(e, null);
}
}
@Override
public Account authenticate(Study study, SignIn signIn) {
checkNotNull(study);
checkNotNull(signIn);
checkArgument(isNotBlank(signIn.getUsername()));
checkArgument(isNotBlank(signIn.getPassword()));
try {
Directory directory = client.getResource(study.getStormpathHref(), Directory.class);
UsernamePasswordRequest request = new UsernamePasswordRequest(signIn.getUsername(), signIn.getPassword(), directory);
AuthenticationResult result = application.authenticateAccount(request);
if (result.getAccount() != null) {
return new StormpathAccount(study.getStudyIdentifier(), result.getAccount(), encryptors);
}
} catch (ResourceException e) {
rethrowResourceException(e, null);
}
throw new BridgeServiceException("Authentication failed");
}
@Override
public Account getAccount(Study study, String email) {
checkNotNull(study);
checkArgument(isNotBlank(email));
Directory directory = client.getResource(study.getStormpathHref(), Directory.class);
AccountList accounts = directory.getAccounts(Accounts.where(Accounts.email().eqIgnoreCase(email))
.withCustomData().withGroups().withGroupMemberships());
if (accounts.iterator().hasNext()) {
com.stormpath.sdk.account.Account acct = accounts.iterator().next();
return new StormpathAccount(study.getStudyIdentifier(), acct, encryptors);
}
return null;
}
@Override
public void signUp(Study study, SignUp signUp, boolean sendEmail) {
checkNotNull(study);
checkNotNull(signUp);
com.stormpath.sdk.account.Account acct = client.instantiate(com.stormpath.sdk.account.Account.class);
Account account = new StormpathAccount(study.getStudyIdentifier(), acct, encryptors);
account.setUsername(signUp.getUsername());
account.setEmail(signUp.getEmail());
account.setFirstName(StormpathAccount.PLACEHOLDER_STRING);
account.setLastName(StormpathAccount.PLACEHOLDER_STRING);
acct.setPassword(signUp.getPassword());
if (signUp.getRoles() != null) {
account.getRoles().addAll(signUp.getRoles());
}
try {
Directory directory = client.getResource(study.getStormpathHref(), Directory.class);
directory.createAccount(acct, sendEmail);
if (!account.getRoles().isEmpty()) {
updateGroups(directory, account);
}
} catch(ResourceException e) {
rethrowResourceException(e, account);
}
}
@Override
public void updateAccount(Study study, Account account) {
checkNotNull(study);
checkNotNull(account);
com.stormpath.sdk.account.Account acct =((StormpathAccount)account).getAccount();
if (acct == null) {
throw new BridgeServiceException("Account has not been initialized correctly (use new account methods)");
}
try {
Directory directory = client.getResource(study.getStormpathHref(), Directory.class);
updateGroups(directory, account);
acct.getCustomData().save();
// This will throw an exception if the account object has not changed, which it may not have
// if this call was made simply to persist a change in the groups. To get around this, we dig
// into the implementation internals of the account because the Stormpath code is tracking the
// dirty state of the object.
AbstractResource res = (AbstractResource)acct;
if (res.isDirty()) {
acct.save();
}
} catch(ResourceException e) {
rethrowResourceException(e, account);
}
}
@Override
public void deleteAccount(Study study, String email) {
checkNotNull(study);
checkArgument(isNotBlank(email));
Account account = getAccount(study, email);
com.stormpath.sdk.account.Account acct =((StormpathAccount)account).getAccount();
acct.delete();
}
private void rethrowResourceException(ResourceException e, Account account) {
logger.info(String.format("Stormpath error: %s: %s", e.getCode(), e.getMessage()));
switch(e.getCode()) {
case 2001: // must be unique (email isn't unique)
throw new EntityAlreadyExistsException(account, "Account already exists.");
// These are validation errors, like "password doesn't include an upper-case character"
case 400:
case 2007:
case 2008:
throw new BadRequestException(e.getDeveloperMessage());
case 404:
case 7100: // Password is bad. Just return not found in this case.
case 7102: // Login attempt failed because the Account is not verified.
case 7104: // Account not found in the directory
case 2016: // Property value does not match a known resource. Somehow this equals not found.
throw new EntityNotFoundException(Account.class);
default:
throw new ServiceUnavailableException(e);
}
}
private void updateGroups(Directory directory, Account account) {
Set<String> roles = Sets.newHashSet(account.getRoles());
com.stormpath.sdk.account.Account acct = ((StormpathAccount)account).getAccount();
// Remove any memberships that don't match a role
for (GroupMembership membership : acct.getGroupMemberships()) {
String groupName = membership.getGroup().getName();
if (!roles.contains(groupName)) {
// In membership, but not the current list of roles... remove from memberships
membership.delete();
} else {
roles.remove(groupName);
}
}
// Any roles left over need to be added if the group exists
for (Group group : directory.getGroups()) {
String groupName = group.getName();
if (roles.contains(groupName)) {
// In roles, but not currently in membership... add to memberships
acct.addGroup(group);
}
}
}
}
|
package org.vaadin.addons.tuningdatefield.demo;
import static org.joda.time.DateTimeConstants.JULY;
import static org.joda.time.DateTimeConstants.JUNE;
import static org.joda.time.DateTimeConstants.MAY;
import static org.vaadin.addons.tuningdatefield.demo.Absence.AbsenceDuration.AFTERNOON;
import static org.vaadin.addons.tuningdatefield.demo.Absence.AbsenceDuration.FULLDAY;
import static org.vaadin.addons.tuningdatefield.demo.Absence.AbsenceDuration.MORNING;
import java.util.Locale;
import java.util.Set;
import javax.servlet.annotation.WebServlet;
import org.joda.time.LocalDate;
import org.joda.time.format.DateTimeFormat;
import org.vaadin.addons.tuningdatefield.CellItemCustomizerAdapter;
import org.vaadin.addons.tuningdatefield.TuningDateField;
import org.vaadin.addons.tuningdatefield.demo.Absence.AbsenceDuration;
import org.vaadin.addons.tuningdatefield.event.DateChangeEvent;
import org.vaadin.addons.tuningdatefield.event.DateChangeListener;
import com.google.gwt.thirdparty.guava.common.collect.Sets;
import com.vaadin.annotations.Theme;
import com.vaadin.annotations.VaadinServletConfiguration;
import com.vaadin.data.fieldgroup.FieldGroup;
import com.vaadin.data.util.BeanItem;
import com.vaadin.server.VaadinRequest;
import com.vaadin.server.VaadinServlet;
import com.vaadin.ui.Button;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.Button.ClickListener;
import com.vaadin.ui.FormLayout;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.UI;
import de.jollyday.Holiday;
import de.jollyday.HolidayManager;
@SuppressWarnings("serial")
@Theme("tuning_datefield_demo")
public class TuningDateFieldDemoUI extends UI {
private FormLayout layout;
|