package com.xruby.runtime.value;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.StringReader;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;
import java.util.regex.Pattern;
import antlr.RecognitionException;
import antlr.TokenStreamException;
import com.xruby.compiler.RubyCompiler;
import com.xruby.compiler.codegen.CompilationResults;
import com.xruby.compiler.codegen.NameFactory;
import com.xruby.runtime.builtin.RubyTypesUtil;
import com.xruby.runtime.javasupport.JavaClass;
import com.xruby.runtime.lang.*;
import com.xruby.runtime.lang.annotation.RubyLevelMethod;
import com.xruby.runtime.lang.annotation.RubyLevelModule;
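/**
 * Java-level implementation of Ruby's Kernel module: each method annotated with
 * @RubyLevelMethod backs the Ruby method of the given name (and aliases).
 */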
@RubyLevelModule(name="Kernel")
public class RubyKernelModule {
@RubyLevelMethod(name="==", alias={"equal?", "eql?"})
public static RubyValue opEqual(RubyValue receiver, RubyValue arg) {
return ObjectFactory.createBoolean(receiver == arg);
}
@RubyLevelMethod(name="===")
public static RubyValue objEqual(RubyValue receiver, RubyValue arg) {
if (receiver == arg) {
return RubyConstant.QTRUE;
} else {
boolean result = RubyAPI.callPublicOneArgMethod(receiver, arg, null, RubyID.equalID).isTrue();
return ObjectFactory.createBoolean(result);
}
}
@RubyLevelMethod(name="class")
public static RubyValue objRubyClass(RubyValue receiver) {
RubyClass klass = receiver.getRubyClass();
return klass != null ? klass.getRealClass() : ObjectFactory.NIL_VALUE;
}
// FIXME: Kernel_clone should be revised.
@RubyLevelMethod(name="clone", alias="dup")
public static RubyValue objClone(RubyValue receiver) {
return (RubyValue) receiver.clone();
}
@RubyLevelMethod(name="to_s")
public static RubyValue anyObjToS(RubyValue receiver) {
String className = receiver.getRubyClass().getName();
return ObjectFactory.createString("#<" + className + ":0x" + Integer.toHexString(receiver.hashCode()) + "x>");
}
@RubyLevelMethod(name="inspect")
public static RubyValue objInspect(RubyValue receiver) {
if (!(receiver instanceof RubyObject)) {
return RubyAPI.callPublicNoArgMethod(receiver, null, RubyID.toSID);
}
StringBuffer sb = new StringBuffer();
sb.append("
sb.append(receiver.getRubyClass().getRealClass().getName());
sb.append(":0x");
int hash = receiver.hashCode();
sb.append(Integer.toHexString(hash));
String sep = "";
Map vars = receiver.getInstanceVariables();
if (vars != null) {
for (Iterator iter = vars.keySet().iterator(); iter.hasNext();) {
RubyID id = (RubyID)iter.next();
sb.append(sep);
sb.append(" ");
sb.append(id.toString());
sb.append("=");
sb.append(((RubyString) RubyAPI.callPublicNoArgMethod((RubyValue) vars.get(id), null, RubyID.inspectID)).toString());
sep = ",";
}
}
sb.append(">");
return ObjectFactory.createString(sb.toString());
}
@RubyLevelMethod(name="methods")
public static RubyValue objMethods(RubyValue receiver) {
RubyArray a = new RubyArray();
receiver.collectMethodNames(a, RubyMethod.ALL);
return a;
}
@RubyLevelMethod(name="singleton_methods")
public static RubyValue objSingletonMethods(RubyValue receiver) {
return objSingletonMethod(receiver, true);
}
@RubyLevelMethod(name="singleton_methods")
public static RubyValue objSingletonMethods(RubyValue receiver, RubyValue arg) {
return objSingletonMethod(receiver, arg.isTrue());
}
private static RubyValue objSingletonMethod(RubyValue receiver, boolean all) {
RubyArray a = new RubyArray();
if(receiver.getRubyClass().isSingleton()) {
RubyClass rubyClass = receiver.getRubyClass();
rubyClass.collectOwnMethodNames(a, RubyMethod.PUBLIC);
rubyClass = rubyClass.getSuperClass();
if (all) {
while(rubyClass != null && rubyClass.isSingleton()) {
rubyClass.collectOwnMethodNames(a, RubyMethod.PUBLIC);
rubyClass = rubyClass.getSuperClass();
}
}
}
return a;
}
// FIXME: This method should be a module function.
@RubyLevelMethod(name="raise", alias="fail")
public static RubyValue raise(RubyValue value, RubyArray args) {
RubyExceptionValue e;
if (null == args) {
//With no arguments, raises the exception in $! or raises a RuntimeError if $! is nil.
RubyValue v = GlobalVariables.get("$!");
if (ObjectFactory.NIL_VALUE == v) {
e = new RubyExceptionValue(RubyRuntime.RuntimeErrorClass, "");
} else {
e = (RubyExceptionValue)v;
}
} else if (1 == args.size() && (args.get(0) instanceof RubyString)) {
//With a single String argument, raises a RuntimeError with the string as a message.
e = new RubyExceptionValue(RubyRuntime.RuntimeErrorClass, ((RubyString) args.get(0)).toString());
} else if (args.get(0) instanceof RubyExceptionValue) {
//Otherwise, the first parameter should be the name of an Exception class
//(or an object that returns an Exception when sent exception). The optional second
//parameter sets the message associated with the exception, and the third parameter
//is an array of callback information.
e = (RubyExceptionValue) args.get(0);
if (args.size() > 1) {
e.setMessage(((RubyString) args.get(1)).toString());
}
} else {
RubyClass v = (RubyClass) args.get(0);
e = new RubyExceptionValue(v, 1 == args.size() ? "" : ((RubyString) args.get(1)).toString());
}
throw new RubyException(e);
}
@RubyLevelMethod(name="exit", module=true)
public static RubyValue exit(RubyValue receiver) {
// TODO should raise SystemExit exception and call at_exit blocks
System.exit(0);
return ObjectFactory.NIL_VALUE;
}
@RubyLevelMethod(name="exit", module=true)
public static RubyValue exit(RubyValue receiver, RubyValue arg) {
// TODO should raise SystemExit exception and call at_exit blocks
int status;
if (arg == RubyConstant.QTRUE) {
status = 0;
} else if (arg == RubyConstant.QFALSE) {
status = 1;
} else {
status = arg.toInt();
}
System.exit(status);
return ObjectFactory.NIL_VALUE;
}
@RubyLevelMethod(name="loop", module=true)
public static RubyValue loop(RubyValue receiver, RubyArray args, RubyBlock block) {
if (null == block) {
throw new RubyException(RubyRuntime.LocalJumpErrorClass, "in `loop': no block given");
}
for (; ;) {
RubyValue v = block.invoke(receiver, args);
if (block.breakedOrReturned()) {
return v;
}
}
}
@RubyLevelMethod(name="kind_of?")
public static RubyValue kindOf(RubyValue receiver, RubyValue arg, RubyBlock block) {
return ObjectFactory.createBoolean(RubyAPI.isKindOf(arg, receiver));
}
@RubyLevelMethod(name="instance_of?")
public static RubyValue instance_of(RubyValue receiver, RubyValue arg) {
return ObjectFactory.createBoolean(receiver.getRubyClass().getRealClass() == arg);
}
@RubyLevelMethod(name="respond_to?")
public static RubyValue respond_to(RubyValue receiver, RubyArray args) {
if (null == args || args.size() < 1) {
int actual_argc = (null == args ) ? 0 : args.size();
throw new RubyException(RubyRuntime.ArgumentErrorClass, "in `respond_to': wrong number of arguments (" + actual_argc + " for 1)");
}
boolean include_private = (ObjectFactory.TRUE_VALUE == args.get(1));
RubyID mid = RubyID.intern(args.get(0).toStr());
return ObjectFactory.createBoolean(hasMethod(receiver, mid, include_private));
}
private static boolean hasMethod(RubyValue receiver, RubyID mid, boolean include_private) {
if (include_private) {
return (null != receiver.findMethod(mid));
} else {
return (null != receiver.findPublicMethod(mid));
}
}
@RubyLevelMethod(name="send", alias="__send__")
public static RubyValue send(RubyValue receiver, RubyBlock block) {
throw new RubyException(RubyRuntime.ArgumentErrorClass, "no method name given");
}
@RubyLevelMethod(name="send", alias="__send__")
public static RubyValue send(RubyValue receiver, RubyValue value, RubyBlock block) {
RubyID mid = RubyID.intern(value.toStr());
return RubyAPI.callNoArgMethod(receiver, block, mid);
}
// @RubyLevelMethod(name="send", alias="__send__")
// public static RubyValue send(RubyValue receiver, RubyValue arg0, RubyValue arg1, RubyBlock block) {
// RubyID mid = RubyID.intern(arg0.toStr());
// return RubyAPI.callOneArgMethod(receiver, arg1, block, mid);
// }
@RubyLevelMethod(name="send", alias="__send__")
public static RubyValue send(RubyValue receiver, RubyArray args, RubyBlock block) {
RubyValue method_name = args.delete_at(0);
RubyID mid = RubyID.intern(method_name.toStr());
if (args.size() == 1) {
return RubyAPI.callOneArgMethod(receiver, args.get(0), block, mid);
} else {
return RubyAPI.callMethod(receiver, args, block, mid);
}
}
@RubyLevelMethod(name="instance_eval")
public static RubyValue instanceEval(RubyValue receiver, RubyArray args, RubyBlock block) {
if (null == args && null == block) {
throw new RubyException(RubyRuntime.ArgumentErrorClass,
"block not supplied");
}
if (null != args) {
RubyString program_text = (RubyString) args.get(0);
RubyBinding binding = new RubyBinding();
binding.setScope((RubyModule) receiver);
binding.setSelf(receiver);
return eval(program_text, binding, null);
} else {
block.setSelf(receiver);
return block.invoke(receiver);
}
}
public static RubyValue eval(RubyString program_text, RubyBinding binding, String file_name) {
RubyCompiler compiler = new RubyCompiler(binding, false);
try {
CompilationResults codes = compiler.compileString(file_name, program_text.toString());
RubyProgram p = codes.getRubyProgram();
if (null != binding) {
return p.invoke(binding.getSelf(), binding.getVariables(), binding.getBlock(), binding.getScope());
} else {
return p.invoke();
}
} catch (RecognitionException e) {
throw new RubyException(RubyRuntime.SyntaxErrorClass, e.toString());
} catch (TokenStreamException e) {
throw new RubyException(RubyRuntime.SyntaxErrorClass, e.toString());
} catch (InstantiationException e) {
throw new RubyException(e.toString());
} catch (IllegalAccessException e) {
throw new RubyException(e.toString());
}
}
@RubyLevelMethod(name="eval", module=true)
public static RubyValue eval(RubyValue receiver, RubyArray args, RubyBlock block) {
RubyString program_text = (RubyString) args.get(0);
RubyBinding binding = null;
if (args.get(1) instanceof RubyBinding) {
binding = (RubyBinding) args.get(1);
}
String file_name = null;
if (args.size() > 2) {
file_name = ((RubyString)args.get(2)).toString();
}
return eval(program_text, binding, file_name);
}
@RubyLevelMethod(name="method")
public static RubyValue objMethod(RubyValue receiver, RubyValue arg) {
String method_name = arg.toStr();
RubyID mid = RubyID.intern(method_name);
RubyMethod m = receiver.findMethod(mid);
if (null == m) {
throw new RubyException(RubyRuntime.NameErrorClass, "public method '" + method_name + "' can not be found in '" + receiver.getRubyClass().getName() + "'");
}
return ObjectFactory.createMethod(receiver, method_name, m);
}
@RubyLevelMethod(name="public_methods")
public static RubyValue publicMethods(RubyValue receiver) {
RubyArray a = new RubyArray();
receiver.collectMethodNames(a, RubyMethod.PUBLIC);
return a;
}
@RubyLevelMethod(name="caller", module=true)
public static RubyValue call(RubyValue receiver) {
return FrameManager.caller(0);
}
@RubyLevelMethod(name="caller", module=true)
public static RubyValue call(RubyValue receiver, RubyValue arg) {
return FrameManager.caller(arg.toInt());
}
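// Kernel#throw: wrap the tag (a Symbol or String) plus its value in a throw-specific
// exception so a matching Kernel#catch further up the stack can intercept it.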
@RubyLevelMethod(name="throw", module=true)
public static RubyValue throwMethod(RubyValue receiver, RubyArray args) {
if (null == args || args.size() < 1) {
int actual_argc = (null == args ) ? 0 : args.size();
throw new RubyException(RubyRuntime.ArgumentErrorClass, "in `throw': wrong number of arguments (" + actual_argc + " for 1)");
}
RubyExceptionValue e;
if (args.get(0) instanceof RubySymbol ||
args.get(0) instanceof RubyString) {
e = new RubyExceptionValueForThrow(args.get(0), args.get(1));
} else if (args.get(0) instanceof RubyExceptionValue) {
e = (RubyExceptionValue)args.get(0);
} else if (args.get(0) instanceof RubyClass) {
RubyClass c = (RubyClass)args.get(0);
e = new RubyExceptionValue(c, c.getName() + " is not a symbol");
} else {
e = new RubyExceptionValue(RubyRuntime.ArgumentErrorClass, args.get(0).toString() + " is not a symbol");
}
throw new RubyException(e);
}
@RubyLevelMethod(name="catch", module=true)
public static RubyValue catchMethod(RubyValue receiver, RubyValue arg, RubyBlock block) {
if (!(arg instanceof RubySymbol)) {
throw new RubyException(RubyRuntime.ArgumentErrorClass, arg.toString() + " is not a symbol");
}
try {
block.invoke(receiver);
} catch (RubyException e) {
RubyValue ev = RubyAPI.convertRubyException2RubyValue(e);
if (ev instanceof RubyExceptionValueForThrow) {
RubyExceptionValueForThrow v = (RubyExceptionValueForThrow) ev;
if (v.isSameSymbol(arg)) {
return v.getReturnValue();
}
}
throw e;
}
return ObjectFactory.NIL_VALUE;
}
@RubyLevelMethod(name="untrace_var", module=true)
public static RubyValue untrace_var(RubyValue receiver, RubyArray args, RubyBlock block) {
if (null == args || args.size() < 1) {
int actual_argc = (null == args ) ? 0 : args.size();
throw new RubyException(RubyRuntime.ArgumentErrorClass, "in `untrace_var': wrong number of arguments (" + actual_argc + " for 1)");
}
if (!(args.get(0) instanceof RubySymbol)) {
throw new RubyException(RubyRuntime.ArgumentErrorClass, args.get(0).toString() + " is not a symbol");
}
String name = ((RubySymbol) args.get(0)).toString();
RubyValue v = args.get(1);
if (v == ObjectFactory.NIL_VALUE) {
GlobalVariables.removeAllTraceProc(name);
} else if (v instanceof RubyProc) {
GlobalVariables.removeTraceProc(name, (RubyProc) v);
}
return ObjectFactory.NIL_VALUE;
}
@RubyLevelMethod(name="trace_var", module=true)
public static RubyValue trace_var(RubyValue receiver, RubyArray args, RubyBlock block) {
if (null == args || args.size() < 1) {
int actual_argc = (null == args ) ? 0 : args.size();
throw new RubyException(RubyRuntime.ArgumentErrorClass, "in `trace_var': wrong number of arguments (" + actual_argc + " for 1)");
}
if (!(args.get(0) instanceof RubySymbol)) {
throw new RubyException(RubyRuntime.ArgumentErrorClass, args.get(0).toString() + " is not a symbol");
}
String name = ((RubySymbol) args.get(0)).toString();
RubyValue v = args.get(1);
if (v instanceof RubyProc) {
GlobalVariables.addTraceProc(name, (RubyProc) v);
} else if (null != block) {
GlobalVariables.addTraceProc(name, ObjectFactory.createProc(block));
} else {
throw new RubyException(RubyRuntime.ArgumentErrorClass, "tried to create Proc object without a block");
}
return ObjectFactory.NIL_VALUE;
}
@RubyLevelMethod(name="block_given?", alias="iterator?", module=true)
public static RubyValue blockGivenP(RubyValue receiver, RubyBlock block) {
return (null == block) ? ObjectFactory.FALSE_VALUE : ObjectFactory.TRUE_VALUE;
}
@RubyLevelMethod(name="Float", module=true)
public static RubyFloat toFloat(RubyValue receiver, RubyValue arg) {
return arg.toRubyFloat();
}
@RubyLevelMethod(name="Integer", module=true)
public static RubyInteger toInteger(RubyValue receiver, RubyValue arg) {
return arg.toRubyInteger();
}
private static long lastSeed_ = 0;
private static Random random_ = new Random();
@RubyLevelMethod(name="srand")
public static RubyValue srand(RubyValue receiver) {
// TODO seeds the generator using a combination of the time, the process id, and a sequence number.
return srand(0);
}
@RubyLevelMethod(name="srand")
public static RubyValue srand(RubyValue receiver, RubyValue arg) {
long seed;
if (arg instanceof RubyFixnum) {
seed = arg.toInt();
} else {
// FIXME: check
seed = ((RubyBignum)arg).longValue();
}
return srand(seed);
}
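// Reseed the shared generator and, like Ruby's Kernel#srand, return the previous seed.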
private static RubyValue srand(long seed) {
random_.setSeed(seed);
long r = lastSeed_;
lastSeed_ = seed;
return RubyBignum.bignorm(r);
}
@RubyLevelMethod(name="rand")
public static RubyValue rand(RubyValue receiver) {
// TODO seeds the generator using a combination of the time, the process id, and a sequence number.
return rand(0);
}
@RubyLevelMethod(name="rand")
public static RubyValue rand(RubyValue receiver, RubyValue arg) {
// TODO check
return rand(arg.toInt());
}
private static RubyValue rand(int max) {
if (max < 0) {
max = -max;
}
if (0 == max) {
return ObjectFactory.createFloat(random_.nextDouble());
} else {
return ObjectFactory.createFixnum(random_.nextInt(max));
}
}
@RubyLevelMethod(name="puts", module=true)
public static RubyValue puts(RubyValue receiver) {
return _puts(GlobalVariables.get("$stdout"));
}
@RubyLevelMethod(name="puts", module=true)
public static RubyValue puts(RubyValue receiver, RubyArray args) {
return _puts(GlobalVariables.get("$stdout"), args);
}
public static RubyValue _puts(RubyValue receiver) {
RubyAPI.callPublicOneArgMethod(receiver, ObjectFactory.createString("\n"), null, RubyID.writeID);
return ObjectFactory.NIL_VALUE;
}
public static RubyValue _puts(RubyValue receiver, RubyArray args) {
for (RubyValue arg : args) {
RubyString value;
if (ObjectFactory.NIL_VALUE == arg) {
value = ObjectFactory.createString("nil\n");
} else if (arg instanceof RubyString) {
value = (RubyString) arg;
value.appendString("\n");
} else {
RubyValue str = RubyAPI.callPublicNoArgMethod(arg, null, RubyID.toSID);
value = (RubyString) str;
value.appendString("\n");
}
// write each argument (with its trailing newline), not only the last one
RubyAPI.callPublicOneArgMethod(receiver, value, null, RubyID.writeID);
}
return ObjectFactory.NIL_VALUE;
}
@RubyLevelMethod(name="print", module=true)
public static RubyValue print(RubyValue receiver) {
return _print(GlobalVariables.get("$stdout"));
}
@RubyLevelMethod(name="print", module=true)
public static RubyValue print(RubyValue receiver, RubyArray args) {
return _print(GlobalVariables.get("$stdout"), args);
}
public static RubyValue _print(RubyValue receiver) {
// if no argument given, print `$_'
return _print(receiver, new RubyArray(GlobalVariables.get("$_")));
}
public static RubyValue _print(RubyValue receiver, RubyArray args) {
int size = args.size();
for (int i = 0; i < size; ++i) {
// insert the output field separator($,) if not nil
if (i > 0 && GlobalVariables.get("$,") != ObjectFactory.NIL_VALUE) {
RubyAPI.callPublicOneArgMethod(receiver, GlobalVariables.get("$,"), null, RubyID.writeID);
}
if (args.get(i) == ObjectFactory.NIL_VALUE) {
RubyAPI.callPublicOneArgMethod(receiver, ObjectFactory.createString("nil"), null, RubyID.writeID);
} else {
RubyAPI.callPublicOneArgMethod(receiver, args.get(i), null, RubyID.writeID);
}
}
// if the output record separator($\) is not nil, it will be appended to the output.
if (GlobalVariables.get("$\\") != ObjectFactory.NIL_VALUE) {
RubyAPI.callPublicOneArgMethod(receiver, GlobalVariables.get("$\\"), null, RubyID.writeID);
}
return ObjectFactory.NIL_VALUE;
}
@RubyLevelMethod(name="printf", module=true)
public static RubyValue printf(RubyValue receiver, RubyArray args) {
String fmt = args.get(0).toStr();
System.out.printf(fmt, RubyKernelModule.buildFormatArg(args, 1));
return ObjectFactory.NIL_VALUE;
}
@RubyLevelMethod(name="sprintf", module=true)
public static RubyValue sprintf(RubyValue receiver, RubyArray args) {
String fmt = args.get(0).toStr();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
ps.printf(fmt, RubyKernelModule.buildFormatArg(args, 1));
ps.flush();
ps.close();
return ObjectFactory.createString(baos.toString());
}
@RubyLevelMethod(name="p", module=true)
public static RubyValue p(RubyValue receiver) {
return ObjectFactory.NIL_VALUE;
}
@RubyLevelMethod(name="p", module=true)
public static RubyValue p(RubyValue receiver, RubyArray args) {
for (RubyValue arg : args) {
RubyValue str = RubyAPI.callNoArgMethod(arg, null, RubyID.inspectID);
RubyString value = (RubyString) str;
value.appendString("\n");
System.out.print(value.toString());
}
return ObjectFactory.NIL_VALUE;
}
@RubyLevelMethod(name="gets", module=true)
public static RubyValue gets(RubyValue receiver) {
BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
String s = null;
try {
s = in.readLine();
} catch (IOException e) {
}
GlobalVariables.set((null == s ? ObjectFactory.NIL_VALUE : ObjectFactory.createString(s)), "$_");
return GlobalVariables.get("$_");
}
@RubyLevelMethod(name="object_id", alias={"__id__", "hash"})
public static RubyValue objectId(RubyValue receiver) {
//Object.hashCode() javadoc:
//As much as is reasonably practical, the hashCode method defined
//by class Object does return distinct integers for distinct objects.
return ObjectFactory.createFixnum(receiver.hashCode());
}
@RubyLevelMethod(name="extend")
public static RubyValue extend(RubyValue receiver) {
throw new RubyException(RubyRuntime.ArgumentErrorClass, "wrong number of arguments (0 for 1)");
}
@RubyLevelMethod(name="extend")
public static RubyValue extend(RubyValue receiver, RubyValue arg) {
RubyAPI.callPublicOneArgMethod(arg, receiver, null, RubyID.extendObjectID);
return receiver;
}
@RubyLevelMethod(name="extend")
public static RubyValue extend(RubyValue receiver, RubyArray args) {
for (RubyValue v : args) {
RubyAPI.callPublicOneArgMethod(v, receiver, null, RubyID.extendObjectID);
}
return receiver;
}
@RubyLevelMethod(name="freeze")
public static RubyValue freeze(RubyValue receiver) {
receiver.freeze();
return receiver;
}
@RubyLevelMethod(name="frozen?")
public static RubyValue run(RubyValue receiver) {
return ObjectFactory.createBoolean(receiver.frozen());
}
@RubyLevelMethod(name="method_missing", module=true)
public static RubyValue methodMissing(RubyValue receiver, RubyValue arg) {
RubySymbol method_name = (RubySymbol)arg;
RubyClass klass = receiver.getRubyClass();
klass = (klass != null) ? klass.getRealClass() : null;
throw new RubyException(RubyRuntime.NoMethodErrorClass, "undefined method '" + method_name.toString() + "' for " + klass.getName());
}
@RubyLevelMethod(name="sleep", module=true)
public static RubyValue sleep(RubyValue receiver, RubyValue arg) {
long milliseconds = RubyTypesUtil.convertToJavaLong(arg)*1000;
long startTime = System.currentTimeMillis();
RubyThread.sleep(milliseconds);
long endTime = System.currentTimeMillis();
return ObjectFactory.createFixnum((int)Math.round((endTime-startTime)/1000.0));
}
private static Pattern packagePattern = Pattern.compile("\\.");
@RubyLevelMethod(name="require_java", alias="import", module=true)
public static RubyValue requireJava(RubyValue receiver, RubyValue arg, RubyBlock block) {
String className = arg.toStr();
String[] names = packagePattern.split(className);
String name = names[names.length - 1];
if(name.equals("*")){
JavaClass.addPackage(className.substring(0, className.lastIndexOf('.')));
}else{
try {
Class clazz = Class.forName(className);
JavaClass.createJavaClass(clazz);
} catch (ClassNotFoundException e) {
throw new RubyException("Couldn't find class " + className.toString());
}
}
RubyRuntime.setJavaSupported(true);
return ObjectFactory.TRUE_VALUE;
}
@RubyLevelMethod(name="__load_with_reflection__", module=true)
public static RubyValue loadWithReflection(RubyValue receiver, RubyValue arg, RubyBlock block) {
String required_file = arg.toStr();
String name = NameFactory.createMainClassName(required_file);
try {
Class c = Class.forName(name);
Object o = c.newInstance();
RubyProgram p = (RubyProgram) o;
//$".push(file_name) unless $".include?(file_name)
RubyArray a = (RubyArray)GlobalVariables.get("$\"");
if (a.include(arg) == ObjectFactory.FALSE_VALUE) {
a.push(arg);
}
p.invoke();
return ObjectFactory.TRUE_VALUE;
} catch (ClassNotFoundException e) {
return ObjectFactory.FALSE_VALUE;
} catch (InstantiationException e) {
return ObjectFactory.FALSE_VALUE;
} catch (IllegalAccessException e) {
return ObjectFactory.FALSE_VALUE;
}
}
@RubyLevelMethod(name="binding", module=true)
public static RubyValue binding(RubyValue receiver, RubyArray args) {
//compiler will do the magic and insert Binding object
return args.get(0);
}
@RubyLevelMethod(name="lambda", alias="proc", module=true)
public static RubyValue lambda(RubyValue receiver, RubyBlock block) {
block.setCreatedByLambda();
return ObjectFactory.createProc(block);
}
@RubyLevelMethod(name="at_exit", module=true)
public static RubyValue atExit(RubyValue receiver, RubyBlock block) {
if (null == block) {
throw new RubyException(RubyRuntime.ArgumentErrorClass, "called without a block");
}
AtExitBlocks.registerBlock(block);
return ObjectFactory.createProc(block);
}
@RubyLevelMethod(name="gsub", module=true)
public static RubyValue gsub(RubyValue receiver, RubyArray args, RubyBlock block) {
if (!(GlobalVariables.get("$_") instanceof RubyString)) {
throw new RubyException(RubyRuntime.ArgumentErrorClass, "$_ value need to be String (" + GlobalVariables.get("$LAST_READ_LINE").getRubyClass().getName() + " given)");
}
RubyValue r = ((RubyString)GlobalVariables.get("$_")).gsub_danger(args, block);
return GlobalVariables.set(r, "$_");
}
@RubyLevelMethod(name="gsub!", module=true)
public static RubyValue gsubBang(RubyValue receiver, RubyArray args, RubyBlock block) {
if (!(GlobalVariables.get("$_") instanceof RubyString)) {
throw new RubyException(RubyRuntime.ArgumentErrorClass, "$_ value need to be String (" + GlobalVariables.get("$_").getRubyClass().getName() + " given)");
}
RubyValue r = ((RubyString)GlobalVariables.get("$_")).gsub_danger(args, block);
if (r != ObjectFactory.NIL_VALUE) {
GlobalVariables.set(r, "$_");
}
return r;
}
@RubyLevelMethod(name="sub", module=true)
public static RubyValue sub(RubyValue receiver, RubyArray args, RubyBlock block) {
if (!(GlobalVariables.get("$_") instanceof RubyString)) {
throw new RubyException(RubyRuntime.ArgumentErrorClass, "$_ value need to be String (" + GlobalVariables.get("$LAST_READ_LINE").getRubyClass().getName() + " given)");
}
RubyValue r = ((RubyString)GlobalVariables.get("$_")).gsub_danger(args, block);
return GlobalVariables.set(r, "$_");
}
private static final int RDWR = 2;
private static final int CREAT = 256;
private static final int EXCL = 1024;
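// Kernel#open: the first argument is the file name; the optional second argument is either
// a Fixnum flag word (only RDWR is inspected here) or a mode string. With a block the
// opened IO is yielded and then closed; without one, the IO object itself is returned.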
@RubyLevelMethod(name="open")
public static RubyValue open(RubyValue receiver, RubyArray args, RubyBlock block) {
String filename = args.get(0).toStr();
RubyIO io;
if (args.size() <= 1) {
io = ObjectFactory.createFile(filename, "r");
} else if (args.get(1) instanceof RubyFixnum) {
String mode = "r";
int i = ((RubyFixnum)args.get(1)).toInt();
if ((i & RDWR) != 0) {
mode = mode + "w";
}
io = ObjectFactory.createFile(filename, mode);
} else {
RubyString mode = (RubyString) args.get(1);
io = ObjectFactory.createFile(filename, mode.toString());
}
if (null == block) {
return io;
} else {
RubyValue v = block.invoke(receiver, io);
io.close();
return v;
}
}
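// Convert the arguments from index 'start' onwards into objects that Java's printf/format
// understands: Fixnums become Integers, Floats become Doubles, everything else passes through.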
public static Object[] buildFormatArg(RubyArray args, int start) {
Object[] raw_args = new Object[args.size() - start];
for (int i = 0; i < args.size() - start; ++i) {
Object v = args.get(i + start);
if (v instanceof RubyFixnum) {
raw_args[i] = new Integer(((RubyFixnum) v).toInt());
} else if (v instanceof RubyFloat) {
raw_args[i] = new Double(((RubyFloat) v).doubleValue());
} else {
raw_args[i] = v;
}
}
return raw_args;
}
}
|
package org.openhab.binding.zwave.internal.protocol.serialmessage;
import java.util.ArrayList;
import org.openhab.binding.zwave.internal.protocol.SerialMessage;
import org.openhab.binding.zwave.internal.protocol.ZWaveController;
import org.openhab.binding.zwave.internal.protocol.SerialMessage.SerialMessageClass;
import org.openhab.binding.zwave.internal.protocol.SerialMessage.SerialMessagePriority;
import org.openhab.binding.zwave.internal.protocol.SerialMessage.SerialMessageType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class processes a serial message from the zwave controller
* @author Chris Jackson
* @since 1.5.0
*/
public class SerialApiGetInitDataMessageClass extends ZWaveCommandProcessor {
private static final Logger logger = LoggerFactory.getLogger(SerialApiGetInitDataMessageClass.class);
private ArrayList<Integer> zwaveNodes = new ArrayList<Integer>();
private static final int NODE_BYTES = 29; // 29 bytes = 232 bits, one bit for each node supported by Z-Wave
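// Build the SerialApiGetInitData request that asks the controller for its node list.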
public SerialMessage doRequest() {
return new SerialMessage(SerialMessageClass.SerialApiGetInitData, SerialMessageType.Request, SerialMessageClass.SerialApiGetInitData, SerialMessagePriority.High);
}
@Override
public boolean handleResponse(ZWaveController zController, SerialMessage lastSentMessage, SerialMessage incomingMessage) {
logger.debug(String.format("Got MessageSerialApiGetInitData response."));
int nodeBytes = incomingMessage.getMessagePayloadByte(2);
if (nodeBytes != NODE_BYTES) {
logger.error("Invalid number of node bytes = {}", nodeBytes);
return false;
}
int nodeId = 1;
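// Bytes 3..(3 + nodeBytes - 1) of the payload form a bitmask with one bit per possible
// node ID: bit j of byte i set means node (i - 3) * 8 + j + 1 is registered.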
// loop bytes
for (int i = 3;i < 3 + nodeBytes;i++) {
int incomingByte = incomingMessage.getMessagePayloadByte(i);
// loop bits in byte
for (int j = 0; j < 8; j++) {
if ((incomingByte & (1 << j)) != 0) {
logger.info("NODE {}: Node found", nodeId);
zwaveNodes.add(nodeId);
}
nodeId++;
}
}
logger.info("ZWave API {} API", incomingMessage.getMessagePayloadByte(1) & 0x01 ? "Controller" : "Slave");
logger.info("ZWave Controller is {} Controller", incomingMessage.getMessagePayloadByte(1) & 0x04 ? "Primary" : "Secondary");
logger.info("
logger.info(String.format("# Nodes = %d", zwaveNodes.size()));
logger.info("
checkTransactionComplete(lastSentMessage, incomingMessage);
return true;
}
public ArrayList<Integer> getNodes() {
return zwaveNodes;
}
}
|
package de.cooperate.modeling.graphical.papyrus.extensions;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.papyrus.infra.core.editor.BackboneException;
import org.eclipse.papyrus.infra.ui.contentoutline.IPapyrusContentOutlinePage;
import org.eclipse.papyrus.infra.ui.editor.IMultiDiagramEditor;
import org.eclipse.papyrus.uml.tools.providers.UMLLabelProvider;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.IPropertyListener;
import org.eclipse.ui.views.contentoutline.ContentOutlinePage;
import org.eclipse.ui.views.properties.tabbed.ITabbedPropertySheetPageContributor;
public class PapyrusContentOutlinePage extends ContentOutlinePage implements IPapyrusContentOutlinePage {
private TreeViewer viewer;
private IMultiDiagramEditor editor;
public PapyrusContentOutlinePage() {
super();
}
@Override
public void createControl(Composite parent) {
super.createControl(parent);
viewer = getTreeViewer();
viewer.setContentProvider(new PapyrusContentProvider());
viewer.setLabelProvider(new UMLLabelProvider());
viewer.setInput(editor.getActiveEditor());
}
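// Keep the outline in sync with the editor: whenever the multi-diagram editor fires a
// property change, point the tree viewer at the currently active nested editor again.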
@Override
public void init(IMultiDiagramEditor arg0) throws BackboneException {
editor = arg0;
editor.addPropertyListener(new IPropertyListener() {
@Override
public void propertyChanged(Object source, int propId) {
PapyrusContentOutlinePage.this.viewer.setInput(editor.getActiveEditor());
}
});
}
}
|
package com.example.administrator.myapplication;
import android.Manifest;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
//import android.location.Location;
//import android.location.LocationListener;
import android.location.Location;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.view.View;
import android.support.design.widget.NavigationView;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationAvailability;
import com.google.android.gms.location.LocationListener;
import com.google.android.gms.location.LocationRequest;
import com.google.android.gms.location.LocationServices;
import com.google.zxing.integration.android.IntentIntegrator;
import com.google.zxing.integration.android.IntentResult;
public class MainActivity extends AppCompatActivity
implements NavigationView.OnNavigationItemSelectedListener, GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener, LocationListener {
private static final int CONTENT_REQUEST = 1337;
public GoogleApiClient googleApiClient;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
.setAction("Action", null).show();
}
});
DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(
this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
drawer.setDrawerListener(toggle);
toggle.syncState();
NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view);
navigationView.setNavigationItemSelectedListener(this);
Button captureBtn = (Button) findViewById(R.id.capture);
captureBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
int REQUEST_CAMERA = 0;
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = "IMG_" + timeStamp + ".jpg";
File f = new File(Environment.getExternalStorageDirectory()
, "DCIM/Camera/" + imageFileName);
Uri uri = Uri.fromFile(f);
intent.putExtra(MediaStore.EXTRA_OUTPUT, uri);
startActivityForResult(Intent.createChooser(intent
, "Take a picture with"), REQUEST_CAMERA);
}
});
Button scanBtn = (Button) findViewById(R.id.scan);
scanBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
IntentIntegrator scanIntegrator = new IntentIntegrator(MainActivity.this);
scanIntegrator.initiateScan();
}
});
// LocationManager locationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
// MyCurrentLoctionListener locationListener = new MyCurrentLoctionListener();
// Here, thisActivity is the current activity
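// Build a GoogleApiClient for the fused location provider; it is connected in onStart(),
// disconnected in onStop(), and location updates are requested once onConnected() fires.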
googleApiClient = new GoogleApiClient.Builder(this)
.addApi(LocationServices.API)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.build();
//Log.v("xxxxxx", LocationManager.GPS_PROVIDER.toString());
//locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, this);
}
@Override
protected void onStart() {
super.onStart();
googleApiClient.connect();
}
@Override
protected void onStop() {
super.onStop();
if (googleApiClient != null && googleApiClient.isConnected()) {
googleApiClient.disconnect();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
switch (requestCode) {
case 1: {
// If request is cancelled, the result arrays are empty.
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// Permission granted: location-dependent features can proceed.
} else {
// Permission denied: leave location-dependent features disabled.
}
return;
}
// other 'case' lines to check for other
}
}
@Override
public void onBackPressed() {
DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
if (drawer.isDrawerOpen(GravityCompat.START)) {
drawer.closeDrawer(GravityCompat.START);
} else {
super.onBackPressed();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@SuppressWarnings("StatementWithEmptyBody")
@Override
public boolean onNavigationItemSelected(MenuItem item) {
// Handle navigation view item clicks here.
int id = item.getItemId();
if (id == R.id.nav_camera) {
// Handle the camera action
} else if (id == R.id.nav_gallery) {
} else if (id == R.id.nav_slideshow) {
} else if (id == R.id.nav_manage) {
} else if (id == R.id.nav_share) {
} else if (id == R.id.nav_send) {
}
DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
drawer.closeDrawer(GravityCompat.START);
return true;
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
System.gc();
IntentResult scanningResult = IntentIntegrator.parseActivityResult(requestCode, resultCode, data);
if (scanningResult != null) {
//we have a result
Log.v("xxxxxx", " |NNNN.msn");
String scanContent = scanningResult.getContents();
String scanFormat = scanningResult.getFormatName();
TextView textView = (TextView) findViewById(R.id.textView);
//String contents = data.getStringExtra("SCAN_RESULT");
textView.setText(scanContent+" "+scanFormat);
Toast toast = Toast.makeText(getApplicationContext(),
scanContent+" "+scanFormat, Toast.LENGTH_SHORT);
toast.show();
}
if (resultCode == RESULT_OK) {
if (requestCode == CONTENT_REQUEST) {
/*Intent i=new Intent(Intent.ACTION_VIEW);
i.setDataAndType(Uri.fromFile(output), "image/jpeg");
startActivity(i);
finish();*/
// Save a file: path for use with ACTION_VIEW intents
//mCurrentPhotoPath = "file:" + image.getAbsolutePath();
/*Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
File f = new File(mCurrentPhotoPath);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
this.sendBroadcast(mediaScanIntent);*/
Log.v("xxxxxx", " |NNNN.msn");
//data.putExtra(MediaStore.EXTRA_OUTPUT, "android.jpg");
//use imageUri here to access the image
//Bundle extras = data.getExtras();
//Log.e("URI",imageUri.toString());
//Bitmap bmp = (Bitmap) extras.get("data");
// here you will get the image as bitmap
/*File outFile = new File(Environment.getExternalStorageDirectory(), "myname.jpeg");
FileOutputStream fos = null;
try {
fos = new FileOutputStream(outFile);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
photo.compress(Bitmap.CompressFormat.JPEG, 100, fos);
fos.flush();
fos.close();*/
}
} else if (resultCode == RESULT_CANCELED) {
Toast.makeText(this, "Picture was not taken", Toast.LENGTH_SHORT);
} else if (resultCode == 2)
{
Log.v("xxxxxx", "tttttttttt");
//TextView textView = (TextView) findViewById(R.id.textView);
String contents = data.getStringExtra("SCAN_RESULT");
//textView.setText(contents);
/*if (resultCode == RESULT_OK)
{
TextView textView = (TextView) findViewById(R.id.textView);
String contents = data.getStringExtra("SCAN_RESULT");
String format = data.getStringExtra("SCAN_RESULT_FORMAT");
textView.setText("Result : " + contents);
}*/
}
}
@Override
public void onConnected(Bundle bundle) {
LocationAvailability locationAvailability = LocationServices.FusedLocationApi.getLocationAvailability(googleApiClient);
if(locationAvailability.isLocationAvailable()) {
LocationRequest locationRequest = new LocationRequest()
.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY)
.setInterval(5000);
LocationServices.FusedLocationApi.requestLocationUpdates(googleApiClient, locationRequest, this);
} else {
Log.v("xxxxxx","qwrqwrwerwer");
// Do something when location provider not available
}
}
@Override
public void onLocationChanged(Location location) {
TextView textView = (TextView) findViewById(R.id.textView);
// textView.setText("Latitude : " + location.getLatitude() + "\n" +
// "Longistudesmd : " + location.getLongitude());
}
@Override
public void onConnectionSuspended(int i) {
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
}
}
|
package com.martin.kantidroid.ui.timetable;
import android.os.Bundle;
import android.support.design.widget.TextInputLayout;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import com.bumptech.glide.Glide;
import com.martin.kantidroid.R;
import com.martin.kantidroid.logic.Util;
public class TimetableActivity extends AppCompatActivity implements View.OnClickListener {
private TextInputLayout mTilClass;
private EditText mClass;
private Button mDownload;
private View mLayoutImage, mLayoutList;
private RecyclerView mDownloads;
private boolean mHasError;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_timetable);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
final ActionBar ab = getSupportActionBar();
ab.setDisplayHomeAsUpEnabled(true);
mTilClass = (TextInputLayout) findViewById(R.id.tilClass);
mTilClass.setErrorEnabled(true);
mClass = (EditText) findViewById(R.id.etClass);
mDownload = (Button) findViewById(R.id.bDownload);
mLayoutImage = findViewById(R.id.llImage);
mLayoutList = findViewById(R.id.flList);
mDownloads = (RecyclerView) findViewById(R.id.rvDownloads);
if (savedInstanceState != null) {
mHasError = savedInstanceState.getBoolean("mHasError");
}
else {
mHasError = false;
}
if (mHasError) {
mTilClass.setError(getString(R.string.timetable_error_noclass));
}
if (/*!hasDownloads*/ true) {
mLayoutList.setVisibility(View.INVISIBLE);
} else {
mLayoutImage.setVisibility(View.INVISIBLE);
}
Glide.with(this).load(R.drawable.kanti).into((ImageView) findViewById(R.id.ivNothing));
mDownload.setOnClickListener(this);
}
@Override
public void onClick(View view) {
String classUrl = Util.getClassUrl(mClass.getText().toString());
if (!classUrl.contentEquals("error")) {
mTilClass.setError(null);
mHasError = false;
// Try downloading
}
else {
mTilClass.setError(getString(R.string.timetable_error_noclass));
mHasError = true;
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Let's not reload the main activity
onBackPressed();
return true;
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putBoolean("mHasError", mHasError);
}
}
|
package com.nestedworld.nestedworld.ui.welcome;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.TextInputEditText;
import android.support.design.widget.TextInputLayout;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.view.View;
import android.widget.Toast;
import com.nestedworld.nestedworld.R;
import com.nestedworld.nestedworld.helpers.input.InputChecker;
import com.nestedworld.nestedworld.helpers.session.SessionHelper;
import com.nestedworld.nestedworld.network.http.callback.NestedWorldHttpCallback;
import com.nestedworld.nestedworld.network.http.errorHandler.RetrofitErrorHandler;
import com.nestedworld.nestedworld.network.http.implementation.NestedWorldHttpApi;
import com.nestedworld.nestedworld.network.http.models.response.users.auth.ForgotPasswordResponse;
import com.nestedworld.nestedworld.network.http.models.response.users.auth.SignInResponse;
import com.nestedworld.nestedworld.ui.base.BaseAppCompatActivity;
import com.nestedworld.nestedworld.ui.base.BaseFragment;
import com.nestedworld.nestedworld.ui.mainMenu.MainMenuActivity;
import com.rey.material.widget.ProgressView;
import butterknife.BindView;
import butterknife.OnClick;
import retrofit2.Response;
/**
* A placeholder fragment containing a simple view.
*/
public class LoginFragment extends BaseFragment {
public final static String FRAGMENT_NAME = LoginFragment.class.getSimpleName();
@BindView(R.id.editText_userEmail)
TextInputEditText etEmail;
@BindView(R.id.textInputLayout_userEmail)
TextInputLayout textInputLayoutUserEmail;
@BindView(R.id.editText_userPassword)
TextInputEditText etPassword;
@BindView(R.id.textInputLayout_userPassword)
TextInputLayout textInputLayoutUserPassword;
@BindView(R.id.progressView)
ProgressView progressView;
/*
** Public method
*/
public static void load(@NonNull final FragmentManager fragmentManager) {
FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
fragmentTransaction.setCustomAnimations(R.anim.fade_out, R.anim.fade_in);
fragmentTransaction.replace(R.id.container, new LoginFragment());
fragmentTransaction.addToBackStack(FRAGMENT_NAME);
fragmentTransaction.commit();
}
/*
** Life Cycle
*/
@Override
protected int getLayoutResource() {
return R.layout.fragment_login;
}
@Override
protected void init(@NonNull View rootView, @Nullable Bundle savedInstanceState) {
}
/*
** ButterKnife callback
*/
@OnClick(R.id.nav_back)
public void back() {
//Check if fragment hasn't been detached
if (mContext == null) {
return;
}
((BaseAppCompatActivity) mContext).onBackPressed();
}
@OnClick(R.id.button_login)
public void login() {
//Check if fragment hasn't been detached
if (mContext == null) {
return;
}
//Retrieve user input
String email = etEmail.getText().toString();
String password = etPassword.getText().toString();
//Check input
//TODO check input (should be a warning)
/*if (!checkInputForLogin(email, password)) {
return;
}*/
//Send login request
sendLoginRequest(email, password);
}
@OnClick(R.id.textView_forgotPassword)
public void forgotPassword() {
//Check if fragment hasn't been detached
if (mContext == null) {
return;
}
//Retrieve user input
String email = etEmail.getText().toString();
//Check user input
if (!checkInputForForgotPassword(email)) {
return;
}
sendForgotPasswordRequest(email);
}
/*
** Internal method
*/
private boolean checkInputForLogin(@NonNull final String email, @NonNull final String password) {
//Check email
if (!InputChecker.checkEmailFormat(email)) {
textInputLayoutUserEmail.setError(getString(R.string.error_emailInvalid));
return false;
} else {
textInputLayoutUserEmail.setErrorEnabled(false);
}
//Check password
if (!InputChecker.checkPasswordFormat(password)) {
textInputLayoutUserPassword.setError(getString(R.string.error_passwordTooShort));
return false;
} else {
textInputLayoutUserPassword.setErrorEnabled(false);
}
return true;
}
private boolean checkInputForForgotPassword(@NonNull final String email) {
//We only validate the email here, so clear any stale error on the password field
textInputLayoutUserPassword.setErrorEnabled(false);
//Check email
if (!InputChecker.checkEmailFormat(email)) {
textInputLayoutUserEmail.setError(getString(R.string.error_emailInvalid));
return false;
} else {
textInputLayoutUserEmail.setErrorEnabled(false);
}
return true;
}
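// Sign-in flow: start the progress animation, call the sign-in endpoint, and on success
// persist a session and open the main menu; on error stop the animation and show a toast.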
private void sendLoginRequest(@NonNull final String email, @NonNull final String password) {
//Start loading animation
progressView.start();
//Send request
NestedWorldHttpApi.getInstance().signIn(email, password).enqueue(new NestedWorldHttpCallback<SignInResponse>() {
@Override
public void onSuccess(@NonNull Response<SignInResponse> response) {
//Check if fragment hasn't been detached
if (mContext == null) {
return;
}
//Stop the loading animation
progressView.stop();
//Create a new session
SessionHelper.newSession(email, password, response.body().token);
//display the mainMenu and stop the launchActivity
startActivity(MainMenuActivity.class);
((BaseAppCompatActivity) mContext).finish();
}
@Override
public void onError(@NonNull KIND errorKind, @Nullable Response<SignInResponse> response) {
//Check if fragment hasn't been detached
if (mContext == null) {
return;
}
//Stop loading animation
progressView.stop();
//Get error message
String errorMessage = RetrofitErrorHandler.getErrorMessage(mContext, errorKind, getString(R.string.error_request_login), response);
//Display error message
Toast.makeText(mContext, errorMessage, Toast.LENGTH_LONG).show();
}
});
}
private void sendForgotPasswordRequest(@NonNull final String email) {
//Send request
NestedWorldHttpApi.getInstance().forgotPassword(email).enqueue(new NestedWorldHttpCallback<ForgotPasswordResponse>() {
@Override
public void onSuccess(@NonNull Response<ForgotPasswordResponse> response) {
//check if fragment hasn't been detached
if (mContext == null) {
return;
}
//Tell the user a password-reset email has been sent
Toast.makeText(mContext, getString(R.string.registration_msg_passwordSend), Toast.LENGTH_LONG).show();
}
@Override
public void onError(@NonNull KIND errorKind, @Nullable Response<ForgotPasswordResponse> response) {
//check if fragment hasn't been detached
if (mContext == null) {
return;
}
//Get error message
String errorMessage = RetrofitErrorHandler.getErrorMessage(mContext, errorKind, getString(R.string.error_request_forgotPassword), response);
//Display error message
Toast.makeText(mContext, errorMessage, Toast.LENGTH_LONG).show();
}
});
}
}
|
package cdc;
import java.awt.Point;
import java.util.Map;
import java.util.TreeMap;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import entity.Item;
import entity.ItemInfo;
import entity.Player;
import entity.PlayerInfo;
import entity.Monster;
import entity.Projector;
import tcp.codes;
public class CDC
{
final static int MaxPlayerno=4;
private static Point playerinitlocation[];
private Map<Integer,Player> player;
private Map<Integer,Item> item;
private Map<Integer,Monster> monster;
private Map<Integer,Projector> projector;
private int itemid=0;
private int monsterid=0;
private int projectorid=0;
public static CDC uniqueinstance;
private CDC()
{
player=new ConcurrentHashMap<>();
item=new ConcurrentHashMap<>();
monster=new ConcurrentHashMap<>();
projector=new ConcurrentHashMap<>();
}
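// Classic lazy singleton: the first caller creates the shared CDC instance.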
public static synchronized CDC getInstance()
{
if(uniqueinstance==null)
{
uniqueinstance=new CDC();
}
return uniqueinstance;
}
public Map<Integer,Player> getPlayer(){return player;}
public Map<Integer,Item> getItem(){return item;}
public Map<Integer,Monster> getMonster(){return monster;}
public Map<Integer,Projector> getProjector(){return projector;}
public void keyDown(int clientno,int action)
{
assert player.get(clientno)!=null:"The clientno is invalid";
if(action==codes.ATTACK)
player.get(clientno).playerAttack();
else
player.get(clientno).playerMove(action);
}
public void keyRelease(int clientno,int action)
{
assert player.get(clientno)!=null:"The clientno is invalid";
if(action==codes.ATTACK)
player.get(clientno).attackingEnd();
else
player.get(clientno).movingEnd();
}
public int getMonsterNewId()
{
int tmp=monsterid;
monsterid+=1;
return tmp;
}
public int getItemNewId()
{
int tmp=itemid;
itemid+=1;
return tmp;
}
public int getProjectorId()
{
int tmp=projectorid;
projectorid+=1;
return tmp;
}
public void addPlayer(int clientno,int type)
{
assert clientno>=0&&clientno<4:"The clientno is invalid";
player.put(clientno,new Player(type,playerinitlocation[clientno],PlayerInfo.getInstance().getTypeInfo(type)));
}
public void addItem(Point point,int type)
{
Item tmp=new Item(point,type,ItemInfo.getInstance().getTypeInfo(type));
item.putIfAbsent(itemid,tmp);
itemid+=1;
}
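// Snapshot every player, monster and item entry as a string, in that order.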
public Vector<String> getUpdatInfo()
{
Vector<String> v=new Vector<String>();
int cnt=0;
for(Map.Entry<Integer,Player> entry:player.entrySet())
{
String str="";
str=entry.toString();
v.add(cnt,str);
cnt+=1;
}
for(Map.Entry<Integer,Monster> entry:monster.entrySet())
{
String str;
str=entry.toString();
v.add(cnt,str);
cnt+=1;
}
for(Map.Entry<Integer,Item> entry:item.entrySet())
{
String str;
str=entry.toString();
v.add(cnt,str);
cnt+=1;
}
return v;
}
/*public static void main(String[] args)
{
}*/
}
|
package com.yeokm1.nussocprintandroid.fragments;
import android.content.Intent;
import android.content.pm.ApplicationInfo;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.text.method.ScrollingMovementMethod;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import com.yeokhengmeng.craftsupportemailintent.CraftIntentEmail;
import com.yeokhengmeng.craftsupportemailintent.CraftSupportEmail;
import com.yeokhengmeng.craftsupportemailintent.GetInfoSummary;
import com.yeokm1.nussocprintandroid.R;
import java.text.SimpleDateFormat;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
public class HelpFragment extends Fragment {
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_help, container, false);
String helpText = generateHelpText();
TextView helpTextView = (TextView) view.findViewById(R.id.help_textview);
helpTextView.setMovementMethod(new ScrollingMovementMethod());
helpTextView.setText(helpText);
Button sourceCodeButton = (Button) view.findViewById(R.id.help_source_code_button);
sourceCodeButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://github.com/yeokm1/nus-soc-print"));
startActivity(browserIntent);
}
});
final Button emailButton = (Button) view.findViewById(R.id.help_problems_button);
emailButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
CraftSupportEmail emailGen = new CraftSupportEmail(getActivity(), "yeokm1@gmail.com", "NUS SOC Print Android");
CraftIntentEmail emailIntent = new CraftIntentEmail();
emailGen.appendMinimumDetailsToContent();
emailGen.appendAppDetailsToContent();
String emailEndText = getString(R.string.help_support_email_end_text);
emailGen.appendContent(emailEndText);
Intent intent = emailGen.generateIntentWithNewTaskFlag();
boolean status = emailGen.sendIntent(getActivity(), intent);
}
});
return view;
}
private String generateHelpText(){
String textFormat = getString(R.string.help_text);
String compileTime = getCompileDateTime();
String packageDetails = getPackageVersion();
String helpText = String.format(textFormat, packageDetails, compileTime);
return helpText;
}
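// Derive the build date from the modification time of classes.dex inside the installed APK,
// a common trick when no build timestamp is compiled into the app.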
private String getCompileDateTime(){
try{
ApplicationInfo ai = getActivity().getPackageManager().getApplicationInfo(getActivity().getPackageName(), 0);
ZipFile zf = new ZipFile(ai.sourceDir);
ZipEntry ze = zf.getEntry("classes.dex");
long time = ze.getTime();
String s = new SimpleDateFormat("dd MMM yyyy").format(new java.util.Date(time));
zf.close();
return s;
}catch(Exception e){
// Fall through and return an empty string if the APK cannot be inspected.
}
return "";
}
private String getPackageVersion(){
GetInfoSummary infoSummary = new GetInfoSummary(getActivity().getApplicationContext());
return infoSummary.getPackageVersionAndName();
}
}
|
package org.apache.usergrid.persistence.collection.serialization.impl;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.netflix.astyanax.serializers.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.db.marshal.BooleanType;
import org.apache.cassandra.db.marshal.BytesType;
import org.apache.usergrid.persistence.collection.EntitySet;
import org.apache.usergrid.persistence.collection.MvccEntity;
import org.apache.usergrid.persistence.collection.exception.CollectionRuntimeException;
import org.apache.usergrid.persistence.collection.exception.DataCorruptionException;
import org.apache.usergrid.persistence.collection.exception.EntityTooLargeException;
import org.apache.usergrid.persistence.collection.mvcc.entity.impl.MvccEntityImpl;
import org.apache.usergrid.persistence.collection.serialization.MvccEntitySerializationStrategy;
import org.apache.usergrid.persistence.collection.serialization.SerializationFig;
import org.apache.usergrid.persistence.core.astyanax.CassandraFig;
import org.apache.usergrid.persistence.core.astyanax.ColumnParser;
import org.apache.usergrid.persistence.core.astyanax.IdRowCompositeSerializer;
import org.apache.usergrid.persistence.core.astyanax.MultiTennantColumnFamily;
import org.apache.usergrid.persistence.core.astyanax.MultiTennantColumnFamilyDefinition;
import org.apache.usergrid.persistence.core.astyanax.ScopedRowKey;
import org.apache.usergrid.persistence.core.astyanax.ScopedRowKeySerializer;
import org.apache.usergrid.persistence.core.scope.ApplicationScope;
import org.apache.usergrid.persistence.model.entity.Entity;
import org.apache.usergrid.persistence.model.entity.EntityMap;
import org.apache.usergrid.persistence.model.entity.Id;
import org.apache.usergrid.persistence.model.util.EntityUtils;
import org.apache.usergrid.persistence.model.util.UUIDGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.inject.Inject;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.serializers.AbstractSerializer;
import com.netflix.astyanax.serializers.BooleanSerializer;
import rx.Observable;
import rx.Scheduler;
import rx.schedulers.Schedulers;
/**
* V3 Serialization Implementation
*/
public class MvccEntitySerializationStrategyV3Impl implements MvccEntitySerializationStrategy {
public final static int VERSION = 1;
private static final IdRowCompositeSerializer ID_SER = IdRowCompositeSerializer.get();
private static final ScopedRowKeySerializer<Id> ROW_KEY_SER = new ScopedRowKeySerializer<>( ID_SER );
private static final MultiTennantColumnFamily<ScopedRowKey<Id>, Boolean> CF_ENTITY_DATA =
new MultiTennantColumnFamily<>( "Entity_Version_Data_V3", ROW_KEY_SER, BooleanSerializer.get() );
private static final Boolean COL_VALUE = Boolean.TRUE;
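// Row layout: one row per entity id holding a single column keyed by Boolean.TRUE whose value is the
// JSON-serialized EntityWrapper. Only the latest version survives, which is why history loads are unsupported.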
private final EntitySerializer entitySerializer;
private static final Logger log = LoggerFactory.getLogger( MvccEntitySerializationStrategyV3Impl.class );
protected final Keyspace keyspace;
protected final SerializationFig serializationFig;
protected final CassandraFig cassandraFig;
@Inject
public MvccEntitySerializationStrategyV3Impl( final Keyspace keyspace, final SerializationFig serializationFig,
final CassandraFig cassandraFig ) {
this.keyspace = keyspace;
this.serializationFig = serializationFig;
this.cassandraFig = cassandraFig;
this.entitySerializer = new EntitySerializer( serializationFig );
}
@Override
public MutationBatch write( final ApplicationScope applicationScope, final MvccEntity entity ) {
Preconditions.checkNotNull( applicationScope, "applicationScope is required" );
Preconditions.checkNotNull( entity, "entity is required" );
final Id entityId = entity.getId();
final UUID version = entity.getVersion();
Optional<EntityMap> map = EntityMap.fromEntity(entity.getEntity());
return doWrite( applicationScope, entityId, version, colMutation -> colMutation.putColumn( COL_VALUE,
entitySerializer.toByteBuffer( new EntityWrapper(entityId,entity.getVersion(), entity.getStatus(), VERSION, map.isPresent() ? map.get() : null ) ) ) );
}
@Override
public EntitySet load( final ApplicationScope applicationScope, final Collection<Id> entityIds,
final UUID maxVersion ) {
Preconditions.checkNotNull( applicationScope, "applicationScope is required" );
Preconditions.checkNotNull( entityIds, "entityIds is required" );
Preconditions.checkArgument( entityIds.size() > 0, "entityIds is required" );
Preconditions.checkNotNull( maxVersion, "version is required" );
if ( entityIds.size() > serializationFig.getMaxLoadSize() ) {
throw new IllegalArgumentException(
"requested load size cannot be over configured maximum of " + serializationFig.getMaxLoadSize() );
}
final Id applicationId = applicationScope.getApplication();
final List<ScopedRowKey<Id>> rowKeys = new ArrayList<>( entityIds.size() );
for ( final Id entityId : entityIds ) {
final ScopedRowKey<Id> rowKey =
ScopedRowKey.fromKey( applicationId, entityId );
rowKeys.add( rowKey );
}
/**
* Our settings may mean we exceed our maximum thrift buffer size. If we do, we have to make multiple
* requests, not just one.
* Perform the calculations and the appropriate request patterns
*
*/
final int maxEntityResultSizeInBytes = serializationFig.getMaxEntitySize() * entityIds.size();
//if we're less than 1, set the number of requests to 1
final int numberRequests = Math.max( 1, maxEntityResultSizeInBytes / cassandraFig.getThriftBufferSize() );
final int entitiesPerRequest = entityIds.size() / numberRequests;
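// Illustrative numbers (not from configuration): with a 500 KB max entity size, 100 requested ids and a
// 15 MB thrift buffer, maxEntityResultSizeInBytes is ~50 MB, numberRequests = max(1, 50 MB / 15 MB) = 3
// and entitiesPerRequest = 33, so the keys are queried in three buffered chunks.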
final Scheduler scheduler;
//if it's a single request, run it on the same thread
if ( numberRequests == 1 ) {
scheduler = Schedulers.immediate();
}
//if it's more than 1 request, run them on the I/O scheduler
else {
scheduler = Schedulers.io();
}
final EntitySetImpl entitySetResults = Observable.from( rowKeys )
//buffer our entities per request, then for that buffer, execute the query in parallel (if necessary)
.buffer( entitiesPerRequest ).flatMap( listObservable -> {
//here, we execute our query then emit the items either in parallel, or on the current thread
// if we have more than 1 request
return Observable.just( listObservable ).map( scopedRowKeys -> {
try {
return keyspace.prepareQuery( CF_ENTITY_DATA ).getKeySlice( scopedRowKeys )
.withColumnSlice( COL_VALUE ).execute().getResult();
}
catch ( ConnectionException e ) {
throw new CollectionRuntimeException( null, applicationScope,
"An error occurred connecting to cassandra", e );
}
} ).subscribeOn( scheduler );
}, 10 ).collect( () -> new EntitySetImpl( entityIds.size() ), ( ( entitySet, rows ) -> {
final Iterator<Row<ScopedRowKey<Id>, Boolean>> latestEntityColumns = rows.iterator();
while ( latestEntityColumns.hasNext() ) {
final Row<ScopedRowKey<Id>, Boolean> row = latestEntityColumns.next();
final ColumnList<Boolean> columns = row.getColumns();
if ( columns.size() == 0 ) {
continue;
}
final Id entityId = row.getKey().getKey();
final Column<Boolean> column = columns.getColumnByIndex( 0 );
final MvccEntity parsedEntity =
new MvccColumnParser( entityId, entitySerializer ).parseColumn( column );
entitySet.addEntity( parsedEntity );
}
} ) ).toBlocking().last();
return entitySetResults;
}
@Override
public Iterator<MvccEntity> loadDescendingHistory( final ApplicationScope applicationScope, final Id entityId,
final UUID version, final int fetchSize ) {
Preconditions.checkNotNull( applicationScope, "applicationScope is required" );
Preconditions.checkNotNull( entityId, "entity id is required" );
Preconditions.checkNotNull( version, "version is required" );
Preconditions.checkArgument( fetchSize > 0, "max Size must be greater than 0" );
throw new UnsupportedOperationException( "This version does not support loading history" );
}
@Override
public Iterator<MvccEntity> loadAscendingHistory( final ApplicationScope applicationScope, final Id entityId,
final UUID version, final int fetchSize ) {
Preconditions.checkNotNull( applicationScope, "applicationScope is required" );
Preconditions.checkNotNull( entityId, "entity id is required" );
Preconditions.checkNotNull( version, "version is required" );
Preconditions.checkArgument( fetchSize > 0, "max Size must be greater than 0" );
throw new UnsupportedOperationException( "This version does not support loading history" );
}
@Override
public Optional<MvccEntity> load( final ApplicationScope scope, final Id entityId ) {
final EntitySet results = load( scope, Collections.singleton( entityId ), UUIDGenerator.newTimeUUID() );
return Optional.fromNullable( results.getEntity( entityId ));
}
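// A "mark" writes a tombstone: an EntityWrapper with a null entity map. fromByteBuffer() below interprets
// a null map as Status.DELETED when the row is read back.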
@Override
public MutationBatch mark( final ApplicationScope applicationScope, final Id entityId, final UUID version ) {
Preconditions.checkNotNull(applicationScope, "applicationScope is required");
Preconditions.checkNotNull(entityId, "entity id is required");
Preconditions.checkNotNull(version, "version is required");
return doWrite(applicationScope, entityId, version, colMutation ->
colMutation.putColumn(COL_VALUE,
entitySerializer.toByteBuffer(new EntityWrapper(entityId, version, MvccEntity.Status.DELETED, VERSION, null))
)
);
}
@Override
public MutationBatch delete( final ApplicationScope applicationScope, final Id entityId, final UUID version ) {
Preconditions.checkNotNull( applicationScope, "applicationScope is required" );
Preconditions.checkNotNull( entityId, "entity id is required" );
Preconditions.checkNotNull( version, "version is required" );
return doWrite( applicationScope, entityId, version, colMutation -> colMutation.deleteColumn( Boolean.TRUE ) );
}
@Override
public java.util.Collection getColumnFamilies() {
//create the entity data column family definition. In V3 each row holds a single boolean-keyed column
//containing the serialized entity, so no comparator-based version ordering within the row is needed
MultiTennantColumnFamilyDefinition cf =
new MultiTennantColumnFamilyDefinition( CF_ENTITY_DATA, BytesType.class.getSimpleName(),
BooleanType.class.getSimpleName() ,
BytesType.class.getSimpleName(), MultiTennantColumnFamilyDefinition.CacheOption.KEYS );
return Collections.singleton( cf );
}
/**
* Do the write on the correct row for the entity id with the operation
*/
private MutationBatch doWrite( final ApplicationScope applicationScope, final Id entityId, final UUID version, final RowOp op ) {
final MutationBatch batch = keyspace.prepareMutationBatch();
final Id applicationId = applicationScope.getApplication();
final ScopedRowKey<Id> rowKey =
ScopedRowKey.fromKey( applicationId, entityId );
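//use the timestamp embedded in the version time-UUID so column writes are ordered by entity version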
final long timestamp = version.timestamp();
op.doOp( batch.withRow( CF_ENTITY_DATA, rowKey ).setTimestamp( timestamp ) );
return batch;
}
@Override
public int getImplementationVersion() {
return CollectionDataVersions.LOG_REMOVAL.getVersion();
}
/**
* Converts raw columns to the MvccEntity representation
*/
private static final class MvccColumnParser implements ColumnParser<Boolean, MvccEntity> {
private final Id id;
private final AbstractSerializer<EntityWrapper> entityJsonSerializer;
private MvccColumnParser( final Id id, final AbstractSerializer<EntityWrapper> entityJsonSerializer ) {
this.id = id;
this.entityJsonSerializer = entityJsonSerializer;
}
@Override
public MvccEntity parseColumn( Column<Boolean> column ) {
final EntityWrapper deSerialized;
try {
deSerialized = column.getValue( entityJsonSerializer );
}
catch ( DataCorruptionException e ) {
log.error(
"DATA CORRUPTION DETECTED when de-serializing entity with Id {}. This means the"
+ " write was truncated.", id, e );
//return an empty entity, we can never load this one, and we don't want it to bring the system
//to a grinding halt
//TODO fix this
return new MvccEntityImpl( id, UUIDGenerator.newTimeUUID(), MvccEntity.Status.DELETED, Optional.<Entity>absent() );
}
Optional<Entity> entity = deSerialized.getOptionalEntity() ;
return new MvccEntityImpl( id, deSerialized.getVersion(), deSerialized.getStatus(), entity );
}
}
/**
* We should only ever create this once, since this impl is a singleton
*/
public final class EntitySerializer extends AbstractSerializer<EntityWrapper> {
private final JsonFactory JSON_FACTORY = new JsonFactory();
private final ObjectMapper MAPPER = new ObjectMapper( JSON_FACTORY );
private SerializationFig serializationFig;
public EntitySerializer( final SerializationFig serializationFig ) {
this.serializationFig = serializationFig;
// mapper.enable(SerializationFeature.INDENT_OUTPUT); don't indent output,
// causes slowness
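// enabling default typing adds an "@class" property for Object-typed values in the entity map so that
// polymorphic field values deserialize back to their concrete types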
MAPPER.enableDefaultTypingAsProperty( ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT, "@class" );
}
@Override
public ByteBuffer toByteBuffer( final EntityWrapper wrapper ) {
if ( wrapper == null ) {
return null;
}
wrapper.setSerailizationVersion(VERSION);
//mark this version as empty
if ( wrapper.getEntityMap() == null ) {
//we're empty
try {
return ByteBuffer.wrap(MAPPER.writeValueAsBytes(wrapper));
}catch (JsonProcessingException jpe){
throw new RuntimeException( "Unable to serialize entity", jpe );
}
}
//we have an entity, but the status is not COMPLETE; don't allow it
if ( wrapper.getStatus() != MvccEntity.Status.COMPLETE ) {
throw new UnsupportedOperationException( "Only states " + MvccEntity.Status.DELETED + " and " + MvccEntity.Status.COMPLETE + " are supported" );
}
wrapper.setStatus(MvccEntity.Status.COMPLETE);
//Convert to internal entity map
final byte[] wrapperBytes;
try {
wrapperBytes = MAPPER.writeValueAsBytes(wrapper);
final int maxEntrySize = serializationFig.getMaxEntitySize();
if (wrapperBytes.length > maxEntrySize) {
throw new EntityTooLargeException(Entity.fromMap(wrapper.getEntityMap()), maxEntrySize, wrapperBytes.length,
"Your entity cannot exceed " + maxEntrySize + " bytes. The entity you tried to save was "
+ wrapperBytes.length + " bytes");
}
}
catch ( JsonProcessingException jpe ) {
throw new RuntimeException( "Unable to serialize entity", jpe );
}
return ByteBuffer.wrap(wrapperBytes);
}
@Override
public EntityWrapper fromByteBuffer( final ByteBuffer byteBuffer ) {
/**
* We intentionally throw data corruption exceptions when we're unable to de-serialize
* the data in cassandra. If this occurs, we'll never be able to de-serialize it
* and it should be considered lost. This is an error that is occurring due to a bug
* in serializing the entity. This is a lazy recognition + repair signal for deployment with
* existing systems.
*/
EntityWrapper entityWrapper;
try {
entityWrapper = MAPPER.readValue(byteBuffer.array(), EntityWrapper.class);
if ( VERSION != entityWrapper.getSerailizationVersion()) {
throw new UnsupportedOperationException( "A version of type " + entityWrapper.getSerailizationVersion() + " is unsupported" );
}
}
catch ( Exception e ) {
if( log.isDebugEnabled() ){
log.debug("Entity Wrapper Deserialized: " + StringSerializer.get().fromByteBuffer(byteBuffer));
}
throw new DataCorruptionException( "Unable to read entity data", e );
}
// it's been deleted, remove it
if ( entityWrapper.getEntityMap() == null) {
return new EntityWrapper( entityWrapper.getId(), entityWrapper.getVersion(), MvccEntity.Status.DELETED, VERSION, null );
}
entityWrapper.setStatus(MvccEntity.Status.COMPLETE);
// the entity map is present, so return the wrapper as a complete entity
return entityWrapper;
}
}
/**
* Simple bean wrapper for state and entity
*/
public static class EntityWrapper {
private Id id;
private MvccEntity.Status status;
private UUID version;
private EntityMap entityMap;
private int serailizationVersion;
public EntityWrapper( ) {
}
public EntityWrapper( final Id id , final UUID version, final MvccEntity.Status status, final int serailizationVersion, final EntityMap entity ) {
this.setStatus(status);
this.version = version;
this.entityMap = entity;
this.id = id;
this.setSerailizationVersion(serailizationVersion);
}
/**
* Do not store the status; it is derived from whether the entity map is null (deleted) or present (complete).
* @return the status derived when the wrapper was read or constructed
*/
@JsonIgnore()
public MvccEntity.Status getStatus() {
return status;
}
public void setStatus(MvccEntity.Status status) {
this.status = status;
}
@JsonSerialize()
public Id getId() {
return id;
}
@JsonSerialize()
public UUID getVersion() {
return version;
}
@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
public EntityMap getEntityMap() {
return entityMap;
}
@JsonSerialize()
public int getSerailizationVersion() {
return serailizationVersion;
}
public void setSerailizationVersion(int serailizationVersion) {
this.serailizationVersion = serailizationVersion;
}
@JsonIgnore
public Optional<Entity> getOptionalEntity() {
Optional<Entity> entityReturn = Optional.fromNullable(Entity.fromMap(getEntityMap()));
//Inject the id into it.
if (entityReturn.isPresent()) {
EntityUtils.setId(entityReturn.get(), getId());
EntityUtils.setVersion(entityReturn.get(), getVersion());
}
return entityReturn;
}
}
/**
* Simple callback to perform puts and deletes with a common row setup code
*/
private static interface RowOp {
/**
* The operation to perform on the row
*/
void doOp( ColumnListMutation<Boolean> colMutation );
}
}
|
package org.openlmis.core.model.repository;
import android.content.Context;
import android.util.Log;
import com.google.inject.Inject;
import com.j256.ormlite.dao.Dao;
import com.j256.ormlite.misc.TransactionManager;
import com.j256.ormlite.stmt.Where;
import org.openlmis.core.LMISApp;
import org.openlmis.core.exceptions.LMISException;
import org.openlmis.core.manager.SharedPreferenceMgr;
import org.openlmis.core.model.BaseInfoItem;
import org.openlmis.core.model.Period;
import org.openlmis.core.model.Product;
import org.openlmis.core.model.ProductProgram;
import org.openlmis.core.model.Program;
import org.openlmis.core.model.RegimenItem;
import org.openlmis.core.model.ReportTypeForm;
import org.openlmis.core.model.RnRForm;
import org.openlmis.core.model.RnrFormItem;
import org.openlmis.core.model.StockCard;
import org.openlmis.core.model.StockMovementItem;
import org.openlmis.core.model.helper.RnrFormHelper;
import org.openlmis.core.model.service.RequisitionPeriodService;
import org.openlmis.core.persistence.DbUtil;
import org.openlmis.core.persistence.GenericDao;
import org.openlmis.core.persistence.LmisSqliteOpenHelper;
import org.openlmis.core.utils.Constants;
import org.openlmis.core.utils.DateUtil;
import org.roboguice.shaded.goole.common.base.Function;
import org.roboguice.shaded.goole.common.collect.FluentIterable;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Callable;
public class RnrFormRepository {
@Inject
DbUtil dbUtil;
@Inject
StockRepository stockRepository;
@Inject
RegimenRepository regimenRepository;
@Inject
RnrFormItemRepository rnrFormItemRepository;
@Inject
RegimenItemRepository regimenItemRepository;
@Inject
RnrFormSignatureRepository signatureRepository;
@Inject
BaseInfoItemRepository baseInfoItemRepository;
@Inject
ProductProgramRepository productProgramRepository;
@Inject
ProgramRepository programRepository;
@Inject
ReportTypeFormRepository reportTypeFormRepository;
@Inject
RnrFormHelper rnrFormHelper;
GenericDao<RnRForm> genericDao;
GenericDao<RnrFormItem> rnrFormItemGenericDao;
private Context context;
protected String programCode;
@Inject
private RequisitionPeriodService requisitionPeriodService;
@Inject
public StockMovementRepository stockMovementRepository;
@Inject
public RnrFormRepository(Context context) {
genericDao = new GenericDao<>(RnRForm.class, context);
rnrFormItemGenericDao = new GenericDao<>(RnrFormItem.class, context);
this.context = context;
}
public RnRForm initNormalRnrForm(Date periodEndDate) throws LMISException {
RnRForm rnrForm = initRnRForm(periodEndDate, RnRForm.Emergency.No);
return createInitRnrForm(rnrForm);
}
public RnRForm initEmergencyRnrForm(Date periodEndDate, List<StockCard> stockCards) throws LMISException {
RnRForm rnRForm = initRnRForm(periodEndDate, RnRForm.Emergency.Yes);
rnRForm.setRnrFormItemListWrapper(generateRnrFormItems(rnRForm, stockCards));
return rnRForm;
}
public void create(RnRForm rnRForm) throws LMISException {
genericDao.create(rnRForm);
}
public void createRnRsWithItems(final List<RnRForm> forms) throws LMISException {
try {
TransactionManager.callInTransaction(LmisSqliteOpenHelper.getInstance(context).getConnectionSource(), new Callable<Object>() {
@Override
public Object call() throws Exception {
for (RnRForm form : forms) {
createOrUpdateWithItems(form);
}
return null;
}
});
} catch (SQLException e) {
throw new LMISException(e);
}
}
public void createOrUpdateWithItems(final RnRForm form) throws LMISException {
try {
TransactionManager.callInTransaction(LmisSqliteOpenHelper.getInstance(context).getConnectionSource(), new Callable<Object>() {
@Override
public Object call() throws Exception {
genericDao.createOrUpdate(form);
createOrUpdateRnrWrappers(form);
genericDao.refresh(form);
return null;
}
});
} catch (SQLException e) {
throw new LMISException(e);
}
}
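// A period is considered unique when no AUTHORIZED form already exists for the same program and
// periodBegin/periodEnd; any lookup failure is reported and treated as "not unique".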
public boolean isPeriodUnique(final RnRForm form) {
try {
return null == dbUtil.withDao(RnRForm.class, new DbUtil.Operation<RnRForm, RnRForm>() {
@Override
public RnRForm operate(Dao<RnRForm, String> dao) throws SQLException {
return dao.queryBuilder().where().eq("program_id", form.getProgram().getId()).and().eq("status", RnRForm.STATUS.AUTHORIZED).and().eq("periodBegin", form.getPeriodBegin()).and().eq("periodEnd", form.getPeriodEnd()).queryForFirst();
}
});
} catch (LMISException e) {
e.reportToFabric();
}
return false;
}
public List<RnRForm> list() throws LMISException {
return genericDao.queryForAll();
}
public List<RnRForm> listInclude(RnRForm.Emergency includeEmergency, String programCode) throws LMISException {
ReportTypeForm reportTypeForm = reportTypeFormRepository.getReportType(programCode);
return listInclude(includeEmergency, programCode, reportTypeForm);
}
public List<RnRForm> listInclude(RnRForm.Emergency includeEmergency, String programCode, ReportTypeForm reportTypeForm) throws LMISException {
return list(programCode, includeEmergency.Emergency(), reportTypeForm);
}
public List<RnRForm> queryAllUnsyncedForms() throws LMISException {
List<RnRForm> unsyncedRnr = listNotSynchronizedFromStarTime();
deleteDeactivatedAndUnsupportedProductItems(unsyncedRnr);
return unsyncedRnr;
}
public RnRForm queryUnAuthorized() throws LMISException {
final Program program = programRepository.queryByCode(programCode);
ReportTypeForm reportTypeForm = reportTypeFormRepository.getReportType(programCode);
if (program == null) {
throw new LMISException("Program cannot be null !");
}
RnRForm rnRForm = dbUtil.withDao(RnRForm.class, new DbUtil.Operation<RnRForm, RnRForm>() {
@Override
public RnRForm operate(Dao<RnRForm, String> dao) throws SQLException {
return dao.queryBuilder().where().eq("program_id", program.getId()).and().between("periodBegin", reportTypeForm.getStartTime(), new Date()).and().ne("status", RnRForm.STATUS.AUTHORIZED).queryForFirst();
}
});
assignCategoryForRnrItems(rnRForm);
return rnRForm;
}
public RnRForm queryRnRForm(final long id) throws LMISException {
RnRForm rnRForm = dbUtil.withDao(RnRForm.class, new DbUtil.Operation<RnRForm, RnRForm>() {
@Override
public RnRForm operate(Dao<RnRForm, String> dao) throws SQLException {
return dao.queryBuilder().where().eq("id", id).queryForFirst();
}
});
assignCategoryForRnrItems(rnRForm);
return rnRForm;
}
protected void deleteDeactivatedAndUnsupportedProductItems(List<RnRForm> rnRForms) throws LMISException {
for (RnRForm rnRForm : rnRForms) {
String programCode = rnRForm.getProgram().getProgramCode();
List<String> programCodes = programRepository.queryProgramCodesByProgramCodeOrParentCode(programCode);
List<String> supportedProductCodes = FluentIterable.from(productProgramRepository.listActiveProductProgramsByProgramCodes(programCodes)).transform(new Function<ProductProgram, String>() {
@Override
public String apply(ProductProgram productProgram) {
return productProgram.getProductCode();
}
}).toList();
rnrFormItemRepository.deleteFormItems(rnRForm.getDeactivatedAndUnsupportedProductItems(supportedProductCodes));
}
}
public List<RnrFormItem> generateRnrFormItems(final RnRForm form, List<StockCard> stockCards) throws LMISException {
List<RnrFormItem> rnrFormItems = new ArrayList<>();
List<String> programCodes = programRepository.queryProgramCodesByProgramCodeOrParentCode(form.getProgram().getProgramCode());
for (StockCard stockCard : stockCards) {
RnrFormItem rnrFormItem = createRnrFormItemByPeriod(stockCard, form.getPeriodBegin(), form.getPeriodEnd());
rnrFormItem.setForm(form);
rnrFormItems.add(rnrFormItem);
rnrFormItem.setCategory(productProgramRepository.queryByCode(rnrFormItem.getProduct().getCode(), programCodes).getCategory());
}
return rnrFormItems;
}
public void removeRnrForm(RnRForm form) throws LMISException {
if (form != null) {
rnrFormItemRepository.deleteFormItems(form.getRnrFormItemListWrapper());
regimenItemRepository.deleteRegimenItems(form.getRegimenItemListWrapper());
baseInfoItemRepository.batchDelete(form.getBaseInfoItemListWrapper());
signatureRepository.batchDelete(form.getSignaturesWrapper());
genericDao.delete(form);
}
}
public boolean hasRequisitionData() {
try {
List<RnRForm> list = list();
if (list != null && list.size() > 0) {
return true;
}
} catch (LMISException e) {
e.reportToFabric();
}
return false;
}
public boolean hasOldDate() {
List<RnRForm> list = null;
try {
list = list();
} catch (LMISException e) {
e.reportToFabric();
}
Date dueDateShouldDataLivedInDB = DateUtil.dateMinusMonth(new Date(), SharedPreferenceMgr.getInstance().getMonthOffsetThatDefinedOldData());
if (list != null && hasRequisitionData()) {
for (RnRForm rnrForm : list) {
if (rnrForm.getPeriodEnd().before(dueDateShouldDataLivedInDB)) {
return true;
}
}
}
return false;
}
protected List<RnRForm> listUnsynced() throws LMISException {
return dbUtil.withDao(RnRForm.class, new DbUtil.Operation<RnRForm, List<RnRForm>>() {
@Override
public List<RnRForm> operate(Dao<RnRForm, String> dao) throws SQLException {
return dao.queryBuilder().where().eq("synced", false).and().eq("status", RnRForm.STATUS.AUTHORIZED).query();
}
});
}
protected List<RnRForm> listNotSynchronizedFromStarTime() throws LMISException {
List<RnRForm> rnRForms = new ArrayList<>();
for (Constants.Program program : Constants.PROGRAMES) {
rnRForms.addAll(listNotSynchronizedFromReportStartTime(program.getCode()));
}
return rnRForms;
}
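// Builds one RnR form item for a stock card over the reporting period: if there were no stock movements,
// the item is initialised from the previous RnR form's inventory; otherwise the opening balance comes from
// the first movement's previous stock-on-hand and the totals are aggregated from the movements.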
protected RnrFormItem createRnrFormItemByPeriod(StockCard stockCard, Date startDate, Date endDate) throws LMISException {
RnrFormItem rnrFormItem = new RnrFormItem();
List<StockMovementItem> stockMovementItems = stockMovementRepository.queryStockItemsByCreatedDate(stockCard.getId(), startDate, endDate);
if (stockMovementItems.isEmpty()) {
rnrFormHelper.initRnrFormItemWithoutMovement(rnrFormItem, lastRnrInventory(stockCard));
} else {
rnrFormItem.setInitialAmount(stockMovementItems.get(0).calculatePreviousSOH());
rnrFormHelper.assignTotalValues(rnrFormItem, stockMovementItems);
}
rnrFormItem.setProduct(stockCard.getProduct());
return rnrFormItem;
}
protected List<RegimenItem> generateRegimeItems(RnRForm form) throws LMISException {
return new ArrayList<>();
}
protected List<BaseInfoItem> generateBaseInfoItems(RnRForm form) {
return new ArrayList<>();
}
private RnRForm initRnRForm(Date periodEndDate, RnRForm.Emergency emergency) throws LMISException {
final Program program = programRepository.queryByCode(programCode);
if (program == null) {
throw new LMISException("Program cannot be null !");
}
Period period = requisitionPeriodService.generateNextPeriod(programCode, periodEndDate);
return RnRForm.init(program, period, emergency.Emergency());
}
private RnRForm createInitRnrForm(final RnRForm rnrForm) throws LMISException {
try {
TransactionManager.callInTransaction(LmisSqliteOpenHelper.getInstance(context).getConnectionSource(), new Callable<Object>() {
@Override
public Object call() throws Exception {
create(rnrForm);
List<StockCard> stockCards = stockRepository.getStockCardsBeforePeriodEnd(rnrForm);
rnrFormItemRepository.batchCreateOrUpdate(generateRnrFormItems(rnrForm, stockCards));
regimenItemRepository.batchCreateOrUpdate(generateRegimeItems(rnrForm));
baseInfoItemRepository.batchCreateOrUpdate(generateBaseInfoItems(rnrForm));
genericDao.refresh(rnrForm);
return null;
}
});
} catch (SQLException e) {
throw new LMISException(e);
}
assignCategoryForRnrItems(rnrForm);
return rnrForm;
}
private void assignCategoryForRnrItems(RnRForm rnrForm) throws LMISException {
if (rnrForm == null || rnrForm.getRnrFormItemListWrapper() == null) {
return;
}
List<String> programCodes = programRepository.queryProgramCodesByProgramCodeOrParentCode(programCode);
for (RnrFormItem item : rnrForm.getRnrFormItemListWrapper()) {
if (item.getProduct() != null) {
item.setCategory(productProgramRepository.queryByCode(item.getProduct().getCode(), programCodes).getCategory());
}
}
}
protected long lastRnrInventory(StockCard stockCard) throws LMISException {
return lastRnrInventory(stockCard.getProduct());
}
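// Looks up this product's closing inventory on the previous period's form. The latest form in the list is
// normally the one being created, so the previous period's form is at index size - 2; returns 0 when there
// is no earlier form or the product is not on it.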
protected long lastRnrInventory(Product product) throws LMISException {
List<RnRForm> rnRForms = listInclude(RnRForm.Emergency.No, programCode);
if (rnRForms.isEmpty() || rnRForms.size() == 1) {
return 0;
}
List<RnrFormItem> rnrFormItemListWrapper = rnRForms.get(rnRForms.size() - 2).getRnrFormItemListWrapper();
for (RnrFormItem item : rnrFormItemListWrapper) {
if (item.getProduct().getId() == product.getId()) {
return item.getInventory();
}
}
return 0;
}
private List<RnRForm> list(String programCode, final boolean isWithEmergency, ReportTypeForm typeForm) throws LMISException {
final long programId = programRepository.queryByCode(programCode).getId();
return dbUtil.withDao(RnRForm.class, new DbUtil.Operation<RnRForm, List<RnRForm>>() {
@Override
public List<RnRForm> operate(Dao<RnRForm, String> dao) throws SQLException {
Where<RnRForm, String> where = dao.queryBuilder().orderBy("periodBegin", true).where();
where.in("program_id", programId).and().between("periodBegin", typeForm.getStartTime(), new Date());
if (!isWithEmergency) {
where.and().eq("emergency", false);
}
return where.query();
}
});
}
private List<RnRForm> listNotSynchronizedFromReportStartTime(String programCode) throws LMISException {
long programId;
ReportTypeForm reportTypeForm;
try {
programId = programRepository.queryByCode(programCode).getId();
reportTypeForm = reportTypeFormRepository.getReportType(programCode);
} catch (Exception e) {
return new ArrayList<>();
}
if(reportTypeForm == null) {
return new ArrayList<>();
}
return dbUtil.withDao(RnRForm.class, new DbUtil.Operation<RnRForm, List<RnRForm>>() {
@Override
public List<RnRForm> operate(Dao<RnRForm, String> dao) throws SQLException {
Where<RnRForm, String> where = dao.queryBuilder().where().eq("program_id", programId).and().
eq("synced", false).and().
eq("status", RnRForm.STATUS.AUTHORIZED).and().
between("periodBegin", reportTypeForm.getStartTime(), new Date());
return where.query();
}
});
}
private List<RnRForm> list(String programCode, final boolean isWithEmergency) throws LMISException {
final long programId = programRepository.queryByCode(programCode).getId();
return dbUtil.withDao(RnRForm.class, new DbUtil.Operation<RnRForm, List<RnRForm>>() {
@Override
public List<RnRForm> operate(Dao<RnRForm, String> dao) throws SQLException {
Where<RnRForm, String> where = dao.queryBuilder().orderBy("periodBegin", true).where();
where.in("program_id", programId);
if (!isWithEmergency) {
where.and().eq("emergency", false);
}
return where.query();
}
});
}
public void createAndRefresh(RnRForm rnRForm) throws LMISException {
create(rnRForm);
genericDao.refresh(rnRForm);
}
private void createOrUpdateRnrWrappers(RnRForm form) throws SQLException, LMISException {
rnrFormItemRepository.batchCreateOrUpdate(form.getRnrFormItemListWrapper());
signatureRepository.batchCreateOrUpdate(form.getSignaturesWrapper());
regimenItemRepository.batchCreateOrUpdate(form.getRegimenItemListWrapper());
baseInfoItemRepository.batchCreateOrUpdate(form.getBaseInfoItemListWrapper());
}
public void deleteOldData() {
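// Removes forms older than the configured retention window. Child rows (form items, base info, regimens,
// signatures) are deleted before the parent rnr_forms rows; the cutoff date string is produced by DateUtil,
// not taken from user input.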
String dueDateShouldDataLivedInDB = DateUtil.formatDate(DateUtil.dateMinusMonth(new Date(), SharedPreferenceMgr.getInstance().getMonthOffsetThatDefinedOldData()), DateUtil.DB_DATE_FORMAT);
String rawSqlDeleteRnrFormItems = "DELETE FROM rnr_form_items "
+ "WHERE form_id IN (SELECT id FROM rnr_forms WHERE periodEnd < '" + dueDateShouldDataLivedInDB + "' );";
String rawSqlDeleteSignature = "DELETE FROM rnr_form_signature "
+ "WHERE form_id IN (SELECT id FROM rnr_forms WHERE periodEnd < '" + dueDateShouldDataLivedInDB + "' );";
String rawSqlDeleteRegimeItems = "DELETE FROM regime_items "
+ "WHERE form_id IN (SELECT id FROM rnr_forms WHERE periodEnd < '" + dueDateShouldDataLivedInDB + "' );";
String rawSqlDeleteBaseInfoItems = "DELETE FROM rnr_baseInfo_items "
+ "WHERE rnRForm_id IN (SELECT id FROM rnr_forms WHERE periodEnd < '" + dueDateShouldDataLivedInDB + "' );";
String rawSqlDeleteRnrForms = "DELETE FROM rnr_forms "
+ "WHERE periodEnd < '" + dueDateShouldDataLivedInDB + "'; ";
LmisSqliteOpenHelper.getInstance(LMISApp.getContext()).getWritableDatabase().execSQL(rawSqlDeleteRnrFormItems);
LmisSqliteOpenHelper.getInstance(LMISApp.getContext()).getWritableDatabase().execSQL(rawSqlDeleteBaseInfoItems);
LmisSqliteOpenHelper.getInstance(LMISApp.getContext()).getWritableDatabase().execSQL(rawSqlDeleteRegimeItems);
LmisSqliteOpenHelper.getInstance(LMISApp.getContext()).getWritableDatabase().execSQL(rawSqlDeleteSignature);
LmisSqliteOpenHelper.getInstance(LMISApp.getContext()).getWritableDatabase().execSQL(rawSqlDeleteRnrForms);
}
}
|
package lombok.javac.handlers;
import java.util.ArrayList;
import javax.lang.model.element.Modifier;
import org.mangosdk.spi.ProviderFor;
import com.sun.tools.javac.code.Flags;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.JCTree.JCAnnotation;
import com.sun.tools.javac.tree.JCTree.JCArrayTypeTree;
import com.sun.tools.javac.tree.JCTree.JCBlock;
import com.sun.tools.javac.tree.JCTree.JCClassDecl;
import com.sun.tools.javac.tree.JCTree.JCExpression;
import com.sun.tools.javac.tree.JCTree.JCFieldAccess;
import com.sun.tools.javac.tree.JCTree.JCIdent;
import com.sun.tools.javac.tree.JCTree.JCIf;
import com.sun.tools.javac.tree.JCTree.JCLiteral;
import com.sun.tools.javac.tree.JCTree.JCMethodDecl;
import com.sun.tools.javac.tree.JCTree.JCModifiers;
import com.sun.tools.javac.tree.JCTree.JCPrimitiveTypeTree;
import com.sun.tools.javac.tree.JCTree.JCStatement;
import com.sun.tools.javac.tree.JCTree.JCTypeApply;
import com.sun.tools.javac.tree.JCTree.JCTypeParameter;
import com.sun.tools.javac.tree.JCTree.JCVariableDecl;
import com.sun.tools.javac.util.List;
import com.sun.tools.javac.util.ListBuffer;
import com.sun.tools.javac.util.Name;
import lombok.AccessLevel;
import lombok.Builder;
import lombok.Builder.ObtainVia;
import lombok.ConfigurationKeys;
import lombok.Singular;
import lombok.ToString;
import lombok.core.AST.Kind;
import lombok.core.AnnotationValues;
import lombok.core.HandlerPriority;
import lombok.core.handlers.HandlerUtil;
import lombok.core.handlers.InclusionExclusionUtils.Included;
import lombok.experimental.NonFinal;
import lombok.javac.Javac;
import lombok.javac.JavacAnnotationHandler;
import lombok.javac.JavacNode;
import lombok.javac.JavacTreeMaker;
import lombok.javac.handlers.HandleConstructor.SkipIfConstructorExists;
import lombok.javac.handlers.JavacSingularsRecipes.JavacSingularizer;
import lombok.javac.handlers.JavacSingularsRecipes.SingularData;
import static lombok.core.handlers.HandlerUtil.*;
import static lombok.javac.handlers.JavacHandlerUtil.*;
import static lombok.javac.Javac.*;
import static lombok.javac.JavacTreeMaker.TypeTag.*;
@ProviderFor(JavacAnnotationHandler.class)
@HandlerPriority(-1024) //-2^10; to ensure we've picked up @FieldDefault's changes (-2048) but @Value hasn't removed itself yet (-512), so that we can error on presence of it on the builder classes.
public class HandleBuilder extends JavacAnnotationHandler<Builder> {
private HandleConstructor handleConstructor = new HandleConstructor();
private static final boolean toBoolean(Object expr, boolean defaultValue) {
if (expr == null) return defaultValue;
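// javac models boolean literals as JCLiteral nodes whose value is an Integer (0 or 1), hence the int comparison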
if (expr instanceof JCLiteral) return ((Integer) ((JCLiteral) expr).value) != 0;
return ((Boolean) expr).booleanValue();
}
private static class BuilderFieldData {
JCExpression type;
Name rawName;
Name name;
Name nameOfDefaultProvider;
Name nameOfSetFlag;
SingularData singularData;
ObtainVia obtainVia;
JavacNode obtainViaNode;
JavacNode originalFieldNode;
java.util.List<JavacNode> createdFields = new ArrayList<JavacNode>();
}
@Override public void handle(AnnotationValues<Builder> annotation, JCAnnotation ast, JavacNode annotationNode) {
handleFlagUsage(annotationNode, ConfigurationKeys.BUILDER_FLAG_USAGE, "@Builder");
Builder builderInstance = annotation.getInstance();
// These exist just to support the 'old' lombok.experimental.Builder, which had these properties. lombok.Builder no longer has them.
boolean fluent = toBoolean(annotation.getActualExpression("fluent"), true);
boolean chain = toBoolean(annotation.getActualExpression("chain"), true);
String builderMethodName = builderInstance.builderMethodName();
String buildMethodName = builderInstance.buildMethodName();
String builderClassName = builderInstance.builderClassName();
String toBuilderMethodName = "toBuilder";
boolean toBuilder = builderInstance.toBuilder();
java.util.List<Name> typeArgsForToBuilder = null;
if (builderMethodName == null) builderMethodName = "builder";
if (buildMethodName == null) buildMethodName = "build";
if (builderClassName == null) builderClassName = "";
if (!checkName("builderMethodName", builderMethodName, annotationNode)) return;
if (!checkName("buildMethodName", buildMethodName, annotationNode)) return;
if (!builderClassName.isEmpty()) {
if (!checkName("builderClassName", builderClassName, annotationNode)) return;
}
deleteAnnotationIfNeccessary(annotationNode, Builder.class, "lombok.experimental.Builder");
JavacNode parent = annotationNode.up();
java.util.List<BuilderFieldData> builderFields = new ArrayList<BuilderFieldData>();
JCExpression returnType;
List<JCTypeParameter> typeParams = List.nil();
List<JCExpression> thrownExceptions = List.nil();
Name nameOfBuilderMethod;
JavacNode tdParent;
JavacNode fillParametersFrom = parent.get() instanceof JCMethodDecl ? parent : null;
boolean addCleaning = false;
boolean isStatic = true;
if (parent.get() instanceof JCClassDecl) {
tdParent = parent;
JCClassDecl td = (JCClassDecl) tdParent.get();
ListBuffer<JavacNode> allFields = new ListBuffer<JavacNode>();
boolean valuePresent = (hasAnnotation(lombok.Value.class, parent) || hasAnnotation("lombok.experimental.Value", parent));
for (JavacNode fieldNode : HandleConstructor.findAllFields(tdParent, true)) {
JCVariableDecl fd = (JCVariableDecl) fieldNode.get();
JavacNode isDefault = findAnnotation(Builder.Default.class, fieldNode, false);
boolean isFinal = (fd.mods.flags & Flags.FINAL) != 0 || (valuePresent && !hasAnnotation(NonFinal.class, fieldNode));
BuilderFieldData bfd = new BuilderFieldData();
bfd.rawName = fd.name;
bfd.name = removePrefixFromField(fieldNode);
bfd.type = fd.vartype;
bfd.singularData = getSingularData(fieldNode);
bfd.originalFieldNode = fieldNode;
if (bfd.singularData != null && isDefault != null) {
isDefault.addError("@Builder.Default and @Singular cannot be mixed.");
findAnnotation(Builder.Default.class, fieldNode, true);
isDefault = null;
}
if (fd.init == null && isDefault != null) {
isDefault.addWarning("@Builder.Default requires an initializing expression (' = something;').");
findAnnotation(Builder.Default.class, fieldNode, true);
isDefault = null;
}
if (fd.init != null && isDefault == null) {
if (isFinal) continue;
fieldNode.addWarning("@Builder will ignore the initializing expression entirely. If you want the initializing expression to serve as default, add @Builder.Default. If it is not supposed to be settable during building, make the field final.");
}
if (isDefault != null) {
bfd.nameOfDefaultProvider = parent.toName("$default$" + bfd.name);
bfd.nameOfSetFlag = parent.toName(bfd.name + "$set");
JCMethodDecl md = generateDefaultProvider(bfd.nameOfDefaultProvider, fieldNode, td.typarams);
recursiveSetGeneratedBy(md, ast, annotationNode.getContext());
if (md != null) injectMethod(tdParent, md);
}
addObtainVia(bfd, fieldNode);
builderFields.add(bfd);
allFields.append(fieldNode);
}
handleConstructor.generateConstructor(tdParent, AccessLevel.PACKAGE, List.<JCAnnotation>nil(), allFields.toList(), false, null, SkipIfConstructorExists.I_AM_BUILDER, annotationNode);
returnType = namePlusTypeParamsToTypeReference(tdParent.getTreeMaker(), td.name, td.typarams);
typeParams = td.typarams;
thrownExceptions = List.nil();
nameOfBuilderMethod = null;
if (builderClassName.isEmpty()) builderClassName = td.name.toString() + "Builder";
} else if (fillParametersFrom != null && fillParametersFrom.getName().toString().equals("<init>")) {
JCMethodDecl jmd = (JCMethodDecl) fillParametersFrom.get();
if (!jmd.typarams.isEmpty()) {
annotationNode.addError("@Builder is not supported on constructors with constructor type parameters.");
return;
}
tdParent = parent.up();
JCClassDecl td = (JCClassDecl) tdParent.get();
returnType = namePlusTypeParamsToTypeReference(tdParent.getTreeMaker(), td.name, td.typarams);
typeParams = td.typarams;
thrownExceptions = jmd.thrown;
nameOfBuilderMethod = null;
if (builderClassName.isEmpty()) builderClassName = td.name.toString() + "Builder";
} else if (fillParametersFrom != null) {
tdParent = parent.up();
JCClassDecl td = (JCClassDecl) tdParent.get();
JCMethodDecl jmd = (JCMethodDecl) fillParametersFrom.get();
isStatic = (jmd.mods.flags & Flags.STATIC) != 0;
JCExpression fullReturnType = jmd.restype;
returnType = fullReturnType;
typeParams = jmd.typarams;
thrownExceptions = jmd.thrown;
nameOfBuilderMethod = jmd.name;
if (returnType instanceof JCTypeApply) {
returnType = cloneType(tdParent.getTreeMaker(), returnType, ast, annotationNode.getContext());
}
if (builderClassName.isEmpty()) {
if (returnType instanceof JCFieldAccess) {
builderClassName = ((JCFieldAccess) returnType).name.toString() + "Builder";
} else if (returnType instanceof JCIdent) {
Name n = ((JCIdent) returnType).name;
for (JCTypeParameter tp : typeParams) {
if (tp.name.equals(n)) {
annotationNode.addError("@Builder requires specifying 'builderClassName' if used on methods with a type parameter as return type.");
return;
}
}
builderClassName = n.toString() + "Builder";
} else if (returnType instanceof JCPrimitiveTypeTree) {
builderClassName = returnType.toString() + "Builder";
if (Character.isLowerCase(builderClassName.charAt(0))) {
builderClassName = Character.toTitleCase(builderClassName.charAt(0)) + builderClassName.substring(1);
}
} else if (returnType instanceof JCTypeApply) {
JCExpression clazz = ((JCTypeApply) returnType).clazz;
if (clazz instanceof JCFieldAccess) {
builderClassName = ((JCFieldAccess) clazz).name + "Builder";
} else if (clazz instanceof JCIdent) {
builderClassName = ((JCIdent) clazz).name + "Builder";
}
}
if (builderClassName.isEmpty()) {
// This shouldn't happen.
System.err.println("Lombok bug ID#20140614-1651: javac HandleBuilder: return type to name conversion failed: " + returnType.getClass());
builderClassName = td.name.toString() + "Builder";
}
}
if (toBuilder) {
final String TO_BUILDER_NOT_SUPPORTED = "@Builder(toBuilder=true) is only supported if you return your own type.";
if (returnType instanceof JCArrayTypeTree) {
annotationNode.addError(TO_BUILDER_NOT_SUPPORTED);
return;
}
Name simpleName;
String pkg;
List<JCExpression> tpOnRet = List.nil();
if (fullReturnType instanceof JCTypeApply) {
tpOnRet = ((JCTypeApply) fullReturnType).arguments;
}
JCExpression namingType = returnType;
if (returnType instanceof JCTypeApply) namingType = ((JCTypeApply) returnType).clazz;
if (namingType instanceof JCIdent) {
simpleName = ((JCIdent) namingType).name;
pkg = null;
} else if (namingType instanceof JCFieldAccess) {
JCFieldAccess jcfa = (JCFieldAccess) namingType;
simpleName = jcfa.name;
pkg = unpack(jcfa.selected);
if (pkg.startsWith("ERR:")) {
String err = pkg.substring(4, pkg.indexOf("__ERR__"));
annotationNode.addError(err);
return;
}
} else {
annotationNode.addError("Expected a (parameterized) type here instead of a " + namingType.getClass().getName());
return;
}
if (pkg != null && !parent.getPackageDeclaration().equals(pkg)) {
annotationNode.addError(TO_BUILDER_NOT_SUPPORTED);
return;
}
if (!tdParent.getName().contentEquals(simpleName)) {
annotationNode.addError(TO_BUILDER_NOT_SUPPORTED);
return;
}
List<JCTypeParameter> tpOnMethod = jmd.typarams;
List<JCTypeParameter> tpOnType = ((JCClassDecl) tdParent.get()).typarams;
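// Map each type parameter of the static method to its position in the return type's type arguments; the
// collected names are used later to re-declare matching type parameters on the generated toBuilder() method.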
typeArgsForToBuilder = new ArrayList<Name>();
for (JCTypeParameter tp : tpOnMethod) {
int pos = -1;
int idx = -1;
for (JCExpression tOnRet : tpOnRet) {
idx++;
if (!(tOnRet instanceof JCIdent)) continue;
if (((JCIdent) tOnRet).name != tp.name) continue;
pos = idx;
}
if (pos == -1 || tpOnType.size() <= pos) {
annotationNode.addError("@Builder(toBuilder=true) requires that each type parameter on the static method is part of the typeargs of the return value. Type parameter " + tp.name + " is not part of the return type.");
return;
}
typeArgsForToBuilder.add(tpOnType.get(pos).name);
}
}
} else {
annotationNode.addError("@Builder is only supported on types, constructors, and methods.");
return;
}
if (fillParametersFrom != null) {
for (JavacNode param : fillParametersFrom.down()) {
if (param.getKind() != Kind.ARGUMENT) continue;
BuilderFieldData bfd = new BuilderFieldData();
JCVariableDecl raw = (JCVariableDecl) param.get();
bfd.name = raw.name;
bfd.rawName = raw.name;
bfd.type = raw.vartype;
bfd.singularData = getSingularData(param);
bfd.originalFieldNode = param;
addObtainVia(bfd, param);
builderFields.add(bfd);
}
}
JavacNode builderType = findInnerClass(tdParent, builderClassName);
if (builderType == null) {
builderType = makeBuilderClass(isStatic, annotationNode, tdParent, builderClassName, typeParams, ast);
} else {
JCClassDecl builderTypeDeclaration = (JCClassDecl) builderType.get();
if (isStatic && !builderTypeDeclaration.getModifiers().getFlags().contains(Modifier.STATIC)) {
annotationNode.addError("Existing Builder must be a static inner class.");
return;
} else if (!isStatic && builderTypeDeclaration.getModifiers().getFlags().contains(Modifier.STATIC)) {
annotationNode.addError("Existing Builder must be a non-static inner class.");
return;
}
sanityCheckForMethodGeneratingAnnotationsOnBuilderClass(builderType, annotationNode);
/* generate errors for @Singular BFDs that have one already defined node. */ {
for (BuilderFieldData bfd : builderFields) {
SingularData sd = bfd.singularData;
if (sd == null) continue;
JavacSingularizer singularizer = sd.getSingularizer();
if (singularizer == null) continue;
if (singularizer.checkForAlreadyExistingNodesAndGenerateError(builderType, sd)) {
bfd.singularData = null;
}
}
}
}
for (BuilderFieldData bfd : builderFields) {
if (bfd.singularData != null && bfd.singularData.getSingularizer() != null) {
if (bfd.singularData.getSingularizer().requiresCleaning()) {
addCleaning = true;
break;
}
}
if (bfd.obtainVia != null) {
if (bfd.obtainVia.field().isEmpty() == bfd.obtainVia.method().isEmpty()) {
bfd.obtainViaNode.addError("The syntax is either @ObtainVia(field = \"fieldName\") or @ObtainVia(method = \"methodName\").");
return;
}
if (bfd.obtainVia.method().isEmpty() && bfd.obtainVia.isStatic()) {
bfd.obtainViaNode.addError("@ObtainVia(isStatic = true) is not valid unless 'method' has been set.");
return;
}
}
}
generateBuilderFields(builderType, builderFields, ast);
if (addCleaning) {
JavacTreeMaker maker = builderType.getTreeMaker();
JCVariableDecl uncleanField = maker.VarDef(maker.Modifiers(Flags.PRIVATE), builderType.toName("$lombokUnclean"), maker.TypeIdent(CTC_BOOLEAN), null);
injectFieldAndMarkGenerated(builderType, uncleanField);
}
if (constructorExists(builderType) == MemberExistsResult.NOT_EXISTS) {
JCMethodDecl cd = HandleConstructor.createConstructor(AccessLevel.PACKAGE, List.<JCAnnotation>nil(), builderType, List.<JavacNode>nil(), false, annotationNode);
if (cd != null) injectMethod(builderType, cd);
}
for (BuilderFieldData bfd : builderFields) {
makeSetterMethodsForBuilder(builderType, bfd, annotationNode, fluent, chain);
}
if (methodExists(buildMethodName, builderType, -1) == MemberExistsResult.NOT_EXISTS) {
JCMethodDecl md = generateBuildMethod(tdParent, isStatic, buildMethodName, nameOfBuilderMethod, returnType, builderFields, builderType, thrownExceptions, ast, addCleaning);
if (md != null) injectMethod(builderType, md);
}
if (methodExists("toString", builderType, 0) == MemberExistsResult.NOT_EXISTS) {
java.util.List<Included<JavacNode, ToString.Include>> fieldNodes = new ArrayList<Included<JavacNode, ToString.Include>>();
for (BuilderFieldData bfd : builderFields) {
for (JavacNode f : bfd.createdFields) {
fieldNodes.add(new Included<JavacNode, ToString.Include>(f, null, true));
}
}
JCMethodDecl md = HandleToString.createToString(builderType, fieldNodes, true, false, FieldAccess.ALWAYS_FIELD, ast);
if (md != null) injectMethod(builderType, md);
}
if (addCleaning) injectMethod(builderType, generateCleanMethod(builderFields, builderType, ast));
if (methodExists(builderMethodName, tdParent, -1) == MemberExistsResult.NOT_EXISTS) {
JCMethodDecl md = generateBuilderMethod(isStatic, builderMethodName, builderClassName, annotationNode, tdParent, typeParams);
recursiveSetGeneratedBy(md, ast, annotationNode.getContext());
if (md != null) injectMethod(tdParent, md);
}
if (toBuilder) {
switch (methodExists(toBuilderMethodName, tdParent, 0)) {
case EXISTS_BY_USER:
annotationNode.addWarning("Not generating toBuilder() as it already exists.");
return;
case NOT_EXISTS:
List<JCTypeParameter> tps = typeParams;
if (typeArgsForToBuilder != null) {
ListBuffer<JCTypeParameter> lb = new ListBuffer<JCTypeParameter>();
JavacTreeMaker maker = tdParent.getTreeMaker();
for (Name n : typeArgsForToBuilder) {
lb.append(maker.TypeParameter(n, List.<JCExpression>nil()));
}
tps = lb.toList();
}
JCMethodDecl md = generateToBuilderMethod(toBuilderMethodName, builderClassName, tdParent, tps, builderFields, fluent, ast);
if (md != null) injectMethod(tdParent, md);
}
}
recursiveSetGeneratedBy(builderType.get(), ast, annotationNode.getContext());
}
private static String unpack(JCExpression expr) {
StringBuilder sb = new StringBuilder();
unpack(sb, expr);
return sb.toString();
}
private static void unpack(StringBuilder sb, JCExpression expr) {
if (expr instanceof JCIdent) {
sb.append(((JCIdent) expr).name.toString());
return;
}
if (expr instanceof JCFieldAccess) {
JCFieldAccess jcfa = (JCFieldAccess) expr;
unpack(sb, jcfa.selected);
sb.append(".").append(jcfa.name.toString());
return;
}
if (expr instanceof JCTypeApply) {
sb.setLength(0);
sb.append("ERR:");
sb.append("@Builder(toBuilder=true) is not supported if returning a type with generics applied to an intermediate.");
sb.append("__ERR__");
return;
}
sb.setLength(0);
sb.append("ERR:");
sb.append("Expected a type of some sort, not a " + expr.getClass().getName());
sb.append("__ERR__");
}
private JCMethodDecl generateToBuilderMethod(String toBuilderMethodName, String builderClassName, JavacNode type, List<JCTypeParameter> typeParams, java.util.List<BuilderFieldData> builderFields, boolean fluent, JCAnnotation ast) {
// return new ThingieBuilder<A, B>().setA(this.a).setB(this.b);
JavacTreeMaker maker = type.getTreeMaker();
ListBuffer<JCExpression> typeArgs = new ListBuffer<JCExpression>();
for (JCTypeParameter typeParam : typeParams) {
typeArgs.append(maker.Ident(typeParam.name));
}
JCExpression call = maker.NewClass(null, List.<JCExpression>nil(), namePlusTypeParamsToTypeReference(maker, type.toName(builderClassName), typeParams), List.<JCExpression>nil(), null);
JCExpression invoke = call;
for (BuilderFieldData bfd : builderFields) {
Name setterName = fluent ? bfd.name : type.toName(HandlerUtil.buildAccessorName("set", bfd.name.toString()));
JCExpression arg;
if (bfd.obtainVia == null || !bfd.obtainVia.field().isEmpty()) {
arg = maker.Select(maker.Ident(type.toName("this")), bfd.obtainVia == null ? bfd.rawName : type.toName(bfd.obtainVia.field()));
} else {
if (bfd.obtainVia.isStatic()) {
JCExpression c = maker.Select(maker.Ident(type.toName(type.getName())), type.toName(bfd.obtainVia.method()));
arg = maker.Apply(List.<JCExpression>nil(), c, List.<JCExpression>of(maker.Ident(type.toName("this"))));
} else {
JCExpression c = maker.Select(maker.Ident(type.toName("this")), type.toName(bfd.obtainVia.method()));
arg = maker.Apply(List.<JCExpression>nil(), c, List.<JCExpression>nil());
}
}
invoke = maker.Apply(List.<JCExpression>nil(), maker.Select(invoke, setterName), List.of(arg));
}
JCStatement statement = maker.Return(invoke);
JCBlock body = maker.Block(0, List.<JCStatement>of(statement));
return maker.MethodDef(maker.Modifiers(Flags.PUBLIC), type.toName(toBuilderMethodName), namePlusTypeParamsToTypeReference(maker, type.toName(builderClassName), typeParams), List.<JCTypeParameter>nil(), List.<JCVariableDecl>nil(), List.<JCExpression>nil(), body, null);
}
private JCMethodDecl generateCleanMethod(java.util.List<BuilderFieldData> builderFields, JavacNode type, JCTree source) {
JavacTreeMaker maker = type.getTreeMaker();
ListBuffer<JCStatement> statements = new ListBuffer<JCStatement>();
for (BuilderFieldData bfd : builderFields) {
if (bfd.singularData != null && bfd.singularData.getSingularizer() != null) {
bfd.singularData.getSingularizer().appendCleaningCode(bfd.singularData, type, source, statements);
}
}
statements.append(maker.Exec(maker.Assign(maker.Select(maker.Ident(type.toName("this")), type.toName("$lombokUnclean")), maker.Literal(CTC_BOOLEAN, 0))));
JCBlock body = maker.Block(0, statements.toList());
return maker.MethodDef(maker.Modifiers(Flags.PUBLIC), type.toName("$lombokClean"), maker.Type(Javac.createVoidType(type.getSymbolTable(), CTC_VOID)), List.<JCTypeParameter>nil(), List.<JCVariableDecl>nil(), List.<JCExpression>nil(), body, null);
/*
* if (shouldReturnThis) {
methodType = cloneSelfType(field);
}
if (methodType == null) {
//WARNING: Do not use field.getSymbolTable().voidType - that field has gone through non-backwards compatible API changes within javac1.6.
methodType = treeMaker.Type(Javac.createVoidType(treeMaker, CTC_VOID));
shouldReturnThis = false;
}
*/
}
private JCMethodDecl generateBuildMethod(JavacNode tdParent, boolean isStatic, String buildName, Name builderName, JCExpression returnType, java.util.List<BuilderFieldData> builderFields, JavacNode type, List<JCExpression> thrownExceptions, JCTree source, boolean addCleaning) {
JavacTreeMaker maker = type.getTreeMaker();
JCExpression call;
ListBuffer<JCStatement> statements = new ListBuffer<JCStatement>();
if (addCleaning) {
JCExpression notClean = maker.Unary(CTC_NOT, maker.Select(maker.Ident(type.toName("this")), type.toName("$lombokUnclean")));
JCStatement invokeClean = maker.Exec(maker.Apply(List.<JCExpression>nil(), maker.Ident(type.toName("$lombokClean")), List.<JCExpression>nil()));
JCIf ifUnclean = maker.If(notClean, invokeClean, null);
statements.append(ifUnclean);
}
for (BuilderFieldData bfd : builderFields) {
if (bfd.singularData != null && bfd.singularData.getSingularizer() != null) {
bfd.singularData.getSingularizer().appendBuildCode(bfd.singularData, type, source, statements, bfd.name, "this");
}
}
ListBuffer<JCExpression> args = new ListBuffer<JCExpression>();
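// For @Builder.Default fields, a local copy of the builder field is made and, if the $set flag is still
// false, replaced with the value from the generated $default$ provider, roughly (illustrative names):
//   long foo = this.foo; if (!foo$set) foo = Outer.$default$foo();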
for (BuilderFieldData bfd : builderFields) {
if (bfd.nameOfSetFlag != null) {
statements.append(maker.VarDef(maker.Modifiers(0L), bfd.name, cloneType(maker, bfd.type, source, tdParent.getContext()), maker.Select(maker.Ident(type.toName("this")), bfd.name)));
statements.append(maker.If(maker.Unary(CTC_NOT, maker.Ident(bfd.nameOfSetFlag)), maker.Exec(maker.Assign(maker.Ident(bfd.name),maker.Apply(typeParameterNames(maker, ((JCClassDecl) tdParent.get()).typarams), maker.Select(maker.Ident(((JCClassDecl) tdParent.get()).name), bfd.nameOfDefaultProvider), List.<JCExpression>nil()))), null));
}
args.append(maker.Ident(bfd.name));
}
if (addCleaning) {
statements.append(maker.Exec(maker.Assign(maker.Select(maker.Ident(type.toName("this")), type.toName("$lombokUnclean")), maker.Literal(CTC_BOOLEAN, 1))));
}
if (builderName == null) {
call = maker.NewClass(null, List.<JCExpression>nil(), returnType, args.toList(), null);
statements.append(maker.Return(call));
} else {
ListBuffer<JCExpression> typeParams = new ListBuffer<JCExpression>();
for (JCTypeParameter tp : ((JCClassDecl) type.get()).typarams) {
typeParams.append(maker.Ident(tp.name));
}
JCExpression callee = maker.Ident(((JCClassDecl) type.up().get()).name);
if (!isStatic) callee = maker.Select(callee, type.up().toName("this"));
JCExpression fn = maker.Select(callee, builderName);
call = maker.Apply(typeParams.toList(), fn, args.toList());
if (returnType instanceof JCPrimitiveTypeTree && CTC_VOID.equals(typeTag(returnType))) {
statements.append(maker.Exec(call));
} else {
statements.append(maker.Return(call));
}
}
JCBlock body = maker.Block(0, statements.toList());
return maker.MethodDef(maker.Modifiers(Flags.PUBLIC), type.toName(buildName), returnType, List.<JCTypeParameter>nil(), List.<JCVariableDecl>nil(), thrownExceptions, body, null);
}
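// Generates the private static $default$<field>() provider: the field's original initializer becomes the
// method body's return expression and is cleared from the field declaration itself.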
public JCMethodDecl generateDefaultProvider(Name methodName, JavacNode fieldNode, List<JCTypeParameter> params) {
JavacTreeMaker maker = fieldNode.getTreeMaker();
JCVariableDecl field = (JCVariableDecl) fieldNode.get();
JCStatement statement = maker.Return(field.init);
field.init = null;
JCBlock body = maker.Block(0, List.<JCStatement>of(statement));
int modifiers = Flags.PRIVATE | Flags.STATIC;
return maker.MethodDef(maker.Modifiers(modifiers), methodName, cloneType(maker, field.vartype, field, fieldNode.getContext()), copyTypeParams(fieldNode, params), List.<JCVariableDecl>nil(), List.<JCExpression>nil(), body, null);
}
public JCMethodDecl generateBuilderMethod(boolean isStatic, String builderMethodName, String builderClassName, JavacNode source, JavacNode type, List<JCTypeParameter> typeParams) {
JavacTreeMaker maker = type.getTreeMaker();
ListBuffer<JCExpression> typeArgs = new ListBuffer<JCExpression>();
for (JCTypeParameter typeParam : typeParams) {
typeArgs.append(maker.Ident(typeParam.name));
}
JCExpression call = maker.NewClass(null, List.<JCExpression>nil(), namePlusTypeParamsToTypeReference(maker, type.toName(builderClassName), typeParams), List.<JCExpression>nil(), null);
JCStatement statement = maker.Return(call);
JCBlock body = maker.Block(0, List.<JCStatement>of(statement));
int modifiers = Flags.PUBLIC;
if (isStatic) modifiers |= Flags.STATIC;
return maker.MethodDef(maker.Modifiers(modifiers), type.toName(builderMethodName), namePlusTypeParamsToTypeReference(maker, type.toName(builderClassName), typeParams), copyTypeParams(source, typeParams), List.<JCVariableDecl>nil(), List.<JCExpression>nil(), body, null);
}
public void generateBuilderFields(JavacNode builderType, java.util.List<BuilderFieldData> builderFields, JCTree source) {
int len = builderFields.size();
java.util.List<JavacNode> existing = new ArrayList<JavacNode>();
for (JavacNode child : builderType.down()) {
if (child.getKind() == Kind.FIELD) existing.add(child);
}
for (int i = len - 1; i >= 0; i--) {
BuilderFieldData bfd = builderFields.get(i);
if (bfd.singularData != null && bfd.singularData.getSingularizer() != null) {
bfd.createdFields.addAll(bfd.singularData.getSingularizer().generateFields(bfd.singularData, builderType, source));
} else {
JavacNode field = null, setFlag = null;
for (JavacNode exists : existing) {
Name n = ((JCVariableDecl) exists.get()).name;
if (n.equals(bfd.name)) field = exists;
if (n.equals(bfd.nameOfSetFlag)) setFlag = exists;
}
JavacTreeMaker maker = builderType.getTreeMaker();
if (field == null) {
JCModifiers mods = maker.Modifiers(Flags.PRIVATE);
JCVariableDecl newField = maker.VarDef(mods, bfd.name, cloneType(maker, bfd.type, source, builderType.getContext()), null);
field = injectFieldAndMarkGenerated(builderType, newField);
}
if (setFlag == null && bfd.nameOfSetFlag != null) {
JCModifiers mods = maker.Modifiers(Flags.PRIVATE);
JCVariableDecl newField = maker.VarDef(mods, bfd.nameOfSetFlag, maker.TypeIdent(CTC_BOOLEAN), null);
injectFieldAndMarkGenerated(builderType, newField);
}
bfd.createdFields.add(field);
}
}
}
public void makeSetterMethodsForBuilder(JavacNode builderType, BuilderFieldData fieldNode, JavacNode source, boolean fluent, boolean chain) {
boolean deprecate = isFieldDeprecated(fieldNode.originalFieldNode);
if (fieldNode.singularData == null || fieldNode.singularData.getSingularizer() == null) {
makeSimpleSetterMethodForBuilder(builderType, deprecate, fieldNode.createdFields.get(0), fieldNode.nameOfSetFlag, source, fluent, chain);
} else {
fieldNode.singularData.getSingularizer().generateMethods(fieldNode.singularData, deprecate, builderType, source.get(), fluent, chain);
}
}
private void makeSimpleSetterMethodForBuilder(JavacNode builderType, boolean deprecate, JavacNode fieldNode, Name nameOfSetFlag, JavacNode source, boolean fluent, boolean chain) {
Name fieldName = ((JCVariableDecl) fieldNode.get()).name;
for (JavacNode child : builderType.down()) {
if (child.getKind() != Kind.METHOD) continue;
JCMethodDecl methodDecl = (JCMethodDecl) child.get();
Name existingName = methodDecl.name;
if (existingName.equals(fieldName) && !isTolerate(fieldNode, methodDecl)) return;
}
String setterName = fluent ? fieldNode.getName() : HandlerUtil.buildAccessorName("set", fieldNode.getName());
JavacTreeMaker maker = fieldNode.getTreeMaker();
JCMethodDecl newMethod = HandleSetter.createSetter(Flags.PUBLIC, deprecate, fieldNode, maker, setterName, nameOfSetFlag, chain, source, List.<JCAnnotation>nil(), List.<JCAnnotation>nil());
injectMethod(builderType, newMethod);
}
public JavacNode findInnerClass(JavacNode parent, String name) {
for (JavacNode child : parent.down()) {
if (child.getKind() != Kind.TYPE) continue;
JCClassDecl td = (JCClassDecl) child.get();
if (td.name.contentEquals(name)) return child;
}
return null;
}
public JavacNode makeBuilderClass(boolean isStatic, JavacNode source, JavacNode tdParent, String builderClassName, List<JCTypeParameter> typeParams, JCAnnotation ast) {
JavacTreeMaker maker = tdParent.getTreeMaker();
int modifiers = Flags.PUBLIC;
if (isStatic) modifiers |= Flags.STATIC;
JCModifiers mods = maker.Modifiers(modifiers);
JCClassDecl builder = maker.ClassDef(mods, tdParent.toName(builderClassName), copyTypeParams(source, typeParams), null, List.<JCExpression>nil(), List.<JCTree>nil());
return injectType(tdParent, builder);
}
private void addObtainVia(BuilderFieldData bfd, JavacNode node) {
for (JavacNode child : node.down()) {
if (!annotationTypeMatches(ObtainVia.class, child)) continue;
AnnotationValues<ObtainVia> ann = createAnnotation(ObtainVia.class, child);
bfd.obtainVia = ann.getInstance();
bfd.obtainViaNode = child;
deleteAnnotationIfNeccessary(child, ObtainVia.class);
return;
}
}
/**
* Returns the explicitly requested singular annotation on this node (field
* or parameter), or null if there's no {@code @Singular} annotation on it.
*
* @param node The node (field or method param) to inspect for its name and potential {@code @Singular} annotation.
*/
private SingularData getSingularData(JavacNode node) {
for (JavacNode child : node.down()) {
if (!annotationTypeMatches(Singular.class, child)) continue;
Name pluralName = node.getKind() == Kind.FIELD ? removePrefixFromField(node) : ((JCVariableDecl) node.get()).name;
AnnotationValues<Singular> ann = createAnnotation(Singular.class, child);
deleteAnnotationIfNeccessary(child, Singular.class);
String explicitSingular = ann.getInstance().value();
if (explicitSingular.isEmpty()) {
if (Boolean.FALSE.equals(node.getAst().readConfiguration(ConfigurationKeys.SINGULAR_AUTO))) {
node.addError("The singular must be specified explicitly (e.g. @Singular(\"task\")) because auto singularization is disabled.");
explicitSingular = pluralName.toString();
} else {
explicitSingular = autoSingularize(pluralName.toString());
if (explicitSingular == null) {
node.addError("Can't singularize this name; please specify the singular explicitly (i.e. @Singular(\"sheep\"))");
explicitSingular = pluralName.toString();
}
}
}
Name singularName = node.toName(explicitSingular);
JCExpression type = null;
if (node.get() instanceof JCVariableDecl) {
type = ((JCVariableDecl) node.get()).vartype;
}
String name = null;
List<JCExpression> typeArgs = List.nil();
if (type instanceof JCTypeApply) {
typeArgs = ((JCTypeApply) type).arguments;
type = ((JCTypeApply) type).clazz;
}
name = type.toString();
String targetFqn = JavacSingularsRecipes.get().toQualified(name);
JavacSingularizer singularizer = JavacSingularsRecipes.get().getSingularizer(targetFqn);
if (singularizer == null) {
node.addError("Lombok does not know how to create the singular-form builder methods for type '" + name + "'; they won't be generated.");
return null;
}
return new SingularData(child, singularName, pluralName, typeArgs, targetFqn, singularizer);
}
return null;
}
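/*
 * Illustrative usage sketch (hypothetical user code, not generated by this handler): the singular
 * data collected above corresponds to a declaration along the lines of
 *
 *   @Builder class Report {
 *       @Singular private java.util.List<String> entries;
 *   }
 *
 * for which the builder gains an entry(..) adder, an entries(..) bulk adder and a clearEntries()
 * method, with "entry" derived by auto-singularization unless an explicit value such as
 * @Singular("item") is supplied.
 */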
}
|
package org.activiti.spring.conformance.set0;
import static org.activiti.spring.conformance.set0.Set0RuntimeTestConfiguration.collectedEvents;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.catchThrowable;
import org.activiti.api.model.shared.event.RuntimeEvent;
import org.activiti.api.process.model.ProcessInstance;
import org.activiti.api.process.model.builders.ProcessPayloadBuilder;
import org.activiti.api.process.model.events.BPMNActivityEvent;
import org.activiti.api.process.model.events.BPMNSequenceFlowTakenEvent;
import org.activiti.api.process.model.events.ProcessRuntimeEvent;
import org.activiti.api.process.runtime.ProcessRuntime;
import org.activiti.api.runtime.shared.NotFoundException;
import org.activiti.spring.conformance.util.security.SecurityUtil;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE)
public class ProcessInstanceOperationsTest {
private final String processKey = "usertaskwi-4d5c4312-e8fc-4766-a727-b55a4d3255e9";
@Autowired
private ProcessRuntime processRuntime;
@Autowired
private SecurityUtil securityUtil;
@Before
public void cleanUp() {
collectedEvents.clear();
}
@Test
public void shouldBeAbleToStartAndDeleteProcessInstance() {
securityUtil.logInAs("user1");
//when
ProcessInstance processInstance = processRuntime.start(ProcessPayloadBuilder
.start()
.withProcessDefinitionKey(processKey)
.withBusinessKey("my-business-key")
.withProcessInstanceName("my-process-instance-name")
.build());
//then
assertThat(processInstance).isNotNull();
assertThat(processInstance.getStatus()).isEqualTo(ProcessInstance.ProcessInstanceStatus.RUNNING);
assertThat(processInstance.getBusinessKey()).isEqualTo("my-business-key");
assertThat(processInstance.getName()).isEqualTo("my-process-instance-name");
assertThat(collectedEvents).extracting(RuntimeEvent::getEventType).containsExactly(
ProcessRuntimeEvent.ProcessEvents.PROCESS_CREATED,
ProcessRuntimeEvent.ProcessEvents.PROCESS_STARTED,
BPMNActivityEvent.ActivityEvents.ACTIVITY_STARTED,
BPMNActivityEvent.ActivityEvents.ACTIVITY_COMPLETED,
BPMNSequenceFlowTakenEvent.SequenceFlowEvents.SEQUENCE_FLOW_TAKEN,
BPMNActivityEvent.ActivityEvents.ACTIVITY_STARTED);
collectedEvents.clear();
ProcessInstance deletedProcessInstance = processRuntime.delete(ProcessPayloadBuilder.delete(processInstance.getId()));
assertThat(deletedProcessInstance.getStatus()).isEqualTo(ProcessInstance.ProcessInstanceStatus.DELETED);
assertThat(collectedEvents).extracting(RuntimeEvent::getEventType).containsExactly(
BPMNActivityEvent.ActivityEvents.ACTIVITY_CANCELLED,
ProcessRuntimeEvent.ProcessEvents.PROCESS_CANCELLED);
// No Process Instance should be found
Throwable throwable = catchThrowable(() -> processRuntime.processInstance(deletedProcessInstance.getId()));
assertThat(throwable)
.isInstanceOf(NotFoundException.class);
}
@Test
public void shouldBeAbleToStartSuspendAndResumeProcessInstance() {
securityUtil.logInAs("user1");
//when
ProcessInstance processInstance = processRuntime.start(ProcessPayloadBuilder
.start()
.withProcessDefinitionKey(processKey)
.withBusinessKey("my-business-key")
.withProcessInstanceName("my-process-instance-name")
.build());
//then
assertThat(processInstance).isNotNull();
assertThat(processInstance.getStatus()).isEqualTo(ProcessInstance.ProcessInstanceStatus.RUNNING);
assertThat(processInstance.getBusinessKey()).isEqualTo("my-business-key");
assertThat(processInstance.getName()).isEqualTo("my-process-instance-name");
assertThat(collectedEvents).extracting(RuntimeEvent::getEventType).containsExactly(
ProcessRuntimeEvent.ProcessEvents.PROCESS_CREATED,
ProcessRuntimeEvent.ProcessEvents.PROCESS_STARTED,
BPMNActivityEvent.ActivityEvents.ACTIVITY_STARTED,
BPMNActivityEvent.ActivityEvents.ACTIVITY_COMPLETED,
BPMNSequenceFlowTakenEvent.SequenceFlowEvents.SEQUENCE_FLOW_TAKEN,
BPMNActivityEvent.ActivityEvents.ACTIVITY_STARTED);
collectedEvents.clear();
ProcessInstance suspendedProcessInstance = processRuntime.suspend(ProcessPayloadBuilder.suspend(processInstance.getId()));
assertThat(suspendedProcessInstance.getStatus()).isEqualTo(ProcessInstance.ProcessInstanceStatus.SUSPENDED);
assertThat(collectedEvents).extracting(RuntimeEvent::getEventType).containsExactly(ProcessRuntimeEvent.ProcessEvents.PROCESS_SUSPENDED);
collectedEvents.clear();
ProcessInstance resumedProcessInstance = processRuntime.resume(ProcessPayloadBuilder.resume(suspendedProcessInstance.getId()));
assertThat(resumedProcessInstance.getStatus()).isEqualTo(ProcessInstance.ProcessInstanceStatus.RUNNING);
assertThat(collectedEvents).extracting(RuntimeEvent::getEventType).containsExactly(ProcessRuntimeEvent.ProcessEvents.PROCESS_RESUMED);
}
}
|
package pl.temomuko.autostoprace.domain.model;
import android.net.Uri;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.google.gson.annotations.SerializedName;
import java.util.Date;
public class LocationRecord implements Comparable<LocationRecord>, Parcelable {
private int mId;
private double mLatitude;
private double mLongitude;
private String mMessage;
private String mAddress;
private String mCountry;
private String mCountryCode;
private Date mServerReceiptDate;
@Nullable
private String mImageLocation;
public LocationRecord() {
}
public LocationRecord(double latitude, double longitude, String message, String address,
String country, String countryCode, @Nullable Uri imageLocation) {
mLatitude = latitude;
mLongitude = longitude;
mMessage = message;
mAddress = address;
mCountry = country;
mCountryCode = countryCode;
mImageLocation = imageLocation == null ? null : imageLocation.toString();
}
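/*
 * Ordering: newest records first. compareTo sorts in descending order of server receipt date,
 * breaking ties by descending id; records whose receipt date is still null sort before dated ones.
 */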
@Override
public int compareTo(@NonNull LocationRecord another) {
int dateCompareResult = getDateCompareResult(another);
if (dateCompareResult == 0) {
return Integer.valueOf(another.getId()).compareTo(mId);
} else {
return dateCompareResult;
}
}
@SuppressWarnings("RedundantIfStatement")
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
LocationRecord that = (LocationRecord) o;
if (mId != that.mId) return false;
if (Double.compare(that.mLatitude, mLatitude) != 0) return false;
if (Double.compare(that.mLongitude, mLongitude) != 0) return false;
if (!mMessage.equals(that.mMessage)) return false;
if (mAddress != null ? !mAddress.equals(that.mAddress) : that.mAddress != null)
return false;
if (mCountry != null ? !mCountry.equals(that.mCountry) : that.mCountry != null)
return false;
if (mCountryCode != null ? !mCountryCode.equals(that.mCountryCode) : that.mCountryCode != null)
return false;
if (mServerReceiptDate != null ? !mServerReceiptDate.equals(that.mServerReceiptDate) : that.mServerReceiptDate != null)
return false;
return true;
}
@Override
public int hashCode() {
int result;
long temp;
result = mId;
temp = Double.doubleToLongBits(mLatitude);
result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(mLongitude);
result = 31 * result + (int) (temp ^ (temp >>> 32));
result = 31 * result + mMessage.hashCode();
result = 31 * result + (mAddress != null ? mAddress.hashCode() : 0);
result = 31 * result + (mCountry != null ? mCountry.hashCode() : 0);
result = 31 * result + (mCountryCode != null ? mCountryCode.hashCode() : 0);
result = 31 * result + (mServerReceiptDate != null ? mServerReceiptDate.hashCode() : 0);
return result;
}
private int getDateCompareResult(@NonNull LocationRecord another) {
int dateCompareResult;
if (mServerReceiptDate == null && another.getServerReceiptDate() == null) {
dateCompareResult = 0;
} else if (mServerReceiptDate == null) {
dateCompareResult = -1;
} else if (another.getServerReceiptDate() == null) {
dateCompareResult = 1;
} else {
dateCompareResult = another.getServerReceiptDate().compareTo(mServerReceiptDate);
}
return dateCompareResult;
}
public int getId() {
return mId;
}
public double getLatitude() {
return mLatitude;
}
public double getLongitude() {
return mLongitude;
}
public String getMessage() {
return mMessage;
}
@Nullable
public String getAddress() {
return mAddress;
}
public String getCountry() {
return mCountry;
}
public String getCountryCode() {
return mCountryCode;
}
public Date getServerReceiptDate() {
return mServerReceiptDate;
}
public Uri getImageUri() {
return mImageLocation == null ? null : Uri.parse(mImageLocation);
}
@Nullable
public String getImageLocationString() {
return mImageLocation;
}
public void setId(int id) {
mId = id;
}
public void setLatitude(double latitude) {
mLatitude = latitude;
}
public void setLongitude(double longitude) {
mLongitude = longitude;
}
public void setMessage(String message) {
mMessage = message;
}
public void setAddress(String address) {
mAddress = address;
}
public void setCountry(String country) {
mCountry = country;
}
public void setCountryCode(String countryCode) {
mCountryCode = countryCode;
}
public void setServerReceiptDate(Date serverReceiptDate) {
mServerReceiptDate = serverReceiptDate;
}
public void setImageLocationString(@Nullable String imageLocationString) {
mImageLocation = imageLocationString;
}
/* Parcel */
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(mId);
dest.writeDouble(mLatitude);
dest.writeDouble(mLongitude);
dest.writeString(mMessage);
dest.writeString(mAddress);
dest.writeString(mCountry);
dest.writeString(mCountryCode);
dest.writeLong(mServerReceiptDate != null ? mServerReceiptDate.getTime() : -1L);
dest.writeString(mImageLocation);
}
protected LocationRecord(Parcel in) {
this.mId = in.readInt();
this.mLatitude = in.readDouble();
this.mLongitude = in.readDouble();
this.mMessage = in.readString();
this.mAddress = in.readString();
this.mCountry = in.readString();
this.mCountryCode = in.readString();
long tmpServerReceiptDate = in.readLong();
this.mServerReceiptDate = tmpServerReceiptDate == -1 ? null : new Date(tmpServerReceiptDate);
this.mImageLocation = in.readString();
}
public static final Creator<LocationRecord> CREATOR = new Creator<LocationRecord>() {
@Override
public LocationRecord createFromParcel(Parcel source) {
return new LocationRecord(source);
}
@Override
public LocationRecord[] newArray(int size) {
return new LocationRecord[size];
}
};
}
|
package org.wso2.carbon.apimgt.rest.api.publisher.impl;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.apimgt.core.api.APIPublisher;
import org.wso2.carbon.apimgt.core.exception.APIManagementException;
import org.wso2.carbon.apimgt.core.exception.ExceptionCodes;
import org.wso2.carbon.apimgt.core.impl.APIPublisherImpl;
import org.wso2.carbon.apimgt.core.models.DocumentContent;
import org.wso2.carbon.apimgt.core.models.DocumentInfo;
import org.wso2.carbon.apimgt.rest.api.publisher.common.SampleTestObjectCreator;
import org.wso2.carbon.apimgt.rest.api.publisher.utils.RestAPIPublisherUtil;
import org.wso2.carbon.messaging.CarbonMessage;
import org.wso2.msf4j.Request;
import javax.ws.rs.core.Response;
import java.net.URISyntaxException;
import java.util.UUID;
import static junit.framework.TestCase.assertEquals;
import static org.junit.Assert.assertTrue;
@RunWith(PowerMockRunner.class)
@PrepareForTest(RestAPIPublisherUtil.class)
public class ApisApiServiceImplTestCase {
private static final Logger log = LoggerFactory.getLogger(ApisApiServiceImplTestCase.class);
private static final String USER = "admin";
@Test
public void testDeleteApi () throws Exception {
printTestMethodName();
ApisApiServiceImpl apisApiService = new ApisApiServiceImpl();
APIPublisher apiPublisher = Mockito.mock(APIPublisherImpl.class);
PowerMockito.mockStatic(RestAPIPublisherUtil.class);
PowerMockito.when(RestAPIPublisherUtil.getApiPublisher(USER)).
thenReturn(apiPublisher);
String api1Id = UUID.randomUUID().toString();
Mockito.doNothing().doThrow(new IllegalArgumentException()).when(apiPublisher).deleteAPI(api1Id);
Response response = apisApiService.apisApiIdDelete(api1Id, null, null, getRequest());
assertEquals(response.getStatus(), 200);
}
@Test
public void testDeleteApiErrorCase () throws Exception {
printTestMethodName();
ApisApiServiceImpl apisApiService = new ApisApiServiceImpl();
APIPublisher apiPublisher = Mockito.mock(APIPublisherImpl.class);
PowerMockito.mockStatic(RestAPIPublisherUtil.class);
PowerMockito.when(RestAPIPublisherUtil.getApiPublisher(USER)).
thenReturn(apiPublisher);
String api1Id = UUID.randomUUID().toString();
Mockito.doThrow(new APIManagementException("Error Occurred", ExceptionCodes.API_NOT_FOUND))
.when(apiPublisher).deleteAPI(api1Id);
Response response = apisApiService.apisApiIdDelete(api1Id, null, null, getRequest());
assertEquals(response.getStatus(), 404);
assertTrue(response.getEntity().toString().contains("API not found"));
}
@Test
public void testApisApiIdDocumentsDocumentIdContentGetInline () throws Exception {
String inlineContent = "INLINE CONTENT";
printTestMethodName();
ApisApiServiceImpl apisApiService = new ApisApiServiceImpl();
APIPublisher apiPublisher = Mockito.mock(APIPublisherImpl.class);
PowerMockito.mockStatic(RestAPIPublisherUtil.class);
PowerMockito.when(RestAPIPublisherUtil.getApiPublisher(USER)).
thenReturn(apiPublisher);
String api1Id = UUID.randomUUID().toString();
String documentId = UUID.randomUUID().toString();
DocumentInfo documentInfo = SampleTestObjectCreator.createDefaultDocumentationInfo().build();
DocumentContent documentContent = DocumentContent.newDocumentContent().inlineContent(inlineContent)
.documentInfo(documentInfo).build();
Mockito.doReturn(documentContent).doThrow(new IllegalArgumentException()).when(apiPublisher).
getDocumentationContent(documentId);
Response response = apisApiService.
apisApiIdDocumentsDocumentIdContentGet(api1Id, documentId, null, null, null, getRequest());
assertEquals(response.getStatus(), 200);
assertEquals(inlineContent, response.getEntity().toString());
}
@Test
public void testApisApiIdDocumentsDocumentIdContentGetFile() throws Exception {
String fileName = "mytext.txt";
printTestMethodName();
ApisApiServiceImpl apisApiService = new ApisApiServiceImpl();
APIPublisher apiPublisher = Mockito.mock(APIPublisherImpl.class);
PowerMockito.mockStatic(RestAPIPublisherUtil.class);
PowerMockito.when(RestAPIPublisherUtil.getApiPublisher(USER)).
thenReturn(apiPublisher);
String api1Id = UUID.randomUUID().toString();
String documentId = UUID.randomUUID().toString();
DocumentInfo documentInfo = SampleTestObjectCreator.createDefaultDocumentationInfo()
.sourceType(DocumentInfo.SourceType.FILE).fileName(fileName).build();
DocumentContent documentContent = DocumentContent.newDocumentContent().documentInfo(documentInfo).build();
Mockito.doReturn(documentContent).doThrow(new IllegalArgumentException()).when(apiPublisher).
getDocumentationContent(documentId);
Response response = apisApiService.
apisApiIdDocumentsDocumentIdContentGet(api1Id, documentId, null, null, null, getRequest());
assertEquals(response.getStatus(), 200);
assertTrue(response.getStringHeaders().get("Content-Disposition").toString().contains(fileName));
}
@Test
public void testApisApiIdDocumentsDocumentIdContentGetErrorAPIManagementException() throws Exception {
String fileName = "mytext.txt";
printTestMethodName();
ApisApiServiceImpl apisApiService = new ApisApiServiceImpl();
APIPublisher apiPublisher = Mockito.mock(APIPublisherImpl.class);
PowerMockito.mockStatic(RestAPIPublisherUtil.class);
PowerMockito.when(RestAPIPublisherUtil.getApiPublisher(USER)).
thenReturn(apiPublisher);
String api1Id = UUID.randomUUID().toString();
String documentId = UUID.randomUUID().toString();
Mockito.doThrow(new APIManagementException("Error Occurred",
ExceptionCodes.DOCUMENT_CONTENT_NOT_FOUND)).when(apiPublisher).
getDocumentationContent(documentId);
Response response = apisApiService.
apisApiIdDocumentsDocumentIdContentGet(api1Id, documentId, null, null, null, getRequest());
assertEquals(response.getStatus(), 404);
assertTrue(response.getEntity().toString().contains("Document content not found"));
}
// Sample request to be used by tests
private Request getRequest() throws Exception {
CarbonMessage carbonMessage = Mockito.mock(CarbonMessage.class);
Request request = new Request(carbonMessage);
PowerMockito.whenNew(Request.class).withArguments(carbonMessage).thenReturn(request);
return request;
}
private static void printTestMethodName () {
log.info("
"
}
}
|
package org.opendaylight.controller.forwardingrulesmanager.internal;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import org.eclipse.osgi.framework.console.CommandInterpreter;
import org.eclipse.osgi.framework.console.CommandProvider;
import org.opendaylight.controller.clustering.services.CacheConfigException;
import org.opendaylight.controller.clustering.services.CacheExistException;
import org.opendaylight.controller.clustering.services.ICacheUpdateAware;
import org.opendaylight.controller.clustering.services.IClusterContainerServices;
import org.opendaylight.controller.clustering.services.IClusterServices;
import org.opendaylight.controller.configuration.IConfigurationContainerAware;
import org.opendaylight.controller.connectionmanager.IConnectionManager;
import org.opendaylight.controller.forwardingrulesmanager.FlowConfig;
import org.opendaylight.controller.forwardingrulesmanager.FlowEntry;
import org.opendaylight.controller.forwardingrulesmanager.FlowEntryInstall;
import org.opendaylight.controller.forwardingrulesmanager.IForwardingRulesManager;
import org.opendaylight.controller.forwardingrulesmanager.IForwardingRulesManagerAware;
import org.opendaylight.controller.forwardingrulesmanager.PortGroup;
import org.opendaylight.controller.forwardingrulesmanager.PortGroupChangeListener;
import org.opendaylight.controller.forwardingrulesmanager.PortGroupConfig;
import org.opendaylight.controller.forwardingrulesmanager.PortGroupProvider;
import org.opendaylight.controller.forwardingrulesmanager.implementation.data.FlowEntryDistributionOrder;
import org.opendaylight.controller.sal.action.Action;
import org.opendaylight.controller.sal.action.ActionType;
import org.opendaylight.controller.sal.action.Controller;
import org.opendaylight.controller.sal.action.Flood;
import org.opendaylight.controller.sal.action.Output;
import org.opendaylight.controller.sal.action.PopVlan;
import org.opendaylight.controller.sal.connection.ConnectionLocality;
import org.opendaylight.controller.sal.core.Config;
import org.opendaylight.controller.sal.core.ContainerFlow;
import org.opendaylight.controller.sal.core.IContainer;
import org.opendaylight.controller.sal.core.IContainerLocalListener;
import org.opendaylight.controller.sal.core.Node;
import org.opendaylight.controller.sal.core.NodeConnector;
import org.opendaylight.controller.sal.core.Property;
import org.opendaylight.controller.sal.core.UpdateType;
import org.opendaylight.controller.sal.flowprogrammer.Flow;
import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerListener;
import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerService;
import org.opendaylight.controller.sal.match.Match;
import org.opendaylight.controller.sal.match.MatchType;
import org.opendaylight.controller.sal.utils.EtherTypes;
import org.opendaylight.controller.sal.utils.GlobalConstants;
import org.opendaylight.controller.sal.utils.HexEncode;
import org.opendaylight.controller.sal.utils.IObjectReader;
import org.opendaylight.controller.sal.utils.IPProtocols;
import org.opendaylight.controller.sal.utils.NodeConnectorCreator;
import org.opendaylight.controller.sal.utils.NodeCreator;
import org.opendaylight.controller.sal.utils.ObjectReader;
import org.opendaylight.controller.sal.utils.ObjectWriter;
import org.opendaylight.controller.sal.utils.Status;
import org.opendaylight.controller.sal.utils.StatusCode;
import org.opendaylight.controller.switchmanager.IInventoryListener;
import org.opendaylight.controller.switchmanager.ISwitchManager;
import org.opendaylight.controller.switchmanager.ISwitchManagerAware;
import org.opendaylight.controller.switchmanager.Subnet;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Class that manages forwarding rule installation and removal per container of
* the network. It also maintains the central repository of all the forwarding
* rules installed on the network nodes.
*/
public class ForwardingRulesManager implements
IForwardingRulesManager,
PortGroupChangeListener,
IContainerLocalListener,
ISwitchManagerAware,
IConfigurationContainerAware,
IInventoryListener,
IObjectReader,
ICacheUpdateAware<Object,Object>,
CommandProvider,
IFlowProgrammerListener {
private static final String NODEDOWN = "Node is Down";
private static final String SUCCESS = StatusCode.SUCCESS.toString();
private static final Logger log = LoggerFactory.getLogger(ForwardingRulesManager.class);
private static final String PORTREMOVED = "Port removed";
private static final Logger logsync = LoggerFactory.getLogger("FRMsync");
private String frmFileName;
private String portGroupFileName;
private ConcurrentMap<Integer, FlowConfig> staticFlows;
private ConcurrentMap<Integer, Integer> staticFlowsOrdinal;
private ConcurrentMap<String, PortGroupConfig> portGroupConfigs;
private ConcurrentMap<PortGroupConfig, Map<Node, PortGroup>> portGroupData;
private ConcurrentMap<String, Object> TSPolicies;
private boolean inContainerMode; // being used by global instance only
protected boolean stopping;
/*
* Flow database. It's the software view of what was requested to install
* and what is installed on the switch. It is indexed by the entry itself.
* The entry's hashcode combines the network node index, the flow's priority
* and the flow's match. The value element is a class which contains the
* flow entry pushed by the application modules and the respective
* container-flow merged version. In the absence of container flows, the two
* flow entries are the same.
*/
private ConcurrentMap<FlowEntry, FlowEntry> originalSwView;
private ConcurrentMap<FlowEntryInstall, FlowEntryInstall> installedSwView;
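/*
 * A minimal sketch of the bookkeeping described above (hypothetical names): for an application
 * request "req" merged with container flow "cFlow" into "merged", updateSwViewes(merged, true)
 * effectively performs
 *
 *   originalSwView.put(req, req);
 *   installedSwView.put(merged, merged);
 *
 * so the software view can be queried both by the original request and by the container-flow
 * merged entry actually pushed to the switch.
 */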
/*
* Per node and per group indexing
*/
private ConcurrentMap<Node, List<FlowEntryInstall>> nodeFlows;
private ConcurrentMap<String, List<FlowEntryInstall>> groupFlows;
/*
* Inactive flow list. This is for the global instance of FRM. It will
* contain all the flow entries which were installed on the global container
* when the first container is created.
*/
private ConcurrentMap<FlowEntry, FlowEntry> inactiveFlows;
private IContainer container;
private Set<IForwardingRulesManagerAware> frmAware =
Collections.synchronizedSet(new HashSet<IForwardingRulesManagerAware>());
private PortGroupProvider portGroupProvider;
private IFlowProgrammerService programmer;
private IClusterContainerServices clusterContainerService = null;
private ISwitchManager switchManager;
private Thread frmEventHandler;
protected BlockingQueue<FRMEvent> pendingEvents;
// Distributes FRM programming in the cluster
private IConnectionManager connectionManager;
/*
* Names of the clustered caches used to support FRM entry distribution. These
* are by necessity non-transactional, since we need to be able to synchronize
* state even while a transaction is in progress.
*/
static final String WORKORDERCACHE = "frm.workOrder";
static final String WORKSTATUSCACHE = "frm.workStatus";
/*
* Data structure responsible for distributing the FlowEntryInstall requests
* in the cluster. The key is the entry that is being installed, updated or
* deleted. The value is the same as the key for installation and deletion;
* for modification it is the new entry, because the clustering caches don't
* allow null values.
*
* The logic behind this data structure is that the controller that initiates
* the request places the order here; another controller picks it up and then
* removes it from this data structure once it has been served.
*
* TODO: We need a way to clean up this data structure if entries are not
* picked up by anyone, which can happen, especially when a node disconnects.
*/
protected ConcurrentMap<FlowEntryDistributionOrder, FlowEntryInstall> workOrder;
/*
* Data structure responsible for retrieving the results of the workOrder
* submitted to the cluster.
*
* The logic behind this data structure is that the controller that has
* executed the order will then place the result in workStatus signaling
* that there was a success or a failure.
*
* TODO: The workStatus entries need to have a lifetime associated, in case
* the requesting controller leaves the cluster.
*/
protected ConcurrentMap<FlowEntryDistributionOrder, Status> workStatus;
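/*
 * A rough sketch of the distribution round trip built on these two caches (the local variable
 * names below are illustrative only):
 *
 *   // requesting controller (entry not local):
 *   FlowEntryDistributionOrder order = new FlowEntryDistributionOrder(e, t, myAddress);
 *   workMonitor.put(order, futureTask);
 *   workOrder.put(order, e);            // other cluster nodes see this via ICacheUpdateAware
 *
 *   // executing controller (the one local to the switch):
 *   Status result = programmer.addFlow(node, flow);
 *   workStatus.put(order, result);      // requester observes it and completes the future
 */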
/*
* Local Map used to hold the Future which a caller can use to monitor for
* completion
*/
private ConcurrentMap<FlowEntryDistributionOrder, FlowEntryDistributionOrderFutureTask> workMonitor =
new ConcurrentHashMap<FlowEntryDistributionOrder, FlowEntryDistributionOrderFutureTask>();
/*
* Max pool size for the executor
*/
private static final int maxPoolSize = 10;
/**
* @param e
* Entry being installed/updated/removed
* @param u
* The new entry that will be in place after the update operation. Valid
* only for UpdateType.CHANGED, null for all the other cases
* @param t
* Type of update
* @return a Future object for monitoring the progress of the result, or
* null in case the processing should take place locally
*/
private FlowEntryDistributionOrderFutureTask distributeWorkOrder(FlowEntryInstall e, FlowEntryInstall u,
UpdateType t) {
// A null entry is an unexpected condition; in any case it is safe to keep
// the handling local
if (e == null) {
return null;
}
Node n = e.getNode();
if (connectionManager.getLocalityStatus(n) == ConnectionLocality.NOT_LOCAL) {
// Create the work order and distribute it
FlowEntryDistributionOrder fe =
new FlowEntryDistributionOrder(e, t, clusterContainerService.getMyAddress());
// First create the monitor job
FlowEntryDistributionOrderFutureTask ret = new FlowEntryDistributionOrderFutureTask(fe);
logsync.trace("Node {} not local so sending fe {}", n, fe);
workMonitor.put(fe, ret);
if (t.equals(UpdateType.CHANGED)) {
// Then distribute the work
workOrder.put(fe, u);
} else {
// Then distribute the work
workOrder.put(fe, e);
}
logsync.trace("WorkOrder requested");
// Now create a handle to monitor the execution of the operation
return ret;
}
logsync.trace("Node {} could be local. so processing Entry:{} UpdateType:{}", n, e, t);
return null;
}
/**
* Adds a flow entry onto the network node. It runs various validity checks
* and derives the final container-flow merged entries that will be
* attempted to be installed.
*
* @param flowEntry
* the original flow entry application requested to add
* @param async
* the flag indicating if this is an asynchronous request
* @return the status of this request. In case of asynchronous call, it will
* contain the unique id assigned to this request
*/
private Status addEntry(FlowEntry flowEntry, boolean async) {
// Sanity Check
if (flowEntry == null || flowEntry.getNode() == null) {
String msg = "Invalid FlowEntry";
String logMsg = msg + ": {}";
log.warn(logMsg, flowEntry);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
/*
* Derive the container flow merged entries to install. In the presence of N
* container flows, we may end up with N different entries to install...
*/
List<FlowEntryInstall> toInstallList = deriveInstallEntries(flowEntry.clone(), container.getContainerFlows());
// Container Flow conflict Check
if (toInstallList.isEmpty()) {
String msg = "Flow Entry conflicts with all Container Flows";
String logMsg = msg + ": {}";
log.warn(logMsg, flowEntry);
return new Status(StatusCode.CONFLICT, msg);
}
// Derive the list of entries good to be installed
List<FlowEntryInstall> toInstallSafe = new ArrayList<FlowEntryInstall>();
for (FlowEntryInstall entry : toInstallList) {
// Conflict Check: Verify new entry would not overwrite existing
// ones
if (this.installedSwView.containsKey(entry)) {
log.warn("Operation Rejected: A flow with same match and priority exists on the target node");
log.trace("Aborting to install {}", entry);
continue;
}
toInstallSafe.add(entry);
}
// Declare failure if all the container flow merged entries clash with
// existing entries
if (toInstallSafe.size() == 0) {
String msg = "A flow with same match and priority exists on the target node";
String logMsg = msg + ": {}";
log.warn(logMsg, flowEntry);
return new Status(StatusCode.CONFLICT, msg);
}
// Try to install one entry at a time
Status error = new Status(null, null);
Status succeded = null;
boolean oneSucceded = false;
for (FlowEntryInstall installEntry : toInstallSafe) {
// Install and update database
Status ret = addEntriesInternal(installEntry, async);
if (ret.isSuccess()) {
oneSucceded = true;
/*
* The first successful status response will be returned. For the
* asynchronous call, we can discard the container flow
* complication for now and assume we will always deal with one
* flow only per request
*/
succeded = ret;
} else {
error = ret;
log.warn("Failed to install the entry: {}. The failure is: {}", installEntry, ret.getDescription());
}
}
return (oneSucceded) ? succeded : error;
}
/**
* Given a flow entry and the list of container flows, it returns the list
* of container flow merged flow entries good to be installed on this
* container. If the list of container flows is null or empty, the install
* entry list will contain only one entry, the original flow entry. If the
* flow entry is congruent with all the N container flows, then the output
* install entry list will contain N entries. If the output list is empty,
* it means the passed flow entry conflicts with all the container flows.
*
* @param cFlowList
* The list of container flows
* @return the list of container flow merged entries good to be installed on
* this container
*/
private List<FlowEntryInstall> deriveInstallEntries(FlowEntry request, List<ContainerFlow> cFlowList) {
List<FlowEntryInstall> toInstallList = new ArrayList<FlowEntryInstall>(1);
if (container.getContainerFlows() == null || container.getContainerFlows().isEmpty()) {
// No container flows => entry good to be installed unchanged
toInstallList.add(new FlowEntryInstall(request.clone(), null));
} else {
// Create the list of entries to be installed. If the flow entry is
// not congruent with any container flow, no install entries will be
// created
for (ContainerFlow cFlow : container.getContainerFlows()) {
if (cFlow.allowsFlow(request.getFlow())) {
toInstallList.add(new FlowEntryInstall(request.clone(), cFlow));
}
}
}
return toInstallList;
}
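/*
 * For example (hypothetical values): with container flows {cf1, cf2} and a request R whose match
 * is congruent with both, the result is [Install(R, cf1), Install(R, cf2)]; with no container
 * flows defined it is just [Install(R, null)]; and if R clashes with every container flow the
 * list comes back empty, which callers report as a conflict.
 */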
/**
* Modify a flow entry with a new one. It runs various validity checks and
* derives the final container-flow merged flow entries to work with.
*
* @param currentFlowEntry
* @param newFlowEntry
* @param async
* the flag indicating if this is an asynchronous request
* @return the status of this request. In case of asynchronous call, it will
* contain the unique id assigned to this request
*/
private Status modifyEntry(FlowEntry currentFlowEntry, FlowEntry newFlowEntry, boolean async) {
Status retExt;
// Sanity checks
if (currentFlowEntry == null || currentFlowEntry.getNode() == null || newFlowEntry == null
|| newFlowEntry.getNode() == null) {
String msg = "Modify: Invalid FlowEntry";
String logMsg = msg + ": {} or {}";
log.warn(logMsg, currentFlowEntry, newFlowEntry);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
if (!currentFlowEntry.getNode().equals(newFlowEntry.getNode())
|| !currentFlowEntry.getFlowName().equals(newFlowEntry.getFlowName())) {
String msg = "Modify: Incompatible Flow Entries";
String logMsg = msg + ": {} and {}";
log.warn(logMsg, currentFlowEntry, newFlowEntry);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
// Equality Check
if (currentFlowEntry.getFlow().equals(newFlowEntry.getFlow())) {
String msg = "Modify skipped as flows are the same";
String logMsg = msg + ": {} and {}";
log.debug(logMsg, currentFlowEntry, newFlowEntry);
return new Status(StatusCode.SUCCESS, msg);
}
/*
* Conflict Check: Verify the new entry would not conflict with an
* existing one. This is a loose check on the previous original flow
* entry requests. No check on the container flow merged flow entries
* (if any) yet
*/
FlowEntry sameMatchOriginalEntry = originalSwView.get(newFlowEntry);
if (sameMatchOriginalEntry != null && !sameMatchOriginalEntry.equals(currentFlowEntry)) {
String msg = "Operation Rejected: Another flow with same match and priority exists on the target node";
String logMsg = msg + ": {}";
log.warn(logMsg, currentFlowEntry);
return new Status(StatusCode.CONFLICT, msg);
}
// Derive the installed and toInstall entries
List<FlowEntryInstall> installedList = deriveInstallEntries(currentFlowEntry.clone(),
container.getContainerFlows());
List<FlowEntryInstall> toInstallList = deriveInstallEntries(newFlowEntry.clone(), container.getContainerFlows());
if (toInstallList.isEmpty()) {
String msg = "Modify Operation Rejected: The new entry conflicts with all the container flows";
String logMsg = msg + ": {}";
log.warn(logMsg, newFlowEntry);
log.warn(msg);
return new Status(StatusCode.CONFLICT, msg);
}
/*
* If the two list sizes differ, it means the new flow entry does not
* satisfy the same number of container flows the current entry does.
* This is only possible when the new entry and current entry have
* different match. In this scenario the modification would ultimately
* be handled as a remove and add operations in the protocol plugin.
*
* Also, if any of the new flow entries would clash with an existing
* one, we cannot proceed with the modify operation, because it would
* fail for some entries and leave stale entries on the network node.
* Modify path can be taken only if it can be performed completely, for
* all entries.
*
* So, for the above two cases, to simplify, let's decouple the modify
* in: 1) remove current entries 2) install new entries
*/
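/*
 * Concrete example of the first case (hypothetical numbers): the current entry satisfied three
 * container flows (installedList.size() == 3) but the new entry, having a different match,
 * satisfies only two (toInstallList.size() == 2); the sizes differ, so the modify below is
 * decoupled into a remove of the current entries followed by an add of the new ones.
 */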
Status succeeded = null;
boolean decouple = false;
if (installedList.size() != toInstallList.size()) {
log.info("Modify: New flow entry does not satisfy the same "
+ "number of container flows as the original entry does");
decouple = true;
}
List<FlowEntryInstall> toInstallSafe = new ArrayList<FlowEntryInstall>();
for (FlowEntryInstall installEntry : toInstallList) {
/*
* Conflict Check: Verify the new entry would not overwrite another
* existing one
*/
FlowEntryInstall sameMatchEntry = installedSwView.get(installEntry);
if (sameMatchEntry != null && !sameMatchEntry.getOriginal().equals(currentFlowEntry)) {
log.info("Modify: new container flow merged flow entry clashes with existing flow");
decouple = true;
} else {
toInstallSafe.add(installEntry);
}
}
if (decouple) {
// Remove current entries
for (FlowEntryInstall currEntry : installedList) {
this.removeEntryInternal(currEntry, async);
}
// Install new entries
for (FlowEntryInstall newEntry : toInstallSafe) {
succeeded = this.addEntriesInternal(newEntry, async);
}
} else {
/*
* The two lists have the same size and the entries to install do not
* clash with any existing flow on the network node. We assume here
* (and might be wrong) that the same container flows that were
* satisfied by the current entries are the same that are satisfied
* by the new entries. Let's take the risk for now.
*
* Note: modification has to be complete. If any entry modification
* fails, we need to stop, restore the already modified entries, and
* declare failure.
*/
Status retModify = null;
int i = 0;
int size = toInstallList.size();
while (i < size) {
// Modify and update database
retModify = modifyEntryInternal(installedList.get(i), toInstallList.get(i), async);
if (retModify.isSuccess()) {
i++;
} else {
break;
}
}
// Check if uncompleted modify
if (i < size) {
log.warn("Unable to perform a complete modify for all the container flows merged entries");
// Restore original entries
int j = 0;
while (j < i) {
log.info("Attempting to restore initial entries");
retExt = modifyEntryInternal(toInstallList.get(i), installedList.get(i), async);
if (retExt.isSuccess()) {
j++;
} else {
break;
}
}
// Fatal error, recovery failed
if (j < i) {
String msg = "Flow recovery failed ! Unrecoverable Error";
log.error(msg);
return new Status(StatusCode.INTERNALERROR, msg);
}
}
succeeded = retModify;
}
/*
* The first successful status response will be returned. For the
* asynchronous call, we can discard the container flow complication for
* now and assume we will always deal with one flow only per request
*/
return succeeded;
}
/**
* This is the function that modifies the final container-flow merged
* entries on the network node and updates the database. It expects that all
* the validity checks are passed.
*
* @param currentEntries
* @param newEntries
* @param async
* the flag indicating if this is an asynchronous request
* @return the status of this request. In case of asynchronous call, it will
* contain the unique id assigned to this request
*/
private Status modifyEntryInternal(FlowEntryInstall currentEntries, FlowEntryInstall newEntries, boolean async) {
FlowEntryDistributionOrderFutureTask futureStatus =
distributeWorkOrder(currentEntries, newEntries, UpdateType.CHANGED);
if (futureStatus != null) {
Status retStatus = new Status(StatusCode.UNDEFINED);
try {
retStatus = futureStatus.get();
if (retStatus.getCode()
.equals(StatusCode.TIMEOUT)) {
// A timeout happened, lets cleanup the workMonitor
workMonitor.remove(futureStatus.getOrder());
}
} catch (InterruptedException e) {
log.error("", e);
} catch (ExecutionException e) {
log.error("", e);
}
return retStatus;
} else {
// Modify the flow on the network node
Status status = async ? programmer.modifyFlowAsync(currentEntries.getNode(), currentEntries.getInstall()
.getFlow(), newEntries.getInstall()
.getFlow()) : programmer.modifyFlow(currentEntries.getNode(), currentEntries.getInstall()
.getFlow(), newEntries.getInstall()
.getFlow());
if (!status.isSuccess()) {
log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", newEntries.getInstall(),
status.getDescription());
return status;
}
log.trace("Modified {} => {}", currentEntries.getInstall(), newEntries.getInstall());
// Update DB
newEntries.setRequestId(status.getRequestId());
updateLocalDatabase(currentEntries, false);
updateLocalDatabase(newEntries, true);
return status;
}
}
/**
* Remove a flow entry. If the entry is not present in the software view
* (entry or node not present), it returns successfully.
*
* @param flowEntry
* the flow entry to remove
* @param async
* the flag indicating if this is an asynchronous request
* @return the status of this request. In case of asynchronous call, it will
* contain the unique id assigned to this request
*/
private Status removeEntry(FlowEntry flowEntry, boolean async) {
Status error = new Status(null, null);
// Sanity Check
if (flowEntry == null || flowEntry.getNode() == null) {
String msg = "Invalid FlowEntry";
String logMsg = msg + ": {}";
log.warn(logMsg, flowEntry);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
// Derive the container flows merged installed entries
List<FlowEntryInstall> installedList = deriveInstallEntries(flowEntry.clone(), container.getContainerFlows());
Status succeeded = null;
boolean atLeastOneRemoved = false;
for (FlowEntryInstall entry : installedList) {
if (!installedSwView.containsKey(entry)) {
String logMsg = "Removal skipped (not present in software view) for flow entry: {}";
log.debug(logMsg, flowEntry);
if (installedList.size() == 1) {
// If we had only one entry to remove, we are done
return new Status(StatusCode.SUCCESS);
} else {
continue;
}
}
// Remove and update DB
Status ret = removeEntryInternal(entry, async);
if (!ret.isSuccess()) {
error = ret;
log.warn("Failed to remove the entry: {}. The failure is: {}", entry.getInstall(), ret.getDescription());
if (installedList.size() == 1) {
// If we had only one entry to remove, this is fatal failure
return error;
}
} else {
succeeded = ret;
atLeastOneRemoved = true;
}
}
/*
* No worries if full removal failed. Consistency checker will take care
* of removing the stale entries later, or adjusting the software
* database if not in sync with hardware
*/
return (atLeastOneRemoved) ? succeeded : error;
}
/**
* This is the function that removes the final container-flow merged entry
* from the network node and updates the database. It expects that all the
* validity checks are passed.
*
* @param entry
* the flow entry to remove
* @param async
* the flag indicating if this is an asynchronous request
* @return the status of this request. In case of asynchronous call, it will
* contain the unique id assigned to this request
*/
private Status removeEntryInternal(FlowEntryInstall entry, boolean async) {
FlowEntryDistributionOrderFutureTask futureStatus = distributeWorkOrder(entry, null, UpdateType.REMOVED);
if (futureStatus != null) {
Status retStatus = new Status(StatusCode.UNDEFINED);
try {
retStatus = futureStatus.get();
if (retStatus.getCode()
.equals(StatusCode.TIMEOUT)) {
// A timeout happened, lets cleanup the workMonitor
workMonitor.remove(futureStatus.getOrder());
}
} catch (InterruptedException e) {
log.error("", e);
} catch (ExecutionException e) {
log.error("", e);
}
return retStatus;
} else {
// Mark the entry to be deleted (for CC just in case we fail)
entry.toBeDeleted();
// Remove from node
Status status = async ? programmer.removeFlowAsync(entry.getNode(), entry.getInstall()
.getFlow()) : programmer.removeFlow(entry.getNode(), entry.getInstall()
.getFlow());
if (!status.isSuccess()) {
log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", entry.getInstall(),
status.getDescription());
return status;
}
log.trace("Removed {}", entry.getInstall());
// Update DB
updateLocalDatabase(entry, false);
return status;
}
}
/**
* This is the function that installs the final container flow merged entry
* on the network node and updates the database. It expects that all the
* validity and conflict checks are passed. That means it does not check
* whether this flow would conflict or overwrite an existing one.
*
* @param entry
* the flow entry to install
* @param async
* the flag indicating if this is an asynchronous request
* @return the status of this request. In case of asynchronous call, it will
* contain the unique id assigned to this request
*/
private Status addEntriesInternal(FlowEntryInstall entry, boolean async) {
FlowEntryDistributionOrderFutureTask futureStatus = distributeWorkOrder(entry, null, UpdateType.ADDED);
if (futureStatus != null) {
Status retStatus = new Status(StatusCode.UNDEFINED);
try {
retStatus = futureStatus.get();
if (retStatus.getCode()
.equals(StatusCode.TIMEOUT)) {
// A timeout happened, lets cleanup the workMonitor
workMonitor.remove(futureStatus.getOrder());
}
} catch (InterruptedException e) {
log.error("", e);
} catch (ExecutionException e) {
log.error("", e);
}
return retStatus;
} else {
// Install the flow on the network node
Status status = async ? programmer.addFlowAsync(entry.getNode(), entry.getInstall()
.getFlow()) : programmer.addFlow(entry.getNode(), entry.getInstall()
.getFlow());
if (!status.isSuccess()) {
log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", entry.getInstall(),
status.getDescription());
return status;
}
log.trace("Added {}", entry.getInstall());
// Update DB
entry.setRequestId(status.getRequestId());
updateLocalDatabase(entry, true);
return status;
}
}
/**
* Returns true if the flow conflicts with all of the container's flows.
* Conversely, if the function returns false, the passed flow entry is
* congruent with at least one container flow and hence is good to be
* installed on this container.
*
* @param flowEntry
* @return true if flow conflicts with all the container flows, false
* otherwise
*/
private boolean entryConflictsWithContainerFlows(FlowEntry flowEntry) {
List<ContainerFlow> cFlowList = container.getContainerFlows();
// Validity check and avoid unnecessary computation
// Also takes care of default container where no container flows are
// present
if (cFlowList == null || cFlowList.isEmpty()) {
return false;
}
for (ContainerFlow cFlow : cFlowList) {
if (cFlow.allowsFlow(flowEntry.getFlow())) {
// Entry is allowed by at least one container flow: good to go
return false;
}
}
return true;
}
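/*
 * Example (hypothetical setup): on a container whose only container flow admits HTTP traffic, a
 * flow entry matching ARP only is not allowed by any container flow, so this method returns true;
 * on the default container, where no container flows are defined, it always returns false.
 */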
private ConcurrentMap.Entry<Integer, FlowConfig> getStaticFlowEntry(String name, Node node) {
for (ConcurrentMap.Entry<Integer, FlowConfig> flowEntry : staticFlows.entrySet()) {
FlowConfig flowConfig = flowEntry.getValue();
if (flowConfig.isByNameAndNodeIdEqual(name, node)) {
return flowEntry;
}
}
return null;
}
private void updateLocalDatabase(FlowEntryInstall entry, boolean add) {
// Update the software view
updateSwViewes(entry, add);
// Update node indexed flow database
updateNodeFlowsDB(entry, add);
// Update group indexed flow database
updateGroupFlowsDB(entry, add);
}
/*
* Update the software views of the original and installed flow entries
*/
private void updateSwViewes(FlowEntryInstall flowEntries, boolean add) {
if (add) {
originalSwView.put(flowEntries.getOriginal(), flowEntries.getOriginal());
installedSwView.put(flowEntries, flowEntries);
} else {
originalSwView.remove(flowEntries.getOriginal());
installedSwView.remove(flowEntries);
}
}
/*
* Update the node mapped flows database
*/
private void updateNodeFlowsDB(FlowEntryInstall flowEntries, boolean add) {
Node node = flowEntries.getNode();
List<FlowEntryInstall> nodeIndeces = this.nodeFlows.get(node);
if (nodeIndeces == null) {
if (!add) {
return;
} else {
nodeIndeces = new ArrayList<FlowEntryInstall>();
}
}
if (add) {
nodeIndeces.add(flowEntries);
} else {
nodeIndeces.remove(flowEntries);
}
// Update cache across cluster
if (nodeIndeces.isEmpty()) {
this.nodeFlows.remove(node);
} else {
this.nodeFlows.put(node, nodeIndeces);
}
}
/*
* Update the group name mapped flows database
*/
private void updateGroupFlowsDB(FlowEntryInstall flowEntries, boolean add) {
String groupName = flowEntries.getGroupName();
// Flow may not be part of a group
if (groupName == null) {
return;
}
List<FlowEntryInstall> indices = this.groupFlows.get(groupName);
if (indices == null) {
if (!add) {
return;
} else {
indices = new ArrayList<FlowEntryInstall>();
}
}
if (add) {
indices.add(flowEntries);
} else {
indices.remove(flowEntries);
}
// Update cache across cluster
if (indices.isEmpty()) {
this.groupFlows.remove(groupName);
} else {
this.groupFlows.put(groupName, indices);
}
}
/**
* Remove a flow entry that has been added previously. First checks if the
* entry is effectively present in the local database
*/
@SuppressWarnings("unused")
private Status removeEntry(Node node, String flowName) {
FlowEntryInstall target = null;
// Find in database
for (FlowEntryInstall entry : installedSwView.values()) {
if (entry.equalsByNodeAndName(node, flowName)) {
target = entry;
break;
}
}
// If it is not there, stop any further processing
if (target == null) {
return new Status(StatusCode.SUCCESS, "Entry is not present");
}
// Remove from node
Status status = programmer.removeFlow(target.getNode(), target.getInstall().getFlow());
// Update DB
if (status.isSuccess()) {
updateLocalDatabase(target, false);
} else {
// log the error
log.warn("SDN Plugin failed to remove the flow: {}. The failure is: {}", target.getInstall(),
status.getDescription());
}
return status;
}
@Override
public Status installFlowEntry(FlowEntry flowEntry) {
Status status;
if (isContainerModeAllowed(flowEntry)) {
status = addEntry(flowEntry, false);
} else {
String msg = "Controller in container mode: Install Refused";
String logMsg = msg + ": {}";
status = new Status(StatusCode.NOTACCEPTABLE, msg);
log.warn(logMsg, flowEntry);
}
return status;
}
@Override
public Status installFlowEntryAsync(FlowEntry flowEntry) {
Status status;
if (isContainerModeAllowed(flowEntry)) {
status = addEntry(flowEntry, true);
} else {
String msg = "Controller in container mode: Install Refused";
status = new Status(StatusCode.NOTACCEPTABLE, msg);
log.warn(msg);
}
return status;
}
@Override
public Status uninstallFlowEntry(FlowEntry flowEntry) {
Status status;
if (isContainerModeAllowed(flowEntry)) {
status = removeEntry(flowEntry, false);
} else {
String msg = "Controller in container mode: Uninstall Refused";
String logMsg = msg + ": {}";
status = new Status(StatusCode.NOTACCEPTABLE, msg);
log.warn(logMsg, flowEntry);
}
return status;
}
@Override
public Status uninstallFlowEntryAsync(FlowEntry flowEntry) {
Status status;
if (isContainerModeAllowed(flowEntry)) {
status = removeEntry(flowEntry, true);
} else {
String msg = "Controller in container mode: Uninstall Refused";
status = new Status(StatusCode.NOTACCEPTABLE, msg);
log.warn(msg);
}
return status;
}
@Override
public Status modifyFlowEntry(FlowEntry currentFlowEntry, FlowEntry newFlowEntry) {
Status status = null;
if (isContainerModeAllowed(currentFlowEntry)) {
status = modifyEntry(currentFlowEntry, newFlowEntry, false);
} else {
String msg = "Controller in container mode: Modify Refused";
String logMsg = msg + ": {}";
status = new Status(StatusCode.NOTACCEPTABLE, msg);
log.warn(logMsg, newFlowEntry);
}
return status;
}
@Override
public Status modifyFlowEntryAsync(FlowEntry currentFlowEntry, FlowEntry newFlowEntry) {
Status status = null;
if (isContainerModeAllowed(currentFlowEntry)) {
status = modifyEntry(currentFlowEntry, newFlowEntry, true);
} else {
String msg = "Controller in container mode: Modify Refused";
status = new Status(StatusCode.NOTACCEPTABLE, msg);
log.warn(msg);
}
return status;
}
/**
* Returns whether the specified flow entry is allowed to be
* installed/removed/modified based on the current container mode status.
* This call always returns true in the container instance of the forwarding
* rules manager. It is meant only for the global instance (default
* container) of the forwarding rules manager. The idea is that, in order to
* assure container isolation of traffic, flow installation in the default
* container is blocked when in container mode (containers are present). The
* only flows allowed in container mode in the default container are the
* proactive flows, the ones automatically installed on the network nodes
* whose forwarding mode has been configured to "proactive". These flows are
* needed by the controller to discover the node topology and to discover the
* attached hosts for some SDN switches.
*
* @param flowEntry
* The flow entry to be installed/removed/modified
* @return true if not in container mode or if flowEntry is internally
* generated
*/
private boolean isContainerModeAllowed(FlowEntry flowEntry) {
return (!inContainerMode) ? true : flowEntry.isInternal();
}
@Override
public Status modifyOrAddFlowEntry(FlowEntry newFlowEntry) {
/*
* Run a check on the original entries to decide whether to go with an
* add or a modify method. A loose check means we only check against the
* original flow entry requests and not against the installed flow
* entries, which are the result of the original entry merged with the
* container flow(s) (if any). The modifyFlowEntry method, in the presence
* of conflicts with the container flows (if any), will revert back to a
* delete + add pattern.
*/
FlowEntry currentFlowEntry = originalSwView.get(newFlowEntry);
if (currentFlowEntry != null) {
return modifyFlowEntry(currentFlowEntry, newFlowEntry);
} else {
return installFlowEntry(newFlowEntry);
}
}
@Override
public Status modifyOrAddFlowEntryAsync(FlowEntry newFlowEntry) {
/*
* Run a check on the original entries to decide whether to go with an
* add or a modify method. A loose check means we only check against the
* original flow entry requests and not against the installed flow
* entries, which are the result of the original entry merged with the
* container flow(s) (if any). The modifyFlowEntry method, in the presence
* of conflicts with the container flows (if any), will revert back to a
* delete + add pattern.
*/
FlowEntry currentFlowEntry = originalSwView.get(newFlowEntry);
if (currentFlowEntry != null) {
return modifyFlowEntryAsync(currentFlowEntry, newFlowEntry);
} else {
return installFlowEntryAsync(newFlowEntry);
}
}
@Override
public Status uninstallFlowEntryGroup(String groupName) {
if (groupName == null || groupName.isEmpty()) {
return new Status(StatusCode.BADREQUEST, "Invalid group name");
}
if (groupName.equals(FlowConfig.INTERNALSTATICFLOWGROUP)) {
return new Status(StatusCode.BADREQUEST, "Internal static flows group cannot be deleted through this api");
}
if (inContainerMode) {
String msg = "Controller in container mode: Group Uninstall Refused";
String logMsg = msg + ": {}";
log.warn(logMsg, groupName);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
int toBeRemoved = 0;
String error = "";
if (groupFlows.containsKey(groupName)) {
List<FlowEntryInstall> list = new ArrayList<FlowEntryInstall>(groupFlows.get(groupName));
toBeRemoved = list.size();
for (FlowEntryInstall entry : list) {
Status status = this.removeEntry(entry.getOriginal(), false);
if (status.isSuccess()) {
toBeRemoved -= 1;
} else {
error = status.getDescription();
}
}
}
return (toBeRemoved == 0) ? new Status(StatusCode.SUCCESS) : new Status(StatusCode.INTERNALERROR,
"Not all the flows were removed: " + error);
}
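// Async variant of the group uninstall: the per-entry removals are fired
// without waiting for their results, so this call always reports SUCCESS.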
@Override
public Status uninstallFlowEntryGroupAsync(String groupName) {
if (groupName == null || groupName.isEmpty()) {
return new Status(StatusCode.BADREQUEST, "Invalid group name");
}
if (groupName.equals(FlowConfig.INTERNALSTATICFLOWGROUP)) {
return new Status(StatusCode.BADREQUEST, "Static flows group cannot be deleted through this api");
}
if (inContainerMode) {
String msg = "Controller in container mode: Group Uninstall Refused";
String logMsg = msg + ": {}";
log.warn(logMsg, groupName);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
if (groupFlows.containsKey(groupName)) {
List<FlowEntryInstall> list = new ArrayList<FlowEntryInstall>(groupFlows.get(groupName));
for (FlowEntryInstall entry : list) {
this.removeEntry(entry.getOriginal(), true);
}
}
return new Status(StatusCode.SUCCESS);
}
@Override
public boolean checkFlowEntryConflict(FlowEntry flowEntry) {
return entryConflictsWithContainerFlows(flowEntry);
}
/**
* Updates all installed flows because the container flow got updated. This
* is done in two phases on a per-node basis: 1) uninstall all flows, 2)
* reinstall all flows. This is needed because a flow merged with the new
* container flows may conflict with a flow merged with the old container
* flows already present on the network node.
*/
protected void updateFlowsContainerFlow() {
Set<FlowEntry> toReInstall = new HashSet<FlowEntry>();
// First remove all installed entries
for (ConcurrentMap.Entry<FlowEntryInstall, FlowEntryInstall> entry : installedSwView.entrySet()) {
FlowEntryInstall current = entry.getValue();
// Store the original entry
toReInstall.add(current.getOriginal());
// Remove the old pairs. No validity checks need to be run; use the
// internal remove
this.removeEntryInternal(current, false);
}
// Then reinstall the original entries
for (FlowEntry entry : toReInstall) {
// Reinstall the original flow entries, via the regular path: new
// cFlow merge + validations
this.installFlowEntry(entry);
}
}
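// Fallback allocation of plain local maps, used when the cluster container
// service is not available (see retrieveCaches()).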
private void nonClusterObjectCreate() {
originalSwView = new ConcurrentHashMap<FlowEntry, FlowEntry>();
installedSwView = new ConcurrentHashMap<FlowEntryInstall, FlowEntryInstall>();
nodeFlows = new ConcurrentHashMap<Node, List<FlowEntryInstall>>();
groupFlows = new ConcurrentHashMap<String, List<FlowEntryInstall>>();
TSPolicies = new ConcurrentHashMap<String, Object>();
staticFlowsOrdinal = new ConcurrentHashMap<Integer, Integer>();
portGroupConfigs = new ConcurrentHashMap<String, PortGroupConfig>();
portGroupData = new ConcurrentHashMap<PortGroupConfig, Map<Node, PortGroup>>();
staticFlows = new ConcurrentHashMap<Integer, FlowConfig>();
inactiveFlows = new ConcurrentHashMap<FlowEntry, FlowEntry>();
}
private void registerWithOSGIConsole() {
BundleContext bundleContext = FrameworkUtil.getBundle(this.getClass()).getBundleContext();
bundleContext.registerService(CommandProvider.class.getName(), this, null);
}
@Override
public void setTSPolicyData(String policyname, Object o, boolean add) {
if (add) {
/* Check if this policy already exists */
if (!(TSPolicies.containsKey(policyname))) {
TSPolicies.put(policyname, o);
}
} else {
TSPolicies.remove(policyname);
}
if (frmAware != null) {
synchronized (frmAware) {
for (IForwardingRulesManagerAware frma : frmAware) {
try {
frma.policyUpdate(policyname, add);
} catch (Exception e) {
log.warn("Exception on callback", e);
}
}
}
}
}
@Override
public Map<String, Object> getTSPolicyData() {
return TSPolicies;
}
@Override
public Object getTSPolicyData(String policyName) {
if (TSPolicies.containsKey(policyName)) {
return TSPolicies.get(policyName);
} else {
return null;
}
}
@Override
public List<FlowEntry> getFlowEntriesForGroup(String policyName) {
List<FlowEntry> list = new ArrayList<FlowEntry>();
if (policyName != null && !policyName.trim().isEmpty()) {
for (Map.Entry<FlowEntry, FlowEntry> entry : this.originalSwView.entrySet()) {
if (policyName.equals(entry.getKey().getGroupName())) {
list.add(entry.getKey().clone());
}
}
}
return list;
}
@Override
public List<FlowEntry> getInstalledFlowEntriesForGroup(String policyName) {
List<FlowEntry> list = new ArrayList<FlowEntry>();
if (policyName != null && !policyName.trim().isEmpty()) {
for (Map.Entry<FlowEntryInstall, FlowEntryInstall> entry : this.installedSwView.entrySet()) {
if (policyName.equals(entry.getKey().getGroupName())) {
list.add(entry.getKey().getInstall().clone());
}
}
}
return list;
}
@Override
public void addOutputPort(Node node, String flowName, List<NodeConnector> portList) {
for (FlowEntryInstall flow : this.nodeFlows.get(node)) {
if (flow.getFlowName().equals(flowName)) {
FlowEntry currentFlowEntry = flow.getOriginal();
FlowEntry newFlowEntry = currentFlowEntry.clone();
for (NodeConnector dstPort : portList) {
newFlowEntry.getFlow().addAction(new Output(dstPort));
}
Status error = modifyEntry(currentFlowEntry, newFlowEntry, false);
if (error.isSuccess()) {
log.info("Ports {} added to FlowEntry {}", portList, flowName);
} else {
log.warn("Failed to add ports {} to Flow entry {}. The failure is: {}", portList,
currentFlowEntry.toString(), error.getDescription());
}
return;
}
}
log.warn("Failed to add ports to Flow {} on Node {}: Entry Not Found", flowName, node);
}
@Override
public void removeOutputPort(Node node, String flowName, List<NodeConnector> portList) {
for (FlowEntryInstall index : this.nodeFlows.get(node)) {
FlowEntryInstall flow = this.installedSwView.get(index);
if (flow.getFlowName().equals(flowName)) {
FlowEntry currentFlowEntry = flow.getOriginal();
FlowEntry newFlowEntry = currentFlowEntry.clone();
for (NodeConnector dstPort : portList) {
Action action = new Output(dstPort);
newFlowEntry.getFlow().removeAction(action);
}
Status status = modifyEntry(currentFlowEntry, newFlowEntry, false);
if (status.isSuccess()) {
log.info("Ports {} removed from FlowEntry {}", portList, flowName);
} else {
log.warn("Failed to remove ports {} from Flow entry {}. The failure is: {}", portList,
currentFlowEntry.toString(), status.getDescription());
}
return;
}
}
log.warn("Failed to remove ports from Flow {} on Node {}: Entry Not Found", flowName, node);
}
/*
* This function assumes the target flow has only one output port
*/
@Override
public void replaceOutputPort(Node node, String flowName, NodeConnector outPort) {
FlowEntry currentFlowEntry = null;
FlowEntry newFlowEntry = null;
// Find the flow
for (FlowEntryInstall index : this.nodeFlows.get(node)) {
FlowEntryInstall flow = this.installedSwView.get(index);
if (flow.getFlowName().equals(flowName)) {
currentFlowEntry = flow.getOriginal();
break;
}
}
if (currentFlowEntry == null) {
log.warn("Failed to replace output port for flow {} on node {}: Entry Not Found", flowName, node);
return;
}
// Create a flow copy with the new output port
newFlowEntry = currentFlowEntry.clone();
Action target = null;
for (Action action : newFlowEntry.getFlow().getActions()) {
if (action.getType() == ActionType.OUTPUT) {
target = action;
break;
}
}
newFlowEntry.getFlow().removeAction(target);
newFlowEntry.getFlow().addAction(new Output(outPort));
// Modify on network node
Status status = modifyEntry(currentFlowEntry, newFlowEntry, false);
if (status.isSuccess()) {
log.info("Output port replaced with {} for flow {} on node {}", outPort, flowName, node);
} else {
log.warn("Failed to replace output port for flow {} on node {}. The failure is: {}", flowName, node,
status.getDescription());
}
return;
}
@Override
public NodeConnector getOutputPort(Node node, String flowName) {
for (FlowEntryInstall index : this.nodeFlows.get(node)) {
FlowEntryInstall flow = this.installedSwView.get(index);
if (flow.getFlowName().equals(flowName)) {
for (Action action : flow.getOriginal().getFlow().getActions()) {
if (action.getType() == ActionType.OUTPUT) {
return ((Output) action).getPort();
}
}
}
}
return null;
}
private void cacheStartup() {
allocateCaches();
retrieveCaches();
}
private void allocateCaches() {
if (this.clusterContainerService == null) {
log.warn("Un-initialized clusterContainerService, can't create cache");
return;
}
log.debug("Allocating caches for Container {}", container.getName());
try {
clusterContainerService.createCache("frm.originalSwView",
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
clusterContainerService.createCache("frm.installedSwView",
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
clusterContainerService.createCache("frm.inactiveFlows",
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
clusterContainerService.createCache("frm.nodeFlows",
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
clusterContainerService.createCache("frm.groupFlows",
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
clusterContainerService.createCache("frm.staticFlows",
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
clusterContainerService.createCache("frm.staticFlowsOrdinal",
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
clusterContainerService.createCache("frm.portGroupConfigs",
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
clusterContainerService.createCache("frm.portGroupData",
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
clusterContainerService.createCache("frm.TSPolicies",
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
clusterContainerService.createCache(WORKSTATUSCACHE,
EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL, IClusterServices.cacheMode.ASYNC));
clusterContainerService.createCache(WORKORDERCACHE,
EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL, IClusterServices.cacheMode.ASYNC));
} catch (CacheConfigException cce) {
log.error("CacheConfigException");
} catch (CacheExistException cce) {
log.error("CacheExistException");
}
}
@SuppressWarnings({ "unchecked" })
private void retrieveCaches() {
ConcurrentMap<?, ?> map;
if (this.clusterContainerService == null) {
log.warn("un-initialized clusterContainerService, can't retrieve cache");
nonClusterObjectCreate();
return;
}
log.debug("Retrieving Caches for Container {}", container.getName());
map = clusterContainerService.getCache("frm.originalSwView");
if (map != null) {
originalSwView = (ConcurrentMap<FlowEntry, FlowEntry>) map;
} else {
log.error("Retrieval of frm.originalSwView cache failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.installedSwView");
if (map != null) {
installedSwView = (ConcurrentMap<FlowEntryInstall, FlowEntryInstall>) map;
} else {
log.error("Retrieval of frm.installedSwView cache failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.inactiveFlows");
if (map != null) {
inactiveFlows = (ConcurrentMap<FlowEntry, FlowEntry>) map;
} else {
log.error("Retrieval of frm.inactiveFlows cache failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.nodeFlows");
if (map != null) {
nodeFlows = (ConcurrentMap<Node, List<FlowEntryInstall>>) map;
} else {
log.error("Retrieval of cache failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.groupFlows");
if (map != null) {
groupFlows = (ConcurrentMap<String, List<FlowEntryInstall>>) map;
} else {
log.error("Retrieval of frm.groupFlows cache failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.staticFlows");
if (map != null) {
staticFlows = (ConcurrentMap<Integer, FlowConfig>) map;
} else {
log.error("Retrieval of frm.staticFlows cache failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.staticFlowsOrdinal");
if (map != null) {
staticFlowsOrdinal = (ConcurrentMap<Integer, Integer>) map;
} else {
log.error("Retrieval of frm.staticFlowsOrdinal cache failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.portGroupConfigs");
if (map != null) {
portGroupConfigs = (ConcurrentMap<String, PortGroupConfig>) map;
} else {
log.error("Retrieval of frm.portGroupConfigs cache failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.portGroupData");
if (map != null) {
portGroupData = (ConcurrentMap<PortGroupConfig, Map<Node, PortGroup>>) map;
} else {
log.error("Retrieval of frm.portGroupData allocation failed for Container {}", container.getName());
}
map = clusterContainerService.getCache("frm.TSPolicies");
if (map != null) {
TSPolicies = (ConcurrentMap<String, Object>) map;
} else {
log.error("Retrieval of frm.TSPolicies cache failed for Container {}", container.getName());
}
map = clusterContainerService.getCache(WORKORDERCACHE);
if (map != null) {
workOrder = (ConcurrentMap<FlowEntryDistributionOrder, FlowEntryInstall>) map;
} else {
log.error("Retrieval of " + WORKORDERCACHE + " cache failed for Container {}", container.getName());
}
map = clusterContainerService.getCache(WORKSTATUSCACHE);
if (map != null) {
workStatus = (ConcurrentMap<FlowEntryDistributionOrder, Status>) map;
} else {
log.error("Retrieval of " + WORKSTATUSCACHE + " cache failed for Container {}", container.getName());
}
}
private boolean flowConfigExists(FlowConfig config) {
// Flow name has to be unique on per node id basis
for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
if (entry.getValue().isByNameAndNodeIdEqual(config)) {
return true;
}
}
return false;
}
@Override
public Status addStaticFlow(FlowConfig config) {
// Configuration object validation
Status status = config.validate(container);
if (!status.isSuccess()) {
log.warn("Invalid Configuration for flow {}. The failure is {}", config, status.getDescription());
String error = "Invalid Configuration (" + status.getDescription() + ")";
config.setStatus(error);
return new Status(StatusCode.BADREQUEST, error);
}
return addStaticFlowInternal(config, false);
}
/**
* Private method to add a static flow configuration which does not run any
* validation on the passed FlowConfig object. If restore is set to true,
* the configuration is stored in the configuration database regardless of
* whether the installation on the network node was successful. This is
* useful at boot, when static flows are present in the startup
* configuration and are read before the switches connect.
*
* @param config
* The static flow configuration
* @param restore
* if true, the configuration is stored regardless of whether the
* installation on the network node was successful
* @return The status of this request
*/
private Status addStaticFlowInternal(FlowConfig config, boolean restore) {
boolean multipleFlowPush = false;
String error;
Status status;
config.setStatus(SUCCESS);
// Presence check
if (flowConfigExists(config)) {
error = "Entry with this name on specified switch already exists";
log.warn("Entry with this name on specified switch already exists: {}", config);
config.setStatus(error);
return new Status(StatusCode.CONFLICT, error);
}
if ((config.getIngressPort() == null) && config.getPortGroup() != null) {
for (String portGroupName : portGroupConfigs.keySet()) {
if (portGroupName.equalsIgnoreCase(config.getPortGroup())) {
multipleFlowPush = true;
break;
}
}
if (!multipleFlowPush) {
log.warn("Invalid Configuration(Invalid PortGroup Name) for flow {}", config);
error = "Invalid Configuration (Invalid PortGroup Name)";
config.setStatus(error);
return new Status(StatusCode.BADREQUEST, error);
}
}
/*
* If requested, program the entry in hardware first before updating the
* StaticFlow DB
*/
if (!multipleFlowPush) {
// Program hw
if (config.installInHw()) {
FlowEntry entry = config.getFlowEntry();
status = this.installFlowEntry(entry);
if (!status.isSuccess()) {
config.setStatus(status.getDescription());
if (!restore) {
return status;
}
}
}
}
/*
* When control reaches this point, one of the following conditions is
* true: 1. This is a single entry configuration (non PortGroup) and the
* hardware installation was successful. 2. This is a multiple entry
* configuration (PortGroup) and hardware installation is NOT done
* directly on this event. 3. The user prefers to retain the
* configuration in the controller and skip hardware installation.
*
* Hence it is safe to update the StaticFlow DB at this point.
*
* Note: For the PortGroup case, it is essential to have this DB
* populated before the PortGroup listeners can query it when triggered
* by the portGroupChanged event...
*/
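// The running ordinal counter is kept at key 0 of staticFlowsOrdinal: bump
// it and use the new value as the key for this configuration entry.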
Integer ordinal = staticFlowsOrdinal.get(0);
staticFlowsOrdinal.put(0, ++ordinal);
staticFlows.put(ordinal, config);
if (multipleFlowPush) {
PortGroupConfig pgconfig = portGroupConfigs.get(config.getPortGroup());
Map<Node, PortGroup> existingData = portGroupData.get(pgconfig);
if (existingData != null) {
portGroupChanged(pgconfig, existingData, true);
}
}
return new Status(StatusCode.SUCCESS);
}
private void addStaticFlowsToSwitch(Node node) {
for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
FlowConfig config = entry.getValue();
if (config.isPortGroupEnabled()) {
continue;
}
if (config.getNode().equals(node)) {
if (config.installInHw() && !config.getStatus().equals(SUCCESS)) {
Status status = this.installFlowEntryAsync(config.getFlowEntry());
config.setStatus(status.getDescription());
}
}
}
// Update cluster cache
refreshClusterStaticFlowsStatus(node);
}
private void updateStaticFlowConfigsOnNodeDown(Node node) {
log.trace("Updating Static Flow configs on node down: {}", node);
List<Integer> toRemove = new ArrayList<Integer>();
for (Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
FlowConfig config = entry.getValue();
if (config.isPortGroupEnabled()) {
continue;
}
if (config.installInHw() && config.getNode().equals(node)) {
if (config.isInternalFlow()) {
// Take note of this controller generated static flow
toRemove.add(entry.getKey());
} else {
config.setStatus(NODEDOWN);
}
}
}
// Remove controller generated static flows for this node
for (Integer index : toRemove) {
staticFlows.remove(index);
}
// Update cluster cache
refreshClusterStaticFlowsStatus(node);
}
private void updateStaticFlowConfigsOnContainerModeChange(UpdateType update) {
log.trace("Updating Static Flow configs on container mode change: {}", update);
for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
FlowConfig config = entry.getValue();
if (config.isPortGroupEnabled()) {
continue;
}
if (config.installInHw() && !config.isInternalFlow()) {
switch (update) {
case ADDED:
config.setStatus("Removed from node because in container mode");
break;
case REMOVED:
config.setStatus(SUCCESS);
break;
default:
}
}
}
// Update cluster cache
refreshClusterStaticFlowsStatus(null);
}
@Override
public Status removeStaticFlow(FlowConfig config) {
/*
* No config.isInternal() check as NB does not take this path and GUI
* cannot issue a delete on an internally generated flow. We need this
* path to be accessible when the switch mode is changed from proactive to
* reactive, so that we can remove the internally generated LLDP and ARP
* punt flows
*/
// Look for the target configuration entry
Integer key = 0;
FlowConfig target = null;
for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
if (entry.getValue().isByNameAndNodeIdEqual(config)) {
key = entry.getKey();
target = entry.getValue();
break;
}
}
if (target == null) {
return new Status(StatusCode.NOTFOUND, "Entry Not Present");
}
// Program the network node
Status status = this.uninstallFlowEntry(config.getFlowEntry());
// Update configuration database if programming was successful
if (status.isSuccess()) {
staticFlows.remove(key);
}
return status;
}
@Override
public Status removeStaticFlow(String name, Node node) {
// Look for the target configuration entry
Integer key = 0;
FlowConfig target = null;
for (ConcurrentMap.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) {
if (mapEntry.getValue().isByNameAndNodeIdEqual(name, node)) {
key = mapEntry.getKey();
target = mapEntry.getValue();
break;
}
}
if (target == null) {
return new Status(StatusCode.NOTFOUND, "Entry Not Present");
}
// Validity check for api3 entry point
if (target.isInternalFlow()) {
String msg = "Invalid operation: Controller generated flow cannot be deleted";
String logMsg = msg + ": {}";
log.warn(logMsg, name);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
if (target.isPortGroupEnabled()) {
String msg = "Invalid operation: Port Group flows cannot be deleted through this API";
String logMsg = msg + ": {}";
log.warn(logMsg, name);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
// Program the network node
Status status = this.removeEntry(target.getFlowEntry(), false);
// Update configuration database if programming was successful
if (status.isSuccess()) {
staticFlows.remove(key);
}
return status;
}
@Override
public Status modifyStaticFlow(FlowConfig newFlowConfig) {
// Validity check for api3 entry point
if (newFlowConfig.isInternalFlow()) {
String msg = "Invalid operation: Controller generated flow cannot be modified";
String logMsg = msg + ": {}";
log.warn(logMsg, newFlowConfig);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
// Validity Check
Status status = newFlowConfig.validate(container);
if (!status.isSuccess()) {
String msg = "Invalid Configuration (" + status.getDescription() + ")";
newFlowConfig.setStatus(msg);
log.warn("Invalid Configuration for flow {}. The failure is {}", newFlowConfig, status.getDescription());
return new Status(StatusCode.BADREQUEST, msg);
}
FlowConfig oldFlowConfig = null;
Integer index = null;
for (ConcurrentMap.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) {
FlowConfig entry = mapEntry.getValue();
if (entry.isByNameAndNodeIdEqual(newFlowConfig.getName(), newFlowConfig.getNode())) {
oldFlowConfig = entry;
index = mapEntry.getKey();
break;
}
}
if (oldFlowConfig == null) {
String msg = "Attempt to modify a non existing static flow";
String logMsg = msg + ": {}";
log.warn(logMsg, newFlowConfig);
return new Status(StatusCode.NOTFOUND, msg);
}
// Do not attempt to reinstall the flow, warn user
if (newFlowConfig.equals(oldFlowConfig)) {
String msg = "No modification detected";
log.info("Static flow modification skipped. New flow and old flow are the same: {}", newFlowConfig);
return new Status(StatusCode.SUCCESS, msg);
}
// If flow is installed, program the network node
status = new Status(StatusCode.SUCCESS, "Saved in config");
if (oldFlowConfig.installInHw()) {
status = this.modifyFlowEntry(oldFlowConfig.getFlowEntry(), newFlowConfig.getFlowEntry());
}
// Update configuration database if programming was successful
if (status.isSuccess()) {
newFlowConfig.setStatus(status.getDescription());
staticFlows.put(index, newFlowConfig);
}
return status;
}
@Override
public Status toggleStaticFlowStatus(String name, Node node) {
return toggleStaticFlowStatus(getStaticFlow(name, node));
}
@Override
public Status toggleStaticFlowStatus(FlowConfig config) {
if (config == null) {
String msg = "Invalid request: null flow config";
log.warn(msg);
return new Status(StatusCode.BADREQUEST, msg);
}
// Validity check for api3 entry point
if (config.isInternalFlow()) {
String msg = "Invalid operation: Controller generated flow cannot be modified";
String logMsg = msg + ": {}";
log.warn(logMsg, config);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
// Find the config entry
Integer key = 0;
FlowConfig target = null;
for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
FlowConfig conf = entry.getValue();
if (conf.isByNameAndNodeIdEqual(config)) {
key = entry.getKey();
target = conf;
break;
}
}
if (target != null) {
Status status = target.validate(container);
if (!status.isSuccess()) {
log.warn(status.getDescription());
return status;
}
status = (target.installInHw()) ? this.uninstallFlowEntry(target.getFlowEntry()) : this
.installFlowEntry(target.getFlowEntry());
if (status.isSuccess()) {
// Update Configuration database
target.setStatus(SUCCESS);
target.toggleInstallation();
staticFlows.put(key, target);
}
return status;
}
return new Status(StatusCode.NOTFOUND, "Unable to locate the entry. Failed to toggle status");
}
/**
* Reinsert all static flow entries in the cache to force cache updates in
* the cluster. This is useful when only some parameters were changed in the
* entries, like the status.
*
* @param node
* The node for which the static flow configurations have to be
* refreshed. If null, all nodes static flows will be refreshed.
*/
private void refreshClusterStaticFlowsStatus(Node node) {
// Refresh cluster cache
for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
if (node == null || entry.getValue().getNode().equals(node)) {
staticFlows.put(entry.getKey(), entry.getValue());
}
}
}
/**
* Uninstall all the non-internal Flow Entries present in the software view.
* If requested, a copy of each original flow entry will be stored in the
* inactive list so that it can be re-applied when needed (this is typically
* the case when running in the default container and the controller moves
* to container mode). NOTE WELL: since this routine performs a bulk change,
* it operates only on the entries for locally attached nodes, so as to
* avoid redundant operations initiated by multiple cluster nodes.
*
* @param preserveFlowEntries
* if true, a copy of each original entry is stored in the
* inactive list
*/
private void uninstallAllFlowEntries(boolean preserveFlowEntries) {
log.info("Uninstalling all non-internal flows");
List<FlowEntryInstall> toRemove = new ArrayList<FlowEntryInstall>();
// Store entries / create target list
for (ConcurrentMap.Entry<FlowEntryInstall, FlowEntryInstall> mapEntry : installedSwView.entrySet()) {
FlowEntryInstall flowEntries = mapEntry.getValue();
// Skip internal generated static flows
if (!flowEntries.isInternal()) {
toRemove.add(flowEntries);
// Store the original entries if requested
if (preserveFlowEntries) {
inactiveFlows.put(flowEntries.getOriginal(), flowEntries.getOriginal());
}
}
}
// Now remove the entries
for (FlowEntryInstall flowEntryHw : toRemove) {
Node n = flowEntryHw.getNode();
if (n != null && connectionManager.getLocalityStatus(n) == ConnectionLocality.LOCAL) {
Status status = this.removeEntryInternal(flowEntryHw, false);
if (!status.isSuccess()) {
log.warn("Failed to remove entry: {}. The failure is: {}", flowEntryHw, status.getDescription());
}
} else {
log.debug("Not removing entry {} because not connected locally, the remote guy will do it's job",
flowEntryHw);
}
}
}
/**
* Re-install all the Flow Entries present in the inactive list. The inactive
* list will be empty at the end of this call. This function is called on the
* default container instance of FRM only when the last container is deleted.
*/
private void reinstallAllFlowEntries() {
log.info("Reinstalling all inactive flows");
for (FlowEntry flowEntry : this.inactiveFlows.keySet()) {
this.addEntry(flowEntry, false);
}
// Empty inactive list in any case
inactiveFlows.clear();
}
@Override
public List<FlowConfig> getStaticFlows() {
return getStaticFlowsOrderedList(staticFlows, staticFlowsOrdinal.get(0).intValue());
}
// TODO: need to come up with a better algorithm for maintaining the order
// of the configuration entries:
// with the current one, indices associated with deleted entries cannot be
// reused and the map grows...
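// For example (hypothetical): after adding flows with ordinals 0..4 and
// deleting the one at ordinal 2, the lookup below still scans 0..maxKey and
// simply skips the hole; the freed index is never handed out again.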
private List<FlowConfig> getStaticFlowsOrderedList(ConcurrentMap<Integer, FlowConfig> flowMap, int maxKey) {
List<FlowConfig> orderedList = new ArrayList<FlowConfig>();
for (int i = 0; i <= maxKey; i++) {
FlowConfig entry = flowMap.get(i);
if (entry != null) {
orderedList.add(entry);
}
}
return orderedList;
}
@Override
public FlowConfig getStaticFlow(String name, Node node) {
ConcurrentMap.Entry<Integer, FlowConfig> entry = getStaticFlowEntry(name, node);
if(entry != null) {
return entry.getValue();
}
return null;
}
@Override
public List<FlowConfig> getStaticFlows(Node node) {
List<FlowConfig> list = new ArrayList<FlowConfig>();
for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
if (entry.getValue().onNode(node)) {
list.add(entry.getValue());
}
}
return list;
}
@Override
public List<String> getStaticFlowNamesForNode(Node node) {
List<String> list = new ArrayList<String>();
for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
if (entry.getValue().onNode(node)) {
list.add(entry.getValue().getName());
}
}
return list;
}
@Override
public List<Node> getListNodeWithConfiguredFlows() {
Set<Node> set = new HashSet<Node>();
for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
set.add(entry.getValue().getNode());
}
return new ArrayList<Node>(set);
}
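// Load the static flow and port group configurations persisted by
// saveConfigInternal() and replay them through the regular add paths.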
@SuppressWarnings("unchecked")
private void loadFlowConfiguration() {
ObjectReader objReader = new ObjectReader();
ConcurrentMap<Integer, FlowConfig> confList = (ConcurrentMap<Integer, FlowConfig>) objReader.read(this,
frmFileName);
ConcurrentMap<String, PortGroupConfig> pgConfig = (ConcurrentMap<String, PortGroupConfig>) objReader.read(this,
portGroupFileName);
if (pgConfig != null) {
for (ConcurrentMap.Entry<String, PortGroupConfig> entry : pgConfig.entrySet()) {
addPortGroupConfig(entry.getKey(), entry.getValue().getMatchString(), true);
}
}
if (confList == null) {
return;
}
int maxKey = 0;
for (Integer key : confList.keySet()) {
if (key.intValue() > maxKey) {
maxKey = key.intValue();
}
}
for (FlowConfig conf : getStaticFlowsOrderedList(confList, maxKey)) {
addStaticFlowInternal(conf, true);
}
}
@Override
public Object readObject(ObjectInputStream ois) throws FileNotFoundException, IOException, ClassNotFoundException {
return ois.readObject();
}
@Override
public Status saveConfig() {
return saveConfigInternal();
}
private Status saveConfigInternal() {
ObjectWriter objWriter = new ObjectWriter();
ConcurrentMap<Integer, FlowConfig> nonDynamicFlows = new ConcurrentHashMap<Integer, FlowConfig>();
for (Integer ordinal : staticFlows.keySet()) {
FlowConfig config = staticFlows.get(ordinal);
// Do not save dynamic and controller generated static flows
if (config.isDynamic() || config.isInternalFlow()) {
continue;
}
nonDynamicFlows.put(ordinal, config);
}
objWriter.write(nonDynamicFlows, frmFileName);
objWriter.write(new ConcurrentHashMap<String, PortGroupConfig>(portGroupConfigs), portGroupFileName);
return new Status(StatusCode.SUCCESS, null);
}
@Override
public void subnetNotify(Subnet sub, boolean add) {
}
private void installImplicitARPReplyPunt(Node node) {
if (node == null) {
return;
}
List<String> puntAction = new ArrayList<String>();
puntAction.add(ActionType.CONTROLLER.toString());
FlowConfig allowARP = new FlowConfig();
allowARP.setInstallInHw(true);
allowARP.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Punt ARP Reply" + FlowConfig.INTERNALSTATICFLOWEND);
allowARP.setPriority("500");
allowARP.setNode(node);
allowARP.setEtherType("0x" + Integer.toHexString(EtherTypes.ARP.intValue()).toUpperCase());
allowARP.setDstMac(HexEncode.bytesToHexString(switchManager.getControllerMAC()));
allowARP.setActions(puntAction);
addStaticFlowInternal(allowARP, true); // skip validation on internal static flow name
}
/**
* (non-Javadoc)
*
* @see org.opendaylight.controller.switchmanager.ISwitchManagerAware#modeChangeNotify(org.opendaylight.controller.sal.core.Node,
* boolean)
*
* This method can be called from within the OSGi framework context.
* Given that the programming operation can take some time, it is not
* good practice to run time-consuming operations in that context,
* hence the work is moved off to a different thread for async processing.
*/
private ExecutorService executor;
@Override
public void modeChangeNotify(final Node node, final boolean proactive) {
Callable<Status> modeChangeCallable = new Callable<Status>() {
@Override
public Status call() throws Exception {
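// Build the three controller-internal flows for proactive mode: punt ARP
// to the controller, punt LLDP to the controller, and a lowest-priority
// catch-all drop.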
List<FlowConfig> defaultConfigs = new ArrayList<FlowConfig>();
List<String> puntAction = new ArrayList<String>();
puntAction.add(ActionType.CONTROLLER.toString());
FlowConfig allowARP = new FlowConfig();
allowARP.setInstallInHw(true);
allowARP.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Punt ARP" + FlowConfig.INTERNALSTATICFLOWEND);
allowARP.setPriority("1");
allowARP.setNode(node);
allowARP.setEtherType("0x" + Integer.toHexString(EtherTypes.ARP.intValue())
.toUpperCase());
allowARP.setActions(puntAction);
defaultConfigs.add(allowARP);
FlowConfig allowLLDP = new FlowConfig();
allowLLDP.setInstallInHw(true);
allowLLDP.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Punt LLDP" + FlowConfig.INTERNALSTATICFLOWEND);
allowLLDP.setPriority("1");
allowLLDP.setNode(node);
allowLLDP.setEtherType("0x" + Integer.toHexString(EtherTypes.LLDP.intValue())
.toUpperCase());
allowLLDP.setActions(puntAction);
defaultConfigs.add(allowLLDP);
List<String> dropAction = new ArrayList<String>();
dropAction.add(ActionType.DROP.toString());
FlowConfig dropAllConfig = new FlowConfig();
dropAllConfig.setInstallInHw(true);
dropAllConfig.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Catch-All Drop"
+ FlowConfig.INTERNALSTATICFLOWEND);
dropAllConfig.setPriority("0");
dropAllConfig.setNode(node);
dropAllConfig.setActions(dropAction);
defaultConfigs.add(dropAllConfig);
log.info("Forwarding mode for node {} set to {}", node, (proactive ? "proactive" : "reactive"));
for (FlowConfig fc : defaultConfigs) {
Status status = (proactive) ? addStaticFlowInternal(fc, false) : removeStaticFlow(fc);
if (status.isSuccess()) {
log.info("{} Proactive Static flow: {}", (proactive ? "Installed" : "Removed"), fc.getName());
} else {
log.warn("Failed to {} Proactive Static flow: {}", (proactive ? "install" : "remove"),
fc.getName());
}
}
return new Status(StatusCode.SUCCESS);
}
};
/*
* Execute the work outside the caller context, this could be an
* expensive operation and we don't want to block the caller for it.
*/
this.executor.submit(modeChangeCallable);
}
/**
* Remove from the databases all the flows installed on the node
*
* @param node
*/
private void cleanDatabaseForNode(Node node) {
log.info("Cleaning Flow database for Node {}", node);
if (nodeFlows.containsKey(node)) {
List<FlowEntryInstall> toRemove = new ArrayList<FlowEntryInstall>(nodeFlows.get(node));
for (FlowEntryInstall entry : toRemove) {
updateLocalDatabase(entry, false);
}
}
}
private boolean doesFlowContainNodeConnector(Flow flow, NodeConnector nc) {
if (nc == null) {
return false;
}
Match match = flow.getMatch();
if (match.isPresent(MatchType.IN_PORT)) {
NodeConnector matchPort = (NodeConnector) match.getField(MatchType.IN_PORT).getValue();
if (matchPort.equals(nc)) {
return true;
}
}
List<Action> actionsList = flow.getActions();
if (actionsList != null) {
for (Action action : actionsList) {
if (action instanceof Output) {
NodeConnector actionPort = ((Output) action).getPort();
if (actionPort.equals(nc)) {
return true;
}
}
}
}
return false;
}
@Override
public void notifyNode(Node node, UpdateType type, Map<String, Property> propMap) {
this.pendingEvents.offer(new NodeUpdateEvent(type, node));
}
@Override
public void notifyNodeConnector(NodeConnector nodeConnector, UpdateType type, Map<String, Property> propMap) {
boolean updateStaticFlowCluster = false;
switch (type) {
case ADDED:
break;
case CHANGED:
Config config = (propMap == null) ? null : (Config) propMap.get(Config.ConfigPropName);
if (config != null) {
switch (config.getValue()) {
case Config.ADMIN_DOWN:
log.trace("Port {} is administratively down: uninstalling interested flows", nodeConnector);
updateStaticFlowCluster = removeFlowsOnNodeConnectorDown(nodeConnector);
break;
case Config.ADMIN_UP:
log.trace("Port {} is administratively up: installing interested flows", nodeConnector);
updateStaticFlowCluster = installFlowsOnNodeConnectorUp(nodeConnector);
break;
case Config.ADMIN_UNDEF:
break;
default:
}
}
break;
case REMOVED:
// This is the case where a switch port is removed from the SDN agent space
log.trace("Port {} was removed from our control: uninstalling interested flows", nodeConnector);
updateStaticFlowCluster = removeFlowsOnNodeConnectorDown(nodeConnector);
break;
default:
}
if (updateStaticFlowCluster) {
refreshClusterStaticFlowsStatus(nodeConnector.getNode());
}
}
/*
* It goes through the static flow configurations, identifies the ones
* which have the specified node connector as input or output port, and
* installs them on the network node if they are marked to be installed in
* hardware and their status shows they were not installed yet
*/
private boolean installFlowsOnNodeConnectorUp(NodeConnector nodeConnector) {
boolean updated = false;
List<FlowConfig> flowConfigForNode = getStaticFlows(nodeConnector.getNode());
for (FlowConfig flowConfig : flowConfigForNode) {
if (doesFlowContainNodeConnector(flowConfig.getFlow(), nodeConnector)) {
if (flowConfig.installInHw() && !flowConfig.getStatus().equals(SUCCESS)) {
Status status = this.installFlowEntry(flowConfig.getFlowEntry());
if (!status.isSuccess()) {
flowConfig.setStatus(status.getDescription());
} else {
flowConfig.setStatus(SUCCESS);
}
updated = true;
}
}
}
return updated;
}
/*
* Remove from the network node all the flows which have the specified node
* connector as input or output port. If any of the flow entries is a static
* flow, it updates the corresponding configuration.
*/
private boolean removeFlowsOnNodeConnectorDown(NodeConnector nodeConnector) {
boolean updated = false;
List<FlowEntryInstall> nodeFlowEntries = nodeFlows.get(nodeConnector.getNode());
if (nodeFlowEntries == null) {
return updated;
}
for (FlowEntryInstall fei : new ArrayList<FlowEntryInstall>(nodeFlowEntries)) {
if (doesFlowContainNodeConnector(fei.getInstall().getFlow(), nodeConnector)) {
Status status = this.removeEntryInternal(fei, true);
if (!status.isSuccess()) {
continue;
}
/*
* If the flow entry is a static flow, then update its
* configuration
*/
if (fei.getGroupName().equals(FlowConfig.STATICFLOWGROUP)) {
FlowConfig flowConfig = getStaticFlow(fei.getFlowName(), fei.getNode());
if (flowConfig != null) {
flowConfig.setStatus(PORTREMOVED);
updated = true;
}
}
}
}
return updated;
}
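// Clone a port-group template flow into a per-port dynamic flow whose name
// encodes both the port group configuration name and the port number.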
private FlowConfig getDerivedFlowConfig(FlowConfig original, String configName, Short port) {
FlowConfig derivedFlow = new FlowConfig(original);
derivedFlow.setDynamic(true);
derivedFlow.setPortGroup(null);
derivedFlow.setName(original.getName() + "_" + configName + "_" + port);
derivedFlow.setIngressPort(port + "");
return derivedFlow;
}
private void addPortGroupFlows(PortGroupConfig config, Node node, PortGroup data) {
for (FlowConfig staticFlow : staticFlows.values()) {
if (staticFlow.getPortGroup() == null) {
continue;
}
if ((staticFlow.getNode().equals(node)) && (staticFlow.getPortGroup().equals(config.getName()))) {
for (Short port : data.getPorts()) {
FlowConfig derivedFlow = getDerivedFlowConfig(staticFlow, config.getName(), port);
addStaticFlowInternal(derivedFlow, false);
}
}
}
}
private void removePortGroupFlows(PortGroupConfig config, Node node, PortGroup data) {
for (FlowConfig staticFlow : staticFlows.values()) {
if (staticFlow.getPortGroup() == null) {
continue;
}
if (staticFlow.getNode().equals(node) && staticFlow.getPortGroup().equals(config.getName())) {
for (Short port : data.getPorts()) {
FlowConfig derivedFlow = getDerivedFlowConfig(staticFlow, config.getName(), port);
removeStaticFlow(derivedFlow);
}
}
}
}
@Override
public void portGroupChanged(PortGroupConfig config, Map<Node, PortGroup> data, boolean add) {
log.info("PortGroup Changed for: {} Data: {}", config, portGroupData);
Map<Node, PortGroup> existingData = portGroupData.get(config);
if (existingData != null) {
for (Map.Entry<Node, PortGroup> entry : data.entrySet()) {
PortGroup existingPortGroup = existingData.get(entry.getKey());
if (existingPortGroup == null) {
if (add) {
existingData.put(entry.getKey(), entry.getValue());
addPortGroupFlows(config, entry.getKey(), entry.getValue());
}
} else {
if (add) {
existingPortGroup.getPorts().addAll(entry.getValue().getPorts());
addPortGroupFlows(config, entry.getKey(), entry.getValue());
} else {
existingPortGroup.getPorts().removeAll(entry.getValue().getPorts());
removePortGroupFlows(config, entry.getKey(), entry.getValue());
}
}
}
} else {
if (add) {
portGroupData.put(config, data);
for (Node swid : data.keySet()) {
addPortGroupFlows(config, swid, data.get(swid));
}
}
}
}
@Override
public boolean addPortGroupConfig(String name, String regex, boolean restore) {
PortGroupConfig config = portGroupConfigs.get(name);
if (config != null) {
return false;
}
if ((portGroupProvider == null) && !restore) {
return false;
}
if ((portGroupProvider != null) && (!portGroupProvider.isMatchCriteriaSupported(regex))) {
return false;
}
config = new PortGroupConfig(name, regex);
portGroupConfigs.put(name, config);
if (portGroupProvider != null) {
portGroupProvider.createPortGroupConfig(config);
}
return true;
}
@Override
public boolean delPortGroupConfig(String name) {
PortGroupConfig config = portGroupConfigs.get(name);
if (config == null) {
return false;
}
if (portGroupProvider != null) {
portGroupProvider.deletePortGroupConfig(config);
}
portGroupConfigs.remove(name);
return true;
}
private void usePortGroupConfig(String name) {
PortGroupConfig config = portGroupConfigs.get(name);
if (config == null) {
return;
}
if (portGroupProvider != null) {
Map<Node, PortGroup> data = portGroupProvider.getPortGroupData(config);
portGroupData.put(config, data);
}
}
@Override
public Map<String, PortGroupConfig> getPortGroupConfigs() {
return portGroupConfigs;
}
public boolean isPortGroupSupported() {
if (portGroupProvider == null) {
return false;
}
return true;
}
public void setIContainer(IContainer s) {
this.container = s;
}
public void unsetIContainer(IContainer s) {
if (this.container == s) {
this.container = null;
}
}
@Override
public PortGroupProvider getPortGroupProvider() {
return portGroupProvider;
}
public void unsetPortGroupProvider(PortGroupProvider portGroupProvider) {
this.portGroupProvider = null;
}
public void setPortGroupProvider(PortGroupProvider portGroupProvider) {
this.portGroupProvider = portGroupProvider;
portGroupProvider.registerPortGroupChange(this);
for (PortGroupConfig config : portGroupConfigs.values()) {
portGroupProvider.createPortGroupConfig(config);
}
}
public void setFrmAware(IForwardingRulesManagerAware obj) {
this.frmAware.add(obj);
}
public void unsetFrmAware(IForwardingRulesManagerAware obj) {
this.frmAware.remove(obj);
}
void setClusterContainerService(IClusterContainerServices s) {
log.debug("Cluster Service set");
this.clusterContainerService = s;
}
void unsetClusterContainerService(IClusterContainerServices s) {
if (this.clusterContainerService == s) {
log.debug("Cluster Service removed!");
this.clusterContainerService = null;
}
}
private String getContainerName() {
if (container == null) {
return GlobalConstants.DEFAULT.toString();
}
return container.getName();
}
/**
* Function called by the dependency manager when all the required
* dependencies are satisfied
*
*/
void init() {
frmFileName = GlobalConstants.STARTUPHOME.toString() + "frm_staticflows_" + this.getContainerName() + ".conf";
portGroupFileName = GlobalConstants.STARTUPHOME.toString() + "portgroup_" + this.getContainerName() + ".conf";
inContainerMode = false;
if (portGroupProvider != null) {
portGroupProvider.registerPortGroupChange(this);
}
cacheStartup();
registerWithOSGIConsole();
/*
* If we are not the first cluster node to come up, do not initialize
* the static flow entries ordinal
*/
if (staticFlowsOrdinal.size() == 0) {
staticFlowsOrdinal.put(0, Integer.valueOf(0));
}
pendingEvents = new LinkedBlockingQueue<FRMEvent>();
// Initialize the event handler thread
frmEventHandler = new Thread(new Runnable() {
@Override
public void run() {
while (!stopping) {
try {
final FRMEvent event = pendingEvents.take();
if (event == null) {
log.warn("Dequeued null event");
continue;
}
log.trace("Dequeued {} event", event.getClass().getSimpleName());
if (event instanceof NodeUpdateEvent) {
NodeUpdateEvent update = (NodeUpdateEvent) event;
Node node = update.getNode();
switch (update.getUpdateType()) {
case ADDED:
addStaticFlowsToSwitch(node);
break;
case REMOVED:
cleanDatabaseForNode(node);
updateStaticFlowConfigsOnNodeDown(node);
break;
default:
}
} else if (event instanceof ErrorReportedEvent) {
ErrorReportedEvent errEvent = (ErrorReportedEvent) event;
processErrorEvent(errEvent);
} else if (event instanceof WorkOrderEvent) {
/*
* Take care of handling the remote Work request
*/
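// Execute the order locally, then publish the outcome: drop it from
// workOrder and place the resulting Status into workStatus so the node
// that originated the request can pick it up.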
Runnable r = new Runnable() {
@Override
public void run() {
WorkOrderEvent work = (WorkOrderEvent) event;
FlowEntryDistributionOrder fe = work.getFe();
if (fe != null) {
logsync.trace("Executing the workOrder {}", fe);
Status gotStatus = null;
FlowEntryInstall feiCurrent = fe.getEntry();
FlowEntryInstall feiNew = workOrder.get(fe);
switch (fe.getUpType()) {
case ADDED:
gotStatus = addEntriesInternal(feiCurrent, false);
break;
case CHANGED:
gotStatus = modifyEntryInternal(feiCurrent, feiNew, false);
break;
case REMOVED:
gotStatus = removeEntryInternal(feiCurrent, false);
break;
}
// Remove the Order
workOrder.remove(fe);
logsync.trace(
"The workOrder has been executed and now the status is being returned {}", fe);
// Place the status
workStatus.put(fe, gotStatus);
} else {
log.warn("Not expected null WorkOrder", work);
}
}
};
if(executor != null) {
executor.execute(r);
}
} else if (event instanceof WorkStatusCleanup) {
/*
* Take care of handling the remote Work request
*/
WorkStatusCleanup work = (WorkStatusCleanup) event;
FlowEntryDistributionOrder fe = work.getFe();
if (fe != null) {
logsync.trace("The workStatus {} is being removed", fe);
workStatus.remove(fe);
} else {
log.warn("Not expected null WorkStatus", work);
}
} else if (event instanceof ContainerFlowChangeEvent) {
/*
* Whether it is an addition or removal, we have to
* recompute the merged flows entries taking into
* account all the current container flows because
* flow merging is not an injective function
*/
updateFlowsContainerFlow();
} else {
log.warn("Dequeued unknown event {}", event.getClass()
.getSimpleName());
}
} catch (InterruptedException e) {
// clear pending events
pendingEvents.clear();
}
}
}
}, "FRM EventHandler Collector");
}
/**
* Function called by the dependency manager when at least one dependency
* becomes unsatisfied or when the component is shutting down because, for
* example, the bundle is being stopped.
*
*/
void destroy() {
// Interrupt the thread
frmEventHandler.interrupt();
// Clear the pendingEvents queue
pendingEvents.clear();
frmAware.clear();
workMonitor.clear();
}
/**
* Function called by the dependency manager after "init()" is called and
* after the services provided by the class are registered in the service
* registry.
*
*/
void start() {
// Initialize graceful stop flag
stopping = false;
// Allocate the executor service
this.executor = Executors.newFixedThreadPool(maxPoolSize);
// Start event handler thread
frmEventHandler.start();
/*
* Read startup and build database if we have not already gotten the
* configurations synced from another node
*/
if (staticFlows.isEmpty()) {
loadFlowConfiguration();
}
}
/**
* Function called by the dependency manager before the services exported by
* the component are unregistered; this will be followed by a "destroy()"
* call.
*/
void stop() {
stopping = true;
uninstallAllFlowEntries(false);
// Shutdown executor
this.executor.shutdownNow();
// Now walk the whole workMonitor map and wake up anyone sleeping on a
// pending order, because destruction is happening
for (FlowEntryDistributionOrder fe : workMonitor.keySet()) {
FlowEntryDistributionOrderFutureTask task = workMonitor.get(fe);
task.cancel(true);
}
}
public void setFlowProgrammerService(IFlowProgrammerService service) {
this.programmer = service;
}
public void unsetFlowProgrammerService(IFlowProgrammerService service) {
if (this.programmer == service) {
this.programmer = null;
}
}
public void setSwitchManager(ISwitchManager switchManager) {
this.switchManager = switchManager;
}
public void unsetSwitchManager(ISwitchManager switchManager) {
if (this.switchManager == switchManager) {
this.switchManager = null;
}
}
@Override
public void tagUpdated(String containerName, Node n, short oldTag, short newTag, UpdateType t) {
if (!container.getName().equals(containerName)) {
return;
}
}
@Override
public void containerFlowUpdated(String containerName, ContainerFlow previous, ContainerFlow current, UpdateType t) {
if (!container.getName().equals(containerName)) {
return;
}
log.trace("Container {}: Updating installed flows because of container flow change: {} {}",
container.getName(), t, current);
ContainerFlowChangeEvent ev = new ContainerFlowChangeEvent(previous, current, t);
pendingEvents.offer(ev);
}
@Override
public void nodeConnectorUpdated(String containerName, NodeConnector nc, UpdateType t) {
if (!container.getName().equals(containerName)) {
return;
}
boolean updateStaticFlowCluster = false;
switch (t) {
case REMOVED:
log.trace("Port {} was removed from container: uninstalling interested flows", nc);
updateStaticFlowCluster = removeFlowsOnNodeConnectorDown(nc);
break;
case ADDED:
log.trace("Port {} was added to container: reinstall interested flows", nc);
updateStaticFlowCluster = installFlowsOnNodeConnectorUp(nc);
break;
case CHANGED:
break;
default:
}
if (updateStaticFlowCluster) {
refreshClusterStaticFlowsStatus(nc.getNode());
}
}
@Override
public void containerModeUpdated(UpdateType update) {
// Only default container instance reacts on this event
if (!container.getName().equals(GlobalConstants.DEFAULT.toString())) {
return;
}
switch (update) {
case ADDED:
/*
* Controller is moving to container mode. We are in the default
* container context, we need to remove all our non-internal flows
* to prevent any container isolation breakage. We also need to
* preserve our flows so that they can be re-installed if we move
* back to non-container mode (no containers).
*/
this.inContainerMode = true;
this.uninstallAllFlowEntries(true);
break;
case REMOVED:
this.inContainerMode = false;
this.reinstallAllFlowEntries();
break;
default:
}
// Update our configuration DB
updateStaticFlowConfigsOnContainerModeChange(update);
}
protected abstract class FRMEvent {
}
private class NodeUpdateEvent extends FRMEvent {
private final Node node;
private final UpdateType update;
public NodeUpdateEvent(UpdateType update, Node node) {
this.update = update;
this.node = node;
}
public UpdateType getUpdateType() {
return update;
}
public Node getNode() {
return node;
}
}
private class ErrorReportedEvent extends FRMEvent {
private final long rid;
private final Node node;
private final Object error;
public ErrorReportedEvent(long rid, Node node, Object error) {
this.rid = rid;
this.node = node;
this.error = error;
}
public long getRequestId() {
return rid;
}
public Object getError() {
return error;
}
public Node getNode() {
return node;
}
}
private class WorkOrderEvent extends FRMEvent {
private FlowEntryDistributionOrder fe;
private FlowEntryInstall newEntry;
/**
* @param fe
* @param newEntry
*/
WorkOrderEvent(FlowEntryDistributionOrder fe, FlowEntryInstall newEntry) {
this.fe = fe;
this.newEntry = newEntry;
}
/**
* @return the fe
*/
public FlowEntryDistributionOrder getFe() {
return fe;
}
/**
* @return the newEntry
*/
public FlowEntryInstall getNewEntry() {
return newEntry;
}
}
private class ContainerFlowChangeEvent extends FRMEvent {
private final ContainerFlow previous;
private final ContainerFlow current;
private final UpdateType type;
public ContainerFlowChangeEvent(ContainerFlow previous, ContainerFlow current, UpdateType type) {
this.previous = previous;
this.current = current;
this.type = type;
}
public ContainerFlow getPrevious() {
return this.previous;
}
public ContainerFlow getCurrent() {
return this.current;
}
public UpdateType getType() {
return this.type;
}
}
private class WorkStatusCleanup extends FRMEvent {
private FlowEntryDistributionOrder fe;
/**
* @param fe
*/
WorkStatusCleanup(FlowEntryDistributionOrder fe) {
this.fe = fe;
}
/**
* @return the fe
*/
public FlowEntryDistributionOrder getFe() {
return fe;
}
}
/*
* OSGI COMMANDS
*/
@Override
public String getHelp() {
StringBuffer help = new StringBuffer();
help.append("
help.append("\t printMatrixData - Prints the Matrix Configs\n");
help.append("\t addMatrixConfig <name> <regex>\n");
help.append("\t delMatrixConfig <name>\n");
help.append("\t useMatrixConfig <name>\n");
return help.toString();
}
public void _printMatrixData(CommandInterpreter ci) {
ci.println("Configs : ");
ci.println("
ci.println(portGroupConfigs);
ci.println("Data : ");
ci.println("
ci.println(portGroupData);
}
public void _addMatrixConfig(CommandInterpreter ci) {
String name = ci.nextArgument();
String regex = ci.nextArgument();
addPortGroupConfig(name, regex, false);
}
public void _delMatrixConfig(CommandInterpreter ci) {
String name = ci.nextArgument();
delPortGroupConfig(name);
}
public void _useMatrixConfig(CommandInterpreter ci) {
String name = ci.nextArgument();
usePortGroupConfig(name);
}
public void _arpPunt(CommandInterpreter ci) {
String switchId = ci.nextArgument();
long swid = HexEncode.stringToLong(switchId);
Node node = NodeCreator.createOFNode(swid);
installImplicitARPReplyPunt(node);
}
public void _frmaddflow(CommandInterpreter ci) throws UnknownHostException {
Node node = null;
String nodeId = ci.nextArgument();
if (nodeId == null) {
ci.print("Node id not specified");
return;
}
try {
node = NodeCreator.createOFNode(Long.valueOf(nodeId));
} catch (NumberFormatException e) {
ci.print("Node id not a number");
return;
}
ci.println(this.programmer.addFlow(node, getSampleFlow(node)));
}
public void _frmremoveflow(CommandInterpreter ci) throws UnknownHostException {
Node node = null;
String nodeId = ci.nextArgument();
if (nodeId == null) {
ci.print("Node id not specified");
return;
}
try {
node = NodeCreator.createOFNode(Long.valueOf(nodeId));
} catch (NumberFormatException e) {
ci.print("Node id not a number");
return;
}
ci.println(this.programmer.removeFlow(node, getSampleFlow(node)));
}
private Flow getSampleFlow(Node node) throws UnknownHostException {
NodeConnector port = NodeConnectorCreator.createOFNodeConnector((short) 24, node);
NodeConnector oport = NodeConnectorCreator.createOFNodeConnector((short) 30, node);
byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc };
byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f };
InetAddress srcIP = InetAddress.getByName("172.28.30.50");
InetAddress dstIP = InetAddress.getByName("171.71.9.52");
InetAddress ipMask = InetAddress.getByName("255.255.255.0");
InetAddress ipMask2 = InetAddress.getByName("255.0.0.0");
short ethertype = EtherTypes.IPv4.shortValue();
short vlan = (short) 27;
byte vlanPr = 3;
Byte tos = 4;
byte proto = IPProtocols.TCP.byteValue();
short src = (short) 55000;
short dst = 80;
/*
* Create a SAL Flow aFlow
*/
Match match = new Match();
match.setField(MatchType.IN_PORT, port);
match.setField(MatchType.DL_SRC, srcMac);
match.setField(MatchType.DL_DST, dstMac);
match.setField(MatchType.DL_TYPE, ethertype);
match.setField(MatchType.DL_VLAN, vlan);
match.setField(MatchType.DL_VLAN_PR, vlanPr);
match.setField(MatchType.NW_SRC, srcIP, ipMask);
match.setField(MatchType.NW_DST, dstIP, ipMask2);
match.setField(MatchType.NW_TOS, tos);
match.setField(MatchType.NW_PROTO, proto);
match.setField(MatchType.TP_SRC, src);
match.setField(MatchType.TP_DST, dst);
List<Action> actions = new ArrayList<Action>();
actions.add(new Output(oport));
actions.add(new PopVlan());
actions.add(new Flood());
actions.add(new Controller());
return new Flow(match, actions);
}
@Override
public Status saveConfiguration() {
return saveConfig();
}
public void _frmNodeFlows(CommandInterpreter ci) {
String nodeId = ci.nextArgument();
Node node = Node.fromString(nodeId);
if (node == null) {
ci.println("frmNodeFlows <node> [verbose]");
return;
}
boolean verbose = false;
String verboseCheck = ci.nextArgument();
if (verboseCheck != null) {
verbose = verboseCheck.equals("true");
}
if (!nodeFlows.containsKey(node)) {
return;
}
// Dump per node database
for (FlowEntryInstall entry : nodeFlows.get(node)) {
if (!verbose) {
ci.println(node + " " + installedSwView.get(entry).getFlowName());
} else {
ci.println(node + " " + installedSwView.get(entry).toString());
}
}
}
public void _frmGroupFlows(CommandInterpreter ci) {
String group = ci.nextArgument();
if (group == null) {
ci.println("frmGroupFlows <group> [verbose]");
return;
}
boolean verbose = false;
String verboseCheck = ci.nextArgument();
if (verboseCheck != null) {
verbose = verboseCheck.equalsIgnoreCase("true");
}
if (!groupFlows.containsKey(group)) {
return;
}
// Dump per group database
ci.println("Group " + group + ":\n");
for (FlowEntryInstall flowEntry : groupFlows.get(group)) {
if (!verbose) {
ci.println(flowEntry.getNode() + " " + flowEntry.getFlowName());
} else {
ci.println(flowEntry.getNode() + " " + flowEntry.toString());
}
}
}
public void _frmProcessErrorEvent(CommandInterpreter ci) throws UnknownHostException {
Node node = null;
long reqId = 0L;
String nodeId = ci.nextArgument();
if (nodeId == null) {
ci.print("Node id not specified");
return;
}
String requestId = ci.nextArgument();
if (requestId == null) {
ci.print("Request id not specified");
return;
}
try {
node = NodeCreator.createOFNode(Long.valueOf(nodeId));
} catch (NumberFormatException e) {
ci.print("Node id not a number");
return;
}
try {
reqId = Long.parseLong(requestId);
} catch (NumberFormatException e) {
ci.print("Request id not a number");
return;
}
// null for error object is good enough for now
ErrorReportedEvent event = new ErrorReportedEvent(reqId, node, null);
this.processErrorEvent(event);
}
@Override
public void flowRemoved(Node node, Flow flow) {
log.trace("Received flow removed notification on {} for {}", node, flow);
// For flow entry identification, only node, match and priority matter
FlowEntryInstall test = new FlowEntryInstall(new FlowEntry("", "", flow, node), null);
FlowEntryInstall installedEntry = this.installedSwView.get(test);
if (installedEntry == null) {
log.trace("Entry is not known to us");
return;
}
// Update Static flow status
Integer key = 0;
FlowConfig target = null;
for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
FlowConfig conf = entry.getValue();
if (conf.isByNameAndNodeIdEqual(installedEntry.getFlowName(), node)) {
key = entry.getKey();
target = conf;
break;
}
}
if (target != null) {
// Update Configuration database
target.toggleInstallation();
target.setStatus(SUCCESS);
staticFlows.put(key, target);
}
// Update software views
this.updateLocalDatabase(installedEntry, false);
}
@Override
public void flowErrorReported(Node node, long rid, Object err) {
log.trace("Got error {} for message rid {} from node {}", new Object[] { err, rid, node });
pendingEvents.offer(new ErrorReportedEvent(rid, node, err));
}
private void processErrorEvent(ErrorReportedEvent event) {
Node node = event.getNode();
long rid = event.getRequestId();
Object error = event.getError();
String errorString = (error == null) ? "Not provided" : error.toString();
/*
* If this was for a flow install, remove the corresponding entry from
* the software view, looking the rid up by going through the software
* database. TODO: A more efficient rid <-> FlowEntryInstall mapping will
* have to be added in future
*/
FlowEntryInstall target = null;
List<FlowEntryInstall> flowEntryInstallList = nodeFlows.get(node);
// flowEntryInstallList could be null.
// so check for it.
if(flowEntryInstallList != null) {
for (FlowEntryInstall index : flowEntryInstallList) {
FlowEntryInstall entry = installedSwView.get(index);
if(entry != null) {
if (entry.getRequestId() == rid) {
target = entry;
break;
}
}
}
}
if (target != null) {
// This was a flow install, update database
this.updateLocalDatabase(target, false);
// also update the config
if(FlowConfig.STATICFLOWGROUP.equals(target.getGroupName())) {
ConcurrentMap.Entry<Integer, FlowConfig> staticFlowEntry = getStaticFlowEntry(target.getFlowName(),target.getNode());
// staticFlowEntry should never be null.
// the null check is just an extra defensive check.
if(staticFlowEntry != null) {
staticFlows.remove(staticFlowEntry.getKey());
}
}
}
// Notify listeners
if (frmAware != null) {
synchronized (frmAware) {
for (IForwardingRulesManagerAware frma : frmAware) {
try {
frma.requestFailed(rid, errorString);
} catch (Exception e) {
log.warn("Failed to notify {}", frma);
}
}
}
}
}
@Override
public Status solicitStatusResponse(Node node, boolean blocking) {
Status rv = new Status(StatusCode.INTERNALERROR);
if (this.programmer != null) {
if (blocking) {
rv = programmer.syncSendBarrierMessage(node);
} else {
rv = programmer.asyncSendBarrierMessage(node);
}
}
return rv;
}
public void unsetIConnectionManager(IConnectionManager s) {
if (s == this.connectionManager) {
this.connectionManager = null;
}
}
public void setIConnectionManager(IConnectionManager s) {
this.connectionManager = s;
}
@Override
public void entryCreated(Object key, String cacheName, boolean originLocal) {
/*
* Do nothing
*/
}
@Override
public void entryUpdated(Object key, Object new_value, String cacheName, boolean originLocal) {
if (originLocal) {
/*
* Local updates are of no interest
*/
return;
}
if (cacheName.equals(WORKORDERCACHE)) {
logsync.trace("Got a WorkOrderCacheUpdate for {}", key);
/*
* This is the case of one workOrder becoming available, so we need
* to dispatch the work to the appropriate handler
*/
FlowEntryDistributionOrder fe = (FlowEntryDistributionOrder) key;
FlowEntryInstall fei = fe.getEntry();
if (fei == null) {
return;
}
Node n = fei.getNode();
if (connectionManager.getLocalityStatus(n) == ConnectionLocality.LOCAL) {
logsync.trace("workOrder for fe {} processed locally", fe);
// I'm the controller in charge for the request, queue it for
// processing
pendingEvents.offer(new WorkOrderEvent(fe, (FlowEntryInstall) new_value));
}
} else if (cacheName.equals(WORKSTATUSCACHE)) {
logsync.trace("Got a WorkStatusCacheUpdate for {}", key);
/*
* This is the case of one workOrder being completed and a status
* returned
*/
FlowEntryDistributionOrder fe = (FlowEntryDistributionOrder) key;
/*
* Check if the order was initiated by this controller in that case
* we need to actually look at the status returned
*/
if (fe.getRequestorController()
.equals(clusterContainerService.getMyAddress())) {
FlowEntryDistributionOrderFutureTask fet = workMonitor.remove(fe);
if (fet != null) {
logsync.trace("workStatus response is for us {}", fe);
// Signal we got the status
fet.gotStatus(fe, workStatus.get(fe));
pendingEvents.offer(new WorkStatusCleanup(fe));
}
}
}
}
@Override
public void entryDeleted(Object key, String cacheName, boolean originLocal) {
/*
* Do nothing
*/
}
}
|
package org.eclipse.birt.report.item.crosstab.internal.ui.editors.commands;
import org.eclipse.birt.report.designer.ui.util.ExceptionUtil;
import org.eclipse.birt.report.item.crosstab.core.de.CrosstabCellHandle;
import org.eclipse.birt.report.item.crosstab.core.de.CrosstabReportItemHandle;
import org.eclipse.birt.report.item.crosstab.core.de.DimensionViewHandle;
import org.eclipse.birt.report.item.crosstab.core.de.LevelViewHandle;
import org.eclipse.birt.report.item.crosstab.core.util.CrosstabUtil;
import org.eclipse.birt.report.item.crosstab.internal.ui.editors.model.BaseCrosstabAdapter;
import org.eclipse.birt.report.item.crosstab.internal.ui.editors.model.CrosstabAdaptUtil;
import org.eclipse.birt.report.item.crosstab.internal.ui.editors.model.CrosstabCellAdapter;
import org.eclipse.birt.report.item.crosstab.internal.ui.editors.model.CrosstabHandleAdapter;
import org.eclipse.birt.report.item.crosstab.internal.ui.editors.model.VirtualCrosstabCellAdapter;
import org.eclipse.birt.report.item.crosstab.ui.i18n.Messages;
import org.eclipse.birt.report.model.api.DataItemHandle;
import org.eclipse.birt.report.model.api.DesignElementHandle;
import org.eclipse.birt.report.model.api.ExtendedItemHandle;
import org.eclipse.birt.report.model.api.LevelAttributeHandle;
import org.eclipse.birt.report.model.api.olap.DimensionHandle;
import org.eclipse.birt.report.model.api.olap.LevelHandle;
/**
* Adds the dimension handle to the crosstab. When the dimension handle is
* dragged onto the column or row area of the crosstab, this command is executed.
*/
public class AddLevelAttributeHandleCommand extends AbstractCrosstabCommand
{
private BaseCrosstabAdapter handleAdpter;
/**
* Column or row axis type. See the ICrosstabConstants row and column axis
* types.
*/
private int type = -1;
private LevelAttributeHandle[] levelAttributeHandles;
private DimensionHandle[] dimensionHandles;
private DimensionHandle dimensionHandle;
private Object after;
/**
* Trans name
*/
// private static final String NAME = "Add DiminsionViewHandle";
private static final String NAME = Messages.getString( "AddDimensionViewHandleCommand.TransName" );//$NON-NLS-1$
/**
* Constructor
*
* @param handleAdpter
* @param type
* @param dimensionHandle
* @param levelAttrHandles
* @param after
*/
public AddLevelAttributeHandleCommand( CrosstabCellAdapter handleAdpter,
int type, DimensionHandle dimensionHandle,
LevelAttributeHandle[] levelAttrHandles, Object after )
{
super( dimensionHandle );
this.dimensionHandle = dimensionHandle;
this.levelAttributeHandles = levelAttrHandles;
setHandleAdpter( handleAdpter );
setType( type );
setDimensionHandles( new DimensionHandle[]{
dimensionHandle
} );
this.after = after;
setLabel( NAME );
}
public AddLevelAttributeHandleCommand( CrosstabHandleAdapter handleAdpter,
int type, DimensionHandle dimensionHandle,
LevelAttributeHandle[] levelAttrHandles )
{
super( dimensionHandle );
this.dimensionHandle = dimensionHandle;
this.levelAttributeHandles = levelAttrHandles;
setHandleAdpter( handleAdpter );
setType( type );
setDimensionHandles( new DimensionHandle[]{
dimensionHandle
} );
setLabel( NAME );
}
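/*
 * Construction sketch (illustrative only, not from the original source;
 * the variable names are assumptions): a drop handler would typically
 * create the command with the adapter of the drop target and the axis
 * type, e.g.
 *
 *   new AddLevelAttributeHandleCommand( crosstabAdapter,
 *           ICrosstabConstants.ROW_AXIS_TYPE, dimensionHandle,
 *           levelAttributeHandles );
 *
 * and then run it through the GEF command stack.
 */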
/**
* Sets the handle adapter
*
* @param handleAdpter
*/
public void setHandleAdpter( BaseCrosstabAdapter handleAdpter )
{
this.handleAdpter = handleAdpter;
}
/**
* Gets the type
*
* @return the axis type
*/
public int getType( )
{
return type;
}
/**
* Sets the type
*
* @param type
* ICrosstabConstants.COLUMN_AXIS_TYPE or
* ICrosstabConstants.ROW_AXIS_TYPE
*/
public void setType( int type )
{
this.type = type;
}
/*
* (non-Javadoc)
*
* @see org.eclipse.gef.commands.Command#canExecute()
*/
public boolean canExecute( )
{
return getType( ) != VirtualCrosstabCellAdapter.IMMACULATE_TYPE;
}
public CrosstabReportItemHandle getCrosstabHandle( )
{
if ( this.handleAdpter instanceof CrosstabHandleAdapter )
{
return (CrosstabReportItemHandle) ( (CrosstabHandleAdapter) this.handleAdpter ).getCrosstabItemHandle( );
}
if ( this.handleAdpter instanceof CrosstabCellAdapter )
{
return ( (CrosstabCellAdapter) this.handleAdpter ).getCrosstabCellHandle( )
.getCrosstab( );
}
return null;
}
/*
* (non-Javadoc)
*
* @see org.eclipse.gef.commands.Command#execute()
*/
public void execute( )
{
if ( this.levelAttributeHandles != null
&& this.levelAttributeHandles.length > 0 )
{
transStart( NAME );
CrosstabReportItemHandle crosstabHandle = getCrosstabHandle( );
try
{
// if dimension is not in the crosstab, then add it
DimensionViewHandle viewHandle = null;
int position = findCellPosition( );
if ( CrosstabUtil.canContain( crosstabHandle,
this.dimensionHandle ) )
{
if ( crosstabHandle.getCube( ) == null )
{
crosstabHandle.setCube( CrosstabAdaptUtil.getCubeHandle( dimensionHandle ) );
}
viewHandle = crosstabHandle.insertDimension( dimensionHandle,
getType( ),
position );
}
else
{
viewHandle = crosstabHandle.getDimension( getType( ),
position - 1 );
}
// if the level attribute's level is not in the crosstab, then add it
LevelHandle levelHandle = (LevelHandle) this.levelAttributeHandles[0].getElementHandle( );
if ( levelHandle == null )
{
rollBack( );
return;
}
LevelViewHandle levelViewHandle = null;
if ( viewHandle.getLevel( levelHandle.getQualifiedName( ) ) == null )
{
DataItemHandle dataHandle = CrosstabAdaptUtil.createColumnBindingAndDataItem( (ExtendedItemHandle) crosstabHandle.getModelHandle( ),
levelHandle );
levelViewHandle = viewHandle.insertLevel( levelHandle,
viewHandle.getLevelCount( ) );
CrosstabCellHandle cellHandle = levelViewHandle.getCell( );
cellHandle.addContent( dataHandle );
CrosstabUtil.addLabelToHeader( levelViewHandle );
}
else
{
levelViewHandle = viewHandle.getLevel( levelHandle.getQualifiedName( ) );
}
position = findPosition( );
// add level attribute to crosstab
for ( LevelAttributeHandle lah : this.levelAttributeHandles )
{
DataItemHandle dataHandle = CrosstabAdaptUtil.createColumnBindingAndDataItem( (ExtendedItemHandle) crosstabHandle.getModelHandle( ),
lah );
CrosstabCellHandle cellHandle = levelViewHandle.getCell( );
if ( position > 0 )
cellHandle.addContent( dataHandle, position );
else
cellHandle.addContent( dataHandle );
}
transEnd( );
}
catch ( Exception e )
{
rollBack( );
ExceptionUtil.handle( e );
}
}
}
private int findCellPosition( )
{
if ( this.handleAdpter instanceof CrosstabCellAdapter )
{
int base = CrosstabAdaptUtil.getDimensionViewHandle( (ExtendedItemHandle) ( (CrosstabCellAdapter) handleAdpter ).getCrosstabCellHandle( )
.getModelHandle( ) )
.getModelHandle( )
.getIndex( );
return base + 1;
}
return 0;
}
private int findPosition( )
{
if ( after instanceof DesignElementHandle )
{
return ( (DesignElementHandle) after ).getIndex( );
}
return 0;
}
/**
* @return the dimension handles
*/
public DimensionHandle[] getDimensionHandles( )
{
return dimensionHandles;
}
/**
* @param dimensionHandles
*/
public void setDimensionHandles( DimensionHandle[] dimensionHandles )
{
this.dimensionHandles = dimensionHandles;
}
}
|
package pl.edu.icm.coansys.disambiguation.author.pig.extractor;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.DefaultDataBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import pl.edu.icm.coansys.commons.java.StackTraceExtractor;
import pl.edu.icm.coansys.disambiguation.author.pig.normalizers.AuthorToInitials;
import pl.edu.icm.coansys.disambiguation.author.pig.normalizers.PigNormalizer;
import pl.edu.icm.coansys.disambiguation.features.FeatureInfo;
import pl.edu.icm.coansys.models.DocumentProtos.Author;
import pl.edu.icm.coansys.models.DocumentProtos.DocumentMetadata;
import pl.edu.icm.coansys.models.DocumentProtos.DocumentWrapper;
/**
*
* @author pdendek
* @author mwos
*/
public class EXTRACT_CONTRIBDATA_GIVENDATA extends EvalFunc<DataBag> {
private static final Logger logger = LoggerFactory
.getLogger(EXTRACT_CONTRIBDATA_GIVENDATA.class);
private List<DisambiguationExtractorDocument> des4Doc = new ArrayList<DisambiguationExtractorDocument>();
private List<DisambiguationExtractorAuthor> des4Author = new ArrayList<DisambiguationExtractorAuthor>();
private List<String> des4DocNameOrId = new ArrayList<String>(),
des4AuthorNameOrId = new ArrayList<String>();
private String language = null;
private boolean skipEmptyFeatures = false;
private boolean useIdsForExtractors = false;
private DisambiguationExtractorFactory extrFactory = new DisambiguationExtractorFactory();
@Override
public Schema outputSchema(Schema p_input) {
try {
return Schema.generateNestedSchema(DataType.BAG);
} catch (FrontendException e) {
logger.error("Error in creating output schema:", e);
throw new IllegalStateException(e);
}
}
private void setDisambiguationExtractor(String featureinfo) throws InstantiationException, IllegalAccessException, ClassNotFoundException
{
List<FeatureInfo> features = FeatureInfo
.parseFeatureInfoString(featureinfo);
String ExtractorDocClassName = new DisambiguationExtractorDocument()
.getClass().getSimpleName();
String ExtractorAuthorClassName = new DisambiguationExtractorAuthor()
.getClass().getSimpleName();
DisambiguationExtractor extractor;
String currentClassNameOrId;
for (int i = 0; i < features.size(); i++) {
extractor = extrFactory.create(features.get(i));
String currentSuperClassName = extractor.getClass().getSuperclass()
.getSimpleName();
if (useIdsForExtractors) {
currentClassNameOrId = extrFactory.toExId(extractor.getClass()
.getSimpleName());
} else {
currentClassNameOrId = extractor.getClass().getSimpleName();
}
if (currentSuperClassName.equals(ExtractorDocClassName)) {
des4Doc.add((DisambiguationExtractorDocument) extractor);
des4DocNameOrId.add(currentClassNameOrId);
} else if (currentSuperClassName.equals(ExtractorAuthorClassName)) {
des4Author.add((DisambiguationExtractorAuthor) extractor);
des4AuthorNameOrId.add(currentClassNameOrId);
} else {
String m = "Cannot create extractor: "
+ extractor.getClass().getSimpleName()
+ ". Its superclass: " + currentSuperClassName
+ " does not match to any superclass.";
logger.error(m);
throw new ClassNotFoundException(m);
}
}
}
public EXTRACT_CONTRIBDATA_GIVENDATA(String featureinfo) throws Exception {
setDisambiguationExtractor(featureinfo);
}
public EXTRACT_CONTRIBDATA_GIVENDATA(String featureinfo, String lang) throws InstantiationException, IllegalAccessException, ClassNotFoundException
{
this.language = lang;
setDisambiguationExtractor(featureinfo);
}
public EXTRACT_CONTRIBDATA_GIVENDATA(String featureinfo, String lang,
String skipEmptyFeatures) throws InstantiationException, IllegalAccessException, ClassNotFoundException {
this.language = lang;
this.skipEmptyFeatures = Boolean.parseBoolean(skipEmptyFeatures);
setDisambiguationExtractor(featureinfo);
}
public EXTRACT_CONTRIBDATA_GIVENDATA(String featureinfo, String lang,
String skipEmptyFeatures, String useIdsForExtractors) throws InstantiationException, IllegalAccessException, ClassNotFoundException
{
this.language = lang;
this.skipEmptyFeatures = Boolean.parseBoolean(skipEmptyFeatures);
this.useIdsForExtractors = Boolean.parseBoolean(useIdsForExtractors);
setDisambiguationExtractor(featureinfo);
}
private boolean checkLanguage() {
return (language != null && !language.equalsIgnoreCase("all")
&& !language.equalsIgnoreCase("null") && !language.equals(""));
}
@Override
public DataBag exec(Tuple input) throws IOException {
if (input == null || input.size() == 0) {
return null;
}
try {
DataByteArray dba = (DataByteArray) input.get(0);
DocumentWrapper dw = DocumentWrapper.parseFrom(dba.get());
dba = null;
// metadata
DocumentMetadata dm = dw.getDocumentMetadata();
String docKey = dm.getKey();
dw = null;
// result bag with tuples, each of which describes one contributor
DataBag ret = new DefaultDataBag();
// TODO: Checking for author clones should be in importers
// START IMPORTER PART
// getting full author list (probably with duplicates)
List<Author> dplAuthors = dm.getBasicMetadata().getAuthorList();
Map <String, Author> filteredAuthors =
new HashMap <String, Author> ( dplAuthors.size() );
// removing clones or duplicates (cid - initials hash)
PigNormalizer toInitials = new AuthorToInitials();
// creating disambiguation extractor only for normalizer
DisambiguationExtractor disam_extractor =
new DisambiguationExtractor();
for ( Author a : dplAuthors ) {
Author b = filteredAuthors.put( a.getKey(), a );
if ( b != null ) {
// cId is already in the map. Check whether the cId is a clone, is
// duplicated for different data, or is incorrectly assigned to different authors
String aInit = (String) toInitials.normalize( a );
String bInit = (String) toInitials.normalize( b );
Object aNorm = disam_extractor.normalizeExtracted( aInit );
Object bNorm = disam_extractor.normalizeExtracted( bInit );
if ( a.equals( b ) ) {
// all author data are equal
// AUTHOR B (AS A CLONE OF A) SHOULD BE REMOVED FROM THE DOCUMENT'S AUTHOR LIST IN IMPORTERS
logger.info( "Author metadata clones with key: " + a.getKey() +
" in document with key: " + docKey );
} else if ( aNorm.equals( bNorm ) ) {
logger.info( "Duplicated author key: "
+ a.getKey() + " for different metadata (except initials)" +
" in document with key: " + docKey );
} else {
logger.error( "Duplicated aurhor key: "
+ a.getKey() + " for different authors: " + aInit
+ ", " + bInit +
" in document with key: " + docKey );
}
}
}
Collection<Author> authors = filteredAuthors.values();
//END IMPORTER PART
// TODO: use a builder for document metadata to replace the old author
// list (with duplicates) with the new, filtered one. We want to replace
// it because otherwise EX_AUTH_SNAMES would give us feature descriptions
// with duplicates, or we would have to repeat the same filter as above.
// Alternatively, move the author-clone check into the IMPORTERS.
// in arrays we are storing DataBags from extractors
DataBag[] extractedDocObj = new DataBag[des4Doc.size()];
DataBag[] extractedAuthorObj;
Map<String, DataBag> map = new HashMap<String, DataBag>();
Map<String, DataBag> finalMap;
if (checkLanguage()) {
for (int i = 0; i < des4Doc.size(); i++) {
extractedDocObj[i] = des4Doc.get(i).extract(dm, language);
}
} else {
for (int i = 0; i < des4Doc.size(); i++) {
extractedDocObj[i] = des4Doc.get(i).extract(dm);
}
}
// adding to map extractor name and features' data
for (int i = 0; i < des4Doc.size(); i++) {
if (extractedDocObj[i] == null) {
continue;
}
if (extractedDocObj[i].size() == 0 && skipEmptyFeatures) {
continue;
}
map.put(des4DocNameOrId.get(i), extractedDocObj[i]);
}
extractedDocObj = null;
// creating disambiguation extractor only for normalizer
EX_AUTH_INITIALS auth_initials_ex = new EX_AUTH_INITIALS ();
// build the bag of tuples (one tuple per contributor from the document),
// replicating the document metadata for each contributor
int i = -1;
for ( Author a : authors )
{
i++;
// here we are sure that the returned Object is an Integer
Object normalizedSname = auth_initials_ex
.normalizeExtracted( a );
String cId = a.getKey();
finalMap = new HashMap<String, DataBag>(map);
// put author metadata into finalMap
extractedAuthorObj = new DataBag[des4Author.size()];
if (checkLanguage()) {
for (int j = 0; j < des4Author.size(); j++) {
extractedAuthorObj[j] = des4Author.get(j).extract(dm,
i, language);
}
} else {
for (int j = 0; j < des4Author.size(); j++) {
extractedAuthorObj[j] = des4Author.get(j)
.extract(dm, i);
}
}
// adding to map extractor name and features' data
for (int j = 0; j < des4Author.size(); j++) {
if (extractedAuthorObj[j] == null) {
continue;
}
if (extractedAuthorObj[j].size() == 0 && skipEmptyFeatures) {
continue;
}
finalMap.put(des4AuthorNameOrId.get(j),
extractedAuthorObj[j]);
}
extractedAuthorObj = null;
Object[] to = new Object[] { docKey, cId, normalizedSname, finalMap };
Tuple t = TupleFactory.getInstance()
.newTuple(Arrays.asList(to));
ret.add(t);
}
map = null;
dm = null;
return ret;
} catch(ArrayIndexOutOfBoundsException ex){
try{
Matcher m = Pattern.compile("([0-9]+)").matcher(ex.getMessage());
Integer i = Integer.parseInt(m.group(1));
logger.error("ArrayIndexOutOfBoundException. " +
"Possible cause is connected either with class "
+des4Author.get(i).getId()+" or "+des4Doc.get(i).getId());
throw ex;
}catch(Exception e){
logger.error("Error in processing input row:", ex);
throw new IOException("Caught exception processing input row:\n"
+ StackTraceExtractor.getStackTrace(ex));
}
} catch (Exception e) {
logger.error("Error in processing input row:", e);
throw new IOException("Caught exception processing input row:\n"
+ StackTraceExtractor.getStackTrace(e));
}
}
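/*
 * Output sketch (derived from the code above, not from original
 * documentation): for every filtered author the returned bag contains one
 * tuple of the form
 *
 *   (docKey, contributorId, normalizedSname, {extractorNameOrId -> DataBag})
 *
 * where the map merges the document-level features with that author's
 * author-level features.
 */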
}
|
package dk.netarkivet.common.distribute;
import java.util.Collection;
import org.mortbay.log.Log;
import dk.netarkivet.common.CommonSettings;
import dk.netarkivet.common.distribute.arcrepository.Replica;
import dk.netarkivet.common.distribute.arcrepository.ReplicaType;
import dk.netarkivet.common.exceptions.ArgumentNotValid;
import dk.netarkivet.common.exceptions.IllegalState;
import dk.netarkivet.common.exceptions.UnknownID;
import dk.netarkivet.common.utils.Settings;
/**
* This singleton class is in charge of giving out the correct channels.
*/
public class Channels {
/**
* Channel type prefixes for the current set of channels.
*/
private static final String ALLBA_CHANNEL_PREFIX = "ALL_BA";
private static final String ANYBA_CHANNEL_PREFIX = "ANY_BA";
private static final String THEBAMON_CHANNEL_PREFIX = "THE_BAMON";
private static final String THESCHED_CHANNEL_PREFIX = "THE_SCHED";
private static final String THEREPOS_CHANNEL_PREFIX = "THE_REPOS";
private static final String ANYLOWHACO_CHANNEL_PREFIX
= "ANY_LOWPRIORITY_HACO";
private static final String ANYHIGHHACO_CHANNEL_PREFIX
= "ANY_HIGHPRIORITY_HACO";
private static final String THISREPOSCLIENT_CHANNEL_PREFIX
= "THIS_REPOS_CLIENT";
private static final String ERROR_CHANNEL_PREFIX = "ERROR";
private static final String INDEXSERVER_CHANNEL_PREFIX = "INDEX_SERVER";
private static final String THISINDEXCLIENT_CHANNEL_PREFIX
= "THIS_INDEX_CLIENT";
private static final String MONITOR_CHANNEL_PREFIX = "MONITOR";
/**
* Prefix for the channel used to send
* {@link dk.netarkivet.harvester.harvesting.distribute.CrawlProgressMessage}s.
*/
private static final String HARVEST_MONITOR_CHANNEL_PREFIX = "HARVESTMON";
/**
* Prefix for the channel used to send {@link HarvesterStatusMessage}s.
*/
private static final String HARVEST_DISPATCHER_CHANNEL_PREFIX = "HARVESTDISP";
private static final String THECR_CHANNEL_PREFIX = "THE_CR";
/** Channel part separator. */
public static final String CHANNEL_PART_SEPARATOR = "_";
/**
* The one existing instance of the Channels object. Not accessible from the
* outside at all.
*/
private static Channels instance;
/**
* Accessor for singleton internally.
*
* @return the <code>Channels</code> object for this singleton.
*/
private static Channels getInstance() {
if (instance == null) {
instance = new Channels();
}
return instance;
}
/**
* Contains the collection of replicas.
*/
private final Collection<Replica> replicas = Replica.getKnown();
/**
* This is the container for the replica which is used by applications
* that only communicate with local processes.
*/
private final Replica useReplica = Replica.getReplicaFromId(
Settings.get(CommonSettings.USE_REPLICA_ID));
/**
* The constructor of Channels class.
* Validates that the current value of the setting USE_REPLICA_ID
* corresponds to one of the replicas listed in the settings.
* Furthermore, it fills the ALL_BA_ARRAY, ANY_BA_ARRAY, THE_BAMON_ARRAY,
* and THE_CR_ARRAY, and initializes ALL_BA, ANY_BA, THE_BAMON, and THE_CR.
*
* @throws UnknownID If one of the replicas has an unhandled replica type.
*/
private Channels() {
// index count
int i = 0;
int useReplicaIndex = -1;
// go through all replicas and initialize their channels.
for(Replica rep : replicas) {
if(rep.getType() == ReplicaType.BITARCHIVE) {
// Bitarchive has 'ALL_BA', 'ANY_BA' and 'THE_BAMON'.
ALL_BA_ARRAY[i] = new ChannelID(ALLBA_CHANNEL_PREFIX,
rep.getId(), ChannelID.NO_IP,
ChannelID.NO_APPLINST_ID, ChannelID.TOPIC);
ANY_BA_ARRAY[i] = new ChannelID(ANYBA_CHANNEL_PREFIX,
rep.getId(), ChannelID.NO_IP,
ChannelID.NO_APPLINST_ID, ChannelID.QUEUE);
THE_BAMON_ARRAY[i] = new ChannelID(THEBAMON_CHANNEL_PREFIX,
rep.getId(), ChannelID.NO_IP,
ChannelID.NO_APPLINST_ID, ChannelID.QUEUE);
THE_CR_ARRAY[i] = null;
} else if(rep.getType() == ReplicaType.CHECKSUM){
// Checksum has only 'THE_CR'.
ALL_BA_ARRAY[i] = null;
ANY_BA_ARRAY[i] = null;
THE_BAMON_ARRAY[i] = null;
THE_CR_ARRAY[i] = new ChannelID(THECR_CHANNEL_PREFIX,
rep.getId(), ChannelID.NO_IP,
ChannelID.NO_APPLINST_ID, ChannelID.QUEUE);
} else {
// Throw an exception when unknown replica type.
throw new UnknownID("The replica '" + rep + "' does not have "
+ "a valid replica type.");
}
// find the 'useReplica'
if(rep == useReplica) {
useReplicaIndex = i;
}
i++;
}
// validate the index of the useReplica
if(useReplicaIndex < 0 || useReplicaIndex >= replicas.size()) {
// issue an error, if the use replica could not be found.
throw new ArgumentNotValid(
"The useReplica '" + useReplica + "' was not found in the "
+ "list of replicas: '" + replicas + "'.");
}
// set the channels for the useReplica
ALL_BA = ALL_BA_ARRAY[useReplicaIndex];
ANY_BA = ANY_BA_ARRAY[useReplicaIndex];
THE_BAMON = THE_BAMON_ARRAY[useReplicaIndex];
THE_CR = THE_CR_ARRAY[useReplicaIndex];
}
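/*
 * Lookup sketch (illustrative only, not part of the original class): an
 * application running on a bitarchive replica obtains its queues through
 * the static accessors, e.g.
 *
 *   ChannelID anyBa = Channels.getAnyBa();
 *   ChannelID baMon = Channels.getTheBamon();
 *
 * while on a checksum replica those calls throw IllegalState and
 * Channels.getTheCR() is used instead.
 */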
/**
* Method for retrieving the collection of replicas used for the channels.
* The replicas are in the same order as their channels.
*
* @return The replicas in the same order as their channels.
*/
public static Collection<Replica> getReplicas() {
return getInstance().replicas;
}
/**
* Returns the queue on which HarvestControllers reply with status messages
* to the HarvestScheduler.
*
* @return the <code>ChannelID</code> object for the queue on which
* HarvestControllers reply with status messages to the
* HarvestScheduler
*/
public static ChannelID getTheSched() {
return getInstance().THE_SCHED;
}
private final ChannelID THE_SCHED = new ChannelID(THESCHED_CHANNEL_PREFIX,
ChannelID.COMMON, ChannelID.NO_IP, ChannelID.NO_APPLINST_ID,
ChannelID.QUEUE);
/**
* Returns the queue which is used by the scheduler to send doOneCrawl to
* Harvest Controllers of high priority (selective harvests).
*
* @return That channel (queue)
*/
public static ChannelID getAnyHighpriorityHaco() {
return getInstance().ANY_HIGHPRIORITY_HACO;
}
/**
* Returns the queue which is used by the scheduler to send doOneCrawl to
* Harvest Controllers of low priority (snapshot harvests).
*
* @return That channel (queue)
*/
public static ChannelID getAnyLowpriorityHaco() {
return getInstance().ANY_LOWPRIORITY_HACO;
}
private final ChannelID ANY_LOWPRIORITY_HACO = new ChannelID(
ANYLOWHACO_CHANNEL_PREFIX, ChannelID.COMMON, ChannelID.NO_IP,
ChannelID.NO_APPLINST_ID, ChannelID.QUEUE);
private final ChannelID ANY_HIGHPRIORITY_HACO = new ChannelID(
ANYHIGHHACO_CHANNEL_PREFIX, ChannelID.COMMON, ChannelID.NO_IP,
ChannelID.NO_APPLINST_ID, ChannelID.QUEUE);
/**
* Returns the one-per-client queue on which client receives replies from
* the arcrepository.
*
* @return the <code>ChannelID</code> object for this queue.
*/
public static ChannelID getThisReposClient() {
return getInstance().THIS_REPOS_CLIENT;
}
private final ChannelID THIS_REPOS_CLIENT = new ChannelID(
THISREPOSCLIENT_CHANNEL_PREFIX, ChannelID.COMMON,
ChannelID.INCLUDE_IP, ChannelID.INCLUDE_APPLINST_ID,
ChannelID.QUEUE);
/**
* Returns the queue on which all messages to the Repository are sent.
*
* @return the <code>ChannelID</code> object for this queue.
*/
public static ChannelID getTheRepos() {
return getInstance().THE_REPOS;
}
private final ChannelID THE_REPOS = new ChannelID(THEREPOS_CHANNEL_PREFIX,
ChannelID.COMMON, ChannelID.NO_IP, ChannelID.NO_APPLINST_ID,
ChannelID.QUEUE);
/**
* Returns BAMON channels for every known bitarchive (replica).
*
* @return An array of BAMON channels - one per bitarchive (replica)
*/
public static final ChannelID[] getAllArchives_BAMONs() {
return getInstance().THE_BAMON_ARRAY;
}
private final ChannelID[] THE_BAMON_ARRAY =
new ChannelID[replicas.size()];
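/**
 * Returns the queue for sending messages to the bitarchive monitor of this
 * replica.
 *
 * @return the <code>ChannelID</code> object for this queue.
 * @throws IllegalState If the current replica is a checksum replica and
 * therefore has no bitarchive monitor channel.
 */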
public static ChannelID getTheBamon() throws IllegalState {
ChannelID res = getInstance().THE_BAMON;
if (res == null) {
throw new IllegalState(
"The channel for the bitarchive monitor "
+ " cannot to be retrieved for replica '"
+ getInstance().useReplica
+ "'.");
}
return res;
}
/**
* Implementation notice: This cannot be initialized directly in the field,
* as it uses THE_BAMON_ARRAY, which is initialized in the constructor.
*/
private final ChannelID THE_BAMON;
/**
* Returns the channels for the all Checksum replicas.
*
* @return An array of THE_CR channels - one for each replica, though only
* the checksum replicas have values (the others are null).
*/
public static final ChannelID[] getAllArchives_CRs() {
return getInstance().THE_CR_ARRAY;
}
/** The array containing the 'THE_CR' channels.*/
private final ChannelID[] THE_CR_ARRAY
= new ChannelID[replicas.size()];
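/**
 * Returns the 'THE_CR' queue for this replica.
 *
 * @return the <code>ChannelID</code> object for this queue.
 * @throws IllegalState If the current replica is not a checksum replica.
 */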
public static ChannelID getTheCR() throws IllegalState {
ChannelID res = getInstance().THE_CR;
if (res == null) {
throw new IllegalState("A bitarchive replica does not have the "
+ "channel for communicating with a checksum replica.");
}
return res;
}
/** The 'THE_CR' channel for this replica. This has the value 'null' if
* the replica is not a checksum replica.*/
private final ChannelID THE_CR;
/**
* Returns ALL_BA channels for every known bitarchive.
*
* @return An array of ALL_BA channels - one per bitarchive
*/
public static final ChannelID[] getAllArchives_ALL_BAs() {
return getInstance().ALL_BA_ARRAY;
}
/**
* ALL_BA is the topic on which a Bitarchive client publishes get, correct
* and batch messages to all connected Bitarchive machines. The following is
* the list of ALL_BA for all archives (i.e. archive replicas).
*/
private final ChannelID[] ALL_BA_ARRAY
= new ChannelID[replicas.size()];
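/**
 * Returns the ALL_BA topic for this replica.
 *
 * @return the <code>ChannelID</code> object for this topic.
 * @throws IllegalState If the current replica is a checksum replica and
 * therefore has no bitarchive channels.
 */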
public static ChannelID getAllBa() throws IllegalState {
ChannelID res = getInstance().ALL_BA;
if (res == null) {
throw new IllegalState("A checksum replica does not have the "
+ "channels for communicating with a bitarchive replica.");
}
return res;
}
/**
* Implementation notice: This cannot be initialized directly in the field,
* as it uses ALL_BA_ARRAY, which is initialized in the constructor.
*/
private final ChannelID ALL_BA;
/**
* Returns ANY_BA channels for every known bitarchive.
*
* @return An array of ANY_BA channels - one per bitarchive
*/
public static final ChannelID[] getAllArchives_ANY_BAs() {
return getInstance().ANY_BA_ARRAY;
}
/**
* Queue on which upload requests are sent out to bitarchive servers. The
* following is the list of ANY_BA for all archives.
*/
private final ChannelID[] ANY_BA_ARRAY
= new ChannelID[replicas.size()];
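/**
 * Returns the ANY_BA queue for this replica.
 *
 * @return the <code>ChannelID</code> object for this queue.
 * @throws IllegalState If the current replica is a checksum replica and
 * therefore has no bitarchive channels.
 */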
public static ChannelID getAnyBa() throws IllegalState {
ChannelID res = getInstance().ANY_BA;
if (res == null) {
throw new IllegalState("A checksum replica does not have the "
+ "channels for communicating with a bitarchive replica.");
}
return res;
}
/**
* Implementation notice: This cannot be initialized directly in the field,
* as it uses ANY_BA_ARRAY, which is initialized in the constructor.
*/
private final ChannelID ANY_BA;
/**
* Returns the queue on which to put errors which are not handled elsewhere.
*
* @return the <code>ChannelID</code> object for this queue.
*/
public static ChannelID getError() {
return getInstance().ERROR;
}
private final ChannelID ERROR = new ChannelID(ERROR_CHANNEL_PREFIX,
ChannelID.COMMON, ChannelID.NO_IP, ChannelID.NO_APPLINST_ID,
ChannelID.QUEUE);
/**
* Given a replica, returns the BAMON queue to which batch jobs
* must be sent in order to run them on that bitarchive.
*
* @param replicaId The id of the replica
* @return the channel
* @throws ArgumentNotValid
* if the replicaId is null, unknown, or empty string
*/
public static ChannelID getBaMonForReplica(String replicaId)
throws ArgumentNotValid {
ArgumentNotValid.checkNotNullOrEmpty(replicaId, "replicaId");
ChannelID[] bamons = getAllArchives_BAMONs();
for (ChannelID bamon : bamons) {
if (bamon != null && bamon.getName().equals(
Settings.get(CommonSettings.ENVIRONMENT_NAME)
+ CHANNEL_PART_SEPARATOR + replicaId
+ CHANNEL_PART_SEPARATOR
+ THEBAMON_CHANNEL_PREFIX)) {
return bamon;
}
}
throw new ArgumentNotValid("Did not find a BAMON queue for '"
+ replicaId + "'");
}
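/**
 * Given a replica, returns the checksum (THE_CR) queue for that replica.
 *
 * @param replicaId The id of the replica
 * @return the channel
 * @throws ArgumentNotValid if the replicaId is null, empty, or has no
 * corresponding checksum queue
 */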
public static ChannelID getTheCrForReplica(String replicaId) {
ArgumentNotValid.checkNotNullOrEmpty(replicaId, "String replicaId");
ChannelID[] crs = getAllArchives_CRs();
for (ChannelID cr : crs) {
if (cr != null
&& cr.getName().equals(
Settings.get(CommonSettings.ENVIRONMENT_NAME)
+ CHANNEL_PART_SEPARATOR + replicaId
+ CHANNEL_PART_SEPARATOR
+ THECR_CHANNEL_PREFIX)) {
return cr;
}
}
throw new ArgumentNotValid("Did not find a checksum queue for '"
+ replicaId + "'");
}
/**
* Method for extracting the replica from the name of the identifier
* channel.
*
* @param channelName The name of the identification channel for the
* replica.
* @return Replica who the identification channel belong to.
* @throws UnknownID If the replicaId does not point to a known replica.
* @throws ArgumentNotValid If the channelName is either null or empty.
*/
public static Replica retrieveReplicaFromIdentifierChannel(
String channelName) throws UnknownID, ArgumentNotValid {
ArgumentNotValid.checkNotNullOrEmpty(channelName, "String channelName");
if (channelName.contains(THECR_CHANNEL_PREFIX)) {
// environmentName ## replicaId ## THE_CR
String[] parts = channelName.split(CHANNEL_PART_SEPARATOR);
return Replica.getReplicaFromId(parts[1]);
} else if (channelName.contains(THEBAMON_CHANNEL_PREFIX)) {
// environmentName ## replicaId ## THE_BAMON
String[] parts = channelName.split(CHANNEL_PART_SEPARATOR);
return Replica.getReplicaFromId(parts[1]);
}
String errMsg = "The current channel name, '" + channelName
+ "' does not refer to an identification channel";
Log.warn(errMsg);
throw new UnknownID(errMsg);
}
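/*
 * Naming sketch (illustrative only; "PROD" and "ONE" are assumed values):
 * with environment name "PROD" and replica id "ONE", the identification
 * channel name "PROD_ONE_THE_BAMON" splits on CHANNEL_PART_SEPARATOR into
 * {"PROD", "ONE", "THE", "BAMON"}, so parts[1] yields the replica id that
 * is handed to Replica.getReplicaFromId().
 */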
/**
* The method for retrieving the name of the identification channel for
* a replica based on the Id of this replica.
*
* @param replicaId The id for the replica whose identification channel
* name should be retrieved.
* @return The name of the identification channel for the replica.
* @throws UnknownID If no replica with the given replica id is known.
* @throws ArgumentNotValid If the replicaId is null or empty.
*/
public static String retrieveReplicaChannelNameFromReplicaId(
String replicaId) throws UnknownID, ArgumentNotValid {
ArgumentNotValid.checkNotNullOrEmpty(replicaId, "String replicaId");
return Replica.getReplicaFromId(replicaId)
.getIdentificationChannel().getName();
}
/**
* The method for retrieving the identification channel for a replica
* based on the Id of this replica.
*
* @param replicaId The id for the replica whose identification channel
* name should be retrieved.
* @return The identification channel for the replica.
* @throws UnknownID If no replica with the given replica id is known.
* @throws ArgumentNotValid If the replicaId is null or empty.
*/
public static ChannelID retrieveReplicaChannelFromReplicaId(
String replicaId)
throws UnknownID, ArgumentNotValid {
ArgumentNotValid.checkNotNullOrEmpty(replicaId, "String replicaId");
return Replica.getReplicaFromId(replicaId).getIdentificationChannel();
}
/**
* Returns the queue for sending messages to the IndexServer
* application.
*
* @return the <code>ChannelID</code> object for this queue.
*/
public static ChannelID getTheIndexServer() {
return getInstance().THE_INDEX_SERVER;
}
private final ChannelID THE_INDEX_SERVER = new ChannelID(
INDEXSERVER_CHANNEL_PREFIX,
ChannelID.COMMON,
ChannelID.NO_IP,
ChannelID.NO_APPLINST_ID,
ChannelID.QUEUE);
/**
* Returns the queue for getting responses from the IndexServer
* application.
*
* @return the <code>ChannelID</code> object for this queue.
*/
public static ChannelID getThisIndexClient() {
return getInstance().THIS_INDEX_CLIENT;
}
//TODO Should we use client channels for all our servers?
private final ChannelID THIS_INDEX_CLIENT = new ChannelID(
THISINDEXCLIENT_CHANNEL_PREFIX,
ChannelID.COMMON,
ChannelID.INCLUDE_IP,
ChannelID.INCLUDE_APPLINST_ID,
ChannelID.QUEUE);
/** Return the queue for the monitor registry.
*
* @return the <code>ChannelID</code> object for the queue.
*/
public static ChannelID getTheMonitorServer() {
return getInstance().THE_MONITOR_SERVER;
}
private final ChannelID THE_MONITOR_SERVER = new ChannelID(
MONITOR_CHANNEL_PREFIX,
ChannelID.COMMON,
ChannelID.NO_IP,
ChannelID.NO_APPLINST_ID,
ChannelID.QUEUE);
/** Return the queue for the harvest monitor registry.
*
* @return the <code>ChannelID</code> object for the queue.
*/
public static ChannelID getHarvestMonitorChannel() {
return getInstance().HARVEST_MONITOR;
}
private final ChannelID HARVEST_MONITOR = new ChannelID(
HARVEST_MONITOR_CHANNEL_PREFIX,
ChannelID.COMMON,
ChannelID.NO_IP,
ChannelID.NO_APPLINST_ID,
ChannelID.QUEUE);
/** Return the queue for the harvest dispatcher.
*
* @return the <code>ChannelID</code> object for the queue.
*/
public static ChannelID getHarvestDispatcherChannel() {
return getInstance().HARVEST_DISPATCHER;
}
private final ChannelID HARVEST_DISPATCHER = new ChannelID(
HARVEST_DISPATCHER_CHANNEL_PREFIX,
ChannelID.COMMON,
ChannelID.NO_IP,
ChannelID.NO_APPLINST_ID,
ChannelID.TOPIC);
/**
* Reset the instance to re-read the settings. Only for use in tests.
*/
static void reset() {
instance = null;
}
/**
* Is a given name a ChannelName for a Topic or a Queue.
* @param name a given name
* @return true, if the given name contains the string "_TOPIC"
*/
public static boolean isTopic(String name) {
ArgumentNotValid.checkNotNullOrEmpty(name, "String name");
return name.contains("_TOPIC");
}
}
|
package com.blackfriar.controllers;
import com.blackfriar.domain.Beer;
import com.blackfriar.BeerService;
import com.blackfriar.assemblers.BeerResourceAssembler;
import com.blackfriar.exceptions.BeerNotFoundException;
import com.blackfriar.resources.BeerResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.hateoas.Link;
import org.springframework.hateoas.MediaTypes;
import org.springframework.hateoas.Resources;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseStatus;
import java.util.List;
import static org.springframework.hateoas.mvc.ControllerLinkBuilder.linkTo;
import static org.springframework.hateoas.mvc.ControllerLinkBuilder.methodOn;
@Controller
@RequestMapping(path="/api", produces = MediaTypes.HAL_JSON_VALUE)
public class BeerController {
@Autowired
private BeerService beerService;
@Autowired
private BeerResourceAssembler beerResourceAssembler;
@RequestMapping(value = "beers", produces = MediaTypes.HAL_JSON_VALUE)
@ResponseStatus(value = HttpStatus.OK)
public ResponseEntity<?> getAllBeers() {
List<Beer> allBeers = beerService.getAllBeers();
List<BeerResource> beerResources = beerResourceAssembler.toResources(allBeers);
Link link = linkTo(methodOn(BeerController.class).getAllBeers()).withSelfRel();
Resources<BeerResource> wrappedResources = new Resources<>(beerResources, link);
return new ResponseEntity<>(wrappedResources, HttpStatus.OK);
}
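/*
 * Response sketch (illustrative, assuming standard Spring HATEOAS HAL
 * rendering): GET /api/beers returns an _embedded collection of beer
 * resources together with a _links.self entry pointing back to /api/beers.
 */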
@RequestMapping(value = "beers/{id}", produces = MediaTypes.HAL_JSON_VALUE)
public ResponseEntity<BeerResource> findById(@PathVariable Long id) {
Beer beer = beerService.getById(id)
.orElseThrow(() -> new BeerNotFoundException());
BeerResource beerResource = beerResourceAssembler.toResource(beer);
return new ResponseEntity<BeerResource>(beerResource, HttpStatus.OK);
}
}
|
package editor.database.characteristics;
import java.awt.Color;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.List;
import java.util.stream.Collectors;
/**
* This enum represents one of the five colors of Magic: The Gathering.
*
* @author Alec Roelke
*/
public enum ManaType
{
/**
* Colorless mana. While this is not a color, it is a type.
*/
COLORLESS("Colorless", 'C', null),
/**
* White mana.
*/
WHITE("White", 'W', Color.YELLOW.darker()),
/**
* Blue mana.
*/
BLUE("Blue", 'U', Color.BLUE),
/**
* Black mana.
*/
BLACK("Black", 'B', Color.BLACK),
/**
* Red mana.
*/
RED("Red", 'R', Color.RED),
/**
* Green mana.
*/
GREEN("Green", 'G', Color.GREEN.darker());
/**
* Get the ManaTypes that represent colors, which is all of them except
* {@link #COLORLESS}.
*
* @return the colors of Magic, which is the list of ManaTypes
* minus the ManaTypes that do not represent colors.
*/
public static ManaType[] colors()
{
return new ManaType[]{WHITE, BLUE, BLACK, RED, GREEN};
}
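/**
 * Get a ManaType from a character, throwing if the character is not a valid
 * shorthand.
 *
 * @param color Character to get a color from
 * @return ManaType that corresponds to the character
 * @throws IllegalArgumentException if there is no such ManaType
 */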
public static ManaType parseManaType(char color) throws IllegalArgumentException
{
ManaType type = tryParseManaType(color);
if (type == null)
throw new IllegalArgumentException("Illegal color shorthand '" + color + "'");
return type;
}
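/**
 * Get a ManaType from a String, throwing if the String does not name a
 * ManaType.
 *
 * @param color string to create a ManaType from
 * @return the ManaType that corresponds to the String
 * @throws IllegalArgumentException if there is no such ManaType
 */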
public static ManaType parseManaType(String color) throws IllegalArgumentException
{
ManaType type = tryParseManaType(color);
if (type == null)
throw new IllegalArgumentException("Illegal color string \"" + color + "\"");
return type;
}
/**
* Sort a list of ManaTypes in color order. It is recommended to use this rather than
* using Java's built-in sorting functions.
*
* @param colors List of ManaTypes to sort
*/
public static void sort(List<ManaType> colors)
{
var counts = new EnumMap<ManaType, Integer>(ManaType.class);
for (ManaType type : colors)
counts.compute(type, (k, v) -> v == null ? 1 : v + 1);
var unique = Arrays.stream(colors()).filter(counts::containsKey).collect(Collectors.toList());
switch (unique.size())
{
case 2:
if (unique.get(0).colorOrder(unique.get(1)) > 0)
Collections.reverse(unique);
break;
case 3:
while (unique.get(0).distanceFrom(unique.get(1)) != unique.get(1).distanceFrom(unique.get(2)))
Collections.rotate(unique, 1);
break;
case 4:
ManaType missing = Arrays.stream(colors()).filter((m) -> !counts.containsKey(m)).collect(Collectors.toList()).get(0);
while (missing.distanceFrom(unique.get(0)) != 1)
Collections.rotate(unique, 1);
break;
default:
// Don't have to do anything if there are 0, 1, or all 5 colors
break;
}
colors.clear();
if (counts.containsKey(COLORLESS))
colors.addAll(Collections.nCopies(counts.get(COLORLESS), COLORLESS));
for (ManaType type : unique)
colors.addAll(Collections.nCopies(counts.get(type), type));
}
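/*
 * Usage sketch (illustrative only, not from the original source):
 *
 *   List<ManaType> cost = new ArrayList<>(List.of(ManaType.GREEN, ManaType.WHITE, ManaType.GREEN));
 *   ManaType.sort(cost);
 *   // cost is now [GREEN, GREEN, WHITE]: duplicates are kept and the unique
 *   // colors are rotated into canonical color-pie order.
 */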
/**
* Get a ManaType from a character. Acceptable characters are 'w', 'u', 'b',
* 'r', 'g', or 'c', case insensitive.
*
* @param color Character to get a color from
* @return ManaType that corresponds to the character, or null if there is none
*/
public static ManaType tryParseManaType(char color)
{
for (ManaType c : ManaType.values())
if (Character.toLowerCase(c.shorthand) == Character.toLowerCase(color))
return c;
return null;
}
/**
* Get a ManaType from a String. Acceptable values are "white", "w", "blue",
* "u", "black", "b", "red", "r", "green", "g", "colorless", or "c", case
* insensitive.
*
* @param color string to create an ManaType from
* @return the ManaType that corresponds to the String, or null if there is none
*/
public static ManaType tryParseManaType(String color)
{
for (ManaType c : ManaType.values())
if (c.name.equalsIgnoreCase(color) || color.equalsIgnoreCase(String.valueOf(c.shorthand)))
return c;
return null;
}
/**
* Color corresponding to this ManaType (should be null for colorless).
*/
public final Color color;
/**
* String representation of this ManaType.
*/
private final String name;
/**
* Single-character shorthand for this ManaType.
*/
private final char shorthand;
/**
* Create a new ManaType.
*
* @param n String representation of the new ManaType
* @param s single-character shorthand representation of the new ManaType
* @param c color corresponding to this ManaType
*/
ManaType(final String n, final char s, final Color c)
{
name = n;
shorthand = s;
color = c;
}
/**
* Compare this ManaType to another ManaType according to the order they would appear in
* a mana cost. The ordering is determined according to the direction around the color pie
* in which the distance from this ManaType to the other is shortest.
*
* @param other ManaType to compare to
* @return a negative number if this ManaType should come first, 0 if they are the same,
* or a positive number if it should come after.
*/
public int colorOrder(ManaType other)
{
if (this == COLORLESS && other == COLORLESS)
return 0;
else if (this == COLORLESS)
return -1;
else if (other == COLORLESS)
return 1;
else
{
int diff = compareTo(other);
return Math.abs(diff) <= 2 ? diff : -diff;
}
}
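/*
 * Illustrative examples (derived from the rule above, added commentary):
 * WHITE.colorOrder(BLUE) is negative, so white sorts before blue, while
 * WHITE.colorOrder(GREEN) is positive, because the shorter way around the
 * color pie runs from green to white.
 */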
/**
* Get the distance around the color pie from this ManaType to the other, moving
* forward in WUBRG order.
*
* @param other ManaType to compare to
* @return The number of forward steps around the color pie from this ManaType to
* the other.
*/
public int distanceFrom(ManaType other)
{
if (this == COLORLESS || other == COLORLESS)
throw new IllegalArgumentException("Colorless is not a color");
return (other.ordinal() - ordinal() + colors().length) % colors().length;
}
/**
* Get the one-character shorthand for this ManaType.
*
* @return a one-character shorthand for the name of this ManaType.
*/
public char shorthand()
{
return shorthand;
}
@Override
public String toString()
{
return name;
}
}
|
package edu.mit.streamjit.apps.test;
import com.google.common.collect.ImmutableList;
import edu.mit.streamjit.api.Filter;
import edu.mit.streamjit.api.Identity;
import edu.mit.streamjit.api.Joiner;
import edu.mit.streamjit.api.OneToOneElement;
import edu.mit.streamjit.api.Pipeline;
import edu.mit.streamjit.api.RoundrobinJoiner;
import edu.mit.streamjit.api.RoundrobinSplitter;
import edu.mit.streamjit.api.Splitjoin;
import edu.mit.streamjit.api.Splitter;
import edu.mit.streamjit.api.StreamElement;
import edu.mit.streamjit.impl.common.PrintStreamVisitor;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Random;
/**
* Generates random streams.
*
* TODO: This (and all of test/) really doesn't belong under apps/; we should
* have a separate package for sanity/regression tests (basically anything that
* isn't real-world).
* @author Jeffrey Bosboom <jeffreybosboom@gmail.com>
* @since 7/26/2013
*/
public final class StreamFuzzer {
public interface FuzzElement {
public OneToOneElement<Integer, Integer> instantiate();
public String toJava();
@Override
public boolean equals(Object other);
@Override
public int hashCode();
}
private static final int MAX_DEPTH = 5;
public static FuzzElement generate() {
return makeStream(MAX_DEPTH);
}
private static final Random rng = new Random();
private static final int FILTER_PROB = 50, PIPELINE_PROB = 25, SPLITJOIN_PROB = 25;
private static FuzzElement makeStream(int depthLimit) {
int r = rng.nextInt(FILTER_PROB + PIPELINE_PROB + SPLITJOIN_PROB);
if (depthLimit == 0 || r < FILTER_PROB) {
return makeFilter();
} else if (r < FILTER_PROB + PIPELINE_PROB) {
return makePipeline(depthLimit);
} else if (r < FILTER_PROB + PIPELINE_PROB + SPLITJOIN_PROB) {
return makeSplitjoin(depthLimit);
} else
throw new AssertionError(r);
}
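/*
 * Note (added commentary): with the weights above a node becomes a filter
 * with probability 0.5, a pipeline with probability 0.25 and a splitjoin
 * with probability 0.25, except at the depth limit where it is always a
 * filter.
 */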
private static FuzzFilter makeFilter() {
return new FuzzFilter(Identity.class, ImmutableList.of());
}
private static final int MAX_PIPELINE_LENGTH = 5;
private static FuzzPipeline makePipeline(int depthLimit) {
int length = rng.nextInt(MAX_PIPELINE_LENGTH) + 1;
ImmutableList.Builder<FuzzElement> elements = ImmutableList.builder();
for (int i = 0; i < length; ++i)
elements.add(makeStream(depthLimit - 1));
return new FuzzPipeline(elements.build());
}
private static final int MAX_SPLITJOIN_BRANCHES = 5;
private static FuzzSplitjoin makeSplitjoin(int depthLimit) {
int numBranches = rng.nextInt(MAX_SPLITJOIN_BRANCHES) + 1;
ImmutableList.Builder<FuzzElement> branches = ImmutableList.builder();
for (int i = 0; i < numBranches; ++i)
branches.add(makeStream(depthLimit - 1));
return new FuzzSplitjoin(makeSplitter(), makeJoiner(), branches.build());
}
private static FuzzSplitter makeSplitter() {
return new FuzzSplitter(RoundrobinSplitter.class, ImmutableList.of());
}
private static FuzzJoiner makeJoiner() {
return new FuzzJoiner(RoundrobinJoiner.class, ImmutableList.of());
}
private static final com.google.common.base.Joiner ARG_JOINER = com.google.common.base.Joiner.on(", ");
private static class FuzzStreamElement<T extends StreamElement<Integer, Integer>> {
private final Class<? extends T> filterClass;
private final ImmutableList<Object> arguments;
private transient Constructor<? extends T> constructor;
protected FuzzStreamElement(Class<? extends T> filterClass, ImmutableList<Object> arguments) {
this.filterClass = filterClass;
this.arguments = arguments;
}
public T instantiate() {
if (constructor == null)
constructor = findConstructor();
try {
return constructor.newInstance(arguments.toArray());
} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
throw new AssertionError("Failed to instantiate "+constructor+" with "+arguments, ex);
}
}
private Constructor<? extends T> findConstructor() {
@SuppressWarnings("unchecked")
Constructor<? extends T>[] constructors = (Constructor<T>[])filterClass.getConstructors();
List<Constructor<? extends T>> retvals = new ArrayList<>();
Map<Constructor<? extends T>, Throwable> exceptions = new HashMap<>();
for (Constructor<? extends T> ctor : constructors)
try {
ctor.newInstance(arguments.toArray());
retvals.add(ctor);
} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
exceptions.put(ctor, ex);
}
if (retvals.isEmpty())
throw new AssertionError("Couldn't create a "+filterClass+" from "+arguments+": exceptions "+exceptions);
if (retvals.size() > 1)
throw new AssertionError("Creating a "+filterClass+" from "+arguments+" was ambiguous: "+retvals);
return retvals.get(0);
}
public String toJava() {
//This will generate unchecked code if the filter is generic.
return "new " + filterClass.getCanonicalName() + "(" + ARG_JOINER.join(arguments)+")";
}
@Override
public boolean equals(Object obj) {
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
final FuzzStreamElement<T> other = (FuzzStreamElement<T>)obj;
if (!Objects.equals(this.filterClass, other.filterClass))
return false;
if (!Objects.equals(this.arguments, other.arguments))
return false;
return true;
}
@Override
public int hashCode() {
int hash = 7;
hash = 41 * hash + Objects.hashCode(this.filterClass);
hash = 41 * hash + Objects.hashCode(this.arguments);
return hash;
}
}
private static final class FuzzFilter extends FuzzStreamElement<Filter<Integer, Integer>> implements FuzzElement {
@SuppressWarnings({"unchecked","rawtypes"})
private FuzzFilter(Class<? extends Filter> filterClass, ImmutableList<Object> arguments) {
super((Class<Filter<Integer, Integer>>)filterClass, arguments);
}
@Override
public Filter<Integer, Integer> instantiate() {
return super.instantiate();
}
//use inherited equals()/hashCode()
}
private static final class FuzzPipeline implements FuzzElement {
private final ImmutableList<FuzzElement> elements;
private FuzzPipeline(ImmutableList<FuzzElement> elements) {
this.elements = elements;
}
@Override
public Pipeline<Integer, Integer> instantiate() {
Pipeline<Integer, Integer> pipeline = new Pipeline<>();
for (FuzzElement e : elements)
pipeline.add(e.instantiate());
return pipeline;
}
@Override
public String toJava() {
List<String> args = new ArrayList<>(elements.size());
for (FuzzElement e : elements)
args.add(e.toJava());
return "new Pipeline(" + ARG_JOINER.join(args) + ")";
}
@Override
public boolean equals(Object obj) {
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
final FuzzPipeline other = (FuzzPipeline)obj;
if (!Objects.equals(this.elements, other.elements))
return false;
return true;
}
@Override
public int hashCode() {
int hash = 5;
hash = 59 * hash + Objects.hashCode(this.elements);
return hash;
}
}
/**
* Can't implement FuzzElement because Splitter isn't a OneToOneElement, but
* can still share the instantiation code.
*/
private static final class FuzzSplitter extends FuzzStreamElement<Splitter<Integer, Integer>> {
@SuppressWarnings({"unchecked","rawtypes"})
private FuzzSplitter(Class<? extends Splitter> filterClass, ImmutableList<Object> arguments) {
super((Class<Splitter<Integer, Integer>>)filterClass, arguments);
}
@Override
public Splitter<Integer, Integer> instantiate() {
return super.instantiate();
}
//use inherited equals()/hashCode()
}
/**
* See comments on FuzzSplitter.
*/
private static final class FuzzJoiner extends FuzzStreamElement<Joiner<Integer, Integer>> {
@SuppressWarnings({"unchecked","rawtypes"})
private FuzzJoiner(Class<? extends Joiner> filterClass, ImmutableList<Object> arguments) {
super((Class<Joiner<Integer, Integer>>)filterClass, arguments);
}
@Override
public Joiner<Integer, Integer> instantiate() {
return super.instantiate();
}
//use inherited equals()/hashCode()
}
private static final class FuzzSplitjoin implements FuzzElement {
private final FuzzSplitter splitter;
private final FuzzJoiner joiner;
private final ImmutableList<FuzzElement> branches;
private FuzzSplitjoin(FuzzSplitter splitter, FuzzJoiner joiner, ImmutableList<FuzzElement> branches) {
this.splitter = splitter;
this.joiner = joiner;
this.branches = branches;
}
@Override
public OneToOneElement<Integer, Integer> instantiate() {
Splitjoin<Integer, Integer> splitjoin = new Splitjoin<>(splitter.instantiate(), joiner.instantiate());
for (FuzzElement e : branches)
splitjoin.add(e.instantiate());
return splitjoin;
}
@Override
public String toJava() {
List<String> args = new ArrayList<>(branches.size()+2);
args.add(splitter.toJava());
for (FuzzElement e : branches)
args.add(e.toJava());
args.add(joiner.toJava());
return "new Splitjoin(" + ARG_JOINER.join(args) + ")";
}
@Override
public boolean equals(Object obj) {
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
final FuzzSplitjoin other = (FuzzSplitjoin)obj;
if (!Objects.equals(this.splitter, other.splitter))
return false;
if (!Objects.equals(this.joiner, other.joiner))
return false;
if (!Objects.equals(this.branches, other.branches))
return false;
return true;
}
@Override
public int hashCode() {
int hash = 7;
hash = 71 * hash + Objects.hashCode(this.splitter);
hash = 71 * hash + Objects.hashCode(this.joiner);
hash = 71 * hash + Objects.hashCode(this.branches);
return hash;
}
}
public static void main(String[] args) {
FuzzElement fuzz = StreamFuzzer.generate();
OneToOneElement<Integer, Integer> stream = fuzz.instantiate();
stream.visit(new PrintStreamVisitor(System.out));
System.out.println(fuzz.toJava());
}
}
|
package edu.wpi.first.wpilibj.templates;
/**
* The RobotMap is a mapping from the ports that sensors and actuators are wired
* into to a variable name. This provides flexibility when changing wiring, makes
* checking the wiring easier and significantly reduces the number of magic
* numbers floating around.
*/
public class RobotMap {
// For example, to map the left and right motors, you could define the
// following variables to use with your drivetrain subsystem.
// public static final int leftMotor = 1;
// public static final int rightMotor = 2;
// If you are using multiple modules, make sure to define both the port
// number and the module. For example, with a rangefinder:
// public static final int rangefinderPort = 1;
// public static final int rangefinderModule = 1;
// driveTrain
public static final int FRONT_LEFT_JAGUAR = 1;
public static final int BACK_LEFT_JAGUAR = 2;
public static final int FRONT_RIGHT_JAGUAR = 9;
public static final int BACK_RIGHT_JAGUAR = 10;
public static final int DRIVE_ENCODER_RIGHT_1 = 11;
public static final int DRIVE_ENCODER_RIGHT_2 = 12;
public static final int DRIVE_ENCODER_LEFT_1 = 13;
public static final int DRIVE_ENCODER_LEFT_2 = 14;
public static final int ARRIVED_LEFT_PORT = 6;
public static final int ARRIVED_RIGHT_PORT = 5;
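// WHEEL_DIAMETER is an 8 inch wheel converted to centimetres: 8 * 2.54 = 20.32 cm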
public static final double WHEEL_DIAMETER = 8 * 2.54;
// elevator
public static final int ELEVATOR_TOP_DI_PORT = 2;
public static final int ELEVATOR_BOTTOM_DI_PORT = 3;
public static final int ELEVATOR_WHEEL_PORT = 3;
public static final double ELEVATOR_UP_SPEED = 0.5;
public static final double ELEVATOR_DOWN_SPEED = -0.4;
public static final int ELEVATOR_UP_TIMEOUT = 5;
public static double ELEVATOR_DOWN_TIMEOUT = 5;
// blocker
public static final int BLOCKER_RELAY_PORT = 2;
public static final int BLOCKER_TOP_DI_PORT = 4;
public static final int BLOCKER_BOTTOM_DI_PORT = 9;
public static final int BLOCKER_TIMEOUT = 2;
// door
public static final int DOOR_RELAY_PORT = 1;
public static final int DOOR_TOP_DI_PORT = 8;
public static final int DOOR_BOTTOM_DI_PORT = 7;
public static final int DOOR_TIMEOUT = 2;
public static final long SLEEP_BETWEEN_DOOR_OPEN_AND_CLOSE = 1300;
// driving PID
public static final double DRIVE_PID_TOLERANCE = 0;
public static final double DRIVER_KP = 0;
public static final double DRIVER_KI = 0;
public static final double DRIVER_KD = 0;
public static final long DRIVER_DT = 0;
public static final double AUTONOMOUS_DRIVE_DESTINATION = 0;
public static final int ENCODER_TICKS_IN_CYCLE = 70;
public static final int PID_TIMEOUT = 3;
public static final long A_LITTLE_BACKWARDS_TIMEOUT = 500;
public static final double A_LITTLE_BACKWARDS_SPEED = 0.2;
}
|
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.*;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import edu.wpi.first.wpilibj.DriverStationLCD.Line;
//import edu.wpi.first.wpilibj.RobotDrive;
//import edu.wpi.first.wpilibj.SimpleRobot;
//import edu.wpi.first.wpilibj.templates.Shooter;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the SimpleRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Team3373 extends SimpleRobot{
int StageOneMotorPWM = 1; //Declares channel of StageOne PWM
int StageTwoMotorPWM = 2; //Declares channel of StageTwo PWM
Servo frontCameraServo = new Servo(6);
Talon StageOneTalon = new Talon(1, 1); //Creates instance of StageOne PWM
Talon StageTwoTalon = new Talon(1, 2); //Creates instance of StageTwo PWM
DriverStationLCD LCD = DriverStationLCD.getInstance();
//SmartDashboard smartDashboard;
Joystick shootStick = new Joystick(2);
Shooter objShooter = new Shooter(this);
//Deadband objDeadband = new Deadband();
Timer robotTimer = new Timer();
boolean shootA;
boolean shootB;
boolean shootX;
boolean shootY;
boolean shootRB;
boolean shootLB;
boolean shootBack;
boolean shootStart;
boolean test;
double shootLX = shootStick.getRawAxis(1);
double shootLY = shootStick.getRawAxis(2);
double shootTriggers = shootStick.getRawAxis(3);
double shootRX = shootStick.getRawAxis(4);
double shootRY = shootStick.getRawAxis(5);
double shootDP = shootStick.getRawAxis(6);
double ShooterSpeedStage2 = 0;//was StageTwoTalon.get()
double percentageScaler = 0.75;
double ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;//was StageOneTalon.get()
double ShooterSpeedMax = 5300.0;
double ShooterSpeedAccel = 250;
double stageOneScaler = .5; //What stage one is multiplied by in order to make it a percentage of stage 2
double PWMMax = 1; //maximum voltage sent to motor
double MaxScaler = PWMMax/5300;
double ShooterSpeedScale = MaxScaler * ShooterSpeedMax; //Scaler for voltage to RPM. Highly experimental!!
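// Worked example of the scaling above: with PWMMax = 1 and ShooterSpeedMax = 5300,
// MaxScaler = 1/5300 ~= 1.89e-4, so ShooterSpeedScale = (1/5300) * 5300 = 1.0 (full PWM output at maximum RPM).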
double currentRPMT2 = StageTwoTalon.get()*ShooterSpeedScale;
double currentRPMT1 = currentRPMT2*stageOneScaler;
double target;
double RPMModifier = 250;
double idle = 1 * ShooterSpeedScale;
double off = 0;
double Scaler = 5936;
double change;
double startTime = 9000000;
double backTime = 90000000;
double aTime = 900000000;
double bTime = 900000000;
boolean flagA;
boolean flagB;
boolean flagX;
boolean flagY;
boolean flagStart;
boolean flagBack;
boolean flagBack2;
public Team3373(){
}
/**
* This function is called once each time the robot enters autonomous mode.
*/
public void autonomous() {
for (int i = 0; i < 4; i++) {
}
}
/**
* This function is called once each time the robot enters operator control.
*/
public void operatorControl() {
robotTimer.start();
while (isOperatorControl() && isDisabled()){
}
flagA = true;
flagB = true;
flagX = true;
flagY = true;
flagStart = true;
flagBack = true;
flagBack2 = false;
while (isOperatorControl() && isEnabled()){
shootA = shootStick.getRawButton(1);
shootB = shootStick.getRawButton(2);
shootX = shootStick.getRawButton(3);
shootY = shootStick.getRawButton(4);
shootRB = shootStick.getRawButton(5);
shootLB = shootStick.getRawButton(6);
shootBack = shootStick.getRawButton(7);
shootStart = shootStick.getRawButton(8);
shootLX = shootStick.getRawAxis(1);
shootLY = shootStick.getRawAxis(2);
shootTriggers = shootStick.getRawAxis(3);
shootRX = shootStick.getRawAxis(4);
shootRY = shootStick.getRawAxis(5);
shootDP = shootStick.getRawAxis(6);
ShooterSpeedStage1 = ShooterSpeedStage2 * percentageScaler;
StageOneTalon.set(ShooterSpeedStage1);
StageTwoTalon.set(ShooterSpeedStage2);
if (shootStart && flagStart) {
ShooterSpeedStage2 = objShooter.start();
flagStart = false;
} else if (shootA && flagA){//increases stage 2
ShooterSpeedStage2 = objShooter.increaseSpeed(ShooterSpeedStage2);
flagA = false;
} else if (shootB && flagB){//decrease stage 2
ShooterSpeedStage2 = objShooter.decreaseSpeed(ShooterSpeedStage2);
flagB = false;
} else if (shootX && flagX){//meant to increase the Stage1/Stage2 percentage, but currently calls decreasePercentage (same as the Y button)
percentageScaler = objShooter.decreasePercentage(percentageScaler);
flagX = false;
} else if (shootY && flagY){//decreases percentage between Stage1 and Stage2
percentageScaler = objShooter.decreasePercentage(percentageScaler);
flagY = false;
} else if (shootBack && flagBack){//turns off
ShooterSpeedStage2 = objShooter.stop();
percentageScaler = 0.75;
}
//if (shootBack && flagBack){
// flagBack2 = true;
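// Each flag below acts as a simple edge detector: the action above fires once per button press,
// and the corresponding flag is only re-armed here after that button has been released.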
if (!shootA && !flagA) { //toggles
flagA = true;
} else if (!shootB && !flagB){
flagB = true;
}else if (!shootX && !flagX){
flagX = true;
}else if (!shootY && !flagY){
flagY = true;
} else if (!shootStart && !flagStart){
flagStart = true;
}else if (!shootBack && !flagBack){
flagBack = true;
//flagBack2 = false;
}
//try {Thread.sleep(1000);} catch(Exception e){}
//String percentage = Double.toString();
double speedOne = StageOneTalon.get();
String speed1 = Double.toString(speedOne);
double speedTwo = StageTwoTalon.get();
String speed2 = Double.toString(speedTwo);
LCD.println(Line.kUser3, 1, ((StageOneTalon.get()/StageTwoTalon.get()) *100) + " %");
LCD.println(Line.kUser4, 1,"S1:" + speed1);
LCD.println(Line.kUser5, 1,"S2:" + speed2);
LCD.println(Line.kUser1, 1, "RPM1: " + (speedOne * Scaler));
LCD.println(Line.kUser2, 1, "RPM2: " + (speedTwo * Scaler));
LCD.updateLCD();
frontCameraServo.set(0.0);
double servoPosition = frontCameraServo.get();
String tilt = Double.toString(servoPosition);
LCD.println(Line.kUser6, 1, "Servo: " + tilt);
LCD.updateLCD();
try{
Thread.sleep(1000);
}
catch(Exception e){
}
frontCameraServo.set(1.0);
servoPosition = frontCameraServo.get();
tilt = Double.toString(servoPosition);
LCD.println(Line.kUser6, 1, "Servo: " + tilt);
LCD.updateLCD();
/*if (shootA & !flagA) { //increases speed
objShooter.speedChange();
LCD.println(Line.kUser2, 1, "Pressing A");
LCD.updateLCD();
flagA = true;
}
if (!shootA & flagA) { //if a is not pressed and it has been pressed set it to false
flagA = false;
}
if (shootB & !flagB) { //decreases speed
objShooter.speedChange();
LCD.println(Line.kUser2, 1, "Pressing B");
LCD.updateLCD();
flagB = true;
}
if (!shootB & flagB) { //if b is not pressed and it has been pressed set it to false
flagB = false;
}
if (shootX & stageOneScaler <= 100 & !flagX){
stageOneScaler += 0.05;
//changes stage1 percentage of stage2 adds 5%
LCD.println(Line.kUser6, 1, "Adding 5% to Stage One Percentile");
LCD.updateLCD();
flagX = true;
}
if (!shootX & flagX) { //if x is not pressed and it has been pressed set it to false
flagX = false;
}
if (shootY & !flagY){
objShooter.percentageSubtract();
LCD.println(Line.kUser2, 1, "Pressing Y");
LCD.updateLCD();
}*/
String currentTime = Double.toString(robotTimer.get());
LCD.println(Line.kUser6, 1, currentTime);
}
}
}
|
/**
* Generated with Acceleo
*/
package org.wso2.developerstudio.eclipse.gmf.esb.parts.forms;
// Start of user code for imports
import java.util.ArrayList;
import java.util.List;
import org.eclipse.emf.common.util.BasicEList;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider;
import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.part.impl.SectionPropertiesEditingPart;
import org.eclipse.emf.eef.runtime.ui.parts.PartComposer;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionStep;
import org.eclipse.emf.eef.runtime.ui.utils.EditingUtils;
import org.eclipse.emf.eef.runtime.ui.widgets.EEFFeatureEditorDialog;
import org.eclipse.emf.eef.runtime.ui.widgets.FormUtils;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable.ReferencesTableListener;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableContentProvider;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.KeyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.Form;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.forms.widgets.Section;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.NamespacedProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.RegistryKeyProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.impl.EsbFactoryImpl;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.presentation.EEFPropertyViewUtil;
import org.wso2.developerstudio.eclipse.gmf.esb.presentation.EEFRegistryKeyPropertyEditorDialog;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;
import org.wso2.developerstudio.esb.form.editors.article.providers.NamedEntityDescriptor;
// End of user code
public class SequencesPropertiesEditionPartForm extends SectionPropertiesEditingPart implements IFormPropertiesEditionPart, SequencesPropertiesEditionPart {
protected Text description;
protected Text commentsList;
protected Button editCommentsList;
protected EList commentsListList;
protected Text name;
protected Button recieveSequence;
protected Text associatedProxy;
protected Button editAssociatedProxy;
protected EList associatedProxyList;
protected ReferencesTable templateParameters;
protected List<ViewerFilter> templateParametersBusinessFilters = new ArrayList<ViewerFilter>();
protected List<ViewerFilter> templateParametersFilters = new ArrayList<ViewerFilter>();
protected Button traceEnabled;
protected Button statisticsEnabled;
protected GridData templateParametersData;
// Start of user code for onError widgets declarations
protected RegistryKeyProperty onError;
protected Text onErrorText;
protected Control[] onErrorElements;
protected Control[] nameElements;
protected Control[] traceEnabledElements;
protected Control[] staticsEnabledElements;
protected Control[] createTemplateParameterElements;
protected Composite propertiesGroup;
// End of user code
/**
* For {@link ISection} use only.
*/
public SequencesPropertiesEditionPartForm() { super(); }
/**
* Default constructor
* @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
*
*/
public SequencesPropertiesEditionPartForm(IPropertiesEditionComponent editionComponent) {
super(editionComponent);
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
* createFigure(org.eclipse.swt.widgets.Composite, org.eclipse.ui.forms.widgets.FormToolkit)
*
*/
public Composite createFigure(final Composite parent, final FormToolkit widgetFactory) {
Form form = widgetFactory.createForm(parent);
view = form.getBody();
GridLayout layout = new GridLayout();
layout.numColumns = 3;
view.setLayout(layout);
createControls(widgetFactory, view);
return form;
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
* createControls(org.eclipse.ui.forms.widgets.FormToolkit, org.eclipse.swt.widgets.Composite)
*
*/
public void createControls(final FormToolkit widgetFactory, Composite view) {
CompositionSequence sequencesStep = new BindingCompositionSequence(propertiesEditionComponent);
CompositionStep propertiesStep = sequencesStep.addStep(EsbViewsRepository.Sequences.Properties.class);
propertiesStep.addStep(EsbViewsRepository.Sequences.Properties.description);
propertiesStep.addStep(EsbViewsRepository.Sequences.Properties.commentsList);
propertiesStep.addStep(EsbViewsRepository.Sequences.Properties.name);
propertiesStep.addStep(EsbViewsRepository.Sequences.Properties.recieveSequence);
propertiesStep.addStep(EsbViewsRepository.Sequences.Properties.associatedProxy);
propertiesStep.addStep(EsbViewsRepository.Sequences.Properties.templateParameters);
propertiesStep.addStep(EsbViewsRepository.Sequences.Properties.traceEnabled);
propertiesStep.addStep(EsbViewsRepository.Sequences.Properties.statisticsEnabled);
propertiesStep.addStep(EsbViewsRepository.Sequences.Properties.onError);
composer = new PartComposer(sequencesStep) {
@Override
public Composite addToPart(Composite parent, Object key) {
if (key == EsbViewsRepository.Sequences.Properties.class) {
return createPropertiesGroup(widgetFactory, parent);
}
if (key == EsbViewsRepository.Sequences.Properties.description) {
return createDescriptionText(widgetFactory, parent);
}
if (key == EsbViewsRepository.Sequences.Properties.commentsList) {
return createCommentsListMultiValuedEditor(widgetFactory, parent);
}
if (key == EsbViewsRepository.Sequences.Properties.name) {
return createNameText(widgetFactory, parent);
}
if (key == EsbViewsRepository.Sequences.Properties.recieveSequence) {
return createRecieveSequenceCheckbox(widgetFactory, parent);
}
if (key == EsbViewsRepository.Sequences.Properties.associatedProxy) {
return createAssociatedProxyMultiValuedEditor(widgetFactory, parent);
}
if (key == EsbViewsRepository.Sequences.Properties.templateParameters) {
return createTemplateParametersTableComposition(widgetFactory, parent);
}
if (key == EsbViewsRepository.Sequences.Properties.traceEnabled) {
return createTraceEnabledCheckbox(widgetFactory, parent);
}
if (key == EsbViewsRepository.Sequences.Properties.statisticsEnabled) {
return createStatisticsEnabledCheckbox(widgetFactory, parent);
}
// Start of user code for onError addToPart creation
if (key == EsbViewsRepository.Sequences.Properties.onError) {
return createOnError(widgetFactory, parent);
}
// End of user code
return parent;
}
};
composer.compose(view);
}
/**
* @generated NOT
*/
protected Composite createPropertiesGroup(FormToolkit widgetFactory, final Composite parent) {
Section propertiesSection = widgetFactory.createSection(parent, Section.TITLE_BAR | Section.TWISTIE | Section.EXPANDED);
propertiesSection.setText(EsbMessages.SequencesPropertiesEditionPart_PropertiesGroupLabel);
GridData propertiesSectionData = new GridData(GridData.FILL_HORIZONTAL);
propertiesSectionData.horizontalSpan = 3;
propertiesSection.setLayoutData(propertiesSectionData);
propertiesGroup = widgetFactory.createComposite(propertiesSection);
GridLayout propertiesGroupLayout = new GridLayout();
propertiesGroupLayout.numColumns = 3;
propertiesGroup.setLayout(propertiesGroupLayout);
propertiesSection.setClient(propertiesGroup);
return propertiesGroup;
}
protected Composite createDescriptionText(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.Sequences.Properties.description, EsbMessages.SequencesPropertiesEditionPart_DescriptionLabel);
description = widgetFactory.createText(parent, ""); //$NON-NLS-1$
description.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(parent);
GridData descriptionData = new GridData(GridData.FILL_HORIZONTAL);
description.setLayoutData(descriptionData);
description.addFocusListener(new FocusAdapter() {
/**
* @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void focusLost(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
SequencesPropertiesEditionPartForm.this,
EsbViewsRepository.Sequences.Properties.description,
PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, description.getText()));
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
SequencesPropertiesEditionPartForm.this,
EsbViewsRepository.Sequences.Properties.description,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
null, description.getText()));
}
}
/**
* @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
*/
@Override
public void focusGained(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
SequencesPropertiesEditionPartForm.this,
null,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
null, null));
}
}
});
description.addKeyListener(new KeyAdapter() {
/**
* @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void keyPressed(KeyEvent e) {
if (e.character == SWT.CR) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.description, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, description.getText()));
}
}
});
EditingUtils.setID(description, EsbViewsRepository.Sequences.Properties.description);
EditingUtils.setEEFtype(description, "eef::Text"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Sequences.Properties.description, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createDescriptionText
// End of user code
return parent;
}
protected Composite createCommentsListMultiValuedEditor(FormToolkit widgetFactory, Composite parent) {
commentsList = widgetFactory.createText(parent, "", SWT.READ_ONLY); //$NON-NLS-1$
GridData commentsListData = new GridData(GridData.FILL_HORIZONTAL);
commentsListData.horizontalSpan = 2;
commentsList.setLayoutData(commentsListData);
EditingUtils.setID(commentsList, EsbViewsRepository.Sequences.Properties.commentsList);
EditingUtils.setEEFtype(commentsList, "eef::MultiValuedEditor::field"); //$NON-NLS-1$
editCommentsList = widgetFactory.createButton(parent, getDescription(EsbViewsRepository.Sequences.Properties.commentsList, EsbMessages.SequencesPropertiesEditionPart_CommentsListLabel), SWT.NONE);
GridData editCommentsListData = new GridData();
editCommentsList.setLayoutData(editCommentsListData);
editCommentsList.addSelectionListener(new SelectionAdapter() {
/**
* {@inheritDoc}
*
* @see org.eclipse.swt.events.SelectionAdapter#widgetSelected(org.eclipse.swt.events.SelectionEvent)
*
*/
public void widgetSelected(SelectionEvent e) {
EEFFeatureEditorDialog dialog = new EEFFeatureEditorDialog(
commentsList.getShell(), "Sequences", new AdapterFactoryLabelProvider(adapterFactory), //$NON-NLS-1$
commentsListList, EsbPackage.eINSTANCE.getEsbElement_CommentsList().getEType(), null,
false, true,
null, null);
if (dialog.open() == Window.OK) {
commentsListList = dialog.getResult();
if (commentsListList == null) {
commentsListList = new BasicEList();
}
commentsList.setText(commentsListList.toString());
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.commentsList, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, new BasicEList(commentsListList)));
setHasChanged(true);
}
}
});
EditingUtils.setID(editCommentsList, EsbViewsRepository.Sequences.Properties.commentsList);
EditingUtils.setEEFtype(editCommentsList, "eef::MultiValuedEditor::browsebutton"); //$NON-NLS-1$
// Start of user code for createCommentsListMultiValuedEditor
// End of user code
return parent;
}
/**
* @generated NOT
*/
protected Composite createNameText(FormToolkit widgetFactory, Composite parent) {
Control nameLabel = createDescription(parent, EsbViewsRepository.Sequences.Properties.name, EsbMessages.SequencesPropertiesEditionPart_NameLabel);
name = widgetFactory.createText(parent, ""); //$NON-NLS-1$
name.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(parent);
GridData nameData = new GridData(GridData.FILL_HORIZONTAL);
name.setLayoutData(nameData);
name.addFocusListener(new FocusAdapter() {
/**
* @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void focusLost(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
SequencesPropertiesEditionPartForm.this,
EsbViewsRepository.Sequences.Properties.name,
PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, name.getText()));
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
SequencesPropertiesEditionPartForm.this,
EsbViewsRepository.Sequences.Properties.name,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
null, name.getText()));
}
}
/**
* @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
*/
@Override
public void focusGained(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
SequencesPropertiesEditionPartForm.this,
null,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
null, null));
}
}
});
name.addKeyListener(new KeyAdapter() {
/**
* @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void keyPressed(KeyEvent e) {
if (e.character == SWT.CR) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.name, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, name.getText()));
}
}
});
EditingUtils.setID(name, EsbViewsRepository.Sequences.Properties.name);
EditingUtils.setEEFtype(name, "eef::Text"); //$NON-NLS-1$
Control nameHelp = FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Sequences.Properties.name, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createNameText
nameElements = new Control[] { nameLabel, name, nameHelp };
// End of user code
return parent;
}
protected Composite createRecieveSequenceCheckbox(FormToolkit widgetFactory, Composite parent) {
recieveSequence = widgetFactory.createButton(parent, getDescription(EsbViewsRepository.Sequences.Properties.recieveSequence, EsbMessages.SequencesPropertiesEditionPart_RecieveSequenceLabel), SWT.CHECK);
recieveSequence.addSelectionListener(new SelectionAdapter() {
/**
* {@inheritDoc}
*
* @see org.eclipse.swt.events.SelectionAdapter#widgetSelected(org.eclipse.swt.events.SelectionEvent)
*
*/
public void widgetSelected(SelectionEvent e) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.recieveSequence, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, new Boolean(recieveSequence.getSelection())));
}
});
GridData recieveSequenceData = new GridData(GridData.FILL_HORIZONTAL);
recieveSequenceData.horizontalSpan = 2;
recieveSequence.setLayoutData(recieveSequenceData);
EditingUtils.setID(recieveSequence, EsbViewsRepository.Sequences.Properties.recieveSequence);
EditingUtils.setEEFtype(recieveSequence, "eef::Checkbox"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Sequences.Properties.recieveSequence, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createRecieveSequenceCheckbox
// End of user code
return parent;
}
protected Composite createAssociatedProxyMultiValuedEditor(FormToolkit widgetFactory, Composite parent) {
associatedProxy = widgetFactory.createText(parent, "", SWT.READ_ONLY); //$NON-NLS-1$
GridData associatedProxyData = new GridData(GridData.FILL_HORIZONTAL);
associatedProxyData.horizontalSpan = 2;
associatedProxy.setLayoutData(associatedProxyData);
EditingUtils.setID(associatedProxy, EsbViewsRepository.Sequences.Properties.associatedProxy);
EditingUtils.setEEFtype(associatedProxy, "eef::MultiValuedEditor::field"); //$NON-NLS-1$
editAssociatedProxy = widgetFactory.createButton(parent, getDescription(EsbViewsRepository.Sequences.Properties.associatedProxy, EsbMessages.SequencesPropertiesEditionPart_AssociatedProxyLabel), SWT.NONE);
GridData editAssociatedProxyData = new GridData();
editAssociatedProxy.setLayoutData(editAssociatedProxyData);
editAssociatedProxy.addSelectionListener(new SelectionAdapter() {
/**
* {@inheritDoc}
*
* @see org.eclipse.swt.events.SelectionAdapter#widgetSelected(org.eclipse.swt.events.SelectionEvent)
*
*/
public void widgetSelected(SelectionEvent e) {
EEFFeatureEditorDialog dialog = new EEFFeatureEditorDialog(
associatedProxy.getShell(), "Sequences", new AdapterFactoryLabelProvider(adapterFactory), //$NON-NLS-1$
associatedProxyList, EsbPackage.eINSTANCE.getSequences_AssociatedProxy().getEType(), null,
false, true,
null, null);
if (dialog.open() == Window.OK) {
associatedProxyList = dialog.getResult();
if (associatedProxyList == null) {
associatedProxyList = new BasicEList();
}
associatedProxy.setText(associatedProxyList.toString());
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.associatedProxy, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, new BasicEList(associatedProxyList)));
setHasChanged(true);
}
}
});
EditingUtils.setID(editAssociatedProxy, EsbViewsRepository.Sequences.Properties.associatedProxy);
EditingUtils.setEEFtype(editAssociatedProxy, "eef::MultiValuedEditor::browsebutton"); //$NON-NLS-1$
// Start of user code for createAssociatedProxyMultiValuedEditor
// End of user code
return parent;
}
/**
* @generated NOT
*/
protected Composite createTemplateParametersTableComposition(FormToolkit widgetFactory, Composite parent) {
Control[] previousControls = propertiesGroup.getChildren();
this.templateParameters = new ReferencesTable(getDescription(EsbViewsRepository.Sequences.Properties.templateParameters, EsbMessages.SequencesPropertiesEditionPart_TemplateParametersLabel), new ReferencesTableListener() {
public void handleAdd() {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.templateParameters, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, null));
templateParameters.refresh();
}
public void handleEdit(EObject element) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.templateParameters, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.EDIT, null, element));
templateParameters.refresh();
}
public void handleMove(EObject element, int oldIndex, int newIndex) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.templateParameters, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex));
templateParameters.refresh();
}
public void handleRemove(EObject element) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.templateParameters, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element));
templateParameters.refresh();
}
public void navigateTo(EObject element) { }
});
for (ViewerFilter filter : this.templateParametersFilters) {
this.templateParameters.addFilter(filter);
}
this.templateParameters.setHelpText(propertiesEditionComponent.getHelpContent(EsbViewsRepository.Sequences.Properties.templateParameters, EsbViewsRepository.FORM_KIND));
this.templateParameters.createControls(parent, widgetFactory);
this.templateParameters.addSelectionListener(new SelectionAdapter() {
public void widgetSelected(SelectionEvent e) {
if (e.item != null && e.item.getData() instanceof EObject) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.templateParameters, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData()));
}
}
});
templateParametersData = new GridData(GridData.FILL_HORIZONTAL);
templateParametersData.horizontalSpan = 3;
this.templateParameters.setLayoutData(templateParametersData);
this.templateParameters.setLowerBound(0);
this.templateParameters.setUpperBound(-1);
templateParameters.setID(EsbViewsRepository.Sequences.Properties.templateParameters);
templateParameters.setEEFType("eef::AdvancedTableComposition"); //$NON-NLS-1$
// Start of user code for createTemplateParametersTableComposition
Control[] newControls = propertiesGroup.getChildren();
createTemplateParameterElements = EEFPropertyViewUtil.getTableElements(previousControls, newControls);
// End of user code
return parent;
}
/**
* @generated NOT
*/
protected Composite createTraceEnabledCheckbox(FormToolkit widgetFactory, Composite parent) {
traceEnabled = widgetFactory.createButton(parent, getDescription(EsbViewsRepository.Sequences.Properties.traceEnabled, EsbMessages.SequencesPropertiesEditionPart_TraceEnabledLabel), SWT.CHECK);
traceEnabled.addSelectionListener(new SelectionAdapter() {
/**
* {@inheritDoc}
*
* @see org.eclipse.swt.events.SelectionAdapter#widgetSelected(org.eclipse.swt.events.SelectionEvent)
*
*/
public void widgetSelected(SelectionEvent e) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.traceEnabled, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, new Boolean(traceEnabled.getSelection())));
}
});
GridData traceEnabledData = new GridData(GridData.FILL_HORIZONTAL);
traceEnabledData.horizontalSpan = 2;
traceEnabled.setLayoutData(traceEnabledData);
EditingUtils.setID(traceEnabled, EsbViewsRepository.Sequences.Properties.traceEnabled);
EditingUtils.setEEFtype(traceEnabled, "eef::Checkbox"); //$NON-NLS-1$
Control traceEnabledHelp = FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Sequences.Properties.traceEnabled, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createTraceEnabledCheckbox
traceEnabledElements = new Control[] { traceEnabled, traceEnabledHelp };
// End of user code
return parent;
}
/**
* @generated NOT
*/
protected Composite createStatisticsEnabledCheckbox(FormToolkit widgetFactory, Composite parent) {
statisticsEnabled = widgetFactory.createButton(parent, getDescription(EsbViewsRepository.Sequences.Properties.statisticsEnabled, EsbMessages.SequencesPropertiesEditionPart_StatisticsEnabledLabel), SWT.CHECK);
statisticsEnabled.addSelectionListener(new SelectionAdapter() {
/**
* {@inheritDoc}
*
* @see org.eclipse.swt.events.SelectionAdapter#widgetSelected(org.eclipse.swt.events.SelectionEvent)
*
*/
public void widgetSelected(SelectionEvent e) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(SequencesPropertiesEditionPartForm.this, EsbViewsRepository.Sequences.Properties.statisticsEnabled, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, new Boolean(statisticsEnabled.getSelection())));
}
});
GridData statisticsEnabledData = new GridData(GridData.FILL_HORIZONTAL);
statisticsEnabledData.horizontalSpan = 2;
statisticsEnabled.setLayoutData(statisticsEnabledData);
EditingUtils.setID(statisticsEnabled, EsbViewsRepository.Sequences.Properties.statisticsEnabled);
EditingUtils.setEEFtype(statisticsEnabled, "eef::Checkbox"); //$NON-NLS-1$
Control statEnableHelp = FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Sequences.Properties.statisticsEnabled, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createStatisticsEnabledCheckbox
staticsEnabledElements = new Control[] { statisticsEnabled, statEnableHelp };
// End of user code
return parent;
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
*
*/
public void firePropertiesChanged(IPropertiesEditionEvent event) {
// Start of user code for tab synchronization
// End of user code
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#getDescription()
*
*/
public String getDescription() {
return description.getText();
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#setDescription(String newValue)
*
*/
public void setDescription(String newValue) {
if (newValue != null) {
description.setText(newValue);
} else {
description.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Sequences.Properties.description);
if (eefElementEditorReadOnlyState && description.isEnabled()) {
description.setEnabled(false);
description.setToolTipText(EsbMessages.Sequences_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !description.isEnabled()) {
description.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#getCommentsList()
*
*/
public EList getCommentsList() {
return commentsListList;
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#setCommentsList(EList newValue)
*
*/
public void setCommentsList(EList newValue) {
commentsListList = newValue;
if (newValue != null) {
commentsList.setText(commentsListList.toString());
} else {
commentsList.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Sequences.Properties.commentsList);
if (eefElementEditorReadOnlyState && commentsList.isEnabled()) {
commentsList.setEnabled(false);
commentsList.setToolTipText(EsbMessages.Sequences_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !commentsList.isEnabled()) {
commentsList.setEnabled(true);
}
}
public void addToCommentsList(Object newValue) {
commentsListList.add(newValue);
if (newValue != null) {
commentsList.setText(commentsListList.toString());
} else {
commentsList.setText(""); //$NON-NLS-1$
}
}
public void removeToCommentsList(Object newValue) {
commentsListList.remove(newValue);
if (newValue != null) {
commentsList.setText(commentsListList.toString());
} else {
commentsList.setText(""); //$NON-NLS-1$
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#getName()
*
*/
public String getName() {
return name.getText();
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#setName(String newValue)
*
*/
public void setName(String newValue) {
if (newValue != null) {
name.setText(newValue);
} else {
name.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Sequences.Properties.name);
if (eefElementEditorReadOnlyState && name.isEnabled()) {
name.setEnabled(false);
name.setToolTipText(EsbMessages.Sequences_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !name.isEnabled()) {
name.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#getRecieveSequence()
*
*/
public Boolean getRecieveSequence() {
return Boolean.valueOf(recieveSequence.getSelection());
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#setRecieveSequence(Boolean newValue)
*
*/
public void setRecieveSequence(Boolean newValue) {
if (newValue != null) {
recieveSequence.setSelection(newValue.booleanValue());
} else {
recieveSequence.setSelection(false);
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Sequences.Properties.recieveSequence);
if (eefElementEditorReadOnlyState && recieveSequence.isEnabled()) {
recieveSequence.setEnabled(false);
recieveSequence.setToolTipText(EsbMessages.Sequences_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !recieveSequence.isEnabled()) {
recieveSequence.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#getAssociatedProxy()
*
*/
public EList getAssociatedProxy() {
return associatedProxyList;
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#setAssociatedProxy(EList newValue)
*
*/
public void setAssociatedProxy(EList newValue) {
associatedProxyList = newValue;
if (newValue != null) {
associatedProxy.setText(associatedProxyList.toString());
} else {
associatedProxy.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Sequences.Properties.associatedProxy);
if (eefElementEditorReadOnlyState && associatedProxy.isEnabled()) {
associatedProxy.setEnabled(false);
associatedProxy.setToolTipText(EsbMessages.Sequences_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !associatedProxy.isEnabled()) {
associatedProxy.setEnabled(true);
}
}
public void addToAssociatedProxy(Object newValue) {
associatedProxyList.add(newValue);
if (newValue != null) {
associatedProxy.setText(associatedProxyList.toString());
} else {
associatedProxy.setText(""); //$NON-NLS-1$
}
}
public void removeToAssociatedProxy(Object newValue) {
associatedProxyList.remove(newValue);
if (newValue != null) {
associatedProxy.setText(associatedProxyList.toString());
} else {
associatedProxy.setText(""); //$NON-NLS-1$
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#initTemplateParameters(EObject current, EReference containingFeature, EReference feature)
*/
public void initTemplateParameters(ReferencesTableSettings settings) {
if (current.eResource() != null && current.eResource().getResourceSet() != null)
this.resourceSet = current.eResource().getResourceSet();
ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider();
templateParameters.setContentProvider(contentProvider);
templateParameters.setInput(settings);
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Sequences.Properties.templateParameters);
if (eefElementEditorReadOnlyState && templateParameters.isEnabled()) {
templateParameters.setEnabled(false);
templateParameters.setToolTipText(EsbMessages.Sequences_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !templateParameters.isEnabled()) {
templateParameters.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#updateTemplateParameters()
*
*/
public void updateTemplateParameters() {
templateParameters.refresh();
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#addFilterTemplateParameters(ViewerFilter filter)
*
*/
public void addFilterToTemplateParameters(ViewerFilter filter) {
templateParametersFilters.add(filter);
if (this.templateParameters != null) {
this.templateParameters.addFilter(filter);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#addBusinessFilterTemplateParameters(ViewerFilter filter)
*
*/
public void addBusinessFilterToTemplateParameters(ViewerFilter filter) {
templateParametersBusinessFilters.add(filter);
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#isContainedInTemplateParametersTable(EObject element)
*
*/
public boolean isContainedInTemplateParametersTable(EObject element) {
return ((ReferencesTableSettings)templateParameters.getInput()).contains(element);
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#getTraceEnabled()
*
*/
public Boolean getTraceEnabled() {
return Boolean.valueOf(traceEnabled.getSelection());
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#setTraceEnabled(Boolean newValue)
*
*/
public void setTraceEnabled(Boolean newValue) {
if (newValue != null) {
traceEnabled.setSelection(newValue.booleanValue());
} else {
traceEnabled.setSelection(false);
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Sequences.Properties.traceEnabled);
if (eefElementEditorReadOnlyState && traceEnabled.isEnabled()) {
traceEnabled.setEnabled(false);
traceEnabled.setToolTipText(EsbMessages.Sequences_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !traceEnabled.isEnabled()) {
traceEnabled.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#getStatisticsEnabled()
*
*/
public Boolean getStatisticsEnabled() {
return Boolean.valueOf(statisticsEnabled.getSelection());
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.SequencesPropertiesEditionPart#setStatisticsEnabled(Boolean newValue)
*
*/
public void setStatisticsEnabled(Boolean newValue) {
if (newValue != null) {
statisticsEnabled.setSelection(newValue.booleanValue());
} else {
statisticsEnabled.setSelection(false);
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Sequences.Properties.statisticsEnabled);
if (eefElementEditorReadOnlyState && statisticsEnabled.isEnabled()) {
statisticsEnabled.setEnabled(false);
statisticsEnabled.setToolTipText(EsbMessages.Sequences_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !statisticsEnabled.isEnabled()) {
statisticsEnabled.setEnabled(true);
}
}
// Start of user code for onError specific getters and setters implementation
@Override
public RegistryKeyProperty getOnError() {
return onError;
}
@Override
public void setOnError(RegistryKeyProperty registryKeyProperty) {
if(registryKeyProperty != null) {
onErrorText.setText(registryKeyProperty.getKeyValue());
onError = registryKeyProperty;
}
}
// End of user code
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
*
*/
public String getTitle() {
return EsbMessages.Sequences_Part_Title;
}
// Start of user code additional methods
/**
* @generated NOT
*/
protected Composite createOnError(FormToolkit widgetFactory, Composite parent) {
Control onErrorLabel = createDescription(parent,
EsbViewsRepository.Sequences.Properties.onError,
EsbMessages.SequencesPropertiesEditionPart_OnErrorLabel);
widgetFactory.paintBordersFor(parent);
if (onError == null) {
onError = EsbFactoryImpl.eINSTANCE.createRegistryKeyProperty();
}
String initValueExpression = onError.getKeyValue().isEmpty() ? "" : onError.getKeyValue();
onErrorText = widgetFactory.createText(parent, initValueExpression, SWT.READ_ONLY);
onErrorText.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(parent);
GridData valueData = new GridData(GridData.FILL_HORIZONTAL);
onErrorText.setLayoutData(valueData);
onErrorText.addMouseListener(new MouseAdapter() {
@Override
public void mouseDown( MouseEvent event ) {
EEFRegistryKeyPropertyEditorDialog dialog = new EEFRegistryKeyPropertyEditorDialog(view.getShell(),
SWT.NULL, onError, new ArrayList<NamedEntityDescriptor>());
dialog.open();
onErrorText.setText(onError.getKeyValue());
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
SequencesPropertiesEditionPartForm.this,
EsbViewsRepository.Sequences.Properties.onError, PropertiesEditionEvent.COMMIT,
PropertiesEditionEvent.SET, null, getOnError()));
}
});
onErrorText.addKeyListener(new KeyListener() {
@Override
public void keyPressed(KeyEvent e) {
}
@Override
public void keyReleased(KeyEvent e) {
if (!EEFPropertyViewUtil.isReservedKeyCombination(e)) {
EEFRegistryKeyPropertyEditorDialog dialog = new EEFRegistryKeyPropertyEditorDialog(view.getShell(),
SWT.NULL, onError, new ArrayList<NamedEntityDescriptor>());
dialog.open();
onErrorText.setText(onError.getKeyValue());
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
SequencesPropertiesEditionPartForm.this,
EsbViewsRepository.Sequences.Properties.onError, PropertiesEditionEvent.COMMIT,
PropertiesEditionEvent.SET, null, getOnError()));
}
}
});
EditingUtils.setID(onErrorText, EsbViewsRepository.Sequences.Properties.onError);
EditingUtils.setEEFtype(onErrorText, "eef::Text");
Control onErrorHelp = FormUtils.createHelpButton(widgetFactory, parent,
propertiesEditionComponent.getHelpContent(EsbViewsRepository.Sequences.Properties.onError,
EsbViewsRepository.FORM_KIND),
null); // $NON-NLS-1$
onErrorElements = new Control[] { onErrorLabel, onErrorText, onErrorHelp };
return parent;
}
@Override
public void refresh() {
super.refresh();
validate();
}
EEFPropertyViewUtil viewUtil = new EEFPropertyViewUtil(view);
public void validate() {
viewUtil.clearElements(new Composite[] { propertiesGroup });
/*for (Control control : propertiesGroup.getChildren()) {
if (!(control instanceof Composite)) {
viewUtil.clearElement(control);
}
}*/
viewUtil.showEntry(nameElements, false);
viewUtil.showEntry(onErrorElements, false);
viewUtil.showEntry(traceEnabledElements, false);
viewUtil.showEntry(staticsEnabledElements, false);
view.layout(true, true);
}
// End of user code
}
|
package gov.nih.nci.calab.dto.common;
import gov.nih.nci.calab.domain.AssociatedFile;
import gov.nih.nci.calab.domain.DerivedDataFile;
import gov.nih.nci.calab.domain.Keyword;
import gov.nih.nci.calab.domain.LabFile;
import gov.nih.nci.calab.domain.OutputFile;
import gov.nih.nci.calab.domain.Report;
import gov.nih.nci.calab.service.util.CalabConstants;
import gov.nih.nci.calab.service.util.CananoConstants;
import gov.nih.nci.calab.service.util.StringUtils;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* This class represents attributes of a lab file to be viewed in a view page.
*
* @author pansu
*
*/
public class LabFileBean {
private String title;
private String description;
private String comments;
private String[] keywords = new String[0];
private String[] visibilityGroups;
private Date createdDate;
private String createdBy;
private String id;
private String path;
private String name;
private String type;
private String keywordsStr;
private String visibilityStr;
/*
* name to be displayed as a part of the drop-down list
*/
private String displayName;
public LabFileBean() {
super();
// TODO Auto-generated constructor stub
}
public LabFileBean(LabFile charFile, String fileType) {
this.id = charFile.getId().toString();
this.name = charFile.getFilename();
this.path = charFile.getPath();
this.title = charFile.getTitle();
this.description = charFile.getDescription();
this.createdBy = charFile.getCreatedBy();
this.createdDate = charFile.getCreatedDate();
if (charFile instanceof DerivedDataFile) {
List<String> allkeywords = new ArrayList<String>();
for (Keyword keyword : ((DerivedDataFile) charFile)
.getKeywordCollection()) {
allkeywords.add(keyword.getName());
}
this.keywords = allkeywords.toArray(new String[allkeywords.size()]);
}
this.type = fileType;
}
public String getComments() {
return comments;
}
public void setComments(String comments) {
this.comments = comments;
}
public String getCreatedBy() {
return createdBy;
}
public void setCreatedBy(String createdBy) {
this.createdBy = createdBy;
}
public Date getCreatedDate() {
return createdDate;
}
public void setCreatedDate(Date createdDate) {
this.createdDate = createdDate;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String[] getKeywords() {
return keywords;
}
public void setKeywords(String[] keywords) {
this.keywords = keywords;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String[] getVisibilityGroups() {
return visibilityGroups;
}
public void setVisibilityGroups(String[] visibilityGroups) {
this.visibilityGroups = visibilityGroups;
}
public DerivedDataFile getDomainObject() {
DerivedDataFile labfile = new DerivedDataFile();
if (id != null && id.length() > 0) {
labfile.setId(new Long(id));
}
labfile.setCreatedBy(createdBy);
labfile.setCreatedDate(createdDate);
labfile.setDescription(description);
labfile.setFilename(name);
labfile.setPath(path);
labfile.setTitle(title);
return labfile;
}
public String getDisplayName() {
displayName = path.replaceAll("/decompressedFiles", "");
return displayName;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getKeywordsStr() {
keywordsStr = StringUtils.join(keywords, "<br>");
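// e.g. keywords {"gold", "silica"} (hypothetical values) would render as "gold<br>silica" for HTML display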
return keywordsStr;
}
public String getVisibilityStr() {
visibilityStr = StringUtils.join(visibilityGroups, "<br>");
return visibilityStr;
}
}
|
package org.jaxen.expr;
import org.jaxen.JaxenException;
import org.jaxen.saxpath.Operator;
/**
* An abstract factory used to create individual path component objects.
*
*/
public interface XPathFactory
{
/**
* Create a new <code>XPathExpr</code> from an <code>Expr</code>.
*
* @param rootExpr the expression wrapped by the resulting XPathExpr
* @return an XPathExpr wrapping the root expression
* @throws JaxenException
*/
XPathExpr createXPath( Expr rootExpr ) throws JaxenException;
/**
* Create a new path expression.
*
* @param filterExpr the filter expression that starts the path expression
* @param locationPath the location path that follows the filter expression
* @return a path expression formed by concatenating the two arguments
* @throws JaxenException
*/
PathExpr createPathExpr( FilterExpr filterExpr,
LocationPath locationPath ) throws JaxenException;
/**
* Create a new empty relative location path.
*
* @return an empty relative location path
* @throws JaxenException
*/
LocationPath createRelativeLocationPath() throws JaxenException;
/**
* Create a new empty absolute location path.
*
* @return an empty absolute location path
* @throws JaxenException
*/
LocationPath createAbsoluteLocationPath() throws JaxenException;
/**
* Returns a new XPath Or expression.
*
* @param lhs the left hand side of the expression
* @param rhs the right hand side of the expression
* @return <code><i>lhs</i> or <i>rhs</i></code>
* @throws JaxenException
*/
BinaryExpr createOrExpr( Expr lhs,
Expr rhs ) throws JaxenException;
/**
* Returns a new XPath And expression.
*
* @param lhs the left hand side of the expression
* @param rhs the right hand side of the expression
* @return <code><i>lhs</i> and <i>rhs</i></code>
* @throws JaxenException
*/
BinaryExpr createAndExpr( Expr lhs,
Expr rhs ) throws JaxenException;
/**
* Returns a new XPath equality expression.
*
* @param lhs the left hand side of the expression
* @param rhs the right hand side of the expression
* @param equalityOperator <code>Operator.EQUALS</code> or <code>Operator.NOT_EQUALS</code>
* @return <code><i>lhs</i> = <i>rhs</i></code> or <code><i>lhs</i> != <i>rhs</i></code>
* @throws JaxenException if the third argument is not
* <code>Operator.EQUALS</code> or <code>Operator.NOT_EQUALS</code>
*/
BinaryExpr createEqualityExpr( Expr lhs,
Expr rhs,
int equalityOperator ) throws JaxenException;
/**
* Returns a new XPath relational expression.
*
* @param lhs the left hand side of the expression
* @param rhs the right hand side of the expression
* @param relationalOperator <code>Operator.LESS_THAN</code>, <code>Operator.GREATER_THAN</code>,
* <code>Operator.LESS_THAN_EQUALS</code>, or <code>Operator.GREATER_THAN_EQUALS</code>
* @return <code><i>lhs</i> <i>relationalOperator</i> <i>rhs</i></code>
* @throws JaxenException if the third argument is not a relational operator constant
*/
BinaryExpr createRelationalExpr( Expr lhs,
Expr rhs,
int relationalOperator ) throws JaxenException;
/**
* Returns a new XPath additive expression.
*
* @param lhs the left hand side of the expression
* @param rhs the right hand side of the expression
* @param additiveOperator <code>Operator.ADD</code> or <code>Operator.SUBTRACT</code>
* @return <code><i>lhs</i> + <i>rhs</i></code> or <code><i>lhs</i> - <i>rhs</i></code>
* @throws JaxenException if the third argument is not
* <code>Operator.ADD</code> or <code>Operator.SUBTRACT</code>
*/
BinaryExpr createAdditiveExpr( Expr lhs,
Expr rhs,
int additiveOperator ) throws JaxenException;
/**
* Returns a new XPath multiplicative expression.
*
* @param lhs the left hand side of the expression
* @param rhs the right hand side of the expression
* @param multiplicativeOperator <code>Operator.MULTIPLY</code>,
* <code>Operator.DIV</code>, or <code>Operator.MOD</code>
* @return <code><i>lhs</i> * <i>rhs</i></code>, <code><i>lhs</i> div <i>rhs</i></code>,
* or <code><i>lhs</i> mod <i>rhs</i></code>
* @throws JaxenException if the third argument is not a multiplicative operator constant
*/
BinaryExpr createMultiplicativeExpr( Expr lhs,
Expr rhs,
int multiplicativeOperator ) throws JaxenException;
/**
* Returns a new XPath unary expression.
*
* @param expr the expression to be negated
* @param unaryOperator <code>Operator.NEGATIVE</code>
* @return <code>- <i>expr</i></code> or <code><i>expr</i></code>
* @throws JaxenException
*/
Expr createUnaryExpr( Expr expr,
int unaryOperator ) throws JaxenException;
/**
* Returns a new XPath union expression.
*
* @param lhs the left hand side of the expression
* @param rhs the right hand side of the expression
* @return <code><i>lhs</i> | <i>rhs</i></code>
* @throws JaxenException
*/
UnionExpr createUnionExpr( Expr lhs,
Expr rhs ) throws JaxenException;
/**
* Returns a new XPath filter expression.
*
* @param expr the basic expression to which the predicate will be added
* @return the expression with an empty predicate set
* @throws JaxenException
*/
FilterExpr createFilterExpr( Expr expr ) throws JaxenException;
/**
* Create a new function call expression.
*
* @param prefix the namespace prefix of the function
* @param functionName the local name of the function
* @return a function with an empty argument list
* @throws JaxenException
*/
FunctionCallExpr createFunctionCallExpr( String prefix,
String functionName ) throws JaxenException;
/**
* Create a number expression.
*
* @param number the value
* @return a number expression wrapping that value
* @throws JaxenException
*/
NumberExpr createNumberExpr( int number ) throws JaxenException;
/**
* Create a number expression.
*
* @param number the value
* @return a number expression wrapping that value
* @throws JaxenException
*/
NumberExpr createNumberExpr( double number ) throws JaxenException;
/**
* Create a string literal expression.
*
* @param literal the value
* @return a literal expression wrapping that value
* @throws JaxenException
*/
LiteralExpr createLiteralExpr( String literal ) throws JaxenException;
/**
* Create a new variable reference expression.
*
* @param prefix the namespace prefix of the variable
* @param variableName the local name of the variable
* @return a variable expression
* @throws JaxenException
*/
VariableReferenceExpr createVariableReferenceExpr( String prefix,
String variableName ) throws JaxenException;
/**
* Create a step with a named node-test.
*
* @param axis the axis to create the name-test on
* @param prefix the namespace prefix for the test
* @param localName the local name for the test
* @return a name step
* @throws JaxenException if <code>axis</code> is not one of the axis constants
*/
Step createNameStep( int axis,
String prefix,
String localName ) throws JaxenException;
/**
* Create a step with a node() node-test.
*
* @param axis the axis to create the node-test on
* @return an all node step
* @throws JaxenException if <code>axis</code> is not one of the axis constants
*/
Step createAllNodeStep( int axis ) throws JaxenException;
/**
* Create a step with a <code>comment()</code> node-test.
*
* @param axis the axis to create the <code>comment()</code> node-test on
* @return a comment node step
* @throws JaxenException if <code>axis</code> is not one of the axis constants
*/
Step createCommentNodeStep( int axis ) throws JaxenException;
/**
* Create a step with a <code>text()</code> node-test.
*
* @param axis the axis to create the <code>text()</code> node-test on
* @return a text node step
* @throws JaxenException if <code>axis</code> is not one of the axis constants
*/
Step createTextNodeStep( int axis ) throws JaxenException;
/**
* Create a step with a <code>processing-instruction()</code> node-test.
*
* @param axis the axis to create the <code>processing-instruction()</code> node-test on
* @param name the target to match, may be empty
* @return a processing instruction node step
* @throws JaxenException if <code>axis</code> is not one of the axis constants
*/
Step createProcessingInstructionNodeStep( int axis,
String name ) throws JaxenException;
/**
* Create a new predicate from the supplied expression.
*
* @param predicateExpr the expression to evaluate in the predicate
* @return a predicate
* @throws JaxenException
*/
Predicate createPredicate( Expr predicateExpr ) throws JaxenException;
/**
* Create an empty predicate set.
*
* @return an empty predicate set
* @throws JaxenException
*/
PredicateSet createPredicateSet() throws JaxenException;
}
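/*
* Illustrative usage sketch, assuming a concrete implementation such as DefaultXPathFactory,
* the axis constants from org.jaxen.saxpath.Axis, and that LocationPath exposes addStep(Step).
* Building the expression "/foo" might look like:
*
*   XPathFactory factory = new DefaultXPathFactory();
*   LocationPath path = factory.createAbsoluteLocationPath();
*   path.addStep(factory.createNameStep(Axis.CHILD, "", "foo"));
*   XPathExpr xpath = factory.createXPath(path);
*/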
|
package gov.nih.nci.calab.dto.workflow;
import java.util.List;
public class AssayBean {
private String assayId;
private String assayName;
private String assayType;
private String assayStr;
private List runBeans;
public AssayBean() {
super();
}
public AssayBean(String assayId, String assayName, String assayType) {
super();
this.assayId = assayId;
this.assayName = assayName;
this.assayType = assayType;
}
public String getAssayId() {
return assayId;
}
public void setAssayId(String assayId) {
this.assayId = assayId;
}
public String getAssayName() {
return assayName;
}
public void setAssayName(String assayName) {
this.assayName = assayName;
}
public String getAssayType() {
return assayType;
}
public void setAssayType(String assayType) {
this.assayType = assayType;
}
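// Combined "type : name" label used for display.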
public String getAssayStr() {
return this.assayType + " : " + this.assayName;
}
// public void setAssayStr(String assayStr) {
// this.assayStr = assayStr;
// }
public List getRunBeans() {
return runBeans;
}
public void setRunBeans(List runBeans) {
this.runBeans = runBeans;
}
}
|
package ceylon.language;
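/**
* Thin wrapper around a java.lang.String; instances are obtained via
* {@link #instance(java.lang.String)} since the constructor is private.
*/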
public final class String extends Object
{
public final java.lang.String value;
private String(java.lang.String s) {
value = s;
}
public java.lang.String toJavaString() {
return value;
}
public static ceylon.language.String instance(java.lang.String s) {
return new ceylon.language.String(s);
}
}
|
package org.wso2.carbon.gateway.httploadbalancer.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.gateway.core.config.Parameter;
import org.wso2.carbon.gateway.core.config.ParameterHolder;
import org.wso2.carbon.gateway.core.config.dsl.external.WUMLConfigurationBuilder;
import org.wso2.carbon.gateway.core.outbound.OutboundEndpoint;
import org.wso2.carbon.gateway.httploadbalancer.constants.LoadBalancerConstants;
import org.wso2.carbon.gateway.httploadbalancer.context.LoadBalancerConfigContext;
import org.wso2.carbon.gateway.httploadbalancer.mediator.LoadBalancerMediatorBuilder;
import org.wso2.carbon.gateway.httploadbalancer.outbound.LBOutboundEndpoint;
import org.wso2.carbon.gateway.httploadbalancer.utils.CommonUtil;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
/**
* A Class responsible for loading LB config from WUMLBaseListenerImpl.java to LoadBalancerConfigContext.
* <p>
* All validations and conversions are done here.
*/
public class LoadBalancerConfigHolder {
private static final Logger log = LoggerFactory.getLogger(LoadBalancerConfigHolder.class);
private ParameterHolder loadbalancerConfigs;
private WUMLConfigurationBuilder.IntegrationFlow integrationFlow;
private final LoadBalancerConfigContext context;
/**
* Default Constructor.
*/
public LoadBalancerConfigHolder() {
this.loadbalancerConfigs = new ParameterHolder();
this.context = new LoadBalancerConfigContext();
}
public ParameterHolder getLoadbalancerConfigs() {
return loadbalancerConfigs;
}
public void setLoadbalancerConfigs(ParameterHolder loadbalancerConfigs) {
this.loadbalancerConfigs = loadbalancerConfigs;
}
public WUMLConfigurationBuilder.IntegrationFlow getIntegrationFlow() {
return integrationFlow;
}
public void setIntegrationFlow(WUMLConfigurationBuilder.IntegrationFlow integrationFlow) {
this.integrationFlow = integrationFlow;
}
/**
* @param param parameter to be added to the config.
*/
public void addToConfig(Parameter param) {
loadbalancerConfigs.addParameter(param);
//Parameter addedParam = this.getFromConfig(param.getName());
//log.info(addedParam.getName() + " : " + addedParam.getValue());
}
/**
* @param paramName parameterName to be removed from config.
*/
public void removeFromConfig(String paramName) {
loadbalancerConfigs.removeParameter(paramName);
}
/**
* @return returns all configs.
*/
public ParameterHolder getAllConfigs() {
return loadbalancerConfigs;
}
/**
* @param paramName parameterName
* @return Parameter object corresponding to that name.
*/
public Parameter getFromConfig(String paramName) {
return loadbalancerConfigs.getParameter(paramName);
}
/**
* Performs validation and configures the LoadBalancerMediator for the given integration flow.
*
* @param integrationFlow the integration flow object
*/
public void configureLoadBalancerMediator(WUMLConfigurationBuilder.IntegrationFlow integrationFlow) {
this.integrationFlow = integrationFlow;
Set<Map.Entry<String, OutboundEndpoint>> entrySet = integrationFlow.
getGWConfigHolder().getOutboundEndpoints().entrySet();
/*
* All OutboundEndpoint objects must be accessed via LBOutboundEndpoint, so build an
* LBOutboundEndpoint map that mirrors the OutboundEndpoint map.
* See the LBOutboundEndpoint class for details.
*/
Map<String, LBOutboundEndpoint> lbOutboundEndpointMap = new ConcurrentHashMap<>();
for (Map.Entry entry : entrySet) {
lbOutboundEndpointMap.put(entry.getKey().toString(),
new LBOutboundEndpoint((OutboundEndpoint) entry.getValue()));
}
context.setLbOutboundEndpoints(lbOutboundEndpointMap);
validateConfig();
LoadBalancerMediatorBuilder.configure(this.integrationFlow.getGWConfigHolder(), context);
}
/**
* @param timeOut timeout value in milliseconds
* @return true if the given timeout does not exceed the configured maximum, false otherwise
*/
private boolean isWithInLimit(int timeOut) {
return timeOut <= LoadBalancerConstants.MAX_TIMEOUT_VAL;
}
/**
* Populates the cookie handling maps for the given outbound endpoints.
*
* @param endpoints outbound endpoints keyed as in the gateway configuration
*/
private void populateCookieMaps(Map<String, OutboundEndpoint> endpoints) {
//Initializing cookie maps.
context.initCookieMaps();
int index = 1;
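// For each endpoint, map the generated cookie name (COOKIE_PREFIX + index) to the endpoint key,
// and the endpoint's host:port back to that cookie name.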
Set<Map.Entry<String, OutboundEndpoint>> entrySet = endpoints.entrySet();
for (Map.Entry entry : entrySet) {
context.addToCookieToOutboundEPKeyMap(
LoadBalancerConstants.COOKIE_PREFIX + String.valueOf(index),
entry.getKey().toString());
context.addToOutboundEPTOCookieMap(
CommonUtil.getHostAndPort(((OutboundEndpoint) entry.getValue()).getUri()),
LoadBalancerConstants.COOKIE_PREFIX + String.valueOf(index));
index++;
}
}
/** Algorithm related validations. */
private void validateAlgorithm() {
if (this.getFromConfig(LoadBalancerConstants.ALGORITHM_NAME).getValue().
equals(LoadBalancerConstants.ROUND_ROBIN)) {
context.setAlgorithm(this.getFromConfig(LoadBalancerConstants.ALGORITHM_NAME).getValue());
log.info("Algorithm : " + context.getAlgorithm());
} else if (this.getFromConfig(LoadBalancerConstants.ALGORITHM_NAME).getValue().
equals(LoadBalancerConstants.STRICT_IP_HASHING)) {
context.setAlgorithm(this.getFromConfig(LoadBalancerConstants.ALGORITHM_NAME).getValue());
log.info("Algorithm : " + context.getAlgorithm());
} else {
log.error("Currently this algorithm type is not supported...");
}
}
/** Session persistence related validations. */
private void validatePersistence() {
String persistenceType = this.getFromConfig(LoadBalancerConstants.PERSISTENCE_TYPE).getValue();
if (persistenceType.equals(LoadBalancerConstants.NO_PERSISTENCE)) {
context.setPersistence(persistenceType);
log.info("Persistence : " + context.getPersistence());
} else if (persistenceType.equals(LoadBalancerConstants.APPLICATION_COOKIE)) {
context.setPersistence(persistenceType);
log.info("Persistence : " + context.getPersistence());
populateCookieMaps(integrationFlow.getGWConfigHolder().getOutboundEndpoints());
} else if (persistenceType.equals(LoadBalancerConstants.LB_COOKIE)) {
//TODO: Populate cookie map.
context.setPersistence(persistenceType);
log.info("Persistence : " + context.getPersistence());
/** TODO: Discuss this.
if (loadbalancerConfigs.getParameter(LoadBalancerConstants.PERSISTENCE_SESSION_TIME_OUT) != null) {
String sessionTimeout = this.getFromConfig
(LoadBalancerConstants.PERSISTENCE_SESSION_TIME_OUT).getValue();
int sessTimeout = CommonUtil.getTimeInMilliSeconds(sessionTimeout);
if (isWithInLimit(sessTimeout)) {
context.setSessionPersistenceTimeout(sessTimeout);
log.info("Persistence Timeout : " + context.getSessionPersistenceTimeout());
} else {
//TODO: Is this okay..?
context.setSessionPersistenceTimeout(LoadBalancerConstants.DEFAULT_REQ_TIMEOUT);
log.error("Value greater than Max limit. Loading default value...Persistence Timeout : " +
context.getSessionPersistenceTimeout());
}
} else {
log.info("For LB_COOKIE session cookie time out has to be specified...");
//TODO: Is this okay..?
context.setSessionPersistenceTimeout(LoadBalancerConstants.DEFAULT_REQ_TIMEOUT);
log.error("For LB_COOKIE session cookie time out has to be specified.. Loading default value..." +
"Persistence Timeout : " + context.getSessionPersistenceTimeout());
}**/
populateCookieMaps(integrationFlow.getGWConfigHolder().getOutboundEndpoints());
} else if (persistenceType.equals(LoadBalancerConstants.CLIENT_IP_HASHING)) {
context.setPersistence(persistenceType);
log.info("Persistence : " + context.getPersistence());
}
}
/** SSL related validations. */
private void validateSSL() {
if (this.getFromConfig(LoadBalancerConstants.SSL_TYPE).getValue().
equals(LoadBalancerConstants.NO_SSL)) {
context.setSslType(this.getFromConfig(LoadBalancerConstants.SSL_TYPE).getValue());
log.info("SSL Support : " + context.getSslType());
} else {
log.info("Currently this type of SSL is not supported..");
}
}
/** Health check related validations. */
private void validateHealthCheck() {
/**
*For PASSIVE_HEALTH_CHECK.
*/
if (this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_TYPE).getValue().
equals(LoadBalancerConstants.PASSIVE_HEALTH_CHECK)) {
context.setHealthCheck(this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_TYPE).getValue());
log.info("HEALTH CHECK TYPE : " + context.getHealthCheck());
if (this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_REQUEST_TIMEOUT) != null) {
String hcReqTimeOut = this.getFromConfig
(LoadBalancerConstants.HEALTH_CHECK_REQUEST_TIMEOUT).getValue();
int timeout = CommonUtil.getTimeInMilliSeconds(hcReqTimeOut);
if (isWithInLimit(timeout)) {
context.setReqTimeout(timeout);
log.info("Request TIME_OUT : " + context.getReqTimeout());
} else {
//TODO: Is this okay..?
context.setReqTimeout(LoadBalancerConstants.DEFAULT_REQ_TIMEOUT);
log.error("Exceeded TIMEOUT LIMIT. Loading DEFAULT value for " +
"Request TIME_OUT : " + context.getReqTimeout());
}
} else {
//TODO: Is this okay..?
context.setReqTimeout(LoadBalancerConstants.DEFAULT_REQ_TIMEOUT);
log.error("LB_REQUEST_TIMEOUT NOT SPECIFIED. Loading DEFAULT value for " +
"Request TIME_OUT : " + context.getReqTimeout());
}
if (this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_UNHEALTHY_RETRIES) != null) {
String hcUHRetries = this.getFromConfig
(LoadBalancerConstants.HEALTH_CHECK_UNHEALTHY_RETRIES).getValue();
int uhRetries = CommonUtil.getRetriesCount(hcUHRetries);
context.setUnHealthyRetries(uhRetries);
log.info(LoadBalancerConstants.HEALTH_CHECK_UNHEALTHY_RETRIES + " : " + context.getUnHealthyRetries());
} else {
//TODO: Is this okay..?
context.setUnHealthyRetries(LoadBalancerConstants.DEFAULT_RETRIES);
log.error("UNHEALTHY_RETRIES_VALUE NOT SPECIFIED.. Loading default value." +
LoadBalancerConstants.HEALTH_CHECK_UNHEALTHY_RETRIES + " : " +
context.getUnHealthyRetries());
}
if (this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_HEALTHY_RETRIES) != null) {
String hcHRetries = this.getFromConfig
(LoadBalancerConstants.HEALTH_CHECK_HEALTHY_RETRIES).getValue();
int hRetries = CommonUtil.getRetriesCount(hcHRetries);
context.setHealthyRetries(hRetries);
log.info(LoadBalancerConstants.HEALTH_CHECK_HEALTHY_RETRIES + " : " + context.getHealthyRetries());
} else {
//TODO: Is this okay..?
context.setHealthyRetries(LoadBalancerConstants.DEFAULT_RETRIES);
log.error("HEALTHY_RETRIES_VALUE NOT SPECIFIED.. Loading default value." +
LoadBalancerConstants.HEALTH_CHECK_HEALTHY_RETRIES + " : " + context.getHealthyRetries());
}
if (this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_HEALTHY_CHECK_INTERVAL) != null) {
String hcHCInterval = this.getFromConfig
(LoadBalancerConstants.HEALTH_CHECK_HEALTHY_CHECK_INTERVAL).getValue();
int interval = CommonUtil.getTimeInMilliSeconds(hcHCInterval);
if (isWithInLimit(interval)) {
context.setHealthycheckInterval(interval);
log.info(LoadBalancerConstants.HEALTH_CHECK_HEALTHY_CHECK_INTERVAL + " : " +
context.getHealthycheckInterval());
} else {
//TODO: Is this okay..?
context.setHealthycheckInterval(LoadBalancerConstants.DEFAULT_REQ_TIMEOUT);
log.error("Exceeded HEALTHY_CHECK_TIMEOUT LIMIT. Loading DEFAULT value for " +
LoadBalancerConstants.HEALTH_CHECK_HEALTHY_CHECK_INTERVAL + " : " +
context.getHealthycheckInterval());
}
} else {
//TODO: Is this okay..?
context.setHealthycheckInterval(LoadBalancerConstants.DEFAULT_REQ_TIMEOUT);
log.error("HEALTHY_CHECK_TIMEOUT LIMIT NOT SPECIFIED. Loading DEFAULT value for " +
LoadBalancerConstants.HEALTH_CHECK_HEALTHY_CHECK_INTERVAL + " : " +
context.getHealthycheckInterval());
}
validateTestRequest();
//For DEFAULT_HEALTH_CHECK
} else if (this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_TYPE).getValue().
equals(LoadBalancerConstants.DEFAULT_HEALTH_CHECK)) {
context.setHealthCheck(this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_TYPE).getValue());
log.info("HEALTH CHECK TYPE : " + context.getHealthCheck());
context.setUnHealthyRetries(LoadBalancerConstants.DEFAULT_RETRIES);
context.setHealthyRetries(LoadBalancerConstants.DEFAULT_RETRIES);
context.setReqTimeout(LoadBalancerConstants.DEFAULT_REQ_TIMEOUT);
context.setHealthycheckInterval(LoadBalancerConstants.DEFAULT_HEALTHY_CHECK_INTERVAL);
validateTestRequest();
//FOR NO_HEALTH_CHECK
} else if (this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_TYPE).getValue().
equals(LoadBalancerConstants.NO_HEALTH_CHECK)) {
context.setHealthCheck(this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_TYPE).getValue());
log.info("HEALTH CHECK TYPE : " + context.getHealthCheck());
}
}
private void validateTestRequest() {
if (this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_TEST_REQUEST) != null) {
this.context.setTestRequest(
this.getFromConfig(LoadBalancerConstants.HEALTH_CHECK_TEST_REQUEST).getValue());
log.info("TestRequest : " + context.getTestRequest());
} else {
log.info("TestRequest : " + context.getTestRequest());
log.error("Health Check is enabled, but no testRequest is provided. LB will try only to make" +
"Inet Socket connection to check timeOut. ");
}
}
/**
* This method validates a given configuration, if anything is missing default value will be added.
* TODO: check default values limit.
*/
private void validateConfig() {
validateAlgorithm();
validatePersistence();
validateSSL();
validateHealthCheck();
}
}
|
package org.xwiki.extension.internal;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.xwiki.component.annotation.Component;
import org.xwiki.configuration.ConfigurationSource;
import org.xwiki.container.Container;
import org.xwiki.extension.ExtensionManagerConfiguration;
import org.xwiki.extension.repository.ExtensionRepositoryId;
/**
* Default implementation of {@link ExtensionManagerConfiguration}.
*
* @version $Id$
*/
@Component
@Singleton
public class DefaultExtensionManagerConfiguration implements ExtensionManagerConfiguration
{
/**
* Used to parse repositories entries from the configuration.
*/
private static final Pattern REPOSITORYIDPATTERN = Pattern.compile("([^:]+):([^:]+):(.+)");
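// Expected entry format: <id>:<type>:<uri>, e.g. "maven-xwiki:maven:http://nexus.xwiki.org/nexus/content/groups/public".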
/**
* The type identifier for a maven repository.
*/
private static final String TYPE_MAVEN = "maven";
/**
* The type identifier for a xwiki repository.
*/
private static final String TYPE_XWIKI = "xwiki";
/**
* Used to manipulate xwiki.properties files.
*/
@Inject
@Named("xwikiproperties")
private ConfigurationSource configurationSource;
/**
* The logger.
*/
@Inject
private Logger logger;
/**
* Used to get work directory.
*/
@Inject
private Container container;
// Cache
/**
* @see DefaultExtensionManagerConfiguration#getLocalRepository()
*/
private File localRepository;
/**
* @return the extension manager home folder
*/
public File getHome()
{
return new File(this.container.getApplicationContext().getPermanentDirectory(), "extension/");
}
@Override
public File getLocalRepository()
{
if (this.localRepository == null) {
String localRepositoryPath = this.configurationSource.getProperty("extension.localRepository");
if (localRepositoryPath == null) {
this.localRepository = new File(getHome(), "repository/");
} else {
this.localRepository = new File(localRepositoryPath);
}
}
return this.localRepository;
}
@Override
public List<ExtensionRepositoryId> getRepositories()
{
List<ExtensionRepositoryId> repositories = new ArrayList<ExtensionRepositoryId>();
List<String> repositoryStrings =
this.configurationSource.getProperty("extension.repositories", Collections.<String> emptyList());
if (repositoryStrings != null && !repositoryStrings.isEmpty()) {
for (String repositoryString : repositoryStrings) {
if (StringUtils.isNotBlank(repositoryString)) {
try {
ExtensionRepositoryId extensionRepositoryId = parseRepository(repositoryString);
repositories.add(extensionRepositoryId);
} catch (Exception e) {
this.logger.warn("Faild to parse repository [" + repositoryString + "] from configuration", e);
}
} else {
this.logger.debug("Empty repository id found in the configuration");
}
}
} else {
try {
repositories.add(new ExtensionRepositoryId("maven-xwiki", TYPE_MAVEN, new URI(
"http://nexus.xwiki.org/nexus/content/groups/public")));
repositories.add(new ExtensionRepositoryId("extensions.xwiki.org", TYPE_XWIKI, new URI(
"http://extensions.xwiki.org/xwiki/rest/")));
} catch (Exception e) {
// Should never happen
}
}
return repositories;
}
/**
* Create a {@link ExtensionRepositoryId} from a string entry.
*
* @param repositoryString the repository configuration entry
* @return the {@link ExtensionRepositoryId}
* @throws URISyntaxException Failed to create an {@link URI} object from the configuration entry
* @throws ExtensionManagerConfigurationException Failed to parse configuration
*/
private ExtensionRepositoryId parseRepository(String repositoryString) throws URISyntaxException,
ExtensionManagerConfigurationException
{
Matcher matcher = REPOSITORYIDPATTERN.matcher(repositoryString);
if (matcher.matches()) {
return new ExtensionRepositoryId(matcher.group(1), matcher.group(2), new URI(matcher.group(3)));
}
throw new ExtensionManagerConfigurationException("Invalid repository configuration format ["
+ repositoryString + "]");
}
}
|
package test.org.opentdc.wtt;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.cxf.jaxrs.client.WebClient;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.opentdc.addressbooks.AddressbookModel;
import org.opentdc.wtt.CompanyModel;
import org.opentdc.wtt.ProjectModel;
import org.opentdc.wtt.WttService;
import test.org.opentdc.AbstractTestClient;
import test.org.opentdc.addressbooks.AddressbookTest;
public class ProjectTest extends AbstractTestClient {
public static final String PATH_EL_PROJECT = "project";
private WebClient wttWC = null;
private WebClient addressbookWC = null;
private CompanyModel company = null;
private AddressbookModel addressbook = null;
@Before
public void initializeTests() {
wttWC = initializeTest(CompanyTest.API_URL, WttService.class);
addressbookWC = AddressbookTest.createAddressbookWebClient();
addressbook = AddressbookTest.createAddressbook(addressbookWC, this.getClass().getName());
company = CompanyTest.createCompany(wttWC, addressbookWC, addressbook, this.getClass().getName(), "MY_DESC");
}
@After
public void cleanupTest() {
AddressbookTest.cleanup(addressbookWC, addressbook.getId(), this.getClass().getName());
CompanyTest.cleanup(wttWC, company.getId(), this.getClass().getName());
}
@Test
public void testProjectModelEmptyConstructor() {
// new() -> _pm
ProjectModel _pm = new ProjectModel();
assertNull("id should not be set by empty constructor", _pm.getId());
assertNull("title should not be set by empty constructor", _pm.getTitle());
assertNull("description should not be set by empty constructor", _pm.getDescription());
}
@Test
public void testProjectModelConstructor() {
// new("MY_TITLE", "MY_DESC") -> _pm
ProjectModel _pm = new ProjectModel("MY_TITLE", "MY_DESC");
assertNull("id should not be set by constructor", _pm.getId());
assertEquals("title should be set by constructor", "MY_TITLE", _pm.getTitle());
assertEquals("description should be set by constructor", "MY_DESC", _pm.getDescription());
}
@Test
public void testProjectIdAttributeChange() {
// new() -> _pm -> _pm.setId()
ProjectModel _pm = new ProjectModel();
assertNull("id should not be set by constructor", _pm.getId());
_pm.setId("MY_ID");
assertEquals("id should have changed:", "MY_ID", _pm.getId());
}
@Test
public void testProjectTitleAttributeChange() {
// new() -> _pm -> _pm.setTitle()
ProjectModel _pm = new ProjectModel();
assertNull("title should not be set by empty constructor", _pm.getTitle());
_pm.setTitle("MY_TITLE");
assertEquals("title should have changed:", "MY_TITLE", _pm.getTitle());
}
@Test
public void testProjectDescriptionAttributeChange() {
// new() -> _pm -> _pm.setDescription()
ProjectModel _pm = new ProjectModel();
assertNull("description should not be set by empty constructor", _pm.getDescription());
_pm.setDescription("MY_DESC");
assertEquals("description should have changed:", "MY_DESC", _pm.getDescription());
}
@Test
public void testProjectCreatedBy() {
// new() -> _pm -> _pm.setCreatedBy()
ProjectModel _pm = new ProjectModel();
assertNull("createdBy should not be set by empty constructor", _pm.getCreatedBy());
_pm.setCreatedBy("MY_NAME");
assertEquals("createdBy should have changed", "MY_NAME", _pm.getCreatedBy());
}
@Test
public void testProjectCreatedAt() {
// new() -> _pm -> _pm.setCreatedAt()
ProjectModel _pm = new ProjectModel();
assertNull("createdAt should not be set by empty constructor", _pm.getCreatedAt());
_pm.setCreatedAt(new Date());
assertNotNull("createdAt should have changed", _pm.getCreatedAt());
}
@Test
public void testProjectModifiedBy() {
// new() -> _pm -> _pm.setModifiedBy()
ProjectModel _pm = new ProjectModel();
assertNull("modifiedBy should not be set by empty constructor", _pm.getModifiedBy());
_pm.setModifiedBy("MY_NAME");
assertEquals("modifiedBy should have changed", "MY_NAME", _pm.getModifiedBy());
}
@Test
public void testProjectModifiedAt() {
// new() -> _pm -> _pm.setModifiedAt()
ProjectModel _pm = new ProjectModel();
assertNull("modifiedAt should not be set by empty constructor", _pm.getModifiedAt());
_pm.setModifiedAt(new Date());
assertNotNull("modifiedAt should have changed", _pm.getModifiedAt());
}
@Test
public void testProjectCreateReadDeleteWithEmptyConstructor() {
// new() -> _pm1
ProjectModel _pm1 = new ProjectModel();
assertNull("id should not be set by empty constructor", _pm1.getId());
assertNull("title should not be set by empty constructor", _pm1.getTitle());
assertNull("description should not be set by empty constructor", _pm1.getDescription());
// create(_pm1) -> BAD_REQUEST (because of empty title)
Response _response = wttWC.replacePath("/").post(_pm1);
assertEquals("create() should return with status BAD_REQUEST", Status.BAD_REQUEST.getStatusCode(), _response.getStatus());
_pm1.setTitle("testProjectCreateReadDeleteWithEmptyConstructor");
// create(_pm1) -> _pm2
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm1);
assertEquals("create() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm2 = _response.readEntity(ProjectModel.class);
// validate _pm1 (local object)
assertNull("create() should not change the id of the local object", _pm1.getId());
assertEquals("create() should not change the title of the local object", "testProjectCreateReadDeleteWithEmptyConstructor", _pm1.getTitle());
assertNull("create() should not change the description of the local object", _pm1.getDescription());
// validate _pm2 (remote object returned from create())
assertNotNull("create() should set a valid id on the remote object returned", _pm2.getId());
assertEquals("create() should not change the title", "testProjectCreateReadDeleteWithEmptyConstructor", _pm2.getTitle());
assertNull("create() should not change the description", _pm2.getDescription());
// read(_pm2) -> _pm3
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).get();
assertEquals("read(" + _pm2.getId() + ") should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm3 = _response.readEntity(ProjectModel.class);
// validate _pm3 (remote object returned from read())
assertEquals("id of returned object should be the same", _pm2.getId(), _pm3.getId());
assertEquals("title of returned object should be unchanged after remote create", _pm2.getTitle(), _pm3.getTitle());
assertEquals("description of returned object should be unchanged after remote create", _pm2.getDescription(), _pm3.getDescription());
// delete(_pm3)
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm3.getId()).delete();
assertEquals("delete(" + _pm3.getId() + ") should return with status NO_CONTENT:", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
}
@Test
public void testProjectCreateReadDelete() {
// new(1) -> _pm1
ProjectModel _pm1 = new ProjectModel("testProjectCreateReadDelete", "MY_DESC");
assertNull("id should not be set by constructor", _pm1.getId());
assertEquals("title should be set by constructor", "testProjectCreateReadDelete", _pm1.getTitle());
assertEquals("description should be set by constructor", "MY_DESC", _pm1.getDescription());
// create(_pm1) -> _pm2
Response _response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm1);
assertEquals("create() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm2 = _response.readEntity(ProjectModel.class);
// validate _pm1 (local object)
assertNull("id should still be null after remote create", _pm1.getId());
assertEquals("create() should not change the title", "testProjectCreateReadDelete", _pm1.getTitle());
assertEquals("craete() should not change the description", "MY_DESC", _pm1.getDescription());
// validate _pm2 (remote object returned from create())
assertNotNull("id of returned object should be set", _pm2.getId());
assertEquals("create() should not change the title", "testProjectCreateReadDelete", _pm2.getTitle());
assertEquals("create() should not change the description", "MY_DESC", _pm2.getDescription());
// read(_pm2) -> _pm3
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).get();
assertEquals("read(" + _pm2.getId() + ") should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm3 = _response.readEntity(ProjectModel.class);
// validate _pm3 (remote object returned from read())
assertEquals("read() should not change the id", _pm2.getId(), _pm3.getId());
assertEquals("read() should not change the title", _pm2.getTitle(), _pm3.getTitle());
assertEquals("read() should not change the description", _pm2.getDescription(), _pm3.getDescription());
// delete(_pm3)
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm3.getId()).delete();
assertEquals("delete(" + _pm3.getId() + ") should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
}
@Test
public void testCreateProjectWithClientSideId() {
// new() -> _pm1 -> _pm1.setId()
ProjectModel _pm1 = new ProjectModel("testCreateProjectWithClientSideId", "MY_DESC");
_pm1.setId("LOCAL_ID");
assertEquals("id should have changed", "LOCAL_ID", _pm1.getId());
// create(_pm1) -> BAD_REQUEST
Response _response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm1);
assertEquals("create() with an id generated by the client should be denied by the server", Status.BAD_REQUEST.getStatusCode(), _response.getStatus());
}
@Test
public void testCreateProjectWithDuplicateId() {
// create(new()) -> _pm1
Response _response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT)
.post(new ProjectModel("testCreateProjectWithDuplicateId1", "MY_DESC1"));
assertEquals("create() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm1 = _response.readEntity(ProjectModel.class);
// new() -> _pm2 -> _pm2.setId(_pm1.getId())
ProjectModel _pm2 = new ProjectModel("testCreateProjectWithDuplicateId2", "MY_DESC2");
_pm2.setId(_pm1.getId()); // wrongly create a 2nd ProjectModel object with the same ID
// create(_pm2) -> CONFLICT
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm2);
assertEquals("create() with a duplicate id should be denied by the server", Status.CONFLICT.getStatusCode(), _response.getStatus());
}
@Test
public void testProjectList() {
ArrayList<ProjectModel> _localList = new ArrayList<ProjectModel>();
Response _response = null;
for (int i = 0; i < LIMIT; i++) {
// create(new()) -> _localList
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT)
.post(new ProjectModel("testProjectList" + i, "MY_DESC" + i));
assertEquals("create() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
_localList.add(_response.readEntity(ProjectModel.class));
}
// list(/) -> _remoteList
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).get();
assertEquals("list() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
List<ProjectModel> _remoteList = new ArrayList<ProjectModel>(wttWC.getCollection(ProjectModel.class));
ArrayList<String> _remoteListIds = new ArrayList<String>();
for (ProjectModel _pm : _remoteList) {
_remoteListIds.add(_pm.getId());
}
for (ProjectModel _pm : _localList) {
assertTrue("project <" + _pm.getId() + "> should be listed", _remoteListIds.contains(_pm.getId()));
}
for (ProjectModel _pm : _localList) {
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm.getId()).get();
assertEquals("read() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
_response.readEntity(ProjectModel.class);
}
for (ProjectModel _pm : _localList) {
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm.getId()).delete();
assertEquals("delete() should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
}
}
@Test
public void testProjectCreate() {
// new(1) -> _pm1
ProjectModel _pm1 = new ProjectModel("testProjectCreate1", "MY_DESC1");
// new(2) -> _pm2
ProjectModel _pm2 = new ProjectModel("testProjectCreate2", "MY_DESC2");
// create(_pm1) -> _pm3
Response _response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm1);
assertEquals("create() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm3 = _response.readEntity(ProjectModel.class);
// create(_pm2) -> _pm4
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm2);
assertEquals("create() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm4 = _response.readEntity(ProjectModel.class);
// validate _pm3
assertNotNull("ID should be set", _pm3.getId());
assertEquals("title1 should be set correctly", "testProjectCreate1", _pm3.getTitle());
assertEquals("description1 should be set correctly", "MY_DESC1", _pm3.getDescription());
// validate _pm4
assertNotNull("ID should be set", _pm4.getId());
assertEquals("title2 should be set correctly", "testProjectCreate2", _pm4.getTitle());
assertEquals("description2 should be set correctly", "MY_DESC2", _pm4.getDescription());
assertThat(_pm4.getId(), not(equalTo(_pm3.getId())));
// delete(_pm3) -> NO_CONTENT
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm3.getId()).delete();
assertEquals("delete() should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
// delete(_pm4) -> NO_CONTENT
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm4.getId()).delete();
assertEquals("delete() should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
}
@Test
public void testProjectCreateDouble() {
// create(new()) -> _pm
Response _response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT)
.post(new ProjectModel("testProjectCreateDouble", "MY_DESC"));
assertEquals("create() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm = _response.readEntity(ProjectModel.class);
assertNotNull("ID should be set:", _pm.getId());
// create(_pm) -> CONFLICT
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm);
assertEquals("create() with a duplicate id should be denied by the server", Status.CONFLICT.getStatusCode(), _response.getStatus());
// delete(_pm) -> NO_CONTENT
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm.getId()).delete();
assertEquals("delete() should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
}
@Test
public void testProjectRead() {
ArrayList<ProjectModel> _localList = new ArrayList<ProjectModel>();
Response _response = null;
wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT);
for (int i = 0; i < LIMIT; i++) {
_response = wttWC.post(new ProjectModel("testProjectRead" + i, "MY_DESC" + i));
assertEquals("create() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
_localList.add(_response.readEntity(ProjectModel.class));
}
// test read on each local element
for (ProjectModel _pm : _localList) {
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm.getId()).get();
assertEquals("read() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
_response.readEntity(ProjectModel.class);
}
// test read on each listed element
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).get();
assertEquals("list() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
List<ProjectModel> _remoteList = new ArrayList<ProjectModel>(wttWC.getCollection(ProjectModel.class));
ProjectModel _tmpObj = null;
for (ProjectModel _pm : _remoteList) {
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm.getId()).get();
assertEquals("read() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
_tmpObj = _response.readEntity(ProjectModel.class);
assertEquals("ID should be unchanged when reading a project", _pm.getId(), _tmpObj.getId());
}
for (ProjectModel _pm : _localList) {
_response = wttWC.replacePath(_pm.getId()).delete();
assertEquals("delete() should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
}
}
@Test
public void testProjectMultiRead() {
// new() -> _pm1
ProjectModel _pm1 = new ProjectModel("testProjectMultiRead", "MY_DESC");
// create(_pm1) -> _pm2
Response _response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm1);
ProjectModel _pm2 = _response.readEntity(ProjectModel.class);
// read(_pm2) -> _pm3
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).get();
assertEquals("read() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm3 = _response.readEntity(ProjectModel.class);
assertEquals("ID should be unchanged after read:", _pm2.getId(), _pm3.getId());
// read(_pm2) -> _pm4
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).get();
assertEquals("read() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm4 = _response.readEntity(ProjectModel.class);
// but: the two objects are not equal !
assertEquals("ID should be the same:", _pm3.getId(), _pm4.getId());
assertEquals("title should be the same:", _pm3.getTitle(), _pm4.getTitle());
assertEquals("description should be the same:", _pm3.getDescription(), _pm4.getDescription());
assertEquals("ID should be the same:", _pm3.getId(), _pm2.getId());
assertEquals("title should be the same:", _pm3.getTitle(), _pm2.getTitle());
assertEquals("description should be the same:", _pm3.getDescription(), _pm2.getDescription());
// delete(_pm2)
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).delete();
assertEquals("delete() should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
}
@Test
public void testProjectUpdate() {
// new() -> _pm1
ProjectModel _pm1 = new ProjectModel("testProjectUpdate", "MY_DESC");
// create(_pm1) -> _pm2
Response _response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm1);
ProjectModel _pm2 = _response.readEntity(ProjectModel.class);
// change the attributes
// update(_pm2) -> _pm3
_pm2.setTitle("MY_TITLE");
_pm2.setDescription("MY_DESC");
wttWC.type(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON);
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).put(_pm2);
assertEquals("update() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm3 = _response.readEntity(ProjectModel.class);
assertNotNull("ID should be set", _pm3.getId());
assertEquals("ID should be unchanged", _pm2.getId(), _pm3.getId());
assertEquals("title should have changed", "MY_TITLE", _pm3.getTitle());
assertEquals("description should have changed", "MY_DESC", _pm3.getDescription());
// reset the attributes
// update(_pm2) -> _pm4
_pm2.setTitle("MY_TITLE2");
_pm2.setDescription("MY_DESC2");
wttWC.type(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON);
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).put(_pm2);
assertEquals("update() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm4 = _response.readEntity(ProjectModel.class);
assertNotNull("ID should be set", _pm4.getId());
assertEquals("ID should be unchanged", _pm2.getId(), _pm4.getId());
assertEquals("title should have changed", "MY_TITLE2", _pm4.getTitle());
assertEquals("description should have changed", "MY_DESC2", _pm4.getDescription());
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).delete();
assertEquals("delete() should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
}
@Test
public void testProjectDelete() {
// new() -> _pm1
ProjectModel _pm1 = new ProjectModel("testProjectDelete", "MY_DESC");
// create(_pm1) -> _pm2
Response _response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm1);
ProjectModel _pm2 = _response.readEntity(ProjectModel.class);
// read(_pm2) -> _pm3
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).get();
assertEquals("read() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm3 = _response.readEntity(ProjectModel.class);
assertEquals("ID should be unchanged when reading a project (remote):", _pm2.getId(), _pm3.getId());
// delete(_pm2) -> OK
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).delete();
assertEquals("delete() should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
// read the deleted object twice
// read(_pm2) -> NOT_FOUND
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).get();
assertEquals("read() should return with status NOT_FOUND", Status.NOT_FOUND.getStatusCode(), _response.getStatus());
// read(_pm2) -> NOT_FOUND
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).get();
assertEquals("read() should return with status NOT_FOUND", Status.NOT_FOUND.getStatusCode(), _response.getStatus());
}
@Test
public void testProjectDoubleDelete() {
// new() -> _pm1
ProjectModel _pm1 = new ProjectModel("testProjectDoubleDelete", "MY_DESC");
// create(_pm1) -> _pm2
Response _response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).post(_pm1);
ProjectModel _pm2 = _response.readEntity(ProjectModel.class);
// read(_pm2) -> OK
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).get();
assertEquals("read() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
// delete(_pm2) -> OK
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).delete();
assertEquals("delete() should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
// read(_pm2) -> NOT_FOUND
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).get();
assertEquals("read() should return with status NOT_FOUND", Status.NOT_FOUND.getStatusCode(), _response.getStatus());
// delete _pm2 -> NOT_FOUND
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).delete();
assertEquals("delete() should return with status NOT_FOUND", Status.NOT_FOUND.getStatusCode(), _response.getStatus());
// read _pm2 -> NOT_FOUND
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm2.getId()).get();
assertEquals("read() should return with status NOT_FOUND", Status.NOT_FOUND.getStatusCode(), _response.getStatus());
}
@Test
public void testProjectModifications() {
// create(new ProjectModel()) -> _pm1
Response _response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT)
.post(new ProjectModel("testProjectModifications", "MY_DESC"));
ProjectModel _pm1 = _response.readEntity(ProjectModel.class);
// test createdAt and createdBy
assertNotNull("create() should set createdAt", _pm1.getCreatedAt());
assertNotNull("create() should set createdBy", _pm1.getCreatedBy());
// test modifiedAt and modifiedBy (= same as createdAt/createdBy)
assertNotNull("create() should set modifiedAt", _pm1.getModifiedAt());
assertNotNull("create() should set modifiedBy", _pm1.getModifiedBy());
assertEquals("createdAt and modifiedAt should be identical after create()", _pm1.getCreatedAt(), _pm1.getModifiedAt());
assertEquals("createdBy and modifiedBy should be identical after create()", _pm1.getCreatedBy(), _pm1.getModifiedBy());
// update(_pm1) -> _pm2
_pm1.setTitle("NEW_TITLE");
wttWC.type(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON);
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm1.getId()).put(_pm1);
assertEquals("update() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm2 = _response.readEntity(ProjectModel.class);
// test createdAt and createdBy (unchanged)
assertEquals("update() should not change createdAt", _pm1.getCreatedAt(), _pm2.getCreatedAt());
assertEquals("update() should not change createdBy", _pm1.getCreatedBy(), _pm2.getCreatedBy());
// test modifiedAt and modifiedBy (= different from createdAt/createdBy)
assertTrue(_pm2.getModifiedAt().compareTo(_pm2.getCreatedAt()) >= 0);
// TODO: in our case, the modifying user will be the same; how can we test, that modifiedBy really changed ?
// assertThat(_pm2.getModifiedBy(), not(equalTo(_pm2.getCreatedBy())));
// update(_pm1) with modifiedBy/At set on client side -> ignored by server
_pm1.setModifiedBy("MYSELF");
_pm1.setModifiedAt(new Date(1000));
wttWC.type(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON);
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm1.getId()).put(_pm1);
assertEquals("update() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
ProjectModel _pm3 = _response.readEntity(ProjectModel.class);
// test, that modifiedBy really ignored the client-side value "MYSELF"
assertThat(_pm1.getModifiedBy(), not(equalTo(_pm3.getModifiedBy())));
// check whether the client-side modifiedAt() is ignored
assertThat(_pm1.getModifiedAt(), not(equalTo(_pm3.getModifiedAt())));
// delete(_o) -> NO_CONTENT
_response = wttWC.replacePath("/").path(company.getId()).path(PATH_EL_PROJECT).path(_pm1.getId()).delete();
assertEquals("delete() should return with status NO_CONTENT", Status.NO_CONTENT.getStatusCode(), _response.getStatus());
}
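/**
* Convenience helper that creates a project below the given company and asserts that the create succeeded.
*/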
public static ProjectModel createProject(
WebClient wttWC,
String companyId,
String title,
String description)
{
ProjectModel _pm = new ProjectModel();
_pm.setTitle(title);
_pm.setDescription(description);
Response _response = wttWC.replacePath("/").path(companyId).path(PATH_EL_PROJECT).post(_pm);
assertEquals("post() should return with status OK", Status.OK.getStatusCode(), _response.getStatus());
return _response.readEntity(ProjectModel.class);
}
}
|
package org.hswebframework.web.workflow.service.imp;
import io.vavr.Lazy;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.activiti.engine.repository.ProcessDefinition;
import org.activiti.engine.task.Task;
import org.apache.commons.codec.digest.DigestUtils;
import org.hswebframework.expands.script.engine.DynamicScriptEngine;
import org.hswebframework.expands.script.engine.DynamicScriptEngineFactory;
import org.hswebframework.expands.script.engine.ExecuteResult;
import org.hswebframework.web.BusinessException;
import org.hswebframework.web.authorization.Authentication;
import org.hswebframework.web.authorization.AuthenticationHolder;
import org.hswebframework.web.authorization.AuthenticationPredicate;
import org.hswebframework.web.authorization.exception.AccessDenyException;
import org.hswebframework.web.organizational.authorization.PersonnelAuthentication;
import org.hswebframework.web.organizational.authorization.PersonnelAuthenticationHolder;
import org.hswebframework.web.workflow.dao.entity.ActivityConfigEntity;
import org.hswebframework.web.workflow.dao.entity.ListenerConfig;
import org.hswebframework.web.workflow.dao.entity.ProcessDefineConfigEntity;
import org.hswebframework.web.workflow.dimension.CandidateDimension;
import org.hswebframework.web.workflow.dimension.CandidateDimensionParser;
import org.hswebframework.web.workflow.dimension.DimensionContext;
import org.hswebframework.web.workflow.dimension.PermissionDimensionParser;
import org.hswebframework.web.workflow.listener.ProcessEvent;
import org.hswebframework.web.workflow.listener.ProcessEventListener;
import org.hswebframework.web.workflow.listener.TaskEvent;
import org.hswebframework.web.workflow.listener.TaskEventListener;
import org.hswebframework.web.workflow.service.ActivityConfigService;
import org.hswebframework.web.workflow.service.ProcessDefineConfigService;
import org.hswebframework.web.workflow.service.config.ProcessConfigurationService;
import org.hswebframework.web.workflow.service.config.CandidateInfo;
import org.hswebframework.web.workflow.service.config.ActivityConfiguration;
import org.hswebframework.web.workflow.service.config.ProcessConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.stream.Collectors;
/**
* @author zhouhao
* @since 3.0.0-RC
*/
@Service
@Slf4j
public class ProcessConfigurationServiceImpl implements ProcessConfigurationService {
@Autowired
private ProcessDefineConfigService defineConfigService;
@Autowired
private ActivityConfigService activityConfigService;
@Autowired
private PermissionDimensionParser permissionDimensionParser;
@Autowired
private CandidateDimensionParser candidateDimensionParser;
private static final EmptyActivityConfiguration emptyConfiguration = new EmptyActivityConfiguration();
@Override
public ActivityConfiguration getActivityConfiguration(String doingUser, String processDefineId, String activityId) {
ActivityConfigEntity configEntity = activityConfigService.selectByProcessDefineIdAndActivityId(processDefineId, activityId);
if (configEntity == null) {
return emptyConfiguration;
}
return new ActivityConfiguration() {
@Override
public String getFormId() {
return configEntity.getFormId();
}
@Override
public boolean canClaim(Task task, String userId) {
return getCandidateInfo(task)
.stream()
.map(CandidateInfo::user)
.anyMatch(user -> user.getUser().getId().equals(userId));
}
@Override
@SuppressWarnings("all")
public List<CandidateInfo> getCandidateInfo(Task task) {
return Lazy.val(() -> {
DimensionContext context = new DimensionContext();
context.setCreatorId(doingUser);
context.setActivityId(activityId);
context.setProcessDefineId(processDefineId);
context.setTask(task);
CandidateDimension dimension = candidateDimensionParser
.parse(context, configEntity.getCandidateDimension());
return dimension.getCandidateUserIdList()
.stream()
.distinct()
.map(userId ->
Lazy.val(() -> new CandidateInfo() {
@Override
public Authentication user() {
return AuthenticationHolder.get(userId);
}
@Override
public PersonnelAuthentication person() {
return PersonnelAuthenticationHolder.getByUserId(userId);
}
}, CandidateInfo.class))
.collect(Collectors.toList());
}, List.class);
}
@Override
public TaskEventListener getTaskListener(String eventType) {
if (CollectionUtils.isEmpty(configEntity.getListeners())) {
return null;
}
return configEntity
.getListeners()
.stream()
.filter(config -> eventType.equals(config.getEventType()))
.map(ProcessConfigurationServiceImpl.this::<TaskEvent>createTaskEventListener)
.collect(Collectors.collectingAndThen(Collectors.toList(),
list -> event -> list.forEach(listener -> listener.accept(event))));
}
};
}
@SneakyThrows
protected <T> Consumer<T> createTaskEventListener(ListenerConfig listenerConfig) {
DynamicScriptEngine engine = DynamicScriptEngineFactory.getEngine(listenerConfig.getLanguage());
if (null != engine) {
String scriptId = DigestUtils.md5Hex(listenerConfig.getScript());
if (!engine.compiled(scriptId)) {
engine.compile(scriptId, listenerConfig.getScript());
}
return event -> {
Map<String, Object> context = new HashMap<>();
context.put("event", event);
ExecuteResult result = engine.execute(scriptId, context);
if (!result.isSuccess()) {
throw new BusinessException("workflow listener script execution failed: " + result.getMessage(), result.getException());
}
};
} else {
log.warn("unsupported script engine language: {}", listenerConfig.getLanguage());
}
return null;
}
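    /*
     * Illustrative sketch (not part of the original source): a persisted ListenerConfig for a
     * task event could, for example, carry a groovy script that reads the "event" variable put
     * into the script context above. Assuming the usual setters exist on ListenerConfig:
     *
     *   ListenerConfig config = new ListenerConfig();
     *   config.setEventType("complete");   // hypothetical event type value
     *   config.setLanguage("groovy");
     *   config.setScript("println event");
     *
     * createTaskEventListener(config) compiles the script once (keyed by the MD5 of its source)
     * and returns a Consumer that executes it with the fired event bound to "event".
     */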
@Override
public ProcessConfiguration getProcessConfiguration(String processDefineId) {
ProcessDefineConfigEntity entity = defineConfigService.selectByProcessDefineId(processDefineId);
if (entity == null) {
return emptyConfiguration;
}
return new ProcessConfiguration() {
@Override
public String getFormId() {
return entity.getFormId();
}
@Override
public void assertCanStartProcess(String userId, ProcessDefinition definition) {
if (!canStartProcess(userId, definition)) {
                    throw new AccessDenyException("not allowed to start this process");
}
}
@Override
public boolean canStartProcess(String userId, ProcessDefinition definition) {
if (StringUtils.isEmpty(entity.getPermissionDimension()) || "*".equals(entity.getPermissionDimension())) {
return true;
}
AuthenticationPredicate predicate = permissionDimensionParser.parse(entity.getPermissionDimension());
if (null != predicate) {
return predicate.test(AuthenticationHolder.get(userId));
}
return true;
}
@Override
public ProcessEventListener getProcessListener(String eventType) {
if (CollectionUtils.isEmpty(entity.getListeners())) {
return null;
}
return entity
.getListeners()
.stream()
.filter(config -> eventType.equals(config.getEventType()))
.map(ProcessConfigurationServiceImpl.this::<ProcessEvent>createTaskEventListener)
.collect(Collectors.collectingAndThen(Collectors.toList(),
list -> event -> list.forEach(listener -> listener.accept(event))));
}
};
}
private final static class EmptyActivityConfiguration implements ActivityConfiguration, ProcessConfiguration {
@Override
public String getFormId() {
return null;
}
@Override
public boolean canClaim(Task task, String userId) {
return false;
}
@Override
public List<CandidateInfo> getCandidateInfo(Task task) {
return new java.util.ArrayList<>();
}
@Override
public void assertCanStartProcess(String userId, ProcessDefinition definition) {
}
@Override
public boolean canStartProcess(String userId, ProcessDefinition definition) {
return true;
}
@Override
public ProcessEventListener getProcessListener(String eventType) {
return null;
}
@Override
public TaskEventListener getTaskListener(String eventType) {
return null;
}
}
}
|
package org.wso2.carbon.gateway.httploadbalancer.algorithm.simple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.gateway.httploadbalancer.algorithm.LoadBalancingAlgorithm;
import org.wso2.carbon.gateway.httploadbalancer.constants.LoadBalancerConstants;
import org.wso2.carbon.gateway.httploadbalancer.context.LoadBalancerConfigContext;
import org.wso2.carbon.gateway.httploadbalancer.outbound.LBOutboundEndpoint;
import org.wso2.carbon.messaging.CarbonCallback;
import org.wso2.carbon.messaging.CarbonMessage;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Implementation of LeastResponseTime.
* <p>
* All Endpoints are assumed to have equal weights.
*/
public class LeastResponseTime implements LoadBalancingAlgorithm, Simple {
private static final Logger log = LoggerFactory.getLogger(LeastResponseTime.class);
private final Object lock = new Object();
private List<LBOutboundEPLeastRT> lbOutboundEPLeastRTs = new ArrayList<>();
private Map<String, LBOutboundEPLeastRT> map;
private static final int WINDOW = 10;
private int windowTracker = 0;
private int index = 0;
    /**
     * Constructor.
     *
     * @param lbOutboundEndpoints list of all Outbound Endpoints to be load balanced.
     */
public LeastResponseTime(List<LBOutboundEndpoint> lbOutboundEndpoints) {
this.setLBOutboundEndpoints(lbOutboundEndpoints);
}
/**
* @return the name of implemented LB algorithm.
*/
@Override
public String getName() {
return LoadBalancerConstants.LEAST_RESPONSE_TIME;
}
/**
* @param lbOutboundEPs list of all Outbound Endpoints to be load balanced.
*/
@Override
public void setLBOutboundEndpoints(List<LBOutboundEndpoint> lbOutboundEPs) {
synchronized (this.lock) {
map = new ConcurrentHashMap<>();
for (LBOutboundEndpoint endpoint : lbOutboundEPs) {
this.lbOutboundEPLeastRTs.add(new LBOutboundEPLeastRT(endpoint));
}
}
}
/**
* @param lbOutboundEndpoint outboundEndpoint to be added to the existing list.
* <p>
* This method will be used to add an endpoint once it
* is back to healthy state.
* <p>
* Adding is different here. We have to get it from map,
* reset its properties and add it back to the list.
*/
@Override
public void addLBOutboundEndpoint(LBOutboundEndpoint lbOutboundEndpoint) {
synchronized (this.lock) {
if (map.containsKey(lbOutboundEndpoint.getName())) {
if (this.lbOutboundEPLeastRTs.contains(map.get(lbOutboundEndpoint.getName()))) {
log.error(lbOutboundEndpoint.getName() + " already exists in list..");
} else {
map.get(lbOutboundEndpoint.getName()).resetResponseTimeProperties(); //This is MUST.
this.lbOutboundEPLeastRTs.add(map.get(lbOutboundEndpoint.getName()));
}
} else {
log.error("Cannot add a new endpoint like this. Use setLBOutboundEndpoints method" +
" or Constructor..");
}
}
}
    /**
     * @param lbOutboundEndpoint outboundEndpoint to be removed from the existing list.
     * <p>
     * This method will be used to remove an unhealthy endpoint.
     * <p>
     * NOTE: for this algorithm we remove the endpoint from the list,
     * but not from the map.
     * <p>
     * We keep the map entry because the health check still needs it.
     */
@Override
public void removeLBOutboundEndpoint(LBOutboundEndpoint lbOutboundEndpoint) {
synchronized (this.lock) {
if (map.containsKey(lbOutboundEndpoint.getName())) {
if (this.lbOutboundEPLeastRTs.contains(map.get(lbOutboundEndpoint.getName()))) {
this.lbOutboundEPLeastRTs.remove(map.get(lbOutboundEndpoint.getName()));
} else {
log.error(lbOutboundEndpoint.getName() + " has already been removed from list..");
}
} else {
log.error(lbOutboundEndpoint.getName() + " is not in map..");
}
}
}
private void computeRatio() {
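        // Weighting idea: compute the (rounded-up) mean of all average response times, then give
        // each endpoint a share of the WINDOW that shrinks as its average response time grows
        // relative to that mean. Every endpoint keeps at least one slot per window.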
        int meanResponseTime = 0;
        for (LBOutboundEPLeastRT endPoint : this.lbOutboundEPLeastRTs) {
            synchronized (endPoint.getLock()) {
                meanResponseTime += endPoint.getAvgResponseTime();
            }
        }
        // Ceiling division so the mean is never rounded down.
        meanResponseTime = (meanResponseTime + this.lbOutboundEPLeastRTs.size() - 1) / this.lbOutboundEPLeastRTs.size();
        if (meanResponseTime == 0) {
            meanResponseTime = 1; // No response times recorded yet; avoid division by zero below.
        }
        for (LBOutboundEPLeastRT endPoint : this.lbOutboundEPLeastRTs) {
            synchronized (endPoint.getLock()) {
                // Multiply before dividing so the integer arithmetic keeps the proportion
                // instead of collapsing to 0 or 100.
                endPoint.setPercentage(100 - ((endPoint.getAvgResponseTime() * 100) / meanResponseTime));
                if (endPoint.getPercentage() > 0) {
                    endPoint.setMaxRequestsPerWindow(Math.max(1, (endPoint.getPercentage() * WINDOW) / 100));
                } else {
                    endPoint.setMaxRequestsPerWindow(1);
                }
endPoint.setCurrentRequests(0); //Resetting is MUST.
if (log.isDebugEnabled()) {
log.debug(endPoint.getName() + " RT : " + endPoint.getAvgResponseTime() +
" Curr : " + endPoint.getCurrentRequests() + " Max : "
+ endPoint.getMaxRequestsPerWindow());
}
}
}
}
    /**
     * Advances the index used to pick the next LBOutboundEndpoint.
     * This method is only called while holding the lock, so no extra synchronization is needed.
     */
private void incrementIndex() {
this.index++;
this.index %= this.lbOutboundEPLeastRTs.size();
}
public void incrementWindowTracker() {
this.windowTracker++;
}
private LBOutboundEPLeastRT getNextEndpoint() {
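        // Round-robin scan starting from the current index for an endpoint that still has quota
        // left in the current window; if a full pass finds none (which should not happen), fall
        // back to whatever endpoint the index currently points at.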
LBOutboundEPLeastRT endPoint = null;
int counter = 0;
while (true) {
if (this.lbOutboundEPLeastRTs.get(this.index).getCurrentRequests() <
this.lbOutboundEPLeastRTs.get(this.index).getMaxRequestsPerWindow()) {
endPoint = this.lbOutboundEPLeastRTs.get(this.index);
break;
} else {
incrementIndex();
}
if (counter > lbOutboundEPLeastRTs.size()) { // This case will never occur. Just for safety.
endPoint = this.lbOutboundEPLeastRTs.get(this.index);
break;
}
counter++;
}
incrementIndex();
return endPoint;
}
/**
* @param cMsg Carbon Message has all headers required to make decision.
* @param context LoadBalancerConfigContext.
* @return the next LBOutboundEndpoint according to implemented LB algorithm.
*/
@Override
public LBOutboundEndpoint getNextLBOutboundEndpoint(CarbonMessage cMsg, LoadBalancerConfigContext context) {
LBOutboundEndpoint endPoint = null;
synchronized (this.lock) {
if (this.lbOutboundEPLeastRTs != null && this.lbOutboundEPLeastRTs.size() > 0) {
if (this.lbOutboundEPLeastRTs.size() > 1 && this.windowTracker >= WINDOW) {
computeRatio();
this.windowTracker = 0;
}
                // It is okay to do round robin for the first few requests, until the WINDOW size is reached.
// After that it'll be proper LeastResponseTime based load distribution.
LBOutboundEPLeastRT outboundEPLeastRT = this.getNextEndpoint();
endPoint = outboundEPLeastRT.getLbOutboundEndpoint();
if (log.isDebugEnabled()) {
log.debug(outboundEPLeastRT.getName() + " RT : " + outboundEPLeastRT.getAvgResponseTime() +
" Curr : " + outboundEPLeastRT.getCurrentRequests() + " Max : "
+ outboundEPLeastRT.getMaxRequestsPerWindow());
}
} else {
log.error("No OutboundEndpoint is available..");
}
}
return endPoint;
}
/**
* Each implementation of LB algorithm will have certain values pertained to it.
* (Eg: Round robin keeps track of index of OutboundEndpoint).
* Implementation of this method will resetHealthPropertiesToDefault them.
*/
@Override
public void reset() {
synchronized (this.lock) {
if (this.lbOutboundEPLeastRTs.size() > 0 && this.index >= this.lbOutboundEPLeastRTs.size()) {
this.index %= this.lbOutboundEPLeastRTs.size();
} else if (this.lbOutboundEPLeastRTs.size() == 0) {
this.index = 0;
}
}
}
public void setAvgResponseTime(LBOutboundEndpoint lbOutboundEndpoint, int newTime) {
map.get(lbOutboundEndpoint.getName()).computeAndSetAvgResponseTime(newTime);
}
public boolean receive(CarbonMessage carbonMessage, CarbonCallback carbonCallback,
LoadBalancerConfigContext context,
LBOutboundEndpoint lbOutboundEndpoint) throws Exception {
map.get(lbOutboundEndpoint.getName()).receive(carbonMessage, carbonCallback, context);
return false;
}
/**
* @return Object used for locking.
*/
@Override
public Object getLock() {
return this.lock;
}
    /**
     * We need a few additional attributes for the LeastResponseTime algorithm.
     * <p>
     * So we are creating an inner class specifically for this purpose.
     */
private class LBOutboundEPLeastRT {
private LBOutboundEndpoint lbOutboundEndpoint;
/**
* These attributes are for LeastResponseTime Algorithm.
*/
private int avgResponseTime = 0; // This stores running average.
private int percentage = 100;
private int maxRequestsPerWindow = WINDOW;
private int currentRequests = 0; //This stores current no of requests in window.
LBOutboundEPLeastRT(LBOutboundEndpoint lbOutboundEndpoint) {
this.lbOutboundEndpoint = lbOutboundEndpoint;
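            // Register this wrapper in the outer map so that health-check handling and
            // response-time updates can look it up later by endpoint name.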
map.put(this.lbOutboundEndpoint.getName(), this);
}
public String getName() {
return this.lbOutboundEndpoint.getName();
}
public LBOutboundEndpoint getLbOutboundEndpoint() {
return this.lbOutboundEndpoint;
}
void setPercentage(int percentage) {
this.percentage = percentage;
}
int getPercentage() {
return this.percentage;
}
int getCurrentRequests() {
return this.currentRequests;
}
void setCurrentRequests(int currentRequests) {
this.currentRequests = currentRequests;
}
int getMaxRequestsPerWindow() {
return maxRequestsPerWindow;
}
void setMaxRequestsPerWindow(int maxRequestsPerWindow) {
this.maxRequestsPerWindow = maxRequestsPerWindow;
}
private void incrementCurrentRequests() {
this.currentRequests++;
}
        /**
         * Updates the running average response time of this endpoint.
         *
         * @param newTime most recent response time observed for the endpoint.
         */
void computeAndSetAvgResponseTime(int newTime) {
if (this.avgResponseTime != 0) { //For first time we should not divide by 2.
if ((this.avgResponseTime + newTime) % 2 == 0) {
this.avgResponseTime = (this.avgResponseTime + newTime) / 2; // Dividing by 2.
} else {
this.avgResponseTime = (((this.avgResponseTime + newTime) / 2) + 1);
}
} else {
this.avgResponseTime = newTime;
}
}
int getAvgResponseTime() {
return this.avgResponseTime;
}
        /**
         * NOTE: When this algorithm mode is chosen, all requests are sent through this method only,
         * so currentRequests is incremented in both cases
         * (i.e. for an endpoint chosen by persistence and for an endpoint chosen by the algorithm).
         *
         * @param carbonMessage  message to be forwarded to the endpoint.
         * @param carbonCallback callback to be invoked with the response.
         * @param context        LoadBalancerConfigContext.
         * @return always false.
         * @throws Exception if forwarding the message to the endpoint fails.
         */
boolean receive(CarbonMessage carbonMessage, CarbonCallback carbonCallback,
LoadBalancerConfigContext context) throws Exception {
synchronized (lock) {
this.incrementCurrentRequests(); // Increments currentRequests for this LBOutboundEPLeastRT
incrementWindowTracker(); // To keep track of no requests elapsed for this current window
}
this.lbOutboundEndpoint.receive(carbonMessage, carbonCallback, context);
return false;
}
void resetResponseTimeProperties() {
avgResponseTime = 0;
percentage = 100;
maxRequestsPerWindow = WINDOW;
currentRequests = 0;
}
public Object getLock() {
return this.lbOutboundEndpoint.getLock();
}
}
}
|
package com.concurrent_ruby.ext;
import java.io.IOException;
import org.jruby.Ruby;
import org.jruby.RubyClass;
import org.jruby.RubyModule;
import org.jruby.RubyObject;
import org.jruby.anno.JRubyClass;
import org.jruby.anno.JRubyMethod;
import org.jruby.runtime.ObjectAllocator;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.runtime.load.Library;
import org.jruby.runtime.Block;
import org.jruby.runtime.Visibility;
import org.jruby.runtime.ThreadContext;
import org.jruby.util.unsafe.UnsafeHolder;
public class SynchronizationLibrary implements Library {
private static final ObjectAllocator JRUBY_OBJECT_ALLOCATOR = new ObjectAllocator() {
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
return new JRubyObject(runtime, klazz);
}
};
private static final ObjectAllocator OBJECT_ALLOCATOR = new ObjectAllocator() {
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
return new Object(runtime, klazz);
}
};
private static final ObjectAllocator ABSTRACT_LOCKABLE_OBJECT_ALLOCATOR = new ObjectAllocator() {
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
return new AbstractLockableObject(runtime, klazz);
}
};
private static final ObjectAllocator JRUBY_LOCKABLE_OBJECT_ALLOCATOR = new ObjectAllocator() {
public IRubyObject allocate(Ruby runtime, RubyClass klazz) {
return new JRubyLockableObject(runtime, klazz);
}
};
public void load(Ruby runtime, boolean wrap) throws IOException {
RubyModule synchronizationModule = runtime.
defineModule("Concurrent").
defineModuleUnder("Synchronization");
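        // Each call below defines the class named by the second String argument under
        // Concurrent::Synchronization, using the class named by the first String argument
        // (which must already exist) as its superclass:
        // AbstractObject -> JRubyObject -> Object -> AbstractLockableObject -> JRubyLockableObject.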
defineClass(runtime, synchronizationModule, "AbstractObject", "JRubyObject",
JRubyObject.class, JRUBY_OBJECT_ALLOCATOR);
defineClass(runtime, synchronizationModule, "JRubyObject", "Object",
Object.class, OBJECT_ALLOCATOR);
defineClass(runtime, synchronizationModule, "Object", "AbstractLockableObject",
AbstractLockableObject.class, ABSTRACT_LOCKABLE_OBJECT_ALLOCATOR);
defineClass(runtime, synchronizationModule, "AbstractLockableObject", "JRubyLockableObject",
JRubyLockableObject.class, JRUBY_LOCKABLE_OBJECT_ALLOCATOR);
}
private RubyClass defineClass(Ruby runtime, RubyModule namespace, String parentName, String name,
Class javaImplementation, ObjectAllocator allocator) {
final RubyClass parentClass = namespace.getClass(parentName);
if (parentClass == null) {
System.out.println("not found " + parentName);
throw runtime.newRuntimeError(namespace.toString() + "::" + parentName + " is missing");
}
final RubyClass newClass = namespace.defineClassUnder(name, parentClass, allocator);
newClass.defineAnnotatedMethods(javaImplementation);
return newClass;
}
// Facts:
    // - all ivar reads are done without any synchronisation or fences, see
// https://github.com/jruby/jruby/blob/master/core/src/main/java/org/jruby/runtime/ivars/VariableAccessor.java#L110-110
// - writes depend on UnsafeHolder.U, null -> SynchronizedVariableAccessor, !null -> StampedVariableAccessor
// SynchronizedVariableAccessor wraps with synchronized block, StampedVariableAccessor uses fullFence or
// volatilePut
@JRubyClass(name = "JRubyObject", parent = "AbstractObject")
public static class JRubyObject extends RubyObject {
private static volatile ThreadContext threadContext = null;
public JRubyObject(Ruby runtime, RubyClass metaClass) {
super(runtime, metaClass);
}
@JRubyMethod
public IRubyObject initialize(ThreadContext context) {
return this;
}
@JRubyMethod(name = "full_memory_barrier", visibility = Visibility.PRIVATE)
public IRubyObject fullMemoryBarrier(ThreadContext context) {
// Prevent reordering of ivar writes with publication of this instance
if (UnsafeHolder.U == null || !UnsafeHolder.SUPPORTS_FENCES) {
                // Assuming the following volatile read and write are not eliminated, this simulates a fullFence.
                // If they are eliminated, it will cause problems only on non-x86 platforms.
final ThreadContext oldContext = threadContext;
threadContext = context;
} else {
UnsafeHolder.fullFence();
}
return context.nil;
}
@JRubyMethod(name = "instance_variable_get_volatile", visibility = Visibility.PROTECTED)
public IRubyObject instanceVariableGetVolatile(ThreadContext context, IRubyObject name) {
            // Ensure we see the latest value with loadFence
if (UnsafeHolder.U == null || !UnsafeHolder.SUPPORTS_FENCES) {
// piggybacking on volatile read, simulating loadFence
final ThreadContext oldContext = threadContext;
return instance_variable_get(context, name);
} else {
UnsafeHolder.loadFence();
return instance_variable_get(context, name);
}
}
@JRubyMethod(name = "instance_variable_set_volatile", visibility = Visibility.PROTECTED)
        public IRubyObject instanceVariableSetVolatile(ThreadContext context, IRubyObject name, IRubyObject value) {
// Ensure we make last update visible
if (UnsafeHolder.U == null || !UnsafeHolder.SUPPORTS_FENCES) {
// piggybacking on volatile write, simulating storeFence
final IRubyObject result = instance_variable_set(name, value);
threadContext = context;
return result;
} else {
// JRuby uses StampedVariableAccessor which calls fullFence
// so no additional steps needed.
// See https://github.com/jruby/jruby/blob/master/core/src/main/java/org/jruby/runtime/ivars/StampedVariableAccessor.java#L151-L159
return instance_variable_set(name, value);
}
}
}
@JRubyClass(name = "Object", parent = "JRubyObject")
public static class Object extends JRubyObject {
public Object(Ruby runtime, RubyClass metaClass) {
super(runtime, metaClass);
}
}
@JRubyClass(name = "AbstractLockableObject", parent = "Object")
public static class AbstractLockableObject extends Object {
public AbstractLockableObject(Ruby runtime, RubyClass metaClass) {
super(runtime, metaClass);
}
}
@JRubyClass(name = "JRubyLockableObject", parent = "AbstractLockableObject")
public static class JRubyLockableObject extends JRubyObject {
public JRubyLockableObject(Ruby runtime, RubyClass metaClass) {
super(runtime, metaClass);
}
@JRubyMethod(name = "synchronize", visibility = Visibility.PROTECTED)
public IRubyObject rubySynchronize(ThreadContext context, Block block) {
synchronized (this) {
return block.yield(context, null);
}
}
@JRubyMethod(name = "ns_wait", optional = 1, visibility = Visibility.PROTECTED)
public IRubyObject nsWait(ThreadContext context, IRubyObject[] args) {
Ruby runtime = context.runtime;
if (args.length > 1) {
throw runtime.newArgumentError(args.length, 1);
}
Double timeout = null;
if (args.length > 0 && !args[0].isNil()) {
timeout = args[0].convertToFloat().getDoubleValue();
if (timeout < 0) {
throw runtime.newArgumentError("time interval must be positive");
}
}
if (Thread.interrupted()) {
throw runtime.newConcurrencyError("thread interrupted");
}
boolean success = false;
try {
success = context.getThread().wait_timeout(this, timeout);
} catch (InterruptedException ie) {
throw runtime.newConcurrencyError(ie.getLocalizedMessage());
} finally {
// An interrupt or timeout may have caused us to miss
// a notify that we consumed, so do another notify in
// case someone else is available to pick it up.
if (!success) {
this.notify();
}
}
return this;
}
@JRubyMethod(name = "ns_signal", visibility = Visibility.PROTECTED)
public IRubyObject nsSignal(ThreadContext context) {
notify();
return this;
}
@JRubyMethod(name = "ns_broadcast", visibility = Visibility.PROTECTED)
public IRubyObject nsBroadcast(ThreadContext context) {
notifyAll();
return this;
}
}
}
|
package io.subutai.core.hubmanager.impl.environment.state.build;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import io.subutai.common.command.CommandCallback;
import io.subutai.common.command.CommandException;
import io.subutai.common.command.CommandResult;
import io.subutai.common.command.RequestBuilder;
import io.subutai.common.command.Response;
import io.subutai.common.peer.Host;
import io.subutai.common.peer.HostNotFoundException;
import io.subutai.core.hubmanager.api.RestClient;
import io.subutai.core.hubmanager.api.exception.HubManagerException;
import io.subutai.core.hubmanager.impl.environment.state.Context;
import io.subutai.core.hubmanager.impl.environment.state.StateHandler;
import io.subutai.hub.share.dto.ansible.AnsibleDto;
import io.subutai.hub.share.dto.ansible.Group;
import io.subutai.hub.share.dto.environment.EnvironmentPeerDto;
public class ConfigureEnvironmentStateHandler extends StateHandler
{
private static final String ENV_APPS_URL = "/rest/v1/environments/%s/apps";
private static final String TMP_DIR = "/root/";
private long commandTimeout = 5L;
public final RestClient restClient;
public ConfigureEnvironmentStateHandler( Context ctx )
{
super( ctx, "Configure environment" );
restClient = ctx.restClient;
}
@Override
protected Object doHandle( EnvironmentPeerDto peerDto ) throws HubManagerException
{
logStart();
AnsibleDto ansibleDto = peerDto.getAnsibleDto();
if ( ansibleDto != null )
{
startConfiguration( ansibleDto, peerDto );
}
logEnd();
return peerDto;
}
private void startConfiguration( AnsibleDto ansibleDto, EnvironmentPeerDto peerDto )
{
if ( ansibleDto.getCommandTimeout() != null )
{
commandTimeout = ansibleDto.getCommandTimeout();
}
prepareHostsFile( ansibleDto.getAnsibleContainerId(), ansibleDto.getGroups() );
copyRepoUnpack( ansibleDto.getAnsibleContainerId(), ansibleDto.getRepoLink() );
runAnsibleScript( ansibleDto, peerDto.getEnvironmentInfo().getId() );
}
private void runAnsibleScript( AnsibleDto ansibleDto, String envSubutaiId )
{
final String containerId = ansibleDto.getAnsibleContainerId();
final String dirLocation = getDirLocation( ansibleDto.getRepoLink() );
final String mainAnsibleScript = ansibleDto.getAnsibleRootFile();
String extraVars = ansibleDto.getVars();
if ( StringUtils.isEmpty( ansibleDto.getVars() ) )
{
extraVars = "{}";
}
String cmd =
String.format( "cd %s; ansible-playbook %s --extra-vars %s", TMP_DIR + dirLocation, mainAnsibleScript,
extraVars );
try
{
runCmdAsync( containerId, cmd, envSubutaiId );
}
catch ( Exception e )
{
log.error( "Error configuring environment", e );
}
}
private String getDirLocation( String repoLink )
{
        // Use literal replacement; replaceAll() would treat "." as a regex wildcard.
        repoLink = repoLink.replace( "https://github.com/", "" );
        repoLink = repoLink.replace( "/archive/", "-" );
        repoLink = repoLink.replace( ".zip", "" );
return repoLink.split( "/" )[1];
}
private void copyRepoUnpack( final String containerId, final String repoLink )
{
try
{
            int count = 1;
            boolean reachable = isReachable( "www.github.com", containerId );
            while ( !reachable && count < 5 ) // give up after the 5th try
            {
                log.info( "No internet connection on container host {}, retrying", containerId );
                TimeUnit.SECONDS.sleep( count * 2 );
                reachable = isReachable( "www.github.com", containerId );
                count++;
            }
String cmd = String.format( "cd %s; bash get_unzip.sh %s", TMP_DIR, repoLink );
runCmd( containerId, cmd );
}
catch ( Exception e )
{
log.error( "Error configuring environment", e );
}
}
private boolean isReachable( String address, String containerId ) throws Exception
{
Host host = ctx.localPeer.getContainerHostById( containerId );
CommandResult result =
host.execute( new RequestBuilder( "ping" ).withCmdArgs( "-w", "10", "-c", "3", address ) );
return result.hasSucceeded();
}
private void prepareHostsFile( final String containerId, Set<io.subutai.hub.share.dto.ansible.Group> groups )
{
String groupName = "";
String inventoryLine = "";
try
{
for ( Group group : groups )
{
groupName = String.format( "[%s]", group.getName() );
runCmd( containerId,
String.format( "grep -q -F '%s' /etc/ansible/hosts || echo '%s' >> /etc/ansible/hosts",
groupName, groupName ) );
for ( io.subutai.hub.share.dto.ansible.Host host : group.getHosts() )
{
inventoryLine = format( host ).trim();
runCmd( containerId,
String.format( "grep -q -F '%s' /etc/ansible/hosts || echo '%s' >> /etc/ansible/hosts",
inventoryLine, inventoryLine ) );
}
}
}
catch ( Exception e )
{
log.error( "Error configuring environment", e );
}
}
private String runCmd( String containerId, String cmd ) throws HostNotFoundException, CommandException
{
Host host = ctx.localPeer.getContainerHostById( containerId );
RequestBuilder rb =
new RequestBuilder( cmd ).withTimeout( ( int ) TimeUnit.MINUTES.toSeconds( commandTimeout ) );
CommandResult result = host.execute( rb );
String msg = result.getStdOut();
if ( !result.hasSucceeded() )
{
msg += " " + result.getStdErr();
log.error( "Error configuring environment: {}", result );
}
return msg;
}
private static String format( io.subutai.hub.share.dto.ansible.Host host )
{
String inventoryLineFormat = "%s ansible_user=%s template=%s ansible_ssh_host=%s";
        // If the template provides a python3 interpreter path, use it; otherwise Ansible defaults to python2.
if ( host.getPythonPath() != null )
{
inventoryLineFormat += " ansible_python_interpreter=" + host.getPythonPath();
}
return String.format( inventoryLineFormat, host.getHostname(), host.getAnsibleUser(), host.getTemplateName(),
host.getIp() );
}
private void runCmdAsync( String containerId, String cmd, String envSubutaiId )
throws HostNotFoundException, CommandException
{
Host host = ctx.localPeer.getContainerHostById( containerId );
RequestBuilder rb =
new RequestBuilder( cmd ).withTimeout( ( int ) TimeUnit.MINUTES.toSeconds( commandTimeout ) );
AnsibleCallback ansibleCallback = new AnsibleCallback( envSubutaiId );
host.executeAsync( rb, ansibleCallback );
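        // Block here until the callback reports completion; progress and the final status of the
        // Ansible run are pushed to Hub from AnsibleCallback.onResponse().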
while ( !ansibleCallback.hasCompleted() )
{
try
{
Thread.sleep( 5000 );
}
catch ( InterruptedException e )
{
log.error( e.getMessage() );
}
}
}
private class AnsibleCallback implements CommandCallback
{
final String envSubutaiId;
private boolean hasCompleted = false;
private List<Integer> cache = new ArrayList<>();
AnsibleCallback( String envSubutaiId )
{
this.envSubutaiId = envSubutaiId;
}
@Override
public void onResponse( final Response response, final CommandResult commandResult )
{
if ( cache.contains( response.getResponseNumber() ) )
{
return;
}
else
{
cache.add( response.getResponseNumber() );
}
AnsibleDto ansibleDto = new AnsibleDto();
ansibleDto.setState( AnsibleDto.State.IN_PROGRESS );
if ( commandResult.hasCompleted() )
{
if ( commandResult.getExitCode() != 0 )
{
ansibleDto.setState( AnsibleDto.State.FAILED );
ansibleDto.setLogs( commandResult.getStdErr() + commandResult.getStdOut() );
}
else
{
ansibleDto.setState( AnsibleDto.State.SUCCESS );
ansibleDto.setLogs( commandResult.getStdOut() );
cache.clear();
}
hasCompleted = commandResult.hasCompleted();
}
else
{
ansibleDto.setLogs( commandResult.getStdOut() );
}
String path = String.format( ENV_APPS_URL, this.envSubutaiId );
restClient.post( path, ansibleDto );
}
public boolean hasCompleted()
{
return hasCompleted;
}
}
}
|
package jsaf.intf.windows.identity;
import java.util.Collection;
import java.util.NoSuchElementException;
import java.util.regex.Pattern;
import jsaf.identity.IdentityException;
import jsaf.intf.util.ISearchable;
import jsaf.intf.util.ISearchable.Condition;
import jsaf.intf.util.ILoggable;
import jsaf.provider.windows.identity.SID;
/**
* Representation of a Windows user/group store.
*
* @author David A. Solin
* @version %I% %G%
* @since 1.0
*/
public interface IDirectory extends ILoggable {
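    /**
     * Returns the SID corresponding to the specified NetBIOS name.
     *
     * @throws NoSuchElementException if the name cannot be resolved to a SID
     */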
SID lookupSID(String netbiosName) throws IllegalArgumentException, NoSuchElementException, IdentityException;
/**
* Returns a Principal (User or Group) given a SID.
*
* @throws NoSuchElementException if no principal exists for the specified SID value
*
* @since 1.5.0
*/
IPrincipal getPrincipal(SID sid) throws NoSuchElementException, IdentityException;
/**
* Returns the SID for the local machine.
*
* @since 1.5.0
*/
SID getComputerSid() throws IdentityException;
/**
* Resolve all members of the specified group, including sub-groups and their members, recursively.
*
* @since 1.5.0
*/
Collection<IPrincipal> getAllMembers(IGroup group) throws IdentityException;
/**
* Does the LSA (Local Security Authority) recognize this SID?
*
* @since 1.5.0
*/
boolean isLocal(SID sid) throws IdentityException;
/**
* Access an ISearchable for the Local Security Authority.
*
* @since 1.5.0
*/
ISearchable<IPrincipal> getSearcher() throws IdentityException;
/**
* A search condition for retrieving all the service SIDs.
*
* @since 1.5.0
*/
DirCondition SERVICES = new DirCondition(DirCondition.FIELD_SID, Condition.TYPE_PATTERN, Pattern.compile("^S-1-5-80-"));
/**
* A search condition indicating that fully-expanded results (i.e., including pre-fetched group membership data) are preferred.
*
* @since 1.5.0
*/
DirCondition EXPAND = new DirCondition(DirCondition.EXPANSION_CONTROL, Condition.TYPE_EQUALITY, Boolean.TRUE);
/**
* A search condition indicating that full data retrieval may be deferred in the interests of query performance.
*
* @since 1.5.0
*/
DirCondition FAST = new DirCondition(DirCondition.EXPANSION_CONTROL, Condition.TYPE_EQUALITY, Boolean.FALSE);
/**
* Base ISearchable.Condition subclass for IDirectory search conditions.
*
* @since 1.5.0
*/
public static final class DirCondition extends Condition {
/**
* Create a Condition for searching a windows IDirectory.
*/
public DirCondition(int field, int type, Object arg) {
super(field, type, arg);
}
/**
* Condition field for a SID pattern.
*
* Supports the following condition types:
* TYPE_PATTERN - search for a SID matching the java.util.regex.Pattern value
* TYPE_ANY - retrieve multiple IPrincipals given a java.util.Collection<SID>
*
* @since 1.5.0
*/
public static final int FIELD_SID = 1000;
/**
* Condition field for a principal name (as in, the String returned by IPrincipal.getName()).
*
* Supports the following condition types:
* TYPE_PATTERN - search for a principal name matching the java.util.regex.Pattern value
* TYPE_ANY - retrieve multiple IPrincipals given a java.util.Collection<String>
*
* @since 1.5.0
*/
public static final int FIELD_NAME = 1001;
/**
* On certain machines, such as domain controllers, there may be a great many users and groups
* defined in the Local Security Authority. In such cases, it can be very time-consuming to resolve
* group membership information for every principal in the store. Therefore, it is assumed that
* implementations of ISearchable<IPrincipal> may include a delayed-expansion feature, where group
* membership data for a principal is queried only when absolutely required.
*
* This search condition field makes it possible for an API client to specify the preferred behavior
* of such a feature with respect to the results of any particular search. When this field is not
* included in search conditions, the behavior of an implementation with respect to delayed expansion
* is not defined. In fact, there is no requirement that delayed expansion be implemented at all.
*
* Supports the following condition type:
* TYPE_EQUALITY - Use with argument Boolean.TRUE to return only expanded IPrincipals;
* use with argument Boolean.FALSE to return un-expanded IPrincipals
*
* @since 1.5.0
*/
public static final int EXPANSION_CONTROL = 2000;
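        /*
         * Illustrative sketch (assumed client code, not part of this interface): a caller that
         * prefers fast, un-expanded results for all service principals might combine the
         * predefined conditions when querying the directory's searcher, e.g.
         *
         *   ISearchable<IPrincipal> searcher = directory.getSearcher();
         *   Collection<IPrincipal> services =
         *       searcher.search(java.util.Arrays.asList(IDirectory.SERVICES, IDirectory.FAST));
         *
         * The exact ISearchable.search(...) signature is assumed here for illustration only.
         */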
}
}
|
package org.splevo.jamopp.vpm.analyzer.programdependency.references;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.log4j.Logger;
import org.emftext.language.java.commons.Commentable;
import org.splevo.jamopp.util.JaMoPPElementUtil;
import org.splevo.vpm.analyzer.VPMAnalyzerUtil;
public class RobillardReferenceSelector implements ReferenceSelector {
@SuppressWarnings("unused")
private static Logger logger = Logger.getLogger(RobillardReferenceSelector.class);
private RobillardReferenceSelectorSwitch selectorSwitch = new RobillardReferenceSelectorSwitch();
@Override
public List<Commentable> getReferencedElements(Commentable commentable) {
if (VPMAnalyzerUtil.isNullOrProxy(commentable)) {
return new ArrayList<Commentable>();
}
        List<Commentable> referencedElements = selectorSwitch.doSwitch(commentable);
        // Use an explicit iterator for removal; removing from the list inside a for-each
        // loop would throw a ConcurrentModificationException.
        for (Iterator<Commentable> iterator = referencedElements.iterator(); iterator.hasNext();) {
            if (VPMAnalyzerUtil.isNullOrProxy(iterator.next())) {
                iterator.remove();
            }
        }
return referencedElements;
}
/**
     * Ignore all references not represented in the logic defined by Robillard et al.
*
* {@inheritDoc}
*/
@Override
public boolean ignoreReference(Commentable source1, Commentable source2, Commentable target) {
if (source1 == source2) {
return true;
}
if (JaMoPPElementUtil.isParentOf(source1, target)) {
return false;
}
if (JaMoPPElementUtil.isParentOf(source2, target)) {
return false;
}
return false;
}
}
|
package org.opencb.opencga.storage.hadoop.variant.pending;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.BufferedMutatorParams;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.opencb.opencga.storage.hadoop.utils.AbstractHBaseDataWriter;
import org.opencb.opencga.storage.hadoop.utils.HBaseManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.*;
public class PendingVariantsDBCleaner extends AbstractHBaseDataWriter<byte[], Delete> {
public static final int MAX_PENDING_REGIONS_TO_COMPACT = 2;
private final PendingVariantsDescriptor descriptor;
private final Deque<HRegionLocation> regions = new LinkedList<>();
private RegionLocator regionLocator;
private final Logger logger = LoggerFactory.getLogger(PendingVariantsDBCleaner.class);
public PendingVariantsDBCleaner(HBaseManager hBaseManager, String tableName, PendingVariantsDescriptor descriptor) {
super(hBaseManager, tableName);
this.descriptor = descriptor;
descriptor.checkValidPendingTableName(tableName);
}
@Override
public boolean pre() {
try {
descriptor.createTableIfNeeded(tableName, hBaseManager);
regionLocator = hBaseManager.getConnection().getRegionLocator(TableName.valueOf(tableName));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return true;
}
@Override
protected BufferedMutatorParams buildBufferedMutatorParams() {
        // Set the write buffer size to 10GB to ensure the buffer is only flushed manually.
return super.buildBufferedMutatorParams().writeBufferSize(10L * 1024L * 1024L * 1024L);
}
@Override
protected List<Delete> convert(List<byte[]> batch) throws IOException {
List<Delete> deletes = new ArrayList<>(batch.size());
for (byte[] rowKey : batch) {
Delete delete = new Delete(rowKey);
deletes.add(delete);
}
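        // Sample every 50th row key to collect the regions touched by this batch; those regions
        // are major-compacted later (see compactRegions) so the deleted cells are physically removed.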
for (int i = 0; i < batch.size(); i += 50) {
HRegionLocation region = regionLocator.getRegionLocation(batch.get(i));
if (!regions.contains(region)) {
regions.add(region);
}
}
while (regions.size() > MAX_PENDING_REGIONS_TO_COMPACT) {
// If the regions list contains more than X elements, start running major_compacts.
compactRegions(Collections.singletonList(regions.pollFirst()));
}
return deletes;
}
@Override
public boolean post() {
super.post();
compactRegions(new ArrayList<>(regions));
return true;
}
/**
* Major compact given regions.
*
* @param regions List of regions to compact
*/
private void compactRegions(List<HRegionLocation> regions) {
try (Admin admin = hBaseManager.getConnection().getAdmin()) {
for (HRegionLocation region : regions) {
try {
logger.info("Major compact region " + region.toString());
admin.majorCompactRegion(region.getRegionInfo().getRegionName());
} catch (Exception e) {
// Do not propagate exceptions. This is an optional step that might fail in some scenarios,
// like if the region changes (e.g. split)
logger.warn("Error compacting region: " + e.getMessage());
}
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
|
package org.xwiki.security.authorization.internal;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.security.authorization.AbstractWikiTestCase;
import org.xwiki.security.authorization.AuthorizationManager;
import org.xwiki.security.authorization.Right;
import org.xwiki.security.authorization.testwikibuilding.LegacyTestWiki;
import com.xpn.xwiki.XWikiContext;
public class AuthorizationManagerTest extends AbstractWikiTestCase
{
private AuthorizationManager authorizationManager;
@Override
@Before
public void setUp() throws Exception
{
super.setUp();
this.authorizationManager = getComponentManager().getInstance(AuthorizationManager.class);
}
protected void assertAccessTrue(String message, Right right, DocumentReference userReference,
EntityReference entityReference, XWikiContext ctx) throws Exception
{
setContext(ctx);
Assert.assertTrue(message, this.authorizationManager.hasAccess(right, userReference, entityReference));
}
protected void assertAccessFalse(String message, Right right, DocumentReference userReference,
EntityReference entityReference, XWikiContext ctx) throws Exception
{
setContext(ctx);
Assert.assertFalse(message, this.authorizationManager.hasAccess(right, userReference, entityReference));
}
// Tests
@Test
public void testGlobalUserInEmptySubWiki() throws Exception
{
LegacyTestWiki testWiki = new LegacyTestWiki(getMockery(), getComponentManager(), "emptySubWiki.xml", false);
XWikiContext ctx = testWiki.getXWikiContext();
ctx.setDatabase("wiki2");
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.VIEW,
new DocumentReference("wiki", "XWiki", "user"), new DocumentReference("wiki2", "Space", "Page"), ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.EDIT,
new DocumentReference("wiki", "XWiki", "user"), new DocumentReference("wiki2", "Space", "Page"), ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.COMMENT,
new DocumentReference("wiki", "XWiki", "user"), new DocumentReference("wiki2", "Space", "Page"), ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.DELETE,
new DocumentReference("wiki", "XWiki", "user"), new DocumentReference("wiki2", "Space", "Page"), ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.REGISTER,
new DocumentReference("wiki", "XWiki", "user"), new DocumentReference("wiki2", "Space", "Page"), ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.ADMIN,
new DocumentReference("wiki", "XWiki", "user"), new DocumentReference("wiki2", "Space", "Page"), ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.PROGRAM,
new DocumentReference("wiki", "XWiki", "user"), new DocumentReference("wiki2", "Space", "Page"), ctx);
}
@Test
public void testPublicAccess() throws Exception
{
LegacyTestWiki testWiki = new LegacyTestWiki(getMockery(), getComponentManager(), "empty.xml", false);
XWikiContext ctx = testWiki.getXWikiContext();
ctx.setDatabase("wiki");
DocumentReference user = null;
EntityReference document = new DocumentReference("wiki", "Space", "Page");
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.LOGIN, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.VIEW, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.EDIT, user,
document, ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.DELETE, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.REGISTER, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.COMMENT, user,
document, ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.PROGRAM, user,
document, ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.ADMIN, user,
document, ctx);
}
@Test
public void testPublicAccessOnTopLevel() throws Exception
{
LegacyTestWiki testWiki = new LegacyTestWiki(getMockery(), getComponentManager(), "empty.xml", false);
XWikiContext ctx = testWiki.getXWikiContext();
ctx.setDatabase("wiki");
DocumentReference user = null;
EntityReference document = null;
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.LOGIN, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.VIEW, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.EDIT, user,
document, ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.DELETE, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.REGISTER, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.COMMENT, user,
document, ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.PROGRAM, user,
document, ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.ADMIN, user,
document, ctx);
}
@Test
public void testRightOnTopLevel() throws Exception
{
LegacyTestWiki testWiki = new LegacyTestWiki(getMockery(), getComponentManager(), "empty.xml", false);
XWikiContext ctx = testWiki.getXWikiContext();
ctx.setDatabase("wiki");
DocumentReference user = new DocumentReference("wiki", "XWiki", "user");
EntityReference document = null;
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.LOGIN, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.VIEW, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.EDIT, user,
document, ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.DELETE, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.REGISTER, user,
document, ctx);
assertAccessTrue("User from global wiki should have the same rights on empty subwiki", Right.COMMENT, user,
document, ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.PROGRAM, user,
document, ctx);
assertAccessFalse("User from global wiki should have the same rights on empty subwiki", Right.ADMIN, user,
document, ctx);
}
// Cache tests
@Test
public void testRightOnUserAndDelete() throws Exception
{
LegacyTestWiki testWiki = new LegacyTestWiki(getMockery(), getComponentManager(), "usersAndGroups.xml", false);
XWikiContext ctx = testWiki.getXWikiContext();
ctx.setDatabase("wiki");
assertAccessTrue("User should have view right", Right.VIEW, new DocumentReference("wiki", "XWiki", "user"),
new DocumentReference("wiki", "Space", "Page"), ctx);
assertAccessTrue("User should have view right", Right.VIEW, new DocumentReference("wiki", "XWiki", "user2"),
new DocumentReference("wiki", "Space", "Page"), ctx);
testWiki.deleteUser("user", "wiki");
assertAccessFalse("User should have view right", Right.VIEW, new DocumentReference("wiki", "XWiki", "user"),
new DocumentReference("wiki", "Space", "Page"), ctx);
assertAccessTrue("User should have view right", Right.VIEW, new DocumentReference("wiki", "XWiki", "user2"),
new DocumentReference("wiki", "Space", "Page"), ctx);
}
}
|
package org.gemoc.executionframework.ui.views.engine.actions;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IMenuCreator;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.ui.IWorkbenchPartSite;
import org.gemoc.commons.eclipse.ui.ViewHelper;
import org.gemoc.executionframework.ui.views.engine.EnginesStatusView;
import org.gemoc.executionframework.ui.views.engine.IEngineSelectionListener;
import org.gemoc.gemoc_language_workbench.api.core.ExecutionMode;
import org.gemoc.gemoc_language_workbench.api.core.IExecutionEngine;
import org.gemoc.gemoc_language_workbench.api.core.EngineStatus.RunStatus;
public abstract class AbstractEngineAction extends Action implements IMenuCreator, IEngineSelectionListener{
    public AbstractEngineAction(){
        this(AS_PUSH_BUTTON);
    }
public AbstractEngineAction(int style){
super("fake", style);
setMenuCreator(this);
setEnabled(false);
init();
updateButton();
EnginesStatusView view = ViewHelper.retrieveView(EnginesStatusView.ID);
view.addEngineSelectionListener(this);
}
    protected void init(){
        // Hook for subclasses; the default implementation does nothing.
    }
    protected void updateButton(){
        // Hook for subclasses; the default implementation does nothing.
    }
@Override
public void dispose()
{
EnginesStatusView view = ViewHelper.retrieveView(EnginesStatusView.ID);
view.removeEngineSelectionListener(this);
}
protected void showMessage(IWorkbenchPartSite partSite, String message) {
MessageDialog.openInformation(
partSite.getShell(),
"Gemoc Engines Status",
message);
}
private IExecutionEngine _currentSelectedEngine;
public IExecutionEngine getCurrentSelectedEngine(){
return _currentSelectedEngine;
}
@Override
public void engineSelectionChanged(IExecutionEngine engine)
{
_currentSelectedEngine = engine;
if (_currentSelectedEngine == null)
{
setEnabled(false);
}
else
{
setEnabled(
!_currentSelectedEngine.getRunningStatus().equals(RunStatus.Stopped)
&& _currentSelectedEngine.getExecutionContext().getExecutionMode().equals(ExecutionMode.Animation));
}
}
@Override
public Menu getMenu(Control parent) {
return null;
}
@Override
public Menu getMenu(Menu parent) {
return null;
}
}
|
package com.spotify.flo;
import io.grpc.Context;
public final class Tracing {
public static final Context.Key<String> TASK_ID = Context.keyWithDefault("task-id", "");
public static final Context.Key<String> TASK_NAME = Context.keyWithDefault("task-name", "");
public static final Context.Key<String> TASK_ARGS = Context.keyWithDefault("task-args", "");
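  // Illustrative usage (assumed caller code): values are read from the current gRPC Context,
  // e.g. Tracing.TASK_ID.get() returns the task id attached to the current context, or the
  // default "" when none has been set.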
private Tracing() {
throw new UnsupportedOperationException();
}
}
|
package fr.openwide.core.basicapp.web.application.administration.template;
import static fr.openwide.core.basicapp.web.application.property.BasicApplicationWebappPropertyIds.PORTFOLIO_ITEMS_PER_PAGE;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.wicket.Page;
import org.apache.wicket.Session;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.extensions.markup.html.repeater.data.grid.ICellPopulator;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.data.IDataProvider;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.ResourceModel;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.wicketstuff.wiquery.core.events.MouseEvent;
import fr.openwide.core.basicapp.core.business.user.model.User;
import fr.openwide.core.basicapp.core.business.user.service.IUserService;
import fr.openwide.core.basicapp.core.security.model.BasicApplicationPermissionConstants;
import fr.openwide.core.basicapp.core.util.binding.Bindings;
import fr.openwide.core.basicapp.web.application.administration.component.UserSearchPanel;
import fr.openwide.core.basicapp.web.application.administration.export.UserExcelTableExport;
import fr.openwide.core.basicapp.web.application.administration.form.AbstractUserPopup;
import fr.openwide.core.basicapp.web.application.administration.model.AbstractUserDataProvider;
import fr.openwide.core.basicapp.web.application.common.renderer.ActionRenderers;
import fr.openwide.core.basicapp.web.application.common.typedescriptor.user.UserTypeDescriptor;
import fr.openwide.core.commons.util.functional.Predicates2;
import fr.openwide.core.jpa.more.business.sort.ISort;
import fr.openwide.core.spring.property.service.IPropertyService;
import fr.openwide.core.wicket.markup.html.link.EmailLink;
import fr.openwide.core.wicket.more.condition.Condition;
import fr.openwide.core.wicket.more.export.excel.component.AbstractExcelExportAjaxLink;
import fr.openwide.core.wicket.more.export.excel.component.ExcelExportWorkInProgressModalPopupPanel;
import fr.openwide.core.wicket.more.link.model.PageModel;
import fr.openwide.core.wicket.more.markup.html.action.AbstractOneParameterAjaxAction;
import fr.openwide.core.wicket.more.markup.html.factory.ModelFactories;
import fr.openwide.core.wicket.more.markup.html.feedback.FeedbackUtils;
import fr.openwide.core.wicket.more.markup.html.link.BlankLink;
import fr.openwide.core.wicket.more.markup.html.template.js.jquery.plugins.bootstrap.modal.behavior.AjaxModalOpenBehavior;
import fr.openwide.core.wicket.more.markup.repeater.table.DecoratedCoreDataTablePanel;
import fr.openwide.core.wicket.more.markup.repeater.table.builder.DataTableBuilder;
import fr.openwide.core.wicket.more.markup.repeater.table.column.AbstractCoreColumn;
import fr.openwide.core.wicket.more.model.BindingModel;
import fr.openwide.core.wicket.more.rendering.BooleanRenderer;
public abstract class AdministrationUserPortfolioTemplate<U extends User> extends AdministrationTemplate {
private static final long serialVersionUID = 1824247169136460059L;
@SpringBean
private IUserService userService;
@SpringBean
private IPropertyService propertyService;
protected UserTypeDescriptor<U> typeDescriptor;
public AdministrationUserPortfolioTemplate(PageParameters parameters, UserTypeDescriptor<U> typeDescriptor, IModel<String> pageTitleModel) {
super(parameters);
this.typeDescriptor = typeDescriptor;
AbstractUserPopup<U> addPopup = createAddPopup("addPopup");
final AbstractUserDataProvider<U> dataProvider = newDataProvider();
DecoratedCoreDataTablePanel<U, ?> dataTablePanel =
createDataTable("dataTable", dataProvider, propertyService.get(PORTFOLIO_ITEMS_PER_PAGE));
add(
new Label("title", pageTitleModel),
new UserSearchPanel<>("searchPanel", dataTablePanel, typeDescriptor, dataProvider),
dataTablePanel,
addPopup,
new BlankLink("addButton")
.add(
new AjaxModalOpenBehavior(addPopup, MouseEvent.CLICK)
)
);
// Export Excel
ExcelExportWorkInProgressModalPopupPanel loadingPopup = new ExcelExportWorkInProgressModalPopupPanel("loadingPopup");
add(
loadingPopup,
new AbstractExcelExportAjaxLink("exportExcelButton", loadingPopup, "export-users-") {
private static final long serialVersionUID = 1L;
@Override
protected Workbook generateWorkbook() {
UserExcelTableExport export = new UserExcelTableExport(this);
return export.generate(dataProvider);
}
}
);
}
protected abstract AbstractUserDataProvider<U> newDataProvider();
protected abstract AbstractUserPopup<U> createAddPopup(String wicketId);
protected DecoratedCoreDataTablePanel<U, ?> createDataTable(String wicketId, final IDataProvider<U> dataProvider,
int itemsPerPage) {
PageModel<Page> pageModel = new PageModel<Page>(this);
return DataTableBuilder.start(dataProvider)
.addLabelColumn(new ResourceModel("business.user.userName"), Bindings.user().userName())
.withClass("text text-md")
.withLink(AdministrationUserDescriptionTemplate.<U>mapper().setParameter2(pageModel))
.addLabelColumn(new ResourceModel("business.user.lastName"), Bindings.user().lastName())
.withClass("text text-md")
.addLabelColumn(new ResourceModel("business.user.firstName"), Bindings.user().firstName())
.withClass("text text-md")
.addColumn(new AbstractCoreColumn<U, ISort<?>>(new ResourceModel("business.user.email")) {
private static final long serialVersionUID = 1L;
@Override
public void populateItem(Item<ICellPopulator<U>> cellItem, String componentId, IModel<U> rowModel) {
IModel<String> emailModel = BindingModel.of(rowModel, Bindings.user().email());
cellItem.add(
new EmailLink(componentId, emailModel) {
private static final long serialVersionUID = 1L;
@Override
protected void onComponentTag(ComponentTag tag) {
tag.setName("a");
super.onComponentTag(tag);
}
}
.add(Condition.predicate(emailModel, Predicates2.hasText()).thenShow())
);
}
})
.withClass("text text-md")
.addBootstrapBadgeColumn(new ResourceModel("business.user.active"), Bindings.user().active(), BooleanRenderer.get())
.withClass("icon")
.addActionColumn()
.addLink(ActionRenderers.view(), AdministrationUserDescriptionTemplate.<U>mapper().setParameter2(pageModel))
.addConfirmAction(ActionRenderers.delete())
.title(ModelFactories.stringResourceModel(
"administration.user.delete.confirmation.title",
Bindings.user().fullName()
))
.content(ModelFactories.stringResourceModel(
"administration.user.delete.confirmation.text",
Bindings.user().fullName()
))
.confirm()
.onClick(new AbstractOneParameterAjaxAction<IModel<U>>() {
private static final long serialVersionUID = 1L;
@Override
public void execute(AjaxRequestTarget target, IModel<U> parameter) {
try {
userService.delete(parameter.getObject());
Session.get().success(getString("common.delete.success"));
} catch (Exception e) {
Session.get().error(getString("common.delete.error"));
}
target.add(getPage());
dataProvider.detach();
FeedbackUtils.refreshFeedback(target, getPage());
}
})
.whenPermission(BasicApplicationPermissionConstants.DELETE)
.hidePlaceholder()
.withClassOnElements("btn-xs")
.end()
.withClass("actions actions-2x")
.withNoRecordsResourceKey("administration.user.noUsers")
.decorate()
.ajaxPagers()
.build(wicketId, itemsPerPage);
}
@SuppressWarnings("rawtypes")
@Override
protected Class<? extends AdministrationUserPortfolioTemplate> getSecondMenuPage() {
return getClass();
}
}
|
package cpw.mods.fml.common.launcher;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import joptsimple.ArgumentAcceptingOptionSpec;
import joptsimple.NonOptionArgumentSpec;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import com.google.common.base.Joiner;
import com.google.common.base.Throwables;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.ObjectArrays;
import cpw.mods.fml.relauncher.FMLLaunchHandler;
import net.minecraft.launchwrapper.ITweaker;
import net.minecraft.launchwrapper.LaunchClassLoader;
public class FMLTweaker implements ITweaker {
private List<String> args;
private File gameDir;
private File assetsDir;
private String profile;
private static URI jarLocation;
private String[] array;
private List<ITweaker> cascadedTweaks;
private String profileName;
private OptionSet parsedOptions;
private ArgumentAcceptingOptionSpec<String> cascadedTweaksOption;
@Override
public void acceptOptions(List<String> args, File gameDir, File assetsDir, String profile)
{
this.gameDir = (gameDir == null ? new File(".") : gameDir);
this.assetsDir = assetsDir;
this.profile = profile;
try
{
jarLocation = getClass().getProtectionDomain().getCodeSource().getLocation().toURI();
}
catch (URISyntaxException e)
{
Logger.getLogger("FMLTWEAK").log(Level.SEVERE, "Missing URI information for FML tweak");
throw Throwables.propagate(e);
}
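        // Parse FML-specific options (--cascadedTweaks, --profileName); unrecognized options are allowed
        // and the remaining non-option arguments are kept for the game launch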
OptionParser optionParser = new OptionParser();
cascadedTweaksOption = optionParser.accepts("cascadedTweaks", "Additional tweaks to be called by FML, implementing ITweaker").withRequiredArg().ofType(String.class).withValuesSeparatedBy(',');
ArgumentAcceptingOptionSpec<String> profileNameOption = optionParser.accepts("profileName", "A profile name, parsed by FML to control mod loading and such").withRequiredArg().ofType(String.class);
optionParser.allowsUnrecognizedOptions();
NonOptionArgumentSpec<String> nonOptions = optionParser.nonOptions();
parsedOptions = optionParser.parse(args.toArray(new String[args.size()]));
if (parsedOptions.has(profileNameOption))
{
profileName = profileNameOption.value(parsedOptions);
}
this.args = parsedOptions.valuesOf(nonOptions);
this.cascadedTweaks = Lists.newArrayList();
}
@Override
public void injectIntoClassLoader(LaunchClassLoader classLoader)
{
computeCascadedTweaks(classLoader);
classLoader.addTransformerExclusion("cpw.mods.fml.repackage.");
classLoader.addTransformerExclusion("cpw.mods.fml.relauncher.");
classLoader.addTransformerExclusion("cpw.mods.fml.common.asm.transformers.");
classLoader.addClassLoaderExclusion("LZMA.");
FMLLaunchHandler.configureForClientLaunch(classLoader, this);
runAdditionalTweaks(classLoader);
FMLLaunchHandler.appendCoreMods();
}
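    // Instantiates any additional ITweaker classes requested via --cascadedTweaks; classes that cannot be loaded are logged and skipped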
void computeCascadedTweaks(LaunchClassLoader classLoader)
{
if (parsedOptions.has(cascadedTweaksOption))
{
for (String tweaker : cascadedTweaksOption.values(parsedOptions))
{
try
{
classLoader.addClassLoaderExclusion(tweaker.substring(0,tweaker.lastIndexOf('.')));
Class<? extends ITweaker> tweakClass = (Class<? extends ITweaker>) Class.forName(tweaker,true,classLoader);
ITweaker additionalTweak = tweakClass.newInstance();
cascadedTweaks.add(additionalTweak);
}
catch (Exception e)
{
Logger.getLogger("FMLTWEAK").log(Level.INFO, "Missing additional tweak class "+tweaker);
}
}
}
}
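    // Passes the FML-augmented argument list to each cascaded tweak and lets it inject into the class loader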
void runAdditionalTweaks(LaunchClassLoader classLoader)
{
List<String> fmlArgs = Lists.newArrayList(args);
fmlArgs.add("--fmlIsPresent");
for (ITweaker tweak : cascadedTweaks)
{
tweak.acceptOptions(fmlArgs, gameDir, assetsDir, profile);
tweak.injectIntoClassLoader(classLoader);
}
}
@Override
public String getLaunchTarget()
{
return "net.minecraft.client.main.Main";
}
@Override
public String[] getLaunchArguments()
{
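        // Start from the non-option arguments, prepend each cascaded tweak's arguments,
        // then add --gameDir, --assetsDir and --version defaults if they are not already present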
String[] array = args.toArray(new String[args.size()]);
for (ITweaker tweak: cascadedTweaks)
{
array = ObjectArrays.concat(tweak.getLaunchArguments(), array, String.class);
}
if (gameDir != null && !Arrays.asList(array).contains("--gameDir"))
{
array = ObjectArrays.concat(gameDir.getAbsolutePath(),array);
array = ObjectArrays.concat("--gameDir",array);
}
if (assetsDir != null && !Arrays.asList(array).contains("--assetsDir"))
{
array = ObjectArrays.concat(assetsDir.getAbsolutePath(),array);
array = ObjectArrays.concat("--assetsDir",array);
}
if (profile != null && !Arrays.asList(array).contains("--version"))
{
array = ObjectArrays.concat(profile,array);
array = ObjectArrays.concat("--version",array);
}
else if (!Arrays.asList(array).contains("--version"))
{
array = ObjectArrays.concat("UnknownFMLProfile",array);
array = ObjectArrays.concat("--version",array);
}
return array;
}
public File getGameDir()
{
return gameDir;
}
public static URI getJarLocation()
{
return jarLocation;
}
public void injectCascadingTweak(ITweaker tweaker)
{
cascadedTweaks.add(tweaker);
}
}
|
package org.elasticsearch.xpack.searchablesnapshots;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.shard.IndexLongFieldRange;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.snapshots.SnapshotsService;
import org.elasticsearch.snapshots.mockstore.MockRepository;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotAction;
import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotRequest;
import org.elasticsearch.xpack.searchablesnapshots.cache.CacheService;
import java.io.IOException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING;
import static org.elasticsearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
public class SearchableSnapshotsCanMatchOnCoordinatorIntegTests extends BaseSearchableSnapshotsIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(LocalStateSearchableSnapshots.class, MockTransportService.TestPlugin.class, MockRepository.Plugin.class);
}
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
            // Use an unbounded cache so the searchable snapshot can always be fully recovered
.put(CacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES))
// Have a shared cache of reasonable size available on each node because tests randomize over frozen and cold allocation
.put(SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ofMb(randomLongBetween(1, 10)))
.build();
}
public void testSearchableSnapshotShardsAreSkippedWithoutQueryingAnyNodeWhenTheyAreOutsideOfTheQueryRange() throws Exception {
internalCluster().startMasterOnlyNode();
internalCluster().startCoordinatingOnlyNode(Settings.EMPTY);
final String dataNodeHoldingRegularIndex = internalCluster().startDataOnlyNode();
final String dataNodeHoldingSearchableSnapshot = internalCluster().startDataOnlyNode();
final IndicesService indicesService = internalCluster().getInstance(IndicesService.class, dataNodeHoldingSearchableSnapshot);
final String indexOutsideSearchRange = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
final int indexOutsideSearchRangeShardCount = randomIntBetween(1, 3);
createIndexWithTimestamp(indexOutsideSearchRange, indexOutsideSearchRangeShardCount, Settings.EMPTY);
final String indexWithinSearchRange = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
final int indexWithinSearchRangeShardCount = randomIntBetween(1, 3);
createIndexWithTimestamp(
indexWithinSearchRange,
indexWithinSearchRangeShardCount,
Settings.builder()
.put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getConcreteSettingForNamespace("_name").getKey(), dataNodeHoldingRegularIndex)
.build()
);
final int totalShards = indexOutsideSearchRangeShardCount + indexWithinSearchRangeShardCount;
        // Either index data outside of the search range, or index documents without timestamp data
final boolean indexDataWithTimestamp = randomBoolean();
if (indexDataWithTimestamp) {
indexDocumentsWithTimestampWithinDate(indexOutsideSearchRange, between(1, 1000), "2020-11-26T%02d:%02d:%02d.%09dZ");
} else {
indexRandomDocs(indexOutsideSearchRange, between(0, 1000));
}
// Index enough documents to ensure that all shards have at least some documents
int numDocsWithinRange = between(100, 1000);
indexDocumentsWithTimestampWithinDate(indexWithinSearchRange, numDocsWithinRange, "2020-11-28T%02d:%02d:%02d.%09dZ");
final String repositoryName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
createRepository(repositoryName, "mock");
final SnapshotId snapshotId = createSnapshot(repositoryName, "snapshot-1", List.of(indexOutsideSearchRange)).snapshotId();
assertAcked(client().admin().indices().prepareDelete(indexOutsideSearchRange));
final String searchableSnapshotIndexOutsideSearchRange = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
// Block the repository for the node holding the searchable snapshot shards
// to delay its restore
blockDataNode(repositoryName, dataNodeHoldingSearchableSnapshot);
// Force the searchable snapshot to be allocated in a particular node
Settings restoredIndexSettings = Settings.builder()
.put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), Boolean.FALSE.toString())
.put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getConcreteSettingForNamespace("_name").getKey(), dataNodeHoldingSearchableSnapshot)
.build();
final MountSearchableSnapshotRequest mountRequest = new MountSearchableSnapshotRequest(
searchableSnapshotIndexOutsideSearchRange,
repositoryName,
snapshotId.getName(),
indexOutsideSearchRange,
restoredIndexSettings,
Strings.EMPTY_ARRAY,
false,
randomFrom(MountSearchableSnapshotRequest.Storage.values())
);
client().execute(MountSearchableSnapshotAction.INSTANCE, mountRequest).actionGet();
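        // While the restore is still blocked, the mounted index has no assigned shards, so its timestamp range and field type are unknown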
final IndexMetadata indexMetadata = getIndexMetadata(searchableSnapshotIndexOutsideSearchRange);
assertThat(indexMetadata.getTimestampRange(), equalTo(IndexLongFieldRange.NO_SHARDS));
DateFieldMapper.DateFieldType timestampFieldType = indicesService.getTimestampFieldType(indexMetadata.getIndex());
assertThat(timestampFieldType, nullValue());
final boolean includeIndexCoveringSearchRangeInSearchRequest = randomBoolean();
List<String> indicesToSearch = new ArrayList<>();
if (includeIndexCoveringSearchRangeInSearchRequest) {
indicesToSearch.add(indexWithinSearchRange);
}
indicesToSearch.add(searchableSnapshotIndexOutsideSearchRange);
SearchRequest request = new SearchRequest().indices(indicesToSearch.toArray(new String[0]))
.source(
new SearchSourceBuilder().query(
QueryBuilders.rangeQuery(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD)
.from("2020-11-28T00:00:00.000000000Z", true)
.to("2020-11-29T00:00:00.000000000Z")
)
);
if (includeIndexCoveringSearchRangeInSearchRequest) {
SearchResponse searchResponse = client().search(request).actionGet();
// All the regular index searches succeeded
assertThat(searchResponse.getSuccessfulShards(), equalTo(indexWithinSearchRangeShardCount));
            // All the searchable snapshot shard searches failed
assertThat(searchResponse.getFailedShards(), equalTo(indexOutsideSearchRangeShardCount));
assertThat(searchResponse.getSkippedShards(), equalTo(0));
assertThat(searchResponse.getTotalShards(), equalTo(totalShards));
} else {
// All shards failed, since all shards are unassigned and the IndexMetadata min/max timestamp
// is not available yet
expectThrows(SearchPhaseExecutionException.class, () -> client().search(request).actionGet());
}
// Allow the searchable snapshots to be finally mounted
unblockNode(repositoryName, dataNodeHoldingSearchableSnapshot);
waitUntilRecoveryIsDone(searchableSnapshotIndexOutsideSearchRange);
ensureGreen(searchableSnapshotIndexOutsideSearchRange);
final IndexMetadata updatedIndexMetadata = getIndexMetadata(searchableSnapshotIndexOutsideSearchRange);
final IndexLongFieldRange updatedTimestampMillisRange = updatedIndexMetadata.getTimestampRange();
final DateFieldMapper.DateFieldType dateFieldType = indicesService.getTimestampFieldType(updatedIndexMetadata.getIndex());
assertThat(dateFieldType, notNullValue());
final DateFieldMapper.Resolution resolution = dateFieldType.resolution();
assertThat(updatedTimestampMillisRange.isComplete(), equalTo(true));
if (indexDataWithTimestamp) {
assertThat(updatedTimestampMillisRange, not(sameInstance(IndexLongFieldRange.EMPTY)));
assertThat(
updatedTimestampMillisRange.getMin(),
greaterThanOrEqualTo(resolution.convert(Instant.parse("2020-11-26T00:00:00Z")))
);
assertThat(updatedTimestampMillisRange.getMax(), lessThanOrEqualTo(resolution.convert(Instant.parse("2020-11-27T00:00:00Z"))));
} else {
assertThat(updatedTimestampMillisRange, sameInstance(IndexLongFieldRange.EMPTY));
}
// Stop the node holding the searchable snapshots, and since we defined
// the index allocation criteria to require the searchable snapshot
// index to be allocated in that node, the shards should remain unassigned
internalCluster().stopNode(dataNodeHoldingSearchableSnapshot);
waitUntilAllShardsAreUnassigned(updatedIndexMetadata.getIndex());
if (includeIndexCoveringSearchRangeInSearchRequest) {
SearchResponse newSearchResponse = client().search(request).actionGet();
assertThat(newSearchResponse.getSkippedShards(), equalTo(indexOutsideSearchRangeShardCount));
assertThat(newSearchResponse.getSuccessfulShards(), equalTo(totalShards));
assertThat(newSearchResponse.getFailedShards(), equalTo(0));
assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards));
assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo((long) numDocsWithinRange));
} else {
if (indexOutsideSearchRangeShardCount == 1) {
expectThrows(SearchPhaseExecutionException.class, () -> client().search(request).actionGet());
} else {
SearchResponse newSearchResponse = client().search(request).actionGet();
// When all shards are skipped, at least one of them should be queried in order to
// provide a proper search response.
assertThat(newSearchResponse.getSkippedShards(), equalTo(indexOutsideSearchRangeShardCount - 1));
assertThat(newSearchResponse.getSuccessfulShards(), equalTo(indexOutsideSearchRangeShardCount - 1));
assertThat(newSearchResponse.getFailedShards(), equalTo(1));
assertThat(newSearchResponse.getTotalShards(), equalTo(indexOutsideSearchRangeShardCount));
}
}
}
public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() throws Exception {
internalCluster().startMasterOnlyNode();
internalCluster().startCoordinatingOnlyNode(Settings.EMPTY);
final String dataNodeHoldingRegularIndex = internalCluster().startDataOnlyNode();
final String dataNodeHoldingSearchableSnapshot = internalCluster().startDataOnlyNode();
final IndicesService indicesService = internalCluster().getInstance(IndicesService.class, dataNodeHoldingSearchableSnapshot);
final String indexOutsideSearchRange = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
final int indexOutsideSearchRangeShardCount = randomIntBetween(1, 3);
createIndexWithTimestamp(
indexOutsideSearchRange,
indexOutsideSearchRangeShardCount,
Settings.builder()
.put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getConcreteSettingForNamespace("_name").getKey(), dataNodeHoldingRegularIndex)
.build()
);
indexDocumentsWithTimestampWithinDate(indexOutsideSearchRange, between(1, 1000), "2020-11-26T%02d:%02d:%02d.%09dZ");
final String repositoryName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
createRepository(repositoryName, "mock");
final SnapshotId snapshotId = createSnapshot(repositoryName, "snapshot-1", List.of(indexOutsideSearchRange)).snapshotId();
final String searchableSnapshotIndexOutsideSearchRange = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
// Block the repository for the node holding the searchable snapshot shards
// to delay its restore
blockDataNode(repositoryName, dataNodeHoldingSearchableSnapshot);
// Force the searchable snapshot to be allocated in a particular node
Settings restoredIndexSettings = Settings.builder()
.put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), Boolean.FALSE.toString())
.put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getConcreteSettingForNamespace("_name").getKey(), dataNodeHoldingSearchableSnapshot)
.build();
final MountSearchableSnapshotRequest mountRequest = new MountSearchableSnapshotRequest(
searchableSnapshotIndexOutsideSearchRange,
repositoryName,
snapshotId.getName(),
indexOutsideSearchRange,
restoredIndexSettings,
Strings.EMPTY_ARRAY,
false,
randomFrom(MountSearchableSnapshotRequest.Storage.values())
);
client().execute(MountSearchableSnapshotAction.INSTANCE, mountRequest).actionGet();
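        // Recovery is still blocked, so the mounted index reports an unknown (NO_SHARDS) timestamp range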
final int searchableSnapshotShardCount = indexOutsideSearchRangeShardCount;
final IndexMetadata indexMetadata = getIndexMetadata(searchableSnapshotIndexOutsideSearchRange);
assertThat(indexMetadata.getTimestampRange(), equalTo(IndexLongFieldRange.NO_SHARDS));
DateFieldMapper.DateFieldType timestampFieldType = indicesService.getTimestampFieldType(indexMetadata.getIndex());
assertThat(timestampFieldType, nullValue());
SearchRequest request = new SearchRequest().indices(indexOutsideSearchRange, searchableSnapshotIndexOutsideSearchRange)
.source(
new SearchSourceBuilder().query(
QueryBuilders.rangeQuery(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD)
.from("2020-11-28T00:00:00.000000000Z", true)
.to("2020-11-29T00:00:00.000000000Z")
)
);
final int totalShards = indexOutsideSearchRangeShardCount + searchableSnapshotShardCount;
SearchResponse searchResponse = client().search(request).actionGet();
// All the regular index searches succeeded
assertThat(searchResponse.getSuccessfulShards(), equalTo(indexOutsideSearchRangeShardCount));
        // All the searchable snapshot shard searches failed
assertThat(searchResponse.getFailedShards(), equalTo(indexOutsideSearchRangeShardCount));
assertThat(searchResponse.getSkippedShards(), equalTo(searchableSnapshotShardCount));
assertThat(searchResponse.getTotalShards(), equalTo(totalShards));
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L));
// Allow the searchable snapshots to be finally mounted
unblockNode(repositoryName, dataNodeHoldingSearchableSnapshot);
waitUntilRecoveryIsDone(searchableSnapshotIndexOutsideSearchRange);
ensureGreen(searchableSnapshotIndexOutsideSearchRange);
final IndexMetadata updatedIndexMetadata = getIndexMetadata(searchableSnapshotIndexOutsideSearchRange);
final IndexLongFieldRange updatedTimestampMillisRange = updatedIndexMetadata.getTimestampRange();
final DateFieldMapper.DateFieldType dateFieldType = indicesService.getTimestampFieldType(updatedIndexMetadata.getIndex());
assertThat(dateFieldType, notNullValue());
final DateFieldMapper.Resolution resolution = dateFieldType.resolution();
assertThat(updatedTimestampMillisRange.isComplete(), equalTo(true));
assertThat(updatedTimestampMillisRange, not(sameInstance(IndexLongFieldRange.EMPTY)));
assertThat(updatedTimestampMillisRange.getMin(), greaterThanOrEqualTo(resolution.convert(Instant.parse("2020-11-26T00:00:00Z"))));
assertThat(updatedTimestampMillisRange.getMax(), lessThanOrEqualTo(resolution.convert(Instant.parse("2020-11-27T00:00:00Z"))));
// Stop the node holding the searchable snapshots, and since we defined
// the index allocation criteria to require the searchable snapshot
// index to be allocated in that node, the shards should remain unassigned
internalCluster().stopNode(dataNodeHoldingSearchableSnapshot);
waitUntilAllShardsAreUnassigned(updatedIndexMetadata.getIndex());
        // Busy assert since computing the timestamp field from the cluster state happens off the CS applier thread
        // and can thus be slightly delayed
assertBusy(() -> {
SearchResponse newSearchResponse = client().search(request).actionGet();
// All the regular index searches succeeded
assertThat(newSearchResponse.getSuccessfulShards(), equalTo(totalShards));
assertThat(newSearchResponse.getFailedShards(), equalTo(0));
            // We have to query at least one shard to construct a valid response,
            // so an available shard is picked even though every shard could be skipped
assertThat(newSearchResponse.getSkippedShards(), equalTo(totalShards - 1));
assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards));
assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo(0L));
});
}
public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCoordinatingNode() throws Exception {
internalCluster().startMasterOnlyNode();
internalCluster().startCoordinatingOnlyNode(Settings.EMPTY);
final String dataNodeHoldingRegularIndex = internalCluster().startDataOnlyNode();
final String dataNodeHoldingSearchableSnapshot = internalCluster().startDataOnlyNode();
final IndicesService indicesService = internalCluster().getInstance(IndicesService.class, dataNodeHoldingSearchableSnapshot);
final String indexWithinSearchRange = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
final int indexWithinSearchRangeShardCount = randomIntBetween(1, 3);
createIndexWithTimestamp(
indexWithinSearchRange,
indexWithinSearchRangeShardCount,
Settings.builder()
.put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getConcreteSettingForNamespace("_name").getKey(), dataNodeHoldingRegularIndex)
.build()
);
indexDocumentsWithTimestampWithinDate(indexWithinSearchRange, between(1, 1000), "2020-11-28T%02d:%02d:%02d.%09dZ");
final String repositoryName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
createRepository(repositoryName, "mock");
final SnapshotId snapshotId = createSnapshot(repositoryName, "snapshot-1", List.of(indexWithinSearchRange)).snapshotId();
assertAcked(client().admin().indices().prepareDelete(indexWithinSearchRange));
final String searchableSnapshotIndexWithinSearchRange = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
// Block the repository for the node holding the searchable snapshot shards
// to delay its restore
blockDataNode(repositoryName, dataNodeHoldingSearchableSnapshot);
// Force the searchable snapshot to be allocated in a particular node
Settings restoredIndexSettings = Settings.builder()
.put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), Boolean.FALSE.toString())
.put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getConcreteSettingForNamespace("_name").getKey(), dataNodeHoldingSearchableSnapshot)
.build();
final MountSearchableSnapshotRequest mountRequest = new MountSearchableSnapshotRequest(
searchableSnapshotIndexWithinSearchRange,
repositoryName,
snapshotId.getName(),
indexWithinSearchRange,
restoredIndexSettings,
Strings.EMPTY_ARRAY,
false,
randomFrom(MountSearchableSnapshotRequest.Storage.values())
);
client().execute(MountSearchableSnapshotAction.INSTANCE, mountRequest).actionGet();
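        // The blocked restore means the mounted index has no assigned shards yet, so no timestamp range is known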
final IndexMetadata indexMetadata = getIndexMetadata(searchableSnapshotIndexWithinSearchRange);
assertThat(indexMetadata.getTimestampRange(), equalTo(IndexLongFieldRange.NO_SHARDS));
DateFieldMapper.DateFieldType timestampFieldType = indicesService.getTimestampFieldType(indexMetadata.getIndex());
assertThat(timestampFieldType, nullValue());
SearchRequest request = new SearchRequest().indices(searchableSnapshotIndexWithinSearchRange)
.source(
new SearchSourceBuilder().query(
QueryBuilders.rangeQuery(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD)
.from("2020-11-28T00:00:00.000000000Z", true)
.to("2020-11-29T00:00:00.000000000Z")
)
);
// All shards failed, since all shards are unassigned and the IndexMetadata min/max timestamp
// is not available yet
expectThrows(SearchPhaseExecutionException.class, () -> client().search(request).actionGet());
// Allow the searchable snapshots to be finally mounted
unblockNode(repositoryName, dataNodeHoldingSearchableSnapshot);
waitUntilRecoveryIsDone(searchableSnapshotIndexWithinSearchRange);
ensureGreen(searchableSnapshotIndexWithinSearchRange);
final IndexMetadata updatedIndexMetadata = getIndexMetadata(searchableSnapshotIndexWithinSearchRange);
final IndexLongFieldRange updatedTimestampMillisRange = updatedIndexMetadata.getTimestampRange();
final DateFieldMapper.DateFieldType dateFieldType = indicesService.getTimestampFieldType(updatedIndexMetadata.getIndex());
assertThat(dateFieldType, notNullValue());
final DateFieldMapper.Resolution resolution = dateFieldType.resolution();
assertThat(updatedTimestampMillisRange.isComplete(), equalTo(true));
assertThat(updatedTimestampMillisRange, not(sameInstance(IndexLongFieldRange.EMPTY)));
assertThat(updatedTimestampMillisRange.getMin(), greaterThanOrEqualTo(resolution.convert(Instant.parse("2020-11-28T00:00:00Z"))));
assertThat(updatedTimestampMillisRange.getMax(), lessThanOrEqualTo(resolution.convert(Instant.parse("2020-11-29T00:00:00Z"))));
// Stop the node holding the searchable snapshots, and since we defined
// the index allocation criteria to require the searchable snapshot
// index to be allocated in that node, the shards should remain unassigned
internalCluster().stopNode(dataNodeHoldingSearchableSnapshot);
waitUntilAllShardsAreUnassigned(updatedIndexMetadata.getIndex());
        // The range query matches, but the shards are unavailable; in that case the search fails,
        // as all shards that hold the data are unassigned
expectThrows(SearchPhaseExecutionException.class, () -> client().search(request).actionGet());
}
private void createIndexWithTimestamp(String indexName, int numShards, Settings extraSettings) throws IOException {
assertAcked(
client().admin()
.indices()
.prepareCreate(indexName)
.setMapping(
XContentFactory.jsonBuilder()
.startObject()
.startObject("properties")
.startObject(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD)
.field("type", randomFrom("date", "date_nanos"))
.field("format", "strict_date_optional_time_nanos")
.endObject()
.endObject()
.endObject()
)
.setSettings(indexSettingsNoReplicas(numShards).put(INDEX_SOFT_DELETES_SETTING.getKey(), true).put(extraSettings))
);
ensureGreen(indexName);
}
private void indexDocumentsWithTimestampWithinDate(String indexName, int docCount, String timestampTemplate) throws Exception {
final List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
for (int i = 0; i < docCount; i++) {
indexRequestBuilders.add(
client().prepareIndex(indexName)
.setSource(
DataStream.TimestampField.FIXED_TIMESTAMP_FIELD,
String.format(
Locale.ROOT,
timestampTemplate,
between(0, 23),
between(0, 59),
between(0, 59),
randomLongBetween(0, 999999999L)
)
)
);
}
indexRandom(true, false, indexRequestBuilders);
assertThat(
client().admin().indices().prepareForceMerge(indexName).setOnlyExpungeDeletes(true).setFlush(true).get().getFailedShards(),
equalTo(0)
);
refresh(indexName);
forceMerge();
}
private IndexMetadata getIndexMetadata(String indexName) {
return client().admin()
.cluster()
.prepareState()
.clear()
.setMetadata(true)
.setIndices(indexName)
.get()
.getState()
.metadata()
.index(indexName);
}
private void waitUntilRecoveryIsDone(String index) throws Exception {
assertBusy(() -> {
RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries(index).get();
assertThat(recoveryResponse.hasRecoveries(), equalTo(true));
for (List<RecoveryState> value : recoveryResponse.shardRecoveryStates().values()) {
for (RecoveryState recoveryState : value) {
assertThat(recoveryState.getStage(), equalTo(RecoveryState.Stage.DONE));
}
}
});
}
private void waitUntilAllShardsAreUnassigned(Index index) throws Exception {
awaitClusterState(state -> state.getRoutingTable().index(index).allPrimaryShardsUnassigned());
}
}
|
package org.carlspring.strongbox.io;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.security.NoSuchAlgorithmException;
import java.util.LinkedHashSet;
import java.util.Set;
import javax.inject.Inject;
import javax.xml.bind.JAXBException;
import org.apache.commons.io.IOUtils;
import org.apache.maven.artifact.Artifact;
import org.carlspring.maven.commons.util.ArtifactUtils;
import org.carlspring.strongbox.artifact.coordinates.ArtifactCoordinates;
import org.carlspring.strongbox.artifact.coordinates.MavenArtifactCoordinates;
import org.carlspring.strongbox.providers.io.RepositoryFileSystemProvider;
import org.carlspring.strongbox.providers.io.RepositoryPath;
import org.carlspring.strongbox.providers.layout.LayoutProvider;
import org.carlspring.strongbox.providers.layout.LayoutProviderRegistry;
import org.carlspring.strongbox.storage.Storage;
import org.carlspring.strongbox.storage.repository.Repository;
import org.carlspring.strongbox.testing.TestCaseWithMavenArtifactGenerationAndIndexing;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
* @author mtodorov
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class ArtifactOutputStreamTest
extends TestCaseWithMavenArtifactGenerationAndIndexing
{
public static final String REPOSITORY_RELEASES = "aos-releases";
@Inject
private LayoutProviderRegistry layoutProviderRegistry;
@Before
public void initialize()
throws Exception
{
createRepository(STORAGE0, REPOSITORY_RELEASES, false);
generateArtifact(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES).getAbsolutePath(),
"org.carlspring.foo:temp-file-test:1.2.3:jar");
}
@After
public void removeRepositories()
throws IOException, JAXBException
{
removeRepositories(getRepositoriesToClean());
}
public static Set<Repository> getRepositoriesToClean()
{
Set<Repository> repositories = new LinkedHashSet<>();
repositories.add(createRepositoryMock(STORAGE0, REPOSITORY_RELEASES));
return repositories;
}
@Test
public void testCreateWithTemporaryLocation()
throws IOException,
NoSuchAlgorithmException
{
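        // Streams an artifact to a temporary path and verifies it only appears at its final location after an explicit move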
final Storage storage = getConfiguration().getStorage("storage0");
final Repository repository = storage.getRepository("releases");
final Artifact artifact = ArtifactUtils.getArtifactFromGAVTC("org.carlspring.foo:temp-file-test:1.2.3:jar");
final ArtifactCoordinates coordinates = new MavenArtifactCoordinates(artifact);
LayoutProvider layoutProvider = layoutProviderRegistry.getProvider(repository.getLayout());
RepositoryPath artifactPath = layoutProvider.resolve(repository, coordinates);
RepositoryFileSystemProvider provider = (RepositoryFileSystemProvider) artifactPath.getFileSystem()
.provider();
RepositoryPath artifactPathTemp = provider.getTempPath(artifactPath);
final ArtifactOutputStream afos = new ArtifactOutputStream(Files.newOutputStream(artifactPathTemp),
coordinates);
ByteArrayInputStream bais = new ByteArrayInputStream("This is a test\n".getBytes());
IOUtils.copy(bais, afos);
assertTrue("Failed to create temporary artifact file!", Files.exists(artifactPathTemp));
afos.close();
provider.moveFromTemporaryDirectory(artifactPath);
assertTrue("Failed to the move temporary artifact file to original location!", Files.exists(artifactPath));
}
@Test
public void testCreateWithTemporaryLocationNoMoveOnClose()
throws IOException,
NoSuchAlgorithmException
{
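        // Verifies that closing the stream alone does not move the artifact out of the temporary location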
final Storage storage = getConfiguration().getStorage("storage0");
final Repository repository = storage.getRepository("releases");
final Artifact artifact = ArtifactUtils.getArtifactFromGAVTC("org.carlspring.foo:temp-file-test:1.2.4:jar");
final ArtifactCoordinates coordinates = new MavenArtifactCoordinates(artifact);
LayoutProvider layoutProvider = layoutProviderRegistry.getProvider(repository.getLayout());
RepositoryPath artifactPath = layoutProvider.resolve(repository, coordinates);
RepositoryFileSystemProvider provider = (RepositoryFileSystemProvider) artifactPath.getFileSystem()
.provider();
RepositoryPath artifactPathTemp = provider.getTempPath(artifactPath);
final ArtifactOutputStream afos = new ArtifactOutputStream(Files.newOutputStream(artifactPathTemp),
coordinates);
ByteArrayInputStream bais = new ByteArrayInputStream("This is a test\n".getBytes());
IOUtils.copy(bais, afos);
assertTrue("Failed to create temporary artifact file!", Files.exists(artifactPathTemp));
afos.close();
assertFalse("Should not have move temporary the artifact file to original location!",
Files.exists(artifactPath));
assertTrue("Should not have move temporary the artifact file to original location!",
Files.exists(artifactPathTemp));
}
}
|
package com.polyvi.xface.core;
import java.io.File;
import java.io.IOException;
import java.util.List;
import android.app.AlertDialog;
import android.app.AlertDialog.Builder;
import android.content.Context;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.os.Build;
import android.os.Looper;
import com.polyvi.xface.XStartParams;
import com.polyvi.xface.configXml.XPreInstallPackageItem;
import com.polyvi.xface.configXml.XSysConfigInfo;
import com.polyvi.xface.configXml.XSysConfigParser;
import com.polyvi.xface.configXml.XTagNotFoundException;
import com.polyvi.xface.util.XConstant;
import com.polyvi.xface.util.XFileUtils;
import com.polyvi.xface.util.XLog;
import com.polyvi.xface.util.XStrings;
public class XConfiguration {
private static final String CLASS_NAME = XConfiguration.class
.getSimpleName();
private static final String APPS_FOLDER_NAME = "applications";
public static final String SYS_DATA_DIR_NAME = "sys_data";
private static final int TAG_SYSTEM_EXIT_CODE = 1;
private boolean mWorkDirectoryChanged = false;
    // configuration info parsed from config.xml
private XSysConfigInfo mSysConfigInfo;
private enum WorkDirConfig {
TAG_MAIN_MEMORY_ONLY,
        TAG_EXTERNAL_MEMORY_CARD_ONLY, // external SD/TF card only (no flash ROM)
TAG_EXTERNAL_MEMORY_CARD_FIRST
};
static public enum WorkDirStrategy {
MEMORY, SDCARD
};
/** singleton */
private static XConfiguration instance = null;
private String mWorkDir;
    /** whether tel links are enabled in the web view */
private boolean mTelLinkEnabled = true;
private XConfiguration() {
}
public static XConfiguration getInstance() {
if (instance == null) {
instance = new XConfiguration();
}
return instance;
}
    /**
     * Parses the system configuration file (config.xml).
     *
     * @param context
     *            the android Context
     * @throws XTagNotFoundException
     */
public void readConfig(Context context)
throws XTagNotFoundException {
XSysConfigParser sysConfigParser = new XSysConfigParser(context);
mSysConfigInfo = sysConfigParser.parseConfig();
}
    /**
     * Sets the work directory.
     *
     * @param workDir
     *            the work directory path
     */
public void setWorkDirectory(String workDir) {
this.mWorkDir = workDir;
}
    /**
     * Gets the application install directory.
     *
     * @return the application install directory path
     */
public String getAppInstallDir() {
return this.mWorkDir + APPS_FOLDER_NAME + File.separator;
}
    /**
     * Gets the sys_data directory.
     *
     * @return the sys_data directory path
     */
public String getSysDataDir() {
return this.getAppInstallDir() + SYS_DATA_DIR_NAME + File.separator;
}
    /**
     * Gets the work directory.
     *
     * @return the work directory path
     */
public String getWorkDirectory() {
return this.mWorkDir;
}
public List<XPreInstallPackageItem> getPreinstallPackages() {
return (null == mSysConfigInfo) ? null : mSysConfigInfo
.getPreinstallPackages();
}
    /**
     * Gets the id of the application to start.
     *
     * @param sysCtx
     *            the system context
     * @return the start app id
     */
public String getStartAppId(XISystemContext sysCtx) {
XStartParams params = sysCtx.getStartParams();
if (null != params && null != params.appId) {
            // Prefer the app id (package name) from the start params over the XPreInstallPackageItem configuration
return params.appId;
}
return (null == mSysConfigInfo) ? null : mSysConfigInfo.getStartAppId();
}
    /**
     * Reads the log level from config.xml.
     */
public String readLogLevel() {
return (null == mSysConfigInfo) ? null : mSysConfigInfo.getLogLevel();
}
    /**
     * Reads from config.xml whether the splash screen should be hidden automatically.
     *
     * @return the auto-hide splash flag, defaulting to true
     */
public boolean readAutoHideSplash() {
return (null == mSysConfigInfo) ? true : mSysConfigInfo
.getAutoHideSplash();
}
    /**
     * Reads the fullscreen flag from config.xml.
     *
     * @return the fullscreen flag, defaulting to false
     */
public boolean readFullscreen() {
return (null == mSysConfigInfo) ? false : mSysConfigInfo
.getFullscreen();
}
    /**
     * Reads the splash screen delay from config.xml.
     *
     * @return the splash delay
     */
public String readSplashDelay() {
return (null == mSysConfigInfo) ? null : mSysConfigInfo
.getSplashDelay();
}
    /**
     * Reads the engine version from config.xml.
     *
     * @return the engine version
     */
public String readEngineVersion() {
return (null == mSysConfigInfo) ? null : mSysConfigInfo
.getEngineVersion();
}
    /**
     * Reads the engine build number from config.xml.
     *
     * @return the engine build number
     */
public String readEngineBuild() {
return (null == mSysConfigInfo) ? null : mSysConfigInfo
.getEngineBuild();
}
    /**
     * Reads the xface.js load url timeout from config.xml.
     *
     * @return the load url timeout
     */
public String readLoadUrlTimeout() {
return (null == mSysConfigInfo) ? null : mSysConfigInfo
.getLoadUrlTimeout();
}
    /**
     * Configures the xFace work directory; exits with an alert if no work directory is available.
     *
     * @param context
     *            the android Context
     * @param workDirName
     *            the work directory name from config.xml
     */
public void configWorkDirectory(Context context, String workDirName) {
if (null == workDirName) {
            alertExitMessage(
XStrings.getInstance().getString(
XStrings.EXIT_MESSAGE_TITLE),
XStrings.getInstance().getString(
XStrings.EXIT_MESSAGE_CONTENT), context);
} else {
            // Use the configured work directory
setWorkDirectory(workDirName);
}
}
    /**
     * Resolves the absolute work directory path, ending with '/'.
     *
     * @param context
     *            the android Context
     * @param workDirName
     *            the work directory name
     * @return the resolved work directory path, or null if none is available
     */
public String getWorkDirectory(Context context, String workDirName) {
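        // Resolve the configured work directory type (1-based index into WorkDirConfig) and build the absolute path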
String dirType = mSysConfigInfo.getWorkDir();
String baseDir = null;
int work_dir_config;
WorkDirConfig configType;
try {
work_dir_config = Integer.parseInt(dirType);
configType = WorkDirConfig.values()[--work_dir_config];
} catch (Exception e) {
return null;
}
        // FIXME: workaround for Android 3.2 (API level 13): always use main memory
if (Build.VERSION.SDK_INT == 13) {
configType = WorkDirConfig.TAG_MAIN_MEMORY_ONLY;
}
switch (configType) {
case TAG_MAIN_MEMORY_ONLY:
try {
baseDir = context.getFilesDir().getCanonicalPath();
setWorkDirStrategy(context, WorkDirStrategy.MEMORY);
} catch (IOException e) {
XLog.e(CLASS_NAME,
"error when get work directory:" + e.getMessage());
e.printStackTrace();
}
break;
case TAG_EXTERNAL_MEMORY_CARD_ONLY: {
if (null == (baseDir = XFileUtils.getSdcardPath())) {
return null;
}
workDirName = XConstant.ANDROID_DIR + File.separator
+ XConstant.APP_DATA_DIR_NAME + File.separator
+ workDirName;
setWorkDirStrategy(context, WorkDirStrategy.SDCARD);
}
break;
case TAG_EXTERNAL_MEMORY_CARD_FIRST: {
if (null == (baseDir = XFileUtils.getSdcardPath())) {
try {
baseDir = context.getFilesDir().getCanonicalPath();
setWorkDirStrategy(context, WorkDirStrategy.MEMORY);
} catch (IOException e) {
XLog.e(CLASS_NAME,
"error when getWorkDirectory:" + e.getMessage());
e.printStackTrace();
}
} else {
workDirName = XConstant.ANDROID_DIR + File.separator
+ XConstant.APP_DATA_DIR_NAME + File.separator
+ workDirName;
setWorkDirStrategy(context, WorkDirStrategy.SDCARD);
}
}
break;
}
StringBuffer sb = new StringBuffer();
sb.append(baseDir);
if (!baseDir.endsWith(File.separator)) {
sb.append(File.separatorChar);
}
sb.append(workDirName);
sb.append(File.separatorChar);
File workDir = new File(sb.toString(), XConstant.PRE_INSTALL_SOURCE_ROOT);
if (!workDir.exists()) {
workDir.mkdirs();
XFileUtils.setPermission(XFileUtils.EXECUTABLE_BY_OTHER,
workDir.getAbsolutePath());
}
return sb.toString();
}
    /**
     * Persists the work directory strategy and records whether it changed.
     *
     * @param ctx
     *            the android Context
     * @param wds
     *            the work directory strategy
     */
private void setWorkDirStrategy(Context ctx, WorkDirStrategy wds) {
if (getWorkDirStrategy(ctx) != wds.ordinal()) {
mWorkDirectoryChanged = true;
}
SharedPreferences pref = ctx.getSharedPreferences(
XConstant.PREF_SETTING_FILE_NAME, Context.MODE_WORLD_READABLE
| Context.MODE_WORLD_WRITEABLE);
SharedPreferences.Editor editor = pref.edit();
editor.putInt(XConstant.TAG_WD_STRATEGY, wds.ordinal());
editor.commit();
}
    /**
     * Tells whether the work directory strategy has changed.
     *
     * @return true if the work directory changed
     */
public boolean isWorkDirectoryChanged() {
return mWorkDirectoryChanged;
}
    /**
     * Reads the persisted work directory strategy.
     *
     * @return the stored strategy ordinal, or -1 if none was stored
     */
private int getWorkDirStrategy(Context ctx) {
SharedPreferences pref = ctx.getSharedPreferences(
XConstant.PREF_SETTING_FILE_NAME, Context.MODE_WORLD_READABLE
| Context.MODE_WORLD_WRITEABLE);
int ret = pref.getInt(XConstant.TAG_WD_STRATEGY, -1);
return ret;
}
    /**
     * Shows an alert dialog and exits the application when the user confirms.
     *
     * @param title
     *            the dialog title
     * @param exitMessage
     *            the message to display before exiting
     */
    private void alertExitMessage(String title, String exitMessage,
Context context) {
AlertDialog.Builder builder = new Builder(context);
builder.setTitle(title);
builder.setPositiveButton(
XStrings.getInstance().getString(XStrings.CONFIRM),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
System.exit(TAG_SYSTEM_EXIT_CODE);
}
});
builder.setIcon(android.R.drawable.ic_dialog_info);
builder.setMessage(exitMessage);
builder.show();
Looper.loop();
}
    /** Enables or disables tel links in the web view. */
public void setTelLinkEnabled(boolean telLinkEnabled) {
this.mTelLinkEnabled = telLinkEnabled;
}
    /** Tells whether tel links are enabled in the web view. */
public boolean isTelLinkEnabled() {
return mTelLinkEnabled;
}
    /**
     * Loads the platform string resources.
     *
     * @param context
     *            the android Context
     */
public void loadPlatformStrings(Context context) {
XStrings.getInstance().loadPlatformStrings(context);
}
public String getOfflineCachePath(){
return getSysDataDir() + XConstant.APP_CACHE_PATH;
}
}
|
package org.andidev.webdriverextension.generator.siteaware;
import com.sun.codemodel.ClassType;
import com.sun.codemodel.CodeWriter;
import com.sun.codemodel.JClass;
import com.sun.codemodel.JClassAlreadyExistsException;
import com.sun.codemodel.JCodeModel;
import com.sun.codemodel.JDefinedClass;
import com.sun.codemodel.JExpr;
import com.sun.codemodel.JInvocation;
import com.sun.codemodel.JMethod;
import com.sun.codemodel.JMod;
import com.sun.codemodel.JVar;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import static org.andidev.annotationprocessorutils.ElementUtils.*;
import org.andidev.annotationprocessorutils.JCodeModelUtils;
import org.andidev.annotationprocessorutils.ProcessingEnvironmentCodeWriter;
import org.andidev.webdriverextension.PageObjectUtils;
import org.andidev.webdriverextension.SiteObjectUtils;
import org.andidev.webdriverextension.bot.JUnitBot;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.Builder;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.openqa.selenium.WebDriver;
//@Slf4j
public class SiteAwareBuilder implements Builder<Boolean> {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(SiteAwareBuilder.class);
// Input Elements
private ProcessingEnvironment processingEnv;
private TypeElement siteObjectElement;
private Set<TypeElement> pageObjectElements;
private TypeElement extendedObjectElement;
private JCodeModel codeModel;
// JClasses
private JDefinedClass siteAwareClass;
private JClass extendedObjectClass;
private JClass webDriverClass;
private JClass jUnitBotClass;
private JClass siteObjectClass;
private Set<JClass> pageObjectClasses;
public SiteAwareBuilder(ProcessingEnvironment processingEnv,
TypeElement siteObjectElement, Set<TypeElement> pageObjectElements) {
this.processingEnv = processingEnv;
this.siteObjectElement = siteObjectElement;
this.pageObjectElements = pageObjectElements;
}
public SiteAwareBuilder(ProcessingEnvironment processingEnv,
TypeElement siteObjectElement, Set<TypeElement> pageObjectElements,
TypeElement extendedObjectElement) {
this.processingEnv = processingEnv;
this.siteObjectElement = siteObjectElement;
this.pageObjectElements = pageObjectElements;
this.extendedObjectElement = extendedObjectElement;
}
@Override
public Boolean build() {
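        // Generates the SiteAware class: fields for the site and page objects, constructors mirroring the extended class, and driver wiring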
try {
init();
fields();
constructors();
methods();
generate();
return true;
} catch (IOException ex) {
log.error(ExceptionUtils.getStackTrace(ex));
return false;
} catch (JClassAlreadyExistsException ex) {
log.error(ExceptionUtils.getStackTrace(ex));
return false;
}
}
private void init() throws JClassAlreadyExistsException {
codeModel = new JCodeModel();
if (isExtended()) {
siteAwareClass = codeModel._class(JMod.PUBLIC | JMod.ABSTRACT, getPackageName(siteObjectElement) + ".SiteAware" + getClassName(extendedObjectElement), ClassType.CLASS);
extendedObjectClass = codeModel.ref(getFullClassName(extendedObjectElement));
siteAwareClass._extends(extendedObjectClass);
} else {
siteAwareClass = codeModel._class(JMod.PUBLIC | JMod.ABSTRACT, getPackageName(siteObjectElement) + ".SiteAware", ClassType.CLASS);
}
webDriverClass = codeModel.ref(WebDriver.class);
jUnitBotClass = codeModel.ref(JUnitBot.class);
siteObjectClass = codeModel.ref(siteObjectElement.getQualifiedName().toString());
pageObjectClasses = getCodeModelRefs(pageObjectElements);
}
private void fields() {
// Declare Web Driver
if (!hasSuperDriverField()) {
siteAwareClass.field(JMod.PRIVATE, webDriverClass, "driver");
}
// Declare SiteObject
siteAwareClass.field(JMod.PUBLIC, siteObjectClass, getSiteObjectFieldName());
// Declare PageObjects
for (JClass pageObjectClass : pageObjectClasses) {
siteAwareClass.field(JMod.PUBLIC, pageObjectClass, getPageObjectFieldName(pageObjectClass));
}
}
private void constructors() {
if (isExtended()) {
constructorExtendedConstructors();
} else {
constructorNoArgs();
constructorWebDriver();
}
}
private void methods() {
setPageObjects();
getDriver();
setDriver();
setPageObjectsDriver();
}
private void generate() throws IOException {
CodeWriter codeWriter = new ProcessingEnvironmentCodeWriter(processingEnv);
codeModel.build(codeWriter);
}
    private Set<JClass> getCodeModelRefs(Set<TypeElement> elements) {
        Set<JClass> codeModelRefs = new TreeSet<JClass>();
        for (TypeElement element : elements) {
            codeModelRefs.add(codeModel.ref(element.getQualifiedName().toString()));
        }
        return codeModelRefs;
    }
private void constructorNoArgs() {
// Create No Arguments Constructor
JMethod method = siteAwareClass.constructor(JMod.PUBLIC);
method.body().assign(JExpr.ref(getSiteObjectFieldName()), JExpr._new(siteObjectClass));
method.body().invoke("setPageObjects").arg(JExpr.ref(getSiteObjectFieldName()));
}
private void constructorWebDriver() {
// Create WebDriver Argument Constructor
JMethod method = siteAwareClass.constructor(JMod.PUBLIC);
method.param(webDriverClass, "driver");
method.body().assign(JExpr.ref(getSiteObjectFieldName()), JExpr._new(siteObjectClass));
method.body().invoke("setPageObjects").arg(JExpr.ref(getSiteObjectFieldName()));
method.body().invoke("setDriver").arg(JExpr.ref("driver"));
}
private void constructorExtendedConstructors() {
for (ExecutableElement constructorElement : getConstructors(extendedObjectElement)) {
constructorExtendedConstructor(constructorElement);
}
}
private void constructorExtendedConstructor(ExecutableElement constructorElement) {
log.debug("Creating constructor: {}", getConstructorName(constructorElement));
if (hasParameter(constructorElement, WebDriver.class)) {
log.debug("The constructor {} has class WebDriver class as parameter", getConstructorName(constructorElement));
JMethod method = siteAwareClass.constructor(JMod.PUBLIC);
int i = 1;
for (VariableElement parameter : getParameters(constructorElement)) {
String parameterName = getParamenterName(parameter);
JClass parameterClass = codeModel.ref(parameterName);
String parameterFieldName = StringUtils.uncapitalize("arg" + i++);
method.param(parameterClass, parameterFieldName);
}
JInvocation superInvocation = method.body().invoke("super");
List<JVar> parameterVars = method.params();
JCodeModelUtils.args(superInvocation, parameterVars);
method.body().assign(JExpr.ref(getSiteObjectFieldName()), JExpr._new(siteObjectClass));
method.body().invoke("setPageObjects").arg(JExpr.ref(getSiteObjectFieldName()));
} else {
log.debug("The constructor {} does not have class WebDriver class as parameter", getConstructorName(constructorElement));
JMethod method = siteAwareClass.constructor(JMod.PUBLIC);
int i = 1;
for (VariableElement parameter : getParameters(constructorElement)) {
String parameterName = getParamenterName(parameter);
JClass parameterClass = codeModel.ref(parameterName);
String parameterFieldName = StringUtils.uncapitalize("arg" + i++);
method.param(parameterClass, parameterFieldName);
}
JInvocation superInvocation = method.body().invoke("super");
List<JVar> parameterVars = method.params();
JCodeModelUtils.args(superInvocation, parameterVars);
method.body().assign(JExpr.ref(getSiteObjectFieldName()), JExpr._new(siteObjectClass));
method.body().invoke("setPageObjects").arg(JExpr.ref(getSiteObjectFieldName()));
method.body().invoke("setDriver").arg(JExpr.ref("driver"));
}
}
private void setPageObjects() {
// Create setPageObjects(...)
JMethod method = siteAwareClass.method(JMod.PRIVATE, void.class, "setPageObjects");
method.param(siteObjectClass, getSiteObjectFieldName());
for (JClass pageObjectClass : pageObjectClasses) {
method.body().assign(JExpr.ref(getPageObjectFieldName(pageObjectClass)), JExpr.ref(JExpr.ref(getSiteObjectFieldName()), getPageObjectFieldName(pageObjectClass)));
}
}
private void getDriver() {
// Create getDriver()
if (hasSuperGetDriverMethod()) {
JMethod method = siteAwareClass.method(JMod.PUBLIC, webDriverClass, "getDriver");
method.annotate(Override.class);
method.body()._return(JExpr.ref("driver"));
} else {
JMethod method = siteAwareClass.method(JMod.PUBLIC, webDriverClass, "getDriver");
method.body()._return(JExpr.ref("driver"));
}
}
private void setDriver() {
// Create setDriver(...)
if (hasSuperSetDriverMethod()) {
JMethod method = siteAwareClass.method(JMod.PUBLIC, void.class, "setDriver");
method.param(webDriverClass, "driver");
method.annotate(Override.class);
method.body().invoke(JExpr._super(), "setDriver").arg(JExpr.ref("driver"));
method.body().assign(JExpr._this().ref("driver"), JExpr.ref("driver"));
method.body().staticInvoke(jUnitBotClass, "setDriver").arg(JExpr.ref("driver"));
method.body().invoke(JExpr.ref(getSiteObjectFieldName()), "setDriver").arg(JExpr.ref("driver"));
method.body().invoke("setPageObjectsDriver").arg(JExpr.ref("driver"));
} else {
JMethod method = siteAwareClass.method(JMod.PUBLIC, void.class, "setDriver");
method.param(webDriverClass, "driver");
method.body().assign(JExpr._this().ref("driver"), JExpr.ref("driver"));
method.body().invoke(JExpr.ref(getSiteObjectFieldName()), "setDriver").arg(JExpr.ref("driver"));
method.body().invoke("setPageObjectsDriver").arg(JExpr.ref("driver"));
}
}
private void setPageObjectsDriver() {
// Create setPageObjectsDriver(...)
JMethod method = siteAwareClass.method(JMod.PRIVATE, void.class, "setPageObjectsDriver");
method.param(webDriverClass, "driver");
for (JClass pageObjectClass : pageObjectClasses) {
method.body().invoke(JExpr.ref(getPageObjectFieldName(pageObjectClass)), "setDriver").arg(JExpr.ref("driver"));
}
}
private String getSiteObjectFieldName() {
return SiteObjectUtils.getName(siteObjectElement);
}
private String getPageObjectFieldName(JClass pageObjectClass) {
for (TypeElement pageObjectElement : pageObjectElements) {
if (pageObjectElement.getQualifiedName().toString().equals(pageObjectClass.fullName())) {
return PageObjectUtils.getName(pageObjectElement);
}
}
return null;
}
private boolean isExtended() {
return extendedObjectElement != null;
}
private boolean hasSuperDriverField() {
if (isExtended()) {
log.debug("hasField(extendedObjectElement, \"driver\") = {}", hasField(extendedObjectElement, "driver"));
log.debug("isPrivate(getField(extendedObjectElement, \"driver\") = {}", isPublic(getField(extendedObjectElement, "driver")));
log.debug("isPrivate(getField(extendedObjectElement, \"driver\")) = {}", isProtected(getField(extendedObjectElement, "driver")));
if (hasField(extendedObjectElement, "driver")
&& (isPublic(getField(extendedObjectElement, "driver")) || isProtected(getField(extendedObjectElement, "driver")))) {
log.debug("hasSuperDriverField");
return true;
}
}
return false;
}
private boolean hasSuperGetDriverMethod() {
if (isExtended()) {
if (hasMethod(extendedObjectElement, "getDriver()")
&& (isPublic(getMethod(extendedObjectElement, "getDriver()")) || isProtected(getMethod(extendedObjectElement, "getDriver()")))) {
log.debug("hasSuperGetDriverMethod");
return true;
}
}
return false;
}
private boolean hasSuperSetDriverMethod() {
if (isExtended()) {
if (hasMethod(extendedObjectElement, "setDriver(org.openqa.selenium.WebDriver)")
&& (isPublic(getMethod(extendedObjectElement, "setDriver(org.openqa.selenium.WebDriver)")) || isProtected(getMethod(extendedObjectElement, "setDriver(org.openqa.selenium.WebDriver)")))) {
log.debug("hasSuperSetDriverMethod");
return true;
}
}
return false;
}
}
|
package org.xwiki.extension.repository.internal.core;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.xwiki.component.annotation.Component;
import org.xwiki.component.phase.Initializable;
import org.xwiki.component.phase.InitializationException;
import org.xwiki.extension.CoreExtension;
import org.xwiki.extension.Extension;
import org.xwiki.extension.ExtensionDependency;
import org.xwiki.extension.ExtensionId;
import org.xwiki.extension.ExtensionManagerConfiguration;
import org.xwiki.extension.ExtensionNotFoundException;
import org.xwiki.extension.ResolveException;
import org.xwiki.extension.repository.AbstractExtensionRepository;
import org.xwiki.extension.repository.CoreExtensionRepository;
import org.xwiki.extension.repository.DefaultExtensionRepositoryDescriptor;
import org.xwiki.extension.repository.ExtensionRepositoryManager;
import org.xwiki.extension.repository.internal.RepositoryUtils;
import org.xwiki.extension.repository.result.CollectionIterableResult;
import org.xwiki.extension.repository.result.IterableResult;
import org.xwiki.extension.repository.search.ExtensionQuery;
import org.xwiki.extension.repository.search.SearchException;
import org.xwiki.extension.version.Version;
/**
* Default implementation of {@link CoreExtensionRepository}.
*
* @version $Id$
* @since 4.0M1
*/
@Component
@Singleton
public class DefaultCoreExtensionRepository extends AbstractExtensionRepository
implements CoreExtensionRepository, Initializable
{
/**
* The core extensions.
*/
protected transient Map<String, DefaultCoreExtension> extensions = new ConcurrentHashMap<>();
/**
* The extension associated to the environment.
*/
protected transient DefaultCoreExtension environmentExtension;
/**
* The logger to log.
*/
@Inject
private transient Logger logger;
/**
* Used to scan jars to find extensions.
*/
@Inject
private transient CoreExtensionScanner scanner;
@Inject
private ExtensionRepositoryManager repositoryManager;
@Inject
private ExtensionManagerConfiguration configuration;
/**
* Default constructor.
*/
public DefaultCoreExtensionRepository()
{
super(new DefaultExtensionRepositoryDescriptor("core", "xwiki-core", null));
}
@Override
public boolean isFilterable()
{
return true;
}
@Override
public boolean isSortable()
{
return true;
}
@Override
public void initialize() throws InitializationException
{
try {
this.extensions.putAll(this.scanner.loadExtensions(this));
this.environmentExtension = this.scanner.loadEnvironmentExtension(this);
if (this.environmentExtension != null) {
this.extensions.put(this.environmentExtension.getId().getId(), this.environmentExtension);
}
// Put extensions features in the map
for (DefaultCoreExtension extension : this.extensions.values()) {
for (ExtensionId feature : extension.getExtensionFeatures()) {
this.extensions.put(feature.getId(), extension);
}
}
// Update core extensions only if there is any remote repository and it's not disabled
if (this.configuration.resolveCoreExtensions() && !this.repositoryManager.getRepositories().isEmpty()) {
// Start a background thread to get more details about the found extensions
Thread thread = new Thread(new Runnable()
{
@Override
public void run()
{
DefaultCoreExtensionRepository.this.scanner
.updateExtensions(DefaultCoreExtensionRepository.this.extensions.values());
}
});
thread.setPriority(Thread.MIN_PRIORITY);
thread.setDaemon(true);
thread.setName("Core extension repository updater");
thread.start();
}
} catch (Exception e) {
this.logger.warn("Failed to load core extensions", e);
}
}
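// Illustration of the feature aliasing done above (ids are hypothetical, not taken from a real distribution):
// if a core extension "org.example:my-extension/1.0" declares the feature "org.example:my-extension-legacy",
// the map ends up containing both keys pointing to the same DefaultCoreExtension instance, so
// getCoreExtension("org.example:my-extension-legacy") resolves to "org.example:my-extension/1.0".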
// Repository
@Override
public CoreExtension resolve(ExtensionId extensionId) throws ResolveException
{
CoreExtension extension = getCoreExtension(extensionId.getId());
if (extension == null
|| (extensionId.getVersion() != null && !extension.getId().getVersion().equals(extensionId.getVersion()))) {
throw new ExtensionNotFoundException("Could not find extension [" + extensionId + "]");
}
return extension;
}
@Override
public CoreExtension resolve(ExtensionDependency extensionDependency) throws ResolveException
{
CoreExtension extension = getCoreExtension(extensionDependency.getId());
if (extension == null
|| (!extensionDependency.getVersionConstraint().containsVersion(extension.getId().getVersion()))) {
throw new ExtensionNotFoundException("Could not find extension dependency [" + extensionDependency + "]");
}
return extension;
}
@Override
public boolean exists(ExtensionId extensionId)
{
Extension extension = getCoreExtension(extensionId.getId());
if (extension == null
|| (extensionId.getVersion() != null && !extension.getId().getVersion().equals(extensionId.getVersion()))) {
return false;
}
return true;
}
@Override
public boolean exists(String feature)
{
return this.extensions.containsKey(feature);
}
@Override
public IterableResult<Version> resolveVersions(String id, int offset, int nb) throws ResolveException
{
Extension extension = getCoreExtension(id);
if (extension == null) {
throw new ExtensionNotFoundException("Could not find extension with id [" + id + "]");
}
Collection<Version> versions;
if (nb == 0 || offset > 0) {
versions = Collections.emptyList();
} else {
versions = Arrays.asList(extension.getId().getVersion());
}
return new CollectionIterableResult<Version>(1, offset, versions);
}
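// Note: a core extension exposes exactly one version, so the result above always reports a total of 1, and
// any positive offset (or nb == 0) yields an empty page. For example, resolveVersions(id, 0, 10) returns a
// single-element result while resolveVersions(id, 1, 10) returns an empty one.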
// CoreExtensionRepository
@Override
public CoreExtension getEnvironmentExtension()
{
return this.environmentExtension;
}
@Override
public int countExtensions()
{
return this.extensions.size();
}
@Override
public Collection<CoreExtension> getCoreExtensions()
{
return new ArrayList<CoreExtension>(this.extensions.values());
}
@Override
public CoreExtension getCoreExtension(String feature)
{
if (feature == null) {
return null;
}
return this.extensions.get(feature);
}
// Searchable
@Override
public IterableResult<Extension> search(String pattern, int offset, int nb) throws SearchException
{
return (IterableResult) RepositoryUtils.searchInCollection(pattern, offset, nb, this.extensions.values(), true);
}
@Override
public IterableResult<Extension> search(ExtensionQuery query) throws SearchException
{
return (IterableResult) RepositoryUtils.searchInCollection(query, this.extensions.values(), true);
}
}
|
package org.xwiki.extension.repository.internal;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.xwiki.component.annotation.Component;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.extension.Extension;
import org.xwiki.extension.ExtensionDependency;
import org.xwiki.extension.ExtensionId;
import org.xwiki.extension.ResolveException;
import org.xwiki.extension.repository.ExtensionRepository;
import org.xwiki.extension.repository.ExtensionRepositoryException;
import org.xwiki.extension.repository.ExtensionRepositoryFactory;
import org.xwiki.extension.repository.ExtensionRepositoryId;
import org.xwiki.extension.repository.ExtensionRepositoryManager;
import org.xwiki.extension.repository.SearchException;
import org.xwiki.extension.repository.Searchable;
/**
* Default implementation of {@link ExtensionRepositoryManager}.
*
* @version $Id$
*/
@Component
@Singleton
public class DefaultExtensionRepositoryManager implements ExtensionRepositoryManager
{
/**
* Used to lookup {@link ExtensionRepositoryFactory}s.
*/
@Inject
private ComponentManager componentManager;
/**
* The logger to log.
*/
@Inject
private Logger logger;
/**
* The registered repositories.
*/
private Map<String, ExtensionRepository> repositories = new ConcurrentHashMap<String, ExtensionRepository>();
@Override
public ExtensionRepository addRepository(ExtensionRepositoryId repositoryId) throws ExtensionRepositoryException
{
ExtensionRepository repository;
try {
ExtensionRepositoryFactory repositoryFactory =
this.componentManager.lookup(ExtensionRepositoryFactory.class, repositoryId.getType());
repository = repositoryFactory.createRepository(repositoryId);
addRepository(repository);
} catch (ComponentLookupException e) {
throw new ExtensionRepositoryException("Unsupported repository type [" + repositoryId.getType() + "]", e);
}
return repository;
}
@Override
public void addRepository(ExtensionRepository repository)
{
this.repositories.put(repository.getId().getId(), repository);
}
@Override
public void removeRepository(String repositoryId)
{
this.repositories.remove(repositoryId);
}
@Override
public ExtensionRepository getRepository(String repositoryId)
{
return this.repositories.get(repositoryId);
}
@Override
public Extension resolve(ExtensionId extensionId) throws ResolveException
{
Extension extension = null;
for (ExtensionRepository repository : this.repositories.values()) {
try {
extension = repository.resolve(extensionId);
return extension;
} catch (ResolveException e) {
if (this.logger.isDebugEnabled()) {
this.logger.debug("Could not find extension [{}] in repository [{}]", new Object[] {extensionId,
repository.getId(), e});
}
}
}
throw new ResolveException(MessageFormat.format("Could not find extension [{0}]", extensionId));
}
@Override
public Extension resolve(ExtensionDependency extensionDependency) throws ResolveException
{
Extension extension = null;
for (ExtensionRepository repository : this.repositories.values()) {
try {
extension = repository.resolve(extensionDependency);
return extension;
} catch (ResolveException e) {
if (this.logger.isDebugEnabled()) {
this.logger.debug("Could not find extension dependency [{}] in repository [{}]", new Object[] {
extensionDependency, repository.getId(), e});
}
}
}
throw new ResolveException(MessageFormat.format("Could not find extension dependency [{0}]",
extensionDependency));
}
@Override
public List<Extension> search(String pattern, int offset, int nb)
{
List<Extension> extensions = new ArrayList<Extension>(nb > 0 ? nb : 0);
int properOffset = offset > 0 ? offset : 0;
// A local index would avoid things like this...
int currentOffset = 0;
for (ExtensionRepository repository : this.repositories.values()) {
currentOffset = search(extensions, repository, pattern, properOffset, nb, currentOffset);
}
return extensions;
}
/**
* Search one repository.
*
* @param extensions the extensions
* @param repository the repository to search
* @param pattern the pattern to search
* @param offset the offset from where to start returning search results
* @param nb the maximum number of search results to return
* @param previousCurrentOffset the number of matching extensions already skipped in the previously searched repositories
* @return the updated current offset, i.e. the number of matching extensions skipped or returned so far
*/
private int search(List<Extension> extensions, ExtensionRepository repository, String pattern, int offset, int nb,
int previousCurrentOffset)
{
int currentOffset = previousCurrentOffset;
int currentNb = nb - extensions.size();
if (nb > 0 && currentNb == 0) {
return currentOffset;
}
if (repository instanceof Searchable) {
Searchable searchableRepository = (Searchable) repository;
List<Extension> foundExtensions;
try {
foundExtensions = searchableRepository.search(pattern, 0, offset == 0 ? currentNb : -1);
if (!foundExtensions.isEmpty()) {
if (offset - currentOffset >= foundExtensions.size()) {
currentOffset += foundExtensions.size();
} else {
int fromIndex = offset - currentOffset;
int toIndex = fromIndex + currentNb;
extensions.addAll(foundExtensions.subList(fromIndex,
(toIndex <= 0 || toIndex > foundExtensions.size()) ? foundExtensions.size() : toIndex));
currentOffset = offset;
}
}
} catch (SearchException e) {
this.logger.warn("Failed to search in repository [" + repository.getId() + "]", e);
}
}
return currentOffset;
}
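// Worked example (hypothetical numbers): with offset = 6 and nb = 2, a first repository holding 5 matches is
// skipped entirely (currentOffset becomes 5); in the next repository holding 3 matches, fromIndex = 1 and
// toIndex = 3, so its last two matches are returned and currentOffset becomes 6. Once nb results have been
// collected, subsequent repositories return immediately.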
}
|
package org.xwiki.xclass.test.po;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import org.xwiki.test.ui.po.InlinePage;
import org.xwiki.test.ui.po.ViewPage;
import org.xwiki.test.ui.po.editor.ClassEditPage;
/**
* Represents the sheet used to display information about a XWiki class.
*
* @version $Id$
* @since 3.4M1
*/
public class ClassSheetPage extends ViewPage
{
/**
* The link to view the class template.
*/
@FindBy(partialLinkText = "View the template page")
private WebElement templateLink;
/**
* The link to view the class sheet.
*/
@FindBy(partialLinkText = "View the sheet page")
private WebElement sheetLink;
/**
* The link to define the class. This is displayed only if the class doesn't have any properties.
*/
// NOTE: the locator text below is assumed for illustration; adjust it to match the actual sheet wording.
@FindBy(partialLinkText = "define the class")
private WebElement defineClassLink;
/**
* The link to the class editor. This is displayed only if the class has properties.
*/
@FindBy(linkText = "add or modify the class properties")
private WebElement editClassLink;
/**
* The button used to create the class sheet.
*/
@FindBy(xpath = "//input[@class = 'button' and @value = 'Create the sheet']")
private WebElement createSheetButton;
/**
* The link used to bind the class to its sheet.
*/
// NOTE: the locator text below is assumed for illustration; adjust it to match the actual sheet wording.
@FindBy(partialLinkText = "bind the class to the sheet")
private WebElement bindSheetLink;
/**
* The button used to create the class template.
*/
@FindBy(xpath = "//input[@class = 'button' and @value = 'Create the template']")
private WebElement createTemplateButton;
/**
* The link used to add an instance of the class to the template document.
*/
// NOTE: the locator text below is assumed for illustration; adjust it to match the actual sheet wording.
@FindBy(partialLinkText = "add the object to the template")
private WebElement addObjectToTemplateLink;
/**
* The text input used to specify the space where to create a new document.
*/
@FindBy(id = "spaceName")
private WebElement spaceNameInput;
/**
* The text input used to specify the name of the document.
*/
@FindBy(id = "docName")
private WebElement documentNameInput;
/**
* The button used to create a new document based on the class template.
*/
@FindBy(xpath = "//input[@class = 'button' and @value = 'Create this document']")
private WebElement createDocumentButton;
/**
* Clicks on the template link and returns the template page.
*
* @return the page that represents the class template
*/
public ViewPage clickTemplateLink()
{
templateLink.click();
return new ViewPage();
}
/**
* Clicks on the link to view the class sheet.
*
* @return the page that represents the class sheet
*/
public ViewPage clickSheetLink()
{
sheetLink.click();
return new ViewPage();
}
/**
* Clicks on the link to define the class. This link is visible only if the class has no properties yet.
*
* @return the class editor
*/
public ClassEditPage clickDefineClassLink()
{
defineClassLink.click();
return new ClassEditPage();
}
/**
* Clicks on the link to edit the class. This link is visible only if the class has properties.
*
* @return the class editor
*/
public ClassEditPage clickEditClassLink()
{
editClassLink.click();
return new ClassEditPage();
}
/**
* @param name the property name
* @param prettyName the property pretty name
* @param type the property type
* @return {@code true} if the sheet lists the specified property, {@code false} otherwise
*/
public boolean hasProperty(String name, String prettyName, String type)
{
// Pretty Name (Name: Type)
String xpath = String.format("//li[. = '%s (%s: %s)']", prettyName, name, type);
return getDriver().findElementsWithoutWaiting(By.xpath(xpath)).size() == 1;
}
/**
* Clicks on the button to create a sheet for the class that is being displayed.
*
* @return the current page, after it is reloaded
*/
public ClassSheetPage clickCreateSheetButton()
{
createSheetButton.click();
// Create a new instance because the page is reloaded.
return new ClassSheetPage();
}
/**
* Clicks on the link to bind the class to its sheet.
*
* @return the current page, after it is reloaded
*/
public ClassSheetPage clickBindSheetLink()
{
bindSheetLink.click();
// Create a new instance because the page is reloaded.
return new ClassSheetPage();
}
/**
* Clicks on the button to create the class template.
*
* @return the current page, after it is reloaded
*/
public ClassSheetPage clickCreateTemplateButton()
{
createTemplateButton.click();
return new ClassSheetPage();
}
/**
* Clicks on the link to add an instance of the class to the template document.
*
* @return the current page, after it is reloaded
*/
public ClassSheetPage clickAddObjectToTemplateLink()
{
addObjectToTemplateLink.click();
return new ClassSheetPage();
}
/**
* @return the input used to specify the name of the space where to create the new document
*/
public WebElement getSpaceNameInput()
{
return spaceNameInput;
}
/**
* @return the input used to specify the name of the new document
*/
public WebElement getDocumentNameInput()
{
return documentNameInput;
}
/**
* Clicks the button to create a new document based on the class template.
*
* @return the in-line edit mode for the new document
*/
public InlinePage clickCreateDocumentButton()
{
createDocumentButton.click();
return new InlinePage();
}
/**
* Creates a new document with the specified name, in the specified space, based on the class template.
*
* @param spaceName the name of the space where to create the new document
* @param pageName the name of the new document
* @return the in-line mode for the new document
*/
public InlinePage createNewDocument(String spaceName, String pageName)
{
spaceNameInput.clear();
spaceNameInput.sendKeys(spaceName);
documentNameInput.clear();
documentNameInput.sendKeys(pageName);
return clickCreateDocumentButton();
}
/**
* @param documentName the name of a document
* @return {@code true} if the specified document is listed as having an object of the class being viewed,
* {@code false} otherwise
*/
public boolean hasDocument(String documentName)
{
// Make sure we look inside the page content and not in some panel like My Recent Modifications.
String xpath = String.format("//div[@id = 'xwikicontent']//li//a[. = '%s']", documentName);
return getDriver().findElementsWithoutWaiting(By.xpath(xpath)).size() == 1;
}
@Override
public ClassSheetPage waitUntilPageIsLoaded()
{
getDriver().waitUntilElementIsVisible(By.id("HTheclasstemplate"));
return this;
}
}
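// Minimal usage sketch (space, page and property names below are placeholders):
//
//   ClassSheetPage sheet = new ClassSheetPage().waitUntilPageIsLoaded();
//   if (!sheet.hasProperty("title", "Title", "String")) {
//       sheet.clickEditClassLink();
//   }
//   InlinePage newDoc = sheet.createNewDocument("Sandbox", "MyDocument");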
|
package org.xwiki.observation.remote.internal.jgroups;
import java.io.IOException;
import java.io.InputStream;
import java.lang.management.ManagementFactory;
import java.text.MessageFormat;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import javax.management.MBeanServer;
import org.jgroups.ChannelException;
import org.jgroups.JChannel;
import org.jgroups.Message;
import org.jgroups.conf.ConfiguratorFactory;
import org.jgroups.conf.ProtocolStackConfigurator;
import org.jgroups.conf.XmlConfigurator;
import org.jgroups.jmx.JmxConfigurator;
import org.slf4j.Logger;
import org.xwiki.component.annotation.Component;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.container.ApplicationContext;
import org.xwiki.container.Container;
import org.xwiki.observation.remote.NetworkAdapter;
import org.xwiki.observation.remote.RemoteEventData;
import org.xwiki.observation.remote.RemoteEventException;
import org.xwiki.observation.remote.jgroups.JGroupsReceiver;
/**
* JGroups based implementation of {@link NetworkAdapter}.
*
* @version $Id$
* @since 2.0RC1
*/
@Component
@Named("jgroups")
@Singleton
public class JGroupsNetworkAdapter implements NetworkAdapter
{
/**
* Relative path where the JGroups channel configuration files are located.
*/
public static final String CONFIGURATION_PATH = "observation/remote/jgroups/";
/**
* Used to lookup the receiver corresponding to the channel identifier.
*/
@Inject
private ComponentManager componentManager;
/**
* The logger to log.
*/
@Inject
private Logger logger;
/**
* The network channels.
*/
private Map<String, JChannel> channels = new ConcurrentHashMap<String, JChannel>();
/**
* {@inheritDoc}
*
* @see org.xwiki.observation.remote.NetworkAdapter#send(org.xwiki.observation.remote.RemoteEventData)
*/
public void send(RemoteEventData remoteEvent)
{
this.logger.debug("Send JGroups remote event [" + remoteEvent + "]");
// Send the message to the whole group
Message message = new Message(null, null, remoteEvent);
// Send message to jgroups channels
for (Map.Entry<String, JChannel> entry : this.channels.entrySet()) {
try {
entry.getValue().send(message);
} catch (Exception e) {
this.logger.error("Failed to send message [" + remoteEvent + "] to the channel [" + entry.getKey()
+ "]", e);
}
}
}
/**
* {@inheritDoc}
*
* @see org.xwiki.observation.remote.NetworkAdapter#startChannel(java.lang.String)
*/
public void startChannel(String channelId) throws RemoteEventException
{
if (this.channels.containsKey(channelId)) {
throw new RemoteEventException(MessageFormat.format("Channel [{0}] already started", channelId));
}
JChannel channel;
try {
channel = createChannel(channelId);
channel.connect("event");
this.channels.put(channelId, channel);
} catch (Exception e) {
throw new RemoteEventException("Failed to create channel [" + channelId + "]", e);
}
// Register the channel against the JMX Server
try {
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
JmxConfigurator.registerChannel(channel, mbs, channel.getClusterName());
} catch (Exception e) {
this.logger.warn("Failed to register channel [" + channelId + "] against the JMX Server", e);
}
this.logger.info("Channel [{}] started", channelId);
}
/**
* {@inheritDoc}
*
* @see org.xwiki.observation.remote.NetworkAdapter#stopChannel(java.lang.String)
*/
public void stopChannel(String channelId) throws RemoteEventException
{
JChannel channel = this.channels.get(channelId);
if (channel == null) {
throw new RemoteEventException(MessageFormat.format("Channel [{0}] is not started", channelId));
}
channel.close();
this.channels.remove(channelId);
// Unregister the channel from the JMX Server
try {
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
JmxConfigurator.unregister(channel, mbs, channel.getClusterName());
} catch (Exception e) {
this.logger.warn("Failed to unregister channel [" + channelId + "] from the JMX Server", e);
}
this.logger.info("Channel [{}] stopped", channelId);
}
/**
* Create a new channel.
*
* @param channelId the identifier of the channel to create
* @return the new channel
* @throws ComponentLookupException failed to get default {@link JGroupsReceiver}
* @throws ChannelException failed to create channel
*/
private JChannel createChannel(String channelId) throws ComponentLookupException, ChannelException
{
// load configuration
ProtocolStackConfigurator channelConf;
try {
channelConf = loadChannelConfiguration(channelId);
} catch (IOException e) {
throw new ChannelException("Failed to load configuration for the channel [" + channelId + "]", e);
}
// get Receiver
JGroupsReceiver channelReceiver;
try {
channelReceiver = this.componentManager.lookup(JGroupsReceiver.class, channelId);
} catch (ComponentLookupException e) {
channelReceiver = this.componentManager.lookup(JGroupsReceiver.class);
}
// create channel
JChannel channel = new JChannel(channelConf);
channel.setReceiver(channelReceiver);
channel.setOpt(JChannel.LOCAL, false);
return channel;
}
/**
* Load channel configuration.
*
* @param channelId the identifier of the channel
* @return the channel configuration
* @throws IOException failed to load configuration file
* @throws ChannelException failed to create the channel configuration
*/
private ProtocolStackConfigurator loadChannelConfiguration(String channelId) throws IOException, ChannelException
{
String channelFile = channelId + ".xml";
String path = "/WEB-INF/" + CONFIGURATION_PATH + channelFile;
InputStream is = null;
try {
Container container = this.componentManager.lookup(Container.class);
ApplicationContext applicationContext = container.getApplicationContext();
if (applicationContext != null) {
is = applicationContext.getResourceAsStream(path);
}
} catch (ComponentLookupException e) {
this.logger.debug("Failed to lookup the Container component", e);
}
if (is == null) {
// Fall back on the standard JGroups configuration locations
is = ConfiguratorFactory.getConfigStream(channelFile);
if (is == null && !JChannel.DEFAULT_PROTOCOL_STACK.equals(channelFile)) {
// Fallback on default JGroups configuration
is = ConfiguratorFactory.getConfigStream(JChannel.DEFAULT_PROTOCOL_STACK);
}
}
return XmlConfigurator.getInstance(is);
}
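// Configuration lookup order, as implemented above: 1) the servlet resource
// "/WEB-INF/observation/remote/jgroups/<channelId>.xml", 2) the standard JGroups locations for
// "<channelId>.xml" (classpath, etc.), 3) the default JGroups protocol stack. For example, a channel named
// "event" is first looked up as "/WEB-INF/observation/remote/jgroups/event.xml".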
/**
* {@inheritDoc}
*
* @see org.xwiki.observation.remote.NetworkAdapter#stopAllChannels()
*/
public void stopAllChannels() throws RemoteEventException
{
for (Map.Entry<String, JChannel> channelEntry : this.channels.entrySet()) {
channelEntry.getValue().close();
}
this.channels.clear();
this.logger.info("All channels stopped");
}
}
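// Minimal usage sketch (the channel name is a placeholder; the adapter is normally looked up through the
// ComponentManager under the "jgroups" hint):
//
//   NetworkAdapter adapter = componentManager.lookup(NetworkAdapter.class, "jgroups");
//   adapter.startChannel("event");
//   adapter.send(remoteEventData); // a previously populated RemoteEventData
//   adapter.stopChannel("event");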
|
/**
* Generated with Acceleo
*/
package org.wso2.developerstudio.eclipse.gmf.esb.parts.forms;
import java.io.IOException;
import java.net.URISyntaxException;
// Start of user code for imports
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.eclipse.emf.common.util.BasicEList;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.Enumerator;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.util.EcoreAdapterFactory;
import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider;
import org.eclipse.emf.eef.runtime.EEFRuntimePlugin;
import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.part.impl.SectionPropertiesEditingPart;
import org.eclipse.emf.eef.runtime.ui.parts.PartComposer;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionStep;
import org.eclipse.emf.eef.runtime.ui.utils.EditingUtils;
import org.eclipse.emf.eef.runtime.ui.widgets.EEFFeatureEditorDialog;
import org.eclipse.emf.eef.runtime.ui.widgets.EMFComboViewer;
import org.eclipse.emf.eef.runtime.ui.widgets.FormUtils;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable.ReferencesTableListener;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableContentProvider;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings;
import org.eclipse.jface.viewers.ArrayContentProvider;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.browser.Browser;
import org.eclipse.swt.custom.CLabel;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.forms.widgets.Form;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.forms.widgets.Section;
import org.wso2.developerstudio.eclipse.gmf.esb.CallTemplateParameter;
import org.wso2.developerstudio.eclipse.gmf.esb.CloudConnectorOperation;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.impl.CloudConnectorOperationImpl;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.presentation.ConnectorParameterRenderer;
import org.wso2.developerstudio.eclipse.gmf.esb.presentation.ConnectorSchemaHolder;
import org.wso2.developerstudio.eclipse.gmf.esb.presentation.EEFPropertyViewUtil;
import org.wso2.developerstudio.eclipse.gmf.esb.presentation.ReferenceGroup;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
// End of user code
public class CloudConnectorOperationPropertiesEditionPartForm extends SectionPropertiesEditingPart implements IFormPropertiesEditionPart, CloudConnectorOperationPropertiesEditionPart {
protected Text description;
protected Text commentsList;
protected Button editCommentsList;
protected EList commentsListList;
protected Button reverse;
protected ReferencesTable connectorParameters;
protected List<ViewerFilter> connectorParametersBusinessFilters = new ArrayList<ViewerFilter>();
protected List<ViewerFilter> connectorParametersFilters = new ArrayList<ViewerFilter>();
protected Text configRef;
protected Text connectorName;
protected Text operationName;
protected Text cloudConnectorName;
protected Text availableConfigs;
protected EMFComboViewer parameterEditorType;
protected Browser browser;
// Start of user code
protected Composite propertiesGroup;
protected Control[] reverseElements;
protected Control[] commentsElements;
protected Control[] configRefElements;
protected Control[] availableConfigsElements;
protected Control[] connectorParamElements;
protected Composite filterConfigSubPropertiesGroup;
private boolean hasConnectorSchema;
protected CLabel infoLabel;
private static IDeveloperStudioLog log = Logger.getLog(EEFPropertyViewUtil.PLUGIN_ID);
// End of user code
/**
* For {@link ISection} use only.
*/
public CloudConnectorOperationPropertiesEditionPartForm() { super(); }
/**
* Default constructor
* @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
*
*/
public CloudConnectorOperationPropertiesEditionPartForm(IPropertiesEditionComponent editionComponent) {
super(editionComponent);
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
* createFigure(org.eclipse.swt.widgets.Composite, org.eclipse.ui.forms.widgets.FormToolkit)
*
* @generated NOT
*/
public Composite createFigure(final Composite parent, final FormToolkit widgetFactory) {
String schemaName = EEFPropertyViewUtil.generateSchemaName(propertiesEditionComponent);
if(ConnectorSchemaHolder.getInstance().hasConnectorOperationSchema(schemaName)
&& (!EEFPropertyViewUtil.isLegacyPropertiesViewSet())) {
hasConnectorSchema = true;
} else {
hasConnectorSchema = false;
}
Form form = widgetFactory.createForm(parent);
view = form.getBody();
GridLayout layout = new GridLayout();
layout.numColumns = 1;
view.setLayout(layout);
createControls(widgetFactory, view);
return form;
}
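// Rendering mode summary (derived from the check above): when a generated connector schema is available and
// the legacy properties view is not forced, the part switches to the schema-driven renderer
// (hasConnectorSchema == true) and only the connectorParameters step is composed; otherwise the classic EEF
// widgets (parameter editor type, config reference, available configs, parameters table, description) are used.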
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
* createControls(org.eclipse.ui.forms.widgets.FormToolkit, org.eclipse.swt.widgets.Composite)
*
*/
public void createControls(final FormToolkit widgetFactory, Composite view) {
CompositionSequence cloudConnectorOperationStep = new BindingCompositionSequence(propertiesEditionComponent);
CompositionStep propertiesStep = cloudConnectorOperationStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.class);
if(hasConnectorSchema) {
propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters);
} else {
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.connectorName);
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.operationName);
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.cloudConnectorName);
propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.parameterEditorType);
propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.configRef);
propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.availableConfigs);
propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters);
propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.description);
}
//propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.commentsList);
//propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.reverse);
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.connectorName);
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.operationName);
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.cloudConnectorName);
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.parameterEditorType);
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.configRef);
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.availableConfigs);
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters);
// propertiesStep.addStep(EsbViewsRepository.CloudConnectorOperation.Properties.description);
composer = new PartComposer(cloudConnectorOperationStep) {
@Override
public Composite addToPart(Composite parent, Object key) {
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.class) {
return createPropertiesGroup(widgetFactory, parent);
}
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.description) {
return createDescriptionText(widgetFactory, parent);
}
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.commentsList) {
return createCommentsListMultiValuedEditor(widgetFactory, parent);
}
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.reverse) {
return createReverseCheckbox(widgetFactory, parent);
}
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters) {
return createConnectorParametersTableComposition(widgetFactory, parent);
}
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.configRef) {
return createConfigRefText(widgetFactory, parent);
}
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.connectorName) {
return createConnectorNameText(widgetFactory, parent);
}
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.operationName) {
return createOperationNameText(widgetFactory, parent);
}
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.cloudConnectorName) {
return createCloudConnectorNameText(widgetFactory, parent);
}
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.availableConfigs) {
return createAvailableConfigsText(widgetFactory, filterConfigSubPropertiesGroup);
}
if (key == EsbViewsRepository.CloudConnectorOperation.Properties.parameterEditorType) {
return createParameterEditorTypeEMFComboViewer(widgetFactory, parent);
}
return parent;
}
};
composer.compose(view);
}
/**
* @generated NOT
*/
protected Composite createPropertiesGroup(FormToolkit widgetFactory, final Composite parent) {
if(hasConnectorSchema) {
propertiesGroup = new Composite(parent, SWT.TRANSPARENT);
propertiesGroup.setLocation(0, 0);
GridLayout propertiesGroupLayout = new GridLayout();
propertiesGroupLayout.numColumns = 1;
propertiesGroupLayout.horizontalSpacing = 0;
propertiesGroup.setLayout(propertiesGroupLayout);
GridData propertiesSectionData = new GridData(GridData.FILL_HORIZONTAL);
propertiesSectionData.horizontalSpan = 3;
propertiesGroup.setLayoutData(propertiesSectionData);
} else {
Section propertiesSection = widgetFactory.createSection(parent, Section.TITLE_BAR | Section.TWISTIE | Section.EXPANDED);
propertiesSection.setText(EsbMessages.CloudConnectorOperationPropertiesEditionPart_PropertiesGroupLabel);
GridData propertiesSectionData = new GridData(GridData.FILL_HORIZONTAL);
propertiesSectionData.horizontalSpan = 3;
propertiesSection.setLayoutData(propertiesSectionData);
propertiesGroup = widgetFactory.createComposite(propertiesSection);
GridLayout propertiesGroupLayout = new GridLayout();
propertiesGroupLayout.numColumns = 3;
propertiesGroup.setLayout(propertiesGroupLayout);
propertiesSection.setClient(propertiesGroup);
}
return propertiesGroup;
}
protected Composite createDescriptionText(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.CloudConnectorOperation.Properties.description, EsbMessages.CloudConnectorOperationPropertiesEditionPart_DescriptionLabel);
description = widgetFactory.createText(parent, ""); //$NON-NLS-1$
description.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(parent);
GridData descriptionData = new GridData(GridData.FILL_HORIZONTAL);
description.setLayoutData(descriptionData);
description.addFocusListener(new FocusAdapter() {
/**
* @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void focusLost(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.description,
PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, description.getText()));
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.description,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
null, description.getText()));
}
}
/**
* @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
*/
@Override
public void focusGained(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
null,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
null, null));
}
}
});
description.addKeyListener(new KeyAdapter() {
/**
* @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void keyPressed(KeyEvent e) {
if (e.character == SWT.CR) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.description, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, description.getText()));
}
}
});
EditingUtils.setID(description, EsbViewsRepository.CloudConnectorOperation.Properties.description);
EditingUtils.setEEFtype(description, "eef::Text"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloudConnectorOperation.Properties.description, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createDescriptionText
// End of user code
return parent;
}
/**
* @generated NOT
*/
protected Composite createCommentsListMultiValuedEditor(FormToolkit widgetFactory, Composite parent) {
Control[] previousControls = propertiesGroup.getChildren();
commentsList = widgetFactory.createText(parent, "", SWT.READ_ONLY); //$NON-NLS-1$
GridData commentsListData = new GridData(GridData.FILL_HORIZONTAL);
commentsListData.horizontalSpan = 2;
commentsList.setLayoutData(commentsListData);
EditingUtils.setID(commentsList, EsbViewsRepository.CloudConnectorOperation.Properties.commentsList);
EditingUtils.setEEFtype(commentsList, "eef::MultiValuedEditor::field"); //$NON-NLS-1$
editCommentsList = widgetFactory.createButton(parent, getDescription(EsbViewsRepository.CloudConnectorOperation.Properties.commentsList, EsbMessages.CloudConnectorOperationPropertiesEditionPart_CommentsListLabel), SWT.NONE);
GridData editCommentsListData = new GridData();
editCommentsList.setLayoutData(editCommentsListData);
editCommentsList.addSelectionListener(new SelectionAdapter() {
/**
* {@inheritDoc}
*
* @see org.eclipse.swt.events.SelectionAdapter#widgetSelected(org.eclipse.swt.events.SelectionEvent)
*
*/
public void widgetSelected(SelectionEvent e) {
EEFFeatureEditorDialog dialog = new EEFFeatureEditorDialog(
commentsList.getShell(), "CloudConnectorOperation", new AdapterFactoryLabelProvider(adapterFactory), //$NON-NLS-1$
commentsListList, EsbPackage.eINSTANCE.getEsbElement_CommentsList().getEType(), null,
false, true,
null, null);
if (dialog.open() == Window.OK) {
commentsListList = dialog.getResult();
if (commentsListList == null) {
commentsListList = new BasicEList();
}
commentsList.setText(commentsListList.toString());
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.commentsList, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, new BasicEList(commentsListList)));
setHasChanged(true);
}
}
});
EditingUtils.setID(editCommentsList, EsbViewsRepository.CloudConnectorOperation.Properties.commentsList);
EditingUtils.setEEFtype(editCommentsList, "eef::MultiValuedEditor::browsebutton"); //$NON-NLS-1$
// Start of user code for createCommentsListMultiValuedEditor
Control[] newControls = propertiesGroup.getChildren();
commentsElements = EEFPropertyViewUtil.getTableElements(previousControls, newControls);
// End of user code
return parent;
}
/**
* @generated NOT
*/
protected Composite createReverseCheckbox(FormToolkit widgetFactory, Composite parent) {
Control[] previousControls = propertiesGroup.getChildren();
reverse = widgetFactory.createButton(parent, getDescription(EsbViewsRepository.CloudConnectorOperation.Properties.reverse, EsbMessages.CloudConnectorOperationPropertiesEditionPart_ReverseLabel), SWT.CHECK);
reverse.addSelectionListener(new SelectionAdapter() {
/**
* {@inheritDoc}
*
* @see org.eclipse.swt.events.SelectionAdapter#widgetSelected(org.eclipse.swt.events.SelectionEvent)
*
*/
public void widgetSelected(SelectionEvent e) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.reverse, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, new Boolean(reverse.getSelection())));
}
});
GridData reverseData = new GridData(GridData.FILL_HORIZONTAL);
reverseData.horizontalSpan = 2;
reverse.setLayoutData(reverseData);
EditingUtils.setID(reverse, EsbViewsRepository.CloudConnectorOperation.Properties.reverse);
EditingUtils.setEEFtype(reverse, "eef::Checkbox"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloudConnectorOperation.Properties.reverse, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createReverseCheckbox
Control[] newControls = propertiesGroup.getChildren();
reverseElements = EEFPropertyViewUtil.getTableElements(previousControls, newControls);
// End of user code
return parent;
}
/**
* @param widgetFactory the form toolkit used to create the widgets
* @param parent the parent composite
*/
protected Composite createConnectorParametersTableComposition(FormToolkit widgetFactory, Composite parent) {
// Start of user code for createConnectorParametersTableComposition
Control[] previousControls = propertiesGroup.getChildren();
if(hasConnectorSchema) {
CloudConnectorOperationImpl connectorObject = (CloudConnectorOperationImpl)propertiesEditionComponent.getEditingContext().getEObject();
String schemaName = connectorObject.getConnectorName().split("connector")[0] + "-" + connectorObject.getOperationName();
this.connectorParameters = new ReferenceGroup(getDescription(EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters, null),
new ConnectorParameterRenderer(propertiesEditionComponent, this), schemaName);
this.connectorParameters.createControls(parent, widgetFactory);
connectorParameters.setID(EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters);
connectorParameters.setEEFType("eef::AdvancedTableComposition");
infoLabel = new CLabel(propertiesGroup, SWT.NONE);
Image image;
try {
image = new Image(parent.getShell().getDisplay(), EEFPropertyViewUtil.getIconPath("icons/full/obj16/check-icon-16x16.png"));
infoLabel.setImage(image);
infoLabel.setText("There are no errors");
} catch (URISyntaxException | IOException e) {
log.error("Error initializing properties view message banner", e);
}
} else {
this.connectorParameters = new ReferencesTable(getDescription(EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters, EsbMessages.CloudConnectorOperationPropertiesEditionPart_ConnectorParametersLabel), new ReferencesTableListener() {
public void handleAdd() {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, null));
connectorParameters.refresh();
}
public void handleEdit(EObject element) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.EDIT, null, element));
connectorParameters.refresh();
}
public void handleMove(EObject element, int oldIndex, int newIndex) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex));
connectorParameters.refresh();
}
public void handleRemove(EObject element) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element));
connectorParameters.refresh();
}
public void navigateTo(EObject element) { }
});
for (ViewerFilter filter : this.connectorParametersFilters) {
this.connectorParameters.addFilter(filter);
}
this.connectorParameters.setHelpText(propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters, EsbViewsRepository.FORM_KIND));
this.connectorParameters.createControls(parent, widgetFactory);
this.connectorParameters.addSelectionListener(new SelectionAdapter() {
public void widgetSelected(SelectionEvent e) {
if (e.item != null && e.item.getData() instanceof EObject) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData()));
}
}
});
GridData connectorParametersData = new GridData(GridData.FILL_HORIZONTAL);
connectorParametersData.horizontalSpan = 3;
this.connectorParameters.setLayoutData(connectorParametersData);
this.connectorParameters.setLowerBound(0);
this.connectorParameters.setUpperBound(-1);
connectorParameters.setID(EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters);
connectorParameters.setEEFType("eef::AdvancedTableComposition");
}
Control[] newControls = propertiesGroup.getChildren();
connectorParamElements = EEFPropertyViewUtil.getTableElements(previousControls, newControls);
// End of user code
return parent;
}
/**
* @generated NOT
*/
protected Composite createConfigRefText(FormToolkit widgetFactory, Composite parent) {
filterConfigSubPropertiesGroup = EEFPropertyViewUtil.createSubsectionGroup(widgetFactory, parent, "Session",
true);
Control [] previousControls = filterConfigSubPropertiesGroup.getChildren();
createDescription(filterConfigSubPropertiesGroup, EsbViewsRepository.CloudConnectorOperation.Properties.configRef, EsbMessages.CloudConnectorOperationPropertiesEditionPart_ConfigRefLabel);
configRef = widgetFactory.createText(filterConfigSubPropertiesGroup, ""); //$NON-NLS-1$
configRef.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(filterConfigSubPropertiesGroup);
GridData configRefData = new GridData(GridData.FILL_HORIZONTAL);
configRef.setLayoutData(configRefData);
configRef.addFocusListener(new FocusAdapter() {
/**
* @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void focusLost(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.configRef,
PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, configRef.getText()));
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.configRef,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
null, configRef.getText()));
}
}
/**
* @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
*/
@Override
public void focusGained(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
null,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
null, null));
}
}
});
configRef.addKeyListener(new KeyAdapter() {
/**
* @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void keyPressed(KeyEvent e) {
if (e.character == SWT.CR) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.configRef, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, configRef.getText()));
}
}
});
EditingUtils.setID(configRef, EsbViewsRepository.CloudConnectorOperation.Properties.configRef);
EditingUtils.setEEFtype(configRef, "eef::Text"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, filterConfigSubPropertiesGroup, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloudConnectorOperation.Properties.configRef, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createConfigRefText
Control [] newControls = filterConfigSubPropertiesGroup.getChildren();
configRefElements = EEFPropertyViewUtil.getTableElements(previousControls, newControls);
// End of user code
return filterConfigSubPropertiesGroup;
}
protected Composite createConnectorNameText(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.CloudConnectorOperation.Properties.connectorName, EsbMessages.CloudConnectorOperationPropertiesEditionPart_ConnectorNameLabel);
connectorName = widgetFactory.createText(parent, ""); //$NON-NLS-1$
connectorName.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(parent);
GridData connectorNameData = new GridData(GridData.FILL_HORIZONTAL);
connectorName.setLayoutData(connectorNameData);
connectorName.addFocusListener(new FocusAdapter() {
/**
* @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void focusLost(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.connectorName,
PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, connectorName.getText()));
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.connectorName,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
null, connectorName.getText()));
}
}
/**
* @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
*/
@Override
public void focusGained(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
null,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
null, null));
}
}
});
connectorName.addKeyListener(new KeyAdapter() {
/**
* @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void keyPressed(KeyEvent e) {
if (e.character == SWT.CR) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.connectorName, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, connectorName.getText()));
}
}
});
EditingUtils.setID(connectorName, EsbViewsRepository.CloudConnectorOperation.Properties.connectorName);
EditingUtils.setEEFtype(connectorName, "eef::Text"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloudConnectorOperation.Properties.connectorName, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createConnectorNameText
// End of user code
return parent;
}
protected Composite createOperationNameText(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.CloudConnectorOperation.Properties.operationName, EsbMessages.CloudConnectorOperationPropertiesEditionPart_OperationNameLabel);
operationName = widgetFactory.createText(parent, ""); //$NON-NLS-1$
operationName.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(parent);
GridData operationNameData = new GridData(GridData.FILL_HORIZONTAL);
operationName.setLayoutData(operationNameData);
operationName.addFocusListener(new FocusAdapter() {
/**
* @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void focusLost(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.operationName,
PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, operationName.getText()));
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.operationName,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
null, operationName.getText()));
}
}
/**
* @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
*/
@Override
public void focusGained(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
null,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
null, null));
}
}
});
operationName.addKeyListener(new KeyAdapter() {
/**
* @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void keyPressed(KeyEvent e) {
if (e.character == SWT.CR) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.operationName, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, operationName.getText()));
}
}
});
EditingUtils.setID(operationName, EsbViewsRepository.CloudConnectorOperation.Properties.operationName);
EditingUtils.setEEFtype(operationName, "eef::Text"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloudConnectorOperation.Properties.operationName, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createOperationNameText
// End of user code
return parent;
}
protected Composite createCloudConnectorNameText(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.CloudConnectorOperation.Properties.cloudConnectorName, EsbMessages.CloudConnectorOperationPropertiesEditionPart_CloudConnectorNameLabel);
cloudConnectorName = widgetFactory.createText(parent, ""); //$NON-NLS-1$
cloudConnectorName.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(parent);
GridData cloudConnectorNameData = new GridData(GridData.FILL_HORIZONTAL);
cloudConnectorName.setLayoutData(cloudConnectorNameData);
cloudConnectorName.addFocusListener(new FocusAdapter() {
/**
* @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void focusLost(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.cloudConnectorName,
PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, cloudConnectorName.getText()));
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.cloudConnectorName,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
null, cloudConnectorName.getText()));
}
}
/**
* @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
*/
@Override
public void focusGained(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
null,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
null, null));
}
}
});
cloudConnectorName.addKeyListener(new KeyAdapter() {
/**
* @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void keyPressed(KeyEvent e) {
if (e.character == SWT.CR) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.cloudConnectorName, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, cloudConnectorName.getText()));
}
}
});
EditingUtils.setID(cloudConnectorName, EsbViewsRepository.CloudConnectorOperation.Properties.cloudConnectorName);
EditingUtils.setEEFtype(cloudConnectorName, "eef::Text"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloudConnectorOperation.Properties.cloudConnectorName, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createCloudConnectorNameText
// End of user code
return parent;
}
protected Composite createAvailableConfigsText(FormToolkit widgetFactory, Composite parent) {
Control [] previousControls = filterConfigSubPropertiesGroup.getChildren();
createDescription(parent, EsbViewsRepository.CloudConnectorOperation.Properties.availableConfigs, EsbMessages.CloudConnectorOperationPropertiesEditionPart_AvailableConfigsLabel);
availableConfigs = widgetFactory.createText(parent, ""); //$NON-NLS-1$
availableConfigs.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(parent);
GridData availableConfigsData = new GridData(GridData.FILL_HORIZONTAL);
availableConfigs.setLayoutData(availableConfigsData);
availableConfigs.addFocusListener(new FocusAdapter() {
/**
* @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void focusLost(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.availableConfigs,
PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, availableConfigs.getText()));
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
EsbViewsRepository.CloudConnectorOperation.Properties.availableConfigs,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
null, availableConfigs.getText()));
}
}
/**
* @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
*/
@Override
public void focusGained(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
CloudConnectorOperationPropertiesEditionPartForm.this,
null,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
null, null));
}
}
});
availableConfigs.addKeyListener(new KeyAdapter() {
/**
* @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
*
*/
@Override
@SuppressWarnings("synthetic-access")
public void keyPressed(KeyEvent e) {
if (e.character == SWT.CR) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.availableConfigs, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, availableConfigs.getText()));
}
}
});
EditingUtils.setID(availableConfigs, EsbViewsRepository.CloudConnectorOperation.Properties.availableConfigs);
EditingUtils.setEEFtype(availableConfigs, "eef::Text"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloudConnectorOperation.Properties.availableConfigs, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createAvailableConfigsText
Control [] newControls = filterConfigSubPropertiesGroup.getChildren();
availableConfigsElements = EEFPropertyViewUtil.getTableElements(previousControls, newControls);
// End of user code
return parent;
}
/**
* @generated NOT
*/
protected Composite createParameterEditorTypeEMFComboViewer(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.CloudConnectorOperation.Properties.parameterEditorType, EsbMessages.CloudConnectorOperationPropertiesEditionPart_ParameterEditorTypeLabel);
parameterEditorType = new EMFComboViewer(parent);
parameterEditorType.setContentProvider(new ArrayContentProvider());
parameterEditorType.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory()));
GridData parameterEditorTypeData = new GridData(GridData.FILL_HORIZONTAL);
parameterEditorType.getCombo().setLayoutData(parameterEditorTypeData);
parameterEditorType.getCombo().addListener(SWT.MouseVerticalWheel, new Listener() {
@Override
public void handleEvent(Event arg0) {
arg0.doit = false;
}
});
parameterEditorType.addSelectionChangedListener(new ISelectionChangedListener() {
/**
* {@inheritDoc}
*
* @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
*
*/
public void selectionChanged(SelectionChangedEvent event) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloudConnectorOperationPropertiesEditionPartForm.this, EsbViewsRepository.CloudConnectorOperation.Properties.parameterEditorType, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getParameterEditorType()));
}
});
parameterEditorType.setID(EsbViewsRepository.CloudConnectorOperation.Properties.parameterEditorType);
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloudConnectorOperation.Properties.parameterEditorType, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createParameterEditorTypeEMFComboViewer
// End of user code
return parent;
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
*
*/
public void firePropertiesChanged(IPropertiesEditionEvent event) {
// Start of user code for tab synchronization
// End of user code
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#getDescription()
*
*/
public String getDescription() {
return description.getText();
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#setDescription(String newValue)
*
*/
public void setDescription(String newValue) {
if (newValue != null) {
description.setText(newValue);
} else {
description.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloudConnectorOperation.Properties.description);
if (eefElementEditorReadOnlyState && description.isEnabled()) {
description.setEnabled(false);
description.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !description.isEnabled()) {
description.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#getCommentsList()
*
*/
public EList getCommentsList() {
return commentsListList;
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#setCommentsList(EList newValue)
*
*/
public void setCommentsList(EList newValue) {
commentsListList = newValue;
if (newValue != null) {
commentsList.setText(commentsListList.toString());
} else {
commentsList.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloudConnectorOperation.Properties.commentsList);
if (eefElementEditorReadOnlyState && commentsList.isEnabled()) {
commentsList.setEnabled(false);
commentsList.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !commentsList.isEnabled()) {
commentsList.setEnabled(true);
}
}
public void addToCommentsList(Object newValue) {
commentsListList.add(newValue);
if (newValue != null) {
commentsList.setText(commentsListList.toString());
} else {
commentsList.setText(""); //$NON-NLS-1$
}
}
public void removeToCommentsList(Object newValue) {
commentsListList.remove(newValue);
if (newValue != null) {
commentsList.setText(commentsListList.toString());
} else {
commentsList.setText(""); //$NON-NLS-1$
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#getReverse()
*
*/
public Boolean getReverse() {
return Boolean.valueOf(reverse.getSelection());
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#setReverse(Boolean newValue)
*
*/
public void setReverse(Boolean newValue) {
if (newValue != null) {
reverse.setSelection(newValue.booleanValue());
} else {
reverse.setSelection(false);
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloudConnectorOperation.Properties.reverse);
if (eefElementEditorReadOnlyState && reverse.isEnabled()) {
reverse.setEnabled(false);
reverse.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !reverse.isEnabled()) {
reverse.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#initConnectorParameters(EObject current, EReference containingFeature, EReference feature)
*/
public void initConnectorParameters(ReferencesTableSettings settings) {
if (current.eResource() != null && current.eResource().getResourceSet() != null)
this.resourceSet = current.eResource().getResourceSet();
if (hasConnectorSchema) {
EObject dataObject = ((ReferencesTableSettings) settings).getSource();
if (connectorParameters instanceof ReferenceGroup) {
((ReferenceGroup) connectorParameters).setInput(settings, dataObject);
}
} else {
ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider();
connectorParameters.setContentProvider(contentProvider);
connectorParameters.setInput(settings);
boolean eefElementEditorReadOnlyState = isReadOnly(
EsbViewsRepository.CloudConnectorOperation.Properties.connectorParameters);
if (eefElementEditorReadOnlyState && connectorParameters.isEnabled()) {
connectorParameters.setEnabled(false);
connectorParameters.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !connectorParameters.isEnabled()) {
connectorParameters.setEnabled(true);
}
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#updateConnectorParameters()
*
*/
public void updateConnectorParameters() {
if (!hasConnectorSchema) {
connectorParameters.refresh();
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#addFilterConnectorParameters(ViewerFilter filter)
*
*/
public void addFilterToConnectorParameters(ViewerFilter filter) {
connectorParametersFilters.add(filter);
if (this.connectorParameters != null) {
this.connectorParameters.addFilter(filter);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#addBusinessFilterConnectorParameters(ViewerFilter filter)
*
*/
public void addBusinessFilterToConnectorParameters(ViewerFilter filter) {
connectorParametersBusinessFilters.add(filter);
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#isContainedInConnectorParametersTable(EObject element)
*
*/
public boolean isContainedInConnectorParametersTable(EObject element) {
return ((ReferencesTableSettings)connectorParameters.getInput()).contains(element);
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#getConfigRef()
*
*/
public String getConfigRef() {
return configRef.getText();
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#setConfigRef(String newValue)
*
*/
public void setConfigRef(String newValue) {
if (newValue != null) {
configRef.setText(newValue);
} else {
configRef.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloudConnectorOperation.Properties.configRef);
if (eefElementEditorReadOnlyState && configRef.isEnabled()) {
configRef.setEnabled(false);
configRef.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !configRef.isEnabled()) {
configRef.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#getConnectorName()
*
*/
public String getConnectorName() {
return connectorName.getText();
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#setConnectorName(String newValue)
*
*/
public void setConnectorName(String newValue) {
if (newValue != null) {
connectorName.setText(newValue);
} else {
connectorName.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloudConnectorOperation.Properties.connectorName);
if (eefElementEditorReadOnlyState && connectorName.isEnabled()) {
connectorName.setEnabled(false);
connectorName.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !connectorName.isEnabled()) {
connectorName.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#getOperationName()
*
*/
public String getOperationName() {
return operationName.getText();
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#setOperationName(String newValue)
*
*/
public void setOperationName(String newValue) {
if (newValue != null) {
operationName.setText(newValue);
} else {
operationName.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloudConnectorOperation.Properties.operationName);
if (eefElementEditorReadOnlyState && operationName.isEnabled()) {
operationName.setEnabled(false);
operationName.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !operationName.isEnabled()) {
operationName.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#getCloudConnectorName()
*
*/
public String getCloudConnectorName() {
return cloudConnectorName.getText();
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#setCloudConnectorName(String newValue)
*
*/
public void setCloudConnectorName(String newValue) {
if (newValue != null) {
cloudConnectorName.setText(newValue);
} else {
cloudConnectorName.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloudConnectorOperation.Properties.cloudConnectorName);
if (eefElementEditorReadOnlyState && cloudConnectorName.isEnabled()) {
cloudConnectorName.setEnabled(false);
cloudConnectorName.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !cloudConnectorName.isEnabled()) {
cloudConnectorName.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#getAvailableConfigs()
*
*/
public String getAvailableConfigs() {
return availableConfigs.getText();
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#setAvailableConfigs(String newValue)
*
*/
public void setAvailableConfigs(String newValue) {
if (newValue != null) {
availableConfigs.setText(newValue);
} else {
availableConfigs.setText(""); //$NON-NLS-1$
}
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloudConnectorOperation.Properties.availableConfigs);
if (eefElementEditorReadOnlyState && availableConfigs.isEnabled()) {
availableConfigs.setEnabled(false);
availableConfigs.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !availableConfigs.isEnabled()) {
availableConfigs.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#getParameterEditorType()
*
*/
public Enumerator getParameterEditorType() {
Enumerator selection = (Enumerator) ((StructuredSelection) parameterEditorType.getSelection()).getFirstElement();
return selection;
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#initParameterEditorType(Object input, Enumerator current)
*/
public void initParameterEditorType(Object input, Enumerator current) {
parameterEditorType.setInput(input);
parameterEditorType.modelUpdating(new StructuredSelection(current));
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloudConnectorOperation.Properties.parameterEditorType);
if (eefElementEditorReadOnlyState && parameterEditorType.isEnabled()) {
parameterEditorType.setEnabled(false);
parameterEditorType.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !parameterEditorType.isEnabled()) {
parameterEditorType.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloudConnectorOperationPropertiesEditionPart#setParameterEditorType(Enumerator newValue)
*
*/
public void setParameterEditorType(Enumerator newValue) {
parameterEditorType.modelUpdating(new StructuredSelection(newValue));
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloudConnectorOperation.Properties.parameterEditorType);
if (eefElementEditorReadOnlyState && parameterEditorType.isEnabled()) {
parameterEditorType.setEnabled(false);
parameterEditorType.setToolTipText(EsbMessages.CloudConnectorOperation_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !parameterEditorType.isEnabled()) {
parameterEditorType.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
*
*/
public String getTitle() {
return EsbMessages.CloudConnectorOperation_Part_Title;
}
// Start of user code additional methods
@Override
public void refresh() {
super.refresh();
if(!hasConnectorSchema) {
validate();
}
}
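	/**
	 * Invoked from {@link #refresh()} when there is no connector schema: passes
	 * {@code false} to {@code showEntry} for the filter-config group, config
	 * reference and available-config widgets, clears the group's elements and the
	 * connector parameter table buttons, then re-lays out the view.
	 */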
public void validate() {
EEFPropertyViewUtil epv = new EEFPropertyViewUtil(view);
epv.showEntry(new Control[] { filterConfigSubPropertiesGroup.getParent() }, false);
epv.clearElements(new Composite[] { filterConfigSubPropertiesGroup });
epv.showEntry(configRefElements, false);
epv.showEntry(availableConfigsElements, false);
epv.clearTableButtons(connectorParamElements);
view.layout(true, true);
}
public void updateMessage(String kind, String message, String field, String regex) throws URISyntaxException, IOException {
String bannerMessage;
Image image;
if(infoLabel != null && !infoLabel.isDisposed()) {
infoLabel.dispose();
}
infoLabel = new CLabel(propertiesGroup, SWT.NONE);
if(kind.equals("requirederror")) {
image = new Image(propertiesGroup.getShell().getDisplay(), EEFPropertyViewUtil.getIconPath("icons/full/obj16/error-icon-16x16.png"));
bannerMessage = "Value of the '" + field + "' field is required";
} else if (kind.equals("regexerror")) {
image = new Image(propertiesGroup.getShell().getDisplay(), EEFPropertyViewUtil.getIconPath("icons/full/obj16/error-icon-16x16.png"));
bannerMessage = "Value of the '" + field + "' field should match " + regex;
} else if (kind.equals("custom")){
image = new Image(propertiesGroup.getShell().getDisplay(), EEFPropertyViewUtil.getIconPath("icons/full/obj16/check-icon-16x16.png"));
bannerMessage = message;
} else {
image = new Image(propertiesGroup.getShell().getDisplay(), EEFPropertyViewUtil.getIconPath("icons/full/obj16/check-icon-16x16.png"));
bannerMessage = "There are no errors";
}
infoLabel.setImage(image);
infoLabel.setText(bannerMessage);
infoLabel.getParent().layout();
}
// End of user code
}
|
package org.xwiki.search.solr.internal;
import java.util.Arrays;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import javax.inject.Inject;
import javax.inject.Provider;
import javax.inject.Singleton;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.slf4j.Logger;
import org.xwiki.component.annotation.Component;
import org.xwiki.component.manager.ComponentLifecycleException;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.component.phase.Disposable;
import org.xwiki.component.phase.Initializable;
import org.xwiki.component.phase.InitializationException;
import org.xwiki.context.concurrent.ExecutionContextRunnable;
import org.xwiki.job.Job;
import org.xwiki.model.EntityType;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.search.solr.internal.api.SolrConfiguration;
import org.xwiki.search.solr.internal.api.SolrIndexer;
import org.xwiki.search.solr.internal.api.SolrIndexerException;
import org.xwiki.search.solr.internal.api.SolrInstance;
import org.xwiki.search.solr.internal.job.IndexerJob;
import org.xwiki.search.solr.internal.job.IndexerRequest;
import org.xwiki.search.solr.internal.metadata.LengthSolrInputDocument;
import org.xwiki.search.solr.internal.metadata.SolrMetadataExtractor;
import org.xwiki.search.solr.internal.reference.SolrReferenceResolver;
import com.xpn.xwiki.util.AbstractXWikiRunnable;
/**
* Default implementation of {@link SolrIndexer}.
* <p/>
* This implementation does not directly process the given leaf-references, but adds them to a processing queue, in the
* order they were received. The {@link Runnable} part of this implementation is the one that sequentially reads and
* processes the queue.
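 * <p>
 * A minimal usage sketch (the component lookup and the references are illustrative):
 * <pre>{@code
 * SolrIndexer indexer = componentManager.getInstance(SolrIndexer.class);
 * indexer.index(documentReference, true);     // queue the document and its children
 * indexer.delete(attachmentReference, false); // queue a single entity for removal
 * }</pre>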
*
* @version $Id$
* @since 5.1M2
*/
@Component
@Singleton
public class DefaultSolrIndexer extends AbstractXWikiRunnable implements SolrIndexer, Initializable, Disposable
{
/**
* Index queue entry.
*
* @version $Id$
*/
private static class IndexQueueEntry
{
/**
* The reference of the entity to index.
*/
public EntityReference reference;
/**
* The query used to filter entries to delete.
*/
public String deleteQuery;
/**
* The indexing operation to perform.
*/
public IndexOperation operation;
/**
* @param indexReference the reference of the entity to index.
* @param operation the indexing operation to perform.
*/
public IndexQueueEntry(EntityReference indexReference, IndexOperation operation)
{
this.reference = indexReference;
this.operation = operation;
}
/**
* @param deleteQuery the query used to filter entries to delete.
* @param operation the indexing operation to perform.
*/
public IndexQueueEntry(String deleteQuery, IndexOperation operation)
{
this.deleteQuery = deleteQuery;
this.operation = operation;
}
@Override
public String toString()
{
String str;
switch (operation) {
case INDEX:
str = "INDEX " + this.reference;
break;
case DELETE:
str = "DELETE " + this.deleteQuery;
break;
case STOP:
str = "STOP";
break;
default:
str = "";
break;
}
return str;
}
}
/**
* Resolve queue entry.
*
* @version $Id$
*/
private static class ResolveQueueEntry
{
/**
* The reference of the entity to index.
*/
public EntityReference reference;
/**
* Also apply operation to reference children.
*/
public boolean recurse;
/**
* The indexing operation to perform.
*/
public IndexOperation operation;
/**
* @param reference the reference of the entity to index.
* @param recurse also apply operation to reference children.
* @param operation the indexing operation to perform.
*/
public ResolveQueueEntry(EntityReference reference, boolean recurse, IndexOperation operation)
{
this.reference = reference;
this.recurse = recurse;
this.operation = operation;
}
}
/**
* Extract children references from passed references and dispatch them to the index queue.
*
* @version $Id$
*/
private class Resolver extends AbstractXWikiRunnable
{
@Override
public void runInternal()
{
logger.debug("Start SOLR resolver thread");
while (!Thread.interrupted()) {
ResolveQueueEntry queueEntry;
try {
queueEntry = resolveQueue.take();
} catch (InterruptedException e) {
logger.warn("The SOLR resolve thread has been interrupted", e);
queueEntry = RESOLVE_QUEUE_ENTRY_STOP;
}
if (queueEntry == RESOLVE_QUEUE_ENTRY_STOP) {
// Send the stop signal to the indexing thread without blocking.
indexQueue.offer(INDEX_QUEUE_ENTRY_STOP);
break;
}
try {
if (queueEntry.operation == IndexOperation.INDEX) {
Iterable<EntityReference> references;
if (queueEntry.recurse) {
references = solrRefereceResolver.getReferences(queueEntry.reference);
} else {
references = Arrays.asList(queueEntry.reference);
}
for (EntityReference reference : references) {
indexQueue.put(new IndexQueueEntry(reference, queueEntry.operation));
}
} else {
if (queueEntry.recurse) {
indexQueue.put(new IndexQueueEntry(solrRefereceResolver.getQuery(queueEntry.reference),
queueEntry.operation));
} else if (queueEntry.reference != null) {
indexQueue.put(new IndexQueueEntry(queueEntry.reference, queueEntry.operation));
}
}
} catch (Throwable e) {
logger.warn("Failed to apply operation [{}] on root reference [{}]", queueEntry.operation,
queueEntry.reference, e);
}
}
logger.debug("Stop SOLR resolver thread");
}
}
/**
* Stop resolver thread.
*/
private static final ResolveQueueEntry RESOLVE_QUEUE_ENTRY_STOP = new ResolveQueueEntry(null, false,
IndexOperation.STOP);
/**
* Stop indexer thread.
*/
private static final IndexQueueEntry INDEX_QUEUE_ENTRY_STOP = new IndexQueueEntry((String) null,
IndexOperation.STOP);
/**
* Logging framework.
*/
@Inject
private Logger logger;
/**
* Component manager used to get metadata extractors.
*/
@Inject
private ComponentManager componentManager;
/**
* The Solr configuration source.
*/
@Inject
private SolrConfiguration configuration;
/**
* Communication with the Solr instance.
*/
@Inject
private Provider<SolrInstance> solrInstanceProvider;
/**
* Extract contained indexable references.
*/
@Inject
private SolrReferenceResolver solrRefereceResolver;
/**
 * The queue of index operations to perform.
*/
private BlockingQueue<IndexQueueEntry> indexQueue;
/**
 * The queue of references to resolve and dispatch to the index queue.
*/
private BlockingQueue<ResolveQueueEntry> resolveQueue;
/**
* Indexer jobs.
*/
// TODO: use JobManager instead when it support several threads
private ExecutorService indexerJobs;
/**
* Thread in which the indexUpdater will be executed.
*/
private Thread indexThread;
/**
* Thread in which the provided references children will be resolved.
*/
private Thread resolveThread;
/**
 * Indicates whether the component has been disposed.
*/
private boolean disposed;
@Override
public void initialize() throws InitializationException
{
// Initialize the queues before starting the threads.
this.resolveQueue = new LinkedBlockingQueue<ResolveQueueEntry>();
this.indexQueue = new LinkedBlockingQueue<IndexQueueEntry>(this.configuration.getIndexerQueueCapacity());
// Launch the resolve thread
this.resolveThread = new Thread(new Resolver());
this.resolveThread.setName("XWiki Solr resolve thread");
this.resolveThread.setDaemon(true);
this.resolveThread.start();
this.resolveThread.setPriority(Thread.NORM_PRIORITY - 1);
// Launch the index thread
this.indexThread = new Thread(this);
this.indexThread.setName("XWiki Solr index thread");
this.indexThread.setDaemon(true);
this.indexThread.start();
this.indexThread.setPriority(Thread.NORM_PRIORITY - 1);
// Setup indexer job thread
BasicThreadFactory factory =
new BasicThreadFactory.Builder().namingPattern("XWiki Solr index job thread").daemon(true)
.priority(Thread.MIN_PRIORITY).build();
this.indexerJobs = Executors.newSingleThreadExecutor(factory);
}
@Override
public void dispose() throws ComponentLifecycleException
{
// Mark the component as disposed
this.disposed = true;
// Shutdown indexer jobs queue
this.indexerJobs.shutdownNow();
// Send the stop signal to the resolver thread without blocking.
this.resolveQueue.offer(RESOLVE_QUEUE_ENTRY_STOP);
}
@Override
protected void runInternal()
{
this.logger.debug("Start SOLR indexer thread");
while (!Thread.interrupted()) {
// Block until there is at least one entry in the queue
IndexQueueEntry queueEntry = null;
try {
queueEntry = this.indexQueue.take();
} catch (InterruptedException e) {
this.logger.warn("The SOLR index thread has been interrupted", e);
queueEntry = INDEX_QUEUE_ENTRY_STOP;
}
if (queueEntry == INDEX_QUEUE_ENTRY_STOP) {
break;
}
// Add to the batch until either the batch size is achieved or the queue gets emptied
processBatch(queueEntry);
}
this.logger.debug("Stop SOLR indexer thread");
}
/**
* Process a batch of operations that were just read from the index operations queue. This method also commits the
* batch when it finishes to process it.
*
* @param queueEntry the batch to process
*/
private void processBatch(IndexQueueEntry queueEntry)
{
SolrInstance solrInstance = this.solrInstanceProvider.get();
int length = 0;
int size = 0;
for (IndexQueueEntry batchEntry = queueEntry; batchEntry != null; batchEntry = this.indexQueue.poll()) {
IndexOperation operation = batchEntry.operation;
// For the current contiguous operations queue, group the changes
try {
if (IndexOperation.INDEX.equals(operation)) {
LengthSolrInputDocument solrDocument = getSolrDocument(batchEntry.reference);
if (solrDocument != null) {
solrInstance.add(solrDocument);
length += solrDocument.getLength();
++size;
}
} else if (IndexOperation.DELETE.equals(operation)) {
if (batchEntry.reference == null) {
solrInstance.deleteByQuery(batchEntry.deleteQuery);
} else {
solrInstance.delete(this.solrRefereceResolver.getId(batchEntry.reference));
}
++size;
}
} catch (Throwable e) {
this.logger.error("Failed to process entry [{}]", batchEntry, e);
}
// Commit the index changes so that they become available to queries. This is a costly operation and that is
// the reason why we perform it at the end of the batch.
if (shouldCommit(length, size)) {
commit();
length = 0;
size = 0;
}
}
// Commit what's left
if (size > 0) {
commit();
}
}
/**
* Commit.
*/
private void commit()
{
SolrInstance solrInstance = this.solrInstanceProvider.get();
try {
solrInstance.commit();
} catch (Exception e) {
this.logger.error("Failed to commit index changes to the Solr server. Rolling back.", e);
try {
solrInstance.rollback();
} catch (Exception ex) {
// Just log the failure.
this.logger.error("Failed to rollback index changes.", ex);
}
}
}
/**
* Check various constraints to know if the batch should be committed.
*
* @param length the current length
* @param size the current size
* @return true if the batch should be sent
*/
private boolean shouldCommit(int length, int size)
{
// If the length is above the configured maximum
if (length >= this.configuration.getIndexerBatchMaxLengh()) {
return true;
}
// If the size is above the configured maximum
return size >= this.configuration.getIndexerBatchSize();
}
private LengthSolrInputDocument getSolrDocument(EntityReference reference) throws SolrIndexerException,
IllegalArgumentException
{
SolrMetadataExtractor metadataExtractor = getMetadataExtractor(reference.getType());
        // If the entity type is supported, use the extractor to get the SolrInputDocument.
if (metadataExtractor != null) {
return metadataExtractor.getSolrDocument(reference);
}
return null;
}
/**
* @param entityType the entity type
* @return the metadata extractor that is registered for the specified type or {@code null} if none exists.
*/
private SolrMetadataExtractor getMetadataExtractor(EntityType entityType)
{
SolrMetadataExtractor result = null;
try {
result = this.componentManager.getInstance(SolrMetadataExtractor.class, entityType.name().toLowerCase());
} catch (ComponentLookupException e) {
this.logger.warn("Unsupported entity type: [{}]", entityType.toString(), e);
}
return result;
}
@Override
public void index(EntityReference reference, boolean recurse)
{
addToQueue(reference, recurse, IndexOperation.INDEX);
}
@Override
public void delete(EntityReference reference, boolean recurse)
{
addToQueue(reference, recurse, IndexOperation.DELETE);
}
/**
* Add a list of references to the index queue, all having the same operation.
*
* @param reference the references to add
* @param recurse also apply operation to children
* @param operation the operation to assign to the given references
*/
private void addToQueue(EntityReference reference, boolean recurse, IndexOperation operation)
{
if (!this.disposed) {
// Don't block because the capacity of the resolver queue is not limited.
this.resolveQueue.offer(new ResolveQueueEntry(reference, recurse, operation));
}
}
@Override
public int getQueueSize()
{
return this.indexQueue.size() + this.resolveQueue.size();
}
@Override
public IndexerJob startIndex(IndexerRequest request) throws SolrIndexerException
{
IndexerJob job;
try {
job = this.componentManager.getInstance(Job.class, IndexerJob.JOBTYPE);
} catch (ComponentLookupException e) {
throw new SolrIndexerException("Failed to lookup indexer job component", e);
}
job.initialize(request);
this.indexerJobs.execute(new ExecutionContextRunnable(job, this.componentManager));
return job;
}
}
|
package backend.resource;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.eclipse.egit.github.core.Label;
import backend.resource.serialization.SerializableLabel;
import javafx.scene.Node;
import javafx.scene.control.Tooltip;
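/**
 * A repository label whose group and name are encoded in the actual label name:
 * {@code "."} marks an exclusive group and {@code "-"} a non-exclusive one.
 * A minimal sketch with illustrative repository and label names:
 * <pre>
 * TurboLabel priority = TurboLabel.exclusive("owner/repo", "priority", "high");   // actual name "priority.high"
 * TurboLabel topic    = TurboLabel.nonexclusive("owner/repo", "topic", "ui");     // actual name "topic-ui"
 * </pre>
 */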
@SuppressWarnings("unused")
public class TurboLabel {
public static final String EXCLUSIVE_DELIMITER = ".";
public static final String NONEXCLUSIVE_DELIMITER = "-";
private void ______SERIALIZED_FIELDS______() {
}
private final String actualName;
private final String colour;
private void ______TRANSIENT_FIELDS______() {
}
private final String repoId;
private void ______CONSTRUCTORS______() {
}
public TurboLabel(String repoId, String name) {
this.actualName = name;
this.colour = "ffffff";
this.repoId = repoId;
}
public static TurboLabel nonexclusive(String repoId, String group, String name) {
return new TurboLabel(repoId, joinWith(group, name, false));
}
public static TurboLabel exclusive(String repoId, String group, String name) {
return new TurboLabel(repoId, joinWith(group, name, true));
}
public TurboLabel(String repoId, Label label) {
this.actualName = label.getName();
this.colour = label.getColor();
this.repoId = repoId;
}
public TurboLabel(String repoId, SerializableLabel label) {
this.actualName = label.getActualName();
this.colour = label.getColour();
this.repoId = repoId;
}
private void ______METHODS______() {
}
private Optional<String> getDelimiter() {
// Escaping due to constants not being valid regexes
Pattern p = Pattern.compile(String.format("^[^\\%s\\%s]+(\\%s|\\%s)",
EXCLUSIVE_DELIMITER,
NONEXCLUSIVE_DELIMITER,
EXCLUSIVE_DELIMITER,
NONEXCLUSIVE_DELIMITER));
Matcher m = p.matcher(actualName);
if (m.find()) {
return Optional.of(m.group(1));
} else {
return Optional.empty();
}
}
private static String joinWith(String group, String name, boolean exclusive) {
return group + (exclusive ? EXCLUSIVE_DELIMITER : NONEXCLUSIVE_DELIMITER) + name;
}
public boolean isExclusive() {
return getDelimiter().isPresent() && getDelimiter().get().equals(EXCLUSIVE_DELIMITER);
}
public Optional<String> getGroup() {
if (getDelimiter().isPresent()) {
String delimiter = getDelimiter().get();
// Escaping due to constants not being valid regexes
String[] segments = actualName.split("\\" + delimiter);
assert segments.length >= 1;
if (segments.length == 1) {
if (actualName.endsWith(delimiter)) {
// group.
return Optional.of(segments[0]);
} else {
// .name
return Optional.empty();
}
} else {
// group.name
assert segments.length == 2;
return Optional.of(segments[0]);
}
} else {
// name
return Optional.empty();
}
}
public String getName() {
if (getDelimiter().isPresent()) {
String delimiter = getDelimiter().get();
// Escaping due to constants not being valid regexes
String[] segments = actualName.split("\\" + delimiter);
assert segments.length >= 1;
if (segments.length == 1) {
if (actualName.endsWith(delimiter)) {
// group.
return "";
} else {
// .name
return segments[0];
}
} else {
// group.name
assert segments.length == 2;
return segments[1];
}
} else {
// name
return actualName;
}
}
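	// Choose a readable text colour: estimate the background's luminance with the
	// Rec. 709 coefficients and use black text on bright colours, white otherwise.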
public String getStyle() {
String colour = getColour();
int r = Integer.parseInt(colour.substring(0, 2), 16);
int g = Integer.parseInt(colour.substring(2, 4), 16);
int b = Integer.parseInt(colour.substring(4, 6), 16);
double luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b;
boolean bright = luminance > 128;
return "-fx-background-color: #" + getColour() + "; -fx-text-fill: " + (bright ? "black" : "white");
}
public Node getNode() {
javafx.scene.control.Label node = new javafx.scene.control.Label(getName());
node.getStyleClass().add("labels");
node.setStyle(getStyle());
if (getGroup().isPresent()) {
Tooltip groupTooltip = new Tooltip(getGroup().get());
node.setTooltip(groupTooltip);
}
return node;
}
@Override
public String toString() {
return actualName;
}
private void ______BOILERPLATE______() {
}
public String getRepoId() {
return repoId;
}
public String getColour() {
return colour;
}
public String getActualName() {
return actualName;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TurboLabel that = (TurboLabel) o;
return actualName.equals(that.actualName) && colour.equals(that.colour);
}
@Override
public int hashCode() {
int result = actualName.hashCode();
result = 31 * result + colour.hashCode();
return result;
}
}
|
package biweekly.io.json;
import java.io.Closeable;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.HashMap;
import java.util.Map;
import biweekly.ICalendar;
import biweekly.component.ICalComponent;
import biweekly.component.RawComponent;
import biweekly.component.marshaller.ComponentLibrary;
import biweekly.component.marshaller.ICalComponentMarshaller;
import biweekly.component.marshaller.RawComponentMarshaller;
import biweekly.io.SkipMeException;
import biweekly.parameter.ICalParameters;
import biweekly.property.ICalProperty;
import biweekly.property.RawProperty;
import biweekly.property.marshaller.ICalPropertyMarshaller;
import biweekly.property.marshaller.PropertyLibrary;
import biweekly.property.marshaller.RawPropertyMarshaller;
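/**
 * Writes iCalendar objects to a JSON data stream (jCal). A minimal usage sketch
 * (the file name and the empty calendar are illustrative):
 * <pre>
 * ICalendar ical = new ICalendar();
 * JCalWriter writer = new JCalWriter(new File("calendar.json"));
 * writer.write(ical);
 * writer.close();
 * </pre>
 */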
public class JCalWriter implements Closeable {
private final Map<Class<? extends ICalProperty>, ICalPropertyMarshaller<? extends ICalProperty>> propertyMarshallers = new HashMap<Class<? extends ICalProperty>, ICalPropertyMarshaller<? extends ICalProperty>>(0);
private final Map<Class<? extends ICalComponent>, ICalComponentMarshaller<? extends ICalComponent>> componentMarshallers = new HashMap<Class<? extends ICalComponent>, ICalComponentMarshaller<? extends ICalComponent>>(0);
private final JCalRawWriter writer;
/**
* Creates a jCal writer that writes to an output stream.
* @param outputStream the output stream to write to
*/
public JCalWriter(OutputStream outputStream) {
this(new OutputStreamWriter(outputStream));
}
/**
* Creates a jCal writer that writes to an output stream.
* @param outputStream the output stream to write to
* @param wrapInArray true to wrap all iCalendar objects in a parent array,
* false not to (useful when writing more than one iCalendar object)
*/
public JCalWriter(OutputStream outputStream, boolean wrapInArray) throws IOException {
this(new OutputStreamWriter(outputStream), wrapInArray);
}
/**
* Creates a jCal writer that writes to a file.
* @param file the file to write to
* @throws IOException if the file cannot be written to
*/
public JCalWriter(File file) throws IOException {
this(new FileWriter(file));
}
/**
* Creates a jCal writer that writes to a file.
* @param file the file to write to
* @param wrapInArray true to wrap all iCalendar objects in a parent array,
* false not to (useful when writing more than one iCalendar object)
* @throws IOException if the file cannot be written to
*/
public JCalWriter(File file, boolean wrapInArray) throws IOException {
this(new FileWriter(file), wrapInArray);
}
/**
* Creates a jCal writer that writes to a writer.
* @param writer the writer to the data stream
*/
public JCalWriter(Writer writer) {
this(writer, false);
}
/**
* Creates a jCal writer that writes to a writer.
* @param writer the writer to the data stream
* @param wrapInArray true to wrap all iCalendar objects in a parent array,
* false not to (useful when writing more than one iCalendar object)
*/
public JCalWriter(Writer writer, boolean wrapInArray) {
this.writer = new JCalRawWriter(writer, wrapInArray);
}
/**
* Registers a marshaller for an experimental property.
* @param marshaller the marshaller to register
*/
public void registerMarshaller(ICalPropertyMarshaller<? extends ICalProperty> marshaller) {
propertyMarshallers.put(marshaller.getPropertyClass(), marshaller);
}
/**
* Registers a marshaller for an experimental component.
* @param marshaller the marshaller to register
*/
public void registerMarshaller(ICalComponentMarshaller<? extends ICalComponent> marshaller) {
componentMarshallers.put(marshaller.getComponentClass(), marshaller);
}
public void write(ICalendar ical) throws IOException {
writeComponent(ical);
}
@SuppressWarnings({ "rawtypes", "unchecked" })
private void writeComponent(ICalComponent component) throws IOException {
ICalComponentMarshaller compMarshaller = findComponentMarshaller(component);
if (compMarshaller == null) {
throw new IllegalArgumentException("No marshaller found for component class \"" + component.getClass().getName() + "\". This component will not be written.");
}
writer.writeStartComponent(compMarshaller.getComponentName().toLowerCase());
//write properties
for (Object obj : compMarshaller.getProperties(component)) {
ICalProperty property = (ICalProperty) obj;
ICalPropertyMarshaller propMarshaller = findPropertyMarshaller(property);
if (propMarshaller == null) {
throw new IllegalArgumentException("No marshaller found for property class \"" + property.getClass().getName() + "\". This property will not be written.");
}
//marshal property
String propertyName = propMarshaller.getPropertyName().toLowerCase();
ICalParameters parameters;
JCalValue value;
try {
parameters = propMarshaller.prepareParameters(property);
value = propMarshaller.writeJson(property);
} catch (SkipMeException e) {
continue;
}
//write property
parameters.setValue(null);
writer.writeProperty(propertyName, parameters, value);
}
//write sub-components
for (Object obj : compMarshaller.getComponents(component)) {
ICalComponent subComponent = (ICalComponent) obj;
writeComponent(subComponent);
}
writer.writeEndComponent();
}
/**
* Finds a component marshaller.
* @param component the component being marshalled
* @return the component marshaller or null if not found
*/
private ICalComponentMarshaller<? extends ICalComponent> findComponentMarshaller(ICalComponent component) {
ICalComponentMarshaller<? extends ICalComponent> m = componentMarshallers.get(component.getClass());
if (m == null) {
m = ComponentLibrary.getMarshaller(component.getClass());
if (m == null) {
if (component instanceof RawComponent) {
RawComponent raw = (RawComponent) component;
m = new RawComponentMarshaller(raw.getName());
}
}
}
return m;
}
/**
* Finds a property marshaller.
* @param property the property being marshalled
* @return the property marshaller or null if not found
*/
private ICalPropertyMarshaller<? extends ICalProperty> findPropertyMarshaller(ICalProperty property) {
ICalPropertyMarshaller<? extends ICalProperty> m = propertyMarshallers.get(property.getClass());
if (m == null) {
m = PropertyLibrary.getMarshaller(property.getClass());
if (m == null) {
if (property instanceof RawProperty) {
RawProperty raw = (RawProperty) property;
m = new RawPropertyMarshaller(raw.getName());
}
}
}
return m;
}
/**
* Finishes writing the JSON document and closes the underlying
* {@link Writer}.
* @throws IOException if there's a problem closing the stream
*/
public void close() throws IOException {
writer.close();
}
/**
* Finishes writing the JSON document so that it is syntactically correct.
* No more iCalendar objects can be written once this method is called.
* @throws IOException if there's a problem writing to the data stream
*/
public void closeJsonStream() throws IOException {
writer.closeJsonStream();
}
}
|
package ch.bind.philib.io;
import java.nio.ByteBuffer;
/**
 * Helper methods for zero-filling byte arrays and {@link ByteBuffer}s.
*
* @author Philipp Meinen
*/
public final class BufferOps {
private BufferOps() {
}
private static volatile byte[] nullFiller;
public static void memsetZero(ByteBuffer buf) {
if (buf.hasArray()) {
memsetZero(buf.array());
} else {
byte[] filler = getFiller();
int filLen = filler.length;
buf.clear();
int rem = buf.remaining();
while (rem > 0) {
int l = Math.min(rem, filLen);
buf.put(filler, 0, l);
rem -= l;
}
buf.clear();
}
}
public static void memsetZero(byte[] buf) {
byte[] filler = getFiller();
int filLen = filler.length;
int rem = buf.length;
int off = 0;
while (rem > 0) {
int l = Math.min(rem, filLen);
memset(filler, buf, off, l);
rem -= l;
off += l;
}
}
private static final void memset(byte[] src, byte[] dst, int dstOff, int len) {
System.arraycopy(src, 0, dst, dstOff, len);
}
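	/**
	 * Lazily creates the shared zero-filled source array. The field is volatile and
	 * the race is benign: the array may be built more than once, but every caller
	 * receives a fully zeroed buffer.
	 */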
private static byte[] getFiller() {
byte[] f = nullFiller;
if (f == null) {
f = new byte[8192];
nullFiller = f;
}
return f;
}
}
|
package cloud.swiftnode.kspam;
import cloud.swiftnode.kspam.listener.PlayerListener;
import cloud.swiftnode.kspam.util.Lang;
import cloud.swiftnode.kspam.util.Static;
import cloud.swiftnode.kspam.util.StaticStorage;
import cloud.swiftnode.kspam.util.URLs;
import cloud.swiftnode.kspam.util.Version;
import org.bukkit.Bukkit;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.plugin.java.JavaPlugin;
import org.mcstats.Metrics;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.util.Set;
public class KSpam extends JavaPlugin {
public static KSpam INSTANCE;
@Override
public void onEnable() {
INSTANCE = this;
Bukkit.getPluginManager().registerEvents(new PlayerListener(), this);
cacheInit();
updateCheck();
metricsInit();
Static.consoleMsg(Lang.INTRO.builder()
.single(Lang.Key.KSPAM_VERSION, Static.getVersion()));
}
@Override
public void onDisable() {
cacheSave();
}
@Override
public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
// Lazy
switch (args.length) {
case 1:
if (args[0].equalsIgnoreCase("force")) {
if (!sender.isOp()) {
break;
}
StaticStorage.forceMode = !StaticStorage.forceMode;
sender.sendMessage(Lang.SET.builder().single(Lang.Key.VALUE, StaticStorage.forceMode).prefix().build());
return true;
} else if (args[0].equalsIgnoreCase("info")) {
sender.sendMessage(Lang.NEW_VERSION.builder().single(Lang.Key.NEW_VERSION, StaticStorage.getNewVer()).prefix().build());
sender.sendMessage(Lang.CURRENT_VERSION.builder().single(Lang.Key.KSPAM_VERSION, StaticStorage.getCurrVer()).prefix().build());
sender.sendMessage(Lang.PREFIX + String.valueOf(StaticStorage.cachedSet.size()));
return true;
}
}
return false;
}
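    /**
     * Loads the spam cache: downloads K-Spam.cache from {@code URLs.CACHE} when no
     * local copy exists, then deserializes it into {@code StaticStorage.cachedSet}.
     */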
@SuppressWarnings("unchecked")
private void cacheInit() {
if (!getDataFolder().isDirectory()) {
getDataFolder().mkdirs();
}
File file = new File(getDataFolder(), "K-Spam.cache");
if (!file.isFile()) {
            try (FileOutputStream outStream = new FileOutputStream(file);
                    ReadableByteChannel rbc = Channels.newChannel(URLs.CACHE.toUrl().openStream())) {
                outStream.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
} catch (Exception ex) {
Static.consoleMsg(ex);
}
}
try (ObjectInputStream inStream = new ObjectInputStream(new FileInputStream(file))) {
StaticStorage.cachedSet = (Set<String>) inStream.readObject();
Static.consoleMsg(Lang.CACHE_COUNT.builder()
.prefix().single(Lang.Key.CACHE_COUNT, StaticStorage.cachedSet.size()));
} catch (Exception ex) {
Static.consoleMsg(ex);
}
}
    private void cacheSave() {
        File file = new File(getDataFolder(), "K-Spam.cache");
        try (ObjectOutputStream outStream = new ObjectOutputStream(new FileOutputStream(file))) {
            outStream.writeObject(StaticStorage.cachedSet);
        } catch (Exception ex) {
            Static.consoleMsg(ex);
        }
    }
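    /**
     * Checks the GitHub releases page for a newer version by scraping the tag name
     * from the css-truncate-target span and comparing it against the current version.
     */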
    private void updateCheck() {
        try {
            URL url = new URL("https://github.com/EntryPointKR/K-SPAM/releases/latest");
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(url.openStream()))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    if (!line.contains("<span class=\"css-truncate-target\">")) {
                        continue;
                    }
                    StaticStorage.setNewVer(new Version(
                            Static.substring(line, "<span class=\"css-truncate-target\">", "</span>")));
                    if (StaticStorage.getCurrVer().beforeEquals(StaticStorage.getNewVer())) {
                        Static.consoleMsg(
                                Lang.UPDATE_INFO_NEW.builder().prefix(),
                                Lang.NEW_VERSION.builder().single(Lang.Key.NEW_VERSION, StaticStorage.getNewVer()).prefix(),
                                Lang.CURRENT_VERSION.builder().single(Lang.Key.KSPAM_VERSION, StaticStorage.getCurrVer()).prefix()
                        );
                    } else {
                        Static.consoleMsg(Lang.UPDATE_INFO_NO.builder().prefix());
                    }
                    return;
                }
            }
        } catch (Exception ex) {
            Static.consoleMsg(ex);
        }
    }
private void metricsInit() {
try {
Metrics metrics = new Metrics(this);
metrics.start();
} catch (Exception ex) {
Static.consoleMsg(ex);
}
}
}
|
package com.adonis.utils;
import com.maxmind.geoip2.DatabaseReader;
import com.maxmind.geoip2.exception.GeoIp2Exception;
import com.maxmind.geoip2.model.CityResponse;
import com.maxmind.geoip2.model.CountryResponse;
import com.maxmind.geoip2.model.EnterpriseResponse;
import lombok.extern.slf4j.Slf4j;
import java.io.File;
import java.io.IOException;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Map;
import java.util.TreeMap;
@Slf4j
public class GeoService {
private static class ResourceHolder {
private static final GeoService geoService = new GeoService();
}
private DatabaseReader readerCity;
private DatabaseReader readerCountry;
private String ip;
private InetAddress ipAddress;
private CountryCodes countryCodes;
private static CountryResponse countryResponse;
private GeoService() {
try {
readerCity = new DatabaseReader.Builder(new File(this.getClass().getClassLoader().getResource("GeoLite2-City.mmdb").getPath())).build();
readerCountry = new DatabaseReader.Builder(new File(this.getClass().getClassLoader().getResource("GeoIP2-Country.mmdb").getPath())).build();
countryCodes = new CountryCodes();
} catch (Exception e) {
log.error("Exception:", e);
}
try {
ip = getIpAdress();
ipAddress = getIpInetAdress();
} catch (Exception e) {
log.error("Exception:", e);
}
}
public static GeoService getInstance() {
return ResourceHolder.geoService;
}
public String getCountryCode(String ip) {
try {
CountryResponse cr = readerCity.country(InetAddress.getByName(ip));
return cr.getCountry().getIsoCode();
} catch (UnknownHostException e) {
log.error("Exception:", e);
} catch (IOException e) {
log.error("Exception:", e);
} catch (GeoIp2Exception e) {
log.error("Exception:", e);
}
return "
}
public String getCountryName(String ip) {
try {
CountryResponse cr = readerCity.country(InetAddress.getByName(ip));
return cr.getCountry().getName();
} catch (UnknownHostException e) {
log.error("Exception:", e);
} catch (IOException e) {
log.error("Exception:", e);
} catch (GeoIp2Exception e) {
log.error("Exception:", e);
}
return "
}
public String getCity(String ip){
try {
CityResponse cr = readerCity.city(InetAddress.getByName(ip));
return cr.getCity().getName();
} catch (UnknownHostException e) {
log.error("Exception:", e);
return "";
} catch (IOException e) {
log.error("Exception:", e);
return "";
} catch (GeoIp2Exception e) {
log.error("Exception:", e);
return "";
}
}
public String getCity(InetAddress ip){
try {
CityResponse cr = readerCity.city(ip);
return cr.getCity().getName();
} catch (UnknownHostException e) {
log.error("Exception:", e);
return "Riga";
} catch (IOException e) {
log.error("Exception:", e);
return "Riga";
} catch (GeoIp2Exception e) {
log.error("Exception:", e);
return "Riga";
}
}
public String getCountry(InetAddress ipAddress) {
try {
countryResponse = readerCountry.country(ipAddress);
return countryResponse.getCountry().getName();
} catch (IOException | GeoIp2Exception ex) {
log.info("Could not get country for IP " + ipAddress);
try {
countryResponse = readerCountry.country(InetAddress.getByName(VaadinUtils.getIpAddress()));
return countryResponse.getCountry().getName();
} catch (IOException | GeoIp2Exception e) {
log.error("Exception:", e);
}
// return "ZZZ";
return "Latvia";
}
}
public String getIpCountry(InetAddress ipAddress) {
try {
CountryResponse countryResponse = readerCountry.country(ipAddress);
return countryResponse.getCountry().getIsoCode();
} catch (IOException | GeoIp2Exception ex) {
log.info("Could not get country for IP " + ipAddress);
// return "ZZZ";
return countryCodes.map.get("Latvia");
}
}
public EnterpriseResponse getInfo(InetAddress ipAddress){
try {
EnterpriseResponse enterpriseResponse = readerCountry.enterprise(ipAddress);
return enterpriseResponse;
} catch (IOException e) {
log.error("Exception:", e);
} catch (GeoIp2Exception e) {
log.error("Exception:", e);
}
return null;
}
public String getIpCountry(String ipAddress) {
try {
return getIpCountry(InetAddress.getByName(ipAddress));
} catch (UnknownHostException ex) {
log.info("Bad ip address " + ipAddress, ex);
}
// return "ZZZ";
return countryCodes.map.get("Latvia");
}
public String getIpAdress(){
try {
return Inet4Address.getLocalHost().getHostAddress();
} catch (UnknownHostException e) {
log.error("UnknownHostException:", e);;
return VaadinUtils.getIpAddress();
}
}
public InetAddress getIpInetAdress(){
try {
return Inet4Address.getLocalHost();
} catch (UnknownHostException e) {
log.error("UnknownHostException:", e);;
try {
InetAddress inetAddress = InetAddress.getByName(VaadinUtils.getIpAddress());
return inetAddress;
} catch (UnknownHostException e1) {
e1.printStackTrace();
}
}
return null;
}
public String getIp() {
return ip;
}
public InetAddress getIpAddress() {
return ipAddress;
}
public CountryResponse getCountryResponse() {
return countryResponse;
}
private class CountryCodes {
final Map<String, String> map = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
public CountryCodes() {
map.put("Andorra, Principality Of", "AD");
map.put("United Arab Emirates", "AE");
map.put("Afghanistan, Islamic State Of", "AF");
map.put("Antigua And Barbuda", "AG");
map.put("Anguilla", "AI");
map.put("Albania", "AL");
map.put("Armenia", "AM");
map.put("Netherlands Antilles", "AN");
map.put("Angola", "AO");
map.put("Antarctica", "AQ");
map.put("Argentina", "AR");
map.put("American Samoa", "AS");
map.put("Austria", "AT");
map.put("Australia", "AU");
map.put("Aruba", "AW");
map.put("Azerbaidjan", "AZ");
map.put("Bosnia-Herzegovina", "BA");
map.put("Barbados", "BB");
map.put("Bangladesh", "BD");
map.put("Belgium", "BE");
map.put("Burkina Faso", "BF");
map.put("Bulgaria", "BG");
map.put("Bahrain", "BH");
map.put("Burundi", "BI");
map.put("Benin", "BJ");
map.put("Bermuda", "BM");
map.put("Brunei Darussalam", "BN");
map.put("Bolivia", "BO");
map.put("Brazil", "BR");
map.put("Bahamas", "BS");
map.put("Bhutan", "BT");
map.put("Bouvet Island", "BV");
map.put("Botswana", "BW");
map.put("Belarus", "BY");
map.put("Belize", "BZ");
map.put("Canada", "CA");
map.put("Cocos (Keeling) Islands", "CC");
map.put("Central African Republic", "CF");
map.put("Congo, The Democratic Republic Of The", "CD");
map.put("Congo", "CG");
map.put("Switzerland", "CH");
map.put("Ivory Coast (Cote D'Ivoire)", "CI");
map.put("Cook Islands", "CK");
map.put("Chile", "CL");
map.put("Cameroon", "CM");
map.put("China", "CN");
map.put("Colombia", "CO");
map.put("Costa Rica", "CR");
map.put("Former Czechoslovakia", "CS");
map.put("Cuba", "CU");
map.put("Cape Verde", "CV");
map.put("Christmas Island", "CX");
map.put("Cyprus", "CY");
map.put("Czech Republic", "CZ");
map.put("Germany", "DE");
map.put("Djibouti", "DJ");
map.put("Denmark", "DK");
map.put("Dominica", "DM");
map.put("Dominican Republic", "DO");
map.put("Algeria", "DZ");
map.put("Ecuador", "EC");
map.put("Estonia", "EE");
map.put("Egypt", "EG");
map.put("Western Sahara", "EH");
map.put("Eritrea", "ER");
map.put("Spain", "ES");
map.put("Ethiopia", "ET");
map.put("Finland", "FI");
map.put("Fiji", "FJ");
map.put("Falkland Islands", "FK");
map.put("Micronesia", "FM");
map.put("Faroe Islands", "FO");
map.put("France", "FR");
map.put("France (European Territory)", "FX");
map.put("Gabon", "GA");
map.put("Great Britain", "UK");
map.put("Grenada", "GD");
map.put("Georgia", "GE");
map.put("French Guyana", "GF");
map.put("Ghana", "GH");
map.put("Gibraltar", "GI");
map.put("Greenland", "GL");
map.put("Gambia", "GM");
map.put("Guinea", "GN");
map.put("Guadeloupe (French)", "GP");
map.put("Equatorial Guinea", "GQ");
map.put("Greece", "GR");
map.put("S. Georgia & S. Sandwich Isls.", "GS");
map.put("Guatemala", "GT");
map.put("Guam (USA)", "GU");
map.put("Guinea Bissau", "GW");
map.put("Guyana", "GY");
map.put("Hong Kong", "HK");
map.put("Heard And McDonald Islands", "HM");
map.put("Honduras", "HN");
map.put("Croatia", "HR");
map.put("Haiti", "HT");
map.put("Hungary", "HU");
map.put("Indonesia", "ID");
map.put("Ireland", "IE");
map.put("Israel", "IL");
map.put("India", "IN");
map.put("British Indian Ocean Territory", "IO");
map.put("Iraq", "IQ");
map.put("Iran", "IR");
map.put("Iceland", "IS");
map.put("Italy", "IT");
map.put("Jamaica", "JM");
map.put("Jordan", "JO");
map.put("Japan", "JP");
map.put("Kenya", "KE");
map.put("Kyrgyz Republic (Kyrgyzstan)", "KG");
map.put("Cambodia, Kingdom Of", "KH");
map.put("Kiribati", "KI");
map.put("Comoros", "KM");
map.put("Saint Kitts & Nevis Anguilla", "KN");
map.put("North Korea", "KP");
map.put("South Korea", "KR");
map.put("Kuwait", "KW");
map.put("Cayman Islands", "KY");
map.put("Kazakhstan", "KZ");
map.put("Laos", "LA");
map.put("Lebanon", "LB");
map.put("Saint Lucia", "LC");
map.put("Liechtenstein", "LI");
map.put("Sri Lanka", "LK");
map.put("Liberia", "LR");
map.put("Lesotho", "LS");
map.put("Lithuania", "LT");
map.put("Luxembourg", "LU");
map.put("Latvia", "LV");
map.put("Libya", "LY");
map.put("Morocco", "MA");
map.put("Monaco", "MC");
map.put("Moldavia", "MD");
map.put("Madagascar", "MG");
map.put("Marshall Islands", "MH");
map.put("Macedonia", "MK");
map.put("Mali", "ML");
map.put("Myanmar", "MM");
map.put("Mongolia", "MN");
map.put("Macau", "MO");
map.put("Northern Mariana Islands", "MP");
map.put("Martinique (French)", "MQ");
map.put("Mauritania", "MR");
map.put("Montserrat", "MS");
map.put("Malta", "MT");
map.put("Mauritius", "MU");
map.put("Maldives", "MV");
map.put("Malawi", "MW");
map.put("Mexico", "MX");
map.put("Malaysia", "MY");
map.put("Mozambique", "MZ");
map.put("Namibia", "NA");
map.put("New Caledonia (French)", "NC");
map.put("Niger", "NE");
map.put("Norfolk Island", "NF");
map.put("Nigeria", "NG");
map.put("Nicaragua", "NI");
map.put("Netherlands", "NL");
map.put("Norway", "NO");
map.put("Nepal", "NP");
map.put("Nauru", "NR");
map.put("Neutral Zone", "NT");
map.put("Niue", "NU");
map.put("New Zealand", "NZ");
map.put("Oman", "OM");
map.put("Panama", "PA");
map.put("Peru", "PE");
map.put("Polynesia (French)", "PF");
map.put("Papua New Guinea", "PG");
map.put("Philippines", "PH");
map.put("Pakistan", "PK");
map.put("Poland", "PL");
map.put("Saint Pierre And Miquelon", "PM");
map.put("Pitcairn Island", "PN");
map.put("Puerto Rico", "PR");
map.put("Portugal", "PT");
map.put("Palau", "PW");
map.put("Paraguay", "PY");
map.put("Qatar", "QA");
map.put("Reunion (French)", "RE");
map.put("Romania", "RO");
map.put("Russian Federation", "RU");
map.put("Rwanda", "RW");
map.put("Saudi Arabia", "SA");
map.put("Solomon Islands", "SB");
map.put("Seychelles", "SC");
map.put("Sudan", "SD");
map.put("Sweden", "SE");
map.put("Singapore", "SG");
map.put("Saint Helena", "SH");
map.put("Slovenia", "SI");
map.put("Svalbard And Jan Mayen Islands", "SJ");
map.put("Slovak Republic", "SK");
map.put("Sierra Leone", "SL");
map.put("San Marino", "SM");
map.put("Senegal", "SN");
map.put("Somalia", "SO");
map.put("Suriname", "SR");
map.put("Saint Tome (Sao Tome) And Principe", "ST");
map.put("Former USSR", "SU");
map.put("El Salvador", "SV");
map.put("Syria", "SY");
map.put("Swaziland", "SZ");
map.put("Turks And Caicos Islands", "TC");
map.put("Chad", "TD");
map.put("French Southern Territories", "TF");
map.put("Togo", "TG");
map.put("Thailand", "TH");
map.put("Tadjikistan", "TJ");
map.put("Tokelau", "TK");
map.put("Turkmenistan", "TM");
map.put("Tunisia", "TN");
map.put("Tonga", "TO");
map.put("East Timor", "TP");
map.put("Turkey", "TR");
map.put("Trinidad And Tobago", "TT");
map.put("Tuvalu", "TV");
map.put("Taiwan", "TW");
map.put("Tanzania", "TZ");
map.put("Ukraine", "UA");
map.put("Uganda", "UG");
map.put("United Kingdom", "UK");
map.put("USA Minor Outlying Islands", "UM");
map.put("United States", "US");
map.put("Uruguay", "UY");
map.put("Uzbekistan", "UZ");
map.put("Holy See (Vatican City State)", "VA");
map.put("Saint Vincent & Grenadines", "VC");
map.put("Venezuela", "VE");
map.put("Virgin Islands (British)", "VG");
map.put("Virgin Islands (USA)", "VI");
map.put("Vietnam", "VN");
map.put("Vanuatu", "VU");
map.put("Wallis And Futuna Islands", "WF");
map.put("Samoa", "WS");
map.put("Yemen", "YE");
map.put("Mayotte", "YT");
map.put("Yugoslavia", "YU");
map.put("South Africa", "ZA");
map.put("Zambia", "ZM");
map.put("Zaire", "ZR");
map.put("Zimbabwe", "ZW");
}
public String getCode(String country){
String countryFound = map.get(country);
if(countryFound==null){
countryFound="LV";
}
return countryFound;
}
}
public CountryCodes getCountryCodes() {
return countryCodes;
}
public String getCountryISOCode(String nameCountry) {
return countryCodes.getCode(nameCountry);
}
}
|
package com.conveyal.gtfs;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.io.ByteStreams;
import com.google.common.io.Files;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.NoSuchElementException;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;
import java.util.zip.ZipFile;
/**
* Fast cache for GTFS feeds stored on S3.
*/
public class GTFSCache {
private static final Logger LOG = LoggerFactory.getLogger(GTFSCache.class);
public final String bucket;
public final String bucketFolder;
public final File cacheDir;
private static final AmazonS3 s3 = new AmazonS3Client();
private LoadingCache<String, GTFSFeed> cache = CacheBuilder.newBuilder()
.maximumSize(10)
.build(new CacheLoader<String, GTFSFeed>() {
@Override
public GTFSFeed load(String s) throws Exception {
return retrieveFeed(s);
}
});
/** If bucket is null, work offline and do not use S3 */
public GTFSCache(String bucket, File cacheDir) {
if (bucket == null) LOG.info("No bucket specified; GTFS Cache will run locally");
else LOG.info("Using bucket {} for GTFS Cache", bucket);
this.bucket = bucket;
this.bucketFolder = null;
this.cacheDir = cacheDir;
}
public GTFSCache(String bucket, String bucketFolder, File cacheDir) {
if (bucket == null) LOG.info("No bucket specified; GTFS Cache will run locally");
else LOG.info("Using bucket {} for GTFS Cache", bucket);
this.bucket = bucket;
this.bucketFolder = bucketFolder.replaceAll("\\/","");
this.cacheDir = cacheDir;
}
/**
* Add a GTFS feed to this cache with the given ID. NB this is not the feed ID, because feed IDs are not
* unique when you load multiple versions of the same feed.
*/
public GTFSFeed put (String id, File feedFile) throws Exception {
return put(id, feedFile, null);
}
/** Add a GTFS feed to this cache where the ID is calculated from the feed itself */
public GTFSFeed put (Function<GTFSFeed, String> idGenerator, File feedFile) throws Exception {
return put(null, feedFile, idGenerator);
}
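/*
 * A usage sketch of the two put variants above (the paths and IDs are hypothetical;
 * the lambda only illustrates deriving an ID from the loaded feed):
 *
 *   GTFSCache cache = new GTFSCache(null, new File("/tmp/gtfs-cache")); // null bucket = local only
 *   GTFSFeed byExplicitId = cache.put("agency-2017-05", new File("/data/agency.zip"));
 *   GTFSFeed byDerivedId = cache.put(feed -> "agency-" + feed.hashCode(), new File("/data/agency.zip"));
 */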
private GTFSFeed put (String id, File feedFile, Function<GTFSFeed, String> idGenerator) throws Exception {
// generate temporary ID to name files
String tempId = id != null ? id : UUID.randomUUID().toString();
// read the feed
String cleanTempId = cleanId(tempId);
File dbFile = new File(cacheDir, cleanTempId + ".db");
File movedFeedFile = new File(cacheDir, cleanTempId + ".zip");
// don't copy if we're loading from a locally-cached feed
if (!feedFile.equals(movedFeedFile)) Files.copy(feedFile, movedFeedFile);
GTFSFeed feed = new GTFSFeed(dbFile.getAbsolutePath());
feed.loadFromFile(new ZipFile(movedFeedFile));
feed.findPatterns();
if (idGenerator != null) id = idGenerator.apply(feed);
String cleanId = cleanId(id);
feed.close(); // make sure everything is written to disk
if (idGenerator != null) {
new File(cacheDir, cleanTempId + ".zip").renameTo(new File(cacheDir, cleanId + ".zip"));
new File(cacheDir, cleanTempId + ".db").renameTo(new File(cacheDir, cleanId + ".db"));
new File(cacheDir, cleanTempId + ".db.p").renameTo(new File(cacheDir, cleanId + ".db.p"));
}
// upload feed
// TODO best way to do this? Should we zip the files together?
LOG.info("Writing feed to s3 cache");
if (bucket != null) {
String key = bucketFolder != null ? String.join("/", bucketFolder, cleanId) : cleanId;
s3.putObject(bucket, key + ".zip", feedFile);
s3.putObject(bucket, key + ".db", new File(cacheDir, cleanId + ".db"));
s3.putObject(bucket, key + ".db.p", new File(cacheDir, cleanId + ".db.p"));
}
// reconnect to feed database
feed = new GTFSFeed(new File(cacheDir, cleanId + ".db").getAbsolutePath());
cache.put(id, feed);
return feed;
}
public GTFSFeed get (String id) {
try {
return cache.get(id);
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
}
public boolean containsId (String id) {
GTFSFeed feed = null;
try {
feed = cache.get(id);
} catch (Exception e) {
return false;
}
return feed != null;
}
/** retrieve a feed from local cache or S3 */
private GTFSFeed retrieveFeed (String originalId) {
// see if we have it cached locally
String id = cleanId(originalId);
String key = bucketFolder != null ? String.join("/", bucketFolder, id) : id;
File dbFile = new File(cacheDir, id + ".db");
if (dbFile.exists()) {
LOG.info("Processed GTFS was found cached locally");
return new GTFSFeed(dbFile.getAbsolutePath());
}
if (bucket != null) {
try {
LOG.info("Attempting to download cached GTFS MapDB.");
S3Object db = s3.getObject(bucket, key + ".db");
InputStream is = db.getObjectContent();
FileOutputStream fos = new FileOutputStream(dbFile);
ByteStreams.copy(is, fos);
is.close();
fos.close();
S3Object dbp = s3.getObject(bucket, key + ".db.p");
InputStream isp = dbp.getObjectContent();
FileOutputStream fosp = new FileOutputStream(new File(cacheDir, id + ".db.p"));
ByteStreams.copy(isp, fosp);
isp.close();
fosp.close();
LOG.info("Returning processed GTFS from S3");
return new GTFSFeed(dbFile.getAbsolutePath());
} catch (AmazonServiceException | IOException e) {
LOG.info("Error retrieving MapDB from S3, will download original GTFS.", e);
}
}
// if we fell through to here, getting the MapDB was unsuccessful;
// grab the original GTFS zip from S3 if it is not found locally
File feedFile = new File(cacheDir, id + ".zip");
if (!feedFile.exists() && bucket != null) {
try {
S3Object gtfs = s3.getObject(bucket, key + ".zip");
InputStream is = gtfs.getObjectContent();
FileOutputStream fos = new FileOutputStream(feedFile);
ByteStreams.copy(is, fos);
is.close();
fos.close();
} catch (Exception e) {
LOG.warn("Could not download feed at s3://{}/{}.", bucket, key);
throw new RuntimeException(e);
}
}
if (feedFile.exists()) {
// TODO this will also re-upload the original feed ZIP to S3.
try {
return put(originalId, feedFile);
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
throw new NoSuchElementException(originalId);
}
}
public static String cleanId(String id) {
return id.replaceAll("[^A-Za-z0-9]", "-");
}
}
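/*
 * End-to-end sketch of the lookup chain (the bucket and folder names are hypothetical;
 * pass a null bucket to work entirely from the local cache directory):
 *
 *   GTFSCache cache = new GTFSCache("my-gtfs-bucket", "feeds", new File("/var/cache/gtfs"));
 *   GTFSFeed feed = cache.put("nyc-2017-05", new File("nyc.zip")); // builds the MapDB and uploads .zip/.db/.db.p
 *   GTFSFeed again = cache.get("nyc-2017-05"); // in-memory cache, then local .db, then S3, then the original zip
 */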
|
package com.example;
import com.example.domain.Book;
import com.example.service.BookService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import java.util.List;
@Controller
public class BooksController {
private BookService bookService;
public BooksController(@Autowired final BookService bookService) {
this.bookService = bookService;
}
@GetMapping("/")
@ResponseBody
public List<Book> getAllBooks() {
return bookService.getAllBooks();
}
}
|
package com.github.eerohele;
import java.io.File;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;
import org.apache.tools.ant.Main;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.launch.Launcher;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.taskdefs.Java;
import org.apache.tools.ant.types.Commandline.Argument;
import org.apache.tools.ant.types.Environment.Variable;
import org.apache.tools.ant.types.FileSet;
import org.apache.tools.ant.types.Path;
public class DotTask extends Task {
private static final String TASK_NAME = "dita-ot";
private Boolean inheritAll = false;
private String home;
private String transtype;
private String workdir = System.getProperty("java.io.tmpdir");
private ArrayList<FileSet> filesets = new ArrayList<FileSet>();
private Vector<Parameter> params = new Vector<Parameter>();
public void setInheritAll(Boolean i) {
inheritAll = i;
}
public void setHome(String h) {
home = h;
}
public void setTranstype(String t) {
transtype = t;
}
public void setWorkdir(String w) {
workdir = w;
}
public void addFileset(FileSet f) {
filesets.add(f);
}
protected void validate() {
if (filesets.size() < 1) {
throw new BuildException("No fileset given.");
}
if (home == null || home.length() < 1) {
throw new BuildException("DITA-OT home directory not set.");
}
}
public void execute() {
validate();
getProject().setName(TASK_NAME);
Java task = initializeJavaTask(getProject());
task.setTaskName(TASK_NAME);
addSystemProperty(setParameters(task), Constants.TRANSTYPE, transtype);
ArrayList<File> files = new ArrayList<File>();
for (FileSet fs : filesets) {
DirectoryScanner ds = fs.getDirectoryScanner(getProject());
for (String path : ds.getIncludedFiles()) {
files.add(new File(fs.getDir(), path));
}
}
runJavaTaskOnFiles(task, files);
task.clearArgs();
}
public Parameter createParameter() {
Parameter param = new Parameter();
params.add(param);
return param;
}
public class Parameter {
public Parameter() {}
String name;
String value;
public void setName(String name) { this.name = name; }
public String getName() { return name; }
public void setValue(String value) { this.value = value; }
public String getValue() { return value; }
}
private Java setInheritedParameters(Java task) {
if (inheritAll) {
Hashtable<?, ?> props = getProject().getUserProperties();
Enumeration<?> e = props.keys();
while (e.hasMoreElements()) {
String key = e.nextElement().toString();
String value = props.get(key).toString();
addSystemProperty(task, key, value);
}
}
return task;
}
private Java setParameters(Java task) {
for (Parameter param : params) {
addSystemProperty(task, param.getName(), param.getValue());
}
return setInheritedParameters(task);
}
private void runJavaTaskOnFiles(Java task, List<File> files) {
for (File file : files) {
String baseName = getBaseName(file);
String tempDir = Paths.get(workdir,
Constants.TEMP,
baseName).toString();
String outputDir = Paths.get(workdir,
Constants.OUT,
baseName).toString();
addSystemProperty(task, Parameters.ARGS_INPUT, file.getPath());
addSystemProperty(task, Parameters.TEMP_DIR, tempDir);
addSystemProperty(task, Parameters.OUTPUT_DIR, outputDir);
task.executeJava();
}
}
private String getBaseName(File file) {
String fileName = file.getName();
int pos = fileName.lastIndexOf(Constants.PERIOD);
if (pos > 0) {
return fileName.substring(0, pos);
} else {
return fileName;
}
}
private Java addSystemProperty(Java task, String key, String value) {
Variable var = new Variable();
var.setKey(key);
var.setValue(value);
task.addSysproperty(var);
return task;
}
private Path makeClassPath(Project p) {
Path classPath = new Path(p, System.getProperty("java.class.path"));
FileSet fs = new FileSet();
fs.setDir(new File(home));
fs.appendIncludes(Constants.DITA_CLASSPATH_INCLUDES);
classPath.addFileset(fs);
return classPath;
}
private Java setBuildFile(Java task) {
Argument arg = task.createArg();
arg.setLine(Constants.ANTFLAGS_BUILDFILE +
Constants.SPACE +
new File(home, Main.DEFAULT_BUILD_FILENAME).getPath());
return task;
}
private Java initializeJavaTask(Project p) {
Java task = new Java();
task.setClassname(Launcher.class.getName());
task.setFork(true);
task.setProject(p);
task.setClasspath(makeClassPath(p));
return setBuildFile(task);
}
}
|
package com.github.jkutner;
import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.filefilter.IOFileFilter;
import org.apache.commons.io.filefilter.TrueFileFilter;
import org.xembly.Directives;
import org.xembly.ImpossibleModificationException;
import org.xembly.Xembler;
import java.io.*;
import java.net.URL;
import java.util.*;
public class BoincApp {
public static final String DEFAULT_WRAPPER_VERSION="26016";
private static String[] defaultPlatforms = new String[] {
"x86_64-apple-darwin",
"i686-apple-darwin",
"windows_intelx86",
"windows_x86_64",
"i686-pc-linux-gnu",
"x86_64-pc-linux-gnu"
};
private Set<String> platforms;
private File boincDir = new File(System.getProperty("user.dir"), "boinc");
private File srcUberjar;
private File srcJobXml;
private File srcTemplatesDir;
private String versionKey;
private File targetDir;
private String assimilatorClass;
public BoincApp(
File uberjar,
Map<String,Boolean> altPlatforms,
File jobXml,
File templatesDir,
String versionKey,
File targetDir,
String assimilatorClass
) {
platforms = new HashSet<String>();
for (String p : altPlatforms.keySet())
if (altPlatforms.get(p)) platforms.add(p);
for (String p : defaultPlatforms)
if (!altPlatforms.containsKey(p) || altPlatforms.get(p)) platforms.add(p);
this.srcUberjar = uberjar;
this.srcJobXml = jobXml;
this.srcTemplatesDir = templatesDir;
this.versionKey = versionKey == null ? UUID.randomUUID().toString() : versionKey;
this.targetDir = targetDir;
this.assimilatorClass = assimilatorClass == null ? "Assimilator" : assimilatorClass;
}
public void cleanBoincDir(Boolean keepWrapper) throws IOException {
if (this.boincDir.exists()) {
if (keepWrapper) {
for (File f : FileUtils.listFiles(this.boincDir, new WrapperFilter(), TrueFileFilter.INSTANCE)) {
if (!f.isDirectory()) {
FileUtils.forceDelete(f);
}
}
} else {
FileUtils.deleteDirectory(this.boincDir);
}
}
}
private static class WrapperFilter implements IOFileFilter {
public boolean accept(File file) {
return !"zip".equals(FilenameUtils.getExtension(file.getName()));
}
public boolean accept(File file, String s) {
return !"zip".equals(FilenameUtils.getExtension(s));
}
}
public void packageIntoBoincDir() throws IOException, ImpossibleModificationException, ZipException {
cleanBoincDir(true);
FileUtils.forceMkdir(boincDir);
File appDir = new File(boincDir, "app");
FileUtils.forceMkdir(appDir);
File binDir = new File(boincDir, "bin");
FileUtils.forceMkdir(binDir);
if (this.srcTemplatesDir.exists()) {
File templatesDir = new File(boincDir, "templates");
FileUtils.copyDirectory(this.srcTemplatesDir, templatesDir);
}
//File downloadsDir = new File(boincDir, "download");
//FileUtils.forceMkdir(downloadsDir);
String uberjarName = this.srcUberjar.getName();
String uberjarPhysicalName = FilenameUtils.getBaseName(this.srcUberjar.getName())+"_"+this.versionKey+".jar";
// TODO: should this only be created when an assimilator is actually configured?
createAssimilatorScript(binDir, uberjarPhysicalName);
for (String p : platforms) {
Map<String,File> files = new HashMap<String, File>();
File platformDir = new File(appDir, p);
FileUtils.forceMkdir(platformDir);
File uberjar = new File(platformDir, uberjarPhysicalName);
FileUtils.copyFile(this.srcUberjar, uberjar);
files.put(uberjarName, uberjar);
files.put("job.xml", copyJobXml(platformDir, p, uberjarName));
files.put("wrapper", installWrapper(platformDir, p));
createVersionFile(platformDir, files);
createComposerJson();
}
}
protected void createAssimilatorScript(File binDir, String uberjarName) throws IOException {
File scriptFile = new File(binDir, "java_assimilator");
//BufferedWriter out = null;
try (
InputStream is = getClass().getResourceAsStream( "/java_assimilator.sh");
BufferedReader br = new BufferedReader(new InputStreamReader(is));
FileWriter fw = new FileWriter(scriptFile);
BufferedWriter out = new BufferedWriter(fw);
) {
String line;
while ((line = br.readLine()) != null) {
line = line.replace("%uberjar_name%", uberjarName);
line = line.replace("%assimilator_class%", this.assimilatorClass);
out.write(line);
out.write("\n");
}
}
}
protected File copyJobXml(File platformDir, String platform, String uberjarName)
throws ImpossibleModificationException, IOException {
String xml = new Xembler(new Directives().add("job_desc")
.add("task")
.add("application").set(getJavaCmd(platform)).up()
.add("command_line").set("-jar " + uberjarName).up()
.add("append_cmdline_args")
).xml();
String jobFilename = "job_"+platform+"_"+this.versionKey+".xml";
File jobFile = new File(platformDir, jobFilename);
FileUtils.writeStringToFile(jobFile, xml);
return jobFile;
}
protected File installWrapper(File platformDir, String platform) throws IOException, ZipException {
String wrapperZipFilename = wrapperName(platform) + ".zip";
File wrapperZipFile = new File(this.targetDir, wrapperZipFilename);
if (wrapperZipFile.exists()) {
System.out.println("Using cached " + wrapperZipFilename + "...");
} else {
System.out.println("Downloading " + wrapperZipFilename + "...");
String urlString = System.getProperty(
"boinc.wrapper." + platform + ".url",
"http://boinc.berkeley.edu/dl/" + wrapperZipFilename);
URL wrapperUrl = new URL(urlString);
FileUtils.copyURLToFile(wrapperUrl, wrapperZipFile);
}
System.out.println("Extracting " + wrapperZipFilename + "...");
ZipFile zipFile = new ZipFile(wrapperZipFile);
zipFile.extractAll(platformDir.toString());
return new File(platformDir, wrapperName(platform)+wrapperExtension(platform));
}
protected void createVersionFile(File platformDir, Map<String,File> files)
throws ImpossibleModificationException, IOException {
Directives version = new Directives().add("version");
for (String logicalName : files.keySet()) {
File physicalFile = files.get(logicalName);
Directives fileXml = version.add("file")
.add("physical_name").set(physicalFile.getName()).up()
.add("copy_file").up();
if (logicalName.equals("wrapper")) {
fileXml.add("main_program").up();
} else {
fileXml.add("logical_name").set(logicalName).up();
}
fileXml.up();
}
String xml = new Xembler(version).xml();
File versionFile = new File(platformDir, "version.xml");
FileUtils.writeStringToFile(versionFile, xml);
}
protected void createComposerJson() throws IOException {
File composerJson = new File(System.getProperty("user.dir"), "composer.json");
if (!composerJson.exists())
FileUtils.writeStringToFile(composerJson, "{}");
}
protected String wrapperName(String platform) {
String wrapperVersion = System.getProperty("boinc.wrapper.version", wrapperVersion(platform));
return "wrapper_"+wrapperVersion+"_"+platform;
}
protected String wrapperVersion(String platform) {
if (platform.startsWith("windows_"))
return "26016";
return "26014";
}
protected String wrapperExtension(String platform) {
if (platform.startsWith("windows_"))
return ".exe";
return "";
}
protected String getJavaCmd(String platform) {
return "/usr/bin/java";
}
}
|
package com.github.mertakdut;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import com.github.mertakdut.BaseFindings.XmlItem;
import com.github.mertakdut.exception.OutOfPagesException;
import com.github.mertakdut.exception.ReadingException;
public class Reader {
private boolean isFoundNeeded;
private Content content;
private boolean isProgressFileFound;
/**
* Parses only the files needed for the book info.
*
* @param filePath
* @throws ReadingException
*/
public void setInfoContent(String filePath) throws ReadingException {
fillContent(filePath, false, false);
}
/**
* Parses all the files needed for reading the book. This method must be called before calling the readSection method.
*
* @param filePath
* @throws ReadingException
*/
public void setFullContent(String filePath) throws ReadingException {
fillContent(filePath, true, false);
}
/**
* Does the same job as setFullContent but also tries to load any saved progress. If no progress file is found, it works the same as setFullContent.
*
* @param filePath
* @return saved page index. 0 if no progress is found.
* @throws ReadingException
*/
public int setFullContentWithProgress(String filePath) throws ReadingException {
fillContent(filePath, true, true);
if (isProgressFileFound) {
return loadProgress();
} else {
return 0;
}
}
/**
* Main method that splits and gets the parts of the book.
*
* @param index
* @return
* @throws ReadingException
* @throws OutOfPagesException
* if index is greater than the page count.
*/
public BookSection readSection(int index) throws ReadingException, OutOfPagesException {
return content.maintainBookSections(index);
}
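/*
 * Reading-loop sketch implied by the javadoc above (the epub path is hypothetical).
 * setFullContent must be called first; readSection is then called with increasing
 * indexes until OutOfPagesException signals the end of the book:
 *
 *   Reader reader = new Reader();
 *   reader.setFullContent("/books/sample.epub");
 *   try {
 *       for (int i = 0; ; i++) {
 *           BookSection section = reader.readSection(i);
 *           // render or process the section here
 *       }
 *   } catch (OutOfPagesException e) {
 *       // reached the end of the book (ReadingException handling omitted for brevity)
 *   }
 */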
// Optionals
public void setMaxContentPerSection(int maxContentPerSection) {
Optionals.maxContentPerSection = maxContentPerSection;
}
public void setCssStatus(CssStatus cssStatus) {
Optionals.cssStatus = cssStatus;
}
public void setIsIncludingTextContent(boolean isIncludingTextContent) {
Optionals.isIncludingTextContent = isIncludingTextContent;
}
public void setIsOmittingTitleTag(boolean isOmittingTitleTag) {
Optionals.isOmittingTitleTag = isOmittingTitleTag;
}
// Additional operations
public Package getInfoPackage() {
return content.getPackage();
}
public Toc getToc() {
return content.getToc();
}
public byte[] getCoverImage() throws ReadingException {
if (content != null) {
return content.getCoverImage();
}
throw new ReadingException("Content info is not set.");
}
public void saveProgress(int lastPageIndex) throws ReadingException, OutOfPagesException {
if (lastPageIndex < content.getToc().getNavMap().getNavPoints().size()) {
content.getToc().setLastPageIndex(lastPageIndex);
} else {
throw new OutOfPagesException(lastPageIndex, content.getToc().getNavMap().getNavPoints().size());
}
saveProgress();
}
public void saveProgress() throws ReadingException {
ZipFile epubFile = null;
ZipOutputStream zipOutputStream = null;
ObjectOutputStream objectOutputStream = null;
String newFilePath = null;
try {
epubFile = new ZipFile(content.getZipFilePath());
String fileName = new File(content.getZipFilePath()).getName();
newFilePath = content.getZipFilePath().replace(fileName, "tmp_" + fileName);
zipOutputStream = new ZipOutputStream(new FileOutputStream(newFilePath));
Enumeration<? extends ZipEntry> entries = epubFile.entries();
while (entries.hasMoreElements()) {
ZipEntry entry = entries.nextElement();
if (entry.getName().equals(Constants.SAVE_FILE_NAME)) // Don't copy the old progress file; a new one is added below.
continue;
ZipEntry destEntry = new ZipEntry(entry.getName());
zipOutputStream.putNextEntry(destEntry);
if (!entry.isDirectory()) {
ContextHelper.copy(epubFile.getInputStream(entry), zipOutputStream);
}
zipOutputStream.closeEntry();
}
ZipEntry progressFileEntry = new ZipEntry(Constants.SAVE_FILE_NAME);
zipOutputStream.putNextEntry(progressFileEntry);
objectOutputStream = new ObjectOutputStream(zipOutputStream);
objectOutputStream.writeObject(content.getToc());
zipOutputStream.closeEntry();
} catch (IOException e) {
e.printStackTrace();
if (newFilePath != null) {
File newFile = new File(newFilePath);
if (newFile.exists()) {
newFile.delete();
}
}
throw new ReadingException("Error writing progressed ZipFile: " + e.getMessage());
} finally {
if (epubFile != null) {
try {
epubFile.close();
} catch (IOException e) {
e.printStackTrace();
throw new ReadingException("Error closing ZipFile: " + e.getMessage());
}
}
if (objectOutputStream != null) {
try {
objectOutputStream.close();
} catch (IOException e) {
e.printStackTrace();
throw new ReadingException("Error closing object output stream: " + e.getMessage());
}
}
}
File oldFile = new File(content.getZipFilePath());
if (oldFile.exists()) {
oldFile.delete();
}
File newFile = new File(newFilePath);
if (newFile.exists() && !oldFile.exists()) {
newFile.renameTo(new File(content.getZipFilePath()));
}
}
public boolean isSavedProgressFound() {
return isProgressFileFound;
}
public int loadProgress() throws ReadingException {
if (!isProgressFileFound)
throw new ReadingException("No save files are found. Loading progress is unavailable.");
ZipFile epubFile = null;
InputStream saveFileInputStream = null;
ObjectInputStream oiStream = null;
try {
try {
epubFile = new ZipFile(content.getZipFilePath());
ZipEntry zipEntry = epubFile.getEntry(Constants.SAVE_FILE_NAME);
saveFileInputStream = epubFile.getInputStream(zipEntry);
oiStream = new ObjectInputStream(saveFileInputStream);
Toc toc = (Toc) oiStream.readObject();
content.setToc(toc);
return content.getToc().getLastPageIndex();
} catch (IOException | ClassNotFoundException e) {
e.printStackTrace();
throw new ReadingException("Error initializing ZipFile: " + e.getMessage());
}
} finally {
if (epubFile != null) {
try {
epubFile.close();
} catch (IOException e) {
e.printStackTrace();
throw new ReadingException("Error closing ZipFile: " + e.getMessage());
}
}
if (oiStream != null) {
try {
oiStream.close();
} catch (IOException e) {
e.printStackTrace();
throw new ReadingException("Error closing object input stream: " + e.getMessage());
}
}
}
}
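/*
 * Progress round-trip sketch (same hypothetical path as above): the save file is
 * embedded into the epub itself, so a later setFullContentWithProgress call on the
 * same file can restore the last page index.
 *
 *   Reader reader = new Reader();
 *   int lastIndex = reader.setFullContentWithProgress("/books/sample.epub"); // 0 when no save file exists
 *   BookSection section = reader.readSection(lastIndex);
 *   reader.saveProgress(lastIndex + 1); // rewrites the epub with the new save entry
 */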
// Private methods
private Content fillContent(String zipFilePath, boolean isFullContent, boolean isLoadingProgress) throws ReadingException {
if (zipFilePath == null) {
throw new ReadingException("Epub file path is null.");
}
this.content = new Content();
this.content.setZipFilePath(zipFilePath);
ZipFile epubFile = null;
try {
try {
epubFile = new ZipFile(zipFilePath);
} catch (IOException e) {
e.printStackTrace();
throw new ReadingException("Error initializing ZipFile: " + e.getMessage());
}
Enumeration<? extends ZipEntry> files = epubFile.entries();
while (files.hasMoreElements()) {
ZipEntry entry = (ZipEntry) files.nextElement();
if (!entry.isDirectory()) {
String entryName = entry.getName();
if (entryName != null) {
content.addEntryName(entryName);
if (entryName.equals(Constants.SAVE_FILE_NAME)) {
isProgressFileFound = true;
}
}
}
}
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(false);
factory.setValidating(false);
try {
factory.setFeature("http://xml.org/sax/features/namespaces", false);
factory.setFeature("http://xml.org/sax/features/validation", false);
factory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false);
factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
} catch (ParserConfigurationException e) {
e.printStackTrace();
// throw new ReadingException("Error initializing DocumentBuilderFactory: " + e.getMessage());
}
DocumentBuilder docBuilder;
try {
docBuilder = factory.newDocumentBuilder();
} catch (ParserConfigurationException e) {
e.printStackTrace();
throw new ReadingException("DocumentBuilder cannot be created: " + e.getMessage());
}
boolean isContainerXmlFound = false;
boolean isTocXmlFound = false;
for (int i = 0; i < content.getEntryNames().size(); i++) {
if (isContainerXmlFound && (isTocXmlFound || !isFullContent)) {
break;
}
String currentEntryName = content.getEntryNames().get(i);
if (currentEntryName.contains(Constants.FILE_NAME_CONTAINER_XML)) {
isContainerXmlFound = true;
ZipEntry container = epubFile.getEntry(currentEntryName);
InputStream inputStream;
try {
inputStream = epubFile.getInputStream(container);
} catch (IOException e) {
e.printStackTrace();
throw new ReadingException("IOException while reading " + Constants.FILE_NAME_CONTAINER_XML + " file: " + e.getMessage());
}
Document document = getDocument(docBuilder, inputStream, Constants.FILE_NAME_CONTAINER_XML);
parseContainerXml(docBuilder, document, epubFile);
} else if ((!isLoadingProgress || !isProgressFileFound) && isFullContent && currentEntryName.contains(Constants.EXTENSION_NCX)) {
isTocXmlFound = true;
ZipEntry toc = epubFile.getEntry(currentEntryName);
InputStream inputStream;
try {
inputStream = epubFile.getInputStream(toc);
} catch (IOException e) {
e.printStackTrace();
throw new ReadingException("IOException while reading " + Constants.EXTENSION_NCX + " file: " + e.getMessage());
}
Document document = getDocument(docBuilder, inputStream, Constants.EXTENSION_NCX);
parseTocFile(document, content.getToc());
}
}
if (!isContainerXmlFound) {
throw new ReadingException("container.xml not found.");
}
if (!isTocXmlFound && isFullContent && (!isLoadingProgress || !isProgressFileFound)) {
throw new ReadingException("toc.ncx not found.");
}
if (isFullContent && (!isLoadingProgress || !isProgressFileFound)) {
mergeTocElements();
}
// Debug
// content.print();
return content;
} finally {
if (epubFile != null) {
try {
epubFile.close();
} catch (IOException e) {
e.printStackTrace();
throw new ReadingException("Error closing ZipFile: " + e.getMessage());
}
}
}
}
private void parseContainerXml(DocumentBuilder docBuilder, Document document, ZipFile epubFile) throws ReadingException {
if (document.hasChildNodes()) {
isFoundNeeded = false;
traverseDocumentNodesAndFillContent(document.getChildNodes(), content.getContainer());
}
String opfFilePath = content.getContainer().getFullPathValue();
ZipEntry opfFileEntry = epubFile.getEntry(opfFilePath);
if (opfFileEntry == null) {
for (String entryName : content.getEntryNames()) {
if (entryName.contains(Constants.EXTENSION_OPF)) {
opfFileEntry = epubFile.getEntry(entryName);
break;
}
}
}
if (opfFileEntry == null) {
throw new ReadingException(".opf file not found");
}
InputStream opfFileInputStream;
try {
opfFileInputStream = epubFile.getInputStream(opfFileEntry);
} catch (IOException e) {
e.printStackTrace();
throw new ReadingException("IO error while reading " + Constants.EXTENSION_OPF + " inputstream: " + e.getMessage());
}
Document packageDocument = getDocument(docBuilder, opfFileInputStream, Constants.EXTENSION_OPF);
parseOpfFile(packageDocument, content.getPackage());
}
private void parseOpfFile(Document document, Package pckage) throws ReadingException {
if (document.hasChildNodes()) {
isFoundNeeded = false;
traverseDocumentNodesAndFillContent(document.getChildNodes(), pckage);
}
}
private void parseTocFile(Document document, Toc toc) throws ReadingException {
if (document.hasChildNodes()) {
isFoundNeeded = false;
traverseDocumentNodesAndFillContent(document.getChildNodes(), toc);
}
}
private Document getDocument(DocumentBuilder docBuilder, InputStream inputStream, String fileName) throws ReadingException {
Document document;
try {
document = docBuilder.parse(inputStream);
inputStream.close();
return document;
} catch (Exception e) {
e.printStackTrace();
throw new ReadingException("Parse error while parsing " + fileName + " file: " + e.getMessage());
}
}
private void traverseDocumentNodesAndFillContent(NodeList nodeList, BaseFindings findings) throws ReadingException {
if (isFoundNeeded)
return;
for (int i = 0; i < nodeList.getLength(); i++) {
Node tempNode = nodeList.item(i);
if (tempNode.getNodeType() == Node.ELEMENT_NODE) {
isFoundNeeded = findings.fillContent(tempNode);
if (isFoundNeeded)
break;
if (tempNode.hasChildNodes()) {
traverseDocumentNodesAndFillContent(tempNode.getChildNodes(), findings);
}
}
}
}
private void mergeTocElements() throws ReadingException {
List<NavPoint> currentNavPoints = new ArrayList<>(content.getToc().getNavMap().getNavPoints());
int navPointIndex = 0; // Holds the position of the last duplicate content; when new content is found, insertion is done from that position.
int insertedNavPointCount = 0;
for (XmlItem spine : content.getPackage().getSpine().getXmlItemList()) {
Map<String, String> spineAttributes = spine.getAttributes();
String idRef = spineAttributes.get("idref");
for (XmlItem manifest : content.getPackage().getManifest().getXmlItemList()) {
Map<String, String> manifestAttributes = manifest.getAttributes();
String manifestElementId = manifestAttributes.get("id");
if (idRef.equals(manifestElementId)) {
NavPoint navPoint = new NavPoint();
// navPoint.setPlayOrder(currentNavPoints.size() + spineNavPoints.size() + 1); // Is playOrder needed? I think not because we've already sorted the navPoints with playOrder before
// merging.
navPoint.setContentSrc(ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(manifestAttributes.get("href"), Constants.SLASH)));
boolean duplicateContentSrc = false;
boolean isAnchoredFound = false;
for (int j = 0; j < currentNavPoints.size(); j++) {
NavPoint navPointItem = currentNavPoints.get(j);
if (navPoint.getContentSrc().equals(navPointItem.getContentSrc())) {
duplicateContentSrc = true;
navPointIndex = j;
break;
} else if (!isAnchoredFound && navPoint.getContentSrc().startsWith(navPointItem.getContentSrc()) && navPoint.getContentSrc().replace(navPointItem.getContentSrc(), "").startsWith("%23")) {
isAnchoredFound = true;
navPointIndex = j;
} else if (!isAnchoredFound && navPointItem.getContentSrc().startsWith(navPoint.getContentSrc()) && navPointItem.getContentSrc().replace(navPoint.getContentSrc(), "").startsWith("%23")) {
isAnchoredFound = true;
navPointIndex = j;
}
}
if (!duplicateContentSrc) {
content.getToc().getNavMap().getNavPoints().add(navPointIndex + insertedNavPointCount++, navPoint);
}
}
}
}
}
}
|
package com.jaamsim.basicsim;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.concurrent.atomic.AtomicLong;
import com.jaamsim.datatypes.DoubleVector;
import com.jaamsim.events.Conditional;
import com.jaamsim.events.EventHandle;
import com.jaamsim.events.EventManager;
import com.jaamsim.events.ProcessTarget;
import com.jaamsim.input.AttributeDefinitionListInput;
import com.jaamsim.input.AttributeHandle;
import com.jaamsim.input.BooleanInput;
import com.jaamsim.input.Input;
import com.jaamsim.input.InputAgent;
import com.jaamsim.input.InputErrorException;
import com.jaamsim.input.Keyword;
import com.jaamsim.input.KeywordIndex;
import com.jaamsim.input.Output;
import com.jaamsim.input.OutputHandle;
import com.jaamsim.input.StringInput;
import com.jaamsim.input.SynonymInput;
import com.jaamsim.ui.FrameBox;
import com.jaamsim.units.DimensionlessUnit;
import com.jaamsim.units.TimeUnit;
import com.jaamsim.units.Unit;
import com.jaamsim.units.UserSpecifiedUnit;
/**
* Abstract class that encapsulates the methods and data needed to create a
* simulation object. Encapsulates the basic system objects to achieve discrete
* event execution.
*/
public class Entity {
private static AtomicLong entityCount = new AtomicLong(0);
private static final ArrayList<Entity> allInstances;
private static final HashMap<String, Entity> namedEntities;
private String entityName;
private final long entityNumber;
//public static final int FLAG_TRACE = 0x01; // reserved in case we want to treat tracing like the other flags
//public static final int FLAG_TRACEREQUIRED = 0x02;
//public static final int FLAG_TRACESTATE = 0x04;
public static final int FLAG_LOCKED = 0x08;
//public static final int FLAG_TRACKEVENTS = 0x10;
public static final int FLAG_ADDED = 0x20;
public static final int FLAG_EDITED = 0x40;
public static final int FLAG_GENERATED = 0x80;
public static final int FLAG_DEAD = 0x0100;
private int flags;
protected boolean traceFlag = false;
private final ArrayList<Input<?>> inpList = new ArrayList<>();
private final HashMap<String, AttributeHandle> attributeMap = new HashMap<>();
private final BooleanInput trace;
@Keyword(description = "A free form string describing the Entity",
exampleList = {"'A very useful entity'"})
protected final StringInput desc;
@Keyword(description = "The list of user defined attributes for this entity.\n" +
" The attribute name is followed by its initial value. The unit provided for" +
"this value will determine the attribute's unit type.",
exampleList = {"{ A 20.0 s } { alpha 42 }"})
public final AttributeDefinitionListInput attributeDefinitionList;
static {
allInstances = new ArrayList<>(100);
namedEntities = new HashMap<>(100);
}
{
trace = new BooleanInput("Trace", "Key Inputs", false);
trace.setHidden(true);
this.addInput(trace);
desc = new StringInput("Description", "Key Inputs", "");
desc.setHidden(true);
this.addInput(desc);
attributeDefinitionList = new AttributeDefinitionListInput(this, "AttributeDefinitionList",
"Key Inputs", new ArrayList<AttributeHandle>());
attributeDefinitionList.setHidden(true);
this.addInput(attributeDefinitionList);
}
/**
* Constructor for entity initializing members.
*/
public Entity() {
entityNumber = getNextID();
synchronized(allInstances) {
allInstances.add(this);
}
flags = 0;
}
private static long getNextID() {
return entityCount.incrementAndGet();
}
public static ArrayList<? extends Entity> getAll() {
synchronized(allInstances) {
return allInstances;
}
}
public static <T extends Entity> ArrayList<T> getInstancesOf(Class<T> proto) {
ArrayList<T> instanceList = new ArrayList<>();
for (Entity each : allInstances) {
if (proto == each.getClass()) {
instanceList.add(proto.cast(each));
}
}
return instanceList;
}
public static <T extends Entity> InstanceIterable<T> getInstanceIterator(Class<T> proto){
return new InstanceIterable<>(proto);
}
public static <T extends Entity> ClonesOfIterable<T> getClonesOfIterator(Class<T> proto){
return new ClonesOfIterable<>(proto);
}
/**
* Returns an iterator over the given proto class, but also filters only those
* objects that implement the given interface class.
* @return
*/
public static <T extends Entity> ClonesOfIterableInterface<T> getClonesOfIterator(Class<T> proto, Class<?> iface){
return new ClonesOfIterableInterface<>(proto, iface);
}
public static <T extends Entity> ArrayList<T> getClonesOf(Class<T> proto) {
ArrayList<T> cloneList = new ArrayList<>();
for (Entity each : allInstances) {
if (proto.isAssignableFrom(each.getClass())) {
cloneList.add(proto.cast(each));
}
}
return cloneList;
}
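/*
 * Note on the instance lookups above: getInstancesOf matches the exact class only
 * (proto == each.getClass()), while getClonesOf also returns subclasses
 * (proto.isAssignableFrom). A short contrast, assuming a hypothetical Truck extends Vehicle:
 *
 *   ArrayList<Vehicle> exact = Entity.getInstancesOf(Vehicle.class); // excludes Trucks
 *   ArrayList<Vehicle> all = Entity.getClonesOf(Vehicle.class);      // includes Trucks
 */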
public static Entity idToEntity(long id) {
synchronized (allInstances) {
for (Entity e : allInstances) {
if (e.getEntityNumber() == id) {
return e;
}
}
return null;
}
}
public void validate() throws InputErrorException {
for (Input<?> in : inpList) {
in.validate();
}
}
/**
* Initialises the entity prior to the start of the model run.
* <p>
* This method must not depend on any other entities so that it can be
* called for each entity in any sequence.
*/
public void earlyInit() {
// Reset the attributes to their initial values
for (AttributeHandle h : attributeMap.values()) {
h.setValue(h.getInitialValue());
}
}
/**
* Initialises the entity prior to the start of the model run.
* <p>
* This method assumes other entities have already called earlyInit.
*/
public void lateInit() {}
/**
* Starts the execution of the model run for this entity.
* <p>
* If required, initialisation that depends on another entity can be
* performed in this method. It is called after earlyInit().
*/
public void startUp() {}
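/*
 * Lifecycle sketch for subclasses (MyEntity and doSomething are hypothetical). Per the
 * javadocs above, earlyInit must not depend on other entities, while startUp runs after
 * every entity has been initialised and may schedule the entity's first events:
 *
 *   public class MyEntity extends Entity {
 *       @Override
 *       public void earlyInit() {
 *           super.earlyInit(); // resets attributes to their initial values
 *           // reset this entity's own statistics and state here
 *       }
 *       @Override
 *       public void startUp() {
 *           // safe to reference other entities; schedule the first event, e.g. startProcess("doSomething")
 *       }
 *   }
 */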
/**
* Resets the statistics collected by the entity.
*/
public void clearStatistics() {}
/**
* Assigns input values that are helpful when the entity is dragged and
* dropped into a model.
*/
public void setInputsForDragAndDrop() {}
public void kill() {
synchronized (allInstances) {
for (int i = 0; i < allInstances.size(); i++) {
if (allInstances.get(i) == this) {
allInstances.remove(i);
break;
}
}
}
if (!testFlag(FLAG_GENERATED)) {
synchronized (namedEntities) {
if (namedEntities.get(entityName) == this)
namedEntities.remove(entityName);
entityName = null;
}
}
setFlag(FLAG_DEAD);
}
/**
* Performs any actions that are required at the end of the simulation run, e.g. to create an output report.
*/
public void doEnd() {}
public static long getEntitySequence() {
long seq = (long)allInstances.size() << 32;
seq += entityCount.get();
return seq;
}
/**
* Get the current Simulation ticks value.
* @return the current simulation tick
*/
public final long getSimTicks() {
return EventManager.simTicks();
}
/**
* Get the current Simulation time.
* @return the current time in seconds
*/
public final double getSimTime() {
return EventManager.simSeconds();
}
public final double getCurrentTime() {
long ticks = getSimTicks();
return ticks / Simulation.getSimTimeFactor();
}
protected void addInput(Input<?> in) {
inpList.add(in);
}
protected void addSynonym(Input<?> in, String synonym) {
inpList.add(new SynonymInput(synonym, in));
}
public final Input<?> getInput(String key) {
for (int i = 0; i < inpList.size(); i++) {
Input<?> in = inpList.get(i);
if (key.equals(in.getKeyword())) {
if (in.isSynonym())
return ((SynonymInput)in).input;
else
return in;
}
}
return null;
}
/**
* Copy the inputs for each keyword to the caller. Any inputs that have already
* been set for the caller are overwritten by those for the entity being copied.
* @param ent = entity whose inputs are to be copied
*/
public void copyInputs(Entity ent) {
ArrayList<String> tmp = new ArrayList<>();
for (Input<?> sourceInput : ent.inpList) {
if (sourceInput.isDefault() || sourceInput.isSynonym()) {
continue;
}
tmp.clear();
sourceInput.getValueTokens(tmp);
KeywordIndex kw = new KeywordIndex(sourceInput.getKeyword(), tmp, null);
InputAgent.apply(this, kw);
}
}
/**
* Creates an exact copy of the specified entity.
* <p>
* All the entity's inputs are copied to the new entity, but its internal
* properties are left uninitialised.
* @param ent - entity to be copied.
* @param name - name of the copied entity.
* @return - copied entity.
*/
public static <T extends Entity> T fastCopy(T ent, String name) {
// Create the new entity
@SuppressWarnings("unchecked")
T ret = (T)InputAgent.generateEntityWithName(ent.getClass(), name);
// Loop through the original entity's inputs
ArrayList<Input<?>> orig = ent.getEditableInputs();
for (int i = 0; i < orig.size(); i++) {
Input<?> sourceInput = orig.get(i);
// Default values do not need to be copied
if (sourceInput.isDefault() || sourceInput.isSynonym())
continue;
// Assign the value to the copied entity's input
Input<?> targetInput = ret.getEditableInputs().get(i);
targetInput.copyFrom(sourceInput);
ret.updateForInput(targetInput);
}
return ret;
}
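/*
 * Copy sketch (the Server class and entity names are hypothetical): fastCopy duplicates
 * only the non-default inputs, so the returned entity still needs the normal
 * earlyInit/startUp initialisation before it takes part in a run.
 *
 *   Server original = (Server) Entity.getNamedEntity("Server1");
 *   Server copy = Entity.fastCopy(original, "Server2");
 */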
public void setFlag(int flag) {
flags |= flag;
}
public void clearFlag(int flag) {
flags &= ~flag;
}
public boolean testFlag(int flag) {
return (flags & flag) != 0;
}
public void setTraceFlag() {
traceFlag = true;
}
public void clearTraceFlag() {
traceFlag = false;
}
/**
* Method to return the name of the entity.
* Note that the name of the entity may not be the unique identifier used in the namedEntityHashMap; see Entity.toString()
*/
public final String getName() {
return entityName;
}
/**
* Get the unique number for this entity
* @return
*/
public long getEntityNumber() {
return entityNumber;
}
/**
* Method to return the unique identifier of the entity. Used when building Edit tree labels
* @return entityName
*/
@Override
public String toString() {
return getName();
}
public static Entity getNamedEntity(String name) {
synchronized (namedEntities) {
return namedEntities.get(name);
}
}
/**
* Method to set the input name of the entity.
*/
public void setName(String newName) {
if (testFlag(FLAG_GENERATED)) {
entityName = newName;
return;
}
synchronized (namedEntities) {
namedEntities.remove(entityName);
entityName = newName;
namedEntities.put(entityName, this);
}
}
/**
* This method updates the Entity for changes in the given input
*/
public void updateForInput( Input<?> in ) {
if (in == trace) {
if (trace.getValue())
this.setTraceFlag();
else
this.clearTraceFlag();
return;
}
if (in == attributeDefinitionList) {
attributeMap.clear();
for (AttributeHandle h : attributeDefinitionList.getValue()) {
this.addAttribute(h.getName(), h);
}
// Update the OutputBox
FrameBox.reSelectEntity();
return;
}
}
static long calculateDelayLength(double waitLength) {
return Math.round(waitLength * Simulation.getSimTimeFactor());
}
public double calculateDiscreteTime(double time) {
long discTime = calculateDelayLength(time);
return discTime / Simulation.getSimTimeFactor();
}
public double calculateEventTime(double waitLength) {
long eventTime = getSimTicks() + calculateDelayLength(waitLength);
if( eventTime < 0 ) {
eventTime = Long.MAX_VALUE;
}
return eventTime / Simulation.getSimTimeFactor();
}
public double calculateEventTimeBefore(double waitLength) {
long eventTime = getSimTicks() + (long)Math.floor(waitLength * Simulation.getSimTimeFactor());
if( eventTime < 0 ) {
eventTime = Long.MAX_VALUE;
}
return eventTime / Simulation.getSimTimeFactor();
}
public double calculateEventTimeAfter(double waitLength) {
long eventTime = getSimTicks() + (long)Math.ceil(waitLength * Simulation.getSimTimeFactor());
return eventTime / Simulation.getSimTimeFactor();
}
public final void startProcess(String methodName, Object... args) {
ProcessTarget t = new ReflectionTarget(this, methodName, args);
startProcess(t);
}
public final void startProcess(ProcessTarget t) {
EventManager.startProcess(t);
}
public final void scheduleProcess(double secs, int priority, ProcessTarget t) {
EventManager.scheduleSeconds(secs, priority, false, t, null);
}
public final void scheduleProcess(double secs, int priority, ProcessTarget t, EventHandle handle) {
EventManager.scheduleSeconds(secs, priority, false, t, handle);
}
public final void scheduleProcess(double secs, int priority, boolean fifo, ProcessTarget t, EventHandle handle) {
EventManager.scheduleSeconds(secs, priority, fifo, t, handle);
}
public final void scheduleProcessTicks(long ticks, int priority, boolean fifo, ProcessTarget t, EventHandle h) {
EventManager.scheduleTicks(ticks, priority, fifo, t, h);
}
public final void scheduleProcessTicks(long ticks, int priority, ProcessTarget t) {
EventManager.scheduleTicks(ticks, priority, false, t, null);
}
public final void waitUntil(Conditional cond, EventHandle handle) {
// Don't actually wait if the condition is already true
if (cond.evaluate()) return;
EventManager.waitUntil(cond, handle);
}
/**
* Wait a number of simulated seconds and a given priority.
* @param secs
* @param priority
*/
public final void simWait(double secs, int priority) {
EventManager.waitSeconds(secs, priority, false, null);
}
/**
* Wait a number of simulated seconds and a given priority.
* @param secs
* @param priority
*/
public final void simWait(double secs, int priority, EventHandle handle) {
EventManager.waitSeconds(secs, priority, false, handle);
}
/**
* Wait a number of simulated seconds and a given priority.
* @param secs
* @param priority
*/
public final void simWait(double secs, int priority, boolean fifo, EventHandle handle) {
EventManager.waitSeconds(secs, priority, fifo, handle);
}
/**
* Wait a number of discrete simulation ticks and a given priority.
* @param ticks
* @param priority
*/
public final void simWaitTicks(long ticks, int priority) {
EventManager.waitTicks(ticks, priority, false, null);
}
/**
* Wait a number of discrete simulation ticks and a given priority.
* @param ticks
* @param priority
* @param fifo
* @param handle
*/
public final void simWaitTicks(long ticks, int priority, boolean fifo, EventHandle handle) {
EventManager.waitTicks(ticks, priority, fifo, handle);
}
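// Usage sketch for the scheduling/wait wrappers, called from within a process; the method name
// "doMaintenance" and the priority value 5 are illustrative only:
//   simWait(10.0, 5);                                                       // block this process for 10 simulated seconds
//   startProcess("doMaintenance");                                          // spawn a new process via reflection
//   scheduleProcess(60.0, 5, new ReflectionTarget(this, "doMaintenance"));  // run it again in 60 seconds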
/**
* Wrapper of eventManager.scheduleWait(). Used as a syntactic nicety for
* calling the wait method.
*
* @param duration The duration to wait
* @param priority The relative priority of the event scheduled
*/
public final void scheduleWait( double duration, int priority ) {
long waitLength = calculateDelayLength(duration);
if (waitLength == 0)
return;
EventManager.waitTicks(waitLength, priority, false, null);
}
/**
* Wrapper of eventManager.scheduleWait(). Used as a syntactic nicety for
* calling the wait method.
*
* @param duration The duration to wait
* @param priority The relative priority of the event scheduled
*/
public final void scheduleWait( double duration, int priority, EventHandle handle ) {
long waitLength = calculateDelayLength(duration);
if (waitLength == 0)
return;
EventManager.waitTicks(waitLength, priority, false, handle);
}
public void handleSelectionLost() {}
// EDIT TABLE METHODS
public ArrayList<Input<?>> getEditableInputs() {
return inpList;
}
// TRACING METHODS
/**
* Track the given subroutine.
*/
public void trace(String meth) {
if (traceFlag) InputAgent.trace(0, this, meth);
}
/**
* Track the given subroutine.
*/
public void trace(int level, String meth) {
if (traceFlag) InputAgent.trace(level, this, meth);
}
/**
* Track the given subroutine (one line of text).
*/
public void trace(String meth, String text1) {
if (traceFlag) InputAgent.trace(0, this, meth, text1);
}
/**
* Track the given subroutine (two lines of text).
*/
public void trace(String meth, String text1, String text2) {
if (traceFlag) InputAgent.trace(0, this, meth, text1, text2);
}
/**
* Print an additional line of tracing.
*/
public void traceLine(String text) {
this.trace( 1, text );
}
public void error(String fmt, Object... args)
throws ErrorException {
final StringBuilder sb = new StringBuilder(this.getName());
sb.append(": ");
sb.append(String.format(fmt, args));
throw new ErrorException(sb.toString());
}
/**
* Print an error message.
*/
public void error( String meth, String text1, String text2 ) {
double time = 0.0d;
if (EventManager.hasCurrent())
time = getCurrentTime();
InputAgent.logError("Time:%.5f Entity:%s%n%s%n%s%n%s%n",
time, getName(),
meth, text1, text2);
// We don't want the model to keep executing, throw an exception and let
// the higher layers figure out if we should terminate the run or not.
throw new ErrorException("ERROR: %s", getName());
}
/**
* Print a warning message.
*/
public void warning( String meth, String text1, String text2 ) {
double time = 0.0d;
if (EventManager.hasCurrent())
time = getCurrentTime();
InputAgent.logWarning("Time:%.5f Entity:%s%n%s%n%s%n%s%n",
time, getName(),
meth, text1, text2);
}
/**
* Returns a user-specific unit type. This is needed for entity types such as distributions that may change the unit type
* that is returned at runtime.
* @return the user-specified unit type
*/
public Class<? extends Unit> getUserUnitType() {
return DimensionlessUnit.class;
}
public final OutputHandle getOutputHandle(String outputName) {
if (hasAttribute(outputName))
return attributeMap.get(outputName);
if (hasOutput(outputName)) {
OutputHandle ret = new OutputHandle(this, outputName);
if (ret.getUnitType() == UserSpecifiedUnit.class)
ret.setUnitType(getUserUnitType());
return ret;
}
return null;
}
/**
* Optimized version of getOutputHandle() for output names that are known to be interned
* @param outputName the interned name of the output
* @return the OutputHandle for the named output or attribute, or null if it does not exist
*/
public final OutputHandle getOutputHandleInterned(String outputName) {
if (hasAttribute(outputName))
return attributeMap.get(outputName);
if (OutputHandle.hasOutputInterned(this.getClass(), outputName)) {
OutputHandle ret = new OutputHandle(this, outputName);
if (ret.getUnitType() == UserSpecifiedUnit.class)
ret.setUnitType(getUserUnitType());
return ret;
}
return null;
}
public boolean hasOutput(String outputName) {
if (OutputHandle.hasOutput(this.getClass(), outputName))
return true;
if (attributeMap.containsKey(outputName))
return true;
return false;
}
private static final String OUTPUT_FORMAT = "%s\t%s\t%s\t%s%n";
private static final String LIST_OUTPUT_FORMAT = "%s\t%s[%s]\t%s\t%s%n";
/**
* Writes the entry in the output report for this entity.
* @param file - the file in which the outputs are written
* @param simTime - simulation time at which the outputs are evaluated
*/
public void printReport(FileEntity file, double simTime) {
// Loop through the outputs
ArrayList<OutputHandle> handles = OutputHandle.getOutputHandleList(this);
for (OutputHandle out : handles) {
// Should this output appear in the report?
if (!out.isReportable())
continue;
// Determine the preferred unit for this output
Class<? extends Unit> ut = out.getUnitType();
double factor = Unit.getDisplayedUnitFactor(ut);
String unitString = Unit.getDisplayedUnit(ut);
if (ut == Unit.class || ut == DimensionlessUnit.class)
unitString = "-";
// Numerical output
if (out.isNumericValue()) {
double val = out.getValueAsDouble(simTime, Double.NaN)/factor;
file.format(OUTPUT_FORMAT,
this.getName(), out.getName(), val, unitString);
}
// DoubleVector output
else if (out.getReturnType() == DoubleVector.class) {
DoubleVector vec = out.getValue(simTime, DoubleVector.class);
for (int i=0; i<vec.size(); i++) {
double val = vec.get(i);
file.format(LIST_OUTPUT_FORMAT,
this.getName(), out.getName(), i, val/factor, unitString);
}
}
// ArrayList output
else if (out.getReturnType() == ArrayList.class) {
ArrayList<Object> array = out.getValue(simTime, ArrayList.class);
for (int i=0; i<array.size(); i++) {
Object obj = array.get(i);
if (obj instanceof Double) {
double val = (Double)obj;
file.format(LIST_OUTPUT_FORMAT,
this.getName(), out.getName(), i, val/factor, unitString);
}
else {
file.format(LIST_OUTPUT_FORMAT,
this.getName(), out.getName(), i, obj, unitString);
}
}
}
// Keyed output
else if (out.getReturnType() == LinkedHashMap.class) {
LinkedHashMap<Object, Object> map = out.getValue(simTime, LinkedHashMap.class);
for (Object key : map.keySet()) {
Object obj = map.get(key);
if (obj instanceof Double) {
double val = (Double)obj;
file.format(LIST_OUTPUT_FORMAT,
this.getName(), out.getName(), key, val/factor, unitString);
}
else {
file.format(LIST_OUTPUT_FORMAT,
this.getName(), out.getName(), key, obj, unitString);
}
}
}
// All other outputs
else {
if (ut != Unit.class && ut != DimensionlessUnit.class)
unitString = Unit.getSIUnit(ut); // other outputs are not converted to preferred units
String str = out.getValue(simTime, out.getReturnType()).toString();
file.format(OUTPUT_FORMAT,
this.getName(), out.getName(), str, unitString);
}
}
}
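// Example of a report line written with OUTPUT_FORMAT (fields are tab-separated); the entity name,
// output name and value below are illustrative only:
//   Server1    Utilisation    0.75    -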
/**
* Returns true if there are any outputs that will be printed to the output report.
*/
public boolean isReportable() {
return OutputHandle.isReportable(getClass());
}
public String getDescription() {
return desc.getValue();
}
private void addAttribute(String name, AttributeHandle h) {
attributeMap.put(name, h);
}
public boolean hasAttribute(String name) {
return attributeMap.containsKey(name);
}
public Class<? extends Unit> getAttributeUnitType(String name) {
AttributeHandle h = attributeMap.get(name);
if (h == null)
return null;
return h.getUnitType();
}
public void setAttribute(String name, double value, Class<? extends Unit> ut) {
AttributeHandle h = attributeMap.get(name);
if (h == null)
this.error("Invalid attribute name: %s", name);
if (h.getUnitType() != ut)
this.error("Invalid unit returned by an expression. Received: %s, expected: %s",
ut.getSimpleName(), h.getUnitType().getSimpleName(), "");
h.setValue(value);
}
public ArrayList<String> getAttributeNames(){
ArrayList<String> ret = new ArrayList<>();
for (String name : attributeMap.keySet()) {
ret.add(name);
}
return ret;
}
public ObjectType getObjectType() {
return ObjectType.getObjectTypeForClass(this.getClass());
}
@Output(name = "Name",
description = "The unique input name for this entity.",
sequence = 0)
public String getNameOutput(double simTime) {
return entityName;
}
@Output(name = "ObjectType",
description = "The class of objects that this entity belongs to.",
sequence = 1)
public String getObjectTypeName(double simTime) {
return this.getObjectType().getName();
}
@Output(name = "SimTime",
description = "The present simulation time.",
unitType = TimeUnit.class,
sequence = 2)
public double getSimTime(double simTime) {
return simTime;
}
}
|
package com.jcabi.aspects;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.concurrent.TimeUnit;
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Cacheable {
/**
* Lifetime of an object in cache, in time units.
*/
int lifetime() default 1;
/**
* Time units of object lifetime.
*
* <p>The minimum unit you can use is a second. We simply can't cache for
* less than a second, because cache is being cleaned every second.
*/
TimeUnit unit() default TimeUnit.MINUTES;
/**
* Keep in cache forever.
*/
boolean forever() default false;
/**
* Identifies a method that should flush all cached entities of
* this class/object.
* @since 0.7.14
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Flush {
}
}
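// Usage sketch for this annotation (weaving via jcabi-aspects/AspectJ is assumed; the method names are illustrative):
//   @Cacheable(lifetime = 5, unit = TimeUnit.SECONDS)
//   String loadResource(String key) { ... }
//
//   @Cacheable.Flush
//   void invalidateCache() { ... }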
|
package edu.colorado.csdms.wmt.client.ui;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.DragOverEvent;
import com.google.gwt.event.dom.client.DragOverHandler;
import com.google.gwt.event.dom.client.DropEvent;
import com.google.gwt.event.dom.client.DropHandler;
import com.google.gwt.user.client.ui.Tree;
import com.google.gwt.user.client.ui.TreeItem;
import edu.colorado.csdms.wmt.client.data.Component;
import edu.colorado.csdms.wmt.client.data.ModelJSO;
import edu.colorado.csdms.wmt.client.data.Port;
/**
* A ModelTree is used to graphically represent the construction of a
* simulation through component models, each represented by a
* {@link ModelCell}.
*
* @author Mark Piper (mark.piper@colorado.edu)
*/
public class ModelTree extends Tree implements DragOverHandler, DropHandler {
public DataManager data; // experimenting with a public member variable
private List<ModelCell> openModelCells;
/**
* Creates a ModelTree with an open "driver" port.
*
* @param data A DataManager object.
*/
public ModelTree(DataManager data) {
this.data = data;
this.openModelCells = new ArrayList<ModelCell>();
initializeTree();
this.data.setModelTree(this);
// Set an empty model in the DataManager. It will be used to save the
// model created with this ModelTree.
ModelJSO model = (ModelJSO) ModelJSO.createObject();
this.data.setModel(model);
this.data.getModel().setName("Model " + this.data.saveAttempts.toString());
// Set up ModelTree event handlers.
addDomHandler(this, DragOverEvent.getType());
addDomHandler(this, DropEvent.getType());
}
/**
* A worker that sets up the root TreeItem (the "driver") of the ModelTree.
*/
public void initializeTree() {
Port driverPort = new Port("driver", true);
addTreeItem(driverPort, null);
}
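// Usage sketch; the DataManager instance and the Component variable are assumed to exist elsewhere:
//   ModelTree tree = new ModelTree(dataManager);
//   TreeItem driverItem = tree.getItem(0);        // the "driver" TreeItem created by initializeTree()
//   tree.addComponent(someComponent, driverItem); // connect a component to the driver port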
/**
* Adds a new TreeItem with a ModelCell to the ModelTree at the targeted
* leaf location, or at the root if the target is missing.
*
* @param port the Port used to create a ModelCell for the TreeItem
* @param target the targeted leaf TreeItem
* @return the reference to the created TreeItem
*/
public TreeItem addTreeItem(Port port, TreeItem target) {
ModelCell cell = new ModelCell(port, Component.makeInfoComponent());
TreeItem item = null;
if (target == null) {
item = new TreeItem(cell);
this.addItem(item);
} else {
item = target.addItem(cell);
item.setStyleName("wmt-TreeItem");
}
cell.setParentTreeItem(item); // Clumsy
return item;
}
/**
* Adds a Component to the ModelCell used by the targeted TreeItem. Uses
* {@link #setComponent(Component, TreeItem)}.
*
* @param component the Component to add
* @param target the TreeItem to which the Component is to be added
*/
public void addComponent(Component component, TreeItem target) {
GWT.log("Adding component: " + component.getName());
// Mark the model as unsaved with an asterisk. Is this the driver port? If
// so, also suggest a model name.
if (this.getItem(0).equals(target)) {
data.getModel().setName(
component.getName() + " " + data.saveAttempts.toString());
}
data.getPerspective().setModelPanelTitle(false);
this.setComponent(component, target);
// Ensure that the (class) component replaces the model component.
data.replaceModelComponent(data.getComponent(component.getId()));
}
/**
* Adds a Component to the ModelCell used by the targeted TreeItem.
*
* @param component the Component to add
* @param target the TreeItem to which the Component is to be added
*/
public void setComponent(Component component, TreeItem target) {
// Get the ModelCell used by the TreeItem target.
ModelCell cell = (ModelCell) target.getWidget();
// If the Component already exists at a higher level in the ModelTree, set
// a link to it and exit.
Component connected1 = hasConnectedInstance(cell.getPortCell().getPort());
if (connected1 != null) {
cell.setComponentCell(cell.new ComponentCell(connected1));
cell.getComponentCell().addStyleDependentName("linked");
cell.getComponentCell().isLinked(true);
return;
}
// Connect the new Component to the ModelCell.
cell.setComponentCell(cell.new ComponentCell(component));
cell.isConnected(true);
// Add new, empty, TreeItems for the "uses" ports of the Component.
for (int i = 0; i < component.getUsesPorts().length; i++) {
Port newPort = new Port();
newPort.setId(component.getUsesPorts()[i].getId());
newPort.isRequired(component.getUsesPorts()[i].isRequired());
TreeItem newItem = addTreeItem(newPort, target);
// If this new Port has a connected Component higher in the ModelTree,
// set a link to it.
ModelCell newCell = (ModelCell) newItem.getWidget();
Component connected2 = hasConnectedInstance(newPort);
if (connected2 != null) {
newCell.setComponentCell(newCell.new ComponentCell(connected2));
newCell.getComponentCell().addStyleDependentName("linked");
newCell.getComponentCell().isLinked(true);
}
}
// Update the sensitivity of the DragCells in the ComponentList.
data.getComponentList().setCellSensitivity();
}
/**
* Iterate through the TreeItems of this ModelTree, finding which ModelCells
* have open PortCells. Each such cell is added to the openModelCells list.
*
* @return a List of ModelCells with open ports.
*/
public List<ModelCell> findOpenModelCells() {
// Always start with a fresh list.
openModelCells.clear();
Iterator<TreeItem> iter = this.treeItemIterator();
while (iter.hasNext()) {
TreeItem treeItem = (TreeItem) iter.next();
ModelCell cell = (ModelCell) treeItem.getWidget();
if (cell.getComponentCell().getComponent().getId() == null) {
openModelCells.add(cell);
}
}
return openModelCells;
}
/**
* Checks whether a given component is present in the ModelTree. This is an
* overloaded version of {@link #isComponentPresent(String)}.
*
* @param component a Component to check
* @return true if the component is in the ModelTree
*/
public Boolean isComponentPresent(Component component) {
String componentId = component.getId();
return isComponentPresent(componentId);
}
/**
* Checks whether a given component is present in the ModelTree.
*
* @param componentId the id of component to check
* @return true if the component is in the ModelTree
*/
public Boolean isComponentPresent(String componentId) {
Boolean componentIsPresent = false;
if (componentId != null) {
Iterator<TreeItem> iter = this.treeItemIterator();
while (iter.hasNext() && !componentIsPresent) {
TreeItem treeItem = (TreeItem) iter.next();
ModelCell cell = (ModelCell) treeItem.getWidget();
if (cell.getComponentCell().getComponent().getId() != null) {
componentIsPresent =
cell.getComponentCell().getComponent().getId().matches(
componentId);
}
}
}
return componentIsPresent;
}
/**
* Checks whether the input Port has already appeared higher up in the
* ModelTree hierarchy, and has a connected Component. If so, the Component
* is returned; otherwise, a null object is returned.
* <p>
* I'm concerned that this may be inefficient, and slow to iterate through a
* large ModelTree, since each TreeItem is hit.
*
* @param port the Port object
*/
public Component hasConnectedInstance(Port port) {
Component connected = null;
Iterator<TreeItem> iter = this.treeItemIterator();
while (iter.hasNext()) {
TreeItem treeItem = (TreeItem) iter.next();
ModelCell cell = (ModelCell) treeItem.getWidget();
if (cell.isConnected()) {
Component cellComponent = cell.getComponentCell().getComponent();
String cellPortId = cell.getPortCell().getPort().getId();
if (cellPortId.matches("driver")) {
if (cellComponent.getProvidesPorts().length > 0) {
cellPortId = cellComponent.getProvidesPorts()[0].getId();
}
}
// GWT.log("match? " + cellPortId + " " + port.getId());
if (cellPortId.matches(port.getId())) {
connected = cellComponent;
}
}
}
return connected;
}
/**
* Handles events when a drag item hovers over a drop target. Note that
* events are continuously spawned, so there can be a lot.
* <p>
* This method is apparently needed to have drop events.
*/
@Override
public void onDragOver(DragOverEvent event) {
return;
}
@Override
public void onDrop(DropEvent event) {
event.preventDefault();
event.stopPropagation();
}
}
|
package com.upcrob.tagexp;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import java.util.Collection;
import java.util.Set;
import java.util.HashSet;
/**
* An Evaluator contains facilities for evaluating the truth value of tag
* expressions in relation to an input Collection of tags.
*/
public class Evaluator {
/**
* Evaluates a tag expression against an input Collection of Strings
* to determine its truth value.
*
* For instance, the expression, "(a or b) and not c" should return
* true when the input Collection consists of {"a", "d", "e"} since
* it contains the String, "a" ("b" would also have worked) but not
* the String, "c".
*
* @param expression Input expression String.
* @param tags Input Collection of tag Strings.
* @throws ParseException Thrown if the expression could not be parsed.
*/
public boolean evaluate(String expression, Collection<String> tags) {
// Evaluate, case-sensitive
return evaluate(expression, tags, true);
}
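// Usage sketch matching the example above (the tag values are illustrative):
//   Evaluator evaluator = new Evaluator();
//   boolean result = evaluator.evaluate("(a or b) and not c",
//           java.util.Arrays.asList("a", "d", "e"));   // result is expected to be true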
/**
* Evaluates a tag expression against an input Collection of Strings
* to determine its truth value.
*
* @param expression Input expression String.
* @param tags Input Collection of tag Strings.
* @param caseSensitive Toggles case-sensitivity of tag-search. Note that
* a case-insensitive search may be slower.
* @throws ParseException Thrown if the expression could not be parsed.
*/
public boolean evaluate(String expression, Collection<String> tags, boolean caseSensitive) {
// Verify arguments are non-null
if (expression == null || tags == null) {
throw new IllegalArgumentException("Input arguments must be non-null.");
}
// Setup lexer and parser
TagExpLexer lexer = new TagExpLexer(new ANTLRStringStream(expression));
TagExpParser parser = new TagExpParser(new CommonTokenStream(lexer));
// Build parse tree
Node root;
try {
root = parser.eval();
} catch (RecognitionException e) {
throw new ParseException("Invalid expression.", e);
}
// Evaluate tree
return eval(root, new HashSet<String>(tags), caseSensitive);
}
/**
* Helper method that does most of the evaluation work.
*/
private boolean eval(Node root, Set<String> tags, boolean caseSensitive) {
switch (root.type) {
case OR:
// Do a logical OR on all children of this node
for (Node n : root.children) {
if (eval(n, tags, caseSensitive)) {
// Return true if any of the
// sub-trees are true
return true;
}
}
return false;
case XOR:
// Do a logical XOR on all children of this node
boolean found = false;
for (Node n : root.children) {
if (eval(n, tags, caseSensitive)) {
if (found) {
// Sub-tree already evaluated to
// true, return false
return false;
}
found = true;
}
}
return found;
case AND:
// Do a logical AND on all children of this node
for (Node n : root.children) {
if (!eval(n, tags, caseSensitive)) {
// Return false if any of the
// sub-trees are false
return false;
}
}
return true;
case NOT:
// Do a logical NOT on this node's sub-tree
Node child = root.children.get(0);
return !eval(child, tags, caseSensitive);
case TERM:
// Determine if this term is in the tag set
if (caseSensitive) {
// Case-sensitive search
return tags.contains(root.value);
} else {
// Case-insensitive search
for (String tag : tags) {
if (tag.equalsIgnoreCase(root.value)) {
return true;
}
}
return false;
}
}
throw new ParseException("Invalid parse tree.");
}
}
|
package cronapi.database;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.metamodel.EntityType;
import javax.persistence.metamodel.SingularAttribute;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.security.core.GrantedAuthority;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializable;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.jsontype.TypeSerializer;
import cronapi.RestClient;
import cronapi.SecurityBeanFilter;
import cronapi.Utils;
import cronapi.Var;
import cronapi.i18n.Messages;
import cronapi.rest.security.CronappSecurity;
/**
* Class for database manipulation, responsible for querying, inserting,
* updating and deleting database data procedurally, with support for paged
* navigation and a configurable page size.
*
* @author robson.ataide
* @version 1.0
* @since 2017-04-26
*
*/
public class DataSource implements JsonSerializable {
private String entity;
private String simpleEntity;
private Class domainClass;
private String filter;
private Var[] params;
private int pageSize;
private Page page;
private int index;
private int current;
private Pageable pageRequest;
private Object insertedElement = null;
private EntityManager customEntityManager;
/**
* Initializes a datasource with a page size of 100
*
* @param entity - fully qualified name of the entity class, as a String
*/
public DataSource(String entity) {
this(entity, 100);
}
/**
* Initializes a datasource with a page size of 100 and a custom entity manager
*
* @param entity - fully qualified name of the entity class, as a String
* @param entityManager - custom entity manager
*/
public DataSource(String entity, EntityManager entityManager) {
this(entity, 100);
this.customEntityManager = entityManager;
}
/**
* Initializes a datasource with the given page size
*
* @param entity - fully qualified name of the entity class, as a String
* @param pageSize - page size of the Pageable object retrieved from the repository
*/
public DataSource(String entity, int pageSize) {
this.entity = entity;
this.simpleEntity = entity.substring(entity.lastIndexOf(".")+1);
this.pageSize = pageSize;
this.pageRequest = new PageRequest(0, pageSize);
//initialize dependencies and necessary objects
this.instantiateRepository();
}
private EntityManager getEntityManager(Class domainClass) {
if (customEntityManager != null)
return customEntityManager;
else
return TransactionManager.getEntityManager(domainClass);
}
public Class getDomainClass() {
return domainClass;
}
public String getSimpleEntity() {
return simpleEntity;
}
/**
* Retrieve repository from entity
*
* @throws RuntimeException when the repository is not found, the entity class cannot be found, or the repository cannot be cast
*/
private void instantiateRepository() {
try {
domainClass = Class.forName(this.entity);
} catch (ClassNotFoundException cnfex) {
throw new RuntimeException(cnfex);
}
}
private List<String> parseParams(String SQL) {
final String delims = " \n\r\t.(){},+:=!";
final String quots = "\'";
String token = "";
boolean isQuoted = false;
List<String> tokens = new LinkedList<>();
for(int i = 0; i < SQL.length(); i++) {
if(quots.indexOf(SQL.charAt(i)) != -1) {
isQuoted = token.length() == 0;
}
if(delims.indexOf(SQL.charAt(i)) == -1 || isQuoted) {
token += SQL.charAt(i);
}
else {
if(token.length() > 0) {
if (token.startsWith(":"))
tokens.add(token.substring(1));
token = "";
isQuoted = false;
}
if (SQL.charAt(i) == ':') {
token = ":";
}
}
}
if(token.length() > 0) {
if (token.startsWith(":"))
tokens.add(token.substring(1));
}
return tokens;
}
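// Illustrative behaviour of parseParams(): for
//   "select e from Customer e where e.name = :name and e.age > :age"
// the returned list is ["name", "age"]; parameter-like text inside single-quoted literals is not collected.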
/**
* Retrieves objects from the database using the repository when the filter is null or empty;
* if the filter is not null and not empty, this method uses the entityManager and creates a
* JPQL instruction.
*
* @return an array of Objects
*/
public Object[] fetch() {
String jpql = this.filter;
if (jpql == null) {
jpql = "select e from " + simpleEntity + " e";
}
try {
EntityManager em = getEntityManager(domainClass);
TypedQuery<?> query = em.createQuery(jpql, domainClass);
int i = 0;
List<String> parsedParams = parseParams(jpql);
for (String param : parsedParams) {
Var p = null;
if (i <= this.params.length-1) {
p = this.params[i];
}
if (p != null) {
if (p.getId() != null) {
query.setParameter(p.getId(), p.getObject(query.getParameter(p.getId()).getParameterType()));
} else {
query.setParameter(param, p.getObject(query.getParameter(parsedParams.get(i)).getParameterType()));
}
} else {
query.setParameter(param, null);
}
i++;
}
query.setFirstResult(this.pageRequest.getPageNumber() * this.pageRequest.getPageSize());
query.setMaxResults(this.pageRequest.getPageSize());
List<?> resultsInPage = query.getResultList();
this.page = new PageImpl(resultsInPage, this.pageRequest, 0);
} catch (Exception ex) {
throw new RuntimeException(ex);
}
//has data, moves cursor to first position
if (this.page.getNumberOfElements() > 0)
this.current = 0;
return this.page.getContent().toArray();
}
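// Usage sketch for fetching and iterating; the entity class and the JPQL filter below are hypothetical:
//   DataSource ds = new DataSource("com.example.Customer", 50);
//   ds.filter("select e from Customer e where e.name = :name", Var.valueOf("name", "Ana"));
//   while (ds.hasData()) {
//       Object customer = ds.getObject();
//       ds.next();
//   }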
public EntityMetadata getMetadata() {
return new EntityMetadata(domainClass);
}
/**
* Creates a new instance of the entity and holds it as the
* inserted element until it is saved.
*/
public void insert() {
try {
this.insertedElement = this.domainClass.newInstance();
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
public Object toObject(Map<?,?> values) {
try {
Object insertedElement = this.domainClass.newInstance();
for (Object key: values.keySet()) {
updateField(insertedElement, key.toString(), values.get(key));
}
return insertedElement;
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
public void insert(Object value) {
try {
if (value instanceof Map) {
this.insertedElement = this.domainClass.newInstance();
Map<?,?> values = (Map<?,?>) value;
for (Object key : values.keySet()) {
try {
updateField(key.toString(), values.get(key));
} catch(Exception e) {
}
}
} else {
this.insertedElement = value;
}
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
public Object save() {
return save(true);
}
/**
* Saves the object at the current index, or the newly inserted element when one exists
*/
public Object save(boolean returnCursorAfterInsert) {
try {
Object toSave;
EntityManager em = getEntityManager(domainClass);
em.getMetamodel().entity(domainClass);
if (!em.getTransaction().isActive()) {
em.getTransaction().begin();
}
if (this.insertedElement != null) {
toSave = this.insertedElement;
if (returnCursorAfterInsert)
this.insertedElement = null;
em.persist(toSave);
} else
toSave = this.getObject();
return em.merge(toSave);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public void delete(Var[] primaryKeys) {
insert();
int i = 0;
Var[] params = new Var[primaryKeys.length];
EntityManager em = getEntityManager(domainClass);
EntityType type = em.getMetamodel().entity(domainClass);
String jpql = " DELETE FROM "+entity.substring(entity.lastIndexOf(".")+1) + " WHERE ";
for (Object obj: type.getAttributes()) {
SingularAttribute field = (SingularAttribute) obj;
if (field.isId()) {
if (i > 0) {
jpql += " AND ";
}
jpql += "" + field.getName() + " = :p" + i;
params[i] = Var.valueOf("p" + i, primaryKeys[i].getObject(field.getType().getJavaType()));
i++;
}
}
execute(jpql, params);
}
/**
* Removes the object at the current index
*/
public void delete() {
try {
Object toRemove = this.getObject();
EntityManager em = getEntityManager(domainClass);
if (!em.getTransaction().isActive()) {
em.getTransaction().begin();
}
//returns managed instance
toRemove = em.merge(toRemove);
em.remove(toRemove);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Updates a field of the object at the current index
*
* @param fieldName - attribute name in the entity
* @param fieldValue - value to be set on the named field
*/
public void updateField(String fieldName, Object fieldValue) {
updateField(getObject(), fieldName, fieldValue);
}
private void updateField(Object obj, String fieldName, Object fieldValue) {
try {
boolean update = true;
if(RestClient.getRestClient().isFilteredEnabled()) {
update = SecurityBeanFilter.includeProperty(obj.getClass(), fieldName);
}
if (update) {
Method setMethod = Utils.findMethod(obj, "set" + fieldName);
if (setMethod != null) {
if (fieldValue instanceof Var) {
fieldValue = ((Var) fieldValue).getObject(setMethod.getParameterTypes()[0]);
} else {
Var tVar = Var.valueOf(fieldValue);
fieldValue = tVar.getObject(setMethod.getParameterTypes()[0]);
}
setMethod.invoke(obj, fieldValue);
} else {
throw new RuntimeException("Field " + fieldName + " not found");
}
}
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
/**
* Updates fields of the object at the current index
*
* @param fields - field name/value pairs,
* e.g. { {"name", "Paul"}, {"age", "21"} }
*
* @throws RuntimeException if a field is not accessible through a set method
*/
public void updateFields(Var... fields) {
try {
for (Var field : fields) {
Method setMethod = Utils.findMethod(getObject(), "set" + field.getId());
if (setMethod != null) {
setMethod.invoke(getObject(), field.getObject());
}
}
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
public void filter(Var data, Var[] extraParams) {
EntityManager em = getEntityManager(domainClass);
EntityType type = em.getMetamodel().entity(domainClass);
int i = 0;
String jpql = " select e FROM "+entity.substring(entity.lastIndexOf(".")+1) + " e WHERE ";
Vector<Var> params = new Vector<>();
for (Object obj: type.getAttributes()) {
SingularAttribute field = (SingularAttribute) obj;
if (field.isId()) {
if (i > 0) {
jpql += " AND ";
}
jpql += "e." + field.getName() + " = :p" + i;
params.add(Var.valueOf("p" + i, data.getField(field.getName()).getObject(field.getType().getJavaType())));
i++;
}
}
if (extraParams != null) {
for (Var p: extraParams) {
jpql += "e." + p.getId() + " = :p" + i;
params.add(Var.valueOf("p" + i, p.getObject()));
i++;
}
}
Var[] arr = params.toArray(new Var[params.size()]);
filter(jpql, arr);
}
public void update(Var data) {
try {
LinkedList<String> fields = data.keySet();
for(String key : fields) {
if(!key.equalsIgnoreCase(Class.class.getSimpleName())) {
this.updateField(key, data.getField(key));
}
}
}
catch(Exception e) {
throw new RuntimeException(e);
}
}
/**
* Returns the object at the current index
*
* @return Object from the database at the current position
*/
public Object getObject() {
if (this.insertedElement != null)
return this.insertedElement;
if (this.current < 0 || this.current > this.page.getContent().size()-1)
return null;
return this.page.getContent().get(this.current);
}
/**
* Returns the value of the given field from the object at the current index
*
* @return Object value of the named field
* @throws RuntimeException if the field is not accessible through a get method
*/
public Object getObject(String fieldName) {
try {
Method getMethod = Utils.findMethod(getObject(), "get" + fieldName);
if (getMethod != null)
return getMethod.invoke(getObject());
return null;
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
/**
* Moves the index to the next position; in the pageable case,
* the next page is fetched when required
*/
public void next() {
if (this.page.getNumberOfElements() > (this.current + 1))
this.current++;
else {
if (this.page.hasNext()) {
this.pageRequest = this.page.nextPageable();
this.fetch();
this.current = 0;
} else {
this.current = -1;
}
}
}
/**
* Moves the index to the next position within the current page,
* without fetching a new page
*/
public void nextOnPage() {
this.current++;
}
/**
* Verifies whether the index can move to the next position;
* in the pageable case, this also checks whether a next page exists
*
* @return true if there is a next element, false otherwise
*/
public boolean hasNext() {
if (this.page.getNumberOfElements() > (this.current + 1))
return true;
else {
if (this.page.hasNext()) {
return true;
} else {
return false;
}
}
}
public boolean hasData() {
return getObject() != null;
}
/**
* Moves the index to the previous position; in the pageable case,
* the previous page is fetched when required
*
* @return true if there is a previous element, false otherwise
*/
public boolean previous() {
if (this.current - 1 >= 0) {
this.current--;
} else {
if (this.page.hasPrevious()) {
this.pageRequest = this.page.previousPageable();
this.fetch();
this.current = this.page.getNumberOfElements() - 1;
} else {
return false;
}
}
return true;
}
public void setCurrent(int current) {
this.current = current;
}
public int getCurrent() {
return this.current;
}
/**
* Gets the Page object retrieved from the repository
*
* @return the page from the repository, or null before any fetch has been performed
*/
public Page getPage() {
return this.page;
}
/**
* Create a new page request with size passed
*
* @param pageSize size of page request
*/
public void setPageSize(int pageSize) {
this.pageSize = pageSize;
this.pageRequest = new PageRequest(0, pageSize);
this.current = -1;
}
/**
* Fetch objects from database by a filter
*
* @param filter jpql instruction like a namedQuery
* @param params parameters used in jpql instruction
*/
public void filter(String filter, Var... params) {
this.filter = filter;
this.params = params;
this.pageRequest = new PageRequest(0, pageSize);
this.current = -1;
this.fetch();
}
/**
* Fetch objects from database by a filter
*
* @param filter jpql instruction like a namedQuery
* @param pageRequest Page
* @param params parameters used in jpql instruction
*/
public void filter(String filter, PageRequest pageRequest, Var... params) {
if (filter == null && params.length > 0) {
EntityManager em = getEntityManager(domainClass);
EntityType type = em.getMetamodel().entity(domainClass);
int i = 0;
String jpql = "Select e from " + simpleEntity + " e where ";
for (Object obj : type.getAttributes()) {
SingularAttribute field = (SingularAttribute) obj;
if (field.isId()) {
if (i > 0) {
jpql += " and ";
}
jpql += "e." + field.getName() + " = :p" + i;
params[i].setId("p" + i);
i++; // advance to the next id attribute / parameter
}
}
filter = jpql;
}
this.params = params;
this.filter = filter;
this.pageRequest = pageRequest;
this.current = -1;
this.fetch();
}
private Class forName(String name) {
try {
return Class.forName(name);
} catch (ClassNotFoundException e) {
return null;
}
}
private Object newInstance(String name) {
try {
return Class.forName(name).newInstance();
} catch (Exception e) {
return null;
}
}
public void deleteRelation(String refId, Var[] primaryKeys, Var[] relationKeys) {
EntityMetadata metadata = getMetadata();
RelationMetadata relationMetadata = metadata.getRelations().get(refId);
EntityManager em = getEntityManager(domainClass);
int i = 0;
String jpql = null;
Var[] params = null;
if (relationMetadata.getAssossiationName() != null) {
params = new Var[relationKeys.length + primaryKeys.length];
jpql = " DELETE FROM " + relationMetadata.gettAssossiationSimpleName() + " WHERE ";
EntityType type = em.getMetamodel().entity(domainClass);
for (Object obj : type.getAttributes()) {
SingularAttribute field = (SingularAttribute) obj;
if (field.isId()) {
if (i > 0) {
jpql += " AND ";
}
jpql += relationMetadata.getAssociationAttribute().getName() + "." + field.getName() + " = :p" + i;
params[i] = Var.valueOf("p" + i, primaryKeys[i].getObject(field.getType().getJavaType()));
i++;
}
}
int v = 0;
type = em.getMetamodel().entity(forName(relationMetadata.getAssossiationName()));
for (Object obj : type.getAttributes()) {
SingularAttribute field = (SingularAttribute) obj;
if (field.isId()) {
if (i > 0) {
jpql += " AND ";
}
jpql += relationMetadata.getAttribute().getName() + "." + field.getName() + " = :p" + i;
params[i] = Var.valueOf("p" + i, relationKeys[v].getObject(field.getType().getJavaType()));
i++;
v++;
}
}
} else {
params = new Var[relationKeys.length];
jpql = " DELETE FROM " + relationMetadata.getSimpleName() + " WHERE ";
EntityType type = em.getMetamodel().entity(forName(relationMetadata.getName()));
for (Object obj : type.getAttributes()) {
SingularAttribute field = (SingularAttribute) obj;
if (field.isId()) {
if (i > 0) {
jpql += " AND ";
}
jpql += "" + field.getName() + " = :p" + i;
params[i] = Var.valueOf("p" + i, relationKeys[i].getObject(field.getType().getJavaType()));
i++;
}
}
}
execute(jpql, params);
}
public Object insertRelation(String refId, Map<?, ?> data, Var... primaryKeys) {
EntityMetadata metadata = getMetadata();
RelationMetadata relationMetadata = metadata.getRelations().get(refId);
EntityManager em = getEntityManager(domainClass);
filter(null, new PageRequest(0, 100), primaryKeys);
Object insertion = null;
Object result = null;
if (relationMetadata.getAssossiationName() != null) {
insertion = this.newInstance(relationMetadata.getAssossiationName());
updateField(insertion, relationMetadata.getAttribute().getName(), Var.valueOf(data).getObject(forName(relationMetadata.getName())));
updateField(insertion, relationMetadata.getAssociationAttribute().getName(), getObject());
result = getObject();
} else {
insertion = Var.valueOf(data).getObject(forName(relationMetadata.getName()));
updateField(insertion, relationMetadata.getAttribute().getName(), getObject());
result = insertion;
}
if (!em.getTransaction().isActive()) {
em.getTransaction().begin();
}
em.persist(insertion);
return result;
}
public void filterByRelation(String refId, PageRequest pageRequest, Var... primaryKeys) {
EntityMetadata metadata = getMetadata();
RelationMetadata relationMetadata = metadata.getRelations().get(refId);
EntityManager em = getEntityManager(domainClass);
EntityType type = null;
String name = null;
String selectAttr = "";
String filterAttr = relationMetadata.getAttribute().getName();
type = em.getMetamodel().entity(domainClass);
if (relationMetadata.getAssossiationName() != null) {
name = relationMetadata.getAssossiationName();
selectAttr = "."+relationMetadata.getAttribute().getName();
filterAttr = relationMetadata.getAssociationAttribute().getName();
} else {
name = relationMetadata.getName();
}
int i = 0;
String jpql = "Select e"+selectAttr+" from "+name+" e where ";
for (Object obj: type.getAttributes()) {
SingularAttribute field = (SingularAttribute) obj;
if (field.isId()) {
if (i > 0) {
jpql += " and ";
}
jpql += "e."+filterAttr+"."+field.getName()+" = :p"+i;
primaryKeys[i].setId("p"+i);
i++; // advance to the next id attribute / parameter
}
}
filter(jpql, pageRequest, primaryKeys);
}
/**
* Cleans the DataSource to free up allocated memory
*/
public void clear() {
this.pageRequest = new PageRequest(0, 100);
this.current = -1;
this.page = null;
}
/**
* Executes a query
*
* @param query - JPQL instruction to execute (e.g. a delete statement)
* @param params - parameters, each carrying a name (id) and a value
*/
public void execute(String query, Var... params) {
try {
EntityManager em = getEntityManager(domainClass);
TypedQuery<?> strQuery = em.createQuery(query, domainClass);
for (Var p : params) {
strQuery.setParameter(p.getId(), p.getObject());
}
try {
if (!em.getTransaction().isActive()) {
em.getTransaction().begin();
}
strQuery.executeUpdate();
} catch (Exception e) {
throw new RuntimeException(e);
}
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
public Var getTotalElements() {
return new Var(this.page.getTotalElements());
}
@Override
public String toString() {
if (this.page != null) {
return this.page.getContent().toString();
} else {
return "[]";
}
}
@Override
public void serialize(JsonGenerator gen, SerializerProvider serializers) throws IOException {
gen.writeObject(this.page.getContent());
}
@Override
public void serializeWithType(JsonGenerator gen, SerializerProvider serializers, TypeSerializer typeSer) throws IOException {
gen.writeObject(this.page.getContent());
}
public void checkRESTSecurity(String method) throws Exception {
checkRESTSecurity(domainClass, method);
}
public void checkRESTSecurity(String relationId, String method) throws Exception {
EntityMetadata metadata = getMetadata();
RelationMetadata relationMetadata = metadata.getRelations().get(relationId);
checkRESTSecurity(Class.forName(relationMetadata.getName()), method);
}
private void checkRESTSecurity(Class clazz, String method) throws Exception {
Annotation security = clazz.getAnnotation(CronappSecurity.class);
boolean authorized = false;
if(security instanceof CronappSecurity) {
CronappSecurity cronappSecurity = (CronappSecurity)security;
Method methodPermission = cronappSecurity.getClass().getMethod(method.toLowerCase());
if(methodPermission != null) {
String value = (String)methodPermission.invoke(cronappSecurity);
if(value == null) {
value = "authenticated";
}
String[] authorities = value.trim().split(";");
for(String role : authorities) {
if(role.equalsIgnoreCase("authenticated")) {
authorized = RestClient.getRestClient().getUser() != null;
if(authorized)
break;
}
if(role.equalsIgnoreCase("permitAll") || role.equalsIgnoreCase("public")) {
authorized = true;
break;
}
for(GrantedAuthority authority : RestClient.getRestClient().getAuthorities()) {
if(role.equalsIgnoreCase(authority.getAuthority())) {
authorized = true;
break;
}
}
if(authorized)
break;
}
}
}
if(!authorized) {
throw new RuntimeException(Messages.getString("notAllowed"));
}
}
}
|
package de.prob2.ui.menu;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.ResourceBundle;
import javax.annotation.Nullable;
import com.google.inject.Inject;
import com.google.inject.Injector;
import de.codecentric.centerdevice.MenuToolkit;
import de.codecentric.centerdevice.util.StageUtils;
import de.prob2.ui.MainController;
import de.prob2.ui.config.FileChooserManager;
import de.prob2.ui.internal.FXMLInjected;
import de.prob2.ui.internal.StageManager;
import de.prob2.ui.persistence.UIState;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuItem;
import javafx.scene.control.SeparatorMenuItem;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.stage.WindowEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@FXMLInjected
public class WindowMenu extends Menu {
private static final Logger logger = LoggerFactory.getLogger(WindowMenu.class);
private final Injector injector;
private final StageManager stageManager;
private final ResourceBundle bundle;
private final FileChooserManager fileChooserManager;
@FXML
private Menu presetPerspectivesMenu;
@FXML
private MenuItem detatchedMenuItem;
@Inject
private WindowMenu(final StageManager stageManager, final Injector injector, final ResourceBundle bundle,
final FileChooserManager fileChooserManager, @Nullable MenuToolkit menuToolkit) {
this.injector = injector;
this.stageManager = stageManager;
this.bundle = bundle;
this.fileChooserManager = fileChooserManager;
stageManager.loadFXML(this, "windowMenu.fxml");
if (menuToolkit != null) {
MenuItem zoomMenuItem = menuToolkit.createZoomMenuItem();
zoomMenuItem.setOnAction(
event -> StageUtils.getFocusedStage().ifPresent(stage -> {
if(!stage.isMaximized()) {
stage.setMaximized(true);
} else {
stage.sizeToScene();
stage.setMaximized(false);
stage.centerOnScreen();
}
}));
this.getItems().addAll(0, Arrays.asList(menuToolkit.createMinimizeMenuItem(), zoomMenuItem,
menuToolkit.createCycleWindowsItem(), new SeparatorMenuItem()));
this.getItems().addAll(new SeparatorMenuItem(), menuToolkit.createBringAllToFrontItem(),
new SeparatorMenuItem());
menuToolkit.autoAddWindowMenuItems(this);
}
}
@FXML
private void handleCloseWindow() {
final Stage stage = this.stageManager.getCurrent();
if (stage != null) {
stage.fireEvent(new WindowEvent(stage, WindowEvent.WINDOW_CLOSE_REQUEST));
}
}
@FXML
private void handleLoadDefault() {
reset();
loadPreset("main.fxml");
}
@FXML
private void handleLoadSeparated() {
reset();
loadPreset("separatedHistory.fxml");
}
@FXML
private void handleLoadSeparated2() {
reset();
loadPreset("separatedHistoryAndStatistics.fxml");
}
@FXML
private void handleLoadDetached() {
injector.getInstance(DetachViewStageController.class).showAndWait();
}
@FXML
private void handleLoadPerspective() {
FileChooser fileChooser = new FileChooser();
fileChooser.setTitle(bundle.getString("common.fileChooser.open.title"));
fileChooser.getExtensionFilters().addAll(
new FileChooser.ExtensionFilter(bundle.getString("common.fileChooser.fileTypes.fxml"), "*.fxml"));
Path selectedFile = fileChooserManager.showOpenFileChooser(fileChooser, FileChooserManager.Kind.PERSPECTIVES,
stageManager.getMainStage());
if (selectedFile != null) {
try {
MainController main = injector.getInstance(MainController.class);
FXMLLoader loader = injector.getInstance(FXMLLoader.class);
loader.setLocation(selectedFile.toUri().toURL());
injector.getInstance(UIState.class)
.setGuiState("custom " + selectedFile.toUri().toURL().toExternalForm());
reset();
loader.setRoot(main);
loader.setController(main);
Parent root = loader.load();
stageManager.getMainStage().getScene().setRoot(root);
} catch (IOException e) {
logger.error("Loading fxml failed", e);
stageManager
.makeExceptionAlert(e, "common.alerts.couldNotOpenFile.content", selectedFile)
.showAndWait();
}
}
}
private void reset() {
injector.getInstance(DetachViewStageController.class).attachAllViews();
}
public Parent loadPreset(String location) {
injector.getInstance(UIState.class).setGuiState(location);
final MainController root = injector.getInstance(MainController.class);
root.refresh();
stageManager.getMainStage().getScene().setRoot(root);
injector.getInstance(MenuController.class).setMacMenu();
return root;
}
public void enablePerspectivesAndDetatched() {
presetPerspectivesMenu.setDisable(false);
presetPerspectivesMenu.setVisible(true);
detatchedMenuItem.setDisable(false);
detatchedMenuItem.setVisible(true);
}
}
|
package edu.sjsu.mithai.main;
import edu.sjsu.mithai.config.ConfigFileObservable;
import edu.sjsu.mithai.config.Configuration;
import edu.sjsu.mithai.data.DataGenerationTask;
import edu.sjsu.mithai.data.MetadataGenerationTask;
import edu.sjsu.mithai.data.SensorStore;
import edu.sjsu.mithai.export.HttpExporterTask;
import edu.sjsu.mithai.mqtt.MQTTDataReceiverTask;
import edu.sjsu.mithai.mqtt.MQTTMetaDataRecieverTask;
import edu.sjsu.mithai.mqtt.MQTTPublisherTask;
import edu.sjsu.mithai.sensors.TemperatureSensor;
import edu.sjsu.mithai.spark.SparkStreamingObject;
import edu.sjsu.mithai.util.TaskManager;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.IOException;
import java.util.Observable;
import java.util.Observer;
import static edu.sjsu.mithai.config.MithaiProperties.EXPORTER_TYPE;
import static edu.sjsu.mithai.config.MithaiProperties.NUMBER_OF_SENSORS;
public class Mithai implements Observer {
protected static Configuration configuration;
protected SensorStore sensorStore;
public static void main(String[] args) throws Exception {
Mithai mithai = new Mithai();
if(args.length<1)
mithai.start(null);
else
mithai.start(args[0]);
}
protected void start(String arg) throws Exception {
Logger.getLogger("org").setLevel(Level.ERROR);
Logger.getLogger("akka").setLevel(Level.ERROR);
ConfigFileObservable.getInstance().addObserver(this);
Runtime.getRuntime().addShutdownHook(new ShutDownHook());
//TODO file path will be provided by user
if (arg == null || arg.equals("")) {
File configFile = new File(Mithai.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath());
configuration = new Configuration(configFile.getParent() + "/application.properties");
} else
configuration = new Configuration(arg);
sensorStore = new SensorStore();
loadDevices();
setupHandlers();
//Start tasks here
// TaskManager.getInstance().submitTask(new ConfigMonitorTask(configuration));
TaskManager.getInstance().submitTask(new MQTTDataReceiverTask(configuration));
TaskManager.getInstance().submitTask(new MQTTMetaDataRecieverTask(configuration));
TaskManager.getInstance().submitTask(new MQTTPublisherTask(configuration));
TaskManager.getInstance().submitTask(new DataGenerationTask(configuration, sensorStore));
TaskManager.getInstance().submitTask(new MetadataGenerationTask(configuration));
TaskManager.getInstance().submitTask(new HttpExporterTask(configuration));
if (!configuration.getProperty(EXPORTER_TYPE).equals("HTTP")) {
// TaskManager.getInstance().submitTask(new ExporterTask(configuration, Store.messageStore()));
}
// Start Streaming context
Thread.sleep(14 * 1000);
SparkStreamingObject.streamingContext().start();
// // Stop all tasks and wait 60 seconds to finish them
// TaskManager.getInstance().stopAll();
}
protected synchronized void loadDevices() {
sensorStore.getDevices().clear();
for (int i = 1; i<= Integer.parseInt(configuration.getProperty(NUMBER_OF_SENSORS)); i++) {
sensorStore.addDevice(new TemperatureSensor("sensor" + i));
}
}
protected synchronized void setupHandlers() {
TaskManager.getInstance().addHandler(new MithaiHandler());
}
@Override
public void update(Observable observable, Object o) {
if (observable instanceof ConfigFileObservable) {
loadDevices();
// Kick out old data generation task and start new one
TaskManager.getInstance().stop(DataGenerationTask.class);
try {
TaskManager.getInstance().submitTask(new DataGenerationTask(configuration, sensorStore));
} catch (IOException e) {
e.printStackTrace();
}
}
}
private static class ShutDownHook extends Thread {
@Override
public void run() {
System.out.println("###Shutdown triggered.. Stopping all tasks..");
try {
TaskManager.getInstance().stopAll();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
|
package egen.io.weather.details;
public class URI {
public static final String WEATHER="{weather}";
public static final String CITY ="{city}";
public static final String PRO="{city}/{property}";
public static final String AVG="{city}/average/{duration}/{property}";
}
|
package fjab.loancalc.view;
import java.math.BigDecimal;
public class LoanBean {
private BigDecimal annualInterestRate;
private BigDecimal loanAmount;
private String repaymentPeriodicity = "Monthly";//default value
private Integer loanLengthYears;
private Integer loanLengthMonths;
public LoanBean(){}
public LoanBean(BigDecimal annualInterestRate,
BigDecimal loanAmount,
String repaymentPeriodicity,
Integer loanLengthYears,
Integer loanLengthMonths){
this.annualInterestRate = annualInterestRate;
this.loanAmount = loanAmount;
this.repaymentPeriodicity = repaymentPeriodicity;
this.loanLengthYears = loanLengthYears;
this.loanLengthMonths = loanLengthMonths;
}
@Override
public String toString(){
StringBuilder sb = new StringBuilder();
sb.append("annualInterestRate:").append(this.annualInterestRate).append("\n");
sb.append("loanAmount:").append(this.loanAmount).append("\n");
sb.append("repaymentPeriodicity:").append(this.repaymentPeriodicity).append("\n");
sb.append("loanLengthYears:").append(this.loanLengthYears).append("\n");
sb.append("loanLengthMonths:").append(this.loanLengthMonths).append("\n");
return sb.toString();
}
public BigDecimal getAnnualInterestRate() {
return annualInterestRate;
}
public void setAnnualInterestRate(BigDecimal annualInterestRate) {
this.annualInterestRate = annualInterestRate;
}
public BigDecimal getLoanAmount() {
return loanAmount;
}
public void setLoanAmount(BigDecimal loanAmount) {
this.loanAmount = loanAmount;
}
public Integer getLoanLengthYears() {
return loanLengthYears;
}
public void setLoanLengthYears(Integer loanLengthYears) {
this.loanLengthYears = loanLengthYears;
}
public Integer getLoanLengthMonths() {
return loanLengthMonths;
}
public void setLoanLengthMonths(Integer loanLengthMonths) {
this.loanLengthMonths = loanLengthMonths;
}
public String getRepaymentPeriodicity() {
return repaymentPeriodicity;
}
public void setRepaymentPeriodicity(String repaymentPeriodicity) {
this.repaymentPeriodicity = repaymentPeriodicity;
}
}
|
package hu.bme.mit.spaceship;
/**
* A simple spaceship with two proton torpedoes and four lasers
*/
public class GT4500 implements SpaceShip {
private TorpedoStore primaryTorpedoStore;
private TorpedoStore secondaryTorpedoStore;
private boolean wasPrimaryFiredLast = false;
public GT4500() {
this.primaryTorpedoStore = new TorpedoStore(10);
this.secondaryTorpedoStore = new TorpedoStore(10);
}
public boolean fireLasers(FiringMode firingMode) {
return firingMode == FiringMode.ALL;
}
/**
* Tries to fire the torpedo stores of the ship.
*
* @param firingMode how many torpedo bays to fire
* SINGLE: fires only one of the bays.
* - For the first time the primary store is fired.
* - To give some cooling time to the torpedo stores, the stores are fired alternately.
* - But if the store next in line is empty the ship tries to fire the other store.
* - If the fired store reports a failure, the ship does not try to fire the other one.
* ALL: tries to fire both of the torpedo stores.
*
* @return whether at least one torpedo was fired successfully
*/
@Override
public boolean fireTorpedos(FiringMode firingMode) {
boolean firingSuccess = false;
switch (firingMode) {
case SINGLE:
if (wasPrimaryFiredLast) {
// try to fire the secondary first
if (! secondaryTorpedoStore.isEmpty()) {
firingSuccess = secondaryTorpedoStore.fire(1);
wasPrimaryFiredLast = false;
}
else {
// the primary was fired last time, but the secondary is empty,
// so try to fire the primary again
if (! primaryTorpedoStore.isEmpty()) {
firingSuccess = primaryTorpedoStore.fire(1);
wasPrimaryFiredLast = true;
}
// if both of the stores are empty, nothing can be done, return failure
}
}
else {
// try to fire the primary first
if (! primaryTorpedoStore.isEmpty()) {
firingSuccess = primaryTorpedoStore.fire(1);
wasPrimaryFiredLast = true;
}
else {
// the secondary was fired last time, but the primary is empty,
// so try to fire the secondary again
if (! secondaryTorpedoStore.isEmpty()) {
firingSuccess = secondaryTorpedoStore.fire(1);
wasPrimaryFiredLast = false;
}
// if both of the stores are empty, nothing can be done, return failure
}
}
break;
case ALL:
firingSuccess = false;
if (!primaryTorpedoStore.isEmpty()) {
firingSuccess = primaryTorpedoStore.fire(1);
}
if (!secondaryTorpedoStore.isEmpty()) {
// report success if at least one of the stores actually fired
firingSuccess = secondaryTorpedoStore.fire(1) || firingSuccess;
}
break;
}
return firingSuccess;
}
}
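// Illustrative only, not part of the original source: a minimal sketch of how SINGLE mode
// alternates between the two stores on consecutive calls. It assumes that TorpedoStore(10)
// starts non-empty and that fire(1) reports success for a non-empty store.
class GT4500Sketch {
public static void main(String[] args) {
GT4500 ship = new GT4500();
boolean first = ship.fireTorpedos(FiringMode.SINGLE);  // fires the primary store
boolean second = ship.fireTorpedos(FiringMode.SINGLE); // fires the secondary store
System.out.println("first=" + first + " second=" + second);
}
}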
|
package hudson.remoting;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.ArrayList;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;
import static java.util.logging.Level.*;
import javax.annotation.CheckForNull;
import javax.annotation.CheckReturnValue;
import javax.annotation.meta.When;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
/**
* Manages unique ID for exported objects, and allows look-up from IDs.
*
* @author Kohsuke Kawaguchi
*/
final class ExportTable {
private final Map<Integer,Entry<?>> table = new HashMap<Integer,Entry<?>>();
private final Map<Object,Entry<?>> reverse = new HashMap<Object,Entry<?>>();
/**
* {@link ExportList}s which are actively recording the current
* export operation.
*/
private final ThreadLocal<ExportList> lists = new ThreadLocal<ExportList>();
/**
* For diagnosing problems like JENKINS-20707 where we seem to be unexporting too eagerly,
* record the most recently unexported objects, up to {@link #UNEXPORT_LOG_SIZE} entries.
*
* New entries are added to the end, and older ones are removed from the beginning.
*/
private final List<Entry<?>> unexportLog = new LinkedList<Entry<?>>();
/**
* Information about one exported object.
*/
private final class Entry<T> {
final int id;
private Class<? super T>[] interfaces;
private T object;
/**
* {@code object.getClass().getName()} kept around so that we can see the type even after it
* gets deallocated.
*/
@Nonnull
private final String objectType;
/**
* Where was this object first exported?
*/
@Nonnull
final CreatedAt allocationTrace;
/**
* Where was this object unexported?
*/
@CheckForNull
ReleasedAt releaseTrace;
/**
* Current reference count.
* Access to {@link ExportTable} is guarded by synchronized block,
* so accessing this field requires no further synchronization.
*/
private int referenceCount;
/**
* This field can be set programmatically to track reference counting
*/
@CheckForNull
private ReferenceCountRecorder recorder;
Entry(@Nonnull T object, Class<? super T>... interfaces) {
this.id = iota++;
this.interfaces = interfaces.clone();
this.object = object;
this.objectType = object.getClass().getName();
this.allocationTrace = new CreatedAt();
table.put(id,this);
reverse.put(object,this);
}
void addRef() {
referenceCount++;
if (recorder!=null)
recorder.onAddRef(null);
}
/**
* Increases the reference count by a large amount to effectively prevent de-allocation.
* If reference counting were perfectly balanced, incrementing by one would suffice,
* but this makes pinning robust against reference-counting errors
* (and the problem can still be detected by comparing the reference count with the magic value).
*/
void pin() {
// only add the magic constant if we are in the range Integer.MIN_VALUE < x < 0x20000000
// this means that removing excess references will still leave the count above 0, and repeated pinning
// will not yield a negative reference count.
// e.g. if the constant were added unconditionally:
// init + addRef -> 0x00000001;
// pin -> 0x40000001;
// release -> 0x40000000;
// pin -> 0x80000000 => BOOM (the reference count turns negative)
// By making the decision point half way, we give the maximum number of releases away from the pinned
// magic value
if (referenceCount<0x20000000)
referenceCount += 0x40000000;
}
/**
* Releases the entry.
* @param callSite
* Optional location that indicates where the actual call site was that triggered the activity,
* in case it was requested from the other side of the channel.
*/
void release(@CheckForNull Throwable callSite) {
if (recorder!=null)
recorder.onRelease(callSite);
if(--referenceCount==0) {
table.remove(id);
reverse.remove(object);
object = null;
releaseTrace = new ReleasedAt(callSite);
unexportLog.add(this);
while (unexportLog.size()>UNEXPORT_LOG_SIZE)
unexportLog.remove(0);
}
}
private String interfaceNames() {
StringBuilder buf = new StringBuilder(10 + getInterfaces().length * 128);
String sep = "[";
for (Class<? super T> clazz: getInterfaces()) {
buf.append(sep).append(clazz.getName());
sep = ", ";
}
buf.append("]");
return buf.toString();
}
/**
* Dumps the contents of the entry.
*/
void dump(PrintWriter w) throws IOException {
w.printf("#%d (ref.%d) : object=%s type=%s interfaces=%s%n", id, referenceCount, object, objectType, interfaceNames());
allocationTrace.printStackTrace(w);
if (releaseTrace!=null) {
releaseTrace.printStackTrace(w);
}
if (recorder!=null) {
recorder.dump(w);
}
}
String dump() {
try {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
dump(pw);
pw.close();
return sw.toString();
} catch (IOException e) {
throw new Error(e); // impossible
}
}
synchronized Class<? super T>[] getInterfaces() {
return interfaces;
}
synchronized void setInterfaces(Class<? super T>[] interfaces) {
this.interfaces = interfaces;
}
synchronized void addInterface(Class<? super T> clazz) {
for (Class<? super T> c: interfaces) {
if (c.equals(clazz)) return;
}
Class<? super T>[] replacement = new Class[interfaces.length+1];
System.arraycopy(interfaces, 0, replacement, 0, interfaces.length);
replacement[interfaces.length] = clazz;
interfaces = replacement;
}
}
static class Source extends Exception {
protected final long timestamp = System.currentTimeMillis();
/**
* @param callSite
* Optional location that indicates where the actual call site was that triggered the activity,
* in case it was requested from the other side of the channel.
*/
@SuppressWarnings("ResultOfMethodCallIgnored")
Source(@CheckForNull Throwable callSite) {
super(callSite);
updateOurStackTraceCache();
}
// TODO: objects are exported frequently, and the current approach always creates two
// stack trace arrays in memory: the original and the clone returned by the Throwable API.
// The Throwable API only allows working around this via the heavyweight printStackTrace() method.
// Approach #1: a manual implementation of getOurStackTrace() with local storage may be preferable.
// Approach #2: consider disabling this logic by default.
/**
* Update the internal stacktrace cache.
* Forces the computation of the stack trace in a Java friendly data structure,
* so that the call stack can be seen from the heap dump after the fact.
* @return Cloned version of the inner cache.
*/
@CheckReturnValue(when = When.NEVER)
protected final StackTraceElement[] updateOurStackTraceCache() {
return getStackTrace();
}
}
static class CreatedAt extends Source {
CreatedAt() {
super(null);
}
@Override
public String toString() {
return " Created at "+new Date(timestamp);
}
}
static class ReleasedAt extends Source {
ReleasedAt(@CheckForNull Throwable callSite) {
super(callSite);
}
@Override
public String toString() {
return " Released at "+new Date(timestamp);
}
}
/**
* Captures the list of export, so that they can be unexported later.
* This is tied to a particular thread, so it only records operations
* on the current thread.
* The class is not serializable.
*/
@Restricted(NoExternalUse.class)
@SuppressFBWarnings(value = "SE_BAD_FIELD_INNER_CLASS",
justification = "ExportList is supposed to be serializable as ArrayList, but it is not. "
+ "The issue is ignored since the class does not belong to the public API")
public final class ExportList extends ArrayList<Entry> {
private final ExportList old;
private ExportList() {
old=lists.get();
lists.set(this);
}
void release(Throwable callSite) {
synchronized(ExportTable.this) {
for (Entry e : this)
e.release(callSite);
}
}
void stopRecording() {
lists.set(old);
}
private static final long serialVersionUID = 1L; // we don't actually serialize this class but just to shutup FindBugs
}
/**
* Unique ID generator.
*/
private int iota = 1;
/**
* Starts the recording of the export operations
* and returns the list that captures the result.
*
* @see ExportList#stopRecording()
*/
ExportList startRecording() {
ExportList el = new ExportList();
lists.set(el);
return el;
}
boolean isRecording() {
return lists.get()!=null;
}
/**
* Exports the given object.
*
* <p>
* Until the object is {@link #unexport(Object,Throwable) unexported}, it will
* not be subject to GC.
*
* @return
* The assigned 'object ID'. If the object is already exported,
* it will return the ID already assigned to it.
* {@code 0} if the input parameter is {@code null}.
* @param clazz Class of the object
* @param t Class instance
*/
synchronized <T> int export(@Nonnull Class<T> clazz, @CheckForNull T t) {
return export(clazz, t,true);
}
/**
* Exports the given object.
* @param clazz Class of the object
* @param t Object to be exported
* @param notifyListener
* If false, the listener will not be notified. This is used to create an export that is not
* recorded in the currently recording {@link ExportList}, so it is not released together with that list.
* @return
* The assigned 'object ID'. If the object is already exported,
* it will return the ID already assigned to it.
* {@code 0} if the input parameter is {@code null}.
*/
synchronized <T> int export(@Nonnull Class<T> clazz, @CheckForNull T t, boolean notifyListener) {
if(t==null) return 0; // bootstrap classloader
Entry e = reverse.get(t);
if (e == null) {
e = new Entry<T>(t, clazz);
} else {
e.addInterface(clazz);
}
e.addRef();
if(notifyListener) {
ExportList l = lists.get();
if(l!=null) l.add(e);
}
return e.id;
}
/*package*/ synchronized void pin(@Nonnull Object t) {
Entry e = reverse.get(t);
if(e!=null)
e.pin();
}
/**
* Retrieves object by id.
* @param id Object ID
* @return Object
* @throws ExecutionException The requested ID cannot be found.
* The root cause will be diagnosed by {@link #diagnoseInvalidObjectId(int)}.
*/
@Nonnull
synchronized Object get(int id) throws ExecutionException {
Entry e = table.get(id);
if(e!=null) return e.object;
throw diagnoseInvalidObjectId(id);
}
/**
* Retrieves object by id.
* @param oid Object ID
* @return Object or {@code null} if the ID is missing in the {@link ExportTable}.
* @since TODO
*/
@CheckForNull
synchronized Object getOrNull(int oid) {
Entry<?> e = table.get(oid);
if(e!=null) return e.object;
return null;
}
@Nonnull
synchronized Class[] type(int id) throws ExecutionException {
Entry e = table.get(id);
if(e!=null) return e.getInterfaces();
throw diagnoseInvalidObjectId(id);
}
/**
* Propagate a channel termination error to all the exported objects.
*
* <p>
* Exported {@link Pipe}s are vulnerable to infinite blocking
* when the channel is lost and the sender side is cut off. The reader
* end will not see that the writer has disappeared.
*
* @param e Termination error
*
*/
void abort(@CheckForNull Throwable e) {
List<Entry<?>> values;
synchronized (this) {
values = new ArrayList<Entry<?>>(table.values());
}
for (Entry<?> v : values) {
if (v.object instanceof ErrorPropagatingOutputStream) {
try {
((ErrorPropagatingOutputStream)v.object).error(e);
} catch (Throwable x) {
LOGGER.log(INFO, "Failed to propagate a channel termination error",x);
}
}
}
// clear the references to allow exported objects to get GCed.
// don't bother putting them into #unexportLog because this channel
// is forever closed.
synchronized (this) {
table.clear();
reverse.clear();
}
}
/**
* Creates a diagnostic exception for Invalid object id.
* @param id Object ID
* @return Exception to be thrown
*/
@Nonnull
private synchronized ExecutionException diagnoseInvalidObjectId(int id) {
Exception cause=null;
if (!unexportLog.isEmpty()) {
for (Entry e : unexportLog) {
if (e.id==id)
cause = new Exception("Object was recently deallocated\n"+Util.indent(e.dump()), e.releaseTrace);
}
if (cause==null)
cause = new Exception("Object appears to be deallocated at lease before "+
new Date(unexportLog.get(0).releaseTrace.timestamp));
}
return new ExecutionException("Invalid object ID "+id+" iota="+iota, cause);
}
/**
* Removes the exported object from the table.
* @param t Object to be unexported. {@code null} instances will be ignored.
* @param callSite Stacktrace of the invocation source
*/
synchronized void unexport(@CheckForNull Object t, Throwable callSite) {
if(t==null) return;
Entry e = reverse.get(t);
if(e==null) {
LOGGER.log(SEVERE, "Trying to unexport an object that's not exported: "+t);
return;
}
e.release(callSite);
}
/**
* Removes the exported object for the specified oid from the table.
* Logs error if the object has been already unexported.
*/
void unexportByOid(Integer oid, Throwable callSite) {
unexportByOid(oid, callSite, true);
}
/**
* Removes the exported object for the specified oid from the table.
* @param oid Object ID. If {@code null} the method will do nothing.
* @param callSite Unexport command caller
* @param severeErrorIfMissing Consider missing object as {@link #SEVERE} error. {@link #FINE} otherwise
* @since TODO
*/
synchronized void unexportByOid(@CheckForNull Integer oid, @CheckForNull Throwable callSite, boolean severeErrorIfMissing) {
if(oid==null) return;
Entry e = table.get(oid);
if(e==null) {
Level loggingLevel = severeErrorIfMissing ? SEVERE : FINE;
LOGGER.log(loggingLevel, "Trying to unexport an object that's already unexported", diagnoseInvalidObjectId(oid));
if (callSite!=null)
LOGGER.log(loggingLevel, "2nd unexport attempt is here", callSite);
return;
}
e.release(callSite);
}
/**
* Dumps the contents of the table to a file.
* @throws IOException Output error
*/
synchronized void dump(@Nonnull PrintWriter w) throws IOException {
for (Entry e : table.values()) {
e.dump(w);
}
}
/*package*/ synchronized boolean isExported(Object o) {
return reverse.containsKey(o);
}
/**
* Defines number of entries to be stored in the unexport history.
* @since 2.40
*/
public static int UNEXPORT_LOG_SIZE = Integer.getInteger(ExportTable.class.getName()+".unexportLogSize",1024);
private static final Logger LOGGER = Logger.getLogger(ExportTable.class.getName());
}
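// Illustrative only, not part of the original source: the export / look-up / unexport round trip
// supported by ExportTable. It must live in the hudson.remoting package because the class and its
// methods are package-private; the class name ExportTableSketch is hypothetical.
class ExportTableSketch {
static void roundTrip() throws java.util.concurrent.ExecutionException {
ExportTable table = new ExportTable();
Runnable callback = new Runnable() { public void run() { } };
int oid = table.export(Runnable.class, callback); // assigns an object ID and bumps the reference count
Object resolved = table.get(oid);                 // resolves the ID back to the exported object
table.unexportByOid(oid, null);                   // drops the reference again
assert resolved == callback;
}
}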
|
package java.util.function;
public interface Function<T, R> {
/**
* <p>Applies this function to the given argument</p>
* @param t the function argument
* @return the function result
*/
public R apply(T t);
// not implementable pre Java 8:
// public default <V> Function<T, V> andThen(Function<? super R, ? extends V> after);
// public default <V> Function<T, V> compose(Function<? super V, ? extends T> before);
// public static <T> Function<T, T> identity();
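// Illustrative only, not part of the original source: a minimal pre-Java-8 usage sketch;
// the class name StringLength is hypothetical.
// class StringLength implements Function<String, Integer> {
//     public Integer apply(String t) { return t.length(); }
// }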
}
|
package javaslang.collection;
import javaslang.Algebra.Monad;
import javaslang.Algebra.Monoid;
import javaslang.*;
import javaslang.Memoizer.Memoizer0;
import javaslang.monad.Try;
import java.io.*;
import java.math.BigInteger;
import java.nio.charset.Charset;
import java.util.*;
import java.util.function.*;
import java.util.stream.Collector;
public interface Stream<T> extends Seq<T>, Monad<T, Traversable<?>>, Monoid<Stream<T>>, ValueObject {
/**
* Returns a {@link java.util.stream.Collector} which may be used in conjunction with
* {@link java.util.stream.Stream#collect(java.util.stream.Collector)} to obtain a {@link javaslang.collection.Stream}.
*
* @param <T> Component type of the Stream.
* @return A javaslang.collection.Stream Collector.
*/
static <T> Collector<T, ArrayList<T>, Stream<T>> collector() {
final Supplier<ArrayList<T>> supplier = ArrayList::new;
final BiConsumer<ArrayList<T>, T> accumulator = ArrayList::add;
final BinaryOperator<ArrayList<T>> combiner = (left, right) -> {
left.addAll(right);
return left;
};
final Function<ArrayList<T>, Stream<T>> finisher = Stream::of;
return Collector.of(supplier, accumulator, combiner, finisher);
}
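// Illustrative only, not part of the original source: a hypothetical helper showing how the
// collector above plugs into the JDK stream API; the method name collectorDemo is an assumption.
static Stream<Integer> collectorDemo() {
// collect a java.util.stream.Stream into a javaslang Stream
return java.util.stream.Stream.of(1, 2, 3).collect(Stream.<Integer>collector());
}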
static Stream<Integer> gen(int from) {
return Stream.of(new Iterator<Integer>() {
int i = from;
boolean hasNext = true;
@Override
public boolean hasNext() {
return hasNext;
}
@Override
public Integer next() {
if (i == Integer.MAX_VALUE) {
hasNext = false;
}
return i++;
}
});
}
static Stream<BigInteger> gen(BigInteger from) {
return Stream.of(new Iterator<BigInteger>() {
BigInteger i = from;
@Override
public boolean hasNext() {
return true;
}
@Override
public BigInteger next() {
final BigInteger value = i;
i = i.add(BigInteger.ONE);
return value;
}
});
}
// Supplier is not referential transparent in general. Example: Stream.gen(Math::random).take(10)
static <T> Stream<T> gen(Supplier<T> supplier) {
return Stream.of(new Iterator<T>() {
@Override
public boolean hasNext() {
return true;
}
@Override
public T next() {
return supplier.get();
}
});
}
static Stream<String> stdin() {
return lines(System.in);
}
static Stream<String> stdin(Charset charset) {
return lines(System.in, charset);
}
static Stream<String> lines(InputStream in) {
return lines(in, Charset.defaultCharset());
}
static Stream<String> lines(InputStream in, Charset charset) {
return Stream.of(new Iterator<String>() {
final BufferedReader reader = new BufferedReader(new InputStreamReader(in, charset));
String next;
@Override
public boolean hasNext() {
final boolean hasNext = (next = Try.of(reader::readLine).orElse(null)) != null;
if (!hasNext) {
Try.run(reader::close);
}
return hasNext;
}
@Override
public String next() {
// user can never call this Iterator.next() directly => no check of hasNext here
return next;
}
});
}
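// Illustrative only, not part of the original source: a hypothetical helper demonstrating that
// lines() is lazy, so take(3) reads at most three lines from the underlying input.
static Stream<String> firstThreeLinesDemo(InputStream in) {
return lines(in).take(3);
}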
static Stream<Character> chars(InputStream in) {
return chars(in, Charset.defaultCharset());
}
static Stream<Character> chars(InputStream in, Charset charset) {
return Stream.of(new Iterator<Character>() {
final InputStreamReader reader = new InputStreamReader(in, charset);
int next;
@Override
public boolean hasNext() {
final boolean hasNext = (next = Try.of(reader::read).orElse(-1)) != -1;
if (!hasNext) {
Try.run(reader::close);
}
return hasNext;
}
@Override
public Character next() {
// user can never call this Iterator.next() directly => no check of hasNext here
return (char) next;
}
});
}
static Stream<Byte> bytes(InputStream in) {
return Stream.of(new Iterator<Byte>() {
int next;
@Override
public boolean hasNext() {
final boolean hasNext = (next = Try.of(in::read).orElse(-1)) != -1;
if (!hasNext) {
Try.run(in::close);
}
return hasNext;
}
@Override
public Byte next() {
// user can never call this Iterator.next() directly => no check of hasNext here
return (byte) next;
}
});
}
/**
* Returns the single instance of Nil. Convenience method for {@code Nil.instance()} .
*
* @param <T> Component type of Nil, determined by type inference in the particular context.
* @return The empty list.
*/
static <T> Stream<T> nil() {
return Nil.instance();
}
/**
* Creates a Stream of the given elements.
* <p/>
* <pre>
* <code> Stream.of(1, 2, 3, 4)
* = Nil.instance().prepend(4).prepend(3).prepend(2).prepend(1)
* = new Cons(1, new Cons(2, new Cons(3, new Cons(4, Nil.instance()))))</code>
* </pre>
*
* @param <T> Component type of the Stream.
* @param elements Zero or more elements.
* @return A list containing the given elements in the same order.
*/
@SafeVarargs
static <T> Stream<T> of(T... elements) {
Require.nonNull(elements, "elements is null");
return Stream.of(new Iterator<T>() {
int i = 0;
@Override
public boolean hasNext() {
return i < elements.length;
}
@Override
public T next() {
return elements[i++];
}
});
}
/**
* Creates a Stream of the given elements.
*
* @param <T> Component type of the Stream.
* @param elements An Iterable of elements.
* @return A list containing the given elements in the same order.
*/
static <T> Stream<T> of(Iterable<? extends T> elements) {
Require.nonNull(elements, "elements is null");
if (elements instanceof Stream) {
@SuppressWarnings("unchecked")
final Stream<T> stream = (Stream<T>) elements;
return stream;
} else {
return Stream.of(elements.iterator());
}
}
// providing this method to save resources creating a Stream - makes no sense for collections in general
static <T> Stream<T> of(Iterator<? extends T> iterator) {
Require.nonNull(iterator, "iterator is null");
class Local {
Stream<T> of(Iterator<? extends T> iterator) {
if (iterator.hasNext()) {
return new Cons<>(iterator.next(), () -> Local.this.of(iterator));
} else {
return Nil.instance();
}
}
}
return new Deferred<>(() -> new Local().of(iterator));
}
static Stream<Integer> range(int from, int toExclusive) {
if (toExclusive == Integer.MIN_VALUE) {
return Nil.instance();
} else {
return Stream.rangeClosed(from, toExclusive - 1);
}
}
static Stream<Integer> rangeClosed(int from, int toInclusive) {
if (from > toInclusive) {
return Nil.instance();
} else if (from == Integer.MIN_VALUE && toInclusive == Integer.MIN_VALUE) {
return new Cons<>(Integer.MIN_VALUE, Nil::instance);
} else {
return Stream.of(new Iterator<Integer>() {
int i = from;
@Override
public boolean hasNext() {
return i <= toInclusive;
}
@Override
public Integer next() {
return i++;
}
});
}
}
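// Illustrative only, not part of the original source: a hypothetical helper showing that
// generated streams are lazy, so the unbounded stream gen(0) can safely be truncated with take().
static Stream<Integer> firstNaturalsDemo() {
return gen(0).take(5); // 0, 1, 2, 3, 4
}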
@Override
default Stream<T> append(T element) {
return isEmpty() ? new Cons<>(element, Nil::instance) : new Cons<>(head(), () -> tail().append(element));
}
@Override
default Stream<T> appendAll(Iterable<? extends T> elements) {
Require.nonNull(elements, "elements is null");
return isEmpty() ? Stream.of(elements) : new Cons<>(head(), () -> tail().appendAll(elements));
}
@Override
default Stream<T> clear() {
return Nil.instance();
}
@Override
default Stream<T> combine(Stream<T> list1, Stream<T> list2) {
Require.nonNull(list1, "list1 is null");
Require.nonNull(list2, "list2 is null");
return list1.appendAll(list2);
}
@Override
default Stream<T> distinct() {
// TODO: better solution?
return Stream.of(List.of(this).distinct());
}
@Override
default Stream<T> drop(int n) {
return (Stream<T>) Seq.super.drop(n);
}
@Override
default Stream<T> dropRight(int n) {
return (Stream<T>) Seq.super.dropRight(n);
}
@Override
default Stream<T> dropWhile(Predicate<? super T> predicate) {
return (Stream<T>) Seq.super.dropWhile(predicate);
}
@Override
default Stream<T> filter(Predicate<? super T> predicate) {
Require.nonNull(predicate, "predicate is null");
if (isEmpty()) {
return Nil.instance();
} else {
final T head = head();
if (predicate.test(head)) {
return new Cons<>(head, () -> tail().filter(predicate));
} else {
return tail().filter(predicate);
}
}
}
@Override
default <U, TRAVERSABLE extends Manifest<U, Traversable<?>>> Stream<U> flatMap(Function<? super T, TRAVERSABLE> mapper) {
Require.nonNull(mapper, "mapper is null");
if (isEmpty()) {
return Nil.instance();
} else {
@SuppressWarnings("unchecked")
final Traversable<U> mapped = (Traversable<U>) mapper.apply(head());
return Nil.<U>instance().appendAll(mapped).appendAll(tail().flatMap(mapper));
}
}
@Override
default T fold(T zero, BiFunction<? super T, ? super T, ? extends T> op) {
return foldLeft(zero, op);
}
@Override
default T get(int index) {
if (isEmpty()) {
throw new IndexOutOfBoundsException("get(" + index + ") on empty stream");
}
if (index < 0) {
throw new IndexOutOfBoundsException("get(" + index + ")");
}
Stream<T> stream = this;
for (int i = index - 1; i >= 0; i--) {
stream = stream.tail();
if (stream.isEmpty()) {
throw new IndexOutOfBoundsException(String.format("get(%s) on stream of size %s", index, index - i));
}
}
return stream.head();
}
@Override
default int indexOf(T element) {
int index = 0;
for (Stream<T> stream = this; !stream.isEmpty(); stream = stream.tail(), index++) {
if (Objects.equals(stream.head(), element)) {
return index;
}
}
return -1;
}
@Override
default Stream<T> init() {
if (isEmpty()) {
throw new UnsupportedOperationException("init on empty Stream");
} else {
final Stream<T> tail = tail();
if (tail.isEmpty()) {
return Nil.instance();
} else {
return new Cons<>(head(), tail::init);
}
}
}
@Override
default Stream<T> insert(int index, T element) {
if (index < 0) {
throw new IndexOutOfBoundsException("insert(" + index + ", e)");
}
if (index > 0 && isEmpty()) {
throw new IndexOutOfBoundsException("insert(" + index + ", e) on empty stream");
}
if (index == 0) {
return new Cons<>(element, () -> this);
} else {
return new Cons<>(head(), () -> tail().insert(index - 1, element));
}
}
@Override
default Stream<T> insertAll(int index, Iterable<? extends T> elements) {
Require.nonNull(elements, "elements is null");
if (index < 0) {
throw new IndexOutOfBoundsException("insertAll(" + index + ", elements)");
}
if (index > 0 && isEmpty()) {
throw new IndexOutOfBoundsException("insertAll(" + index + ", elements) on empty stream");
}
if (index == 0) {
return Stream.of(elements).appendAll(this);
} else {
return new Cons<>(head(), () -> tail().insertAll(index - 1, elements));
}
}
@Override
default Stream<T> intersperse(T element) {
if (isEmpty()) {
return Nil.instance();
} else {
return new Cons<>(head(), () -> {
final Stream<T> tail = tail();
return tail.isEmpty() ? tail : new Cons<>(element, () -> tail.intersperse(element));
});
}
}
@Override
default Iterator<T> iterator() {
final class StreamIterator implements Iterator<T> {
Supplier<Stream<T>> streamSupplier = () -> Stream.this;
@Override
public boolean hasNext() {
return !streamSupplier.get().isEmpty();
}
@Override
public T next() {
final Stream<T> stream = streamSupplier.get();
if (stream.isEmpty()) {
throw new NoSuchElementException();
} else {
// defer computation of stream = stream.tail() because computation of new head may be blocking!
streamSupplier = stream::tail;
return stream.head();
}
}
}
return new StreamIterator();
}
@Override
default int lastIndexOf(T element) {
int result = -1, index = 0;
for (Stream<T> stream = this; !stream.isEmpty(); stream = stream.tail(), index++) {
if (Objects.equals(stream.head(), element)) {
result = index;
}
}
return result;
}
@Override
default <U> Stream<U> map(Function<? super T, ? extends U> mapper) {
Require.nonNull(mapper, "mapper is null");
if (isEmpty()) {
return Nil.instance();
} else {
return new Cons<>(mapper.apply(head()), () -> tail().map(mapper));
}
}
@Override
default Stream<T> prepend(T element) {
return new Cons<>(element, () -> this);
}
@Override
default Stream<T> prependAll(Iterable<? extends T> elements) {
Require.nonNull(elements, "elements is null");
return Stream.of(elements).appendAll(this);
}
@Override
default Stream<T> remove(T element) {
if (isEmpty()) {
return this;
} else {
final T head = head();
return Objects.equals(head, element) ? tail() : new Cons<>(head, () -> tail().remove(element));
}
}
@Override
default Stream<T> removeAll (T removed){
return filter(e -> !Objects.equals(e, removed));
}
@Override
default Stream<T> removeAll(Iterable<? extends T> elements) {
Require.nonNull(elements, "elements is null");
final Stream<T> distinct = Stream.of(elements).distinct();
return filter(e -> !distinct.contains(e));
}
@Override
default Stream<T> replace(T currentElement, T newElement) {
if (isEmpty()) {
return this;
} else {
final T head = head();
if (Objects.equals(head, currentElement)) {
return new Cons<>(newElement, this::tail);
} else {
return new Cons<>(head, () -> tail().replace(currentElement, newElement));
}
}
}
@Override
default Stream<T> replaceAll(T currentElement, T newElement) {
if (isEmpty()) {
return this;
} else {
final T head = head();
final T newHead = Objects.equals(head, currentElement) ? newElement : head;
return new Cons<>(newHead, () -> tail().replaceAll(currentElement, newElement));
}
}
@Override
default Stream<T> replaceAll(UnaryOperator<T> operator) {
if (isEmpty()) {
return this;
} else {
return new Cons<>(operator.apply(head()), () -> tail().replaceAll(operator));
}
}
@Override
default Stream<T> retainAll(Iterable<? extends T> elements) {
final Stream<T> retained = Stream.of(elements).distinct();
return filter(retained::contains);
}
@Override
default Stream<T> reverse() {
return foldLeft(nil(), Stream::prepend);
}
@Override
default Stream<T> set(int index, T element) {
if (isEmpty()) {
throw new IndexOutOfBoundsException("set(" + index + ", e) on empty stream");
}
if (index < 0) {
throw new IndexOutOfBoundsException("set(" + index + ", e)");
}
Stream<T> preceding = Nil.instance();
Stream<T> tail = this;
for (int i = index; i > 0; i--, tail = tail.tail()) {
if (tail.isEmpty()) {
throw new IndexOutOfBoundsException("set(" + index + ", e) on stream of size " + length());
}
preceding = preceding.prepend(tail.head());
}
if (tail.isEmpty()) {
throw new IndexOutOfBoundsException("set(" + index + ", e) on stream of size " + length());
}
// skip the current head element because it is replaced
return preceding.reverse().appendAll(tail.tail().prepend(element));
}
@Override
default Stream<T> sort() {
return toJavaStream().sorted().collect(Stream.collector());
}
@Override
default Stream<T> sort(Comparator<? super T> c) {
Require.nonNull(c, "comparator is null");
return toJavaStream().sorted(c).collect(Stream.collector());
}
@Override
default Tuple.Tuple2<Stream<T>, Stream<T>> span(Predicate<? super T> predicate) {
Require.nonNull(predicate, "predicate is null");
return Tuple.of(takeWhile(predicate), dropWhile(predicate));
}
@Override
default Tuple.Tuple2<Stream<T>, Stream<T>> splitAt ( int n){
return Tuple.of(take(n), drop(n));
}
@Override
default Spliterator<T> spliterator() {
// the focus of the Stream API is on random-access collections of *known size*
return Spliterators.spliterator(iterator(), length(), Spliterator.ORDERED | Spliterator.IMMUTABLE);
}
@Override
default Stream<T> subsequence(int beginIndex) {
if (beginIndex < 0) {
throw new IndexOutOfBoundsException("subsequence(" + beginIndex + ")");
}
Stream<T> result = this;
for (int i = 0; i < beginIndex; i++, result = result.tail()) {
if (result.isEmpty()) {
throw new IndexOutOfBoundsException(String.format("subsequence(%s) on stream of size %s", beginIndex, i));
}
}
return result;
}
@Override
default Stream<T> subsequence(int beginIndex, int endIndex) {
if (beginIndex < 0 || endIndex - beginIndex < 0) {
throw new IndexOutOfBoundsException(String.format("subsequence(%s, %s)", beginIndex, endIndex));
}
if (endIndex - beginIndex == 0) {
return Nil.instance();
}
if (isEmpty()) {
throw new IndexOutOfBoundsException("subsequence of empty stream");
}
if (beginIndex == 0) {
return new Cons<>(head(), () -> tail().subsequence(0, endIndex - 1));
} else {
return tail().subsequence(beginIndex - 1, endIndex - 1);
}
}
@Override
Stream<T> tail();
@Override
default Stream<T> take(int n) {
if (isEmpty()) {
return this;
} else if (n < 1) {
return Nil.instance();
} else {
return new Cons<>(head(), () -> tail().take(n - 1));
}
}
@Override
default Stream<T> takeRight(int n) {
return (Stream<T>) Seq.super.takeRight(n);
}
@Override
default Stream<T> takeWhile(Predicate<? super T> predicate) {
if (isEmpty()) {
return this;
} else {
final T head = head();
if (predicate.test(head)) {
return new Cons<>(head, () -> tail().takeWhile(predicate));
} else {
return Nil.instance();
}
}
}
@Override
default <T1, T2> Tuple.Tuple2<Stream<T1>, Stream<T2>> unzip(Function<? super T, Tuple.Tuple2<T1, T2>> unzipper) {
Require.nonNull(unzipper, "unzipper is null");
final Stream<Tuple.Tuple2<T1, T2>> stream = map(unzipper);
return Tuple.of(stream.map(t -> t._1), stream.map(t -> t._2));
}
@Override
default Stream<T> zero() {
return Nil.instance();
}
@Override
default <U> Stream<Tuple.Tuple2<T, U>> zip(Iterable<U> iterable) {
Require.nonNull(iterable, "iterable is null");
final Stream<U> that = Stream.of(iterable);
if (this.isEmpty() || that.isEmpty()) {
return Nil.instance();
} else {
return new Cons<>(Tuple.of(this.head(), that.head()), () -> this.tail().zip(that.tail()));
}
}
@Override
default <U> Stream<Tuple.Tuple2<T, U>> zipAll(Iterable<U> iterable, T thisElem, U thatElem) {
Require.nonNull(iterable, "iterable is null");
final Stream<U> that = Stream.of(iterable);
final boolean isThisEmpty = this.isEmpty();
final boolean isThatEmpty = that.isEmpty();
if (isThisEmpty && isThatEmpty) {
return Nil.instance();
} else {
final T head1 = isThisEmpty ? thisElem : this.head();
final U head2 = isThatEmpty ? thatElem : that.head();
final Stream<T> tail1 = isThisEmpty ? this : this.tail();
final Stream<U> tail2 = isThatEmpty ? that : that.tail();
return new Cons<>(Tuple.of(head1, head2), () -> tail1.zipAll(tail2, thisElem, thatElem));
}
}
@Override
default Stream<Tuple.Tuple2<T, Integer>> zipWithIndex() {
return zip(() -> new Iterator<Integer>() {
int i = 0;
@Override
public boolean hasNext() {
return true;
}
@Override
public Integer next() {
return i++;
}
});
}
/**
* Non-empty Stream.
*
* @param <T> Component type of the Stream.
*/
// DEV NOTE: class declared final because of serialization proxy pattern.
// (see Effective Java, 2nd ed., p. 315)
static final class Cons<T> extends AbstractStream<T> {
private static final long serialVersionUID = 53595355464228669L;
private final T head;
private final Memoizer0<Stream<T>> tail;
public Cons(T head, Supplier<Stream<T>> tail) {
this.head = head;
this.tail = Memoizer.of(tail);
}
@Override
public T head() {
return head;
}
@Override
public Stream<T> tail() {
return tail.apply();
}
@Override
public boolean isEmpty() {
return false;
}
@Override
public Tuple.Tuple2<T, Stream<T>> unapply() {
return Tuple.of(head(), tail());
}
/**
* {@code writeReplace} method for the serialization proxy pattern.
* <p/>
* The presence of this method causes the serialization system to emit a SerializationProxy instance instead of
* an instance of the enclosing class.
*
* @return A SerializationProxy for this enclosing class.
*/
private Object writeReplace() {
return new SerializationProxy<>(this);
}
/**
* {@code readObject} method for the serialization proxy pattern.
* <p/>
* Guarantees that the serialization system will never generate a serialized instance of the enclosing class.
*
* @param stream An object serialization stream.
* @throws java.io.InvalidObjectException This method will throw with the message "Proxy required".
*/
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
throw new InvalidObjectException("Proxy required");
}
/**
* A serialization proxy which, in this context, is used to deserialize immutable, linked Streams with final
* instance fields.
*
* @param <T> The component type of the underlying stream.
*/
// DEV NOTE: The serialization proxy pattern is not compatible with non-final, i.e. extendable,
// classes. Also, it may not be compatible with circular object graphs.
private static final class SerializationProxy<T> implements Serializable {
private static final long serialVersionUID = 3851894487210781138L;
// the instance to be serialized/deserialized
private transient Cons<T> stream;
/**
* Constructor for the case of serialization, called by {@link Cons#writeReplace()}.
* <p/>
* The constructor of a SerializationProxy takes an argument that concisely represents the logical state of
* an instance of the enclosing class.
*
* @param stream a Cons
*/
SerializationProxy(Cons<T> stream) {
this.stream = stream;
}
/**
* Write an object to a serialization stream.
*
* @param s An object serialization stream.
* @throws java.io.IOException If an error occurs writing to the stream.
*/
private void writeObject(ObjectOutputStream s) throws IOException {
s.defaultWriteObject();
s.writeInt(stream.length());
for (Stream<T> l = stream; !l.isEmpty(); l = l.tail()) {
s.writeObject(l.head());
}
}
/**
* Read an object from a deserialization stream.
*
* @param s An object deserialization stream.
* @throws ClassNotFoundException If the object's class read from the stream cannot be found.
* @throws InvalidObjectException If the stream contains no stream elements.
* @throws IOException If an error occurs reading from the stream.
*/
@SuppressWarnings("ConstantConditions")
private void readObject(ObjectInputStream s) throws ClassNotFoundException, IOException {
s.defaultReadObject();
final int size = s.readInt();
if (size <= 0) {
throw new InvalidObjectException("No elements");
}
Stream<T> temp = Nil.instance();
for (int i = 0; i < size; i++) {
@SuppressWarnings("unchecked")
final T element = (T) s.readObject();
temp = temp.append(element);
}
// DEV-NOTE: Cons is deserialized
stream = (Cons<T>) temp;
}
/**
* {@code readResolve} method for the serialization proxy pattern.
* <p/>
* Returns a logically equivalent instance of the enclosing class. The presence of this method causes the
* serialization system to translate the serialization proxy back into an instance of the enclosing class
* upon deserialization.
*
* @return A deserialized instance of the enclosing class.
*/
private Object readResolve() {
return stream;
}
}
}
/**
* The empty Stream.
* <p/>
* This is a singleton, i.e. not Cloneable.
*
* @param <T> Component type of the Stream.
*/
static final class Nil<T> extends AbstractStream<T> {
private static final long serialVersionUID = 809473773619488283L;
private static final Nil<?> INSTANCE = new Nil<>();
// hidden
private Nil() {
}
public static <T> Nil<T> instance() {
@SuppressWarnings("unchecked")
final Nil<T> instance = (Nil<T>) INSTANCE;
return instance;
}
@Override
public T head() {
throw new UnsupportedOperationException("head of empty stream");
}
@Override
public Stream<T> tail() {
throw new UnsupportedOperationException("tail of empty stream");
}
@Override
public boolean isEmpty() {
return true;
}
@Override
public Tuple.Tuple0 unapply() {
return Tuple.empty();
}
/**
* Instance control for object serialization.
*
* @return The singleton instance of Nil.
* @see java.io.Serializable
*/
private Object readResolve() {
return INSTANCE;
}
}
/**
* Deferred Stream for lazy evaluation of blocking input.
*
* @param <T> Component type of the Stream.
*/
static final class Deferred<T> extends AbstractStream<T> {
private static final long serialVersionUID = -8478757773471498399L;
private final Memoizer0<Stream<T>> stream;
public Deferred(Supplier<Stream<T>> streamSupplier) {
this.stream = Memoizer.of(streamSupplier);
}
@Override
public T head() {
return stream.apply().head();
}
@Override
public Stream<T> tail() {
return stream.apply().tail();
}
@Override
public boolean isEmpty() {
return stream.apply().isEmpty();
}
@Override
public Tuple.Tuple2<T, Stream<T>> unapply() {
return Tuple.of(head(), tail());
}
/**
* {@code writeReplace} method for serializing wrapped Stream.
* <p/>
* The presence of this method causes the serialization system to delegate to the wrapped Stream instance
* instead of an instance of the enclosing class.
*
* @return The wrapped Stream instance, which is serialized in place of this enclosing class.
*/
private Object writeReplace() {
return stream.apply();
}
/**
* {@code readObject} method for preventing serialization of the enclosing class.
* <p/>
* Guarantees that the serialization system will never generate a serialized instance of the enclosing class.
*
* @param stream An object serialization stream.
* @throws java.io.InvalidObjectException This method will throw with the message "Not deserializable".
*/
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
throw new InvalidObjectException("No direct serialization");
}
}
static abstract class AbstractStream<T> implements Stream<T> {
private static final long serialVersionUID = 5433763348296234013L;
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
} else if (!(o instanceof Stream)) {
return false;
} else {
Stream<?> stream1 = this;
Stream<?> stream2 = (Stream<?>) o;
while (!stream1.isEmpty() && !stream2.isEmpty()) {
final boolean isEqual = Objects.equals(stream1.head(), stream2.head());
if (!isEqual) {
return false;
}
stream1 = stream1.tail();
stream2 = stream2.tail();
}
return stream1.isEmpty() && stream2.isEmpty();
}
}
@Override
public int hashCode() {
int hashCode = 1;
for (Stream<T> stream = this; !stream.isEmpty(); stream = stream.tail()) {
final T element = stream.head();
hashCode = 31 * hashCode + Objects.hashCode(element);
}
return hashCode;
}
@Override
public String toString() {
return Stream.class.getSimpleName() + map(Strings::toString).join(", ", "(", ")");
}
}
}
|
package jdungeonquest.gui;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.imageio.ImageIO;
import javax.swing.DefaultListModel;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import jdungeonquest.GameMap;
import jdungeonquest.Tile;
import jdungeonquest.TileHolder;
import net.miginfocom.swing.MigLayout;
public class ClientGUI extends JPanel{
GUI parent;
ChatPanel chatPanel;
// SunPanel sunPanel;
// PlayerHolderPanel playerHolderPanel;
MapPanel mapPanel;
GameMap map;
TileHolder tileHolder;
Map<String, int[]> playerPosition;
ClientGUI(GUI parent){
this.parent = parent;
map = new GameMap();
tileHolder = new TileHolder();
playerPosition = new HashMap<>();
playerPosition.put("TestPlayer", new int[]{0,0});
initGUI();
}
private void initGUI() {
MigLayout layout = new MigLayout("fill", "[grow][grow]", "[grow][grow]");
this.setLayout(layout);
chatPanel = new ChatPanel(this);
mapPanel = new MapPanel(map);
mapPanel.setPreferredSize(new Dimension(2000, 2600));
mapPanel.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(java.awt.event.MouseEvent evt) {
processMouseClick(evt.getPoint().x, evt.getPoint().y);
}
});
add(new JScrollPane(mapPanel), "w 200:600:1000, h 200:600:1000, grow 60");
add(chatPanel);
}
void processMouseClick(int x, int y){
}
void addChatMessage(String msg, String author) {
String time = new SimpleDateFormat("[HH:mm:ss] ").format(new Date());
String text = time + author + ":" + msg;
((DefaultListModel) chatPanel.messageList.getModel()).add(((DefaultListModel) chatPanel.messageList.getModel()).getSize(), text);
}
private class MapPanel extends JPanel {
GameMap map;
BufferedImage testImage;
public MapPanel(GameMap map) {
try {
this.testImage = ImageIO.read(getClass().getResourceAsStream( (new Tile()).getImagePath()));
} catch (IOException ex) {
Logger.getLogger(ClientGUI.class.getName()).log(Level.SEVERE, null, ex);
}
this.map = map;
}
private BufferedImage resizeImage(BufferedImage originalImage, int width, int height) throws IOException {
BufferedImage resizedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
Graphics2D g = resizedImage.createGraphics();
g.drawImage(originalImage, 0, 0, width, height, null);
g.dispose();
return resizedImage;
}
public BufferedImage scaleImage(BufferedImage img, int width, int height, Color background) {
int imgWidth = img.getWidth();
int imgHeight = img.getHeight();
if (imgWidth * height < imgHeight * width) {
width = imgWidth * height / imgHeight;
} else {
height = imgHeight * width / imgWidth;
}
BufferedImage newImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
Graphics2D g = newImage.createGraphics();
try {
g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
g.setBackground(background);
g.clearRect(0, 0, width, height);
g.drawImage(img, 0, 0, width, height, null);
} finally {
g.dispose();
}
return newImage;
}
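// Worked example (added for clarity, not in the original source): scaling a 400x200 image into a
// 100x100 box: imgWidth * height = 40000 is not less than imgHeight * width = 20000, so the else
// branch above recomputes height = 200 * 100 / 400 = 50 and the result is 100x50, preserving the
// source's 2:1 aspect ratio within the requested bounds.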
@Override
public void paintComponent(Graphics g1) {
super.paintComponent(g1); // let Swing clear the background before the map is drawn
int w = this.getWidth();
int h = this.getHeight();
int step_x = w / 10;
int step_y = h / 13;
Graphics2D g = (Graphics2D) g1;
for (int x = 0; x < GameMap.MAX_X; x++) {
for (int y = 0; y < GameMap.MAX_Y; y++) {
// BufferedImage tileImage = null;
Tile tile = map.getTile(x, y);
g.drawImage(tile.getImage(), x * step_x, y * step_y, this);
}
}
for(String player : playerPosition.keySet()){
final int player_x = playerPosition.get(player)[0];
final int player_y = playerPosition.get(player)[1];
g.drawString(player, player_x * step_x + 50, player_y * step_y + 50);
}
// note: g is owned by Swing and is disposed by the caller, so it is not disposed here
}
}
private class ChatPanel extends JPanel{
JList messageList;
ClientGUI parent;
JButton sendButton;
JTextField textField;
public ChatPanel(ClientGUI p) {
this.parent = p;
MigLayout layout = new MigLayout("", "[grow 0][grow 0]", "[grow 0]");
setLayout(layout);
messageList = new JList();
messageList.setModel(new DefaultListModel());
messageList.ensureIndexIsVisible(((DefaultListModel) messageList.getModel()).size() - 1);
textField = new JTextField("");
sendButton = new JButton("Send");
sendButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
String text = textField.getText();
textField.setText("");
parent.parent.getClient().sendChatMessage(text);
}
});
add(textField, "grow, push");
add(sendButton, "wrap");
add(new JScrollPane(messageList), "grow");
}
}
}
|
// samskivert library - useful routines for java programs
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.samskivert.util;
import java.awt.Color;
import java.util.HashMap;
import java.util.Map;
/**
* Provides a mechanism for converting a string representation of a value
* into a Java object when provided with the type of the target object.
* This is used to do things like populate object fields with values
* parsed from an XML file and the like.
*/
public class ValueMarshaller
{
/**
* Attempts to convert the specified value to an instance of the
* specified object type.
*
* @exception Exception thrown if no field parser exists for the
* target type or if an error occurs while parsing the value.
*/
public static Object unmarshal (Class<?> type, String source)
throws Exception
{
// look up an argument parser for the field type
Parser parser = _parsers.get(type);
if (parser == null) {
String errmsg = "Don't know how to convert strings into " +
"values of type '" + type + "'.";
throw new Exception(errmsg);
}
return parser.parse(source);
}
protected static interface Parser
{
public Object parse (String source) throws Exception;
}
protected static Map<Class<?>,Parser> _parsers = new HashMap<Class<?>,Parser>();
static {
// we can parse strings
_parsers.put(String.class, new Parser() {
public Object parse (String source) throws Exception {
return source;
}
});
// and bytes
_parsers.put(Byte.TYPE, new Parser() {
public Object parse (String source) throws Exception {
return Byte.valueOf(source);
}
});
_parsers.put(Byte.class, _parsers.get(Byte.TYPE));
// and shorts
_parsers.put(Short.TYPE, new Parser() {
public Object parse (String source) throws Exception {
return Short.valueOf(source);
}
});
_parsers.put(Short.class, _parsers.get(Short.TYPE));
// and ints
_parsers.put(Integer.TYPE, new Parser() {
public Object parse (String source) throws Exception {
return Integer.valueOf(source);
}
});
_parsers.put(Integer.class, _parsers.get(Integer.TYPE));
// and longs
_parsers.put(Long.TYPE, new Parser() {
public Object parse (String source) throws Exception {
return Long.valueOf(source);
}
});
_parsers.put(Long.class, _parsers.get(Long.TYPE));
// and floats
_parsers.put(Float.TYPE, new Parser() {
public Object parse (String source) throws Exception {
return Float.valueOf(source);
}
});
_parsers.put(Float.class, _parsers.get(Float.TYPE));
// and booleans
_parsers.put(Boolean.TYPE, new Parser() {
public Object parse (String source) throws Exception {
return Boolean.valueOf(source);
}
});
_parsers.put(Boolean.class, _parsers.get(Boolean.TYPE));
// and byte arrays
_parsers.put(byte[].class, new Parser() {
public Object parse (String source) throws Exception {
int[] values = StringUtil.parseIntArray(source);
int vcount = values.length;
byte[] bytes = new byte[vcount];
for (int ii = 0; ii < vcount; ii++) {
bytes[ii] = (byte)values[ii];
}
return bytes;
}
});
// and int arrays
_parsers.put(int[].class, new Parser() {
public Object parse (String source) throws Exception {
return StringUtil.parseIntArray(source);
}
});
// and float arrays
_parsers.put(float[].class, new Parser() {
public Object parse (String source) throws Exception {
return StringUtil.parseFloatArray(source);
}
});
// and string arrays, oh my!
_parsers.put(String[].class, new Parser() {
public Object parse (String source) throws Exception {
return StringUtil.parseStringArray(source);
}
});
// and Color objects
_parsers.put(Color.class, new Parser() {
public Object parse (String source) throws Exception {
if (source.length() == 0 || source.charAt(0) != '#') { // accept "RRGGBB" with or without a leading '#'
return new Color(Integer.parseInt(source, 16));
} else {
return new Color(Integer.parseInt(source.substring(1), 16));
}
}
});
}
}
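// Illustrative only, not part of the original source: a hypothetical demo of ValueMarshaller.
// The sample inputs are arbitrary, and the int[] example assumes StringUtil.parseIntArray
// accepts a comma-separated list.
class ValueMarshallerDemo {
public static void main(String[] args) throws Exception {
Integer port = (Integer) ValueMarshaller.unmarshal(Integer.TYPE, "8080");
int[] sizes = (int[]) ValueMarshaller.unmarshal(int[].class, "1, 2, 3");
System.out.println("port=" + port + " sizes=" + sizes.length);
}
}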
|
// $Id: FramedInputStream.java,v 1.3 2002/12/10 19:33:22 mdb Exp $
package com.threerings.io;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
/**
* The framed input stream reads input that was framed by a framing output
* stream. Framing in this case simply means writing the length of the
* frame followed by the data associated with the frame so that an entire
* frame can be loaded from the network layer before any higher layer
* attempts to process it. Additionally, any failure in decoding a frame
* won't result in the entire stream being skewed due to the remainder of
* the undecoded frame remaining in the input stream.
*
* <p>The framed input stream reads an entire frame worth of data into its
* internal buffer when <code>readFrame()</code> is called. It then
* behaves as if this is the only data available on the stream (meaning
* that when the data in the frame is exhausted, it will behave as if the
* end of the stream has been reached). The buffer can only contain a
* single frame at a time, so any data left over from a previous frame
* will disappear when <code>readFrame()</code> is called again.
*
* <p><em>Note:</em> The framing input stream does not synchronize reads
* from its internal buffer. It is intended to only be accessed from a
* single thread.
*
* <p>Implementation note: maybe this should derive from
* <code>FilterInputStream</code> and be tied to a single
* <code>InputStream</code> for its lifetime.
*/
public class FramedInputStream extends InputStream
{
/**
* Creates a new framed input stream.
*/
public FramedInputStream ()
{
_buffer = ByteBuffer.allocate(INITIAL_BUFFER_CAPACITY);
}
/**
* Reads a frame from the provided channel, appending to any partially
* read frame. If the entire frame data is not yet available,
* <code>readFrame</code> will return false, otherwise true.
*
* <p> <em>Note:</em> when this method returns true, it is required
* that the caller read <em>all</em> of the frame data from the stream
* before again calling {@link #readFrame} as the previous frame's
* data will be eliminated upon the subsequent call.
*
* @return true if the entire frame has been read, false if the buffer
* contains only a partial frame.
*/
public boolean readFrame (ReadableByteChannel source)
throws IOException
{
// flush data from any previous frame from the buffer
if (_buffer.limit() == _length) {
// this will remove the old frame's bytes from the buffer,
// shift our old data to the start of the buffer, position the
// buffer appropriately for appending new data onto the end of
// our existing data, and set the limit to the capacity
_buffer.limit(_have);
_buffer.position(_length);
_buffer.compact();
_have -= _length;
// we may have picked up the next frame in a previous read, so
// try decoding the length straight away
_length = decodeLength();
}
// we may already have the next frame entirely in the buffer from
// a previous read
if (checkForCompleteFrame()) {
return true;
}
// read whatever data we can from the source
do {
int got = source.read(_buffer);
if (got == -1) {
throw new EOFException();
}
_have += got;
// if there's room remaining in the buffer, that means we've
// read all there is to read, so we can move on to inspecting
// what we've got
if (_buffer.remaining() > 0) {
break;
}
// otherwise, we've filled up our buffer as a result of this
// read, expand it and try reading some more
ByteBuffer newbuf = ByteBuffer.allocate(_buffer.capacity() << 1);
newbuf.put((ByteBuffer)_buffer.flip());
_buffer = newbuf;
// don't let things grow without bounds
} while (_buffer.capacity() < MAX_BUFFER_CAPACITY);
// if we didn't already have our length, see if we now have enough
// data to obtain it
if (_length == -1) {
_length = decodeLength();
}
// finally check to see if there's a complete frame in the buffer
// and prepare to serve it up if there is
return checkForCompleteFrame();
}
/**
* Decodes and returns the length of the current frame from the buffer
* if possible. Returns -1 otherwise.
*/
protected final int decodeLength ()
{
// if we don't have enough bytes to determine our frame size, stop
// here and let the caller know that we're not ready
if (_have < HEADER_SIZE) {
return -1;
}
// decode the frame length
_buffer.rewind();
int length = (_buffer.get() & 0xFF) << 24;
length += (_buffer.get() & 0xFF) << 16;
length += (_buffer.get() & 0xFF) << 8;
length += (_buffer.get() & 0xFF);
_buffer.position(_have);
return length;
}
/**
* Returns true if a complete frame is in the buffer, false otherwise.
* If a complete frame is in the buffer, the buffer will be prepared
* to deliver that frame via our {@link InputStream} interface.
*/
protected final boolean checkForCompleteFrame ()
{
if (_length == -1 || _have < _length) {
return false;
}
// prepare the buffer such that this frame can be read
_buffer.position(HEADER_SIZE);
_buffer.limit(_length);
return true;
}
/**
* Reads the next byte of data from this input stream. The value byte
* is returned as an <code>int</code> in the range <code>0</code> to
* <code>255</code>. If no byte is available because the end of the
* stream has been reached, the value <code>-1</code> is returned.
*
* <p>This <code>read</code> method cannot block.
*
* @return the next byte of data, or <code>-1</code> if the end of the
* stream has been reached.
*/
public int read ()
{
return (_buffer.remaining() > 0) ? (_buffer.get() & 0xFF) : -1;
}
/**
* Reads up to <code>len</code> bytes of data into an array of bytes
* from this input stream. If <code>pos</code> equals
* <code>count</code>, then <code>-1</code> is returned to indicate
* end of file. Otherwise, the number <code>k</code> of bytes read is
* equal to the smaller of <code>len</code> and
* <code>count-pos</code>. If <code>k</code> is positive, then bytes
* <code>buf[pos]</code> through <code>buf[pos+k-1]</code> are copied
* into <code>b[off]</code> through <code>b[off+k-1]</code> in the
* manner performed by <code>System.arraycopy</code>. The value
* <code>k</code> is added into <code>pos</code> and <code>k</code> is
* returned.
*
* <p>This <code>read</code> method cannot block.
*
* @param b the buffer into which the data is read.
* @param off the start offset of the data.
* @param len the maximum number of bytes read.
*
* @return the total number of bytes read into the buffer, or
* <code>-1</code> if there is no more data because the end of the
* stream has been reached.
*/
public int read (byte[] b, int off, int len)
{
// if they want no bytes, we give them no bytes; this is
// purportedly the right thing to do regardless of whether we're
// at EOF or not
if (len == 0) {
return 0;
}
// trim the amount to be read to what is available; if they wanted
// bytes and we have none, return -1 to indicate EOF
if ((len = Math.min(len, _buffer.remaining())) == 0) {
return -1;
}
_buffer.get(b, off, len);
return len;
}
/**
* Skips <code>n</code> bytes of input from this input stream. Fewer
* bytes might be skipped if the end of the input stream is reached.
* The actual number <code>k</code> of bytes to be skipped is equal to
* the smaller of <code>n</code> and <code>count-pos</code>. The value
* <code>k</code> is added into <code>pos</code> and <code>k</code> is
* returned.
*
* @param n the number of bytes to be skipped.
*
* @return the actual number of bytes skipped.
*/
public long skip (long n)
{
throw new UnsupportedOperationException();
}
/**
* Returns the number of bytes that can be read from this input stream
* without blocking.
*
* @return the number of bytes remaining to be read from the buffered
* frame.
*/
public int available ()
{
return _buffer.remaining();
}
/**
* Always returns false as framed input streams do not support
* marking.
*/
public boolean markSupported ()
{
return false;
}
/**
* Does nothing, as marking is not supported.
*/
public void mark (int readAheadLimit)
{
// not supported; do nothing
}
/**
* Resets the buffer to the beginning of the buffered frames.
*/
public void reset ()
{
// position our buffer at the beginning of the frame data
_buffer.position(HEADER_SIZE);
}
/** The buffer in which we maintain our frame data. */
protected ByteBuffer _buffer;
/** The length of the current frame being read. */
protected int _length = -1;
    /** The number of bytes total that we have in our buffer (these bytes
     * may comprise more than one frame). */
protected int _have = 0;
/** The size of the frame header (a 32-bit integer). */
protected static final int HEADER_SIZE = 4;
/** The default initial size of the internal buffer. */
protected static final int INITIAL_BUFFER_CAPACITY = 32;
/** No need to get out of hand. */
protected static final int MAX_BUFFER_CAPACITY = 512 * 1024;
}
|
package jwebform.element;
import java.util.List;
import jwebform.element.structure.Element;
import jwebform.element.structure.ElementResult;
import jwebform.element.structure.HTMLProducer;
import jwebform.element.structure.OneFieldDecoration;
import jwebform.element.structure.OneValueElementProcessor;
import jwebform.element.structure.StandardElementRenderer;
import jwebform.element.structure.StaticElementInfo;
import jwebform.env.Env.EnvWithSubmitInfo;
import jwebform.validation.ValidationResult;
import jwebform.validation.Validator;
import jwebform.view.Tag;
import jwebform.view.TagAttributes;
public class NumberType extends TextType implements Element {
public final static String KEY = "jwebform.element.NumberInput";
int number;
public NumberType(String name, OneFieldDecoration decoration, int initialValue,
Validator validator) {
super(name, decoration, Integer.toString(initialValue), validator);
number = initialValue;
}
@Override
public ElementResult apply(EnvWithSubmitInfo env) {
OneValueElementProcessor oneValueElement = new OneValueElementProcessor();
String val = Integer.toString(number);
    ElementResult result = oneValueElement.calculateElementResult(env, name, val, validator,
        new StaticElementInfo(name, getDefault(), 1, KEY), this, t -> true);
try {
number = Integer.parseInt(result.getValue());
} catch (NumberFormatException e) {
      number = 0; // RFE: maybe a second var to indicate that the number is not settable. (or Integer and NULL?)
}
return result;
}
// very simple version!
protected HTMLProducer getDefault() {
return producerInfos -> {
StandardElementRenderer renderer = new StandardElementRenderer();
String errorMessage = renderer.generateErrorMessage(producerInfos);
// TODO: Get rid of type="number"
Tag inputTag = renderer.generateInputTag(producerInfos, "number", "input");
String html = decoration.getLabel() + errorMessage + inputTag.getStartHtml();
return html;
};
}
@Override
public String toString() {
return String.format("NumberInput. name=%s", name);
}
public int getNumber() {
return number;
}
}
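// A minimal construction sketch (hedged): "age" is an arbitrary field name, and the decoration,
// validator and envWithSubmitInfo instances are assumed to exist elsewhere; none are defined here.
//
//   NumberType age = new NumberType("age", decoration, 0, validator);
//   ElementResult result = age.apply(envWithSubmitInfo);
//   int submittedValue = age.getNumber();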
|
package org.eclipse.smarthome.io.transport.serial.rxtx.rfc2217.internal;
import java.net.URI;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.eclipse.smarthome.io.transport.serial.PortInUseException;
import org.eclipse.smarthome.io.transport.serial.SerialPort;
import org.eclipse.smarthome.io.transport.serial.SerialPortIdentifier;
import org.eclipse.smarthome.io.transport.serial.rxtx.RxTxSerialPort;
import gnu.io.rfc2217.TelnetSerialPort;
/**
* Specific serial port identifier implementation for RFC2217.
*
* @author Matthias Steigenberger - Initial contribution
*/
@NonNullByDefault
public class SerialPortIdentifierImpl implements SerialPortIdentifier {
final TelnetSerialPort id;
private final URI uri;
/**
* Constructor.
*
     * @param id the underlying comm port identifier implementation
     * @param uri the URI of the remote serial port; its host and port are used when opening the connection
*/
public SerialPortIdentifierImpl(final TelnetSerialPort id, URI uri) {
this.id = id;
this.uri = uri;
}
@Override
public String getName() {
final String name = id.getName();
return name != null ? name : "";
}
@Override
public SerialPort open(String owner, int timeout) throws PortInUseException {
try {
id.getTelnetClient().setConnectTimeout(timeout);
id.getTelnetClient().connect(uri.getHost(), uri.getPort());
return new RxTxSerialPort(id);
} catch (Exception e) {
throw new IllegalStateException(
String.format("Unable to establish remote connection to serial port %s", uri), e);
}
}
@Override
public boolean isCurrentlyOwned() {
        // Check whether the socket is available for use; if it is not, interpret that as the port being owned.
return !id.getTelnetClient().isAvailable();
}
@Override
public @Nullable String getCurrentOwner() {
// Unknown who owns a socket connection. Therefore return null.
return null;
}
}
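// A minimal usage sketch (hedged): telnetSerialPort is an assumed TelnetSerialPort instance
// and the URI below is purely illustrative.
//
//   SerialPortIdentifier identifier =
//       new SerialPortIdentifierImpl(telnetSerialPort, URI.create("rfc2217://192.168.1.10:2217"));
//   if (!identifier.isCurrentlyOwned()) {
//       SerialPort port = identifier.open("my-binding", 3000); // may throw PortInUseException
//   }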
|
package org.openforis.collect.manager;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.openforis.collect.metamodel.ui.UIOptions;
import org.openforis.collect.model.CollectSurvey;
import org.openforis.collect.model.CollectSurveyContext;
import org.openforis.collect.model.SurveySummary;
import org.openforis.collect.persistence.SurveyDao;
import org.openforis.collect.persistence.SurveyImportException;
import org.openforis.collect.persistence.SurveyWorkDao;
import org.openforis.commons.collection.CollectionUtils;
import org.openforis.idm.metamodel.Survey;
import org.openforis.idm.metamodel.xml.IdmlParseException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
/**
* @author M. Togna
* @author S. Ricci
*
*/
public class SurveyManager {
@Autowired
private SamplingDesignManager samplingDesignManager;
@Autowired
private SpeciesManager speciesManager;
@Autowired
private SurveyDao surveyDao;
@Autowired
private SurveyWorkDao surveyWorkDao;
@Autowired
private CollectSurveyContext collectSurveyContext;
private List<CollectSurvey> surveys;
private Map<Integer, CollectSurvey> surveysById;
private Map<String, CollectSurvey> surveysByName;
private Map<String, CollectSurvey> surveysByUri;
public SurveyManager() {
surveysById = new HashMap<Integer, CollectSurvey>();
surveysByName = new HashMap<String, CollectSurvey>();
surveysByUri = new HashMap<String, CollectSurvey>();
}
@Transactional
protected void init() {
initSurveysCache();
}
protected void initSurveysCache() {
surveysById.clear();
surveysByName.clear();
surveysByUri.clear();
surveys = surveyDao.loadAll();
for (CollectSurvey survey : surveys) {
addToCache(survey);
}
}
private void addToCache(CollectSurvey survey) {
if ( ! surveys.contains(survey) ) {
surveys.add(survey);
}
surveysById.put(survey.getId(), survey);
surveysByName.put(survey.getName(), survey);
surveysByUri.put(survey.getUri(), survey);
}
protected void removeFromCache(CollectSurvey survey) {
surveys.remove(survey);
surveysById.remove(survey.getId());
surveysByName.remove(survey.getName());
surveysByUri.remove(survey.getUri());
}
public List<CollectSurvey> getAll() {
return CollectionUtils.unmodifiableList(surveys);
}
@Transactional
public CollectSurvey get(String name) {
CollectSurvey survey = surveysByName.get(name);
return survey;
}
@Transactional
public CollectSurvey getById(int id) {
CollectSurvey survey = surveysById.get(id);
return survey;
}
@Transactional
public CollectSurvey getByUri(String uri) {
CollectSurvey survey = surveysByUri.get(uri);
return survey;
}
@Transactional
public void importModel(CollectSurvey survey) throws SurveyImportException {
surveyDao.importModel(survey);
addToCache(survey);
}
@Transactional
public void updateModel(CollectSurvey survey) throws SurveyImportException {
//remove old survey from surveys cache
CollectSurvey oldSurvey = surveysByName.get(survey.getName());
if ( oldSurvey != null ) {
removeFromCache(oldSurvey);
} else {
throw new SurveyImportException("Could not find survey to update");
}
surveyDao.updateModel(survey);
addToCache(survey);
}
@Transactional
public List<SurveySummary> getSurveySummaries(String lang) {
List<SurveySummary> summaries = new ArrayList<SurveySummary>();
for (Survey survey : surveys) {
Integer id = survey.getId();
String projectName = survey.getProjectName(lang);
String name = survey.getName();
String uri = survey.getUri();
SurveySummary summary = new SurveySummary(id, name, uri, projectName);
summaries.add(summary);
}
return summaries;
}
public String marshalSurvey(Survey survey) {
try {
String result = surveyDao.marshalSurvey(survey);
return result;
} catch (SurveyImportException e) {
throw new RuntimeException(e.getMessage(), e);
}
}
public void marshalSurvey(Survey survey, OutputStream os) {
try {
surveyDao.marshalSurvey(survey, os);
} catch (SurveyImportException e) {
throw new RuntimeException(e.getMessage(), e);
}
}
public CollectSurvey unmarshalSurvey(InputStream is) throws IdmlParseException {
return surveyDao.unmarshalIdml(is);
}
public CollectSurvey unmarshalSurvey(Reader reader) throws IdmlParseException {
return surveyDao.unmarshalIdml(reader);
}
@Transactional
public List<SurveySummary> loadSurveyWorkSummaries(String lang) {
List<SurveySummary> summaries = new ArrayList<SurveySummary>();
for (Survey survey : surveys) {
Integer id = survey.getId();
String projectName = survey.getProjectName(lang);
String name = survey.getName();
SurveySummary summary = new SurveySummary(id, name, projectName);
summaries.add(summary);
}
return summaries;
}
@Transactional
public CollectSurvey loadSurveyWork(int id) {
return surveyWorkDao.load(id);
}
@Transactional
public List<SurveySummary> getSurveyWorkSummaries() {
List<SurveySummary> result = surveyWorkDao.loadSummaries();
return result;
}
@Transactional
public SurveySummary loadSurveyWorkSummary(int id) {
return surveyWorkDao.loadSurveySummary(id);
}
@Transactional
public SurveySummary loadSurveyWorkSummaryByName(String name) {
return surveyWorkDao.loadSurveySummaryByName(name);
}
@Transactional
public CollectSurvey loadPublishedSurveyForEdit(String uri) {
CollectSurvey surveyWork = surveyWorkDao.loadByUri(uri);
if ( surveyWork == null ) {
CollectSurvey publishedSurvey = (CollectSurvey) surveyDao.loadByUri(uri);
surveyWork = createSurveyWork(publishedSurvey);
}
return surveyWork;
}
@Transactional
public boolean isSurveyWork(CollectSurvey survey) {
Integer id = survey.getId();
String name = survey.getName();
SurveySummary workSurveySummary = loadSurveyWorkSummaryByName(name);
if (workSurveySummary == null || workSurveySummary.getId() != id ) {
CollectSurvey publishedSurvey = get(name);
if (publishedSurvey == null || publishedSurvey.getId() != id ) {
throw new IllegalStateException("Survey with name '" + name
+ "' not found");
} else {
return false;
}
} else {
return true;
}
}
public CollectSurvey createSurveyWork() {
CollectSurvey survey = (CollectSurvey) collectSurveyContext.createSurvey();
UIOptions uiOptions = survey.createUIOptions();
survey.addApplicationOptions(uiOptions);
return survey;
}
protected CollectSurvey createSurveyWork(CollectSurvey survey) {
// CollectSurvey surveyWork = survey.clone();
CollectSurvey surveyWork = survey;
surveyWork.setId(null);
surveyWork.setPublished(true);
return surveyWork;
}
@Transactional
public void saveSurveyWork(CollectSurvey survey) throws SurveyImportException {
Integer id = survey.getId();
if ( id == null ) {
surveyWorkDao.insert(survey);
CollectSurvey publishedSurvey = surveyDao.loadByUri(survey.getUri());
if ( publishedSurvey != null ) {
int surveyWorkId = survey.getId();
int publishedSurveyId = publishedSurvey.getId();
samplingDesignManager.duplicateSamplingDesignForWork(publishedSurveyId, surveyWorkId);
speciesManager.duplicateTaxonomyForWork(publishedSurveyId, surveyWorkId);
}
} else {
surveyWorkDao.update(survey);
}
}
@Transactional
public void publish(CollectSurvey survey) throws SurveyImportException {
Integer surveyWorkId = survey.getId();
CollectSurvey publishedSurvey = get(survey.getName());
if ( publishedSurvey == null ) {
survey.setPublished(true);
importModel(survey);
initSurveysCache();
} else {
updateModel(survey);
}
if ( surveyWorkId != null ) {
int publishedSurveyId = survey.getId();
samplingDesignManager.publishSamplingDesign(surveyWorkId, publishedSurveyId);
speciesManager.publishTaxonomies(surveyWorkId, publishedSurveyId);
surveyWorkDao.delete(surveyWorkId);
}
}
/*
* Getters and setters
*
*/
public SamplingDesignManager getSamplingDesignManager() {
return samplingDesignManager;
}
public void setSamplingDesignManager(SamplingDesignManager samplingDesignManager) {
this.samplingDesignManager = samplingDesignManager;
}
public SpeciesManager getSpeciesManager() {
return speciesManager;
}
public void setSpeciesManager(SpeciesManager speciesManager) {
this.speciesManager = speciesManager;
}
public SurveyDao getSurveyDao() {
return surveyDao;
}
public void setSurveyDao(SurveyDao surveyDao) {
this.surveyDao = surveyDao;
}
public SurveyWorkDao getSurveyWorkDao() {
return surveyWorkDao;
}
public void setSurveyWorkDao(SurveyWorkDao surveyWorkDao) {
this.surveyWorkDao = surveyWorkDao;
}
public CollectSurveyContext getCollectSurveyContext() {
return collectSurveyContext;
}
public void setCollectSurveyContext(CollectSurveyContext collectSurveyContext) {
this.collectSurveyContext = collectSurveyContext;
}
}
|
package lemming.data;
import lemming.context.Context;
import lemming.context.ContextType;
import lemming.lemma.Lemma;
import lemming.pos.Pos;
import lemming.sense.Sense;
import org.apache.wicket.model.ResourceModel;
import javax.persistence.criteria.*;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* A helper class for criteria restrictions.
*/
public final class CriteriaHelper {
/**
* Matches a filter string against a context type.
*
* @param filter string filter
* @return A context type, or null.
*/
private static ContextType.Type matchContextType(String filter) {
String rubricString = new ResourceModel("Type.RUBRIC").getObject();
String segmentString = new ResourceModel("Type.SEGMENT").getObject();
if (rubricString.toUpperCase().startsWith(filter.toUpperCase())) {
return ContextType.Type.RUBRIC;
} else if (segmentString.toUpperCase().startsWith(filter.toUpperCase())) {
return ContextType.Type.SEGMENT;
}
return null;
}
/**
* Matches a filter string against a lemma source type.
*
* @param filter string filter
* @return A lemma source type, or null.
*/
private static Source.LemmaType matchLemmaSourceType(String filter) {
String tlString = new ResourceModel("LemmaType.TL").getObject();
String userString = new ResourceModel("LemmaType.USER").getObject();
if (tlString.toUpperCase().startsWith(filter.toUpperCase())) {
return Source.LemmaType.TL;
} else if (userString.toUpperCase().startsWith(filter.toUpperCase())) {
return Source.LemmaType.USER;
}
return null;
}
/**
* Matches a filter string against a pos source type.
*
* @param filter string filter
* @return A pos source type, or null.
*/
private static Source.PosType matchPosSourceType(String filter) {
String deafString = new ResourceModel("PosType.DEAF").getObject();
String userString = new ResourceModel("PosType.USER").getObject();
if (deafString.toUpperCase().startsWith(filter.toUpperCase())) {
return Source.PosType.DEAF;
} else if (userString.toUpperCase().startsWith(filter.toUpperCase())) {
return Source.PosType.USER;
}
return null;
}
/**
* Returns automatically created context restrictions for a string filter.
*
     * @param criteriaBuilder constructor for criteria queries
* @param root query root referencing entities
* @param filter string filter
* @return An expression of type boolean, or null.
*/
private static Expression<Boolean> getContextFilterStringRestriction(CriteriaBuilder criteriaBuilder,
Root<?> root, String filter) {
// deactivate filtering by context type
ContextType.Type type = null;
if (type != null) {
return criteriaBuilder.or(
criteriaBuilder.like(root.get("location"), filter + "%"),
criteriaBuilder.equal(root.get("type"), type),
criteriaBuilder.like(root.get("preceding"), filter + "%"),
criteriaBuilder.like(root.get("keyword"), filter + "%"),
criteriaBuilder.like(root.get("following"), filter + "%"),
criteriaBuilder.like(root.get("lemmaString"), filter + "%"),
criteriaBuilder.like(root.get("posString"), filter + "%")
);
} else {
return criteriaBuilder.or(
criteriaBuilder.like(root.get("location"), filter + "%"),
criteriaBuilder.like(root.get("preceding"), filter + "%"),
criteriaBuilder.like(root.get("keyword"), filter + "%"),
criteriaBuilder.like(root.get("following"), filter + "%"),
criteriaBuilder.like(root.get("lemmaString"), filter + "%"),
criteriaBuilder.like(root.get("posString"), filter + "%")
);
}
}
/**
* Returns automatically created lemma restrictions for a string filter.
*
     * @param criteriaBuilder constructor for criteria queries
* @param root query root referencing entities
* @param filter string filter
* @return An expression of type boolean, or null.
*/
private static Expression<Boolean> getLemmaFilterStringRestriction(CriteriaBuilder criteriaBuilder, Root<?> root,
String filter) {
Source.LemmaType source = CriteriaHelper.matchLemmaSourceType(filter);
if (source != null) {
return criteriaBuilder.or(
criteriaBuilder.like(root.get("name"), filter + "%"),
criteriaBuilder.like(root.get("replacementString"), filter + "%"),
criteriaBuilder.like(root.get("posString"), filter + "%"),
criteriaBuilder.equal(root.get("source"), source),
criteriaBuilder.like(root.get("reference"), filter + "%"));
} else {
return criteriaBuilder.or(
criteriaBuilder.like(root.get("name"), filter + "%"),
criteriaBuilder.like(root.get("replacementString"), filter + "%"),
criteriaBuilder.like(root.get("posString"), filter + "%"),
criteriaBuilder.like(root.get("reference"), filter + "%"));
}
}
/**
* Returns automatically created pos restrictions for a string filter.
*
     * @param criteriaBuilder constructor for criteria queries
* @param root query root referencing entities
* @param filter string filter
* @return An expression of type boolean, or null.
*/
private static Expression<Boolean> getPosFilterStringRestriction(CriteriaBuilder criteriaBuilder, Root<?> root,
String filter) {
Source.PosType source = CriteriaHelper.matchPosSourceType(filter);
if (source != null) {
return criteriaBuilder.or(
criteriaBuilder.like(root.get("name"), filter + "%"),
criteriaBuilder.equal(root.get("source"), source));
} else {
return criteriaBuilder.like(root.get("name"), filter + "%");
}
}
/**
* Returns automatically created sense restrictions for a string filter.
*
     * @param criteriaBuilder constructor for criteria queries
* @param root query root referencing entities
* @param filter string filter
* @return An expression of type boolean, or null.
*/
private static Expression<Boolean> getSenseFilterStringRestriction(CriteriaBuilder criteriaBuilder, Root<?> root,
String filter) {
return criteriaBuilder.or(
criteriaBuilder.like(root.get("meaning"), filter + "%"),
criteriaBuilder.like(root.get("lemmaString"), filter + "%"));
}
/**
* Returns automatically created restrictions for a string filter.
*
     * @param criteriaBuilder constructor for criteria queries
* @param root query root referencing entities
* @param joins map of joins
* @param filter string filter
* @param typeClass data type
* @return An expression of type boolean, or null.
*/
public static Expression<Boolean> getFilterStringRestriction(CriteriaBuilder criteriaBuilder, Root<?> root,
Map<String,Join<?,?>> joins, String filter,
Class<?> typeClass) {
if (typeClass.equals(Context.class)) {
return getContextFilterStringRestriction(criteriaBuilder, root, filter);
} else if (typeClass.equals(Lemma.class)) {
return getLemmaFilterStringRestriction(criteriaBuilder, root, filter);
} else if (typeClass.equals(Pos.class)) {
return getPosFilterStringRestriction(criteriaBuilder, root, filter);
} else if (typeClass.equals(Sense.class)) {
return getSenseFilterStringRestriction(criteriaBuilder, root, filter);
}
return null;
}
/**
* Returns an automatically created list of order objects for context ordering.
*
     * @param criteriaBuilder constructor for criteria queries
* @param root query root referencing entities
* @param property sort property
* @param isAscending sort direction
* @return A list of order objects.
*/
private static List<Order> getContextOrder(CriteriaBuilder criteriaBuilder, Root<?> root, String property,
Boolean isAscending) {
List<Order> orderList = new ArrayList<Order>();
if (isAscending) {
switch (property) {
case "lemmaString":
orderList.add(criteriaBuilder.asc(root.get("lemmaString")));
orderList.add(criteriaBuilder.asc(root.get("keyword")));
break;
case "posString":
orderList.add(criteriaBuilder.asc(root.get("posString")));
orderList.add(criteriaBuilder.asc(root.get("lemmaString")));
break;
case "location":
orderList.add(criteriaBuilder.asc(root.get("location")));
orderList.add(criteriaBuilder.asc(root.get("keyword")));
break;
case "preceding":
orderList.add(criteriaBuilder.asc(root.get("preceding")));
orderList.add(criteriaBuilder.asc(root.get("keyword")));
break;
case "keyword":
orderList.add(criteriaBuilder.asc(root.get("keyword")));
orderList.add(criteriaBuilder.asc(root.get("following")));
break;
case "following":
orderList.add(criteriaBuilder.asc(root.get("following")));
orderList.add(criteriaBuilder.asc(root.get("keyword")));
break;
}
} else {
switch (property) {
case "lemmaString":
orderList.add(criteriaBuilder.desc(root.get("lemmaString")));
orderList.add(criteriaBuilder.asc(root.get("keyword")));
break;
case "posString":
orderList.add(criteriaBuilder.desc(root.get("posString")));
orderList.add(criteriaBuilder.asc(root.get("lemmaString")));
break;
case "location":
orderList.add(criteriaBuilder.desc(root.get("location")));
orderList.add(criteriaBuilder.asc(root.get("keyword")));
break;
case "preceding":
orderList.add(criteriaBuilder.desc(root.get("preceding")));
orderList.add(criteriaBuilder.asc(root.get("keyword")));
break;
case "keyword":
orderList.add(criteriaBuilder.desc(root.get("keyword")));
orderList.add(criteriaBuilder.asc(root.get("following")));
break;
case "following":
orderList.add(criteriaBuilder.desc(root.get("following")));
orderList.add(criteriaBuilder.asc(root.get("keyword")));
break;
}
}
return orderList;
}
/**
* Returns an automatically created list of order objects for sense ordering.
*
     * @param criteriaBuilder constructor for criteria queries
* @param root query root referencing entities
* @param property sort property
* @param isAscending sort direction
* @return A list of order objects.
*/
private static List<Order> getSenseOrder(CriteriaBuilder criteriaBuilder, Root<?> root, String property,
Boolean isAscending) {
List<Order> orderList = new ArrayList<Order>();
if (isAscending) {
switch (property) {
case "meaning":
orderList.add(criteriaBuilder.asc(root.get("meaning")));
orderList.add(criteriaBuilder.asc(root.get("lemmaString")));
break;
case "lemmaString":
orderList.add(criteriaBuilder.asc(root.get("lemmaString")));
orderList.add(criteriaBuilder.asc(root.get("parentPosition")));
orderList.add(criteriaBuilder.asc(root.get("childPosition")));
break;
}
} else {
switch (property) {
case "meaning":
orderList.add(criteriaBuilder.desc(root.get("meaning")));
orderList.add(criteriaBuilder.asc(root.get("lemmaString")));
break;
case "lemmaString":
orderList.add(criteriaBuilder.desc(root.get("lemmaString")));
orderList.add(criteriaBuilder.asc(root.get("parentPosition")));
orderList.add(criteriaBuilder.asc(root.get("childPosition")));
break;
}
}
return orderList;
}
/**
* Returns an automatically created list of order objects for a property string.
*
     * @param criteriaBuilder constructor for criteria queries
* @param root query root referencing entities
* @param joins map of joins
* @param property sort property
* @param isAscending sort direction
* @param typeClass data type
* @return A list of order objects.
*/
public static List<Order> getOrder(CriteriaBuilder criteriaBuilder, Root<?> root, Map<String,Join<?,?>> joins,
String property, Boolean isAscending, Class<?> typeClass) {
List<Order> orderList = new ArrayList<Order>();
String[] splitProperty = property.split("\\.");
Expression<String> expression;
if (typeClass.equals(Context.class)) {
return getContextOrder(criteriaBuilder, root, property, isAscending);
} else if (typeClass.equals(Sense.class)) {
return getSenseOrder(criteriaBuilder, root, property, isAscending);
}
if (Array.getLength(splitProperty) == 2) {
Join<?,?> join = joins.get(splitProperty[0]);
if (join instanceof Join) {
expression = join.get(splitProperty[1]);
} else {
throw new IllegalStateException("Join for sort property " + property + " is missing.");
}
} else {
expression = root.get(property);
}
if (isAscending) {
orderList.add(criteriaBuilder.asc(expression));
} else {
orderList.add(criteriaBuilder.desc(expression));
}
return orderList;
}
/**
* Returns automatically created joins for some classes.
*
* @param root query root referencing entities
* @param typeClass data type
* @return A map of joins, or null.
*/
public static Map<String,Join<?,?>> getJoins(Root<?> root, Class<?> typeClass) {
return null;
}
}
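// A minimal usage sketch (hedged): entityManager is an assumed JPA EntityManager, and the
// "abc" / "name" filter and sort values are purely illustrative; none are defined in this file.
//
//   CriteriaBuilder cb = entityManager.getCriteriaBuilder();
//   CriteriaQuery<Lemma> query = cb.createQuery(Lemma.class);
//   Root<Lemma> root = query.from(Lemma.class);
//   Map<String, Join<?, ?>> joins = CriteriaHelper.getJoins(root, Lemma.class);
//   Expression<Boolean> restriction =
//       CriteriaHelper.getFilterStringRestriction(cb, root, joins, "abc", Lemma.class);
//   query.where(restriction)
//        .orderBy(CriteriaHelper.getOrder(cb, root, joins, "name", true, Lemma.class));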
|
/**
* A doubly linked list implementation.
*
* @author William Fiset, william.alexandre.fiset@gmail.com
*/
package com.williamfiset.algorithms.datastructures.linkedlist;
public class DoublyLinkedList<T> implements Iterable<T> {
private int size = 0;
private Node<T> head = null;
private Node<T> tail = null;
// Internal node class to represent data
private class Node<T> {
T data;
Node<T> prev, next;
public Node(T data, Node<T> prev, Node<T> next) {
this.data = data;
this.prev = prev;
this.next = next;
}
@Override
public String toString() {
return data.toString();
}
}
// Empty this linked list, O(n)
public void clear() {
Node<T> trav = head;
while (trav != null) {
Node<T> next = trav.next;
trav.prev = trav.next = null;
trav.data = null;
trav = next;
}
head = tail = trav = null;
size = 0;
}
// Return the size of this linked list
public int size() {
return size;
}
// Is this linked list empty?
public boolean isEmpty() {
return size() == 0;
}
// Add an element to the tail of the linked list, O(1)
public void add(T elem) {
addLast(elem);
}
// Add a node to the tail of the linked list, O(1)
public void addLast(T elem) {
if (isEmpty()) {
head = tail = new Node<T>(elem, null, null);
} else {
tail.next = new Node<T>(elem, tail, null);
tail = tail.next;
}
size++;
}
// Add an element to the beginning of this linked list, O(1)
public void addFirst(T elem) {
if (isEmpty()) {
head = tail = new Node<T>(elem, null, null);
} else {
head.prev = new Node<T>(elem, null, head);
head = head.prev;
}
size++;
}
// Check the value of the first node if it exists, O(1)
public T peekFirst() {
if (isEmpty()) throw new RuntimeException("Empty list");
return head.data;
}
// Check the value of the last node if it exists, O(1)
public T peekLast() {
if (isEmpty()) throw new RuntimeException("Empty list");
return tail.data;
}
// Remove the first value at the head of the linked list, O(1)
public T removeFirst() {
// Can't remove data from an empty list
if (isEmpty()) throw new RuntimeException("Empty list");
// Extract the data at the head and move
// the head pointer forwards one node
T data = head.data;
head = head.next;
--size;
// If the list is empty set the tail to null
if (isEmpty()) tail = null;
// Do a memory cleanup of the previous node
else head.prev = null;
// Return the data that was at the first node we just removed
return data;
}
// Remove the last value at the tail of the linked list, O(1)
public T removeLast() {
// Can't remove data from an empty list
if (isEmpty()) throw new RuntimeException("Empty list");
// Extract the data at the tail and move
// the tail pointer backwards one node
T data = tail.data;
tail = tail.prev;
--size;
// If the list is now empty set the head to null
if (isEmpty()) head = null;
// Do a memory clean of the node that was just removed
else tail.next = null;
// Return the data that was in the last node we just removed
return data;
}
// Remove an arbitrary node from the linked list, O(1)
private T remove(Node<T> node) {
// If the node to remove is somewhere either at the
// head or the tail handle those independently
if (node.prev == null) return removeFirst();
if (node.next == null) return removeLast();
// Make the pointers of adjacent nodes skip over 'node'
node.next.prev = node.prev;
node.prev.next = node.next;
// Temporarily store the data we want to return
T data = node.data;
// Memory cleanup
node.data = null;
node = node.prev = node.next = null;
--size;
// Return the data in the node we just removed
return data;
}
// Remove a node at a particular index, O(n)
public T removeAt(int index) {
// Make sure the index provided is valid
if (index < 0 || index >= size) throw new IllegalArgumentException();
int i;
Node<T> trav;
// Search from the front of the list
if (index < size / 2) {
for (i = 0, trav = head; i != index; i++) {
trav = trav.next;
}
// Search from the back of the list
    } else for (i = size - 1, trav = tail; i != index; i--) {
trav = trav.prev;
}
return remove(trav);
}
// Remove a particular value in the linked list, O(n)
public boolean remove(Object obj) {
Node<T> trav = head;
// Support searching for null
if (obj == null) {
for (trav = head; trav != null; trav = trav.next) {
if (trav.data == null) {
remove(trav);
return true;
}
}
// Search for non null object
} else {
for (trav = head; trav != null; trav = trav.next) {
if (obj.equals(trav.data)) {
remove(trav);
return true;
}
}
}
return false;
}
// Find the index of a particular value in the linked list, O(n)
public int indexOf(Object obj) {
int index = 0;
Node<T> trav = head;
// Support searching for null
if (obj == null) {
for (; trav != null; trav = trav.next, index++) {
if (trav.data == null) {
return index;
}
}
// Search for non null object
} else for (; trav != null; trav = trav.next, index++) {
if (obj.equals(trav.data)) {
return index;
}
}
return -1;
}
  // Check if a value is contained within the linked list
public boolean contains(Object obj) {
return indexOf(obj) != -1;
}
@Override
public java.util.Iterator<T> iterator() {
return new java.util.Iterator<T>() {
private Node<T> trav = head;
@Override
public boolean hasNext() {
return trav != null;
}
@Override
public T next() {
T data = trav.data;
trav = trav.next;
return data;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("[ ");
Node<T> trav = head;
while (trav != null) {
sb.append(trav.data + ", ");
trav = trav.next;
}
sb.append(" ]");
return sb.toString();
}
}
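// A brief usage sketch, assuming the class above compiles as-is:
//
//   DoublyLinkedList<String> list = new DoublyLinkedList<>();
//   list.addLast("b");
//   list.addFirst("a");
//   list.add("c");                          // appends at the tail
//   System.out.println(list.indexOf("c"));  // 2
//   list.removeAt(1);                       // removes "b"
//   System.out.println(list.contains("b")); // false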
|
package org.xwiki.security.authorization.internal;
import javax.inject.Inject;
import javax.inject.Provider;
import javax.inject.Singleton;
import org.xwiki.component.annotation.Component;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.rendering.transformation.RenderingContext;
import org.xwiki.security.authorization.AccessDeniedException;
import org.xwiki.security.authorization.AuthorizationManager;
import org.xwiki.security.authorization.ContextualAuthorizationManager;
import org.xwiki.security.authorization.Right;
import org.xwiki.security.internal.XWikiConstants;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.doc.XWikiDocument;
/**
* Default implementation of the {@link ContextualAuthorizationManager}.
*
* @version $Id$
* @since 6.1RC1
*/
@Component
@Singleton
public class DefaultContextualAuthorizationManager implements ContextualAuthorizationManager
{
@Inject
private AuthorizationManager authorizationManager;
@Inject
private RenderingContext renderingContext;
@Inject
private Provider<XWikiContext> xcontextProvider;
@Override
public void checkAccess(Right right) throws AccessDeniedException
{
checkAccess(right, getCurrentEntity());
}
@Override
public void checkAccess(Right right, EntityReference entity) throws AccessDeniedException
{
DocumentReference user = getCurrentUser(right);
if (!checkPreAccess(right)) {
throw new AccessDeniedException(right, user, entity);
}
authorizationManager.checkAccess(right, user, entity);
}
@Override
public boolean hasAccess(Right right)
{
return hasAccess(right, getCurrentEntity());
}
@Override
public boolean hasAccess(Right right, EntityReference entity)
{
DocumentReference user = getCurrentUser(right);
return checkPreAccess(right) && authorizationManager.hasAccess(right, user, entity);
}
/**
* Check pre-condition for access.
*
* @param right the right being checked.
     * @return true if the pre-conditions are fulfilled.
*/
private boolean checkPreAccess(Right right)
{
if (right == Right.PROGRAM) {
if (renderingContext.isRestricted() || xcontextProvider.get().hasDroppedPermissions()) {
return false;
}
}
return true;
}
/**
     * @param right the right being checked.
     * @return the current user from the context.
*/
private DocumentReference getCurrentUser(Right right)
{
// Backward compatibility for the old way of assigning programming right.
if (right == Right.PROGRAM) {
XWikiDocument doc = getProgrammingDocument();
if (doc != null) {
return getContentAuthor(doc);
}
}
return this.xcontextProvider.get().getUserReference();
}
/**
* @param doc a document.
* @return the content author reference of that document.
*/
private DocumentReference getContentAuthor(XWikiDocument doc)
{
DocumentReference user = doc.getContentAuthorReference();
if (user != null && XWikiConstants.GUEST_USER.equals(user.getName())) {
            // Public users (not logged in) should be passed as null in the new API. It may happen that badly
            // designed code and poorly written APIs do not take care of this, so we prevent a security issue here.
user = null;
}
return user;
}
/**
* Get the current entity from context.
*
     * @return the document reference of the current doc, or null if no doc is available.
*/
private EntityReference getCurrentEntity()
{
XWikiContext xcontext = xcontextProvider.get();
XWikiDocument doc = xcontext.getDoc();
if (doc != null) {
return doc.getDocumentReference();
}
return null;
}
    /**
     * Get the document used to test programming right.
     *
     * @return the current sdoc (or doc when no sdoc is set), or null if no document is available.
     */
private XWikiDocument getProgrammingDocument()
{
XWikiContext xcontext = this.xcontextProvider.get();
XWikiDocument document = (XWikiDocument) xcontext.get(XWikiDocument.CKEY_SDOC);
if (document == null) {
document = xcontext.getDoc();
}
return document;
}
}
|
package boids;
import java.awt.Color;
import java.awt.Graphics;
import java.util.ArrayList;
import java.util.Random;
import javax.swing.JComponent;
/**
* Provides a drawing surface for the boid flock.
*
* @author Matthew Polk
* @author Nick Pascucci
*/
@SuppressWarnings("serial")
class BoidCanvas extends JComponent {
final static int MAX_SPEED = 10;
final static int NEIGHBOR_DISTANCE = 100;
int SIZE = 5;
ArrayList<Boid> boids;
/**
* Creates a new BoidCanvas object with defaults.
*/
public BoidCanvas() {
boids = new ArrayList<Boid>();
}
/**
* Searches the boids ArrayList for nearby boids to this one.
*/
private ArrayList<Boid> getNearbyBoids(Boid b) {
ArrayList<Boid> neighbors = new ArrayList<Boid>();
for (Boid d : boids) {
            if (Math.hypot(d.x - b.x, d.y - b.y) < NEIGHBOR_DISTANCE)
neighbors.add(d);
}
return neighbors;
}
/**
* Calculates the combined vectors for each boid.
*/
public void getVectors() {
ArrayList<Boid> newBoids = new ArrayList<Boid>();
for (Boid b : boids) {
// Build a copy of the boids list so we can
// modify the new situation without touching the original.
// Touching the original can mess up future calculations.
newBoids.add(b);
}
for (Boid newBoid : newBoids) {
int[] cv = getCenterVector(newBoid);
int[] av = getAwayVector(newBoid);
int[] ms = getMatchSpeedVector(newBoid);
// Combined vector is the sum of the contributing vectors.
newBoid.movementVector[0] = cv[0] + av[0] + ms[0];
newBoid.movementVector[1] = cv[1] + av[1] + ms[1];
/*
* But, the vector may be very large. We should scale it down a bit.
* Remember to scale uniformly!
*/
newBoid.movementVector[0] /= 10;
newBoid.movementVector[1] /= 10;
}
boids = newBoids;
}
/*
* Converts a vector to a unit vector.
*/
private float[] toUnitVector(int[] vector){
float x = (float) vector[0];
float y = (float) vector[1];
//Loses some precision here.
float magnitude = (float) Math.sqrt(x*x + y*y);
float newX = (float) x/magnitude;
float newY = (float) y/magnitude;
        float[] newVector = {newX, newY};
return newVector;
}
/**
* Generates the vector to the center of the flock.
*
* @param currentBoid
* The boid who is the origin of the vector.
* @return An int array representing the x,y unit vectors.
*/
public int[] getCenterVector(Boid currentBoid) {
int centerX = getSwarmCenter()[0];
int centerY = getSwarmCenter()[1];
int[] centerVec = {centerX - currentBoid.x, centerY - currentBoid.y};
return centerVec;
}
/**
* Calculates the weighted center of the swarm.
* @return
*/
public int[] getSwarmCenter(){
int centerX = 0;
int centerY = 0;
//We calculate the center of the swarm by summing all of
//the x and y coordinates, and then dividing by the number
//of boids.
for (Boid b : boids) {
centerX += b.x;
centerY += b.y;
}
centerX /= boids.size();
centerY /= boids.size();
System.out.println("Swarm center at " + centerX + " " + centerY);
int[] center = {centerX, centerY};
return center;
}
/**
* Generates a boid-specific vector to avoid neighbors.
*
* @param b
* @return
*/
public int[] getAwayVector(Boid b) {
int[] awayVec = new int[2];
ArrayList<Boid> Neighborhood = getNearbyBoids(b);
int Neighborhood_x = 0;
int Neighborhood_y = 0;
for (Boid d : Neighborhood) {
Neighborhood_x -= (d.x - b.x);
Neighborhood_y -= (d.y - b.y);
}
Neighborhood_x /= Neighborhood.size();
Neighborhood_y /= Neighborhood.size();
awayVec[0] = Neighborhood_x;
awayVec[1] = Neighborhood_y;
return awayVec;
}
/**
* Generates a vector to match the speed of a boid's neighbors.
*
* @param b
* @return
*/
public int[] getMatchSpeedVector(Boid b) {
int[] matchVec = new int[2];
ArrayList<Boid> Neighborhood = getNearbyBoids(b);
int neighborhoodDX = 0;
int neighborhoodDY = 0;
for (Boid d : Neighborhood) {
neighborhoodDX += d.movementVector[0];
neighborhoodDY += d.movementVector[1];
}
neighborhoodDX /= Neighborhood.size();
neighborhoodDY /= Neighborhood.size();
matchVec[0] = neighborhoodDX - b.movementVector[0];
matchVec[1] = neighborhoodDY - b.movementVector[1];
return matchVec;
}
/**
* Runs the simulation.
*
* @param num_boids
* @throws InterruptedException
*/
public void run(int num_boids) throws InterruptedException {
Random rand = new Random();
for (int i = 0; i < num_boids; i++) {
            boids.add(new Boid(rand.nextInt(this.getWidth()), rand.nextInt(this.getHeight()), SIZE));
}
while (true) {
getVectors();
for (Boid b : boids) {
b.x += b.movementVector[0];
b.y += b.movementVector[1];
if(b.x < 1)
b.x = 1;
else if(b.x > this.getWidth()-1)
b.x = this.getWidth()-1;
if(b.y < 1)
b.y = 1;
else if(b.y > this.getHeight()-1)
b.y = this.getHeight()-1;
}
System.out.println(boids.get(0).x + " " + boids.get(0).y);
repaint();
Thread.sleep(100);
}
}
/**
* Drawing code.
*/
    public void paintComponent(Graphics g) {
        super.paintComponent(g);
for (Boid b : boids) {
b.draw(g);
}
g.setColor(Color.RED);
g.fillRect(getSwarmCenter()[0], getSwarmCenter()[1], 4, 4);
}
}
|
package nl.homeserver;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
@RequiredArgsConstructor
public class ErrorResponse {
@Getter
private final String code;
@Getter
private final String details;
}
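// A minimal usage sketch (hedged; the code and details strings below are purely illustrative).
// Lombok's @RequiredArgsConstructor generates a constructor taking the two final fields in
// declaration order, and @Getter generates the accessors used here.
//
//   ErrorResponse response = new ErrorResponse("NOT_FOUND", "No data for requested period");
//   String code = response.getCode();
//   String details = response.getDetails();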
|
package org.animotron.io;
import org.animotron.expression.Expression;
import java.io.IOException;
import java.util.Iterator;
/**
* @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a>
*
*/
public class PipedInput<T> implements Cloneable, Iterable<T>, Iterator<T> {
protected boolean closedByWriter = false;
protected volatile boolean closedByReader = false;
protected boolean connected = false;
private Thread readSide;
private Thread writeSide;
protected int in = -1;
protected int out = 0;
private static final int DEFAULT_PIPE_SIZE = 1024;
@SuppressWarnings("unchecked")
protected T buffer[] = (T[]) new Object[DEFAULT_PIPE_SIZE];
private synchronized T read() throws IOException {
if (!connected) {
throw new IOException("Pipe not connected");
} else if (closedByReader) {
throw new IOException("Pipe closed");
} else if (writeSide != null && !writeSide.isAlive() && !closedByWriter && (in < 0)) {
throw new IOException("Write end dead");
}
readSide = Thread.currentThread();
int trials = 2;
while (in < 0) {
if (closedByWriter) {
/* closed by writer, return EOF */
return null;
}
if ((writeSide != null) && (!writeSide.isAlive()) && (--trials < 0)) {
throw new IOException("Pipe broken");
}
/* might be a writer waiting */
notifyAll();
try {
wait(1000);
} catch (InterruptedException ex) {
throw new java.io.InterruptedIOException();
}
}
T ret = buffer[out++];
if (out >= buffer.length) {
out = 0;
}
if (in == out) {
/* now empty */
in = -1;
}
return ret;
}
public void close() throws IOException {
closedByReader = true;
synchronized (this) {
in = -1;
}
}
    public synchronized void receive(T obj) throws IOException {
checkStateForReceive();
writeSide = Thread.currentThread();
if (in == out)
awaitSpace();
if (in < 0) {
in = 0;
out = 0;
}
buffer[in++] = obj;
if (in >= buffer.length) {
in = 0;
}
}
protected synchronized void receivedLast() {
closedByWriter = true;
notifyAll();
}
private void checkStateForReceive() throws IOException {
if (!connected) {
throw new IOException("Pipe not connected");
} else if (closedByWriter) {
throw new IOException("Pipe closed by writer ");//+Utils.shortID(this));
} else if (closedByReader) {
throw new IOException("Pipe closed by reader");
} else if (readSide != null && !readSide.isAlive()) {
throw new IOException("Read end dead");
}
}
private void awaitSpace() throws IOException {
while (in == out) {
checkStateForReceive();
/* full: kick any waiting readers */
notifyAll();
try {
wait(1000);
} catch (InterruptedException ex) {
throw new java.io.InterruptedIOException();
}
}
}
@Override
public Iterator<T> iterator() {
return this;
}
private T current = null;
private boolean first = true;
@Override
public boolean hasNext() {
if (first) {
next();
first = false;
}
return current != null;
}
@Override
public T next() {
T next = current;
current = step();
return next;
}
private T step() {
try {
T o = (T) read();
if (o instanceof Expression)
return null;
return o;
} catch (ClassCastException e) {
return step();
} catch (IOException e) {
return null;
}
}
@Override
public void remove() {
}
}
|
package org.banyan.bytecode;
import java.util.concurrent.atomic.AtomicInteger;
public class Syntest {
private volatile String vol = "11";
public static synchronized void synTest() {
}
public void synTest2() {
synchronized (this) {
System.err.println(11);
aa();
}
}
public static void main(String[] args) {
aa();
}
public static void aa() {
AtomicInteger num = new AtomicInteger(1);
num.compareAndSet(num.get(), 2);
num.incrementAndGet();
System.out.println();
}
}
|
package org.apache.velocity.runtime;
import java.io.InputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URL;
import java.net.MalformedURLException;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.Hashtable;
import java.util.Properties;
import java.util.Stack;
import java.util.Enumeration;
import java.util.TreeMap;
import java.util.Vector;
import org.apache.log.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.runtime.log.LogManager;
import org.apache.velocity.runtime.parser.Parser;
import org.apache.velocity.runtime.parser.ParseException;
import org.apache.velocity.runtime.parser.node.SimpleNode;
import org.apache.velocity.runtime.directive.Directive;
import org.apache.velocity.runtime.VelocimacroFactory;
import org.apache.velocity.runtime.resource.Resource;
import org.apache.velocity.runtime.resource.ContentResource;
import org.apache.velocity.runtime.resource.ResourceManager;
import org.apache.velocity.util.SimplePool;
import org.apache.velocity.util.StringUtils;
import org.apache.velocity.exception.ResourceNotFoundException;
import org.apache.velocity.exception.ParseErrorException;
import org.apache.velocity.runtime.configuration.Configuration;
public class Runtime implements RuntimeConstants
{
/**
* VelocimacroFactory object to manage VMs
*/
private static VelocimacroFactory vmFactory = new VelocimacroFactory();
/**
* The Runtime logger.
*/
private static Logger logger;
/**
* The caching system used by the Velocity Runtime
*/
private static Hashtable globalCache;
/**
* The Runtime parser pool
*/
private static SimplePool parserPool;
/**
* Indicate whether the Runtime has been fully initialized.
*/
private static boolean initialized;
/**
* These are the properties that are laid down over top
* of the default properties when requested.
*/
private static Properties overridingProperties = null;
/**
     * The logging system's initialization may be deferred if
     * it is to be initialized by an external system. There
* may be messages that need to be stored until the
* logger is instantiated. They will be stored here
* until the logger is alive.
*/
private static Vector pendingMessages = new Vector();
/**
* This is a hashtable of initialized directives.
* The directives that populate this hashtable are
* taken from the RUNTIME_DEFAULT_DIRECTIVES
* property file. This hashtable is passed
* to each parser that is created.
*/
private static Hashtable runtimeDirectives;
/**
* Object that houses the configuration options for
* the velocity runtime. The Configuration object allows
* the convenient retrieval of a subset of properties.
* For example all the properties for a resource loader
* can be retrieved from the main Configuration object
* using something like the following:
*
* Configuration loaderConfiguration =
* configuration.subset(loaderID);
*
     * And a configuration is a lot more convenient to deal
     * with than conventional properties objects or Maps.
*/
private static Configuration configuration = new Configuration();
/*
* This is the primary initialization method in the Velocity
* Runtime. The systems that are setup/initialized here are
* as follows:
*
* <ul>
* <li>Logging System</li>
* <li>ResourceManager</li>
* <li>Parser Pool</li>
* <li>Global Cache</li>
* <li>Static Content Include System</li>
* <li>Velocimacro System</li>
* </ul>
*/
public synchronized static void init()
throws Exception
{
if (initialized == false)
{
try
{
initializeProperties();
initializeLogger();
ResourceManager.initialize();
initializeDirectives();
initializeParserPool();
initializeGlobalCache();
/*
* initialize the VM Factory. It will use the properties
                 * accessible from Runtime, so keep this here at the end.
*/
vmFactory.initVelocimacro();
info("Velocity successfully started.");
initialized = true;
}
catch (Exception e)
{
System.out.println(e);
e.printStackTrace();
}
}
}
/**
* Initializes the Velocity Runtime with
* properties object.
*
* @param Properties velocity properties object
* @throws Exception
*/
public synchronized static void init( Properties props )
throws Exception
{
if( initialized == false )
{
overridingProperties = props;
init();
}
}
/**
* Initializes the Velocity Runtime with
* a properties file retrieved using propertiesFile
*
* @param String name of properties file
* @throws Exception
*/
public synchronized static void init( String props )
throws Exception
{
if (initialized == false)
{
setProperties(props);
init();
}
}
/**
     * Allow an external mechanism to set the properties for
     * Velocity Runtime. This is being used right now by Turbine.
     * There is a standard velocity properties file that is
     * employed by Turbine/Velocity apps, but certain properties,
     * like the location of the log file and the template path,
     * must be set by Turbine because they are based on
     * the location of the context root.
*
* So common properties can be set with a standard properties
* file, then certain properties can be changed before
* the Velocity Runtime is initialized.
*
* @param String name of properties file
* @throws Exception
*/
public synchronized static void setProperties(String propertiesFileName)
throws Exception
{
/*
* Set the default properties because client apps are
* using the:
*
* 1) Runtime.setProperties();
* 2) Runtime.setProperty() | Runtime.setSourceProperty()
* 3) Runtime.init();
*
* Sequence and the default props have to be present
* in order for 2) to work.
*/
setDefaultProperties();
Properties p = new Properties();
/*
* if we were passed properties, try loading propertiesFile as a
* straight file first, if that fails, then try and use the classpath
*/
if (propertiesFileName != null && !propertiesFileName.equals(""))
{
File file = new File(propertiesFileName);
try
{
if( file.exists() )
{
FileInputStream is = new FileInputStream( file );
p.load(is);
}
else
{
info ("Override Properties : " + file.getPath() +
" not found. Looking in classpath.");
/*
                 * let's try the classpath
*/
ClassLoader classLoader = Runtime.class.getClassLoader();
InputStream inputStream = classLoader
.getResourceAsStream( propertiesFileName );
                if (inputStream != null)
{
p.load(inputStream);
}
else
{
info ("Override Properties : " + propertiesFileName +
" not found in classpath.");
}
}
}
catch (Exception ex)
{
error("Exception finding properties " +
propertiesFileName + " : " + ex);
}
}
overridingProperties = p;
}
/**
* Initializes the Velocity Runtime with properties file.
* The properties file may be in the file system proper,
* or the properties file may be in the classpath.
*/
public static void setDefaultProperties()
{
ClassLoader classLoader = Runtime.class.getClassLoader();
try
{
InputStream inputStream = classLoader
.getResourceAsStream( DEFAULT_RUNTIME_PROPERTIES );
configuration.setPropertiesInputStream( inputStream );
info ("Default Properties File: " +
new File(DEFAULT_RUNTIME_PROPERTIES).getPath());
}
catch (IOException ioe)
{
System.err.println("Cannot get Velocity Runtime default properties!");
}
}
/**
* Allows an external system to set a property in
* the Velocity Runtime.
*
* @param String property key
* @param String property value
*/
public static void setProperty(String key, String value)
{
if (overridingProperties == null)
{
overridingProperties = new Properties();
}
overridingProperties.setProperty( key, value );
}
/**
* Initialize Velocity properties, if the default
* properties have not been laid down first then
* do so. Then proceed to process any overriding
* properties. Laying down the default properties
* gives a much greater chance of having a
* working system.
*/
private static void initializeProperties()
{
/*
* Always lay down the default properties first as
* to provide a solid base.
*/
if (configuration.isInitialized() == false)
{
setDefaultProperties();
}
if( overridingProperties != null)
{
/* Override each default property specified */
for (Enumeration e = overridingProperties.keys(); e.hasMoreElements() ; )
{
String s = (String) e.nextElement();
configuration.setOverridingProperty( s, overridingProperties.getProperty(s) );
info (" ** Property Override : " + s + " = " +
overridingProperties.getProperty(s));
}
}
}
/**
* Initialize the Velocity logging system.
*
* @throws Exception
*/
private static void initializeLogger() throws Exception
{
/*
* Grab the log file entry from the velocity
* properties file.
*/
String logFile = configuration.getString(RUNTIME_LOG);
/*
* Initialize the logger. We will eventually move all
* logging into the logging manager.
*/
logger = LogManager.createLogger(logFile);
if ( !pendingMessages.isEmpty())
{
/*
* iterate and log each individual message...
*/
for( Enumeration e = pendingMessages.elements(); e.hasMoreElements(); )
{
logger.info( (String) e.nextElement());
}
}
Runtime.info("Log file being used is: " + new File(logFile).getAbsolutePath());
}
/**
     * This method initializes all the directives
* that are used by the Velocity Runtime. The
* directives to be initialized are listed in
* the RUNTIME_DEFAULT_DIRECTIVES properties
* file.
*
* @throws Exception
*/
private static void initializeDirectives() throws Exception
{
/*
* Initialize the runtime directive table.
* This will be used for creating parsers.
*/
runtimeDirectives = new Hashtable();
Properties directiveProperties = new Properties();
/*
* Grab the properties file with the list of directives
* that we should initialize.
*/
ClassLoader classLoader = Runtime.class.getClassLoader();
InputStream inputStream = classLoader
.getResourceAsStream(DEFAULT_RUNTIME_DIRECTIVES);
if (inputStream == null)
throw new Exception("Error loading directive.properties! " +
"Something is very wrong if these properties " +
"aren't being located. Either your Velocity " +
"distribution is incomplete or your Velocity " +
"jar file is corrupted!");
directiveProperties.load(inputStream);
/*
* Grab all the values of the properties. These
* are all class names for example:
*
* org.apache.velocity.runtime.directive.Foreach
*/
Enumeration directiveClasses = directiveProperties.elements();
while (directiveClasses.hasMoreElements())
{
String directiveClass = (String) directiveClasses.nextElement();
try
{
/*
* Attempt to instantiate the directive class. This
* should usually happen without error because the
* properties file that lists the directives is
* not visible. It's in a package that isn't
* readily accessible.
*/
Class clazz = Class.forName(directiveClass);
Directive directive = (Directive) clazz.newInstance();
runtimeDirectives.put(directive.getName(), directive);
Runtime.info("Loaded Pluggable Directive: "
+ directiveClass);
}
catch (Exception e)
{
Runtime.error("Error Loading Pluggable Directive: "
+ directiveClass);
}
}
}
/**
* Allow clients of Velocity to set a template stream
* source property before the template source streams
* are initialized. This would for example allow clients
* to set the template path that would be used by the
* file template stream source. Right now these properties
* have to be set before the template stream source is
* initialized. Maybe we should allow these properties
* to be changed on the fly.
*
* @param String resource loader property key
* @param String resource loader property value
*/
public static void setSourceProperty(String key, String value)
{
info (" ** !!! Resource Loader Property Override : " + key + " = " + value);
ResourceManager.setSourceProperty(key, value);
}
/**
* Initializes the Velocity parser pool.
* This still needs to be implemented.
*/
private static void initializeParserPool()
{
parserPool = new SimplePool(NUMBER_OF_PARSERS);
        for (int i = 0; i < NUMBER_OF_PARSERS; i++)
{
parserPool.put (createNewParser());
}
Runtime.info ("Created: " + NUMBER_OF_PARSERS + " parsers.");
}
/**
* Returns a JavaCC generated Parser.
*
* @return Parser javacc generated parser
*/
public static Parser createNewParser()
{
Parser parser = new Parser();
parser.setDirectives(runtimeDirectives);
return parser;
}
/**
* Parse the input stream and return the root of
* AST node structure.
*
* @param InputStream inputstream retrieved by a resource loader
* @param String name of the template being parsed
*/
public static SimpleNode parse(InputStream inputStream, String templateName )
throws ParseException
{
SimpleNode ast = null;
Parser parser = (Parser) parserPool.get();
if (parser != null)
{
try
{
ast = parser.parse(inputStream, templateName);
}
finally
{
parserPool.put(parser);
}
}
else
{
error("Runtime : ran out of parsers!");
}
return ast;
}
/**
* Initialize the global cache use by the Velocity
* runtime. Cached templates will be stored here,
* as well as cached content pulled in by the #include
* directive. Who knows what else we'll find to
* cache.
*/
private static void initializeGlobalCache()
{
globalCache = new Hashtable();
}
/**
* Returns a <code>Template</code> from the resource manager
*
* @param name The file name of the desired template.
* @return The template.
* @throws ResourceNotFoundException if template not found
* from any available source.
* @throws ParseErrorException if template cannot be parsed due
* to syntax (or other) error.
* @throws Exception if an error occurs in template initialization
*/
public static Template getTemplate(String name)
throws ResourceNotFoundException, ParseErrorException, Exception
{
return (Template) ResourceManager
.getResource(name,ResourceManager.RESOURCE_TEMPLATE);
}
/**
* Returns a static content resource from the
* resource manager.
*
* @param name Name of content resource to get
* @return parsed ContentResource object ready for use
* @throws ResourceNotFoundException if template not found
* from any available source.
*/
public static ContentResource getContent(String name)
throws ResourceNotFoundException, ParseErrorException, Exception
{
return (ContentResource) ResourceManager
.getResource(name,ResourceManager.RESOURCE_CONTENT);
}
/**
* Handle logging.
*
* @param String message to log
*/
private static void log(String message)
{
if (logger != null)
{
logger.info(message);
}
else
{
pendingMessages.addElement(message);
}
}
/**
* Added this to check and make sure that the configuration
* is initialized before trying to get properties from it.
* This occurs when there are errors during initialization
     * and the default properties have yet to be laid down.
*/
private static boolean showStackTrace()
{
if (configuration.isInitialized())
{
return getBoolean(RUNTIME_LOG_WARN_STACKTRACE, false);
}
else
{
return false;
}
}
/**
* Log a warning message.
*
* @param message the message to log
*/
public static void warn(Object message)
{
String out = null;
if ( showStackTrace() &&
(message instanceof Throwable || message instanceof Exception) )
{
out = StringUtils.stackTrace((Throwable)message);
}
else
{
out = message.toString();
}
log(WARN + out);
}
/**
* Log an info message.
*
* @param message the message to log
*/
public static void info(Object message)
{
String out = null;
if ( showStackTrace() &&
( message instanceof Throwable || message instanceof Exception) )
{
out = StringUtils.stackTrace((Throwable)message);
}
else
{
out = message.toString();
}
log(INFO + out);
}
/**
* Log an error message.
*
* @param message the message to log
*/
public static void error(Object message)
{
String out = null;
if ( showStackTrace() &&
( message instanceof Throwable || message instanceof Exception ) )
{
out = StringUtils.stackTrace((Throwable)message);
}
else
{
out = message.toString();
}
log(ERROR + out);
}
/**
* Log a debug message.
*
* @param message the message to log
*/
public static void debug(Object message)
{
if (DEBUG_ON)
{
log(DEBUG + message.toString());
}
}
/**
* String property accessor method with default to hide the
* configuration implementation.
*
* @param key property key
* @param defaultValue default value to return if the key is not
*        found in the configuration.
* @return String value of key or default
*/
public static String getString( String key, String defaultValue)
{
return configuration.getString(key, defaultValue);
}
/**
* Returns the appropriate VelocimacroProxy object if strVMname
* is a valid current Velocimacro.
*
* @param vmName name of the velocimacro requested
* @param templateName name of the template requesting the velocimacro
* @return Directive the VelocimacroProxy for the requested name
*/
public static Directive getVelocimacro( String vmName, String templateName )
{
return vmFactory.getVelocimacro( vmName, templateName );
}
public static boolean addVelocimacro( String name,
String macro,
String argArray[],
String sourceTemplate )
{
return vmFactory.addVelocimacro( name, macro, argArray, sourceTemplate );
}
/**
* Checks to see if a VM exists
*
* @param vmName name of the velocimacro
* @param templateName name of the template checking for the velocimacro
* @return true if a VM by that name exists, false if not
*/
public static boolean isVelocimacro( String vmName, String templateName )
{
return vmFactory.isVelocimacro( vmName, templateName );
}
/**
* Tells the vmFactory to dump the specified namespace. This supports
* clearing the VM list when in inline-VM-local-scope mode.
*/
public static boolean dumpVMNamespace( String namespace )
{
return vmFactory.dumpVMNamespace( namespace );
}
/**
* String property accessor method to hide the configuration implementation
* @param key property key
* @return value of key or null
*/
public static String getString(String key)
{
return configuration.getString( key );
}
/**
* Int property accessor method to hide the configuration implementation.
*
* @param key property key
* @return int value
*/
public static int getInt( String key )
{
return configuration.getInt( key );
}
/**
* Int property accessor method to hide the configuration implementation.
*
* @param key property key
* @param defaultValue default value
* @return int value
*/
public static int getInt( String key, int defaultValue )
{
return configuration.getInt( key, defaultValue );
}
/**
* Boolean property accessor method to hide the configuration implementation.
*
* @param key property key
* @param def default value if the property is not found
* @return boolean value of key or default value
*/
public static boolean getBoolean( String key, boolean def )
{
return configuration.getBoolean( key, def );
}
/**
* Return the velocity runtime configuration object.
*
* @return Configuration configuration object which houses
* the velocity runtime properties.
*/
public static Configuration getConfiguration()
{
return configuration;
}
}
|
package org.cojen.tupl;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.lang.ref.ReferenceQueue;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.cojen.tupl.util.Latch;
/**
*
*
* @author Brian S O'Neill
*/
final class Checkpointer implements Runnable {
private static final int STATE_INIT = 0, STATE_RUNNING = 1, STATE_CLOSED = 2;
private final AtomicInteger mSuspendCount;
private final ReferenceQueue<AbstractDatabase> mRefQueue;
private final WeakReference<AbstractDatabase> mDatabaseRef;
private final long mRateNanos;
private final long mSizeThreshold;
private final long mDelayThresholdNanos;
private volatile Thread mThread;
private volatile int mState;
private Thread mShutdownHook;
private List<ShutdownHook> mToShutdown;
// Is null when extra checkpoint threads aren't enabled.
private final ThreadPoolExecutor mExtraExecutor;
/**
* @param extraLimit maximum number of extra checkpoint threads to use
*/
Checkpointer(AbstractDatabase db, DatabaseConfig config, int extraLimit) {
mSuspendCount = new AtomicInteger();
mRateNanos = config.mCheckpointRateNanos;
mSizeThreshold = config.mCheckpointSizeThreshold;
mDelayThresholdNanos = config.mCheckpointDelayThresholdNanos;
if (mRateNanos < 0) {
mRefQueue = new ReferenceQueue<>();
mDatabaseRef = new WeakReference<>(db, mRefQueue);
} else {
mRefQueue = null;
mDatabaseRef = new WeakReference<>(db);
}
ThreadPoolExecutor extraExecutor;
{
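// A negative mMaxCheckpointThreads is treated as a per-processor multiplier:
// for example, -2 becomes 2 * availableProcessors(). One thread is the
// checkpointer itself, so only the remainder (if any) goes to the extra executor.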
int max = config.mMaxCheckpointThreads;
if (max < 0) {
max = (-max * Runtime.getRuntime().availableProcessors());
}
max = Math.min(max, extraLimit) - 1;
if (max <= 0) {
extraExecutor = null;
} else {
long timeoutNanos = Math.max
(config.mCheckpointRateNanos, config.mCheckpointDelayThresholdNanos);
if (timeoutNanos < 0) {
// One minute default.
timeoutNanos = TimeUnit.MINUTES.toNanos(1);
}
// Add one more second, with wraparound check.
timeoutNanos += TimeUnit.SECONDS.toNanos(1);
if (timeoutNanos < 0) {
timeoutNanos = Long.MAX_VALUE;
}
extraExecutor = new ThreadPoolExecutor
(max, max, timeoutNanos, TimeUnit.NANOSECONDS,
new LinkedBlockingQueue<>(), Checkpointer::newThread);
extraExecutor.allowCoreThreadTimeOut(true);
}
}
mExtraExecutor = extraExecutor;
}
/**
* @param initialCheckpoint true to perform an initial checkpoint in the new thread
*/
void start(boolean initialCheckpoint) {
if (!initialCheckpoint) {
mState = STATE_RUNNING;
}
mThread = newThread(this);
mThread.start();
}
private static Thread newThread(Runnable r) {
Thread t = new Thread(r);
t.setDaemon(true);
t.setName("Checkpointer-" + Long.toUnsignedString(t.getId()));
return t;
}
@Override
public void run() {
try {
if (mState == STATE_INIT) {
// Start with an initial forced checkpoint.
AbstractDatabase db = mDatabaseRef.get();
if (db != null) {
db.checkpoint();
}
mState = STATE_RUNNING;
}
if (mRefQueue != null) {
// When the checkpoint rate is negative (infinite delay), this thread is
// suspended until the database isn't referenced anymore, or until the database
// is explicitly closed.
mRefQueue.remove();
close(null);
return;
}
long lastDurationNanos = 0;
while (true) {
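// Sleep for the configured rate minus the time spent on the previous
// checkpoint, so checkpoints start on a roughly steady cadence.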
long delayMillis = (mRateNanos - lastDurationNanos) / 1000000L;
if (delayMillis > 0) {
Thread.sleep(delayMillis);
}
AbstractDatabase db = mDatabaseRef.get();
if (db == null) {
close(null);
return;
}
if (mSuspendCount.get() != 0) {
// Don't actually suspend the thread, allowing for weak reference checks.
lastDurationNanos = 0;
} else try {
long startNanos = System.nanoTime();
db.checkpoint(false, mSizeThreshold, mDelayThresholdNanos);
long endNanos = System.nanoTime();
lastDurationNanos = endNanos - startNanos;
} catch (DatabaseException e) {
EventListener listener = db.eventListener();
if (listener != null) {
listener.notify(EventType.CHECKPOINT_FAILED, "Checkpoint failed: %1$s", e);
}
if (!e.isRecoverable()) {
throw e;
}
lastDurationNanos = 0;
}
}
} catch (Throwable e) {
if (mState != STATE_CLOSED) {
AbstractDatabase db = mDatabaseRef.get();
if (db != null) {
Utils.closeQuietly(db, e);
}
}
close(e);
}
}
/**
* Register to close the given object on shutdown or when the Database is
* no longer referenced. The Shutdown object must not maintain a strong
* reference to the Database.
*
* @param obj ignored if null
* @return false if immediately shutdown
*/
boolean register(ShutdownHook obj) {
if (obj == null) {
return false;
}
doRegister: if (mState != STATE_CLOSED) {
synchronized (this) {
if (mState == STATE_CLOSED) {
break doRegister;
}
if (mShutdownHook == null) {
Thread hook = new Thread(() -> Checkpointer.this.close(null));
try {
Runtime.getRuntime().addShutdownHook(hook);
mShutdownHook = hook;
} catch (IllegalStateException e) {
break doRegister;
}
}
if (mToShutdown == null) {
mToShutdown = new ArrayList<>(2);
}
mToShutdown.add(obj);
return true;
}
}
obj.shutdown();
return false;
}
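// Example (sketch, assuming ShutdownHook declares a single shutdown() method
// and the hook holds no strong reference to the database):
//
//   boolean registered = checkpointer.register(() -> tempFile.delete());
//   // A false return means the hook was not registered; a non-null hook is
//   // shut down immediately in that case.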
void suspend() {
suspend(+1);
}
void resume() {
suspend(-1);
}
private void suspend(int amt) {
while (true) {
int count = mSuspendCount.get() + amt;
if (count < 0) {
// Overflowed or too many resumes.
throw new IllegalStateException();
}
if (mSuspendCount.compareAndSet(count - amt, count)) {
break;
}
}
}
/**
* Expected to only be implemented by the NodeContext class.
*/
static interface DirtySet {
/**
* Flush all nodes matching the given state. Only one flush at a time is allowed.
*
* @param dirtyState the old dirty state to match on; CACHED_DIRTY_0 or CACHED_DIRTY_1
*/
void flushDirty(int dirtyState) throws IOException;
}
void flushDirty(DirtySet[] dirtySets, int dirtyState) throws IOException {
if (mExtraExecutor == null) {
for (DirtySet set : dirtySets) {
set.flushDirty(dirtyState);
}
return;
}
final class Countdown extends Latch {
volatile Throwable mException;
Countdown(int count) {
super(count);
}
void failed(Throwable ex) {
if (mException == null) {
// Compare-and-set is probably overkill here.
mException = ex;
}
releaseShared();
}
}
final Countdown cd = new Countdown(dirtySets.length);
for (DirtySet set : dirtySets) {
mExtraExecutor.execute(() -> {
try {
set.flushDirty(dirtyState);
} catch (Throwable e) {
cd.failed(e);
return;
}
cd.releaseShared();
});
}
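// Drain the executor queue on the calling thread as well, so the flush makes
// progress even when all extra checkpoint threads are busy.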
Runnable task;
while ((task = mExtraExecutor.getQueue().poll()) != null) {
task.run();
}
cd.acquireExclusive();
Throwable ex = cd.mException;
if (ex != null) {
Utils.rethrow(ex);
}
}
boolean isClosed() {
return mState == STATE_CLOSED;
}
void close(Throwable cause) {
mState = STATE_CLOSED;
mDatabaseRef.enqueue();
mDatabaseRef.clear();
List<ShutdownHook> toShutdown;
synchronized (this) {
if (mShutdownHook != null) {
try {
Runtime.getRuntime().removeShutdownHook(mShutdownHook);
} catch (Throwable e) {
}
mShutdownHook = null;
}
// Only run shutdown hooks if cleanly closing, to avoid deadlocks.
if (mToShutdown == null || cause != null) {
toShutdown = null;
} else {
toShutdown = new ArrayList<>(mToShutdown);
}
mToShutdown = null;
}
if (toShutdown != null) {
for (ShutdownHook obj : toShutdown) {
obj.shutdown();
}
}
}
/**
* Interrupt all running threads, after calling close. Returns a thread to join, unless
* checkpointer was never started.
*/
Thread interrupt() {
if (mExtraExecutor != null) {
mExtraExecutor.shutdownNow();
}
Thread t = mThread;
if (t != null) {
mThread = null;
t.interrupt();
}
return t;
}
}
|
package com.yahoo.search.rendering;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.TreeNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.yahoo.container.logging.TraceRenderer;
import com.yahoo.data.JsonProducer;
import com.yahoo.data.access.Inspectable;
import com.yahoo.data.access.Inspector;
import com.yahoo.data.access.Type;
import com.yahoo.data.access.simple.JsonRender;
import com.yahoo.data.access.simple.Value;
import com.yahoo.document.datatypes.FieldValue;
import com.yahoo.document.datatypes.StringFieldValue;
import com.yahoo.document.datatypes.TensorFieldValue;
import com.yahoo.document.json.JsonWriter;
import com.yahoo.lang.MutableBoolean;
import com.yahoo.processing.Response;
import com.yahoo.processing.execution.Execution.Trace;
import com.yahoo.processing.rendering.AsynchronousSectionedRenderer;
import com.yahoo.processing.request.CompoundName;
import com.yahoo.processing.response.Data;
import com.yahoo.processing.response.DataList;
import com.yahoo.search.Query;
import com.yahoo.search.Result;
import com.yahoo.search.grouping.Continuation;
import com.yahoo.search.grouping.result.AbstractList;
import com.yahoo.search.grouping.result.BucketGroupId;
import com.yahoo.search.grouping.result.Group;
import com.yahoo.search.grouping.result.GroupId;
import com.yahoo.search.grouping.result.RawBucketId;
import com.yahoo.search.grouping.result.RawId;
import com.yahoo.search.grouping.result.RootGroup;
import com.yahoo.search.grouping.result.ValueGroupId;
import com.yahoo.search.result.Coverage;
import com.yahoo.search.result.DefaultErrorHit;
import com.yahoo.search.result.ErrorHit;
import com.yahoo.search.result.ErrorMessage;
import com.yahoo.search.result.FeatureData;
import com.yahoo.search.result.Hit;
import com.yahoo.search.result.HitGroup;
import com.yahoo.search.result.NanNumber;
import com.yahoo.tensor.Tensor;
import com.yahoo.tensor.serialization.JsonFormat;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UncheckedIOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collections;
import java.util.Deque;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.function.LongSupplier;
import static com.fasterxml.jackson.databind.SerializationFeature.FLUSH_AFTER_WRITE_VALUE;
/**
* JSON renderer for search results.
*
* @author Steinar Knutsen
* @author bratseth
*/
// NOTE: The JSON format is a public API. If new elements are added be sure to update the reference doc.
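// Rough sketch of the rendered shape (field names come from the string constants
// below; exact contents depend on the query and the result):
//
//   {
//     "timing":  { "querytime": ..., "summaryfetchtime": ..., "searchtime": ... },
//     "root": {
//       "id": "...", "relevance": ...,
//       "fields":   { "totalCount": ... },
//       "coverage": { "coverage": ..., "documents": ..., "full": ..., "nodes": ... },
//       "children": [ { "id": "...", "relevance": ..., "fields": { ... } } ]
//     }
//   }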
public class JsonRenderer extends AsynchronousSectionedRenderer<Result> {
private static final CompoundName WRAP_DEEP_MAPS = new CompoundName("renderer.json.jsonMaps");
private static final CompoundName WRAP_MAPS_ALL = new CompoundName("renderer.json.jsonMapsAll");
private static final CompoundName WRAP_WSETS = new CompoundName("renderer.json.jsonWsets");
private static final CompoundName WRAP_WSETS_ALL = new CompoundName("renderer.json.jsonWsetsAll");
private static final CompoundName DEBUG_RENDERING_KEY = new CompoundName("renderer.json.debug");
private static final CompoundName JSON_CALLBACK = new CompoundName("jsoncallback");
private static final CompoundName TENSOR_FORMAT = new CompoundName("format.tensors");
// if this must be optimized, simply use com.fasterxml.jackson.core.SerializableString
private static final String BUCKET_LIMITS = "limits";
private static final String BUCKET_TO = "to";
private static final String BUCKET_FROM = "from";
private static final String CHILDREN = "children";
private static final String CONTINUATION = "continuation";
private static final String COVERAGE = "coverage";
private static final String COVERAGE_COVERAGE = "coverage";
private static final String COVERAGE_DOCUMENTS = "documents";
private static final String COVERAGE_DEGRADE = "degraded";
private static final String COVERAGE_DEGRADE_MATCHPHASE = "match-phase";
private static final String COVERAGE_DEGRADE_TIMEOUT = "timeout";
private static final String COVERAGE_DEGRADE_ADAPTIVE_TIMEOUT = "adaptive-timeout";
private static final String COVERAGE_DEGRADED_NON_IDEAL_STATE = "non-ideal-state";
private static final String COVERAGE_FULL = "full";
private static final String COVERAGE_NODES = "nodes";
private static final String COVERAGE_RESULTS = "results";
private static final String COVERAGE_RESULTS_FULL = "resultsFull";
private static final String ERRORS = "errors";
private static final String ERROR_CODE = "code";
private static final String ERROR_MESSAGE = "message";
private static final String ERROR_SOURCE = "source";
private static final String ERROR_STACK_TRACE = "stackTrace";
private static final String ERROR_SUMMARY = "summary";
private static final String FIELDS = "fields";
private static final String ID = "id";
private static final String LABEL = "label";
private static final String RELEVANCE = "relevance";
private static final String ROOT = "root";
private static final String SOURCE = "source";
private static final String TOTAL_COUNT = "totalCount";
private static final String TIMING = "timing";
private static final String QUERY_TIME = "querytime";
private static final String SUMMARY_FETCH_TIME = "summaryfetchtime";
private static final String SEARCH_TIME = "searchtime";
private static final String TYPES = "types";
private static final String GROUPING_VALUE = "value";
private static final String VESPA_HIDDEN_FIELD_PREFIX = "$";
private final JsonFactory generatorFactory;
private JsonGenerator generator;
private FieldConsumer fieldConsumer;
private Deque<Integer> renderedChildren;
static class FieldConsumerSettings {
boolean debugRendering = false;
boolean jsonDeepMaps = false;
boolean jsonWsets = false;
boolean jsonMapsAll = false;
boolean jsonWsetsAll = false;
boolean tensorShortForm = false;
boolean convertDeep() { return (jsonDeepMaps || jsonWsets); }
}
private final FieldConsumerSettings fieldConsumerSettings = new FieldConsumerSettings();
private LongSupplier timeSource;
private OutputStream stream;
public JsonRenderer() {
this(null);
}
/**
* Creates a json renderer using a custom executor.
* Using a custom executor is useful for tests to avoid creating new threads for each renderer registry.
*/
public JsonRenderer(Executor executor) {
super(executor);
generatorFactory = new JsonFactory();
generatorFactory.setCodec(createJsonCodec());
}
/**
* Create the codec used for rendering instances of {@link TreeNode}. This
* method will be invoked when creating the first renderer instance, but not
* for each fresh clone used by individual results.
*
* @return an object mapper for the internal JsonFactory
*/
protected static ObjectMapper createJsonCodec() {
return new ObjectMapper().disable(FLUSH_AFTER_WRITE_VALUE);
}
@Override
public void init() {
super.init();
fieldConsumerSettings.debugRendering = false;
fieldConsumerSettings.jsonDeepMaps = false;
fieldConsumerSettings.jsonWsets = false;
fieldConsumerSettings.jsonMapsAll = false;
fieldConsumerSettings.jsonWsetsAll = false;
fieldConsumerSettings.tensorShortForm = false;
setGenerator(null, fieldConsumerSettings);
renderedChildren = null;
timeSource = System::currentTimeMillis;
stream = null;
}
@Override
public void beginResponse(OutputStream stream) throws IOException {
beginJsonCallback(stream);
getSettings(getResult().getQuery());
setGenerator(generatorFactory.createGenerator(stream, JsonEncoding.UTF8), fieldConsumerSettings);
renderedChildren = new ArrayDeque<>();
generator.writeStartObject();
renderTrace(getExecution().trace());
renderTiming();
generator.writeFieldName(ROOT);
}
private void renderTiming() throws IOException {
if (!getResult().getQuery().getPresentation().getTiming()) return;
double milli = .001d;
long now = timeSource.getAsLong();
long searchTime = now - getResult().getElapsedTime().first();
double searchSeconds = searchTime * milli;
generator.writeObjectFieldStart(TIMING);
if (getResult().getElapsedTime().firstFill() != 0L) {
long queryTime = getResult().getElapsedTime().weightedSearchTime();
long summaryFetchTime = getResult().getElapsedTime().weightedFillTime();
double querySeconds = queryTime * milli;
double summarySeconds = summaryFetchTime * milli;
generator.writeNumberField(QUERY_TIME, querySeconds);
generator.writeNumberField(SUMMARY_FETCH_TIME, summarySeconds);
}
generator.writeNumberField(SEARCH_TIME, searchSeconds);
generator.writeEndObject();
}
private void getSettings(Query q) {
if (q == null) {
fieldConsumerSettings.debugRendering = false;
fieldConsumerSettings.jsonDeepMaps = false;
fieldConsumerSettings.jsonWsets = false;
fieldConsumerSettings.jsonMapsAll = false;
fieldConsumerSettings.jsonWsetsAll = false;
fieldConsumerSettings.tensorShortForm = false;
return;
}
var props = q.properties();
fieldConsumerSettings.debugRendering = props.getBoolean(DEBUG_RENDERING_KEY, false);
fieldConsumerSettings.jsonDeepMaps = props.getBoolean(WRAP_DEEP_MAPS, false);
fieldConsumerSettings.jsonWsets = props.getBoolean(WRAP_WSETS, false);
fieldConsumerSettings.jsonMapsAll = props.getBoolean(WRAP_MAPS_ALL, false);
fieldConsumerSettings.jsonWsetsAll = props.getBoolean(WRAP_WSETS_ALL, false);
fieldConsumerSettings.tensorShortForm = (props.get(TENSOR_FORMAT) != null &&
props.getString(TENSOR_FORMAT).equalsIgnoreCase("short"));
}
protected void renderTrace(Trace trace) throws IOException {
if (!trace.traceNode().children().iterator().hasNext()) return;
if (getResult().getQuery().getTraceLevel() == 0) return;
try {
long basetime = trace.traceNode().timestamp();
if (basetime == 0L)
basetime = getResult().getElapsedTime().first();
trace.accept(new TraceRenderer(generator, fieldConsumer, basetime));
} catch (TraceRenderer.TraceRenderWrapper e) {
throw new IOException(e);
}
}
@Override
public void beginList(DataList<?> list) throws IOException {
Preconditions.checkArgument(list instanceof HitGroup,
"Expected subclass of com.yahoo.search.result.HitGroup, got %s.",
list.getClass());
moreChildren();
renderHitGroupHead((HitGroup) list);
}
protected void moreChildren() throws IOException {
if (!renderedChildren.isEmpty())
childrenArray();
renderedChildren.push(0);
}
private void childrenArray() throws IOException {
if (renderedChildren.peek() == 0)
generator.writeArrayFieldStart(CHILDREN);
renderedChildren.push(renderedChildren.pop() + 1);
}
private void lessChildren() throws IOException {
int lastRenderedChildren = renderedChildren.pop();
if (lastRenderedChildren > 0) {
generator.writeEndArray();
}
}
protected void renderHitGroupHead(HitGroup hitGroup) throws IOException {
generator.writeStartObject();
renderHitContents(hitGroup);
if (getRecursionLevel() == 1)
renderCoverage();
ErrorHit errorHit = hitGroup.getErrorHit();
if (errorHit != null)
renderErrors(errorHit.errors());
// the framework will invoke begin methods as needed from here
}
protected void renderErrors(Set<ErrorMessage> errors) throws IOException {
if (errors.isEmpty()) return;
generator.writeArrayFieldStart(ERRORS);
for (ErrorMessage e : errors) {
String summary = e.getMessage();
String source = e.getSource();
Throwable cause = e.getCause();
String message = e.getDetailedMessage();
generator.writeStartObject();
generator.writeNumberField(ERROR_CODE, e.getCode());
generator.writeStringField(ERROR_SUMMARY, summary);
if (source != null) {
generator.writeStringField(ERROR_SOURCE, source);
}
if (message != null) {
generator.writeStringField(ERROR_MESSAGE, message);
}
if (cause != null && cause.getStackTrace().length > 0) {
StringWriter s = new StringWriter();
PrintWriter p = new PrintWriter(s);
cause.printStackTrace(p);
p.close();
generator.writeStringField(ERROR_STACK_TRACE, s.toString());
}
generator.writeEndObject();
}
generator.writeEndArray();
}
protected void renderCoverage() throws IOException {
Coverage c = getResult().getCoverage(false);
if (c == null) return;
generator.writeObjectFieldStart(COVERAGE);
generator.writeNumberField(COVERAGE_COVERAGE, c.getResultPercentage());
generator.writeNumberField(COVERAGE_DOCUMENTS, c.getDocs());
if (c.isDegraded()) {
generator.writeObjectFieldStart(COVERAGE_DEGRADE);
generator.writeBooleanField(COVERAGE_DEGRADE_MATCHPHASE, c.isDegradedByMatchPhase());
generator.writeBooleanField(COVERAGE_DEGRADE_TIMEOUT, c.isDegradedByTimeout());
generator.writeBooleanField(COVERAGE_DEGRADE_ADAPTIVE_TIMEOUT, c.isDegradedByAdapativeTimeout());
generator.writeBooleanField(COVERAGE_DEGRADED_NON_IDEAL_STATE, c.isDegradedByNonIdealState());
generator.writeEndObject();
}
generator.writeBooleanField(COVERAGE_FULL, c.getFull());
generator.writeNumberField(COVERAGE_NODES, c.getNodes());
generator.writeNumberField(COVERAGE_RESULTS, c.getResultSets());
generator.writeNumberField(COVERAGE_RESULTS_FULL, c.getFullResultSets());
generator.writeEndObject();
}
protected void renderHit(Hit hit) throws IOException {
if (!shouldRender(hit)) return;
childrenArray();
generator.writeStartObject();
renderHitContents(hit);
generator.writeEndObject();
}
protected boolean shouldRender(Hit hit) {
return ! (hit instanceof DefaultErrorHit);
}
protected void renderHitContents(Hit hit) throws IOException {
String id = hit.getDisplayId();
if (id != null)
generator.writeStringField(ID, id);
generator.writeNumberField(RELEVANCE, hit.getRelevance().getScore());
if (hit.types().size() > 0) {
generator.writeArrayFieldStart(TYPES);
for (String t : hit.types()) {
generator.writeString(t);
}
generator.writeEndArray();
}
String source = hit.getSource();
if (source != null)
generator.writeStringField(SOURCE, hit.getSource());
renderSpecialCasesForGrouping(hit);
renderAllFields(hit);
}
protected void renderAllFields(Hit hit) throws IOException {
fieldConsumer.startHitFields();
renderTotalHitCount(hit);
renderStandardFields(hit);
fieldConsumer.endHitFields();
}
private void renderStandardFields(Hit hit) {
hit.forEachFieldAsRaw(fieldConsumer);
}
private void renderSpecialCasesForGrouping(Hit hit) throws IOException {
if (hit instanceof AbstractList) {
renderGroupingListSyntheticFields((AbstractList) hit);
} else if (hit instanceof Group) {
renderGroupingGroupSyntheticFields(hit);
}
}
private void renderGroupingGroupSyntheticFields(Hit hit) throws IOException {
renderGroupMetadata(((Group) hit).getGroupId());
if (hit instanceof RootGroup) {
renderContinuations(Collections.singletonMap(
Continuation.THIS_PAGE, ((RootGroup) hit).continuation()));
}
}
private void renderGroupingListSyntheticFields(AbstractList a) throws IOException {
writeGroupingLabel(a);
renderContinuations(a.continuations());
}
private void writeGroupingLabel(AbstractList a) throws IOException {
generator.writeStringField(LABEL, a.getLabel());
}
protected void renderContinuations(Map<String, Continuation> continuations) throws IOException {
if (continuations.isEmpty()) return;
generator.writeObjectFieldStart(CONTINUATION);
for (Map.Entry<String, Continuation> e : continuations.entrySet()) {
generator.writeStringField(e.getKey(), e.getValue().toString());
}
generator.writeEndObject();
}
protected void renderGroupMetadata(GroupId id) throws IOException {
if (!(id instanceof ValueGroupId || id instanceof BucketGroupId)) return;
if (id instanceof ValueGroupId) {
ValueGroupId<?> valueId = (ValueGroupId<?>) id;
generator.writeStringField(GROUPING_VALUE, getIdValue(valueId));
} else {
BucketGroupId<?> bucketId = (BucketGroupId<?>) id;
generator.writeObjectFieldStart(BUCKET_LIMITS);
generator.writeStringField(BUCKET_FROM, getBucketFrom(bucketId));
generator.writeStringField(BUCKET_TO, getBucketTo(bucketId));
generator.writeEndObject();
}
}
private static String getIdValue(ValueGroupId<?> id) {
return (id instanceof RawId ? Arrays.toString(((RawId) id).getValue()) : id.getValue()).toString();
}
private static String getBucketFrom(BucketGroupId<?> id) {
return (id instanceof RawBucketId ? Arrays.toString(((RawBucketId) id).getFrom()) : id.getFrom()).toString();
}
private static String getBucketTo(BucketGroupId<?> id) {
return (id instanceof RawBucketId ? Arrays.toString(((RawBucketId) id).getTo()) : id.getTo()).toString();
}
protected void renderTotalHitCount(Hit hit) throws IOException {
if ( ! (getRecursionLevel() == 1 && hit instanceof HitGroup)) return;
fieldConsumer.ensureFieldsField();
generator.writeNumberField(TOTAL_COUNT, getResult().getTotalHitCount());
// alternative for the above two lines:
// fieldConsumer.accept(TOTAL_COUNT, getResult().getTotalHitCount());
}
@Override
public void data(Data data) throws IOException {
Preconditions.checkArgument(data instanceof Hit,
"Expected subclass of com.yahoo.search.result.Hit, got %s.",
data.getClass());
renderHit((Hit) data);
}
@Override
public void endList(DataList<?> list) throws IOException {
lessChildren();
generator.writeEndObject();
}
@Override
public void endResponse() throws IOException {
generator.close();
endJsonCallback();
}
@Override
public String getEncoding() {
return "utf-8";
}
@Override
public String getMimeType() {
return "application/json";
}
private Result getResult() {
Response r = getResponse();
Preconditions.checkArgument(r instanceof Result,
"JsonRenderer can only render instances of com.yahoo.search.Result, got instance of %s.",
r.getClass());
return (Result) r;
}
/**
 * Adds JSONP (JSON with padding) support.
 *
 * If the JSON renderer receives a query parameter "jsoncallback=...",
 * the JSON response is wrapped in a function call with the name specified
 * by the client. This side-steps the same-origin policy, allowing Vespa to be
 * called from JavaScript loaded from a different domain than the Vespa instance.
*/
private void beginJsonCallback(OutputStream stream) throws IOException {
if (shouldRenderJsonCallback()) {
String jsonCallback = getJsonCallback() + "(";
stream.write(jsonCallback.getBytes(StandardCharsets.UTF_8));
this.stream = stream;
}
}
private void endJsonCallback() throws IOException {
if (shouldRenderJsonCallback() && stream != null) {
stream.write(");".getBytes(StandardCharsets.UTF_8));
}
}
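// Example (sketch): a request carrying "jsoncallback=handleResult" causes the
// rendered response to be wrapped as
//
//   handleResult({ ...normal JSON response... });
//
// so it can be loaded via a script tag from another origin.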
private boolean shouldRenderJsonCallback() {
String jsonCallback = getJsonCallback();
return jsonCallback != null && !"".equals(jsonCallback);
}
private String getJsonCallback() {
Result result = getResult();
Query query = result.getQuery();
if (query != null) {
return query.properties().getString(JSON_CALLBACK, null);
}
return null;
}
private void setGenerator(JsonGenerator generator, FieldConsumerSettings settings) {
this.generator = generator;
this.fieldConsumer = generator == null ? null : createFieldConsumer(generator, settings);
}
protected FieldConsumer createFieldConsumer(JsonGenerator generator, boolean debugRendering) {
fieldConsumerSettings.debugRendering = debugRendering;
return createFieldConsumer(generator, fieldConsumerSettings);
}
private FieldConsumer createFieldConsumer(JsonGenerator generator, FieldConsumerSettings settings) {
return new FieldConsumer(generator, settings);
}
/**
* Only for testing. Never to be used in any other context.
*/
void setTimeSource(LongSupplier timeSource) {
this.timeSource = timeSource;
}
/**
* Received callbacks when fields of hits are encountered.
* This instance is reused for all hits of a Result since we are in a single-threaded context
* and want to limit object creation.
*/
public static class FieldConsumer implements Hit.RawUtf8Consumer, TraceRenderer.FieldConsumer {
private final JsonGenerator generator;
private final FieldConsumerSettings settings;
private MutableBoolean hasFieldsField;
public FieldConsumer(JsonGenerator generator, boolean debugRendering) {
this(generator, debugRendering, false);
}
public FieldConsumer(JsonGenerator generator, boolean debugRendering, boolean tensorShortForm) {
this(generator, debugRendering, tensorShortForm, false);
}
public FieldConsumer(JsonGenerator generator, boolean debugRendering, boolean tensorShortForm, boolean jsonDeepMaps) {
this.generator = generator;
this.settings = new FieldConsumerSettings();
this.settings.debugRendering = debugRendering;
this.settings.tensorShortForm = tensorShortForm;
this.settings.jsonDeepMaps = jsonDeepMaps;
}
FieldConsumer(JsonGenerator generator, FieldConsumerSettings settings) {
this.generator = generator;
this.settings = settings;
}
/**
* Call before using this for a hit to track whether we
* have created the "fields" field of the JSON object
*/
void startHitFields() {
this.hasFieldsField = new MutableBoolean(false);
}
/** Call before rendering a field to the generator */
void ensureFieldsField() throws IOException {
if (hasFieldsField.get()) return;
generator.writeObjectFieldStart(FIELDS);
hasFieldsField.set(true);
}
/** Call after all fields in a hit to close the "fields" field of the JSON object */
void endHitFields() throws IOException {
if ( ! hasFieldsField.get()) return;
generator.writeEndObject();
this.hasFieldsField = null;
}
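// Typical per-hit lifecycle (sketch): startHitFields() is called first, each
// accept(...) lazily opens the "fields" object via ensureFieldsField(), and
// endHitFields() closes it only if it was actually opened.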
@Override
public void accept(String name, Object value) {
try {
if (shouldRender(name, value)) {
ensureFieldsField();
generator.writeFieldName(name);
renderFieldContents(value);
}
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
@Override
public void accept(String name, byte[] utf8Data, int offset, int length) {
try {
if (shouldRenderUtf8Value(name, length)) {
ensureFieldsField();
generator.writeFieldName(name);
generator.writeUTF8String(utf8Data, offset, length);
}
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
protected boolean shouldRender(String name, Object value) {
if (settings.debugRendering) return true;
if (name.startsWith(VESPA_HIDDEN_FIELD_PREFIX)) return false;
if (value instanceof CharSequence && ((CharSequence) value).length() == 0) return false;
// StringFieldValue cannot hold a null, so checking length directly is OK:
if (value instanceof StringFieldValue && ((StringFieldValue) value).getString().isEmpty()) return false;
if (value instanceof NanNumber) return false;
return true;
}
protected boolean shouldRenderUtf8Value(String name, int length) {
if (settings.debugRendering) return true;
if (name.startsWith(VESPA_HIDDEN_FIELD_PREFIX)) return false;
if (length == 0) return false;
return true;
}
private Inspector maybeConvertMap(Inspector data) {
var map = new Value.ObjectValue();
for (int i = 0; i < data.entryCount(); i++) {
Inspector obj = data.entry(i);
if (obj.type() != Type.OBJECT || obj.fieldCount() != 2) {
return null;
}
Inspector key = obj.field("key");
Inspector value = obj.field("value");
if (! key.valid()) return null;
if (! value.valid()) return null;
if (key.type() != Type.STRING && !settings.jsonMapsAll) {
return null;
}
if (settings.convertDeep()) {
value = deepMaybeConvert(value);
}
if (key.type() == Type.STRING) {
map.put(key.asString(), value);
} else {
map.put(key.toString(), value);
}
}
return map;
}
private Inspector maybeConvertWset(Inspector data) {
var wset = new Value.ObjectValue();
for (int i = 0; i < data.entryCount(); i++) {
Inspector obj = data.entry(i);
if (obj.type() != Type.OBJECT || obj.fieldCount() != 2) {
return null;
}
Inspector item = obj.field("item");
Inspector weight = obj.field("weight");
if (! item.valid()) return null;
if (! weight.valid()) return null;
// TODO support non-integer weights?
if (weight.type() != Type.LONG) return null;
if (item.type() == Type.STRING) {
wset.put(item.asString(), weight.asLong());
} else if (settings.jsonWsetsAll) {
wset.put(item.toString(), weight.asLong());
} else {
return null;
}
}
return wset;
}
private Inspector convertInsideObject(Inspector data) {
var object = new Value.ObjectValue();
for (var entry : data.fields()) {
object.put(entry.getKey(), deepMaybeConvert(entry.getValue()));
}
return object;
}
private static Inspector wrapAsMap(Inspector data) {
if (data.type() != Type.ARRAY) return null;
if (data.entryCount() == 0) return null;
Value.ObjectValue map = new Value.ObjectValue();
for (int i = 0; i < data.entryCount(); i++) {
Inspector obj = data.entry(i);
if (obj.type() != Type.OBJECT) return null;
if (obj.fieldCount() != 2) return null;
Inspector key = obj.field("key");
Inspector value = obj.field("value");
if (key.type() != Type.STRING) return null;
if (! value.valid()) return null;
map.put(key.asString(), value);
}
return map;
}
private Inspector deepMaybeConvert(Inspector data) {
if (data.type() == Type.ARRAY) {
if (settings.jsonDeepMaps) {
var map = maybeConvertMap(data);
if (map != null) return map;
}
if (settings.jsonWsets) {
var wset = maybeConvertWset(data);
if (wset != null) return wset;
}
}
if (data.type() == Type.OBJECT) {
return convertInsideObject(data);
}
return data;
}
private Inspector convertTopLevelArray(Inspector data) {
if (data.entryCount() > 0) {
var map = maybeConvertMap(data);
if (map != null) return map;
if (settings.jsonWsets) {
var wset = maybeConvertWset(data);
if (wset != null) return wset;
}
if (settings.convertDeep()) {
var array = new Value.ArrayValue();
for (int i = 0; i < data.entryCount(); i++) {
Inspector obj = data.entry(i);
array.add(deepMaybeConvert(obj));
}
return array;
}
}
return data;
}
private Inspector maybeConvertData(Inspector data) throws IOException {
if (data.type() == Type.ARRAY) {
return convertTopLevelArray(data);
}
if (settings.convertDeep() && data.type() == Type.OBJECT) {
return convertInsideObject(data);
}
return data;
}
private void renderInspector(Inspector data) throws IOException {
renderInspectorDirect(maybeConvertData(data));
}
private void renderInspectorDirect(Inspector data) throws IOException {
StringBuilder intermediate = new StringBuilder();
JsonRender.render(data, intermediate, true);
generator.writeRawValue(intermediate.toString());
}
protected void renderFieldContents(Object field) throws IOException {
if (field instanceof Inspectable && ! (field instanceof FeatureData)) {
renderInspector(((Inspectable)field).inspect());
} else {
accept(field);
}
}
@Override
public void accept(Object field) throws IOException {
if (field == null) {
generator.writeNull();
} else if (field instanceof Boolean) {
generator.writeBoolean((Boolean)field);
} else if (field instanceof Number) {
renderNumberField((Number) field);
} else if (field instanceof TreeNode) {
generator.writeTree((TreeNode) field);
} else if (field instanceof Tensor) {
renderTensor(Optional.of((Tensor)field));
} else if (field instanceof FeatureData) {
generator.writeRawValue(((FeatureData)field).toJson(settings.tensorShortForm));
} else if (field instanceof Inspectable) {
renderInspectorDirect(((Inspectable)field).inspect());
} else if (field instanceof JsonProducer) {
generator.writeRawValue(((JsonProducer) field).toJson());
} else if (field instanceof StringFieldValue) {
generator.writeString(((StringFieldValue)field).getString());
} else if (field instanceof TensorFieldValue) {
renderTensor(((TensorFieldValue)field).getTensor());
} else if (field instanceof FieldValue) {
// the null below is the field which has already been written
((FieldValue) field).serialize(null, new JsonWriter(generator));
} else {
generator.writeString(field.toString());
}
}
private void renderNumberField(Number field) throws IOException {
if (field instanceof Integer) {
generator.writeNumber(field.intValue());
} else if (field instanceof Float) {
generator.writeNumber(field.floatValue());
} else if (field instanceof Double) {
generator.writeNumber(field.doubleValue());
} else if (field instanceof Long) {
generator.writeNumber(field.longValue());
} else if (field instanceof Byte || field instanceof Short) {
generator.writeNumber(field.intValue());
} else if (field instanceof BigInteger) {
generator.writeNumber((BigInteger) field);
} else if (field instanceof BigDecimal) {
generator.writeNumber((BigDecimal) field);
} else {
generator.writeNumber(field.doubleValue());
}
}
private void renderTensor(Optional<Tensor> tensor) throws IOException {
if (tensor.isEmpty()) {
generator.writeStartObject();
generator.writeArrayFieldStart("cells");
generator.writeEndArray();
generator.writeEndObject();
return;
}
if (settings.tensorShortForm) {
generator.writeRawValue(new String(JsonFormat.encodeShortForm(tensor.get()), StandardCharsets.UTF_8));
} else {
generator.writeRawValue(new String(JsonFormat.encode(tensor.get()), StandardCharsets.UTF_8));
}
}
}
}
|
package org.helioviewer.jhv;
import java.io.File;
/**
* An enum containing all the directories mapped in a system independent way. If
* a new directory is required, just add it here and it will be created at
* startup.
*
* @author caplins
*
*/
public enum JHVDirectory {
/** The home directory. */
HOME {
private final String path = System.getProperty("user.home");
@Override
public String getPath() {
return this.path + File.separator + "JHelioviewer-SWHV" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The image cache directory. */
CACHE {
@Override
public String getPath() {
return HOME.getPath() + "Cache" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The shared library directory. */
LIBS {
@Override
public String getPath() {
return HOME.getPath() + "Libs" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The shared library last-configuration directory. */
LIBS_LAST_CONFIG {
@Override
public String getPath() {
return HOME.getPath() + "Libs" + File.separator + "LastConfig" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The JHV state directory. */
STATES {
@Override
public String getPath() {
return HOME.getPath() + "States" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The exports directory (movies, screenshots, meta data). */
EXPORTS {
@Override
public String getPath() {
return HOME.getPath() + "Exports" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The log directory. */
LOGS {
@Override
public String getPath() {
return HOME.getPath() + "Logs" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The log settings directory. */
SETTINGS {
@Override
public String getPath() {
return HOME.getPath() + "Settings" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The remote files directory. */
REMOTEFILES {
@Override
public String getPath() {
return HOME.getPath() + "Downloads" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The plugins directory. */
PLUGINS {
@Override
public String getPath() {
return HOME.getPath() + "Plugins" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The plugins cache directory. */
PLUGINSCACHE {
@Override
public String getPath() {
return HOME.getPath() + "Plugins" + File.separator + "Cache" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The plugins last-configuration directory. */
PLUGINS_LAST_CONFIG {
@Override
public String getPath() {
return HOME.getPath() + "Plugins" + File.separator + "LastConfig" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** Directory of automatically loaded GIMP gradient files. */
COLOR_PLUGINS {
@Override
public String getPath() {
return HOME.getPath() + "Colortables" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The temp directory for the Cg compiler. */
TEMP {
@Override
public String getPath() {
return HOME.getPath() + "Temp" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/** The VSO data download directory. */
VSO_DOWNLOAD {
@Override
public String getPath() {
return HOME.getPath() + "VSOData" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
},
/**
* Resources needed by plugins to operate. These can be jar files that cannot
* be placed in the Plugins directory, where they would be treated as plugins.
*/
PLUGIN_RESOURCES {
@Override
public String getPath() {
return HOME.getPath() + "PluginResources" + File.separator;
}
@Override
public File getFile() {
return new File(getPath());
}
};
/** A String representation of the path of the directory. */
abstract public String getPath();
/** A File representation of the path of the directory. */
abstract public File getFile();
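// Example (sketch): resolve a directory and make sure it exists on disk.
//
//   File cache = JHVDirectory.CACHE.getFile();
//   if (!cache.exists()) {
//       cache.mkdirs();
//   }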
};
|
package org.jtrfp.trcl.miss;
import java.awt.Point;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.jtrfp.trcl.Camera;
import org.jtrfp.trcl.DisplayModeHandler;
import org.jtrfp.trcl.NAVSystem;
import org.jtrfp.trcl.OverworldSystem;
import org.jtrfp.trcl.RenderableSpacePartitioningGrid;
import org.jtrfp.trcl.SkySystem;
import org.jtrfp.trcl.Tunnel;
import org.jtrfp.trcl.World;
import org.jtrfp.trcl.beh.CollidesWithTerrain;
import org.jtrfp.trcl.beh.CollidesWithTunnelWalls;
import org.jtrfp.trcl.beh.LoopingPositionBehavior;
import org.jtrfp.trcl.beh.MatchDirection;
import org.jtrfp.trcl.beh.MatchPosition;
import org.jtrfp.trcl.beh.SkyCubeCloudModeUpdateBehavior;
import org.jtrfp.trcl.beh.phy.MovesByVelocity;
import org.jtrfp.trcl.core.Features;
import org.jtrfp.trcl.core.Renderer;
import org.jtrfp.trcl.core.ResourceManager;
import org.jtrfp.trcl.core.TR;
import org.jtrfp.trcl.file.AbstractTriplet;
import org.jtrfp.trcl.file.DirectionVector;
import org.jtrfp.trcl.file.LVLFile;
import org.jtrfp.trcl.file.Location3D;
import org.jtrfp.trcl.file.NAVFile.NAVSubObject;
import org.jtrfp.trcl.file.NAVFile.START;
import org.jtrfp.trcl.file.TDFFile;
import org.jtrfp.trcl.game.Game;
import org.jtrfp.trcl.game.TVF3Game;
import org.jtrfp.trcl.miss.LoadingProgressReporter.UpdateHandler;
import org.jtrfp.trcl.miss.NAVObjective.Factory;
import org.jtrfp.trcl.obj.ObjectDirection;
import org.jtrfp.trcl.obj.Player;
import org.jtrfp.trcl.obj.PortalEntrance;
import org.jtrfp.trcl.obj.PortalExit;
import org.jtrfp.trcl.obj.Projectile;
import org.jtrfp.trcl.obj.ProjectileFactory;
import org.jtrfp.trcl.obj.Propelled;
import org.jtrfp.trcl.obj.TunnelEntranceObject;
import org.jtrfp.trcl.obj.WorldObject;
import org.jtrfp.trcl.shell.GameShell;
import org.jtrfp.trcl.snd.GPUResidentMOD;
import org.jtrfp.trcl.snd.MusicPlaybackEvent;
import org.jtrfp.trcl.snd.SoundSystem;
public class Mission {
// PROPERTIES
//public static final String MISSION_MODE = "missionMode";
public static final String SATELLITE_VIEW = "satelliteView";
public static final String CURRENT_NAV_TARGET = "currentNavTarget";
private final TR tr;
private final List<NAVObjective>
navs = new LinkedList<NAVObjective>();
private final LVLFile lvl;
private final HashMap<String, Tunnel>
tunnels = new HashMap<String, Tunnel>();
private final HashMap<Integer, PortalEntrance>
tunnelPortals = new HashMap<Integer, PortalEntrance>();
private double[] playerStartPosition
= new double[3];
private List<NAVSubObject> navSubObjects;
private ObjectDirection playerStartDirection;
private final Game game;
private final String levelName;
private OverworldSystem overworldSystem;
private final Result[] missionEnd = new Result[]{null};
private int groundTargetsDestroyed=0,
airTargetsDestroyed=0,
foliageDestroyed=0;
private int totalNumTunnels;
private final LinkedList<Tunnel>
tunnelsRemaining = new LinkedList<Tunnel>();
private final boolean showIntro;
private volatile MusicPlaybackEvent
bgMusic;
private final Object missionLock = new Object();
private final Map<Integer,TunnelEntranceObject>
tunnelMap = new HashMap<Integer,TunnelEntranceObject>();
private boolean bossFight = false, satelliteView = false;
//private MissionMode missionMode = new Mission.LoadingMode();
private final PropertyChangeSupport pcs = new PropertyChangeSupport(this);
private Tunnel currentTunnel;
private final DisplayModeHandler displayHandler;
public Object [] levelLoadingMode, tunnelMode, overworldMode, gameplayMode, briefingMode, summaryMode, emptyMode= new Object[]{};
private NAVObjective currentNavTarget;
private final RenderableSpacePartitioningGrid tunnelGrid = new RenderableSpacePartitioningGrid();
private final RenderableSpacePartitioningGrid partitioningGrid = new RenderableSpacePartitioningGrid();
private enum LoadingStages {
navs, tunnels, overworld
}// end LoadingStages
//ROOT STATES
public interface MissionState extends Game.GameRunningMode{}
public interface ConstructingState extends MissionState{}
public interface ConstructedState extends MissionState{}
public interface ActiveMissionState extends ConstructedState{}
public interface LoadingState extends ActiveMissionState{}
public interface GameplayState extends ActiveMissionState{}
public interface Briefing extends GameplayState{}
public interface PlanetBrief extends Briefing{}
public interface EnemyBrief extends Briefing{}
public interface MissionSummary extends Briefing{}
public interface PlayerActivity extends GameplayState{}
public interface OverworldState extends PlayerActivity{}
public interface ChamberState extends OverworldState{}
public interface TunnelState extends PlayerActivity{}
public Mission(TR tr, Game game, LVLFile lvl, String levelName, boolean showIntro) {
this.tr = tr;
this.lvl = lvl;
this.game = game;
this.levelName = levelName;
this.showIntro = showIntro;
this.displayHandler = new DisplayModeHandler(this.getPartitioningGrid());
Features.init(this);
tr.setRunState(new ConstructingState(){});
levelLoadingMode = new Object[]{
((TVF3Game)game).levelLoadingScreen,
((TVF3Game)game).upfrontDisplay
};
tr.setRunState(new ConstructedState(){});
}// end Mission
public Result go() {
tr.setRunState(new LoadingState(){});
synchronized(missionLock){
synchronized(missionEnd){
if(missionEnd[0]!=null)
return missionEnd[0];
}
tr.getThreadManager().setPaused(true);
for(ProjectileFactory pf:tr.getResourceManager().getProjectileFactories())
for(Projectile proj:pf.getProjectiles())
proj.destroy();
System.out.println("Starting GampeplayLevel loading sequence...");
final LoadingProgressReporter rootProgress = LoadingProgressReporter.Impl
.createRoot(new UpdateHandler() {
@Override
public void update(double unitProgress) {
((TVF3Game)game).getLevelLoadingScreen().setLoadingProgress(unitProgress);
}
});
final LoadingProgressReporter[] progressStages = rootProgress
.generateSubReporters(LoadingStages.values().length);
final Renderer renderer = tr.mainRenderer.get();
renderer.getCamera().probeForBehavior(SkyCubeCloudModeUpdateBehavior.class).setEnable(false);
renderer.getSkyCube().setSkyCubeGen(GameShell.DEFAULT_GRADIENT);
final Camera camera = renderer.getCamera();
camera.setHeading(Vector3D.PLUS_I);
camera.setTop(Vector3D.PLUS_J);
((TVF3Game)game).levelLoadingMode();
displayHandler.setDisplayMode(levelLoadingMode);
((TVF3Game)game).getUpfrontDisplay().submitPersistentMessage(levelName);
try {
final ResourceManager rm = tr.getResourceManager();
final Player player = ((TVF3Game)tr.getGame()).getPlayer();
final TDFFile tdf = rm.getTDFData(lvl.getTunnelDefinitionFile());
player.setActive(false);
// Abort check
synchronized(missionEnd){
if(missionEnd[0]!=null)
return missionEnd[0];
}
overworldSystem = new OverworldSystem(tr,
progressStages[LoadingStages.overworld.ordinal()]);
briefingMode = new Object[]{
((TVF3Game)game).briefingScreen,
overworldSystem
};
gameplayMode = new Object[]{
((TVF3Game)game).navSystem,
((TVF3Game)game).hudSystem,
((TVF3Game)game).upfrontDisplay,
rm.getDebrisSystem(),
rm.getPowerupSystem(),
rm.getProjectileFactories(),
rm.getExplosionFactory(),
rm.getSmokeSystem()
};
overworldMode = new Object[]{
gameplayMode,
overworldSystem
};
tunnelMode = new Object[]{
((TVF3Game)game).navSystem,
((TVF3Game)game).hudSystem,
((TVF3Game)game).upfrontDisplay,
rm.getDebrisSystem(),
rm.getPowerupSystem(),
rm.getProjectileFactories(),
rm.getExplosionFactory(),
rm.getSmokeSystem(),
tunnelGrid
};
summaryMode = new Object[]{
((TVF3Game)game).getBriefingScreen(),
overworldSystem
};
getOverworldSystem().loadLevel(lvl, tdf);
System.out.println("\t...Done.");
// Install NAVs
final NAVSystem navSystem = ((TVF3Game)tr.getGame()).getNavSystem();
navSubObjects = rm.getNAVData(lvl.getNavigationFile())
.getNavObjects();
START s = (START) navSubObjects.get(0);
Location3D l3d = s.getLocationOnMap();
playerStartPosition[0] = TR.legacy2Modern(l3d.getZ());
playerStartPosition[2] = TR.legacy2Modern(l3d.getX());
final double HEIGHT_PADDING = 10000;
playerStartPosition[1] = Math.max(HEIGHT_PADDING + getOverworldSystem().getAltitudeMap().heightAt(
TR.legacy2Modern(l3d.getZ()),
TR.legacy2Modern(l3d.getX())),TR.legacy2Modern(l3d.getY()));
playerStartDirection = new ObjectDirection(s.getRoll(),
s.getPitch(), s.getYaw());
// ////// INITIAL HEADING
player.setPosition(getPlayerStartPosition());
player.setDirection(getPlayerStartDirection());
player.setHeading(player.getHeading().negate());// Kludge to fix
// incorrect heading
///////// STATE
final Propelled propelled = player.probeForBehavior(Propelled.class);
propelled.setPropulsion(propelled.getMinPropulsion());
player.probeForBehavior(CollidesWithTerrain.class).setEnable(true);
installTunnels(tdf,progressStages[LoadingStages.tunnels.ordinal()]);
Factory f = new NAVObjective.Factory(tr, getLevelName());
final LoadingProgressReporter[] navProgress = progressStages[LoadingStages.navs
.ordinal()].generateSubReporters(navSubObjects.size());
for (int i = 0; i < navSubObjects.size(); i++) {
final NAVSubObject obj = navSubObjects.get(i);
f.create(tr, obj, navs);
navProgress[i].complete();
}// end for(navSubObjects)
navSystem.updateNAVState();
player.resetVelocityRotMomentum();
final String startX = System.getProperty("org.jtrfp.trcl.startX");
final String startY = System.getProperty("org.jtrfp.trcl.startY");
final String startZ = System.getProperty("org.jtrfp.trcl.startZ");
final double[] playerPos = player.getPosition();
if (startX != null && startY != null && startZ != null) {
System.out.println("Using user-specified start point");
final int sX = Integer.parseInt(startX);
final int sY = Integer.parseInt(startY);
final int sZ = Integer.parseInt(startZ);
playerPos[0] = sX;
playerPos[1] = sY;
playerPos[2] = sZ;
player.notifyPositionChange();
}// end if(user start point)
System.out.println("Start position set to " + player.getPosition()[0]+" "+player.getPosition()[1]+" "+player.getPosition()[2]);
System.out.println("Setting sun vector");
final AbstractTriplet sunVector = lvl.getSunlightDirectionVector();
tr.getThreadManager().submitToGL(new Callable<Void>() {
@Override
public Void call() throws Exception {
tr.mainRenderer.get().setSunVector(
new Vector3D(sunVector.getX(), sunVector.getY(),
sunVector.getZ()).normalize());
return null;
}
}).get();
System.out.println("\t...Done.");
} catch (Exception e) {
e.printStackTrace();
}
if (System.getProperties().containsKey(
"org.jtrfp.trcl.flow.Mission.skipNavs")) {
try {
final int skips = Integer.parseInt(System
.getProperty("org.jtrfp.trcl.flow.Mission.skipNavs"));
System.out.println("Skipping " + skips + " navs.");
for (int i = 0; i < skips; i++) {
removeNAVObjective(currentNAVObjective());
}// end for(skips)
} catch (NumberFormatException e) {
System.err
.println("Invalid format for property \"org.jtrfp.trcl.flow.Mission.skipNavs\". Must be integer.");
}
}// end if(containsKey)
// Transition to gameplay mode.
// Abort check
synchronized (missionEnd) {
if (missionEnd[0] != null)
return missionEnd[0];
}//end sync(missionEnd)
final SoundSystem ss = Mission.this.tr.soundSystem.get();
MusicPlaybackEvent evt;
Mission.this.tr.soundSystem.get().enqueuePlaybackEvent(
evt =ss
.getMusicFactory()
.create(new GPUResidentMOD(tr, tr
.getResourceManager().getMOD(
lvl.getBackgroundMusicFile())),
true));
synchronized(Mission.this){
if(bgMusic==null){
bgMusic=evt;
bgMusic.play();
}
}//end sync(Mission.this)
((TVF3Game)game).getUpfrontDisplay().removePersistentMessage();
tr.soundSystem.get().setPaused(false);
tr.getThreadManager().setPaused(false);
if(showIntro){
tr.setRunState(new EnemyBrief(){});
//setMissionMode(new Mission.IntroMode());
displayHandler.setDisplayMode(briefingMode);
((TVF3Game)game).getBriefingScreen().briefingSequence(lvl);//TODO: Convert to feature
}
tr.setRunState(new OverworldState(){});
final SkySystem skySystem = getOverworldSystem().getSkySystem();
tr.mainRenderer.get().getCamera().probeForBehavior(SkyCubeCloudModeUpdateBehavior.class).setEnable(true);
renderer.getSkyCube().setSkyCubeGen(skySystem.getBelowCloudsSkyCubeGen());
renderer.setAmbientLight(skySystem.getSuggestedAmbientLight());
renderer.setSunColor(skySystem.getSuggestedSunColor());
((TVF3Game)game).getNavSystem() .activate();
displayHandler.setDisplayMode(overworldMode);
((TVF3Game)game).getPlayer() .setActive(true);
((TVF3Game)tr.getGame()).setPaused(false);
tr.setRunState(new PlayerActivity(){});
//Wait for mission end
synchronized(missionEnd){
while(missionEnd[0]==null){try{missionEnd.wait();}
catch(InterruptedException e){break;}}}
//Completion summary
if(missionEnd[0]!=null)
if(!missionEnd[0].isAbort()){
displayHandler.setDisplayMode(summaryMode);
tr.setRunState(new MissionSummary(){});
((TVF3Game)game).getBriefingScreen().missionCompleteSummary(lvl,missionEnd[0]);
}//end if(proper ending)
bgMusic.stop();
cleanup();
return missionEnd[0];
}//end sync
}// end go()
public NAVObjective currentNAVObjective() {
if (navs.isEmpty())
return null;
return navs.get(0);
}//end currentNAVObjective()
public void removeNAVObjective(NAVObjective o) {
navs.remove(o);
updateNavState();
((TVF3Game)tr.getGame()).getNavSystem().updateNAVState();
}// end removeNAVObjective(...)
private void updateNavState(){
try{this.setCurrentNavTarget(navs.get(0));}
catch(IndexOutOfBoundsException e){setCurrentNavTarget(null);}
}
public static class Result {
private final int airTargetsDestroyed, groundTargetsDestroyed,foliageDestroyed;
private final double tunnelsFoundPctNorm;
private boolean abort=false;
public Result(int airTargetsDestroyed, int groundTargetsDestroyed, int foliageDestroyed, double tunnelsFoundPctNorm) {
this.airTargetsDestroyed =airTargetsDestroyed;
this.groundTargetsDestroyed =groundTargetsDestroyed;
this.foliageDestroyed =foliageDestroyed;
this.tunnelsFoundPctNorm =tunnelsFoundPctNorm;
}//end constructor
/**
* @return the airTargetsDestroyed
*/
public int getAirTargetsDestroyed() {
return airTargetsDestroyed;
}
/**
* @return the groundTargetsDestroyed
*/
public int getGroundTargetsDestroyed() {
return groundTargetsDestroyed;
}
/**
* @return the foliageDestroyed
*/
public int getFoliageDestroyed() {
return foliageDestroyed;
}
/**
* @return the tunnelsFoundPctNorm
*/
public double getTunnelsFoundPctNorm() {
return tunnelsFoundPctNorm;
}
/**
* @return the abort
*/
public boolean isAbort() {
return abort;
}
/**
* @param abort the abort to set
*/
public void setAbort(boolean abort) {
this.abort = abort;
}
}// end Result
/**
* @return the playerStartPosition
*/
public double[] getPlayerStartPosition() {
return playerStartPosition;
}
/**
* @return the playerStartDirection
*/
public ObjectDirection getPlayerStartDirection() {
return playerStartDirection;
}
private void installTunnels(TDFFile tdf, LoadingProgressReporter reporter){
TDFFile.Tunnel[] tuns = tdf.getTunnels();
tuns = tuns == null?new TDFFile.Tunnel[0]:tuns;//Null means no tunnels.
final LoadingProgressReporter[] reporters = reporter
.generateSubReporters(tuns.length);
if (tuns != null) {
int tIndex = 0;
// Build tunnels
for (TDFFile.Tunnel tun : tuns) {
tr
.getReporter()
.report("org.jtrfp.trcl.TunnelInstaller.tunnel."
+ tIndex + ".entrance", tun.getEntrance().toString());
tr
.getReporter()
.report("org.jtrfp.trcl.TunnelInstaller.tunnel."
+ tIndex + ".exit", tun.getExit().toString());
newTunnel(tun,reporters[tIndex]);
tIndex++;
	    }//end for(tun : tuns)
}// end if(tuns!=null)
totalNumTunnels = tunnelsRemaining.size();
}//end installTunnels()
private Tunnel newTunnel(org.jtrfp.trcl.file.TDFFile.Tunnel tdfTun,
LoadingProgressReporter reporter) {
final Tunnel tunnel = new Tunnel(tr, tdfTun, reporter, tdfTun.getTunnelLVLFile());
tunnelsRemaining.add(tunnel);
DirectionVector tunnelEntranceLegacyPos = tdfTun.getEntrance();
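	// The legacy Z and X components are swapped here when building the map-square
	// Point, apparently matching the "Intentionally backwards" ordering used in
	// getNearestTunnelEntrance(...) below.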
final Point tunnelEntranceMapSquarePos = new Point(
(int)(TR.legacy2MapSquare(tunnelEntranceLegacyPos.getZ())),
(int)(TR.legacy2MapSquare(tunnelEntranceLegacyPos.getX())));
final PortalEntrance portalEntrance = getTunnelEntrancePortal(tunnelEntranceMapSquarePos);
final PortalExit portalExit = portalEntrance.getPortalExit();
addTunnelEntrance(tunnelEntranceMapSquarePos,tunnel,portalEntrance);
if(portalExit!=null){
portalExit.setHeading(Tunnel.TUNNEL_START_DIRECTION.getHeading());
portalExit.setTop(Tunnel.TUNNEL_START_DIRECTION.getTop());
portalExit.setPosition(Tunnel.TUNNEL_START_POS.toArray());
portalExit.notifyPositionChange();
portalExit.setRootGrid(tunnel);
}else throw new NullPointerException("Null portal exit! "+tunnelEntranceMapSquarePos);
DirectionVector tunnelExitLegacyPos = tdfTun.getExit();
final Point tunnelExitMapSquarePos = new Point(
(int)(TR.legacy2MapSquare(tunnelExitLegacyPos.getZ())),
(int)(TR.legacy2MapSquare(tunnelExitLegacyPos.getX())));
System.out.println("Tunnel exit at sector "+tunnelExitMapSquarePos);
//portalExit = getTunnelEntrancePortal(tunnelExitMapSquarePos);
/*if(portalExit!=null){
portalExit.setHeading(tunnel.getExitObject().getHeading().negate());
portalExit.setTop(tunnel.getExitObject().getTop());
portalExit.setPosition(tunnel.getExitObject().getPosition());
portalExit.notifyPositionChange();
portalExit.setRootGrid(tunnel);
}else System.err.println("Null exit.");*/
tunnels.put(tdfTun.getTunnelLVLFile().toUpperCase(), tunnel);
return tunnel;
}
public Tunnel getTunnelByFileName(String tunnelFileName) {
return tunnels.get(tunnelFileName.toUpperCase());
}
public TunnelEntranceObject getNearestTunnelEntrance(double xInLegacyUnits,
double yInLegacyUnits, double zInLegacyUnits) {
TunnelEntranceObject result = null;
double closestDistance = Double.POSITIVE_INFINITY;
final Vector3D entPos = new Vector3D(
TR.legacy2Modern(zInLegacyUnits),//Intentionally backwards
TR.legacy2Modern(yInLegacyUnits),
TR.legacy2Modern(xInLegacyUnits)
);
System.out.println("Requested entry pos="+entPos);
for (TunnelEntranceObject teo : tunnelMap.values()) {
final Vector3D pos = new Vector3D(teo.getPosition());
System.out.println("Found tunnel at "+pos);
final double distance = pos.distance(entPos);
if (distance < closestDistance) {
closestDistance = distance;
result = teo;
}
}// end for(tunnels)
return result;
}// end getTunnelWhoseEntranceClosestTo(...)
public void playerDestroyed() {
new Thread() {
@Override
public void run() {
System.out.println("MISSION FAILED.");
notifyMissionEnd(null);
}// end run()
}.start();
}// end playerDestroyed()
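    /**
     * Publishes the mission result and wakes any thread blocked on missionEnd,
     * such as the wait loop in go().
     */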
public void notifyMissionEnd(Result r){
synchronized(missionEnd){
missionEnd[0]=r;
missionEnd.notifyAll();}
}//end notifyMissionEnd()
public List<NAVObjective> getRemainingNAVObjectives() {
return navs;
}
/**
* @return the navSubObjects
*/
public List<NAVSubObject> getNavSubObjects() {
return navSubObjects;
}
/**
* @param navSubObjects
* the navSubObjects to set
*/
public void setNavSubObjects(List<NAVSubObject> navSubObjects) {
this.navSubObjects = navSubObjects;
}
public OverworldSystem getOverworldSystem() {
return overworldSystem;
}
public Mission notifyAirTargetDestroyed(){
airTargetsDestroyed++;
return this;
}
public Mission notifyGroundTargetDestroyed(){
groundTargetsDestroyed++;
return this;
}
public Mission notifyTunnelFound(Tunnel tun){
tunnelsRemaining.remove(tun);
return this;
}
public Mission notifyFoliageDestroyed(){
foliageDestroyed++;
return this;
}
public void enterBossMode(final String bossMusicFile){
setBossFight(true);
tr.getThreadManager().submitToThreadPool(new Callable<Void>() {
@Override
public Void call() throws Exception {
MusicPlaybackEvent evt;
final SoundSystem ss = Mission.this.tr.soundSystem.get();
Mission.this.tr.soundSystem.get().enqueuePlaybackEvent(
evt =ss
.getMusicFactory()
.create(tr.getResourceManager().gpuResidentMODs.get(bossMusicFile),
true));
synchronized(Mission.this){
evt.play();
if(bgMusic!=null)
bgMusic.stop();
bgMusic=evt;
}
return null;
}// end call()
});
}//end enterBossMode()
public void exitBossMode(){
setBossFight(false);
tr.getThreadManager().submitToThreadPool(new Callable<Void>() {
@Override
public Void call() throws Exception {
MusicPlaybackEvent evt;
final SoundSystem ss = Mission.this.tr.soundSystem.get();
Mission.this.tr.soundSystem.get().enqueuePlaybackEvent(
evt =ss
.getMusicFactory()
.create(tr.getResourceManager().gpuResidentMODs.get(lvl.getBackgroundMusicFile()),
true));
synchronized(Mission.this){
evt.play();
bgMusic.stop();
bgMusic=evt;}
return null;
}// end call()
});
}//end exitBossMode()
public void abort() {
final Result result = new Result(
airTargetsDestroyed,
groundTargetsDestroyed,
foliageDestroyed,
1.-(double)tunnelsRemaining.size()/(double)totalNumTunnels);
result.setAbort(true);
notifyMissionEnd(result);
//Wait for mission to end
synchronized(missionLock){//Don't execute while mission is in progress.
cleanup();
}//end sync{}
}//end abort()
private void cleanup() {
displayHandler.setDisplayMode(emptyMode);
// Remove projectile factories
for(ProjectileFactory pf:tr.getResourceManager().getProjectileFactories())
for(Projectile projectile:pf.getProjectiles())
projectile.destroy();
}
/**
* Find a tunnel at the given map square, if any.
* @param mapSquareXZ Position in cells, not world coords.
* @return The Tunnel at this map square, or null if none here.
* @since Jan 13, 2015
*/
public TunnelEntranceObject getTunnelEntranceObject(Point mapSquareXZ){
final int key = pointToHash(mapSquareXZ);
System.out.println("getTunnelEntranceObject "+mapSquareXZ);
for(TunnelEntranceObject teo:tunnelMap.values())
System.out.print(" "+new Vector3D(teo.getPosition()).scalarMultiply(1/TR.mapSquareSize));
System.out.println();
return tunnelMap.get(key);
}
public void registerTunnelEntrancePortal(Point mapSquareXZ, PortalEntrance entrance){
synchronized(tunnelPortals){
tunnelPortals.put(pointToHash(mapSquareXZ),entrance);}
}
PortalEntrance getTunnelEntrancePortal(Point mapSquareXZ){
synchronized(tunnelPortals){
return tunnelPortals.get(pointToHash(mapSquareXZ));}
}
public void addTunnelEntrance(Point mapSquareXZ, Tunnel tunnel, PortalEntrance entrance){
TunnelEntranceObject teo;
overworldSystem.add(teo = new TunnelEntranceObject(tr,tunnel,entrance));
tunnelMap.put(pointToHash(mapSquareXZ),teo);
}
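    // Packs the map-square coordinates into a single int key: X occupies the low
    // 16 bits and Y the higher bits (assumes coordinates stay below 65536 so keys
    // do not collide).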
private int pointToHash(Point point){
final int key =(int)point.getX()+(int)point.getY()*65536;
return key;
}
public synchronized void enterTunnel(final TunnelEntranceObject teo) {
final Tunnel tunnelToEnter = teo.getSourceTunnel();
System.out.println("Entering tunnel "+tunnelToEnter);
final Game game = ((TVF3Game)tr.getGame());
final OverworldSystem overworldSystem = ((TVF3Game)game).getCurrentMission().getOverworldSystem();
setCurrentTunnel(tunnelToEnter);
((TVF3Game)game).getCurrentMission().notifyTunnelFound(tunnelToEnter);
tr.setRunState(new TunnelState(){});
//tr.getDefaultGrid().nonBlockingAddBranch(tunnel);
//tr.getDefaultGrid().blockingRemoveBranch(overworldSystem);
setDisplayMode(tunnelMode);
//Move player to tunnel
tr.mainRenderer.get().getSkyCube().setSkyCubeGen(Tunnel.TUNNEL_SKYCUBE_GEN);
//Ensure chamber mode is off
overworldSystem.setChamberMode(false);
overworldSystem.setTunnelMode(true);
//Update debug data
tr.getReporter().report("org.jtrfp.Tunnel.isInTunnel?", "true");
final ProjectileFactory [] pfs = tr.getResourceManager().getProjectileFactories();
for(ProjectileFactory pf:pfs){
Projectile [] projectiles = pf.getProjectiles();
for(Projectile proj:projectiles){
((WorldObject)proj).
probeForBehavior(LoopingPositionBehavior.class).
setEnable(false);
}//end for(projectiles)
}//end for(projectileFactories)
final Player player = ((TVF3Game)tr.getGame()).getPlayer();
player.setActive(false);
player.resetVelocityRotMomentum();
player.probeForBehavior(CollidesWithTunnelWalls.class).setEnable(true);
player.probeForBehavior(MovesByVelocity.class) .setVelocity(Vector3D.ZERO);
player.probeForBehavior(LoopingPositionBehavior.class).setEnable(false);
//player.probeForBehavior(HeadingXAlwaysPositiveBehavior.class).setEnable(true);
player.probeForBehavior(CollidesWithTerrain.class) .setEnable(false);
tunnelToEnter.dispatchTunnelEntryNotifications();
final Renderer portalRenderer = teo.getPortalEntrance().getPortalRenderer();
//TODO: NPE bug on this line v
final Camera secondaryCam = /*tr.secondaryRenderer.get().getCamera()*/portalRenderer.getCamera();
//player.setPosition(Tunnel.TUNNEL_START_POS.toArray());//TODO: remove debug code
player.setPosition(secondaryCam.getPosition()); //TODO: Uncomment
player.setHeading (secondaryCam.getHeading());
player.setTop (secondaryCam.getTop());
player.notifyPositionChange();
//Move the secondary cam to the overworld.
overworldSystem.setChamberMode(tunnelToEnter.getExitObject().isMirrorTerrain());
//Set the skycube appropriately
portalRenderer.getSkyCube().setSkyCubeGen(((TVF3Game)tr.getGame()).
getCurrentMission().
getOverworldSystem().
getSkySystem().
getBelowCloudsSkyCubeGen());
tr.setRunState(new TunnelState(){});
player.setActive(true);
}//end enterTunnel()
/**
* @param listener
* @see java.beans.PropertyChangeSupport#addPropertyChangeListener(java.beans.PropertyChangeListener)
*/
public void addPropertyChangeListener(PropertyChangeListener listener) {
pcs.addPropertyChangeListener(listener);
}
/**
* @param propertyName
* @param listener
* @see java.beans.PropertyChangeSupport#addPropertyChangeListener(java.lang.String, java.beans.PropertyChangeListener)
*/
public void addPropertyChangeListener(String propertyName,
PropertyChangeListener listener) {
pcs.addPropertyChangeListener(propertyName, listener);
}
/**
* @return
* @see java.beans.PropertyChangeSupport#getPropertyChangeListeners()
*/
public PropertyChangeListener[] getPropertyChangeListeners() {
return pcs.getPropertyChangeListeners();
}
/**
* @param propertyName
* @return
* @see java.beans.PropertyChangeSupport#getPropertyChangeListeners(java.lang.String)
*/
public PropertyChangeListener[] getPropertyChangeListeners(
String propertyName) {
return pcs.getPropertyChangeListeners(propertyName);
}
/**
* @param propertyName
* @return
* @see java.beans.PropertyChangeSupport#hasListeners(java.lang.String)
*/
public boolean hasListeners(String propertyName) {
return pcs.hasListeners(propertyName);
}
/**
* @param listener
* @see java.beans.PropertyChangeSupport#removePropertyChangeListener(java.beans.PropertyChangeListener)
*/
public void removePropertyChangeListener(PropertyChangeListener listener) {
pcs.removePropertyChangeListener(listener);
}
/**
* @param propertyName
* @param listener
* @see java.beans.PropertyChangeSupport#removePropertyChangeListener(java.lang.String, java.beans.PropertyChangeListener)
*/
public void removePropertyChangeListener(String propertyName,
PropertyChangeListener listener) {
pcs.removePropertyChangeListener(propertyName, listener);
}
/**
* @return the bossFight
*/
public boolean isBossFight() {
return bossFight;
}
/**
* @param bossFight the bossFight to set
*/
public void setBossFight(boolean bossFight) {
pcs.firePropertyChange("bossFight", this.bossFight, bossFight);
this.bossFight = bossFight;
}
public void setSatelliteView(boolean satelliteView) {
if(!(tr.getRunState() instanceof OverworldState)&&satelliteView)
throw new IllegalArgumentException("Cannot activate satellite view while runState is "+tr.getRunState().getClass().getSimpleName());
if(satelliteView && ((TVF3Game)tr.getGame()).isPaused())
throw new IllegalArgumentException("Cannot activate satellite view while paused.");
pcs.firePropertyChange(SATELLITE_VIEW, this.satelliteView, satelliteView);
if(satelliteView!=this.satelliteView){
final Game game = ((TVF3Game)tr.getGame());
final Camera cam = tr.mainRenderer.get().getCamera();
if(satelliteView){//Switched on
tr.getThreadManager().setPaused(true);
World.relevanceExecutor.submit(new Runnable(){
@Override
public void run() {
tr.getDefaultGrid().removeBranch(((TVF3Game)game).getNavSystem());
tr.getDefaultGrid().removeBranch(((TVF3Game)game).getHUDSystem());
}});
cam.setFogEnabled(false);
cam.probeForBehavior(MatchPosition.class).setEnable(false);
cam.probeForBehavior(MatchDirection.class).setEnable(false);
final Vector3D pPos = new Vector3D(((TVF3Game)game).getPlayer().getPosition());
final Vector3D pHeading = ((TVF3Game)tr.getGame()).getPlayer().getHeading();
cam.setPosition(new Vector3D(pPos.getX(),TR.visibilityDiameterInMapSquares*TR.mapSquareSize*.65,pPos.getZ()));
cam.setHeading(Vector3D.MINUS_J);
cam.setTop(new Vector3D(pHeading.getX(),.0000000001,pHeading.getZ()).normalize());
((TVF3Game)tr.getGame()).getSatDashboard().setVisible(true);
}else{//Switched off
tr.getThreadManager().setPaused(false);
World.relevanceExecutor.submit(new Runnable(){
@Override
public void run() {
((TVF3Game)tr.getGame()).getNavSystem().activate();
tr.getDefaultGrid().addBranch(((TVF3Game)game).getNavSystem());
tr.getDefaultGrid().addBranch(((TVF3Game)game).getHUDSystem());
}});
cam.setFogEnabled(true);
cam.probeForBehavior(MatchPosition.class).setEnable(true);
cam.probeForBehavior(MatchDirection.class).setEnable(true);
((TVF3Game)tr.getGame()).getSatDashboard().setVisible(false);
}//end !satelliteView
}//end if(change)
this.satelliteView=satelliteView;
}
/**
* @return the satelliteView
*/
public boolean isSatelliteView() {
System.out.println("isSatelliteView="+satelliteView);
return satelliteView;
}
public Tunnel getCurrentTunnel() {
if(!(tr.getRunState() instanceof TunnelState))return null;
return currentTunnel;
}
/**
*
* @param newTunnel
* @return The old tunnel, or null if none.
* @since Jan 23, 2016
*/
public Tunnel setCurrentTunnel(final Tunnel newTunnel){
final Tunnel oldTunnel = getCurrentTunnel();
this.currentTunnel = newTunnel;
World.relevanceExecutor.submit(new Runnable(){
@Override
public void run() {
tunnelGrid.removeAll();
//if(oldTunnel != null)
// tunnelGrid.removeBranch(oldTunnel);
tunnelGrid.addBranch(newTunnel);
}});
return oldTunnel;
}//end setCurrentTunnel
public Game getGame() {
return game;
}
public void destruct() {
Features.destruct(this);
}
public String getLevelName() {
return levelName;
}
public int getGroundTargetsDestroyed() {
return groundTargetsDestroyed;
}
public void setGroundTargetsDestroyed(int groundTargetsDestroyed) {
this.groundTargetsDestroyed = groundTargetsDestroyed;
}
public int getAirTargetsDestroyed() {
return airTargetsDestroyed;
}
public void setAirTargetsDestroyed(int airTargetsDestroyed) {
this.airTargetsDestroyed = airTargetsDestroyed;
}
public int getFoliageDestroyed() {
return foliageDestroyed;
}
public void setFoliageDestroyed(int foliageDestroyed) {
this.foliageDestroyed = foliageDestroyed;
}
public Collection<Tunnel> getTunnelsRemaining() {
return Collections.unmodifiableCollection(tunnelsRemaining);
}
public int getTotalNumTunnels() {
return totalNumTunnels;
}
public void setTotalNumTunnels(int totalNumTunnels) {
this.totalNumTunnels = totalNumTunnels;
}
public NAVObjective getCurrentNavTarget() {
return currentNavTarget;
}
public Mission setCurrentNavTarget(NAVObjective newTarget) {
final NAVObjective oldTarget = this.currentNavTarget;
this.currentNavTarget = newTarget;
pcs.firePropertyChange(CURRENT_NAV_TARGET, oldTarget, newTarget);
return this;
}
public void setDisplayMode(Object [] newMode){//TODO: Refactor this to follow tr's run state instead
displayHandler.setDisplayMode(newMode);
}
public RenderableSpacePartitioningGrid getPartitioningGrid() {
return partitioningGrid;
}
}// end Mission
|
package org.neo4j.remote;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.Expansion;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.NotFoundException;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.RelationshipExpander;
import org.neo4j.graphdb.RelationshipType;
import org.neo4j.graphdb.ReturnableEvaluator;
import org.neo4j.graphdb.StopEvaluator;
import org.neo4j.graphdb.TraversalPosition;
import org.neo4j.graphdb.Traverser;
import org.neo4j.graphdb.Traverser.Order;
import org.neo4j.kernel.TraversalFactory;
final class RemoteNode extends RemotePropertyContainer implements Node
{
RemoteNode( RemoteGraphDbEngine txService, long id )
{
super( txService, id );
}
@Override
public int hashCode()
{
return ( int ) id;
}
@Override
public boolean equals( Object obj )
{
if ( obj instanceof RemoteNode )
{
RemoteNode node = ( RemoteNode ) obj;
return node.id == id && node.engine.equals( engine );
}
else
{
return false;
}
}
@Override
public String toString()
{
return "Node[" + id + "]";
}
public Relationship createRelationshipTo( Node otherNode,
RelationshipType type )
{
if ( otherNode instanceof RemoteNode )
{
RemoteNode other = ( RemoteNode ) otherNode;
if ( other.engine.equals( engine ) )
{
return engine.current().createRelationship( type, this, other );
}
}
throw new IllegalArgumentException(
"Other node not in same node space." );
}
public void delete()
{
engine.current().deleteNode( this );
}
public long getId()
{
return id;
}
public Iterable<Relationship> getRelationships()
{
return engine.current().getRelationships( this, Direction.BOTH,
( ( RelationshipType[] ) null ) );
}
public Iterable<Relationship> getRelationships( RelationshipType... types )
{
return engine.current().getRelationships( this, Direction.BOTH,
            ( types == null ? new RelationshipType[ 0 ] : types ) );
}
public Iterable<Relationship> getRelationships( Direction dir )
{
return engine.current().getRelationships( this, dir,
( ( RelationshipType[] ) null ) );
}
public Iterable<Relationship> getRelationships( RelationshipType type,
Direction dir )
{
RelationshipType[] types = { type };
return engine.current().getRelationships( this, dir, types );
}
public Relationship getSingleRelationship( RelationshipType type,
Direction dir )
{
Iterator<Relationship> relations = getRelationships( type, dir )
.iterator();
if ( !relations.hasNext() )
{
return null;
}
else
{
Relationship relation = relations.next();
if ( relations.hasNext() )
{
throw new NotFoundException( "More then one relationship["
+ type + "] found" );
}
return relation;
}
}
public boolean hasRelationship()
{
return getRelationships().iterator().hasNext();
}
public boolean hasRelationship( RelationshipType... types )
{
return getRelationships( types ).iterator().hasNext();
}
public boolean hasRelationship( Direction dir )
{
return getRelationships( dir ).iterator().hasNext();
}
public boolean hasRelationship( RelationshipType type, Direction dir )
{
return getRelationships( type, dir ).iterator().hasNext();
}
public Expansion<Relationship> expandAll()
{
return TraversalFactory.expanderForAllTypes().expand( this );
}
public Expansion<Relationship> expand( RelationshipType type )
{
return expand( type, Direction.BOTH );
}
public Expansion<Relationship> expand( RelationshipType type,
Direction direction )
{
return TraversalFactory.expanderForTypes( type, direction ).expand(
this );
}
public Expansion<Relationship> expand( RelationshipExpander expander )
{
return TraversalFactory.expander( expander ).expand( this );
}
/*
* NOTE: traversers are harder to build up remotely. Maybe traversal should
* be done on the client side, using the regular primitive accessors.
*/
public Traverser traverse( Order traversalOrder,
StopEvaluator stopEvaluator, ReturnableEvaluator returnableEvaluator,
RelationshipType relationshipType, Direction direction )
{
return traversal( traversalOrder, stopEvaluator, returnableEvaluator,
new RelationshipType[] { relationshipType },
new Direction[] { direction } );
}
public Traverser traverse( Order traversalOrder,
StopEvaluator stopEvaluator, ReturnableEvaluator returnableEvaluator,
RelationshipType firstRelationshipType, Direction firstDirection,
RelationshipType secondRelationshipType, Direction secondDirection )
{
return traversal( traversalOrder, stopEvaluator, returnableEvaluator,
new RelationshipType[] { firstRelationshipType,
secondRelationshipType }, new Direction[] { firstDirection,
secondDirection } );
}
public Traverser traverse( Order traversalOrder,
StopEvaluator stopEvaluator, ReturnableEvaluator returnableEvaluator,
Object... relationshipTypesAndDirections )
{
if ( relationshipTypesAndDirections.length % 2 != 0 )
{
throw new IllegalArgumentException(
"Not as many directions as relationship types." );
}
RelationshipType[] relationshipTypes = new RelationshipType[ relationshipTypesAndDirections.length / 2 ];
Direction[] directions = new Direction[ relationshipTypesAndDirections.length / 2 ];
for ( int i = 0, j = 0; j < directions.length; i += 2, j++ )
{
try
{
relationshipTypes[ j ] = ( RelationshipType ) relationshipTypesAndDirections[ i ];
}
catch ( ClassCastException ex )
{
throw new IllegalArgumentException( "Not a RelationshipType: "
+ relationshipTypesAndDirections[ i ] );
}
try
{
directions[ j ] = ( Direction ) relationshipTypesAndDirections[ i + 1 ];
}
catch ( ClassCastException ex )
{
throw new IllegalArgumentException( "Not a Direction: "
+ relationshipTypesAndDirections[ i + 1 ] );
}
}
return traversal( traversalOrder, stopEvaluator, returnableEvaluator,
relationshipTypes, directions );
}
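    /*
     * Illustrative only (not part of the original class): the varargs form above
     * expects alternating type/direction pairs, e.g.
     *
     *   node.traverse( Order.BREADTH_FIRST, StopEvaluator.END_OF_GRAPH,
     *       ReturnableEvaluator.ALL_BUT_START_NODE,
     *       MyTypes.KNOWS, Direction.OUTGOING,
     *       MyTypes.LIKES, Direction.BOTH );
     *
     * MyTypes is a hypothetical RelationshipType enum used here only for the sketch.
     */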
@Override
Object getContainerProperty( String key )
{
return engine.current().getProperty( this, key );
}
public Iterable<String> getPropertyKeys()
{
return engine.current().getPropertyKeys( this );
}
public boolean hasProperty( String key )
{
return engine.current().hasProperty( this, key );
}
public Object removeProperty( String key )
{
return engine.current().removeProperty( this, key );
}
public void setProperty( String key, Object value )
{
engine.current().setProperty( this, key, value );
}
private Traverser traversal( Order traversalOrder,
StopEvaluator stopEvaluator, ReturnableEvaluator returnableEvaluator,
RelationshipType[] relationshipTypes, Direction[] directions )
{
final Iterable<TraversalPosition> positions = engine.current()
.traverse( this, traversalOrder, stopEvaluator,
returnableEvaluator, relationshipTypes, directions );
return new Traverser()
{
Iterator<TraversalPosition> iter = positions.iterator();
TraversalPosition last = null;
TraversalPosition current = null;
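            // 'current' holds a position prefetched by hasNext(), while 'last'
            // remembers the position most recently returned by next().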
public TraversalPosition currentPosition()
{
return last;
}
public Collection<Node> getAllNodes()
{
Collection<Node> result = new LinkedList<Node>();
for ( Node node : this )
{
result.add( node );
}
return result;
}
public Iterator<Node> iterator()
{
return new Iterator<Node>()
{
public boolean hasNext()
{
if ( current != null )
{
return true;
}
else if ( iter.hasNext() )
{
current = iter.next();
return true;
}
else
{
return false;
}
}
public Node next()
{
if ( hasNext() )
{
last = current;
current = null;
return last.currentNode();
}
else
{
throw new NoSuchElementException();
}
}
public void remove()
{
throw new UnsupportedOperationException();
}
};
}
};
}
}
|
package org.scijava.util;
import java.io.File;
import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Useful methods for working with {@link Class} objects and primitive types.
*
* @author Curtis Rueden
*/
public final class ClassUtils {
private ClassUtils() {
// prevent instantiation of utility class
}
private static final FieldCache fieldCache = new FieldCache();
private static final MethodCache methodCache = new MethodCache();
// -- Class loading, querying and reflection --
/**
 * Loads the class with the given name, using the current thread's context
 * class loader, returning null if it cannot be loaded.
*
* @see #loadClass(String, ClassLoader)
*/
public static Class<?> loadClass(final String className) {
return loadClass(className, null);
}
/**
 * Loads the class with the given name, using the specified
 * {@link ClassLoader}, returning null if it cannot be loaded.
* <p>
* This method is capable of parsing several different class name syntaxes.
* In particular, array classes (including primitives) represented using
* either square brackets or internal Java array name syntax are supported.
* Examples:
* </p>
* <ul>
* <li>{@code boolean} is loaded as {@code boolean.class}</li>
* <li>{@code Z} is loaded as {@code boolean.class}</li>
* <li>{@code double[]} is loaded as {@code double[].class}</li>
 * <li>{@code string[]} is loaded as {@code java.lang.String[].class}</li>
* <li>{@code [F} is loaded as {@code float[].class}</li>
* </ul>
*
* @param name The name of the class to load.
* @param classLoader The class loader with which to load the class; if null,
* the current thread's context class loader will be used.
*/
public static Class<?> loadClass(final String name,
final ClassLoader classLoader)
{
// handle primitive types
if (name.equals("Z") || name.equals("boolean")) return boolean.class;
if (name.equals("B") || name.equals("byte")) return byte.class;
if (name.equals("C") || name.equals("char")) return char.class;
if (name.equals("D") || name.equals("double")) return double.class;
if (name.equals("F") || name.equals("float")) return float.class;
if (name.equals("I") || name.equals("int")) return int.class;
if (name.equals("J") || name.equals("long")) return long.class;
if (name.equals("S") || name.equals("short")) return short.class;
if (name.equals("V") || name.equals("void")) return void.class;
// handle built-in class shortcuts
final String className;
if (name.equals("string")) className = "java.lang.String";
else className = name;
// handle source style arrays (e.g.: "java.lang.String[]")
if (name.endsWith("[]")) {
final String elementClassName = name.substring(0, name.length() - 2);
return getArrayClass(loadClass(elementClassName, classLoader));
}
// handle non-primitive internal arrays (e.g.: "[Ljava.lang.String;")
if (name.startsWith("[L") && name.endsWith(";")) {
final String elementClassName = name.substring(2, name.length() - 1);
return getArrayClass(loadClass(elementClassName, classLoader));
}
// handle other internal arrays (e.g.: "[I", "[[I", "[[Ljava.lang.String;")
if (name.startsWith("[")) {
final String elementClassName = name.substring(1);
return getArrayClass(loadClass(elementClassName, classLoader));
}
// load the class!
try {
final ClassLoader cl =
classLoader == null ? Thread.currentThread().getContextClassLoader()
: classLoader;
return cl.loadClass(className);
}
catch (final ClassNotFoundException e) {
return null;
}
}
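	// Illustrative only (not part of the original API): a minimal sketch of the
	// name syntaxes documented above, written as assertions rather than tests.
	private static void loadClassExamples() {
		assert loadClass("Z") == boolean.class;         // internal primitive name
		assert loadClass("double[]") == double[].class; // source-style array
		assert loadClass("[F") == float[].class;        // internal array name
		assert loadClass("string") == String.class;     // built-in shortcut
	}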
/**
* Gets the array class corresponding to the given element type.
* <p>
* For example, {@code getArrayClass(double.class)} returns
* {@code double[].class}.
* </p>
*/
public static Class<?> getArrayClass(final Class<?> elementClass) {
if (elementClass == null) return null;
// NB: It appears the reflection API has no built-in way to do this.
// So unfortunately, we must allocate a new object and then inspect it.
try {
return Array.newInstance(elementClass, 0).getClass();
}
catch (final IllegalArgumentException exc) {
return null;
}
}
/** Checks whether a class with the given name exists. */
public static boolean hasClass(final String className) {
return hasClass(className, null);
}
/** Checks whether a class with the given name exists. */
public static boolean hasClass(final String className,
final ClassLoader classLoader)
{
return loadClass(className, classLoader) != null;
}
/**
* Gets the base location of the given class.
* <p>
* If the class is directly on the file system (e.g.,
* "/path/to/my/package/MyClass.class") then it will return the base directory
* (e.g., "/path/to").
* </p>
* <p>
* If the class is within a JAR file (e.g.,
* "/path/to/my-jar.jar!/my/package/MyClass.class") then it will return the
* path to the JAR (e.g., "/path/to/my-jar.jar").
* </p>
*
* @param className The name of the class whose location is desired.
* @see FileUtils#urlToFile(URL) to convert the result to a {@link File}.
*/
public static URL getLocation(final String className) {
return getLocation(className, null);
}
/**
* Gets the base location of the given class.
* <p>
* If the class is directly on the file system (e.g.,
* "/path/to/my/package/MyClass.class") then it will return the base directory
* (e.g., "/path/to").
* </p>
* <p>
* If the class is within a JAR file (e.g.,
* "/path/to/my-jar.jar!/my/package/MyClass.class") then it will return the
* path to the JAR (e.g., "/path/to/my-jar.jar").
* </p>
*
* @param className The name of the class whose location is desired.
* @param classLoader The class loader to use when loading the class.
* @see FileUtils#urlToFile(URL) to convert the result to a {@link File}.
*/
public static URL getLocation(final String className,
final ClassLoader classLoader)
{
final Class<?> c = loadClass(className, classLoader);
return getLocation(c);
}
/**
* Gets the base location of the given class.
* <p>
* If the class is directly on the file system (e.g.,
* "/path/to/my/package/MyClass.class") then it will return the base directory
* (e.g., "file:/path/to").
* </p>
* <p>
* If the class is within a JAR file (e.g.,
* "/path/to/my-jar.jar!/my/package/MyClass.class") then it will return the
* path to the JAR (e.g., "file:/path/to/my-jar.jar").
* </p>
*
* @param c The class whose location is desired.
* @see FileUtils#urlToFile(URL) to convert the result to a {@link File}.
*/
public static URL getLocation(final Class<?> c) {
if (c == null) return null; // could not load the class
// try the easy way first
try {
final URL codeSourceLocation =
c.getProtectionDomain().getCodeSource().getLocation();
if (codeSourceLocation != null) return codeSourceLocation;
}
catch (final SecurityException e) {
// NB: Cannot access protection domain.
}
catch (final NullPointerException e) {
// NB: Protection domain or code source is null.
}
// NB: The easy way failed, so we try the hard way. We ask for the class
// itself as a resource, then strip the class's path from the URL string,
// leaving the base path.
// get the class's raw resource path
final URL classResource = c.getResource(c.getSimpleName() + ".class");
if (classResource == null) return null; // cannot find class resource
final String url = classResource.toString();
final String suffix = c.getCanonicalName().replace('.', '/') + ".class";
if (!url.endsWith(suffix)) return null; // weird URL
// strip the class's path from the URL string
final String base = url.substring(0, url.length() - suffix.length());
String path = base;
// remove the "jar:" prefix and "!/" suffix, if present
if (path.startsWith("jar:")) path = path.substring(4, path.length() - 2);
try {
return new URL(path);
}
catch (final MalformedURLException e) {
e.printStackTrace();
return null;
}
}
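	// Illustrative only (not part of the original API): a hedged sketch of typical
	// usage, relying on the FileUtils#urlToFile(URL) helper referenced in the
	// javadoc above to turn the base URL into a File.
	private static File locationExample() {
		final URL location = getLocation(ClassUtils.class); // e.g. "file:/path/to/my-jar.jar"
		return FileUtils.urlToFile(location);
	}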
/**
* Gets the given class's {@link Method}s marked with the annotation of the
* specified class.
* <p>
* Unlike {@link Class#getMethods()}, the result will include any non-public
* methods, including methods defined in supertypes of the given class.
* </p>
*
* @param c The class to scan for annotated methods.
* @param annotationClass The type of annotation for which to scan.
* @return A new list containing all methods with the requested annotation.
*/
public static <A extends Annotation> List<Method> getAnnotatedMethods(
final Class<?> c, final Class<A> annotationClass)
{
List<Method> methods = methodCache.getList(c, annotationClass);
if (methods == null) {
methods = new ArrayList<Method>();
getAnnotatedMethods(c, annotationClass, methods);
}
return methods;
}
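	// Illustrative only (not part of the original API): a hedged sketch of the
	// annotation scan. java.lang.Deprecated stands in for a SciJava annotation
	// such as org.scijava.event.EventHandler mentioned in cacheAnnotatedObjects.
	private static void annotatedMethodsExample() {
		final List<Method> deprecated =
			getAnnotatedMethods(ClassUtils.class, Deprecated.class);
		for (final Method m : deprecated) {
			System.out.println("@Deprecated method: " + m.getName());
		}
	}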
/**
* Gets the given class's {@link Method}s marked with the annotation of the
* specified class.
* <p>
* Unlike {@link Class#getMethods()}, the result will include any non-public
* methods, including methods defined in supertypes of the given class.
* </p>
*
* @param c The class to scan for annotated methods.
* @param annotationClass The type of annotation for which to scan.
* @param methods The list to which matching methods will be added.
*/
public static <A extends Annotation> void
getAnnotatedMethods(final Class<?> c, final Class<A> annotationClass,
final List<Method> methods)
{
List<Method> cachedMethods = methodCache.getList(c, annotationClass);
if (cachedMethods == null) {
Query query = new Query();
query.put(annotationClass, Method.class);
cacheAnnotatedObjects(c, query);
cachedMethods = methodCache.getList(c, annotationClass);
}
methods.addAll(cachedMethods);
}
/**
* Gets the given class's {@link Field}s marked with the annotation of the
* specified class.
* <p>
* Unlike {@link Class#getFields()}, the result will include any non-public
* fields, including fields defined in supertypes of the given class.
* </p>
*
* @param c The class to scan for annotated fields.
* @param annotationClass The type of annotation for which to scan.
* @return A new list containing all fields with the requested annotation.
*/
public static <A extends Annotation> List<Field> getAnnotatedFields(
final Class<?> c, final Class<A> annotationClass)
{
List<Field> fields = fieldCache.getList(c, annotationClass);
if (fields == null) {
fields = new ArrayList<Field>();
getAnnotatedFields(c, annotationClass, fields);
}
return fields;
}
/**
* Gets the given class's {@link Field}s marked with the annotation of the
* specified class.
* <p>
* Unlike {@link Class#getFields()}, the result will include any non-public
* fields, including fields defined in supertypes of the given class.
* </p>
*
* @param c The class to scan for annotated fields.
* @param annotationClass The type of annotation for which to scan.
* @param fields The list to which matching fields will be added.
*/
public static <A extends Annotation> void getAnnotatedFields(
final Class<?> c, final Class<A> annotationClass, final List<Field> fields)
{
List<Field> cachedFields = fieldCache.getList(c, annotationClass);
if (cachedFields == null) {
Query query = new Query();
query.put(annotationClass, Field.class);
cacheAnnotatedObjects(c, query);
cachedFields = fieldCache.getList(c, annotationClass);
}
fields.addAll(cachedFields);
}
/**
* This method scans the provided class, its superclasses and interfaces for
 * all supported {@link Annotation} : {@link AnnotatedElement} pairs.
* These are then cached to remove the need for future queries.
* <p>
* By combining multiple {@code Annotation : AnnotatedObject} pairs in one
* query, we can limit the number of times a class's superclass and interface
* hierarchy are traversed.
* </p>
*
* @param scannedClass Class to scan
 * @param query Pairs of {@link Annotation} and {@link AnnotatedElement} types to
* discover.
*/
public static void cacheAnnotatedObjects(final Class<?> scannedClass,
final Query query)
{
// NB: The java.lang.Object class does not have any annotated methods.
// And even if it did, it definitely does not have any methods annotated
// with SciJava annotations such as org.scijava.event.EventHandler, which
// are the main sorts of methods we are interested in.
if (scannedClass == null || scannedClass == Object.class) return;
// Initialize step - determine which queries are solved
final Set<Class<? extends Annotation>> keysToDrop =
new HashSet<Class<? extends Annotation>>();
for (final Class<? extends Annotation> annotationClass : query.keySet()) {
// Fields
if (fieldCache.getList(scannedClass, annotationClass) != null) {
keysToDrop.add(annotationClass);
}
else if (methodCache.getList(scannedClass, annotationClass) != null) {
keysToDrop.add(annotationClass);
}
}
// Clean up resolved keys
for (final Class<? extends Annotation> key : keysToDrop) {
query.remove(key);
}
// Stop now if we know all requested information is cached
if (query.isEmpty()) return;
final List<Class<?>> inherited = new ArrayList<Class<?>>();
// cache all parents recursively
final Class<?> superClass = scannedClass.getSuperclass();
if (superClass != null) {
// Recursive step
cacheAnnotatedObjects(
superClass, new Query(query));
inherited.add(superClass);
}
// cache all interfaces recursively
for (final Class<?> ifaceClass : scannedClass.getInterfaces()) {
// Recursive step
cacheAnnotatedObjects(
ifaceClass,
new Query(query));
inherited.add(ifaceClass);
}
// Populate supported objects for scanned class
for (final Class<? extends Annotation> annotationClass : query.keySet()) {
final Class<? extends AnnotatedElement> objectClass =
query.get(annotationClass);
// Methods
if (Method.class.isAssignableFrom(objectClass)) {
populateCache(scannedClass, inherited, annotationClass, methodCache,
scannedClass.getDeclaredMethods());
}
// Fields
else if (Field.class.isAssignableFrom(objectClass)) {
populateCache(scannedClass, inherited, annotationClass, fieldCache,
scannedClass.getDeclaredFields());
}
}
}
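	// Illustrative only (not part of the original API): a hedged sketch of priming
	// the cache ahead of time, assuming Query behaves as the map-like type used
	// above (a no-arg constructor plus put(annotationClass, elementClass)).
	private static void cacheExample(final Class<?> c) {
		final Query query = new Query();
		query.put(Deprecated.class, Method.class); // request @Deprecated methods
		cacheAnnotatedObjects(c, query);
		// later getAnnotatedMethods(c, Deprecated.class) calls are then served from the cache
	}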
private static <T extends AnnotatedElement> void
populateCache(final Class<?> scannedClass, final List<Class<?>> inherited,
final Class<? extends Annotation> annotationClass,
CacheMap<T> cacheMap, T[] declaredElements)
{
// Add inherited elements
for (final Class<?> inheritedClass : inherited) {
final List<T> annotatedElements =
cacheMap.getList(inheritedClass, annotationClass);
if (annotatedElements != null && !annotatedElements.isEmpty()) {
final List<T> scannedElements =
cacheMap.makeList(scannedClass, annotationClass);
scannedElements.addAll(annotatedElements);
}
}
// Add declared elements
if (declaredElements != null && declaredElements.length > 0) {
List<T> scannedElements = null;
for (final T t : declaredElements) {
if (t.getAnnotation(annotationClass) != null) {
if (scannedElements == null) {
scannedElements = cacheMap.makeList(scannedClass, annotationClass);
}
scannedElements.add(t);
}
}
}
// If there were no elements for this query, map an empty
// list to mark the query complete
if (cacheMap.getList(scannedClass, annotationClass) == null) {
cacheMap.putList(scannedClass, annotationClass, Collections.<T>emptyList());
}
}
/**
* Gets the specified field of the given class, or null if it does not exist.
*/
public static Field getField(final String className, final String fieldName) {
return getField(loadClass(className), fieldName);
}
/**
* Gets the specified field of the given class, or null if it does not exist.
*/
public static Field getField(final Class<?> c, final String fieldName) {
if (c == null) return null;
try {
return c.getDeclaredField(fieldName);
}
catch (final NoSuchFieldException e) {
return null;
}
}
/**
* Gets the given field's value of the specified object instance, or null if
* the value cannot be obtained.
*/
public static Object getValue(final Field field, final Object instance) {
try {
field.setAccessible(true);
return field.get(instance);
}
catch (final IllegalAccessException e) {
return null;
}
}
// FIXME: Move to ConvertService and deprecate this signature.
public static void setValue(final Field field, final Object instance,
final Object value)
{
try {
field.setAccessible(true);
final Object compatibleValue;
if (value == null || field.getType().isInstance(value)) {
// the given value is compatible with the field
compatibleValue = value;
}
else {
// the given value needs to be converted to a compatible type
final Type fieldType =
GenericUtils.getFieldType(field, instance.getClass());
compatibleValue = ConversionUtils.convert(value, fieldType);
}
field.set(instance, compatibleValue);
}
catch (final IllegalAccessException e) {
throw new IllegalArgumentException("No access to field: " +
field.getName(), e);
}
}
// -- Type querying --
public static boolean isBoolean(final Class<?> type) {
return type == boolean.class || Boolean.class.isAssignableFrom(type);
}
public static boolean isByte(final Class<?> type) {
return type == byte.class || Byte.class.isAssignableFrom(type);
}
public static boolean isCharacter(final Class<?> type) {
return type == char.class || Character.class.isAssignableFrom(type);
}
public static boolean isDouble(final Class<?> type) {
return type == double.class || Double.class.isAssignableFrom(type);
}
public static boolean isFloat(final Class<?> type) {
return type == float.class || Float.class.isAssignableFrom(type);
}
public static boolean isInteger(final Class<?> type) {
return type == int.class || Integer.class.isAssignableFrom(type);
}
public static boolean isLong(final Class<?> type) {
return type == long.class || Long.class.isAssignableFrom(type);
}
public static boolean isShort(final Class<?> type) {
return type == short.class || Short.class.isAssignableFrom(type);
}
public static boolean isNumber(final Class<?> type) {
return Number.class.isAssignableFrom(type) || type == byte.class ||
type == double.class || type == float.class || type == int.class ||
type == long.class || type == short.class;
}
public static boolean isText(final Class<?> type) {
return String.class.isAssignableFrom(type) || isCharacter(type);
}
// -- Comparison --
/**
* Compares two {@link Class} objects using their fully qualified names.
* <p>
* Note: this method provides a natural ordering that may be inconsistent with
* equals. Specifically, two unequal classes may return 0 when compared in
* this fashion if they represent the same class loaded using two different
* {@link ClassLoader}s. Hence, if this method is used as a basis for
* implementing {@link Comparable#compareTo} or
* {@link java.util.Comparator#compare}, that implementation may want to
* impose logic beyond that of this method, for breaking ties, if a total
* ordering consistent with equals is always required.
* </p>
*
* @see org.scijava.Priority#compare(org.scijava.Prioritized,
* org.scijava.Prioritized)
*/
public static int compare(final Class<?> c1, final Class<?> c2) {
if (c1 == c2) return 0;
final String name1 = c1 == null ? null : c1.getName();
final String name2 = c2 == null ? null : c2.getName();
return MiscUtils.compare(name1, name2);
}
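	// Illustrative only (not part of the original API): a hedged sketch of wrapping
	// compare(...) in a Comparator, subject to the class-loader caveat noted above.
	private static final java.util.Comparator<Class<?>> BY_NAME =
		new java.util.Comparator<Class<?>>() {
			@Override
			public int compare(final Class<?> c1, final Class<?> c2) {
				return ClassUtils.compare(c1, c2);
			}
		};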
// -- Helper methods --
// -- Deprecated methods --
/** @deprecated use {@link ConversionUtils#convert(Object, Class)} */
@Deprecated
public static <T> T convert(final Object value, final Class<T> type) {
return ConversionUtils.convert(value, type);
}
/** @deprecated use {@link ConversionUtils#canConvert(Class, Class)} */
@Deprecated
public static boolean canConvert(final Class<?> c, final Class<?> type) {
return ConversionUtils.canConvert(c, type);
}
/** @deprecated use {@link ConversionUtils#canConvert(Object, Class)} */
@Deprecated
public static boolean canConvert(final Object value, final Class<?> type) {
return ConversionUtils.canConvert(value, type);
}
/** @deprecated use {@link ConversionUtils#cast(Object, Class)} */
@Deprecated
public static <T> T cast(final Object obj, final Class<T> type) {
return ConversionUtils.cast(obj, type);
}
/** @deprecated use {@link ConversionUtils#canCast(Class, Class)} */
@Deprecated
public static boolean canCast(final Class<?> c, final Class<?> type) {
return ConversionUtils.canCast(c, type);
}
/** @deprecated use {@link ConversionUtils#canCast(Object, Class)} */
@Deprecated
public static boolean canCast(final Object obj, final Class<?> type) {
return ConversionUtils.canCast(obj, type);
}
/** @deprecated use {@link ConversionUtils#getNonprimitiveType(Class)} */
@Deprecated
public static <T> Class<T> getNonprimitiveType(final Class<T> type) {
return ConversionUtils.getNonprimitiveType(type);
}
/** @deprecated use {@link ConversionUtils#getNullValue(Class)} */
@Deprecated
public static <T> T getNullValue(final Class<T> type) {
return ConversionUtils.getNullValue(type);
}
/** @deprecated use {@link GenericUtils#getFieldClasses(Field, Class)} */
@Deprecated
public static List<Class<?>> getTypes(final Field field, final Class<?> type)
{
return GenericUtils.getFieldClasses(field, type);
}
/** @deprecated use {@link GenericUtils#getFieldType(Field, Class)} */
@Deprecated
public static Type getGenericType(final Field field, final Class<?> type) {
return GenericUtils.getFieldType(field, type);
}
/**
* Convenience class to further type narrow {@link CacheMap} to {@link Field}s.
*/
private static class FieldCache extends CacheMap<Field> { }
/**
* Convenience class to further type narrow {@link CacheMap} to {@link Method}s.
*/
private static class MethodCache extends CacheMap<Method> { }
/**
* Convenience class for {@code Map > Map > List} hierarchy. Cleans up generics
* and contains helper methods for traversing the two map levels.
*
* @param <T> - {@link AnnotatedElement} {@link List} ultimately referenced by
* this map
*/
private static class CacheMap<T extends AnnotatedElement> extends
HashMap<Class<?>, Map<Class<? extends Annotation>, List<T>>>
{
/**
* @param c Base class
* @param annotationClass Annotation type
		 * @return Cached list of elements in the base class with the specified
		 *         annotation, or null if a cached list does not exist.
*/
public List<T> getList(final Class<?> c,
final Class<? extends Annotation> annotationClass)
{
List<T> annotatedFields = null;
Map<Class<? extends Annotation>, List<T>> annotationTypes = get(c);
if (annotationTypes != null) {
annotatedFields = annotationTypes.get(annotationClass);
}
return annotatedFields;
}
/**
		 * Caches the given list of annotated elements under the specified base
		 * class and annotation type.
		 *
		 * @param c Base class
		 * @param annotationClass Annotation type
		 * @param annotatedMethods List of annotated elements to cache
*/
public void putList(final Class<?> c,
final Class<? extends Annotation> annotationClass,
List<T> annotatedMethods)
{
Map<Class<? extends Annotation>, List<T>> map = get(c);
if (map == null) {
map = new HashMap<Class<? extends Annotation>, List<T>>();
put(c, map);
}
map.put(annotationClass, annotatedMethods);
}
/**
		 * As {@link #getList(Class, Class)} but ensures a list is created and
		 * mapped, if it doesn't already exist.
		 *
		 * @param c Base class
		 * @param annotationClass Annotation type
		 * @return Cached list of elements in the base class with the specified
		 *         annotation.
*/
public List<T> makeList(final Class<?> c,
final Class<? extends Annotation> annotationClass)
{
List<T> methods = getList(c, annotationClass);
if (methods == null) {
methods = new ArrayList<T>();
putList(c, annotationClass, methods);
}
return methods;
}
}
}
|
package org.wattdepot.dashboard;
import com.mongodb.BasicDBObject;
import com.mongodb.BulkWriteOperation;
import com.mongodb.BulkWriteResult;
import com.mongodb.Cursor;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;
import com.mongodb.ParallelScanOptions;
import com.mongodb.ServerAddress;
import org.wattdepot.client.http.api.WattDepotClient;
import org.wattdepot.common.domainmodel.Depository;
import org.wattdepot.common.domainmodel.DepositoryList;
import org.wattdepot.common.domainmodel.InterpolatedValue;
import org.wattdepot.common.domainmodel.InterpolatedValueList;
import org.wattdepot.common.domainmodel.Sensor;
import org.wattdepot.common.domainmodel.SensorGroup;
import org.wattdepot.common.domainmodel.SensorGroupList;
import org.wattdepot.common.domainmodel.SensorList;
import org.wattdepot.common.exception.BadCredentialException;
import org.wattdepot.common.exception.IdNotFoundException;
import org.wattdepot.common.exception.NoMeasurementException;
import org.wattdepot.common.util.DateConvert;
import javax.xml.datatype.DatatypeConfigurationException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* Hello world!
*/
public class App {
public static void main(String[] args) {
try {
MongoClient mongoClient = new MongoClient("localhost", 27017);
DB db = mongoClient.getDB("hale_aloha");
DBCollection powerCollection = db.getCollection("power");
DBCollection hourlyCollection = db.getCollection("hourly");
DBCollection dailyCollection = db.getCollection("daily");
DBCollection statusCollection = db.getCollection("status");
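            // Refresh the aggregate values via ValueFactory, then mirror them into the
            // corresponding Mongo collections (hourly energy, daily energy, sensor status).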
ValueFactory factory = ValueFactory.getInstance();
factory.updateHourlyEnergy();
factory.updateMongoEnergyLast24Hours(hourlyCollection);
factory.updateDailyEnergy();
factory.updateMongoEnergyDailyData(dailyCollection);
factory.updateSensorStatus();
factory.updateMongoSensorStatus(statusCollection);
// factory.foo();
}
catch (UnknownHostException e) {
e.printStackTrace();
}
}
}
|
package pulley;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import pulley.actions.A0;
import pulley.actions.Actions;
public class SchedulerComputation implements Scheduler {
private final ScheduledExecutorService executor = Executors.newScheduledThreadPool(Runtime
.getRuntime().availableProcessors());
@Override
public void schedule(A0 action) {
executor.execute(Actions.toRunnable(action));
}
@Override
public void schedule(A0 action, long delay, TimeUnit unit) {
executor.schedule(Actions.toRunnable(action), delay, unit);
}
@Override
public long now() {
return System.currentTimeMillis();
}
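    // Usage sketch (illustrative only): scheduler.schedule(action, 500, TimeUnit.MILLISECONDS)
    // runs the action on the shared pool after half a second. Note the pool is sized to the
    // number of available processors and is never shut down by this class.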
}
|
package utils;
import function.external.flanking.FlankingCommand;
import function.genotype.base.GenotypeLevelFilterCommand;
import global.Data;
import function.genotype.base.SampleManager;
import function.variant.base.VariantLevelFilterCommand;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Vector;
/**
*
* @author nick
*/
public class ThirdPartyToolManager {
private static final String R_301_SCRIPT_SYSTEM_PATH = "/nfs/goldstein/software/R-3.0.1/bin/Rscript";
private static final String R_325_SCRIPT_SYSTEM_PATH = "/nfs/goldstein/software/R-3.2.5/bin/Rscript";
private static final String COLLAPSED_REGRESSION_R = "/nfs/goldstein/software/atav_home/lib/collapsed_regression_2.0.R";
private static final String PVALS_QQPLOT_R = "/nfs/goldstein/software/atav_home/lib/pvals_qqplot.R";
private static final String QQPLOT_FOR_COLLAPSING_R = "/nfs/goldstein/software/atav_home/lib/qqplot_for_collapsing.R";
private static final String PERL_SYSTEM_PATH = "perl";
private static final String FLANKING_SEQ_PERL = "/nfs/goldstein/software/atav_home/lib/flanking_seq.pl";
private static final String TRIO_DENOVO_TIER = "/nfs/goldstein/software/atav_home/lib/r0.4_trio_denovo_tier.R";
private static final String TRIO_COMP_HET_TIER = "/nfs/goldstein/software/atav_home/lib/r0.4_trio_comp_het_tier.R";
private static final String NON_TRIO_TIER = "/nfs/goldstein/software/atav_home/lib/non_trio_tier.R";
public static int systemCall(String[] cmd) {
LogManager.writeAndPrintNoNewLine("System call start");
int exitValue = Data.NA;
try {
Process myProc;
if (cmd.length > 1) {
LogManager.writeAndPrintNoNewLine(cmd[2]);
myProc = Runtime.getRuntime().exec(cmd);
} else {
LogManager.writeAndPrintNoNewLine(cmd[0]);
myProc = Runtime.getRuntime().exec(cmd[0]);
}
InputStream is = myProc.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line;
Vector<String> result = new Vector<>();
while ((line = br.readLine()) != null) {
result.add(line);
}
exitValue = myProc.waitFor();
} catch (Exception e) {
ErrorManager.send(e);
}
if (exitValue != 0) {
LogManager.writeAndPrint("System call failed.");
} else {
LogManager.writeAndPrint("System call complete.");
}
return exitValue;
}
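    // Illustrative only (not part of the original class): a hedged sketch of calling
    // systemCall directly with a single shell-style command string, mirroring how the
    // wrapper methods below build their commands. Assumes gzip is on the PATH.
    private static void systemCallExample(String filePath) {
        int exitValue = systemCall(new String[]{"gzip -9 " + filePath});
        if (exitValue != 0) {
            LogManager.writeAndPrint("Example call failed with exit value " + exitValue);
        }
    }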
public static void callCollapsedRegression(String outputFile,
String geneSampleMatrixFilePath,
String method) {
String cmd = R_301_SCRIPT_SYSTEM_PATH + " "
+ COLLAPSED_REGRESSION_R + " "
+ "--samples " + SampleManager.getTempCovarPath() + " "
+ "--clps " + geneSampleMatrixFilePath + " "
+ "--out " + outputFile + " "
+ "--method " + method + " "
+ "--transpose "
+ "--log " + CommonCommand.outputPath + "regress.log";
int exitValue = systemCall(new String[]{cmd});
if (exitValue != 0) {
LogManager.writeAndPrint("\nwarning: the application failed to run Collapsed "
+ "Regression script (" + method + "). \n");
deleteFile(outputFile);
}
}
public static void callFlankingSeq(String baseFlankingSeqFilePath) {
String cmd = PERL_SYSTEM_PATH + " " + FLANKING_SEQ_PERL
+ " --variant " + VariantLevelFilterCommand.includeVariantId
+ " --width " + FlankingCommand.width
+ " --out " + baseFlankingSeqFilePath;
int exitValue = systemCall(new String[]{cmd});
if (exitValue != 0) {
LogManager.writeAndPrint("\nwarning: the application failed to run flanking "
+ "sequence script. \n");
deleteFile(baseFlankingSeqFilePath);
}
}
public static void callPvalueQQPlot(String pvalueFile, int col, String outputPath) {
String cmd = R_301_SCRIPT_SYSTEM_PATH + " "
+ PVALS_QQPLOT_R + " "
+ pvalueFile + " "
+ col + " "
+ outputPath;
int exitValue = systemCall(new String[]{cmd});
if (exitValue != 0) {
deleteFile(outputPath);
}
}
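    // The p-value column passed to the R script is 1-based: the loop below walks the
    // comma-separated header until pvalueName matches (or runs off the end, in which
    // case the last column index is used).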
public static void generatePvaluesQQPlot(String title, String pvalueName,
String pvalueFile, String outputPath) {
String[] temp = title.split(",");
int col = 0;
for (String str : temp) {
col++;
if (str.trim().equalsIgnoreCase(pvalueName)) {
break;
}
}
callPvalueQQPlot(pvalueFile, col, outputPath);
}
public static void generateQQPlot4CollapsingFetP(String matrixFilePath, String outputPath) {
String cmd = R_301_SCRIPT_SYSTEM_PATH + " "
+ QQPLOT_FOR_COLLAPSING_R + " "
+ GenotypeLevelFilterCommand.sampleFile + " "
+ matrixFilePath + " "
+ "1000 " // permutation
+ outputPath; // output path
int exitValue = systemCall(new String[]{cmd});
if (exitValue != 0) {
deleteFile(outputPath);
}
}
private static void deleteFile(String filePath) {
File f = new File(filePath);
f.deleteOnExit();
}
public static void gzipFile(String path) {
String cmd = "gzip -9 " + path;
systemCall(new String[]{cmd});
}
public static void runTrioDenovoTier(String denovoFilePath) {
String cmd = R_325_SCRIPT_SYSTEM_PATH + " "
+ TRIO_DENOVO_TIER + " "
+ denovoFilePath;
systemCall(new String[]{cmd});
}
public static void runTrioCompHetTier(String compHetFilePath) {
String cmd = R_325_SCRIPT_SYSTEM_PATH + " "
+ TRIO_COMP_HET_TIER + " "
+ compHetFilePath;
systemCall(new String[]{cmd});
}
public static void runNonTrioTier(String variantFilePath) {
String cmd = R_325_SCRIPT_SYSTEM_PATH + " "
+ NON_TRIO_TIER + " "
+ variantFilePath;
systemCall(new String[]{cmd});
}
}
|