code stringlengths 3 1.18M | language stringclasses 1 value |
|---|---|
package gov.nasa.anml.lifted;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.utility.SimpleObject;
/**
 * An expression whose PDDL translation can be split over the timeline of an
 * enclosing interval: implementations (e.g. Lend) map a requested time
 * annotation onto a time for the "first" piece and a time for the remainder.
 */
public interface ChainableExpression<V,S extends SimpleObject<? super S>> extends Expression<V,S> {
// Time annotation at which the first piece of the split applies.
PDDL.Time splitFirst(PDDL.Time t);
// Time annotation at which the remaining piece applies.
PDDL.Time splitRest(PDDL.Time t);
}
| Java |
package gov.nasa.anml.lifted;
import java.util.ArrayList;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.State;
import gov.nasa.anml.PDDL.Time;
import gov.nasa.anml.utility.SimpleObject;
import gov.nasa.anml.utility.SimpleString;
import static gov.nasa.anml.lifted.IdentifierCode.*;
//don't need reference objects in background representation
//because fluent as an expression is clearly a reference to its value,
//and is implemented by a java reference anyways, so there is only
//space overhead to a FluentReference object.
//inside other objects, like Effects,
//public int fID;
//saves space and time over public Fluent f;
//but has no behavior -- one has to remember to do
//s.get(fID), instead of f.value(s).
//if Fluent implements a pool, then one could make static methods
//for the best of both worlds:
//Fluent.value(fID,s) would be roughly equivalent to
//Fluent.get(fID).value(s) or
//f.value(s) when one already has the reference.
//except that the fluent object itself is not needed at a sufficiently
//ground level, so that Fluent.get(fID) can be skipped.
//if both ways are made final, and thus probably inlined, then
//the static method saves on an object field reference:
//s.get(fID) instead of s.get(f.id);
public class Fluent<T extends SimpleObject<? super T>> extends IdentifierImp<T,T> implements Expression<T,T> {
public Type<T> type;
public Expression<T,T> init;
public Fluent() {
}
public Fluent(String n) {
name = new SimpleString(n);
}
public Fluent(SimpleString n) {
super(n);
}
public Fluent(SimpleString n,Type<T> t) {
super(n);
type = t;
}
public Fluent(SimpleString n, Type<T> t, Expression<T,T> init) {
super(n);
this.type = t;
if (init != null)
this.init = new Assign<T>(this,init);
}
public TypeCode typeCode() {
return type.typeCode();
}
public T value(State s) {
return (T) s.resolveFluent(id).value;
}
public History<T> storage(State p, State c) {
return (History<T>) c.resolveFluent(id);
}
public T init(State s) {
History<T> place = new History<T>(init.value(s));
s.fluents.put(id,place);
return place.value;
}
public IdentifierCode idCode() {
return Fluent;
}
public boolean apply(State p, int contextID, State c) {
if (type.typeCode() != TypeCode.Boolean)
return false;
if (value(p) != ANMLBoolean.True)
return false;
return true;
}
public transient PDDL.Predicate boolPDDL;
public transient PDDL.Function floatPDDL;
public transient PDDL.PredicateReference asPDDLPredicateReference;
public transient PDDL.FunctionReference asPDDLFunctionReference;
public PDDL.Expression translateLValue(PDDL pddl,Interval unit) {
switch(typeCode()) {
case Boolean:
return asPDDLPredicateReference;
case Float:
return asPDDLFunctionReference;
default:
System.err.println("Oops!");
}
return super.translateLValue(pddl,unit);
}
public void translateDecl(PDDL pddl,Interval unit) {
if (typeCode() == TypeCode.Boolean) {
if (boolPDDL != null)
return;
int length = pddl.bufAppend(name);
boolPDDL = pddl.new Predicate(pddl.bufToString());
asPDDLPredicateReference = pddl.new PredicateReference(boolPDDL);
pddl.bufReset(length);
pddl.predicates.add(boolPDDL);
boolPDDL.context.addAll(pddl.context);
if (init != null)
init.translateStmt(pddl,unit,PDDL.Time.Start);
} else if (typeCode() == TypeCode.Float) {
if (floatPDDL != null)
return;
int length = pddl.bufAppend(name);
floatPDDL = pddl.new Function(pddl.bufToString());
asPDDLFunctionReference = pddl.new FunctionReference(floatPDDL);
pddl.bufReset(length);
pddl.functions.add(floatPDDL);
floatPDDL.context.addAll(pddl.context);
if (init != null)
init.translateStmt(pddl,unit,PDDL.Time.Start);
} else if (typeCode() == TypeCode.Integer) {
// be nice by relaxing integers to floats in the compilation to PDDL
if (floatPDDL != null)
return;
int length = pddl.bufAppend(name);
floatPDDL = pddl.new Function(pddl.bufToString());
asPDDLFunctionReference = pddl.new FunctionReference(floatPDDL);
pddl.bufReset(length);
pddl.functions.add(floatPDDL);
floatPDDL.context.addAll(pddl.context);
type = (Type<T>) Unit.floatType;
// make sure all future references see this as a float
if (init != null)
init.translateStmt(pddl,unit,PDDL.Time.Start);
}
}
public PDDL.Expression translateExpr(PDDL pddl, Interval unit) {
switch(typeCode()) {
case Boolean:
return asPDDLPredicateReference;
case Float:
return asPDDLFunctionReference;
default:
System.err.println("Oops!");
}
return super.translateExpr(pddl,unit);
}
}
| Java |
package gov.nasa.anml.lifted;
import java.util.ArrayList;
import gov.nasa.anml.*;
import gov.nasa.anml.utility.*;
import gov.nasa.anml.PDDL;
//really this is a function binding
// Bind<Action,SimpleBoolean,SimpleVoid> is also a good choice, but technically one thing does actually modify
// the implicit predicate, so there is a non-void history to be concerned about.
/**
 * A reference to (invocation of) an action, binding arguments to the action's
 * parameters. As a boolean it asks whether the referenced action is executing;
 * applying it delegates to the referenced action.
 */
public class ActionReference extends Bind<Action,SimpleBoolean,SimpleBoolean> {
public ActionReference() {}
public ActionReference(Action ref) {
super(ref);
}
// could make a pool of various arity arrays to avoid garbage collection
// and memory bloat simultaneously
// Placeholder: always reports the action instantiation as not executing.
public SimpleBoolean value(State s) {
// use setArgs(s) to find the instantiation of the action
// that is being tested to see if it is executing in s
return ANMLBoolean.False;
}
public TypeCode typeCode() {
return TypeCode.Boolean;
}
// Placeholder: no history/storage object produced yet.
public History<SimpleBoolean> storage(State p, State c) {
// maybe return a Step object?
return null;
}
// Delegates application to the referenced action (argument binding still TODO).
public boolean apply(State p, int contextID, State c) {
// TODO: use setArgs() to set the parameters of the action up
return ref.apply(p,contextID,c);
}
public transient PDDL.BooleanExpression asPDDLBooleanCondition;
public transient PDDL.PredicateReference refPDDL;
// Translate to a reference to the action's implicit "executing" predicate,
// appending this reference's translated arguments. Cached after first call;
// arguments that fail to translate are silently skipped.
public PDDL.Expression translateExpr(PDDL pddl, Interval unit) {
if (refPDDL != null)
return refPDDL;
refPDDL = pddl.new PredicateReference(this.ref.makePDDLExecuting());
// need a new reference (rather than this.ref.trivialSelfRelf or whatever its called) because arguments != formal parameters
for (Expression<? extends SimpleObject<?>,?> e : this.arguments) {
PDDL.Argument a = e.translateArgument(pddl,unit);
if (a != null)
refPDDL.arguments.add(a);
}
return refPDDL;
}
}
| Java |
package gov.nasa.anml.lifted;
import java.util.Set;
/**
 * A type whose member set can grow after declaration: supports adding
 * individual members, absorbing another extensible type, and maintaining
 * sub-/super-type links.
 */
public interface ExtensibleType<T extends Comparable> extends Type<T> {
// Add a single member to this type.
void add(T member);
// Absorb the members of another extensible type.
void extend(ExtensibleType<T> t);
// Replace the member set wholesale with the given enumeration.
void set(Enumeration<T> t);
// Presumably returns whether t was newly linked as a subtype — confirm with implementations.
boolean addSubType(ExtensibleType<T> t);
void addSuperType(ExtensibleType<T> t);
// The current member set as an Enumeration constraint.
Enumeration<T> members();
}
| Java |
package gov.nasa.anml.lifted;
import java.util.ArrayList;
import gov.nasa.anml.*;
import gov.nasa.anml.PDDL.Argument;
import gov.nasa.anml.PDDL.FunctionReference;
import gov.nasa.anml.PDDL.PredicateReference;
import gov.nasa.anml.PDDL.Time;
import gov.nasa.anml.utility.*;
// really this is a function binding
public class ConstantFunctionReference<T extends SimpleObject<T>> extends Bind<ConstantFunction<T>,T,SimpleVoid> implements ConstantExpression<T> {
public ConstantFunctionReference() {}
public ConstantFunctionReference(ConstantFunction<T> ref) {
super(ref);
}
// could make a pool of various arity arrays to avoid garbage collection
// and memory bloat simultaneously
public T value(State s) {
ConstantExpression<T> init = ref.init.get(setArgs(s));
return init == null ? null : init.value(s);
}
public TypeCode typeCode() {
return ref.typeCode();
}
public History<SimpleVoid> storage(State p, State c) {
return null;
}
public boolean apply(State p, int contextID, State c) {
if (ref.typeCode() != TypeCode.Boolean)
return false;
if (value(p) != ANMLBoolean.True)
return false;
return true;
}
public transient PDDL.PredicateReference asPDDLPredicateReference;
public transient PDDL.FunctionReference asPDDLFunctionReference;
public transient PDDL.Expression asPDDLExpression;
public PDDL.Expression translateExpr(PDDL pddl, Interval unit) {
if (asPDDLExpression != null)
return asPDDLExpression;
switch(typeCode()) {
case Float:
return asPDDLExpression = translateRefF(pddl,unit);
case Boolean:
return asPDDLExpression = translateRefB(pddl,unit);
default:
System.err.println("Oh no! PDDL only supports fluent booleans and floats (symbols/objects are okay as arguments/parameters, but not as values).");
}
return pddl.FalseRef;
}
public PDDL.Expression translateLValue(PDDL pddl, Interval unit) {
if (asPDDLExpression != null)
return asPDDLExpression;
switch(typeCode()) {
case Float:
return asPDDLExpression = translateRefF(pddl,unit);
case Boolean:
return asPDDLExpression = translateRefB(pddl,unit);
default:
System.err.println("Oops!");
}
return super.translateLValue(pddl,unit);
}
PDDL.PredicateReference translateRefB(PDDL pddl, Interval unit) {
if (asPDDLPredicateReference != null)
return asPDDLPredicateReference;
asPDDLPredicateReference = pddl.new PredicateReference(ref.boolPDDL);
for (Expression<? extends SimpleObject<?>,?> e : this.arguments) {
PDDL.Argument a = e.translateArgument(pddl,unit);
if (a != null)
asPDDLPredicateReference.arguments.add(a);
else {
System.err.println("Arguments must be object/symbol literals or parameters.");
}
}
return asPDDLPredicateReference;
}
PDDL.FunctionReference translateRefF(PDDL pddl, Interval unit) {
if (asPDDLFunctionReference == null)
return asPDDLFunctionReference;
asPDDLFunctionReference = pddl.new FunctionReference(ref.floatPDDL);
for (Expression<? extends SimpleObject<?>,?> e : this.arguments) {
PDDL.Argument a = e.translateArgument(pddl,unit);
if (a != null)
asPDDLFunctionReference.arguments.add(a);
else {
System.err.println("Arguments must be object/symbol literals or parameters.");
}
}
return asPDDLFunctionReference;
}
}
| Java |
package gov.nasa.anml.lifted;
import java.util.ArrayList;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.State;
import gov.nasa.anml.utility.SimpleBoolean;
import gov.nasa.anml.utility.SimpleObject;
/**
 * Resource consumption: Consume(l, r) removes amount r from resource l.
 * For booleans it takes a token (requires it present, deletes it); for floats
 * it decreases the function by the consumed amount, guarded by a sufficiency
 * test. A translation-only construct: it has no runtime value or application.
 */
public class Consume<T extends SimpleObject<? super T>> extends BinaryExpression<T,T,T> {
public Consume(Expression<T, T> l, Expression<T, ?> r) {
super(l, r);
}
// Not applicable as a simulation step (translation-only construct).
public boolean apply(State p, int contextID, State c) {
return false;
}
// No runtime value (translation-only construct).
public T value(State s) {
return null;
}
// Emit PDDL conditions/effects into the enclosing interval for consuming
// `right` units of `left` at time annotation `part`.
public void translateStmt(PDDL pddl, Interval unit, PDDL.Time part) {
ArrayList<PDDL.BooleanExpression> conditions = unit.getPDDLConditions();
ArrayList<PDDL.Effect> effects = unit.getPDDLEffects();
PDDL.Time pl,pr;
// Collapse the annotation to a pair of anchor times: point annotations
// consume at that point (pl == pr); spans anchor at Start..End.
switch(part) {
case Start:
case End:
pl = pr = part;
break;
default:
pl = PDDL.Time.Start;
pr = PDDL.Time.End;
break;
}
switch(left.typeCode()) {
case Boolean:
assert right.typeCode() == TypeCode.Boolean : "No operation combines booleans and other types directly";
PDDL.PredicateReference refB = (PDDL.PredicateReference) left.translateLValue(pddl,unit);
if (right instanceof SimpleBoolean) {
// Literal amount: consuming `true` requires the token at pl, deletes it,
// and (over a span) requires it to stay absent at Interim.
boolean v = ((SimpleBoolean)right).v;
if (v) {
conditions.add(pddl.wrap(pl,pddl.makeTest(refB,true)));
effects.add(pddl.makeEffect(pl,refB,false));
if (pr != pl)
conditions.add(pddl.wrap(PDDL.Time.Interim,pddl.makeTest(refB,false)));
}
} else {
// Non-literal amount: conditional consumption.
// NOTE(review): Op.gte over boolean operands mirrors the float case;
// confirm makeTest interprets it as "r implies refB".
PDDL.BooleanExpression r = (PDDL.BooleanExpression) right.translateExpr(pddl,unit);
conditions.add(pddl.wrap(pl,pddl.makeTest(PDDL.Op.gte,refB,r)));
effects.add(pddl.wrap(pl,pddl.makeEffect(r,refB,false)));
// not easy to implement the interim portion
// if we had upper and lower bounds on bools and floats then it wouldn't be so bad.
//conditions.add(pddl.wrap(PDDL.Time.Interim,pddl.makeTest(refB,false)));
}
break;
case Float:
assert right.typeCode() == TypeCode.Float : "No operation combines floats and non-floats (at present).";
PDDL.FunctionReference refF = (PDDL.FunctionReference) left.translateLValue(pddl,unit);
PDDL.FloatExpression v = (PDDL.FloatExpression) right.translateExpr(pddl,unit);
// this assumes a lower bound of 0.
conditions.add(pddl.wrap(pl,pddl.makeTest(PDDL.Op.gte,refF,v)));
effects.add(pddl.makeEffect(pl,PDDL.Op.decrease,refF,v));
// could add this constraint, but it would be better to just throw it in the domain action and be done with it once.
//conditions.add(pddl.wrap(PDDL.Time.Start,pddl.makeTest(PDDL.Op.gte,refF,pddl.Zero)));
break;
default:
System.err.println("Oops!");
}
}
}
| Java |
package gov.nasa.anml.lifted;
import java.util.ArrayList;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.State;
import gov.nasa.anml.PDDL.Action;
import gov.nasa.anml.PDDL.ComplexAction;
import gov.nasa.anml.PDDL.Exists;
import gov.nasa.anml.PDDL.FloatLiteral;
import gov.nasa.anml.PDDL.Time;
import gov.nasa.anml.utility.SimpleObject;
import gov.nasa.anml.utility.SimpleString;
/**
 * A universally quantified block: every contained statement must hold for
 * all bindings of the block's parameters. Translation wraps the collected
 * inner conditions in a PDDL forall over those parameters.
 */
public class ForAll extends Block {
public ForAll(Scope parent, SimpleString n) {
super(parent,n);
}
// Applies every statement; fails as soon as any statement fails.
// NOTE(review): the outer j-loop runs exactly once — looks like a
// placeholder for enumerating parameter bindings; confirm.
public boolean apply(State p, int contextID, State c) {
for(int j=0; j < 1; ++j) {
for (int i=0;i<statements.size();++i) {
if (!statements.get(i).apply(p,contextID,c))
return false;
}
}
return true;
}
public TypeCode typeCode() {
return TypeCode.Boolean;
}
// Evaluates the block as a boolean by applying statements read-only
// (contextID 0, no child state); same placeholder j-loop as apply().
public ANMLBoolean value(State p) {
for(int j=0; j < 1; ++j) {
for (int i=0;i<statements.size();++i) {
if (!statements.get(i).apply(p,0,null))
return ANMLBoolean.False;
}
}
return ANMLBoolean.True;
}
// Translate each inner statement into this block's own condition list, then
// push a forall-wrapped copy of those conditions into the enclosing unit.
// Effects are forbidden inside a forall (see the assert below).
public void translateStmt(PDDL pddl, Interval unit, Time part) {
translateDecl(pddl,unit);
//bit of a hack here.
ArrayList<PDDL.BooleanExpression> conditions = unit.getPDDLConditions();
ArrayList<PDDL.Effect> effects = unit.getPDDLEffects();
// NOTE(review): `effects` above is never used, and the `part` parameter is
// immediately overwritten here — confirm both are intentional.
part = PDDL.getPart(unit,this);
ArrayList<PDDL.BooleanExpression> myConditions = getPDDLConditions();
ArrayList<PDDL.Effect> myEffects = getPDDLEffects();
ArrayList<PDDL.Parameter> myParameters = getPDDLParameters();
for(int i=0; i < statements.size(); ++i) {
Statement s = statements.get(i);
s.translateStmt(pddl,this,part);
switch(myConditions.size()) {
case 0:
break;
case 1:
conditions.add(pddl.new ForAll(myParameters,myConditions.get(0)));
myConditions.clear();
break;
default:
// swallows the array, so need a new array afterwards; this is the especially kludgy part of the whole affair
conditions.add(pddl.new ForAll(myParameters,pddl.wrap(PDDL.Op.and,myConditions)));
asPDDLAction.condition.arguments = myConditions = new ArrayList<PDDL.BooleanExpression>();
}
assert myEffects.size() == 0 : "No non-determinism.";
}
}
}
| Java |
package gov.nasa.anml.lifted;
import java.util.ArrayList;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.State;
import gov.nasa.anml.PDDL.Argument;
import gov.nasa.anml.PDDL.BooleanExpression;
import gov.nasa.anml.PDDL.Time;
import gov.nasa.anml.utility.SimpleObject;
import gov.nasa.anml.utility.SimpleVoid;
// few could actually directly extend from this, because they'd have a non-constant version with an existing super-type heirarchy to inherit from instead.
/**
 * Base implementation for constant (unchanging) expressions.
 * Constants carry no mutable history, so their statement-level storage is
 * void; statement translation is delegated to the shared ExpressionImp helper.
 */
public abstract class ConstantExpressionImp<T> extends ExpressionImp<T,SimpleVoid> implements ConstantExpression<T> {

    /** Constants keep no history; there is never a storage cell to resolve. */
    public History<SimpleVoid> storage(State p, State c) {
        return null;
    }

    /** Delegate statement-position translation to the shared static helper. */
    public void translateStmt(PDDL pddl, Interval unit, PDDL.Time time) {
        ExpressionImp.translateStmt(this, pddl, unit, time);
    }
}
| Java |
package gov.nasa.anml.lifted;
import java.util.ArrayList;
import gov.nasa.anml.*;
import gov.nasa.anml.PDDL.TimedBooleanExpression;
import gov.nasa.anml.utility.*;
//really this is a function binding
/**
 * A reference to a fluent function applied to arguments (a function binding).
 * Runtime evaluation resolves the function's binding map in the state and then
 * the history entry for this argument tuple; PDDL translation produces a
 * predicate or numeric-function reference depending on the function's type.
 */
public class FluentFunctionReference<T extends SimpleObject<T>> extends Bind<FluentFunction<T>,T,T> {
public FluentFunctionReference() {}
public FluentFunctionReference(FluentFunction<T> ref) {
super(ref);
}
// could make a pool of various arity arrays to avoid garbage collection
// and memory bloat simultaneously
// Current value: function map in s -> history for this argument tuple -> value;
// null whenever any step of the lookup misses.
public T value(State s) {
BindingHistoryMap<T> f = (BindingHistoryMap<T>) s.resolveFunction(ref.id);
if (f == null)
return null;
History<T> h = f.get(setArgs(s));
if (h == null)
return null;
return h.value;
}
public TypeCode typeCode() {
return ref.typeCode();
}
// Storage cell: resolved in child state c, with arguments evaluated in parent p.
public History<T> storage(State p, State c) {
BindingHistoryMap<T> f = (BindingHistoryMap<T>) c.resolveFunction(ref.id);
if (f == null)
return null;
return f.get(setArgs(p));
}
// As a condition: true iff boolean-typed and the current value is True.
public boolean apply(State p, int contextID, State c) {
if (ref.typeCode() != TypeCode.Boolean)
return false;
if (value(p) != ANMLBoolean.True)
return false;
return true;
}
// Cached PDDL translations, built on first use.
public transient PDDL.BooleanExpression asPDDLCondition;
public transient PDDL.PredicateReference asPDDLPredicateReference;
public transient PDDL.FunctionReference asPDDLFunctionReference;
public transient PDDL.Expression asPDDLExpression;
public PDDL.FunctionReference asPDDLFloatExpr() {
return asPDDLFunctionReference;
}
public PDDL.PredicateReference asPDDLBooleanExpr() {
return asPDDLPredicateReference;
}
// Statement position: booleans become timed conditions; numeric expressions
// as statements are rejected (see translateStmtF).
public void translateStmt(PDDL pddl, Interval unit, PDDL.Time part) {
switch(typeCode()) {
case Boolean:
translateStmtB(pddl,unit,part);
break;
case Float:
translateStmtF(pddl,unit,part);
break;
default:
System.err.println("Oops!!");
}
}
// R-value translation; only boolean and float fluents have a PDDL form.
public PDDL.Expression translateExpr(PDDL pddl, Interval unit) {
if (asPDDLExpression != null)
return asPDDLExpression;
switch(typeCode()) {
case Float:
return asPDDLExpression = translateRefF(pddl,unit);
case Boolean:
return asPDDLExpression = translateRefB(pddl,unit);
default:
System.err.println("Oh no! PDDL only supports fluent booleans and floats (symbols/objects are okay as arguments/parameters, but not as values).");
}
return pddl.FalseRef;
}
// L-value translation; mirrors translateExpr but falls back to super on failure.
public PDDL.Expression translateLValue(PDDL pddl, Interval unit) {
if (asPDDLExpression != null)
return asPDDLExpression;
switch(typeCode()) {
case Float:
return asPDDLExpression = translateRefF(pddl,unit);
case Boolean:
return asPDDLExpression = translateRefB(pddl,unit);
default:
System.err.println("Oh no! PDDL only supports fluent booleans and floats (symbols/objects are okay as arguments/parameters, but not as values).");
}
return super.translateLValue(pddl,unit);
}
// Build (and cache) a predicate reference carrying this binding's translated arguments.
private PDDL.BooleanExpression translateRefB(PDDL pddl, Interval unit) {
if (asPDDLPredicateReference != null)
return asPDDLPredicateReference;
asPDDLPredicateReference = pddl.new PredicateReference(ref.boolPDDL);
for (Expression<? extends SimpleObject<?>,?> e : this.arguments) {
PDDL.Argument a = e.translateArgument(pddl,unit);
if (a != null)
asPDDLPredicateReference.arguments.add(a);
else {
System.err.println("Arguments must be object/symbol literals or parameters.");
}
}
return asPDDLPredicateReference;
}
// Build (and cache) a numeric-function reference carrying this binding's translated arguments.
private PDDL.FloatExpression translateRefF(PDDL pddl, Interval unit) {
if (asPDDLFunctionReference != null)
return asPDDLFunctionReference;
asPDDLFunctionReference = pddl.new FunctionReference(ref.floatPDDL);
for (Expression<? extends SimpleObject<?>,?> e : this.arguments) {
PDDL.Argument a = e.translateArgument(pddl,unit);
if (a != null)
asPDDLFunctionReference.arguments.add(a);
else {
System.err.println("Arguments must be object/symbol literals or parameters.");
}
}
return asPDDLFunctionReference;
}
// Boolean statement: add the predicate as a condition at the time(s) the
// annotation denotes, expanding span annotations into their anchor points.
void translateStmtB(PDDL pddl, Interval unit, PDDL.Time part) {
ArrayList<PDDL.BooleanExpression> conditions = unit.getPDDLConditions();
ArrayList<PDDL.Effect> effects = unit.getPDDLEffects();
translateRefB(pddl,unit);
switch(part) {
case Start:
case Interim:
case End:
conditions.add(pddl.wrap(part,asPDDLPredicateReference));
break;
case All:
conditions.add(pddl.wrap(PDDL.Time.Start,asPDDLPredicateReference));
conditions.add(pddl.wrap(PDDL.Time.Interim,asPDDLPredicateReference));
conditions.add(pddl.wrap(PDDL.Time.End,asPDDLPredicateReference));
break;
case StartHalf:
conditions.add(pddl.wrap(PDDL.Time.Start,asPDDLPredicateReference));
conditions.add(pddl.wrap(PDDL.Time.Interim,asPDDLPredicateReference));
break;
case EndHalf:
conditions.add(pddl.wrap(PDDL.Time.Interim,asPDDLPredicateReference));
conditions.add(pddl.wrap(PDDL.Time.End,asPDDLPredicateReference));
break;
case Timeless:
conditions.add(asPDDLPredicateReference);
break;
default:
System.err.println("New PDDL.Time constant unaccounted for in FluentFunctionRef.");
conditions.add(pddl.FalseCondition);
}
}
// Numeric statement: meaningless as a condition, so emit an impossible constraint.
void translateStmtF(PDDL pddl, Interval unit, PDDL.Time part) {
System.err.println("Warning: Encountered numeric-expression-as-statement, i.e., an attempt at condition. Interpreting as an impossible constraint.\n\t"+this);
unit.getPDDLConditions().add(pddl.FalseCondition);
}
}
| Java |
package gov.nasa.anml.lifted;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.State;
import gov.nasa.anml.PDDL.*;
import gov.nasa.anml.PDDL.Time;
import gov.nasa.anml.utility.SimpleFloat;
import gov.nasa.anml.utility.SimpleObject;
/**
 * Shared scaffolding for two-operand expressions over a common operand type I.
 * Subclasses (Assign, Consume, Lend, ...) supply the semantics; this base
 * routes type, storage, declaration, and l-value duties to the left operand,
 * which is the assignable side.
 */
public abstract class BinaryExpression<I extends SimpleObject<? super I>,V extends SimpleObject<? super V>,S extends SimpleObject<? super S>> extends ChainableExpressionImp<V,S> {
    public Expression<I,S> left;
    public Expression<I,?> right;

    public BinaryExpression(Expression<I,S> l, Expression<I,?> r) {
        this.left = l;
        this.right = r;
    }

    /** The expression's type follows its left operand. */
    public TypeCode typeCode() {
        return left.typeCode();
    }

    /** History lives with the left operand (the l-value side). */
    public History<S> storage(State p, State c) {
        return left.storage(p, c);
    }

    /** Declare both operands before they are referenced. */
    public void translateDecl(PDDL pddl, Interval unit) {
        left.translateDecl(pddl, unit);
        right.translateDecl(pddl, unit);
    }

    /** Only the left operand can appear in assignment position. */
    public PDDL.Expression translateLValue(PDDL pddl, Interval unit) {
        return left.translateLValue(pddl, unit);
    }
}
| Java |
package gov.nasa.anml.lifted;
import java.util.ArrayList;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.utility.SimpleBoolean;
import gov.nasa.anml.utility.SimpleObject;
/**
 * Temporary resource provision: Lend(l, r) makes amount r of l available for
 * the duration of the interval and takes it back at the end. For floats this
 * is increase-at-start / decrease-at-end; for booleans the flag is required
 * free, set for the duration, and cleared again. Chainable: splitFirst and
 * splitRest describe how a time annotation splits across the two phases.
 */
public class Lend<T extends SimpleObject<? super T>> extends BinaryExpression<T,T,T> {
public Lend(Expression<T, T> l, Expression<T, ?> r) {
super(l, r);
}
// Time annotation for the first (providing) phase of a split.
public PDDL.Time splitFirst(PDDL.Time t) {
switch(t) {
case All:
return PDDL.Time.All;
case StartHalf:
return PDDL.Time.All;
case EndHalf:
return PDDL.Time.All;
// if crammed into splitting atomic pieces up....
case Interim:
return PDDL.Time.All;
case Start:
// should be immediately before start: ...start)
return PDDL.Time.Start;
case End:
// should be immediately before end: ...end)
// instead of (start, end)
return PDDL.Time.End;
case Timeless:
default:
return t;
}
}
// Time annotation for the remainder after the first phase.
public PDDL.Time splitRest(PDDL.Time t) {
switch(t) {
case All:
return PDDL.Time.End;
case StartHalf:
return PDDL.Time.End;
case EndHalf:
return PDDL.Time.End;
// if forced to split atomic things...
case Interim:
return PDDL.Time.End;
case Start:
return PDDL.Time.Interim;
case End:
return PDDL.Time.End;
case Timeless:
default:
return t;
}
}
// Emit PDDL conditions/effects into the enclosing interval for lending
// `right` units of `left` over the time annotation `part`.
public void translateStmt(PDDL pddl, Interval unit, PDDL.Time part) {
ArrayList<PDDL.BooleanExpression> conditions = unit.getPDDLConditions();
ArrayList<PDDL.Effect> effects = unit.getPDDLEffects();
PDDL.Time pl,pr;
// Collapse the annotation to provide/return anchor times: point
// annotations keep both at that point; spans anchor at Start..End.
switch(part) {
case Start:
case End:
pl = pr = part;
break;
default:
pl = PDDL.Time.Start;
pr = PDDL.Time.End;
break;
}
switch(left.typeCode()) {
case Boolean:
assert right.typeCode() == TypeCode.Boolean : "No operation combines booleans and other types directly";
PDDL.PredicateReference refB = (PDDL.PredicateReference) left.translateLValue(pddl,unit);
if (right instanceof SimpleBoolean) {
// Literal amount: require the flag free at pl, hold it for the
// duration (when pl != pr), and clear it at pr.
boolean v = ((SimpleBoolean)right).v;
if (v) {
conditions.add(pddl.wrap(pl,pddl.makeTest(refB,false)));
if (pl != pr)
effects.add(pddl.wrap(pl,pddl.makeEffect(refB,true)));
effects.add(pddl.wrap(pr,pddl.makeEffect(refB,false)));
}
} else {
// Non-literal amount: lend conditionally on r.
PDDL.BooleanExpression r = (PDDL.BooleanExpression) right.translateExpr(pddl,unit);
conditions.add(pddl.wrap(pl,pddl.makeTest(PDDL.Op.implies,r,pddl.negate(refB))));
if (pl != pr)
effects.add(pddl.wrap(pl,pddl.makeEffect(r,refB,true)));
effects.add(pddl.wrap(pr,pddl.makeEffect(r,refB,false)));
}
break;
case Float:
assert right.typeCode() == TypeCode.Float : "No operation combines floats and non-floats (at present).";
PDDL.FunctionReference refF = (PDDL.FunctionReference) left.translateLValue(pddl,unit);
PDDL.FloatExpression v = (PDDL.FloatExpression) right.translateExpr(pddl,unit);
//conditions.add(pddl.wrap(pl,pddl.makeTest(PDDL.Op.gte,refF,v)));
effects.add(pddl.wrap(pl,pddl.makeEffect(PDDL.Op.increase,refF,v)));
effects.add(pddl.wrap(pr,pddl.makeEffect(PDDL.Op.decrease,refF,pddl.wrap(pl,v))));
// the latter wrap is to make it very clear at what time we want the expression to be evaluated
// in order to calculate the amount of increase.
// however, the resulting syntax is likely to break PDDL planners.
break;
default:
System.err.println("Oops!");
conditions.add(pddl.FalseCondition);
}
}
}
| Java |
package gov.nasa.anml.lifted;
import java.util.*;
import gov.nasa.anml.utility.*;
/**
 * A constraint on the values a variable or type may take: a collection of
 * allowed values plus bounding information for quick subsumption checks.
 */
public interface Constraint<T> extends Collection<T>, Cloneable {
//boolean contains(T v);
// True when this constraint allows every value t allows (subsumption).
boolean containsAll(Constraint<T> t); //subsumes, supertype, superset, ...
// The explicit allowed-value set; may be null when not enumerable.
Set<T> values();
// An over-approximating (min,max) interval of the allowed values.
Pair<T,T> bounds();
// Covariant clone so callers need not cast.
Constraint<T> clone();
}
| Java |
package gov.nasa.anml.lifted;
import java.util.ArrayList;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.State;
import gov.nasa.anml.utility.SimpleString;
import gov.nasa.anml.utility.SimpleVoid;
import static gov.nasa.anml.lifted.IdentifierCode.*;
/**
 * A named object/symbol constant (translated to a PDDL object). Its runtime
 * value is its own name. Maintains the set of most-specific types it belongs to.
 */
public class ObjectLiteral extends IdentifierImp<SimpleString,SimpleVoid> implements ConstantExpression<SimpleString> {
public ArrayList<ObjectType> types = new ArrayList<ObjectType>();
public ObjectLiteral() {
super();
}
public ObjectLiteral(SimpleString n) {
super(n);
}
public ObjectLiteral(SimpleString n, int i) {
super(n,i);
}
public ObjectLiteral(SimpleString n, int i, ObjectType t) {
super(n,i);
types.add(t);
}
public IdentifierCode idCode() {
return Object;
}
public TypeCode typeCode() {
return TypeCode.Object;
}
// An object literal evaluates to its own name.
public SimpleString value(State s) {
return name;
}
// Not applicable as a condition/step.
public boolean apply(State p, int contextID, State c) {
return false;
}
// Keep only most-specific types: remove any existing entry that the new
// type t specializes, then record t.
public void addType(ObjectType t) {
int i=0;
while (i<types.size()) {
if (t.isSubType(types.get(i))) {
types.remove(i);
} else {
++i;
}
}
types.add(t);
}
public transient PDDL.Object asPDDLObject;
public transient PDDL.ObjectRef asPDDLArgument;
// NOTE(review): assumes translateDecl already populated asPDDLObject;
// confirm call order, otherwise the ObjectRef wraps null.
public PDDL.Argument translateArgument(PDDL pddl, Interval unit) {
if (asPDDLArgument != null)
return asPDDLArgument;
return asPDDLArgument = pddl.new ObjectRef(asPDDLObject);
}
// Declare the PDDL object (idempotent). PDDL objects have a single type, so
// multiple types are collapsed into a synthesized intersection type.
// NOTE(review): types.get(0) assumes at least one type is recorded — confirm.
public void translateDecl(PDDL pddl,Interval unit) {
if (asPDDLObject != null)
return;
int length = pddl.bufAppend(name);
String fullName = pddl.bufToString();
if (types.size() > 1) {
ObjectType myType = new ObjectType(null,new SimpleString(fullName+"Type"));
myType.superTypes.addAll(this.types);
this.types.clear();
this.types.add(myType);
myType.members.add(this);
myType.translateDecl(pddl,unit);
}
asPDDLObject = pddl.new Object(fullName,types.get(0).asPDDLType);
pddl.bufReset(length);
pddl.domainObjects.add(asPDDLObject);
}
}
| Java |
package gov.nasa.anml.lifted;
import java.util.*;
import gov.nasa.anml.utility.Pair;
import gov.nasa.anml.utility.SimpleObject;
/**
 * A finite, explicitly-enumerated value constraint: a set of allowed members
 * plus a lazily-computed over-approximating [min,max] interval.
 * Backed by a HashSet; most Collection operations delegate to it.
 */
public class Enumeration<T extends Comparable> implements Constraint<T> {
    // could have holes, unlike Range
    public Set<T> values = new HashSet<T>();
    // least over(upper)-approximating interval (lub); computed on demand by bounds()
    // NOTE(review): mutators do not invalidate this cache, so bounds() can be
    // stale after add/remove — confirm intended.
    public Pair<T,T> bounds;

    public Enumeration() {
    }

    /** Wraps (does not copy) the given set. */
    public Enumeration(Set<T> v) {
        values = v;
    }

    /** Recompute the cached [min,max] pair by scanning the member set. */
    void setBounds() {
        Set<T> v = values;
        if (v != null) {
            T min, max;
            Iterator<T> i = v.iterator();
            if (i.hasNext()) {
                min = max = i.next();
                while (i.hasNext()) {
                    T a = i.next();
                    if (a.compareTo(min) < 0)
                        min = a;
                    else if (a.compareTo(max) > 0)
                        max = a;
                }
                bounds = new Pair<T,T>(min, max);
            }
        }
    }

    /** True when this constraint subsumes t: every value t allows is allowed here. */
    public boolean containsAll(Constraint<T> t) {
        Set<T> v = t.values();
        if (v == null)
            return false;
        Pair<T,T> b = t.bounds();
        if (b != null) {
            // cheap interval rejection before the exact set-containment check
            Pair<T,T> a = bounds();
            if (a.left.compareTo(b.left) > 0)
                return false;
            if (a.right.compareTo(b.right) < 0)
                return false;
        }
        return values.containsAll(v);
    }

    public Set<T> values() {
        return values;
    }

    /** Lazily computed over-approximating interval. */
    public Pair<T,T> bounds() {
        if (bounds == null)
            setBounds();
        return bounds;
    }

    public boolean add(T e) {
        return values.add(e);
    }

    public boolean addAll(Collection<? extends T> c) {
        return values.addAll(c);
    }

    public void clear() {
        values.clear();
    }

    /**
     * Equality by member set. BUG FIX: the original delegated straight to
     * Set.equals(o), which can never match another Enumeration (it is not a
     * Set), so even e.equals(e) returned false — breaking reflexivity and any
     * use as a map key. Unwrap Enumerations before comparing. hashCode remains
     * consistent since it is the member set's hash.
     */
    public boolean equals(Object o) {
        if (o instanceof Enumeration)
            return values.equals(((Enumeration<?>) o).values);
        return values.equals(o);
    }

    public int hashCode() {
        return values.hashCode();
    }

    public boolean isEmpty() {
        return values.isEmpty();
    }

    public Iterator<T> iterator() {
        return values.iterator();
    }

    public boolean remove(Object o) {
        return values.remove(o);
    }

    public boolean removeAll(Collection<?> c) {
        return values.removeAll(c);
    }

    public boolean retainAll(Collection<?> c) {
        return values.retainAll(c);
    }

    public int size() {
        return values.size();
    }

    public Object[] toArray() {
        return values.toArray();
    }

    // Renamed the method type parameter (was <T>, shadowing the class's T).
    public <U> U[] toArray(U[] a) {
        return values.toArray(a);
    }

    public boolean contains(Object o) {
        return values.contains(o);
    }

    public boolean containsAll(Collection<?> c) {
        return values.containsAll(c);
    }

    /** Copy with an independent member set (bounds cache is shared, as before). */
    public Enumeration<T> clone() {
        Enumeration<T> c = null;
        try {
            c = (Enumeration<T>) super.clone();
        } catch (CloneNotSupportedException e) {
            // unreachable: Constraint extends Cloneable
        }
        c.values = new HashSet<T>(values);
        // BUG FIX: the original constructed the copy and then returned null,
        // so every caller of clone() received null.
        return c;
    }
}
| Java |
package gov.nasa.anml.lifted;
import static gov.nasa.anml.lifted.IdentifierCode.*;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.State;
import gov.nasa.anml.PDDL.FloatExpression;
import gov.nasa.anml.utility.SimpleObject;
import gov.nasa.anml.utility.SimpleString;
import gov.nasa.anml.utility.SimpleVoid;
// objects of primitive type are single-valued, unstructured, etc.
// the possibilities are
// boolean,byte,char,short,int,long,float,double,string
//
// Don't internally distinguish string types from symbol types --
// symbols are just strings that don't need quoting to be tokenized as ids.
//
// Idea: allow quoted strings as identifiers?
// int "My var" = 2, "My Second Var" = 3;
// double quotes bad idea, but...
// int 'my var' = 2, 'my second var' = 3;
// isn't as bad. Could do backquotes to make _sure_ there is no
// confusion with string literals. `my var`
//
// See TypeCode
/**
 * A primitive (single-valued, unstructured) type: boolean, numeric, string,
 * etc., identified by its TypeCode and optionally narrowed by a Constraint.
 * Primitive types have no explicit PDDL representation (booleans and floats
 * are handled specially by the translator).
 */
public class PrimitiveType<T> extends IdentifierImp<SimpleString,SimpleVoid> implements Type<T> {
    public TypeCode typeCode;
    /** Optional value constraint; null means unconstrained. */
    public Constraint<T> constraint;

    // should be called only once per TypeCode...probably
    public PrimitiveType(TypeCode typeCode) {
        super(typeCode.name);
        this.typeCode = typeCode;
        this.constraint = null;
    }

    public PrimitiveType(SimpleString n, TypeCode typeCode) {
        super(n);
        this.typeCode = typeCode;
        this.constraint = null;
    }

    public PrimitiveType(SimpleString n, TypeCode typeCode, Constraint<T> constraint) {
        super(n);
        this.typeCode = typeCode;
        this.constraint = constraint;
    }

    public PrimitiveType(TypeCode typeCode, Constraint<T> constraint) {
        // FIX(consistency): previously this constructor left `name` unset,
        // unlike every sibling constructor, so types produced by constrain()
        // were nameless. Default the name from the TypeCode as the no-constraint
        // constructor does.
        this(typeCode.name, typeCode, constraint);
    }

    public final TypeCode typeCode() {
        return typeCode;
    }

    public IdentifierCode idCode() {
        return Type;
    }

    /**
     * Narrow this type by c. Returns a new constrained type when c is
     * compatible (this type is unconstrained or already subsumes c), else null.
     */
    public PrimitiveType<T> constrain(Constraint<T> c) {
        if (constraint == null || constraint.containsAll(c))
            return new PrimitiveType<T>(typeCode, c);
        return null;
    }

    public PrimitiveType<T> clone() {
        PrimitiveType<T> ret = null;
        try {
            ret = (PrimitiveType<T>) super.clone();
        } catch (CloneNotSupportedException e) {
            //assert false;
        }
        return ret;
    }

    // no explicit PDDL representation of non-symbolic types
    // booleans and floats are special
    public PDDL.Type asPDDLType() {
        return null;
    }
}
| Java |
package gov.nasa.anml.lifted;
import java.util.ArrayList;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.PDDL.Action;
import gov.nasa.anml.PDDL.BooleanExpression;
import gov.nasa.anml.PDDL.Effect;
import gov.nasa.anml.PDDL.FloatExpression;
import gov.nasa.anml.PDDL.Parameter;
import gov.nasa.anml.PDDL.Predicate;
import gov.nasa.anml.utility.SimpleFloat;
import gov.nasa.anml.utility.SimpleInteger;
/**
 * A zero-duration interval: start == end == a single time point, decomposed
 * into atomic pieces numbered bra..ket. The PDDL-related accessors are all
 * unimplemented placeholders returning null.
 */
public class DegenerateInterval implements Interval {
    /** The single time point this interval occupies. */
    public Constant<SimpleFloat> time;
    /** Lower/upper piece indices of the interval's shape. */
    public Constant<SimpleInteger> bra, ket;
    /** Atomic pieces, indexed 0..(ket-bra). */
    public Point[] pieces;

    public DegenerateInterval(Constant<SimpleFloat> t, int b, int k) {
        int l = k - b + 1;
        pieces = new Point[l];
        for (int d = 0; d < l; ++d) {
            pieces[d] = new Point(t, b + d);
        }
        time = t;
        // BUG FIX: the original advanced `b` with b++ while filling pieces and
        // then called setShape(b,k), recording bra == k+1 instead of the
        // requested lower index. Keep b unmutated so the shape is [b, k].
        setShape(b, k);
    }

    // NOTE(review): takes a 0-based offset into pieces, not an absolute piece
    // number in bra..ket — confirm against callers.
    public AtomicTime getPiece(int k) {
        return pieces[k];
    }

    /** Degenerate intervals always have zero duration. */
    public Constant<SimpleFloat> getDuration() {
        return IntervalImp.constantDurationZero;
    }

    /** Start and end coincide at the single time point. */
    public Constant<SimpleFloat> getEnd() {
        return time;
    }

    public Constant<SimpleFloat> getStart() {
        return time;
    }

    public Constant<SimpleInteger> getBra() {
        return bra;
    }

    public Constant<SimpleInteger> getKet() {
        return ket;
    }

    public void setBra(Constant<SimpleInteger> bra) {
        this.bra = bra;
    }

    public void setKet(Constant<SimpleInteger> ket) {
        this.ket = ket;
    }

    /** Set both ends of the piece-index shape at once. */
    public final void setShape(int b, int k) {
        setBra(b);
        setKet(k);
    }

    public void setBra(int b) {
        this.bra = IntervalImp.makeBra(b);
    }

    public void setKet(int k) {
        this.ket = IntervalImp.makeKet(k);
    }

    // TODO: all of the below — PDDL translation is not implemented for
    // degenerate intervals; every accessor returns null.
    public ArrayList<PDDL.BooleanExpression> getPDDLConditions() {
        return null;
    }

    public FloatExpression getPDDLDuration() {
        return null;
    }

    public ArrayList<Effect> getPDDLEffects() {
        return null;
    }

    public Predicate getPDDLExecuting() {
        return null;
    }

    public ArrayList<Parameter> getPDDLParameters() {
        return null;
    }

    public Action getPDDLAction() {
        return null;
    }

    public Predicate makePDDLExecuting() {
        return null;
    }
}
| Java |
package gov.nasa.anml.lifted;
import gov.nasa.anml.PDDL;
import gov.nasa.anml.State;
import gov.nasa.anml.PDDL.Time;
import gov.nasa.anml.utility.SimpleBoolean;
import gov.nasa.anml.utility.SimpleObject;
import gov.nasa.anml.utility.SimpleVoid;
/**
 * Interned boolean constants for ANML expressions. Exactly two instances
 * exist (True/False); the constructor is private, so identity comparison
 * against {@link #True} is sound.
 */
public class ANMLBoolean extends SimpleBoolean implements ConstantExpression<SimpleBoolean> {
    // Fresh True/False instances are required because the SimpleBoolean
    // constants lack the typeCode(), value(State), and value() methods.
    public static final ANMLBoolean True = new ANMLBoolean(true);
    public static final ANMLBoolean False = new ANMLBoolean(false);

    // Cannot override the public static final fields, because an invocation
    // such as "SimpleBoolean foo = new ANMLBoolean(b); foo.make(...)" still
    // statically binds here:
    public static ANMLBoolean make(boolean v) {
        if (v) {
            return True;
        }
        return False;
    }

    private ANMLBoolean(boolean v) {
        this.v = v;
    }

    // A constant evaluates to itself, with or without a state.
    public ANMLBoolean value() {
        return this;
    }
    public ANMLBoolean value(State s) {
        return this;
    }

    public TypeCode typeCode() {
        return TypeCode.Boolean;
    }

    // Constants need no history storage.
    public History<SimpleVoid> storage(State p, State c) {
        return null;
    }

    // Applying the constant succeeds exactly when it is the True instance.
    public boolean apply(State p, int contextID, State c) {
        return this == True;
    }

    // Constants declare nothing.
    public void translateDecl(PDDL pddl, Interval unit) {
    }

    // As a statement, only the False constant contributes: it makes the
    // enclosing unit unsatisfiable.
    public void translateStmt(PDDL pddl, Interval unit, Time part) {
        if (v) {
            return;
        }
        unit.getPDDLConditions().add(pddl.FalseCondition);
    }

    public PDDL.Expression translateExpr(PDDL pddl, Interval unit) {
        if (v) {
            return pddl.TrueRef;
        }
        return pddl.FalseRef;
    }

    // Booleans are never assignable.
    public PDDL.Expression translateLValue(PDDL pddl, Interval unit) {
        return null;
    }

    // TODO: could return true/false objects for parameters to actions.
    public PDDL.Argument translateArgument(PDDL pddl, Interval unit) {
        return null;
    }
}
| Java |
package gov.nasa.anml.lifted;
import gov.nasa.anml.State;
import gov.nasa.anml.utility.SimpleBoolean;
/**
 * Unordered composite: evaluates as the conjunction of its children,
 * with no temporal-order requirement among them.
 */
public class Unordered extends CompoundIntervalExpression {
    // False as soon as any child fails to evaluate to the canonical
    // True constant; True only when every child does.
    public SimpleBoolean value(State p) {
        final int n = expressions.size();
        for (int idx = 0; idx < n; ++idx) {
            if (ANMLBoolean.True != expressions.get(idx).value(p)) {
                return ANMLBoolean.False;
            }
        }
        // TODO: remember envelop
        return ANMLBoolean.True;
    }
}
| Java |
package gov.nasa.anml;
import gov.nasa.anml.lifted.History;
import gov.nasa.anml.utility.ArrHashMap;
import gov.nasa.anml.utility.ArrMap;
import gov.nasa.anml.utility.SimpleObject;
/**
 * Hash map from variable bindings (arrays of SimpleObject) to value
 * histories. Entry creation stores a defensive clone of the History so
 * later mutation of the caller's value does not leak into the map.
 */
public class BindingHistoryMap<T extends SimpleObject<? super T>> extends ArrHashMap<SimpleObject<?>[],History<T>> {
    public Entry<SimpleObject<?>[],History<T>> createEntry(int index, SimpleObject<?>[] key, History<T> value, int hash) {
        final Entry<SimpleObject<?>[],History<T>> previous = table[index];
        table[index] = new Entry<SimpleObject<?>[],History<T>>(key, value.clone(), hash, previous);
        ++size;
        // Returns the displaced bucket head — presumably the ArrHashMap
        // contract for createEntry; confirm against the base class.
        return previous;
    }
    public BindingHistoryMap<T> clone() {
        return (BindingHistoryMap<T>) super.clone();
    }
}
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package info.bondtnt.labs.model.research;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EPackage;
/**
 * <!-- begin-user-doc -->
 * The <b>Package</b> for the model.
 * It contains accessors for the meta objects to represent
 * <ul>
 *   <li>each class,</li>
 *   <li>each feature of each class,</li>
 *   <li>each enum,</li>
 *   <li>and each data type</li>
 * </ul>
 * NOTE(review): EMF-generated metamodel descriptor. Do not hand-edit the
 * feature-id arithmetic below; regenerate from the {@code @model}
 * annotations on the interfaces in this package.
 * <!-- end-user-doc -->
 * @see info.bondtnt.labs.model.research.ResearchFactory
 * @model kind="package"
 * @generated
 */
public interface ResearchPackage extends EPackage {
    /**
     * The package name.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    String eNAME = "research";
    /**
     * The package namespace URI.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    String eNS_URI = "http:///info/bondtnt/labs/model/research.ecore";
    /**
     * The package namespace name.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    String eNS_PREFIX = "info.bondtnt.labs.model.research";
    /**
     * The singleton instance of the package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    ResearchPackage eINSTANCE = info.bondtnt.labs.model.research.impl.ResearchPackageImpl.init();
    /**
     * The meta object id for the '{@link info.bondtnt.labs.model.research.impl.AbstractBoundedGenericParameterImpl <em>Abstract Bounded Generic Parameter</em>}' class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see info.bondtnt.labs.model.research.impl.AbstractBoundedGenericParameterImpl
     * @see info.bondtnt.labs.model.research.impl.ResearchPackageImpl#getAbstractBoundedGenericParameter()
     * @generated
     */
    int ABSTRACT_BOUNDED_GENERIC_PARAMETER = 0;
    /**
     * The feature id for the '<em><b>Name</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int ABSTRACT_BOUNDED_GENERIC_PARAMETER__NAME = 0;
    /**
     * The number of structural features of the '<em>Abstract Bounded Generic Parameter</em>' class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int ABSTRACT_BOUNDED_GENERIC_PARAMETER_FEATURE_COUNT = 1;
    /**
     * The meta object id for the '{@link info.bondtnt.labs.model.research.impl.BoundedDoubleParameterImpl <em>Bounded Double Parameter</em>}' class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see info.bondtnt.labs.model.research.impl.BoundedDoubleParameterImpl
     * @see info.bondtnt.labs.model.research.impl.ResearchPackageImpl#getBoundedDoubleParameter()
     * @generated
     */
    int BOUNDED_DOUBLE_PARAMETER = 1;
    /**
     * The feature id for the '<em><b>Name</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int BOUNDED_DOUBLE_PARAMETER__NAME = ABSTRACT_BOUNDED_GENERIC_PARAMETER__NAME;
    /**
     * The feature id for the '<em><b>First Value</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int BOUNDED_DOUBLE_PARAMETER__FIRST_VALUE = ABSTRACT_BOUNDED_GENERIC_PARAMETER_FEATURE_COUNT + 0;
    /**
     * The feature id for the '<em><b>Last Value</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int BOUNDED_DOUBLE_PARAMETER__LAST_VALUE = ABSTRACT_BOUNDED_GENERIC_PARAMETER_FEATURE_COUNT + 1;
    /**
     * The feature id for the '<em><b>Step Value</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int BOUNDED_DOUBLE_PARAMETER__STEP_VALUE = ABSTRACT_BOUNDED_GENERIC_PARAMETER_FEATURE_COUNT + 2;
    /**
     * The number of structural features of the '<em>Bounded Double Parameter</em>' class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int BOUNDED_DOUBLE_PARAMETER_FEATURE_COUNT = ABSTRACT_BOUNDED_GENERIC_PARAMETER_FEATURE_COUNT + 3;
    /**
     * The meta object id for the '{@link info.bondtnt.labs.model.research.impl.BoundedGenericParameterImpl <em>Bounded Generic Parameter</em>}' class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see info.bondtnt.labs.model.research.impl.BoundedGenericParameterImpl
     * @see info.bondtnt.labs.model.research.impl.ResearchPackageImpl#getBoundedGenericParameter()
     * @generated
     */
    int BOUNDED_GENERIC_PARAMETER = 2;
    /**
     * The feature id for the '<em><b>Name</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int BOUNDED_GENERIC_PARAMETER__NAME = ABSTRACT_BOUNDED_GENERIC_PARAMETER__NAME;
    /**
     * The feature id for the '<em><b>All Values</b></em>' attribute list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int BOUNDED_GENERIC_PARAMETER__ALL_VALUES = ABSTRACT_BOUNDED_GENERIC_PARAMETER_FEATURE_COUNT + 0;
    /**
     * The number of structural features of the '<em>Bounded Generic Parameter</em>' class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int BOUNDED_GENERIC_PARAMETER_FEATURE_COUNT = ABSTRACT_BOUNDED_GENERIC_PARAMETER_FEATURE_COUNT + 1;
    /**
     * The meta object id for the '{@link info.bondtnt.labs.model.research.impl.NamedDoubleParameterImpl <em>Named Double Parameter</em>}' class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see info.bondtnt.labs.model.research.impl.NamedDoubleParameterImpl
     * @see info.bondtnt.labs.model.research.impl.ResearchPackageImpl#getNamedDoubleParameter()
     * @generated
     */
    int NAMED_DOUBLE_PARAMETER = 3;
    /**
     * The feature id for the '<em><b>Name</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int NAMED_DOUBLE_PARAMETER__NAME = 0;
    /**
     * The feature id for the '<em><b>Value</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int NAMED_DOUBLE_PARAMETER__VALUE = 1;
    /**
     * The number of structural features of the '<em>Named Double Parameter</em>' class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int NAMED_DOUBLE_PARAMETER_FEATURE_COUNT = 2;
    /**
     * The meta object id for the '{@link info.bondtnt.labs.model.research.impl.ParametersListImpl <em>Parameters List</em>}' class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see info.bondtnt.labs.model.research.impl.ParametersListImpl
     * @see info.bondtnt.labs.model.research.impl.ResearchPackageImpl#getParametersList()
     * @generated
     */
    int PARAMETERS_LIST = 4;
    /**
     * The feature id for the '<em><b>List</b></em>' attribute list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int PARAMETERS_LIST__LIST = 0;
    /**
     * The number of structural features of the '<em>Parameters List</em>' class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    int PARAMETERS_LIST_FEATURE_COUNT = 1;
    /**
     * Returns the meta object for class '{@link info.bondtnt.labs.model.research.AbstractBoundedGenericParameter <em>Abstract Bounded Generic Parameter</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for class '<em>Abstract Bounded Generic Parameter</em>'.
     * @see info.bondtnt.labs.model.research.AbstractBoundedGenericParameter
     * @generated
     */
    EClass getAbstractBoundedGenericParameter();
    /**
     * Returns the meta object for the attribute '{@link info.bondtnt.labs.model.research.AbstractBoundedGenericParameter#getName <em>Name</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for the attribute '<em>Name</em>'.
     * @see info.bondtnt.labs.model.research.AbstractBoundedGenericParameter#getName()
     * @see #getAbstractBoundedGenericParameter()
     * @generated
     */
    EAttribute getAbstractBoundedGenericParameter_Name();
    /**
     * Returns the meta object for class '{@link info.bondtnt.labs.model.research.BoundedDoubleParameter <em>Bounded Double Parameter</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for class '<em>Bounded Double Parameter</em>'.
     * @see info.bondtnt.labs.model.research.BoundedDoubleParameter
     * @generated
     */
    EClass getBoundedDoubleParameter();
    /**
     * Returns the meta object for the attribute '{@link info.bondtnt.labs.model.research.BoundedDoubleParameter#getFirstValue <em>First Value</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for the attribute '<em>First Value</em>'.
     * @see info.bondtnt.labs.model.research.BoundedDoubleParameter#getFirstValue()
     * @see #getBoundedDoubleParameter()
     * @generated
     */
    EAttribute getBoundedDoubleParameter_FirstValue();
    /**
     * Returns the meta object for the attribute '{@link info.bondtnt.labs.model.research.BoundedDoubleParameter#getLastValue <em>Last Value</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for the attribute '<em>Last Value</em>'.
     * @see info.bondtnt.labs.model.research.BoundedDoubleParameter#getLastValue()
     * @see #getBoundedDoubleParameter()
     * @generated
     */
    EAttribute getBoundedDoubleParameter_LastValue();
    /**
     * Returns the meta object for the attribute '{@link info.bondtnt.labs.model.research.BoundedDoubleParameter#getStepValue <em>Step Value</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for the attribute '<em>Step Value</em>'.
     * @see info.bondtnt.labs.model.research.BoundedDoubleParameter#getStepValue()
     * @see #getBoundedDoubleParameter()
     * @generated
     */
    EAttribute getBoundedDoubleParameter_StepValue();
    /**
     * Returns the meta object for class '{@link info.bondtnt.labs.model.research.BoundedGenericParameter <em>Bounded Generic Parameter</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for class '<em>Bounded Generic Parameter</em>'.
     * @see info.bondtnt.labs.model.research.BoundedGenericParameter
     * @generated
     */
    EClass getBoundedGenericParameter();
    /**
     * Returns the meta object for the attribute list '{@link info.bondtnt.labs.model.research.BoundedGenericParameter#getAllValues <em>All Values</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for the attribute list '<em>All Values</em>'.
     * @see info.bondtnt.labs.model.research.BoundedGenericParameter#getAllValues()
     * @see #getBoundedGenericParameter()
     * @generated
     */
    EAttribute getBoundedGenericParameter_AllValues();
    /**
     * Returns the meta object for class '{@link info.bondtnt.labs.model.research.NamedDoubleParameter <em>Named Double Parameter</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for class '<em>Named Double Parameter</em>'.
     * @see info.bondtnt.labs.model.research.NamedDoubleParameter
     * @generated
     */
    EClass getNamedDoubleParameter();
    /**
     * Returns the meta object for the attribute '{@link info.bondtnt.labs.model.research.NamedDoubleParameter#getName <em>Name</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for the attribute '<em>Name</em>'.
     * @see info.bondtnt.labs.model.research.NamedDoubleParameter#getName()
     * @see #getNamedDoubleParameter()
     * @generated
     */
    EAttribute getNamedDoubleParameter_Name();
    /**
     * Returns the meta object for the attribute '{@link info.bondtnt.labs.model.research.NamedDoubleParameter#getValue <em>Value</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for the attribute '<em>Value</em>'.
     * @see info.bondtnt.labs.model.research.NamedDoubleParameter#getValue()
     * @see #getNamedDoubleParameter()
     * @generated
     */
    EAttribute getNamedDoubleParameter_Value();
    /**
     * Returns the meta object for class '{@link info.bondtnt.labs.model.research.ParametersList <em>Parameters List</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for class '<em>Parameters List</em>'.
     * @see info.bondtnt.labs.model.research.ParametersList
     * @generated
     */
    EClass getParametersList();
    /**
     * Returns the meta object for the attribute list '{@link info.bondtnt.labs.model.research.ParametersList#getList <em>List</em>}'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the meta object for the attribute list '<em>List</em>'.
     * @see info.bondtnt.labs.model.research.ParametersList#getList()
     * @see #getParametersList()
     * @generated
     */
    EAttribute getParametersList_List();
    /**
     * Returns the factory that creates the instances of the model.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the factory that creates the instances of the model.
     * @generated
     */
    ResearchFactory getResearchFactory();
    /**
     * <!-- begin-user-doc -->
     * Defines literals for the meta objects that represent
     * <ul>
     *   <li>each class,</li>
     *   <li>each feature of each class,</li>
     *   <li>each enum,</li>
     *   <li>and each data type</li>
     * </ul>
     * <!-- end-user-doc -->
     * @generated
     */
    interface Literals {
        /**
         * The meta object literal for the '{@link info.bondtnt.labs.model.research.impl.AbstractBoundedGenericParameterImpl <em>Abstract Bounded Generic Parameter</em>}' class.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @see info.bondtnt.labs.model.research.impl.AbstractBoundedGenericParameterImpl
         * @see info.bondtnt.labs.model.research.impl.ResearchPackageImpl#getAbstractBoundedGenericParameter()
         * @generated
         */
        EClass ABSTRACT_BOUNDED_GENERIC_PARAMETER = eINSTANCE.getAbstractBoundedGenericParameter();
        /**
         * The meta object literal for the '<em><b>Name</b></em>' attribute feature.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @generated
         */
        EAttribute ABSTRACT_BOUNDED_GENERIC_PARAMETER__NAME = eINSTANCE.getAbstractBoundedGenericParameter_Name();
        /**
         * The meta object literal for the '{@link info.bondtnt.labs.model.research.impl.BoundedDoubleParameterImpl <em>Bounded Double Parameter</em>}' class.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @see info.bondtnt.labs.model.research.impl.BoundedDoubleParameterImpl
         * @see info.bondtnt.labs.model.research.impl.ResearchPackageImpl#getBoundedDoubleParameter()
         * @generated
         */
        EClass BOUNDED_DOUBLE_PARAMETER = eINSTANCE.getBoundedDoubleParameter();
        /**
         * The meta object literal for the '<em><b>First Value</b></em>' attribute feature.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @generated
         */
        EAttribute BOUNDED_DOUBLE_PARAMETER__FIRST_VALUE = eINSTANCE.getBoundedDoubleParameter_FirstValue();
        /**
         * The meta object literal for the '<em><b>Last Value</b></em>' attribute feature.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @generated
         */
        EAttribute BOUNDED_DOUBLE_PARAMETER__LAST_VALUE = eINSTANCE.getBoundedDoubleParameter_LastValue();
        /**
         * The meta object literal for the '<em><b>Step Value</b></em>' attribute feature.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @generated
         */
        EAttribute BOUNDED_DOUBLE_PARAMETER__STEP_VALUE = eINSTANCE.getBoundedDoubleParameter_StepValue();
        /**
         * The meta object literal for the '{@link info.bondtnt.labs.model.research.impl.BoundedGenericParameterImpl <em>Bounded Generic Parameter</em>}' class.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @see info.bondtnt.labs.model.research.impl.BoundedGenericParameterImpl
         * @see info.bondtnt.labs.model.research.impl.ResearchPackageImpl#getBoundedGenericParameter()
         * @generated
         */
        EClass BOUNDED_GENERIC_PARAMETER = eINSTANCE.getBoundedGenericParameter();
        /**
         * The meta object literal for the '<em><b>All Values</b></em>' attribute list feature.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @generated
         */
        EAttribute BOUNDED_GENERIC_PARAMETER__ALL_VALUES = eINSTANCE.getBoundedGenericParameter_AllValues();
        /**
         * The meta object literal for the '{@link info.bondtnt.labs.model.research.impl.NamedDoubleParameterImpl <em>Named Double Parameter</em>}' class.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @see info.bondtnt.labs.model.research.impl.NamedDoubleParameterImpl
         * @see info.bondtnt.labs.model.research.impl.ResearchPackageImpl#getNamedDoubleParameter()
         * @generated
         */
        EClass NAMED_DOUBLE_PARAMETER = eINSTANCE.getNamedDoubleParameter();
        /**
         * The meta object literal for the '<em><b>Name</b></em>' attribute feature.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @generated
         */
        EAttribute NAMED_DOUBLE_PARAMETER__NAME = eINSTANCE.getNamedDoubleParameter_Name();
        /**
         * The meta object literal for the '<em><b>Value</b></em>' attribute feature.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @generated
         */
        EAttribute NAMED_DOUBLE_PARAMETER__VALUE = eINSTANCE.getNamedDoubleParameter_Value();
        /**
         * The meta object literal for the '{@link info.bondtnt.labs.model.research.impl.ParametersListImpl <em>Parameters List</em>}' class.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @see info.bondtnt.labs.model.research.impl.ParametersListImpl
         * @see info.bondtnt.labs.model.research.impl.ResearchPackageImpl#getParametersList()
         * @generated
         */
        EClass PARAMETERS_LIST = eINSTANCE.getParametersList();
        /**
         * The meta object literal for the '<em><b>List</b></em>' attribute list feature.
         * <!-- begin-user-doc -->
         * <!-- end-user-doc -->
         * @generated
         */
        EAttribute PARAMETERS_LIST__LIST = eINSTANCE.getParametersList_List();
    }
} //ResearchPackage
| Java |
package info.bondtnt.labs.model.research;
import org.eclipse.emf.common.util.EList;
/**
 * A bounded parameter whose admissible values are an explicit, enumerated
 * list (contrast with the first/last/step form of BoundedDoubleParameter).
 *
 * @author <a href="mailto:bondtnt@gmail.com">Andrey Bondarenko</a>
 * @model
 */
public interface BoundedGenericParameter<Type> extends AbstractBoundedGenericParameter<Type> {
    /**
     * The list of admissible values (read-only feature in the model).
     * @model changeable="false"
     */
    public EList<Type> getAllValues();
    /**
     * Adds {@code value} to the admissible values.
     * @model
     */
    public void addValue(Type value);
    /**
     * Removes every admissible value.
     * @model
     */
    public void removeAllValues();
    /**
     * Removes {@code value} from the admissible values, if present.
     * @model
     */
    public void removeValue(Type value);
}
| Java |
package info.bondtnt.labs.model.research;
import org.eclipse.emf.ecore.EObject;
/**
 * A simple (name, double value) parameter pair.
 *
 * @author <a href="mailto:bondtnt@gmail.com">Andrey Bondarenko</a>
 * @model
 */
public interface NamedDoubleParameter extends EObject {
    /**
     * The parameter's name.
     * @model
     */
    public String getName();
    /**
     * Sets the value of the '{@link info.bondtnt.labs.model.research.NamedDoubleParameter#getName <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Name</em>' attribute.
     * @see #getName()
     * @generated
     */
    void setName(String value);
    /**
     * The parameter's numeric value.
     * @model
     */
    public Double getValue();
    /**
     * Sets the value of the '{@link info.bondtnt.labs.model.research.NamedDoubleParameter#getValue <em>Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Value</em>' attribute.
     * @see #getValue()
     * @generated
     */
    void setValue(Double value);
}
| Java |
package info.bondtnt.labs.model.research;
import org.eclipse.emf.ecore.EObject;
/**
 * Base contract for a named parameter with a bounded, indexable value set.
 * @model
 */
public interface AbstractBoundedGenericParameter<Type> extends EObject {
    /**
     * The parameter's name.
     * @model
     */
    public abstract String getName();
    /**
     * Sets the value of the '{@link info.bondtnt.labs.model.research.AbstractBoundedGenericParameter#getName <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Name</em>' attribute.
     * @see #getName()
     * @generated
     */
    void setName(String value);
    /**
     * Number of admissible values.
     * @model changeable="false" transient="true"
     */
    public abstract Integer countOfValues();
    /**
     * Returns value according to its index.
     * First index is 0; Last index is (countOfValues() - 1);
     * First value usually is less than last;
     *
     * @model changeable="false"
     */
    public abstract Type getValueByIndex(Integer index);
}
| Java |
package info.bondtnt.labs.model.research;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
/**
 * A containment list of parameters.
 *
 * @author <a href="mailto:bondtnt@gmail.com">Andrey Bondarenko</a>
 * @model
 */
public interface ParametersList<Type> extends EObject {
    /**
     * The contained parameters.
     * @model containment=true
     */
    public EList<Type> getList();
    /**
     * Appends {@code namedParam} to the list.
     * @model
     */
    public void addParameter(Type namedParam);
}
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package info.bondtnt.labs.model.research.impl;
import info.bondtnt.labs.model.research.BoundedGenericParameter;
import info.bondtnt.labs.model.research.ResearchPackage;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.util.EDataTypeUniqueEList;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Bounded Generic Parameter</b></em>'.
 * NOTE(review): mostly EMF-generated; only the methods marked
 * {@code @generated NOT} below are hand-written.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link info.bondtnt.labs.model.research.impl.BoundedGenericParameterImpl#getAllValues <em>All Values</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class BoundedGenericParameterImpl<Type> extends AbstractBoundedGenericParameterImpl<Type> implements BoundedGenericParameter<Type> {
    /**
     * The cached value of the '{@link #getAllValues() <em>All Values</em>}' attribute list.
     * Lazily created by {@link #checkAllValuesNotNull()}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getAllValues()
     * @generated
     * @ordered
     */
    protected EList<Type> allValues;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected BoundedGenericParameterImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return ResearchPackage.Literals.BOUNDED_GENERIC_PARAMETER;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<Type> getAllValues() {
        checkAllValuesNotNull();
        return allValues;
    }
    /**
     * Adds a value to the list (duplicates are ignored by the unique list).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated NOT
     */
    public void addValue(Type value) {
        checkAllValuesNotNull();
        allValues.add(value);
    }
    /**
     * Clears the value list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated NOT
     */
    public void removeAllValues() {
        checkAllValuesNotNull();
        allValues.clear();
    }
    /**
     * Removes one value from the list, if present.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated NOT
     */
    public void removeValue(Type value) {
        checkAllValuesNotNull();
        allValues.remove(value);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case ResearchPackage.BOUNDED_GENERIC_PARAMETER__ALL_VALUES:
                return getAllValues();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case ResearchPackage.BOUNDED_GENERIC_PARAMETER__ALL_VALUES:
                return allValues != null && !allValues.isEmpty();
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (allValues: ");
        result.append(allValues);
        result.append(')');
        return result.toString();
    }
    // Lazy initialization: despite the name, this *ensures* allValues is
    // non-null (creates the notifying unique list on first use).
    private void checkAllValuesNotNull() {
        if (allValues == null) {
            allValues = new EDataTypeUniqueEList<Type>(Object.class, this, ResearchPackage.BOUNDED_GENERIC_PARAMETER__ALL_VALUES);
        }
    }
    // Overrides the UnsupportedOperationException stub in the superclass.
    @Override
    public Integer countOfValues() {
        checkAllValuesNotNull();
        return this.allValues.size();
    }
    // May throw IndexOutOfBoundsException for index outside [0, countOfValues()).
    @Override
    public Type getValueByIndex(Integer index) {
        checkAllValuesNotNull();
        return allValues.get(index);
    }
} //BoundedGenericParameterImpl
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package info.bondtnt.labs.model.research.impl;
import info.bondtnt.labs.model.research.ParametersList;
import info.bondtnt.labs.model.research.ResearchPackage;
import java.util.Collection;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.EDataTypeUniqueEList;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Parameters List</b></em>'.
 * NOTE(review): mostly EMF-generated; only addParameter is hand-written.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link info.bondtnt.labs.model.research.impl.ParametersListImpl#getList <em>List</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class ParametersListImpl<Type> extends EObjectImpl implements ParametersList<Type> {
    /**
     * The cached value of the '{@link #getList() <em>List</em>}' attribute list.
     * Lazily created by {@link #checkListNotNull()}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getList()
     * @generated
     * @ordered
     */
    protected EList<Type> list;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected ParametersListImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return ResearchPackage.Literals.PARAMETERS_LIST;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<Type> getList() {
        checkListNotNull();
        return list;
    }
    // Lazy initialization: despite the name, this *ensures* list is non-null
    // (creates the notifying unique list on first use).
    private void checkListNotNull() {
        if (list == null) {
            list = new EDataTypeUniqueEList<Type>(Object.class, this, ResearchPackage.PARAMETERS_LIST__LIST);
        }
    }
    /**
     * Appends a parameter (duplicates are ignored by the unique list).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated NOT
     */
    public void addParameter(Type namedParam) {
        checkListNotNull();
        list.add(namedParam);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case ResearchPackage.PARAMETERS_LIST__LIST:
                return getList();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case ResearchPackage.PARAMETERS_LIST__LIST:
                getList().clear();
                getList().addAll((Collection<? extends Type>)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case ResearchPackage.PARAMETERS_LIST__LIST:
                getList().clear();
                return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case ResearchPackage.PARAMETERS_LIST__LIST:
                return list != null && !list.isEmpty();
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (list: ");
        result.append(list);
        result.append(')');
        return result.toString();
    }
} //ParametersListImpl
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package info.bondtnt.labs.model.research.impl;
import info.bondtnt.labs.model.research.AbstractBoundedGenericParameter;
import info.bondtnt.labs.model.research.ResearchPackage;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Abstract Bounded Generic Parameter</b></em>'.
 * NOTE(review): mostly EMF-generated; countOfValues/getValueByIndex are
 * hand-written stubs that concrete subclasses must override.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link info.bondtnt.labs.model.research.impl.AbstractBoundedGenericParameterImpl#getName <em>Name</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class AbstractBoundedGenericParameterImpl<Type> extends EObjectImpl implements AbstractBoundedGenericParameter<Type> {
    /**
     * The default value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected static final String NAME_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected String name = NAME_EDEFAULT;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected AbstractBoundedGenericParameterImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return ResearchPackage.Literals.ABSTRACT_BOUNDED_GENERIC_PARAMETER;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getName() {
        return name;
    }
    /**
     * Sets the name and emits an EMF SET notification when observers exist.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setName(String newName) {
        String oldName = name;
        name = newName;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, ResearchPackage.ABSTRACT_BOUNDED_GENERIC_PARAMETER__NAME, oldName, name));
    }
    /**
     * Stub: concrete subclasses must override (always throws here).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated NOT
     */
    public Integer countOfValues() {
        throw new UnsupportedOperationException();
    }
    /**
     * Stub: concrete subclasses must override (always throws here).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated NOT
     */
    public Type getValueByIndex(Integer index) {
        throw new UnsupportedOperationException();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case ResearchPackage.ABSTRACT_BOUNDED_GENERIC_PARAMETER__NAME:
                return getName();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case ResearchPackage.ABSTRACT_BOUNDED_GENERIC_PARAMETER__NAME:
                setName((String)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case ResearchPackage.ABSTRACT_BOUNDED_GENERIC_PARAMETER__NAME:
                setName(NAME_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case ResearchPackage.ABSTRACT_BOUNDED_GENERIC_PARAMETER__NAME:
                return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (name: ");
        result.append(name);
        result.append(')');
        return result.toString();
    }
} //AbstractBoundedGenericParameterImpl
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package info.bondtnt.labs.model.research.impl;
import info.bondtnt.labs.model.research.NamedDoubleParameter;
import info.bondtnt.labs.model.research.ResearchPackage;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Named Double Parameter</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link info.bondtnt.labs.model.research.impl.NamedDoubleParameterImpl#getName <em>Name</em>}</li>
* <li>{@link info.bondtnt.labs.model.research.impl.NamedDoubleParameterImpl#getValue <em>Value</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class NamedDoubleParameterImpl extends EObjectImpl implements NamedDoubleParameter {
    /**
     * The default value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected static final String NAME_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected String name = NAME_EDEFAULT;
    /**
     * The default value of the '{@link #getValue() <em>Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getValue()
     * @generated
     * @ordered
     */
    protected static final Double VALUE_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getValue() <em>Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getValue()
     * @generated
     * @ordered
     */
    protected Double value = VALUE_EDEFAULT;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected NamedDoubleParameterImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return ResearchPackage.Literals.NAMED_DOUBLE_PARAMETER;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getName() {
        return name;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setName(String newName) {
        // Generated EMF setter: cache old value and fire a SET notification
        // only when adapters are attached.
        String oldName = name;
        name = newName;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, ResearchPackage.NAMED_DOUBLE_PARAMETER__NAME, oldName, name));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public Double getValue() {
        return value;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setValue(Double newValue) {
        Double oldValue = value;
        value = newValue;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, ResearchPackage.NAMED_DOUBLE_PARAMETER__VALUE, oldValue, value));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case ResearchPackage.NAMED_DOUBLE_PARAMETER__NAME:
                return getName();
            case ResearchPackage.NAMED_DOUBLE_PARAMETER__VALUE:
                return getValue();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case ResearchPackage.NAMED_DOUBLE_PARAMETER__NAME:
                setName((String)newValue);
                return;
            case ResearchPackage.NAMED_DOUBLE_PARAMETER__VALUE:
                setValue((Double)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case ResearchPackage.NAMED_DOUBLE_PARAMETER__NAME:
                setName(NAME_EDEFAULT);
                return;
            case ResearchPackage.NAMED_DOUBLE_PARAMETER__VALUE:
                setValue(VALUE_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case ResearchPackage.NAMED_DOUBLE_PARAMETER__NAME:
                // "Set" means the cached value differs from the (null) default.
                return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
            case ResearchPackage.NAMED_DOUBLE_PARAMETER__VALUE:
                return VALUE_EDEFAULT == null ? value != null : !VALUE_EDEFAULT.equals(value);
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (name: ");
        result.append(name);
        result.append(", value: ");
        result.append(value);
        result.append(')');
        return result.toString();
    }
} //NamedDoubleParameterImpl
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package info.bondtnt.labs.model.research.impl;
import info.bondtnt.labs.model.research.BoundedDoubleParameter;
import info.bondtnt.labs.model.research.ResearchPackage;
import org.eclipse.emf.common.util.BasicEList;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Bounded Double Parameter</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link info.bondtnt.labs.model.research.impl.BoundedDoubleParameterImpl#getFirstValue <em>First Value</em>}</li>
* <li>{@link info.bondtnt.labs.model.research.impl.BoundedDoubleParameterImpl#getLastValue <em>Last Value</em>}</li>
* <li>{@link info.bondtnt.labs.model.research.impl.BoundedDoubleParameterImpl#getStepValue <em>Step Value</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class BoundedDoubleParameterImpl extends AbstractBoundedGenericParameterImpl<Double> implements BoundedDoubleParameter {
    /**
     * Fixed-point scale factor (1000) applied when computing stepped values.
     * NOTE(review): multiplying and dividing a double by a constant does not
     * change its precision; this presumably was intended to reduce visible
     * round-off — confirm whether it can be removed.
     */
    private static final int PRECISION_ENCHASER = 1000;
    /**
     * The default value of the '{@link #getFirstValue() <em>First Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getFirstValue()
     * @generated
     * @ordered
     */
    protected static final Double FIRST_VALUE_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getFirstValue() <em>First Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getFirstValue()
     * @generated
     * @ordered
     */
    protected Double firstValue = FIRST_VALUE_EDEFAULT;
    /**
     * The default value of the '{@link #getLastValue() <em>Last Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getLastValue()
     * @generated
     * @ordered
     */
    protected static final Double LAST_VALUE_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getLastValue() <em>Last Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getLastValue()
     * @generated
     * @ordered
     */
    protected Double lastValue = LAST_VALUE_EDEFAULT;
    /**
     * The default value of the '{@link #getStepValue() <em>Step Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getStepValue()
     * @generated
     * @ordered
     */
    protected static final Double STEP_VALUE_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getStepValue() <em>Step Value</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getStepValue()
     * @generated
     * @ordered
     */
    protected Double stepValue = STEP_VALUE_EDEFAULT;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected BoundedDoubleParameterImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return ResearchPackage.Literals.BOUNDED_DOUBLE_PARAMETER;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public Double getFirstValue() {
        return firstValue;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public Double getLastValue() {
        return lastValue;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public Double getStepValue() {
        return stepValue;
    }
    /**
     * <!-- begin-user-doc -->
     * Returns every value of the range, i.e. firstValue, firstValue + stepValue,
     * ..., up to (approximately) lastValue, computed via
     * {@link #getValueByIndex(Integer)}.
     * <!-- end-user-doc -->
     * @return all values in step order; never null.
     * @generated NOT
     */
    public EList<Double> allValues() {
        BasicEList<Double> basicEList = new BasicEList<Double>();
        // Delegate to getValueByIndex so the step size is applied consistently.
        // BUG FIX: the previous implementation computed
        //   firstValue * PRECISION_ENCHASER + PRECISION_ENCHASER * index
        // omitting the stepValue factor, i.e. it always stepped by 1.0
        // regardless of the configured step (inconsistent with getValueByIndex).
        Integer countOfValues = countOfValues();
        for (int index = 0; index < countOfValues; index++) {
            basicEList.add(getValueByIndex(index));
        }
        return basicEList;
    }
    /**
     * <!-- begin-user-doc -->
     * Configures the inclusive range [firstValue, lastValue] and its step.
     * <!-- end-user-doc -->
     * @param firstValue lower bound of the range.
     * @param lastValue upper bound of the range; must be >= firstValue.
     * @param stepValue increment between successive values (a non-positive
     *        step is not validated here — see countOfValues()).
     * @throws IllegalArgumentException if firstValue &gt; lastValue.
     * @generated NOT
     */
    public void setBoundaries(double firstValue, double lastValue, double stepValue) {
        if (firstValue > lastValue) {
            // BUG FIX: the message previously had the operand names reversed
            // relative to the condition being checked.
            throw new IllegalArgumentException("'firstValue' can't be greater than 'lastValue'");
        }
        this.firstValue = firstValue;
        this.lastValue = lastValue;
        this.stepValue = stepValue;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case ResearchPackage.BOUNDED_DOUBLE_PARAMETER__FIRST_VALUE:
                return getFirstValue();
            case ResearchPackage.BOUNDED_DOUBLE_PARAMETER__LAST_VALUE:
                return getLastValue();
            case ResearchPackage.BOUNDED_DOUBLE_PARAMETER__STEP_VALUE:
                return getStepValue();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case ResearchPackage.BOUNDED_DOUBLE_PARAMETER__FIRST_VALUE:
                return FIRST_VALUE_EDEFAULT == null ? firstValue != null : !FIRST_VALUE_EDEFAULT.equals(firstValue);
            case ResearchPackage.BOUNDED_DOUBLE_PARAMETER__LAST_VALUE:
                return LAST_VALUE_EDEFAULT == null ? lastValue != null : !LAST_VALUE_EDEFAULT.equals(lastValue);
            case ResearchPackage.BOUNDED_DOUBLE_PARAMETER__STEP_VALUE:
                return STEP_VALUE_EDEFAULT == null ? stepValue != null : !STEP_VALUE_EDEFAULT.equals(stepValue);
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (firstValue: ");
        result.append(firstValue);
        result.append(", lastValue: ");
        result.append(lastValue);
        result.append(", stepValue: ");
        result.append(stepValue);
        result.append(')');
        return result.toString();
    }
    /////////////////////////////////////////////////////////////////
    // Overridden part
    /////////////////////////////////////////////////////////////////
    /**
     * Number of values in the inclusive range, i.e.
     * round((lastValue - firstValue) / stepValue) + 1.
     * NOTE(review): a zero or negative stepValue is not rejected here and
     * yields a meaningless count — confirm callers always pass a positive step.
     */
    @Override
    public Integer countOfValues() {
        Double delta = lastValue - firstValue;
        Long count = Math.round(delta / stepValue);
        count++; // both endpoints are included
        return count.intValue();
    }
    /**
     * Value at position {@code index}: firstValue + stepValue * index.
     * @throws IllegalArgumentException if index &gt;= countOfValues().
     */
    @Override
    public Double getValueByIndex(Integer index) {
        Double result = null;
        if (index < this.countOfValues()) {
            Double value = this.getFirstValue();
            final Double stepVal = this.getStepValue();
            // Scaled arithmetic; see PRECISION_ENCHASER note above.
            value = value * PRECISION_ENCHASER + stepVal * PRECISION_ENCHASER * index;
            result = value / PRECISION_ENCHASER;
        } else {
            throw new IllegalArgumentException("Parameter 'index' must be less than countOfvalues()!");
        }
        return result;
    }
} //BoundedDoubleParameterImpl
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package info.bondtnt.labs.model.research.impl;
import info.bondtnt.labs.model.research.AbstractBoundedGenericParameter;
import info.bondtnt.labs.model.research.BoundedDoubleParameter;
import info.bondtnt.labs.model.research.BoundedGenericParameter;
import info.bondtnt.labs.model.research.NamedDoubleParameter;
import info.bondtnt.labs.model.research.ParametersList;
import info.bondtnt.labs.model.research.ResearchFactory;
import info.bondtnt.labs.model.research.ResearchPackage;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EGenericType;
import org.eclipse.emf.ecore.EOperation;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.ETypeParameter;
import org.eclipse.emf.ecore.impl.EPackageImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model <b>Package</b>.
* <!-- end-user-doc -->
* @generated
*/
public class ResearchPackageImpl extends EPackageImpl implements ResearchPackage {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass abstractBoundedGenericParameterEClass = null;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass boundedDoubleParameterEClass = null;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass boundedGenericParameterEClass = null;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass namedDoubleParameterEClass = null;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass parametersListEClass = null;
    /**
     * Creates an instance of the model <b>Package</b>, registered with
     * {@link org.eclipse.emf.ecore.EPackage.Registry EPackage.Registry} by the package
     * package URI value.
     * <p>Note: the correct way to create the package is via the static
     * factory method {@link #init init()}, which also performs
     * initialization of the package, or returns the registered package,
     * if one already exists.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see org.eclipse.emf.ecore.EPackage.Registry
     * @see info.bondtnt.labs.model.research.ResearchPackage#eNS_URI
     * @see #init()
     * @generated
     */
    private ResearchPackageImpl() {
        super(eNS_URI, ResearchFactory.eINSTANCE);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private static boolean isInited = false;
    /**
     * Creates, registers, and initializes the <b>Package</b> for this
     * model, and for any others upon which it depends. Simple
     * dependencies are satisfied by calling this method on all
     * dependent packages before doing anything else. This method drives
     * initialization for interdependent packages directly, in parallel
     * with this package, itself.
     * <p>Of this package and its interdependencies, all packages which
     * have not yet been registered by their URI values are first created
     * and registered. The packages are then initialized in two steps:
     * meta-model objects for all of the packages are created before any
     * are initialized, since one package's meta-model objects may refer to
     * those of another.
     * <p>Invocation of this method will not affect any packages that have
     * already been initialized.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #eNS_URI
     * @see #createPackageContents()
     * @see #initializePackageContents()
     * @generated
     */
    public static ResearchPackage init() {
        // Lazy, one-shot initialization guarded by a plain boolean.
        // NOTE(review): not synchronized — concurrent first calls could race;
        // this is the standard EMF-generated pattern, but confirm single-threaded use.
        if (isInited) return (ResearchPackage)EPackage.Registry.INSTANCE.getEPackage(ResearchPackage.eNS_URI);
        // Obtain or create and register package
        ResearchPackageImpl theResearchPackage = (ResearchPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(eNS_URI) instanceof ResearchPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(eNS_URI) : new ResearchPackageImpl());
        isInited = true;
        // Create package meta-data objects
        theResearchPackage.createPackageContents();
        // Initialize created meta-data
        theResearchPackage.initializePackageContents();
        // Mark meta-data to indicate it can't be changed
        theResearchPackage.freeze();
        return theResearchPackage;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getAbstractBoundedGenericParameter() {
        return abstractBoundedGenericParameterEClass;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getAbstractBoundedGenericParameter_Name() {
        // Feature accessors index into the structural-feature list in the
        // order the features were created in createPackageContents().
        return (EAttribute)abstractBoundedGenericParameterEClass.getEStructuralFeatures().get(0);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getBoundedDoubleParameter() {
        return boundedDoubleParameterEClass;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getBoundedDoubleParameter_FirstValue() {
        return (EAttribute)boundedDoubleParameterEClass.getEStructuralFeatures().get(0);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getBoundedDoubleParameter_LastValue() {
        return (EAttribute)boundedDoubleParameterEClass.getEStructuralFeatures().get(1);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getBoundedDoubleParameter_StepValue() {
        return (EAttribute)boundedDoubleParameterEClass.getEStructuralFeatures().get(2);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getBoundedGenericParameter() {
        return boundedGenericParameterEClass;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getBoundedGenericParameter_AllValues() {
        return (EAttribute)boundedGenericParameterEClass.getEStructuralFeatures().get(0);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getNamedDoubleParameter() {
        return namedDoubleParameterEClass;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getNamedDoubleParameter_Name() {
        return (EAttribute)namedDoubleParameterEClass.getEStructuralFeatures().get(0);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getNamedDoubleParameter_Value() {
        return (EAttribute)namedDoubleParameterEClass.getEStructuralFeatures().get(1);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getParametersList() {
        return parametersListEClass;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getParametersList_List() {
        return (EAttribute)parametersListEClass.getEStructuralFeatures().get(0);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ResearchFactory getResearchFactory() {
        return (ResearchFactory)getEFactoryInstance();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private boolean isCreated = false;
    /**
     * Creates the meta-model objects for the package. This method is
     * guarded to have no affect on any invocation but its first.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void createPackageContents() {
        if (isCreated) return;
        isCreated = true;
        // Create classes and their features
        // (creation order here fixes the indices used by the feature accessors above)
        abstractBoundedGenericParameterEClass = createEClass(ABSTRACT_BOUNDED_GENERIC_PARAMETER);
        createEAttribute(abstractBoundedGenericParameterEClass, ABSTRACT_BOUNDED_GENERIC_PARAMETER__NAME);
        boundedDoubleParameterEClass = createEClass(BOUNDED_DOUBLE_PARAMETER);
        createEAttribute(boundedDoubleParameterEClass, BOUNDED_DOUBLE_PARAMETER__FIRST_VALUE);
        createEAttribute(boundedDoubleParameterEClass, BOUNDED_DOUBLE_PARAMETER__LAST_VALUE);
        createEAttribute(boundedDoubleParameterEClass, BOUNDED_DOUBLE_PARAMETER__STEP_VALUE);
        boundedGenericParameterEClass = createEClass(BOUNDED_GENERIC_PARAMETER);
        createEAttribute(boundedGenericParameterEClass, BOUNDED_GENERIC_PARAMETER__ALL_VALUES);
        namedDoubleParameterEClass = createEClass(NAMED_DOUBLE_PARAMETER);
        createEAttribute(namedDoubleParameterEClass, NAMED_DOUBLE_PARAMETER__NAME);
        createEAttribute(namedDoubleParameterEClass, NAMED_DOUBLE_PARAMETER__VALUE);
        parametersListEClass = createEClass(PARAMETERS_LIST);
        createEAttribute(parametersListEClass, PARAMETERS_LIST__LIST);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private boolean isInitialized = false;
    /**
     * Complete the initialization of the package and its meta-model. This
     * method is guarded to have no affect on any invocation but its first.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void initializePackageContents() {
        if (isInitialized) return;
        isInitialized = true;
        // Initialize package
        setName(eNAME);
        setNsPrefix(eNS_PREFIX);
        setNsURI(eNS_URI);
        // Create type parameters
        ETypeParameter abstractBoundedGenericParameterEClass_Type = addETypeParameter(abstractBoundedGenericParameterEClass, "Type");
        ETypeParameter boundedGenericParameterEClass_Type = addETypeParameter(boundedGenericParameterEClass, "Type");
        ETypeParameter parametersListEClass_Type = addETypeParameter(parametersListEClass, "Type");
        // Set bounds for type parameters
        // (none declared in this model)
        // Add supertypes to classes
        // BoundedDoubleParameter extends AbstractBoundedGenericParameter<Double>
        EGenericType g1 = createEGenericType(this.getAbstractBoundedGenericParameter());
        EGenericType g2 = createEGenericType(ecorePackage.getEDoubleObject());
        g1.getETypeArguments().add(g2);
        boundedDoubleParameterEClass.getEGenericSuperTypes().add(g1);
        // BoundedGenericParameter<Type> extends AbstractBoundedGenericParameter<Type>
        g1 = createEGenericType(this.getAbstractBoundedGenericParameter());
        g2 = createEGenericType(boundedGenericParameterEClass_Type);
        g1.getETypeArguments().add(g2);
        boundedGenericParameterEClass.getEGenericSuperTypes().add(g1);
        // Initialize classes and features; add operations and parameters
        initEClass(abstractBoundedGenericParameterEClass, AbstractBoundedGenericParameter.class, "AbstractBoundedGenericParameter", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEAttribute(getAbstractBoundedGenericParameter_Name(), ecorePackage.getEString(), "name", null, 0, 1, AbstractBoundedGenericParameter.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        addEOperation(abstractBoundedGenericParameterEClass, ecorePackage.getEIntegerObject(), "countOfValues", 0, 1, IS_UNIQUE, IS_ORDERED);
        EOperation op = addEOperation(abstractBoundedGenericParameterEClass, null, "getValueByIndex", 0, 1, IS_UNIQUE, IS_ORDERED);
        addEParameter(op, ecorePackage.getEIntegerObject(), "index", 0, 1, IS_UNIQUE, IS_ORDERED);
        g1 = createEGenericType(abstractBoundedGenericParameterEClass_Type);
        initEOperation(op, g1);
        initEClass(boundedDoubleParameterEClass, BoundedDoubleParameter.class, "BoundedDoubleParameter", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEAttribute(getBoundedDoubleParameter_FirstValue(), ecorePackage.getEDoubleObject(), "firstValue", null, 0, 1, BoundedDoubleParameter.class, !IS_TRANSIENT, !IS_VOLATILE, !IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getBoundedDoubleParameter_LastValue(), ecorePackage.getEDoubleObject(), "lastValue", null, 0, 1, BoundedDoubleParameter.class, !IS_TRANSIENT, !IS_VOLATILE, !IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getBoundedDoubleParameter_StepValue(), ecorePackage.getEDoubleObject(), "stepValue", null, 0, 1, BoundedDoubleParameter.class, !IS_TRANSIENT, !IS_VOLATILE, !IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        addEOperation(boundedDoubleParameterEClass, ecorePackage.getEDoubleObject(), "allValues", 0, -1, IS_UNIQUE, IS_ORDERED);
        op = addEOperation(boundedDoubleParameterEClass, null, "setBoundaries", 0, 1, IS_UNIQUE, IS_ORDERED);
        addEParameter(op, ecorePackage.getEDouble(), "firstValue", 0, 1, IS_UNIQUE, IS_ORDERED);
        addEParameter(op, ecorePackage.getEDouble(), "lastValue", 0, 1, IS_UNIQUE, IS_ORDERED);
        addEParameter(op, ecorePackage.getEDouble(), "stepValue", 0, 1, IS_UNIQUE, IS_ORDERED);
        initEClass(boundedGenericParameterEClass, BoundedGenericParameter.class, "BoundedGenericParameter", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        g1 = createEGenericType(boundedGenericParameterEClass_Type);
        initEAttribute(getBoundedGenericParameter_AllValues(), g1, "allValues", null, 0, -1, BoundedGenericParameter.class, !IS_TRANSIENT, !IS_VOLATILE, !IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        op = addEOperation(boundedGenericParameterEClass, null, "addValue", 0, 1, IS_UNIQUE, IS_ORDERED);
        g1 = createEGenericType(boundedGenericParameterEClass_Type);
        addEParameter(op, g1, "value", 0, 1, IS_UNIQUE, IS_ORDERED);
        addEOperation(boundedGenericParameterEClass, null, "removeAllValues", 0, 1, IS_UNIQUE, IS_ORDERED);
        op = addEOperation(boundedGenericParameterEClass, null, "removeValue", 0, 1, IS_UNIQUE, IS_ORDERED);
        g1 = createEGenericType(boundedGenericParameterEClass_Type);
        addEParameter(op, g1, "value", 0, 1, IS_UNIQUE, IS_ORDERED);
        initEClass(namedDoubleParameterEClass, NamedDoubleParameter.class, "NamedDoubleParameter", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEAttribute(getNamedDoubleParameter_Name(), ecorePackage.getEString(), "name", null, 0, 1, NamedDoubleParameter.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getNamedDoubleParameter_Value(), ecorePackage.getEDoubleObject(), "value", null, 0, 1, NamedDoubleParameter.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEClass(parametersListEClass, ParametersList.class, "ParametersList", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        g1 = createEGenericType(parametersListEClass_Type);
        initEAttribute(getParametersList_List(), g1, "list", null, 0, -1, ParametersList.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        op = addEOperation(parametersListEClass, null, "addParameter", 0, 1, IS_UNIQUE, IS_ORDERED);
        g1 = createEGenericType(parametersListEClass_Type);
        addEParameter(op, g1, "namedParam", 0, 1, IS_UNIQUE, IS_ORDERED);
        // Create resource
        createResource(eNS_URI);
    }
} //ResearchPackageImpl
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package info.bondtnt.labs.model.research.util;
import info.bondtnt.labs.model.research.*;
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.emf.common.notify.impl.AdapterFactoryImpl;
import org.eclipse.emf.ecore.EObject;
/**
* <!-- begin-user-doc -->
* The <b>Adapter Factory</b> for the model.
* It provides an adapter <code>createXXX</code> method for each class of the model.
* <!-- end-user-doc -->
* @see info.bondtnt.labs.model.research.ResearchPackage
* @generated
*/
public class ResearchAdapterFactory extends AdapterFactoryImpl {
/**
* The cached model package.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected static ResearchPackage modelPackage;
/**
* Creates an instance of the adapter factory.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public ResearchAdapterFactory() {
if (modelPackage == null) {
modelPackage = ResearchPackage.eINSTANCE;
}
}
/**
* Returns whether this factory is applicable for the type of the object.
* <!-- begin-user-doc -->
* This implementation returns <code>true</code> if the object is either the model's package or is an instance object of the model.
* <!-- end-user-doc -->
* @return whether this factory is applicable for the type of the object.
* @generated
*/
@Override
public boolean isFactoryForType(Object object) {
if (object == modelPackage) {
return true;
}
if (object instanceof EObject) {
return ((EObject)object).eClass().getEPackage() == modelPackage;
}
return false;
}
/**
* The switch that delegates to the <code>createXXX</code> methods.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected ResearchSwitch<Adapter> modelSwitch =
new ResearchSwitch<Adapter>() {
@Override
public <Type> Adapter caseAbstractBoundedGenericParameter(AbstractBoundedGenericParameter<Type> object) {
return createAbstractBoundedGenericParameterAdapter();
}
@Override
public Adapter caseBoundedDoubleParameter(BoundedDoubleParameter object) {
return createBoundedDoubleParameterAdapter();
}
@Override
public <Type> Adapter caseBoundedGenericParameter(BoundedGenericParameter<Type> object) {
return createBoundedGenericParameterAdapter();
}
@Override
public Adapter caseNamedDoubleParameter(NamedDoubleParameter object) {
return createNamedDoubleParameterAdapter();
}
@Override
public <Type> Adapter caseParametersList(ParametersList<Type> object) {
return createParametersListAdapter();
}
@Override
public Adapter defaultCase(EObject object) {
return createEObjectAdapter();
}
};
/**
* Creates an adapter for the <code>target</code>.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param target the object to adapt.
* @return the adapter for the <code>target</code>.
* @generated
*/
@Override
public Adapter createAdapter(Notifier target) {
return modelSwitch.doSwitch((EObject)target);
}
/**
* Creates a new adapter for an object of class '{@link info.bondtnt.labs.model.research.AbstractBoundedGenericParameter <em>Abstract Bounded Generic Parameter</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see info.bondtnt.labs.model.research.AbstractBoundedGenericParameter
* @generated
*/
public Adapter createAbstractBoundedGenericParameterAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link info.bondtnt.labs.model.research.BoundedDoubleParameter <em>Bounded Double Parameter</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see info.bondtnt.labs.model.research.BoundedDoubleParameter
* @generated
*/
public Adapter createBoundedDoubleParameterAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link info.bondtnt.labs.model.research.BoundedGenericParameter <em>Bounded Generic Parameter</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see info.bondtnt.labs.model.research.BoundedGenericParameter
* @generated
*/
public Adapter createBoundedGenericParameterAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link info.bondtnt.labs.model.research.NamedDoubleParameter <em>Named Double Parameter</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see info.bondtnt.labs.model.research.NamedDoubleParameter
* @generated
*/
public Adapter createNamedDoubleParameterAdapter() {
// Null means "no adapter for this case": the generated factory treats it as ignored.
return null;
}
/**
* Creates a new adapter for an object of class '{@link info.bondtnt.labs.model.research.ParametersList <em>Parameters List</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see info.bondtnt.labs.model.research.ParametersList
* @generated
*/
public Adapter createParametersListAdapter() {
// Null means "no adapter for this case": the generated factory treats it as ignored.
return null;
}
/**
* Creates a new adapter for the default case.
* <!-- begin-user-doc -->
* This default implementation returns null.
* <!-- end-user-doc -->
* @return the new adapter.
* @generated
*/
public Adapter createEObjectAdapter() {
// Final fallback: no adapter is attached when no more specific case matched.
return null;
}
} //ResearchAdapterFactory
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package info.bondtnt.labs.model.research.util;
import info.bondtnt.labs.model.research.*;
import java.util.List;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
/**
* <!-- begin-user-doc -->
* The <b>Switch</b> for the model's inheritance hierarchy.
* It supports the call {@link #doSwitch(EObject) doSwitch(object)}
* to invoke the <code>caseXXX</code> method for each class of the model,
* starting with the actual class of the object
* and proceeding up the inheritance hierarchy
* until a non-null result is returned,
* which is the result of the switch.
* <!-- end-user-doc -->
* @see info.bondtnt.labs.model.research.ResearchPackage
* @generated
*/
public class ResearchSwitch<T> {
// NOTE(review): EMF-generated switch over the research model's inheritance hierarchy.
// Regenerate from the model rather than hand-editing dispatch logic.
/**
* The cached model package
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected static ResearchPackage modelPackage;
/**
* Creates an instance of the switch.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public ResearchSwitch() {
// Lazily cache the package singleton; shared by every switch instance.
if (modelPackage == null) {
modelPackage = ResearchPackage.eINSTANCE;
}
}
/**
* Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the first non-null result returned by a <code>caseXXX</code> call.
* @generated
*/
public T doSwitch(EObject theEObject) {
return doSwitch(theEObject.eClass(), theEObject);
}
/**
* Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the first non-null result returned by a <code>caseXXX</code> call.
* @generated
*/
protected T doSwitch(EClass theEClass, EObject theEObject) {
// Classes of this package dispatch directly by classifier ID; foreign classes walk
// up the FIRST supertype chain until a class of this package (or none) is found.
if (theEClass.eContainer() == modelPackage) {
return doSwitch(theEClass.getClassifierID(), theEObject);
}
else {
List<EClass> eSuperTypes = theEClass.getESuperTypes();
return
eSuperTypes.isEmpty() ?
defaultCase(theEObject) :
doSwitch(eSuperTypes.get(0), theEObject);
}
}
/**
* Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the first non-null result returned by a <code>caseXXX</code> call.
* @generated
*/
protected T doSwitch(int classifierID, EObject theEObject) {
// Each case tries the most specific caseXXX first, then supertype cases, then defaultCase.
switch (classifierID) {
case ResearchPackage.ABSTRACT_BOUNDED_GENERIC_PARAMETER: {
AbstractBoundedGenericParameter<?> abstractBoundedGenericParameter = (AbstractBoundedGenericParameter<?>)theEObject;
T result = caseAbstractBoundedGenericParameter(abstractBoundedGenericParameter);
if (result == null) result = defaultCase(theEObject);
return result;
}
case ResearchPackage.BOUNDED_DOUBLE_PARAMETER: {
BoundedDoubleParameter boundedDoubleParameter = (BoundedDoubleParameter)theEObject;
T result = caseBoundedDoubleParameter(boundedDoubleParameter);
if (result == null) result = caseAbstractBoundedGenericParameter(boundedDoubleParameter);
if (result == null) result = defaultCase(theEObject);
return result;
}
case ResearchPackage.BOUNDED_GENERIC_PARAMETER: {
BoundedGenericParameter<?> boundedGenericParameter = (BoundedGenericParameter<?>)theEObject;
T result = caseBoundedGenericParameter(boundedGenericParameter);
if (result == null) result = caseAbstractBoundedGenericParameter(boundedGenericParameter);
if (result == null) result = defaultCase(theEObject);
return result;
}
case ResearchPackage.NAMED_DOUBLE_PARAMETER: {
NamedDoubleParameter namedDoubleParameter = (NamedDoubleParameter)theEObject;
T result = caseNamedDoubleParameter(namedDoubleParameter);
if (result == null) result = defaultCase(theEObject);
return result;
}
case ResearchPackage.PARAMETERS_LIST: {
ParametersList<?> parametersList = (ParametersList<?>)theEObject;
T result = caseParametersList(parametersList);
if (result == null) result = defaultCase(theEObject);
return result;
}
default: return defaultCase(theEObject);
}
}
/**
* Returns the result of interpreting the object as an instance of '<em>Abstract Bounded Generic Parameter</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Abstract Bounded Generic Parameter</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public <Type> T caseAbstractBoundedGenericParameter(AbstractBoundedGenericParameter<Type> object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Bounded Double Parameter</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Bounded Double Parameter</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseBoundedDoubleParameter(BoundedDoubleParameter object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Bounded Generic Parameter</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Bounded Generic Parameter</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public <Type> T caseBoundedGenericParameter(BoundedGenericParameter<Type> object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Named Double Parameter</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Named Double Parameter</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseNamedDoubleParameter(NamedDoubleParameter object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Parameters List</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Parameters List</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public <Type> T caseParametersList(ParametersList<Type> object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>EObject</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch, but this is the last case anyway.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>EObject</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject)
* @generated
*/
public T defaultCase(EObject object) {
return null;
}
} //ResearchSwitch
| Java |
package info.bondtnt.labs.model.research;
import org.eclipse.emf.common.util.EList;
/**
* A research parameter ranging over {@code Double} values between a first and a
* last value with a fixed step. The {@code @model} tags are read by EMF codegen.
* @author <a href="mailto:bondtnt@gmail.com">Andrey Bondarenko</a>
* @model
*/
public interface BoundedDoubleParameter extends AbstractBoundedGenericParameter<Double> {
/**
* Lower bound of the range (read-only feature).
* @model changeable="false"
*/
public Double getFirstValue();
/**
* Upper bound of the range (read-only feature).
* @model changeable="false"
*/
public Double getLastValue();
/**
* All values of the range — presumably first, first+step, ... up to last;
* TODO confirm in the implementation. Transient: not persisted.
* @model changeable="false" transient="true"
*/
public EList<Double> allValues();
/**
* Step between consecutive values (read-only feature).
* @model changeable="false"
*/
public Double getStepValue();
/**
* Replaces first, last and step in one call.
* @model
*/
public void setBoundaries(double firstValue, double lastValue, double stepValue);
}
| Java |
package info.bondtnt.labs.model.research;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
// JUnit 4 suite aggregating the research-model unit tests listed below.
@RunWith(Suite.class)
@Suite.SuiteClasses({
BoundedDoubleParameterImplTest.class,
BoundedParamTest.class,
BoundedTypedParameterImplTest.class
})
public class ResearchTestsSuite {
// the class remains completely empty,
// being used only as a holder for the above annotations
}
| Java |
package info.bond.labs.ann;
/**
* Marker interface: declares no operations. Referenced by
* {@link NeuralLayer#getMonitor()}; the {@code @model} tag exposes it to EMF codegen.
* @model
*/
public interface Monitor {
}
| Java |
package info.bond.labs.ann;
/**
* Listener on a layer's output side (an "output synapse" — see
* {@link NeuralLayer#addOutputSynapse}).
* @model
*/
public interface OutputPatternListener {
/**
* Dimension of the input this listener accepts — presumably the size of the
* pattern vector it consumes; TODO confirm against implementations.
* @model
*/
int getInputDimension();
}
| Java |
package info.bond.labs.ann;
import java.util.Vector;
/**
* A layer of a neural network: it has a name, a row count, and collections of
* input/output synapses (listeners). {@code @model} tags drive EMF codegen.
* @author Andrey
*
* @model
*/
public interface NeuralLayer {
/**
* Human-readable name of this layer.
* @model
*/
String getLayerName();
/**
* Number of rows — presumably the neuron count of the layer; TODO confirm.
* @model
*/
int getRows();
/**
* All input synapses feeding this layer; containment feature per the tag below.
* Legacy {@link Vector} return type is part of the published API.
* @model containment="true"
*/
public Vector<InputPatternListener> getAllInputs();
/**
* All output synapses fed by this layer; containment feature per the tag below.
* @model containment="true"
*/
public Vector<OutputPatternListener> getAllOutputs();
/**
* Monitor attached to this layer.
* @model
*/
Monitor getMonitor();
// Mutators carry no @model tags: plain Java operations, not modeled features.
boolean addInputSynapse(InputPatternListener newListener);
boolean addOutputSynapse(OutputPatternListener newListener);
void removeOutputSynapse(OutputPatternListener newListener);
void removeInputSynapse(InputPatternListener newListener);
}
| Java |
package info.bond.labs.ann;
/**
* Listener on a layer's input side (an "input synapse" — see
* {@link NeuralLayer#addInputSynapse}).
* @model
*/
public interface InputPatternListener {
/**
* Dimension of the output this listener produces — presumably the size of the
* pattern vector it emits; TODO confirm against implementations.
* @model
*/
int getOutputDimension();
}
| Java |
package com.ibm.model.shapes.model;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
/**
* Abstract base type of all diagram shapes: a named node with source- and
* target-side connection lists. {@code @model} tags drive EMF codegen.
* @model abstract="true"
*/
public interface Shape extends EObject {
/**
* The shape's name attribute.
* @model
*/
String getName();
/**
* Sets the value of the '{@link com.ibm.model.shapes.model.Shape#getName <em>Name</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Name</em>' attribute.
* @see #getName()
* @generated
*/
void setName(String value);
/**
* Source-side connections, owned (containment) by this shape.
* Raw {@link EList}; element type is {@code Connection} per the tag below.
* @model type="com.ibm.model.shapes.model.Connection" containment="true"
*/
EList getSourceConnections();
/**
* Target-side connections; plain (non-containment) references.
* Raw {@link EList}; element type is {@code Connection} per the tag below.
* @model type="com.ibm.model.shapes.model.Connection"
*/
EList getTargetConnections();
}
package com.ibm.model.shapes.model;
/**
* Marker subtype of {@link Shape} that contributes no additional features.
* @model
*/
public interface RectangularShape extends Shape {}
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package com.ibm.model.shapes.model;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
/**
* <!-- begin-user-doc -->
* The <b>Package</b> for the model.
* It contains accessors for the meta objects to represent
* <ul>
* <li>each class,</li>
* <li>each feature of each class,</li>
* <li>each enum,</li>
* <li>and each data type</li>
* </ul>
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.ModelFactory
* @model kind="package"
* @generated
*/
public interface ModelPackage extends EPackage {
// NOTE(review): fully generated EMF package metadata (classifier IDs, feature IDs,
// feature counts, meta-object accessors). Keep in sync by regenerating from the
// model rather than hand-editing the constants below.
/**
* The package name.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
String eNAME = "model";
/**
* The package namespace URI.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
String eNS_URI = "http:///com/ibm/model/shapes/model.ecore";
/**
* The package namespace name.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
String eNS_PREFIX = "com.ibm.model.shapes.model";
/**
* The singleton instance of the package.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
ModelPackage eINSTANCE = com.ibm.model.shapes.model.impl.ModelPackageImpl.init();
/**
* The meta object id for the '{@link com.ibm.model.shapes.model.impl.ConnectionImpl <em>Connection</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.impl.ConnectionImpl
* @see com.ibm.model.shapes.model.impl.ModelPackageImpl#getConnection()
* @generated
*/
int CONNECTION = 0;
/**
* The feature id for the '<em><b>Source</b></em>' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int CONNECTION__SOURCE = 0;
/**
* The feature id for the '<em><b>Target</b></em>' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int CONNECTION__TARGET = 1;
/**
* The number of structural features of the '<em>Connection</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int CONNECTION_FEATURE_COUNT = 2;
/**
* The meta object id for the '{@link com.ibm.model.shapes.model.impl.ShapeImpl <em>Shape</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.impl.ShapeImpl
* @see com.ibm.model.shapes.model.impl.ModelPackageImpl#getShape()
* @generated
*/
int SHAPE = 3;
/**
* The feature id for the '<em><b>Source Connections</b></em>' containment reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int SHAPE__SOURCE_CONNECTIONS = 0;
/**
* The feature id for the '<em><b>Target Connections</b></em>' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int SHAPE__TARGET_CONNECTIONS = 1;
/**
* The feature id for the '<em><b>Name</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int SHAPE__NAME = 2;
/**
* The number of structural features of the '<em>Shape</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int SHAPE_FEATURE_COUNT = 3;
/**
* The meta object id for the '{@link com.ibm.model.shapes.model.impl.EllipticalShapeImpl <em>Elliptical Shape</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.impl.EllipticalShapeImpl
* @see com.ibm.model.shapes.model.impl.ModelPackageImpl#getEllipticalShape()
* @generated
*/
int ELLIPTICAL_SHAPE = 1;
/**
* The feature id for the '<em><b>Source Connections</b></em>' containment reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int ELLIPTICAL_SHAPE__SOURCE_CONNECTIONS = SHAPE__SOURCE_CONNECTIONS;
/**
* The feature id for the '<em><b>Target Connections</b></em>' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int ELLIPTICAL_SHAPE__TARGET_CONNECTIONS = SHAPE__TARGET_CONNECTIONS;
/**
* The feature id for the '<em><b>Name</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int ELLIPTICAL_SHAPE__NAME = SHAPE__NAME;
/**
* The number of structural features of the '<em>Elliptical Shape</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int ELLIPTICAL_SHAPE_FEATURE_COUNT = SHAPE_FEATURE_COUNT + 0;
/**
* The meta object id for the '{@link com.ibm.model.shapes.model.impl.RectangularShapeImpl <em>Rectangular Shape</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.impl.RectangularShapeImpl
* @see com.ibm.model.shapes.model.impl.ModelPackageImpl#getRectangularShape()
* @generated
*/
int RECTANGULAR_SHAPE = 2;
/**
* The feature id for the '<em><b>Source Connections</b></em>' containment reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int RECTANGULAR_SHAPE__SOURCE_CONNECTIONS = SHAPE__SOURCE_CONNECTIONS;
/**
* The feature id for the '<em><b>Target Connections</b></em>' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int RECTANGULAR_SHAPE__TARGET_CONNECTIONS = SHAPE__TARGET_CONNECTIONS;
/**
* The feature id for the '<em><b>Name</b></em>' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int RECTANGULAR_SHAPE__NAME = SHAPE__NAME;
/**
* The number of structural features of the '<em>Rectangular Shape</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int RECTANGULAR_SHAPE_FEATURE_COUNT = SHAPE_FEATURE_COUNT + 0;
/**
* The meta object id for the '{@link com.ibm.model.shapes.model.impl.ShapesDiagramImpl <em>Shapes Diagram</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.impl.ShapesDiagramImpl
* @see com.ibm.model.shapes.model.impl.ModelPackageImpl#getShapesDiagram()
* @generated
*/
int SHAPES_DIAGRAM = 4;
/**
* The feature id for the '<em><b>Shapes</b></em>' containment reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int SHAPES_DIAGRAM__SHAPES = 0;
/**
* The number of structural features of the '<em>Shapes Diagram</em>' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
int SHAPES_DIAGRAM_FEATURE_COUNT = 1;
/**
* Returns the meta object for class '{@link com.ibm.model.shapes.model.Connection <em>Connection</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>Connection</em>'.
* @see com.ibm.model.shapes.model.Connection
* @generated
*/
EClass getConnection();
/**
* Returns the meta object for the reference '{@link com.ibm.model.shapes.model.Connection#getSource <em>Source</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the reference '<em>Source</em>'.
* @see com.ibm.model.shapes.model.Connection#getSource()
* @see #getConnection()
* @generated
*/
EReference getConnection_Source();
/**
* Returns the meta object for the reference '{@link com.ibm.model.shapes.model.Connection#getTarget <em>Target</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the reference '<em>Target</em>'.
* @see com.ibm.model.shapes.model.Connection#getTarget()
* @see #getConnection()
* @generated
*/
EReference getConnection_Target();
/**
* Returns the meta object for class '{@link com.ibm.model.shapes.model.EllipticalShape <em>Elliptical Shape</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>Elliptical Shape</em>'.
* @see com.ibm.model.shapes.model.EllipticalShape
* @generated
*/
EClass getEllipticalShape();
/**
* Returns the meta object for class '{@link com.ibm.model.shapes.model.RectangularShape <em>Rectangular Shape</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>Rectangular Shape</em>'.
* @see com.ibm.model.shapes.model.RectangularShape
* @generated
*/
EClass getRectangularShape();
/**
* Returns the meta object for class '{@link com.ibm.model.shapes.model.Shape <em>Shape</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>Shape</em>'.
* @see com.ibm.model.shapes.model.Shape
* @generated
*/
EClass getShape();
/**
* Returns the meta object for the containment reference list '{@link com.ibm.model.shapes.model.Shape#getSourceConnections <em>Source Connections</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference list '<em>Source Connections</em>'.
* @see com.ibm.model.shapes.model.Shape#getSourceConnections()
* @see #getShape()
* @generated
*/
EReference getShape_SourceConnections();
/**
* Returns the meta object for the reference list '{@link com.ibm.model.shapes.model.Shape#getTargetConnections <em>Target Connections</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the reference list '<em>Target Connections</em>'.
* @see com.ibm.model.shapes.model.Shape#getTargetConnections()
* @see #getShape()
* @generated
*/
EReference getShape_TargetConnections();
/**
* Returns the meta object for the attribute '{@link com.ibm.model.shapes.model.Shape#getName <em>Name</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Name</em>'.
* @see com.ibm.model.shapes.model.Shape#getName()
* @see #getShape()
* @generated
*/
EAttribute getShape_Name();
/**
* Returns the meta object for class '{@link com.ibm.model.shapes.model.ShapesDiagram <em>Shapes Diagram</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>Shapes Diagram</em>'.
* @see com.ibm.model.shapes.model.ShapesDiagram
* @generated
*/
EClass getShapesDiagram();
/**
* Returns the meta object for the containment reference list '{@link com.ibm.model.shapes.model.ShapesDiagram#getShapes <em>Shapes</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference list '<em>Shapes</em>'.
* @see com.ibm.model.shapes.model.ShapesDiagram#getShapes()
* @see #getShapesDiagram()
* @generated
*/
EReference getShapesDiagram_Shapes();
/**
* Returns the factory that creates the instances of the model.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the factory that creates the instances of the model.
* @generated
*/
ModelFactory getModelFactory();
/**
* <!-- begin-user-doc -->
* Defines literals for the meta objects that represent
* <ul>
* <li>each class,</li>
* <li>each feature of each class,</li>
* <li>each enum,</li>
* <li>and each data type</li>
* </ul>
* <!-- end-user-doc -->
* @generated
*/
interface Literals {
// Typed literal accessors, resolved once from the package singleton above.
/**
* The meta object literal for the '{@link com.ibm.model.shapes.model.impl.ConnectionImpl <em>Connection</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.impl.ConnectionImpl
* @see com.ibm.model.shapes.model.impl.ModelPackageImpl#getConnection()
* @generated
*/
EClass CONNECTION = eINSTANCE.getConnection();
/**
* The meta object literal for the '<em><b>Source</b></em>' reference feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
EReference CONNECTION__SOURCE = eINSTANCE.getConnection_Source();
/**
* The meta object literal for the '<em><b>Target</b></em>' reference feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
EReference CONNECTION__TARGET = eINSTANCE.getConnection_Target();
/**
* The meta object literal for the '{@link com.ibm.model.shapes.model.impl.EllipticalShapeImpl <em>Elliptical Shape</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.impl.EllipticalShapeImpl
* @see com.ibm.model.shapes.model.impl.ModelPackageImpl#getEllipticalShape()
* @generated
*/
EClass ELLIPTICAL_SHAPE = eINSTANCE.getEllipticalShape();
/**
* The meta object literal for the '{@link com.ibm.model.shapes.model.impl.RectangularShapeImpl <em>Rectangular Shape</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.impl.RectangularShapeImpl
* @see com.ibm.model.shapes.model.impl.ModelPackageImpl#getRectangularShape()
* @generated
*/
EClass RECTANGULAR_SHAPE = eINSTANCE.getRectangularShape();
/**
* The meta object literal for the '{@link com.ibm.model.shapes.model.impl.ShapeImpl <em>Shape</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.impl.ShapeImpl
* @see com.ibm.model.shapes.model.impl.ModelPackageImpl#getShape()
* @generated
*/
EClass SHAPE = eINSTANCE.getShape();
/**
* The meta object literal for the '<em><b>Source Connections</b></em>' containment reference list feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
EReference SHAPE__SOURCE_CONNECTIONS = eINSTANCE.getShape_SourceConnections();
/**
* The meta object literal for the '<em><b>Target Connections</b></em>' reference list feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
EReference SHAPE__TARGET_CONNECTIONS = eINSTANCE.getShape_TargetConnections();
/**
* The meta object literal for the '<em><b>Name</b></em>' attribute feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
EAttribute SHAPE__NAME = eINSTANCE.getShape_Name();
/**
* The meta object literal for the '{@link com.ibm.model.shapes.model.impl.ShapesDiagramImpl <em>Shapes Diagram</em>}' class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see com.ibm.model.shapes.model.impl.ShapesDiagramImpl
* @see com.ibm.model.shapes.model.impl.ModelPackageImpl#getShapesDiagram()
* @generated
*/
EClass SHAPES_DIAGRAM = eINSTANCE.getShapesDiagram();
/**
* The meta object literal for the '<em><b>Shapes</b></em>' containment reference list feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
EReference SHAPES_DIAGRAM__SHAPES = eINSTANCE.getShapesDiagram_Shapes();
}
} //ModelPackage
| Java |
package com.ibm.model.shapes.model;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
/**
* Root container of the diagram model. The {@code @model} tag is read by EMF codegen.
* @model
*/
public interface ShapesDiagram extends EObject {
/**
* The shapes owned by this diagram (containment). Raw {@link EList};
* element type is {@code Shape} per the tag below.
* @model type="com.ibm.model.shapes.model.Shape" containment="true"
*/
EList getShapes();
}
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package com.ibm.model.shapes.model.impl;
import com.ibm.model.shapes.model.Connection;
import com.ibm.model.shapes.model.ModelPackage;
import com.ibm.model.shapes.model.Shape;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Connection</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link com.ibm.model.shapes.model.impl.ConnectionImpl#getSource <em>Source</em>}</li>
* <li>{@link com.ibm.model.shapes.model.impl.ConnectionImpl#getTarget <em>Target</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class ConnectionImpl extends EObjectImpl implements Connection {
// NOTE(review): EMF-generated implementation; the reflective eGet/eSet/eUnset/eIsSet
// switches must stay aligned with the feature IDs in ModelPackage — regenerate
// rather than hand-edit.
/**
* The cached value of the '{@link #getSource() <em>Source</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getSource()
* @generated
* @ordered
*/
protected Shape source = null;
/**
* The cached value of the '{@link #getTarget() <em>Target</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getTarget()
* @generated
* @ordered
*/
protected Shape target = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected ConnectionImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* Binds this implementation to the CONNECTION metaclass for reflective EMF APIs.
* <!-- end-user-doc -->
* @generated
*/
protected EClass eStaticClass() {
return ModelPackage.Literals.CONNECTION;
}
/**
* <!-- begin-user-doc -->
* Resolves a cross-resource proxy on demand and fires a RESOLVE notification
* when the resolved object differs from the cached one.
* <!-- end-user-doc -->
* @generated
*/
public Shape getSource() {
if (source != null && source.eIsProxy()) {
InternalEObject oldSource = (InternalEObject)source;
source = (Shape)eResolveProxy(oldSource);
if (source != oldSource) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, ModelPackage.CONNECTION__SOURCE, oldSource, source));
}
}
return source;
}
/**
* <!-- begin-user-doc -->
* Returns the cached value without proxy resolution.
* <!-- end-user-doc -->
* @generated
*/
public Shape basicGetSource() {
return source;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setSource(Shape newSource) {
Shape oldSource = source;
source = newSource;
// Notify attached adapters only when someone is listening.
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.CONNECTION__SOURCE, oldSource, source));
}
/**
* <!-- begin-user-doc -->
* Resolves a cross-resource proxy on demand; mirrors {@link #getSource()}.
* <!-- end-user-doc -->
* @generated
*/
public Shape getTarget() {
if (target != null && target.eIsProxy()) {
InternalEObject oldTarget = (InternalEObject)target;
target = (Shape)eResolveProxy(oldTarget);
if (target != oldTarget) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, ModelPackage.CONNECTION__TARGET, oldTarget, target));
}
}
return target;
}
/**
* <!-- begin-user-doc -->
* Returns the cached value without proxy resolution.
* <!-- end-user-doc -->
* @generated
*/
public Shape basicGetTarget() {
return target;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setTarget(Shape newTarget) {
Shape oldTarget = target;
target = newTarget;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.CONNECTION__TARGET, oldTarget, target));
}
/**
* <!-- begin-user-doc -->
* Reflective getter used by the EMF runtime; keyed by feature ID.
* <!-- end-user-doc -->
* @generated
*/
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case ModelPackage.CONNECTION__SOURCE:
if (resolve) return getSource();
return basicGetSource();
case ModelPackage.CONNECTION__TARGET:
if (resolve) return getTarget();
return basicGetTarget();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* Reflective setter used by the EMF runtime; keyed by feature ID.
* <!-- end-user-doc -->
* @generated
*/
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case ModelPackage.CONNECTION__SOURCE:
setSource((Shape)newValue);
return;
case ModelPackage.CONNECTION__TARGET:
setTarget((Shape)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* Resets a feature to its default (null for these references).
* <!-- end-user-doc -->
* @generated
*/
public void eUnset(int featureID) {
switch (featureID) {
case ModelPackage.CONNECTION__SOURCE:
setSource((Shape)null);
return;
case ModelPackage.CONNECTION__TARGET:
setTarget((Shape)null);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* A reference feature counts as "set" when it is non-null.
* <!-- end-user-doc -->
* @generated
*/
public boolean eIsSet(int featureID) {
switch (featureID) {
case ModelPackage.CONNECTION__SOURCE:
return source != null;
case ModelPackage.CONNECTION__TARGET:
return target != null;
}
return super.eIsSet(featureID);
}
} //ConnectionImpl
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package com.ibm.model.shapes.model.impl;
import com.ibm.model.shapes.model.Connection;
import com.ibm.model.shapes.model.ModelPackage;
import com.ibm.model.shapes.model.Shape;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.EObjectResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Shape</b></em>'.
 *
 * NOTE(review): EMF-generated code (see the {@code @generated} tags). Prefer
 * changing the .ecore/.genmodel and regenerating; if a method must be
 * hand-edited, tag it {@code @generated NOT} so the merger preserves it.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link com.ibm.model.shapes.model.impl.ShapeImpl#getSourceConnections <em>Source Connections</em>}</li>
 *   <li>{@link com.ibm.model.shapes.model.impl.ShapeImpl#getTargetConnections <em>Target Connections</em>}</li>
 *   <li>{@link com.ibm.model.shapes.model.impl.ShapeImpl#getName <em>Name</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public abstract class ShapeImpl extends EObjectImpl implements Shape {
	/**
	 * The cached value of the '{@link #getSourceConnections() <em>Source Connections</em>}' containment reference list.
	 * Created lazily in {@link #getSourceConnections()}; {@code null} until first accessed.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getSourceConnections()
	 * @generated
	 * @ordered
	 */
	protected EList sourceConnections = null;
	/**
	 * The cached value of the '{@link #getTargetConnections() <em>Target Connections</em>}' reference list.
	 * Created lazily in {@link #getTargetConnections()}; {@code null} until first accessed.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTargetConnections()
	 * @generated
	 * @ordered
	 */
	protected EList targetConnections = null;
	/**
	 * The default value of the '{@link #getName() <em>Name</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getName()
	 * @generated
	 * @ordered
	 */
	protected static final String NAME_EDEFAULT = null;
	/**
	 * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getName()
	 * @generated
	 * @ordered
	 */
	protected String name = NAME_EDEFAULT;
	/**
	 * Default constructor; all state is initialized lazily or to defaults.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ShapeImpl() {
		super();
	}
	/**
	 * Returns the static meta-class for this model object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected EClass eStaticClass() {
		return ModelPackage.Literals.SHAPE;
	}
	/**
	 * Returns the outgoing connections, creating the backing containment
	 * list on first access (this shape owns its source connections).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList getSourceConnections() {
		if (sourceConnections == null) {
			// Containment list: removing a connection here detaches it from the model.
			sourceConnections = new EObjectContainmentEList(Connection.class, this, ModelPackage.SHAPE__SOURCE_CONNECTIONS);
		}
		return sourceConnections;
	}
	/**
	 * Returns the incoming connections, creating the backing list on first
	 * access. Unlike source connections this is a non-containment,
	 * proxy-resolving reference list (the connections are owned elsewhere).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList getTargetConnections() {
		if (targetConnections == null) {
			targetConnections = new EObjectResolvingEList(Connection.class, this, ModelPackage.SHAPE__TARGET_CONNECTIONS);
		}
		return targetConnections;
	}
	/**
	 * Returns the shape's name; may be {@code null} (the default).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getName() {
		return name;
	}
	/**
	 * Sets the shape's name and, if anyone is listening, broadcasts a SET
	 * notification carrying the old and new values.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setName(String newName) {
		String oldName = name;
		name = newName;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.SHAPE__NAME, oldName, name));
	}
	/**
	 * Handles inverse removal for the containment feature: a source
	 * connection being detached is removed from the backing list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case ModelPackage.SHAPE__SOURCE_CONNECTIONS:
				return ((InternalEList)getSourceConnections()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * Reflective getter: dispatches on the feature ID to the typed accessor.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case ModelPackage.SHAPE__SOURCE_CONNECTIONS:
				return getSourceConnections();
			case ModelPackage.SHAPE__TARGET_CONNECTIONS:
				return getTargetConnections();
			case ModelPackage.SHAPE__NAME:
				return getName();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * Reflective setter: list features are replaced wholesale (clear + addAll),
	 * the name attribute delegates to {@link #setName(String)}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case ModelPackage.SHAPE__SOURCE_CONNECTIONS:
				getSourceConnections().clear();
				getSourceConnections().addAll((Collection)newValue);
				return;
			case ModelPackage.SHAPE__TARGET_CONNECTIONS:
				getTargetConnections().clear();
				getTargetConnections().addAll((Collection)newValue);
				return;
			case ModelPackage.SHAPE__NAME:
				setName((String)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * Reflective unsetter: restores each feature to its default state
	 * (empty lists, default name).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void eUnset(int featureID) {
		switch (featureID) {
			case ModelPackage.SHAPE__SOURCE_CONNECTIONS:
				getSourceConnections().clear();
				return;
			case ModelPackage.SHAPE__TARGET_CONNECTIONS:
				getTargetConnections().clear();
				return;
			case ModelPackage.SHAPE__NAME:
				setName(NAME_EDEFAULT);
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * Reflective "is set" test. Reads the cached fields directly (not the
	 * getters) so querying does not trigger lazy list creation.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case ModelPackage.SHAPE__SOURCE_CONNECTIONS:
				return sourceConnections != null && !sourceConnections.isEmpty();
			case ModelPackage.SHAPE__TARGET_CONNECTIONS:
				return targetConnections != null && !targetConnections.isEmpty();
			case ModelPackage.SHAPE__NAME:
				return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
		}
		return super.eIsSet(featureID);
	}
	/**
	 * Returns the standard EMF debug string, augmented with the name attribute.
	 * Proxies fall back to the plain superclass form.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String toString() {
		if (eIsProxy()) return super.toString();
		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (name: ");
		result.append(name);
		result.append(')');
		return result.toString();
	}
} //ShapeImpl
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package com.ibm.model.shapes.model.impl;
import com.ibm.model.shapes.model.Connection;
import com.ibm.model.shapes.model.EllipticalShape;
import com.ibm.model.shapes.model.ModelFactory;
import com.ibm.model.shapes.model.ModelPackage;
import com.ibm.model.shapes.model.RectangularShape;
import com.ibm.model.shapes.model.Shape;
import com.ibm.model.shapes.model.ShapesDiagram;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.impl.EPackageImpl;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model <b>Package</b>.
 *
 * NOTE(review): EMF-generated meta-model bootstrap. Feature accessors index
 * into each EClass by position, so the creation order in
 * {@link #createPackageContents()} must match the indices used in the
 * {@code getX_Y()} accessors below — do not reorder by hand; regenerate instead.
 * <!-- end-user-doc -->
 * @generated
 */
public class ModelPackageImpl extends EPackageImpl implements ModelPackage {
	/**
	 * Meta-class for Connection; created in {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private EClass connectionEClass = null;
	/**
	 * Meta-class for EllipticalShape; created in {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private EClass ellipticalShapeEClass = null;
	/**
	 * Meta-class for RectangularShape; created in {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private EClass rectangularShapeEClass = null;
	/**
	 * Meta-class for Shape; created in {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private EClass shapeEClass = null;
	/**
	 * Meta-class for ShapesDiagram; created in {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private EClass shapesDiagramEClass = null;
	/**
	 * Creates an instance of the model <b>Package</b>, registered with
	 * {@link org.eclipse.emf.ecore.EPackage.Registry EPackage.Registry} by the package
	 * package URI value.
	 * <p>Note: the correct way to create the package is via the static
	 * factory method {@link #init init()}, which also performs
	 * initialization of the package, or returns the registered package,
	 * if one already exists.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see org.eclipse.emf.ecore.EPackage.Registry
	 * @see com.ibm.model.shapes.model.ModelPackage#eNS_URI
	 * @see #init()
	 * @generated
	 */
	private ModelPackageImpl() {
		super(eNS_URI, ModelFactory.eINSTANCE);
	}
	/**
	 * Guard so {@link #init()} builds the package only once.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private static boolean isInited = false;
	/**
	 * Creates, registers, and initializes the <b>Package</b> for this
	 * model, and for any others upon which it depends. Simple
	 * dependencies are satisfied by calling this method on all
	 * dependent packages before doing anything else. This method drives
	 * initialization for interdependent packages directly, in parallel
	 * with this package, itself.
	 * <p>Of this package and its interdependencies, all packages which
	 * have not yet been registered by their URI values are first created
	 * and registered. The packages are then initialized in two steps:
	 * meta-model objects for all of the packages are created before any
	 * are initialized, since one package's meta-model objects may refer to
	 * those of another.
	 * <p>Invocation of this method will not affect any packages that have
	 * already been initialized.
	 * <p>Not synchronized: guarded only by the {@code isInited} flag
	 * (standard EMF generated pattern).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #eNS_URI
	 * @see #createPackageContents()
	 * @see #initializePackageContents()
	 * @generated
	 */
	public static ModelPackage init() {
		if (isInited) return (ModelPackage)EPackage.Registry.INSTANCE.getEPackage(ModelPackage.eNS_URI);
		// Obtain or create and register package
		ModelPackageImpl theModelPackage = (ModelPackageImpl)(EPackage.Registry.INSTANCE.getEPackage(eNS_URI) instanceof ModelPackageImpl ? EPackage.Registry.INSTANCE.getEPackage(eNS_URI) : new ModelPackageImpl());
		isInited = true;
		// Create package meta-data objects
		theModelPackage.createPackageContents();
		// Initialize created meta-data
		theModelPackage.initializePackageContents();
		// Mark meta-data to indicate it can't be changed
		theModelPackage.freeze();
		return theModelPackage;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EClass getConnection() {
		return connectionEClass;
	}
	/**
	 * Feature index 0 of Connection — must match creation order in
	 * {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EReference getConnection_Source() {
		return (EReference)connectionEClass.getEStructuralFeatures().get(0);
	}
	/**
	 * Feature index 1 of Connection — must match creation order in
	 * {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EReference getConnection_Target() {
		return (EReference)connectionEClass.getEStructuralFeatures().get(1);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EClass getEllipticalShape() {
		return ellipticalShapeEClass;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EClass getRectangularShape() {
		return rectangularShapeEClass;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EClass getShape() {
		return shapeEClass;
	}
	/**
	 * Feature index 0 of Shape — must match creation order in
	 * {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EReference getShape_SourceConnections() {
		return (EReference)shapeEClass.getEStructuralFeatures().get(0);
	}
	/**
	 * Feature index 1 of Shape — must match creation order in
	 * {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EReference getShape_TargetConnections() {
		return (EReference)shapeEClass.getEStructuralFeatures().get(1);
	}
	/**
	 * Feature index 2 of Shape — must match creation order in
	 * {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EAttribute getShape_Name() {
		return (EAttribute)shapeEClass.getEStructuralFeatures().get(2);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EClass getShapesDiagram() {
		return shapesDiagramEClass;
	}
	/**
	 * Feature index 0 of ShapesDiagram — must match creation order in
	 * {@link #createPackageContents()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EReference getShapesDiagram_Shapes() {
		return (EReference)shapesDiagramEClass.getEStructuralFeatures().get(0);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ModelFactory getModelFactory() {
		return (ModelFactory)getEFactoryInstance();
	}
	/**
	 * Guard so {@link #createPackageContents()} runs only once.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private boolean isCreated = false;
	/**
	 * Creates the meta-model objects for the package. This method is
	 * guarded to have no effect on any invocation but its first.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void createPackageContents() {
		if (isCreated) return;
		isCreated = true;
		// Create classes and their features
		// (creation order here fixes the indices used by the feature accessors above)
		connectionEClass = createEClass(CONNECTION);
		createEReference(connectionEClass, CONNECTION__SOURCE);
		createEReference(connectionEClass, CONNECTION__TARGET);
		ellipticalShapeEClass = createEClass(ELLIPTICAL_SHAPE);
		rectangularShapeEClass = createEClass(RECTANGULAR_SHAPE);
		shapeEClass = createEClass(SHAPE);
		createEReference(shapeEClass, SHAPE__SOURCE_CONNECTIONS);
		createEReference(shapeEClass, SHAPE__TARGET_CONNECTIONS);
		createEAttribute(shapeEClass, SHAPE__NAME);
		shapesDiagramEClass = createEClass(SHAPES_DIAGRAM);
		createEReference(shapesDiagramEClass, SHAPES_DIAGRAM__SHAPES);
	}
	/**
	 * Guard so {@link #initializePackageContents()} runs only once.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private boolean isInitialized = false;
	/**
	 * Complete the initialization of the package and its meta-model. This
	 * method is guarded to have no effect on any invocation but its first.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void initializePackageContents() {
		if (isInitialized) return;
		isInitialized = true;
		// Initialize package
		setName(eNAME);
		setNsPrefix(eNS_PREFIX);
		setNsURI(eNS_URI);
		// Add supertypes to classes
		ellipticalShapeEClass.getESuperTypes().add(this.getShape());
		rectangularShapeEClass.getESuperTypes().add(this.getShape());
		// Initialize classes and features; add operations and parameters
		initEClass(connectionEClass, Connection.class, "Connection", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
		initEReference(getConnection_Source(), this.getShape(), null, "source", null, 0, 1, Connection.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
		initEReference(getConnection_Target(), this.getShape(), null, "target", null, 0, 1, Connection.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
		initEClass(ellipticalShapeEClass, EllipticalShape.class, "EllipticalShape", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
		initEClass(rectangularShapeEClass, RectangularShape.class, "RectangularShape", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
		initEClass(shapeEClass, Shape.class, "Shape", IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
		// Shape owns its source connections (composite) but only references its targets.
		initEReference(getShape_SourceConnections(), this.getConnection(), null, "sourceConnections", null, 0, -1, Shape.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
		initEReference(getShape_TargetConnections(), this.getConnection(), null, "targetConnections", null, 0, -1, Shape.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
		initEAttribute(getShape_Name(), ecorePackage.getEString(), "name", null, 0, 1, Shape.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
		initEClass(shapesDiagramEClass, ShapesDiagram.class, "ShapesDiagram", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
		initEReference(getShapesDiagram_Shapes(), this.getShape(), null, "shapes", null, 0, -1, ShapesDiagram.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
		// Create resource
		createResource(eNS_URI);
	}
} //ModelPackageImpl
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package com.ibm.model.shapes.model.impl;
import com.ibm.model.shapes.model.ModelPackage;
import com.ibm.model.shapes.model.Shape;
import com.ibm.model.shapes.model.ShapesDiagram;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
 * Implementation of the '<em><b>Shapes Diagram</b></em>' model object: the
 * root container that owns a containment list of {@link Shape}s.
 * <p>
 * Behaviorally identical to the EMF-generated form; the reflective methods
 * dispatch on the single SHAPES feature ID and otherwise defer to the
 * superclass.
 *
 * @generated NOT
 */
public class ShapesDiagramImpl extends EObjectImpl implements ShapesDiagram {
	/**
	 * Lazily created containment list backing {@link #getShapes()};
	 * stays {@code null} until the feature is first accessed.
	 */
	protected EList shapes = null;
	/**
	 * Default constructor; state is initialized lazily.
	 */
	protected ShapesDiagramImpl() {
		super();
	}
	/**
	 * Returns the static meta-class describing this model object.
	 */
	protected EClass eStaticClass() {
		return ModelPackage.Literals.SHAPES_DIAGRAM;
	}
	/**
	 * Returns the diagram's shapes, creating the containment list on first use.
	 */
	public EList getShapes() {
		if (shapes != null) {
			return shapes;
		}
		shapes = new EObjectContainmentEList(Shape.class, this, ModelPackage.SHAPES_DIAGRAM__SHAPES);
		return shapes;
	}
	/**
	 * Routes inverse-removal of a contained shape to the backing list;
	 * everything else goes to the superclass.
	 */
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		if (featureID == ModelPackage.SHAPES_DIAGRAM__SHAPES) {
			return ((InternalEList) getShapes()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * Reflective getter for the SHAPES feature.
	 */
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		if (featureID == ModelPackage.SHAPES_DIAGRAM__SHAPES) {
			return getShapes();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * Reflective setter: replaces the shapes wholesale with the given collection.
	 */
	public void eSet(int featureID, Object newValue) {
		if (featureID == ModelPackage.SHAPES_DIAGRAM__SHAPES) {
			EList list = getShapes();
			list.clear();
			list.addAll((Collection) newValue);
			return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * Reflective unsetter: empties the shapes list (its default state).
	 */
	public void eUnset(int featureID) {
		if (featureID == ModelPackage.SHAPES_DIAGRAM__SHAPES) {
			getShapes().clear();
			return;
		}
		super.eUnset(featureID);
	}
	/**
	 * Reflective "is set" test; reads the cached field directly so the
	 * query does not trigger lazy list creation.
	 */
	public boolean eIsSet(int featureID) {
		if (featureID == ModelPackage.SHAPES_DIAGRAM__SHAPES) {
			return shapes != null && !shapes.isEmpty();
		}
		return super.eIsSet(featureID);
	}
} //ShapesDiagramImpl
package com.ibm.model.shapes.model;
/**
 * A shape rendered as an ellipse. Marker interface: it declares no
 * features of its own beyond those inherited from {@link Shape}.
 *
 * @model
 */
public interface EllipticalShape extends Shape {}
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package com.ibm.model.shapes.model.util;
import com.ibm.model.shapes.model.*;
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.emf.common.notify.impl.AdapterFactoryImpl;
import org.eclipse.emf.ecore.EObject;
/**
 * <!-- begin-user-doc -->
 * The <b>Adapter Factory</b> for the model.
 * It provides an adapter <code>createXXX</code> method for each class of the model.
 *
 * NOTE(review): EMF-generated. Subclasses override the relevant
 * {@code createXXXAdapter()} methods; the default implementations return
 * {@code null} so unhandled cases fall through.
 * <!-- end-user-doc -->
 * @see com.ibm.model.shapes.model.ModelPackage
 * @generated
 */
public class ModelAdapterFactory extends AdapterFactoryImpl {
	/**
	 * The cached model package.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static ModelPackage modelPackage;
	/**
	 * Creates an instance of the adapter factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ModelAdapterFactory() {
		if (modelPackage == null) {
			modelPackage = ModelPackage.eINSTANCE;
		}
	}
	/**
	 * Returns whether this factory is applicable for the type of the object.
	 * <!-- begin-user-doc -->
	 * This implementation returns <code>true</code> if the object is either the model's package or is an instance object of the model.
	 * <!-- end-user-doc -->
	 * @return whether this factory is applicable for the type of the object.
	 * @generated
	 */
	public boolean isFactoryForType(Object object) {
		if (object == modelPackage) {
			return true;
		}
		if (object instanceof EObject) {
			return ((EObject)object).eClass().getEPackage() == modelPackage;
		}
		return false;
	}
	/**
	 * The switch that delegates to the <code>createXXX</code> methods.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ModelSwitch modelSwitch =
		new ModelSwitch() {
			public Object caseConnection(Connection object) {
				return createConnectionAdapter();
			}
			public Object caseEllipticalShape(EllipticalShape object) {
				return createEllipticalShapeAdapter();
			}
			public Object caseRectangularShape(RectangularShape object) {
				return createRectangularShapeAdapter();
			}
			public Object caseShape(Shape object) {
				return createShapeAdapter();
			}
			public Object caseShapesDiagram(ShapesDiagram object) {
				return createShapesDiagramAdapter();
			}
			public Object defaultCase(EObject object) {
				return createEObjectAdapter();
			}
		};
	/**
	 * Creates an adapter for the <code>target</code>.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param target the object to adapt.
	 * @return the adapter for the <code>target</code>.
	 * @generated
	 */
	public Adapter createAdapter(Notifier target) {
		// Note: the cast assumes the target is an EObject of this model; see isFactoryForType.
		return (Adapter)modelSwitch.doSwitch((EObject)target);
	}
	/**
	 * Creates a new adapter for an object of class '{@link com.ibm.model.shapes.model.Connection <em>Connection</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.ibm.model.shapes.model.Connection
	 * @generated
	 */
	public Adapter createConnectionAdapter() {
		return null;
	}
	/**
	 * Creates a new adapter for an object of class '{@link com.ibm.model.shapes.model.EllipticalShape <em>Elliptical Shape</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.ibm.model.shapes.model.EllipticalShape
	 * @generated
	 */
	public Adapter createEllipticalShapeAdapter() {
		return null;
	}
	/**
	 * Creates a new adapter for an object of class '{@link com.ibm.model.shapes.model.RectangularShape <em>Rectangular Shape</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.ibm.model.shapes.model.RectangularShape
	 * @generated
	 */
	public Adapter createRectangularShapeAdapter() {
		return null;
	}
	/**
	 * Creates a new adapter for an object of class '{@link com.ibm.model.shapes.model.Shape <em>Shape</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.ibm.model.shapes.model.Shape
	 * @generated
	 */
	public Adapter createShapeAdapter() {
		return null;
	}
	/**
	 * Creates a new adapter for an object of class '{@link com.ibm.model.shapes.model.ShapesDiagram <em>Shapes Diagram</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.ibm.model.shapes.model.ShapesDiagram
	 * @generated
	 */
	public Adapter createShapesDiagramAdapter() {
		return null;
	}
	/**
	 * Creates a new adapter for the default case.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @generated
	 */
	public Adapter createEObjectAdapter() {
		return null;
	}
} //ModelAdapterFactory
| Java |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package com.ibm.model.shapes.model.util;
import com.ibm.model.shapes.model.*;
import java.util.List;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
/**
 * <!-- begin-user-doc -->
 * The <b>Switch</b> for the model's inheritance hierarchy.
 * It supports the call {@link #doSwitch(EObject) doSwitch(object)}
 * to invoke the <code>caseXXX</code> method for each class of the model,
 * starting with the actual class of the object
 * and proceeding up the inheritance hierarchy
 * until a non-null result is returned,
 * which is the result of the switch.
 * <!-- end-user-doc -->
 * @see com.ibm.model.shapes.model.ModelPackage
 * @generated
 */
public class ModelSwitch {
	/**
	 * The cached model package.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static ModelPackage modelPackage;
	/**
	 * Creates an instance of the switch.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ModelSwitch() {
		if (modelPackage == null) {
			modelPackage = ModelPackage.eINSTANCE;
		}
	}
	/**
	 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return the first non-null result returned by a <code>caseXXX</code> call.
	 * @generated
	 */
	public Object doSwitch(EObject theEObject) {
		return doSwitch(theEObject.eClass(), theEObject);
	}
	/**
	 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
	 * For classes from other packages, recurses up the first supertype
	 * until a class from this package (or no supertype) is found.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return the first non-null result returned by a <code>caseXXX</code> call.
	 * @generated
	 */
	protected Object doSwitch(EClass theEClass, EObject theEObject) {
		if (theEClass.eContainer() == modelPackage) {
			return doSwitch(theEClass.getClassifierID(), theEObject);
		}
		else {
			List eSuperTypes = theEClass.getESuperTypes();
			return
				eSuperTypes.isEmpty() ?
					defaultCase(theEObject) :
					doSwitch((EClass)eSuperTypes.get(0), theEObject);
		}
	}
	/**
	 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
	 * Dispatch order inside each case is most-specific first, then the
	 * supertype case, then {@link #defaultCase(EObject)}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return the first non-null result returned by a <code>caseXXX</code> call.
	 * @generated
	 */
	protected Object doSwitch(int classifierID, EObject theEObject) {
		switch (classifierID) {
			case ModelPackage.CONNECTION: {
				Connection connection = (Connection)theEObject;
				Object result = caseConnection(connection);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case ModelPackage.ELLIPTICAL_SHAPE: {
				EllipticalShape ellipticalShape = (EllipticalShape)theEObject;
				Object result = caseEllipticalShape(ellipticalShape);
				if (result == null) result = caseShape(ellipticalShape);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case ModelPackage.RECTANGULAR_SHAPE: {
				RectangularShape rectangularShape = (RectangularShape)theEObject;
				Object result = caseRectangularShape(rectangularShape);
				if (result == null) result = caseShape(rectangularShape);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case ModelPackage.SHAPE: {
				Shape shape = (Shape)theEObject;
				Object result = caseShape(shape);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case ModelPackage.SHAPES_DIAGRAM: {
				ShapesDiagram shapesDiagram = (ShapesDiagram)theEObject;
				Object result = caseShapesDiagram(shapesDiagram);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			default: return defaultCase(theEObject);
		}
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Connection</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Connection</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public Object caseConnection(Connection object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Elliptical Shape</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Elliptical Shape</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public Object caseEllipticalShape(EllipticalShape object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Rectangular Shape</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Rectangular Shape</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public Object caseRectangularShape(RectangularShape object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Shape</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Shape</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public Object caseShape(Shape object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>Shapes Diagram</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Shapes Diagram</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public Object caseShapesDiagram(ShapesDiagram object) {
		return null;
	}
	/**
	 * Returns the result of interpreting the object as an instance of '<em>EObject</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch, but this is the last case anyway.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>EObject</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject)
	 * @generated
	 */
	public Object defaultCase(EObject object) {
		return null;
	}
} //ModelSwitch
| Java |
package org.joone.engine;
import java.beans.*;
public class GaussianLayerBeanInfo extends SimpleBeanInfo {
// Bean descriptor//GEN-FIRST:BeanDescriptor
private static BeanDescriptor beanDescriptor = new BeanDescriptor ( org.joone.engine.GaussianLayer.class , null ); // NOI18N
private static BeanDescriptor getBdescriptor(){
return beanDescriptor;
}
static {//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
}//GEN-LAST:BeanDescriptor
// Property identifiers//GEN-FIRST:Properties
private static final int PROPERTY_allInputs = 0;
private static final int PROPERTY_allOutputs = 1;
private static final int PROPERTY_initialGaussianSize = 2;
private static final int PROPERTY_inputLayer = 3;
private static final int PROPERTY_layerHeight = 4;
private static final int PROPERTY_layerName = 5;
private static final int PROPERTY_layerWidth = 6;
private static final int PROPERTY_learner = 7;
private static final int PROPERTY_orderingPhase = 8;
private static final int PROPERTY_outputLayer = 9;
private static final int PROPERTY_rows = 10;
private static final int PROPERTY_timeConstant = 11;
// Property array
private static PropertyDescriptor[] properties = new PropertyDescriptor[12];
private static PropertyDescriptor[] getPdescriptor(){
return properties;
}
static {
try {
properties[PROPERTY_allInputs] = new PropertyDescriptor ( "allInputs", org.joone.engine.GaussianLayer.class, "getAllInputs", "setAllInputs" ); // NOI18N
properties[PROPERTY_allInputs].setExpert ( true );
properties[PROPERTY_allOutputs] = new PropertyDescriptor ( "allOutputs", org.joone.engine.GaussianLayer.class, "getAllOutputs", "setAllOutputs" ); // NOI18N
properties[PROPERTY_allOutputs].setExpert ( true );
properties[PROPERTY_initialGaussianSize] = new PropertyDescriptor ( "initialGaussianSize", org.joone.engine.GaussianLayer.class, "getInitialGaussianSize", "setInitialGaussianSize" ); // NOI18N
properties[PROPERTY_inputLayer] = new PropertyDescriptor ( "inputLayer", org.joone.engine.GaussianLayer.class, "isInputLayer", null ); // NOI18N
properties[PROPERTY_inputLayer].setExpert ( true );
properties[PROPERTY_layerHeight] = new PropertyDescriptor ( "layerHeight", org.joone.engine.GaussianLayer.class, "getLayerHeight", "setLayerHeight" ); // NOI18N
properties[PROPERTY_layerName] = new PropertyDescriptor ( "layerName", org.joone.engine.GaussianLayer.class, "getLayerName", "setLayerName" ); // NOI18N
properties[PROPERTY_layerWidth] = new PropertyDescriptor ( "layerWidth", org.joone.engine.GaussianLayer.class, "getLayerWidth", "setLayerWidth" ); // NOI18N
properties[PROPERTY_learner] = new PropertyDescriptor ( "learner", org.joone.engine.GaussianLayer.class, "getLearner", null ); // NOI18N
properties[PROPERTY_learner].setExpert ( true );
properties[PROPERTY_orderingPhase] = new PropertyDescriptor ( "orderingPhase", org.joone.engine.GaussianLayer.class, "getOrderingPhase", "setOrderingPhase" ); // NOI18N
properties[PROPERTY_orderingPhase].setDisplayName ( "ordering phase (epochs)" );
properties[PROPERTY_outputLayer] = new PropertyDescriptor ( "outputLayer", org.joone.engine.GaussianLayer.class, "isOutputLayer", null ); // NOI18N
properties[PROPERTY_outputLayer].setExpert ( true );
properties[PROPERTY_rows] = new PropertyDescriptor ( "rows", org.joone.engine.GaussianLayer.class, "getRows", "setRows" ); // NOI18N
properties[PROPERTY_rows].setHidden ( true );
properties[PROPERTY_timeConstant] = new PropertyDescriptor ( "timeConstant", org.joone.engine.GaussianLayer.class, "getTimeConstant", "setTimeConstant" ); // NOI18N
}
catch(IntrospectionException e) {
e.printStackTrace();
}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
}//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
// This bean declares no event sets of its own; returning an empty array
// (rather than null) suppresses automatic event introspection.
private static EventSetDescriptor[] eventSets = new EventSetDescriptor[0];
private static EventSetDescriptor[] getEdescriptor(){
return eventSets;
}
//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
//GEN-LAST:Events
// Method identifiers//GEN-FIRST:Methods
// Indices into the methods array populated by the static initializer below.
// NetBeans-generated; do not renumber by hand.
private static final int METHOD_addInputSynapse0 = 0;
private static final int METHOD_addNoise1 = 1;
private static final int METHOD_addOutputSynapse2 = 2;
private static final int METHOD_copyInto3 = 3;
private static final int METHOD_removeAllInputs4 = 4;
private static final int METHOD_removeAllOutputs5 = 5;
private static final int METHOD_removeInputSynapse6 = 6;
private static final int METHOD_removeOutputSynapse7 = 7;
private static final int METHOD_run8 = 8;
private static final int METHOD_start9 = 9;
// Method array
// Filled by the static initializer below; size must match the identifier count.
private static MethodDescriptor[] methods = new MethodDescriptor[10];
private static MethodDescriptor[] getMdescriptor(){
return methods;
}
static {
    // Populates the MethodDescriptor table via reflection for the public
    // operations of GaussianLayer. NetBeans-generated; indices come from the
    // METHOD_* constants above.
    try {
        methods[METHOD_addInputSynapse0] = new MethodDescriptor ( org.joone.engine.GaussianLayer.class.getMethod("addInputSynapse", new Class[] {org.joone.engine.InputPatternListener.class})); // NOI18N
        methods[METHOD_addInputSynapse0].setDisplayName ( "" );
        methods[METHOD_addNoise1] = new MethodDescriptor ( org.joone.engine.GaussianLayer.class.getMethod("addNoise", new Class[] {Double.TYPE})); // NOI18N
        methods[METHOD_addNoise1].setDisplayName ( "" );
        methods[METHOD_addOutputSynapse2] = new MethodDescriptor ( org.joone.engine.GaussianLayer.class.getMethod("addOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class})); // NOI18N
        methods[METHOD_addOutputSynapse2].setDisplayName ( "" );
        methods[METHOD_copyInto3] = new MethodDescriptor ( org.joone.engine.GaussianLayer.class.getMethod("copyInto", new Class[] {org.joone.engine.NeuralLayer.class})); // NOI18N
        methods[METHOD_copyInto3].setDisplayName ( "" );
        methods[METHOD_removeAllInputs4] = new MethodDescriptor ( org.joone.engine.GaussianLayer.class.getMethod("removeAllInputs", new Class[] {})); // NOI18N
        methods[METHOD_removeAllInputs4].setDisplayName ( "" );
        methods[METHOD_removeAllOutputs5] = new MethodDescriptor ( org.joone.engine.GaussianLayer.class.getMethod("removeAllOutputs", new Class[] {})); // NOI18N
        methods[METHOD_removeAllOutputs5].setDisplayName ( "" );
        methods[METHOD_removeInputSynapse6] = new MethodDescriptor ( org.joone.engine.GaussianLayer.class.getMethod("removeInputSynapse", new Class[] {org.joone.engine.InputPatternListener.class})); // NOI18N
        methods[METHOD_removeInputSynapse6].setDisplayName ( "" );
        methods[METHOD_removeOutputSynapse7] = new MethodDescriptor ( org.joone.engine.GaussianLayer.class.getMethod("removeOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class})); // NOI18N
        methods[METHOD_removeOutputSynapse7].setDisplayName ( "" );
        methods[METHOD_run8] = new MethodDescriptor ( org.joone.engine.GaussianLayer.class.getMethod("run", new Class[] {})); // NOI18N
        methods[METHOD_run8].setDisplayName ( "" );
        methods[METHOD_start9] = new MethodDescriptor ( org.joone.engine.GaussianLayer.class.getMethod("start", new Class[] {})); // NOI18N
        methods[METHOD_start9].setDisplayName ( "" );
    }
    catch( Exception e) {
        // Report reflection failures instead of swallowing them silently,
        // consistent with the properties initializer above; a missing method
        // would otherwise leave a null descriptor that fails much later.
        e.printStackTrace();
    }//GEN-HEADEREND:Methods
    // Here you can add code for customizing the methods array.
}//GEN-LAST:Methods
// No default property or event is designated for this bean (-1 = none).
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
/**
 * Returns the cached <code>BeanDescriptor</code> for this bean.
 *
 * @return the descriptor describing the bean as a whole; a non-null value
 *         here suppresses automatic introspection of the class itself.
 */
public BeanDescriptor getBeanDescriptor() {
    return beanDescriptor;
}
/**
 * Returns the table of <code>PropertyDescriptor</code>s built by the static
 * initializer above.
 *
 * @return descriptors for the editable properties of this bean. Entries for
 *         indexed properties are IndexedPropertyDescriptor instances, which
 *         callers can detect with <code>instanceof</code>.
 */
public PropertyDescriptor[] getPropertyDescriptors() {
    return properties;
}
/**
 * Gets the bean's <code>EventSetDescriptor</code>s.
 *
 * @return An array of EventSetDescriptors describing the kinds of
 * events fired by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public EventSetDescriptor[] getEventSetDescriptors() {
    // Delegate to the generated accessor rather than touching the field
    // directly, consistent with MemoryLayerBeanInfo in this package.
    return getEdescriptor();
}
/**
 * Gets the bean's <code>MethodDescriptor</code>s.
 *
 * @return An array of MethodDescriptors describing the methods
 * implemented by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public MethodDescriptor[] getMethodDescriptors() {
    // Delegate to the generated accessor rather than touching the field
    // directly, consistent with MemoryLayerBeanInfo in this package.
    return getMdescriptor();
}
/**
 * Index of the property most commonly customized first by users of this
 * bean, or -1 when no default property is designated.
 *
 * @return index into the array returned by getPropertyDescriptors, or -1.
 */
public int getDefaultPropertyIndex() {
    return defaultPropertyIndex;
}
/**
 * Index of the event most commonly used by people working with this bean,
 * or -1 when no default event is designated.
 *
 * @return index into the array returned by getEventSetDescriptors, or -1.
 */
public int getDefaultEventIndex() {
    return defaultEventIndex;
}
}
| Java |
package org.joone.engine;
import java.util.TreeSet;
import org.joone.net.NetCheck;
/** This Synapse connects the N input neurons with the M output neurons
* using a matrix of FIRFilter elements of size NxM.
* A FIRFilter connection is a delayed connection that permits to implement
* a temporal backprop alg. functionally equivalent to the TDNN (Time Delay
* Neural Network), but in a more efficient and elegant manner.
*
* @see org.joone.engine.FIRFilter
* @author P. Marrone
*/
public class DelaySynapse extends Synapse {
protected FIRFilter[][] fir;
private int taps;
private static final long serialVersionUID = 8268129000639124340L;
public DelaySynapse() {
super();
}
public void addNoise(double amplitude) {
int x;
int y;
int m_cols = getOutputDimension();
int m_rows = getInputDimension();
for (y = 0; y < m_cols; ++y)
for (x = 0; x < m_rows; ++x)
fir[x][y].addNoise(amplitude);
}
protected void backward(double[] pattern) {
int x;
int y;
double s;
int m_rows = getInputDimension();
int m_cols = getOutputDimension();
setLearningRate(getMonitor().getLearningRate());
// Aggiustamento dei pesi
for (x = 0; x < m_rows; ++x) {
s = 0;
for (y = 0; y < m_cols; ++y) {
//debug(array.value[x][y], "matrix[" + x + "][" + y + "]");
fir[x][y].lrate = getLearningRate();
fir[x][y].momentum = getMomentum();
s += fir[x][y].backward(pattern[y]);
}
bouts[x] = s;
}
}
protected void forward(double[] pattern) {
int x;
int y;
double s;
int m_rows = getInputDimension();
int m_cols = getOutputDimension();
//debug(pattern, "FS1:forward");
for (y = 0; y < m_cols; ++y) {
s = 0;
for (x = 0; x < m_rows; ++x) {
//debug(array.value[x][y], "matrix[" + x + "][" + y + "]");
s += fir[x][y].forward(pattern[x]);
}
outs[y] = s;
}
//debug(outs, "FS2:forward");
}
/**
* Inserire qui la descrizione del metodo.
* Data di creazione: (10/04/00 23.02.20)
* @return int
*/
public int getTaps() {
return taps;
}
/**
* setArrays method comment.
*/
protected void setArrays(int rows, int cols) {
inps = new double[rows];
outs = new double[cols];
bouts = new double[rows];
}
protected void setDimensions(int rows, int cols) {
int icols, irows;
int x, y;
int m_rows = getInputDimension();
int m_cols = getOutputDimension();
if (rows == -1)
irows = m_rows;
else
irows = rows;
if (cols == -1)
icols = m_cols;
else
icols = cols;
fir = new FIRFilter[irows][icols];
for (x = 0; x < irows; ++x)
for (y = 0; y < icols; ++y) {
fir[x][y] = new FIRFilter(getTaps());
}
setArrays(irows, icols);
}
/**
* Inserire qui la descrizione del metodo.
* Data di creazione: (10/04/00 23.02.20)
* @param newTaps int
*/
public void setTaps(int newTaps) {
taps = newTaps;
this.setDimensions(-1, -1);
}
public TreeSet check() {
TreeSet checks = super.check();
if (getTaps() == 0) {
checks.add(
new NetCheck(
NetCheck.FATAL,
"The Taps parameter cannot be equal to zero.",
this));
}
// Return check messages
return checks;
}
} | Java |
package org.joone.engine;
/**
 * Listener interface for the life-cycle events of a neural network.
 * NOTE(review): "cicle" is a long-standing spelling of "cycle" in this API;
 * it cannot be renamed without breaking every implementor.
 */
public interface NeuralNetListener extends java.util.EventListener
{
/** Called when the neural network starts running. */
void netStarted(NeuralNetEvent e);
/** Called at the end of each training cycle (epoch). */
void cicleTerminated(NeuralNetEvent e);
/** Called when the neural network stops normally. */
void netStopped(NeuralNetEvent e);
/** Called when the global error of the network changes. */
void errorChanged(NeuralNetEvent e);
/** Called when the network stops because of an error described by the message. */
void netStoppedError(NeuralNetEvent e,String error);
}
package org.joone.engine;
import org.joone.engine.extenders.*;
/**
* This class implements the RPROP learning algorithm.
*
* @author Boris Jansen
*/
public class RpropLearner extends ExtendableLearner {
    /** The RPROP extender. Only used to make back compatibility possible. */
    private RpropExtender theRpropExtender;

    /** Creates a new instance of RpropLearner */
    public RpropLearner() {
        setUpdateWeightExtender(new BatchModeExtender());
        theRpropExtender = new RpropExtender();
        addDeltaRuleExtender(theRpropExtender);
    }

    /**
     * Creates a new instance of RpropLearner.
     *
     * @param aParameters the parameter for this learning algorithm.
     */
    public RpropLearner(RpropParameters aParameters) {
        // Bug fix: the previous implementation called super(), which left
        // theRpropExtender null and caused a NullPointerException on the
        // setParameters() call below. Delegate to the default constructor so
        // the extenders are created first.
        this();
        theRpropExtender.setParameters(aParameters);
    }

    /**
     * @deprecated used for backward compatibility
     */
    protected void reinit() {
        theRpropExtender.reinit();
    }

    /** @return the RPROP parameters held by the underlying extender. */
    public RpropParameters getParameters() {
        return theRpropExtender.getParameters();
    }

    /** @param aParameters the RPROP parameters to install on the extender. */
    public void setParameters(RpropParameters aParameters) {
        theRpropExtender.setParameters(aParameters);
    }

    /**
     * Gets the sign of a double.
     *
     * @return the sign of a double (-1, 0, 1).
     */
    protected double sign(double d) {
        if(d > 0) {
            return 1.0;
        } else if(d < 0) {
            return -1.0;
        }
        return 0;
    }
}
| Java |
package org.joone.engine;
import java.beans.*;
public class MemoryLayerBeanInfo extends SimpleBeanInfo {
    // Bean descriptor//GEN-FIRST:BeanDescriptor
    private static BeanDescriptor beanDescriptor = new BeanDescriptor ( MemoryLayer.class , null );
    private static BeanDescriptor getBdescriptor(){
        return beanDescriptor;
    }
    static {//GEN-HEADEREND:BeanDescriptor
        // Here you can add code for customizing the BeanDescriptor.
    }//GEN-LAST:BeanDescriptor
    // Property identifiers//GEN-FIRST:Properties
    // Indices into the properties array populated below (NetBeans-generated).
    private static final int PROPERTY_allInputs = 0;
    private static final int PROPERTY_allOutputs = 1;
    private static final int PROPERTY_bias = 2;
    private static final int PROPERTY_inputLayer = 3;
    private static final int PROPERTY_layerName = 4;
    private static final int PROPERTY_learner = 5;
    private static final int PROPERTY_monitor = 6;
    private static final int PROPERTY_outputLayer = 7;
    private static final int PROPERTY_rows = 8;
    private static final int PROPERTY_taps = 9;
    // Property array
    private static PropertyDescriptor[] properties = new PropertyDescriptor[10];
    private static PropertyDescriptor[] getPdescriptor(){
        return properties;
    }
    static {
        try {
            properties[PROPERTY_allInputs] = new PropertyDescriptor ( "allInputs", MemoryLayer.class, "getAllInputs", "setAllInputs" );
            properties[PROPERTY_allOutputs] = new PropertyDescriptor ( "allOutputs", MemoryLayer.class, "getAllOutputs", "setAllOutputs" );
            properties[PROPERTY_bias] = new PropertyDescriptor ( "bias", MemoryLayer.class, "getBias", "setBias" );
            properties[PROPERTY_inputLayer] = new PropertyDescriptor ( "inputLayer", MemoryLayer.class, "isInputLayer", null );
            properties[PROPERTY_inputLayer].setExpert ( true );
            properties[PROPERTY_layerName] = new PropertyDescriptor ( "layerName", MemoryLayer.class, "getLayerName", "setLayerName" );
            properties[PROPERTY_learner] = new PropertyDescriptor ( "learner", MemoryLayer.class, "getLearner", null );
            properties[PROPERTY_learner].setExpert ( true );
            properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", MemoryLayer.class, "getMonitor", "setMonitor" );
            properties[PROPERTY_monitor].setExpert ( true );
            properties[PROPERTY_outputLayer] = new PropertyDescriptor ( "outputLayer", MemoryLayer.class, "isOutputLayer", null );
            properties[PROPERTY_outputLayer].setExpert ( true );
            properties[PROPERTY_rows] = new PropertyDescriptor ( "rows", MemoryLayer.class, "getRows", "setRows" );
            properties[PROPERTY_taps] = new PropertyDescriptor ( "taps", MemoryLayer.class, "getTaps", "setTaps" );
        }
        catch( IntrospectionException e) {
            // Report introspection failures instead of swallowing them
            // silently, consistent with GaussianLayerBeanInfo; a broken
            // descriptor table would otherwise be very hard to diagnose.
            e.printStackTrace();
        }//GEN-HEADEREND:Properties
        // Here you can add code for customizing the properties array.
    }//GEN-LAST:Properties
    // EventSet identifiers//GEN-FIRST:Events
    // EventSet array
    private static EventSetDescriptor[] eventSets = new EventSetDescriptor[0];
    private static EventSetDescriptor[] getEdescriptor(){
        return eventSets;
    }
    //GEN-HEADEREND:Events
    // Here you can add code for customizing the event sets array.
    //GEN-LAST:Events
    // Method identifiers//GEN-FIRST:Methods
    // Method array
    private static MethodDescriptor[] methods = new MethodDescriptor[0];
    private static MethodDescriptor[] getMdescriptor(){
        return methods;
    }
    //GEN-HEADEREND:Methods
    // Here you can add code for customizing the methods array.
    //GEN-LAST:Methods
    // No default property or event is designated for this bean (-1 = none).
    private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
    private static final int defaultEventIndex = -1;//GEN-END:Idx
    //GEN-FIRST:Superclass
    // Here you can add code for customizing the Superclass BeanInfo.
    //GEN-LAST:Superclass
    /**
     * Gets the bean's <code>BeanDescriptor</code>s.
     *
     * @return BeanDescriptor describing the editable
     * properties of this bean. May return null if the
     * information should be obtained by automatic analysis.
     */
    public BeanDescriptor getBeanDescriptor() {
        return getBdescriptor();
    }
    /**
     * Gets the bean's <code>PropertyDescriptor</code>s.
     *
     * @return An array of PropertyDescriptors describing the editable
     * properties supported by this bean. May return null if the
     * information should be obtained by automatic analysis.
     * <p>
     * If a property is indexed, then its entry in the result array will
     * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
     * A client of getPropertyDescriptors can use "instanceof" to check
     * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
     */
    public PropertyDescriptor[] getPropertyDescriptors() {
        return getPdescriptor();
    }
    /**
     * Gets the bean's <code>EventSetDescriptor</code>s.
     *
     * @return An array of EventSetDescriptors describing the kinds of
     * events fired by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public EventSetDescriptor[] getEventSetDescriptors() {
        return getEdescriptor();
    }
    /**
     * Gets the bean's <code>MethodDescriptor</code>s.
     *
     * @return An array of MethodDescriptors describing the methods
     * implemented by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public MethodDescriptor[] getMethodDescriptors() {
        return getMdescriptor();
    }
    /**
     * A bean may have a "default" property that is the property that will
     * mostly commonly be initially chosen for update by human's who are
     * customizing the bean.
     * @return Index of default property in the PropertyDescriptor array
     * returned by getPropertyDescriptors.
     * <P> Returns -1 if there is no default property.
     */
    public int getDefaultPropertyIndex() {
        return defaultPropertyIndex;
    }
    /**
     * A bean may have a "default" event that is the event that will
     * mostly commonly be used by human's when using the bean.
     * @return Index of default event in the EventSetDescriptor array
     * returned by getEventSetDescriptors.
     * <P> Returns -1 if there is no default event.
     */
    public int getDefaultEventIndex() {
        return defaultEventIndex;
    }
}
| Java |
package org.joone.engine;
/**
* The <code>Fifo</code> class represents a first-in-first-out
* (FIFO) stack of objects.
*/
public class Fifo extends java.util.Vector {
    private static final long serialVersionUID = -3937649024771901836L;

    /**
     * Tests if this queue is empty.
     * Synchronized for consistency with peek/pop/search (previously this
     * method and push were the only unsynchronized operations of the class).
     *
     * @return <code>true</code> if this queue contains no elements;
     *         <code>false</code> otherwise.
     */
    public synchronized boolean empty() {
        return size() == 0;
    }

    /**
     * Looks at the object at the head of this queue (the least recently
     * pushed element) without removing it.
     *
     * @return the object at the head of this queue.
     * @exception java.util.EmptyStackException if this queue is empty.
     */
    public synchronized Object peek() {
        if (size() == 0)
            throw new java.util.EmptyStackException();
        return elementAt(0);
    }

    /**
     * Removes the object at the head of this queue and returns it,
     * implementing first-in-first-out order.
     *
     * @return the object that was at the head of this queue.
     * @exception java.util.EmptyStackException if this queue is empty.
     */
    public synchronized Object pop() {
        Object obj = peek();
        removeElementAt(0);
        return obj;
    }

    /**
     * Appends an item at the tail of this queue.
     *
     * @param item the item to be added to this queue.
     * @return the <code>item</code> argument.
     */
    public synchronized Object push(Object item) {
        addElement(item);
        return item;
    }

    /**
     * Returns where an object is in this queue.
     *
     * @param o the desired object.
     * @return the 1-based distance from the tail (most recently pushed end)
     *         of the queue to the last occurrence of the object; the return
     *         value <code>-1</code> indicates that the object is not in the
     *         queue.
     */
    public synchronized int search(Object o) {
        int i = lastIndexOf(o);
        if (i >= 0) {
            return size() - i;
        }
        return -1;
    }
}
package org.joone.engine;
import java.util.ArrayList;
import java.util.Collection;
import org.joone.exception.JooneRuntimeException;
import org.joone.log.*;
import java.util.TreeSet;
import org.joone.inspection.implementations.BiasInspection;
import org.joone.net.NetCheck;
/** <P>This layer implements the Gaussian Neighborhood SOM strategy. It receives
* the euclidean distances between the input vector and weights and calculates the
* distance fall off between the winning node and all other nodes. These are
* passed back allowing the previous synapse to adjust it's weights.</P>
* <P>The distance fall off is calculated according to a Gaussian distribution from
* the winning node. This layer uses implemtations of SpatialMap in order to
* calculate these distances. Currently this layer uses the GaussianSpatialMap
* which calculates the Gaussian distance for all nodes in the SOM map. Future
* maps will allow distance calculations based on a specific shape such as a circle
* , square or diamond. Currently the GuassianLayer supports 3D SOM maps.</P>
* @see SimpleLayer parent
*/
public class GaussianLayer extends SimpleLayer implements NeuralNetListener {
private static final ILogger log = LoggerFactory.getLogger (GaussianLayer.class);
private static final long serialVersionUID = -941653911909171430L;
// Width of the map in the this layer.
private int LayerWidth = 1;
// Height of the map in the this layer.
private int LayerHeight = 1;
// Depth of the map in the this layer.
private int LayerDepth = 1;
private SpatialMap space_map;
private double timeConstant = 200.0;
private int orderingPhase = 1000;
private double initialGaussianSize = 10;
/** <P>The default constructor for this GaussianLayer.</P> */
public GaussianLayer() {
super();
}
/** The constructor that takes a name of the layer.
* @param ElemName The name of the Layer
*/
public GaussianLayer(java.lang.String ElemName) {
super(ElemName);
}
/** <P>This method has a blank body as there are no biases to adjust.</P>
* @param pattern Not used. The pattern to process and pass back.
* @throws JooneRuntimeException The run time exception.
*/
public void backward(double[] pattern)
throws JooneRuntimeException
{
}
/** <P>This method takes as input an array of euclidean distances between the input and
* weights calculated by the previous synapse. This method calculates the Gaussian
* distance fall off between the winning neuron and all other nodes. These distances are passed on to the next synapse.</P>
* @param pattern The pattern containing the euclidean distances from the previous synapse.
* @see NeuralLayer#forward (double[])
* @throws JooneRuntimeException This <code>Exception </code> is a wrapper Exception when an Exception is thrown
* while doing the maths.
*/
public void forward (double[] pattern)
throws JooneRuntimeException
{
try
{
getSpace_map().ApplyNeighborhoodFunction(pattern,outs, getMonitor().isLearning());
}
catch (Exception aioobe)
{
String msg;
log.error ( msg = "Exception thrown while processing the pattern " + pattern.toString()
+ " Exception thrown is " + aioobe.getClass ().getName () + ". Message is " + aioobe.getMessage() );
throw new JooneRuntimeException (msg, aioobe);
//aioobe.printStackTrace();
}
}
/** Getter for property LayerDepth.
* @return Value of property LayerDepth.
*
*/
public int getLayerDepth() {
return LayerDepth;
}
/** Setter for property LayerDepth.
* @param layerDepth New value of property LayerDepth.
*
*/
public void setLayerDepth(int layerDepth) {
if ( layerDepth != getLayerDepth() )
{
this.LayerDepth = layerDepth;
setRows(getLayerWidth()*getLayerHeight()*getLayerDepth());
setDimensions();
setConnDimensions();
getSpace_map().setMapDepth(layerDepth);
}
}
/** Getter for property LayerHeight.
* @return Value of property LayerHeight.
*
*/
public int getLayerHeight() {
return LayerHeight;
}
/** Setter for property LayerHeight.
* @param LayerHeight New value of property LayerHeight.
*
*/
public void setLayerHeight(int LayerHeight) {
if ( LayerHeight != getLayerHeight() )
{
this.LayerHeight = LayerHeight;
setRows(getLayerWidth()*getLayerHeight()*getLayerDepth());
setDimensions();
setConnDimensions();
getSpace_map().setMapHeight(LayerHeight);
}
}
/** Getter for property LayerWidth.
* @return Value of property LayerWidth.
*
*/
public int getLayerWidth() {
return LayerWidth;
}
/** Setter for property LayerWidth.
* @param LayerWidth New value of property LayerWidth.
*
*/
public void setLayerWidth(int LayerWidth) {
if ( LayerWidth != getLayerWidth() )
{
this.LayerWidth = LayerWidth;
setRows(getLayerWidth()*getLayerHeight()*getLayerDepth());
setDimensions();
setConnDimensions();
getSpace_map().setMapWidth(LayerWidth);
}
}
/** Gets the largest layer dimension size.
* @return The size of the largest dimension, width , height or depth.
*/
public int getLargestDimension()
{
int max = 1;
if ( getLayerWidth() > max)
max = getLayerWidth();
if ( getLayerHeight() > max)
max = getLayerHeight();
if ( getLayerDepth() > max)
max = getLayerDepth();
return(max);
}
/** <P>Check that there are no errors or problems with the properties of this
* GaussianLayer.</P>
* @return The TreeSet of errors / problems if any.
*/
public TreeSet check() {
TreeSet checks = super.check();
if ( getLayerWidth() < 1 )
checks.add(new NetCheck(NetCheck.FATAL, "Layer width should be greater than or equal to 1." , this));
if ( getLayerHeight() < 1 )
checks.add(new NetCheck(NetCheck.FATAL, "Layer height should be greater than or equal to 1." , this));
if ( getLayerDepth() < 1 )
checks.add(new NetCheck(NetCheck.FATAL, "Layer depth should be greater than or equal to 1." , this));
if (getOrderingPhase() > getMonitor().getTotCicles())
checks.add(new NetCheck(NetCheck.WARNING, "Ordering phase should be lesser than or equal to the number of epochs" , this));
return checks;
}
public void start() {
if (getMonitor() != null) {
getMonitor().addNeuralNetListener(this, false);
}
super.start();
}
/** <P>Initialises the time constant used to decrease the size of the spatial
* map.</P>
* @param e The original Net Event.
*/
public void netStarted(NeuralNetEvent e) {
getSpace_map().init( getMonitor().getTotCicles());
space_map.setInitialGaussianSize(getLargestDimension());
}
/** <P>Updates the Gaussian Size if in learning mode.</P>
* @param e The original Net Event.
*/
public void cicleTerminated(NeuralNetEvent e) {
if ( getMonitor().isLearning() )
{
getSpace_map().updateCurrentGaussianSize(getMonitor().getTotCicles()-getMonitor().getCurrentCicle());
}
}
/** Getter for property orderingPhase.
* @return Value of property orderingPhase.
*
*/
public int getOrderingPhase() {
return orderingPhase;
}
/** Setter for property orderingPhase.
* @param orderingPhase New value of property orderingPhase.
*
*/
public void setOrderingPhase(int orderingPhase) {
this.orderingPhase = orderingPhase;
getSpace_map().setOrderingPhase(orderingPhase);
}
/** Getter for property timeConstant.
* @return Value of property timeConstant.
*
*/
public double getTimeConstant() {
return timeConstant;
}
/** Setter for property timeConstant.
* @param timeConstant New value of property timeConstant.
*
*/
public void setTimeConstant(double timeConstant) {
this.timeConstant = timeConstant;
getSpace_map().setTimeConstant(timeConstant);
}
/** Getter for property space_map.
* @return Value of property space_map.
*
*/
protected org.joone.engine.SpatialMap getSpace_map() {
if (space_map == null) {
space_map = new GaussianSpatialMap();
space_map.setMapDepth(getLayerDepth());
space_map.setMapHeight(getLayerHeight());
space_map.setMapWidth(getLayerWidth());
space_map.setInitialGaussianSize(getInitialGaussianSize());
space_map.setOrderingPhase(getOrderingPhase());
space_map.setTimeConstant(getTimeConstant());
}
return space_map;
}
/** Getter for property initialGaussianSize.
* @return Value of property initialGaussianSize.
*
*/
public double getInitialGaussianSize() {
return initialGaussianSize;
}
/** Setter for property initialGaussianSize.
* @param initialGaussianSize New value of property initialGaussianSize.
*
*/
public void setInitialGaussianSize(double initialGaussianSize) {
this.initialGaussianSize = initialGaussianSize;
getSpace_map().setInitialGaussianSize(initialGaussianSize);
}
/**
* It doesn't make sense to return biases for this layer
* @return null
*/
public Collection Inspections() {
Collection col = new ArrayList();
col.add(new BiasInspection(null));
return col;
}
public void netStoppedError(NeuralNetEvent e, String error) {
}
public void errorChanged(NeuralNetEvent e) {
}
public void netStopped(NeuralNetEvent e) {
}
} | Java |
/*
* ExtendableLearner.java
*
* Created on September 14, 2004, 8:30 AM
*/
package org.joone.engine;
import java.util.*;
import org.joone.engine.extenders.*;
/**
* Learners that extend this class are forced to implement certain functions, a
* so-called skeleton. The good thing is, because learners extend this class
* certain plug-ins can be added. For example, plug ins that change the objective
* function, or the delta-update rule. Still learners that do not fit into this
* skeleton have to opportunity to implement Learner directly (or extend
* AbstractLearner), but it won't be able to use the extra plug-ins (unless it
* is build in the learner by the programmer itself).
*
* Basically, this class is the BasicLearner, but by adding extenders it can
* provide totally different learning algoriths.
*
* @author Boris Jansen
*/
public class ExtendableLearner extends AbstractLearner {
/** The list with delta rule extenders, extenders that change the
 * delta w, e.g. momentum term, etc.
 * NOTE(review): raw List; elements are expected to be DeltaRuleExtender
 * instances (see getDelta below). */
protected List theDeltaRuleExtenders = new ArrayList();
/** The list with gradient extenders, extenders that change the gradient
 * (elements are GradientExtender instances; see getGradientBias/Weight). */
protected List theGradientExtenders = new ArrayList();
/** The update weight extender, that is, the way to update
 * the weights, online, batch mode, etc. */
protected UpdateWeightExtender theUpdateWeightExtender;
/** Creates a new instance of ExtendableLearner.
 * Extenders are installed afterwards via the add*/setUpdateWeightExtender
 * methods; until then the extender lists are empty. */
public ExtendableLearner() {
}
/**
 * Applies one bias update pass: computes a delta for every bias of the
 * attached layer and hands it to the update-weight extender, wrapped in
 * the pre/post hooks.
 *
 * @param currentGradientOuts the back-propagated gradients.
 */
public final void requestBiasUpdate(double[] currentGradientOuts) {
    preBiasUpdate(currentGradientOuts);
    for (int row = 0; row < getLayer().getRows(); row++) {
        updateBias(row, getDelta(currentGradientOuts, row));
    }
    postBiasUpdate(currentGradientOuts);
}
/**
 * Applies one weight update pass over the attached synapse: for every
 * connection that is enabled and not fixed, computes a delta and hands it
 * to the update-weight extender, wrapped in the pre/post hooks.
 *
 * @param currentPattern the back-propagated gradients.
 * @param currentInps the forwarded input.
 */
public final void requestWeightUpdate(double[] currentPattern, double[] currentInps) {
    preWeightUpdate(currentPattern, currentInps);
    // Skip connections that are disabled or whose weight is frozen.
    boolean[][] enabled = getSynapse().getWeights().getEnabled();
    boolean[][] fixed = getSynapse().getWeights().getFixed();
    for (int in = 0; in < getSynapse().getInputDimension(); in++) {
        for (int out = 0; out < getSynapse().getOutputDimension(); out++) {
            if (enabled[in][out] && !fixed[in][out]) {
                updateWeight(in, out, getDelta(currentInps, in, currentPattern, out));
            }
        }
    }
    postWeightUpdate(currentPattern, currentInps);
}
/**
 * Updates a bias with the calculated delta value.
 * Delegates to the configured update-weight extender (e.g. online or batch
 * mode), which decides when the delta is actually applied.
 *
 * @param j the index of the bias to update.
 * @param aDelta the calculated delta value.
 */
protected void updateBias(int j, double aDelta) {
theUpdateWeightExtender.updateBias(j, aDelta);
}
/**
 * Updates a weight with the calculated delta value.
 * Delegates to the configured update-weight extender (e.g. online or batch
 * mode), which decides when the delta is actually applied.
 *
 * @param j the input index of the weight to update.
 * @param k the output index of the weight to update.
 * @param aDelta the calculated delta value.
 */
protected void updateWeight(int j, int k, double aDelta) {
theUpdateWeightExtender.updateWeight(j, k, aDelta);
}
/**
 * Computes the delta value for a bias: starts from the plain
 * learning-rate * gradient delta and lets each enabled delta-rule
 * extender transform the running value, in registration order.
 * If this method is overridden, subclasses should prevent delta extenders
 * from being registered (e.g. throw from setDeltaExtender()).
 *
 * @param currentGradientOuts the back propagated gradients.
 * @param j the index of the bias.
 * @return the (possibly extender-adjusted) delta for bias j.
 */
protected double getDelta(double[] currentGradientOuts, int j) {
    double delta = getDefaultDelta(currentGradientOuts, j);
    for (Object each : theDeltaRuleExtenders) {
        DeltaRuleExtender extender = (DeltaRuleExtender) each;
        if (extender.isEnabled()) {
            delta = extender.getDelta(currentGradientOuts, j, delta);
        }
    }
    return delta;
}
/**
 * Gets the default (normal calculation of) delta, i.e. the learning rate
 * times the (possibly extended) gradient for the bias.
 *
 * @param currentGradientOuts the back propagated gradients.
 * @param j the index of the bias.
 * @return learning rate * gradient for bias j.
 */
public double getDefaultDelta(double[] currentGradientOuts, int j) {
return getLearningRate(j) * getGradientBias(currentGradientOuts, j);
}
/**
 * Computes the delta value for a weight: starts from the plain
 * learning-rate * gradient delta and lets each enabled delta-rule
 * extender transform the running value, in registration order.
 * If this method is overridden, subclasses should prevent delta extenders
 * from being registered (e.g. throw from setDeltaExtender()).
 *
 * @param currentInps the forwarded input.
 * @param j the input index of the weight.
 * @param currentPattern the back propagated gradients.
 * @param k the output index of the weight.
 * @return the (possibly extender-adjusted) delta for weight (j, k).
 */
protected double getDelta(double[] currentInps, int j, double[] currentPattern, int k) {
    double delta = getDefaultDelta(currentInps, j, currentPattern, k);
    for (Object each : theDeltaRuleExtenders) {
        DeltaRuleExtender extender = (DeltaRuleExtender) each;
        if (extender.isEnabled()) {
            delta = extender.getDelta(currentInps, j, currentPattern, k, delta);
        }
    }
    return delta;
}
/**
 * Gets the default (normal calculation of) delta, i.e. the learning rate
 * times the (possibly extended) gradient for the weight.
 *
 * @param currentInps the forwarded input.
 * @param j the input index of the weight.
 * @param currentPattern the back propagated gradients.
 * @param k the output index of the weight.
 * @return learning rate * gradient for weight (j, k).
 */
public double getDefaultDelta(double[] currentInps, int j, double[] currentPattern, int k) {
return getLearningRate(j, k) * getGradientWeight(currentInps, j, currentPattern, k);
}
/**
 * Gets the learning rate for a bias. Currently the global monitor rate is
 * used for every bias; the index is reserved for future per-bias
 * learning-rate extenders.
 *
 * @param j the index of the bias (for which we should get the learning rate).
 * @return the learning rate for a bias.
 */
protected double getLearningRate(int j) {
// in future we could add learning rate extenders...
return getMonitor().getLearningRate();
}
/**
 * Gets the learning rate for a weight. Currently the global monitor rate is
 * used for every weight; the indices are reserved for future per-weight
 * learning-rate extenders.
 *
 * @param j the input index of the weight (for which we should get the learning rate).
 * @param k the output index of the weight (for which we should get the learning rate).
 * @return the learning rate for a weight.
 */
protected double getLearningRate(int j, int k) {
// in future we could add learning rate extenders...
return getMonitor().getLearningRate();
}
/**
 * Gets the gradient for biases, letting every enabled
 * {@link GradientExtender} adjust the default gradient.
 *
 * @param currentGradientOuts the back propagated gradients.
 * @param j the index of the bias.
 * @return the gradient for bias b_j.
 */
public double getGradientBias(double[] currentGradientOuts, int j) {
    double gradient = getDefaultGradientBias(currentGradientOuts, j);
    // Chain the enabled extenders: each one sees the previous result.
    for (int i = 0; i < theGradientExtenders.size(); i++) {
        GradientExtender extender = (GradientExtender) theGradientExtenders.get(i);
        if (extender.isEnabled()) {
            gradient = extender.getGradientBias(currentGradientOuts, j, gradient);
        }
    }
    return gradient;
}
/**
 * Gets the default (normal calculation of the) gradient for biases.
 *
 * @param currentGradientOuts the back propagated gradients.
 * @param j the index of the bias.
 * @return the gradient for bias b_j, i.e. the back propagated gradient itself.
 */
public double getDefaultGradientBias(double[] currentGradientOuts, int j) {
    return currentGradientOuts[j];
}
/**
 * Gets the gradient for weights, letting every enabled
 * {@link GradientExtender} adjust the default gradient.
 *
 * @param currentInps the forwarded input.
 * @param j the input index of the weight.
 * @param currentPattern the back propagated gradients.
 * @param k the output index of the weight.
 *
 * @return the gradient for the weight w_j_k.
 */
public double getGradientWeight(double[] currentInps, int j, double[] currentPattern, int k) {
    double gradient = getDefaultGradientWeight(currentInps, j, currentPattern, k);
    // Chain the enabled extenders: each one sees the previous result.
    for (int i = 0; i < theGradientExtenders.size(); i++) {
        GradientExtender extender = (GradientExtender) theGradientExtenders.get(i);
        if (extender.isEnabled()) {
            gradient = extender.getGradientWeight(currentInps, j, currentPattern, k, gradient);
        }
    }
    return gradient;
}
/**
 * Gets the default (normal calculation of the) gradient for weights.
 *
 * @param currentInps the forwarded input.
 * @param j the input index of the weight.
 * @param currentPattern the back propagated gradients.
 * @param k the output index of the weight.
 *
 * @return the gradient for the weight w_j_k, i.e. forwarded input j times
 *         back propagated gradient k.
 */
public double getDefaultGradientWeight(double[] currentInps, int j, double[] currentPattern, int k) {
    return currentInps[j] * currentPattern[k];
}
/**
 * Gives learners and extenders a chance to do some pre-computing before the
 * biases are updated. The learner hook runs first, then the update weight
 * extender, the delta rule extenders and finally the gradient extenders.
 *
 * @param currentGradientOuts the back propagated gradients.
 */
protected final void preBiasUpdate(double[] currentGradientOuts) {
    preBiasUpdateImpl(currentGradientOuts);
    // update weight extender...
    if (theUpdateWeightExtender != null && theUpdateWeightExtender.isEnabled()) {
        theUpdateWeightExtender.preBiasUpdate(currentGradientOuts);
    }
    // delta rule extenders...
    for (int i = 0; i < theDeltaRuleExtenders.size(); i++) {
        DeltaRuleExtender extender = (DeltaRuleExtender) theDeltaRuleExtenders.get(i);
        if (extender.isEnabled()) {
            extender.preBiasUpdate(currentGradientOuts);
        }
    }
    // gradient extenders...
    for (int i = 0; i < theGradientExtenders.size(); i++) {
        GradientExtender extender = (GradientExtender) theGradientExtenders.get(i);
        if (extender.isEnabled()) {
            extender.preBiasUpdate(currentGradientOuts);
        }
    }
}
/**
 * Gives learners a chance to do some pre-computing before the biases are
 * updated. The default implementation does nothing; subclasses may override.
 *
 * @param currentGradientOuts the back propagated gradients.
 */
protected void preBiasUpdateImpl(double[] currentGradientOuts) {
}
/**
 * Gives learners and extenders a chance to do some pre-computing before the
 * weights are updated. The learner hook runs first, then the update weight
 * extender, the delta rule extenders and finally the gradient extenders.
 *
 * @param currentPattern the back propagated gradients.
 * @param currentInps the forwarded input.
 */
protected final void preWeightUpdate(double[] currentPattern, double[] currentInps) {
    preWeightUpdateImpl(currentPattern, currentInps);
    // update weight extender... Note the extenders take (inputs, gradients),
    // the reverse of this method's own parameter order.
    if (theUpdateWeightExtender != null && theUpdateWeightExtender.isEnabled()) {
        theUpdateWeightExtender.preWeightUpdate(currentInps, currentPattern);
    }
    // delta rule extenders...
    for (int i = 0; i < theDeltaRuleExtenders.size(); i++) {
        DeltaRuleExtender extender = (DeltaRuleExtender) theDeltaRuleExtenders.get(i);
        if (extender.isEnabled()) {
            extender.preWeightUpdate(currentInps, currentPattern);
        }
    }
    // gradient extenders...
    for (int i = 0; i < theGradientExtenders.size(); i++) {
        GradientExtender extender = (GradientExtender) theGradientExtenders.get(i);
        if (extender.isEnabled()) {
            extender.preWeightUpdate(currentInps, currentPattern);
        }
    }
}
/**
 * Gives learners a chance to do some pre-computing before the weights are
 * updated. The default implementation does nothing; subclasses may override.
 *
 * @param currentPattern the back propagated gradients.
 * @param currentInps the forwarded input.
 */
protected void preWeightUpdateImpl(double[] currentPattern, double[] currentInps) {
}
/**
 * Gives learners and extenders a chance to do some post-computing after the
 * biases are updated. Notification happens in the reverse order of
 * {@link #preBiasUpdate(double[])}: gradient extenders, delta rule extenders,
 * the update weight extender and finally the learner hook.
 *
 * @param currentGradientOuts the back propagated gradients.
 */
protected final void postBiasUpdate(double[] currentGradientOuts) {
    // gradient extenders...
    for (int i = 0; i < theGradientExtenders.size(); i++) {
        GradientExtender extender = (GradientExtender) theGradientExtenders.get(i);
        if (extender.isEnabled()) {
            extender.postBiasUpdate(currentGradientOuts);
        }
    }
    // delta rule extenders...
    for (int i = 0; i < theDeltaRuleExtenders.size(); i++) {
        DeltaRuleExtender extender = (DeltaRuleExtender) theDeltaRuleExtenders.get(i);
        if (extender.isEnabled()) {
            extender.postBiasUpdate(currentGradientOuts);
        }
    }
    // update weight extender...
    if (theUpdateWeightExtender != null && theUpdateWeightExtender.isEnabled()) {
        theUpdateWeightExtender.postBiasUpdate(currentGradientOuts);
    }
    postBiasUpdateImpl(currentGradientOuts);
}
/**
 * Gives learners a chance to do some post-computing after the biases are
 * updated. The default implementation does nothing; subclasses may override.
 *
 * @param currentGradientOuts the back propagated gradients.
 */
protected void postBiasUpdateImpl(double[] currentGradientOuts) {
}
/**
 * Gives learners and extenders a chance to do some post-computing after the
 * weights are updated. Notification happens in the reverse order of
 * {@link #preWeightUpdate(double[], double[])}: gradient extenders, delta
 * rule extenders, the update weight extender and finally the learner hook.
 *
 * @param currentPattern the back propagated gradients.
 * @param currentInps the forwarded input.
 */
protected final void postWeightUpdate(double[] currentPattern, double[] currentInps) {
    // gradient extenders... Note the extenders take (inputs, gradients),
    // the reverse of this method's own parameter order.
    for (int i = 0; i < theGradientExtenders.size(); i++) {
        GradientExtender extender = (GradientExtender) theGradientExtenders.get(i);
        if (extender.isEnabled()) {
            extender.postWeightUpdate(currentInps, currentPattern);
        }
    }
    // delta extenders...
    for (int i = 0; i < theDeltaRuleExtenders.size(); i++) {
        DeltaRuleExtender extender = (DeltaRuleExtender) theDeltaRuleExtenders.get(i);
        if (extender.isEnabled()) {
            extender.postWeightUpdate(currentInps, currentPattern);
        }
    }
    // update weight extenders...
    if (theUpdateWeightExtender != null && theUpdateWeightExtender.isEnabled()) {
        theUpdateWeightExtender.postWeightUpdate(currentInps, currentPattern);
    }
    // BUG FIX: the original passed currentInps twice, dropping the gradients.
    // Pass (currentPattern, currentInps) to match postWeightUpdateImpl's
    // signature and the parallel call in preWeightUpdate().
    postWeightUpdateImpl(currentPattern, currentInps);
}
/**
 * Gives learners a chance to do some post-computing after the weights are
 * updated. The default implementation does nothing; subclasses may override.
 *
 * @param currentPattern the back propagated gradients.
 * @param currentInps the forwarded input.
 */
protected void postWeightUpdateImpl(double[] currentPattern, double[] currentInps) {
}
/**
 * Adds a delta rule extender to the end of the extender chain and registers
 * this learner with it.
 *
 * @param aDeltaRuleExtender the delta rule extender to add.
 */
public void addDeltaRuleExtender(DeltaRuleExtender aDeltaRuleExtender) {
    // Note one needs to be careful to the order of the extenders,
    // also note that basic and batch learner add a delta (momentum)
    // extender in their constructor
    theDeltaRuleExtenders.add(aDeltaRuleExtender);
    aDeltaRuleExtender.setLearner(this);
}
/**
 * Adds a gradient extender to the end of the extender chain and registers
 * this learner with it.
 *
 * @param aGradientExtender the gradient extender to add.
 */
public void addGradientExtender(GradientExtender aGradientExtender) {
    theGradientExtenders.add(aGradientExtender);
    aGradientExtender.setLearner(this);
}
/**
 * Sets the update weight extender. Only one update weight extender is held
 * at a time; any previously set extender is replaced.
 *
 * @param anUpdateWeightExtender the update weight extender to set.
 */
public void setUpdateWeightExtender(UpdateWeightExtender anUpdateWeightExtender) {
    theUpdateWeightExtender = anUpdateWeightExtender;
    theUpdateWeightExtender.setLearner(this);
}
/**
 * Gets the update weight extender.
 *
 * @return the update weight extender (may be null; the pre/post update
 *         hooks in this class null-check it before use).
 */
public UpdateWeightExtender getUpdateWeightExtender() {
    return theUpdateWeightExtender;
}
}
| Java |
/*
* CircularSpatialMap.java
*
* Created on 2003/6/13 11:34
*/
package org.joone.engine;
/**
* This class implements the SpatialMap interface providing a circular spatial map for use with the GaussianLayer and Kohonen Networks.
* The radius of the circle is equal to the initial Gaussian Size and is reduced if training is currently in process.
*/
public class CircularSpatialMap extends SpatialMap {

    private static final long serialVersionUID = 442118480555350769L;

    /** Creates a new instance of CircularSpatialMap */
    public CircularSpatialMap() {
    }

    /**
     * Fills <code>n_outs</code> with the neighborhood function (the individual
     * learning rate) of every output, based on its distance from the winning
     * neuron. Outputs beyond the squared neighborhood radius are set to 0.
     *
     * @param distances the distances produced by the synapse/layer; the winner
     *                  is extracted from these.
     * @param n_outs receives the neighborhood value for each output.
     * @param isLearning currently unused by this implementation.
     */
    public void ApplyNeighborhoodFunction(double[] distances, double[] n_outs, boolean isLearning) {
        // Extract the winning neuron from the distances passed in by the synapse/layer.
        extractWinner(distances);
        final int winX = getWinnerX();
        final int winY = getWinnerY();
        final int winZ = getWinnerZ();
        final double nbhRadius = getCurrentGaussianSize(); // current neighborhood radius
        final double nbhRadiusSq = nbhRadius * nbhRadius;  // neighborhood radius squared
        // Walk the whole map and set the neighborhood value of each output.
        for (int z = 0; z < getMapDepth(); z++) {
            for (int y = 0; y < getMapHeight(); y++) {
                for (int x = 0; x < getMapWidth(); x++) {
                    // NOTE(review): the raw result of distanceBetween() is compared
                    // against the SQUARED radius — presumably it returns a squared
                    // distance; confirm against SpatialMap.distanceBetween().
                    final double distToNode = distanceBetween(winX, winY, winZ, x, y, z);
                    // Linear index of output (x, y, z) in the flattened map.
                    final int currentOutput = x + (y * getMapWidth()) + (z * (getMapWidth() * getMapHeight()));
                    if (distToNode <= nbhRadiusSq) {
                        n_outs[currentOutput] = getCircle2DDistanceFalloff(distToNode, nbhRadiusSq);
                    } else {
                        n_outs[currentOutput] = 0; // outside the neighborhood
                    }
                }
            }
        }
    }

    /**
     * Gets the fall off distance from the edge of the radius.
     * @param distSq The square of the distance to the output/node being measured.
     * @param radiusSq The square of the radius of the current circular spatial neighborhood.
     * @return The fall off distance between the distSq and the radiusSq.
     */
    private double getCircle2DDistanceFalloff(double distSq, double radiusSq) {
        return Math.exp(-(distSq) / (2 * radiusSq));
    }
}
| Java |
package org.joone.engine;
import java.beans.*;
/**
 * BeanInfo for {@link Monitor}: exposes the monitor's training parameters
 * (learning rate, momentum, epochs, pattern counts, validation flags, ...)
 * to visual bean editors. This class is largely NetBeans-generated; make
 * manual changes only outside the GEN marker comments, or they will be lost
 * on regeneration.
 */
public class MonitorBeanInfo extends SimpleBeanInfo {
    // Bean descriptor//GEN-FIRST:BeanDescriptor
    /*lazy BeanDescriptor*/
    private static BeanDescriptor getBdescriptor(){
        BeanDescriptor beanDescriptor = new BeanDescriptor  ( org.joone.engine.Monitor.class , null ); // NOI18N//GEN-HEADEREND:BeanDescriptor
        // Here you can add code for customizing the BeanDescriptor.
        return beanDescriptor;     }//GEN-LAST:BeanDescriptor
    // Property identifiers//GEN-FIRST:Properties
    private static final int PROPERTY_batchSize = 0;
    private static final int PROPERTY_currentCicle = 1;
    private static final int PROPERTY_globalError = 2;
    private static final int PROPERTY_learning = 3;
    private static final int PROPERTY_learningMode = 4;
    private static final int PROPERTY_learningRate = 5;
    private static final int PROPERTY_momentum = 6;
    private static final int PROPERTY_preLearning = 7;
    private static final int PROPERTY_singleThreadMode = 8;
    private static final int PROPERTY_supervised = 9;
    private static final int PROPERTY_totCicles = 10;
    private static final int PROPERTY_trainingPatterns = 11;
    private static final int PROPERTY_useRMSE = 12;
    private static final int PROPERTY_validation = 13;
    private static final int PROPERTY_validationPatterns = 14;
    // Property array
    /*lazy PropertyDescriptor*/
    private static PropertyDescriptor[] getPdescriptor(){
        PropertyDescriptor[] properties = new PropertyDescriptor[15];
        try {
            properties[PROPERTY_batchSize] = new PropertyDescriptor ( "batchSize", org.joone.engine.Monitor.class, "getBatchSize", "setBatchSize" ); // NOI18N
            properties[PROPERTY_currentCicle] = new PropertyDescriptor ( "currentCicle", org.joone.engine.Monitor.class, "getCurrentCicle", "setCurrentCicle" ); // NOI18N
            properties[PROPERTY_currentCicle].setExpert ( true );
            properties[PROPERTY_currentCicle].setHidden ( true );
            properties[PROPERTY_globalError] = new PropertyDescriptor ( "globalError", org.joone.engine.Monitor.class, "getGlobalError", "setGlobalError" ); // NOI18N
            properties[PROPERTY_globalError].setExpert ( true );
            properties[PROPERTY_globalError].setHidden ( true );
            properties[PROPERTY_learning] = new PropertyDescriptor ( "learning", org.joone.engine.Monitor.class, "isLearning", "setLearning" ); // NOI18N
            properties[PROPERTY_learningMode] = new PropertyDescriptor ( "learningMode", org.joone.engine.Monitor.class, "getLearningMode", "setLearningMode" ); // NOI18N
            properties[PROPERTY_learningRate] = new PropertyDescriptor ( "learningRate", org.joone.engine.Monitor.class, "getLearningRate", "setLearningRate" ); // NOI18N
            properties[PROPERTY_momentum] = new PropertyDescriptor ( "momentum", org.joone.engine.Monitor.class, "getMomentum", "setMomentum" ); // NOI18N
            properties[PROPERTY_preLearning] = new PropertyDescriptor ( "preLearning", org.joone.engine.Monitor.class, "getPreLearning", "setPreLearning" ); // NOI18N
            properties[PROPERTY_preLearning].setDisplayName ( "pre-learning cycles" );
            properties[PROPERTY_singleThreadMode] = new PropertyDescriptor ( "singleThreadMode", org.joone.engine.Monitor.class, "isSingleThreadMode", "setSingleThreadMode" ); // NOI18N
            properties[PROPERTY_supervised] = new PropertyDescriptor ( "supervised", org.joone.engine.Monitor.class, "isSupervised", "setSupervised" ); // NOI18N
            properties[PROPERTY_totCicles] = new PropertyDescriptor ( "totCicles", org.joone.engine.Monitor.class, "getTotCicles", "setTotCicles" ); // NOI18N
            properties[PROPERTY_totCicles].setDisplayName ( "epochs" );
            properties[PROPERTY_trainingPatterns] = new PropertyDescriptor ( "trainingPatterns", org.joone.engine.Monitor.class, "getTrainingPatterns", "setTrainingPatterns" ); // NOI18N
            properties[PROPERTY_trainingPatterns].setDisplayName ( "training patterns" );
            properties[PROPERTY_useRMSE] = new PropertyDescriptor ( "useRMSE", org.joone.engine.Monitor.class, "isUseRMSE", "setUseRMSE" ); // NOI18N
            properties[PROPERTY_validation] = new PropertyDescriptor ( "validation", org.joone.engine.Monitor.class, "isValidation", "setValidation" ); // NOI18N
            properties[PROPERTY_validationPatterns] = new PropertyDescriptor ( "validationPatterns", org.joone.engine.Monitor.class, "getValidationPatterns", "setValidationPatterns" ); // NOI18N
            properties[PROPERTY_validationPatterns].setDisplayName ( "validation patterns" );
        }
        catch(IntrospectionException e) {
            e.printStackTrace();
        }//GEN-HEADEREND:Properties
        // Here you can add code for customizing the properties array.
        return properties;     }//GEN-LAST:Properties
    // EventSet identifiers//GEN-FIRST:Events
    // EventSet array
    /*lazy EventSetDescriptor*/
    private static EventSetDescriptor[] getEdescriptor(){
        EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
        // Here you can add code for customizing the event sets array.
        return eventSets;     }//GEN-LAST:Events
    // Method identifiers//GEN-FIRST:Methods
    // Method array
    /*lazy MethodDescriptor*/
    private static MethodDescriptor[] getMdescriptor(){
        MethodDescriptor[] methods = new MethodDescriptor[0];//GEN-HEADEREND:Methods
        // Here you can add code for customizing the methods array.
        return methods;     }//GEN-LAST:Methods
    private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
    private static final int defaultEventIndex = -1;//GEN-END:Idx
    //GEN-FIRST:Superclass
    // Here you can add code for customizing the Superclass BeanInfo.
    //GEN-LAST:Superclass
    /**
     * Gets the bean's <code>BeanDescriptor</code>s.
     *
     * @return BeanDescriptor describing the editable
     * properties of this bean.  May return null if the
     * information should be obtained by automatic analysis.
     */
    public BeanDescriptor getBeanDescriptor() {
        return getBdescriptor();
    }
    /**
     * Gets the bean's <code>PropertyDescriptor</code>s.
     *
     * @return An array of PropertyDescriptors describing the editable
     * properties supported by this bean.  May return null if the
     * information should be obtained by automatic analysis.
     * <p>
     * If a property is indexed, then its entry in the result array will
     * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
     * A client of getPropertyDescriptors can use "instanceof" to check
     * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
     */
    public PropertyDescriptor[] getPropertyDescriptors() {
        return getPdescriptor();
    }
    /**
     * Gets the bean's <code>EventSetDescriptor</code>s.
     *
     * @return An array of EventSetDescriptors describing the kinds of
     * events fired by this bean.  May return null if the information
     * should be obtained by automatic analysis.
     */
    public EventSetDescriptor[] getEventSetDescriptors() {
        return getEdescriptor();
    }
    /**
     * Gets the bean's <code>MethodDescriptor</code>s.
     *
     * @return An array of MethodDescriptors describing the methods
     * implemented by this bean.  May return null if the information
     * should be obtained by automatic analysis.
     */
    public MethodDescriptor[] getMethodDescriptors() {
        return getMdescriptor();
    }
    /**
     * A bean may have a "default" property that is the property that will
     * mostly commonly be initially chosen for update by human's who are
     * customizing the bean.
     * @return  Index of default property in the PropertyDescriptor array
     * 		returned by getPropertyDescriptors.
     * <P>	Returns -1 if there is no default property.
     */
    public int getDefaultPropertyIndex() {
        return defaultPropertyIndex;
    }
    /**
     * A bean may have a "default" event that is the event that will
     * mostly commonly be used by human's when using the bean.
     * @return Index of default event in the EventSetDescriptor array
     *		returned by getEventSetDescriptors.
     * <P>	Returns -1 if there is no default event.
     */
    public int getDefaultEventIndex() {
        return defaultEventIndex;
    }
}
| Java |
/*
* FanInBasedWeightInitializer.java
*
* Created on December 6, 2004, 12:17 PM
*/
package org.joone.engine.weights;
import org.joone.engine.Matrix;
/**
* The weights are uniformly distributed (that is randomly) within the range <code>[LB/F_i, UB/F_i]</code>.
* <code>LB</code> and <code>UB</code> stand for <i>lower bound</i> and <i>upper bound</i>,
* which is a certain number. Here the bounds will be by default -2.4 and 2.4 as described
* in <i>Neural Networks - A Comprehensive Foundation, Haykin</i>, chapter 6.7 <i>Some Hints
* for Making the Back-Propagation Algorithm Perform Better</i>. <code>F_i</code> is the fan-in,
* i.e. the total number of inputs) of neuron i. There is also an option to use instead of
* <code>F_i</code> the square root of <code>F_i</code>, which is also used in some cases.
*
* @author Boris Jansen
*/
public class FanInBasedWeightInitializer implements WeightInitializer {

    /** The lower bound. */
    private double lowerBound = -2.4; // default

    /** The upper bound. */
    private double upperBound = 2.4; // default

    /** Flag indicating if we should use the square root of the fan-in (<code>true</code>), or should
     * we use the normal fan-in (<code>false</code>) to determine the interval to init the weights with. */
    private boolean sqrtFanIn = false; // default

    /**
     * Creates a new instance of FanInBasedWeightInitializer. It uses its default values +/- 2.4
     * for the bounds and the normal fan-in.
     */
    public FanInBasedWeightInitializer() {
    }

    /**
     * Creates a new instance of FanInBasedWeightInitializer.
     *
     * @param aBoundary the boundary to use to init the weights
     * (<code>[-aBoundary/F_i, aBoundary/F_i]</code>, where <code>F_i</code> is
     * the fan-in of neuron i.
     */
    public FanInBasedWeightInitializer(double aBoundary) {
        lowerBound = -aBoundary;
        upperBound = aBoundary;
    }

    /**
     * Creates a new instance of FanInBasedWeightInitializer
     *
     * @param aLowerBound the lower boundary to use divided by the fan-in of a neuron.
     * @param anUpperBound the upper boundary to use divided by the fan-in of a neuron.
     */
    public FanInBasedWeightInitializer(double aLowerBound, double anUpperBound) {
        lowerBound = aLowerBound;
        upperBound = anUpperBound;
    }

    /**
     * Initializes every enabled, non-fixed weight uniformly at random within
     * <code>[lowerBound/F_i, upperBound/F_i]</code> (or with <code>sqrt(F_i)</code>
     * as divisor when the square-root mode is set).
     *
     * @param aMatrix the weights (biases) to be initialized.
     */
    public void initialize(Matrix aMatrix) {
        // fan-in equals the rows of a matrix
        double fanIn = (double) aMatrix.getM_rows();
        if (isSqrtFanIn()) {
            fanIn = Math.sqrt(fanIn);
        }
        // Hoisted out of the loops: the divisor and the interval are identical
        // for every cell, so compute the scaled offset and range only once.
        final double scaledLower = lowerBound / fanIn;
        final double scaledRange = (upperBound - lowerBound) / fanIn;
        for (int x = 0; x < aMatrix.getM_rows(); x++) {
            for (int y = 0; y < aMatrix.getM_cols(); y++) {
                if (aMatrix.enabled[x][y] && !aMatrix.fixed[x][y]) {
                    aMatrix.value[x][y] = scaledLower + Math.random() * scaledRange;
                }
            }
        }
    }

    /**
     * Sets the flag indicating the mode of the fan-in to use. If set to <code>true</code>
     * the square root of the fan-in will be used, otherwise the normal fan-in will be used
     * (default mode).
     *
     * @param aMode the mode to use, <code>true</code> for the square root of the fan-in,
     * <code>false</code> for the normal fan-in.
     */
    public void setSqrtFanIn(boolean aMode) {
        sqrtFanIn = aMode;
    }

    /**
     * Checks if the mode of the fan-in is the square root mode, i.e. the square root
     * of the fan-in is used or if the normal mode, i.e. the normal fan-in is used.
     *
     * @return true if the square root of the fan-in is used, false otherwise.
     */
    public boolean isSqrtFanIn() {
        return sqrtFanIn;
    }

    /**
     * Gets the lower bound.
     *
     * @return the lower bound.
     */
    public double getLowerBound() {
        return lowerBound;
    }

    /**
     * Sets the lower bound.
     *
     * @param aLowerBound the new lower bound.
     */
    public void setLowerBound(double aLowerBound) {
        lowerBound = aLowerBound;
    }

    /**
     * Gets the upper bound.
     *
     * @return the upper bound.
     */
    public double getUpperBound() {
        return upperBound;
    }

    /**
     * Sets the upper bound.
     *
     * @param anUpperBound the new upper bound.
     */
    public void setUpperBound(double anUpperBound) {
        upperBound = anUpperBound;
    }
}
| Java |
/*
* WeightInitializer.java
*
* Created on October 15, 2004, 3:20 PM
*/
package org.joone.engine.weights;
import org.joone.engine.Matrix;
/**
 * This interface describes the methods that need to be implemented in order to create new
 * weight (or bias) initializers. Weight initializers can be set by using the method
 * {@link org.joone.engine.Matrix#setWeightInitializer(WeightInitializer)}.
 *
 * @author Boris Jansen
 */
public interface WeightInitializer extends java.io.Serializable {
    /**
     * Initializes weights (biases) represented by the matrix.
     *
     * @param aMatrix the weights (biases) to be initialized.
     */
    public void initialize(Matrix aMatrix);
}
| Java |
/*
* RandomWeightInitializer.java
*
* Created on October 15, 2004, 3:30 PM
*/
package org.joone.engine.weights;
import org.joone.engine.Matrix;
import org.joone.log.*;
/**
* This class initializes weights (and biases) in a random way within a given domain.
*
* @author Boris Jansen
*/
public class RandomWeightInitializer implements WeightInitializer {

    /** Logger for this class. */
    private static final ILogger log = LoggerFactory.getLogger(RandomWeightInitializer.class);

    private static final long serialVersionUID = 1547731234507850525L;

    /** The upper boundary of the domain to initialize the weights with. */
    private double upperBound = 0;

    /** The lower boundary of the domain to initialize the weights with. */
    private double lowerBound = 0;

    /**
     * Creates a new instance of RandomWeightInitializer with a symmetric
     * domain <code>[-aBoundary, aBoundary]</code>. A negative argument is
     * flipped to positive (with a warning).
     *
     * @param aBoundary the boundaries of the domain to initialize the weights with
     * to <code>[-aBoundary, aBoundary]</code>.
     */
    public RandomWeightInitializer(double aBoundary) {
        if(aBoundary < 0) {
            log.warn("Boundary smaller than zero. Domain set to [" + aBoundary + ", " + -aBoundary + "].");
            aBoundary = -aBoundary;
        }
        lowerBound = -aBoundary;
        upperBound = aBoundary;
    }

    /**
     * Creates a new instance of RandomWeightInitializer and sets the domain to
     * initialize the weights with to <code>[aLowerBound, anUpperBound]</code>.
     * If the bounds are given in the wrong order they are swapped (with a warning).
     *
     * @param aLowerBound the lower boundary of the domain to initialize the weights with.
     * @param anUpperBound the upper boundary of the domain to initialize the weights with.
     */
    public RandomWeightInitializer(double aLowerBound, double anUpperBound) {
        if(aLowerBound > anUpperBound) {
            log.warn("Lower bound is larger than upper bound. Domain set to ["
                + anUpperBound + ", " + aLowerBound + "].");
            // Swap so that lowerBound <= upperBound always holds.
            lowerBound = anUpperBound;
            upperBound = aLowerBound;
        } else {
            lowerBound = aLowerBound;
            upperBound = anUpperBound;
        }
    }

    /**
     * Initializes the weights or biases within the domain <code>[lowerBound, upperBound]</code>.
     * Disabled or fixed cells are left untouched.
     *
     * @param aMatrix the weights or biases to initialize.
     */
    public void initialize(Matrix aMatrix) {
        final double range = upperBound - lowerBound;
        for(int row = 0; row < aMatrix.getM_rows(); row++) {
            for(int col = 0; col < aMatrix.getM_cols(); col++) {
                if(!aMatrix.enabled[row][col] || aMatrix.fixed[row][col]) {
                    continue; // leave disabled/frozen weights untouched
                }
                aMatrix.value[row][col] = lowerBound + Math.random() * range;
            }
        }
    }

    /**
     * Gets the lower bound.
     *
     * @return the lower bound.
     */
    public double getLowerBound() {
        return lowerBound;
    }

    /**
     * Sets the lower bound.
     *
     * @param aLowerBound the new lower bound.
     */
    public void setLowerBound(double aLowerBound) {
        lowerBound = aLowerBound;
    }

    /**
     * Gets the upper bound.
     *
     * @return the upper bound.
     */
    public double getUpperBound() {
        return upperBound;
    }

    /**
     * Sets the upper bound.
     *
     * @param anUpperBound the new upper bound.
     */
    public void setUpperBound(double anUpperBound) {
        upperBound = anUpperBound;
    }
}
| Java |
/*
* netStoppedEventNotifier.java
*
* Created on 31 gennaio 2003, 21.19
*/
package org.joone.engine;
/**
* Raises the netStopped event from within a separate Thread
* @author root
*/
public class NetStoppedEventNotifier extends AbstractEventNotifier {

    /**
     * Creates a new instance of NetStoppedEventNotifier.
     *
     * @param mon the monitor on which the netStopped event will be raised.
     */
    public NetStoppedEventNotifier(Monitor mon) {
        super(mon);
    }

    /**
     * Raises the netStopped event on the monitor, if one was supplied.
     */
    public void run() {
        if (monitor == null) {
            return; // nothing to notify
        }
        monitor.fireNetStopped();
    }
}
| Java |
package org.joone.engine;
import java.beans.*;
/**
 * BeanInfo for {@link TanhLayer}, exposing its editable properties to visual
 * bean editors. This class is NetBeans-generated; make manual changes only
 * outside the GEN marker comments, or they will be lost on regeneration.
 * NOTE(review): unlike MonitorBeanInfo, this class builds its descriptors in
 * static initializers and silently swallows IntrospectionException, which
 * would leave null entries in the properties array if introspection failed.
 */
public class TanhLayerBeanInfo extends SimpleBeanInfo {
    // Bean descriptor//GEN-FIRST:BeanDescriptor
    private static BeanDescriptor beanDescriptor = new BeanDescriptor  ( TanhLayer.class , null );
    private static BeanDescriptor getBdescriptor(){
        return beanDescriptor;
    }
    static {//GEN-HEADEREND:BeanDescriptor
        // Here you can add code for customizing the BeanDescriptor.
    }//GEN-LAST:BeanDescriptor
    // Property identifiers//GEN-FIRST:Properties
    private static final int PROPERTY_allInputs = 0;
    private static final int PROPERTY_allOutputs = 1;
    private static final int PROPERTY_bias = 2;
    private static final int PROPERTY_inputLayer = 3;
    private static final int PROPERTY_layerName = 4;
    private static final int PROPERTY_learner = 5;
    private static final int PROPERTY_monitor = 6;
    private static final int PROPERTY_outputLayer = 7;
    private static final int PROPERTY_rows = 8;
    // Property array
    private static PropertyDescriptor[] properties = new PropertyDescriptor[9];
    private static PropertyDescriptor[] getPdescriptor(){
        return properties;
    }
    static {
        try {
            properties[PROPERTY_allInputs] = new PropertyDescriptor ( "allInputs", TanhLayer.class, "getAllInputs", "setAllInputs" );
            properties[PROPERTY_allInputs].setExpert ( true );
            properties[PROPERTY_allOutputs] = new PropertyDescriptor ( "allOutputs", TanhLayer.class, "getAllOutputs", "setAllOutputs" );
            properties[PROPERTY_allOutputs].setExpert ( true );
            properties[PROPERTY_bias] = new PropertyDescriptor ( "bias", TanhLayer.class, "getBias", "setBias" );
            properties[PROPERTY_bias].setExpert ( true );
            properties[PROPERTY_inputLayer] = new PropertyDescriptor ( "inputLayer", TanhLayer.class, "isInputLayer", null );
            properties[PROPERTY_inputLayer].setExpert ( true );
            properties[PROPERTY_layerName] = new PropertyDescriptor ( "layerName", TanhLayer.class, "getLayerName", "setLayerName" );
            properties[PROPERTY_learner] = new PropertyDescriptor ( "learner", TanhLayer.class, "getLearner", null );
            properties[PROPERTY_learner].setExpert ( true );
            properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", TanhLayer.class, "getMonitor", "setMonitor" );
            properties[PROPERTY_monitor].setExpert ( true );
            properties[PROPERTY_outputLayer] = new PropertyDescriptor ( "outputLayer", TanhLayer.class, "isOutputLayer", null );
            properties[PROPERTY_outputLayer].setExpert ( true );
            properties[PROPERTY_rows] = new PropertyDescriptor ( "rows", TanhLayer.class, "getRows", "setRows" );
        }
        catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
        // Here you can add code for customizing the properties array.
    }//GEN-LAST:Properties
    // EventSet identifiers//GEN-FIRST:Events
    // EventSet array
    private static EventSetDescriptor[] eventSets = new EventSetDescriptor[0];
    private static EventSetDescriptor[] getEdescriptor(){
        return eventSets;
    }
    //GEN-HEADEREND:Events
    // Here you can add code for customizing the event sets array.
    //GEN-LAST:Events
    // Method identifiers//GEN-FIRST:Methods
    // Method array
    private static MethodDescriptor[] methods = new MethodDescriptor[0];
    private static MethodDescriptor[] getMdescriptor(){
        return methods;
    }
    //GEN-HEADEREND:Methods
    // Here you can add code for customizing the methods array.
    //GEN-LAST:Methods
    private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
    private static final int defaultEventIndex = -1;//GEN-END:Idx
    /**
     * Gets the bean's <code>BeanDescriptor</code>s.
     *
     * @return BeanDescriptor describing the editable
     * properties of this bean.  May return null if the
     * information should be obtained by automatic analysis.
     */
    public BeanDescriptor getBeanDescriptor() {
        return beanDescriptor;
    }
    /**
     * Gets the bean's <code>PropertyDescriptor</code>s.
     *
     * @return An array of PropertyDescriptors describing the editable
     * properties supported by this bean.  May return null if the
     * information should be obtained by automatic analysis.
     * <p>
     * If a property is indexed, then its entry in the result array will
     * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
     * A client of getPropertyDescriptors can use "instanceof" to check
     * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
     */
    public PropertyDescriptor[] getPropertyDescriptors() {
        return properties;
    }
    /**
     * Gets the bean's <code>EventSetDescriptor</code>s.
     *
     * @return An array of EventSetDescriptors describing the kinds of
     * events fired by this bean.  May return null if the information
     * should be obtained by automatic analysis.
     */
    public EventSetDescriptor[] getEventSetDescriptors() {
        return eventSets;
    }
    /**
     * Gets the bean's <code>MethodDescriptor</code>s.
     *
     * @return An array of MethodDescriptors describing the methods
     * implemented by this bean.  May return null if the information
     * should be obtained by automatic analysis.
     */
    public MethodDescriptor[] getMethodDescriptors() {
        return methods;
    }
    /**
     * A bean may have a "default" property that is the property that will
     * mostly commonly be initially chosen for update by human's who are
     * customizing the bean.
     * @return  Index of default property in the PropertyDescriptor array
     * 		returned by getPropertyDescriptors.
     * <P>	Returns -1 if there is no default property.
     */
    public int getDefaultPropertyIndex() {
        return defaultPropertyIndex;
    }
    /**
     * A bean may have a "default" event that is the event that will
     * mostly commonly be used by human's when using the bean.
     * @return Index of default event in the EventSetDescriptor array
     *		returned by getEventSetDescriptors.
     * <P>	Returns -1 if there is no default event.
     */
    public int getDefaultEventIndex() {
        return defaultEventIndex;
    }
}
| Java |
package org.joone.engine;
/**
 * A {@link FullSynapse} variant that is intended to scale the learning rate
 * for each input row by a row-dependent factor before requesting the weight
 * update (the "Freud" rule).
 */
public class FreudRuleFullSynapse extends FullSynapse {

    private static final long serialVersionUID = 4391516546875376355L;

    /** Creates a new, empty FreudRuleFullSynapse. */
    public FreudRuleFullSynapse() {
        super();
    }

    /**
     * Back propagates the gradients: computes the input-side gradients
     * (<code>bouts</code>) as the weighted sum of the output gradients, then
     * asks the learner to update the weights.
     *
     * @param pattern the back propagated gradients from the output side.
     */
    protected void backward(double[] pattern) {
        int x;
        int y;
        double s, fr;
        int m_rows = getInputDimension();
        int m_cols = getOutputDimension();
        // Weight adjustment ("Aggiustamento dei pesi").
        for (x = 0; x < m_rows; ++x) {
            s = 0;
            // BUG(review): (m_rows - x - 1) / m_rows is an INTEGER division and
            // always evaluates to 0 here (the numerator is non-negative and
            // strictly less than m_rows), so fr is always exactly 1 and the
            // per-row factor has no effect. The intended factor was presumably
            // (x + 1) / (double) m_rows.
            fr = 1 - ((m_rows - x - 1) / m_rows);
            // NOTE(review): if the division above is ever fixed, this
            // get/set pair would multiplicatively compound the stored learning
            // rate on every row (and every epoch) — confirm the intended
            // semantics before changing either line.
            fr = fr * getLearningRate();
            setLearningRate(fr);
            for (y = 0; y < m_cols; ++y) {
                s += pattern[y] * array.value[x][y];
            }
            bouts[x] = s;
        }
        myLearner.requestWeightUpdate(pattern, inps);
    }
}
package org.joone.engine;
import org.joone.log.*;
/**
* Layer that applies the tangent hyperbolic transfer function
* to its input patterns
*/
public class TanhLayer extends SimpleLayer implements LearnableLayer {
private static final long serialVersionUID = -2073914754873517298L;
/**
* Logger
* */
private static final ILogger log = LoggerFactory.getLogger (TanhLayer.class);
/** Constant to overcome the "flat spot" problem. This problem is described in:
* S.E. Fahlman, "An emperical study of learning speed in backpropagation with
* good scaling properties," Dept. Comput. Sci. Carnegie Mellon Univ., Pittsburgh,
* PA, Tech. Rep., CMU-CS-88-162, 1988.
* Setting this constant to 0 (default value), the derivative of the sigmoid function
* is unchanged (normal function). An good value for this constant might be 0.1.
*/
private double flatSpotConstant = 0.0;
/**
* default constructor
* */
public TanhLayer() {
super();
learnable = true;
}
public TanhLayer(java.lang.String name) {
this();
this.setLayerName(name);
}
/**
*
* @see SimpleLayer#backward (double[])
* */
public void backward(double[] pattern) {
super.backward(pattern);
double dw, absv;
int x;
int n = getRows();
for (x = 0; x < n; ++x) {
gradientOuts[x] = pattern[x] * ((1 + outs[x]) * (1 - outs[x]) + getFlatSpotConstant());
}
myLearner.requestBiasUpdate(gradientOuts);
}
/**
* @see SimpleLayer#forward (double[])
* */
public void forward(double[] pattern) {
double nExp, pExp;
int x;
int n = getRows();
for (x=0; x < n; ++x) {
//fast-forward :) A Tanh computation that only needs to call the expensive Math.exp once, saves a little time.
outs[x] = -1 + (2/ (1+Math.exp(-2* (pattern[x]+bias.value[x][0]) ) ) );
}
}
/** @deprecated - Used only for backward compatibility
*/
public Learner getLearner() {
learnable = true;
return super.getLearner();
}
/**
* Sets the constant to overcome the flat spot problem.
* This problem is described in:
* S.E. Fahlman, "An emperical study of learning speed in backpropagation with
* good scaling properties," Dept. Comput. Sci. Carnegie Mellon Univ., Pittsburgh,
* PA, Tech. Rep., CMU-CS-88-162, 1988.
* Setting this constant to 0 (default value), the derivative of the sigmoid function
* is unchanged (normal function). An good value for this constant might be 0.1.
*
* @param aConstant
*/
public void setFlatSpotConstant(double aConstant) {
flatSpotConstant = aConstant;
}
/**
* Gets the flat spot constant.
*
* @return the flat spot constant.
*/
public double getFlatSpotConstant() {
return flatSpotConstant;
}
} | Java |
/*
* SoftmaxLayer.java
*
* Created on 11 January 2006, 22.19
*
*/
package org.joone.engine;
/**
 * The outputs of the Softmax layer must be interpreted as probabilities.
 * The output of each node, in fact, ranges from 0 and 1, and
 * the sum of all the nodes is always 1.
 * Useful to implement the 1 of C classification network.
 *
 * @author P.Marrone
 */
public class SoftmaxLayer extends LinearLayer {

    private static final long serialVersionUID = 2243109263560495355L;

    /** Creates a new instance of SoftmaxLayer */
    public SoftmaxLayer() {
        super();
    }

    /**
     * Forward pass: outs[x] = exp(beta*pattern[x]) / sum(exp(beta*pattern)).
     *
     * @param pattern the input pattern to normalize into probabilities
     */
    public void forward(double[] pattern) {
        int n = getRows();
        double beta = getBeta(); // hoisted: loop-invariant
        // ROBUSTNESS FIX: subtract the maximum input before exponentiation so
        // Math.exp never overflows to Infinity (which made every output NaN).
        // Softmax is invariant under this shift, so the result is unchanged.
        double max = Double.NEGATIVE_INFINITY;
        for (int x = 0; x < n; ++x) {
            if (pattern[x] > max) {
                max = pattern[x];
            }
        }
        double sum = 0;
        for (int x = 0; x < n; ++x) {
            outs[x] = Math.exp(beta * (pattern[x] - max));
            sum += outs[x];
        }
        for (int x = 0; x < n; ++x) {
            outs[x] = outs[x] / sum;
        }
    }
}
| Java |
package org.joone.engine;
import org.joone.log.*;
import org.joone.engine.learning.*;
import org.joone.exception.*;
import org.joone.inspection.*;
import org.joone.inspection.implementations.*;
import org.joone.io.*;
import org.joone.net.*;
import org.joone.util.*;
import java.io.*;
import java.util.*;
/**
* The Layer object is the basic element forming the neural net.
* Primarily it consists of a number of neurons that apply a transfer
* function to the sum of a number of input patterns and convey the result
* to the output pattern. The input patterns are received from connected
* input listeners and the transformed results are passed to connected output
* listeners. The component also handles learning by accepting patterns of error
* gradients from output listeners, applying a reverse (inverse) transfer function
* and passing the result to the input listeners. Layers execute their own
* Threads to perform the perform the pattern conveyance, so that a network
* of Layers can operate in a multi-threaded manner. The execution and termination
* of the Thread is controlled by a Monitor object.
*/
public abstract class Layer implements NeuralLayer, Runnable, Serializable,
Inspectable, LearnableLayer {
/** Stop flag. If the step has this value, the execution thread terminates. */
public static final int STOP_FLAG = -1;
/** Serial version ID for this class */
private static final long serialVersionUID = -1572591602454639355L;
/** The name of the layer */
private String LayerName;
/** The number of neurons in the layer */
private int rows = 0;
/** Holds the bias of the neurons of the layer (one column per neuron) */
protected Matrix bias;
/**
 * The monitor of the layer.
 * Contains all parameters needed for the learning phase
 */
protected Monitor monitor;
/** Not used but maintained for backward serialization compatibility. */
protected int m_batch;
/** The Net's phase: false == recall; true == learning */
protected boolean learning;
/** Contains true if the Layer must use
 * a Learner instead of a built-in learning algorithm.
 * Set it in the constructor of any inherited class.
 * Used by the getLearner method.
 * @see getLearner
 */
protected boolean learnable = false;
/** Contains the list of input connected listeners (InputPatternListener) */
protected Vector inputPatternListeners = null;
/** Contains the list of output connected listeners (OutputPatternListener) */
protected Vector outputPatternListeners = null;
/** The execution Thread for this layer. */
private transient Thread myThread = null;
/** The monitor used to control read/write access to myThread */
private transient volatile Object myThreadMonitor;
/**
 * Set of output values passed from this layer
 * to connected OutputListeners during the recall phase.
 */
protected transient double[] outs;
/**
 * Set of input values passed to this layer
 * from connected InputListeners during the recall phase.
 */
protected transient double[] inps;
/**
 * Set of input error gradient values passed to this layer
 * from connected OutputListeners during the learning phase.
 */
protected transient double[] gradientInps;
/**
 * Set of output error gradient values passed from this layer
 * to connected InputListeners during the learning phase.
 */
protected transient double[] gradientOuts;
/** The step number of the network run. */
protected transient int step = 0;
/** Whether the layer is running */
protected transient volatile boolean running = false;
/** The Learner for this layer. */
protected transient Learner myLearner = null;
/** Logger for this class */
private static final ILogger log = LoggerFactory.getLogger(Layer.class);
/** The empty constructor: creates an unnamed, zero-sized layer. */
public Layer() {
}
/**
 * Creates a named layer
 * @param ElemName The name of the layer
 */
public Layer(String ElemName) {
this.setLayerName(ElemName);
}
/**
 * Perturbs the layer: adds a random noise component to the bias matrix
 * and to every input-connected Synapse.
 *
 * @param amplitude the noise's amplitude as a distance from zero,
 *                  e.g. 0.3 means noise ranging from -0.3 to 0.3
 */
public void addNoise(double amplitude) {
    bias.addNoise(amplitude);
    if (inputPatternListeners == null) {
        return;
    }
    int count = inputPatternListeners.size();
    for (int i = 0; i < count; i++) {
        InputPatternListener listener =
                (InputPatternListener) inputPatternListeners.elementAt(i);
        if (listener instanceof Synapse) {
            ((Synapse) listener).addNoise(amplitude);
        }
    }
}
/**
 * Re-initializes the weights of the biases and of all the connected
 * synapses.
 *
 * @param amplitude the amplitude of the applied noise
 */
public void randomize(double amplitude) {
    // Biases are re-initialized via Matrix.initialize(); the historical
    // bias.randomize(-amplitude, amplitude) alternative was left disabled
    // in the original code.
    bias.initialize();
    if (inputPatternListeners == null) {
        return;
    }
    int count = inputPatternListeners.size();
    for (int i = 0; i < count; i++) {
        InputPatternListener listener =
                (InputPatternListener) inputPatternListeners.elementAt(i);
        if (listener instanceof Synapse) {
            ((Synapse) listener).randomize(amplitude);
        }
    }
}
/**
 * Reverse (learning-phase) transfer function of the component.
 * Implementations consume the accumulated error gradients and fill the
 * outgoing gradient buffer.
 * @param pattern input pattern on which to apply the transfer function
 * @throws JooneRuntimeException if the computation fails
 */
protected abstract void backward(double[] pattern)
throws JooneRuntimeException;
/**
 * Copies one layer into another, to obtain a type-transformation
 * from one kind of Layer to another.
 * The old Layer is disconnected from the net, and the new Layer
 * takes its place.
 * @param newLayer the new layer with which to replace this one
 * @return The new layer
 */
public NeuralLayer copyInto(NeuralLayer newLayer) {
newLayer.setMonitor(getMonitor());
// Order matters: setRows re-creates the target's bias matrix (see
// setRows), so the bias must be copied afterwards.
newLayer.setRows(getRows());
newLayer.setBias(getBias());
newLayer.setLayerName(getLayerName());
newLayer.setAllInputs((Vector) getAllInputs().clone());
newLayer.setAllOutputs((Vector) getAllOutputs().clone());
removeAllInputs();
removeAllOutputs();
return newLayer;
}
/**
 * Recall-phase input collection: calls fwdGet on every attached input
 * synapse and accumulates the returned patterns into {@code inps},
 * resizing the layer on a length mismatch. Also tracks the highest
 * sequence number seen (STOP_FLAG always wins).
 * NOTE(review): unlike fireFwdPut/fireRevGet/fireRevPut this method does
 * not null-check inputPatternListeners; callers must guarantee it.
 */
protected void fireFwdGet() {
double[] patt;
Pattern tPatt;
InputPatternListener tempListener = null;
int currentSize = inputPatternListeners.size();
step = 0;
for (int index = 0; (index < currentSize) && running; index++) {
tempListener =
(InputPatternListener) inputPatternListeners.elementAt(index);
if (tempListener != null) {
tPatt = tempListener.fwdGet();
if (tPatt != null) {
patt = tPatt.getArray();
if (patt.length != inps.length) {
adjustSizeToFwdPattern(patt);
}
//Sum the received pattern into inps.
sumInput(patt);
if (step != STOP_FLAG)
/* In case of a recurrent network, the layer could receive
 * patterns with different sequence numbers.
 * The stored sequence number is the higher one. */
if ((step < tPatt.getCount())
|| (tPatt.getCount() == STOP_FLAG)) // The stop is guaranteed
step = tPatt.getCount();
}
}
}
}
/**
 * Calls all the fwdPut methods on the output synapses to pass
 * them the calculated patterns. The pattern is cloned for each listener
 * except on the single-listener fast path (exactly one listener, not a
 * loopback synapse, and the Monitor reports a learning cycle for this
 * sequence count).
 * @param pattern the Pattern to pass to the output synapses
 */
protected void fireFwdPut(Pattern pattern) {
if (outputPatternListeners == null) {
return;
}
int currentSize = outputPatternListeners.size();
OutputPatternListener tempListener = null;
for (int index = 0; (index < currentSize) && running; index++) {
tempListener =
(OutputPatternListener) outputPatternListeners.elementAt(index);
if (tempListener != null) {
boolean loop = false;
if (tempListener instanceof Synapse)
loop = ((Synapse)tempListener).isLoopBack();
if ((currentSize == 1)
&& getMonitor().isLearningCicle(pattern.getCount())
&& !loop)
tempListener.fwdPut(pattern);
else
tempListener.fwdPut((Pattern) pattern.clone());
}
}
}
/**
 * Learning-phase gradient collection: calls revGet on every attached
 * output synapse and accumulates the returned error gradients into
 * {@code gradientInps}, resizing the layer on a length mismatch.
 */
protected void fireRevGet() {
if (outputPatternListeners == null) {
return;
}
double[] patt;
Pattern tPatt;
int currentSize = outputPatternListeners.size();
OutputPatternListener tempListener = null;
for (int index = 0; (index < currentSize) && running; index++) {
tempListener =
(OutputPatternListener) outputPatternListeners.elementAt(index);
if (tempListener != null) {
tPatt = tempListener.revGet();
if (tPatt != null) {
patt = tPatt.getArray();
if (patt.length != gradientInps.length) {
adjustSizeToRevPattern(patt);
}
// Sum the received error gradient pattern into gradientInps.
sumBackInput(patt);
}
}
}
}
/**
 * Calls all the revPut methods on the input synapses to pass them the
 * calculated error gradients. The pattern is cloned for each listener
 * except on the single-listener, non-loopback fast path.
 * @param pattern the Pattern to pass to the input listeners
 */
protected void fireRevPut(Pattern pattern) {
if (inputPatternListeners == null) {
return;
}
int currentSize = inputPatternListeners.size();
InputPatternListener tempListener = null;
for (int index = 0; (index < currentSize) && running; index++) {
tempListener =
(InputPatternListener) inputPatternListeners.elementAt(index);
if (tempListener != null) {
boolean loop = false;
if (tempListener instanceof Synapse)
loop = ((Synapse)tempListener).isLoopBack();
if ((currentSize == 1) && !loop)
tempListener.revPut(pattern);
else
tempListener.revPut((Pattern) pattern.clone());
}
}
}
/**
 * Resizes this layer when a forwarded pattern's length differs from the
 * current number of neurons, logging the adjustment. Subclasses
 * (e.g. RBF layers) may override this hook to react differently.
 *
 * @param aPattern the pattern whose length disagrees with getRows()
 */
protected void adjustSizeToFwdPattern(double[] aPattern) {
    int previousRows = getRows();
    setRows(aPattern.length);
    log.warn("Pattern size mismatches #neurons. #neurons in layer '"
            + getLayerName() + "' adjusted [fwd pass, "
            + previousRows + " -> " + getRows() + "].");
}
/**
 * Resizes this layer when a reversed (gradient) pattern's length differs
 * from the current number of neurons, logging the adjustment. Subclasses
 * may override this hook to react differently.
 *
 * @param aPattern the pattern whose length disagrees with getRows()
 */
protected void adjustSizeToRevPattern(double[] aPattern) {
    int previousRows = getRows();
    setRows(aPattern.length);
    log.warn("Pattern size mismatches #neurons. #neurons in layer '"
            + getLayerName() + "' adjusted [rev pass, "
            + previousRows + " -> " + getRows() + "].");
}
/**
 * Forward (recall-phase) transfer function used to recall a result on a
 * trained net. Implementations consume the accumulated inputs and fill
 * the output buffer.
 * @param pattern input pattern to which to apply the transfer function
 * @throws JooneRuntimeException if the computation fails
 */
// TO DO: Transform the JooneRuntimeException to JoonePropagationException
protected abstract void forward(double[] pattern)
throws JooneRuntimeException;
/**
 * Returns the vector of the input listeners.
 * @return the connected input pattern listeners, or null when none are attached
 */
public Vector getAllInputs() {
return inputPatternListeners;
}
/**
 * Returns the vector of the output listeners.
 * @return the connected output pattern listeners, or null when none are attached
 */
public Vector getAllOutputs() {
return outputPatternListeners;
}
/**
 * Returns the bias matrix (live reference, not a copy).
 * @return the layer biases
 */
public Matrix getBias() {
return bias;
}
/**
 * Returns the number of neurons contained in the layer
 * (alias of getRows()).
 * @return the number of neurons in the layer.
 */
public int getDimension() {
return getRows();
}
/**
 * Returns the name of the layer.
 * @return the name of the layer, or null if never set
 */
public String getLayerName() {
return LayerName;
}
/**
 * Returns the monitor object holding the learning parameters.
 * @return the layer's Monitor object, or null if never set
 */
public Monitor getMonitor() {
return monitor;
}
/**
 * Returns the dimension (# of neurons) of the Layer.
 * @return the number of neurons in the layer
 */
public int getRows() {
return rows;
}
/**
 * Removes every input listener of the layer and nulls the listener list.
 */
public void removeAllInputs() {
    if (inputPatternListeners == null) {
        return;
    }
    // Iterate over a snapshot: removeInputSynapse mutates the live list.
    Vector snapshot = (Vector) inputPatternListeners.clone();
    for (int i = 0; i < snapshot.size(); ++i) {
        removeInputSynapse((InputPatternListener) snapshot.elementAt(i));
    }
    inputPatternListeners = null;
}
/**
 * Removes every output listener of the layer and nulls the listener list.
 */
public void removeAllOutputs() {
    if (outputPatternListeners == null) {
        return;
    }
    // Iterate over a snapshot: removeOutputSynapse mutates the live list.
    Vector snapshot = (Vector) outputPatternListeners.clone();
    for (int i = 0; i < snapshot.size(); ++i) {
        removeOutputSynapse((OutputPatternListener) snapshot.elementAt(i));
    }
    outputPatternListeners = null;
}
/**
 * Removes an input listener, marks it as no longer attached, detaches it
 * from the Monitor when it is also a NeuralNetListener, and nulls the
 * listener list once it becomes empty.
 *
 * @param newListener the input listener to remove
 */
public void removeInputSynapse(InputPatternListener newListener) {
    if (inputPatternListeners == null) {
        return;
    }
    inputPatternListeners.removeElement(newListener);
    newListener.setInputFull(false);
    if (newListener instanceof NeuralNetListener) {
        removeListener((NeuralNetListener) newListener);
    }
    if (inputPatternListeners.isEmpty()) {
        inputPatternListeners = null;
    }
}
/**
 * Removes an output listener, marks it as no longer attached, detaches it
 * from the Monitor when it is also a NeuralNetListener, and nulls the
 * listener list once it becomes empty.
 *
 * @param newListener the output listener to remove
 */
public void removeOutputSynapse(OutputPatternListener newListener) {
    if (outputPatternListeners == null) {
        return;
    }
    outputPatternListeners.removeElement(newListener);
    newListener.setOutputFull(false);
    if (newListener instanceof NeuralNetListener) {
        removeListener((NeuralNetListener) newListener);
    }
    if (outputPatternListeners.isEmpty()) {
        outputPatternListeners = null;
    }
}
/** Detaches the given listener from this layer's Monitor, if one is set. */
protected void removeListener(NeuralNetListener listener) {
    Monitor mon = getMonitor();
    if (mon != null) {
        mon.removeNeuralNetListener(listener);
    }
}
/**
 * Gets a defensive copy of the values lastly outputed by the neurons of
 * this layer.
 *
 * @return a copy of the last output array
 */
public double[] getLastOutputs() {
    return Arrays.copyOf(outs, outs.length);
}
/**
 * The core running engine of the layer: while running, alternates a
 * recall (forward) step with, when the Monitor flags this cycle as a
 * learning one, a learning (backward) step, until a STOP_FLAG sequence
 * number is seen or an error occurs. Called from <CODE>start()</CODE>.
 * @throws JooneRuntimeException
 */
public void run() throws JooneRuntimeException {
Pattern patt = new Pattern();
while (running) {
// Recall phase
inps = new double[getRows()];
try {
fireFwdGet();
if (running) {
forward(inps);
patt.setArray(outs);
patt.setCount(step);
fireFwdPut(patt);
}
if (step != STOP_FLAG)
if (monitor != null) {
// Gets if the next step is a learning step
learning = monitor.isLearningCicle(step);
} else
learning = false;
else
// Stops the layer
running = false;
} catch (JooneRuntimeException jre) {
String msg = "JooneException thrown in run() method."
+ jre.getMessage();
log.error(msg);
running = false;
new NetErrorManager(getMonitor(), msg);
}
// Learning phase
if (learning && running) {
gradientInps = new double[getDimension()];
try {
fireRevGet();
backward(gradientInps);
patt.setArray(gradientOuts);
patt.setOutArray(outs);
// Added for some unsupervised learning algorithm (See org.joone.engine.Pattern)
patt.setCount(step);
fireRevPut(patt);
} catch (JooneRuntimeException jre) {
String msg = "In run() JooneException thrown." + jre.getMessage();
log.error(msg);
running = false;
new NetErrorManager(getMonitor(), msg);
}
}
} // END while (running)
resetInputListeners();
// Clear the thread reference under the thread monitor so that
// start()/stop()/isRunning() observe a consistent state.
synchronized(getThreadMonitor()) { myThread = null;}
}
/**
 * Sets the Vector that contains all the input listeners.
 * Can be useful to set the input synapses taken from another Layer.
 * Each listener's output dimension is re-aligned to this layer's size,
 * and any thread waiting on this layer is notified.
 * @param newInputPatternListeners The vector containing the list of input synapses
 */
public synchronized void setAllInputs(Vector newInputPatternListeners) {
inputPatternListeners = newInputPatternListeners;
if (inputPatternListeners != null)
for (int i = 0; i < inputPatternListeners.size(); ++i)
this.setInputDimension(
(InputPatternListener) inputPatternListeners.elementAt(i));
notifyAll();
}
/**
 * Sets the Vector that contains all the input listeners.
 * It accepts an ArrayList as parameter (added for Spring) and simply
 * delegates to setAllInputs.
 * @param newInputPatternListeners The list containing the input synapses
 */
public void setInputSynapses(ArrayList newInputPatternListeners) {
this.setAllInputs(new Vector(newInputPatternListeners));
}
/**
 * Replaces the whole set of output listeners; useful to set the output
 * synapses taken from another Layer. Each listener's input dimension is
 * re-aligned to this layer's size.
 *
 * @param newOutputPatternListeners the vector containing the output synapses
 */
public void setAllOutputs(Vector newOutputPatternListeners) {
    outputPatternListeners = newOutputPatternListeners;
    if (outputPatternListeners == null) {
        return;
    }
    for (int i = 0; i < outputPatternListeners.size(); ++i) {
        setOutputDimension(
                (OutputPatternListener) outputPatternListeners.elementAt(i));
    }
}
/**
 * Sets the Vector that contains all the output listeners.
 * It accepts an ArrayList as parameter (added for Spring) and simply
 * delegates to setAllOutputs.
 * @param newOutputPatternListeners The list containing the output synapses
 */
public void setOutputSynapses(ArrayList newOutputPatternListeners) {
this.setAllOutputs(new Vector(newOutputPatternListeners));
}
/**
 * Sets the matrix of biases.
 * NOTE(review): the matrix is stored by reference, not copied.
 * @param newBias The Matrix object containing the biases
 */
public void setBias(Matrix newBias) {
bias = newBias;
}
/**
 * Sets the dimension of the layer.
 * Subclasses override this to size their internal working buffers after
 * a resize (see setRows) or a deserialization (see readObject).
 */
protected abstract void setDimensions();
/**
 * Aligns the given input listener's output dimension with this layer's
 * neuron count. Invoked whenever an input listener is (re)attached.
 *
 * @param syn the listener to resize
 */
protected void setInputDimension(InputPatternListener syn) {
    int size = getRows();
    if (syn.getOutputDimension() != size) {
        syn.setOutputDimension(size);
    }
}
/**
 * Adds a new input synapse to the layer, if not already present and if
 * the listener is not already attached on its input side. The listener
 * receives this layer's Monitor only when it has none, and its output
 * dimension is aligned to this layer's size. Threads waiting on this
 * layer are notified in any case.
 * @param newListener The new input synapse to add
 * @return whether the listener was added
 */
public synchronized boolean addInputSynapse(InputPatternListener newListener) {
if (inputPatternListeners == null) {
inputPatternListeners = new Vector();
}
boolean retValue = false;
if (!inputPatternListeners.contains(newListener))
if (!newListener.isInputFull()) {
inputPatternListeners.addElement(newListener);
if (newListener.getMonitor() == null)
newListener.setMonitor(getMonitor());
newListener.setInputFull(true);
this.setInputDimension(newListener);
retValue = true;
}
notifyAll();
return retValue;
}
/**
 * Sets the name of the layer.
 * @param newLayerName The name
 */
public void setLayerName(String newLayerName) {
LayerName = newLayerName;
}
/**
 * Sets the monitor object and propagates it to every input and output
 * synapse currently attached.
 * @param mon The Monitor
 */
public void setMonitor(Monitor mon) {
monitor = mon;
// Sets the Monitor object of all input and output synapses
setVectMonitor(inputPatternListeners, mon);
setVectMonitor(outputPatternListeners, mon);
}
/**
 * Assigns the given Monitor to every non-null element of the vector
 * (each element is expected to be a NeuralElement).
 *
 * @param vect the vector of pattern listeners (may be null)
 * @param mon  the Monitor to propagate
 */
private void setVectMonitor(Vector vect, Monitor mon) {
    if (vect == null) {
        return;
    }
    int count = vect.size();
    for (int i = 0; i < count; i++) {
        Object listener = vect.elementAt(i);
        if (listener != null) {
            ((NeuralElement) listener).setMonitor(mon);
        }
    }
}
/**
 * Aligns the given output listener's input dimension with this layer's
 * neuron count. Invoked whenever an output listener is (re)attached.
 *
 * @param syn the OutputPatternListener to resize
 */
protected void setOutputDimension(OutputPatternListener syn) {
    int size = getRows();
    if (syn.getInputDimension() != size) {
        syn.setInputDimension(size);
    }
}
/**
 * Adds a new output synapse to the layer, if not already present and if
 * the listener is not already attached on its output side. The listener
 * receives this layer's Monitor only when it has none, and its input
 * dimension is aligned to this layer's size.
 * @param newListener The new output synapse
 * @return whether the listener was added
 */
public boolean addOutputSynapse(OutputPatternListener newListener) {
    if (outputPatternListeners == null) {
        outputPatternListeners = new Vector();
    }
    boolean retValue = false;
    if (!outputPatternListeners.contains(newListener)
            && !newListener.isOutputFull()) {
        outputPatternListeners.addElement(newListener);
        // CONSISTENCY FIX: mirror addInputSynapse — do not overwrite a
        // Monitor the synapse already carries; only fill it in when absent.
        if (newListener.getMonitor() == null) {
            newListener.setMonitor(getMonitor());
        }
        newListener.setOutputFull(true);
        this.setOutputDimension(newListener);
        retValue = true;
    }
    return retValue;
}
/**
 * Sets the dimension (# of neurons) of the Layer.
 * A resize re-creates the internal buffers, propagates the new size to
 * every connected synapse and replaces the bias matrix — so previously
 * learned biases are discarded.
 * @param newRows The number of the neurons contained in the Layer
 */
public void setRows(int newRows) {
if (rows != newRows) {
rows = newRows;
setDimensions();
setConnDimensions();
bias = new Matrix(getRows(), 1);
}
}
/**
 * Starts the Layer's execution thread.
 * The thread is created only when none is already active and at least
 * one input synapse is connected and enabled; otherwise a
 * JooneRuntimeException is thrown.
 */
public void start() {
synchronized(getThreadMonitor()) {
if (myThread == null) {
// Check if some input synapse is connected
if (inputPatternListeners != null) {
if (checkInputEnabled()) {
// If all the input synapses are disabled, the layer doesn't start
running = true;
if (getLayerName() != null)
myThread = new Thread(this, getLayerName());
else
myThread = new Thread(this);
this.init();
myThread.start();
} else {
String msg = "Can't start: '"
+ getLayerName()
+ "' has not input synapses connected and/or enabled";
log.error(msg);
throw new JooneRuntimeException(msg);
}
} else {
String msg = "Can't start: '"
+ getLayerName()
+ "' has not input synapses connected";
log.error(msg);
throw new JooneRuntimeException(msg);
}
}
}
}
/**
 * Prepares the layer for a run: initializes the Learner, then every
 * output synapse that is a NeuralElement.
 */
public void init() {
    this.initLearner();
    if (outputPatternListeners == null) {
        return;
    }
    // Iterate over a snapshot so listeners may safely alter the live list.
    Vector snapshot = (Vector) outputPatternListeners.clone();
    for (int i = 0; i < snapshot.size(); ++i) {
        Object listener = snapshot.elementAt(i);
        if (listener instanceof NeuralElement) {
            ((NeuralElement) listener).init();
        }
    }
}
/**
 * Tells whether at least one attached input synapse is enabled.
 *
 * @return false if all the input synapses are disabled
 */
protected boolean checkInputEnabled() {
    int count = inputPatternListeners.size();
    for (int i = 0; i < count; ++i) {
        InputPatternListener listener =
                (InputPatternListener) inputPatternListeners.elementAt(i);
        if (listener.isEnabled()) {
            return true;
        }
    }
    return false;
}
/**
 * Stops the Layer: clears the running flag and interrupts the execution
 * thread so that blocking synapse calls are abandoned.
 */
public void stop() {
synchronized(getThreadMonitor()) {
if (myThread != null) {
running = false;
myThread.interrupt();
}
}
}
/**
 * Resets all the attached input listeners. Invoked when the execution
 * thread terminates (see run()).
 */
protected void resetInputListeners() {
    // ROBUSTNESS FIX: guard against a null listener list, consistent with
    // fireFwdPut/fireRevGet/fireRevPut — removeAllInputs() can null the
    // list while the layer is shutting down, which previously caused a NPE.
    if (inputPatternListeners == null) {
        return;
    }
    int currentSize = inputPatternListeners.size();
    for (int index = 0; index < currentSize; index++) {
        InputPatternListener tempListener =
                (InputPatternListener) inputPatternListeners.elementAt(index);
        if (tempListener != null) {
            tempListener.reset();
        }
    }
}
/**
 * Accumulates an incoming error-gradient pattern into gradientInps
 * during the learning phase. If the pattern is shorter than
 * gradientInps, only the overlapping prefix is summed and a warning is
 * logged — same net effect as the original code, which relied on
 * catching an IndexOutOfBoundsException.
 * @param pattern array of error-gradient values
 */
protected void sumBackInput(double[] pattern) {
    // FIX: explicit bounds check instead of using IndexOutOfBoundsException
    // as control flow.
    int limit = Math.min(gradientInps.length, pattern.length);
    for (int x = 0; x < limit; ++x) {
        gradientInps[x] += pattern[x];
    }
    if (pattern.length < gradientInps.length) {
        log.warn(
            getLayerName()
            + " gradInps.size:"
            + gradientInps.length
            + " pattern.size:"
            + pattern.length
            + " x:"
            + limit);
    }
}
/**
 * Accumulates an incoming input pattern into inps during the recall
 * phase. The pattern is expected to be at least as long as inps
 * (fireFwdGet resizes the layer first on a length mismatch).
 *
 * @param pattern array of input values
 */
protected void sumInput(double[] pattern) {
    int n = inps.length;
    for (int i = 0; i < n; ++i) {
        inps[i] += pattern[i];
    }
}
/**
 * Reads in a serialised version of this layer.
 * When the stream is an XStream one the default mechanism is used;
 * otherwise the fields are read back in the exact order written by
 * writeObject, with the listener vectors in the null-terminated custom
 * format produced by writeVector.
 * @param in the serialised stream
 * @throws IOException
 * @throws ClassNotFoundException
 */
private void readObject(ObjectInputStream in)
throws IOException, ClassNotFoundException {
if (in.getClass().getName().indexOf("xstream") != -1) {
in.defaultReadObject();
} else {
LayerName = (String) in.readObject();
rows = in.readInt();
bias = (Matrix) in.readObject();
monitor = (Monitor) in.readObject();
m_batch = in.readInt();
learning = in.readBoolean();
inputPatternListeners = readVector(in);
outputPatternListeners = readVector(in);
}
// Re-create the transient working buffers for the restored size.
setDimensions();
}
/**
 * Writes a serialized version of this layer.
 * When the stream is an XStream one the default mechanism is used;
 * otherwise the fields are written in a fixed order mirrored by
 * readObject, with the listener vectors in the null-terminated format
 * produced by writeVector.
 * @param out the output stream to write this layer to
 * @throws IOException
 */
private void writeObject(ObjectOutputStream out) throws IOException {
if (out.getClass().getName().indexOf("xstream") != -1) {
out.defaultWriteObject();
} else {
out.writeObject(LayerName);
out.writeInt(rows);
out.writeObject(bias);
out.writeObject(monitor);
out.writeInt(m_batch);
out.writeBoolean(learning);
writeVector(out, inputPatternListeners);
writeVector(out, outputPatternListeners);
}
}
/**
 * Serializes a vector's elements, skipping those that implement the
 * NotSerialize marker interface when Monitor.isExporting returns TRUE.
 * The element list is terminated by a null marker (see readVector).
 * @param out the output stream to write to
 * @param vect the Vector to serialize
 * @throws IOException
 */
private void writeVector(ObjectOutputStream out, Vector vect)
throws IOException {
if (vect != null) {
boolean exporting = false;
if ((monitor != null) && (monitor.isExporting()))
exporting = true;
for (int i = 0; i < vect.size(); ++i) {
Object obj = vect.elementAt(i);
if (!(obj instanceof NotSerialize) || !(exporting))
out.writeObject(obj);
}
}
// Null marker terminates the element list; readVector stops at it.
out.writeObject(null);
}
/**
 * Creates a Vector from a serialized version: reads objects until the
 * null terminator written by writeVector is encountered.
 * @param in the input stream serialized version
 * @return the deserialized Vector (possibly empty, never null)
 * @throws IOException
 * @throws ClassNotFoundException
 */
private Vector readVector(ObjectInputStream in)
throws IOException, ClassNotFoundException {
Vector vect = new Vector();
Object obj = in.readObject();
while (obj != null) {
vect.addElement(obj);
obj = in.readObject();
}
return vect;
}
/**
 * Re-synchronizes the dimensions of every attached input and output
 * synapse with this layer's current neuron count.
 */
protected void setConnDimensions() {
    if (inputPatternListeners != null) {
        int inCount = inputPatternListeners.size();
        for (int i = 0; i < inCount; i++) {
            InputPatternListener in =
                    (InputPatternListener) inputPatternListeners.elementAt(i);
            if (in != null) {
                setInputDimension(in);
            }
        }
    }
    if (outputPatternListeners != null) {
        int outCount = outputPatternListeners.size();
        for (int i = 0; i < outCount; i++) {
            OutputPatternListener out =
                    (OutputPatternListener) outputPatternListeners.elementAt(i);
            if (out != null) {
                setOutputDimension(out);
            }
        }
    }
}
/**
 * Determines whether the execution thread exists and is alive.
 *
 * @return whether it is running
 */
public boolean isRunning() {
    synchronized (getThreadMonitor()) {
        return myThread != null && myThread.isAlive();
    }
}
/**
 * Get check messages from listeners.
 * Subclasses should call this method from their own check method.
 *
 * @see NeuralLayer
 * @return validation errors.
 */
public TreeSet check() {
// Prepare an empty set for check messages
TreeSet checks = new TreeSet();
// All layers must have at least one input pattern listener.
// The absence of an output pattern listener is acceptable.
if ((inputPatternListeners == null) || (inputPatternListeners.size() == 0)) {
checks.add(new NetCheck(NetCheck.FATAL,
"Layer has no input synapses attached.",
this));
}
// Collect the input pattern listeners' check messages
if (inputPatternListeners != null) {
for (int i = 0; i < inputPatternListeners.size(); i++) {
InputPatternListener listener =
(InputPatternListener) inputPatternListeners.elementAt(i);
checks.addAll(listener.check());
// Stream inputs must supply exactly one column per neuron.
if (listener instanceof StreamInputSynapse) {
StreamInputSynapse sis = (StreamInputSynapse) listener;
int cols = sis.numColumns();
if (cols != rows) {
checks.add(new NetCheck(NetCheck.FATAL,
"Rows parameter does not match the number of columns for the attached input stream .",
this));
}
}
}
}
// Collect the output pattern listeners' check messages
if (outputPatternListeners != null) {
for (int i = 0; i < outputPatternListeners.size(); i++) {
OutputPatternListener listener =
(OutputPatternListener) outputPatternListeners.elementAt(i);
checks.addAll(listener.check());
}
}
// Return check messages
return checks;
}
/**
 * Produces a String representation of this layer — currently just its
 * name. (A richer dump of rows/bias/monitor existed only as dead,
 * commented-out code and has been removed.)
 * @see Object#toString()
 * @return string representation of the layer
 */
public String toString() {
    return getLayerName();
}
/**
 * Method to help remove disused references quickly
 * when the layer goes out of scope.
 * NOTE(review): relies on GC finalization, which is deprecated in modern
 * Java; kept for compatibility with the original design.
 * @see Object#finalize()
 * @throws Throwable
 */
public void finalize() throws Throwable {
super.finalize();
LayerName = null;
bias = null;
monitor = null;
if(inputPatternListeners != null) {
inputPatternListeners.clear();
inputPatternListeners = null;
}
if(outputPatternListeners != null) {
outputPatternListeners.clear();
outputPatternListeners = null;
}
}
/**
 * Returns the inspections available for this layer: a single
 * BiasInspection wrapping the bias matrix.
 *
 * @return a collection holding one BiasInspection
 */
public Collection Inspections() {
    Collection inspections = new ArrayList();
    inspections.add(new BiasInspection(bias));
    return inspections;
}
/**
 * Title shown by the inspection UI for this layer.
 *
 * @return the layer name
 */
public String InspectableTitle() {
    return getLayerName();
}
/**
* Determine whether this layer has an input synapse attached
* that is a step counter.
* @return whether it is a step counter.
*/
public boolean hasStepCounter() {
    // Scan the attached input synapses for one acting as a step counter.
    Vector attached = getAllInputs();
    if (attached == null) {
        return false;
    }
    int total = attached.size();
    for (int i = 0; i < total; ++i) {
        Object candidate = attached.elementAt(i);
        if (candidate instanceof InputSynapse
                && ((InputSynapse) candidate).isStepCounter()) {
            return true;
        }
    }
    return false;
}
/**
* Determine whether this is an input layer.
* @return whether this is an input layer
*/
public boolean isInputLayer() {
    // A layer counts as an input layer when checkInputs accepts the set
    // of currently attached input listeners.
    return checkInputs(getAllInputs());
}
/**
* Determine whether ther are any stream input synapses attached.
* @param inputListeners Vector to check.
* @return whether there are any attached StreamInputSynapses
*/
protected boolean checkInputs(Vector inputListeners) {
    // Having no listeners at all also qualifies as an input layer.
    if (inputListeners == null || inputListeners.isEmpty()) {
        return true;
    }
    // Otherwise at least one StreamInputSynapse must be attached.
    for (int i = 0; i < inputListeners.size(); ++i) {
        if (inputListeners.elementAt(i) instanceof StreamInputSynapse) {
            return true;
        }
    }
    return false;
}
/**
* Determine whether this is an output layer.
* @return whether this is an output layer
*/
public boolean isOutputLayer() {
    // A layer counts as an output layer when checkOutputs accepts the
    // set of currently attached output listeners.
    return checkOutputs(getAllOutputs());
}
/**
* Determine whether ther are any stream output or teach synapses attached.
* Also checks the attached listeners of OutputSwitchSynapses.
* Also checks for loopback condition.
* All connected synapses must be of this type.
* @param outputListeners Vector to check.
* @return whether there are any attached StreamOutputSynapses
*/
protected boolean checkOutputs(Vector outputListeners) {
    // Tracks whether at least one terminal listener was found.
    boolean lastListener = false;
    // Having no listeners at all also qualifies as an output layer.
    if (outputListeners == null || outputListeners.size() == 0) {
        return true;
    }
    for (int x = 0; x < outputListeners.size(); ++x) {
        // Stream/teaching synapses are terminal: mark and continue.
        if ((outputListeners.elementAt(x) instanceof StreamOutputSynapse)
            || (outputListeners.elementAt(x) instanceof TeachingSynapse)
            || (outputListeners.elementAt(x) instanceof TeacherSynapse))
            lastListener = true;
        else if (outputListeners.elementAt(x) instanceof OutputSwitchSynapse) {
            // A switch is acceptable only when ALL of its own outputs
            // are in turn terminal (checked recursively).
            OutputSwitchSynapse os = (OutputSwitchSynapse) outputListeners.elementAt(x);
            if (checkOutputs(os.getAllOutputs()))
                lastListener = true;
            else
                return false;
        } else if (outputListeners.elementAt(x) instanceof Synapse) {
            // Any other plain synapse is allowed only if it loops back.
            Synapse syn = (Synapse) outputListeners.elementAt(x);
            if (syn.isLoopBack())
                lastListener = true;
            else
                return false;
        }
    }
    return lastListener;
}
/** Returns the appropriate Learner object for this class
* depending on the Monitor.learningMode property value
* @return the Learner object if applicable, otherwise null
* @see org.joone.engine.Learnable#getLearner()
*/
public Learner getLearner() {
    // Non-learnable layers expose no Learner at all; otherwise the
    // Monitor decides which Learner applies (learningMode property).
    return learnable ? getMonitor().getLearner() : null;
}
/**
* Initialize the Learner object of this layer
* @see org.joone.engine.Learnable#initLearner()
*/
public void initLearner() {
    // Cache the Learner and tell it which Learnable it is training.
    myLearner = getLearner();
    if (myLearner == null) {
        return;
    }
    myLearner.registerLearnable(this);
}
/**
* Getter for property myThreadMonitor.
* @return Value of property myThreadMonitor.
*/
protected Object getThreadMonitor() {
    // Lazily creates the lock object used to coordinate the layer's thread.
    // NOTE(review): this lazy init is not itself synchronized, so two
    // threads racing here could observe different monitor objects —
    // confirm this is only reached from a single thread.
    if (myThreadMonitor == null)
        myThreadMonitor = new Object();
    return myThreadMonitor;
}
/** Waits for the current layer's thread to stop
*/
public void join() {
    // Snapshot the field first: this method is not synchronized, so
    // myThread may be nulled concurrently. Working on a local copy
    // removes the check-then-act race the old code papered over by
    // catching NullPointerException.
    Thread t = myThread;
    if (t == null) {
        return;
    }
    try {
        t.join();
    } catch (InterruptedException interrupted) {
        // Preserve the caller's interrupted status instead of swallowing it.
        Thread.currentThread().interrupt();
    }
}
/*********************************************************
 * Implementation code for the single-thread version of Joone
 *********************************************************/

/**
 * This method serves to execute a single forward step
 * when the Layer is called from an external thread.
 */
public void fwdRun(Pattern pattIn) {
    // Executes one forward step when the layer is driven by an external
    // thread (single-thread mode) instead of its own internal thread.
    Pattern patt = new Pattern();
    inps = new double[getRows()];
    running = true;
    if (pattIn == null) {
        // No pattern supplied: pull the input from the attached synapses.
        fireFwdGet();
    } else {
        inps = pattIn.getArray();
    }
    if (running) {
        forward(inps);
        patt.setArray(outs);
        // A count of -1 appears to mark a terminating pattern and is
        // propagated unchanged — TODO confirm against the stream synapses.
        // Otherwise the pattern is stamped with the current step counter.
        if ((pattIn == null) || (pattIn.getCount() != -1)) {
            patt.setCount(step);
        } else {
            patt.setCount(-1);
        }
        fireFwdPut(patt);
    }
    running = false;
}
/**
* This method serves to a single backward step
* when the Layer is called from an external thread
*/
public void revRun(Pattern pattIn) {
    // Executes one backward (gradient) step when the layer is driven by
    // an external thread (single-thread mode).
    Pattern patt = new Pattern();
    gradientInps = new double[getDimension()];
    running = true;
    if (pattIn == null) {
        // No pattern supplied: pull the gradient from the attached synapses.
        fireRevGet();
    } else {
        gradientInps = pattIn.getArray();
    }
    if (running) {
        backward(gradientInps);
        patt.setArray(gradientOuts);
        // The output activations travel with the gradient: some
        // unsupervised synapses need them (see Pattern.outArray).
        patt.setOutArray(outs);
        patt.setCount(step);
        fireRevPut(patt);
    }
    running = false;
}
}
| Java |
/*
* LogarithmicLayer.java
*
* Created on 1 settembre 2002, 21.19
*/
package org.joone.engine;
import org.joone.log.*;
/**
* This layer implements a logarithmic transfer function.
* Used in some NN to avoid to saturate the inputs.
* @author P.Marrone
*/
public class LogarithmicLayer extends SimpleLayer implements LearnableLayer {

    /** Logger definition. */
    private static final ILogger log = LoggerFactory.getLogger(LogarithmicLayer.class);

    private static final long serialVersionUID = -4983197905588348060L;

    /** Creates a new instance of LogarithmicLayer. */
    public LogarithmicLayer() {
        super();
        learnable = true;
    }

    /**
     * Creates a named instance of LogarithmicLayer.
     * @param elemName the name of the layer
     */
    public LogarithmicLayer(String elemName) {
        this();
        this.setLayerName(elemName);
    }

    /**
     * Transfer function to recall a result on a trained net.
     * Applies a sign-symmetric logarithm to the biased input:
     * log(1 + |input|) with the sign of the input preserved.
     * @param pattern double[] - input pattern
     */
    protected void forward(double[] pattern) {
        double myNeuronInput;
        int n = getRows();
        for (int x = 0; x < n; ++x) {
            myNeuronInput = pattern[x] + getBias().value[x][0];
            if (myNeuronInput >= 0)
                outs[x] = Math.log(1 + myNeuronInput);
            else
                outs[x] = -Math.log(1 - myNeuronInput);
        }
    }

    /**
     * Reverse transfer function of the component.
     * Removed the unused locals (dw, absv) of the old implementation.
     * NOTE(review): the derivative is computed from the OUTPUT value,
     * 1/(1 +/- outs[x]), not from the biased input — confirm this
     * approximation is intended before changing it.
     * @param pattern double[] - gradient pattern coming from the next layer
     */
    protected void backward(double[] pattern) {
        super.backward(pattern);
        int n = getRows();
        double deriv;
        for (int x = 0; x < n; ++x) {
            if (outs[x] >= 0)
                deriv = 1 / (1 + outs[x]);
            else
                deriv = 1 / (1 - outs[x]);
            gradientOuts[x] = pattern[x] * deriv;
        }
        myLearner.requestBiasUpdate(gradientOuts);
    }

    /** @deprecated - Used only for backward compatibility
     */
    public Learner getLearner() {
        learnable = true;
        return super.getLearner();
    }
}
| Java |
package org.joone.engine;
public class FullSynapse extends Synapse implements LearnableSynapse
{
private static final long serialVersionUID = 5518898101307425554L;
public FullSynapse() {
super();
learnable = true;
}
protected void backward(double[] pattern) {
int x;
int y;
double s;
int m_rows = getInputDimension();
int m_cols = getOutputDimension();
setLearningRate(getMonitor().getLearningRate());
// Weights adjustement
for (x=0; x < m_rows; ++x) {
s = 0;
for (y=0; y < m_cols; ++y) {
s += pattern[y] * array.value[x][y];
}
bouts[x] = s;
}
myLearner.requestWeightUpdate(pattern, inps);
}
protected void forward(double[] pattern) {
int x;
int y;
double s;
int m_rows = getInputDimension();
int m_cols = getOutputDimension();
for (y=0; y < m_cols; ++y) {
s = 0;
for (x=0; x < m_rows; ++x) {
s += pattern[x] * array.value[x][y];
}
outs[y] = s;
}
}
/**
* setArrays method comment.
*/
protected void setArrays(int rows, int cols) {
inps = new double[rows];
outs = new double[cols];
bouts = new double[rows];
}
protected void setDimensions(int rows, int cols) {
int icols, irows;
int m_rows = getInputDimension();
int m_cols = getOutputDimension();
if (rows == -1)
irows = m_rows;
else
irows = rows;
if (cols == -1)
icols = m_cols;
else
icols = cols;
array = new Matrix(irows, icols);
setArrays(irows, icols);
}
/** @deprecated - Used only for backward compatibility
*/
public Learner getLearner() {
learnable = true;
return super.getLearner();
}
} | Java |
/*
* RbfLayer.java
*
* Created on July 21, 2004, 3:32 PM
*/
package org.joone.engine;
/**
* This is the basis (helper) for radial basis function layers.
*
* @author Boris Jansen
*/
public abstract class RbfLayer extends Layer {

    /** Creates a new instance of RbfLayer. */
    public RbfLayer() {
        super();
    }

    /**
     * Creates a named instance of RbfLayer.
     * @param anElemName The name of the Layer
     */
    public RbfLayer(String anElemName) {
        super(anElemName);
    }

    /**
     * Sizes the output and input-gradient buffers on the neuron count.
     * inps and gradientOuts are deliberately NOT sized here: for an RBF
     * layer they are unrelated to the number of neurons.
     */
    protected void setDimensions() {
        int neurons = getRows();
        outs = new double[neurons];
        gradientInps = new double[neurons];
    }

    /**
     * Adjusts the size when the forwarded pattern differs from the layer.
     * For an RBF layer the incoming pattern may legitimately differ from
     * the neuron count, so unlike Layer we resize the input buffer to the
     * pattern instead of resizing the layer itself.
     * @param aPattern the pattern whose size differs from the layer's
     */
    protected void adjustSizeToFwdPattern(double[] aPattern) {
        inps = new double[aPattern.length];
    }
}
| Java |
package org.joone.engine;
import java.util.ArrayList;
import java.util.Collection;
import org.joone.inspection.implementations.BiasInspection;
import org.joone.log.*;
/** The output of a linear layer neuron is the sum of the weighted input values,
* scaled by the beta parameter. No transfer function is applied to limit the output value
*/
public class LinearLayer extends SimpleLayer {
private double beta = 1;
/**
* Logger
* */
private static final ILogger log = LoggerFactory.getLogger (LinearLayer.class);
private static final long serialVersionUID = 2243109263560495304L;
/** The constructor
*/
public LinearLayer() {
super();
}
/** The constructor
* @param ElemName The name of the Layer
*/
public LinearLayer(String ElemName) {
super(ElemName);
}
public void backward(double[] pattern) {
int x;
int n = getRows();
for (x = 0; x < n; ++x)
gradientOuts[x] = pattern[x] * beta;
}
public void forward(double[] pattern) {
int x;
int n = getRows();
for (x = 0; x < n; ++x)
outs[x] = beta * pattern[x]; // + bias.value[x][0];
}
/** Returns the value of the beta parameter
* @return double - The beta parameter
*/
public double getBeta() {
return beta;
}
/** Sets the beta value
* @param newBeta double
*/
public void setBeta(double newBeta) {
beta = newBeta;
}
/**
* It doesn't make sense to return biases for this layer
* @return null
*/
public Collection Inspections() {
Collection col = new ArrayList();
col.add(new BiasInspection(null));
return col;
}
} | Java |
package org.joone.engine;
import org.joone.exception.JooneRuntimeException;
import org.joone.log.*;
/** The output of a sigmoid layer neuron is the sum of the weighted input values,
* applied to a sigmoid function. This function is expressed mathematically as:
* y = 1 / (1 + e^-x)
* This has the effect of smoothly limiting the output within the range 0 and 1
*
* @see SimpleLayer parent
* @see Layer parent
* @see NeuralLayer implemented interface
*/
public class SigmoidLayer extends SimpleLayer implements LearnableLayer {
private static final ILogger log = LoggerFactory.getLogger(SigmoidLayer.class);
private static final long serialVersionUID = -8700747963164046048L;
/** Constant to overcome the "flat spot" problem. This problem is described in:
* S.E. Fahlman, "An emperical study of learning speed in backpropagation with
* good scaling properties," Dept. Comput. Sci. Carnegie Mellon Univ., Pittsburgh,
* PA, Tech. Rep., CMU-CS-88-162, 1988.
* Setting this constant to 0 (default value), the derivative of the sigmoid function
* is unchanged (normal function). An good value for this constant might be 0.1.
*/
private double flatSpotConstant = 0.0;
/** The constructor
*/
public SigmoidLayer() {
super();
learnable = true;
}
/** The constructor
* @param ElemName The name of the Layer
*/
public SigmoidLayer(java.lang.String ElemName) {
this();
this.setLayerName(ElemName);
}
public void backward(double[] pattern)
throws JooneRuntimeException {
super.backward(pattern);
double dw, absv;
int x;
int n = getRows();
for (x = 0; x < n; ++x) {
gradientOuts[x] = pattern[x] * (outs[x] * (1 - outs[x]) + getFlatSpotConstant());
}
myLearner.requestBiasUpdate(gradientOuts);
}
/**
* This method accepts an array of values in input and forwards it
* according to the Sigmoid propagation pattern.
*
* @param pattern
* @see NeuralLayer#forward (double[])
* @throws JooneRuntimeException This <code>Exception </code> is a wrapper Exception when an Exception is thrown
* while doing the maths.
* */
public void forward(double[] pattern)
throws JooneRuntimeException {
int x = 0;
double in;
int n = getRows();
try {
for ( x = 0; x < n; ++x) {
in = pattern[x] + bias.value[x][0];
outs[x] = 1 / (1 + Math.exp(-in));
}
}catch (Exception aioobe) {
String msg;
log.error( msg = "Exception thrown while processing the element " + x + " of the array. Value is : " + pattern[x]
+ " Exception thrown is " + aioobe.getClass().getName() + ". Message is " + aioobe.getMessage() );
throw new JooneRuntimeException(msg, aioobe);
//aioobe.printStackTrace();
}
}
/** @deprecated - Used only for backward compatibility
*/
public Learner getLearner() {
learnable = true;
return super.getLearner();
}
/**
* Sets the constant to overcome the flat spot problem.
* This problem is described in:
* S.E. Fahlman, "An emperical study of learning speed in backpropagation with
* good scaling properties," Dept. Comput. Sci. Carnegie Mellon Univ., Pittsburgh,
* PA, Tech. Rep., CMU-CS-88-162, 1988.
* Setting this constant to 0 (default value), the derivative of the sigmoid function
* is unchanged (normal function). An good value for this constant might be 0.1.
*
* @param aConstant
*/
public void setFlatSpotConstant(double aConstant) {
flatSpotConstant = aConstant;
}
/**
* Gets the flat spot constant.
*
* @return the flat spot constant.
*/
public double getFlatSpotConstant() {
return flatSpotConstant;
}
} | Java |
package org.joone.engine;
import org.joone.net.NeuralNet;
/**
* Transport class used to notify the events raised from a neural network
*/
public class NeuralNetEvent extends java.util.EventObject {

    private static final long serialVersionUID = -2307998901508765401L;

    // The network that raised the event; per getNeuralNet's warning it is
    // only populated by org.joone.helpers classes and may be null.
    private NeuralNet nnet;

    /**
     * The event constructor
     * @param source The object generating this event. Normally it is the neural network's Monitor
     */
    public NeuralNetEvent(Monitor source) {
        super(source);
    }

    /**
     * Getter for the NeuralNet generating this event.
     * Warning: Use this method ONLY if the event has been raised by
     * an org.joone.helpers class, otherwise you could get a null value.
     * @return The neural network generating this event
     * @since 1.2.2
     */
    public NeuralNet getNeuralNet() {
        return nnet;
    }

    /**
     * Setter for the NeuralNet generating this event.
     * @param nnet The neural network generating this event
     * @since 1.2.2
     */
    public void setNeuralNet(NeuralNet nnet) {
        this.nnet = nnet;
    }
}
/*
* RbfInputSynapse.java
*
* Created on July 21, 2004, 3:58 PM
*/
package org.joone.engine;
/**
* The synapse to the input of a radial basis function layer should't provide a
* single value to every neuron in the output (RBF) layer, as is usual the case.
* It should provide the outputs of all the input neurons as a vector to every
* neuron in the radial basis function layer.
*
* @author Boris Jansen
*/
public class RbfInputSynapse extends Synapse {

    /** Creates a new instance of RbfInputSynapse. */
    public RbfInputSynapse() {
    }

    /**
     * Intentionally a no-op: this synapse performs no back propagation.
     * revGet() is NOT overridden — see the note below for why.
     */
    protected void backward(double[] pattern) {
        // nothing to do
    }

    /*
     * The correct way would be to override revGet() to return null,
     * because this synapse does not perform back propagation. However a
     * bug elsewhere makes the input and desired patterns get out of sync
     * in that case, so as a temporary solution revGet() is NOT overridden
     * and backward() is simply left empty.
     *
     * public Pattern revGet() {
     *     return null;
     * }
     */

    /**
     * Forwards the whole input vector: every neuron of the attached RBF
     * layer receives the complete vector rather than a single component.
     * Note: outs aliases the incoming pattern array (no copy is made).
     */
    protected void forward(double[] pattern) {
        outs = pattern;
    }

    /**
     * Allocates the internal buffers. All three buffers are sized on the
     * row count; the cols argument is ignored here (the output of this
     * synapse is the input vector itself).
     */
    protected void setArrays(int rows, int cols) {
        inps = new double[rows];
        outs = new double[rows];
        bouts = new double[rows];
    }

    /** Resolves -1 to the current dimension, then allocates the buffers. */
    protected void setDimensions(int rows, int cols) {
        int newRows = (rows == -1) ? getInputDimension() : rows;
        int newCols = (cols == -1) ? getOutputDimension() : cols;
        setArrays(newRows, newCols);
    }
}
| Java |
package org.joone.engine;
import java.beans.*;
public class SimpleLayerBeanInfo extends SimpleBeanInfo {

    // NOTE(review): this class is IDE-generated (NetBeans //GEN markers).
    // Code outside the guarded regions may be regenerated by the GUI
    // builder, so only comments have been added here.

    // Bean descriptor//GEN-FIRST:BeanDescriptor
    private static BeanDescriptor beanDescriptor = new BeanDescriptor ( SimpleLayer.class , null );

    private static BeanDescriptor getBdescriptor(){
        return beanDescriptor;
    }

    static {//GEN-HEADEREND:BeanDescriptor
        // Here you can add code for customizing the BeanDescriptor.
    }//GEN-LAST:BeanDescriptor

    // Property identifiers//GEN-FIRST:Properties
    private static final int PROPERTY_allInputs = 0;
    private static final int PROPERTY_allOutputs = 1;
    private static final int PROPERTY_bias = 2;
    private static final int PROPERTY_inputLayer = 3;
    private static final int PROPERTY_layerName = 4;
    private static final int PROPERTY_learner = 5;
    private static final int PROPERTY_monitor = 6;
    private static final int PROPERTY_outputLayer = 7;
    private static final int PROPERTY_rows = 8;

    // Property array
    private static PropertyDescriptor[] properties = new PropertyDescriptor[9];

    private static PropertyDescriptor[] getPdescriptor(){
        return properties;
    }

    static {
        try {
            properties[PROPERTY_allInputs] = new PropertyDescriptor ( "allInputs", SimpleLayer.class, "getAllInputs", "setAllInputs" );
            properties[PROPERTY_allInputs].setExpert ( true );
            properties[PROPERTY_allOutputs] = new PropertyDescriptor ( "allOutputs", SimpleLayer.class, "getAllOutputs", "setAllOutputs" );
            properties[PROPERTY_allOutputs].setExpert ( true );
            properties[PROPERTY_bias] = new PropertyDescriptor ( "bias", SimpleLayer.class, "getBias", "setBias" );
            properties[PROPERTY_bias].setExpert ( true );
            properties[PROPERTY_inputLayer] = new PropertyDescriptor ( "inputLayer", SimpleLayer.class, "isInputLayer", null );
            properties[PROPERTY_inputLayer].setExpert ( true );
            properties[PROPERTY_layerName] = new PropertyDescriptor ( "layerName", SimpleLayer.class, "getLayerName", "setLayerName" );
            properties[PROPERTY_layerName].setDisplayName ( "Name" );
            properties[PROPERTY_learner] = new PropertyDescriptor ( "learner", SimpleLayer.class, "getLearner", null );
            properties[PROPERTY_learner].setExpert ( true );
            properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", SimpleLayer.class, "getMonitor", "setMonitor" );
            properties[PROPERTY_monitor].setExpert ( true );
            properties[PROPERTY_outputLayer] = new PropertyDescriptor ( "outputLayer", SimpleLayer.class, "isOutputLayer", null );
            properties[PROPERTY_outputLayer].setExpert ( true );
            properties[PROPERTY_rows] = new PropertyDescriptor ( "rows", SimpleLayer.class, "getRows", "setRows" );
        }
        // NOTE(review): generated code silently swallows this exception; a
        // failed introspection leaves null slots in the properties array.
        catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
        // Here you can add code for customizing the properties array.
    }//GEN-LAST:Properties

    // EventSet identifiers//GEN-FIRST:Events

    // EventSet array
    private static EventSetDescriptor[] eventSets = new EventSetDescriptor[0];

    private static EventSetDescriptor[] getEdescriptor(){
        return eventSets;
    }

    //GEN-HEADEREND:Events
    // Here you can add code for customizing the event sets array.
    //GEN-LAST:Events

    // Method identifiers//GEN-FIRST:Methods

    // Method array
    private static MethodDescriptor[] methods = new MethodDescriptor[0];

    private static MethodDescriptor[] getMdescriptor(){
        return methods;
    }

    //GEN-HEADEREND:Methods
    // Here you can add code for customizing the methods array.
    //GEN-LAST:Methods

    private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
    private static final int defaultEventIndex = -1;//GEN-END:Idx

    //GEN-FIRST:Superclass
    // Here you can add code for customizing the Superclass BeanInfo.
    //GEN-LAST:Superclass

    /**
     * Gets the bean's <code>BeanDescriptor</code>s.
     *
     * @return BeanDescriptor describing the editable
     * properties of this bean. May return null if the
     * information should be obtained by automatic analysis.
     */
    public BeanDescriptor getBeanDescriptor() {
        return getBdescriptor();
    }

    /**
     * Gets the bean's <code>PropertyDescriptor</code>s.
     *
     * @return An array of PropertyDescriptors describing the editable
     * properties supported by this bean. May return null if the
     * information should be obtained by automatic analysis.
     * <p>
     * If a property is indexed, then its entry in the result array will
     * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
     * A client of getPropertyDescriptors can use "instanceof" to check
     * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
     */
    public PropertyDescriptor[] getPropertyDescriptors() {
        return getPdescriptor();
    }

    /**
     * Gets the bean's <code>EventSetDescriptor</code>s.
     *
     * @return An array of EventSetDescriptors describing the kinds of
     * events fired by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public EventSetDescriptor[] getEventSetDescriptors() {
        return getEdescriptor();
    }

    /**
     * Gets the bean's <code>MethodDescriptor</code>s.
     *
     * @return An array of MethodDescriptors describing the methods
     * implemented by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public MethodDescriptor[] getMethodDescriptors() {
        return getMdescriptor();
    }

    /**
     * A bean may have a "default" property that is the property that will
     * mostly commonly be initially chosen for update by human's who are
     * customizing the bean.
     * @return Index of default property in the PropertyDescriptor array
     * returned by getPropertyDescriptors.
     * <P> Returns -1 if there is no default property.
     */
    public int getDefaultPropertyIndex() {
        return defaultPropertyIndex;
    }

    /**
     * A bean may have a "default" event that is the event that will
     * mostly commonly be used by human's when using the bean.
     * @return Index of default event in the EventSetDescriptor array
     * returned by getEventSetDescriptors.
     * <P> Returns -1 if there is no default event.
     */
    public int getDefaultEventIndex() {
        return defaultEventIndex;
    }
}
| Java |
package org.joone.engine;
/**
* The pattern object contains the data that must be processed from a neural net.
* It can contain the input data or the training data.
*/
public class Pattern implements java.lang.Cloneable, java.io.Serializable
{
    // Sequential number of the pattern; a value of -1 appears to mark a
    // terminating pattern elsewhere in the engine — TODO confirm.
    private int count;

    // The data carried by this pattern.
    private double[] array;

    /* outArray: used for some unsupervised learning algorithm, where the
     * receiving synapse needs to know the activation of the output
     * connected layer.
     * Added on Aug 22, 2002 by P.Marrone
     */
    private double[] outArray;

    private static final long serialVersionUID = -609786590797838450L;

    /**
     * Default Constructor
     * Added for Save As XML
     */
    public Pattern() {
        super();
    }

    /**
     * Builds a pattern around the given data.
     * Note: the array is stored by reference, not copied.
     * @param arr the data carried by this pattern
     */
    public Pattern(double[] arr)
    {
        super();
        array = arr;
    }

    /**
     * Returns a deep copy of this pattern.
     * Fixed: no longer throws NullPointerException when the data array was
     * never set (e.g. a pattern built via the default constructor).
     */
    public synchronized Object clone()
    {
        Pattern cPat = new Pattern(array == null ? null : (double[]) array.clone());
        if (outArray != null)
            cPat.setOutArray((double[]) this.outArray.clone());
        cPat.count = this.count;
        return cPat;
    }

    /**
     * Returns the data array.
     * NOTE(review): this exposes the internal array without a copy while
     * setArray defensively clones — confirm callers rely on the alias.
     */
    public synchronized double[] getArray()
    {
        return array;
    }

    public synchronized int getCount()
    {
        return count;
    }

    /** Stores a defensive copy of the given data array. */
    public synchronized void setArray(double[] arr)
    {
        array = (double[])arr.clone();
    }

    public synchronized void setCount(int n)
    {
        count = n;
    }

    /** Overwrites a single element of the data array. */
    public void setValue(int point, double value) {
        array[point] = value;
    }

    /** Getter for property outArray.
     * Fixed: returns null instead of throwing NullPointerException when
     * outArray was never set (clone() already treats null as legal).
     * @return a defensive copy of outArray, or null if never set.
     */
    public double[] getOutArray() {
        if (outArray == null)
            return null;
        return (double[])this.outArray.clone();
    }

    /** Setter for property outArray.
     * @param outArray New value of property outArray (defensively copied).
     */
    public void setOutArray(double[] outArray) {
        this.outArray = (double[])outArray.clone();
    }

    /**
     * Getter for property values.
     * Added for XML serialization
     * @return Value of property values.
     */
    public double[] getValues() {
        return this.array;
    }

    /**
     * Setter for property values.
     * Added for XML serialization
     * @param values New value of property values.
     */
    public void setValues(double[] values) {
        this.array = values;
    }
}
/*
* ContextLayer.java
*
* Created on 20 settembre 2002, 14.59
*/
package org.joone.engine;
import java.util.ArrayList;
import java.util.Collection;
import org.joone.inspection.implementations.BiasInspection;
/**
* The context layer is similar to the linear layer except that
* it has an auto-recurrent connection between its output and input.
*
* @author P.Marrone
*/
public class ContextLayer extends SimpleLayer {

    /** Output scale factor. */
    private double beta = 1;

    /** Weight of the recurrent (previous output) contribution. */
    private double timeConstant = 0.5;

    private static final long serialVersionUID = -8773800970295287404L;

    /** Builds a context layer. */
    public ContextLayer() {
        super();
    }

    /**
     * Builds a named context layer.
     * @param name the name of the layer
     */
    public ContextLayer(java.lang.String name) {
        super(name);
    }

    /** Scales the incoming gradient by beta. */
    public void backward(double[] pattern) {
        int rows = getRows();
        for (int i = 0; i < rows; ++i) {
            gradientOuts[i] = pattern[i] * beta;
        }
    }

    /**
     * Forward step: like a linear layer, but the previous output value is
     * fed back into the new one, weighted by the time constant
     * (auto-recurrent connection).
     */
    public void forward(double[] pattern) {
        int rows = getRows();
        for (int i = 0; i < rows; ++i) {
            outs[i] = beta * (pattern[i] + (timeConstant * outs[i]));
        }
    }

    /** Getter for property beta.
     * @return Value of property beta.
     */
    public double getBeta() {
        return beta;
    }

    /** Setter for property beta.
     * @param beta New value of property beta.
     */
    public void setBeta(double beta) {
        this.beta = beta;
    }

    /** Getter for property timeConstant.
     * @return Value of property timeConstant.
     */
    public double getTimeConstant() {
        return timeConstant;
    }

    /** Setter for property timeConstant.
     * @param timeConstant New value of property timeConstant.
     */
    public void setTimeConstant(double timeConstant) {
        this.timeConstant = timeConstant;
    }

    /**
     * Biases are meaningless for this layer, so a null bias inspection
     * is returned.
     */
    public Collection Inspections() {
        Collection inspections = new ArrayList();
        inspections.add(new BiasInspection(null));
        return inspections;
    }
}
| Java |
package org.joone.engine;
import org.joone.net.NetCheck;
import org.joone.net.NeuralNet;
import java.io.*;
import java.util.*;
import org.joone.log.*;
/**
* The Monitor object is the controller of the behavior of the neural net.
* It controls the start/stop actions and permits to set the parameters of the net
* (learning rate, momentum, ecc.).
* Each component of the neural net (Layers and Synapses) are connected to a monitor object
* (the monitor can be different or the same for the all components).
*/
public class Monitor implements Serializable {
private static final long serialVersionUID = 2909501813894146845L;
private int preLearning = 0;
private boolean learning = false;
private int currentCicle;
private int run = 0;
private int saveCurrentCicle;
private int saveRun;
// Starting parameters
private int patterns; // Training patterns
private int validationPatterns; // Validation patterns
private int totCicles;
private double learningRate;
private double momentum;
private double globalError;
private int batchSize = 0;
/** Use RMSE (if true) for back propagation, MSE (if false) otherwise. */
private boolean useRMSE = true;
/** The learner factory. If set this factory provides synapses and layers
* with learners. */
private LearnerFactory theLearnerFactory = null;
/**
* @label parent
*/
private Monitor parent;
/* No removable listeners. They cannot be removed on the removeAllListeners call.
* This is useful to avoid that permanent internal listeners are removed
* when the neural network is cloned.
*/
private transient Vector internalListeners = new Vector();
private transient Vector netListeners = new Vector();
private transient boolean firstTime = true;
private transient boolean exporting = false;
private transient boolean validation = false;
private transient boolean running = false;
/** The next flag indicates if training data should be used for validation (true), or not (false).
* The default is false. */
private transient boolean trainingDataForValidation = false;
private static final ILogger log = LoggerFactory.getLogger( Monitor.class );
private boolean supervisioned = false;
private boolean singleThreadMode = true;
public int learningMode = 0;
private List learners; // Container of the available Learners
private Hashtable params;
/**
* This is the default Constructor.
*/
public Monitor() {
    // A standalone monitor: no parent, fresh listener containers.
    parent = null;
    firstTime = true;
    internalListeners = new Vector();
    netListeners = new Vector();
}
/**
* adds a neural net event listener the Monitor
* @param l NeuralNetListener
*/
public void addNeuralNetListener(NeuralNetListener l) {
    // Listeners added through this overload are removable by default.
    addNeuralNetListener(l, true);
}
/** adds a neural net event listener to the Monitor
* @param l the new NeuralNetListener
* @param removable true if the added listener can be removed by the removeAllListeners method call
*/
public synchronized void addNeuralNetListener(NeuralNetListener l, boolean removable) {
    // Nested monitors delegate to their parent.
    if (parent != null) {
        parent.addNeuralNetListener(l, removable);
        return;
    }
    // Ignore duplicate registrations.
    if (getListeners().contains(l)) {
        return;
    }
    netListeners.addElement(l);
    // Non-removable listeners are tracked separately so that a
    // removeAllListeners call cannot detach them.
    if (!removable && !getNoDetachListeners().contains(l)) {
        getNoDetachListeners().addElement(l);
    }
}
private Vector getNoDetachListeners() {
    // Lazy init: the field is transient and may be null after deserialization.
    if (internalListeners == null) {
        internalListeners = new Vector();
    }
    return internalListeners;
}
private Vector getListeners() {
    // Lazy init: the field is transient and may be null after deserialization.
    if (netListeners == null) {
        netListeners = new Vector();
    }
    return netListeners;
}
/** Invoked when an epoch finishes
*/
/** Invoked when an epoch finishes.
 * Notifies every registered listener with a fresh NeuralNetEvent.
 */
public void fireCicleTerminated() {
    if (parent != null) {
        parent.fireCicleTerminated();
        return;
    }
    if (getListeners().size() == 0) {
        return;
    }
    Object[] list;
    synchronized (this) {
        list = getListeners().toArray();
    }
    NeuralNetEvent event = new NeuralNetEvent(this);
    // Fixed: iterate over the snapshot's own length instead of a size
    // captured BEFORE the synchronized copy — a listener removed in the
    // meantime made the old loop index past the end of the array.
    // (The sibling fire* methods already use list.length.)
    for (int i = 0; i < list.length; ++i) {
        NeuralNetListener listener = (NeuralNetListener) list[i];
        listener.cicleTerminated(event);
    }
}
/** Invoked when the net starts
*/
/** Invoked when the net starts; notifies all registered listeners. */
public void fireNetStarted() {
    if (parent != null) {
        parent.fireNetStarted();
        return;
    }
    if (getListeners().size() == 0) {
        return;
    }
    // Work on a snapshot so listeners may (de)register during delivery.
    Object[] snapshot;
    synchronized (this) {
        snapshot = getListeners().toArray();
    }
    NeuralNetEvent event = new NeuralNetEvent(this);
    for (int i = 0; i < snapshot.length; ++i) {
        ((NeuralNetListener) snapshot[i]).netStarted(event);
    }
}
/** Invoked when all the epochs finish
*/
/** Invoked when all the epochs finish; notifies all registered listeners. */
public void fireNetStopped() {
    if (parent != null) {
        parent.fireNetStopped();
        return;
    }
    if (getListeners().size() == 0) {
        return;
    }
    // Work on a snapshot so listeners may (de)register during delivery.
    Object[] snapshot;
    synchronized (this) {
        snapshot = getListeners().toArray();
    }
    NeuralNetEvent event = new NeuralNetEvent(this);
    for (int i = 0; i < snapshot.length; ++i) {
        ((NeuralNetListener) snapshot[i]).netStopped(event);
    }
}
/** Invoked when an error occurs
* @param errMsg the thrown error message
*/
/** Invoked when an error occurs; notifies all listeners and logs the
 * state of the net at the moment of the failure.
 * @param errMsg the thrown error message
 */
public void fireNetStoppedError(String errMsg) {
    if (parent != null) {
        parent.fireNetStoppedError(errMsg);
        return;
    }
    if (getListeners().size() == 0) {
        return;
    }
    // Work on a snapshot so listeners may (de)register during delivery.
    Object[] snapshot;
    synchronized (this) {
        snapshot = getListeners().toArray();
    }
    NeuralNetEvent event = new NeuralNetEvent(this);
    for (int i = 0; i < snapshot.length; ++i) {
        ((NeuralNetListener) snapshot[i]).netStoppedError(event,errMsg);
    }
    if (running) {
        // Trace the run-time state for post-mortem diagnosis.
        log.error("Neural net stopped due to the following error: "+errMsg);
        log.debug("\tepoch:"+currentCicle);
        log.debug("\tcycle:"+run);
        log.debug("\tlearning:"+isLearning());
        log.debug("\tvalidation:"+isValidation());
        log.debug("\ttrainingPatterns:"+getTrainingPatterns());
        log.debug("\tvalidationPatterns:"+getValidationPatterns());
    }
}
/** Invoked when the GlobalError changes
*/
/** Invoked when the GlobalError changes; notifies all listeners. */
public void fireErrorChanged() {
    if (parent != null) {
        parent.fireErrorChanged();
        return;
    }
    if (getListeners().size() == 0) {
        return;
    }
    // Work on a snapshot so listeners may (de)register during delivery.
    Object[] snapshot;
    synchronized (this) {
        snapshot = getListeners().toArray();
    }
    NeuralNetEvent event = new NeuralNetEvent(this);
    for (int i = 0; i < snapshot.length; ++i) {
        ((NeuralNetListener) snapshot[i]).errorChanged(event);
    }
}
/** Returns the current epoch
* @return int
*/
public synchronized int getCurrentCicle() {
    // Nested monitors delegate to their parent.
    return (parent != null) ? parent.getCurrentCicle() : currentCicle;
}
/** Returns the actual (R)MSE of the NN
* @return double
*/
public double getGlobalError() {
    // Nested monitors delegate to their parent.
    return (parent != null) ? parent.getGlobalError() : globalError;
}
/** Returns the learning rate
* @return double
*/
public synchronized double getLearningRate() {
if (parent != null)
return parent.getLearningRate();
else
return learningRate;
}
/** Returns the momentum
* @return double
*/
public double getMomentum() {
if (parent != null)
return parent.getMomentum();
else
return momentum;
}
/** Returns the number of the input training patterns
* @return int
*/
public int getTrainingPatterns() {
if (parent != null)
return parent.getTrainingPatterns();
else
return patterns;
}
/** Sets the number of the input training patterns
* @param newPatterns int
*/
public void setTrainingPatterns(int newPatterns) {
if (parent != null)
parent.setTrainingPatterns(newPatterns);
else
patterns = newPatterns;
}
/** Returns the initial ignored input patterns (during the training phase)
* @return int
*/
public int getPreLearning() {
if (parent != null)
return parent.getPreLearning();
else
return preLearning;
}
/** Returns the actual elaborated pattern
* @return int
*/
public synchronized int getStep() {
if (parent != null)
return parent.getStep();
else
return run;
}
/** Returns the total number of epochs
* @return int
*/
public int getTotCicles() {
if (parent != null)
return parent.getTotCicles();
else
return totCicles;
}
/**
 * Runs the neural net in multi-thread mode.
 * WARNING: AVOID to invoke this method. Use instead NeuralNet.go()
 *
 * @see org.joone.net.NeuralNet.go()
 */
public synchronized void Go() {
if (parent != null)
parent.Go();
else {
setSingleThreadMode(false);
// Prime the counters for a full run: 'run' counts the patterns left in
// the current cycle, 'currentCicle' the epochs left.
run = getNumOfPatterns();
currentCicle = totCicles;
firstTime = false;
running = true;
// Wake any thread blocked in nextStep()'s wait().
notifyAll();
}
}
/**
 * Returns TRUE if the net is into a learning phase
 * @return boolean
 */
public boolean isLearning() {
    return learning;
}

/** Returns the phase of the net (learning or not) for the current pattern.
 * When a parent Monitor is set, both the parent and this Monitor must be in
 * a learning phase; otherwise the first {@code preLearning} patterns are
 * never learned.
 * @param num the pattern requested
 * @return boolean
 */
public boolean isLearningCicle(int num) {
    if (parent != null) {
        // Use logical && instead of bitwise & on booleans: same result for
        // already-evaluated operands, but it states the intent clearly.
        return parent.isLearningCicle(num) && isLearning();
    }
    // Patterns up to (and including) preLearning are skipped during training.
    return (num > preLearning) && isLearning();
}
/** Resets the pattern counter of the current cycle to zero. */
public synchronized void resetCycle() {
run = 0;
}
/** Returns if the next pattern must be elaborated.
 * Decrements the pattern counter; when a cycle is exhausted it fires the
 * cycle-terminated event and starts the next epoch, and when all epochs are
 * done it signals net-stop and returns false. On the very first call (or
 * after deserialization) it blocks until Go() or runAgain() wakes it up.
 * @return boolean true if there is another pattern to process
 */
public synchronized boolean nextStep() {
if (parent != null)
return parent.nextStep();
else {
while (run == 0) {
try {
if (!firstTime) {
if (currentCicle > 0) {
fireCicleTerminated();
--currentCicle;
if(currentCicle < 0) {
// currentCicle might be smaller than 0 here if someone
// calls Monitor.Stop in a cicleTerminated() method (which
// is called by the fireCicleTerminated() method)
currentCicle = 0;
}
run = getNumOfPatterns();
}
if (currentCicle == 0) {
// All epochs consumed: terminate the run.
running = false;
if (!this.isSupervised() || (!this.isLearning() && !this.isValidation()))
new NetStoppedEventNotifier(this).start();
if (saveRun == 0) {
// Nothing was saved by Stop(): prime the resume point
// so runAgain() restarts a full run.
saveRun = getNumOfPatterns();
saveCurrentCicle = totCicles;
}
run = 0;
firstTime = true;
return false;
//wait();
}
} else
/* If it goes here, it means that this method
 * has been called first to call Go() or runAgain() */
wait();
} catch (InterruptedException e) {
//e.printStackTrace();
run = 0;
firstTime = true;
return false;
}
}
// First pattern of the first epoch: announce the net start.
if ((run == getNumOfPatterns()) && (currentCicle == totCicles))
fireNetStarted();
if (run > 0)
--run;
return true;
}
}
/** Restores transient behavior after deserialization: the monitor acts as
 * freshly created, so nextStep() will wait for Go()/runAgain().
 * @return this instance
 */
protected Object readResolve() {
firstTime = true;
return this;
}
/** Removes a listener
 * @param l the listener to be removed
 */
public synchronized void removeNeuralNetListener(NeuralNetListener l) {
if (parent != null)
parent.removeNeuralNetListener(l);
else {
// Remove from both the active list and the permanent (no-detach) list.
getListeners().removeElement(l);
getNoDetachListeners().removeElement(l);
}
}
/**
 * Let continue the net.
 */
public synchronized void runAgain() {
if (parent != null)
parent.runAgain();
else {
run = getNumOfPatterns(); // old: run = saveRun;
// Resume from the epoch saved by Stop().
currentCicle = saveCurrentCicle;
firstTime = false;
running = true;
// Wake any thread blocked in nextStep()'s wait().
notifyAll();
}
}
/** Not used
 * @param newCurrentCicle int
 */
public void setCurrentCicle(int newCurrentCicle) {
    if (parent == null) {
        currentCicle = newCurrentCicle;
    } else {
        parent.setCurrentCicle(newCurrentCicle);
    }
}

/** Sets the actual error of the NN and notifies the listeners.
 * @param newGlobalError double
 */
public void setGlobalError(double newGlobalError) {
    if (parent != null) {
        parent.setGlobalError(newGlobalError);
        return;
    }
    globalError = newGlobalError;
    this.fireErrorChanged();
}

/** Sets the phase of the neural network: learning (true) or recall (false).
 * @param newLearning boolean
 */
public void setLearning(boolean newLearning) {
    learning = newLearning;
}

/** Sets the learning rate.
 * @param newLearningRate double
 */
public void setLearningRate(double newLearningRate) {
    if (parent == null) {
        learningRate = newLearningRate;
    } else {
        parent.setLearningRate(newLearningRate);
    }
}

/** Sets the momentum.
 * @param newMomentum double
 */
public void setMomentum(double newMomentum) {
    if (parent == null) {
        momentum = newMomentum;
    } else {
        parent.setMomentum(newMomentum);
    }
}

/** Sets the initial ignored input patterns (during the training phase).
 * @param newPreLearning int
 */
public void setPreLearning(int newPreLearning) {
    if (parent == null) {
        preLearning = newPreLearning;
    } else {
        parent.setPreLearning(newPreLearning);
    }
}

/** Sets the total number of epochs.
 * @param newTotCicles int
 */
public void setTotCicles(int newTotCicles) {
    if (parent == null) {
        totCicles = newTotCicles;
    } else {
        parent.setTotCicles(newTotCicles);
    }
}
/** Stops the NN when running in multi-thread mode.
 * WARNING: DO NOT INVOKE directly, use instead NeuralNet.stop()
 *
 * @see org.joone.net.NeuralNet.stop()
 */
public synchronized void Stop() {
if (parent != null)
parent.Stop();
else {
// Save the position so runAgain() can resume from the same epoch.
saveRun = run;
saveCurrentCicle = currentCicle;
// Zeroing both counters makes nextStep() terminate the current run.
run = 0;
currentCicle = 0;
}
}
/** Getter for property exporting.
 * @return Value of property exporting.
 */
public boolean isExporting() {
    return (parent != null) ? parent.isExporting() : exporting;
}

/** Setter for property exporting.
 * @param exporting New value of property exporting.
 */
public void setExporting(boolean exporting) {
    if (parent == null) {
        this.exporting = exporting;
    } else {
        parent.setExporting(exporting);
    }
}

/**
 * Needed for XML saving
 * @return the current value of the internal step counter
 */
public int getRun() {
    return run;
}

/** Getter for property validation.
 * @return Value of property validation.
 */
public boolean isValidation() {
    return (parent != null) ? parent.isValidation() : validation;
}

/** Setter for property validation.
 * @param validation New value of property validation.
 */
public void setValidation(boolean validation) {
    if (parent == null) {
        this.validation = validation;
    } else {
        parent.setValidation(validation);
    }
}

/**
 * Getter for the property trainingDataForValidation, i.e. should
 * the training data be used for validation.
 *
 * @return true if the training data should be used, false otherwise.
 */
public boolean isTrainingDataForValidation() {
    return (parent != null)
        ? parent.isTrainingDataForValidation()
        : trainingDataForValidation;
}

/**
 * Setter for the property trainingDataForValidation.
 *
 * @param aMode true if the training data should be used for validation,
 * false otherwise.
 */
public void setTrainingDataForValidation(boolean aMode) {
    if (parent == null) {
        trainingDataForValidation = aMode;
    } else {
        parent.setTrainingDataForValidation(aMode);
    }
}
/** Removes all the NeuralNetListeners. Removes only the listeners marked as removable;
 * the permanent (no-detach) listeners are retained.
 * @see addNeuralNetListener
 */
public void removeAllListeners() {
    if (parent != null) {
        parent.removeAllListeners();
        return;
    }
    netListeners = (internalListeners == null)
        ? null
        : (Vector) internalListeners.clone();
}
/** Getter for property parent.
 * @return Value of property parent.
 */
public Monitor getParent() {
    return this.parent;
}

/** Setter for property parent.
 * @param parent New value of property parent.
 */
public void setParent(Monitor parent) {
    this.parent = parent;
}

/** Returns the number of the input validation patterns.
 * @return int
 */
public int getValidationPatterns() {
    return (parent != null) ? parent.getValidationPatterns() : validationPatterns;
}

/** Sets the number of the input validation patterns.
 * @param newPatterns int
 */
public void setValidationPatterns(int newPatterns) {
    if (parent == null) {
        validationPatterns = newPatterns;
    } else {
        parent.setValidationPatterns(newPatterns);
    }
}
/** Returns the number of patterns of the active data set: the validation
 * patterns when validating on a separate set, the training patterns otherwise.
 * @return int the number of patterns for the current phase
 */
public int getNumOfPatterns() {
    if (parent != null)
        return parent.getNumOfPatterns();
    boolean useValidationSet = isValidation() && !isTrainingDataForValidation();
    return useValidationSet ? validationPatterns : patterns;
}
/** Validates the Monitor's configuration and collects any fatal problems.
 * @return a TreeSet of NetCheck objects describing the problems found
 */
public TreeSet check() {
    TreeSet checks = new TreeSet();
    if (getLearningRate() <= 0 && isLearning()) {
        checks.add(new NetCheck(NetCheck.FATAL, "Learning Rate must be greater than zero.", this));
    }
    if (isValidation() && getValidationPatterns() <= 0) {
        checks.add(new NetCheck(NetCheck.FATAL, "Validation Patterns not set.", this));
    }
    // Training patterns are required both when learning and when running in
    // plain recall mode (i.e. not validating). The two original conditions
    // added the same FATAL message twice; they are merged here.
    if ((isLearning() || !isValidation()) && getTrainingPatterns() <= 0) {
        checks.add(new NetCheck(NetCheck.FATAL, "Training Patterns not set.", this));
    }
    if (getTotCicles() <= 0) {
        checks.add(new NetCheck(NetCheck.FATAL, "TotCicles (epochs) not set.", this));
    }
    return checks;
}
/**
 * Getter for property supervised.
 * @return Value of property supervised.
 */
public boolean isSupervised() {
    return (parent != null) ? parent.isSupervised() : supervisioned;
}

/**
 * Setter for property supervised. (default = false)
 * @param supervised New value of property supervised.
 */
public void setSupervised(boolean supervised) {
    if (parent == null) {
        this.supervisioned = supervised;
    } else {
        parent.setSupervised(supervised);
    }
}
/** Getter for the property BatchSize
 * @return the size (# of cycles) of the batch mode
 */
public int getBatchSize() {
    // BUG FIX: this method previously called itself (getBatchSize()) when a
    // parent was set, causing infinite recursion / StackOverflowError.
    // Delegate to the parent like every other property accessor does.
    if (parent != null)
        return parent.getBatchSize();
    else
        return batchSize;
}
/** Setter for the property BatchSize
 * @param i the size (# of cycles) of the batch mode
 */
public void setBatchSize(int i) {
    if (parent != null)
        parent.setBatchSize(i);
    else
        batchSize = i;
}
/**
 * Getter for property learningMode.
 * The learningMode determines the kind of learning algorithm applied to the
 * neural network.
 * Note: unlike most other properties in this class, this one is NOT
 * delegated to the parent Monitor.
 * @return Value of property learningMode.
 * @see getLearners()
 */
public int getLearningMode() {
return learningMode;
}
/**
 * Setter for property learningMode.
 * @param learningMode New value of property learningMode.
 */
public void setLearningMode(int learningMode) {
this.learningMode = learningMode;
}
/** Getter for the Learner declared at position 'index'.
 * Instantiates by reflection the Learner class name registered at 'index'
 * (see addLearner); falls back to a default BasicLearner when the index is
 * out of range or the class cannot be instantiated.
 * @param index the index of the Learner to get
 * @return the Learner at 'index' position, never null
 * @see getLearners()
 */
public Learner getLearner(int index) {
Learner myLearner = null;
if(index < getLearners().size() && index >= 0) {
String myClassName = (String)getLearners().get(index);
try {
Class myClass = Class.forName(myClassName);
myLearner = (Learner)myClass.newInstance();
} catch (ClassNotFoundException cnfe) {
log.error("Class " + myClassName + " not found");
} catch (InstantiationException ie) {
log.error("Error instantiating the class " + myClassName);
} catch (IllegalAccessException iae) {
log.error("Illegal access instantiating the class " + myClassName);
}
}
if(myLearner == null) { // set default learner
// log.warn("No learner is set, use default (basic) learner.");
myLearner = new BasicLearner();
}
// Every returned Learner is bound to this Monitor before being handed out.
myLearner.setMonitor(this);
return myLearner;
}
/**
 * Gets a learner for a synapse or layer. A configured LearnerFactory takes
 * precedence; otherwise the learner registered for the current learningMode
 * is instantiated.
 *
 * @return a learner for a synapse or layer.
 */
public Learner getLearner() {
    Learner result = null;
    if (theLearnerFactory != null) {
        result = theLearnerFactory.getLearner(this);
        result.setMonitor(this);
    }
    return (result != null) ? result : getLearner(getLearningMode());
}
/**
 * Getter for property learners. Lazily creates the backing list on first use.
 * @return Value of property learners, never null.
 */
protected java.util.List getLearners() {
    if (learners == null) {
        learners = new ArrayList(10);
    }
    return learners;
}

/**
 * Setter for property learners.
 * Used to set all the Learner objects used by this NN.
 * @param learners New value of property learners.
 */
protected void setLearners(java.util.List learners) {
    this.learners = learners;
}
/**
 * Adds a new Learner to the Neural Network, unless it is already registered.
 * Usage:
 * Monitor.addLearner(0, "org.joone.engine.BasicLearner");
 * Monitor.addLearner(1, "org.joone.engine.BatchLearner");
 * Monitor.addLearner(2, "org.joone.engine.RpropLearner");
 * @param i the index of the new Learner
 * @param learner a String containing the class name of the Learner to add
 */
public void addLearner(int i, String learner) {
    java.util.List myLearners = getLearners();
    if (!myLearners.contains(learner)) {
        myLearners.add(i, learner);
    }
}
/** Gets a custom parameter from the Monitor.
 * The user is free to use the custom parameters as s/he wants.
 * They are useful to store a whatever value that could be used by the nnet's components.
 * It has been introduced to set the parameters of new added Learners.
 * @param key The searched key
 * @return The value of the parameter if found, otherwise null
 */
public Object getParam(String key) {
    return (params == null) ? null : params.get(key);
}
/** Sets a custom parameter of the Monitor.
 * The user is free to use the custom parameters as s/he wants.
 * They are useful to store a whatever value that could be used by the nnet's components.
 * It has been introduced to set the parameters of new added Learners.
 * @param key The key of the param
 * @param obj The value of the param
 */
public void setParam(String key, Object obj) {
    if (params == null)
        params = new Hashtable();
    // Hashtable.put already replaces any existing mapping for the key, so
    // the former containsKey()/remove() pre-step was redundant (and left a
    // window in which the key was briefly absent).
    params.put(key, obj);
}
/** Return all the keys of the parameters contained in this Monitor.
 * @return An array of Strings containing all the keys if found, otherwise null
 */
public String[] getKeys() {
    if (params == null)
        return null;
    String[] result = new String[params.size()];
    Enumeration keyEnum = params.keys();
    int pos = 0;
    while (keyEnum.hasMoreElements()) {
        result[pos++] = (String) keyEnum.nextElement();
    }
    return result;
}
/**
 * Uses the RMSE when set to true. Uses MSE when set to false.
 * Note: this flag is stored locally and is not delegated to the parent.
 *
 * @param aMode the mode to set true for RMSE, false for MSE.
 */
public void setUseRMSE(boolean aMode) {
useRMSE = aMode;
}
/**
 * Checks if we should use RMSE (true) or MSE false.
 *
 * @return true if we should use RMSE, false if we should use MSE.
 */
public boolean isUseRMSE() {
return useRMSE;
}
/**
 * Set learner factory. When set, getLearner() asks the factory first and
 * only falls back to the learningMode-based lookup if the factory yields null.
 *
 * @param aFactory the learner factory to set.
 */
public void setLearnerFactory(LearnerFactory aFactory) {
theLearnerFactory = aFactory;
}
/** Returns whether the net runs in single-thread mode.
 * @return boolean the current single-thread flag
 */
public boolean isSingleThreadMode() {
    return (parent != null) ? parent.isSingleThreadMode() : singleThreadMode;
}

/** Sets the single-thread mode flag.
 * @param singleThreadMode the new value of the flag
 */
public void setSingleThreadMode(boolean singleThreadMode) {
    if (parent == null) {
        this.singleThreadMode = singleThreadMode;
    } else {
        parent.setSingleThreadMode(singleThreadMode);
    }
}
} | Java |
/*
* ConvergenceEvent.java
*
* Created on October 28, 2004, 3:13 PM
*/
package org.joone.engine.listeners;
import org.joone.engine.Monitor;
/**
* This event will be generated whenever convergence has reached according to
* some criteria.
*
* @author Boris Jansen
*/
public class ConvergenceEvent extends java.util.EventObject {
/**
 * Creates a new instance of ConvergenceEvent.
 *
 * @param aSource the Monitor that caused this event; exposed to listeners
 * via {@link java.util.EventObject#getSource()}.
 */
public ConvergenceEvent(Monitor aSource) {
super(aSource);
}
// NOTE(review): EventObject is Serializable and no serialVersionUID is
// declared here, although other classes in this codebase declare one —
// confirm whether serialization compatibility matters for events.
}
| Java |
/*
* ErrorBasedConvergenceObserver.java
*
* Created on October 28, 2004, 3:06 PM
*/
package org.joone.engine.listeners;
import org.joone.engine.*;
/**
* This observer observes if the network has convergenced based on the
* sequence of training errors.
*
* @author Boris Jansen
*/
public class ErrorBasedConvergenceObserver extends ConvergenceObserver {
/** Whenever each training error in a sequence of errors decreases less than
 * this percentage, a <code>ConvergenceEvent</code> will be generated.
 * A negative value (the default, -1) disables the observer. */
private double percentage = -1;
/** Sets the size of the cycles (sequence). Whenever the training error of a neural network
 * is smaller than a certain percentage for this number of epochs, the network is considered
 * as converged. */
private int cycles = 5; // default
/** Counter to check how many cycles the error decreases less than <code>percentage</code>. */
private int cycleCounter = 0;
/** Variable to remember the previous error; -1 means "not yet observed". */
private double lastError = -1;
/** Creates a new instance of ErrorBasedConvergenceObserver */
public ErrorBasedConvergenceObserver() {
}
/**
 * Sets the percentage. Whenever a neural network's training error doesn't decrease more
 * than this percentage for a couple of steps in a sequence of training errors, the network
 * is considered as converged.
 *
 * @param aPercentage the percentage to set.
 */
public void setPercentage(double aPercentage) {
percentage = aPercentage;
}
/**
 * Gets the percentage.
 *
 * @return the percentage.
 */
public double getPercentage() {
return percentage;
}
/**
 * Sets the number of cycles. Whenever the training error of a network doesn't decrease more
 * than a percentage for this number of cycles, the network is considered as converged.
 *
 * @param aCylces the number of cycles to set
 */
public void setCycles(int aCylces) {
cycles = aCylces;
}
/**
 * Gets the number of cycles over which convergence is checked.
 *
 * @return the number of cycles.
 */
public int getCycles() {
return cycles;
}
/** No action on net stop. */
protected void manageStop(Monitor mon) {
}
/** No action on cycle end. */
protected void manageCycle(Monitor mon) {
}
/** No action on net start. */
protected void manageStart(Monitor mon) {
}
/** Compares the current global error with the previous one and fires a
 * ConvergenceEvent after 'cycles' consecutive sub-threshold improvements.
 * @param mon the Monitor carrying the current global error
 */
protected void manageError(Monitor mon) {
// Observer disabled until a non-negative percentage and positive cycle
// count are configured.
if(percentage < 0 || cycles <= 0) {
return;
}
double myCurrentError = mon.getGlobalError();
if(lastError >= 0) { // if lastError < 0, it is the first time and the lastError is unknown
// Relative improvement since the previous observation, in percent.
// NOTE(review): if lastError is exactly 0 this division yields
// Infinity/NaN; both fail the condition below, so the counter is
// reset — confirm that is the intended treatment of a zero error.
double myPercentage = (lastError - myCurrentError) * 100 / lastError;
if(myPercentage <= percentage && myPercentage >= 0) {
cycleCounter++;
} else {
disableCurrentConvergence = false; // we are not in a convergence state, so if we were
// we moved out of it
cycleCounter = 0; // reset counter
}
if(cycleCounter == cycles) {
if(!disableCurrentConvergence) {
fireNetConverged(mon);
}
cycleCounter = 0;
}
}
lastError = myCurrentError;
}
/** No action on error stop. */
protected void manageStopError(Monitor mon, String msgErr) {
}
}
| Java |
/*
* ErrorBasedTerminator.java
*
* Created on October 28, 2004, 2:45 PM
*/
package org.joone.engine.listeners;
import org.joone.engine.*;
import org.joone.util.MonitorPlugin;
/**
* Stops a network whenever the training error of the network falls below a
* certain value.
*
* @author Boris Jansen
*/
public class ErrorBasedTerminator extends MonitorPlugin {
/** The error level. Whenever the training error falls below this value
* the network should be stopped. */
private double errorLevel;
/** The cycle the network was stopped. */
private int stoppedCycle = -1;
/** Has a stop request performed. */
private boolean stopRequested = false;
/** Creates a new instance of ErrorBasedTerminator */
public ErrorBasedTerminator() {
}
/**
* Creates a new instance of ErrorBasedTerminator
*
* @param anErrorLevel the error level. A network having a training error
* equal to or below this level will be stopped.
*/
public ErrorBasedTerminator(double anErrorLevel) {
errorLevel = anErrorLevel;
}
/**
* Sets the error level. A network having a training error
* equal to or below this level will be stopped.
*
* @param anErrorLevel the error level to set.
*/
public void setErrorLevel(double anErrorLevel) {
errorLevel = anErrorLevel;
}
/**
* Gets the error level.
*
* @return the error level.
*/
public double getErrorLevel() {
return errorLevel;
}
protected void manageStop(Monitor mon) {
}
protected void manageCycle(Monitor mon) {
}
protected void manageStart(Monitor mon) {
stoppedCycle = -1;
stopRequested = false;
}
protected void manageError(Monitor mon) {
if(mon.getGlobalError() <= errorLevel) {
if(!isStopRequestPerformed()) {
stoppedCycle = mon.getTotCicles() - mon.getCurrentCicle() + 1;
stopRequested = true;
}
getNeuralNet().stop();
}
}
protected void manageStopError(Monitor mon, String msgErr) {
}
/**
* Gets the cycle the network was stopped.
*
* @return the cycle the network was stopped or -1 if the network hasn't been
* stopped since it is (re)started.
*/
public int getStoppedCycle() {
return stoppedCycle;
}
/**
* Checks if this object requested / stopped the neural network.
*
* @return <code>true</code> if this object requested the stop of the network
* since it has been started, <code>false</code> otherwise.
*/
public boolean isStopRequestPerformed() {
return stopRequested;
}
}
| Java |
/*
* ConvergenceObserver.java
*
* Created on October 28, 2004, 3:21 PM
*/
package org.joone.engine.listeners;
import java.util.*;
import org.joone.engine.Monitor;
import org.joone.util.MonitorPlugin;
/**
* Abstract class for all convergence observer.
*
* @author Boris Jansen
*/
public abstract class ConvergenceObserver extends MonitorPlugin {
/** The next flag indicates if the current convegence should be neglected. This
 * is used in situations where convergence was reached, but the network continues
 * running. If we would not neglect the current convergence event would continue
 * to be generated. This flag is used to disable the event sfor the current convergence. */
protected boolean disableCurrentConvergence = false;
/** List of <code>ConvergenceListener</code>s. */
private List listeners = new ArrayList();
/** Creates a new instance of ConvergenceObserver */
public ConvergenceObserver() {
}
/**
 * Adds a convergence listener. Duplicate registrations are ignored.
 *
 * @param aListener the listener to add.
 */
public void addConvergenceListener(ConvergenceListener aListener) {
if(!listeners.contains(aListener)) {
listeners.add(aListener);
}
}
/**
 * Removes a convergence listener.
 *
 * @param aListener the listener to remove.
 */
public void removeConvergenceListener(ConvergenceListener aListener) {
listeners.remove(aListener);
}
/**
 * Fires a net converged event to every registered listener.
 *
 * @param aMonitor a monitor object.
 */
protected void fireNetConverged(Monitor aMonitor) {
// Snapshot the listener list before delivering the events.
// NOTE(review): the snapshot is taken under 'this', but add/remove above
// are NOT synchronized — confirm whether concurrent (de)registration is
// expected while events are fired.
Object[] myList;
synchronized (this) {
myList = listeners.toArray();
}
ConvergenceEvent myEvent = new ConvergenceEvent(aMonitor);
for (int i = 0; i < myList.length; ++i) {
((ConvergenceListener)myList[i]).netConverged(myEvent, this);
}
}
/**
 * Disables current convergence events. Used in situations where convergence
 * was reached but the network keeps running. By calling this method no events
 * signaling convergence was reached will be greated. Whenever the network
 * moves out of the convergence state, new events will be created again once
 * the system reaches convergence.
 */
public void disableCurrentConvergence() {
disableCurrentConvergence = true;
}
}
| Java |
/*
* DeltaBasedConvergenceObserver.java
*
* Created on October 29, 2004, 12:05 PM
*/
package org.joone.engine.listeners;
import java.util.*;
import org.joone.engine.*;
import org.joone.net.*;
import org.joone.util.MonitorPlugin;
/**
* This observer observes if the network has convergenced based on the size of the
* weight updates (deltas).
*
* @author Boris Jansen
*/
public class DeltaBasedConvergenceObserver extends ConvergenceObserver {
    /** Whenever each weight update value for some epochs / cycles is less than
     * this size, a <code>ConvergenceEvent</code> will be generated. */
    private double size = 0;

    /** Sets the size of the cycles (sequence). Whenever the weight update values of a neural network
     * are equal to or smaller than a certain value for this number of epochs, the network is considered
     * as converged. */
    private int cycles = 5; // default

    /** Counter to check how many cycles the deltas where equal to or less than <code>size</code>. */
    private int cycleCounter = 0;

    /** The network holding the layers and synapses to be checked. */
    private NeuralNet net;

    /** Creates a new instance of DeltaBasedConvergenceObserver */
    public DeltaBasedConvergenceObserver() {
    }

    /**
     * Sets the size. Whenever the weight (biases) update values (deltas) are smaller
     * than this value for a certain number of cycles ({@link setCycles()}, the network is
     * considered as converged.
     *
     * @param aSize the size to set.
     */
    public void setSize(double aSize) {
        size = aSize;
    }

    /**
     * Gets the size (delta bound for convergence).
     *
     * @return the size.
     */
    public double getSize() {
        return size;
    }

    /**
     * Sets the number of cycles. Whenever the deltas are equal to or smaller than the set size
     * for this number of cycles, the network is considered as converged.
     *
     * @param aCylces the number of cycles to set
     */
    public void setCycles(int aCylces) {
        cycles = aCylces;
    }

    /**
     * Gets the number of cycles over which convergence is checked.
     *
     * @return the number of cycles.
     */
    public int getCycles() {
        return cycles;
    }

    /**
     * Sets the neural network to be checked for convergence.
     *
     * @param aNet the network to set.
     */
    public void setNeuralNet(NeuralNet aNet) {
        net = aNet;
    }

    /**
     * Gets the neural net that is being checked for convergence.
     *
     * @return the net that is being checked for convergence.
     */
    public NeuralNet getNeuralNet() {
        return net;
    }

    /** No action on net stop. */
    protected void manageStop(Monitor mon) {
    }

    /** No action on cycle end. */
    protected void manageCycle(Monitor mon) {
    }

    /** No action on net start. */
    protected void manageStart(Monitor mon) {
    }

    /** Scans every bias and weight matrix of the configured net; fires a
     * ConvergenceEvent after 'cycles' consecutive epochs in which all deltas
     * stayed within 'size'.
     * NOTE(review): assumes setNeuralNet() was called — 'net' is dereferenced
     * without a null check; confirm callers always configure it.
     * @param mon the Monitor that reported the error update
     */
    protected void manageError(Monitor mon) {
        if(cycles <= 0) {
            return;
        }
        Layer myLayer;
        Matrix myBiases, myWeights;
        for(int i = 0; i < net.getLayers().size(); i++) {
            myLayer = (Layer)net.getLayers().get(i);
            myBiases = myLayer.getBias();
            // BUG FIX: the original looped over the bias rows, re-running the
            // full-matrix isConvergence() check once per row, and performed
            // its null check only inside the loop — after getM_rows() had
            // already been invoked on the (possibly null) matrix. One guarded
            // call is sufficient and avoids the NPE for layers without biases.
            if(myBiases != null && !isConvergence(myBiases)) {
                cycleCounter = 0;
                disableCurrentConvergence = false; // we are not in a convergence state, so if we were
                                                   // we moved out of it
                return;
            }
            for(int s = 0; s < myLayer.getAllOutputs().size(); s++) {
                if(myLayer.getAllOutputs().get(s) instanceof Synapse) {
                    myWeights = ((Synapse)myLayer.getAllOutputs().get(s)).getWeights();
                    if(myWeights != null && !isConvergence(myWeights)) {
                        cycleCounter = 0;
                        disableCurrentConvergence = false;
                        return;
                    }
                }
            }
        }
        // Every delta in the whole net stayed within bounds for this epoch.
        cycleCounter++;
        if(cycleCounter == cycles) {
            if(!disableCurrentConvergence) {
                fireNetConverged(mon);
            }
            cycleCounter = 0;
        }
    }

    /**
     * Checks if the weights or biases have converged, i.e. if the delta weight update
     * value is below size.
     *
     * @param aMatrix the matrix (weights or biases) to check if their deltas are equal
     * to or below size.
     * @return true if the deltas are equal to or below size, false otherwise.
     */
    protected boolean isConvergence(Matrix aMatrix) {
        for(int r = 0; r < aMatrix.getM_rows(); r++) {
            for(int c = 0; c < aMatrix.getM_cols(); c++) {
                if(Math.abs(aMatrix.delta[r][c]) > size) {
                    return false;
                }
            }
        }
        return true;
    }

    /** No action on error stop. */
    protected void manageStopError(Monitor mon, String msgErr) {
    }
}
| Java |
/*
* ConvergenceListener.java
*
* Created on October 28, 2004, 3:16 PM
*/
package org.joone.engine.listeners;
/**
* Listens for convergence events.
*
* @author Boris Jansen
*/
public interface ConvergenceListener extends java.util.EventListener {
/**
 * This method is called whenever the network has converged according to some
 * <code>ConvergenceObserver</code>.
 *
 * @param anEvent the event that is generated; its source is the Monitor
 * that was being observed.
 * @param anObserver the observer that generated the event (useful e.g. to
 * call anObserver.disableCurrentConvergence() from within the callback).
 */
void netConverged(ConvergenceEvent anEvent, ConvergenceObserver anObserver);
}
| Java |
package org.joone.engine;
/** This class implements a synapse that permits to have asynchronous
* methods to write output patterns.
* The <CODE>fwdPut</CODE> method, infact, uses a FIFO structure to
* store the patterns and to separate the writing from the reading layers.
*/
public class BufferedSynapse extends Synapse {
// FIFO buffer decoupling the writing layer (fwdPut) from the reading layer
// (fwdGet). Being transient, it is not restored by deserialization.
// NOTE(review): 'fifo' is never assigned anywhere in this class; unless a
// superclass or external code initializes it, fwdPut/fwdGet will throw
// NullPointerException — confirm where the Fifo is expected to be created.
private transient Fifo fifo;
private static final long serialVersionUID = -8067243400677466498L;
/** BufferedOutputSynapse constructor.
 */
public BufferedSynapse() {
super();
}
/** Backward pass is intentionally a no-op for this synapse.
 */
protected void backward(double[] pattern) {
// Not used
}
/** Forward pass: exposes the input pattern unchanged as the output.
 */
protected void forward(double[] pattern) {
outs = pattern;
}
/** Return the first element of the FIFO structure, if exists.
 * Blocks while the buffer is empty; returns null if interrupted.
 * NOTE(review): this method waits on fwdLock (guarded by getFwdLock()),
 * while fwdPut below synchronizes and notifies on 'this' — unless
 * getFwdLock() returns this same object, the producer's notifyAll() can
 * never wake a consumer blocked here. Confirm the locking design.
 * @return Pattern
 */
public Pattern fwdGet() {
Pattern pat;
synchronized (getFwdLock()) {
while (items == 0) {
try {
fwdLock.wait();
} catch (InterruptedException e) {
return null;
}
}
pat = (Pattern)fifo.pop();
items = fifo.size();
fwdLock.notifyAll();
return pat;
}
}
/** Writes the input pattern into the FIFO structure.
 * The layer that calls this methos will wait only
 * the time needed to put the input data into the pipeline.
 * @param pattern The Pattern object to write in the FIFO structure
 */
public synchronized void fwdPut(Pattern pattern) {
m_pattern = pattern;
inps = pattern.getArray();
// forward() is an identity pass-through here (see above), so the pattern
// is queued with its array unchanged.
forward(inps);
m_pattern.setArray(outs);
fifo.push(m_pattern);
items = fifo.size();
notifyAll();
}
/** Not used
 * @return Pattern
 */
public synchronized Pattern revGet() {
// Not used
return null;
}
/** Not used
 * @param pattern
 */
public synchronized void revPut(Pattern pattern) {
// Not used
}
/**
 * setArrays method comment. Intentionally empty: this synapse keeps no
 * weight arrays of its own.
 */
protected void setArrays(int rows, int cols) {}
/**
 * setDimensions method comment. Intentionally empty: see setArrays.
 */
protected void setDimensions(int rows, int cols) {}
}
/*
* AbstractEventNotifier.java
*
* Created on 31 gennaio 2003, 21.09
*/
package org.joone.engine;
/**
* This class raises an event notification invoking the corrisponnding
* Monitor.fireXXX method. The event is raised from within a separate
* Thread to avoid the race conditions to happen
*
* @author pmarrone
*/
public abstract class AbstractEventNotifier implements Runnable {

    /** The Monitor whose fireXXX method will be invoked by run(). */
    protected Monitor monitor;

    /** The background thread; created at most once per notifier instance. */
    private Thread myThread;

    /** Creates a new instance of AbstractEventNotifier */
    public AbstractEventNotifier(Monitor mon) {
        monitor = mon;
    }

    /**
     * The inherited classes must override this method,
     * invoking in it the desired monitor.fireXXX method.
     */
    public abstract void run();

    /** Starts the notification thread. Only the first call has any effect;
     * subsequent calls are no-ops. */
    public synchronized void start() {
        if (myThread != null) {
            return;
        }
        myThread = new Thread(this);
        myThread.start();
    }
}
| Java |
package org.joone.engine;
import java.util.ArrayList;
import java.util.Collection;
import org.joone.exception.JooneRuntimeException;
import org.joone.log.*;
import java.util.TreeSet;
import org.joone.inspection.implementations.BiasInspection;
import org.joone.net.NetCheck;
/** <P>This layer implements the Winner Takes All SOM strategy. The layer
* expects to receive euclidean distances between the previous synapse weights and
* it's input. The layer simply works out which node is the winner and passes 1.0
* for that node and 0.0 for the others.</P>
* @see SimpleLayer parent
*/
public class WTALayer extends SimpleLayer {
private static final ILogger log = LoggerFactory.getLogger (WTALayer.class);
private static final long serialVersionUID = -941653911909171430L;
// Width of the map in the this layer.
private int LayerWidth = 1;
// Height of the map in the this layer.
private int LayerHeight = 1;
// Depth of the map in the this layer.
private int LayerDepth = 1;
/** The default constructor for this WTALayer. */
public WTALayer() {
super();
}
/** The constructor allowing a name to be specified.
 * @param ElemName The name of the Layer
 */
public WTALayer(java.lang.String ElemName) {
super(ElemName);
}
/** <P>No biases need updating or setting: the winner-takes-all layer has
 * nothing to learn, so the backward pass is intentionally a no-op.</P>
 * @param pattern The pattern with which to update internal variables. Not required.
 * @throws JooneRuntimeException The Joone Run time exception.
 */
public void backward(double[] pattern)
throws JooneRuntimeException
{
}
/** This method accepts an array of values from the input and forwards it
 * according to the Winner Takes All strategy: the node with the smallest
 * input (euclidean distance) outputs 1.0, every other node outputs 0.0.
 * @param pattern <P>Should be the euclidean distance between the previous synapse's input vector and
 * weights.</P>
 * @see NeuralLayer#forward (double[])
 * @throws JooneRuntimeException This <code>Exception </code> is a wrapper Exception when an Exception is thrown
 * while doing the maths.
 */
public void forward (double[] pattern)
throws JooneRuntimeException
{
    int x = 0;
    int winner = 0;
    // BUG FIX: the previous sentinel was the magic number 9999999999999f;
    // any input whose distances all exceeded it silently crowned node 0 the
    // winner. Double.MAX_VALUE guarantees the first element always
    // initializes the running minimum.
    double min_dist = Double.MAX_VALUE;
    int n = getRows();
    try
    {
        // Find the node with the minimal distance (ties keep the lowest index).
        for ( x = 0; x < n; ++x) {
            if ( pattern[x] < min_dist)
            {
                min_dist = pattern[x];
                winner = x;
            }
        }
        // Emit a one-hot vector marking the winning node.
        for ( x = 0; x < n; ++x) {
            outs[x] = (x == winner) ? 1f : 0f;
        }
    }catch (Exception aioobe)
    {
        String msg;
        // NOTE(review): if the failure was an out-of-bounds access, reading
        // pattern[x] again here can rethrow from within the handler —
        // confirm whether the message should guard the index.
        log.error ( msg = "Exception thrown while processing the element " + x + " of the array. Value is : " + pattern[x]
        + " Exception thrown is " + aioobe.getClass ().getName () + ". Message is " + aioobe.getMessage() );
        throw new JooneRuntimeException (msg, aioobe);
    }
}
/** Getter for property LayerDepth.
* @return Value of property LayerDepth.
*
*/
public int getLayerDepth() {
return LayerDepth;
}
/** Setter for property LayerDepth.
* @param LayerDepth New value of property LayerDepth.
*
*/
public void setLayerDepth(int LayerDepth) {
if ( LayerDepth != getLayerDepth() )
{
this.LayerDepth = LayerDepth;
setRows(getLayerWidth()*getLayerHeight()*getLayerDepth());
setDimensions();
setConnDimensions();
}
}
/** Getter for property LayerHeight.
* @return Value of property LayerHeight.
*
*/
public int getLayerHeight() {
return LayerHeight;
}
/** Setter for property LayerHeight.
* @param LayerHeight New value of property LayerHeight.
*
*/
public void setLayerHeight(int LayerHeight) {
if ( LayerHeight != getLayerHeight() )
{
this.LayerHeight = LayerHeight;
setRows(getLayerWidth()*getLayerHeight()*getLayerDepth());
setDimensions();
setConnDimensions();
}
}
/** Getter for property LayerWidth.
* @return Value of property LayerWidth.
*
*/
public int getLayerWidth() {
return LayerWidth;
}
/** Setter for property LayerWidth.
* @param LayerWidth New value of property LayerWidth.
*
*/
public void setLayerWidth(int LayerWidth) {
if ( LayerWidth != getLayerWidth() )
{
this.LayerWidth = LayerWidth;
setRows(getLayerWidth()*getLayerHeight()*getLayerDepth());
setDimensions();
setConnDimensions();
}
}
/**
* Check that there are no errors or problems with the properties of this WTALayer.
* @return The TreeSet of errors / problems if any.
*/
public TreeSet check() {
TreeSet checks = super.check();
if ( getLayerWidth() < 1 )
checks.add(new NetCheck(NetCheck.FATAL, "Layer width should be greater than or equal to 1." , this));
if ( getLayerHeight() < 1 )
checks.add(new NetCheck(NetCheck.FATAL, "Layer height should be greater than or equal to 1." , this));
if ( getLayerDepth() < 1 )
checks.add(new NetCheck(NetCheck.FATAL, "Layer depth should be greater than or equal to 1." , this));
return checks;
}
/**
* It doesn't make sense to return biases for this layer
* @return null
*/
public Collection Inspections() {
Collection col = new ArrayList();
col.add(new BiasInspection(null));
return col;
}
} | Java |
package org.joone.engine;
/** This interface represents an output synapse for a generic layer.
 * An output synapse receives patterns from its attached layer during the
 * forward pass and, during training, hands error patterns back to it.
 * @author: Paolo Marrone
 */
public interface OutputPatternListener extends NeuralElement {
/** Method to put a pattern forward to the next layer
 * @param pattern neural.engine.Pattern
 */
public void fwdPut(Pattern pattern);
/** Tells whether this synapse is already attached to a layer's output
 * @return true if the synapse is already attached
 */
public boolean isOutputFull();
/** Marks this synapse as attached (or not) to a layer's output
 * @param outputFull true if the synapse is attached
 */
public void setOutputFull(boolean outputFull);
/** Returns the input dimension of the output synapse
 * @return int
 */
public int getInputDimension();
/** Returns the error pattern coming from the next layer during the training phase
 * @return neural.engine.Pattern
 */
public Pattern revGet();
/** Sets the input dimension of the output synapse
 * @param newInputDimension int
 */
public void setInputDimension(int newInputDimension);
}
package org.joone.engine;
import java.beans.*;
/**
 * BeanInfo for {@link org.joone.engine.WTALayer}, generated by the NetBeans
 * GUI builder. The {@code //GEN-*} markers delimit regions that the builder
 * regenerates; avoid hand-editing inside them.
 * NOTE(review): the properties array exposes layerWidth and layerHeight but
 * not layerDepth, although WTALayer defines that property - confirm whether
 * the omission is intentional or the BeanInfo simply needs regenerating.
 */
public class WTALayerBeanInfo extends SimpleBeanInfo {
// Bean descriptor//GEN-FIRST:BeanDescriptor
private static BeanDescriptor beanDescriptor = new BeanDescriptor ( org.joone.engine.WTALayer.class , null ); // NOI18N
private static BeanDescriptor getBdescriptor(){
return beanDescriptor;
}
static {//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
}//GEN-LAST:BeanDescriptor
// Property identifiers//GEN-FIRST:Properties
private static final int PROPERTY_allInputs = 0;
private static final int PROPERTY_allOutputs = 1;
private static final int PROPERTY_inputLayer = 2;
private static final int PROPERTY_layerHeight = 3;
private static final int PROPERTY_layerName = 4;
private static final int PROPERTY_layerWidth = 5;
private static final int PROPERTY_learner = 6;
private static final int PROPERTY_monitor = 7;
private static final int PROPERTY_outputLayer = 8;
private static final int PROPERTY_rows = 9;
// Property array
private static PropertyDescriptor[] properties = new PropertyDescriptor[10];
private static PropertyDescriptor[] getPdescriptor(){
return properties;
}
static {
try {
properties[PROPERTY_allInputs] = new PropertyDescriptor ( "allInputs", org.joone.engine.WTALayer.class, "getAllInputs", "setAllInputs" ); // NOI18N
properties[PROPERTY_allInputs].setExpert ( true );
properties[PROPERTY_allOutputs] = new PropertyDescriptor ( "allOutputs", org.joone.engine.WTALayer.class, "getAllOutputs", "setAllOutputs" ); // NOI18N
properties[PROPERTY_allOutputs].setExpert ( true );
properties[PROPERTY_inputLayer] = new PropertyDescriptor ( "inputLayer", org.joone.engine.WTALayer.class, "isInputLayer", null ); // NOI18N
properties[PROPERTY_inputLayer].setExpert ( true );
properties[PROPERTY_layerHeight] = new PropertyDescriptor ( "layerHeight", org.joone.engine.WTALayer.class, "getLayerHeight", "setLayerHeight" ); // NOI18N
properties[PROPERTY_layerName] = new PropertyDescriptor ( "layerName", org.joone.engine.WTALayer.class, "getLayerName", "setLayerName" ); // NOI18N
properties[PROPERTY_layerWidth] = new PropertyDescriptor ( "layerWidth", org.joone.engine.WTALayer.class, "getLayerWidth", "setLayerWidth" ); // NOI18N
properties[PROPERTY_learner] = new PropertyDescriptor ( "learner", org.joone.engine.WTALayer.class, "getLearner", null ); // NOI18N
properties[PROPERTY_learner].setExpert ( true );
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", org.joone.engine.WTALayer.class, "getMonitor", "setMonitor" ); // NOI18N
properties[PROPERTY_monitor].setExpert ( true );
properties[PROPERTY_outputLayer] = new PropertyDescriptor ( "outputLayer", org.joone.engine.WTALayer.class, "isOutputLayer", null ); // NOI18N
properties[PROPERTY_outputLayer].setExpert ( true );
properties[PROPERTY_rows] = new PropertyDescriptor ( "rows", org.joone.engine.WTALayer.class, "getRows", "setRows" ); // NOI18N
properties[PROPERTY_rows].setHidden ( true );
}
catch(IntrospectionException e) {
e.printStackTrace();
}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
}//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
private static EventSetDescriptor[] eventSets = new EventSetDescriptor[0];
private static EventSetDescriptor[] getEdescriptor(){
return eventSets;
}
//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
//GEN-LAST:Events
// Method identifiers//GEN-FIRST:Methods
private static final int METHOD_addInputSynapse0 = 0;
private static final int METHOD_addNoise1 = 1;
private static final int METHOD_addOutputSynapse2 = 2;
private static final int METHOD_copyInto3 = 3;
private static final int METHOD_removeAllInputs4 = 4;
private static final int METHOD_removeAllOutputs5 = 5;
private static final int METHOD_removeInputSynapse6 = 6;
private static final int METHOD_removeOutputSynapse7 = 7;
private static final int METHOD_run8 = 8;
private static final int METHOD_start9 = 9;
// Method array
private static MethodDescriptor[] methods = new MethodDescriptor[10];
private static MethodDescriptor[] getMdescriptor(){
return methods;
}
static {
try {
methods[METHOD_addInputSynapse0] = new MethodDescriptor ( org.joone.engine.WTALayer.class.getMethod("addInputSynapse", new Class[] {org.joone.engine.InputPatternListener.class})); // NOI18N
methods[METHOD_addInputSynapse0].setDisplayName ( "" );
methods[METHOD_addNoise1] = new MethodDescriptor ( org.joone.engine.WTALayer.class.getMethod("addNoise", new Class[] {Double.TYPE})); // NOI18N
methods[METHOD_addNoise1].setDisplayName ( "" );
methods[METHOD_addOutputSynapse2] = new MethodDescriptor ( org.joone.engine.WTALayer.class.getMethod("addOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class})); // NOI18N
methods[METHOD_addOutputSynapse2].setDisplayName ( "" );
methods[METHOD_copyInto3] = new MethodDescriptor ( org.joone.engine.WTALayer.class.getMethod("copyInto", new Class[] {org.joone.engine.NeuralLayer.class})); // NOI18N
methods[METHOD_copyInto3].setDisplayName ( "" );
methods[METHOD_removeAllInputs4] = new MethodDescriptor ( org.joone.engine.WTALayer.class.getMethod("removeAllInputs", new Class[] {})); // NOI18N
methods[METHOD_removeAllInputs4].setDisplayName ( "" );
methods[METHOD_removeAllOutputs5] = new MethodDescriptor ( org.joone.engine.WTALayer.class.getMethod("removeAllOutputs", new Class[] {})); // NOI18N
methods[METHOD_removeAllOutputs5].setDisplayName ( "" );
methods[METHOD_removeInputSynapse6] = new MethodDescriptor ( org.joone.engine.WTALayer.class.getMethod("removeInputSynapse", new Class[] {org.joone.engine.InputPatternListener.class})); // NOI18N
methods[METHOD_removeInputSynapse6].setDisplayName ( "" );
methods[METHOD_removeOutputSynapse7] = new MethodDescriptor ( org.joone.engine.WTALayer.class.getMethod("removeOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class})); // NOI18N
methods[METHOD_removeOutputSynapse7].setDisplayName ( "" );
methods[METHOD_run8] = new MethodDescriptor ( org.joone.engine.WTALayer.class.getMethod("run", new Class[] {})); // NOI18N
methods[METHOD_run8].setDisplayName ( "" );
methods[METHOD_start9] = new MethodDescriptor ( org.joone.engine.WTALayer.class.getMethod("start", new Class[] {})); // NOI18N
methods[METHOD_start9].setDisplayName ( "" );
}
// NOTE(review): reflection failures are silently swallowed here; this is the
// generated NetBeans pattern - a bad method lookup simply yields no descriptor.
catch( Exception e) {}//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
}//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
/**
 * Gets the bean's <code>BeanDescriptor</code>s.
 *
 * @return BeanDescriptor describing the editable
 * properties of this bean. May return null if the
 * information should be obtained by automatic analysis.
 */
public BeanDescriptor getBeanDescriptor() {
return beanDescriptor;
}
/**
 * Gets the bean's <code>PropertyDescriptor</code>s.
 *
 * @return An array of PropertyDescriptors describing the editable
 * properties supported by this bean. May return null if the
 * information should be obtained by automatic analysis.
 * <p>
 * If a property is indexed, then its entry in the result array will
 * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
 * A client of getPropertyDescriptors can use "instanceof" to check
 * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
 */
public PropertyDescriptor[] getPropertyDescriptors() {
return properties;
}
/**
 * Gets the bean's <code>EventSetDescriptor</code>s.
 *
 * @return An array of EventSetDescriptors describing the kinds of
 * events fired by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public EventSetDescriptor[] getEventSetDescriptors() {
return eventSets;
}
/**
 * Gets the bean's <code>MethodDescriptor</code>s.
 *
 * @return An array of MethodDescriptors describing the methods
 * implemented by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public MethodDescriptor[] getMethodDescriptors() {
return methods;
}
/**
 * A bean may have a "default" property that is the property that will
 * mostly commonly be initially chosen for update by human's who are
 * customizing the bean.
 * @return Index of default property in the PropertyDescriptor array
 * returned by getPropertyDescriptors.
 * <P> Returns -1 if there is no default property.
 */
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
 * A bean may have a "default" event that is the event that will
 * mostly commonly be used by human's when using the bean.
 * @return Index of default event in the EventSetDescriptor array
 * returned by getEventSetDescriptors.
 * <P> Returns -1 if there is no default event.
 */
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
package org.joone.engine;
public class NeuralNetAdapter implements NeuralNetListener
{
public void cicleTerminated(NeuralNetEvent e) {}
public void netStopped(NeuralNetEvent e) {}
public void netStarted(NeuralNetEvent e) {
}
public void errorChanged(NeuralNetEvent e) {
}
public void netStoppedError(NeuralNetEvent e,String error){
}
} | Java |
/*
 * GaussianSpatialMap.java
 *
 * Created on 2003/6/13 11:34
 */
package org.joone.engine;
/**
 * This class implements the SpatialMap interface providing a circular spatial map for use with the GaussianLayer and Kohonen Networks.
 * The radius of the circle is equal to the initial Gaussian Size and is reduced if training is currently in process.
 */
public class GaussianSpatialMap extends SpatialMap {
private static final long serialVersionUID = -5578079370364572387L;
/** Creates a new instance of GaussianSpatialMap */
public GaussianSpatialMap() {
}
/** Computes the neighborhood function (the per-output learning factor) for
 * every output in the map, centred on the winning neuron.
 * @param distances the distances produced by the synapse/layer; the winner is extracted from these
 * @param n_outs output array receiving the Gaussian falloff for each flattened map position
 * @param isLearning whether the network is currently training
 */
public void ApplyNeighborhoodFunction(double[] distances, double[] n_outs, boolean isLearning) {
double dFalloff=0;
double nbhRadius=1; // Neighbourhood radius
double nbhRadiusSq = 1;
double dist_to_node=0;
int current_output = 0;
// Extract the winning neuron from the distances passed in by the synapse/layer.
extractWinner(distances);
int winx = getWinnerX();
int winy = getWinnerY();
int winz = getWinnerZ();
// NOTE(review): the isLearning guard below is commented out, so the current
// Gaussian size is applied whether or not training is active - confirm intended.
//if (isLearning)
nbhRadius = getCurrentGaussianSize(); // Get Current Neighbourhood Radius
nbhRadiusSq = nbhRadius * nbhRadius; // Neighbourhood Radius Squared.
// Loop through the map and set the neighborhood function (individual learning rate) of each neighborhood output.
for (int z=0;z<getMapDepth();z++){
for (int y=0; y<getMapHeight(); y++) {
for (int x=0; x<getMapWidth(); x++) {
dist_to_node = distanceBetween(winx,winy,winz,x,y,z);
dFalloff = getCircle2DDistanceFalloff(dist_to_node, nbhRadiusSq);
// Flatten (x, y, z) into the linear output index: x + y*W + z*W*H.
current_output = x+(y* getMapWidth())+(z*( getMapWidth()*getMapHeight()));
n_outs[current_output] = dFalloff;
}
}
}
}
/**
 * Gets the fall off distance from the edge of the radius.
 * NOTE(review): this formula treats the first argument as a squared distance,
 * but the caller passes the value returned by distanceBetween(...) - confirm
 * that helper returns a squared distance rather than a plain one.
 * @param distSq The square of the distance to the output/node being measured.
 * @param radiusSq The square of the radius of the current circular spatial neighborhood.
 * @return The fall off distance between the distSq and the radiusSq.
 */
private double getCircle2DDistanceFalloff(double distSq, double radiusSq) {
return Math.exp(-(distSq)/(2 * radiusSq));
}
}
| Java |
package org.joone.engine;
import java.util.*;
/**
 * This is the interface for all the layer objects of the neural network
 */
public interface NeuralLayer {
/** Adds a noise to the biases of the layer and to all the input synapses connected
 * @param amplitude the noise's amplitude in terms of distance from zero;
 * e.g.: a value equal 0.3 means a noise from -0.3 to 0.3
 */
public void addNoise(double amplitude);
/** Copies a Layer into another one, to obtain a type-transformation
 * from a kind of Layer to another.
 * The old Layer is disconnected from the net, while the new Layer
 * takes its place.
 * @param newLayer neural.engine.Layer
 * @return The new layer
 */
public NeuralLayer copyInto(NeuralLayer newLayer);
/** Returns the vector of the input listeners
 * @return java.util.Vector
 */
public java.util.Vector getAllInputs();
/** Returns the vector of the output listeners
 * @return java.util.Vector
 */
public java.util.Vector getAllOutputs();
/** Return the bias matrix
 * @return neural.engine.Matrix
 */
public Matrix getBias();
/** Returns the name of the layer
 * @return java.lang.String
 */
public java.lang.String getLayerName();
/** Returns the dimension (# of neurons) of the Layer
 * @return int
 */
public int getRows();
/** Remove all the input listeners of the net
 */
public void removeAllInputs();
/** Remove all the output listeners of the net
 */
public void removeAllOutputs();
/** Remove an input Listener
 * @param newListener the input listener to remove
 */
public void removeInputSynapse(InputPatternListener newListener);
/** Remove an output listener from the layer
 * @param newListener the output listener to remove
 */
public void removeOutputSynapse(OutputPatternListener newListener);
/** Sets the vector that contains all the input listeners. Can be useful to set the input synapses taken from another Layer
 * @param newAInputPatternListener The vector containing the list of input synapses
 */
public void setAllInputs(java.util.Vector newAInputPatternListener);
/** Sets the vector that contains all the output listeners. Can be useful to set the output synapses taken from another Layer
 * @param newAOutputPatternListener The vector containing the list of output synapses
 */
public void setAllOutputs(java.util.Vector newAOutputPatternListener);
/** Sets the matrix of biases
 * @param newBias The Matrix object containing the biases
 */
public void setBias(Matrix newBias);
/** Adds a new input synapse to the layer
 * @param newListener The new input synapse
 * @return true if the input synapse has been attached successfully
 */
public boolean addInputSynapse(InputPatternListener newListener);
/** Sets the name of the layer
 * @param newLayerName The name
 */
public void setLayerName(java.lang.String newLayerName);
/** Adds a new output synapse to the layer
 * @param newListener The new output synapse
 * @return true if the output synapse has been attached successfully
 */
public boolean addOutputSynapse(OutputPatternListener newListener);
/** Sets the dimension (# of neurons) of the Layer
 * @param newRows The number of the neurons contained in the Layer
 */
public void setRows(int newRows);
/** Starts the Layer
 */
public void start();
/** Sets the monitor object
 * @param newMonitor The Monitor to be set
 */
public void setMonitor(Monitor newMonitor);
/** Returns the monitor object
 * @return java.engine.Monitor
 */
public Monitor getMonitor();
/** Returns true if the layer is running
 * @return boolean
 */
public boolean isRunning();
/**
 * Validation checks for invalid parameter values, misconfiguration, etc.
 * All network components should include a check method that firstly calls its ancestor check method and
 * adds these to any check messages it produces. This allows check messages to be collected from all levels
 * of a component to be returned to the caller's check method. Using a TreeSet ensures that
 * duplicate messages are removed. Check messages should be produced using the generateValidationErrorMessage
 * method of the NetChecker class.
 *
 * @return validation errors.
 */
public TreeSet check();
}
package org.joone.engine;
import java.util.Vector;
import java.util.TreeSet;
import java.io.*;
/** This class acts as a switch that can connect its input to one of its connected
 * output synapses.
 * Many output synapses can be attached to the switch calling the method addOutputSynapse,
 * but only one is attached to the input; which one is connected is determined
 * by the call to the method setActiveOutput, passing to it the name of
 * the selected synapse.
 */
public class OutputSwitchSynapse implements OutputPatternListener, Serializable {

    private static final long serialVersionUID = 2906294213180089226L;

    /** All the output synapses attached to the switch. */
    protected Vector outputs;
    /** The name of this synapse. */
    private String name;
    /** The monitor, propagated to every attached synapse. */
    private Monitor mon;
    /** The input dimension, propagated to every attached synapse. */
    private int inputDimension;
    private boolean outputFull;
    private boolean enabled = true;
    /** The synapse currently receiving the forwarded patterns. */
    private OutputPatternListener activeSynapse;
    /** The synapse re-activated when {@link #reset} is called. */
    private OutputPatternListener defaultSynapse;

    /** The constructor
     */
    public OutputSwitchSynapse() {
        outputs = new Vector();
        activeSynapse = defaultSynapse = null;
        mon = null;
        inputDimension = 0;
    }

    /** Resets the switch, connecting the default synapse to the output
     */
    public void reset() {
        setActiveSynapse(getDefaultSynapse());
    }

    /** Removes an output synapse from the switch.
     * If the removed synapse was the active or the default one, the first
     * remaining synapse (if any) takes its role.
     * @param outputName The name of the synapse to remove
     * @return true if a synapse with that name was found and removed
     */
    public boolean removeOutputSynapse(String outputName) {
        OutputPatternListener opl = getOutputSynapse(outputName);
        if (opl == null) {
            return false;
        }
        outputs.removeElement(opl);
        opl.setOutputFull(false);
        if (outputs.size() > 0) {
            if (getActiveOutput().equalsIgnoreCase(outputName))
                setActiveSynapse((OutputPatternListener) outputs.elementAt(0));
            if (getDefaultOutput().equalsIgnoreCase(outputName))
                setDefaultSynapse((OutputPatternListener) outputs.elementAt(0));
        } else {
            setActiveOutput("");
            setDefaultOutput("");
        }
        return true;
    }

    /** Searches the attached synapses for one with the given name (case-insensitive).
     * @param outputName the name to look up
     * @return the matching synapse, or null if none matches
     */
    protected OutputPatternListener getOutputSynapse(String outputName) {
        for (int i = 0; i < outputs.size(); ++i) {
            OutputPatternListener out = (OutputPatternListener) outputs.elementAt(i);
            if (out.getName().equalsIgnoreCase(outputName))
                return out;
        }
        return null;
    }

    /** Adds an output synapse to the switch. The first synapse attached
     * becomes both the default and the active one.
     * @param newOutput the new output synapse
     * @return true if the synapse has been attached successfully
     */
    public boolean addOutputSynapse(OutputPatternListener newOutput) {
        boolean retValue = false;
        if (!outputs.contains(newOutput) && !newOutput.isOutputFull()) {
            outputs.addElement(newOutput);
            newOutput.setInputDimension(inputDimension);
            newOutput.setMonitor(mon);
            newOutput.setOutputFull(true);
            if (outputs.size() == 1) {
                setDefaultOutput(newOutput.getName());
                setActiveOutput(newOutput.getName());
            }
            retValue = true;
        }
        return retValue;
    }

    /** Returns the name of the actual connected output synapse
     * @return The name of the connected output synapse, or "" if none
     */
    public String getActiveOutput() {
        if (activeSynapse != null)
            return activeSynapse.getName();
        else
            return "";
    }

    /** Sets the output synapse connected to the input
     * @param newActiveOutput the name of the output synapse to connect
     */
    public void setActiveOutput(String newActiveOutput) {
        this.activeSynapse = getOutputSynapse(newActiveOutput);
    }

    /** Returns the name of the default output synapse that is connected
     * when the reset method is called
     * @return the name of the default synapse, or "" if none
     */
    public String getDefaultOutput() {
        if (defaultSynapse != null)
            return defaultSynapse.getName();
        else
            return "";
    }

    /** Sets the name of the default output synapse that is connected
     * when the reset method is called
     * @param newDefaultOutput the name of the default output synapse
     */
    public void setDefaultOutput(String newDefaultOutput) {
        defaultSynapse = getOutputSynapse(newDefaultOutput);
    }

    /**
     * Getter for property activeSynapse. @return Value of property activeSynapse.
     */
    protected OutputPatternListener getActiveSynapse() {
        return activeSynapse;
    }

    /**
     * Setter for property activeSynapse. @param activeSynapse New value of property activeSynapse.
     */
    protected void setActiveSynapse(OutputPatternListener activeSynapse) {
        this.activeSynapse = activeSynapse;
    }

    /**
     * Getter for property defaultSynapse. @return Value of property defaultSynapse.
     */
    protected OutputPatternListener getDefaultSynapse() {
        return defaultSynapse;
    }

    /**
     * Setter for property defaultSynapse. @param defaultSynapse New value of property defaultSynapse.
     */
    protected void setDefaultSynapse(OutputPatternListener defaultSynapse) {
        this.defaultSynapse = defaultSynapse;
    }

    /** Returns the name of the output synapse
     * @return String
     */
    public String getName() {
        return name;
    }

    /** Sets the name of the output synapse
     * @param name String
     */
    public void setName(String name) {
        this.name = name;
    }

    //------ Methods and parameters mapped on the attached output synapses -------

    /** Sets the dimension of the output synapse, propagating it to every
     * attached synapse.
     * @param newInputDimension int
     */
    public void setInputDimension(int newInputDimension) {
        this.inputDimension = newInputDimension;
        for (int i = 0; i < outputs.size(); ++i) {
            ((OutputPatternListener) outputs.elementAt(i)).setInputDimension(newInputDimension);
        }
    }

    /** Returns the dimension of the output synapse
     * @return int
     */
    public int getInputDimension() {
        return inputDimension;
    }

    /** Returns the monitor
     * @return org.joone.engine.Monitor
     */
    public Monitor getMonitor() {
        return mon;
    }

    /** Sets the Monitor object of the output synapse, propagating it to every
     * attached synapse.
     * @param newMonitor org.joone.engine.Monitor
     */
    public void setMonitor(Monitor newMonitor) {
        this.mon = newMonitor;
        for (int i = 0; i < outputs.size(); ++i) {
            ((OutputPatternListener) outputs.elementAt(i)).setMonitor(newMonitor);
        }
    }

    protected void backward(double[] pattern) {
        // Not used
    }

    protected void forward(double[] pattern) {
        // Not used
    }

    /** Returns all the attached output synapses.
     * @return java.util.Vector
     */
    public Vector getAllOutputs() {
        return outputs;
    }

    /** Re-connects the default synapse to the output. */
    public void resetOutput() {
        this.reset();
    }

    /** Method to put a pattern forward to the next layer
     * @param pattern neural.engine.Pattern
     */
    public void fwdPut(Pattern pattern) {
        if (isEnabled() && (activeSynapse != null))
            activeSynapse.fwdPut(pattern);
    }

    /** Returns the error pattern coming from the next layer during the training phase
     * @return neural.engine.Pattern, or null if disabled or no synapse is active
     */
    public Pattern revGet() {
        if (isEnabled() && (activeSynapse != null))
            return activeSynapse.revGet();
        else
            return null;
    }

    /**
     * Base for check messages.
     * Subclasses should call this method from their own check method.
     *
     * @see OutputPatternListener
     * @return validation errors.
     */
    public TreeSet check() {
        // Prepare an empty set for check messages;
        TreeSet checks = new TreeSet();
        // Return check messages
        return checks;
    }

    /** Getter for property outputFull.
     * @return Value of property outputFull.
     */
    public boolean isOutputFull() {
        return outputFull;
    }

    /** Setter for property outputFull, propagated to every attached synapse.
     * @param outputFull New value of property outputFull.
     */
    public void setOutputFull(boolean outputFull) {
        this.outputFull = outputFull;
        for (int i = 0; i < outputs.size(); ++i) {
            ((OutputPatternListener) outputs.elementAt(i)).setOutputFull(outputFull);
        }
    }

    /** Getter for property enabled.
     * @return Value of property enabled.
     */
    public boolean isEnabled() {
        return enabled;
    }

    /** Setter for property enabled.
     * @param enabled New value of property enabled.
     */
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    /** Initializes every attached output component that is a NeuralElement. */
    public void init() {
        for (int i = 0; i < outputs.size(); ++i) {
            if (outputs.elementAt(i) instanceof NeuralElement)
                ((NeuralElement) outputs.elementAt(i)).init();
        }
    }
}
| Java |
package org.joone.engine;
import java.util.TreeSet;
/** <P>This is an unsupervised Kohonen Synapse which is a Self Organising Map.</P>
* <P>This KohonenSynapse works in conjunction with the next layer which should
* implement a SOM strategy such as a GuassianLayer or WTALayer (Winner Takes All). This synapse
* should connect to one of these layers, without a SOM Strategy in the next layer
* this component will not function correctly.</P>
* <P>This KohonenSynapse takes a pattern from the previous layer, calculates the
* distance between the input vector and the weights and passes this on to the next
* layer. In the learning phase the next layer should calculate the distance fall
* off between the winner and all other nodes (1.0 being the closest distance and
* 0.0 being furthest away and not being considered near to the winner). These
* distances are passed back to this KohonenSynapse and used to adjust the
* weights.</P>
* <P>The weights are adjusted based on the current learning rate and distance fall off.</P>
* <P>At each epoch/cycel the learning rate is adjusted in the following way ...
* </P>
* <P>If the current cycle is within the ordering phase then the learning rate is set to </P>
* <P> User setup learning rate * exp(-(double)(Current Cycle/Time Constant)).</P>
*/
public class KohonenSynapse extends FullSynapse implements NeuralNetListener {
private static final long serialVersionUID = -4966435217407942471L;
double currentLearningRate = 1;
private double timeConstant = 200.0;
private int orderingPhase = 1000;
/** <P>The default constructor for the KohonenSynapse class.</P> */
public KohonenSynapse() {
super();
learnable = false;
}
/** <P>Adjusts the weights of this Kohonen Synapse according to the neighborhood fall off distance calculated by the next
* layer.</P>
* @param pattern The pattern with the distance fall off's between the winner and all other nodes.
* (1.0 is the winner through 0.0 having no similarity to the original input
* vector)
*/
protected void backward(double[] pattern) {
// Adjust weights
// double [][] weights = array.getValue();
double dFalloff = 0;
int num_outs = this.getOutputDimension();
double[] o_pattern = b_pattern.getOutArray();
// Loop through the map and adjust the weights of each neighborhood output.
for (int x=0;x<num_outs;x++) {
dFalloff = o_pattern[x];
adjustNodeWeight(x, currentLearningRate, dFalloff, inps);
}
}
/** </P>Fowards the euclidean distance squared between the input vector and the weight vector to the next
* layer. If the learning phase is currently active then the next layer should
* process this and pass back the distance fall off between the winning output and
* all other outputs.</P>
* @param pattern The pattern containg the euclidean distance squared between each weight and the
* input.
*/
protected void forward(double[] pattern) {
double temp = 0f;
double curDist = 0f;
int num_outs = this.getOutputDimension();
for (int x=0;x<num_outs;x++) // Loop through outputs
{
curDist = 0f;
for (int inputs=0;inputs<pattern.length;inputs++){
temp = array.value[inputs][x] - pattern[inputs];
temp *= temp;
curDist += temp;
}
outs[x] = curDist; // Output = distance between input and weights.
}
}
/* -- Map Size -- */
/* -- Generic Functions -- */
/**
* Adjusts the weights for the node located at x,y,z using the given distance and learning rate.
*/
private void adjustNodeWeight(int curnode, double learningRate, double distanceFalloff,double[] pattern) {
double wt, vw;
int output = curnode;
for (int w=0; w < getInputDimension(); w++) {
wt = array.value[w][output];
vw = pattern[w];
wt += distanceFalloff * learningRate * (vw - wt);
array.value[w][output] = wt;
}
}
/* -- Net Listener Methods -- */
/** Sets the Monitor object of the synapse
* @param newMonitor neural.engine.Monitor
*/
public void setMonitor(Monitor newMonitor) {
super.setMonitor(newMonitor);
if (getMonitor() != null) {
getMonitor().addNeuralNetListener(this,false);
}
}
/** <P>Changes the learning rate for this synapse depending in the current epoch number.
* The learning rate is changed in the following way ... </P>
* <P>User setup learning rate * exp(-(double)(Current Cycle/Time Constant)).</P>
* @param e The original Net Event.
*/
public void cicleTerminated(NeuralNetEvent e) {
int currentCycle = getMonitor().getTotCicles() - getMonitor().getCurrentCicle();
if (currentCycle < getOrderingPhase())
// This method will start at the user defined learning rate then reduce exponentially.
currentLearningRate = getMonitor().getLearningRate() * Math.exp(-(currentCycle/getTimeConstant()));
else
currentLearningRate = 0.01;
}
/** Not implemented.
* @param e The original Net Event.
*/
public void errorChanged(NeuralNetEvent e) {
}
/** Initialises any shape sizes such as circular radius and time constant before possible training.
* @param e The original Net Event.
*/
public void netStarted(NeuralNetEvent e) {
currentLearningRate = getMonitor().getLearningRate();
}
/** Not implemented.
* @param e The original Net Event.
*/
public void netStopped(NeuralNetEvent e) {
}
/** Not implemented.
* @param e The original Net Event.
* @param error The error that caused this NetStoppedError event.
*/
public void netStoppedError(NeuralNetEvent e, String error) {
}
/** <P>Check that there are no errors or problems with the properties of this
* KohonenSynapse.</P>
* @return The TreeSet of errors / problems if any.
*/
public TreeSet check() {
TreeSet checks = super.check();
return checks;
}
/** Getter for property orderingPhase.
* @return Value of property orderingPhase.
*
*/
public int getOrderingPhase() {
return orderingPhase;
}
    /** Setter for property orderingPhase.
     * @param orderingPhase New value of property orderingPhase, i.e. the
     * number of cycles of the exponential learning rate decay phase.
     */
    public void setOrderingPhase(int orderingPhase) {
        this.orderingPhase = orderingPhase;
    }
/** Getter for property timeConstant.
* @return Value of property timeConstant.
*
*/
public double getTimeConstant() {
return timeConstant;
}
    /** Setter for property timeConstant.
     * @param timeConstant New value of property timeConstant, the divisor
     * applied to the cycle count inside the exponential learning rate decay.
     */
    public void setTimeConstant(double timeConstant) {
        this.timeConstant = timeConstant;
    }
    /** @deprecated - Used only for backward compatibility
     */
    public Learner getLearner() {
        // Side effect: clears the learnable flag before delegating —
        // presumably to force the legacy learner code path; verify against
        // the superclass implementation.
        learnable = false;
        return super.getLearner();
    }
private void readObject(java.io.ObjectInputStream in)
throws java.io.IOException, java.lang.ClassNotFoundException {
in.defaultReadObject();
if (getMonitor()!=null) {
getMonitor().addNeuralNetListener(this, false); // Add this synapse as a net listener.
}
}
} | Java |
package org.joone.engine;
import java.util.ArrayList;
import java.util.Collection;
import org.joone.net.NetCheck;
import java.util.TreeSet;
import org.joone.inspection.implementations.BiasInspection;
/** Delay unit to create temporal windows from time series <BR>
* <CODE>
* O---> Yk(t-N) <BR>
* | <BR>
* ... <BR>
* | <BR>
* O---> Yk(t-1) <BR>
* | <BR>
* O---> Yk(t) <BR>
* | <BR>
* |<--------- Xk(t) <BR>
* </CODE>
* <BR>
* Where:<BR>
* Xk = Input signal <BR>
* Yk(t)...Yk(t-N+1) = Values of the output temporal window <BR>
* N = taps
*/
public class DelayLayer extends MemoryLayer {
    private static final long serialVersionUID = 1547734529107850525L;
    /** Constructor method
     */
    public DelayLayer() {
        super();
    }
    /** Constructor method
     * @param ElemName The layer's name
     */
    public DelayLayer(java.lang.String ElemName) {
        super(ElemName);
    }
    /** Back-propagates the gradients through the delay line. For each row x
     * the back-memory cells are shifted one tap towards index x while the
     * incoming gradient for each cell is accumulated, so gradientOuts[x]
     * gathers the contributions of the whole temporal window.
     * @param pattern the back propagated gradients, one value per output cell.
     */
    protected void backward(double[] pattern) {
        int x;
        int y;
        int ncell;
        int length = getRows();
        for (x = 0; x < length; ++x) {
            ncell = x;
            for (y = 0; y < getTaps(); ++y) {
                // Pull the accumulated gradient one tap forward, then add
                // the gradient arriving for this cell.
                backmemory[ncell] = backmemory[ncell + length];
                backmemory[ncell] += pattern[ncell];
                ncell += length;
            }
            // The oldest tap has no successor: take the incoming gradient as is.
            backmemory[ncell] = pattern[ncell];
            gradientOuts[x] = backmemory[x];
        }
    }
    /** Pushes the new input pattern into the delay line. Existing memory
     * values are shifted one tap towards the older positions (walking from
     * the oldest tap down) and the fresh input is stored at tap 0, producing
     * the output window Yk(t) ... Yk(t-N).
     * @param pattern the forwarded input, one value per row.
     */
    protected void forward(double[] pattern) {
        int x;
        int y;
        int ncell;
        int length = getRows();
        for (x = 0; x < length; ++x) {
            // Start at the oldest tap and move each value one tap back.
            ncell = x + getTaps() * length;
            for (y = getTaps(); y > 0; --y) {
                memory[ncell] = memory[ncell - length];
                outs[ncell] = memory[ncell];
                ncell -= length;
            }
            // Tap 0 receives the current input value.
            memory[x] = pattern[x];
            outs[x] = memory[x];
        }
    }
    /** Validates the layer's configuration: taps must be non-zero, and the
     * Monitor's preLearning parameter should equal taps + 1.
     * @return The TreeSet of errors / warnings, if any.
     */
    public TreeSet check() {
        TreeSet checks = super.check();
        if (getTaps() == 0) {
            checks.add(new NetCheck(NetCheck.FATAL, "The Taps parameter cannot be equal to zero.", this));
        }
        if (monitor != null && monitor.getPreLearning() != getTaps() + 1) {
            checks.add(new NetCheck(NetCheck.WARNING, "The correct value for the Monitor PreLearning parameter is Taps + 1", this));
        }
        return checks;
    }
    /**
     * It doesn't make sense to return biases for this layer, so a single
     * inspection built on a null bias matrix is returned.
     * @return a collection containing one BiasInspection constructed on null.
     */
    public Collection Inspections() {
        Collection col = new ArrayList();
        col.add(new BiasInspection(null));
        return col;
    }
}
/*
* DeltaRuleExtender.java
*
* Created on September 14, 2004, 9:32 AM
*/
package org.joone.engine.extenders;
/**
* This abstract class describes the methods needed for a delta rule extender,
* that is, a class that computes / changes the delta (update weight) value
* according to some algorithm.
*
* @author Boris Jansen
*/
public abstract class DeltaRuleExtender extends LearnerExtender {
    /** Creates a new instance of DeltaRuleExtender */
    public DeltaRuleExtender() {
    }
    /**
     * Computes the delta value for a bias.
     *
     * @param currentGradientOuts the back propagated gradients.
     * @param j the index of the bias.
     * @param aPreviousDelta a delta value calculated by a previous delta extender.
     * @return the delta (update) value for bias j.
     */
    public abstract double getDelta(double[] currentGradientOuts, int j, double aPreviousDelta);
    /**
     * Computes the delta value for a weight.
     *
     * @param currentInps the forwarded input.
     * @param j the input index of the weight.
     * @param currentPattern the back propagated gradients.
     * @param k the output index of the weight.
     * @param aPreviousDelta a delta value calculated by a previous delta extender.
     * @return the delta (update) value for the weight at (j, k).
     */
    public abstract double getDelta(double[] currentInps, int j, double[] currentPattern, int k, double aPreviousDelta);
}
| Java |
/*
* OnlineExtender.java
*
* Created on September 14, 2004, 1:53 PM
*/
package org.joone.engine.extenders;
/**
* This is the default weight updater (online). It stores the weights after each
* update.
*
* @author Boris Jansen
*/
public class OnlineModeExtender extends UpdateWeightExtender {
    /** Creates a new instance of OnlineModeExtender */
    public OnlineModeExtender() {
    }
    /** No post-processing is needed for biases in online mode. */
    public void postBiasUpdate(double[] currentGradientOuts) {
    }
    /** No post-processing is needed for weights in online mode. */
    public void postWeightUpdate(double[] currentPattern, double[] currentInps) {
    }
    /** No preparation is needed for biases in online mode. */
    public void preBiasUpdate(double[] currentGradientOuts) {
    }
    /** No preparation is needed for weights in online mode. */
    public void preWeightUpdate(double[] currentPattern, double[] currentInps) {
    }
    /** Applies the delta to bias j immediately and records the applied delta.
     * @param j the index of the bias.
     * @param aDelta the update value to apply.
     */
    public void updateBias(int j, double aDelta) {
        getLearner().getLayer().getBias().value[j][0] += aDelta;
        getLearner().getLayer().getBias().delta[j][0] = aDelta;
    }
    /** Applies the delta to weight (j, k) immediately and records the applied delta.
     * @param j the input index of the weight.
     * @param k the output index of the weight.
     * @param aDelta the update value to apply.
     */
    public void updateWeight(int j, int k, double aDelta) {
        getLearner().getSynapse().getWeights().value[j][k] += aDelta;
        getLearner().getSynapse().getWeights().delta[j][k] = aDelta;
    }
    /** Online mode persists the weights / biases on every single update.
     * @return always true.
     */
    public boolean storeWeightsBiases() {
        return true;
    }
}
| Java |
/*
* SimulatedAnnealingExtender.java
*
* Created on September 15, 2004, 1:18 PM
*/
package org.joone.engine.extenders;
/**
* Simulated annealing (SA) refers to the process in which random or thermal
* noise in a system is systematically decreased over time so as to enhance
* the system's response.
*
* Basically the change of weights and biases in SA is defined as:
* dW = dw + (n)(r)(2^-kt),
* where dw is the weight / bias change produced by standard back propagation,
* n is a constant controlling the initial intensity of the noise, k is the
* decay constant,t is the generation counter and r is a random number.
*
* @author Boris Jansen
*/
public class SimulatedAnnealingExtender extends DeltaRuleExtender {
    /** Constant controlling the initial intensity of the noise. */
    private double theN = 0.3; // default value
    /** The noise decay constant. */
    private double theK = 0.002; // default value
    /** The random number boundary. */
    private double theBoundary = 0.5; // default value, so the random number
    // is between <-0.5, 0.5>
    /** Creates a new instance of SimulatedAnnealingExtender */
    public SimulatedAnnealingExtender() {
    }
    /**
     * Adds annealing noise to the delta value for a bias, but only on cycles
     * in which the biases are actually stored.
     *
     * @param currentGradientOuts the back propagated gradients.
     * @param j the index of the bias.
     * @param aPreviousDelta a delta value calculated by a previous delta extender.
     * @return the delta value, with annealing noise added when stored this cycle.
     */
    public double getDelta(double[] currentGradientOuts, int j, double aPreviousDelta) {
        if(getLearner().getUpdateWeightExtender().storeWeightsBiases()) {
            // the biases will be stored this cycle, add noise
            aPreviousDelta += getNoise();
        }
        return aPreviousDelta;
    }
    /**
     * Adds annealing noise to the delta value for a weight, but only on cycles
     * in which the weights are actually stored.
     *
     * @param currentInps the forwarded input.
     * @param j the input index of the weight.
     * @param currentPattern the back propagated gradients.
     * @param k the output index of the weight.
     * @param aPreviousDelta a delta value calculated by a previous delta extender.
     * @return the delta value, with annealing noise added when stored this cycle.
     */
    public double getDelta(double[] currentInps, int j, double[] currentPattern, int k, double aPreviousDelta) {
        if(getLearner().getUpdateWeightExtender().storeWeightsBiases()) {
            // the weights will be stored this cycle, add noise
            aPreviousDelta += getNoise();
        }
        return aPreviousDelta;
    }
    /**
     * Computes the annealing noise term (n)(r)(2^-kt) for the current
     * generation t, where t grows as training proceeds so the noise decays
     * over time. Shared by both getDelta overloads.
     *
     * @return the noise value for the current cycle.
     */
    private double getNoise() {
        int myCycle = getLearner().getMonitor().getTotCicles()
            - getLearner().getMonitor().getCurrentCicle();
        return getN() * getRandom() * Math.pow(2, -1 * getK() * myCycle);
    }
    /** Nothing to do after a bias update. */
    public void postBiasUpdate(double[] currentGradientOuts) {
    }
    /** Nothing to do after a weight update. */
    public void postWeightUpdate(double[] currentPattern, double[] currentInps) {
    }
    /** Nothing to do before a bias update. */
    public void preBiasUpdate(double[] currentGradientOuts) {
    }
    /** Nothing to do before a weight update. */
    public void preWeightUpdate(double[] currentPattern, double[] currentInps) {
    }
    /**
     * Gets the constant controlling the initial noise.
     *
     * @return the constant controlling the initial noise.
     */
    public double getN() {
        return theN;
    }
    /**
     * Sets the constant controlling the initial noise.
     *
     * @param aN the constant controlling the initial noise.
     */
    public void setN(double aN) {
        theN = aN;
    }
    /**
     * Gets the noise decay constant.
     *
     * @return the noise decay constant.
     */
    public double getK() {
        return theK;
    }
    /**
     * Sets the noise decay constant.
     *
     * @param aK the noise decay constant.
     */
    public void setK(double aK) {
        theK = aK;
    }
    /**
     * Gets the random number boundary.
     *
     * @return the random number boundary.
     */
    public double getRandomBoundary() {
        return theBoundary;
    }
    /**
     * Sets the random number boundary.
     *
     * @param aBoundary the random number boundary.
     */
    public void setRandomBoundary(double aBoundary) {
        theBoundary = aBoundary;
    }
    /**
     * Gets a random value uniformly distributed between -boundary and +boundary.
     *
     * @return a random value between the random boundary.
     */
    protected double getRandom() {
        return Math.random() * 2 * getRandomBoundary() - getRandomBoundary();
    }
}
| Java |
/*
* BatchModeExtender.java
*
* Created on September 14, 2004, 11:39 AM
*/
package org.joone.engine.extenders;
import org.joone.engine.*;
/**
* This class implements the offline learning, that is, batch mode. Weights are
* updated after Monitor.getBatchSize() cycles.
*
* @author Boris Jansen
*/
public class BatchModeExtender extends UpdateWeightExtender {
    /** The batch size. This variable is mainly used for backward compatibility
     * with the old batch learner who had the method setBatchSize. */
    private int theBatchSize = -1; // -1 equals not set and retrieve batch size
    // value from monitor
    /** The number of rows (biases or input neurons to the synapses). */
    private int theRows = -1;
    /** The number of columns (output neurons of the synapses), */
    private int theColumns = -1;
    /** The matrix in which we save the updates before storing the weights (or biases)
     * to the network itself. */
    private Matrix theMatrix;
    /** The counter to check if we have reached batchsize cycles (if so, we need
     to store the weights).*/
    private int theCounter = 0;
    /** Creates a new instance of BatchModeExtender */
    public BatchModeExtender() {
    }
    /** After a bias update: if batchSize cycles have elapsed, folds the
     * accumulated deltas into the local bias values, stores a copy back into
     * the layer, and starts a new accumulation period.
     * @param currentGradientOuts the back propagated gradients (not used here).
     */
    public void postBiasUpdate(double[] currentGradientOuts) {
        if(storeWeightsBiases()) {
            for(int x = 0; x < theRows; ++x) {
                theMatrix.value[x][0] += theMatrix.delta[x][0]; // adjust bias
            }
            getLearner().getLayer().setBias((Matrix)theMatrix.clone()); // store updated biases
            resetDelta(theMatrix);
            theCounter = 0;
        }
    }
    /** After a weight update: if batchSize cycles have elapsed, folds the
     * accumulated deltas into the local weight values, stores a copy back into
     * the synapse, and starts a new accumulation period.
     * @param currentPattern the back propagated gradients (not used here).
     * @param currentInps the forwarded input (not used here).
     */
    public void postWeightUpdate(double[] currentPattern, double[] currentInps) {
        if(storeWeightsBiases()) {
            for(int x = 0; x < theRows; ++x) {
                for(int y = 0; y < theColumns; ++y) {
                    theMatrix.value[x][y] += theMatrix.delta[x][y]; // adjust weights
                }
            }
            getLearner().getSynapse().setWeights((Matrix)theMatrix.clone()); // store updated weights
            resetDelta(theMatrix);
            theCounter = 0;
        }
    }
    /** Before a bias update: (re)initializes the accumulation buffer when the
     * layer dimensions changed (also covers the very first call, since theRows
     * starts at -1), then counts this cycle towards the batch.
     * @param currentGradientOuts the back propagated gradients (not used here).
     */
    public void preBiasUpdate(double[] currentGradientOuts) {
        if(theRows != getLearner().getLayer().getRows()) {
            // dimensions have changed, so better start over
            initiateNewBatch();
        }
        theCounter++;
    }
    /** Before a weight update: (re)initializes the accumulation buffer when
     * the synapse dimensions changed (also covers the very first call), then
     * counts this cycle towards the batch.
     * @param currentPattern the back propagated gradients (not used here).
     * @param currentInps the forwarded input (not used here).
     */
    public void preWeightUpdate(double[] currentPattern, double[] currentInps) {
        if(theRows != getLearner().getSynapse().getInputDimension() ||
                theColumns != getLearner().getSynapse().getOutputDimension())
        {
            initiateNewBatch();
        }
        theCounter++;
    }
    /** Accumulates a bias delta into the local buffer; nothing is written to
     * the network until the batch completes.
     * @param i the index of the bias.
     * @param aDelta the delta value to accumulate.
     */
    public void updateBias(int i, double aDelta) {
        theMatrix.delta[i][0] += aDelta; // update the delta in our local copy
    }
    /** Accumulates a weight delta into the local buffer; nothing is written to
     * the network until the batch completes.
     * @param j the input index of the weight.
     * @param k the output index of the weight.
     * @param aDelta the delta value to accumulate.
     */
    public void updateWeight(int j, int k, double aDelta) {
        theMatrix.delta[j][k] += aDelta; // update the delta in our local copy
    }
    /**
     * Resets delta values to zero.
     *
     * @param aMatrix the matrix for which we need to set the delta values to zero.
     */
    protected void resetDelta(Matrix aMatrix) {
        // reset the delta values to 0
        for(int r = 0; r < aMatrix.delta.length; r++) {
            for(int c = 0; c < aMatrix.delta[0].length; c++) {
                aMatrix.delta[r][c] = 0;
            }
        }
    }
    /**
     * Initiates a new batch (at the beginning or when the dimensions change).
     * Snapshots the current biases (layer) or weights (synapse) into the local
     * buffer. In the layer branch theColumns is left untouched: biases occupy
     * a single column, so only theRows is needed.
     */
    protected void initiateNewBatch() {
        if (getLearner().getLayer() != null) {
            theRows = getLearner().getLayer().getRows();
            theMatrix = (Matrix)getLearner().getLayer().getBias().clone(); // get a copy
        } else if (getLearner().getSynapse() != null) {
            theRows = getLearner().getSynapse().getInputDimension();
            theColumns = getLearner().getSynapse().getOutputDimension();
            theMatrix = (Matrix)getLearner().getSynapse().getWeights().clone(); // get a copy
        }
        resetDelta(theMatrix);
        theCounter = 0;
    }
    /**
     * Sets the batchsize. Used for backward compatibility. Use monitor.setBatchSize()
     * instead.
     *
     * @param aBatchSize the new batchsize.
     * @deprecated use monitor.setBatchSize()
     */
    public void setBatchSize(int aBatchSize) {
        theBatchSize = aBatchSize;
    }
    /**
     * Gets the batchsize. Used for backward compatibility. Use monitor.getBatchSize()
     * instead. Falls back to the Monitor's batch size when no local value was set.
     *
     * @return the batch size.
     * @deprecated use monitor.getBatchSize()
     */
    public int getBatchSize() {
        if(theBatchSize < 0) {
            return getLearner().getMonitor().getBatchSize();
        }
        return theBatchSize;
    }
    /** Indicates whether the accumulated updates should be written back this
     * cycle, i.e. whether a full batch has been counted.
     * @return true when at least batchSize cycles have been accumulated.
     */
    public boolean storeWeightsBiases() {
        return theCounter >= getBatchSize();
    }
}
| Java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.