code stringlengths 3 1.18M | language stringclasses 1 value |
|---|---|
/*
* WeightDecayExtender.java
*
* Created on March 7, 2006, 4:43 PM
*/
package org.joone.engine.extenders;
/**
* Weight decay adds a penalty term to the error function. The penalty term
* penalizes large weights. The weight decay penalty term causes the weights to
* converge to smaller absolute values than they otherwise would. Smaller weights
* are expected to improve generalization.
*
* The update formula is changed in:
* Dw(t+1) = dw(t+1) - d x w(t)
*
* d is a weight decay value.
*
*
* @author boris
*/
public class WeightDecayExtender extends DeltaRuleExtender {

    /** The decay parameter (d) of the penalty term. */
    private double decay;

    /** Creates a new instance of WeightDecayExtender */
    public WeightDecayExtender() {
    }

    /**
     * Applies the weight decay penalty to the delta of a bias:
     * Dw(t+1) = dw(t+1) - d x w(t). The penalty is only applied during
     * cycles in which the biases are actually stored.
     *
     * @param currentGradientOuts the back propagated gradients (not used here).
     * @param j the index of the bias.
     * @param aPreviousDelta the delta computed by previous delta extenders.
     * @return the delta with the decay penalty subtracted.
     */
    public double getDelta(double[] currentGradientOuts, int j, double aPreviousDelta) {
        boolean myStoreCycle = getLearner().getUpdateWeightExtender().storeWeightsBiases();
        if (myStoreCycle) {
            double myBias = getLearner().getLayer().getBias().value[j][0];
            aPreviousDelta -= getDecay() * myBias;
        }
        return aPreviousDelta;
    }

    /**
     * Applies the weight decay penalty to the delta of a weight:
     * Dw(t+1) = dw(t+1) - d x w(t). The penalty is only applied during
     * cycles in which the weights are actually stored.
     *
     * @param currentInps the forwarded input (not used here).
     * @param j the input index of the weight.
     * @param currentPattern the back propagated gradients (not used here).
     * @param k the output index of the weight.
     * @param aPreviousDelta the delta computed by previous delta extenders.
     * @return the delta with the decay penalty subtracted.
     */
    public double getDelta(double[] currentInps, int j, double[] currentPattern, int k, double aPreviousDelta) {
        boolean myStoreCycle = getLearner().getUpdateWeightExtender().storeWeightsBiases();
        if (myStoreCycle) {
            double myWeight = getLearner().getSynapse().getWeights().value[j][k];
            aPreviousDelta -= getDecay() * myWeight;
        }
        return aPreviousDelta;
    }

    /** No post-processing needed after the biases are updated. */
    public void postBiasUpdate(double[] currentGradientOuts) {
    }

    /** No post-processing needed after the weights are updated. */
    public void postWeightUpdate(double[] currentPattern, double[] currentInps) {
    }

    /** No pre-processing needed before the biases are updated. */
    public void preBiasUpdate(double[] currentGradientOuts) {
    }

    /** No pre-processing needed before the weights are updated. */
    public void preWeightUpdate(double[] currentPattern, double[] currentInps) {
    }

    /**
     * Sets the decay parameter.
     *
     * @param aDecay the decay parameter value.
     */
    public void setDecay(double aDecay) {
        this.decay = aDecay;
    }

    /**
     * Gets the decay parameter.
     *
     * @return the decay parameter.
     */
    public double getDecay() {
        return this.decay;
    }
}
| Java |
/*
* RpropExtender.java
*
* Created on September 14, 2004, 3:29 PM
*/
package org.joone.engine.extenders;
import org.joone.engine.RpropParameters;
import org.joone.log.*;
/**
* This class changes the delta value in such a way that it implements the
* RPROP algorithm.
*
* @author Boris Jansen
*/
public class RpropExtender extends DeltaRuleExtender {
    // Note the gradient passed by the ExtendableLearner is multiplied by
    // the learning rate. However, the RPROP learning algorithm looks only at
    // the sign of the gradient. So as long as the learning rate is positive
    // there is no problem.

    /** Logger */
    private static final ILogger log = LoggerFactory.getLogger(RpropExtender.class);

    /**
     * Each weight has its own individual update-value (delta_ij(t)) represented
     * by the next object.
     *
     * The weight update deltaW_ij(t) is defined as follows (dE(t) / dW_ij is the
     * summed gradient for a single epoch):
     *                | -delta_ij(t), if dE(t) / dW_ij > 0
     * deltaW_ij(t) = |  delta_ij(t), if dE(t) / dW_ij < 0
     *                |  0 otherwise
     *
     * The delta_ij values are updated as follows:
     *               | eta_inc * delta_ij(t-1), if dE(t-1)/ dW_ij * dE(t)/ dW_ij > 0
     * delta_ij(t) = | eta_dec * delta_ij(t-1), if dE(t-1)/ dW_ij * dE(t)/ dW_ij < 0
     *               | delta_ij(t-1), otherwise
     * where 0 < eta_dec < 1 < eta_inc
     */
    protected double[][] theDeltas;

    /** The gradient pattern of the previous epoch (dE(t-1)/dW_ij). */
    protected double[][] thePreviousGradients;

    /** The parameters for the RPROP learning algorithm. */
    protected RpropParameters theRpropParameters;

    /** The current sum of the gradients of all patterns seen so far. The number
     * of summed gradients is smaller than the "batch size" and will be reset to zero
     * if the number of sums becomes equal to "batch size" after the weights/biases
     * have been updated. */
    protected double[][] theSummedGradients;

    /** Creates a new instance of RpropExtender */
    public RpropExtender() {
    }

    /**
     * (Re)Initializes this RPROP learner. Allocates the gradient / delta
     * matrices sized for either the layer's biases (rows x 1) or the synapse's
     * weights (input x output), and seeds every delta with the configured
     * initial delta value.
     */
    public void reinit() {
        if(getLearner().getMonitor().getLearningRate() != 1) {
            // RPROP only uses the sign of the gradient; a learning rate != 1
            // scales the summed gradients and is almost certainly unintended.
            log.warn("RPROP learning rate should be equal to 1.");
        }
        if(getLearner().getLayer() != null) {
            // Bias mode: one column per bias.
            thePreviousGradients = new double[getLearner().getLayer().getRows()][1];
            theSummedGradients = new double[thePreviousGradients.length][1];
            theDeltas = new double[thePreviousGradients.length][1];
        } else if (getLearner().getSynapse() != null) {
            // Weight mode: full input x output matrix.
            int myRows = getLearner().getSynapse().getInputDimension();
            int myCols = getLearner().getSynapse().getOutputDimension();
            thePreviousGradients = new double[myRows][myCols];
            theSummedGradients = new double[myRows][myCols];
            theDeltas = new double[myRows][myCols];
        }
        for(int i = 0; i < theDeltas.length; i++) {
            for(int j = 0; j < theDeltas[0].length; j++) {
                theDeltas[i][j] = getParameters().getInitialDelta(i, j);
            }
        }
    }

    /**
     * Computes the RPROP delta for a bias. Gradients are accumulated until the
     * cycle in which the biases are stored; only then is a (non-zero) delta
     * returned and the accumulator reset.
     *
     * @param currentGradientOuts the back propagated gradients.
     * @param j the index of the bias.
     * @param aPreviousDelta the delta computed by previous delta extenders
     *        (interpreted here as the negated gradient contribution).
     * @return the RPROP update for bias j, or 0 while still accumulating.
     */
    public double getDelta(double[] currentGradientOuts, int j, double aPreviousDelta) {
        // we will hold our delta's in memory ourselves; only when the weights will be
        // stored we will pass on the calculated delta value !! Therefore, some
        // DeltaExtenders executed after this delta might not work correctly...
        // Please think about the order of the delta extenders... !!
        double myDelta = 0;
        // Note:
        // dE/dw = sum(dE/de * de/dy * ...) *...
        // de/dy = -1, however * -1 is neglected, so aCurrentGradientOuts has a different sign than dE/dw
        // we fix this here by multiplying aCurrentGradientOuts by -1.0
        // remove -> theSummedGradients[i][0] += -1.0 * aCurrentGradientOuts[i];
        // theSummedGradients[i][0] += -1.0 * getGradientBias(aCurrentGradientOuts, i);
        theSummedGradients[j][0] -= aPreviousDelta;
        if(getLearner().getUpdateWeightExtender().storeWeightsBiases()) {
            // biases will be stored this cycle
            if(thePreviousGradients[j][0] * theSummedGradients[j][0] > 0) {
                // same sign as last epoch: accelerate (bounded by max delta)
                theDeltas[j][0] = Math.min(theDeltas[j][0] * getParameters().getEtaInc(),
                getParameters().getMaxDelta());
                myDelta = -1.0 * sign(theSummedGradients[j][0]) * theDeltas[j][0];
                thePreviousGradients[j][0] = theSummedGradients[j][0];
            } else if(thePreviousGradients[j][0] * theSummedGradients[j][0] < 0) {
                // sign flipped: decelerate (bounded by min delta)
                theDeltas[j][0] = Math.max(theDeltas[j][0] * getParameters().getEtaDec(),
                getParameters().getMinDelta());
                // sign changed -> the previous step was too large and the minimum was missed,
                // the previous weight-update is reverted
                myDelta = -1.0 * getLearner().getLayer().getBias().delta[j][0];
                // due to the backtracking step the derivative is supposed to change its sign
                // again in the following step. To prevent double punishment we set the
                // gradient to 0
                thePreviousGradients[j][0] = 0;
            } else {
                // one of the gradients is zero: step without adapting the delta
                myDelta = -1.0 * sign(theSummedGradients[j][0]) * theDeltas[j][0];
                thePreviousGradients[j][0] = theSummedGradients[j][0];
            }
            theSummedGradients[j][0] = 0; // reset to zero so we can start summing up again...
        }
        return myDelta;
    }

    /**
     * Computes the RPROP delta for a weight. See the bias version of
     * {@link #getDelta(double[], int, double)} for the detailed comments;
     * the algorithm is identical, applied to weight (j, k).
     *
     * @param currentInps the forwarded input.
     * @param j the input index of the weight.
     * @param currentPattern the back propagated gradients.
     * @param k the output index of the weight.
     * @param aPreviousDelta the delta computed by previous delta extenders.
     * @return the RPROP update for weight (j, k), or 0 while still accumulating.
     */
    public double getDelta(double[] currentInps, int j, double[] currentPattern, int k, double aPreviousDelta) {
        // read comments getDelta (for bias)...
        double myDelta = 0;
        // * -1.0, because de/dy = -1, but is neglected in aCurrentPattern
        // remove -> theSummedGradients[i][j] += aCurrentPattern[j] * aCurrentInps[i] * - 1.0;
        // theSummedGradients[i][j] += -1.0 * getGradientWeights(aCurrentInps, i, aCurrentPattern, j);
        theSummedGradients[j][k] -= aPreviousDelta;
        if(getLearner().getUpdateWeightExtender().storeWeightsBiases()) {
            // weights will be stored this cycle
            if(thePreviousGradients[j][k] * theSummedGradients[j][k] > 0) {
                theDeltas[j][k] = Math.min(theDeltas[j][k] * getParameters().getEtaInc(),
                getParameters().getMaxDelta());
                myDelta = -1.0 * sign(theSummedGradients[j][k]) * theDeltas[j][k];
                thePreviousGradients[j][k] = theSummedGradients[j][k];
            } else if(thePreviousGradients[j][k] * theSummedGradients[j][k] < 0) {
                theDeltas[j][k] = Math.max(theDeltas[j][k] * getParameters().getEtaDec(),
                getParameters().getMinDelta());
                // sign changed -> the previous step was too large and the minimum was missed,
                // the previous weight-update is reverted
                myDelta = -1.0 * getLearner().getSynapse().getWeights().delta[j][k];
                // due to the backtracking step the derivative is supposed to change its sign
                // again in the following step. To prevent double punishment we set the
                // gradient to 0
                thePreviousGradients[j][k] = 0;
            } else {
                myDelta = -1.0 * sign(theSummedGradients[j][k]) * theDeltas[j][k];
                thePreviousGradients[j][k] = theSummedGradients[j][k];
            }
            theSummedGradients[j][k] = 0;
        }
        return myDelta;
    }

    /** No post-processing needed after the biases are updated. */
    public void postBiasUpdate(double[] currentGradientOuts) {
    }

    /** No post-processing needed after the weights are updated. */
    public void postWeightUpdate(double[] currentPattern, double[] currentInps) {
    }

    /** Lazily (re)allocates the internal matrices before a bias update. */
    public void preBiasUpdate(double[] currentGradientOuts) {
        if(theDeltas == null || theDeltas.length != getLearner().getLayer().getRows()) {
            // first time or dimensions have changed
            reinit();
        }
    }

    /** Lazily (re)allocates the internal matrices before a weight update. */
    public void preWeightUpdate(double[] currentPattern, double[] currentInps) {
        if(theDeltas == null || theDeltas.length != getLearner().getSynapse().getInputDimension()
        || theDeltas[0].length != getLearner().getSynapse().getOutputDimension())
        {
            // first time or dimensions have changed
            reinit();
        }
    }

    /**
     * Gets the parameters of this learning algorithm.
     *
     * @return the parameters of this learning algorithm.
     */
    public RpropParameters getParameters() {
        if(theRpropParameters == null) {
            // create default parameters
            theRpropParameters = new RpropParameters();
        }
        return theRpropParameters;
    }

    /**
     * Sets the parameters for this learning algorithm.
     *
     * @param aParameters the parameters for this learning algorithm.
     */
    public void setParameters(RpropParameters aParameters) {
        theRpropParameters = aParameters;
    }

    /**
     * Gets the sign of a double.
     *
     * @return the sign of a double (-1, 0, 1).
     */
    protected double sign(double d) {
        if(d > 0) {
            return 1.0;
        } else if(d < 0) {
            return -1.0;
        }
        return 0;
    }
}
| Java |
/*
* LearnerExtender.java
*
* Created on September 14, 2004, 9:32 AM
*/
package org.joone.engine.extenders;
import org.joone.engine.*;
/**
* This abstract class describes the methods that any learner extender must
* provide.
*
* @author Boris Jansen
*/
public abstract class LearnerExtender {

    /** This flag holds the mode of the learner extender (true for enabled,
     * false for disabled). */
    private boolean theMode = true;

    /** The learner this object is extending. */
    private ExtendableLearner theLearner;

    /**
     * Sets the learner. This way the extender has a reference to the learner.
     *
     * @param aLearner the learner this object is extending.
     */
    public void setLearner(ExtendableLearner aLearner) {
        theLearner = aLearner;
    }

    /**
     * Gets the learner this object is extending.
     *
     * @return the learner this object is extending.
     */
    protected ExtendableLearner getLearner() {
        return theLearner;
    }

    /**
     * Checks if the learner extender is enabled.
     *
     * @return true if the extender is enabled, false otherwise.
     */
    public boolean isEnabled() {
        return theMode;
    }

    /**
     * Sets the mode of this extender.
     *
     * @param aMode true for enabled, false for disabled.
     */
    public void setEnabled(boolean aMode) {
        theMode = aMode;
    }

    /**
     * Gives extenders a chance to do some pre-computing before the
     * biases are updated.
     *
     * @param currentGradientOuts the back propagated gradients.
     */
    public abstract void preBiasUpdate(double[] currentGradientOuts);

    /**
     * Gives extenders a chance to do some post-computing after the
     * biases are updated.
     *
     * @param currentGradientOuts the back propagated gradients.
     */
    public abstract void postBiasUpdate(double[] currentGradientOuts);

    /**
     * Gives extenders a chance to do some pre-computing before the
     * weights are updated.
     *
     * @param currentPattern the back propagated gradients.
     * @param currentInps the forwarded input.
     */
    public abstract void preWeightUpdate(double[] currentPattern, double[] currentInps);

    /**
     * Gives extenders a chance to do some post-computing after the
     * weights are updated.
     *
     * @param currentPattern the back propagated gradients.
     * @param currentInps the forwarded input.
     */
    public abstract void postWeightUpdate(double[] currentPattern, double[] currentInps);
}
| Java |
/*
* UpdateWeightExtender.java
*
* Created on September 14, 2004, 10:10 AM
*/
package org.joone.engine.extenders;
import java.util.*;
/**
* This abstract class describes the methods needed for a update weight extender, that is,
* a class that updates weights (storing) according to some algorithm (e.g. batch mode).
*
* @author Boris Jansen
*/
public abstract class UpdateWeightExtender extends LearnerExtender {

    /** Creates a new instance of UpdateWeightExtender */
    public UpdateWeightExtender() {
    }

    /**
     * Updates a bias with the calculated delta value.
     *
     * @param i the index of the bias to update.
     * @param aDelta the calculated delta value.
     */
    public abstract void updateBias(int i, double aDelta);

    /**
     * Updates a weight with the calculated delta value.
     *
     * @param j the input index of the weight to update.
     * @param k the output index of the weight to update.
     * @param aDelta the calculated delta value.
     */
    public abstract void updateWeight(int j, int k, double aDelta);

    /**
     * Checks if the weights or biases will be stored this cycle. Delta
     * extenders use this to know when an update will actually be committed
     * (e.g. at the end of a batch) as opposed to merely accumulated.
     *
     * @return true if the weights or biases will be stored this cycle, false
     * otherwise.
     */
    public abstract boolean storeWeightsBiases();
}
| Java |
package org.joone.engine.extenders;
/**
* This abstract class describes the methods needed for a gradient extender,
* that is, a class that computes / changes the gradient value according to
* some algorithm.
*
* @author Boris Jansen
*/
public abstract class GradientExtender extends LearnerExtender {

    /** Creates a new instance of GradientExtender */
    public GradientExtender() {
    }

    /**
     * Computes the gradient value for a bias.
     *
     * @param currentGradientOuts the back propagated gradients.
     * @param j the index of the bias.
     * @param aPreviousGradient a gradient value calculated by a previous
     * gradient extender.
     * @return the (possibly adjusted) gradient value for bias j.
     */
    public abstract double getGradientBias(double[] currentGradientOuts, int j, double aPreviousGradient);

    /**
     * Computes the gradient value for a weight.
     *
     * @param currentInps the forwarded input.
     * @param j the input index of the weight.
     * @param currentPattern the back propagated gradients.
     * @param k the output index of the weight.
     * @param aPreviousGradient a gradient value calculated by a previous gradient extender.
     * @return the (possibly adjusted) gradient value for weight (j, k).
     */
    public abstract double getGradientWeight(double[] currentInps, int j, double[] currentPattern, int k, double aPreviousGradient);
}
| Java |
/*
* MomentumExtender.java
*
* Created on September 14, 2004, 11:18 AM
*/
package org.joone.engine.extenders;
// import org.joone.log.*;
/**
* This extender implements the momentum term.
*
* @author Boris Jansen
*/
public class MomentumExtender extends DeltaRuleExtender {

    /** Creates a new instance of MomentumExtender */
    public MomentumExtender() {
    }

    /**
     * Adds the momentum term to the delta of a bias. The term is the
     * monitor's momentum coefficient multiplied by the bias's previous
     * update, and is only added during cycles in which the biases are
     * actually stored.
     *
     * @param currentGradientOuts the back propagated gradients (not used here).
     * @param j the index of the bias.
     * @param aPreviousDelta the delta computed by previous delta extenders.
     * @return the delta with the momentum term added.
     */
    public double getDelta(double[] currentGradientOuts, int j, double aPreviousDelta) {
        if (!getLearner().getUpdateWeightExtender().storeWeightsBiases()) {
            return aPreviousDelta;
        }
        // the biases will be stored this cycle, add momentum
        double myMomentum = getLearner().getMonitor().getMomentum();
        double myLastUpdate = getLearner().getLayer().getBias().delta[j][0];
        return aPreviousDelta + myMomentum * myLastUpdate;
    }

    /**
     * Adds the momentum term to the delta of a weight. The term is the
     * monitor's momentum coefficient multiplied by the weight's previous
     * update, and is only added during cycles in which the weights are
     * actually stored.
     *
     * @param currentInps the forwarded input (not used here).
     * @param j the input index of the weight.
     * @param currentPattern the back propagated gradients (not used here).
     * @param k the output index of the weight.
     * @param aPreviousDelta the delta computed by previous delta extenders.
     * @return the delta with the momentum term added.
     */
    public double getDelta(double[] currentInps, int j, double[] currentPattern, int k, double aPreviousDelta) {
        if (!getLearner().getUpdateWeightExtender().storeWeightsBiases()) {
            return aPreviousDelta;
        }
        // the weights will be stored this cycle, add momentum
        double myMomentum = getLearner().getMonitor().getMomentum();
        double myLastUpdate = getLearner().getSynapse().getWeights().delta[j][k];
        return aPreviousDelta + myMomentum * myLastUpdate;
    }

    /** No post-processing needed after the biases are updated. */
    public void postBiasUpdate(double[] currentGradientOuts) {
    }

    /** No post-processing needed after the weights are updated. */
    public void postWeightUpdate(double[] currentPattern, double[] currentInps) {
    }

    /** No pre-processing needed before the biases are updated. */
    public void preBiasUpdate(double[] currentGradientOuts) {
    }

    /** No pre-processing needed before the weights are updated. */
    public void preWeightUpdate(double[] currentPattern, double[] currentInps) {
    }
}
| Java |
package org.joone.engine;
import java.io.*;
import org.joone.engine.weights.*;
import org.joone.log.*;
/**
* The Matrix object represents the connection matrix of the weights of a synapse
* or the biases of a layer. In case of a synapse, it contains the weight of each
* connection. In case of a layer, it contains the bias of each neuron.
* <p>
* Besides the weights or biases, it holds the last modification (update value or
* delta) and 2 boolean values indicating whether the weight is on or off and
* trainable or fixed.
*/
public class Matrix implements Serializable, Cloneable {
/** Logger for this class. */
private static final ILogger log = LoggerFactory.getLogger(Matrix.class);
/** This constant defines the boundaries of the default domain used for
* weight initialization. Weights or biases are initialised by default
* with a random value in the domain
* <code>[-DEFAULT_INITIAL, DEFAULT_INITIAL]</code>. Although different
* boundaries or even different weight intialization can be used by calling
* differnt constructors that that a <code>WeightInitializer</code> class
* as parameter or by calling the method {@link initialize()}.
*/
public static final double DEFAULT_INITIAL = 0.2;
private static final long serialVersionUID = -1392966842649908366L;
/** The values of the weights / biases. */
public double[][] value;
/** The value of the last modification, i.e. the last update. */
public double[][] delta;
/** Flag indicating whether the weight is on or off. */
public boolean[][] enabled;
/** Flag indicating whether the weight is fixed or trainable / adjustable. */
public boolean[][] fixed;
/** The number of rows. That is, in case of weights, the number of neurons on
* the input side of the synapse. In case of biases, the number of neurons. */
protected int m_rows;
/** The number of columns. That is, in case of weights, the number of neurons on
* the output side of the synapse. In case of biases, the value equals 0. */
protected int m_cols;
/** The weight initializer that is used by this class. */
protected WeightInitializer weightInitializer;
/**
* Default constructor
* Needed for Save as XML
*/
public Matrix() {
}
/**
* This constructur creates a weights or biases according to the values
* <code>aRows</code> and <code>aColumns</code>. The weights or biases
* are initialised with a random value in the domain of
* <code>[-DEFAULT_INITIAL, DEFAULT_INITIAL]</code>.
*
* @param aRows the number of rows (the number of neurons on the input side
* of a synapse or the number of biases).
* @param aColumns the number of colums (the number of neurons on the output
* side of a synapse or zero in case of biases).
*/
public Matrix(int aRows, int aColumns) {
this(aRows, aColumns, DEFAULT_INITIAL);
}
/**
* This constructur creates a weights or biases according to the values
* <code>aRows</code> and <code>aColumns</code>. And the weights or biases
* are initialized with a random value in the domain of
* <code>[-anInitial, anInitial]</code>.
*
* @param aRows the number of rows (the number of neurons on the input side
* of a synapse or the number of biases).
* @param aColumns the number of colums (the number of neurons on the output
* side of a synapse or zero in case of biases).
* @param anInitial the boundary of the domain within these weights or biases
* shoud be randomly initialized.
*/
public Matrix(int aRows, int aColumns, double anInitial) {
value = new double[aRows][aColumns];
delta = new double[aRows][aColumns];
enabled = new boolean[aRows][aColumns];
fixed = new boolean[aRows][aColumns];
m_rows = aRows;
m_cols = aColumns;
if(anInitial == 0.0) {
enableAll();
unfixAll();
setWeightInitializer(new RandomWeightInitializer(0), false);
clear();
} else {
enableAll();
unfixAll();
setWeightInitializer(new RandomWeightInitializer(anInitial));
}
}
/**
* Initializes the weights or biases by making a call to the weight initializer.
* The weight initializer can be set through {@link setWeightInitializer(WeightInitializer)}
*/
public void initialize() {
getWeightInitializer().initialize(this);
}
/**
* Sets the weight initializer and initializes the weights. This function calls
* setWeightInitializer(aWeightInitializer, true).
*
* @param aWeightInitializer the weight initializer to set.
*/
public void setWeightInitializer(WeightInitializer aWeightInitializer) {
setWeightInitializer(aWeightInitializer, true);
}
/**
* Sets the weight initializer.
*
* @param aWeightInitializer the weight initializer to set.
* @param anInitialize if true the weights will be initialized by the new
* weight initializer, if false the weights will not be initialized.
*/
public void setWeightInitializer(WeightInitializer aWeightInitializer, boolean anInitialize) {
weightInitializer = aWeightInitializer;
if(anInitialize) {
getWeightInitializer().initialize(this);
}
}
/**
* Gets the weight initializer.
*
* @return the weight initializer that is set for this matrix.
*/
public WeightInitializer getWeightInitializer() {
if (weightInitializer == null)
// Added for backward compatibility
weightInitializer = new RandomWeightInitializer(0.2);
return weightInitializer;
}
/**
* Clones this matrix object. It returns a copy of this matrix object.
*
* @return a copy of the current matrix object.
*/
public Object clone() {
Matrix o = null;
try {
o = (Matrix)super.clone();
} catch(CloneNotSupportedException e) {
log.error("Matrix can't clone", e);
}
o.value = (double[][])o.value.clone();
o.delta = (double[][])o.delta.clone();
o.enabled = (boolean[][])o.enabled.clone();
o.fixed = (boolean[][])o.fixed.clone();
for (int x = 0; x < m_rows; ++x) {
o.value[x] = (double[])o.value[x].clone();
o.delta[x] = (double[])o.delta[x].clone();
o.enabled[x] = (boolean[])o.enabled[x].clone();
o.fixed[x] = (boolean[])o.fixed[x].clone();
}
return o;
}
/**
* Adds noise to the weights. The noise that is added to the weights is
* within the domain <code>[-amplitude, amplitude]</code>.
*
* @param amplitude defines the domain of noise.
*/
public void addNoise(double amplitude) {
for (int x = 0; x < m_rows; ++x) {
for (int y = 0; y < m_cols; ++y) {
if (enabled[x][y] && !fixed[x][y]) {
value[x][y] += (-amplitude + Math.random() * (2 * amplitude));
}
}
}
}
/**
* Removes a row.
*
* @param aRow the row to remove.
*/
public void removeRow(int aRow) {
double [][] myValue = new double[m_rows - 1][];
double [][] myDelta = new double[m_rows - 1][];
boolean [][] myEnabled = new boolean[m_rows - 1][];
boolean [][] myFixed = new boolean[m_rows - 1][];
for(int x = 0; x < m_rows; x++) {
if(x < aRow) {
myValue[x] = (double[])value[x].clone();
myDelta[x] = (double[])delta[x].clone();
myEnabled[x] = (boolean[])enabled[x].clone();
myFixed[x] = (boolean[])fixed[x].clone();
} else if(x > aRow) {
myValue[x - 1] = (double[])value[x].clone();
myDelta[x - 1] = (double[])delta[x].clone();
myEnabled[x - 1] = (boolean[])enabled[x].clone();
myFixed[x - 1] = (boolean[])fixed[x].clone();
}
}
value = myValue;
delta = myDelta;
enabled = myEnabled;
fixed = myFixed;
m_rows--;
}
/**
* Removes a column.
*
* @param aColumn the column to remove.
*/
public void removeColumn(int aColumn) {
double [][] myValue = new double[m_rows][m_cols - 1];
double [][] myDelta = new double[m_rows][m_cols - 1];
boolean [][] myEnabled = new boolean[m_rows][m_cols - 1];
boolean [][] myFixed = new boolean[m_rows][m_cols - 1];
for(int x = 0; x < m_rows; x++) {
for(int y = 0; y < m_cols; y++) {
if(y < aColumn) {
myValue[x][y] = value[x][y];
myDelta[x][y] = delta[x][y];
myEnabled[x][y] = enabled[x][y];
myFixed[x][y] = fixed[x][y];
} else if(y > aColumn) {
myValue[x][y - 1] = value[x][y];
myDelta[x][y - 1] = delta[x][y];
myEnabled[x][y - 1] = enabled[x][y];
myFixed[x][y - 1] = fixed[x][y];
}
}
}
value = myValue;
delta = myDelta;
enabled = myEnabled;
fixed = myFixed;
m_cols--;
}
/**
* Clears (resets) the matrix object. The weights/ biases (values) and its
* delta values are reset to zero.
*/
public void clear() {
for (int x = 0; x < m_rows; ++x) {
for (int y = 0; y < m_cols; ++y) {
if (enabled[x][y] || !fixed[x][y]) {
value[x][y] = 0.0;
delta[x][y] = 0.0;
}
}
}
}
/**
* Enables all the weights (or biases) of this matrix.
*/
public void enableAll() {
for (int x = 0; x < m_rows; ++x) {
for (int y = 0; y < m_cols; ++y) {
enabled[x][y] = true;
}
}
}
/**
* Disables all the weights (or biases) of this matrix.
*/
public void disableAll() {
for (int x = 0; x < m_rows; ++x) {
for (int y = 0; y < m_cols; ++y) {
enabled[x][y] = false;
}
}
}
/**
* Fixes all the weights (or biases) of this matrix.
*/
public void fixAll() {
for (int x = 0; x < m_rows; ++x) {
for (int y = 0; y < m_cols; ++y) {
fixed[x][y] = true;
}
}
}
/**
* Unfixes all the weights (or biases) of this matrix.
*/
public void unfixAll() {
for (int x = 0; x < m_rows; ++x) {
for (int y = 0; y < m_cols; ++y) {
fixed[x][y] = false;
}
}
}
/**
* Gets <code>m_rows</code>. Needed for Save as XML
*
* @return <code>m_rows</code>
*/
public int getM_rows() {
return m_rows;
}
/**
* Sets <code>m_rows</code>. Needed for Save as XML
*
* @param newm_rows the new number of rows to set.
*/
public void setM_rows(int newm_rows) {
m_rows = newm_rows;
}
/**
* Gets <code>m_cols</code>. Needed for Save as XML
*
* @return <code>m_cols</code>
*/
public int getM_cols() {
return m_cols;
}
/**
* Sets <code>m_cols</code>. Needed for Save as XML
*
* @param newm_cols the new number of columns to set.
*/
public void setM_cols(int newm_cols) {
m_cols = newm_cols;
}
/**
* Gets <code>delta[][]</code>. Needed for Save as XML
*
* @return <code>delta[][]</code>
*/
public double[][] getDelta() {
return delta;
}
/**
* Sets <code>delta[][]</code>. Needed for Save as XML
*
* @param newdelta the new delta to set.
*/
public void setDelta(double[][] newdelta) {
delta = newdelta;
}
/**
* Gets <code>value[][]</code>. Needed for Save as XML
*
* @return <code>value[][]</code>
*/
public double[][] getValue() {
return value;
}
/**
* Sets <code>value[][]</code>. Needed for Save as XML
*
* @param newvalue the new values to set
*/
public void setValue(double[][] newvalue) {
value = newvalue;
}
/**
* Gets <code>fixed[][]</code>. Needed for Save as XML
*
* @return <code>fixed[][]</code>
*/
public boolean[][] getFixed() {
return fixed;
}
/**
* Sets <code>fixed</code>. Needed for Save as XML
*
* @param newfixed the new fixed values to set
*/
public void setFixed(boolean[][] newfixed) {
fixed = newfixed;
}
/**
* Gets <code>enabled</code>. Needed for Save as XML
*
* @return <code>enabled[][]</code>
*/
public boolean[][] getEnabled() {
return enabled;
}
/**
* Sets <code>enabled[][]</code>. Needed for Save as XML
*
* @param newenabled the new enabled values to set.
*/
public void setEnabled(boolean[][] newenabled) {
enabled = newenabled;
}
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
// The following code is to assure the backward compatibility with the old Matrix object
if (enabled == null) {
enabled = new boolean[m_rows][m_cols];
this.enableAll();
}
if (fixed == null) {
fixed = new boolean[m_rows][m_cols];
this.unfixAll();
}
}
} | Java |
/*
* SineLayer.java
*
* Created on October 12, 2004, 4:20 PM
*/
package org.joone.engine;
import org.joone.exception.JooneRuntimeException;
import org.joone.log.*;
/**
* The output of a sine layer neuron is the sum of the weighted input values,
* applied to a sine (<code>sin(x)</code>). Neurons with sine activation
* problems might be useful in problems with periodicity.
*
* @see SimpleLayer parent
* @see Layer parent
* @see NeuralLayer implemented interface
*
* @author Boris Jansen
*/
public class SineLayer extends SimpleLayer implements LearnableLayer {

    private static final long serialVersionUID = -2636086679111635756L;

    /** The logger for this class. */
    private static final ILogger log = LoggerFactory.getLogger(SineLayer.class);

    /** Creates a new instance of SineLayer */
    public SineLayer() {
        super();
        learnable = true;
    }

    /**
     * Creates a new instance of SineLayer
     *
     * @param aName The name of the layer
     */
    public SineLayer(String aName) {
        this();
        setLayerName(aName);
    }

    /**
     * Forward pass: each neuron outputs sin(input + bias).
     *
     * @param aPattern the forwarded input pattern.
     * @throws JooneRuntimeException if the pattern cannot be processed
     * (e.g. it is shorter than the number of rows).
     */
    protected void forward(double[] aPattern) throws JooneRuntimeException {
        int myRows = getRows();
        int i = 0;
        try {
            while (i < myRows) {
                outs[i] = Math.sin(aPattern[i] + getBias().value[i][0]);
                i++;
            }
        } catch (Exception anException) {
            String msg = "Exception thrown while processing the element " + i + " of the array. Value is : " + aPattern[i]
                + " Exception thrown is " + anException.getClass().getName() + ". Message is " + anException.getMessage();
            log.error(msg);
            throw new JooneRuntimeException(msg, anException);
        }
    }

    /**
     * Backward pass: scales each back propagated gradient by the derivative
     * of the activation, cos(input), and requests a bias update.
     *
     * @param aPattern the back propagated gradients.
     * @throws JooneRuntimeException propagated from the superclass.
     */
    public void backward(double[] aPattern) throws JooneRuntimeException {
        super.backward(aPattern);
        int myRows = getRows();
        for (int i = 0; i < myRows; i++) {
            gradientOuts[i] = aPattern[i] * Math.cos(inps[i]);
        }
        myLearner.requestBiasUpdate(gradientOuts);
    }
}
| Java |
/*
* RbfGaussianParameters.java
*
* Created on July 23, 2004, 1:46 PM
*/
package org.joone.engine;
import java.io.Serializable;
/**
* This class defines the parameters, like center, sigma, etc. for the Gaussian RBF.
*
* @author Boris Jansen
*/
public class RbfGaussianParameters implements Serializable {

    /** The mean (center) of the RBF. */
    private double[] theMean;

    /** The standard deviation (sigma). */
    private double theStdDeviation;

    /** Creates a new instance of RbfGaussianParameters */
    public RbfGaussianParameters() {
    }

    /**
     * Creates a new instance of RbfGaussianParameters.
     *
     * @param aMean the mean.
     * @param aStdDeviation the standard deviation.
     */
    public RbfGaussianParameters(double[] aMean, double aStdDeviation) {
        this.theMean = aMean;
        this.theStdDeviation = aStdDeviation;
    }

    /**
     * Gets the mean (center) of the Gaussian RBF.
     *
     * @return the mean of the Gaussian RBF.
     */
    public double[] getMean() {
        return this.theMean;
    }

    /**
     * Sets the mean (center) of the Gaussian RBF.
     *
     * @param aMean the new mean to set.
     */
    public void setMean(double[] aMean) {
        this.theMean = aMean;
    }

    /**
     * Gets the standard deviation (sigma) of the Gaussian RBF.
     *
     * @return the standard deviation of the Gaussian RBF.
     */
    public double getStdDeviation() {
        return this.theStdDeviation;
    }

    /**
     * Sets the standard deviation (sigma) of the Gaussian RBF.
     *
     * @param aStdDeviation the new standard deviation to set.
     */
    public void setStdDeviation(double aStdDeviation) {
        this.theStdDeviation = aStdDeviation;
    }
}
| Java |
package org.joone.engine;
import java.beans.*;
public class LayerBeanInfo extends SimpleBeanInfo {
// Bean descriptor//GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( org.joone.engine.Layer.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers//GEN-FIRST:Properties
private static final int PROPERTY_allInputs = 0;
private static final int PROPERTY_allOutputs = 1;
private static final int PROPERTY_bias = 2;
private static final int PROPERTY_inputLayer = 3;
private static final int PROPERTY_layerName = 4;
private static final int PROPERTY_learner = 5;
private static final int PROPERTY_monitor = 6;
private static final int PROPERTY_outputLayer = 7;
private static final int PROPERTY_rows = 8;
private static final int PROPERTY_running = 9;
// Property array
/*lazy PropertyDescriptor*/
/**
 * Lazily builds the <code>PropertyDescriptor</code> array for the
 * {@link org.joone.engine.Layer} bean. Generated by the NetBeans BeanInfo
 * editor; customizations belong between the GEN-HEADEREND and GEN-LAST markers.
 *
 * @return the descriptors of the properties exposed to visual editors.
 */
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[10];
try {
properties[PROPERTY_allInputs] = new PropertyDescriptor ( "allInputs", org.joone.engine.Layer.class, "getAllInputs", "setAllInputs" );
properties[PROPERTY_allInputs].setExpert ( true );
properties[PROPERTY_allOutputs] = new PropertyDescriptor ( "allOutputs", org.joone.engine.Layer.class, "getAllOutputs", "setAllOutputs" );
properties[PROPERTY_allOutputs].setExpert ( true );
properties[PROPERTY_bias] = new PropertyDescriptor ( "bias", org.joone.engine.Layer.class, "getBias", "setBias" );
properties[PROPERTY_bias].setExpert ( true );
properties[PROPERTY_inputLayer] = new PropertyDescriptor ( "inputLayer", org.joone.engine.Layer.class, "isInputLayer", null );
properties[PROPERTY_inputLayer].setExpert ( true );
properties[PROPERTY_layerName] = new PropertyDescriptor ( "layerName", org.joone.engine.Layer.class, "getLayerName", "setLayerName" );
properties[PROPERTY_layerName].setDisplayName ( "Name" );
properties[PROPERTY_learner] = new PropertyDescriptor ( "learner", org.joone.engine.Layer.class, "getLearner", null );
properties[PROPERTY_learner].setExpert ( true );
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", org.joone.engine.Layer.class, "getMonitor", "setMonitor" );
properties[PROPERTY_monitor].setExpert ( true );
properties[PROPERTY_outputLayer] = new PropertyDescriptor ( "outputLayer", org.joone.engine.Layer.class, "isOutputLayer", null );
properties[PROPERTY_outputLayer].setExpert ( true );
properties[PROPERTY_rows] = new PropertyDescriptor ( "rows", org.joone.engine.Layer.class, "getRows", "setRows" );
properties[PROPERTY_running] = new PropertyDescriptor ( "running", org.joone.engine.Layer.class, "isRunning", null );
properties[PROPERTY_running].setExpert ( true );
}
catch( IntrospectionException e) {
// Don't swallow introspection failures (e.g. a renamed accessor) silently:
// a failure would leave null slots in the array. Report it, consistently
// with KohonenSynapseBeanInfo elsewhere in this package.
e.printStackTrace();
}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
// Lazily builds the EventSetDescriptor array. The Layer bean fires no
// bean events, hence the zero-length array (generated code).
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers//GEN-FIRST:Methods
private static final int METHOD_addInputSynapse0 = 0;
private static final int METHOD_addNoise1 = 1;
private static final int METHOD_addOutputSynapse2 = 2;
private static final int METHOD_copyInto3 = 3;
private static final int METHOD_randomize4 = 4;
private static final int METHOD_removeAllInputs5 = 5;
private static final int METHOD_removeAllOutputs6 = 6;
private static final int METHOD_removeInputSynapse7 = 7;
private static final int METHOD_removeOutputSynapse8 = 8;
private static final int METHOD_run9 = 9;
private static final int METHOD_start10 = 10;
private static final int METHOD_stop11 = 11;
// Method array
/*lazy MethodDescriptor*/
// Lazily builds the MethodDescriptor array for the Layer bean, describing the
// methods visual editors may invoke. Generated by the NetBeans BeanInfo
// editor -- hand edits belong outside the GEN-... markers.
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[12];
try {
methods[METHOD_addInputSynapse0] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("addInputSynapse", new Class[] {org.joone.engine.InputPatternListener.class}));
methods[METHOD_addInputSynapse0].setDisplayName ( "" );
methods[METHOD_addNoise1] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("addNoise", new Class[] {Double.TYPE}));
methods[METHOD_addNoise1].setDisplayName ( "" );
methods[METHOD_addOutputSynapse2] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("addOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class}));
methods[METHOD_addOutputSynapse2].setDisplayName ( "" );
methods[METHOD_copyInto3] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("copyInto", new Class[] {org.joone.engine.NeuralLayer.class}));
methods[METHOD_copyInto3].setDisplayName ( "" );
methods[METHOD_randomize4] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("randomize", new Class[] {Double.TYPE}));
methods[METHOD_randomize4].setDisplayName ( "" );
methods[METHOD_removeAllInputs5] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("removeAllInputs", new Class[] {}));
methods[METHOD_removeAllInputs5].setDisplayName ( "" );
methods[METHOD_removeAllOutputs6] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("removeAllOutputs", new Class[] {}));
methods[METHOD_removeAllOutputs6].setDisplayName ( "" );
methods[METHOD_removeInputSynapse7] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("removeInputSynapse", new Class[] {org.joone.engine.InputPatternListener.class}));
methods[METHOD_removeInputSynapse7].setDisplayName ( "" );
methods[METHOD_removeOutputSynapse8] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("removeOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class}));
methods[METHOD_removeOutputSynapse8].setDisplayName ( "" );
methods[METHOD_run9] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("run", new Class[] {}));
methods[METHOD_run9].setDisplayName ( "" );
methods[METHOD_start10] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("start", new Class[] {}));
methods[METHOD_start10].setDisplayName ( "" );
methods[METHOD_stop11] = new MethodDescriptor ( org.joone.engine.Layer.class.getMethod("stop", new Class[] {}));
methods[METHOD_stop11].setDisplayName ( "" );
}
// NOTE(review): reflection failures (e.g. a renamed method) are silently
// swallowed here, leaving null entries in the array -- generated code.
catch( Exception e) {}//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
* Gets the bean's <code>BeanDescriptor</code>s.
*
* @return BeanDescriptor describing the editable
* properties of this bean. May return null if the
* information should be obtained by automatic analysis.
*/
public BeanDescriptor getBeanDescriptor() {
// Delegates to the lazily-built descriptor (generated helper).
return getBdescriptor();
}
/**
* Gets the bean's <code>PropertyDescriptor</code>s.
*
* @return An array of PropertyDescriptors describing the editable
* properties supported by this bean. May return null if the
* information should be obtained by automatic analysis.
* <p>
* If a property is indexed, then its entry in the result array will
* belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
* A client of getPropertyDescriptors can use "instanceof" to check
* if a given PropertyDescriptor is an IndexedPropertyDescriptor.
*/
public PropertyDescriptor[] getPropertyDescriptors() {
// Delegates to the lazily-built property array (generated helper).
return getPdescriptor();
}
/**
* Gets the bean's <code>EventSetDescriptor</code>s.
*
* @return An array of EventSetDescriptors describing the kinds of
* events fired by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public EventSetDescriptor[] getEventSetDescriptors() {
// Delegates to the lazily-built (empty) event-set array (generated helper).
return getEdescriptor();
}
/**
* Gets the bean's <code>MethodDescriptor</code>s.
*
* @return An array of MethodDescriptors describing the methods
* implemented by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public MethodDescriptor[] getMethodDescriptors() {
// Delegates to the lazily-built method array (generated helper).
return getMdescriptor();
}
/**
* A bean may have a "default" property that is the property that will
* mostly commonly be initially chosen for update by human's who are
* customizing the bean.
* @return Index of default property in the PropertyDescriptor array
* returned by getPropertyDescriptors.
* <P> Returns -1 if there is no default property.
*/
public int getDefaultPropertyIndex() {
// -1: this bean declares no default property.
return defaultPropertyIndex;
}
/**
* A bean may have a "default" event that is the event that will
* mostly commonly be used by human's when using the bean.
* @return Index of default event in the EventSetDescriptor array
* returned by getEventSetDescriptors.
* <P> Returns -1 if there is no default event.
*/
public int getDefaultEventIndex() {
// -1: this bean declares no default event.
return defaultEventIndex;
}
}
| Java |
package org.joone.engine;
import java.io.*;
import java.util.TreeSet;
import java.util.Collection;
import java.util.ArrayList;
import org.joone.log.*;
import org.joone.inspection.Inspectable;
import org.joone.inspection.implementations.WeightsInspection;
/**
 * The Synapse is the connection element between two Layer objects.
 * Its connections are represented by weights that transport the patterns
 * from a layer to another.
 * These weights are modified in the learning cycles, and represent the 'memory'
 * of the trained neural net.
 * <p>
 * A Synapse behaves as a one-slot producer/consumer buffer in each direction:
 * fwdPut/fwdGet exchange the forward pattern, revPut/revGet exchange the
 * back-propagated pattern. The two directions are synchronized independently
 * on fwdLock and revLock using wait/notify.
 */
public abstract class Synapse
implements
InputPatternListener,
OutputPatternListener,
LearnableSynapse,
Serializable,
Inspectable {
/**
* Logger
* */
private static final ILogger log = LoggerFactory.getLogger(Synapse.class);
/** Count of synapses for naming purposes. */
private static int synapseCount = 0;
/** Name set by default to "Synapse [synapse count]" */
private String fieldName = "Synapse " + ++synapseCount;
// NOTE(review): learningRate and momentum are written by the setters below
// but never read back -- the getters delegate to the Monitor instead.
private double learningRate = 0;
private double momentum = 0;
private int inputDimension = 0;
private int outputDimension = 0;
private boolean inputFull;
private boolean outputFull;
private Monitor monitor;
private int ignoreBefore = -1; // not more used
private boolean loopBack = false;
// true if this synapse closes a loop of a recurrent neural network
/** The weight matrix of this connection. */
protected Matrix array;
protected int m_batch = 0;
protected boolean enabled = true;
protected transient double[] inps = null;
protected transient double[] outs = null;
protected transient double[] bouts;
/** Number of forward patterns currently buffered (0 or 1). */
protected transient int items = 0;
/** Number of backward patterns currently buffered (0 or 1). */
protected transient int bitems = 0;
/**
* @label fwdPattern
*/
protected transient Pattern m_pattern;
// The last fwd pattern read
/**
* @label revPattern
*/
protected transient Pattern b_pattern; // The last back pattern read
protected transient int count = 0;
protected transient boolean notFirstTime;
protected transient boolean notFirstTimeB;
protected transient Learner myLearner = null;
// Objects used for synchronization
protected transient volatile Object fwdLock = null;
protected transient volatile Object revLock = null;
/** Contains true if for the current Synapse must be used
* a Learner instead of a built-in learning algorithm.
* Set it in the constructor of any inherited class.
* Used by the getLearner method.
* @see getLearner
*/
protected boolean learnable = false;
private static final long serialVersionUID = -5892822057908231022L;
/** The constructor
*/
public Synapse() {
//log.info ("Synapse instanciated");
}
/** Adds a noise to the weights of the synapse
* @param amplitude Amplitude of the noise: the value is centered around the zero.
* e.g.: an amplitude = 0.2 means a noise range from -0.2 to 0.2
*/
public void addNoise(double amplitude) {
if (array != null)
array.addNoise(amplitude);
}
/** Initializes all the weigths of the synapses with random values
* @param amplitude Amplitude of the random values: the value is centered around the zero.
* e.g.: an amplitude = 0.2 means a values' range from -0.2 to 0.2
* <p>
* NOTE(review): the amplitude parameter is currently ignored -- the original
* randomize call is commented out and Matrix.initialize() is used instead.
* Confirm whether this is intentional.
*/
public void randomize(double amplitude) {
if (array != null)
// array.randomize(-1.0 * amplitude, amplitude);
array.initialize();
}
/**
* Training (backward) transfer function of this element.
* @param pattern double[] - the pattern to which the backward transfer function is applied
*/
protected abstract void backward(double[] pattern);
/** Returns TRUE if the synapse calls the method nextStep()
* on the Monitor object when the fwdGet() method is called
* @return boolean
*/
public boolean canCountSteps() {
return false;
}
/**
* Recall function
* @param pattern double[] - input pattern
*/
protected abstract void forward(double[] pattern);
/** Consumer side of the forward buffer: waits for a pattern deposited by
* fwdPut and returns it.
* @return the forwarded Pattern, or null if the synapse is disabled, the
* waiting thread is interrupted, or this is the first step of a
* loopback synapse (nothing produced yet)
*/
public Pattern fwdGet() {
if (!isEnabled())
return null;
synchronized (getFwdLock()) {
if ((notFirstTime) || (!loopBack)) {
// Wait until the producer (fwdPut) has deposited a pattern
while (items == 0) {
try {
fwdLock.wait();
} catch (InterruptedException e) {
// log.warn ( "wait () was interrupted");
//e.printStackTrace();
reset();
fwdLock.notify();
return null;
}
}
--items;
m_pattern.setArray(outs);
if (isLoopBack())
// To avoid sinc problems
m_pattern.setCount(0);
fwdLock.notify();
return m_pattern;
} else {
// First step of a recurrent (loopback) connection: nothing to return yet
items = bitems = count = 0;
notFirstTime = true;
fwdLock.notify();
return null;
}
}
}
/** Producer side of the forward buffer: applies the forward transfer
* function to the pattern and makes the result available to fwdGet.
* Blocks while the previous pattern has not been consumed yet.
* @param pattern the input pattern to forward through this synapse
*/
public void fwdPut(Pattern pattern) {
if (isEnabled()) {
synchronized (getFwdLock()) {
// Wait until the consumer (fwdGet) has taken the previous pattern
while (items > 0) {
try {
fwdLock.wait();
} catch (InterruptedException e) {
reset();
fwdLock.notify();
return;
} // End of catch
}
m_pattern = pattern;
count = m_pattern.getCount();
inps = (double[])pattern.getArray();
forward(inps);
++items;
fwdLock.notify();
}
}
}
/** Resets the internal state to be ready for the next run
*/
public void reset() {
items = bitems = 0;
notFirstTime = false;
notFirstTimeB = false;
}
/** Returns the number of the ignored cycles at beginning of each epoch.
* During these cycles the synapse returns null on the call to the xxxGet methods
*
* @return int
* @see Synapse#setIgnoreBefore
*/
public int getIgnoreBefore() {
return ignoreBefore;
}
/** Returns the input dimension of the synapse.
* @return int
*/
public int getInputDimension() {
return inputDimension;
}
/** Returns the value of the learning rate, read from the attached Monitor
* (the local learningRate field is not used).
* @return double
*/
public double getLearningRate() {
if (monitor != null)
return monitor.getLearningRate();
else
return 0.0;
}
/** Returns the value of the momentum, read from the attached Monitor
* (the local momentum field is not used).
* @return double
*/
public double getMomentum() {
if (monitor != null)
return monitor.getMomentum();
else
return 0.0;
}
/** Returns the Monitor object attached to the synapse
* @return neural.engine.Monitor
*/
public Monitor getMonitor() {
return monitor;
}
/** Returns the name of the synapse
* @return String
* @see #setName
*/
public String getName() {
return fieldName;
}
/** Returns the output dimension of the synapse.
* @return int
*/
public int getOutputDimension() {
return outputDimension;
}
/** Re-allocates the transient internal arrays after deserialization.
* @return this synapse
*/
protected Object readResolve() {
setArrays(getInputDimension(), getOutputDimension());
return this;
}
/** Consumer side of the backward buffer: waits for a pattern deposited by
* revPut and returns it.
* @return the back-propagated Pattern, or null if the synapse is disabled,
* the waiting thread is interrupted, or this is the first backward
* step of a loopback synapse
*/
public Pattern revGet() {
if (!isEnabled())
return null;
synchronized (getRevLock()) {
if ((notFirstTimeB) || (!loopBack)) {
// Wait until the producer (revPut) has deposited a pattern
while (bitems == 0) {
try {
revLock.wait();
} catch (InterruptedException e) {
// log.warn ( "wait () was interrupted");
//e.printStackTrace();
reset();
revLock.notify();
return null;
}
}
--bitems;
b_pattern.setArray(bouts);
revLock.notify();
return b_pattern;
} else {
//bitems = 0;
revLock.notify();
return null;
}
}
}
/** Producer side of the backward buffer: applies the backward transfer
* function to the pattern and makes the result available to revGet.
* Blocks while the previous pattern has not been consumed yet.
* @param pattern the error pattern to back-propagate through this synapse
*/
public void revPut(Pattern pattern) {
if (isEnabled()) {
synchronized (getRevLock()) {
// Wait until the consumer (revGet) has taken the previous pattern
while (bitems > 0) {
try {
revLock.wait();
} catch (InterruptedException e) {
reset();
revLock.notify();
return;
}
}
b_pattern = pattern;
count = b_pattern.getCount();
backward(pattern.getArray());
++bitems;
notFirstTimeB = true;
revLock.notify();
}
}
}
/**
* Allocates the internal buffers of the synapse.
* Called after deserialization (see readResolve).
* @param rows the input dimension
* @param cols the output dimension
*/
protected abstract void setArrays(int rows, int cols);
/**
* Resizes the synapse.
* @param rows the input dimension (-1 appears to mean 'leave unchanged' --
* see setInputDimension/setOutputDimension)
* @param cols the output dimension (same -1 convention)
*/
protected abstract void setDimensions(int rows, int cols);
/** Sets the number of the ignored cycles at beginning of each epoch.
* During these cycles the synapse is disabled.
* Useful when the synapse is attached as the Input2 of a SwitchSynapse
*
* @param newIgnoreBefore int
* @see SwitchSynapse
*/
public void setIgnoreBefore(int newIgnoreBefore) {
ignoreBefore = newIgnoreBefore;
}
/** Sets the input dimension of the synapse
* @param newInputDimension int
*/
public void setInputDimension(int newInputDimension) {
if (inputDimension != newInputDimension) {
inputDimension = newInputDimension;
setDimensions(newInputDimension, -1);
}
}
/** Sets the value of the learning rate.
* NOTE(review): the stored value is never read back; getLearningRate()
* delegates to the Monitor instead.
* @param newLearningRate double
*/
public void setLearningRate(double newLearningRate) {
learningRate = newLearningRate;
}
/** Sets the value of the momentum rate.
* NOTE(review): the stored value is never read back; getMomentum()
* delegates to the Monitor instead.
* @param newMomentum double
*/
public void setMomentum(double newMomentum) {
momentum = newMomentum;
}
/** Sets the Monitor object of the synapse
* @param newMonitor neural.engine.Monitor
*/
public void setMonitor(Monitor newMonitor) {
monitor = newMonitor;
if (monitor != null) {
setLearningRate(monitor.getLearningRate());
setMomentum(monitor.getMomentum());
}
}
/** Sets the name of the synapse
* @param name The name of the component.
* @see #getName
*/
public void setName(java.lang.String name) {
fieldName = name;
}
/** Sets the output dimension of the synapse
* @param newOutputDimension int
*/
public void setOutputDimension(int newOutputDimension) {
if (outputDimension != newOutputDimension) {
outputDimension = newOutputDimension;
setDimensions(-1, newOutputDimension);
}
}
/** Getter for property enabled.
* @return Value of property enabled.
*/
public boolean isEnabled() {
return enabled;
}
/** Setter for property enabled.
* @param enabled New value of property enabled.
*/
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
/** Getter for property loopBack.
* @return Value of property loopBack.
*
*/
public boolean isLoopBack() {
return loopBack;
}
/** Setter for property loopBack.
* @param loopBack New value of property loopBack.
*
*/
public void setLoopBack(boolean loopBack) {
this.loopBack = loopBack;
}
/**
* Base for check messages.
* Subclasses should call this method from their own check method.
*
* @see InputPaternListener
* @see OutputPaternListener
* @return validation errors.
*/
public TreeSet check() {
// Prepare an empty set for check messages;
TreeSet checks = new TreeSet();
// Return check messages
return checks;
}
/** Returns the inspectable views of this synapse (its weight matrix).
* @return a Collection containing a WeightsInspection over the weights
*/
public Collection Inspections() {
Collection col = new ArrayList();
col.add(new WeightsInspection(array));
return col;
}
/** Returns the title shown by inspection tools (the synapse's name). */
public String InspectableTitle() {
return this.getName();
}
/** Getter for property inputFull.
* @return Value of property inputFull.
*
*/
public boolean isInputFull() {
return inputFull;
}
/** Setter for property inputFull.
* @param inputFull New value of property inputFull.
*
*/
public void setInputFull(boolean inputFull) {
this.inputFull = inputFull;
}
/** Getter for property outputFull.
* @return Value of property outputFull.
*
*/
public boolean isOutputFull() {
return outputFull;
}
/** Setter for property outputFull.
* @param outputFull New value of property outputFull.
*
*/
public void setOutputFull(boolean outputFull) {
this.outputFull = outputFull;
}
/** Getter for the internal matrix of weights
*
* @return the Matrix containing the 2D array of weights
*/
public Matrix getWeights() {
return array;
}
/** Setter for the internal matrix of weights
*
* @param the Matrix containing the 2D array of weights
*/
public void setWeights(Matrix newWeights) {
array = newWeights;
}
/** Returns the appropriate Learner object for this class
* depending on the Monitor.learningMode property value
* @return the Learner object if applicable, otherwise null
* @see org.joone.engine.Learnable#getLearner()
*/
public Learner getLearner() {
if (!learnable) {
return null;
}
return getMonitor().getLearner();
}
/** Initialize the Learner object
* @see org.joone.engine.Learnable#initLearner()
*/
public void initLearner() {
myLearner = getLearner();
if(myLearner != null) {
myLearner.registerLearnable(this);
}
}
/**
* Getter for property fwdLock (lazily created).
* @return Value of property fwdLock.
*/
protected Object getFwdLock() {
if (fwdLock == null)
fwdLock = new Object();
return fwdLock;
}
/**
* Getter for property revLock (lazily created).
* @return Value of property revLock.
*/
protected Object getRevLock() {
if (revLock == null)
revLock = new Object();
return revLock;
}
/** Synapse's initialization.
* It needs to be invoked at the starting of the neural network
* It's called within the Layer.init() method
*/
public void init() {
this.initLearner();
this.getFwdLock();
this.getRevLock();
}
}
package org.joone.engine;
/**
 * A neural layer whose internal parameters can be adjusted by a Learner.
 * The interface adds no methods of its own: it combines the Learnable and
 * NeuralLayer contracts so that learners (see AbstractLearner) can treat
 * trainable layers polymorphically.
 */
public interface LearnableLayer extends Learnable, NeuralLayer {
}
package org.joone.engine;
/** Element of a connection representing a FIR filter (Finite Impulse Response).
* The DelaySynapse object implements a delayed full synapse where each connection
* is implemented with a FIRFilter object.
*
* In this connection is implemented the temporal backpropagation algorithm
* by Eric A. Wan, as in 'Time Series Prediction by Using a Connectionist Network
* with Internal Delay Lines' in Time Series Prediction. Forecasting the Future and
* Understanding the Past, by A.Weigend and N.Gershenfeld. Addison-Wesley, 1994.
*
* @author P.Marrone
* @see org.joone.engine.DelaySynapse
*/
// 1 2 N
// Xk(t) -->O--->O...-->O
// | | |
// Wk1 Wk2 Wkn
// | | |
// | | |
// -------------->(+)--> Yk
//
// Where: Yk = Xk(t)Wk1 + Xk(t-1)Wk2 +..+ Xk(t-n+1)Wkn
// n = taps (the delay of the connection)
public class FIRFilter implements java.io.Serializable {
protected int m_taps;
protected double[] memory;
protected double[] backmemory;
protected double[] outs;
protected double[] bouts;
protected Matrix array;
public double lrate;
public double momentum;
private static final long serialVersionUID = 2539307324689626619L;
public FIRFilter(int taps) {
outs = new double[taps];
bouts = new double[taps];
memory = new double[taps];
backmemory = new double[taps];
array = new Matrix(taps, 1);
m_taps = taps - 1;
}
public void addNoise(double amplitude) {
array.addNoise(amplitude);
}
protected double backDelay(double[] pattern) {
int y;
for (y = 0; y < m_taps; ++y) {
backmemory[y] = backmemory[y + 1];
backmemory[y] += pattern[y];
}
backmemory[m_taps] = pattern[m_taps];
return backmemory[0];
}
protected double[] backFilter(double input) {
int x;
double dw;
// Weights adj
for (x=0; x <= m_taps; ++x) {
bouts[x] = input * array.value[x][0];
dw = lrate * input * outs[x] + momentum * array.delta[x][0];
array.value[x][0] += dw;
array.delta[x][0] = dw;
}
return bouts;
}
public double backward(double input) {
return backDelay(backFilter(input));
}
protected double[] Delay(double input) {
int y;
for (y = m_taps; y > 0; --y) {
memory[y] = memory[y - 1];
outs[y] = memory[y];
}
memory[0] = input;
outs[0] = input;
return outs;
}
protected double Filter(double[] pattern) {
int x;
double s = 0;
for (x=0; x <= m_taps; ++x) {
s += pattern[x] * array.value[x][0];
}
return s;
}
public double forward(double input) {
return Filter(Delay(input));
}
} | Java |
package org.joone.engine;
import java.io.*;
import java.util.TreeSet;
public abstract class MemoryLayer extends Layer {
protected double memory[];
protected double backmemory[];
private int taps = 0;
private static final long serialVersionUID = 5447777678414684948L;
public MemoryLayer() {
super();
}
public MemoryLayer(String ElemName) {
super(ElemName);
}
public int getDimension() {
return (getRows() * (getTaps() + 1));
}
/**
* Return the taps value
* (06/04/00 1.08.26)
* @return int
*/
public int getTaps() {
return taps;
}
protected void setDimensions() {
inps = new double[getRows()];
outs = new double[getRows() * (getTaps() + 1)];
gradientInps = new double[getRows() * (getTaps() + 1)];
gradientOuts = new double[getRows()];
memory = new double[getRows() * (getTaps() + 1)];
backmemory = new double[getRows() * (getTaps() + 1)];
}
/**
* Sets the dimansion of the output
* (22/03/00 1.45.24)
* @param syn neural.engine.Synapse
*/
protected void setOutputDimension(OutputPatternListener syn) {
int n = getRows() * (getTaps() + 1);
if (syn.getInputDimension() != n)
syn.setInputDimension(n);
}
/**
* Inserire qui la descrizione del metodo.
* Data di creazione: (06/04/00 1.08.26)
* @param newTaps int
*/
public void setTaps(int newTaps) {
taps = newTaps;
setDimensions();
setConnDimensions();
}
protected void sumBackInput(double[] pattern) {
int x;
int length = getRows() * (getTaps() + 1);
for (x = 0; x < length; ++x)
gradientInps[x] += pattern[x];
}
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
setDimensions();
}
/** Reverse transfer function of the component.
* @param pattern double[] - input pattern on wich to apply the transfer function
*
*/
protected void backward(double[] pattern) {
}
/** Transfer function to recall a result on a trained net
* @param pattern double[] - input pattern
*
*/
protected void forward(double[] pattern) {
}
public TreeSet check() {
return super.check();
}
} | Java |
package org.joone.engine;
/**
* This class provides some basic simple functionality that can be used (extended) by other learners.
*
* @author Boris Jansen
*/
/**
 * Convenience base class for learners: keeps track of the registered
 * Learnable (and its more specific layer/synapse view) and of the Monitor,
 * so concrete learners only implement the actual update rules.
 *
 * @author Boris Jansen
 */
public abstract class AbstractLearner implements Learner {
/** The learnable, the object that is subjected to the learning process. */
protected Learnable learnable = null;
/** The layer (biases) that is subjected to the learning process. */
protected LearnableLayer learnableLayer = null;
/** The synapse (weights) that is subjected to the learning process. */
protected LearnableSynapse learnableSynapse = null;
/** The saved monitor object. */
protected Monitor monitor;
/** Creates a new instance of AbstractLearner */
public AbstractLearner() {
}
/**
 * Registers the object to be trained. Its concrete kind (layer or synapse)
 * is detected once here, so later code can avoid repeated casts.
 *
 * @param aLearnable the object subjected to the learning process
 */
public void registerLearnable(Learnable aLearnable) {
this.learnable = aLearnable;
if (aLearnable instanceof LearnableLayer) {
this.learnableLayer = (LearnableLayer) aLearnable;
} else if (aLearnable instanceof LearnableSynapse) {
this.learnableSynapse = (LearnableSynapse) aLearnable;
}
}
/**
 * Stores the Monitor. Subclasses may override this to pull the learning
 * parameters they need from the Monitor.
 *
 * @param mon the monitor object to save
 */
public void setMonitor(Monitor mon) {
this.monitor = mon;
}
/**
 * Gets the monitor object.
 *
 * @return the monitor object.
 */
public Monitor getMonitor() {
return this.monitor;
}
/**
 * Gets the layer the learner is associated with.
 *
 * @return the layer, or null if a synapse (not a layer) was registered.
 */
public LearnableLayer getLayer() {
return this.learnableLayer;
}
/**
 * Gets the synapse the learner is associated with.
 *
 * @return the synapse, or null if a layer (not a synapse) was registered.
 */
public LearnableSynapse getSynapse() {
return this.learnableSynapse;
}
}
| Java |
package org.joone.engine;
import java.beans.*;
public class KohonenSynapseBeanInfo extends SimpleBeanInfo {
// Bean descriptor//GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( org.joone.engine.KohonenSynapse.class , null ); // NOI18N//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers//GEN-FIRST:Properties
private static final int PROPERTY_enabled = 0;
private static final int PROPERTY_loopBack = 1;
private static final int PROPERTY_monitor = 2;
private static final int PROPERTY_name = 3;
private static final int PROPERTY_orderingPhase = 4;
private static final int PROPERTY_timeConstant = 5;
private static final int PROPERTY_weights = 6;
// Property array
/*lazy PropertyDescriptor*/
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[7];
try {
properties[PROPERTY_enabled] = new PropertyDescriptor ( "enabled", org.joone.engine.KohonenSynapse.class, "isEnabled", "setEnabled" ); // NOI18N
properties[PROPERTY_loopBack] = new PropertyDescriptor ( "loopBack", org.joone.engine.KohonenSynapse.class, "isLoopBack", "setLoopBack" ); // NOI18N
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", org.joone.engine.KohonenSynapse.class, "getMonitor", "setMonitor" ); // NOI18N
properties[PROPERTY_monitor].setHidden ( true );
properties[PROPERTY_name] = new PropertyDescriptor ( "name", org.joone.engine.KohonenSynapse.class, "getName", "setName" ); // NOI18N
properties[PROPERTY_orderingPhase] = new PropertyDescriptor ( "orderingPhase", org.joone.engine.KohonenSynapse.class, "getOrderingPhase", "setOrderingPhase" ); // NOI18N
properties[PROPERTY_orderingPhase].setDisplayName ( "ordering phase (epochs)" );
properties[PROPERTY_timeConstant] = new PropertyDescriptor ( "timeConstant", org.joone.engine.KohonenSynapse.class, "getTimeConstant", "setTimeConstant" ); // NOI18N
properties[PROPERTY_weights] = new PropertyDescriptor ( "weights", org.joone.engine.KohonenSynapse.class, "getWeights", "setWeights" ); // NOI18N
properties[PROPERTY_weights].setHidden ( true );
}
catch(IntrospectionException e) {
e.printStackTrace();
}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers//GEN-FIRST:Methods
private static final int METHOD_addNoise0 = 0;
private static final int METHOD_canCountSteps1 = 1;
private static final int METHOD_check2 = 2;
private static final int METHOD_cicleTerminated3 = 3;
private static final int METHOD_errorChanged4 = 4;
private static final int METHOD_fwdGet5 = 5;
private static final int METHOD_fwdPut6 = 6;
private static final int METHOD_netStarted7 = 7;
private static final int METHOD_netStopped8 = 8;
private static final int METHOD_netStoppedError9 = 9;
private static final int METHOD_randomize10 = 10;
private static final int METHOD_reset11 = 11;
private static final int METHOD_revGet12 = 12;
private static final int METHOD_revPut13 = 13;
// Method array
/*lazy MethodDescriptor*/
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[14];
try {
methods[METHOD_addNoise0] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("addNoise", new Class[] {Double.TYPE})); // NOI18N
methods[METHOD_addNoise0].setDisplayName ( "" );
methods[METHOD_canCountSteps1] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("canCountSteps", new Class[] {})); // NOI18N
methods[METHOD_canCountSteps1].setDisplayName ( "" );
methods[METHOD_check2] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("check", new Class[] {})); // NOI18N
methods[METHOD_check2].setDisplayName ( "" );
methods[METHOD_cicleTerminated3] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("cicleTerminated", new Class[] {org.joone.engine.NeuralNetEvent.class})); // NOI18N
methods[METHOD_cicleTerminated3].setDisplayName ( "" );
methods[METHOD_errorChanged4] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("errorChanged", new Class[] {org.joone.engine.NeuralNetEvent.class})); // NOI18N
methods[METHOD_errorChanged4].setDisplayName ( "" );
methods[METHOD_fwdGet5] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("fwdGet", new Class[] {})); // NOI18N
methods[METHOD_fwdGet5].setDisplayName ( "" );
methods[METHOD_fwdPut6] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("fwdPut", new Class[] {org.joone.engine.Pattern.class})); // NOI18N
methods[METHOD_fwdPut6].setDisplayName ( "" );
methods[METHOD_netStarted7] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("netStarted", new Class[] {org.joone.engine.NeuralNetEvent.class})); // NOI18N
methods[METHOD_netStarted7].setDisplayName ( "" );
methods[METHOD_netStopped8] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("netStopped", new Class[] {org.joone.engine.NeuralNetEvent.class})); // NOI18N
methods[METHOD_netStopped8].setDisplayName ( "" );
methods[METHOD_netStoppedError9] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("netStoppedError", new Class[] {org.joone.engine.NeuralNetEvent.class, java.lang.String.class})); // NOI18N
methods[METHOD_netStoppedError9].setDisplayName ( "" );
methods[METHOD_randomize10] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("randomize", new Class[] {Double.TYPE})); // NOI18N
methods[METHOD_randomize10].setDisplayName ( "" );
methods[METHOD_reset11] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("reset", new Class[] {})); // NOI18N
methods[METHOD_reset11].setDisplayName ( "" );
methods[METHOD_revGet12] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("revGet", new Class[] {})); // NOI18N
methods[METHOD_revGet12].setDisplayName ( "" );
methods[METHOD_revPut13] = new MethodDescriptor ( org.joone.engine.KohonenSynapse.class.getMethod("revPut", new Class[] {org.joone.engine.Pattern.class})); // NOI18N
methods[METHOD_revPut13].setDisplayName ( "" );
}
catch( Exception e) {}//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
* Gets the bean's <code>BeanDescriptor</code>s.
*
* @return BeanDescriptor describing the editable
* properties of this bean. May return null if the
* information should be obtained by automatic analysis.
*/
public BeanDescriptor getBeanDescriptor() {
return getBdescriptor();
}
/**
* Gets the bean's <code>PropertyDescriptor</code>s.
*
* @return An array of PropertyDescriptors describing the editable
* properties supported by this bean. May return null if the
* information should be obtained by automatic analysis.
* <p>
* If a property is indexed, then its entry in the result array will
* belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
* A client of getPropertyDescriptors can use "instanceof" to check
* if a given PropertyDescriptor is an IndexedPropertyDescriptor.
*/
public PropertyDescriptor[] getPropertyDescriptors() {
// Delegates to the generated lazy builder getPdescriptor().
return getPdescriptor();
}
/**
* Gets the bean's <code>EventSetDescriptor</code>s.
*
* @return An array of EventSetDescriptors describing the kinds of
* events fired by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public EventSetDescriptor[] getEventSetDescriptors() {
// Delegates to the generated lazy builder getEdescriptor().
return getEdescriptor();
}
/**
* Gets the bean's <code>MethodDescriptor</code>s.
*
* @return An array of MethodDescriptors describing the methods
* implemented by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public MethodDescriptor[] getMethodDescriptors() {
// Delegates to the generated lazy builder getMdescriptor().
return getMdescriptor();
}
/**
* A bean may have a "default" property that is the property that will
* mostly commonly be initially chosen for update by human's who are
* customizing the bean.
* @return Index of default property in the PropertyDescriptor array
* returned by getPropertyDescriptors.
* <P> Returns -1 if there is no default property.
*/
public int getDefaultPropertyIndex() {
// Always -1 for this generated BeanInfo: no default property is declared.
return defaultPropertyIndex;
}
/**
* A bean may have a "default" event that is the event that will
* mostly commonly be used by human's when using the bean.
* @return Index of default event in the EventSetDescriptor array
* returned by getEventSetDescriptors.
* <P> Returns -1 if there is no default event.
*/
public int getDefaultEventIndex() {
// Always -1 for this generated BeanInfo: no default event is declared.
return defaultEventIndex;
}
}
| Java |
package org.joone.engine;
import org.joone.engine.extenders.*;
/** BatchLearner stores the weight/bias changes during the batch and updates them
* after the batch is done.
*
* IMPORTANT: If you want to have standard batch learning, i.e. the BatchSize equals
* the number of training patterns available, just use monitor.
* setBatchSize(monitor.getTrainingPatterns());
*/
public class BatchLearner extends ExtendableLearner {

    /**
     * Creates a batch-mode learner. Weight/bias changes are accumulated by a
     * {@link BatchModeExtender} and applied only when the batch is complete;
     * a {@link MomentumExtender} contributes the momentum term.
     */
    public BatchLearner() {
        setUpdateWeightExtender(new BatchModeExtender());
        // please be careful of the order of extenders...
        addDeltaRuleExtender(new MomentumExtender());
    }

    /**
     * Creates a batch-mode learner with an explicit batch size.
     *
     * @param batchSize the number of patterns per batch.
     * @deprecated use BatchLearner() and set the batch size
     * with monitor.setBatchSize()
     */
    public BatchLearner(int batchSize) {
        // FIX: delegate to the no-arg constructor instead of super(). The
        // previous super() call skipped installing the BatchModeExtender, so
        // the (BatchModeExtender) cast inside setBatchSize() below could not
        // succeed for instances created through this constructor.
        this();
        setBatchSize(batchSize);
    }

    /**
     * @deprecated not used, the BatchModeExtender takes care of everything
     */
    public void initiateNewBatch() {
        // if you want to call it any, probably the next lines are the best...
        if (learnable instanceof LearnableLayer) {
            theUpdateWeightExtender.preBiasUpdate(null);
        } else if (learnable instanceof LearnableSynapse) {
            theUpdateWeightExtender.preWeightUpdate(null, null);
        }
    }

    /**
     * Sets the batch size on the underlying BatchModeExtender.
     *
     * @param newBatchSize the new batch size.
     * @deprecated use monitor.setBatchSize()
     */
    public void setBatchSize(int newBatchSize) {
        ((BatchModeExtender) theUpdateWeightExtender).setBatchSize(newBatchSize);
    }

    /**
     * Gets the batch size from the underlying BatchModeExtender.
     *
     * @return the current batch size.
     * @deprecated use monitor.getBatchSize()
     */
    public int getBatchSize() {
        return ((BatchModeExtender) theUpdateWeightExtender).getBatchSize();
    }
}
| Java |
/*
* SangerSynapse.java
*
* Created on 10 ottobre 2002, 23.26
*/
package org.joone.engine;
/**
* This is the synapse useful to extract the principal components
* from an input data set.
* This synapse implements the so called Sanger PCA algorithm.
* @author pmarrone
*/
public class SangerSynapse extends FullSynapse {
private static final long serialVersionUID = 1417085683178232377L;
/** Creates a new instance of SangerSynapse */
public SangerSynapse() {
super();
// Weights are adjusted locally in backward(); no external Learner is used.
learnable = false;
}
/** Training Function: applies the Sanger (generalized Hebbian) update
* dw[x][y] = learningRate * out[y] * (in[x] - sum_{k<=y} w[x][k] * out[k])
* directly to the weight matrix and records each change in array.delta.
* @param pattern double[] - Input pattern used to calculate the weight's modifications
*
*/
protected void backward(double[] pattern) {
int x, y;
double dw, s;
double[] outArray;
outArray = b_pattern.getOutArray();
// Weights adjustment (Sanger rule).
int m_rows = getInputDimension();
int m_cols = getOutputDimension();
for (x = 0; x < m_rows; ++x) {
// 's' accumulates sum_{k<=y} w[x][k]*out[k]; it is updated BEFORE dw is
// computed, so the current column y is included in the sum. The statement
// order inside this loop is therefore significant - do not reorder.
for (s=0, y=0; y < m_cols; ++y) {
s += array.value[x][y] * outArray[y];
dw = getLearningRate() * outArray[y];
dw = dw * (inps[x] - s);
array.value[x][y] += dw;
array.delta[x][y] = dw;
}
}
}
/** @deprecated - Used only for backward compatibility
*/
public Learner getLearner() {
// Force local (non-Learner) training even for nets saved by older versions.
learnable = false;
return super.getLearner();
}
}
| Java |
/*
* PatternBeanInfo.java
*
* Created on 22 maggio 2004, 20.00
*/
package org.joone.engine;
import java.beans.*;
/**
* @author paolo
*/
public class PatternBeanInfo extends SimpleBeanInfo {
// NOTE(review): NetBeans-generated BeanInfo. Add hand-written code only in
// the areas marked for customization; regenerate rather than editing the
// GEN-marked sections. The empty catch blocks are part of the generated
// pattern: introspection failures surface as partial descriptor arrays.
// Bean descriptor //GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( Pattern.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers //GEN-FIRST:Properties
private static final int PROPERTY_count = 0;
private static final int PROPERTY_values = 1;
// Property array
/*lazy PropertyDescriptor*/
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[2];
try {
properties[PROPERTY_count] = new PropertyDescriptor ( "count", Pattern.class, "getCount", "setCount" );
properties[PROPERTY_values] = new PropertyDescriptor ( "values", Pattern.class, "getValues", "setValues" );
}
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers //GEN-FIRST:Methods
private static final int METHOD_clone0 = 0;
// Method array
/*lazy MethodDescriptor*/
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[1];
try {
methods[METHOD_clone0] = new MethodDescriptor ( org.joone.engine.Pattern.class.getMethod("clone", new Class[] {}));
methods[METHOD_clone0].setDisplayName ( "" );
}
catch( Exception e) {}//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
* Gets the bean's <code>BeanDescriptor</code>s.
*
* @return BeanDescriptor describing the editable
* properties of this bean. May return null if the
* information should be obtained by automatic analysis.
*/
public BeanDescriptor getBeanDescriptor() {
return getBdescriptor();
}
/**
* Gets the bean's <code>PropertyDescriptor</code>s.
*
* @return An array of PropertyDescriptors describing the editable
* properties supported by this bean. May return null if the
* information should be obtained by automatic analysis.
* <p>
* If a property is indexed, then its entry in the result array will
* belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
* A client of getPropertyDescriptors can use "instanceof" to check
* if a given PropertyDescriptor is an IndexedPropertyDescriptor.
*/
public PropertyDescriptor[] getPropertyDescriptors() {
return getPdescriptor();
}
/**
* Gets the bean's <code>EventSetDescriptor</code>s.
*
* @return An array of EventSetDescriptors describing the kinds of
* events fired by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public EventSetDescriptor[] getEventSetDescriptors() {
return getEdescriptor();
}
/**
* Gets the bean's <code>MethodDescriptor</code>s.
*
* @return An array of MethodDescriptors describing the methods
* implemented by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public MethodDescriptor[] getMethodDescriptors() {
return getMdescriptor();
}
/**
* A bean may have a "default" property that is the property that will
* mostly commonly be initially chosen for update by human's who are
* customizing the bean.
* @return Index of default property in the PropertyDescriptor array
* returned by getPropertyDescriptors.
* <P> Returns -1 if there is no default property.
*/
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
* A bean may have a "default" event that is the event that will
* mostly commonly be used by human's when using the bean.
* @return Index of default event in the EventSetDescriptor array
* returned by getEventSetDescriptors.
* <P> Returns -1 if there is no default event.
*/
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
package org.joone.engine;
/**
* This layer consists of linear neurons, i.e. neurons that sum up their inputs
* (actually this is done by the (full) synapse in Joone) along with their biases.
* In the learning process the biases are adjusted in an attempt to output a value
* closer to the desired output.
*
* This layer differs from LinearLayer in two ways:
* - This layer uses biases. These biases can/will also be adjusted in the
* learning process.
* - It has no scalar beta parameter.
*
* @author Boris Jansen
*/
public class BiasedLinearLayer extends SimpleLayer implements LearnableLayer {

    /** Creates a new instance of BiasedLinearLayer. */
    public BiasedLinearLayer() {
        super();
    }

    /**
     * Creates a new, named instance of BiasedLinearLayer.
     *
     * @param anElemName the name of the layer.
     */
    public BiasedLinearLayer(String anElemName) {
        super(anElemName);
    }

    /**
     * Backward pass. A linear neuron's derivative is 1, so the incoming
     * error pattern is copied verbatim into the gradient buffer, after which
     * the learner is asked to adjust the biases.
     */
    public void backward(double[] pattern) {
        final int rows = getRows();
        System.arraycopy(pattern, 0, gradientOuts, 0, rows);
        myLearner.requestBiasUpdate(gradientOuts);
    }

    /**
     * Forward pass: each output is the corresponding input value plus the
     * neuron's bias.
     */
    public void forward(double[] pattern) {
        final int rows = getRows();
        for (int i = 0; i < rows; i++) {
            outs[i] = pattern[i] + bias.value[i][0];
        }
    }

    /** @deprecated - Used only for backward compatibility
     */
    public Learner getLearner() {
        learnable = true;
        return super.getLearner();
    }
}
| Java |
package org.joone.engine;
import org.joone.engine.extenders.*;
/**
 * BasicLearner implements Joone's standard learning: simple gradient descent
 * in "incremental" (also known as "pattern-by-pattern" or "online") mode,
 * with a momentum term.
 *
 * @author dkern
 * @author Boris Jansen
 */
public class BasicLearner extends ExtendableLearner {
public BasicLearner() {
// OnlineModeExtender applies each weight/bias change immediately.
setUpdateWeightExtender(new OnlineModeExtender());
// please be careful of the order of extenders...
addDeltaRuleExtender(new MomentumExtender());
}
} | Java |
/*
* GaussLayer.java
*
* Created on October 28, 2004, 11:58 AM
*/
package org.joone.engine;
import org.joone.exception.JooneRuntimeException;
import org.joone.log.*;
/**
* The output of a Gauss(ian) layer neuron is the sum of the weighted input values,
* applied to a gaussian curve (<code>exp(- x * x)</code>).
*
* @see SimpleLayer parent
* @see Layer parent
* @see NeuralLayer implemented interface
*
* @author Boris Jansen
*/
public class GaussLayer extends SimpleLayer implements LearnableLayer {
/** The logger for this class. */
private static final ILogger log = LoggerFactory.getLogger(GaussLayer.class);
/** Creates a new instance of GaussLayer */
public GaussLayer() {
super();
learnable = true;
}
/**
* Creates a new instance of GaussLayer
*
* @param aName The name of the layer
*/
public GaussLayer(String aName) {
this();
setLayerName(aName);
}
/**
* Forward pass: outs[i] = exp(-v*v) with v = aPattern[i] + bias[i].
*
* @param aPattern the weighted input value for each neuron.
* @throws JooneRuntimeException if any element of the pattern cannot be processed.
*/
protected void forward(double[] aPattern) throws JooneRuntimeException {
double myNeuronInput;
int myRows = getRows(), i = 0;
try {
for(i = 0; i < myRows; i++) {
myNeuronInput = aPattern[i] + getBias().value[i][0];
outs[i] = Math.exp(-myNeuronInput * myNeuronInput);
}
}catch (Exception aioobe) {
// Broad catch is deliberate: any failure is logged with the offending
// index/value and rethrown wrapped in a JooneRuntimeException.
String msg;
log.error(msg = "Exception thrown while processing the element " + i + " of the array. Value is : " + aPattern[i]
+ " Exception thrown is " + aioobe.getClass ().getName () + ". Message is " + aioobe.getMessage());
throw new JooneRuntimeException (msg, aioobe);
}
}
/**
* Backward pass: scales the incoming error by the gaussian's derivative,
* d/dv exp(-v*v) = -2*v*exp(-v*v), then requests a bias update.
*
* NOTE(review): the code uses inps[i] as 'v', while forward() evaluated the
* gaussian at aPattern[i] + bias[i] - confirm inps already includes the
* bias term; otherwise the derivative is taken at the pre-bias input.
*
* @param aPattern the error pattern coming from the next layer.
*/
public void backward(double[] aPattern) throws JooneRuntimeException {
super.backward(aPattern);
int myRows = getRows(), i = 0;
for(i = 0; i < myRows; i++) {
gradientOuts[i] = aPattern[i] * -2 * inps[i] * outs[i];
}
myLearner.requestBiasUpdate(gradientOuts);
}
}
| Java |
package org.joone.engine;
/**
* This object holds the global parameters for the RPROP learning
* algorithm (RpropLearner).
*
* @author Boris Jansen
*/
public class RpropParameters {

    /** Value every delta starts from (RPROP default 0.1). */
    private double initialDelta = 0.1;

    /** Upper bound any delta may grow to (RPROP default 50.0). */
    private double maxDelta = 50.0;

    /** Lower bound any delta may shrink to (RPROP default 1e-6). */
    private double minDelta = 1e-6;

    /** Growth factor applied while the gradient keeps its sign (default 1.2). */
    private double etaInc = 1.2;

    /** Shrink factor applied when the gradient flips sign (default 0.5). */
    private double etaDec = 0.5;

    /** Number of patterns per weight update (default 1). */
    private int batchSize = 1;

    /** Creates a parameter holder populated with the RPROP defaults. */
    public RpropParameters() {
    }

    /**
     * Gets the initial delta for the weight/bias at index (i, j). This base
     * implementation ignores the indices and returns one shared value; a
     * subclass may override it to assign different initial deltas to
     * different weights/biases based on their position.
     *
     * @param i the row index of the weight/bias.
     * @param j the column index of the weight/bias.
     * @return the initial delta value.
     */
    public double getInitialDelta(int i, int j) {
        return initialDelta;
    }

    /**
     * Sets the initial delta used for all deltas.
     *
     * @param anInitialDelta the initial delta value.
     */
    public void setInitialDelta(double anInitialDelta) {
        initialDelta = anInitialDelta;
    }

    /**
     * Gets the maximum allowed delta value.
     *
     * @return the maximum allowed delta value.
     */
    public double getMaxDelta() {
        return maxDelta;
    }

    /**
     * Sets the maximum allowed delta value.
     *
     * @param aMaxDelta the maximum allowed delta value.
     */
    public void setMaxDelta(double aMaxDelta) {
        maxDelta = aMaxDelta;
    }

    /**
     * Gets the minimum allowed delta value.
     *
     * @return the minimum allowed delta value.
     */
    public double getMinDelta() {
        return minDelta;
    }

    /**
     * Sets the minimum allowed delta value.
     *
     * @param aMinDelta the minimum allowed delta value.
     */
    public void setMinDelta(double aMinDelta) {
        minDelta = aMinDelta;
    }

    /**
     * Gets the incremental learning factor/rate.
     *
     * @return the incremental learning factor/rate.
     */
    public double getEtaInc() {
        return etaInc;
    }

    /**
     * Sets the incremental learning factor/rate.
     *
     * @param anEtaInc the incremental learning factor/rate.
     */
    public void setEtaInc(double anEtaInc) {
        etaInc = anEtaInc;
    }

    /**
     * Gets the decremental learning factor/rate.
     *
     * @return the decremental learning factor/rate.
     */
    public double getEtaDec() {
        return etaDec;
    }

    /**
     * Sets the decremental learning factor/rate.
     *
     * @param anEtaDec the decremental learning factor/rate.
     */
    public void setEtaDec(double anEtaDec) {
        etaDec = anEtaDec;
    }

    /**
     * Gets the batch size.
     *
     * @return the batch size.
     */
    public int getBatchSize() {
        return batchSize;
    }

    /**
     * Sets the batch size.
     *
     * @param aBatchsize the new batch size.
     */
    public void setBatchSize(int aBatchsize) {
        batchSize = aBatchsize;
    }
}
| Java |
package org.joone.engine;
/** This interface represents an input synapse for a generic layer.
* @author: Paolo Marrone
*/
public interface InputPatternListener extends NeuralElement {
/** Returns the pattern coming from the previous layer during the recall phase
* @return neural.engine.Pattern
*/
public Pattern fwdGet();
/** Returns the value of the 'input full' flag.
* NOTE(review): the flag's exact meaning is not evident from this file -
* see implementations / setInputFull callers to confirm.
*/
public boolean isInputFull();
/** Sets the 'input full' flag.
* @param inputFull the new flag value.
*/
public void setInputFull(boolean inputFull);
/** Returns the dimension of the input synapse
* @return int
*/
public int getOutputDimension();
/** Method to put an error pattern backward to the previous layer
* @param pattern neural.engine.Pattern
*/
public void revPut(Pattern pattern);
/** Sets the dimension of the input synapse
* @param newOutputDimension int
*/
public void setOutputDimension(int newOutputDimension);
/** reset of the input synapse
*/
public void reset();
} | Java |
package org.joone.engine;
import java.util.TreeSet;
/** This interface represents a generic element of a neural network
* @author: Paolo Marrone
*/
public interface NeuralElement {
/** Returns whether this element is enabled (participates in processing).
* @return boolean
*/
public boolean isEnabled();
/** Enables or disables this element.
* @param enabled the new enabled state.
*/
public void setEnabled(boolean enabled);
/** Sets the Monitor object of the output synapse
* @param newMonitor org.joone.engine.Monitor
*/
public void setMonitor(Monitor newMonitor);
/** Returns the monitor
* @return org.joone.engine.Monitor
*/
public Monitor getMonitor();
/** Returns the name of the output synapse
* @return String
*/
public String getName();
/** Sets the name of the output synapse
* @param name String
*/
public void setName(java.lang.String name);
/** Initializes the element. Implementation-specific; see concrete classes. */
public void init();
/**
* Validation checks for invalid parameter values, misconfiguration, etc.
* All network components should include a check method that firstly calls its ancestor check method and
* adds these to any check messages it produces. This allows check messages to be collected from all levels
* of a component to be returned to the caller's check method. Using a TreeSet ensures that
* duplicate messages are removed. Check messages should be produced using the generateValidationErrorMessage
* method of the NetChecker class.
*
* @return validation errors.
*/
public TreeSet check();
} | Java |
package org.joone.engine;
import java.beans.*;
public class LinearLayerBeanInfo extends SimpleBeanInfo {
// NOTE(review): NetBeans-generated BeanInfo using eagerly-built static
// descriptor arrays (unlike the lazy-builder variant used elsewhere in this
// file). Edit only in the customization areas; the empty catch block is
// part of the generated pattern.
// Bean descriptor//GEN-FIRST:BeanDescriptor
private static BeanDescriptor beanDescriptor = new BeanDescriptor ( LinearLayer.class , null );
private static BeanDescriptor getBdescriptor(){
return beanDescriptor;
}
static {//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
}//GEN-LAST:BeanDescriptor
// Property identifiers//GEN-FIRST:Properties
private static final int PROPERTY_allInputs = 0;
private static final int PROPERTY_allOutputs = 1;
private static final int PROPERTY_beta = 2;
private static final int PROPERTY_bias = 3;
private static final int PROPERTY_inputLayer = 4;
private static final int PROPERTY_layerName = 5;
private static final int PROPERTY_learner = 6;
private static final int PROPERTY_monitor = 7;
private static final int PROPERTY_outputLayer = 8;
private static final int PROPERTY_rows = 9;
// Property array
private static PropertyDescriptor[] properties = new PropertyDescriptor[10];
private static PropertyDescriptor[] getPdescriptor(){
return properties;
}
static {
try {
properties[PROPERTY_allInputs] = new PropertyDescriptor ( "allInputs", LinearLayer.class, "getAllInputs", "setAllInputs" );
properties[PROPERTY_allInputs].setExpert ( true );
properties[PROPERTY_allOutputs] = new PropertyDescriptor ( "allOutputs", LinearLayer.class, "getAllOutputs", "setAllOutputs" );
properties[PROPERTY_allOutputs].setExpert ( true );
properties[PROPERTY_beta] = new PropertyDescriptor ( "beta", LinearLayer.class, "getBeta", "setBeta" );
properties[PROPERTY_bias] = new PropertyDescriptor ( "bias", LinearLayer.class, "getBias", "setBias" );
properties[PROPERTY_bias].setExpert ( true );
properties[PROPERTY_inputLayer] = new PropertyDescriptor ( "inputLayer", LinearLayer.class, "isInputLayer", null );
properties[PROPERTY_inputLayer].setExpert ( true );
properties[PROPERTY_layerName] = new PropertyDescriptor ( "layerName", LinearLayer.class, "getLayerName", "setLayerName" );
properties[PROPERTY_learner] = new PropertyDescriptor ( "learner", LinearLayer.class, "getLearner", null );
properties[PROPERTY_learner].setExpert ( true );
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", LinearLayer.class, "getMonitor", "setMonitor" );
properties[PROPERTY_monitor].setExpert ( true );
properties[PROPERTY_outputLayer] = new PropertyDescriptor ( "outputLayer", LinearLayer.class, "isOutputLayer", null );
properties[PROPERTY_outputLayer].setExpert ( true );
properties[PROPERTY_rows] = new PropertyDescriptor ( "rows", LinearLayer.class, "getRows", "setRows" );
}
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
}//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
private static EventSetDescriptor[] eventSets = new EventSetDescriptor[0];
private static EventSetDescriptor[] getEdescriptor(){
return eventSets;
}
//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
//GEN-LAST:Events
// Method identifiers//GEN-FIRST:Methods
// Method array
private static MethodDescriptor[] methods = new MethodDescriptor[0];
private static MethodDescriptor[] getMdescriptor(){
return methods;
}
//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
/**
* Gets the bean's <code>BeanDescriptor</code>s.
*
* @return BeanDescriptor describing the editable
* properties of this bean. May return null if the
* information should be obtained by automatic analysis.
*/
public BeanDescriptor getBeanDescriptor() {
return beanDescriptor;
}
/**
* Gets the bean's <code>PropertyDescriptor</code>s.
*
* @return An array of PropertyDescriptors describing the editable
* properties supported by this bean. May return null if the
* information should be obtained by automatic analysis.
* <p>
* If a property is indexed, then its entry in the result array will
* belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
* A client of getPropertyDescriptors can use "instanceof" to check
* if a given PropertyDescriptor is an IndexedPropertyDescriptor.
*/
public PropertyDescriptor[] getPropertyDescriptors() {
return properties;
}
/**
* Gets the bean's <code>EventSetDescriptor</code>s.
*
* @return An array of EventSetDescriptors describing the kinds of
* events fired by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public EventSetDescriptor[] getEventSetDescriptors() {
return eventSets;
}
/**
* Gets the bean's <code>MethodDescriptor</code>s.
*
* @return An array of MethodDescriptors describing the methods
* implemented by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public MethodDescriptor[] getMethodDescriptors() {
return methods;
}
/**
* A bean may have a "default" property that is the property that will
* mostly commonly be initially chosen for update by human's who are
* customizing the bean.
* @return Index of default property in the PropertyDescriptor array
* returned by getPropertyDescriptors.
* <P> Returns -1 if there is no default property.
*/
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
* A bean may have a "default" event that is the event that will
* mostly commonly be used by human's when using the bean.
* @return Index of default event in the EventSetDescriptor array
* returned by getEventSetDescriptors.
* <P> Returns -1 if there is no default event.
*/
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
package org.joone.engine.learning;
import java.beans.*;
public class TeachingSynapseBeanInfo extends SimpleBeanInfo {
// NOTE(review): NetBeans-generated BeanInfo using eagerly-built static
// descriptor arrays. Edit only in the customization areas; the empty catch
// blocks are part of the generated pattern.
// Bean descriptor //GEN-FIRST:BeanDescriptor
private static BeanDescriptor beanDescriptor = new BeanDescriptor ( TeachingSynapse.class , null );
private static BeanDescriptor getBdescriptor(){
return beanDescriptor;
}
static {//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
}//GEN-LAST:BeanDescriptor
// Property identifiers //GEN-FIRST:Properties
private static final int PROPERTY_enabled = 0;
private static final int PROPERTY_desired = 1;
private static final int PROPERTY_name = 2;
private static final int PROPERTY_monitor = 3;
// Property array
private static PropertyDescriptor[] properties = new PropertyDescriptor[4];
private static PropertyDescriptor[] getPdescriptor(){
return properties;
}
static {
try {
properties[PROPERTY_enabled] = new PropertyDescriptor ( "enabled", TeachingSynapse.class, "isEnabled", "setEnabled" );
properties[PROPERTY_desired] = new PropertyDescriptor ( "desired", TeachingSynapse.class, "getDesired", "setDesired" );
properties[PROPERTY_desired].setExpert ( true );
properties[PROPERTY_name] = new PropertyDescriptor ( "name", TeachingSynapse.class, "getName", "setName" );
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", TeachingSynapse.class, "getMonitor", "setMonitor" );
properties[PROPERTY_monitor].setExpert ( true );
}
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
}//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
private static EventSetDescriptor[] eventSets = new EventSetDescriptor[0];
private static EventSetDescriptor[] getEdescriptor(){
return eventSets;
}
//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
//GEN-LAST:Events
// Method identifiers //GEN-FIRST:Methods
private static final int METHOD_fwdPut0 = 0;
private static final int METHOD_revGet1 = 1;
private static final int METHOD_start2 = 2;
// Method array
private static MethodDescriptor[] methods = new MethodDescriptor[3];
private static MethodDescriptor[] getMdescriptor(){
return methods;
}
static {
try {
methods[METHOD_fwdPut0] = new MethodDescriptor ( org.joone.engine.learning.TeachingSynapse.class.getMethod("fwdPut", new Class[] {org.joone.engine.Pattern.class}));
methods[METHOD_fwdPut0].setDisplayName ( "" );
methods[METHOD_revGet1] = new MethodDescriptor ( org.joone.engine.learning.TeachingSynapse.class.getMethod("revGet", new Class[] {}));
methods[METHOD_revGet1].setDisplayName ( "" );
methods[METHOD_start2] = new MethodDescriptor ( org.joone.engine.learning.TeachingSynapse.class.getMethod("start", new Class[] {}));
methods[METHOD_start2].setDisplayName ( "" );
}
catch( Exception e) {}//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
}//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
/**
* Gets the bean's <code>BeanDescriptor</code>s.
*
* @return BeanDescriptor describing the editable
* properties of this bean. May return null if the
* information should be obtained by automatic analysis.
*/
public BeanDescriptor getBeanDescriptor() {
return beanDescriptor;
}
/**
* Gets the bean's <code>PropertyDescriptor</code>s.
*
* @return An array of PropertyDescriptors describing the editable
* properties supported by this bean. May return null if the
* information should be obtained by automatic analysis.
* <p>
* If a property is indexed, then its entry in the result array will
* belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
* A client of getPropertyDescriptors can use "instanceof" to check
* if a given PropertyDescriptor is an IndexedPropertyDescriptor.
*/
public PropertyDescriptor[] getPropertyDescriptors() {
return properties;
}
/**
* Gets the bean's <code>EventSetDescriptor</code>s.
*
* @return An array of EventSetDescriptors describing the kinds of
* events fired by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public EventSetDescriptor[] getEventSetDescriptors() {
return eventSets;
}
/**
* Gets the bean's <code>MethodDescriptor</code>s.
*
* @return An array of MethodDescriptors describing the methods
* implemented by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public MethodDescriptor[] getMethodDescriptors() {
return methods;
}
/**
* A bean may have a "default" property that is the property that will
* mostly commonly be initially chosen for update by human's who are
* customizing the bean.
* @return Index of default property in the PropertyDescriptor array
* returned by getPropertyDescriptors.
* <P> Returns -1 if there is no default property.
*/
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
* A bean may have a "default" event that is the event that will
* mostly commonly be used by human's when using the bean.
* @return Index of default event in the EventSetDescriptor array
* returned by getEventSetDescriptors.
* <P> Returns -1 if there is no default event.
*/
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
package org.joone.engine.learning;
import java.beans.*;
public class TeacherSynapseBeanInfo extends SimpleBeanInfo {
// Bean descriptor //GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/;
private static BeanDescriptor getBdescriptor(){
// NetBeans-generated lazy builder: constructs a fresh descriptor per call.
BeanDescriptor beanDescriptor = new BeanDescriptor ( TeacherSynapse.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers //GEN-FIRST:Properties
private static final int PROPERTY_inputDimension = 0;
private static final int PROPERTY_name = 1;
private static final int PROPERTY_desired = 2;
private static final int PROPERTY_outputDimension = 3;
private static final int PROPERTY_momentum = 4;
private static final int PROPERTY_learningRate = 5;
private static final int PROPERTY_ignoreBefore = 6;
private static final int PROPERTY_enabled = 7;
private static final int PROPERTY_monitor = 8;
// Property array
/*lazy PropertyDescriptor*/;
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[9];
try {
properties[PROPERTY_inputDimension] = new PropertyDescriptor ( "inputDimension", TeacherSynapse.class, "getInputDimension", "setInputDimension" );
properties[PROPERTY_inputDimension].setExpert ( true );
properties[PROPERTY_name] = new PropertyDescriptor ( "name", TeacherSynapse.class, "getName", "setName" );
properties[PROPERTY_desired] = new PropertyDescriptor ( "desired", TeacherSynapse.class, "getDesired", "setDesired" );
properties[PROPERTY_desired].setExpert ( true );
properties[PROPERTY_outputDimension] = new PropertyDescriptor ( "outputDimension", TeacherSynapse.class, "getOutputDimension", "setOutputDimension" );
properties[PROPERTY_outputDimension].setExpert ( true );
properties[PROPERTY_momentum] = new PropertyDescriptor ( "momentum", TeacherSynapse.class, "getMomentum", "setMomentum" );
properties[PROPERTY_learningRate] = new PropertyDescriptor ( "learningRate", TeacherSynapse.class, "getLearningRate", "setLearningRate" );
properties[PROPERTY_ignoreBefore] = new PropertyDescriptor ( "ignoreBefore", TeacherSynapse.class, "getIgnoreBefore", "setIgnoreBefore" );
properties[PROPERTY_enabled] = new PropertyDescriptor ( "enabled", TeacherSynapse.class, "isEnabled", "setEnabled" );
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", TeacherSynapse.class, "getMonitor", "setMonitor" );
properties[PROPERTY_monitor].setExpert ( true );
}
// Generated code: introspection failures are deliberately swallowed and
// surface as a partially-filled descriptor array.
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/;
private static EventSetDescriptor[] getEdescriptor(){
// This bean exposes no event sets.
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers //GEN-FIRST:Methods
private static final int METHOD_fwdGet0 = 0;
private static final int METHOD_fwdPut1 = 1;
private static final int METHOD_revGet2 = 2;
private static final int METHOD_revPut3 = 3;
private static final int METHOD_addNoise4 = 4;
private static final int METHOD_randomize5 = 5;
private static final int METHOD_canCountSteps6 = 6;
// Method array
/*lazy MethodDescriptor*/;
    /**
     * Builds the MethodDescriptor array advertising the public methods of
     * TeacherSynapse to bean-aware tools (display names are blanked).
     */
    private static MethodDescriptor[] getMdescriptor(){
        MethodDescriptor[] methods = new MethodDescriptor[7];
        try {
            methods[METHOD_fwdGet0] = new MethodDescriptor ( org.joone.engine.learning.TeacherSynapse.class.getMethod("fwdGet", new Class[] {}));
            methods[METHOD_fwdGet0].setDisplayName ( "" );
            methods[METHOD_fwdPut1] = new MethodDescriptor ( org.joone.engine.learning.TeacherSynapse.class.getMethod("fwdPut", new Class[] {org.joone.engine.Pattern.class}));
            methods[METHOD_fwdPut1].setDisplayName ( "" );
            methods[METHOD_revGet2] = new MethodDescriptor ( org.joone.engine.learning.TeacherSynapse.class.getMethod("revGet", new Class[] {}));
            methods[METHOD_revGet2].setDisplayName ( "" );
            methods[METHOD_revPut3] = new MethodDescriptor ( org.joone.engine.learning.TeacherSynapse.class.getMethod("revPut", new Class[] {org.joone.engine.Pattern.class}));
            methods[METHOD_revPut3].setDisplayName ( "" );
            methods[METHOD_addNoise4] = new MethodDescriptor ( org.joone.engine.learning.TeacherSynapse.class.getMethod("addNoise", new Class[] {Double.TYPE}));
            methods[METHOD_addNoise4].setDisplayName ( "" );
            methods[METHOD_randomize5] = new MethodDescriptor ( org.joone.engine.learning.TeacherSynapse.class.getMethod("randomize", new Class[] {Double.TYPE}));
            methods[METHOD_randomize5].setDisplayName ( "" );
            methods[METHOD_canCountSteps6] = new MethodDescriptor ( org.joone.engine.learning.TeacherSynapse.class.getMethod("canCountSteps", new Class[] {}));
            methods[METHOD_canCountSteps6].setDisplayName ( "" );
        }
        // NOTE(review): the empty catch below is NetBeans-generated boilerplate;
        // a reflection failure silently yields a partially-filled array.
        catch( Exception e) {}//GEN-HEADEREND:Methods
        // Here you can add code for customizing the methods array.
        return methods; }//GEN-LAST:Methods
    // -1 means this bean declares no default property/event; returned by the
    // getDefaultPropertyIndex()/getDefaultEventIndex() accessors below.
    private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
    private static final int defaultEventIndex = -1;//GEN-END:Idx
    //GEN-FIRST:Superclass
    // Here you can add code for customizing the Superclass BeanInfo.
    //GEN-LAST:Superclass
    /**
     * Gets the bean's <code>BeanDescriptor</code>.
     *
     * @return BeanDescriptor describing the editable
     * properties of this bean. May return null if the
     * information should be obtained by automatic analysis.
     */
    public BeanDescriptor getBeanDescriptor() {
        return getBdescriptor();
    }
    /**
     * Gets the bean's <code>PropertyDescriptor</code>s.
     * Delegates to the generated {@code getPdescriptor()}.
     *
     * @return An array of PropertyDescriptors describing the editable
     * properties supported by this bean. May return null if the
     * information should be obtained by automatic analysis.
     * <p>
     * If a property is indexed, then its entry in the result array will
     * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
     * A client of getPropertyDescriptors can use "instanceof" to check
     * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
     */
    public PropertyDescriptor[] getPropertyDescriptors() {
        return getPdescriptor();
    }
    /**
     * Gets the bean's <code>EventSetDescriptor</code>s.
     * Delegates to the generated {@code getEdescriptor()}.
     *
     * @return An array of EventSetDescriptors describing the kinds of
     * events fired by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public EventSetDescriptor[] getEventSetDescriptors() {
        return getEdescriptor();
    }
    /**
     * Gets the bean's <code>MethodDescriptor</code>s.
     * Delegates to the generated {@code getMdescriptor()}.
     *
     * @return An array of MethodDescriptors describing the methods
     * implemented by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public MethodDescriptor[] getMethodDescriptors() {
        return getMdescriptor();
    }
    /**
     * A bean may have a "default" property that is the property that will
     * most commonly be initially chosen for update by humans who are
     * customizing the bean.
     * @return Index of default property in the PropertyDescriptor array
     * returned by getPropertyDescriptors.
     * <P> Returns -1 if there is no default property.
     */
    public int getDefaultPropertyIndex() {
        return defaultPropertyIndex;
    }
    /**
     * A bean may have a "default" event that is the event that will
     * most commonly be used by humans when using the bean.
     * @return Index of default event in the EventSetDescriptor array
     * returned by getEventSetDescriptors.
     * <P> Returns -1 if there is no default event.
     */
    public int getDefaultEventIndex() {
        return defaultEventIndex;
    }
}
| Java |
/*
* FahlmanTeacherSynapse.java
*
* Created on February 28, 2005, 1:55 PM
*/
package org.joone.engine.learning;
import org.joone.engine.Monitor;
import org.joone.log.*;
import org.joone.engine.listeners.*;
/**
 * <p>
 * This class extends the normal Teacher synapse and implements the Fahlman
 * 40-20-40 criterion (the values can be changed). This teacher only makes
 * sense in case of binary outputs.
 * <p>
 * In case of the default values (40-20-40) and considering [0,1] binary outputs
 * the criterion is fulfilled if for all patterns the output is
 * - within [0, 0.4] in case the desired output is 0
 * - within [0.6, 1] in case the desired output is 1.
 * <p>
 * More about this criterion can be found in
 * <a href="http://citeseer.ist.psu.edu/fahlman88empirical.html">Fahlman,
 * "An Empirical Study of Learning Speed in Back-Propagation Networks"</a>.
 * (An HTML anchor is used because the inline {@code @link} tag only accepts
 * program elements, not external URLs.)
 *
 * @author Boris Jansen
 */
public class FahlmanTeacherSynapse extends TeacherSynapse {
    
    /** Key of the parameter (stored in the monitor object) used for checking
     * whether the criterion has been fulfilled or not. */
    public static final String CRITERION = "FAHLMAN_CRITERION";
    
    /**
     * Logger
     **/
    protected static final ILogger log = LoggerFactory.getLogger(FahlmanTeacherSynapse.class);
    
    /** The upper-bit value (of the desired output), by default 1. */
    private double upperBit = 1.0;
    
    /** The lower-bit value (of the desired output), by default 0. */
    private double lowerBit = 0.0;
    
    /** The fraction of the output range that is considered as a lower bit, by default 0.4.
     * In case of desired output bits {0, 1} any output within [0, 0.4]
     * is considered a lower bit.
     */
    private double lowerBitPercentage = 0.4;
    
    /** The fraction of the output range that is considered as an upper bit, by default 0.4.
     * In case of desired output bits {0, 1} any output within [0.6, 1]
     * is considered an upper bit.
     */
    private double upperBitPercentage = 0.4;
    
    /** Creates a new instance of FahlmanTeacherSynapse */
    public FahlmanTeacherSynapse() {
    }
    
    /**
     * Sets the upper bit.
     *
     * @param aValue sets the upper bit to <code>aValue</code>.
     */
    public void setUpperBit(double aValue) {
        upperBit = aValue;
    }
    
    /**
     * Gets the upper bit value.
     *
     * @return the upper bit value.
     */
    public double getUpperBit() {
        return upperBit;
    }
    
    /**
     * Sets the lower bit.
     *
     * @param aValue sets the lower bit to <code>aValue</code>.
     */
    public void setLowerBit(double aValue) {
        lowerBit = aValue;
    }
    
    /**
     * Gets the lower bit value.
     *
     * @return the lower bit value.
     */
    public double getLowerBit() {
        return lowerBit;
    }
    
    /**
     * Sets the upper bit percentage.
     *
     * @param aValue sets the upper bit percentage to <code>aValue</code>.
     */
    public void setUpperBitPercentage(double aValue) {
        upperBitPercentage = aValue;
    }
    
    /**
     * Gets the upper bit percentage.
     *
     * @return the upper bit percentage.
     */
    public double getUpperBitPercentage() {
        return upperBitPercentage;
    }
    
    /**
     * Sets the lower bit percentage.
     *
     * @param aValue sets the lower bit percentage to <code>aValue</code>.
     */
    public void setLowerBitPercentage(double aValue) {
        lowerBitPercentage = aValue;
    }
    
    /**
     * Gets the lower bit percentage.
     *
     * @return the lower bit percentage.
     */
    public double getLowerBitPercentage() {
        return lowerBitPercentage;
    }
    
    /**
     * During validation, additionally checks the Fahlman criterion for this
     * output neuron: whenever the output falls outside the configured
     * lower-bit or upper-bit band, the CRITERION monitor parameter is set to
     * FALSE. The error itself is still computed by the superclass.
     */
    protected double calculateError(double aDesired, double anOutput, int anIndex) {
        if(getMonitor().isValidation()) {
            double myRange = upperBit - lowerBit;
            // Exact double equality is intended here: desired outputs are
            // expected to be exactly the configured binary bit values.
            if(aDesired == lowerBit) {
                myRange *= lowerBitPercentage;
                // Output must lie in [lowerBit, lowerBit + range * lowerBitPercentage].
                if(!(anOutput >= lowerBit && anOutput <= lowerBit + myRange)) {
                    getMonitor().setParam(CRITERION, Boolean.FALSE);
                }
            } else if(aDesired == upperBit) {
                myRange *= upperBitPercentage;
                // Output must lie in [upperBit - range * upperBitPercentage, upperBit].
                if(!(anOutput >= upperBit - myRange && anOutput <= upperBit)) {
                    getMonitor().setParam(CRITERION, Boolean.FALSE);
                }
            } else {
                // Desired value matches neither configured bit: configuration error.
                log.warn("The values for upper and/or lower bit are not correctly set. No match for desired output "
                        + aDesired + ".");
                getMonitor().setParam(CRITERION, Boolean.FALSE);
            }
        }
        return super.calculateError(aDesired, anOutput, anIndex);
    }
}
| Java |
package org.joone.engine.learning;
import java.beans.*;
/**
 * NetBeans-generated BeanInfo describing the ComparingSynapse bean
 * (its properties, events and methods) to bean-aware tools.
 */
public class ComparingSynapseBeanInfo extends SimpleBeanInfo {
    
    // Bean descriptor //GEN-FIRST:BeanDescriptor
    /*lazy BeanDescriptor*/
    /** Builds the BeanDescriptor for ComparingSynapse (no customizer class). */
    private static BeanDescriptor getBdescriptor(){
        BeanDescriptor beanDescriptor = new BeanDescriptor  ( ComparingSynapse.class , null );//GEN-HEADEREND:BeanDescriptor
        // Here you can add code for customizing the BeanDescriptor.
        return beanDescriptor; }//GEN-LAST:BeanDescriptor
    
    // Property identifiers //GEN-FIRST:Properties
    // Indexes of the entries in the PropertyDescriptor array built by getPdescriptor().
    private static final int PROPERTY_enabled = 0;
    private static final int PROPERTY_name = 1;
    
    // Property array
    /*lazy PropertyDescriptor*/
    /** Builds the PropertyDescriptor array for the editable bean properties. */
    private static PropertyDescriptor[] getPdescriptor(){
        PropertyDescriptor[] properties = new PropertyDescriptor[2];
        try {
            properties[PROPERTY_enabled] = new PropertyDescriptor ( "enabled", ComparingSynapse.class, "isEnabled", "setEnabled" );
            properties[PROPERTY_name] = new PropertyDescriptor ( "name", ComparingSynapse.class, "getName", "setName" );
        }
        // NOTE(review): empty catch is NetBeans-generated boilerplate; an
        // introspection failure silently yields a partially-filled array.
        catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
        // Here you can add code for customizing the properties array.
        return properties; }//GEN-LAST:Properties
    
    // EventSet identifiers//GEN-FIRST:Events
    // EventSet array
    /*lazy EventSetDescriptor*/
    /** Builds the EventSetDescriptor array; this bean fires no events of its own. */
    private static EventSetDescriptor[] getEdescriptor(){
        EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
        // Here you can add code for customizing the event sets array.
        return eventSets; }//GEN-LAST:Events
    
    // Method identifiers //GEN-FIRST:Methods
    // Indexes of the entries in the MethodDescriptor array built by getMdescriptor().
    private static final int METHOD_fwdPut0 = 0;
    private static final int METHOD_revGet1 = 1;
    private static final int METHOD_addResultSynapse2 = 2;
    private static final int METHOD_removeResultSynapse3 = 3;
    private static final int METHOD_start4 = 4;
    private static final int METHOD_stop5 = 5;
    private static final int METHOD_resetInput6 = 6;
    private static final int METHOD_check7 = 7;
    
    // Method array
    /*lazy MethodDescriptor*/
    /** Builds the MethodDescriptor array advertising ComparingSynapse's public methods. */
    private static MethodDescriptor[] getMdescriptor(){
        MethodDescriptor[] methods = new MethodDescriptor[8];
        try {
            methods[METHOD_fwdPut0] = new MethodDescriptor ( org.joone.engine.learning.ComparingSynapse.class.getMethod("fwdPut", new Class[] {org.joone.engine.Pattern.class}));
            methods[METHOD_fwdPut0].setDisplayName ( "" );
            methods[METHOD_revGet1] = new MethodDescriptor ( org.joone.engine.learning.ComparingSynapse.class.getMethod("revGet", new Class[] {}));
            methods[METHOD_revGet1].setDisplayName ( "" );
            methods[METHOD_addResultSynapse2] = new MethodDescriptor ( org.joone.engine.learning.ComparingSynapse.class.getMethod("addResultSynapse", new Class[] {org.joone.engine.OutputPatternListener.class}));
            methods[METHOD_addResultSynapse2].setDisplayName ( "" );
            methods[METHOD_removeResultSynapse3] = new MethodDescriptor ( org.joone.engine.learning.ComparingSynapse.class.getMethod("removeResultSynapse", new Class[] {org.joone.engine.OutputPatternListener.class}));
            methods[METHOD_removeResultSynapse3].setDisplayName ( "" );
            methods[METHOD_start4] = new MethodDescriptor ( org.joone.engine.learning.ComparingSynapse.class.getMethod("start", new Class[] {}));
            methods[METHOD_start4].setDisplayName ( "" );
            methods[METHOD_stop5] = new MethodDescriptor ( org.joone.engine.learning.ComparingSynapse.class.getMethod("stop", new Class[] {}));
            methods[METHOD_stop5].setDisplayName ( "" );
            methods[METHOD_resetInput6] = new MethodDescriptor ( org.joone.engine.learning.ComparingSynapse.class.getMethod("resetInput", new Class[] {}));
            methods[METHOD_resetInput6].setDisplayName ( "" );
            methods[METHOD_check7] = new MethodDescriptor ( org.joone.engine.learning.ComparingSynapse.class.getMethod("check", new Class[] {}));
            methods[METHOD_check7].setDisplayName ( "" );
        }
        // NOTE(review): empty catch is NetBeans-generated boilerplate; a
        // reflection failure silently yields a partially-filled array.
        catch( Exception e) {}//GEN-HEADEREND:Methods
        // Here you can add code for customizing the methods array.
        return methods; }//GEN-LAST:Methods
    
    // -1 means this bean declares no default property/event.
    private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
    private static final int defaultEventIndex = -1;//GEN-END:Idx
    //GEN-FIRST:Superclass
    // Here you can add code for customizing the Superclass BeanInfo.
    //GEN-LAST:Superclass
    
    /**
     * Gets the bean's <code>BeanDescriptor</code>.
     *
     * @return BeanDescriptor describing the editable
     * properties of this bean. May return null if the
     * information should be obtained by automatic analysis.
     */
    public BeanDescriptor getBeanDescriptor() {
        return getBdescriptor();
    }
    
    /**
     * Gets the bean's <code>PropertyDescriptor</code>s.
     *
     * @return An array of PropertyDescriptors describing the editable
     * properties supported by this bean. May return null if the
     * information should be obtained by automatic analysis.
     * <p>
     * If a property is indexed, then its entry in the result array will
     * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
     * A client of getPropertyDescriptors can use "instanceof" to check
     * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
     */
    public PropertyDescriptor[] getPropertyDescriptors() {
        return getPdescriptor();
    }
    
    /**
     * Gets the bean's <code>EventSetDescriptor</code>s.
     *
     * @return An array of EventSetDescriptors describing the kinds of
     * events fired by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public EventSetDescriptor[] getEventSetDescriptors() {
        return getEdescriptor();
    }
    
    /**
     * Gets the bean's <code>MethodDescriptor</code>s.
     *
     * @return An array of MethodDescriptors describing the methods
     * implemented by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public MethodDescriptor[] getMethodDescriptors() {
        return getMdescriptor();
    }
    
    /**
     * A bean may have a "default" property that is the property that will
     * most commonly be initially chosen for update by humans who are
     * customizing the bean.
     * @return Index of default property in the PropertyDescriptor array
     * returned by getPropertyDescriptors.
     * <P> Returns -1 if there is no default property.
     */
    public int getDefaultPropertyIndex() {
        return defaultPropertyIndex;
    }
    
    /**
     * A bean may have a "default" event that is the event that will
     * most commonly be used by humans when using the bean.
     * @return Index of default event in the EventSetDescriptor array
     * returned by getEventSetDescriptors.
     * <P> Returns -1 if there is no default event.
     */
    public int getDefaultEventIndex() {
        return defaultEventIndex;
    }
}
| Java |
/*
* ComparingElement.java
*
* Created on 25 may 2003, 16.53
*/
package org.joone.engine.learning;
import org.joone.engine.*;
import org.joone.io.*;
import java.io.Serializable;
/**
 * This interface describes an element that can compare the output of the layer to which it
 * is connected, with another input deriving from a StreamInputSynapse named 'desired'.
 * To elaborate the result of the comparison, attach to its output any component
 * implementing the OutputPatternListener interface (use addResultSynapse to do it).
 * Its main purpose is to describe the interface of a component used to teach the
 * neural network, but it can be used whenever it's necessary to compare two patterns.
 * @author pmarrone
 */
public interface ComparingElement extends OutputPatternListener, Serializable {
    /**
     * Getter for the desired data set.
     *
     * @return the StreamInputSynapse the desired patterns are read from
     */
    public StreamInputSynapse getDesired();
    /**
     * Setter for the desired data set.
     *
     * @param desired the input synapse providing the desired patterns
     * @return true if the synapse was accepted, false otherwise
     */
    public boolean setDesired(StreamInputSynapse desired);
    /**
     * Adds an output synapse to which the result must be sent.
     *
     * @param listener the output synapse to attach
     * @return true if the listener was attached, false otherwise
     */
    public boolean addResultSynapse(OutputPatternListener listener);
    /**
     * Removes an output synapse.
     *
     * @param listener the output synapse to detach
     */
    public void removeResultSynapse(OutputPatternListener listener);
    /**
     * Returns the internal Layer used to transport the result to the connected output synapse.
     *
     * @return the internal LinearLayer
     */
    public LinearLayer getTheLinearLayer();
    /**
     * Resets the internal buffer of the desired StreamInputSynapse.
     */
    public void resetInput();
}
| Java |
package org.joone.engine.learning;
import org.joone.log.*;
import org.joone.engine.*;
import org.joone.io.*;
import org.joone.net.NetCheck;
import java.io.IOException;
import java.util.TreeSet;
/**
* Final element of a neural network; it permits to calculate
* both the error of the last training cycle and the vector
* containing the error pattern to apply to the net to
* calculate the backprop algorithm.
*/
public class TeacherSynapse extends AbstractTeacherSynapse {
/**
* Logger
**/
protected static final ILogger log = LoggerFactory.getLogger(TeacherSynapse.class);
/** The error being calculated for the current epoch. */
protected transient double GlobalError = 0;
private static final long serialVersionUID = -1301682557631180066L;
public TeacherSynapse() {
super();
}
protected double calculateError(double aDesired, double anOutput, int anIndex) {
double myError = aDesired - anOutput;
// myError = Dn - Yn
// myError^2 = (Dn - yn)^2
// GlobalError += SUM[ SUM[ 1/2 (Dn - yn)^2]]
// GlobalError += SUM[ 1/2 SUM[(Dn - yn)^2]]
GlobalError += (myError * myError) / 2;
return myError;
}
protected double calculateGlobalError() {
double myError = GlobalError / getMonitor().getNumOfPatterns();
if(getMonitor().isUseRMSE()) {
myError = Math.sqrt(myError);
}
GlobalError = 0;
return myError;
}
public void fwdPut(Pattern pattern) {
super.fwdPut(pattern);
if (pattern.getCount() == -1) {
// reset error
GlobalError = 0;
}
}
} | Java |
package org.joone.engine.learning;
import org.joone.log.*;
import org.joone.engine.*;
import org.joone.io.*;
import org.joone.net.NetCheck;
import java.io.IOException;
import java.util.TreeSet;
/**
* Final element of a neural network; it permits to compare
* the outcome of the neural net and the input patterns
* from a StreamInputSynapse connected to the 'desired'
* property. Used by the ComparingSynapse object.
*/
public class ComparisonSynapse extends Synapse {
/**
* Logger
* */
private static final ILogger log = LoggerFactory.getLogger(TeacherSynapse.class);
private StreamInputSynapse desired;
protected transient Fifo fifo;
protected transient boolean firstTime = true;
private static final long serialVersionUID = -1301682557631180066L;
public ComparisonSynapse() {
super();
firstTime = true;
}
protected void backward(double[] pattern) {
// Not used.
}
protected void forward(double[] pActual) {
Pattern pattDesired;
double[] pTarget;
int x;
if ((m_pattern.getCount() == 1) || (m_pattern.getCount() == -1)) {
try {
desired.gotoFirstLine();
} catch (IOException ioe) {
log.warn("IOException while forwarding the influx. Message is : " + ioe.getMessage(),
ioe);
}
}
if (m_pattern.getCount() == -1) {
stopTheNet();
return;
}
firstTime = false;
outs = new double[pActual.length];
pattDesired = desired.fwdGet();
if (m_pattern.getCount() != pattDesired.getCount()) {
new NetErrorManager(getMonitor(),"ComparisonSynapse: No matching patterns - input#" + m_pattern.getCount() + " desired#" + pattDesired.getCount());
return;
}
pTarget = pattDesired.getArray();
if (pTarget != null) {
outs = new double[getOutputDimension()];
int i,n;
for (i=0, n=0; i < pActual.length; ++i,++n) {
outs[n] = pActual[i];
}
for (i=0; i < pTarget.length; ++i,++n) {
outs[n] = pTarget[i];
}
pushValue(outs, m_pattern.getCount());
}
}
protected void stopTheNet() {
pushStop();
firstTime = true;
}
public Pattern fwdGet() {
synchronized (this) {
while (getFifo().empty()) {
try {
wait();
} catch (InterruptedException ie) { //e.printStackTrace();
log.warn("wait() was interrupted. Message is : " + ie.getMessage());
return null;
}
}
Pattern errPatt = (Pattern)fifo.pop();
notifyAll();
return errPatt;
}
}
public void fwdPut(Pattern pattern) {
int step = pattern.getCount();
if (!isEnabled()) {
if (step == -1)
stopTheNet();
return;
}
super.fwdPut(pattern);
items = 0;
}
/**
* Inserire qui la descrizione del metodo.
* Data di creazione: (11/04/00 1.12.04)
* @return neural.engine.StreamInputSynapse
*/
public StreamInputSynapse getDesired() {
return desired;
}
/**
* Insert the method's description here.
* Creation date: (23/09/2000 2.16.17)
* @return neural.engine.Fifo
*/
private Fifo getFifo() {
if (fifo == null)
fifo = new Fifo();
return fifo;
}
public Pattern revGet() {
return null;
}
public void revPut(Pattern pattern) {
// Not used.
}
/**
* setArrays method comment.
*/
protected void setArrays(int rows, int cols) {
}
/**
* Set the input data stream containing desired training data
* @param newDesired neural.engine.StreamInputSynapse
*/
public boolean setDesired(StreamInputSynapse newDesired) {
if (newDesired == null) {
if (desired != null)
desired.setInputFull(false);
desired = newDesired;
}
else {
if (newDesired.isInputFull())
return false;
desired = newDesired;
desired.setStepCounter(false);
desired.setOutputDimension(getInputDimension());
desired.setInputFull(true);
}
return true;
}
public void resetInput() {
if (getDesired() != null)
getDesired().resetInput();
}
protected void setDimensions(int rows, int cols) {
}
public void setInputDimension(int newInputDimension) {
super.setInputDimension(newInputDimension);
if (getDesired() != null)
getDesired().setOutputDimension(newInputDimension);
}
public TreeSet check() {
TreeSet checks = super.check();
if (desired == null) {
checks.add(new NetCheck(NetCheck.FATAL, "Desired Input has not been set.", this));
}
else
checks.addAll(desired.check());
return checks;
}
/** reset of the input synapse
*
*/
public void reset() {
super.reset();
if (getDesired() != null)
getDesired().reset();
}
/** Sets the Monitor object of the Teacher Synapse.
* Adds this Techer Synapse as a NeuralNetListener so that it can reset after a critical error.
* @param newMonitor neural.engine.Monitor
*/
public void setMonitor(Monitor newMonitor) {
super.setMonitor(newMonitor);
if (getMonitor() != null) {
this.getMonitor().setSupervised(true);
}
}
public void netStoppedError(NeuralNetEvent e, String error) {
pushStop();
firstTime = true;
this.reset();
}
private void pushStop() {
double[] arr = new double[getOutputDimension()];
pushValue(arr, -1);
}
private void pushValue(double[] arr, int count) {
Pattern patt = new Pattern(arr);
patt.setCount(count);
synchronized (this) {
getFifo().push(patt);
notify();
}
}
/** Returns the output dimension of the synapse.
* @return int
*
*/
public int getOutputDimension() {
return getInputDimension() * 2;
}
public void init() {
super.init();
if (getDesired() != null)
getDesired().init();
}
} | Java |
/*
* AbstractTeacherSynapse.java
*
* Created on February 26, 2005, 3:51 PM
*/
package org.joone.engine.learning;
import org.joone.log.*;
import org.joone.engine.*;
import org.joone.io.*;
import org.joone.net.NetCheck;
import java.io.*;
import java.util.TreeSet;
/**
* This class provides a framework to extend in order to implement various teachers,
* just by overriding or implementing certain functions different functionality can
* easily implemented.
*
* @author Boris Jansen
*/
public abstract class AbstractTeacherSynapse extends Synapse {
private static final long serialVersionUID = -3501303723175798936L;
// Developer note:
// ---------------
// Basically almost every code from TeacherSynapse is moved to this class and some functions
// are split up in smaller functions. Whenever you (preferable a Joone developer) want to
// implement a new teacher a certain methods need to be called based on certain events or
// states, then add an abstract function to be called here and implement it in your teacher.
// This way the AbstractTeacherSynapse will become more abstract, creating a framework that
// enables the creation of more various teachers.
/**
* Logger
**/
protected static final ILogger log = LoggerFactory.getLogger(AbstractTeacherSynapse.class);
/** First time data is passed to this teacher? */
private transient boolean firstTime = true;
/** Number of patterns seen during the current epoch. */
private transient int patterns = 0;
/** The stream from where to read the desired input. */
protected StreamInputSynapse desired;
/** Into this FIFO (first-in-first-out) object, the calculated error (e.g. RMSE) after
* an epoch will be pushed. This way an (external) application/component is able to read
* the errors at any moment, providing a loose-coupling mechanism.
*/
protected transient Fifo error;
/** Creates a new instance of AbstractTeacherSynapse */
public AbstractTeacherSynapse() {
super();
setFirstTime(true);
}
/**
* Sets the first time flag (is it the first time data is forwarded to this teacher).
*
* @param aValue value for the first time flag.
*/
protected void setFirstTime(boolean aValue) {
firstTime = aValue;
}
/**
* Checks whether it is the first time data is passed to this teacher or not.
*
* @return <code>true</code> if it is the first time data is passed to this teacher,
* <code>false</code> otherwise.
*/
protected boolean isFirstTime() {
return firstTime;
}
protected void backward(double[] pattern) {
// Not used.
}
/**
* Pushes the calculated array in the FIFO queue at the end of a
* epoch, that is after all patterns have been seen.
*
* @param error, the calculated error.
* @param count, the cycle of the calculated error.
*/
protected void pushError(double error, int count) {
double[] cost = new double[1];
cost[0] = error;
Pattern ptnErr = new Pattern(cost);
ptnErr.setCount(count);
synchronized (this) {
getError().push(ptnErr);
notify();
}
}
/**
* Gets the object holding the errors.
*
* @return the FIFO object holding the errors.
*/
private Fifo getError() {
if (error == null) {
error = new Fifo();
}
return error;
}
protected void stopTheNet() {
pushError(0.0, -1);
patterns = 0;
setFirstTime(true);
if(getMonitor() != null) {
new NetStoppedEventNotifier(getMonitor()).start();
}
}
/**
* Get the value of the number of patterns seen during the current epoch.
*
* @return the patterns seen during the current epoch.
*/
protected int getSeenPatterns() {
return patterns;
}
/**
* Set the value of the number of patterns seen during the current epoch.
*
* @param aValue the new value for the number of patterns seen during the
* current epoch.
*/
protected void setSeenPatterns(int aValue) {
patterns = aValue;
}
/**
* Increases the number of seen patterns by one.
*/
protected void incSeenPatterns() {
patterns++;
}
/**
* Here, it forwards (returns) the pushed error (in FIFO order).
*
* @return the pattern holding the error of the network.
* {@link Synapse#fwdGet()
*/
public Pattern fwdGet() {
synchronized (this) {
while (getError().empty()) {
try {
wait();
} catch (InterruptedException ie) {
//e.printStackTrace();
//log.warn("wait() was interrupted. Message is : " + ie.getMessage());
return null;
}
}
Pattern errPatt = (Pattern) error.pop();
notify();
return errPatt;
}
}
/**
* Gets the stream to read the desired output.
*
* @return the desired output stream.
*/
public StreamInputSynapse getDesired() {
return desired;
}
/**
* Set the input data stream containing desired training data.
*
* @param newDesired the stream from where to read the desired output.
*/
public boolean setDesired(StreamInputSynapse newDesired) {
if(newDesired == null) {
if (desired != null) {
desired.setInputFull(false);
}
desired = newDesired;
} else {
if (newDesired.isInputFull()) {
return false;
}
desired = newDesired;
desired.setStepCounter(false);
desired.setOutputDimension(getInputDimension());
desired.setInputFull(true);
}
return true;
}
protected Object readResolve() {
super.readResolve();
setFirstTime(true);
if (getMonitor()!= null) {
getMonitor().setSupervised(true);
}
return this;
}
protected void setArrays(int rows, int cols) {
}
protected void setDimensions(int rows, int cols) {
}
public void setInputDimension(int newInputDimension) {
super.setInputDimension(newInputDimension);
if (getDesired() != null) {
getDesired().setOutputDimension(newInputDimension);
}
}
/**
* Reset the input and desired synapses
*/
public void reset() {
super.reset();
setSeenPatterns(0);
setFirstTime(true);
if (getDesired() != null) {
getDesired().reset();
}
}
public void resetInput() {
if(getDesired() != null) {
getDesired().resetInput();
}
}
/**
* Sets the Monitor object of the Teacher Synapse.
*
* @param newMonitor neural.engine.Monitor
*/
public void setMonitor(Monitor newMonitor) {
super.setMonitor(newMonitor);
if(getMonitor() != null) {
this.getMonitor().setSupervised(true);
}
}
public void netStoppedError() {
pushError(0.0, -1);
setSeenPatterns(0);
setFirstTime(true);
reset();
}
public void init() {
super.init();
setSeenPatterns(0);
setFirstTime(true);
if(getDesired() != null) {
getDesired().init();
}
}
public TreeSet check() {
TreeSet checks = super.check();
if(getDesired() == null) {
checks.add(new NetCheck(NetCheck.FATAL, "Desired Input has not been set.", this));
} else {
checks.addAll(getDesired().check());
}
return checks;
}
public void revPut(Pattern pattern) {
// Not used.
}
public Pattern revGet() {
if (isEnabled()) {
return super.fwdGet();
} else {
return null;
}
}
public void fwdPut(Pattern pattern) {
int step = pattern.getCount();
if (!getMonitor().isSingleThreadMode()) {
if((getMonitor() == null) || (!isEnabled())) {
if (step == -1) {
stopTheNet();
}
return;
}
}
super.fwdPut(pattern);
if (step != -1) {
if (!getMonitor().isLearningCicle(step)) {
items = 0;
}
} else {
items = 0;
}
}
protected void forward(double[] pattern) {
Pattern pattDesired;
double[] pDesired;
double myGlobalError; // error at the end of an epoch
if ((m_pattern.getCount() == 1) || (m_pattern.getCount() == -1)) {
// new epoch / end of previous epoch
try {
desired.gotoFirstLine();
if((!isFirstTime()) && (getSeenPatterns() == getMonitor().getNumOfPatterns())) {
myGlobalError = calculateGlobalError();
pushError(myGlobalError, getMonitor().getTotCicles() - getMonitor().getCurrentCicle());
getMonitor().setGlobalError(myGlobalError);
epochFinished();
setSeenPatterns(0);
}
} catch (IOException ioe) {
new NetErrorManager(getMonitor(),"TeacherSynapse: IOException while forwarding the influx. Message is : " + ioe.getMessage());
return;
}
}
if (m_pattern.getCount() == -1) {
if (!getMonitor().isSingleThreadMode()) {
stopTheNet();
} else {
pushError(0.0, -1);
}
return;
}
setFirstTime(false);
outs = new double[pattern.length];
pattDesired = desired.fwdGet();
if (m_pattern.getCount() != pattDesired.getCount()) {
try {
desired.gotoLine(m_pattern.getCount());
pattDesired = desired.fwdGet();
if (m_pattern.getCount() != pattDesired.getCount()) {
new NetErrorManager(getMonitor(),"TeacherSynapse: No matching patterns - input#" + m_pattern.getCount() + " desired#" + pattDesired.getCount());
return;
}
} catch (IOException ioe) {
new NetErrorManager(getMonitor(),"TeacherSynapse: IOException while forwarding the influx. Message is : " + ioe.getMessage());
return;
}
}
// The error calculation starts from the preLearning+1 pattern
if (getMonitor().getPreLearning() < m_pattern.getCount()) {
pDesired = pattDesired.getArray();
if (pDesired != null) {
if(pDesired.length != outs.length) {
// if the desired output differs in size, we will back propagate
// an pattern of the same size as the desired output so the output
// layer will adjust its size. The error pattern will contain zero
// values so no learning takes place during this backward pass.
log.warn("Size output pattern mismatches size desired pattern." +
" Zero-valued desired pattern sized error pattern will be backpropagated.");
outs = new double[pDesired.length];
} else {
constructErrorPattern(pDesired, pattern);
}
}
}
incSeenPatterns();
}
/**
* Constructs the error pattern that will be back-propagated.
*
* @param aDesired the desired pattern
* @param anOutput the actual output pattern
*/
protected void constructErrorPattern(double[] aDesired, double[] anOutput) {
for(int x = 0; x < aDesired.length; ++x) {
outs[x] = calculateError(aDesired[x], anOutput[x], x);
}
/** For debuging purpose to view the desired output
* String myText = "Desired: ";
* for (int x = 0; x < aDesired.length; ++x) {
* myText += aDesired[x] + " ";
* }
* System.out.println(myText);
* end debug */
}
/**
* Calculates the error to be backpropagated for a single output neuron.
* (The implementation is also expected to update the global error internally,
* so that calculateGlobalError() can report it at the end of the epoch.)
*
* @param aDesired the desired output
* @param anOutput the actual output of a single neuron
* @param anIndex the index of the output neuron
* @return the error to be back propagated
*/
protected abstract double calculateError(double aDesired, double anOutput, int anIndex);
/**
* This method is called after an epoch finished and the global error should
* be calculated.
*
* @return the global error (at the end of an epoch).
*/
protected abstract double calculateGlobalError();
/**
* This method is called to signal that an epoch has finished. Better to say is
* that a new epoch has started, because this method is called when the first pattern
* of a new epoch arrives at the teacher.
* New implementations of teachers can overwrite this method for their own use. (Please
* do call <code>super.epochFinished()</code>).
*/
protected void epochFinished() {
// Intentionally empty: hook for subclasses that need per-epoch bookkeeping.
}
}
| Java |
package org.joone.engine.learning;
import java.util.Iterator;
import org.joone.engine.*;
import org.joone.io.*;
import org.joone.net.NetCheck;
import java.util.TreeSet;
/**
 * Couples a teacher component to the output layer of a neural network.
 * It lazily builds an AbstractTeacherSynapse (which computes the error
 * pattern against the desired data set) and a one-row LinearLayer (which
 * transports the resulting error measure to any attached result synapses).
 * The error pattern computed by the teacher is back-propagated through
 * {@link #revGet()}.
 */
public class TeachingSynapse implements ComparingElement {
// The teacher computing the error; lazily created by getTheTeacherSynapse().
protected AbstractTeacherSynapse theTeacherSynapse;
// One-row transport layer pushing the error measure to the result synapses.
private LinearLayer theLinearLayer;
private boolean enabled = true;
private boolean outputFull = false;
/** The teacher to use. If null a normal <code>TeacherSynapse</code> will be used.
* The moment the teacher (<code>theTeacherSynapse</code>) is initialized is done
* the first time <code>getTheTeacherSynapse()</code> method is called. At that
* moment we are also able to set the monitor object.
*/
private AbstractTeacherSynapse theTeacherToUse = null;
/**
* The input synapse carrying the desired (target) patterns.
* @label desired
*/
private StreamInputSynapse desired;
private Monitor monitor;
private String name;
private static final long serialVersionUID = -8893181016305737666L;
/** Creates a TeachingSynapse using the default TeacherSynapse implementation. */
public TeachingSynapse() {
}
/**
* Creates a TeachingSynapse
*
* @param aTeacher the teacher to use. The default constructor
* (<code>TeachingSynapse()</code>) uses a normal <code>TeacherSynapse</code>.
*/
public TeachingSynapse(TeacherSynapse aTeacher) {
theTeacherToUse = aTeacher;
}
/**
* Forwards the network's output pattern to the teacher so the error can be
* computed. In multi-thread mode the transport layer runs in its own thread;
* in single-thread mode this method pumps the layer manually.
*
* @param pattern the output pattern produced by the network
*/
public void fwdPut(Pattern pattern) {
Monitor mon = getMonitor();
// In interrogation mode, the Teacher must not be used
if (!mon.isLearning() && !mon.isValidation())
return;
if (!mon.isSingleThreadMode())
if (!getTheLinearLayer().isRunning())
getTheLinearLayer().start();
// Must be read BEFORE fwdPut below, which increments the seen-pattern count.
boolean firstTime = getTheTeacherSynapse().getSeenPatterns() == 0;
getTheTeacherSynapse().fwdPut(pattern);
if (mon.isSingleThreadMode()) {
// count == -1 is the stop pattern; count == 1 (and not the very first
// pattern ever) marks the start of a new epoch: in both cases the
// transport layer must be advanced one step by hand.
if (pattern.getCount() == -1)
getTheLinearLayer().fwdRun(null);
if ((pattern.getCount() == 1) && !firstTime)
getTheLinearLayer().fwdRun(null);
}
}
/**
* Returns the input synapse carrying the desired (target) patterns.
* @return java.lang.String
*/
public StreamInputSynapse getDesired() {
return desired;
}
/**
* getInputDimension method comment.
*/
public int getInputDimension() {
return getTheTeacherSynapse().getInputDimension();
}
/**
* Returns the monitor governing this component.
* @return neural.engine.Monitor
*/
public Monitor getMonitor() {
return monitor;
}
/**
* Lazily creates the one-row transport layer and wires the teacher to it.
* @return neural.engine.LinearLayer
* changed to public for Save As XML
*/
public LinearLayer getTheLinearLayer() {
if (theLinearLayer == null) {
theLinearLayer = new LinearLayer();
theLinearLayer.setLayerName("(R)MSE Layer");
if (monitor != null)
theLinearLayer.setMonitor(monitor);
theLinearLayer.setRows(1);
theLinearLayer.addInputSynapse(getTheTeacherSynapse());
}
return theLinearLayer;
}
/**
* Lazily creates the teacher: either the one injected via the constructor
* or a default TeacherSynapse.
* @return neural.engine.TeacherSynapse
* changed to public for Save As XML
*/
public AbstractTeacherSynapse getTheTeacherSynapse() {
if (theTeacherSynapse == null) {
if(theTeacherToUse != null) {
theTeacherSynapse = theTeacherToUse;
} else {
theTeacherSynapse = new TeacherSynapse();
theTeacherSynapse.setName("Teacher Synapse");
}
if (monitor != null) {
theTeacherSynapse.setMonitor(monitor);
}
}
return theTeacherSynapse;
}
/** Returns the error pattern to back-propagate, as computed by the teacher. */
public Pattern revGet() {
return getTheTeacherSynapse().revGet();
}
/**
* Attaches the input synapse carrying the desired patterns.
* @param fn the desired-data input synapse
* @return true if the teacher accepted the synapse
*/
public boolean setDesired(StreamInputSynapse fn) {
desired = fn;
if (getTheTeacherSynapse().setDesired(fn)) {
if ((monitor != null) && (desired != null))
desired.setMonitor(monitor);
return true;
} else
return false;
}
/** Attaches a listener that will receive the computed error measure. */
public boolean addResultSynapse(OutputPatternListener listener) {
if (listener != null)
return getTheLinearLayer().addOutputSynapse(listener);
else
return false;
}
/** Detaches a previously attached result listener. */
public void removeResultSynapse(OutputPatternListener listener) {
if (listener != null)
getTheLinearLayer().removeOutputSynapse(listener);
}
/**
* setInputDimension method.
*/
public void setInputDimension(int newInputDimension) {
getTheTeacherSynapse().setInputDimension(newInputDimension);
}
/**
* Sets the monitor and propagates it to the already-created internal
* components (teacher, transport layer, desired input).
* @param newMonitor neural.engine.Monitor
*/
public void setMonitor(Monitor newMonitor) {
monitor = newMonitor;
if (theTeacherSynapse != null)
theTeacherSynapse.setMonitor(newMonitor);
if (theLinearLayer != null)
theLinearLayer.setMonitor(newMonitor);
if (desired != null)
desired.setMonitor(newMonitor);
}
/** Stops the transport layer's thread. */
public void stop() {
getTheLinearLayer().stop();
}
public String getName() {
return name;
}
public void setName(java.lang.String newName) {
name = newName;
}
/**
* Recall phase
* @param pattern double[] - input pattern the transfer function is applied to
*/
protected void forward(double[] pattern) {
/* Not used */
}
/**
* Hook to size internal buffers; not used by this component.
*/
protected void setArrays(int rows, int cols) {
/* Not used */
}
/**
* @param rows number of rows
* @param cols number of columns
*/
protected void setDimensions(int rows, int cols) {
/* Not used */
}
/**
* Training phase.
* @param pattern double[] - input pattern
*/
protected void backward(double[] pattern) {
/* Not used */
}
/**
* Needed for Save as XML
*/
public void setTheTeacherSynapse(TeacherSynapse newTheTeacherSynapse) {
this.theTeacherSynapse = newTheTeacherSynapse;
}
/**
* Needed for Save as XML
*/
public void setTheLinearLayer(LinearLayer newTheLinearLayer) {
this.theLinearLayer = newTheLinearLayer;
}
/** Resets the desired-data input stream via the teacher. */
public void resetInput() {
getTheTeacherSynapse().resetInput();
}
/** Validates the configuration, re-attributing internal errors to this component. */
public TreeSet check() {
// Prepare an empty set for check messages;
TreeSet checks = new TreeSet();
if (!isOutputFull())
checks.add(new NetCheck(NetCheck.FATAL, "the Teacher seems to be not attached", this));
if (theLinearLayer != null) {
checks.addAll(setErrorSource(theLinearLayer.check()));
}
if (theTeacherSynapse != null) {
checks.addAll(setErrorSource(theTeacherSynapse.check()));
}
return checks;
}
/** Getter for property enabled.
* @return Value of property enabled.
*
*/
public boolean isEnabled() {
return enabled;
}
/** Setter for property enabled.
* The flag is also propagated to the internal teacher synapse.
* @param enabled New value of property enabled.
*
*/
public void setEnabled(boolean enabled) {
getTheTeacherSynapse().setEnabled(enabled);
this.enabled = enabled;
}
/** Getter for property outputFull.
* @return Value of property outputFull.
*
*/
public boolean isOutputFull() {
return outputFull;
}
/** Setter for property outputFull.
* @param outputFull New value of property outputFull.
*
*/
public void setOutputFull(boolean outputFull) {
this.outputFull = outputFull;
}
/** Re-initializes the teacher, if it has been created. */
public void init() {
if (theTeacherSynapse != null) {
theTeacherSynapse.init();
}
}
// Changes the source of the errors generated from internal components
// so check messages point at this TeachingSynapse rather than at its
// internal helper objects (I/O synapses and the Monitor keep their own).
private TreeSet setErrorSource(TreeSet errors) {
if (!errors.isEmpty()) {
Iterator iter = errors.iterator();
while (iter.hasNext()) {
NetCheck nc = (NetCheck)iter.next();
if (!(nc.getSource() instanceof Monitor) &&
!(nc.getSource() instanceof StreamInputSynapse) &&
!(nc.getSource() instanceof StreamOutputSynapse))
nc.setSource(this);
}
}
return errors;
}
}
package org.joone.engine.learning;
import java.util.Iterator;
import org.joone.engine.*;
import org.joone.io.*;
import java.util.TreeSet;
import org.joone.net.NetCheck;
public class ComparingSynapse implements ComparingElement {
private ComparisonSynapse theComparisonSynapse;
private LinearLayer theLinearLayer;
private boolean enabled = true;
private boolean outputFull = false;
/**
* @label desired
*/
private StreamInputSynapse desired;
private Monitor monitor;
private String name;
private static final long serialVersionUID = -8893181016305737666L;
public ComparingSynapse() {
}
public void fwdPut(Pattern pattern) {
if (!getTheLinearLayer().isRunning())
getTheLinearLayer().start();
getTheComparisonSynapse().fwdPut(pattern);
}
/**
* Insert the method's description here.
* Creation date: (03/08/2000 22.50.55)
* @return java.lang.String
*/
public StreamInputSynapse getDesired() {
return desired;
}
/**
* getInputDimension method comment.
*/
public int getInputDimension() {
return getTheComparisonSynapse().getInputDimension();
}
/**
* Insert the method's description here.
* Creation date: (03/08/2000 22.54.48)
* @return neural.engine.Monitor
*/
public Monitor getMonitor() {
return monitor;
}
/**
* @return neural.engine.LinearLayer
* changed to public for Save As XML
*/
public LinearLayer getTheLinearLayer() {
if (theLinearLayer == null) {
theLinearLayer = new LinearLayer();
theLinearLayer.setLayerName("Comparing LinearLayer");
if (monitor != null)
theLinearLayer.setMonitor(monitor);
theLinearLayer.setRows(1);
theLinearLayer.addInputSynapse(getTheComparisonSynapse());
}
return theLinearLayer;
}
/**
* @return neural.engine.TeacherSynapse
* changed to public for Save As XML
*/
public ComparisonSynapse getTheComparisonSynapse() {
if (theComparisonSynapse == null) {
theComparisonSynapse = new ComparisonSynapse();
theComparisonSynapse.setName("Teacher Synapse");
if (monitor != null)
theComparisonSynapse.setMonitor(monitor);
}
return theComparisonSynapse;
}
public Pattern revGet() {
return null;
}
public boolean setDesired(StreamInputSynapse fn) {
desired = fn;
if (getTheComparisonSynapse().setDesired(fn)) {
if ((monitor != null) && (desired != null))
desired.setMonitor(monitor);
return true;
} else
return false;
}
public boolean addResultSynapse(OutputPatternListener listener) {
if (listener != null)
return getTheLinearLayer().addOutputSynapse(listener);
else
return false;
}
public void removeResultSynapse(OutputPatternListener listener) {
if (listener != null)
getTheLinearLayer().removeOutputSynapse(listener);
}
/**
* setInputDimension method.
*/
public void setInputDimension(int newInputDimension) {
getTheComparisonSynapse().setInputDimension(newInputDimension);
getTheLinearLayer().setRows(newInputDimension * 2);
}
/**
* Data di creazione: (06/04/00 23.33.24)
* @param newMonitor neural.engine.Monitor
*/
public void setMonitor(Monitor newMonitor) {
monitor = newMonitor;
if (monitor != null) {
getTheComparisonSynapse().setMonitor(newMonitor);
getTheLinearLayer().setMonitor(newMonitor);
if (desired != null)
desired.setMonitor(newMonitor);
}
}
public void stop() {
getTheLinearLayer().stop();
}
public String getName() {
return name;
}
public void setName(java.lang.String newName) {
name = newName;
}
/**
* Recall phase
* @param pattern double[] - pattern di input sul quale applicare la funzione di trasferimento
*/
protected void forward(double[] pattern) {
/* Not used */
}
/**
* Insert the method's description here.
* Creation date: (23/09/2000 12.52.58)
*/
protected void setArrays(int rows, int cols) {
/* Not used */
}
/**
* @param int rows - righe
* @param int cols - colonne
*/
protected void setDimensions(int rows, int cols) {
/* Not used */
}
/**
* Training phase.
* @param pattern double[] - input pattern
*/
protected void backward(double[] pattern) {
/* Not used */
}
/**
* Needed for Save as XML
*/
public void setTheComparisonSynapse(ComparisonSynapse theComparisonSynapse) {
this.theComparisonSynapse = theComparisonSynapse;
}
/**
* Needed for Save as XML
*/
public void setTheLinearLayer(LinearLayer newTheLinearLayer) {
this.theLinearLayer = newTheLinearLayer;
}
public void resetInput() {
getTheComparisonSynapse().resetInput();
}
public TreeSet check() {
// Prepare an empty set for check messages;
TreeSet checks = new TreeSet();
if (theLinearLayer != null) {
checks.addAll(setErrorSource(theLinearLayer.check()));
}
if (theComparisonSynapse != null) {
checks.addAll(setErrorSource(theComparisonSynapse.check()));
}
return checks;
}
/** Getter for property enabled.
* @return Value of property enabled.
*
*/
public boolean isEnabled() {
return enabled;
}
/** Setter for property enabled.
* @param enabled New value of property enabled.
*
*/
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
/** Getter for property outputFull.
* @return Value of property outputFull.
*
*/
public boolean isOutputFull() {
return outputFull;
}
/** Setter for property outputFull.
* @param outputFull New value of property outputFull.
*
*/
public void setOutputFull(boolean outputFull) {
this.outputFull = outputFull;
}
public void init() {
if (theComparisonSynapse != null) {
theComparisonSynapse.init();
}
}
// Changes the source of the errors generated from internal components
private TreeSet setErrorSource(TreeSet errors) {
if (!errors.isEmpty()) {
Iterator iter = errors.iterator();
while (iter.hasNext()) {
NetCheck nc = (NetCheck)iter.next();
if (!(nc.getSource() instanceof Monitor) &&
!(nc.getSource() instanceof StreamInputSynapse) &&
!(nc.getSource() instanceof StreamOutputSynapse))
nc.setSource(this);
}
}
return errors;
}
} | Java |
package org.joone.engine;
import java.beans.*;
/**
 * BeanInfo describing the Synapse bean for IDE introspection.
 * NOTE(review): this class was generated by the NetBeans GUI builder; the
 * GEN-FIRST/GEN-LAST marker comments delimit regions the generator rewrites.
 * Prefer regenerating over hand-editing inside the markers.
 */
public class SynapseBeanInfo extends SimpleBeanInfo {
// Bean descriptor//GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( Synapse.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers//GEN-FIRST:Properties
private static final int PROPERTY_enabled = 0;
private static final int PROPERTY_learner = 1;
private static final int PROPERTY_loopBack = 2;
private static final int PROPERTY_monitor = 3;
private static final int PROPERTY_name = 4;
private static final int PROPERTY_outputFull = 5;
private static final int PROPERTY_weights = 6;
// Property array
/*lazy PropertyDescriptor*/
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[7];
try {
properties[PROPERTY_enabled] = new PropertyDescriptor ( "enabled", Synapse.class, "isEnabled", "setEnabled" );
properties[PROPERTY_learner] = new PropertyDescriptor ( "learner", Synapse.class, "getLearner", null );
properties[PROPERTY_learner].setExpert ( true );
properties[PROPERTY_loopBack] = new PropertyDescriptor ( "loopBack", Synapse.class, "isLoopBack", "setLoopBack" );
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", Synapse.class, "getMonitor", "setMonitor" );
properties[PROPERTY_monitor].setExpert ( true );
properties[PROPERTY_name] = new PropertyDescriptor ( "name", Synapse.class, "getName", "setName" );
properties[PROPERTY_outputFull] = new PropertyDescriptor ( "outputFull", Synapse.class, "isOutputFull", "setOutputFull" );
properties[PROPERTY_outputFull].setExpert ( true );
properties[PROPERTY_weights] = new PropertyDescriptor ( "weights", Synapse.class, "getWeights", "setWeights" );
properties[PROPERTY_weights].setExpert ( true );
}
// NOTE(review): the generator's template swallows this exception; on failure
// some descriptor slots stay null. Regenerate rather than patching by hand.
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers//GEN-FIRST:Methods
private static final int METHOD_addNoise0 = 0;
private static final int METHOD_canCountSteps1 = 1;
private static final int METHOD_fwdGet2 = 2;
private static final int METHOD_fwdPut3 = 3;
private static final int METHOD_randomize4 = 4;
private static final int METHOD_revGet5 = 5;
private static final int METHOD_revPut6 = 6;
// Method array
/*lazy MethodDescriptor*/
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[7];
try {
methods[METHOD_addNoise0] = new MethodDescriptor ( org.joone.engine.Synapse.class.getMethod("addNoise", new Class[] {Double.TYPE}));
methods[METHOD_addNoise0].setDisplayName ( "" );
methods[METHOD_canCountSteps1] = new MethodDescriptor ( org.joone.engine.Synapse.class.getMethod("canCountSteps", new Class[] {}));
methods[METHOD_canCountSteps1].setDisplayName ( "" );
methods[METHOD_fwdGet2] = new MethodDescriptor ( org.joone.engine.Synapse.class.getMethod("fwdGet", new Class[] {}));
methods[METHOD_fwdGet2].setDisplayName ( "" );
methods[METHOD_fwdPut3] = new MethodDescriptor ( org.joone.engine.Synapse.class.getMethod("fwdPut", new Class[] {org.joone.engine.Pattern.class}));
methods[METHOD_fwdPut3].setDisplayName ( "" );
methods[METHOD_randomize4] = new MethodDescriptor ( org.joone.engine.Synapse.class.getMethod("randomize", new Class[] {Double.TYPE}));
methods[METHOD_randomize4].setDisplayName ( "" );
methods[METHOD_revGet5] = new MethodDescriptor ( org.joone.engine.Synapse.class.getMethod("revGet", new Class[] {}));
methods[METHOD_revGet5].setDisplayName ( "" );
methods[METHOD_revPut6] = new MethodDescriptor ( org.joone.engine.Synapse.class.getMethod("revPut", new Class[] {org.joone.engine.Pattern.class}));
methods[METHOD_revPut6].setDisplayName ( "" );
}
// NOTE(review): generated template swallows reflection failures too.
catch( Exception e) {}//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
* Gets the bean's <code>BeanDescriptor</code>s.
*
* @return BeanDescriptor describing the editable
* properties of this bean. May return null if the
* information should be obtained by automatic analysis.
*/
public BeanDescriptor getBeanDescriptor() {
return getBdescriptor();
}
/**
* Gets the bean's <code>PropertyDescriptor</code>s.
*
* @return An array of PropertyDescriptors describing the editable
* properties supported by this bean. May return null if the
* information should be obtained by automatic analysis.
* <p>
* If a property is indexed, then its entry in the result array will
* belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
* A client of getPropertyDescriptors can use "instanceof" to check
* if a given PropertyDescriptor is an IndexedPropertyDescriptor.
*/
public PropertyDescriptor[] getPropertyDescriptors() {
return getPdescriptor();
}
/**
* Gets the bean's <code>EventSetDescriptor</code>s.
*
* @return An array of EventSetDescriptors describing the kinds of
* events fired by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public EventSetDescriptor[] getEventSetDescriptors() {
return getEdescriptor();
}
/**
* Gets the bean's <code>MethodDescriptor</code>s.
*
* @return An array of MethodDescriptors describing the methods
* implemented by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public MethodDescriptor[] getMethodDescriptors() {
return getMdescriptor();
}
/**
* A bean may have a "default" property that is the property that will
* mostly commonly be initially chosen for update by human's who are
* customizing the bean.
* @return Index of default property in the PropertyDescriptor array
* returned by getPropertyDescriptors.
* <P> Returns -1 if there is no default property.
*/
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
* A bean may have a "default" event that is the event that will
* mostly commonly be used by human's when using the bean.
* @return Index of default event in the EventSetDescriptor array
* returned by getEventSetDescriptors.
* <P> Returns -1 if there is no default event.
*/
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
package org.joone.engine;
/**
 * Contract implemented by synapses whose connection weights can be trained
 * by a learning algorithm.
 */
public interface LearnableSynapse extends Learnable {
/** @return the input dimension of this synapse. */
public int getInputDimension();
/** @return the output dimension of this synapse. */
public int getOutputDimension();
/** @return the matrix of connection weights. */
public Matrix getWeights(); // getConnections()
/** @param newWeights the new matrix of connection weights. */
public void setWeights(Matrix newWeights);
/** @return the learning rate used when training this synapse's weights. */
public double getLearningRate();
/** @return the momentum term used when training this synapse's weights. */
public double getMomentum();
}
/*
* NetErrorManager.java
*
*/
package org.joone.engine;
import org.joone.engine.Monitor;
/**
* <P>This class should be used when ever a critical error occurs that would impact on the training or running of the network.</P>
* Joone classes should construct a new NetErrorManager when an error occurs. By doing so this will stop and reset the network
* so the user can perform corrective action and re-start.
* <P>E.g</P>
* <P>new NetErrorManager(monitor,"An error has occurred.!");
* <BR>
* <P> The constructor of this class calls the monitors fireNetStoppedError event method which propogates the event to all the
* net listeners. This in turn stops and resets the network to allow correction and continuation.
* @author Julien Norman
*/
public class NetErrorManager {

    /**
     * Signals a critical network error by firing the monitor's
     * net-stopped-error event, which stops and resets the network so the
     * user can correct the problem and restart.
     *
     * @param mon the monitor that should be made aware of the error; when
     *            {@code null}, the error cannot be delivered and the
     *            constructor is a no-op.
     * @param errMsg the text describing the critical network error.
     */
    public NetErrorManager(Monitor mon, String errMsg) {
        if (mon == null) {
            return; // no monitor attached: nobody to notify
        }
        mon.fireNetStoppedError(errMsg);
    }
}
| Java |
package org.joone.engine;
import java.beans.*;
/**
 * BeanInfo describing the OutputSwitchSynapse bean for IDE introspection.
 * NOTE(review): this class was generated by the NetBeans GUI builder; the
 * GEN-FIRST/GEN-LAST marker comments delimit regions the generator rewrites.
 * Prefer regenerating over hand-editing inside the markers.
 */
public class OutputSwitchSynapseBeanInfo extends SimpleBeanInfo {
// Bean descriptor //GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( OutputSwitchSynapse.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers //GEN-FIRST:Properties
private static final int PROPERTY_allOutputs = 0;
private static final int PROPERTY_inputDimension = 1;
private static final int PROPERTY_activeOutput = 2;
private static final int PROPERTY_defaultOutput = 3;
private static final int PROPERTY_enabled = 4;
private static final int PROPERTY_name = 5;
private static final int PROPERTY_monitor = 6;
// Property array
/*lazy PropertyDescriptor*/
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[7];
try {
properties[PROPERTY_allOutputs] = new PropertyDescriptor ( "allOutputs", OutputSwitchSynapse.class, "getAllOutputs", null );
properties[PROPERTY_inputDimension] = new PropertyDescriptor ( "inputDimension", OutputSwitchSynapse.class, "getInputDimension", "setInputDimension" );
properties[PROPERTY_inputDimension].setExpert ( true );
properties[PROPERTY_activeOutput] = new PropertyDescriptor ( "activeOutput", OutputSwitchSynapse.class, "getActiveOutput", "setActiveOutput" );
properties[PROPERTY_defaultOutput] = new PropertyDescriptor ( "defaultOutput", OutputSwitchSynapse.class, "getDefaultOutput", "setDefaultOutput" );
properties[PROPERTY_enabled] = new PropertyDescriptor ( "enabled", OutputSwitchSynapse.class, "isEnabled", "setEnabled" );
properties[PROPERTY_name] = new PropertyDescriptor ( "name", OutputSwitchSynapse.class, "getName", "setName" );
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", OutputSwitchSynapse.class, "getMonitor", "setMonitor" );
properties[PROPERTY_monitor].setExpert ( true );
}
// NOTE(review): the generator's template swallows this exception; on failure
// some descriptor slots stay null. Regenerate rather than patching by hand.
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers //GEN-FIRST:Methods
private static final int METHOD_reset0 = 0;
private static final int METHOD_removeOutputSynapse1 = 1;
private static final int METHOD_addOutputSynapse2 = 2;
private static final int METHOD_resetOutput3 = 3;
private static final int METHOD_fwdPut4 = 4;
private static final int METHOD_revGet5 = 5;
// Method array
/*lazy MethodDescriptor*/
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[6];
try {
methods[METHOD_reset0] = new MethodDescriptor ( org.joone.engine.OutputSwitchSynapse.class.getMethod("reset", new Class[] {}));
methods[METHOD_reset0].setDisplayName ( "" );
methods[METHOD_removeOutputSynapse1] = new MethodDescriptor ( org.joone.engine.OutputSwitchSynapse.class.getMethod("removeOutputSynapse", new Class[] {java.lang.String.class}));
methods[METHOD_removeOutputSynapse1].setDisplayName ( "" );
methods[METHOD_addOutputSynapse2] = new MethodDescriptor ( org.joone.engine.OutputSwitchSynapse.class.getMethod("addOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class}));
methods[METHOD_addOutputSynapse2].setDisplayName ( "" );
methods[METHOD_resetOutput3] = new MethodDescriptor ( org.joone.engine.OutputSwitchSynapse.class.getMethod("resetOutput", new Class[] {}));
methods[METHOD_resetOutput3].setDisplayName ( "" );
methods[METHOD_fwdPut4] = new MethodDescriptor ( org.joone.engine.OutputSwitchSynapse.class.getMethod("fwdPut", new Class[] {org.joone.engine.Pattern.class}));
methods[METHOD_fwdPut4].setDisplayName ( "" );
methods[METHOD_revGet5] = new MethodDescriptor ( org.joone.engine.OutputSwitchSynapse.class.getMethod("revGet", new Class[] {}));
methods[METHOD_revGet5].setDisplayName ( "" );
}
// NOTE(review): generated template swallows reflection failures too.
catch( Exception e) {}//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
* Gets the bean's <code>BeanDescriptor</code>s.
*
* @return BeanDescriptor describing the editable
* properties of this bean. May return null if the
* information should be obtained by automatic analysis.
*/
public BeanDescriptor getBeanDescriptor() {
return getBdescriptor();
}
/**
* Gets the bean's <code>PropertyDescriptor</code>s.
*
* @return An array of PropertyDescriptors describing the editable
* properties supported by this bean. May return null if the
* information should be obtained by automatic analysis.
* <p>
* If a property is indexed, then its entry in the result array will
* belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
* A client of getPropertyDescriptors can use "instanceof" to check
* if a given PropertyDescriptor is an IndexedPropertyDescriptor.
*/
public PropertyDescriptor[] getPropertyDescriptors() {
return getPdescriptor();
}
/**
* Gets the bean's <code>EventSetDescriptor</code>s.
*
* @return An array of EventSetDescriptors describing the kinds of
* events fired by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public EventSetDescriptor[] getEventSetDescriptors() {
return getEdescriptor();
}
/**
* Gets the bean's <code>MethodDescriptor</code>s.
*
* @return An array of MethodDescriptors describing the methods
* implemented by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public MethodDescriptor[] getMethodDescriptors() {
return getMdescriptor();
}
/**
* A bean may have a "default" property that is the property that will
* mostly commonly be initially chosen for update by human's who are
* customizing the bean.
* @return Index of default property in the PropertyDescriptor array
* returned by getPropertyDescriptors.
* <P> Returns -1 if there is no default property.
*/
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
* A bean may have a "default" event that is the event that will
* mostly commonly be used by human's when using the bean.
* @return Index of default event in the EventSetDescriptor array
* returned by getEventSetDescriptors.
* <P> Returns -1 if there is no default event.
*/
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
/*
* SpatialMap.java
*
*/
package org.joone.engine;
/** <P>SpatialMap is intended to be an abstract spatial map for use with a
* GaussianLayer. Custom SpatialMap's need to extend the ApplyNeighborhood method
* and implement it based on their own spatial shape implementation.
* The Gaussian spatial size is updated if the current epoch is less than the ordering phase,
* it is also reduced over the ordering phase based on the time constant.</P>
*/
public abstract class SpatialMap implements java.io.Serializable {
private double InitialGaussianSize = 1; // The initial Gaussian size.
private double CurrentGaussianSize = 1; // The current Gaussian size.
private int map_width = 1; // Width of map.
private int map_height = 1; // Height of map.
private int map_depth = 1; // Depth of the map.
private int win_x = 0, win_y = 0, win_z= 0; // The winning neuron.
private int TotalEpochs = 1;
private int orderingPhase;
// TimeConstant
double TimeConstant = 1;
/** <P>Initialises this spatial map according to the total number of
* epochs/cycles.</P>
* NOTE(review): the time-constant and total-epochs bookkeeping below is
* commented out, so <code>total_epochs</code> is currently ignored and the
* Gaussian size is simply reset for epoch 1 — confirm this is intentional.
* @param total_epochs The total number of epochs that will be used.
*/
public final void init(int total_epochs) {
// TimeConstant = total_epochs / Math.log(getInitialGaussianSize());
// TotalEpochs = total_epochs;
updateCurrentGaussianSize(1);
}
/** <P>Gets the total number of epochs for the current session.</P>
 * @return The total number of epochs for the current session.
 */
public final int getTotalEpochs() {
    return TotalEpochs;
}

/** <P>Sets the initial Gaussian size of the spatial neighborhood.</P>
 * The current Gaussian size is reset to the same value.
 * @param size The size of the neighborhood.
 */
public final void setInitialGaussianSize(double size) {
    // Keep the current size in sync with the newly configured initial size.
    setCurrentGaussianSize(size);
    InitialGaussianSize = size;
}

/** <P>Gets the initial Gaussian size of the spatial neighborhood.</P>
 * @return The size of the spatial neighborhood.
 */
public final double getInitialGaussianSize() {
    return InitialGaussianSize;
}

/** <P>Sets the current Gaussian size of the spatial neighborhood.</P>
 * @param size The current Gaussian size of the spatial neighborhood.
 */
public final void setCurrentGaussianSize(double size) {
    CurrentGaussianSize = size;
}

/** <P>Gets the current Gaussian size of the spatial neighborhood.</P>
 * @return The current Gaussian size of the spatial neighborhood.
 */
public final double getCurrentGaussianSize() {
    return CurrentGaussianSize;
}
/** <P>Updates the current Gaussian size depending on the current epoch and the
 * time constant: during the ordering phase the neighborhood shrinks
 * exponentially, afterwards it is held at a small constant value (0.01).</P>
 * @param current_epoch The current epoch or cycle.
 */
public final void updateCurrentGaussianSize(int current_epoch) {
    double newSize = (current_epoch < getOrderingPhase())
            ? getInitialGaussianSize() * Math.exp(-(current_epoch / getTimeConstant()))
            : 0.01;
    setCurrentGaussianSize(newSize);
}
/** <P>Applies the neighborhood strategy based on this spatial map's
* implementation. Concrete subclasses fill <code>n_outs</code> from the
* given distances according to their own spatial shape.</P>
* @param distances The euclidean distances between input and weights calculated by previous
* synapse.
* @param n_outs The outputs of this spatial maps neighborhood strategy.
* @param isLearning Is the network in the learning phase.
*/
abstract public void ApplyNeighborhoodFunction(double [] distances, double [] n_outs, boolean isLearning);
/** Extracts the X,Y,Z co-ordinates of the winning neuron in this spatial map. The co-ordinates are placed into
 * internal variables; they can be accessed through getWinnerX(), getWinnerY(), getWinnerZ().
 * A neuron is considered the winner if its distance between the input and weights vector is the smallest.
 * @param distances The distances between the input and weights vector, this should be passed in by the
 * previous synapse.
 */
protected final void extractWinner(double [] distances) {
    // Start from +infinity instead of the previous arbitrary large literal
    // (999999999999999f) so that genuinely huge but finite distances can
    // still produce a winner.
    double bestDist = Double.POSITIVE_INFINITY;
    for (int z = 0; z < getMapDepth(); z++) {
        for (int y = 0; y < getMapHeight(); y++) {
            for (int x = 0; x < getMapWidth(); x++) {
                // Flatten (x, y, z) into the linear index used by the distances array.
                int current_output = x + (y * getMapWidth()) + (z * (getMapWidth() * getMapHeight()));
                double curDist = distances[current_output];
                if (curDist < bestDist) {
                    bestDist = curDist;
                    win_x = x;
                    win_y = y;
                    win_z = z;
                }
            }
        }
    }
}
/**
 * Returns the X co-ordinate of the current winning neuron.
 * @return the winner's X co-ordinate.
 */
protected final int getWinnerX() {
    return win_x;
}
/**
 * Returns the Y co-ordinate of the current winning neuron.
 * @return the winner's Y co-ordinate.
 */
protected final int getWinnerY() {
    return win_y;
}
/**
 * Returns the Z co-ordinate of the current winning neuron.
 * @return the winner's Z co-ordinate.
 */
protected final int getWinnerZ() {
    return win_z;
}
/**
 * Sets all three dimensions of the spatial map in a single call.
 * Each dimension is clamped to at least 1 by the individual setters.
 * @param x the width (x size) of the map.
 * @param y the height (y size) of the map.
 * @param z the depth (z size) of the map.
 */
public final void setMapDimensions(int x,int y,int z) {
    setMapDepth(z);
    setMapHeight(y);
    setMapWidth(x);
}
/**
 * Sets the width of this spatial map; values below 1 are clamped to 1.
 * @param w the width or x size of the map.
 */
public final void setMapWidth(int w) {
    map_width = Math.max(w, 1);
}
/**
 * Sets the height of this spatial map; values below 1 are clamped to 1.
 * @param h the height or y size of the map.
 */
public final void setMapHeight(int h) {
    map_height = Math.max(h, 1);
}
/**
 * Sets the depth of this spatial map; values below 1 are clamped to 1.
 * @param d the depth or z size of the map.
 */
public final void setMapDepth(int d) {
    map_depth = Math.max(d, 1);
}
/**
 * Returns the width of this spatial map.
 * @return the map width.
 */
public final int getMapWidth() {
    return map_width;
}
/**
 * Returns the height of this spatial map.
 * @return the map height.
 */
public final int getMapHeight() {
    return map_height;
}
/**
 * Returns the depth of this spatial map.
 * @return the map depth.
 */
public final int getMapDepth() {
    return map_depth;
}
/** <P>Calculates the squared distance between point (x1,y1,z1) and (x2,y2,z2).</P>
 *
 * Fix: each leg is now squared in double precision. The previous int
 * arithmetic could silently overflow for co-ordinate differences larger
 * than ~46340, even though the result is returned as a double.
 *
 * @return The squared distance between vector (x1,y1,z1) and (x2,y2,z2)
 * @param x1 The x location of the first vector.
 * @param y1 The y location of the first vector.
 * @param z1 The z location of the first vector.
 * @param x2 The x location of the second vector.
 * @param y2 The y location of the second vector.
 * @param z2 The z location of the second vector.
 */
protected final double distanceBetween(int x1,int y1,int z1,int x2,int y2,int z2) {
    final double xleg = (double) x1 - x2;
    final double yleg = (double) y1 - y2;
    final double zleg = (double) z1 - z2;
    return xleg * xleg + yleg * yleg + zleg * zleg;
}
/**
 * Returns the number of epochs that make up the ordering phase.
 * @return the ordering-phase length in epochs.
 */
public int getOrderingPhase() {
    return this.orderingPhase;
}
/**
 * Sets the number of epochs that make up the ordering phase.
 * @param orderingPhase the new ordering-phase length in epochs.
 */
public void setOrderingPhase(int orderingPhase) {
    this.orderingPhase = orderingPhase;
}
/**
 * Returns the time constant used for the exponential neighborhood decay.
 * @return the time constant.
 */
public double getTimeConstant() {
    return this.TimeConstant;
}
/**
 * Sets the time constant used for the exponential neighborhood decay.
 * @param TimeConstant the new time constant.
 */
public void setTimeConstant(double TimeConstant) {
    this.TimeConstant = TimeConstant;
}
}
| Java |
package org.joone.engine;
/*
* @author dkern
*/
/**
 * Implemented by engine components whose free parameters are updated by a
 * {@link Learner} during training.
 */
public interface Learnable {
/** Returns the Learner responsible for updating this component. */
Learner getLearner();
/** Returns the Monitor holding this component's training parameters. */
Monitor getMonitor();
/** Initializes (or re-initializes) this component's Learner. */
void initLearner();
}
package org.joone.engine;
import org.joone.log.*;
/** This abstract class represents layers that are composed
 * by neurons that implement some transfer function.
 */
public abstract class SimpleLayer extends Layer {

    private static final ILogger log = LoggerFactory.getLogger(SimpleLayer.class);

    // Cached copies of the monitor's learning parameters. The field names
    // are kept unchanged for serialization compatibility.
    private double lrate;
    private double momentum;

    private static final long serialVersionUID = -2579073586181182767L;

    /** Creates an unnamed layer; instantiation is logged by {@link Layer}. */
    public SimpleLayer() {
        super();
    }

    /** Creates a named layer; instantiation is logged by {@link Layer}.
     * @param ElemName The name of the Layer
     */
    public SimpleLayer(String ElemName) {
        super(ElemName);
    }

    /**
     * Refreshes the cached learning rate and momentum from the monitor
     * before the backward pass; subclasses do the actual gradient work.
     */
    protected void backward(double[] parm1) {
        if (monitor == null) {
            return;
        }
        lrate = monitor.getLearningRate();
        momentum = monitor.getMomentum();
    }

    /** Returns the value of the learning rate of the Layer
     * @return double
     */
    public double getLearningRate() {
        return this.lrate;
    }

    /** Returns the value of the momentum of the Layer
     * @return double
     */
    public double getMomentum() {
        return this.momentum;
    }

    /** Sizes the I/O and gradient buffers to the layer's row count. */
    protected void setDimensions() {
        final int myRows = getRows();
        inps = new double[myRows];
        outs = new double[myRows];
        gradientInps = new double[myRows];
        gradientOuts = new double[myRows];
    }

    /** Attaches the monitor and immediately caches its learning parameters. */
    public void setMonitor(Monitor parm1) {
        super.setMonitor(parm1);
        if (parm1 != null) {
            this.lrate = monitor.getLearningRate();
            this.momentum = monitor.getMomentum();
        }
    }

    /**
     * Needed for Save As XML
     */
    public double getLrate() {
        return lrate;
    }

    /**
     * Needed for Save As XML
     */
    public void setLrate(double newLrate) {
        lrate = newLrate;
    }

    /**
     * Needed for Save As XML
     */
    public void setMomentum(double newMomentum) {
        momentum = newMomentum;
    }
}
| Java |
package org.joone.engine;
import java.beans.*;
/**
 * BeanInfo descriptor for {@code DelayLayer}, generated by the NetBeans
 * GUI builder. The {@code GEN-*} marker comments delimit regenerated
 * sections; avoid hand edits inside them.
 */
public class DelayLayerBeanInfo extends SimpleBeanInfo {
// Bean descriptor//GEN-FIRST:BeanDescriptor
private static BeanDescriptor beanDescriptor = new BeanDescriptor ( DelayLayer.class , null );
private static BeanDescriptor getBdescriptor(){
return beanDescriptor;
}
static {//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
}//GEN-LAST:BeanDescriptor
// Property identifiers//GEN-FIRST:Properties
private static final int PROPERTY_allInputs = 0;
private static final int PROPERTY_allOutputs = 1;
private static final int PROPERTY_bias = 2;
private static final int PROPERTY_inputLayer = 3;
private static final int PROPERTY_layerName = 4;
private static final int PROPERTY_learner = 5;
private static final int PROPERTY_monitor = 6;
private static final int PROPERTY_outputLayer = 7;
private static final int PROPERTY_rows = 8;
private static final int PROPERTY_taps = 9;
// Property array
private static PropertyDescriptor[] properties = new PropertyDescriptor[10];
private static PropertyDescriptor[] getPdescriptor(){
return properties;
}
static {
try {
properties[PROPERTY_allInputs] = new PropertyDescriptor ( "allInputs", DelayLayer.class, "getAllInputs", "setAllInputs" );
properties[PROPERTY_allInputs].setExpert ( true );
properties[PROPERTY_allOutputs] = new PropertyDescriptor ( "allOutputs", DelayLayer.class, "getAllOutputs", "setAllOutputs" );
properties[PROPERTY_allOutputs].setExpert ( true );
properties[PROPERTY_bias] = new PropertyDescriptor ( "bias", DelayLayer.class, "getBias", "setBias" );
properties[PROPERTY_bias].setExpert ( true );
properties[PROPERTY_inputLayer] = new PropertyDescriptor ( "inputLayer", DelayLayer.class, "isInputLayer", null );
properties[PROPERTY_inputLayer].setExpert ( true );
properties[PROPERTY_layerName] = new PropertyDescriptor ( "layerName", DelayLayer.class, "getLayerName", "setLayerName" );
properties[PROPERTY_learner] = new PropertyDescriptor ( "learner", DelayLayer.class, "getLearner", null );
properties[PROPERTY_learner].setExpert ( true );
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", DelayLayer.class, "getMonitor", "setMonitor" );
properties[PROPERTY_monitor].setExpert ( true );
properties[PROPERTY_outputLayer] = new PropertyDescriptor ( "outputLayer", DelayLayer.class, "isOutputLayer", null );
properties[PROPERTY_outputLayer].setExpert ( true );
properties[PROPERTY_rows] = new PropertyDescriptor ( "rows", DelayLayer.class, "getRows", "setRows" );
properties[PROPERTY_taps] = new PropertyDescriptor ( "taps", DelayLayer.class, "getTaps", "setTaps" );
}
// NOTE(review): introspection failures are silently swallowed (generated
// code); any descriptors not yet assigned remain null in that case.
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
}//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
private static EventSetDescriptor[] eventSets = new EventSetDescriptor[0];
private static EventSetDescriptor[] getEdescriptor(){
return eventSets;
}
//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
//GEN-LAST:Events
// Method identifiers//GEN-FIRST:Methods
private static final int METHOD_addInputSynapse0 = 0;
private static final int METHOD_addNoise1 = 1;
private static final int METHOD_addOutputSynapse2 = 2;
private static final int METHOD_copyInto3 = 3;
private static final int METHOD_removeAllInputs4 = 4;
private static final int METHOD_removeAllOutputs5 = 5;
private static final int METHOD_removeInputSynapse6 = 6;
private static final int METHOD_removeOutputSynapse7 = 7;
private static final int METHOD_run8 = 8;
private static final int METHOD_start9 = 9;
// Method array
private static MethodDescriptor[] methods = new MethodDescriptor[10];
private static MethodDescriptor[] getMdescriptor(){
return methods;
}
static {
try {
methods[METHOD_addInputSynapse0] = new MethodDescriptor ( org.joone.engine.DelayLayer.class.getMethod("addInputSynapse", new Class[] {org.joone.engine.InputPatternListener.class}));
methods[METHOD_addInputSynapse0].setDisplayName ( "" );
methods[METHOD_addNoise1] = new MethodDescriptor ( org.joone.engine.DelayLayer.class.getMethod("addNoise", new Class[] {Double.TYPE}));
methods[METHOD_addNoise1].setDisplayName ( "" );
methods[METHOD_addOutputSynapse2] = new MethodDescriptor ( org.joone.engine.DelayLayer.class.getMethod("addOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class}));
methods[METHOD_addOutputSynapse2].setDisplayName ( "" );
methods[METHOD_copyInto3] = new MethodDescriptor ( org.joone.engine.DelayLayer.class.getMethod("copyInto", new Class[] {org.joone.engine.NeuralLayer.class}));
methods[METHOD_copyInto3].setDisplayName ( "" );
methods[METHOD_removeAllInputs4] = new MethodDescriptor ( org.joone.engine.DelayLayer.class.getMethod("removeAllInputs", new Class[] {}));
methods[METHOD_removeAllInputs4].setDisplayName ( "" );
methods[METHOD_removeAllOutputs5] = new MethodDescriptor ( org.joone.engine.DelayLayer.class.getMethod("removeAllOutputs", new Class[] {}));
methods[METHOD_removeAllOutputs5].setDisplayName ( "" );
methods[METHOD_removeInputSynapse6] = new MethodDescriptor ( org.joone.engine.DelayLayer.class.getMethod("removeInputSynapse", new Class[] {org.joone.engine.InputPatternListener.class}));
methods[METHOD_removeInputSynapse6].setDisplayName ( "" );
methods[METHOD_removeOutputSynapse7] = new MethodDescriptor ( org.joone.engine.DelayLayer.class.getMethod("removeOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class}));
methods[METHOD_removeOutputSynapse7].setDisplayName ( "" );
methods[METHOD_run8] = new MethodDescriptor ( org.joone.engine.DelayLayer.class.getMethod("run", new Class[] {}));
methods[METHOD_run8].setDisplayName ( "" );
methods[METHOD_start9] = new MethodDescriptor ( org.joone.engine.DelayLayer.class.getMethod("start", new Class[] {}));
methods[METHOD_start9].setDisplayName ( "" );
}
// NOTE(review): reflection failures are silently swallowed (generated
// code); any descriptors not yet assigned remain null in that case.
catch( Exception e) {}//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
}//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
/**
 * Gets the bean's <code>BeanDescriptor</code>s.
 *
 * @return BeanDescriptor describing the editable
 * properties of this bean. May return null if the
 * information should be obtained by automatic analysis.
 */
public BeanDescriptor getBeanDescriptor() {
return beanDescriptor;
}
/**
 * Gets the bean's <code>PropertyDescriptor</code>s.
 *
 * @return An array of PropertyDescriptors describing the editable
 * properties supported by this bean. May return null if the
 * information should be obtained by automatic analysis.
 * <p>
 * If a property is indexed, then its entry in the result array will
 * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
 * A client of getPropertyDescriptors can use "instanceof" to check
 * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
 */
public PropertyDescriptor[] getPropertyDescriptors() {
return properties;
}
/**
 * Gets the bean's <code>EventSetDescriptor</code>s.
 *
 * @return An array of EventSetDescriptors describing the kinds of
 * events fired by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public EventSetDescriptor[] getEventSetDescriptors() {
return eventSets;
}
/**
 * Gets the bean's <code>MethodDescriptor</code>s.
 *
 * @return An array of MethodDescriptors describing the methods
 * implemented by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public MethodDescriptor[] getMethodDescriptors() {
return methods;
}
/**
 * A bean may have a "default" property that is the property that will
 * mostly commonly be initially chosen for update by human's who are
 * customizing the bean.
 * @return Index of default property in the PropertyDescriptor array
 * returned by getPropertyDescriptors.
 * <P> Returns -1 if there is no default property.
 */
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
 * A bean may have a "default" event that is the event that will
 * mostly commonly be used by human's when using the bean.
 * @return Index of default event in the EventSetDescriptor array
 * returned by getEventSetDescriptors.
 * <P> Returns -1 if there is no default event.
 */
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
package org.joone.engine;
import java.io.Serializable;
/*
* @author dkern
*/
/**
 * A weight/bias update strategy that {@link Learnable} components delegate
 * to during training. Serializable so trained networks can be persisted.
 */
public interface Learner extends Serializable {
/** The Learnable calls this method to make itself known to the Learner.
 */
public abstract void registerLearnable(Learnable l);
/** Override this method to implement what should be done to LearnableLayers
 * when a bias update is requested for the given output gradients.
 */
public abstract void requestBiasUpdate(double[] currentGradientOuts);
/** Override this method to implement what should be done to LearnableSynapses
 * when a weight update is requested for the given pattern and inputs.
 */
public abstract void requestWeightUpdate(double[] currentPattern, double[] currentInps);
/** Used to set the monitor carrying the parameters used by this Learner.
 */
public void setMonitor(Monitor mon);
}
/*
* MatrixBeanInfo.java
*
* Created on 28 aprile 2004, 23.40
*/
package org.joone.engine;
import java.beans.*;
/**
* @author paolo
*/
/**
 * BeanInfo descriptor for {@code Matrix}, generated by the NetBeans GUI
 * builder in its "lazy" style (descriptors are rebuilt on each call).
 * The {@code GEN-*} marker comments delimit regenerated sections.
 */
public class MatrixBeanInfo extends SimpleBeanInfo {
// Bean descriptor //GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( Matrix.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers //GEN-FIRST:Properties
private static final int PROPERTY_enabled = 0;
private static final int PROPERTY_fixed = 1;
private static final int PROPERTY_m_cols = 2;
private static final int PROPERTY_m_rows = 3;
private static final int PROPERTY_value = 4;
// Property array
/*lazy PropertyDescriptor*/
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[5];
try {
properties[PROPERTY_enabled] = new PropertyDescriptor ( "enabled", Matrix.class, "getEnabled", "setEnabled" );
properties[PROPERTY_fixed] = new PropertyDescriptor ( "fixed", Matrix.class, "getFixed", "setFixed" );
properties[PROPERTY_m_cols] = new PropertyDescriptor ( "m_cols", Matrix.class, "getM_cols", "setM_cols" );
properties[PROPERTY_m_rows] = new PropertyDescriptor ( "m_rows", Matrix.class, "getM_rows", "setM_rows" );
properties[PROPERTY_value] = new PropertyDescriptor ( "value", Matrix.class, "getValue", "setValue" );
}
// NOTE(review): introspection failures are silently swallowed (generated
// code); any descriptors not yet assigned remain null in that case.
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers //GEN-FIRST:Methods
private static final int METHOD_addNoise0 = 0;
private static final int METHOD_clear1 = 1;
private static final int METHOD_clone2 = 2;
private static final int METHOD_disableAll3 = 3;
private static final int METHOD_enableAll4 = 4;
private static final int METHOD_fixAll5 = 5;
private static final int METHOD_randomize6 = 6;
private static final int METHOD_unfixAll7 = 7;
// Method array
/*lazy MethodDescriptor*/
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[8];
try {
methods[METHOD_addNoise0] = new MethodDescriptor ( org.joone.engine.Matrix.class.getMethod("addNoise", new Class[] {Double.TYPE}));
methods[METHOD_addNoise0].setDisplayName ( "" );
methods[METHOD_clear1] = new MethodDescriptor ( org.joone.engine.Matrix.class.getMethod("clear", new Class[] {}));
methods[METHOD_clear1].setDisplayName ( "" );
methods[METHOD_clone2] = new MethodDescriptor ( org.joone.engine.Matrix.class.getMethod("clone", new Class[] {}));
methods[METHOD_clone2].setDisplayName ( "" );
methods[METHOD_disableAll3] = new MethodDescriptor ( org.joone.engine.Matrix.class.getMethod("disableAll", new Class[] {}));
methods[METHOD_disableAll3].setDisplayName ( "" );
methods[METHOD_enableAll4] = new MethodDescriptor ( org.joone.engine.Matrix.class.getMethod("enableAll", new Class[] {}));
methods[METHOD_enableAll4].setDisplayName ( "" );
methods[METHOD_fixAll5] = new MethodDescriptor ( org.joone.engine.Matrix.class.getMethod("fixAll", new Class[] {}));
methods[METHOD_fixAll5].setDisplayName ( "" );
methods[METHOD_randomize6] = new MethodDescriptor ( org.joone.engine.Matrix.class.getMethod("randomize", new Class[] {Double.TYPE, Double.TYPE}));
methods[METHOD_randomize6].setDisplayName ( "" );
methods[METHOD_unfixAll7] = new MethodDescriptor ( org.joone.engine.Matrix.class.getMethod("unfixAll", new Class[] {}));
methods[METHOD_unfixAll7].setDisplayName ( "" );
}
// NOTE(review): reflection failures are silently swallowed (generated
// code); any descriptors not yet assigned remain null in that case.
catch( Exception e) {}//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
 * Gets the bean's <code>BeanDescriptor</code>s.
 *
 * @return BeanDescriptor describing the editable
 * properties of this bean. May return null if the
 * information should be obtained by automatic analysis.
 */
public BeanDescriptor getBeanDescriptor() {
return getBdescriptor();
}
/**
 * Gets the bean's <code>PropertyDescriptor</code>s.
 *
 * @return An array of PropertyDescriptors describing the editable
 * properties supported by this bean. May return null if the
 * information should be obtained by automatic analysis.
 * <p>
 * If a property is indexed, then its entry in the result array will
 * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
 * A client of getPropertyDescriptors can use "instanceof" to check
 * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
 */
public PropertyDescriptor[] getPropertyDescriptors() {
return getPdescriptor();
}
/**
 * Gets the bean's <code>EventSetDescriptor</code>s.
 *
 * @return An array of EventSetDescriptors describing the kinds of
 * events fired by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public EventSetDescriptor[] getEventSetDescriptors() {
return getEdescriptor();
}
/**
 * Gets the bean's <code>MethodDescriptor</code>s.
 *
 * @return An array of MethodDescriptors describing the methods
 * implemented by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public MethodDescriptor[] getMethodDescriptors() {
return getMdescriptor();
}
/**
 * A bean may have a "default" property that is the property that will
 * mostly commonly be initially chosen for update by human's who are
 * customizing the bean.
 * @return Index of default property in the PropertyDescriptor array
 * returned by getPropertyDescriptors.
 * <P> Returns -1 if there is no default property.
 */
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
 * A bean may have a "default" event that is the event that will
 * mostly commonly be used by human's when using the bean.
 * @return Index of default event in the EventSetDescriptor array
 * returned by getEventSetDescriptors.
 * <P> Returns -1 if there is no default event.
 */
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
/*
* LearnerFactory.java
*
* Created on September 15, 2004, 3:43 PM
*/
package org.joone.engine;
/**
* Learner factories are used to provide the synapses and layers, through the
* monitor object with Leaners.
*
* @author Boris Jansen
*/
public interface LearnerFactory extends java.io.Serializable {
// We used to set learners at the monitor object in the following way:
// Monitor.getLearners().add(0, "org.joone.engine.BasicLearner");
// Monitor.getLearners().add(1, "org.joone.engine.BatchLearner");
// Monitor.setLearningMode(1);
// This method is still available and is an easy and fast way to set learners.
//
// However, thanks to the ExtendableLearner and extenders it is quite easy
// to create various different learners by combination different extenders.
// Furthermore, some people would like to set/change certain parameters for
// the learners before they are used.
// For those purposes the LearnerFactory is created. Once a learner factory
// is registered at a monitor, the method getLearner() will be used to get
// a learner and the user can implement the method as he/she likes.
/**
 * Gets a learner for a synapse or layer.
 *
 * @param aMonitor the monitor carrying the training parameters.
 * @return the Learner to be used by the requesting component.
 */
public Learner getLearner(Monitor aMonitor);
}
| Java |
package org.joone.engine;
import org.joone.net.NetCheck;
import java.util.TreeSet;
public class DirectSynapse extends Synapse {
private static final long serialVersionUID = 3079898042708755094L;
protected void backward(double[] pattern) {
// Never called. See revPut()
}
protected void forward(double[] pattern) {
outs = pattern;
}
/**
* setArrays method comment.
*/
protected void setArrays(int rows, int cols) {
inps = new double[rows];
outs = new double[rows];
bouts = new double[rows];
}
protected void setDimensions(int rows, int cols) {
if (rows > -1)
setArrays(rows, rows);
else
if (cols > -1)
setArrays(cols, cols);
}
public void revPut(Pattern pattern) {
}
public Pattern revGet() {
return null;
}
public TreeSet check() {
TreeSet checks = super.check();
if (getInputDimension() != getOutputDimension()) {
checks.add(new NetCheck(NetCheck.FATAL, "Connected layers are not the same size.", this));
}
return checks;
}
} | Java |
/*
* RbfGaussianLayer.java
*
* Created on July 21, 2004, 3:15 PM
*/
package org.joone.engine;
import java.util.ArrayList;
import java.util.Collection;
import org.joone.exception.JooneRuntimeException;
import org.joone.inspection.implementations.BiasInspection;
import org.joone.io.StreamInputSynapse;
import org.joone.log.*;
import org.joone.util.*;
/**
 * This class implements the nonlinear layer in Radial Basis Function (RBF)
 * networks using Gaussian functions.
 *
 * @author Boris Jansen
 */
public class RbfGaussianLayer extends RbfLayer {

    /** Logger for this class. */
    private static final ILogger log = LoggerFactory.getLogger(RbfGaussianLayer.class);

    /** The parameters (mean / std deviation) for the different Gaussian RBFs (neurons). */
    private RbfGaussianParameters[] theGaussianParameters;

    /** Flag indicating if we should use randomly chosen fixed centers. */
    private boolean theUseRandomSelector = true;

    /** The random selector (if theUseRandomSelector equals true). */
    private RbfRandomCenterSelector theRandomSelector;

    /** Creates a new instance of RbfGaussianLayer */
    public RbfGaussianLayer() {
    }

    /**
     * Copies the incoming gradient straight through: back-propagation is not
     * applied to the RBF layer itself.
     */
    protected void backward(double[] pattern) throws org.joone.exception.JooneRuntimeException {
        for(int i = 0; i < gradientInps.length; i++) {
            gradientOuts[i] = gradientInps[i];
        }
    }

    /**
     * Evaluates every Gaussian RBF neuron against the input pattern and
     * stores the activations in {@code outs}.
     *
     * Fix: removed the printStackTrace() call — the exception is already
     * reported through log.error() and rethrown wrapped, so printing it
     * duplicated the report on stderr.
     *
     * @param pattern the input pattern; its length must match the mean
     *                vector length of each Gaussian (not validated here).
     */
    protected void forward(double[] pattern) throws org.joone.exception.JooneRuntimeException {
        // Lazily pick random centers on the first pass when configured via
        // useRandomCenter(). NOTE(review): if no parameters are ever set and
        // useRandomCenter() was never called, theRandomSelector is null here.
        if(theUseRandomSelector && theGaussianParameters == null) {
            setGaussianParameters(theRandomSelector.getGaussianParameters());
        }
        int i = 0;
        try {
            // for every RBF neuron
            for(i = 0; i < getRows(); i++) {
                // Squared Euclidean distance between pattern and the neuron's mean.
                double mySquaredEuclDist = 0;
                double myTemp;
                for(int j = 0; j < pattern.length; j++) {
                    myTemp = pattern[j] - theGaussianParameters[i].getMean()[j];
                    mySquaredEuclDist += (myTemp * myTemp);
                }
                // Gaussian activation: exp(-d^2 / (2 * sigma^2)).
                outs[i] = Math.exp(mySquaredEuclDist /
                        (-2 * theGaussianParameters[i].getStdDeviation() * theGaussianParameters[i].getStdDeviation()));
            }
        } catch (Exception anException) {
            String msg;
            log.error(msg = "Exception thrown while processing the element " + i + " of the array. Value is : " + pattern[i]
                    + " Exception thrown is " + anException.getClass().getName() + ". Message is " + anException.getMessage());
            throw new JooneRuntimeException(msg, anException);
        }
    }

    /**
     * Sizes the buffers. Back-propagation is skipped in RBF layers; the rule
     * is to copy gradientInps to gradientOuts, so gradientOuts mirrors
     * gradientInps in size.
     */
    protected void setDimensions() {
        super.setDimensions();
        gradientOuts = new double[gradientInps.length];
    }

    /**
     * Gets the parameters that define the Gaussian RBFs.
     *
     * @return the Gaussian RBFs parameters.
     */
    public RbfGaussianParameters[] getGaussianParameters() {
        return theGaussianParameters;
    }

    /**
     * Sets the parameters that define the Gaussian RBFs. The layer's row
     * count follows the number of parameter sets.
     *
     * @param aGaussianParameters The new parameters for the RBFs.
     */
    public void setGaussianParameters(RbfGaussianParameters[] aGaussianParameters) {
        if(aGaussianParameters.length != getRows()) {
            setRows(aGaussianParameters.length);
            log.warn("Setting new RBF Gaussian parameters -> # neurons changed.");
        }
        theGaussianParameters = aGaussianParameters;
    }

    /**
     * Sets the Gaussian parameters to centers chosen randomly from the input/training data.
     *
     * @param aStreamInput the synapse providing the input, from where we will select random centers.
     */
    public void useRandomCenter(StreamInputSynapse aStreamInput) {
        theUseRandomSelector = true;
        theRandomSelector = new RbfRandomCenterSelector(this);
        // Attach the selector to the input's plug-in chain; addPlugIn is
        // expected to append it after any existing plug-ins.
        if(aStreamInput.getPlugIn() == null) {
            aStreamInput.setPlugIn(theRandomSelector);
        } else {
            AbstractConverterPlugIn myPlugin = aStreamInput.getPlugIn();
            myPlugin.addPlugIn(theRandomSelector);
        }
    }

    /**
     * It doesn't make sense to return biases for this layer
     * @return a collection holding a single empty BiasInspection
     */
    public Collection Inspections() {
        Collection col = new ArrayList();
        col.add(new BiasInspection(null));
        return col;
    }
}
| Java |
package org.joone.exception;
/**
 * A checked wrapper for exceptions thrown by the application.
 * (The previous javadoc incorrectly described it as a RuntimeException wrapper.)
 *
 * @see java.lang.Exception
 * @author tsmets
 */
public class JooneException
extends Exception
{
    /** The original exception that caused this one, if any. */
    private Exception initialException = null;

    /**
     * Constructor for JooneException without a message.
     */
    public JooneException()
    {
        super();
    }

    /**
     * Constructor for JooneException.
     * @param s the detail message
     */
    public JooneException(String s)
    {
        super(s);
    }

    /**
     * Constructor wrapping an underlying cause.
     * Fix: previously the initialException field could never be assigned;
     * this constructor stores it and chains it as the standard cause.
     * @param s the detail message
     * @param anInitialException the original exception being wrapped
     */
    public JooneException(String s, Exception anInitialException)
    {
        super(s, anInitialException);
        initialException = anInitialException;
    }

    /**
     * Gets the wrapped original exception.
     * @return the original exception, or null if none was supplied
     */
    public Exception getInitialException()
    {
        return initialException;
    }
}
| Java |
package org.joone.exception;
/**
 * This is a wrapper class for the <code>Exception</code> thrown by
 * the application.
 *
 * @see java.lang.Exception
 * @author tsmets
 */
public class JooneRuntimeException
extends RuntimeException
{
    /** The original exception that caused this one, if any. */
    private Throwable initialException = null;
    /** Unused; kept for serialization compatibility. */
    private String msg = null;

    /**
     * Constructor for JooneRuntimeException.
     *
     * @param aMessage to be displayed.
     */
    public JooneRuntimeException (String aMessage)
    {
        super (aMessage);
    }

    /**
     * Constructor for JooneRunTimeException.
     * Fix: the cause is now chained via initCause so stack traces and
     * getCause() expose the origin.
     * @param anInitialException When applying the Original Exception that was thrown.
     */
    public JooneRuntimeException (Throwable anInitialException)
    {
        super();
        initialException = anInitialException;
        initCause(anInitialException);
    }

    /**
     * Constructor for JooneRunTimeException.
     *
     * @param aMessage The message explaining the origin of the Exception
     * @param anInitialException When applying the Original Exception that was thrown.
     */
    public JooneRuntimeException (String aMessage, Throwable anInitialException)
    {
        super ( aMessage );
        initialException = anInitialException;
        initCause(anInitialException);
    }

    /**
     * @see java.lang.Throwable#getLocalizedMessage()
     */
    public String getLocalizedMessage ()
    {
        return super.getLocalizedMessage( );
    }

    /**
     * Returns the detail message, falling back to the wrapped exception's
     * description when no message was supplied.
     * Fix: the previous implementation did new StringBuffer(null) and threw
     * a NullPointerException whenever this exception was built from a cause
     * only.
     *
     * @see java.lang.Throwable#getMessage()
     */
    public String getMessage()
    {
        String myMessage = super.getMessage();
        if (myMessage == null && initialException != null) {
            return initialException.toString();
        }
        return myMessage;
    }
}
| Java |
/*
* User: Harry Glasgow
* Date: 12/12/2002
* Time: 18:13:12
* Interface that defines methods for classes that
* can expose their internal values for inspection.
*/
package org.joone.inspection;
import java.util.Collection;
/**
 * Implemented by classes that can expose their internal values (weights,
 * biases, buffered patterns, ...) for inspection in the GUI.
 */
public interface Inspectable {
/**
 * Method to get a collection of inspectable objects.
 * @see org.joone.Inspection
 * @return list of Inspection objects describing this component's internals
 */
public Collection Inspections();
/**
 * Method to get the title to show
 * in the InspectionFrame tab.
 * @see org.joone.InspectionFrame
 * @return title of the class.
 */
public String InspectableTitle();
}
| Java |
/*
* User: Harry Glasgow
* Date: 12/12/2002
* Time: 18:39:22
* Class to get bias values for a Layer.
*/
package org.joone.inspection.implementations;
import org.joone.inspection.Inspection;
import org.joone.engine.Matrix;
/**
 * Exposes the bias values of a layer's {@link Matrix} for inspection and
 * editing through the inspection UI.
 */
public class BiasInspection implements Inspection {

    /** The bias matrix under inspection; may be null (layers without bias). */
    private Matrix bias;

    /**
     * Creates an inspection wrapper for the given bias matrix.
     * @param biasArg the bias matrix, may be null
     */
    public BiasInspection(Matrix biasArg) {
        bias = biasArg;
    }

    /**
     * Returns the bias values as a boxed 2-D array (transposed index order:
     * result[row][col] = value[row][col]), or null when there is no bias or
     * the matrix is empty.
     */
    public Object[][] getComponent() {
        if (bias == null)
            return null;
        double[][] values = bias.getValue();
        if (values.length > 0 && values[0].length > 0) {
            Object[][] bigValues = new Object[values.length][values[0].length];
            for (int i = 0; i < values[0].length; i++) {
                for (int j = 0; j < values.length; j++) {
                    bigValues[j][i] = new Double(values[j][i]);
                }
            }
            return bigValues;
        } else
            return null;
    }

    /**
     * Returns one "Column n" header per matrix column, or null when there is
     * no bias or the matrix is empty.
     */
    public Object[] getNames() {
        if (bias == null)
            return null;
        double[][] values = bias.getValue();
        if (values.length > 0 && values[0].length > 0) {
            Object[] names = new String[values[0].length];
            for (int i = 0; i < values[0].length; i++) {
                names[i] = "Column "+i;
            }
            return names;
        } else
            return null;
    }

    public String getTitle() {
        return "Bias";
    }

    /* (non-Javadoc)
     * @see org.joone.inspection.Inspection#rowNumbers()
     */
    public boolean rowNumbers() {
        return false;
    }

    /**
     * Writes edited values back into the bias matrix.
     * Fix: guards against a null matrix or null input, which previously
     * caused a NullPointerException here although both getters guard
     * against the null-bias case.
     */
    public void setComponent(Object[][] newValues) {
        if (bias == null || newValues == null)
            return;
        double[][] values = bias.getValue();
        for (int x=0; (x < values.length) && (x < newValues.length); ++x)
            for (int y=0; (y < values[0].length) && (y < newValues[0].length); ++y)
                values[x][y] = ((Double)(newValues[x][y])).doubleValue();
    }
}
| Java |
/*
* User: Harry Glasgow
* Date: 12/12/2002
* Time: 18:39:22
* Class to get bias values for a Layer.
*/
package org.joone.inspection.implementations;
import org.joone.inspection.Inspection;
import org.joone.engine.Pattern;
import java.util.*;
/**
 * Exposes a buffered Vector of {@link Pattern} objects for inspection and
 * editing: column 0 carries the 1-based row number, the remaining columns
 * the pattern values.
 */
public class InputsInspection implements Inspection {
// The inspected patterns; may be null or empty.
private Vector inputs = null;
/**
 * Creates an inspection wrapper for the given patterns.
 * @param inputsArg the patterns to inspect, may be null
 */
public InputsInspection(Vector inputsArg) {
inputs = inputsArg;
}
/**
 * Returns the pattern values as a boxed table, one row per pattern with a
 * leading 1-based row number, or null when there are no patterns.
 * The table is sized from the first pattern's length; patterns are assumed
 * to be of equal length — TODO confirm.
 */
public Object[][] getComponent() {
if ((inputs != null) && (inputs.size() > 0)) {
Object[][] bigValues = null;
for (int i = 0; i < inputs.size(); i++) {
Pattern pattern = (Pattern)inputs.elementAt(i);
double [] array = pattern.getArray();
if (bigValues == null) {
bigValues =
new Object[inputs.size()][array.length + 1];
}
// Shift values right by one: column 0 is reserved for the row number.
for (int j = 1; j < array.length + 1; j++) {
Double d = new Double(array[j - 1]);
bigValues[i][j] = d;
}
}
for (int i = 0; i < inputs.size(); i++) {
bigValues[i][0] = new Integer(i + 1);
}
return bigValues;
} else {
return null;
}
}
public String getTitle() {
return "Inputs";
}
/* (non-Javadoc)
 * @see org.joone.inspection.Inspection#getNames()
 * Returns "Row Number" plus one "Column n" header per value column,
 * or null when there are no patterns.
 */
public Object[] getNames() {
if ((inputs != null) && (inputs.size() > 0)) {
Object[] names = null;
for (int i = 0; i < inputs.size(); i++) {
Pattern pattern = (Pattern)inputs.elementAt(i);
double[] array = pattern.getArray();
if (names == null) {
names = new String[array.length + 1];
}
names[0] = "Row Number";
for (int j = 1; j < array.length + 1; j++) {
names[j] = "Column " + j;
}
}
return names;
} else {
return null;
}
}
/* (non-Javadoc)
 * @see org.joone.inspection.Inspection#rowNumbers()
 */
public boolean rowNumbers() {
return true;
}
/**
 * Writes edited table values back, replacing each Vector element with a
 * fresh Pattern that keeps the original count.
 * NOTE(review): the array returned by getArray() is mutated in place before
 * being wrapped in the new Pattern — presumably Pattern stores the
 * reference; verify against Pattern's implementation.
 */
public void setComponent(java.lang.Object[][] newValues) {
for (int x=0; (x < inputs.size()) && (x < newValues.length); ++x) {
double[] values = ((Pattern)inputs.elementAt(x)).getArray();
int n = ((Pattern)inputs.elementAt(x)).getCount();
// Column 0 (row number) is skipped: table column y+1 maps to value y.
for (int y=0; (y < values.length) && (y < (newValues[0].length - 1)); ++y) {
values[y] = ((Double)newValues[x][y+1]).doubleValue();
}
Pattern patt = new Pattern(values);
patt.setCount(n);
inputs.setElementAt(patt, x);
}
}
}
| Java |
/*
 * User: Harry Glasgow
 * Date: 12/12/2002
 * Time: 18:39:22
 * Class to get weight values for inspection.
 */
package org.joone.inspection.implementations;
import org.joone.inspection.Inspection;
import org.joone.engine.Matrix;
public class WeightsInspection implements Inspection {

    /** The weight matrix under inspection; may be null. */
    private Matrix weights;

    /**
     * Builds an inspection view over a weight matrix.
     *
     * @param weightsArg the weight matrix to show (may be null)
     */
    public WeightsInspection(Matrix weightsArg) {
        weights = weightsArg;
    }

    /**
     * Boxes the weight values into a 2D Object table, one cell per
     * matrix element. Returns null when there is no matrix, no value
     * array, or the array is empty.
     */
    public Object[][] getComponent() {
        if (weights != null && weights.getValue() != null) {
            double[][] values = weights.getValue();
            if (values.length > 0 && values[0].length > 0) {
                Object[][] bigValues = new Object[values.length][values[0].length];
                for (int i = 0; i < values[0].length; i++) {
                    for (int j = 0; j < values.length; j++) {
                        bigValues[j][i] = new Double(values[j][i]);
                    }
                }
                return bigValues;
            }
        }
        return null;
    }

    /** Tab title shown by the inspection UI. */
    public String getTitle() {
        return "Weights";
    }

    /* (non-Javadoc)
     * @see org.joone.inspection.Inspection#getNames()
     */
    public Object[] getNames() {
        // Guard against a null value array too, consistently with
        // getComponent(); the original only checked the matrix itself
        // and could throw a NullPointerException.
        if (weights == null || weights.getValue() == null)
            return null;
        double[][] values = weights.getValue();
        if (values.length > 0 && values[0].length > 0) {
            Object[] names = new String[values[0].length];
            for (int i = 0; i < values[0].length; i++) {
                names[i] = "Column "+i;
            }
            return names;
        } else
            return null;
    }

    /* (non-Javadoc)
     * @see org.joone.inspection.Inspection#rowNumbers()
     */
    public boolean rowNumbers() {
        return false;
    }

    /**
     * Writes the edited values back into the weight matrix, clipped to
     * the overlapping region of the two tables. Does nothing when there
     * is no matrix (or no data) to update, instead of throwing.
     */
    public void setComponent(java.lang.Object[][] newValues) {
        if (weights == null || weights.getValue() == null || newValues == null)
            return;
        double[][] values = weights.getValue();
        for (int x=0; (x < values.length) && (x < newValues.length); ++x)
            for (int y=0; (y < values[0].length) && (y < newValues[0].length); ++y)
                values[x][y] = ((Double)(newValues[x][y])).doubleValue();
    }
}
| Java |
/*
* User: Harry Glasgow
* Date: 12/12/2002
* Time: 18:19:16
* Interface that defines classes of data that
* an Inspectable object can produce.
 * The constructor should take parameters sufficient to
 * be able to create the Component.
*/
package org.joone.inspection;
public interface Inspection {
    /**
     * Method to get the inspectable values of the Inspectable class.
     * Implementations may return null when there is no data to show.
     * @see org.joone.Inspectable
     * @return a 2D table representing the Inspectable class data, or null.
     */
    public Object[][] getComponent();
    /**
     * Method to get the title of the object for
     * display in the InspectionFrame title bar.
     * @see org.joone.InspectionFrame
     * @return title of the object.
     */
    public String getTitle();
    /** Method to get the necessity to display a
     * column containing the rows' numbers for
     * these inspection values.
     * @return true if the row numbers must be displayed
     */
    public boolean rowNumbers();
    /** Method to get the names of each column.
     *
     * @return the columns' names, or null when there is no data.
     */
    public Object[] getNames();
    /** Sets the array of values for this component, writing the
     * (possibly edited) table back into the underlying data.
     * @param newValues Array of new values
     */
    public void setComponent(final java.lang.Object[][] newValues);
}
| Java |
/*
* JooneTools.java
*
* Created on January 16, 2006, 4:19 PM
*
* Copyright @2005 by Paolo Marrone and the Joone team
* Licensed under the Lesser General Public License (LGPL);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.gnu.org/
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joone.helpers.factory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutput;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.TreeSet;
import java.util.Vector;
import org.joone.engine.DelayLayer;
import org.joone.engine.DirectSynapse;
import org.joone.engine.FullSynapse;
import org.joone.engine.GaussianLayer;
import org.joone.engine.KohonenSynapse;
import org.joone.engine.Layer;
import org.joone.engine.LinearLayer;
import org.joone.engine.Monitor;
import org.joone.engine.NeuralNetEvent;
import org.joone.engine.NeuralNetListener;
import org.joone.engine.Pattern;
import org.joone.engine.SigmoidLayer;
import org.joone.engine.SoftmaxLayer;
import org.joone.engine.Synapse;
import org.joone.engine.WTALayer;
import org.joone.engine.learning.ComparingSynapse;
import org.joone.engine.learning.TeachingSynapse;
import org.joone.engine.listeners.ErrorBasedTerminator;
import org.joone.io.MemoryInputSynapse;
import org.joone.io.MemoryOutputSynapse;
import org.joone.io.StreamInputSynapse;
import org.joone.net.NeuralNet;
import org.joone.net.NeuralNetAttributes;
/**
* Utility class to build/train/interrogate neural networks.
* By using this class, it's possible to easily build/train and interrogate
* a neural network with only 3 rows of code, as in this example:
*
* // Create an MLP network with 3 layers [2,2,1 nodes] with a logistic output layer
* NeuralNet nnet = JooneTools.create_standard(new int[]{2,2,1}, JooneTools.LOGISTIC);
* // Train the network for 5000 epochs, or until the rmse < 0.01
* double rmse = JooneTools.train(nnet, inputArray, desiredArray, 5000, 0.01, 0, null);
* // Interrogate the network
* double[] output = JooneTools.interrogate(nnet, testArray);
*
* @author paolo
*/
public class JooneTools {
    // Kinds of output layer
    /**
     * Linear output layer
     */
    public static final int LINEAR = 1;
    /**
     * Logistic (sigmoid) output layer
     */
    public static final int LOGISTIC = 2;
    /**
     * Softmax output layer
     */
    public static final int SOFTMAX = 3;
    /**
     * WTA output layer (unsupervised Kohonen)
     */
    public static final int WTA = 4;
    /**
     * Gaussian output layer (unsupervised Kohonen)
     */
    public static final int GAUSSIAN = 5;
    // Training algorithms (also used as keys for Monitor.addLearner)
    /**
     * Backprop on-line (incremental) learning algorithm
     */
    public static final int BPROP_ONLINE = 0;
    /**
     * Backprop batch learning algorithm
     */
    public static final int BPROP_BATCH = 1;
    /**
     * Resilient Backprop learning algorithm
     */
    public static final int RPROP = 2;
    /**
     * Creates a feed forward neural network without I/O components.
     * @param nodes array of integers containing the nodes of each layer
     * @param outputType the type of output layer. One of 'LINEAR', 'SOFTMAX', 'LOGISTIC'
     * @return The neural network created
     * @throws java.lang.IllegalArgumentException if nodes is null or has
     *         fewer than 2 elements, or outputType is not supported.
     */
    public static NeuralNet create_standard(int nodes[], int outputType) throws IllegalArgumentException {
        NeuralNet nnet = new NeuralNet();
        if ((nodes == null) || (nodes.length < 2)) {
            throw new IllegalArgumentException("create_standard: Nodes is empty");
        }
        Layer[] layers = new Layer[nodes.length];
        // Input layer
        layers[0] = new LinearLayer();
        layers[0].setRows(nodes[0]);
        layers[0].setLayerName("input");
        nnet.addLayer(layers[0], NeuralNet.INPUT_LAYER);
        // Hidden layers (all sigmoid)
        if (nodes.length > 2) {
            for (int i=1; i < nodes.length - 1; ++i) {
                layers[i] = new SigmoidLayer();
                layers[i].setRows(nodes[i]);
                layers[i].setLayerName("hidden"+i);
                nnet.addLayer(layers[i], NeuralNet.HIDDEN_LAYER);
            }
        }
        // Output layer
        int outp = nodes.length - 1;
        switch (outputType) {
            case LINEAR:
                layers[outp] = new LinearLayer(); break;
            case LOGISTIC:
                layers[outp] = new SigmoidLayer(); break;
            case SOFTMAX:
                layers[outp] = new SoftmaxLayer(); break;
            default:
                throw new IllegalArgumentException("create_standard: output type not supported");
        }
        layers[outp].setRows(nodes[outp]);
        layers[outp].setLayerName("output");
        nnet.addLayer(layers[outp], NeuralNet.OUTPUT_LAYER);
        // Internal connections: fully connect each adjacent pair of layers
        for (int i=0; i < layers.length - 1; ++i) {
            connect(layers[i], new FullSynapse(), layers[i+1]);
        }
        // Prepares the learning parameters
        Monitor mon = nnet.getMonitor();
        mon.addLearner(BPROP_ONLINE, "org.joone.engine.BasicLearner"); // Default
        mon.addLearner(BPROP_BATCH, "org.joone.engine.BatchLearner");
        mon.addLearner(RPROP, "org.joone.engine.RpropLearner");
        mon.setLearningRate(0.7);
        mon.setMomentum(0.7);
        return nnet;
    }
    /**
     * Creates a time-delay feed forward neural network without I/O components.
     * Identical to create_standard except that the input layer is a DelayLayer.
     * @param nodes array of integers containing the nodes of each layer
     * @param taps the number of taps (delay steps) of the input DelayLayer
     * @param outputType the type of output layer. One of 'LINEAR', 'SOFTMAX', 'LOGISTIC'
     * @return The neural network created
     * @throws java.lang.IllegalArgumentException if nodes is null or has
     *         fewer than 2 elements, or outputType is not supported.
     */
    public static NeuralNet create_timeDelay(int nodes[], int taps, int outputType) throws IllegalArgumentException {
        NeuralNet nnet = new NeuralNet();
        if ((nodes == null) || (nodes.length < 2)) {
            throw new IllegalArgumentException("create_standard: nodes: not enough elements");
        }
        Layer[] layers = new Layer[nodes.length];
        // Input layer: a delay line with 'taps' taps
        layers[0] = new DelayLayer();
        layers[0].setRows(nodes[0]);
        ((DelayLayer)layers[0]).setTaps(taps);
        layers[0].setLayerName("input");
        nnet.addLayer(layers[0], NeuralNet.INPUT_LAYER);
        // Hidden layers (all sigmoid)
        if (nodes.length > 2) {
            for (int i=1; i < nodes.length - 1; ++i) {
                layers[i] = new SigmoidLayer();
                layers[i].setRows(nodes[i]);
                layers[i].setLayerName("hidden"+i);
                nnet.addLayer(layers[i], NeuralNet.HIDDEN_LAYER);
            }
        }
        // Output layer
        int outp = nodes.length - 1;
        switch (outputType) {
            case LINEAR:
                layers[outp] = new LinearLayer(); break;
            case LOGISTIC:
                layers[outp] = new SigmoidLayer(); break;
            case SOFTMAX:
                layers[outp] = new SoftmaxLayer(); break;
            default:
                throw new IllegalArgumentException("create_standard: output type not supported");
        }
        layers[outp].setRows(nodes[outp]);
        layers[outp].setLayerName("output");
        nnet.addLayer(layers[outp], NeuralNet.OUTPUT_LAYER);
        // Internal connections: fully connect each adjacent pair of layers
        for (int i=0; i < layers.length - 1; ++i) {
            connect(layers[i], new FullSynapse(), layers[i+1]);
        }
        // Prepares the learning parameters
        Monitor mon = nnet.getMonitor();
        mon.addLearner(BPROP_ONLINE, "org.joone.engine.BasicLearner"); // Default
        mon.addLearner(BPROP_BATCH, "org.joone.engine.BatchLearner");
        mon.addLearner(RPROP, "org.joone.engine.RpropLearner");
        mon.setLearningRate(0.7);
        mon.setMomentum(0.7);
        return nnet;
    }
    /**
     * Creates an unsupervised neural network without I/O components.
     * This method is able to build the following kind of networks, depending on the 'outputType' parameter:
     * WTA - Kohonen network with a WinnerTakeAll output layer
     * GAUSSIAN - Kohonen network with a gaussian output layer
     * The nodes array must contain 3 elements, with the following meaning:
     * nodes[0] = Rows of the input layer
     * nodes[1] = Width of the output map
     * nodes[2] = Height of the output map
     * @param nodes array of integers containing the nodes of each layer (see note above)
     * @param outputType the type of output layer. One of 'WTA', 'GAUSSIAN'
     * @return The neural network created
     * @throws java.lang.IllegalArgumentException if nodes is null or has
     *         fewer than 3 elements, or outputType is not supported.
     */
    public static NeuralNet create_unsupervised(int nodes[], int outputType) throws IllegalArgumentException {
        NeuralNet nnet = new NeuralNet();
        if ((nodes == null) || (nodes.length < 3)) {
            throw new IllegalArgumentException("create_unsupervised: nodes: not enough elements");
        }
        // A Kohonen network contains 2 layers
        Layer[] layers = new Layer[2];
        // Input layer
        layers[0] = new LinearLayer();
        layers[0].setRows(nodes[0]);
        layers[0].setLayerName("input");
        nnet.addLayer(layers[0], NeuralNet.INPUT_LAYER);
        // Output layer: a 2D map of nodes[1] x nodes[2] elements
        switch (outputType) {
            case WTA:
                layers[1] = new WTALayer();
                ((WTALayer)layers[1]).setLayerWidth(nodes[1]);
                ((WTALayer)layers[1]).setLayerHeight(nodes[2]);
                break;
            case GAUSSIAN:
                layers[1] = new GaussianLayer();
                ((GaussianLayer)layers[1]).setLayerWidth(nodes[1]);
                ((GaussianLayer)layers[1]).setLayerHeight(nodes[2]);
                break;
            default:
                throw new IllegalArgumentException("create_unsupervised: output type not supported");
        }
        layers[1].setLayerName("output");
        nnet.addLayer(layers[1], NeuralNet.OUTPUT_LAYER);
        // Synapse
        connect(layers[0], new KohonenSynapse(), layers[1]);
        Monitor mon = nnet.getMonitor();
        mon.setLearningRate(0.7);
        return nnet;
    }
    /**
     * Interrogate a neural network with an array of doubles and returns the output
     * of the neural network. Replaces any I/O component already attached
     * to the network with temporary DirectSynapses.
     * @param nnet The neural network to interrogate
     * @param input The input pattern (must have the size = # of input nodes)
     * @return An array of double having size = # of output nodes
     */
    public static double[] interrogate(NeuralNet nnet, double[] input) {
        nnet.removeAllInputs();
        nnet.removeAllOutputs();
        DirectSynapse inputSynapse = new DirectSynapse();
        DirectSynapse outputSynapse = new DirectSynapse();
        nnet.addInputSynapse(inputSynapse);
        nnet.addOutputSynapse(outputSynapse);
        Pattern inputPattern = new Pattern(input);
        inputPattern.setCount(1);
        nnet.getMonitor().setLearning(false);
        // Start the network
        // TODO: Adapt to the single-thread mode
        nnet.start();
        // Interrogate the network
        inputSynapse.fwdPut(inputPattern);
        Pattern outputPattern = outputSynapse.fwdGet();
        // Stop the network: a pattern with count == -1 (see stopPattern)
        // signals the end of the input stream.
        inputSynapse.fwdPut(stopPattern(input.length));
        outputSynapse.fwdGet();
        nnet.join();
        return outputPattern.getArray();
    }
    /**
     * Trains a neural network using the input/desired pairs contained in 2D arrays of double.
     * If Monitor.trainingPatterns = 0, all the input array's rows will be used for training.
     * @param nnet The neural network to train
     * @param input 2D array of double containing the training data. The # of columns must be equal to the # of input nodes
     * @param desired 2D array of double containing the target data. The # of columns must be equal to the # of output nodes
     * @param epochs Number of max training epochs
     * @param stopRMSE The desired min error at which the training must stop. If zero, the training continues until the last epoch is reached.
     * @param epochs_btw_reports Number of epochs between the notifications on the stdOut
     * @param stdOut The object representing the output. It can be either a PrintStream or a NeuralNetListener instance. If null, no notifications will be made.
     * @param async if true, the method returns after having started the network, without waiting for the completion. In this case, the value returned is zero.
     * @return The final training RMSE (or MSE)
     */
    public static double train(NeuralNet nnet,
            double[][] input, double[][] desired,
            int epochs, double stopRMSE,
            int epochs_btw_reports, Object stdOut,
            boolean async) {
        MemoryInputSynapse memInput = new MemoryInputSynapse();
        memInput.setInputArray(input);
        memInput.setAdvancedColumnSelector("1-"+input[0].length);
        MemoryInputSynapse memTarget = null;
        if (desired != null) {
            memTarget = new MemoryInputSynapse();
            memTarget.setInputArray(desired);
            memTarget.setAdvancedColumnSelector("1-"+desired[0].length);
        }
        Monitor mon = nnet.getMonitor();
        // Default the pattern count to the whole data set when not set
        if (mon.isValidation()) {
            if (mon.getValidationPatterns() == 0)
                mon.setValidationPatterns(input.length);
        } else {
            if (mon.getTrainingPatterns() == 0)
                mon.setTrainingPatterns(input.length);
        }
        return train_on_stream(nnet, memInput, memTarget,
                epochs, stopRMSE, epochs_btw_reports, stdOut, async);
    }
    /**
     * Trains a neural network in unsupervised mode (SOM and PCA networks)
     * using the input contained in a 2D array of double.
     * @param nnet The neural network to train
     * @param input 2D array of double containing the training data. The # of columns must be equal to the # of input nodes
     * @param epochs Number of max training epochs
     * @param epochs_btw_reports Number of epochs between the notifications on the stdOut
     * @param stdOut The object representing the output. It can be either the System.out or a NeuralNetListener instance.
     * @param async if true, the method returns after having started the network, without waiting for the completion.
     */
    public static void train_unsupervised(NeuralNet nnet,
            double[][] input,
            int epochs,
            int epochs_btw_reports, Object stdOut,
            boolean async) {
        nnet.getMonitor().setSupervised(false);
        // No desired data and no stop error in unsupervised mode
        train(nnet, input, null, epochs, 0, epochs_btw_reports, stdOut, async);
    }
    /**
     * Trains a neural network using StreamInputSynapses as the input/desired data sources.
     * The Monitor.trainingPatterns must be set before to call this method.
     * @param nnet The neural network to train
     * @param input the StreamInputSynapse containing the training data. The advColumnSelector must be set according to the # of input nodes
     * @param desired the StreamInputSynapse containing the target data. The advColumnSelector must be set according to the # of output nodes
     * @param epochs Number of max training epochs
     * @param stopRMSE The desired min error at which the training must stop. If zero, the training continues until the last epoch is reached.
     * @param epochs_btw_reports Number of epochs between the notifications on the stdOut
     * @param stdOut The object representing the output. It can be either a PrintStream or a NeuralNetListener instance. If null, no notifications will be made.
     * @param async if true, the method returns after having started the network, without waiting for the completion. In this case, the value returned is zero.
     * @return The final training RMSE (or MSE)
     */
    public static double train_on_stream(NeuralNet nnet,
            StreamInputSynapse input, StreamInputSynapse desired,
            int epochs, double stopRMSE,
            int epochs_btw_reports, Object stdOut,
            boolean async) {
        nnet.removeAllInputs();
        nnet.removeAllOutputs();
        nnet.addInputSynapse(input);
        if (desired != null) {
            // Supervised training: attach a teacher fed by the target stream
            TeachingSynapse teacher = new TeachingSynapse();
            teacher.setDesired(desired);
            nnet.addOutputSynapse(teacher);
            nnet.setTeacher(teacher);
        }
        return train_complete(nnet, epochs, stopRMSE, epochs_btw_reports, stdOut, async);
    }
    /**
     * Trains a complete neural network, i.e. a network having
     * all the parameters and the I/O components already set.
     * @param nnet The neural network to train
     * @param epochs Number of max training epochs
     * @param stopRMSE The desired min error at which the training must stop. If zero, the training continues until the last epoch is reached.
     * @param epochs_btw_reports Number of epochs between the notifications on the stdOut
     * @param stdOut The object representing the output. It can be either a PrintStream or a NeuralNetListener instance. If null, no notifications will be made.
     * @param async if true, the method returns after having started the network, without waiting for the completion. In this case, the value returned is zero.
     * @return The final training RMSE (or MSE)
     */
    public static double train_complete(NeuralNet nnet,
            int epochs, double stopRMSE,
            int epochs_btw_reports, Object stdOut,
            boolean async) {
        nnet.removeAllListeners();
        Monitor mon = nnet.getMonitor();
        if (stdOut != null) {
            mon.addNeuralNetListener(createListener(nnet, stdOut, epochs_btw_reports));
        }
        // Optional early-stop listener triggered when the error
        // falls below stopRMSE
        ErrorBasedTerminator term = null;
        if (stopRMSE > 0) {
            term = new ErrorBasedTerminator(stopRMSE);
            term.setNeuralNet(nnet);
            mon.addNeuralNetListener(term);
        }
        mon.setTotCicles(epochs);
        mon.setLearning(!mon.isValidation());
        // check() returns the set of configuration errors; empty means OK
        TreeSet tree = nnet.check();
        if (tree.isEmpty()) {
            nnet.go(!async);
            // Returns if async=true
            if (async)
                return 0.0d;
            NeuralNetAttributes attrib = nnet.getDescriptor();
            if (term != null) {
                if (term.isStopRequestPerformed()) {
                    attrib.setLastEpoch(term.getStoppedCycle());
                } else {
                    attrib.setLastEpoch(mon.getTotCicles());
                }
            }
            if (mon.isValidation()) {
                attrib.setValidationError(mon.getGlobalError());
            } else {
                attrib.setTrainingError(mon.getGlobalError());
            }
            return mon.getGlobalError();
        } else {
            throw new IllegalArgumentException("Cannot start, errors found:"+tree.toString());
        }
    }
    /**
     * Tests a neural network using the input/desired pairs contained in 2D arrays of double.
     * This method doesn't change the weights, but calculates only the RMSE.
     * If Monitor.validationPatterns = 0, all the input array's rows will be used for testing.
     * @param nnet The neural network to test
     * @param input 2D array of double containing the test data. The # of columns must be equal to the # of input nodes
     * @param desired 2D array of double containing the target data. The # of columns must be equal to the # of output nodes
     * @return The test RMSE (or MSE)
     */
    public static double test(NeuralNet nnet,
            double[][] input, double[][] desired) {
        nnet.getMonitor().setValidation(true);
        // One synchronous epoch in validation mode: error only, no learning
        return train(nnet, input, desired, 1, 0, 0, null, false);
    }
    /**
     * Tests a neural network using StreamInputSynapses as the input/desired data sources.
     * This method doesn't change the weights, but calculates only the RMSE.
     * The Monitor.validationPatterns must be set before the call to this method.
     * @param nnet The neural network to test
     * @param input the StreamInputSynapse containing the test data. The advColumnSelector must be set according to the # of input nodes
     * @param desired the StreamInputSynapse containing the target data. The advColumnSelector must be set according to the # of output nodes
     * @return The test RMSE (or MSE)
     */
    public static double test_on_stream(NeuralNet nnet,
            StreamInputSynapse input, StreamInputSynapse desired) {
        nnet.getMonitor().setValidation(true);
        return train_on_stream(nnet, input, desired, 1, 0, 0, null, false);
    }
    /**
     * Permits to compare the output and target data of a trained neural network using 2D array of double as the input/desired data sources.
     * If Monitor.validationPatterns = 0, all the input array's rows will be used for testing.
     * @param nnet The neural network to test
     * @param input 2D array of double containing the test data. The # of columns must be equal to the # of input nodes
     * @param desired 2D array of double containing the target data. The # of columns must be equal to the # of output nodes
     * @return a 2D of double containing the output+desired data for each pattern.
     */
    public static double[][] compare(NeuralNet nnet,
            double[][] input, double[][] desired) {
        MemoryInputSynapse memInput = new MemoryInputSynapse();
        memInput.setInputArray(input);
        memInput.setAdvancedColumnSelector("1-"+input[0].length);
        MemoryInputSynapse memTarget = null;
        if (desired != null) {
            memTarget = new MemoryInputSynapse();
            memTarget.setInputArray(desired);
            memTarget.setAdvancedColumnSelector("1-"+desired[0].length);
        }
        Monitor mon = nnet.getMonitor();
        nnet.getMonitor().setValidation(true);
        if (mon.getValidationPatterns() == 0)
            mon.setValidationPatterns(input.length);
        return compare_on_stream(nnet, memInput, memTarget);
    }
    /**
     * Permits to compare the output and target data of a trained neural network using StreamInputSynapses as the input/desired data sources.
     *
     * @param nnet The neural network to train
     * @param input the StreamInputSynapse containing the training data. The advColumnSelector must be set according to the # of input nodes
     * @param desired the StreamInputSynapse containing the target data. The advColumnSelector must be set according to the # of output nodes
     * @return a 2D of double containing the output+desired data for each pattern.
     */
    public static double[][] compare_on_stream(NeuralNet nnet,
            StreamInputSynapse input, StreamInputSynapse desired) {
        nnet.removeAllInputs();
        nnet.removeAllOutputs();
        nnet.addInputSynapse(input);
        // The ComparingSynapse joins output and desired data side by side
        ComparingSynapse teacher = new ComparingSynapse();
        teacher.setDesired(desired);
        nnet.addOutputSynapse(teacher);
        MemoryOutputSynapse outStream = new MemoryOutputSynapse();
        teacher.addResultSynapse(outStream);
        train_complete(nnet, 1, 0, 0, null, false);
        Vector results = outStream.getAllPatterns();
        int rows = results.size();
        int columns = ((Pattern)results.get(0)).getArray().length;
        double[][] output = new double[rows][columns];
        for (int i=0; i < rows; ++i) {
            output[i] = ((Pattern)results.get(i)).getArray();
        }
        return output;
    }
    /**
     * Extracts a subset of data from the StreamInputSynapse passed as parameter.
     * The row/column indexes are treated as 1-based offsets into the
     * internal buffer, and the bounds are inclusive.
     * @return A 2D array of double containing the extracted data
     * @param dataSet The input StreamInputSynapse. Must be buffered.
     * @param firstRow The first row (relative to the internal buffer) to extract
     * @param lastRow The last row (relative to the internal buffer) to extract
     * @param firstCol The first column (relative to the internal buffer) to extract
     * @param lastCol The last column (relative to the internal buffer) to extract
     */
    public static double[][] getDataFromStream(StreamInputSynapse dataSet,
            int firstRow, int lastRow,
            int firstCol, int lastCol) {
        // Force the reading of all the input data
        dataSet.Inspections();
        Vector data = dataSet.getInputPatterns();
        int rows = lastRow - firstRow + 1;
        int columns = lastCol - firstCol + 1;
        double[][] array = new double[rows][columns];
        for (int r=0; r < rows; ++r) {
            double[] temp = ((Pattern)data.get(r + firstRow - 1)).getArray();
            for (int c=0; c < columns; ++c) {
                array[r][c] = temp[c + firstCol - 1];
            }
        }
        return array;
    }
    /**
     * Saves a neural network to a file
     * @param nnet The network to save
     * @param fileName the file name on which the network is saved
     * @throws java.io.FileNotFoundException if the file name is invalid
     * @throws java.io.IOException when an IO error occurs
     */
    public static void save(NeuralNet nnet, String fileName) throws FileNotFoundException, IOException {
        FileOutputStream stream = new FileOutputStream(fileName);
        // save_toStream closes the wrapping ObjectOutputStream, which also
        // closes this stream. NOTE(review): the stream is not closed if
        // save_toStream throws before closing — consider a finally block.
        save_toStream(nnet, stream);
    }
    /**
     * Saves a neural network to a file
     * @param nnet The network to save
     * @param fileName the file on which the network is saved
     * @throws java.io.FileNotFoundException if the file name is invalid
     * @throws java.io.IOException when an IO error occurs
     */
    public static void save(NeuralNet nnet, File fileName) throws FileNotFoundException, IOException {
        FileOutputStream stream = new FileOutputStream(fileName);
        save_toStream(nnet, stream);
    }
    /**
     * Saves a neural network to an OutputStream using Java serialization.
     * The stream is closed on success.
     * @param nnet The neural network to save
     * @param stream The OutputStream on which the network is saved
     * @throws java.io.IOException when an IO error occurs
     */
    public static void save_toStream(NeuralNet nnet, OutputStream stream) throws IOException {
        ObjectOutput output = new ObjectOutputStream(stream);
        output.writeObject(nnet);
        output.close();
    }
    /**
     * Loads a neural network from a file
     * @param fileName the name of the file from which the network is loaded
     * @throws java.io.IOException when an IO error occurs
     * @throws java.io.FileNotFoundException if the file name is invalid
     * @throws java.lang.ClassNotFoundException if some neural network's object is not found in the classpath
     * @return The loaded neural network
     */
    public static NeuralNet load(String fileName) throws FileNotFoundException, IOException, ClassNotFoundException {
        File NNFile = new File(fileName);
        FileInputStream fin = new FileInputStream(NNFile);
        NeuralNet nnet = load_fromStream(fin);
        fin.close();
        return nnet;
    }
    /**
     * Loads a neural network from an InputStream using Java serialization.
     * @param stream The InputStream from which the network is loaded
     * @throws java.io.IOException when an IO error occurs
     * @throws java.lang.ClassNotFoundException some neural network's object is not found in the classpath
     * @return The loaded neural network
     */
    public static NeuralNet load_fromStream(InputStream stream) throws IOException, ClassNotFoundException {
        ObjectInputStream oin = new ObjectInputStream(stream);
        NeuralNet nnet = (NeuralNet)oin.readObject();
        oin.close();
        return nnet;
    }
    /**
     * Connects two layers with the given synapse
     * @param l1 The source layer
     * @param syn The synapse to use to connect the two layers
     * @param l2 The destination layer
     */
    protected static void connect(Layer l1, Synapse syn, Layer l2) {
        l1.addOutputSynapse(syn);
        l2.addInputSynapse(syn);
    }
    /**
     * Creates a stop pattern (i.e. a Pattern with counter = -1)
     * @param size The size of the Pattern's array
     * @return the created stop Pattern
     */
    protected static Pattern stopPattern(int size) {
        Pattern stop = new Pattern(new double[size]);
        stop.setCount(-1);
        return stop;
    }
    /**
     * Creates a listener for a NeuralNet object.
     * The listener writes the results to the stdOut object every 'interval' epochs.
     * If stdOut points to a NeuralNetListener instance, the corresponding methods are invoked.
     * If stdOut points to a PrintStream instance, a corresponding message is written.
     * @param nnet The NeuralNetwork to which the listener will be attached
     * @param stdOut the NeuralNetListener, or the PrintStream instance to which the notifications will be made
     * @param interval The interval of epochs between two calls to the cyclic events cycleTerminated and errorChanged
     * @return The created listener
     */
    protected static NeuralNetListener createListener(final NeuralNet nnet,
            final Object stdOut, final int interval) {
        NeuralNetListener listener = new NeuralNetListener() {
            Object output = stdOut;
            // Note: 'interv' is never read; the captured 'interval' is
            // used directly in the methods below.
            int interv = interval;
            NeuralNet neuralNet = nnet;
            public void netStarted(NeuralNetEvent e) {
                if (output == null) {
                    return;
                }
                if (output instanceof PrintStream) {
                    ((PrintStream)output).println("Network started");
                } else if (output instanceof NeuralNetListener) {
                    e.setNeuralNet(neuralNet);
                    ((NeuralNetListener)output).netStarted(e);
                }
            }
            public void cicleTerminated(NeuralNetEvent e) {
                if (output == null) {
                    return;
                }
                Monitor mon = (Monitor)e.getSource();
                // NOTE(review): the printed epoch is totCicles - epoch,
                // which suggests the Monitor counts cycles downward —
                // confirm against the Monitor implementation.
                int epoch = mon.getCurrentCicle() - 1;
                if ((interval == 0) || (epoch % interval > 0))
                    return;
                if (output instanceof PrintStream) {
                    ((PrintStream)output).print("Epoch n."+(mon.getTotCicles()-epoch)+" terminated");
                    if (mon.isSupervised()) {
                        ((PrintStream)output).print(" - rmse: "+mon.getGlobalError());
                    }
                    ((PrintStream)output).println("");
                } else if (output instanceof NeuralNetListener) {
                    e.setNeuralNet(neuralNet);
                    ((NeuralNetListener)output).cicleTerminated(e);
                }
            }
            public void errorChanged(NeuralNetEvent e) {
                if (output == null) {
                    return;
                }
                Monitor mon = (Monitor)e.getSource();
                int epoch = mon.getCurrentCicle() - 1;
                if ((interval == 0) || (epoch % interval > 0))
                    return;
                // Only forwarded to a listener; nothing is printed here
                if (output instanceof NeuralNetListener) {
                    e.setNeuralNet(neuralNet);
                    ((NeuralNetListener)output).errorChanged(e);
                }
            }
            public void netStopped(NeuralNetEvent e) {
                if (output == null) {
                    return;
                }
                if (output instanceof PrintStream) {
                    ((PrintStream)output).println("Network stopped");
                } else if (output instanceof NeuralNetListener) {
                    e.setNeuralNet(neuralNet);
                    ((NeuralNetListener)output).netStopped(e);
                }
            }
            public void netStoppedError(NeuralNetEvent e,String error) {
                if (output == null) {
                    return;
                }
                if (output instanceof PrintStream) {
                    ((PrintStream)output).println("Network stopped with error:"+error);
                } else if (output instanceof NeuralNetListener) {
                    e.setNeuralNet(neuralNet);
                    ((NeuralNetListener)output).netStoppedError(e, error);
                }
            }
        };
        return listener;
    }
}
| Java |
/*
* CodeGenerator.java
*
* Created on September 21, 2005, 3:53 PM
*/
package org.joone.helpers.templating;
import java.io.StringWriter;
import java.util.Properties;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.Template;
import org.apache.velocity.app.Velocity;
import org.apache.velocity.exception.MethodInvocationException;
import org.apache.velocity.exception.ParseErrorException;
import org.apache.velocity.exception.ResourceNotFoundException;
import org.joone.helpers.structure.NeuralNetMatrix;
import org.joone.net.NeuralNet;
import org.joone.net.NeuralNetLoader;
/**
* This Class generates the source code to build the neural network passed
* as parameter to the getCode method. It searches the file containing the
* template either in the file system or in the classpath.
*
* @author Paolo Marrone
*/
public class CodeGenerator {

    // Velocity context reused across all getCode() calls of this instance
    VelocityContext context = null;

    /** Creates a new instance of CodeGenerator using the default
     * configuration: templates are searched both in the file system
     * (current directory) and in the classpath.
     *
     * @throws Exception if the Velocity engine cannot be initialized
     */
    public CodeGenerator() throws Exception {
        context = init();
    }

    /** Creates a new instance of CodeGenerator configured with the given
     * Velocity properties.
     *
     * @param props the Velocity engine configuration properties
     * @throws Exception if the Velocity engine cannot be initialized
     */
    public CodeGenerator(Properties props) throws Exception {
        context = init(props);
    }

    /**
     * Command-line smoke test: generates the source code for the network
     * serialized in 'xor.snet' using the 'codeTemplate.vm' template and
     * prints it to stdout.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) throws Exception {
        CodeGenerator me = new CodeGenerator();
        NeuralNetLoader loader = new NeuralNetLoader("xor.snet");
        NeuralNet nnet = loader.getNeuralNet();
        String code = me.getCode(nnet,
                "codeTemplate.vm",
                "org.joone.test.templating",
                "TestClass");
        System.out.println(code);
    }

    /**
     * Initializes the Velocity engine with the default resource loaders
     * (file system + classpath) and returns a fresh context.
     * NOTE(review): Velocity.init configures a static singleton, so the
     * configuration of the first initialized CodeGenerator wins — confirm
     * this is acceptable when multiple instances are created.
     */
    protected VelocityContext init() throws Exception {
        Properties props = new Properties();
        // Specify two resource loaders to use: file and class
        // TODO: Get the following settings from an external property file?
        props.setProperty("resource.loader","file, class");
        props.setProperty("file.resource.loader.description", "Velocity File Resource Loader");
        props.setProperty("file.resource.loader.class", "org.apache.velocity.runtime.resource.loader.FileResourceLoader");
        props.setProperty("file.resource.loader.path", ".");
        props.setProperty("file.resource.loader.cache", "false");
        props.setProperty("file.resource.loader.modificationCheckInterval", "0");
        props.setProperty("class.resource.loader.description","Velocity Classpath Resource Loader");
        props.setProperty("class.resource.loader.class","org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        return init(props);
    }

    /**
     * Initializes the Velocity engine with the given properties and
     * returns a fresh VelocityContext.
     *
     * @param props the Velocity engine configuration properties
     */
    protected VelocityContext init(Properties props) throws Exception {
        Velocity.init(props);
        return new VelocityContext();
    }

    /**
     * Generates the source code that builds the given neural network by
     * merging its flat description (a NeuralNetMatrix) into the named
     * Velocity template.
     * Errors are printed to stderr and whatever has been merged so far
     * (possibly an empty string) is returned — this method never throws.
     *
     * @param nnet the network to generate code for (cloned before being dissected)
     * @param templateName the Velocity template to merge
     * @param packageName value bound to the template's $package variable
     * @param className value bound to the template's $class variable
     * @return the generated, trimmed source code
     */
    public String getCode(NeuralNet nnet,
            String templateName,
            String packageName,
            String className) {
        String message;
        StringWriter sw = new StringWriter();
        try {
            NeuralNetMatrix nMatrix = new NeuralNetMatrix(nnet.cloneNet());
            context.put("netDescriptor", nMatrix);
            context.put("package", packageName);
            context.put("class", className);
            Template template = null;
            try {
                template = Velocity.getTemplate(templateName);
                template.merge( context, sw );
            } catch( ResourceNotFoundException rnfe ) {
                message = "couldn't find the template";
                throw new Exception(message, rnfe);
            } catch( ParseErrorException pee ) {
                message = "syntax error : problem parsing the template";
                throw new Exception(message, pee);
            } catch( MethodInvocationException mie ) {
                message = "Exception threw in the template code";
                throw new Exception(message, mie);
            } catch( Exception e ) {
                e.printStackTrace();
            }
        } catch (Exception exc) {
            // The wrapped Velocity exceptions from above land here as well
            exc.printStackTrace();
        }
        return sw.toString().trim();
    }
}
| Java |
/*
* NeuralNetMatrix.java
*
* Created on 20 gennaio 2004, 22.11
*/
package org.joone.helpers.structure;
import org.joone.engine.*;
import org.joone.engine.learning.*;
import org.joone.net.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutput;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.*;
/**
* Utility class that performs several useful 'decompositions'
* of a NeuralNet object, making very simple to handle its internal
* structure.
* This class can be useful to analyze or transform a neural network,
* because it reduces the network to a 'flat' structure,
* where no object is pointed to more than once.
* It also, by means of the getConnectionMatrix public method,
* returns a 2D representation of the internal Synapses of
* a neural network.
*
* @author P.Marrone
*/
public class NeuralNetMatrix {

    /** All the Layers of the dissected network, in their original order. */
    private ArrayList layers;

    /** Connection wrappers, one for each Synapse attached to a Layer on both sides. */
    private ArrayList connectionSet;

    /** The Monitor extracted from the dissected network. */
    private Monitor monitor;

    private Layer inputLayer = null;
    private Layer outputLayer = null;

    // Lazily computed indexes of the input/output layers (-1 = not computed yet)
    private int inputLayerInd = -1;
    private int outputLayerInd = -1;

    // Temporary Synapse -> Connection map filled while dissecting the network
    transient Hashtable synTemp;

    /** Creates a new, empty instance of NeuralNetMatrix */
    public NeuralNetMatrix() {
    }

    /** Creates a new instance of NeuralNetMatrix and immediately dissects
     * the given network */
    public NeuralNetMatrix(NeuralNet net) {
        this.setNeuralNet(net);
    }

    /** Method used to set the neural network to dissect.
     * Extracts the Monitor, the Layers and all the internal Synapses of
     * the network, storing them in flat collections.
     */
    public void setNeuralNet(NeuralNet net) {
        int n = net.getLayers().size();
        inputLayer = net.findInputLayer();
        outputLayer = net.findOutputLayer();
        // Extract and save the Monitor
        monitor = net.getMonitor();
        /* Puts the layers into an ArrayList and extracts
         * the synapses by inserting them into a hashtable
         */
        layers = new ArrayList(net.getLayers());
        synTemp = new Hashtable();
        for (int i=0; i < n; ++i) {
            Layer ly = (Layer)layers.get(i);
            checkInputs(i, ly);
            checkOutputs(i, ly);
        }
        Enumeration enumerat = synTemp.keys();
        connectionSet = new ArrayList();
        while (enumerat.hasMoreElements()) {
            Object key = enumerat.nextElement();
            Connection tsyn = (Connection)synTemp.get(key);
            int x = tsyn.getInput();
            int y = tsyn.getOutput();
            // Layer indexes are 1-based, so x*y > 0 means the synapse is
            // attached to a Layer on both of its sides
            if (x * y > 0) {
                connectionSet.add(tsyn);
            }
        }
    }

    /** Converts the neural structure to a matrix
     * containing all the synapses of the neural network.
     * At each not-null [x][y] element of the 2D array,
     * there is a pointer to the Synapse connecting
     * the Layer[x] to the Layer[y].
     * The returned 2D array of Synapses could be used, for instance,
     * to draw a graphical representation of the neural network.
     */
    public Synapse[][] getConnectionMatrix() {
        Synapse[][] connectionMatrix = new Synapse[layers.size()][layers.size()];
        for (int n=0; n < connectionSet.size(); ++n) {
            Connection tsyn = (Connection)connectionSet.get(n);
            int x = tsyn.getInput();
            int y = tsyn.getOutput();
            // Connection indexes are 1-based, the matrix is 0-based
            connectionMatrix[x-1][y-1] = tsyn.getSynapse();
        }
        return connectionMatrix;
    }

    /** Searches a path between two layers.
     * Useful in order to discover recurrent networks
     *
     * @return true if there is a path from fromLayer to toLayer
     */
    public boolean isThereAnyPath(Layer fromLayer, Layer toLayer) {
        boolean retValue = false;
        int iFrom = getLayerInd(fromLayer);
        int iTo = getLayerInd(toLayer);
        retValue = isThereAnyPath(iFrom, iTo, getConnectionMatrix());
        return retValue;
    }

    /* Same as the above method, but with layers' indexes instead of pointers
     * Used recursively to discover paths.
     * NOTE(review): the recursion does not track visited layers, so a cycle
     * made of non-loopback synapses could recurse without bound — confirm
     * that callers only apply it to feed-forward topologies.
     */
    private boolean isThereAnyPath(int iFrom, int iTo, Synapse[][] matrix) {
        boolean retValue = false;
        for (int t=0; (t < layers.size()) && !retValue; ++t) {
            Synapse conn = matrix[iFrom][t];
            if ((conn != null) && (!conn.isLoopBack())) {
                if (t == iTo)
                    retValue = true;
                else
                    retValue = isThereAnyPath(t, iTo, matrix);
            }
        }
        return retValue;
    }

    /** Converts the neural structure to a matrix
     * containing all the synapses of the neural network.
     * The indexes indicates the layer in Layer[] getOrderedLayers().
     * At each not-null [x][y] element of the 2D array,
     * there is a pointer to the Synapse connecting
     * the (ordered)Layer[x] to the (ordered)Layer[y].
     * The returned 2D array of Synapses could be used, for instance,
     * to draw a graphical representation of the neural network.
     *
     * @return a matrix where the indexes x,y point to the Layers
     * returned by getOrderedLayers().
     */
    public Synapse[][] getOrderedConnectionMatrix() {
        // Just to fill the translation array
        getOrderedLayers();
        Synapse[][] connectionMatrix = new Synapse[layers.size()][layers.size()];
        for (int n=0; n < connectionSet.size(); ++n) {
            Connection tsyn = (Connection)connectionSet.get(n);
            int x = tsyn.getInput();
            int y = tsyn.getOutput();
            connectionMatrix[translation[x-1]][translation[y-1]] = tsyn.getSynapse();
        }
        return connectionMatrix;
    }

    /** Converts the neural structure to a matrix
     * containing all the synapses of the neural network.
     * The index indicates the layer in Layer[] getOrderedLayers().
     * True means connection between the corresponding layers.
     *
     * @return a matrix where the indexes x,y point to the Layers
     * returned by getOrderedLayers().
     */
    public boolean[][] getBinaryOrderedConnectionMatrix() {
        // Just to fill the translation array
        getOrderedLayers();
        boolean[][] booleanConnectionMatrix = new boolean[layers.size()][layers.size()];
        for (int n=0; n < connectionSet.size(); ++n) {
            Connection tsyn = (Connection)connectionSet.get(n);
            int x = tsyn.getInput();
            int y = tsyn.getOutput();
            booleanConnectionMatrix[translation[x-1]][translation[y-1]] = true;
        }
        return booleanConnectionMatrix;
    }

    // This array, after the call to getOrderedLayers, contains
    // the correspondence between the old and the new layers' order
    // (translation[originalIndex] == orderedIndex)
    int[] translation = null;

    /**
     * This method calculates the order of the layers
     * of a neural network, from the input to the output.
     * This method fills also the translations array.
     *
     * @return An array containing the ordered Layers, from the input to the output (i.e. layers[0]=input layer, layers[n-1]=output layer.
     */
    public Layer[] getOrderedLayers() {
        // TODO: To adapt to recurrent networks
        Synapse[][] connMatrix = getConnectionMatrix();
        if (connMatrix == null)
            return null;
        // An array containing the index of each layer
        int[] ord = new int[layers.size()];
        // First of all, finds the input layers, and assign them the order #1
        ArrayList inputLayers = getInputLayers(connMatrix);
        for (int i=0; i < inputLayers.size(); ++i) {
            int ind = ((Integer)inputLayers.get(i)).intValue();
            ord[ind] = 1;
        }
        boolean changed = assignOrderToLayers(ord, connMatrix);
        // Calculate the order until the array is OK (it didn't change)
        while (changed) {
            changed = assignOrderToLayers(ord, connMatrix);
        }
        /* Now puts the layers into ordLayers according to
         * the order contained in the ord[] array, and
         * fills the translation array.
         */
        translation = new int[layers.size()];
        Layer[] ordLayers = new Layer[layers.size()];
        int n=1; // the current order number to find within ord[]
        // d is advanced by the inner loop each time a layer with order n is placed
        for (int d=0; d < layers.size(); ++n) {
            // Searches in ord[] the elements containing n
            for (int x=0; x < ord.length; ++x) {
                if (ord[x] == n) {
                    ordLayers[d] = (Layer)layers.get(x);
                    translation[x] = d; // Layers[x] ==> orderedLayers[d]
                    ++d;
                }
            }
        }
        return ordLayers;
    }

    /*
     * This routine assignes the correct order to each layer,
     * depending on the order of the layer that feeds it.
     * If the Layer[n] feeds the Layer[m], then the Layer[m] is assigned
     * the Layer[n]'s order + 1.
     * Returns false only if no order is changed (to understand when to stop).
     */
    private boolean assignOrderToLayers(int[] ord, Synapse[][] connMatrix) {
        boolean changed = false;
        for (int x=0; x < ord.length; ++x) {
            int currLayer = ord[x];
            if (currLayer > 0) {
                for (int y=0; y < connMatrix[x].length; ++y) {
                    // Loopback (recurrent) synapses do not influence the order
                    if ((connMatrix[x][y] != null) && !connMatrix[x][y].isLoopBack()) {
                        if (currLayer >= ord[y]) {
                            ord[y] = currLayer + 1;
                            changed = true;
                        }
                    }
                }
            }
        }
        return changed;
    }

    /** Searches for all the input layers of the network.
     * An input layer is represented by each column in
     * connectionMatrix that doesn't contain any Synapse
     *
     * @return an ArrayList containing Integers that point to the indexes of the input layers
     */
    public ArrayList getInputLayers(Synapse[][] connMatrix) {
        ArrayList inputs = new ArrayList();
        for (int y=0; y < connMatrix.length; ++y) {
            boolean found = false;
            for (int x=0; x < connMatrix[y].length; ++x) {
                if (connMatrix[x][y] != null) {
                    // Recurrent connections are ignored
                    if (!connMatrix[x][y].isLoopBack()) {
                        found = true;
                        break;
                    }
                }
            }
            if (!found) {
                inputs.add(new Integer(y));
            }
        }
        return inputs;
    }

    /**
     * Clones a neural element by serializing it to a byte array and
     * deserializing it back (a deep copy).
     * @return the clone of the element passed as parameter, or null if the
     * (de)serialization fails — the error is printed to stderr
     * @param element The element to clone
     */
    public Serializable cloneElement(Serializable element){
        try {
            //Serialize to a byte array
            ByteArrayOutputStream bos = new ByteArrayOutputStream() ;
            ObjectOutput out = new ObjectOutputStream(bos) ;
            out.writeObject(element);
            out.close();
            byte[] buf = bos.toByteArray();
            // Deserialize from a byte array
            ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(buf));
            Object theCLone = in.readObject();
            in.close();
            return (Serializable)theCLone;
        } catch (IOException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Registers, for each input Synapse of the given Layer, which Layer it
     * feeds: the Layer's 1-based index and the Synapse's position within
     * the Layer's input list.
     */
    private void checkInputs(int n, Layer ly) {
        Vector inps = ly.getAllInputs();
        if (inps == null)
            return;
        for (int i=0; i < inps.size(); ++i) {
            InputPatternListener ipl = (InputPatternListener)inps.elementAt(i);
            if ((ipl != null) && (ipl instanceof Synapse)) {
                Connection temp = getSynapse((Synapse)ipl);
                temp.setOutput(n+1);
                temp.setOutIndex(i);
            }
        }
    }

    /** Registers, for each output Synapse of the given Layer, which Layer
     * feeds it: the Layer's 1-based index and the Synapse's position within
     * the Layer's output list.
     */
    private void checkOutputs(int n, Layer ly) {
        Vector outs = ly.getAllOutputs();
        if (outs == null)
            return;
        for (int i=0; i < outs.size(); ++i) {
            OutputPatternListener opl = (OutputPatternListener)outs.elementAt(i);
            if ((opl != null) && (opl instanceof Synapse)) {
                Connection temp = getSynapse((Synapse)opl);
                temp.setInput(n+1);
                temp.setInpIndex(i);
            }
        }
    }

    /** Gets a Connection from the hashtable, and if it doesn't
     * exist, it is created and put into the hashtable
     */
    private Connection getSynapse(Synapse s) {
        Connection temp = (Connection)synTemp.get(s);
        if (temp == null) {
            temp = new Connection();
            temp.setSynapse(s);
            synTemp.put(s, temp);
        }
        return temp;
    }

    /** Getter for property layers.
     * @return Value of property layers.
     *
     */
    public ArrayList getLayers() {
        return this.layers;
    }

    /** Setter for property layers.
     * @param layers New value of property layers.
     *
     */
    public void setLayers(ArrayList layers) {
        this.layers = layers;
    }

    /** Getter for property connectionSet.
     * @return Value of property connectionSet.
     *
     */
    public ArrayList getConnectionSet() {
        return this.connectionSet;
    }

    /** Setter for property connectionSet.
     * @param connectionSet New value of property connectionSet.
     *
     */
    public void setConnectionSet(ArrayList connectionSet) {
        this.connectionSet = connectionSet;
    }

    /** Getter for property monitor.
     * @return Value of property monitor.
     *
     */
    public Monitor getMonitor() {
        return monitor;
    }

    /** Setter for property monitor.
     * @param monitor New value of property monitor.
     *
     */
    public void setMonitor(Monitor monitor) {
        this.monitor = monitor;
    }

    /** Returns the input Layer found when the network was dissected. */
    public Layer getInputLayer() {
        return inputLayer;
    }

    public void setInputLayer(Layer inputLayer) {
        this.inputLayer = inputLayer;
    }

    /** Returns the output Layer found when the network was dissected. */
    public Layer getOutputLayer() {
        return outputLayer;
    }

    public void setOutputLayer(Layer outputLayer) {
        this.outputLayer = outputLayer;
    }

    /** Returns the number of Layers of the dissected network. */
    public int getNumLayers() {
        return layers.size();
    }

    /**
     * Calculates the index of the input layer (computed once and cached)
     * @return The index, within NeuralNet.layers[], of the input layer
     */
    public int getInputLayerInd() {
        if (inputLayerInd == -1) {
            inputLayerInd = getLayerInd(inputLayer);
        }
        return inputLayerInd;
    }

    /**
     * Calculates the index of the output layer (computed once and cached)
     * @return The index, within NeuralNet.layers[], of the output layer
     */
    public int getOutputLayerInd() {
        if (outputLayerInd == -1) {
            outputLayerInd = getLayerInd(outputLayer);
        }
        return outputLayerInd;
    }

    /** Calculates the index of a layer within the layers array.
     * The comparison is made by identity (==), not by equals().
     * @return the Layer's index starting from 0. Returns -1 if not found
     */
    public int getLayerInd(Layer layer) {
        int layerInd = -1;
        for (int i=0; i < layers.size(); ++i) {
            Layer ly = (Layer)layers.get(i);
            if (ly == layer) {
                layerInd = i;
                break;
            }
        }
        return layerInd;
    }
}
| Java |
/*
* ConnectionHelper.java
*
* Created on March 16, 2006, 5:11 PM
*
* Copyright @2005 by Paolo Marrone and the Joone team
* Licensed under the Lesser General Public License;
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.gnu.org/
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joone.helpers.structure;
import java.util.Vector;
import org.joone.engine.InputPatternListener;
import org.joone.engine.Layer;
import org.joone.engine.OutputSwitchSynapse;
import org.joone.engine.Synapse;
import org.joone.engine.learning.ComparingElement;
import org.joone.io.InputConnector;
import org.joone.io.InputSwitchSynapse;
import org.joone.io.StreamInputSynapse;
import org.joone.io.StreamOutputSynapse;
import org.joone.util.AbstractConverterPlugIn;
import org.joone.util.ConverterPlugIn;
import org.joone.util.LearningSwitch;
import org.joone.util.OutputConverterPlugIn;
/**
* This class permits to easily make connections between elements of a neural network.
* In this class all the needed logic is already implemented.
*
* @author P.Marrone
*/
public class ConnectionHelper {

    // NOTE(review): static utility class — a private constructor would prevent
    // accidental instantiation; omitted here to keep the code unchanged.

    /**
     * Checks if two elements of a neural network can be attached
     * Warning: This method is sensitive to the order of the elements.
     * Example:
     * canConnect(Layer, StreamInputSynapse) returns false
     * canConnect(StreamInputSynapse, Layer) returns true
     * @param source The source element
     * @param target The target element
     * @return true if the connection can be established
     */
    public static boolean canConnect(Object source, Object target) {
        boolean retValue = false;
        if (source == target) {
            // An object cannot connect to itself
            return false;
        }
        // The order of the instanceof tests on 'target' matters: more specific
        // types must be tested first (e.g. InputConnector, which is itself
        // handled as a StreamInputSynapse below).
        if (target instanceof InputConnector) {
            if (source instanceof LearningSwitch) {
                // The switch can feed the connector only as a validation set
                if (((LearningSwitch)source).getValidationSet() == null)
                    if (!((StreamInputSynapse)target).isInputFull())
                        retValue = true;
                return retValue;
            }
            if (!((InputConnector)target).isOutputFull())
                if (source instanceof StreamInputSynapse)
                    retValue = true;
            return retValue;
        }
        if (target instanceof LearningSwitch) {
            // An input synapse attached this way becomes the training set
            if (((LearningSwitch)target).getTrainingSet() == null)
                if (source instanceof StreamInputSynapse)
                    if (!((StreamInputSynapse)source).isInputFull())
                        retValue = true;
            return retValue;
        }
        if (target instanceof InputSwitchSynapse) {
            if (source instanceof StreamInputSynapse)
                if (!((StreamInputSynapse)source).isInputFull())
                    retValue = true;
            return retValue;
        }
        if (target instanceof Layer) {
            if (source instanceof Layer)
                retValue = true;
            // Any input producer can feed a Layer, except an output stream
            if ((source instanceof InputPatternListener) &&
                    !(source instanceof StreamOutputSynapse))
                if (!((InputPatternListener)source).isInputFull())
                    retValue = true;
            return retValue;
        }
        if (target instanceof StreamInputSynapse) {
            if (source instanceof LearningSwitch)
                if (((LearningSwitch)source).getValidationSet() == null)
                    if (!((StreamInputSynapse)target).isInputFull())
                        retValue = true;
            if (source instanceof ConverterPlugIn)
                if (!((ConverterPlugIn)source).isConnected())
                    retValue = true;
            return retValue;
        }
        if (target instanceof StreamOutputSynapse) {
            StreamOutputSynapse sos = (StreamOutputSynapse)target;
            if (!sos.isOutputFull()) {
                if (source instanceof Layer)
                    retValue = true;
                if (source instanceof ComparingElement)
                    retValue = true;
                if (source instanceof OutputConverterPlugIn)
                    if (!((OutputConverterPlugIn)source).isConnected())
                        retValue = true;
                if (source instanceof OutputSwitchSynapse)
                    retValue = true;
            }
            return retValue;
        }
        if (target instanceof ComparingElement) {
            if (source instanceof Layer)
                if (!((ComparingElement)target).isOutputFull())
                    retValue = true;
            // An input synapse attached to a comparator provides the desired data
            if (source instanceof StreamInputSynapse)
                if (((ComparingElement)target).getDesired() == null)
                    if (!((StreamInputSynapse)source).isInputFull())
                        retValue = true;
            return retValue;
        }
        if (target instanceof AbstractConverterPlugIn) {
            if (source instanceof ConverterPlugIn)
                if (!((ConverterPlugIn)source).isConnected())
                    retValue = true;
            return retValue;
        }
        if (target instanceof OutputSwitchSynapse) {
            OutputSwitchSynapse oss = (OutputSwitchSynapse)target;
            if (!oss.isOutputFull()) {
                if (source instanceof Layer)
                    retValue = true;
                if (source instanceof ComparingElement)
                    retValue = true;
                if (source instanceof OutputSwitchSynapse)
                    retValue = true;
            }
        }
        return retValue;
    }

    /**
     * Connects two elements of a neural network
     * Warning: This method is sensitive to the order of the elements.
     * Example:
     * connect(Layer, null, StreamInputSynapse) returns false
     * connect(StreamInputSynapse, null, Layer) returns true
     *
     * @param source The source element
     * @param target The target element
     * @param media If both source and target are Layers, this parameter contains the Synapse to use to connect them, otherwise null
     * @return true if the connection has been established
     */
    public static boolean connect(Object source, Object media, Object target) {
        boolean retValue = false;
        // As in canConnect, the order of the instanceof tests on 'target' matters
        if (target instanceof InputConnector) {
            if (source instanceof LearningSwitch) {
                return ((LearningSwitch)source).addValidationSet((StreamInputSynapse)target);
            }
            if (source instanceof StreamInputSynapse)
                retValue = ((InputConnector)target).setInputSynapse((StreamInputSynapse)source);
            return retValue;
        }
        if (target instanceof LearningSwitch) {
            if (source instanceof StreamInputSynapse)
                retValue = ((LearningSwitch)target).addTrainingSet((StreamInputSynapse)source);
            return retValue;
        }
        if (target instanceof InputSwitchSynapse) {
            if (source instanceof StreamInputSynapse)
                retValue = ((InputSwitchSynapse)target).addInputSynapse((StreamInputSynapse)source);
            return retValue;
        }
        if (target instanceof Layer) {
            retValue = connectToLayer(source, media, (Layer)target);
            return retValue;
        }
        if (target instanceof StreamInputSynapse) {
            if (source instanceof LearningSwitch) {
                retValue = ((LearningSwitch)source).addValidationSet((StreamInputSynapse)target);
            }
            if (source instanceof ConverterPlugIn) {
                retValue = ((StreamInputSynapse)target).addPlugIn((ConverterPlugIn)source);
            }
            return retValue;
        }
        if (target instanceof StreamOutputSynapse) {
            retValue = connectToStreamOutputSynapse(source, (StreamOutputSynapse)target);
            return retValue;
        }
        if (target instanceof ComparingElement) {
            retValue = connectToComparingElement(source, (ComparingElement)target);
            return retValue;
        }
        if (target instanceof AbstractConverterPlugIn) {
            if (source instanceof ConverterPlugIn) {
                retValue = ((AbstractConverterPlugIn)target).addPlugIn((ConverterPlugIn)source);
            }
            return retValue;
        }
        if (target instanceof OutputSwitchSynapse) {
            retValue = connectToOutputSwitchSynapse(source, (OutputSwitchSynapse)target);
        }
        return retValue;
    }

    /** Attaches the given source element (a Layer via the 'media' Synapse,
     * or any InputPatternListener directly) to the target Layer. */
    private static boolean connectToLayer(Object source, Object media, Layer target) {
        boolean retValue = false;
        if (source instanceof Layer) {
            if ((media != null) && (media instanceof Synapse)) {
                // The media Synapse becomes both the source's output
                // and the target's input
                if (((Layer)source).addOutputSynapse((Synapse)media)) {
                    retValue = target.addInputSynapse((Synapse)media);
                }
            }
        }
        if (source instanceof InputPatternListener) {
            retValue = target.addInputSynapse((InputPatternListener)source);
        }
        return retValue;
    }

    /** Attaches the given source element to a StreamOutputSynapse. */
    private static boolean connectToStreamOutputSynapse(Object source, StreamOutputSynapse target) {
        boolean retValue = false;
        if (source instanceof Layer)
            retValue = ((Layer)source).addOutputSynapse(target);
        if (source instanceof ComparingElement)
            retValue = ((ComparingElement)source).addResultSynapse(target);
        if (source instanceof OutputConverterPlugIn)
            retValue = target.addPlugIn((OutputConverterPlugIn)source);
        if (source instanceof OutputSwitchSynapse)
            retValue = ((OutputSwitchSynapse)source).addOutputSynapse(target);
        return retValue;
    }

    /** Attaches the given source element to a ComparingElement: a Layer
     * provides the computed output, an input synapse the desired data. */
    private static boolean connectToComparingElement(Object source, ComparingElement target) {
        boolean retValue = false;
        if (source instanceof Layer)
            retValue = ((Layer)source).addOutputSynapse(target);
        if (source instanceof StreamInputSynapse)
            retValue = target.setDesired((StreamInputSynapse)source);
        return retValue;
    }

    /** Attaches the given source element to an OutputSwitchSynapse. */
    private static boolean connectToOutputSwitchSynapse(Object source, OutputSwitchSynapse target) {
        boolean retValue = false;
        if (source instanceof Layer)
            retValue = ((Layer)source).addOutputSynapse(target);
        if (source instanceof ComparingElement)
            retValue = ((ComparingElement)source).addResultSynapse(target);
        if (source instanceof OutputSwitchSynapse)
            retValue = ((OutputSwitchSynapse)source).addOutputSynapse(target);
        return retValue;
    }

    /**
     * Disconnects two elements.
     * Warning: This method is sensitive to the order of the elements.
     * If this method returns false when called with (obj1, obj2) as parameters,
     * you should recall it using the parameters in reverse order (obj2, obj1)
     * @param source The source element to disconnect
     * @param target The target element to disconnect
     * @return true if the two elements have been disconnected
     */
    public static boolean disconnect(Object source, Object target) {
        boolean retValue = false;
        if (target instanceof InputConnector) {
            if (source instanceof StreamInputSynapse)
                retValue = ((InputConnector)target).setInputSynapse(null);
            return retValue;
        }
        if (target instanceof LearningSwitch) {
            if (source instanceof StreamInputSynapse) {
                // The source may be either the training or the validation set
                if (((LearningSwitch)target).getTrainingSet() == source) {
                    ((LearningSwitch)target).removeTrainingSet();
                    retValue = true;
                }
                if (((LearningSwitch)target).getValidationSet() == source) {
                    ((LearningSwitch)target).removeValidationSet();
                    retValue = true;
                }
            }
            return retValue;
        }
        if (target instanceof InputSwitchSynapse) {
            if (source instanceof StreamInputSynapse)
                retValue = ((InputSwitchSynapse)target).removeInputSynapse(((StreamInputSynapse)source).getName());
            return retValue;
        }
        if (target instanceof Layer) {
            retValue = disconnectFromLayer(source, (Layer)target);
            return retValue;
        }
        if (target instanceof StreamInputSynapse) {
            if (source instanceof ConverterPlugIn) {
                // A null plugin detaches the current one
                retValue = ((StreamInputSynapse)target).addPlugIn(null);
            }
            return retValue;
        }
        if (target instanceof StreamOutputSynapse) {
            retValue = disconnectFromStreamOutputSynapse(source, (StreamOutputSynapse)target);
            return retValue;
        }
        if (target instanceof ComparingElement) {
            retValue = disconnectFromComparingElement(source, (ComparingElement)target);
            return retValue;
        }
        if (target instanceof AbstractConverterPlugIn) {
            if (source instanceof ConverterPlugIn) {
                retValue = ((AbstractConverterPlugIn)target).addPlugIn(null);
            }
            return retValue;
        }
        if (target instanceof OutputSwitchSynapse) {
            retValue = disconnectFromOutputSwitchSynapse(source, (OutputSwitchSynapse)target);
        }
        return retValue;
    }

    /** Detaches the given source element from the target Layer. */
    private static boolean disconnectFromLayer(Object source, Layer target) {
        boolean retValue = false;
        if (source instanceof Layer) {
            // Find the Synapse used to connect the two Layers, then
            // detach it from both sides
            Object media = getConnection((Layer)source, target);
            if ((media != null) && (media instanceof Synapse)) {
                ((Layer)source).removeOutputSynapse((Synapse)media);
                target.removeInputSynapse((Synapse)media);
                retValue = true;
            }
        }
        if (source instanceof InputPatternListener) {
            target.removeInputSynapse((InputPatternListener)source);
            retValue = true;
        }
        return retValue;
    }

    /** Detaches the given source element from a StreamOutputSynapse. */
    private static boolean disconnectFromStreamOutputSynapse(Object source, StreamOutputSynapse target) {
        boolean retValue = false;
        if (source instanceof Layer) {
            ((Layer)source).removeOutputSynapse(target);
            retValue = true;
        }
        if (source instanceof ComparingElement) {
            ((ComparingElement)source).removeResultSynapse(target);
            retValue = true;
        }
        if (source instanceof OutputConverterPlugIn)
            retValue = target.addPlugIn(null);
        if (source instanceof OutputSwitchSynapse)
            retValue = ((OutputSwitchSynapse)source).removeOutputSynapse(target.getName());
        return retValue;
    }

    /** Detaches the given source element from a ComparingElement. */
    private static boolean disconnectFromComparingElement(Object source, ComparingElement target) {
        boolean retValue = false;
        if (source instanceof Layer) {
            ((Layer)source).removeOutputSynapse(target);
            retValue = true;
        }
        if (source instanceof StreamInputSynapse)
            retValue = target.setDesired(null);
        return retValue;
    }

    /** Detaches the given source element from an OutputSwitchSynapse. */
    private static boolean disconnectFromOutputSwitchSynapse(Object source, OutputSwitchSynapse target) {
        boolean retValue = false;
        if (source instanceof Layer) {
            ((Layer)source).removeOutputSynapse(target);
            retValue = true;
        }
        if (source instanceof ComparingElement) {
            ((ComparingElement)source).removeResultSynapse(target);
            retValue = true;
        }
        if (source instanceof OutputSwitchSynapse)
            retValue = ((OutputSwitchSynapse)source).removeOutputSynapse(target.getName());
        return retValue;
    }

    // Searches the synapse that connects two Layers: the object that appears
    // both in the target's input list and in the source's output list
    private static Object getConnection(Layer source, Layer target) {
        Object conn = null;
        Vector inps = target.getAllInputs();
        Vector outs = source.getAllOutputs();
        if ((inps != null) && (inps.size() > 0) && (outs != null) && (outs.size() > 0)) {
            for (int i=0; (conn == null) && (i < inps.size()); ++i) {
                Object cc = inps.elementAt(i);
                if (cc instanceof Synapse) {
                    for (int u=0; (conn == null) && (u < outs.size()); ++u) {
                        // Identity comparison: the very same Synapse instance
                        // must be shared by both Layers
                        if (outs.elementAt(u) == cc) {
                            conn = cc;
                        }
                    }
                }
            }
        }
        return conn;
    }
}
| Java |
/*
* Connection.java
*
* Created on 21 gennaio 2004, 16.39
*/
package org.joone.helpers.structure;
import org.joone.engine.*;
/**
* This class represents a container for a Synapse during
* the process of transforming a neural network to a
* suitable form for the XML serialization.
*
* @see org.joone.helpers.structure.NeuralNetMatrix
* @author P.Marrone
*/
public class Connection {

    /** The wrapped Synapse. */
    Synapse synapse;

    /** 1-based index of the Layer feeding the synapse (0 = not attached). */
    int input;

    /** 1-based index of the Layer fed by the synapse (0 = not attached). */
    int output;

    /** Position of the synapse within the source Layer's output list. */
    int inpIndex;

    /** Position of the synapse within the target Layer's input list. */
    int outIndex;

    /** Builds an empty, unattached Connection. */
    public Connection() {
    }

    /** Getter for property synapse.
     * @return the wrapped Synapse.
     */
    public Synapse getSynapse() {
        return synapse;
    }

    /** Setter for property synapse.
     * @param synapse the Synapse wrapped by this Connection.
     */
    public void setSynapse(Synapse synapse) {
        this.synapse = synapse;
    }

    /** Getter for property input.
     * @return the 1-based index of the source Layer.
     */
    public int getInput() {
        return input;
    }

    /** Setter for property input.
     * @param input the 1-based index of the source Layer.
     */
    public void setInput(int input) {
        this.input = input;
    }

    /** Getter for property output.
     * @return the 1-based index of the target Layer.
     */
    public int getOutput() {
        return output;
    }

    /** Setter for property output.
     * @param output the 1-based index of the target Layer.
     */
    public void setOutput(int output) {
        this.output = output;
    }

    /** Getter for property inpIndex.
     * @return the synapse position within the source Layer's outputs.
     */
    public int getInpIndex() {
        return inpIndex;
    }

    /** Setter for property inpIndex.
     * @param inpIndex the synapse position within the source Layer's outputs.
     */
    public void setInpIndex(int inpIndex) {
        this.inpIndex = inpIndex;
    }

    /** Getter for property outIndex.
     * @return the synapse position within the target Layer's inputs.
     */
    public int getOutIndex() {
        return outIndex;
    }

    /** Setter for property outIndex.
     * @param outIndex the synapse position within the target Layer's inputs.
     */
    public void setOutIndex(int outIndex) {
        this.outIndex = outIndex;
    }
}
| Java |
/*
* NeuralNetFactory.java
*
* Created on August 18, 2005, 2:41 PM
*
*/
package org.joone.helpers.structure;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.StringTokenizer;
import org.joone.engine.Monitor;
import org.joone.engine.learning.TeachingSynapse;
import org.joone.helpers.factory.JooneTools;
import org.joone.io.FileInputSynapse;
import org.joone.io.StreamInputSynapse;
import org.joone.net.NeuralNet;
import org.joone.util.NormalizerPlugIn;
/**
* This utility class creates a new neural network according to some parameters
* @author P.Marrone
*/
public class NeuralNetFactory {
/* Supported problem types — the value of the 'type' property drives which
 * kind of network getNeuralNetwork() builds. */
public static final int CLASSIFICATION = 1;
public static final int ONEOFC_CLASSIF = 2;
// NOTE(review): the constant name contains a typo ("APPROXIMIMATION");
// it is kept as-is for backward compatibility with existing callers.
public static final int APPROXIMIMATION = 3;
public static final int PREDICTION = 4;
public static final int CLUSTERING = 5;

private int type;                    // one of the problem-type constants above
private String inputFileName;        // path of the file containing the input patterns
private String inputCols;            // input column selector (parsed by getNumOfColumns)
private boolean skipFirstInputRow;   // presumably skips the first (header) row — see getNumOfRows
private String desiredFileName;      // path of the desired-output file (presumably used by create_IO)
private String desiredCols;          // desired column selector (parsed by getNumOfColumns)
private boolean skipFirstDesiredRow; // presumably skips the first (header) row of the desired file
// Time Series parameters
private int taps;                    // number of delay taps of the time-delay network
private int predictLength;           // number of future steps reserved for prediction
// SOM output map parameters
private int mapWidth;
private int mapHeight;
/**
* Creates a new instance of NeuralNetFactory
*/
public NeuralNetFactory() {
}
public NeuralNet getNeuralNetwork() {
NeuralNet nnet = null;
switch (getType()) {
case CLASSIFICATION:
case ONEOFC_CLASSIF:
case APPROXIMIMATION: nnet = createFFNN(); break;
case PREDICTION: nnet = createTimeSeries(); break;
case CLUSTERING: nnet = createKohonen(); break;
}
create_IO(nnet);
return nnet;
}
protected NeuralNet createFFNN() {
int inputRows = getNumOfColumns(inputCols);
int outputRows = getNumOfColumns(desiredCols);
int nodes[] = { inputRows, inputRows, outputRows };
int outputType = JooneTools.LINEAR;
switch (getType()) {
case CLASSIFICATION:
outputType = JooneTools.LOGISTIC; break;
case ONEOFC_CLASSIF:
outputType = JooneTools.SOFTMAX; break;
case APPROXIMIMATION:
outputType = JooneTools.LINEAR; break;
}
NeuralNet nnet = JooneTools.create_standard(nodes, outputType);
Monitor mon = nnet.getMonitor();
mon.setTotCicles(5000);
mon.setTrainingPatterns(getNumOfRows(inputFileName, skipFirstInputRow));
mon.setLearning(true);
return nnet;
}
protected NeuralNet createKohonen() {
int inputRows = getNumOfColumns(inputCols);
int outputRows = 10;
int nodes[] = { inputRows, getMapWidth(), getMapHeight() };
int outputType = JooneTools.WTA;
NeuralNet nnet = JooneTools.create_unsupervised(nodes, outputType);
Monitor mon = nnet.getMonitor();
mon.setTotCicles(5000);
mon.setTrainingPatterns(getNumOfRows(inputFileName, skipFirstInputRow));
mon.setLearning(true);
return nnet;
}
protected NeuralNet createTimeSeries() {
int inputRows = getNumOfColumns(inputCols);
int outputRows = getNumOfColumns(desiredCols);
int nodes[] = { inputRows, getTaps(), outputRows };
int outputType = JooneTools.LINEAR;
NeuralNet nnet = JooneTools.create_timeDelay(nodes, getTaps()-1, outputType);
Monitor mon = nnet.getMonitor();
mon.setTotCicles(5000);
mon.setTrainingPatterns(getNumOfRows(inputFileName, skipFirstInputRow) - getPredictionLength());
mon.setLearning(true);
mon.setPreLearning(getTaps());
return nnet;
}
protected void create_IO(NeuralNet nnet) {
StreamInputSynapse inputData = createInput(inputFileName, inputCols, skipFirstInputRow? 2:1);
nnet.getInputLayer().addInputSynapse(inputData);
if (getType() != CLUSTERING) {
StreamInputSynapse targetData = createInput(desiredFileName, desiredCols, skipFirstDesiredRow? 2:1);
if (getPredictionLength() > 0) {
targetData.setFirstRow(inputData.getFirstRow()+getPredictionLength());
}
TeachingSynapse teacher = new TeachingSynapse();
teacher.setName("Teacher");
teacher.setDesired(targetData);
nnet.getOutputLayer().addOutputSynapse(teacher);
nnet.setTeacher(teacher);
}
}
protected StreamInputSynapse createInput(String fileName, String columns, int firstRow) {
FileInputSynapse in = new FileInputSynapse();
in.setInputFile(new File(fileName));
in.setAdvancedColumnSelector(columns);
in.setFirstRow(firstRow);
NormalizerPlugIn norm = new NormalizerPlugIn();
int cols = getNumOfColumns(columns);
norm.setAdvancedSerieSelector("1-"+cols);
in.addPlugIn(norm);
return in;
}
public int getType() {
return type;
}
public void setType(int type) {
this.type = type;
}
public String getInputFileName() {
return inputFileName;
}
public void setInputFileName(String inputFileName) {
this.inputFileName = inputFileName;
}
public String getInputCols() {
return inputCols;
}
public void setInputCols(String inputCols) {
this.inputCols = inputCols;
}
public boolean isSkipFirstInputRow() {
return skipFirstInputRow;
}
public void setSkipFirstInputRow(boolean skipFirstInputRow) {
this.skipFirstInputRow = skipFirstInputRow;
}
public String getDesiredFileName() {
return desiredFileName;
}
public void setDesiredFileName(String desiredFileName) {
this.desiredFileName = desiredFileName;
}
public String getDesiredCols() {
return desiredCols;
}
public void setDesiredCols(String desiredCols) {
this.desiredCols = desiredCols;
}
public boolean isSkipFirstDesiredRow() {
return skipFirstDesiredRow;
}
public void setSkipFirstDesiredRow(boolean skipFirstDesiredRow) {
this.skipFirstDesiredRow = skipFirstDesiredRow;
}
protected int getNumOfColumns(String columns) {
int c = 0;
StringTokenizer tokens = new StringTokenizer(columns, ",");
int n = tokens.countTokens();
for (int i=0; i < n; ++i) {
String t = tokens.nextToken();
if (t.indexOf('-') == -1)
++c;
else {
StringTokenizer tt = new StringTokenizer(t, "-");
int low = Integer.valueOf(tt.nextToken()).intValue();
int hig = Integer.valueOf(tt.nextToken()).intValue();
c += hig - low + 1;
}
}
return c;
}
protected int getNumOfRows(String fileName, boolean skipFirstLine) {
int c = 0;
BufferedReader file = null;
try {
// get the content of text file into text variable
file = new BufferedReader(new FileReader(fileName));
if (skipFirstLine) file.readLine();
while (file.readLine() != null)
++c;
} catch (IOException ioe) { ioe.printStackTrace(); } finally {
if (file != null)
try {
file.close();
} catch (IOException doNothing) { /* Do Nothing */ }
}
return c;
}
public int getTaps() {
return taps;
}
public void setTaps(int taps) {
this.taps = taps;
}
public int getPredictionLength() {
return predictLength;
}
public void setPredictionLength(int predictLength) {
this.predictLength = predictLength;
}
public int getMapWidth() {
return mapWidth;
}
public void setMapWidth(int mapWidth) {
this.mapWidth = mapWidth;
}
public int getMapHeight() {
return mapHeight;
}
public void setMapHeight(int mapHeight) {
this.mapHeight = mapHeight;
}
}
| Java |
package org.joone.script;
import bsh.*;
import java.io.*;
import org.joone.log.*;
public class JooneScript
{
/**
* Logger
* */
private static final ILogger log = LoggerFactory.getLogger (JooneScript.class);
private Interpreter jShell;
public JooneScript(){
// constructor
}
private Interpreter getShell() {
if (jShell == null) {
jShell = new Interpreter();
// set up all required stuff to run Joone
try {
jShell.eval("import org.joone.engine.*;");
jShell.eval("import org.joone.engine.learning.*;");
jShell.eval("import org.joone.edit.*;");
jShell.eval("import org.joone.util.*;");
jShell.eval("import org.joone.net.*;");
jShell.eval("import org.joone.io.*;");
jShell.eval("import org.joone.script.*;");
} catch (EvalError err)
{
// TO DO :
// Evaluate if an simple System.out is not
// more approprate here ?
log.error ( "EvalError thrown. Message is " + err.getMessage(),
err);
return null;
}
}
return jShell;
}
public static void main(String [] args){
if (args.length == 0) {
System.out.println("Usage: java org.joone.script.JooneScript <script_file>");
}
else {
JooneScript jScript = new JooneScript();
jScript.source(args[0]);
}
}
//
// TO DO :
// Check if it is not better to leave a simple
// System.out or
// System.err
//
public void source(String fileName){
try
{
getShell().source(fileName);
} catch (FileNotFoundException fnfe)
{
log.error ( "FileNotFoundException thrown. Message is : " + fnfe.getMessage(),
fnfe);
} catch (IOException ioe)
{
log.error ( "IOException thrown. Message is : " + ioe.getMessage(),
ioe);
} catch (EvalError err)
{
log.warn ( "EvalError thrown. Message is : " + err.getMessage(),
err );
System.out.println("Invalid BeanShell code!");
}
}
public void eval(String script){
try {
getShell().eval(script);
} catch (EvalError err)
{
log.warn ( "Error while evaluating. Message is : " + err.getMessage(),
err );
System.out.println("Invalid BeanShell code!");
err.printStackTrace();
}
}
public void set(String name, Object jObject){
try {
getShell().set(name, jObject);
}
catch(EvalError err){
err.printStackTrace();
}
}
} | Java |
/*
* JooneMacro.java
*
* Created on 25 august 2002, 17.52
*/
package org.joone.script;
/**
 * A runnable BeanShell macro: a named script text which may be bound to a
 * neural network event. Event-bound macros can be neither deleted nor renamed.
 * @author P.Marrone
 */
public class JooneMacro implements java.io.Serializable, java.lang.Cloneable {

    static final long serialVersionUID = 6361561451436197429L;

    private String text;     // the script source of the macro
    private boolean event;   // true when bound to a neural net event
    private String name;     // the macro identifier

    /** Creates an empty macro. */
    public JooneMacro() {
    }

    /** @return the macro's script text. */
    public String toString() {
        return getText();
    }

    /** @return the script text of this macro. */
    public java.lang.String getText() {
        return text;
    }

    /** @param text the new script text of this macro. */
    public void setText(java.lang.String text) {
        this.text = text;
    }

    /**
     * Tells whether this macro is bound to a neural net's event.
     * An event macro can't be deleted, nor renamed.
     * @return true when the macro is event-bound.
     */
    public boolean isEventMacro() {
        return event;
    }

    /**
     * Binds/unbinds this macro to a neural net's event.
     * An event macro can't be deleted, nor renamed.
     * @param newValue true to mark the macro as event-bound.
     */
    public void setEventMacro(boolean newValue) {
        this.event = newValue;
    }

    /** @return the name of this macro. */
    public java.lang.String getName() {
        return name;
    }

    /** @param name the new name of this macro. */
    public void setName(java.lang.String name) {
        this.name = name;
    }

    /**
     * @return a copy of this macro carrying the same name, text and event flag
     *         (Strings are immutable, so field sharing is safe).
     */
    public synchronized Object clone() {
        JooneMacro copy = new JooneMacro();
        copy.text = this.text;
        copy.name = this.name;
        copy.event = this.event;
        return copy;
    }
}
| Java |
package org.joone.script;
import groovy.lang.*;
import org.codehaus.groovy.control.*;
import java.io.*;
import org.joone.log.*;
/**
 * Thin wrapper around a {@link groovy.lang.GroovyShell} used to run Joone
 * macros written in Groovy.
 */
public class JooneGroovyScript {

    /**
     * Logger
     * */
    private static final ILogger log = LoggerFactory.getLogger(JooneGroovyScript.class);

    //
    // Groovy does not evaluate import statement as a single expression. As a workaround,
    // we need to append a list of import statements in front of every expression.
    //
    // For example, the following code will throw an exception
    //
    //   groovyShell.evaluate("import groovy.swing.SwingBuilder");
    //   groovyShell.evaluate("swing = new SwingBuilder()");
    //
    // while the following code works fine
    //
    //   groovyShell.evaluate("import groovy.swing.SwingBuilder\nswing = new SwingBuilder()");
    //
    // In the future, groovy may support groovyShell.setImport("groovy.swing.SwingBuilder").
    // During that time, we should drop out the import prefix string workaround.
    //
    private static final String GROOVY_IMPORT_PREFIX =
    "import org.joone.engine.*\n"+
    "import org.joone.engine.learning.*\n"+
    "import org.joone.edit.*\n"+
    "import org.joone.util.*\n"+
    "import org.joone.net.*\n"+
    "import org.joone.io.*\n"+
    "import org.joone.script.*\n";

    /** Lazily-created shell shared by all calls. */
    private GroovyShell groovyShell;

    public JooneGroovyScript(){
        // constructor
    }

    /** @return the shared shell, building it on first use. */
    private GroovyShell getShell() {
        if (groovyShell == null) {
            groovyShell = new GroovyShell();
        }
        return groovyShell;
    }

    /** Command-line entry point: runs the script file given as first argument. */
    public static void main(String [] args){
        if (args.length == 0) {
            System.out.println("Usage: java org.joone.script.JooneGroovyScript <script_file>");
            return;
        }
        new JooneGroovyScript().source(args[0]);
    }

    /** Runs a Groovy script file; errors are logged, never rethrown. */
    public void source(String fileName){
        try {
            getShell().run(new File(fileName), new String[0]) ;
        } catch (CompilationFailedException cfe) {
            log.error( "CompilationFailedException thrown. Message is : " + cfe.getMessage(),
            cfe);
        } catch (IOException ioe) {
            log.error( "IOException thrown. Message is : " + ioe.getMessage(),
            ioe);
        }
    }

    /** Evaluates a Groovy expression, prefixed with the Joone imports (see above). */
    public void eval(String script){
        try {
            getShell().evaluate(GROOVY_IMPORT_PREFIX+script);
        } catch (CompilationFailedException cfe) {
            log.error( "CompilationFailedException thrown. Message is : " + cfe.getMessage(),
            cfe);
        } catch (IOException ioe) {
            log.error( "IOException thrown. Message is : " + ioe.getMessage(),
            ioe);
        }
    }

    /** Exports a Java object into the shell under the given variable name. */
    public void set(String name, Object jObject){
        getShell().setVariable(name, jObject);
    }
}
| Java |
/*
* MacroManager.java
*
* Created on 25 agosto 2002, 20.20
*/
package org.joone.script;
import java.util.Hashtable;
/**
 * This class manages the set of macros of a Neural Network, keyed by name.
 * Event macros (created by {@link #initMacro}) cannot be removed or renamed.
 * @author P.Marrone
 */
public class MacroManager implements java.io.Serializable {

    static final long serialVersionUID = 2855350620727616763L;

    // name -> JooneMacro container
    private Hashtable macros;

    /** Creates a new MacroManager pre-populated with the event macros. */
    public MacroManager() {
        macros = initMacro(new Hashtable());
    }

    /**
     * Fills the given table with the predefined, empty event macros.
     * Subclasses may override this to install additional event macros.
     * @param mm the table to fill
     * @return the same table, populated
     */
    protected Hashtable initMacro(Hashtable mm) {
        String[] eventNames = { "cycleTerminated", "errorChanged", "netStarted", "netStopped" };
        for (int i = 0; i < eventNames.length; ++i) {
            JooneMacro eventMacro = new JooneMacro();
            eventMacro.setName(eventNames[i]);
            eventMacro.setText("");
            eventMacro.setEventMacro(true);
            mm.put(eventNames[i], eventMacro);
        }
        return mm;
    }

    /**
     * Adds a new macro, or updates the text of an existing one in place.
     * @param name the macro name
     * @param text the macro script text
     */
    public synchronized void addMacro(String name, String text) {
        /* All the macros added by the user can't be event macro.
         * To insert an event macro, override the initMacro method. */
        JooneMacro existing = (JooneMacro) macros.get(name);
        if (existing != null) {
            // Update in place; the event flag is left untouched.
            existing.setName(name);
            existing.setText(text);
        } else {
            JooneMacro created = new JooneMacro();
            created.setName(name);
            created.setText(text);
            created.setEventMacro(false);
            macros.put(name, created);
        }
    }

    /**
     * @param name the macro name
     * @return the macro's text, or null when no macro has that name
     */
    public String getMacro(String name) {
        JooneMacro found = (JooneMacro) macros.get(name);
        return (found == null) ? null : found.getText();
    }

    /**
     * @param name the macro name
     * @return true when the named macro exists and is an event macro
     */
    public boolean isEventMacro(String name) {
        JooneMacro found = (JooneMacro) macros.get(name);
        return (found != null) && found.isEventMacro();
    }

    /** Removes a macro.
     * @return false if the macro doesn't exist or it's a system macro. Otherwise true
     */
    public boolean removeMacro(String name) {
        JooneMacro found = (JooneMacro) macros.get(name);
        if (found == null || found.isEventMacro()) {
            return false;
        }
        macros.remove(name);
        return true;
    }

    /** Renames a macro.
     * @return false if the macro doesn't exist or it's a system macro. Otherwise true
     */
    public boolean renameMacro(String oldName, String newName) {
        JooneMacro found = (JooneMacro) macros.get(oldName);
        if (found == null || found.isEventMacro()) {
            return false;
        }
        macros.remove(oldName);
        this.addMacro(newName, found.getText());
        return true;
    }

    /** Getter for property macros.
     * @return a clone of the internal Hashtable
     */
    public Hashtable getMacros() {
        return (Hashtable) macros.clone();
    }
}
| Java |
/*
* NeuralValidationEvent.java
*
* Created on 28 aprile 2002, 16.07
*/
package org.joone.net;
/**
 * Event fired during the validation of a neural network; the event source is
 * the validated {@link NeuralNet} instance.
 * @author pmarrone
 */
public class NeuralValidationEvent extends java.util.EventObject {
/** Creates a new instance of NeuralValidationEvent
 * @param event the validated neural network, exposed via getSource() */
public NeuralValidationEvent(NeuralNet event) {
super(event);
}
}
| Java |
/*
* NeuralNet.java
*
* Created on 17 april 2001, 12.08
*/
package org.joone.net;
import java.util.*;
import java.io.*;
import org.joone.helpers.structure.ConnectionHelper;
import org.joone.helpers.structure.NeuralNetMatrix;
import org.joone.log.*;
import org.joone.engine.*;
import org.joone.engine.learning.*;
import org.joone.io.*;
import org.joone.script.MacroInterface;
import org.joone.exception.JooneRuntimeException;
/** This object represents a container of a neural network,
* giving to the developer the possibility to manage a
* neural network as a whole.
* Thanks to it, a neural network can be saved and restored
* using an unique writeObject and readObject command, without
* be worried about its internal composition.
* Not only this, because using a NeuralNet object, we can
* also easily transport a neural network on remote machines
* and runnit there, writing only few and generalized java code.
*
*/
public class NeuralNet implements NeuralLayer, NeuralNetListener, Serializable {
private static final int MAJOR_RELEASE = 2;
private static final int MINOR_RELEASE = 0;
private static final int BUILD = 0;
private static final String SUFFIX = "RC1";
private static final ILogger log = LoggerFactory.getLogger( NeuralNet.class );
private Vector layers;
private String NetName;
private Monitor mon;
private Layer inputLayer;
private Layer outputLayer;
private ComparingElement teacher;
private static final long serialVersionUID = 8351124226081783962L;
public static final int INPUT_LAYER = 0;
public static final int HIDDEN_LAYER = 1;
public static final int OUTPUT_LAYER = 2;
protected Vector listeners;
private MacroInterface macroPlugin;
private boolean scriptingEnabled = false;
private NeuralNetAttributes descriptor = null;
private Hashtable params;
private Layer[] orderedLayers = null;
private transient Layer[] intOrderedLayers = null;
/** Creates a new, empty NeuralNet with a fresh Monitor. */
public NeuralNet() {
layers = new Vector();
// Creates a monitor with a back-reference to this neural-net
mon = new Monitor();
}
/**
 * Starts all the Layers' threads, in order
 * to prepare the launch of the neural network
 * in multi-thread mode.
 * DO NOT use for single-thread mode.
 * @throws JooneRuntimeException when a layer fails to start, or when the
 *         net is already running
 */
public void start() {
// Make sure no stale threads survive from a previous run (no event raised).
this.terminate(false);
if (readyToStart()) {
getMonitor().addNeuralNetListener(this, false);
Layer ly = null;
int i;
try {
for (i=0; i < layers.size(); ++i) {
ly = (Layer)layers.elementAt(i);
ly.start();
}
} catch (RuntimeException rte) {
// If some layer can't start, resets the state of the NN
this.stop();
String msg;
log.error(msg = "RuntimeException was thrown while starting the neural network. Message is:" + rte.getMessage(), rte);
throw new JooneRuntimeException(msg, rte);
}
} else {
String msg;
log.warn(msg = "NeuralNet: The neural net is already running");
throw new JooneRuntimeException(msg);
}
}
/**
 * Check if the neural net is ready to be started again, polling up to
 * 100 times with a 10 ms pause (i.e. waiting at most ~1 second).
 * @return true only if there isn't any running layer
 */
private boolean readyToStart() {
for (int i=0; i < 100; ++i) {
if (!this.isRunning())
return true;
else
try {
// waits for 10 millisecs
Thread.sleep(10);
} catch (InterruptedException e) {
// Restore the interrupt status so callers up the stack can see it;
// swallowing it silently would hide the interruption request.
Thread.currentThread().interrupt();
return false;
}
}
return false;
}
/** Waits for the termination of all running threads of the net:
 * the single internal thread in single-thread mode, otherwise every
 * layer's thread plus the teacher's internal layer.
 * @see Thread#join()
 */
public void join() {
if (getMonitor().isSingleThreadMode()) {
if (getSingleThread() != null) {
try {
getSingleThread().join();
} catch (InterruptedException doNothing) { }
}
} else {
for (int i=0; i < layers.size(); ++i) {
Layer ly = (Layer)layers.elementAt(i);
ly.join();
}
if (teacher != null)
teacher.getTheLinearLayer().join();
}
}
/** Terminates the execution of this NeuralNet
 * independently from the threading mode activated:
 * stops the fast-run loop in single-thread mode, otherwise asks the
 * Monitor to stop the threaded execution.
 */
public void stop() {
if (getMonitor().isSingleThreadMode()) {
this.stopFastRun();
} else {
getMonitor().Stop();
}
}
/**
 * Terminates the execution of all the threads
 * of the neural network.
 * Used to force a neural network independently
 * from its internal state.
 * Use ONLY in multi-thread mode, and ONLY when
 * the call to the stop() method doesn't give
 * the expected results.
 * @param notify if true, the netStopped event is raised
 */
public void terminate(boolean notify) {
if (isRunning()) {
Layer ly = null;
int i;
for (i=0; i < layers.size(); ++i) {
ly = (Layer)layers.elementAt(i);
ly.stop();
}
if (teacher != null) {
teacher.getTheLinearLayer().stop();
if (teacher instanceof AbstractTeacherSynapse) {
((AbstractTeacherSynapse)teacher).netStoppedError();
}
}
// The event is raised asynchronously on a dedicated notifier thread.
if ((getMonitor() != null) && (notify))
new NetStoppedEventNotifier(getMonitor()).start();
}
}
/**
 * Terminates the execution of all the threads
 * of the neural network, raising the netStopped event.
 * @see #terminate(boolean)
 */
public void terminate() {
this.terminate(true);
}
/** Counts the input synapses acting as step counter, across all the
 * layers and the teacher's desired-data synapse. Used by check() to
 * detect the illegal presence of more than one step counter. */
protected int getNumOfstepCounters() {
int count = 0;
for (int i=0; i < layers.size(); ++i) {
Layer ly = (Layer)layers.elementAt(i);
if (ly.hasStepCounter())
++count;
}
if (teacher != null) {
if ((teacher.getDesired() != null) && (teacher.getDesired().isStepCounter()))
++count;
}
return count;
}
/** Returns the input layer of the network.
 * If the method setInputLayer has never been invoked, the
 * input layer is searched for (see findInputLayer), cached and returned.
 * @return the input layer, or null when none can be found
 */
public Layer getInputLayer() {
if (inputLayer != null)
return inputLayer;
setInputLayer(findInputLayer());
return inputLayer;
}
/** Searches for the input layer following the rules written in
 * Layer.isInputLayer, ignoring any previous call made to setInputLayer.
 * @return the first matching layer, or null when none exists
 */
public Layer findInputLayer() {
    if (layers == null) {
        return null;
    }
    for (int idx = 0; idx < layers.size(); ++idx) {
        Layer candidate = (Layer) layers.elementAt(idx);
        if (candidate.isInputLayer()) {
            return candidate;
        }
    }
    return null;
}
/** Returns the output layer of the network.
 * If the method setOutputLayer has never been invoked, the
 * output layer is searched for (see findOutputLayer), cached and returned.
 * @return the output layer, or null when none can be found
 */
public Layer getOutputLayer() {
if (outputLayer != null)
return outputLayer;
setOutputLayer(findOutputLayer());
return outputLayer;
}
/** Searches for the output layer following the rules written in
 * Layer.isOutputLayer, ignoring any previous call made to setOutputLayer.
 * @return the first matching layer, or null when none exists
 */
public Layer findOutputLayer() {
    if (layers == null) {
        return null;
    }
    for (int idx = 0; idx < layers.size(); ++idx) {
        Layer candidate = (Layer) layers.elementAt(idx);
        if (candidate.isOutputLayer()) {
            return candidate;
        }
    }
    return null;
}
/** @return the row count of the input layer, or 0 when there is no input layer. */
public int getRows() {
    Layer input = this.getInputLayer();
    return (input == null) ? 0 : input.getRows();
}
/** Sets the row count of the input layer, when one exists. */
public void setRows(int p1) {
    Layer input = this.getInputLayer();
    if (input != null) {
        input.setRows(p1);
    }
}
/** Adds random noise of the given amplitude to every layer of the net. */
public void addNoise(double p1) {
    for (Enumeration en = layers.elements(); en.hasMoreElements(); ) {
        ((Layer) en.nextElement()).addNoise(p1);
    }
}
/** Randomizes every layer of the net with the given amplitude. */
public void randomize(double amplitude) {
    for (Enumeration en = layers.elements(); en.hasMoreElements(); ) {
        ((Layer) en.nextElement()).randomize(amplitude);
    }
}
/** @return the bias matrix of the input layer, or null when there is no input layer. */
public Matrix getBias() {
    Layer input = this.getInputLayer();
    return (input == null) ? null : input.getBias();
}
/** @return the output synapses of the output layer, or null when there is no output layer. */
public Vector getAllOutputs() {
    Layer out = this.getOutputLayer();
    return (out == null) ? null : out.getAllOutputs();
}
/** @return the name of this neural network. */
public String getLayerName() {
return NetName;
}
/** Detaches the given output synapse from the output layer, when one exists. */
public void removeOutputSynapse(OutputPatternListener p1) {
    Layer out = this.getOutputLayer();
    if (out != null) {
        out.removeOutputSynapse(p1);
    }
}
/** Replaces the input synapses of the input layer, when one exists. */
public void setAllInputs(Vector p1) {
    Layer input = this.getInputLayer();
    if (input != null) {
        input.setAllInputs(p1);
    }
}
/** Removes every output synapse and clears the teacher (the net becomes unsupervised). */
public void removeAllOutputs() {
    Layer out = this.getOutputLayer();
    if (out != null) {
        out.removeAllOutputs();
    }
    setTeacher(null);
}
/** @return the input synapses of the input layer, or null when there is no input layer. */
public Vector getAllInputs() {
    Layer input = this.getInputLayer();
    return (input == null) ? null : input.getAllInputs();
}
/** Attaches an output synapse to the output layer.
 * @return false when there is no output layer or the layer refuses it. */
public boolean addOutputSynapse(OutputPatternListener p1) {
    Layer out = this.getOutputLayer();
    return (out != null) && out.addOutputSynapse(p1);
}
/** Sets the bias matrix of the input layer, when one exists. */
public void setBias(Matrix p1) {
    Layer input = this.getInputLayer();
    if (input != null) {
        input.setBias(p1);
    }
}
/** Detaches the given input synapse from the input layer, when one exists. */
public void removeInputSynapse(InputPatternListener p1) {
    Layer input = this.getInputLayer();
    if (input != null) {
        input.removeInputSynapse(p1);
    }
}
/** @param p1 the new name of this neural network. */
public void setLayerName(String p1) {
NetName = p1;
}
/** Attaches an input synapse to the input layer.
 * @return false when there is no input layer or the layer refuses it. */
public boolean addInputSynapse(InputPatternListener p1) {
    Layer input = this.getInputLayer();
    return (input != null) && input.addInputSynapse(p1);
}
/** Replaces the output synapses of the output layer, when one exists. */
public void setAllOutputs(Vector p1) {
    Layer out = this.getOutputLayer();
    if (out != null) {
        out.setAllOutputs(p1);
    }
}
/** Installs a new Monitor, propagating it to every layer, to the
 * scripting machinery and to the teacher (when present).
 * @param p1 the new Monitor */
public void setMonitor(Monitor p1) {
mon = p1;
for (int i=0; i < layers.size(); ++i) {
Layer ly = (Layer)layers.elementAt(i);
ly.setMonitor(mon);
}
// Re-apply the scripting state so listeners follow the new monitor.
setScriptingEnabled(isScriptingEnabled());
if (getTeacher() != null)
getTeacher().setMonitor(p1);
}
/** @return the Monitor that controls this neural network. */
public Monitor getMonitor() {
return mon;
}
/** Removes every input synapse from the input layer, when one exists. */
public void removeAllInputs() {
    Layer input = this.getInputLayer();
    if (input != null) {
        input.removeAllInputs();
    }
}
/** Not implemented for NeuralNet: always returns null. */
public NeuralLayer copyInto(NeuralLayer p1) {
return null;
}
/** Adds a layer to the net as a hidden layer.
 * @see #addLayer(Layer, int) */
public void addLayer(Layer layer) {
this.addLayer(layer, HIDDEN_LAYER);
}
/** Adds a layer to the net (if not already contained) and records it as the
 * input or output layer according to the given tier constant.
 * @param layer the layer to add
 * @param tier  INPUT_LAYER, HIDDEN_LAYER or OUTPUT_LAYER */
public void addLayer(Layer layer, int tier) {
    if (!layers.contains(layer)) {
        layer.setMonitor(mon);
        layers.addElement(layer);
    }
    switch (tier) {
        case INPUT_LAYER:  setInputLayer(layer);  break;
        case OUTPUT_LAYER: setOutputLayer(layer); break;
        default: /* hidden layers need no extra bookkeeping */ break;
    }
}
/** Removes a layer from the net, disconnecting all its synapses and
 * clearing the cached input/output layer reference when it was one of them.
 * @param layer the layer to remove; ignored when not contained */
public void removeLayer(Layer layer) {
if (layers.contains(layer)) {
layers.removeElement(layer);
// Remove the synapses
NeuralNetMatrix matrix = new NeuralNetMatrix(this);
Synapse[][] conn = matrix.getConnectionMatrix();
removeSynapses(matrix.getLayerInd(layer), conn);
if (layer == inputLayer)
setInputLayer(null);
else
if (layer == outputLayer)
setOutputLayer(null);
}
}
/** Disconnects every synapse entering or leaving the layer at position
 * {@code ind} of the connection matrix.
 * @param ind  the layer index in the matrix; negative values are ignored
 * @param conn the connection matrix (conn[from][to]) of the whole net */
private void removeSynapses(int ind, Synapse[][] conn) {
if (ind >= 0) {
// Removes input synapses
for (int i=0; i < conn.length; ++i) {
if (conn[i][ind] != null) {
ConnectionHelper.disconnect(layers.get(i), layers.get(ind));
}
}
// Removes output synapses
for (int i=0; i < conn[0].length; ++i) {
if (conn[ind][i] != null) {
ConnectionHelper.disconnect(layers.get(ind), layers.get(i));
}
}
}
}
/**
 * Resets all the StreamInputSynapses of the net (including the
 * teacher's desired-data synapse), so the input streams restart
 * from the beginning.
 */
public void resetInput() {
Layer ly = null;
int i;
for (i=0; i < layers.size(); ++i) {
ly = (Layer)layers.elementAt(i);
Vector inputs = ly.getAllInputs();
if (inputs != null)
for (int x=0; x < inputs.size(); ++x) {
InputPatternListener syn = (InputPatternListener)inputs.elementAt(x);
if (syn instanceof StreamInputSynapse)
((StreamInputSynapse)syn).resetInput();
//                    if (syn instanceof InputSwitchSynapse)
//                        ((InputSwitchSynapse)syn).resetInput();
}
}
if (getTeacher() != null)
getTeacher().resetInput();
}
/** Registers a listener on this net and on its Monitor; duplicates are ignored. */
public void addNeuralNetListener(NeuralNetListener listener) {
    Vector all = getListeners();
    if (!all.contains(listener)) {
        all.addElement(listener);
        if (getMonitor() != null) {
            getMonitor().addNeuralNetListener(listener);
        }
    }
}
/** @return the listener list, lazily created on first access. */
public Vector getListeners() {
    Vector result = listeners;
    if (result == null) {
        result = new Vector();
        listeners = result;
    }
    return result;
}
/** Unregisters a listener from this net and from its Monitor. */
public void removeNeuralNetListener(NeuralNetListener listener) {
    getListeners().removeElement(listener);
    Monitor monitor = getMonitor();
    if (monitor != null) {
        monitor.removeNeuralNetListener(listener);
    }
}
/** Custom deserialization: restores the transient state that
 * defaultReadObject cannot recover.
 * @param in the stream to read from
 * @throws IOException on stream errors
 * @throws ClassNotFoundException when a serialized class is missing */
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
/* Since the listeners' vector in the Monitor object is transient,
 * we must fill it from the NeuralNet.listeners vector
 */
Vector lst = getListeners();
if (getMonitor() != null)
for (int i=0; i < lst.size(); ++i) {
getMonitor().addNeuralNetListener((NeuralNetListener)lst.elementAt(i));
}
// Restores the exported variables jNet and jMon
setMacroPlugin(macroPlugin);
}
/**
 * Method to get the version.
 * @return A string containing the version of joone's engine in the format xx.yy.zz
 */
public static String getVersion() {
    StringBuffer version = new StringBuffer();
    version.append(MAJOR_RELEASE).append('.');
    version.append(MINOR_RELEASE).append('.');
    version.append(BUILD).append(SUFFIX);
    return version.toString();
}
/**
 * Method to get the numeric version.
 * @return an integer encoding the engine version as MAJOR*1000000 + MINOR*1000 + BUILD
 */
public static Integer getNumericVersion() {
    int encoded = MAJOR_RELEASE * 1000000;
    encoded += MINOR_RELEASE * 1000;
    encoded += BUILD;
    return new Integer(encoded);
}
/** Looks up a layer by name (case-insensitive).
 * @return the matching layer, or null when none is found */
public Layer getLayer(String layerName) {
    for (int idx = 0; idx < layers.size(); ++idx) {
        Layer current = (Layer) layers.elementAt(idx);
        if (current.getLayerName().equalsIgnoreCase(layerName)) {
            return current;
        }
    }
    return null;
}
/** @return the internal (live, not copied) list of layers. */
public Vector getLayers() {
return this.layers;
}
/** Permits to initialize a neural network with a Vector
 * containing layers. The Vector is stored as-is (not copied).
 * @param newlayers the new layer list
 */
public void setLayers(Vector newlayers) {
this.layers = newlayers;
}
/** Permits to initialize a neural network with an
 * ArrayList containing layers. Added for Spring.
 * @param list the layers; copied into an internal Vector
 */
public void setLayersList(ArrayList list) {
this.setLayers(new Vector(list));
}
/** Sets the Teacher for this NeuralNet object, keeping the Monitor's
 * supervised flag in sync.
 * @param ts the new teacher; may be null to make this network unsupervised
 */
public void setTeacher(ComparingElement ts) {
    if (getMonitor() != null) {
        getMonitor().setSupervised(ts != null);
    }
    teacher = ts;
}
/** @return the teacher of this net, or null when unsupervised. */
public ComparingElement getTeacher() {
return teacher;
}
/** Intentionally a no-op (kept for bean compatibility); listeners must be
 * registered one by one via addNeuralNetListener. */
public void setListeners(Vector listeners) {
//addNeuralNetListener(listeners);
}
/** Records which layer acts as the input layer of the net. */
public void setInputLayer(Layer newLayer) {
inputLayer = newLayer;
}
/** Records which layer acts as the output layer of the net. */
public void setOutputLayer(Layer newLayer) {
outputLayer = newLayer;
}
/** @return the net's attribute descriptor, lazily created (named after this net). */
public NeuralNetAttributes getDescriptor() {
    if (descriptor == null) {
        NeuralNetAttributes attrs = new NeuralNetAttributes();
        attrs.setNeuralNetName(getLayerName());
        descriptor = attrs;
    }
    return descriptor;
}
/** @param newdescriptor the new attribute descriptor of this net. */
public void setDescriptor(NeuralNetAttributes newdescriptor) {
this.descriptor = newdescriptor;
}
/**
 * Returns true if the network is running: in single-thread mode it
 * checks the single internal thread, otherwise it checks every layer's
 * thread plus the teacher's internal layer.
 * @return boolean
 */
public boolean isRunning() {
if (getMonitor().isSingleThreadMode()) {
if ((getSingleThread() != null) && getSingleThread().isAlive()) {
return true;
}
} else {
Layer ly = null;
for (int i=0; i < layers.size(); ++i) {
ly = (Layer)layers.elementAt(i);
if (ly.isRunning()) {
return true;
}
}
if ((teacher != null) && (teacher.getTheLinearLayer().isRunning())) {
return true;
}
}
return false;
}
/**
 * Creates a copy of the contained neural network by serializing it into an
 * in-memory buffer and reading it back.
 *
 * @return the cloned NeuralNet, or null when the (de)serialization fails
 */
public NeuralNet cloneNet() {
NeuralNet newnet = null;
try {
ByteArrayOutputStream f = new ByteArrayOutputStream();
ObjectOutput s = new ObjectOutputStream(f);
s.writeObject(this);
s.flush();
s.close();
ByteArrayInputStream fi = new ByteArrayInputStream(f.toByteArray());
ObjectInput oi = new ObjectInputStream(fi);
newnet = (NeuralNet)oi.readObject();
oi.close();
} catch (Exception e) {
// Any Exception can land here (e.g. ClassNotFoundException), so the
// message must not claim it is an IOException, as it did before.
log.warn( "Exception while cloning the Net. Message is : " + e.getMessage(),
e );
}
return newnet;
}
/** Drops every registered listener, both here and on the Monitor. */
public void removeAllListeners() {
    listeners = null;
    Monitor monitor = getMonitor();
    if (monitor != null) {
        monitor.removeAllListeners();
    }
}
/** Enable/disable the scripting engine for the net.
 * If disabled, all the event-driven macros will not be run.
 * Enabling requires a macro plugin to be set first (see setMacroPlugin).
 * @param enabled true to enable the scripting, otherwise false
 */
public void setScriptingEnabled(boolean enabled) {
scriptingEnabled = enabled;
if (enabled) {
NeuralNetListener listener = getMacroPlugin();
if (listener == null)
log.info("MacroPlugin not set: Impossible to enable the scripting");
else
this.addNeuralNetListener(getMacroPlugin());
} else {
if (macroPlugin != null)
this.removeNeuralNetListener(macroPlugin);
}
}
/** Gets if the scripting engine is enabled.
 * @return true if enabled
 */
public boolean isScriptingEnabled() {
return scriptingEnabled;
}
/** Gets the macro plugin that executes the event-driven scripts.
 * @return the current macro plugin, or null when none is set.
 */
public MacroInterface getMacroPlugin() {
return macroPlugin;
}
/** Sets the macro plugin, swapping the listener registration when
 * scripting is enabled and exporting the jNet/jMon variables into it.
 * @param macroPlugin the new macro plugin; may be null.
 */
public void setMacroPlugin(MacroInterface macroPlugin) {
if(macroPlugin != null) {
// Unregister the old listener
this.removeNeuralNetListener(this.macroPlugin);
// Should we register the new listener?
if(scriptingEnabled) {
this.addNeuralNetListener(macroPlugin);
}
}
this.macroPlugin = macroPlugin;
if (macroPlugin != null) {
// Export this net and its monitor to the scripting environment.
macroPlugin.set("jNet", this);
macroPlugin.set("jMon", getMonitor());
}
}
/** Gets a custom parameter from the neural net.
 * The user is free to use the custom parameters as s/he wants.
 * They are useful to transport a whatever value along with the net.
 * @param key The searched key
 * @return The value of the parameter if found, otherwise null
 */
public Object getParam(String key) {
    return (params == null) ? null : params.get(key);
}
/** Sets a custom parameter of the neural net.
 * The user is free to use the custom parameters as s/he wants.
 * They are useful to transport a whatever value along with the net.
 * @param key The key of the param
 * @param obj The value of the param
 */
public void setParam(String key, Object obj) {
if (params == null)
params = new Hashtable();
// Hashtable.put already replaces any previous mapping for the key,
// so the former containsKey/remove pair was redundant.
params.put(key, obj);
}
/** Return all the keys of the parameters contained in the net.
 * @return An array of Strings containing all the keys if found, otherwise null
 */
public String[] getKeys() {
    if (params == null) {
        return null;
    }
    String[] result = new String[params.size()];
    int pos = 0;
    for (Enumeration en = params.keys(); en.hasMoreElements(); ) {
        result[pos++] = (String) en.nextElement();
    }
    return result;
}
/**
 * Compiles the validation messages of the whole network: layers,
 * teacher (for top-level nets only) and Monitor.
 *
 * @see NeuralLayer
 * @return validation errors collected in a sorted set of NetCheck objects.
 */
public TreeSet check() {
// Prepare an empty set for check messages;
TreeSet checks = new TreeSet();
// Check for an empty neural network
if ((layers == null) || layers.isEmpty()) {
checks.add(new NetCheck(NetCheck.FATAL, "The Neural Network doesn't contain any layer", mon));
// If empty it makes no sense to continue the check
return checks;
} else
// NOTE: this 'else' binds only to the step-counter check below;
// the per-layer loop afterwards runs in any case.
// Check for the presence of more than one InputSynpase having stepCounter set to true
if (getNumOfstepCounters() > 1)
checks.add(new NetCheck(NetCheck.FATAL, "More than one InputSynapse having stepCounter set to true is present", mon));
// Check all layers.
for (int i = 0; i < layers.size(); i++) {
Layer layer = (Layer) layers.elementAt(i);
checks.addAll(layer.check());
}
// Check the teacher (only if it exists and this is not a nested neural network)
if (mon.getParent() == null) {
if (teacher != null) {
checks.addAll(teacher.check());
if (mon != null && mon.isLearning() && !mon.isSupervised())
checks.add(new NetCheck(NetCheck.WARNING, "Teacher is present: the supervised property should be set to true", mon));
} else {
if (mon != null && mon.isLearning() && mon.isSupervised())
checks.add(new NetCheck(NetCheck.FATAL, "Teacher not present: set to false the supervised property", mon));
}
}
// Check the Monitor.
if (mon != null) {
checks.addAll(mon.check());
}
// Return check messages.
return checks;
}
// NET LISTENER METHODS
/**
 * Empty NeuralNetListener callback — this net does not react to its own start event.
 * @param e the event (ignored)
 */
public void netStarted(NeuralNetEvent e) {
}
/**
 * Empty NeuralNetListener callback — cycle-end events are not handled here.
 * @param e the event (ignored)
 */
public void cicleTerminated(NeuralNetEvent e) {
}
/**
 * Empty NeuralNetListener callback — normal stop events are not handled here.
 * @param e the event (ignored)
 */
public void netStopped(NeuralNetEvent e) {
}
/**
 * Empty NeuralNetListener callback — error-change events are not handled here.
 * @param e the event (ignored)
 */
public void errorChanged(NeuralNetEvent e) {
}
/**
 * Stops the execution threads and resets all the layers
 * in the event of a critical network error.
 * @param error The error message (not used here; already logged by the source).
 * @param e The event source of this event.
 */
public void netStoppedError(NeuralNetEvent e, String error) {
    // Stop and reset all the Layers.
    this.terminate(false);
}
/**
 * This method permits to set externally a particular order to
 * traverse the Layers. If not used, the order will be calculated
 * automatically. Use this method in cases where the automatic
 * ordering doesn't work (e.g. in case of complex recurrent connections).
 * NOTE: if you set this property, you're responsible to update the array
 * whenever a layer is added/removed.
 * @param orderedLayers an array containing the ordered layers,
 *        from the input layer (index 0) to the output layer.
 */
public void setOrderedLayers(Layer[] orderedLayers) {
    this.orderedLayers = orderedLayers;
}
/** @return the externally imposed layer ordering, or null when the order
 *  is computed automatically (see calculateOrderedLayers). */
public Layer[] getOrderedLayers() {
    return orderedLayers;
}
/**
 * This method calculates the order of the layers of the network, from the input
 * to the output. If setOrderedLayers has been invoked before, that array wins;
 * otherwise the ordering is derived from the net's connection matrix and cached.
 * @return An array containing the ordered Layers, from the input to the output
 *         (i.e. layers[0]=input layer, layers[n-1]=output layer).
 */
public Layer[] calculateOrderedLayers() {
    Layer[] manuallyOrdered = getOrderedLayers();
    if (manuallyOrdered != null) {
        // An externally supplied ordering always takes precedence
        intOrderedLayers = manuallyOrdered;
    } else if (intOrderedLayers == null) {
        // No cached ordering yet: derive it from the net's connection matrix
        intOrderedLayers = new NeuralNetMatrix(this).getOrderedLayers();
    }
    return intOrderedLayers;
}
/** Runs the network.
 * @param singleThreadMode If true, runs the network in single thread mode
 * @param sync If true, blocks until the run completes; if false, returns immediately.
 */
public void go(boolean singleThreadMode, boolean sync) {
    getMonitor().setSingleThreadMode(singleThreadMode);
    this.go(sync);
}
private transient Thread singleThread = null;
/** Runs the network. The running mode is determined by
 * the value of the singleThreadMode property of the Monitor.
 * @param sync If true, blocks until the run completes; if false, returns immediately.
 */
public void go(boolean sync) {
    if (!getMonitor().isSingleThreadMode()) {
        // Multi-thread mode: start the layers' threads and let the Monitor drive them
        this.start();
        getMonitor().Go();
    } else {
        // Single-thread mode: run the whole net inside one dedicated worker thread
        Thread worker = new Thread(new Runnable() {
            public void run() {
                fastRun();
            }
        });
        setSingleThread(worker);
        worker.start();
    }
    // In sync mode, wait for the run to terminate before returning
    if (sync) {
        this.join();
    }
}
/** Runs the network in async mode (i.e. equivalent to go(false) ).
 * The running mode is determined by the value of the singleThreadMode property.
 */
public void go() {
    this.go(false);
}
/** Continues the execution of the network after the stop() method is called,
 * resuming from the cycle where it was interrupted.
 */
public void restore() {
    if (getMonitor().isSingleThreadMode()) {
        // Single-thread mode: resume the epoch count-down in a fresh worker thread
        Thread worker = new Thread(new Runnable() {
            public void run() {
                fastContinue();
            }
        });
        setSingleThread(worker);
        worker.start();
    } else {
        // Multi-thread mode: restart the layers and tell the Monitor to continue
        this.start();
        getMonitor().runAgain();
    }
}
/*********************************************************
* Implementation code for the single-thread version of Joone
*********************************************************/
private boolean stopFastRun;
/* This method runs the neural network in single-thread mode,
 * executing the full number of epochs configured in the Monitor.
 */
protected void fastRun() {
    this.fastRun(getMonitor().getTotCicles());
}
/* This method restores the running of the neural network
 * in single-thread mode, starting from the epoch at which
 * it had been previously stopped (the Monitor's current cycle).
 */
protected void fastContinue() {
    this.fastRun(getMonitor().getCurrentCicle());
}
/* This method runs the neural network in single-thread mode
 * starting from the epoch passed as parameter.
 * NOTE: the network counts DOWN from firstEpoch to 0.
 * @param firstEpoch the epoch from which the network will start.
 */
protected void fastRun(int firstEpoch) {
    Monitor mon = getMonitor();
    mon.setSingleThreadMode(true);
    int epochs = firstEpoch;
    int patterns = mon.getNumOfPatterns();
    Layer[] ordLayers = calculateOrderedLayers();
    int layers = ordLayers.length;
    // Initialize every layer before starting the run
    for (int ly=0; ly < layers; ++ly) {
        ordLayers[ly].init();
    }
    stopFastRun = false;
    mon.fireNetStarted();
    // Epoch loop: counts down so the Monitor's current cycle reflects epochs left
    for (int epoch=epochs; epoch > 0 ; --epoch) {
        mon.setCurrentCicle(epoch);
        for (int p=0; p < patterns; ++p) {
            // Forward pass on the next input pattern
            stepForward(null);
            if (getMonitor().isLearningCicle(p+1)) {
                // Backward pass (learning) only when the Monitor says so
                stepBackward(null);
            }
        }
        mon.fireCicleTerminated();
        // stopFastRun is raised asynchronously by stopFastRun(); checked once per epoch
        if (stopFastRun) {
            break;
        }
    }
    // Push a terminator pattern (count == -1) through the net so downstream
    // components see the end-of-stream marker, then signal completion.
    Pattern stop = new Pattern(new double[ordLayers[0].getRows()]);
    stop.setCount(-1);
    stepForward(stop);
    mon.fireNetStopped();
}
/* Performs a single elaboration step forward in single-thread mode.
 * The network is interrogated with exactly one input pattern; every layer
 * is re-initialized before the propagation.
 * @param pattern The input pattern to use. If null, the input pattern is read
 * from the input synapse connected to the input layer.
 */
protected void singleStepForward(Pattern pattern) {
    getMonitor().setSingleThreadMode(true);
    // Re-initialize every layer before propagating the pattern
    Layer[] ordered = calculateOrderedLayers();
    for (int i = 0; i < ordered.length; ++i) {
        ordered[i].init();
    }
    stepForward(pattern);
}
/* Use this method to perform a single step backward in single-thread mode.
 * The pattern passed as parameter, that is backpropagated, must contain
 * the error in terms of differences from the desired pattern.
 * Unlike singleStepForward, the layers are NOT re-initialized here.
 * @param error The error pattern to backpropagate. If null, the pattern is read
 * from the teacher connected to the output layer. */
protected void singleStepBackward(Pattern error) {
    getMonitor().setSingleThreadMode(true);
    this.stepBackward(error);
}
/**
 * Propagates one pattern forward through all the layers, input to output.
 * Only the input layer (index 0) receives the pattern; every following layer
 * pulls its input from the synapses connecting it to the previous ones.
 * @param pattern the pattern fed to the input layer, or null to let the
 *        input layer read from its own input synapse.
 */
protected void stepForward(Pattern pattern) {
    Layer[] ordered = calculateOrderedLayers();
    for (int i = 0; i < ordered.length; ++i) {
        ordered[i].fwdRun(i == 0 ? pattern : null);
    }
}
/**
 * Propagates an error pattern backward through the layers, output to input.
 * NOTE(review): every layer receives the SAME 'error' reference here, whereas
 * stepForward feeds the pattern only to the first layer — presumably revRun
 * implementations ignore the argument except where it applies; confirm.
 * @param error the error pattern to backpropagate (may be null).
 */
protected void stepBackward(Pattern error) {
    Layer[] ordLayers = calculateOrderedLayers();
    int layers = ordLayers.length;
    for (int ly=layers; ly > 0; --ly) {
        ordLayers[ly-1].revRun(error);
    }
}
/* This method serves to stop the network
 * when running in single-thread mode; the flag is polled once per epoch
 * by fastRun(int). It DOES NOT affect the multi-thread running
 * (i.e. a network launched with Monitor.Go() method).
 */
protected void stopFastRun() {
    stopFastRun = true;
}
/** @return the worker thread used in single-thread mode, or null when idle. */
protected Thread getSingleThread() {
    return singleThread;
}
/** @param singleThread the worker thread used in single-thread mode. */
protected void setSingleThread(Thread singleThread) {
    this.singleThread = singleThread;
}
}
| Java |
/*
* NeuralNetLoader.java
*
* Created on 15 aprile 2002, 22.54
*/
package org.joone.net;
import java.io.*;
import org.joone.log.*;
/**
 * Utility that loads a serialized NeuralNet from a file.
 *
 * @author pmarrone
 */
public class NeuralNetLoader
{
    /** Logger */
    private static final ILogger log = LoggerFactory.getLogger (NeuralNetLoader.class);

    /** The deserialized network; null when loading failed or no name was given. */
    NeuralNet nnet;

    /** Creates a new instance of NeuralNetLoader, immediately loading the
     * serialized network stored in the named file. Failures are logged and
     * leave the loaded net null.
     * @param netName path of the file containing the serialized network
     */
    public NeuralNetLoader(String netName) {
        try {
            nnet = readNeuralNet(netName);
        }
        catch (Exception e)
        {
            log.error ( "Cannot create the NeuralNet with the following name : \"" + netName + "\"",
            e);
        }
    }

    /** @return the loaded network, or null if loading failed. */
    public NeuralNet getNeuralNet() {
        return nnet;
    }

    /** Reads the serialized NeuralNet object from the named file.
     * Both streams are closed even when deserialization throws — the
     * previous version leaked them on any readObject failure.
     * @param fileName path of the serialized net; null or empty yields null.
     */
    private NeuralNet readNeuralNet(String fileName) throws IOException, ClassNotFoundException {
        if ((fileName == null) || (fileName.length() == 0))
            return null;
        FileInputStream fin = new FileInputStream(new File(fileName));
        try {
            ObjectInputStream oin = new ObjectInputStream(fin);
            try {
                return (NeuralNet) oin.readObject();
            } finally {
                oin.close();
            }
        } finally {
            // Closing fin after oin is harmless (double close is a no-op).
            fin.close();
        }
    }
}
| Java |
/*
* NeuralNetBeanInfo.java
*
* Created on 28 aprile 2004, 22.51
*/
package org.joone.net;
import java.beans.*;
/**
 * BeanInfo for NeuralNet, generated by the NetBeans GUI builder.
 * The GEN-* markers delimit regenerated regions — edit those with care,
 * the builder may overwrite them.
 *
 * @author paolo
 */
public class NeuralNetBeanInfo extends SimpleBeanInfo {

    // Bean descriptor //GEN-FIRST:BeanDescriptor
    /*lazy BeanDescriptor*/
    private static BeanDescriptor getBdescriptor(){
        BeanDescriptor beanDescriptor = new BeanDescriptor ( NeuralNet.class , null );//GEN-HEADEREND:BeanDescriptor
        // Here you can add code for customizing the BeanDescriptor.
        return beanDescriptor; }//GEN-LAST:BeanDescriptor

    // Property identifiers //GEN-FIRST:Properties
    private static final int PROPERTY_descriptor = 0;
    private static final int PROPERTY_inputLayer = 1;
    private static final int PROPERTY_layerName = 2;
    private static final int PROPERTY_layers = 3;
    private static final int PROPERTY_macroPlugin = 4;
    private static final int PROPERTY_monitor = 5;
    private static final int PROPERTY_outputLayer = 6;
    private static final int PROPERTY_scriptingEnabled = 7;
    private static final int PROPERTY_teacher = 8;

    // Property array
    /*lazy PropertyDescriptor*/
    private static PropertyDescriptor[] getPdescriptor(){
        PropertyDescriptor[] properties = new PropertyDescriptor[9];

        try {
            properties[PROPERTY_descriptor] = new PropertyDescriptor ( "descriptor", NeuralNet.class, "getDescriptor", "setDescriptor" );
            properties[PROPERTY_inputLayer] = new PropertyDescriptor ( "inputLayer", NeuralNet.class, "getInputLayer", "setInputLayer" );
            properties[PROPERTY_layerName] = new PropertyDescriptor ( "layerName", NeuralNet.class, "getLayerName", "setLayerName" );
            properties[PROPERTY_layers] = new PropertyDescriptor ( "layers", NeuralNet.class, "getLayers", "setLayers" );
            properties[PROPERTY_macroPlugin] = new PropertyDescriptor ( "macroPlugin", NeuralNet.class, "getMacroPlugin", "setMacroPlugin" );
            properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", NeuralNet.class, "getMonitor", "setMonitor" );
            properties[PROPERTY_monitor].setExpert ( true );
            properties[PROPERTY_outputLayer] = new PropertyDescriptor ( "outputLayer", NeuralNet.class, "getOutputLayer", "setOutputLayer" );
            properties[PROPERTY_scriptingEnabled] = new PropertyDescriptor ( "scriptingEnabled", NeuralNet.class, "isScriptingEnabled", "setScriptingEnabled" );
            properties[PROPERTY_teacher] = new PropertyDescriptor ( "teacher", NeuralNet.class, "getTeacher", "setTeacher" );
        }
        catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
        // Here you can add code for customizing the properties array.
        return properties; }//GEN-LAST:Properties

    // EventSet identifiers//GEN-FIRST:Events

    // EventSet array
    /*lazy EventSetDescriptor*/
    private static EventSetDescriptor[] getEdescriptor(){
        EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
        // Here you can add code for customizing the event sets array.
        return eventSets; }//GEN-LAST:Events

    // Method identifiers //GEN-FIRST:Methods

    // Method array
    /*lazy MethodDescriptor*/
    private static MethodDescriptor[] getMdescriptor(){
        MethodDescriptor[] methods = new MethodDescriptor[0];//GEN-HEADEREND:Methods
        // Here you can add code for customizing the methods array.
        return methods; }//GEN-LAST:Methods

    private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
    private static final int defaultEventIndex = -1;//GEN-END:Idx

    //GEN-FIRST:Superclass
    // Here you can add code for customizing the Superclass BeanInfo.
    //GEN-LAST:Superclass

    /**
     * Gets the bean's <code>BeanDescriptor</code>s.
     *
     * @return BeanDescriptor describing the editable
     * properties of this bean. May return null if the
     * information should be obtained by automatic analysis.
     */
    public BeanDescriptor getBeanDescriptor() {
        return getBdescriptor();
    }

    /**
     * Gets the bean's <code>PropertyDescriptor</code>s.
     *
     * @return An array of PropertyDescriptors describing the editable
     * properties supported by this bean. May return null if the
     * information should be obtained by automatic analysis.
     * <p>
     * If a property is indexed, then its entry in the result array will
     * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
     * A client of getPropertyDescriptors can use "instanceof" to check
     * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
     */
    public PropertyDescriptor[] getPropertyDescriptors() {
        return getPdescriptor();
    }

    /**
     * Gets the bean's <code>EventSetDescriptor</code>s.
     *
     * @return An array of EventSetDescriptors describing the kinds of
     * events fired by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public EventSetDescriptor[] getEventSetDescriptors() {
        return getEdescriptor();
    }

    /**
     * Gets the bean's <code>MethodDescriptor</code>s.
     *
     * @return An array of MethodDescriptors describing the methods
     * implemented by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public MethodDescriptor[] getMethodDescriptors() {
        return getMdescriptor();
    }

    /**
     * A bean may have a "default" property that is the property that will
     * mostly commonly be initially chosen for update by human's who are
     * customizing the bean.
     * @return Index of default property in the PropertyDescriptor array
     * returned by getPropertyDescriptors.
     * <P> Returns -1 if there is no default property.
     */
    public int getDefaultPropertyIndex() {
        return defaultPropertyIndex;
    }

    /**
     * A bean may have a "default" event that is the event that will
     * mostly commonly be used by human's when using the bean.
     * @return Index of default event in the EventSetDescriptor array
     * returned by getEventSetDescriptors.
     * <P> Returns -1 if there is no default event.
     */
    public int getDefaultEventIndex() {
        return defaultEventIndex;
    }
}
| Java |
package org.joone.net;
import java.util.Vector;
import org.joone.engine.*;
/**
 * This class is useful to validate a neural network.
 * It simply sets some parameters of the neural network passed as parameter
 * and starts itself in a separated thread, notifying a listener when the
 * validation step finishes.
 *
 * @author pmarrone
 */
public class NeuralNetValidator implements Runnable, NeuralNetListener {

    /** Listeners notified when the validation run completes. */
    final private Vector listeners;
    /** The network to validate. */
    final private NeuralNet nnet;
    /** Worker thread; non-null only while a validation run is in progress. */
    private Thread myThread = null;
    // Snapshot of the net's cycle counters taken before validation starts.
    // NOTE(review): they are saved here but never restored in this class —
    // confirm whether the caller is expected to restore them afterwards.
    private int currentCycle;
    private int totCycles;
    /** Flag indicating if we should use the training data for validation (if
     * <code>true</code>) or should we use the validation data (if <code>false</code>)
     * which is the default. */
    private boolean useTrainingData = false;

    /** Creates a validator for the given network.
     * @param nn the network to validate */
    public NeuralNetValidator(NeuralNet nn) {
        listeners = new Vector();
        nnet = nn;
    }

    /** Registers a listener to be notified when validation finishes.
     * Duplicate registrations are ignored.
     * @param newListener the listener to add */
    public synchronized void addValidationListener(NeuralValidationListener newListener){
        if (!listeners.contains(newListener))
            listeners.addElement(newListener);
    }

    /** Configures the net's Monitor for a single non-learning validation
     * epoch and starts the run; netStopped() then fires the completion event. */
    protected void validate(){
        totCycles = nnet.getMonitor().getTotCicles();
        currentCycle = nnet.getMonitor().getCurrentCicle();
        nnet.getMonitor().addNeuralNetListener(this);
        nnet.getMonitor().setLearning(false);
        nnet.getMonitor().setValidation(true);
        nnet.getMonitor().setTrainingDataForValidation(useTrainingData);
        nnet.getMonitor().setTotCicles(1);
        nnet.go();
    }

    /** Stores the resulting global error in the net's descriptor and
     * notifies every registered listener. */
    public void fireNetValidated(){
        double error = nnet.getMonitor().getGlobalError();
        nnet.getDescriptor().setValidationError(error);
        Object[] list;
        // Copy the listeners under lock so notification happens outside it
        synchronized (this) {
            list = listeners.toArray();
        }
        for (int i=0; i < list.length; ++i) {
            NeuralValidationListener nvl = (NeuralValidationListener)list[i];
            nvl.netValidated(new NeuralValidationEvent(nnet));
        }
    }

    /**
     * By default the validator validates a neural network with validation data,
     * however by calling this method before calling the <code>start()</code>
     * method, one can decide if the network should be validated with validation
     * data (the parameter <code>anUse</code> should be <code>false</code>) or
     * by using the training data (the parameter <code>anUse</code> should be
     * <code>true</code>).
     *
     * @param anUse <code>true</code> if we should use training data for validation,
     * <code>false</code> if we should use the validation data for validation (default).
     */
    public void useTrainingData(boolean anUse) {
        useTrainingData = anUse;
    }

    /** Starts the validation into a separated thread.
     * Does nothing if a validation is already running. */
    public void start() {
        if (myThread == null) {
            myThread = new Thread(this, "Validator");
            myThread.start();
        }
    }

    /** Thread body: runs the validation, then marks the validator reusable. */
    public void run() {
        this.validate();
        myThread = null;
    }

    /** Fired by the Monitor when the validation epoch ends: publishes the results. */
    public void netStopped(NeuralNetEvent e) {
        this.fireNetValidated();
    }

    /** Not used. */
    public void cicleTerminated(NeuralNetEvent e) {
    }

    /** Not used. */
    public void netStarted(NeuralNetEvent e) {
    }

    /** Not used. */
    public void errorChanged(NeuralNetEvent e) {
    }

    /** Not used. */
    public void netStoppedError(NeuralNetEvent e, String error) {
    }

    /**
     * Gets the network to validate (or has been validated).
     *
     * @return the network to validate (or the network that has been validated).
     */
    public NeuralNet getNeuralNet() {
        return nnet;
    }
}
| Java |
/*
* NeuralValidatorListener.java
*
* Created on 28 aprile 2002, 15.59
*/
package org.joone.net;
import java.util.EventListener;
/**
 * Listener interface for objects interested in the completion of a
 * NeuralNetValidator run.
 *
 * @author pmarrone
 */
public interface NeuralValidationListener extends EventListener {
    /** Called when the validation of a network has finished.
     * @param event the event carrying the validated network */
    public void netValidated(NeuralValidationEvent event);
}
| Java |
/*
* NestedNeuralLayer.java
*
* Created on 29 February 2002
*/
package org.joone.net;
import java.io.*;
import java.util.*;
import org.joone.io.StreamInputSynapse;
import org.joone.io.StreamOutputSynapse;
import org.joone.log.*;
import org.joone.engine.*;
/**
 * A Layer that wraps a whole NeuralNet, allowing a serialized network to be
 * embedded inside another one. Almost every Layer operation is delegated to
 * the wrapped net; until a real nested net is loaded, a dummy LinearLayer
 * makes this component behave like a plain LinearLayer.
 */
public class NestedNeuralLayer extends Layer {
    /** Logger */
    private static final ILogger log = LoggerFactory.getLogger(NestedNeuralLayer.class);

    static final long serialVersionUID = -3697306754884303651L;

    private String sNeuralNet; // The file name for the stored NeuralNet
    private NeuralNet NestedNeuralNet; // The wrapped network all calls delegate to
    private LinearLayer lin; // Placeholder layer used before a real nested net is set
    private transient File embeddedNet = null; // Rebuilt from sNeuralNet on deserialization

    /** Creates a nested layer with an empty name. */
    public NestedNeuralLayer() {
        this("");
    }

    /** Creates a nested layer with the given name.
     * @param ElemName the name assigned to the (inner) network */
    public NestedNeuralLayer(String ElemName) {
        super();
        NestedNeuralNet = new NeuralNet();
        /* We add a dummy layer to store the connections made before the true NestedNN is set
         * so the NestedNeuralLayer acts as a LinearLayer in absence of an internal NN
         */
        lin = new LinearLayer();
        lin.setLayerName("Nested LinearLayer");
        NestedNeuralNet.addLayer(lin, NeuralNet.INPUT_LAYER);
        sNeuralNet = new String();
        setLayerName(ElemName);
    }

    // The Layer template methods are intentionally empty: all processing
    // happens inside the wrapped NeuralNet via fwdRun/revRun below.
    protected void setDimensions() {
    }

    protected void forward(double[] pattern) {
    }

    protected void backward(double[] pattern) {
    }

    /** @return the file name of the stored nested network. */
    public String getNeuralNet() {
        return sNeuralNet;
    }

    /** Loads the nested network from the given file name.
     * Failures are logged and leave the current nested net unchanged.
     * @param NNFile file name of a serialized NeuralNet */
    public void setNeuralNet(String NNFile) {
        sNeuralNet = NNFile;
        try {
            NeuralNet newNeuralNet = readNeuralNet();
            if (newNeuralNet != null)
                this.setNestedNeuralNet(newNeuralNet);
        } catch (Exception e) {
            log.warn( "Exception thrown. Message is : " + e.getMessage(),
            e );
        }
    }

    // ---- Delegation of the Layer life-cycle and wiring to the wrapped net ----

    public void start() {
        NestedNeuralNet.start();
    }

    public void stop() {
        NestedNeuralNet.stop();
    }

    public int getRows() {
        return NestedNeuralNet.getRows();
    }

    public void setRows(int p1) {
        NestedNeuralNet.setRows(p1);
    }

    /** Adds noise to the nested net's weights, but only while learning. */
    public void addNoise(double p1) {
        if (this.isLearning())
            NestedNeuralNet.addNoise(p1);
    }

    /** Randomizes the nested net's weights, but only while learning. */
    public void randomize(double amplitude) {
        if (this.isLearning())
            NestedNeuralNet.randomize(amplitude);
    }

    public Matrix getBias() {
        return NestedNeuralNet.getBias();
    }

    public Vector getAllOutputs() {
        return NestedNeuralNet.getAllOutputs();
    }

    public String getLayerName() {
        return NestedNeuralNet.getLayerName();
    }

    public void removeOutputSynapse(OutputPatternListener p1) {
        NestedNeuralNet.removeOutputSynapse(p1);
    }

    public void setAllInputs(Vector p1) {
        NestedNeuralNet.setAllInputs(p1);
    }

    public void removeAllOutputs() {
        NestedNeuralNet.removeAllOutputs();
    }

    public Vector getAllInputs() {
        return NestedNeuralNet.getAllInputs();
    }

    public boolean addOutputSynapse(OutputPatternListener p1) {
        return NestedNeuralNet.addOutputSynapse(p1);
    }

    public void setBias(Matrix p1) {
        NestedNeuralNet.setBias(p1);
    }

    public void removeInputSynapse(InputPatternListener p1) {
        NestedNeuralNet.removeInputSynapse(p1);
    }

    public void setLayerName(String p1) {
        NestedNeuralNet.setLayerName(p1);
    }

    public boolean addInputSynapse(InputPatternListener p1) {
        return NestedNeuralNet.addInputSynapse(p1);
    }

    public void setAllOutputs(Vector p1) {
        NestedNeuralNet.setAllOutputs(p1);
    }

    /** The external monitor becomes the PARENT of the nested net's own monitor. */
    public void setMonitor(Monitor p1) {
        getMonitor().setParent(p1);
    }

    public Monitor getMonitor() {
        return NestedNeuralNet.getMonitor();
    }

    public void removeAllInputs() {
        NestedNeuralNet.removeAllInputs();
    }

    public NeuralLayer copyInto(NeuralLayer p1) {
        return NestedNeuralNet.copyInto(p1);
    }

    /**
     * Reads the object of NeuralNet from the file with name sNeuralNet.
     * NOTE(review): the streams are not closed if readObject throws —
     * same leak pattern as NeuralNetLoader.readNeuralNet; consider try/finally.
     */
    private NeuralNet readNeuralNet() throws IOException, ClassNotFoundException {
        if (sNeuralNet == null)
            return null;
        if (sNeuralNet.equals(new String("")))
            return null;
        File NNFile = new File(sNeuralNet);
        FileInputStream fin = new FileInputStream(NNFile);
        ObjectInputStream oin = new ObjectInputStream(fin);
        NeuralNet newNeuralNet = (NeuralNet)oin.readObject();
        oin.close();
        fin.close();
        return newNeuralNet;
    }

    /** @return true when the nested network is currently running. */
    public boolean isRunning() {
        if (NestedNeuralNet == null)
            return false;
        else
            return NestedNeuralNet.isRunning();
    }

    /** Getter for property NestedNeuralNet.
     * @return Value of property NestedNeuralNet.
     */
    public NeuralNet getNestedNeuralNet() {
        return NestedNeuralNet;
    }

    /** Setter for property NestedNeuralNet.
     * Transfers this layer's name, input/output wiring and external monitor
     * to the new net, discards the placeholder LinearLayer and strips the
     * new net of listeners and teacher (a nested net cannot have its own).
     * @param newNeuralNet New value of property NestedNeuralNet.
     */
    public void setNestedNeuralNet(NeuralNet newNeuralNet) {
        newNeuralNet.removeAllListeners();
        newNeuralNet.setLayerName(NestedNeuralNet.getLayerName());
        newNeuralNet.setTeacher(null); // The nested NN cannot have a own teacher
        newNeuralNet.setAllInputs(NestedNeuralNet.getAllInputs());
        newNeuralNet.setAllOutputs(NestedNeuralNet.getAllOutputs());
        Monitor extMonitor = getMonitor();
        lin = null;
        NestedNeuralNet = newNeuralNet;
        NestedNeuralNet.setMonitor(new Monitor());
        this.setMonitor(extMonitor);
    }

    /** Getter for property learning.
     * @return Value of property learning.
     */
    public boolean isLearning() {
        return NestedNeuralNet.getMonitor().isLearning();
    }

    /** Setter for property learning.
     * @param learning New value of property learning.
     */
    public void setLearning(boolean learning) {
        NestedNeuralNet.getMonitor().setLearning(learning);
    }

    /** Runs the nested net's checks, re-attributing errors to this layer. */
    public TreeSet check() {
        return setErrorSource(NestedNeuralNet.check());
    }

    /** @return the file the nested net was loaded from (transient). */
    public File getEmbeddedNet() {
        return embeddedNet;
    }

    /** Loads the nested net from the given file, unless it is already
     * the currently loaded one; null clears the stored file name. */
    public void setEmbeddedNet(File embeddedNet) {
        if (embeddedNet != null) {
            if (!sNeuralNet.equals(embeddedNet.getAbsolutePath())) {
                this.embeddedNet = embeddedNet;
                setNeuralNet(embeddedNet.getAbsolutePath());
            }
        } else {
            this.embeddedNet = embeddedNet;
            sNeuralNet = "";
        }
    }

    /** Rebuilds the transient File handle after deserialization. */
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        this.embeddedNet = new File(sNeuralNet);
    }

    // Changes the source of the errors generated from internal components so
    // the user sees this layer (not an invisible inner component) as the origin.
    private TreeSet setErrorSource(TreeSet errors) {
        if (!errors.isEmpty()) {
            Iterator iter = errors.iterator();
            while (iter.hasNext()) {
                NetCheck nc = (NetCheck)iter.next();
                if (!(nc.getSource() instanceof Monitor) &&
                !(nc.getSource() instanceof StreamInputSynapse) &&
                !(nc.getSource() instanceof StreamOutputSynapse))
                    nc.setSource(this);
            }
        }
        return errors;
    }

    /** Forward pass: delegates one single-thread step to the nested net. */
    public void fwdRun(Pattern pattIn) {
        NestedNeuralNet.singleStepForward(pattIn);
    }

    /** Backward pass: delegates one single-thread step to the nested net. */
    public void revRun(Pattern pattIn) {
        NestedNeuralNet.singleStepBackward(pattIn);
    }
}
| Java |
/*
* NeuralNetDescriptor.java
*
* Created on 29 april 2002, 15.14
* @author pmarrone
*/
package org.joone.net;
/**
 * Lightweight, SOAP-serializable record of a neural network's training
 * state: its name, the last trained epoch and the training/validation
 * errors. It lets a distributed environment track the progress of a net
 * without keeping the whole network in memory, and can be extended to
 * store more parameters for any future use.
 *
 * WARNING: This class must stay compatible with the SOAP-Serialization
 * mechanism, hence add ONLY public parameters having:
 * - public getter/setter methods
 * - a basic type, like String, int, ... or a SOAP-Serializable interface
 *
 * @author pmarrone
 */
public class NeuralNetAttributes implements java.io.Serializable {

    private static final long serialVersionUID = -3122881040378874490L;

    // -1.0 marks "not measured yet" for both error figures.
    private double validationError = -1.0;
    private double trainingError = -1.0;
    private String neuralNetName;
    private int lastEpoch = 0;

    /** Creates an empty attributes holder. */
    public NeuralNetAttributes() {
    }

    /** @return the name of the described network (null until set). */
    public String getNeuralNetName() {
        return neuralNetName;
    }

    /** @param neuralNetName the name of the described network. */
    public void setNeuralNetName(String neuralNetName) {
        this.neuralNetName = neuralNetName;
    }

    /** @return the last training error, or -1.0 when never measured. */
    public double getTrainingError() {
        return trainingError;
    }

    /** @param trainingError the new training error value. */
    public void setTrainingError(double trainingError) {
        this.trainingError = trainingError;
    }

    /** @return the last validation error, or -1.0 when never measured. */
    public double getValidationError() {
        return validationError;
    }

    /** @param validationError the new validation error value. */
    public void setValidationError(double validationError) {
        this.validationError = validationError;
    }

    /** @return the index of the last completed training epoch (0 initially). */
    public int getLastEpoch() {
        return lastEpoch;
    }

    /** @param lastEpoch the index of the last completed training epoch. */
    public void setLastEpoch(int lastEpoch) {
        this.lastEpoch = lastEpoch;
    }
}
| Java |
package org.joone.net;
import org.joone.net.NeuralNet;
import java.util.*;
/**
 * Thin helper that runs the validation checks of a NeuralNet and reports
 * whether any fatal problem was found.
 */
public class NetChecker {

    /** NeuralNet linked to this instance. */
    final private NeuralNet netToCheck;

    /**
     * Constructor. Sets the NeuralNet for this instance.
     *
     * @param netToCheckArg the NeuralNet to use.
     */
    public NetChecker(NeuralNet netToCheckArg) {
        netToCheck = netToCheckArg;
    }

    /**
     * Validation checks for invalid parameter values, misconfiguration, etc.
     * All network components should include a check method that firstly calls its ancestor check method and
     * adds these to any check messages it produces. This allows check messages to be collected from all levels
     * of a component to be returned to the caller's check method. Using a TreeSet ensures that
     * duplicate messages are removed.
     *
     * @return validation errors.
     */
    public TreeSet check() {
        return netToCheck.check();
    }

    /**
     * Method to determine whether there are validation errors in the net.
     *
     * @return true if at least one FATAL check message exists.
     */
    public boolean hasErrors() {
        for (Iterator iter = netToCheck.check().iterator(); iter.hasNext(); ) {
            if (((NetCheck) iter.next()).isFatal()) {
                return true;
            }
        }
        return false;
    }
}
| Java |
package org.joone.net;
import java.beans.*;
public class NestedNeuralLayerBeanInfo extends SimpleBeanInfo {
// Bean descriptor//GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( org.joone.net.NestedNeuralLayer.class , null ); // NOI18N//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers//GEN-FIRST:Properties
private static final int PROPERTY_embeddedNet = 0;
private static final int PROPERTY_layerName = 1;
private static final int PROPERTY_learning = 2;
private static final int PROPERTY_monitor = 3;
private static final int PROPERTY_nestedNeuralNet = 4;
private static final int PROPERTY_neuralNet = 5;
private static final int PROPERTY_rows = 6;
// Property array
/*lazy PropertyDescriptor*/
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[7];
try {
properties[PROPERTY_embeddedNet] = new PropertyDescriptor ( "embeddedNet", org.joone.net.NestedNeuralLayer.class, "getEmbeddedNet", "setEmbeddedNet" ); // NOI18N
properties[PROPERTY_layerName] = new PropertyDescriptor ( "layerName", org.joone.net.NestedNeuralLayer.class, "getLayerName", "setLayerName" ); // NOI18N
properties[PROPERTY_learning] = new PropertyDescriptor ( "learning", org.joone.net.NestedNeuralLayer.class, "isLearning", "setLearning" ); // NOI18N
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", org.joone.net.NestedNeuralLayer.class, "getMonitor", "setMonitor" ); // NOI18N
properties[PROPERTY_monitor].setExpert ( true );
properties[PROPERTY_nestedNeuralNet] = new PropertyDescriptor ( "nestedNeuralNet", org.joone.net.NestedNeuralLayer.class, "getNestedNeuralNet", "setNestedNeuralNet" ); // NOI18N
properties[PROPERTY_nestedNeuralNet].setHidden ( true );
properties[PROPERTY_neuralNet] = new PropertyDescriptor ( "neuralNet", org.joone.net.NestedNeuralLayer.class, "getNeuralNet", "setNeuralNet" ); // NOI18N
properties[PROPERTY_neuralNet].setHidden ( true );
properties[PROPERTY_neuralNet].setDisplayName ( "Nested ANN" );
//properties[PROPERTY_neuralNet].setPropertyEditorClass ( org.joone.edit.JooneFileChooserEditor.class );
properties[PROPERTY_rows] = new PropertyDescriptor ( "rows", org.joone.net.NestedNeuralLayer.class, "getRows", "setRows" ); // NOI18N
properties[PROPERTY_rows].setHidden ( true );
}
catch(IntrospectionException e) {
e.printStackTrace();
}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers//GEN-FIRST:Methods
private static final int METHOD_addInputSynapse0 = 0;
private static final int METHOD_addNoise1 = 1;
private static final int METHOD_addOutputSynapse2 = 2;
private static final int METHOD_copyInto3 = 3;
private static final int METHOD_randomize4 = 4;
private static final int METHOD_removeAllInputs5 = 5;
private static final int METHOD_removeAllOutputs6 = 6;
private static final int METHOD_removeInputSynapse7 = 7;
private static final int METHOD_removeOutputSynapse8 = 8;
private static final int METHOD_run9 = 9;
private static final int METHOD_start10 = 10;
private static final int METHOD_stop11 = 11;
// Method array
/*lazy MethodDescriptor*/
// Lazily builds the MethodDescriptor array describing the methods of
// org.joone.net.NestedNeuralLayer exposed through this BeanInfo.
private static MethodDescriptor[] getMdescriptor(){
    MethodDescriptor[] methods = new MethodDescriptor[12];
    try {
        methods[METHOD_addInputSynapse0] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("addInputSynapse", new Class[] {org.joone.engine.InputPatternListener.class})); // NOI18N
        methods[METHOD_addInputSynapse0].setDisplayName ( "" );
        methods[METHOD_addNoise1] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("addNoise", new Class[] {Double.TYPE})); // NOI18N
        methods[METHOD_addNoise1].setDisplayName ( "" );
        methods[METHOD_addOutputSynapse2] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("addOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class})); // NOI18N
        methods[METHOD_addOutputSynapse2].setDisplayName ( "" );
        methods[METHOD_copyInto3] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("copyInto", new Class[] {org.joone.engine.NeuralLayer.class})); // NOI18N
        methods[METHOD_copyInto3].setDisplayName ( "" );
        methods[METHOD_randomize4] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("randomize", new Class[] {Double.TYPE})); // NOI18N
        methods[METHOD_randomize4].setDisplayName ( "" );
        methods[METHOD_removeAllInputs5] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("removeAllInputs", new Class[] {})); // NOI18N
        methods[METHOD_removeAllInputs5].setDisplayName ( "" );
        methods[METHOD_removeAllOutputs6] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("removeAllOutputs", new Class[] {})); // NOI18N
        methods[METHOD_removeAllOutputs6].setDisplayName ( "" );
        methods[METHOD_removeInputSynapse7] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("removeInputSynapse", new Class[] {org.joone.engine.InputPatternListener.class})); // NOI18N
        methods[METHOD_removeInputSynapse7].setDisplayName ( "" );
        methods[METHOD_removeOutputSynapse8] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("removeOutputSynapse", new Class[] {org.joone.engine.OutputPatternListener.class})); // NOI18N
        methods[METHOD_removeOutputSynapse8].setDisplayName ( "" );
        methods[METHOD_run9] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("run", new Class[] {})); // NOI18N
        methods[METHOD_run9].setDisplayName ( "" );
        methods[METHOD_start10] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("start", new Class[] {})); // NOI18N
        methods[METHOD_start10].setDisplayName ( "" );
        methods[METHOD_stop11] = new MethodDescriptor ( org.joone.net.NestedNeuralLayer.class.getMethod("stop", new Class[] {})); // NOI18N
        methods[METHOD_stop11].setDisplayName ( "" );
    }
    catch( Exception e) {
        // A reflection failure (e.g. NoSuchMethodException) was previously
        // swallowed silently, leaving callers with a partially filled array.
        // Report it the same way the Properties section above does.
        e.printStackTrace();
    }//GEN-HEADEREND:Methods
    // Here you can add code for customizing the methods array.
    return methods; }//GEN-LAST:Methods
// No default property/event is designated for this bean (-1 = none).
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx

//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass

/**
 * Gets the bean's <code>BeanDescriptor</code>s.
 *
 * @return BeanDescriptor describing the editable
 * properties of this bean. May return null if the
 * information should be obtained by automatic analysis.
 */
public BeanDescriptor getBeanDescriptor() {
    // getBdescriptor() is defined earlier in this generated class.
    return getBdescriptor();
}

/**
 * Gets the bean's <code>PropertyDescriptor</code>s.
 *
 * @return An array of PropertyDescriptors describing the editable
 * properties supported by this bean. May return null if the
 * information should be obtained by automatic analysis.
 * <p>
 * If a property is indexed, then its entry in the result array will
 * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
 * A client of getPropertyDescriptors can use "instanceof" to check
 * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
 */
public PropertyDescriptor[] getPropertyDescriptors() {
    return getPdescriptor();
}

/**
 * Gets the bean's <code>EventSetDescriptor</code>s.
 *
 * @return An array of EventSetDescriptors describing the kinds of
 * events fired by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public EventSetDescriptor[] getEventSetDescriptors() {
    return getEdescriptor();
}

/**
 * Gets the bean's <code>MethodDescriptor</code>s.
 *
 * @return An array of MethodDescriptors describing the methods
 * implemented by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public MethodDescriptor[] getMethodDescriptors() {
    return getMdescriptor();
}

/**
 * A bean may have a "default" property that is the property that will
 * most commonly be initially chosen for update by humans who are
 * customizing the bean.
 * @return Index of default property in the PropertyDescriptor array
 * returned by getPropertyDescriptors.
 * <P> Returns -1 if there is no default property.
 */
public int getDefaultPropertyIndex() {
    return defaultPropertyIndex;
}

/**
 * A bean may have a "default" event that is the event that will
 * most commonly be used by humans when using the bean.
 * @return Index of default event in the EventSetDescriptor array
 * returned by getEventSetDescriptors.
 * <P> Returns -1 if there is no default event.
 */
public int getDefaultEventIndex() {
    return defaultEventIndex;
}
}
| Java |
package org.joone.net;
import org.joone.engine.NeuralLayer;
import org.joone.engine.InputPatternListener;
import org.joone.engine.OutputPatternListener;
import org.joone.engine.Monitor;
/**
 * Class to represent a network validation check error.
 * NetChecks of FATAL severity prevent the network from running,
 * NetChecks of WARNING severity do not.
 * <p>
 * Instances order themselves by their string representation (severity,
 * source class, instance name, message), so they sort deterministically
 * in sorted collections such as a TreeSet. Note that equals() is not
 * overridden, so the ordering is not consistent with equals; this is
 * acceptable for TreeSet usage, which relies solely on compareTo.
 */
public class NetCheck implements Comparable {

    /** Fatal check severity. */
    public static final int FATAL = 0;

    /** Non-fatal check severity. */
    public static final int WARNING = 1;

    /** The check severity (FATAL or WARNING). */
    private int severity;

    /** The check message. */
    private String message;

    /**
     * The network source producing the check.
     */
    private Object source;

    /**
     * Constructor.
     *
     * @param severityArg the severity of the check. Should be FATAL or WARNING.
     * @param messageArg the message associated with the check.
     * @param objectArg the network component producing the check.
     */
    public NetCheck(int severityArg, String messageArg, Object objectArg) {
        setSeverity(severityArg);
        setMessage(messageArg);
        setSource(objectArg);
    }

    /**
     * Produces a String representation of the check in the form
     * "SEVERITY - SourceClass - instanceName - message".
     * The instance name segment is omitted when blank.
     *
     * @return the String representation of the check.
     */
    public String toString() {
        // Get the class name without the package extension.
        // (lastIndexOf returns -1 when there is no dot, so substring(0) keeps
        // the whole name for classes in the default package.)
        String className = getSource().getClass().getName();
        className = className.substring(1 + className.lastIndexOf("."));
        // Get the instance name. Try to find the Joone interface name if possible.
        String instanceName;
        if (getSource() instanceof NeuralLayer) {
            instanceName = ((NeuralLayer) getSource()).getLayerName();
        } else if (getSource() instanceof InputPatternListener) {
            instanceName = ((InputPatternListener) getSource()).getName();
        } else if (getSource() instanceof OutputPatternListener) {
            instanceName = ((OutputPatternListener) getSource()).getName();
        } else if (getSource() instanceof Monitor) {
            instanceName = "Monitor";
        } else {
            instanceName = getSource().toString();
        }
        // Build up the check message.
        StringBuffer checkMessage = new StringBuffer();
        if (isFatal()) {
            checkMessage.append("FATAL - ");
        } else if (isWarning()) {
            checkMessage.append("WARNING - ");
        }
        checkMessage.append(className);
        checkMessage.append(" - ");
        if (instanceName != null && !instanceName.trim().equals("")) {
            checkMessage.append(instanceName);
            checkMessage.append(" - ");
        }
        checkMessage.append(getMessage());
        return checkMessage.toString();
    }

    /**
     * Method to see if this check is a WARNING.
     *
     * @return true if warning.
     */
    public boolean isWarning() {
        return getSeverity() == WARNING;
    }

    /**
     * Method to see if this check is a FATAL.
     *
     * @return true if fatal.
     */
    public boolean isFatal() {
        return getSeverity() == FATAL;
    }

    /**
     * Orders checks by their string representation when stored in a TreeSet.
     * <p>
     * Previously this method returned 0 for arguments that were not NetChecks,
     * which made every foreign object compare "equal" to this one and violated
     * the symmetry requirement of the Comparable contract. The direct cast now
     * throws the ClassCastException mandated by java.lang.Comparable instead.
     *
     * @param o the object to compare with; must be a NetCheck.
     * @return a negative integer, zero, or a positive integer as this check
     *         orders before, equal to, or after the argument.
     * @throws ClassCastException if o is not a NetCheck.
     */
    public int compareTo(Object o) {
        NetCheck nc = (NetCheck) o;
        return toString().compareTo(nc.toString());
    }

    /**
     * Getter for the object that caused the error.
     * @return the source object
     */
    public Object getSource() {
        return source;
    }

    /**
     * Setter for the object that caused the error.
     * @param source the source object
     */
    public void setSource(Object source) {
        this.source = source;
    }

    /**
     * Getter for the error message.
     * @return the error message
     */
    public String getMessage() {
        return message;
    }

    /**
     * Setter for the error message.
     * @param message the error message
     */
    public void setMessage(String message) {
        this.message = message;
    }

    /**
     * Getter for the error severity.
     *
     * @return the error severity (either NetCheck.FATAL or NetCheck.WARNING)
     */
    public int getSeverity() {
        return severity;
    }

    /**
     * Setter for the error severity.
     *
     * @param severity the error severity (either NetCheck.FATAL or NetCheck.WARNING)
     */
    public void setSeverity(int severity) {
        this.severity = severity;
    }
}
| Java |
/*
* JooneLogger.java
*
* Created on 26 febbraio 2004, 15.52
*/
package org.joone.log;
/**
 * Internal fallback logger that writes to System.out. Use it instead of Log4J
 * by declaring the property
 * -Dorg.joone.logger="org.joone.log.JooneLogger"
 * (LoggerFactory also falls back to this class when no logger is configured).
 *
 * @author PMLMAPA
 */
public class JooneLogger implements ILogger {

    /** The class on whose behalf messages are logged; set by setParentClass(). */
    protected Class pClass;

    /** Creates a new instance of JooneLogger */
    public JooneLogger() {
    }

    // NOTE: the ILogger interface accepts any Object as message. The previous
    // implementation cast the message with (String)obj, which threw a
    // ClassCastException for any non-String argument (Log4JLogger accepts
    // arbitrary objects). String.valueOf() handles any Object, including null.

    /** Logs a DEBUG message to System.out. */
    public void debug(Object obj) {
        System.out.println(formatMsg("DEBUG", String.valueOf(obj)));
    }

    /** Logs a DEBUG message and the throwable's stack trace. */
    public void debug(Object obj, Throwable thr) {
        System.out.println(formatMsg("DEBUG", String.valueOf(obj)));
        thr.printStackTrace();
    }

    /** Logs an ERROR message to System.out. */
    public void error(Object obj) {
        System.out.println(formatMsg("ERROR", String.valueOf(obj)));
    }

    /** Logs an ERROR message and the throwable's stack trace. */
    public void error(Object obj, Throwable thr) {
        System.out.println(formatMsg("ERROR", String.valueOf(obj)));
        thr.printStackTrace();
    }

    /** Logs a FATAL message to System.out. */
    public void fatal(Object obj) {
        System.out.println(formatMsg("FATAL", String.valueOf(obj)));
    }

    /** Logs a FATAL message and the throwable's stack trace. */
    public void fatal(Object obj, Throwable thr) {
        System.out.println(formatMsg("FATAL", String.valueOf(obj)));
        thr.printStackTrace();
    }

    /** Logs an INFO message to System.out. */
    public void info(Object obj) {
        System.out.println(formatMsg("INFO", String.valueOf(obj)));
    }

    /** Logs an INFO message and the throwable's stack trace. */
    public void info(Object obj, Throwable thr) {
        System.out.println(formatMsg("INFO", String.valueOf(obj)));
        thr.printStackTrace();
    }

    /** Logs a WARN message to System.out. */
    public void warn(Object obj) {
        System.out.println(formatMsg("WARN", String.valueOf(obj)));
    }

    /** Logs a WARN message and the throwable's stack trace. */
    public void warn(Object obj, Throwable thr) {
        System.out.println(formatMsg("WARN", String.valueOf(obj)));
        thr.printStackTrace();
    }

    /** Stores the class whose name is prepended to every formatted message. */
    public void setParentClass(Class cls) {
        pClass = cls;
    }

    /**
     * Formats a log line as "[thread] [SEVERITY] - class - message".
     * NOTE(review): assumes setParentClass() has been called first; pClass.getName()
     * throws a NullPointerException otherwise — LoggerFactory guarantees this today.
     */
    protected String formatMsg(String sev, String msg) {
        return "["+Thread.currentThread().getName()+"] ["+sev+"] - "
                +pClass.getName()+" - "+msg;
    }
}
| Java |
/*
* Logger.java
*
* Created on 26 feb 2004, 15.17
*/
package org.joone.log;
/**
 * Interface that defines the public methods of the logger object.
 * To be fully compatible with apache Log4j, its interface derivates
 * from the org.apache.log4j.Category class.
 *
 * Messages are passed as Object (not String) to mirror the Log4J API;
 * implementations must accept any object.
 *
 * @author P.Marrone
 */
public interface ILogger {
    /** Sets the class on whose behalf messages are logged; called once,
     * right after construction (see LoggerFactory.getLogger). */
    void setParentClass(Class cls);
    /** Logs a message at DEBUG level. */
    void debug(Object obj);
    /** Logs a message at DEBUG level together with a throwable. */
    void debug(Object obj, Throwable thr);
    /** Logs a message at ERROR level. */
    void error(Object obj);
    /** Logs a message at ERROR level together with a throwable. */
    void error(Object obj, Throwable thr);
    /** Logs a message at FATAL level. */
    void fatal(Object obj);
    /** Logs a message at FATAL level together with a throwable. */
    void fatal(Object obj, Throwable thr);
    /** Logs a message at INFO level. */
    void info(Object obj);
    /** Logs a message at INFO level together with a throwable. */
    void info(Object obj, Throwable thr);
    /** Logs a message at WARN level. */
    void warn(Object obj);
    /** Logs a message at WARN level together with a throwable. */
    void warn(Object obj, Throwable thr);
}
| Java |
/*
* Log4JLogger.java
*
* Created on 26 febbraio 2004, 15.26
*/
package org.joone.log;
import org.apache.log4j.Logger;
/**
 * Logger that uses Apache's Log4J to log the messages.
 * Use it by declaring the property
 * -Dorg.joone.logger="org.joone.log.Log4JLogger"
 * @author PMLMAPA
 */
public class Log4JLogger implements ILogger {

    /** The underlying Log4J logger. It is null until setParentClass() is
     * called; LoggerFactory always calls setParentClass() right after
     * construction, so in normal use the delegate is initialized before
     * any logging method runs. */
    private Logger log = null;

    /** Creates a new instance of Log4JLogger */
    public Log4JLogger() {
    }

    // All logging methods below delegate 1:1 to the Log4J logger.

    public void debug(Object obj) {
        log.debug(obj);
    }

    public void debug(Object obj, Throwable thr) {
        log.debug(obj, thr);
    }

    public void error(Object obj) {
        log.error(obj);
    }

    public void error(Object obj, Throwable thr) {
        log.error(obj, thr);
    }

    public void fatal(Object obj) {
        log.fatal(obj);
    }

    public void fatal(Object obj, Throwable thr) {
        log.fatal(obj, thr);
    }

    public void info(Object obj) {
        log.info(obj);
    }

    public void info(Object obj, Throwable thr) {
        log.info(obj, thr);
    }

    public void warn(Object obj) {
        log.warn(obj);
    }

    public void warn(Object obj, Throwable thr) {
        log.warn(obj, thr);
    }

    /** Creates the Log4J delegate bound to the given class. */
    public void setParentClass(Class cls) {
        log = Logger.getLogger( cls );
    }
}
| Java |
/*
* Logger.java
*
* Created on 26 febbraio 2004, 15.03
*/
package org.joone.log;
/**
* Class used to decouple the engine's logging requests from
* the libraries that expose the logging service.
*
* @author P.Marrone
*/
public class LoggerFactory {
/** Method to get the Logger to use to print out the log messages.
* @return The instance of the Logger
* @param cls The Class of the calling object
*/
public static ILogger getLogger(Class cls) {
ILogger iLog = null;
String logger = null;
// If JOONE is loaded in applet environment, we need
// to take into consideration on the security exception issues.
try {
logger = System.getProperty("org.joone.logger");
if (logger != null) {
iLog = (ILogger)Class.forName(logger).newInstance();
iLog.setParentClass(cls);
}
}
catch(java.security.AccessControlException e) {
// Do nothing. Let it falls through the below code
// to become JooneLogger object.
}
catch (ClassNotFoundException cnfe) {
cnfe.printStackTrace();
return null;
}
catch (InstantiationException ie) {
ie.printStackTrace();
return null;
}
catch (IllegalAccessException iae) {
iae.printStackTrace();
return null;
}
if (logger == null) {
iLog = new JooneLogger();
iLog.setParentClass(cls);
}
return iLog;
}
} | Java |
/*
* Nakayama.java
*
* Created on October 22, 2004, 1:30 PM
*/
package org.joone.structure;
import java.util.*;
import org.joone.engine.*;
import org.joone.engine.listeners.*;
import org.joone.log.*;
import org.joone.net.*;
/**
* This class performs the method of optimizing activation functions as described
* in: <p>
* K.Nakayama and Y.Kimura, <i>"Optimization of activation functions in multilayer
* neural network applied to pattern classification"</i>, Proc. IEEE ICNN'94 Florida,
* pp.431-436, June 1994.
* <p>
* <p>
 * <i>This technique probably fails whenever the <code>NeuralNet.join()</code> method
 * is called, because this optimization technique stops the network in order to perform
 * the optimization; use a <code>NeuralNetListener</code> instead.</i>
*
*
* @author Boris Jansen
*/
public class Nakayama implements NeuralNetListener, NeuralValidationListener, ConvergenceListener, java.io.Serializable {
/** Logger for this class. */
private static final ILogger log = LoggerFactory.getLogger(Nakayama.class);

/** Constant indicating the neuron will not be removed (should be zero). */
private static final int NO_REMOVE = 0;

/** Constant indicating a neuron will be removed based on its information value Ij. */
private static final int INFO_REMOVE = 1;

/** Constant indicating a neuron will be removed based on its variance value Vj. */
private static final int VARIANCE_REMOVE = 2;

/** Constant indicating a neuron MIGHT be removed based on its correlation value Rjmin. */
private static final int CORRELATION_POSSIBLE_REMOVE = 3;

/** Constant indicating a neuron WILL be removed based on its correlation value Rjmin. */
private static final int CORRELATION_REMOVE = 4;

/** Constant indicating that a neuron is removed. Must be negative so it can
 * never collide with the positive "pending" statuses above. */
private static final int REMOVE_DONE = -1;

/** List with layers to be optimized (layers should be on the same (layer) level). */
private List layers = new ArrayList();

/** Flag to remember if the network was running when the request come to optimize
 * the network. If so we will re-start it after optimization. */
private boolean isRunning;

/** The net to optimize. */
private NeuralNet net;

/** The clone of a network to collect information (outputs of neurons after a pattern has been
 * forwarded). The clone is used so the live network is not disturbed while sampling. */
private NeuralNet clone;

/** The threshold to decide if the neuron should be deleted or not, i.e. if
 * Cj = Ij / max-j{Ij} * Vj / max-j{Vj} * Rjmin <= epsilon. */
private double epsilon = 0.05; // default value

/** The original listeners of the neural network that will be temporarily removed.
 * We will add them again after optimization when we restart the network. */
private List listeners = new ArrayList();

/** Holds the outputs of the neurons after each pattern [pattern->layer->neuron]. */
private List outputsAfterPattern;

/** Holds the information for every neuron (excluding input and output neurons)
 * to its output layers [layer->neuron]. */
private List information;

/** The maximum value of the information from a neuron to its output layer. */
private double infoMax;

/** Holds the average output over all patterns for every neuron (excluding input and
 * output neurons) [layer->neuron]. */
private List averageOutputs;

/** Holds the variance for every neuron [layer->neuron]. */
private List variance;

/** The maximum variance value. */
private double varianceMax;

/** Holds the gamma value for every neuron [layer->neuron->layer->neuron]. */
private List gamma;

/** Flag indicating if the network is optimized, i.e. if neurons are removed. */
private boolean optimized = false;
/**
 * Creates a new instance of Nakayama.
 *
 * @param aNet the network to be optimized; kept by reference, so the
 *             optimizer mutates this network in place when run.
 */
public Nakayama(NeuralNet aNet) {
    net = aNet;
}
/**
 * Adds a layer to this optimizer. The layer will be optimized. All layers
 * added to this optimizer should be on the same (layer) level, otherwise
 * the optimization does not make sense.
 *
 * @param aLayer the layer to be added.
 */
public void addLayer(Layer aLayer) {
    layers.add(aLayer);
}
/**
 * Adds all the hidden layers of the given network to this optimizer. The
 * network should consist of only one hidden level, i.e. all hidden layers
 * must be on the same level, otherwise this method does not make sense.
 * If the hidden layers are not all on the same level, add the layers of a
 * single level individually by using {@link #addLayer(Layer)}.
 *
 * @param aNeuralNet the network holding the hidden layers.
 */
public void addLayers(NeuralNet aNeuralNet) {
    Object myInput = aNeuralNet.getInputLayer();
    Object myOutput = aNeuralNet.getOutputLayer();
    List myAllLayers = aNeuralNet.getLayers();
    // Every layer except the input and the output layer is considered hidden.
    for (Iterator myIt = myAllLayers.iterator(); myIt.hasNext(); ) {
        Object myCandidate = myIt.next();
        if (myCandidate != myInput && myCandidate != myOutput) {
            layers.add(myCandidate);
        }
    }
}
/**
 * Optimizes the activation functions of the neural network.
 *
 * NOTE(review): when the network is running, this method only requests the
 * stop and the actual optimization happens asynchronously (from
 * cicleTerminated()), so the returned flag reflects the state at return
 * time and is likely still false in that case — confirm with callers.
 *
 * @return whether the network was optimized or not, i.e. if neurons were
 *         deleted or not.
 */
public boolean optimize() {
    // init (throw away any old values from previous optimization round)
    outputsAfterPattern = new ArrayList();
    information = new ArrayList();
    infoMax = 0;
    averageOutputs = new ArrayList();
    variance = new ArrayList();
    varianceMax = 0;
    gamma = new ArrayList();
    optimized = false;

    log.debug("Optimization request [cycle : " + net.getMonitor().getCurrentCicle() + "]");
    isRunning = net.isRunning();
    if(isRunning) {
        // Detach the user's listeners and install ourselves so we get the
        // stop notification; they are restored in cleanUp().
        log.debug("Stopping network...");
        removeAllListeners();
        net.addNeuralNetListener(this);
        net.getMonitor().Stop();
        // runValidation() will be called from cicleTerminated() after the network has been stopped
    } else {
        runValidation();
    }
    return optimized;
}
/**
 * Runs a clone of the network through a validator so that information
 * related to the different patterns can be collected. Every time a pattern
 * has been forwarded through the network, patternFinished() is invoked to
 * record the neuron outputs.
 */
protected void runValidation() {
    // setExporting is toggled around cloneNet() so the clone carries the
    // state needed for exporting; the original monitor is restored after.
    net.getMonitor().setExporting(true);
    clone = net.cloneNet();
    net.getMonitor().setExporting(false);
    clone.removeAllListeners();
    // add the following synapse so everytime a pattern has been forwarded through
    // the network patternFinished() is called so we can collect certain info
    clone.getOutputLayer().addOutputSynapse(new PatternForwardedSynapse(this));
    // run the network to collect information
    log.debug("Validating network...");
    NeuralNetValidator myValidator = new NeuralNetValidator(clone);
    myValidator.addValidationListener(this);
    myValidator.useTrainingData(true); // just use the normal training data
    myValidator.start();
}
/**
 * Optimizes the activation functions of the network: evaluates all neurons,
 * selects and removes the redundant ones, then cleans up (restores the
 * listeners and, if needed, restarts the net).
 */
protected void doOptimize() {
    log.debug("Optimizing...");
    evaluateNeurons();
    selectNeurons();
    log.debug("Optimization done.");
    cleanUp();
    // debug info: report the resulting size of every layer of the net
    Layer myLayer;
    for(int i = 0; i < net.getLayers().size(); i++) {
        myLayer = (Layer)net.getLayers().get(i);
        log.debug("Layer [" + myLayer.getClass().getName() + "] - neurons : " + myLayer.getRows());
    }
    // end debug
}
/**
 * This method is called after optimization: releases the collected
 * statistics, drops emptied layers, restores the original listeners and
 * restarts the net when it was running before the optimization request.
 */
protected void cleanUp() {
    log.debug("Cleaning up...");
    outputsAfterPattern = null;
    information = null;
    averageOutputs = null;
    variance = null;
    gamma = null;
    clone = null;

    // remove layers that have no neurons left after the pruning
    Layer myLayer;
    for(int i = 0; i < layers.size(); i++) {
        myLayer = (Layer)layers.get(i);
        if(myLayer.getRows() == 0) {
            log.debug("Remove layer [" + myLayer.getClass().getName() + "]");
            net.removeLayer(myLayer);
            layers.remove(i);
            i--; // layer is removed so index changes
        }
    }
    net.removeNeuralNetListener(this);
    restoreAllListeners();
    log.debug("Clean ;)");
    if(isRunning) {
        log.debug("Restarting net...");
        net.start();
        net.getMonitor().runAgain();
    }
}
/**
 * Selects neurons to be deleted based on the information calculated by
 * <code>evaluateNeurons()</code>, and performs the deletion.
 *
 * A neuron is a removal candidate when the product of its scaled
 * information Ij^, scaled variance Vj^ and minimum correlation Rjmin is
 * below epsilon; the smallest of the three factors determines the removal
 * strategy (plain removal, variance-based weight transfer, or
 * correlation-based weight merge).
 *
 * NOTE(review): the layout of the myMinCorrelation array (index 0 = Rjj',
 * 1/2 = layer+neuron j, 3/4 = layer+neuron j', 5 = sign flag telling which
 * of the pair is the candidate) is inferred from its usage here; it is
 * produced by getMinCorrelation(), which is defined elsewhere — confirm.
 */
protected void selectNeurons() {
    log.debug("Selecting neurons...");
    Layer myLayer;
    List myStatuses = new ArrayList(); // will hold the status for every neuron
    int [] myStatus; // holds the status of neurons for a single layer
    double myScaledInfo, myScaledVariance; // scaled info Ij^ = Ij / max-j{Ij}, scaled variance Vj^ = Vj / max-j{Vj}
    double[] myMinCorrelation; // array holding the minimum correlation, together with the index of the neuron j and j'
                               // which have the minimum correlation
    List myMinCorrelationPointers = new ArrayList(); // if the min correlation Rjj' is the lowest of Ij^, Vj^ and Rjj'
                               // then we save the min correlation info, because the neuron that
                               // is part of this min correlation needs more investigation later
                               // to decide if the neuron should be deleted or not

    // Phase 1: classify every neuron of every registered layer.
    for(int i = 0; i < layers.size(); i++) {
        myLayer = (Layer)layers.get(i);
        myStatus = new int[myLayer.getRows()]; // initially all elements equal 0, so status is NO_REMOVE
        for(int n = 0; n < myLayer.getRows(); n++) {
            myScaledInfo = ((double[])information.get(i))[n] / infoMax;
            myScaledVariance = ((double[])variance.get(i))[n] / varianceMax;
            myMinCorrelation = getMinCorrelation(i, n);
            // log.debug("Info : " + myScaledInfo + ", variance : " + myScaledVariance + ", correlation : " + myMinCorrelation[0]);
            if(myScaledInfo * myScaledVariance * myMinCorrelation[0] <= epsilon) {
                if(myScaledInfo <= myScaledVariance && myScaledInfo <= myMinCorrelation[0]) {
                    myStatus[n] = INFO_REMOVE; // scaled info is the smallest, so neuron should be removed based on its info
                } else if(myScaledVariance < myScaledInfo && myScaledVariance <= myMinCorrelation[0]) {
                    myStatus[n] = VARIANCE_REMOVE; // scaled variance is the smallest, so neuron should be removed based on its variance
                } else {
                    myStatus[n] = CORRELATION_POSSIBLE_REMOVE; // set it to possible remove, because it needs more
                                                               // examination to decide if it really should be removed.
                    myMinCorrelationPointers.add(myMinCorrelation); // save the pointer for later investigation
                }
            }
        }
        myStatuses.add(myStatus);
    }

    // Phase 2: investigate which CORRELATION_POSSIBLE_REMOVE neurons should really be removed.
    // Of each correlated pair, only one neuron is removed: the one with the lower
    // variance; the other takes over its weights (see weightsUpdateCorrelation).
    List myCorrelations = new ArrayList(); // this list will hold the arrays indicating the index of the neuron that will be deleted
                                           // together with the index of the neuron it was most closely correlated to (this neuron will
                                           // take over the weights of the other neuron that will be deleted).
    int mySingleStatus; // status of a single neuron
    for(int i = 0; i < myMinCorrelationPointers.size(); i++) {
        myMinCorrelation = (double[])myMinCorrelationPointers.get(i);
        if(myMinCorrelation[5] < 0 && // also check if status is still CORRELATION_POSSIBLE_REMOVE, it might have been changed
            ((int[])myStatuses.get((int)myMinCorrelation[1]))[(int)myMinCorrelation[2]] == CORRELATION_POSSIBLE_REMOVE)
        {
            // position 1, 2 contain the argument (CORRELATION_POSSIBLE_REMOVE) which should
            // be checked with status neuron argument 3, 4
            mySingleStatus = ((int[])myStatuses.get((int)myMinCorrelation[3]))[(int)myMinCorrelation[4]];
            if(mySingleStatus == INFO_REMOVE || mySingleStatus == VARIANCE_REMOVE || mySingleStatus == CORRELATION_REMOVE) {
                // neuron that caused the minimum correlation will be removed, so we don't need to remove the other neuron
                ((int[])myStatuses.get((int)myMinCorrelation[1]))[(int)myMinCorrelation[2]] = NO_REMOVE;
            } else if(((double[])variance.get((int)myMinCorrelation[1]))[(int)myMinCorrelation[2]] <=
                ((double[])variance.get((int)myMinCorrelation[3]))[(int)myMinCorrelation[4]])
            {
                ((int[])myStatuses.get((int)myMinCorrelation[1]))[(int)myMinCorrelation[2]] = CORRELATION_REMOVE;
                myCorrelations.add(new int[] {(int)myMinCorrelation[1], (int)myMinCorrelation[2], (int)myMinCorrelation[3], (int)myMinCorrelation[4]});
            } else {
                ((int[])myStatuses.get((int)myMinCorrelation[1]))[(int)myMinCorrelation[2]] = NO_REMOVE;
                ((int[])myStatuses.get((int)myMinCorrelation[3]))[(int)myMinCorrelation[4]] = CORRELATION_REMOVE;
                myCorrelations.add(new int[] {(int)myMinCorrelation[3], (int)myMinCorrelation[4], (int)myMinCorrelation[1], (int)myMinCorrelation[2]});
            }
        } else if(myMinCorrelation[5] > 0 && // also check if status is still CORRELATION_POSSIBLE_REMOVE, it might have been changed
            ((int[])myStatuses.get((int)myMinCorrelation[3]))[(int)myMinCorrelation[4]] == CORRELATION_POSSIBLE_REMOVE)
        {
            mySingleStatus = ((int[])myStatuses.get((int)myMinCorrelation[1]))[(int)myMinCorrelation[2]];
            if(mySingleStatus == INFO_REMOVE || mySingleStatus == VARIANCE_REMOVE || mySingleStatus == CORRELATION_REMOVE) {
                ((int[])myStatuses.get((int)myMinCorrelation[3]))[(int)myMinCorrelation[4]] = NO_REMOVE;
            } else if(((double[])variance.get((int)myMinCorrelation[1]))[(int)myMinCorrelation[2]] >=
                ((double[])variance.get((int)myMinCorrelation[3]))[(int)myMinCorrelation[4]])
            {
                ((int[])myStatuses.get((int)myMinCorrelation[3]))[(int)myMinCorrelation[4]] = CORRELATION_REMOVE;
                myCorrelations.add(new int[] {(int)myMinCorrelation[3], (int)myMinCorrelation[4], (int)myMinCorrelation[1], (int)myMinCorrelation[2]});
            } else {
                ((int[])myStatuses.get((int)myMinCorrelation[3]))[(int)myMinCorrelation[4]] = NO_REMOVE;
                ((int[])myStatuses.get((int)myMinCorrelation[1]))[(int)myMinCorrelation[2]] = CORRELATION_REMOVE;
                myCorrelations.add(new int[] {(int)myMinCorrelation[1], (int)myMinCorrelation[2], (int)myMinCorrelation[3], (int)myMinCorrelation[4]});
            }
        }
    }

    // Phase 3: perform the deletions. 'myNeuron' tracks the live index of the
    // neuron inside the (shrinking) layer while 'n' tracks the original index.
    int myNeuron; // used to index neurons taking into account the effect of neurons deleted before
    for(int l = 0; l < myStatuses.size(); l++) {
        myStatus = (int[])myStatuses.get(l);
        myNeuron = 0;
        for(int n = 0; n < myStatus.length; n++) {
            if(myStatus[n] == INFO_REMOVE) {
                log.debug("Remove[info]: " + l + " " + n);
                removeNeuron(l, myNeuron);
                optimized = true; // neurons are moved, so the network is really optimized (changed)
                myStatus[n] = REMOVE_DONE;
            } else if(myStatus[n] == VARIANCE_REMOVE) {
                log.debug("Remove[variance]: " + l + " " + n);
                weightsUpdateVariance(l, n, myNeuron);
                removeNeuron(l, myNeuron);
                optimized = true;
                myStatus[n] = REMOVE_DONE;
            } else if(myStatus[n] == CORRELATION_REMOVE) {
                log.debug("Remove[correlation]: " + l + " " + n);
                weightsUpdateCorrelation(myStatuses, myCorrelations, l, n);
                removeNeuron(l, myNeuron);
                optimized = true;
                myStatus[n] = REMOVE_DONE;
            } else if(myStatus[n] == NO_REMOVE) {
                // neuron is not removed so move to next neuron in layer
                myNeuron++;
            }
        }
    }
    log.debug("Selection done.");
}
/**
 * Updates weights before a neuron is removed (because of its similar correlation).
 * The correlated surviving neuron takes over the outgoing weights of the
 * removed neuron, and the biases of the output layer are compensated:
 *   wjk = wjk + a * wj'k
 *   bk  = bk + wj'k * (_vj' - a * _vj)
 *   a   = sign(gammajj') * sqrt(Vj' / Vj)
 *
 * NOTE(review): if findCorrelation() returns null on the first call,
 * myCorrelatedNeuron stays null and this method throws a NullPointerException;
 * presumably the caller (selectNeurons) only invokes this for neurons that are
 * registered in aCorrelations — confirm.
 * NOTE(review): the denominator of myAlpha reads variance.get(aLayer) but
 * indexes it with myCorrelatedNeuron[1]; it looks like it should read
 * variance.get(myCorrelatedNeuron[0]) — harmless only when both neurons live
 * in the same layer entry. Verify against the Nakayama/Kimura paper.
 * NOTE(review): mySynapseCorrelation is not reset to null at the top of the
 * outer loop, so for the second and later output synapses the inner search is
 * skipped and the synapse found for the first output layer is reused — confirm
 * whether multi-output layers are supported here.
 *
 * @param aStatuses the status of the neurons (used to find the correct neuron taking into account
 * any deletions of neurons).
 * @param aCorrelations a list holding all the correlations (neurons to be removed and the
 * correlated neuron (which will take over the weights)).
 * @param aLayer the layer of the neuron to be removed.
 * @param aNeuron the neuron to be removed.
 */
protected void weightsUpdateCorrelation(List aStatuses, List aCorrelations, int aLayer, int aNeuron) {
    int [] myCorrelatedNeuron = null, myTemp = findCorrelation(aCorrelations, aLayer, aNeuron);
    // the correlated neuron of the neuron to be removed might be part of another correlation and will be removed
    // so we'll search in the chain of correlated neurons for the neuron that will not be removed (myCorrelatedNeuron)
    while(myTemp != null) {
        myCorrelatedNeuron = myTemp;
        myTemp = findCorrelation(aCorrelations, myCorrelatedNeuron[0], myCorrelatedNeuron[1]);
    }

    // take into account any deletions of previous neurons
    int myAdjustedNeuron = findIndex(aStatuses, aLayer, aNeuron);
    int myAdjustedCorrelatedNeuron = findIndex(aStatuses, myCorrelatedNeuron[0], myCorrelatedNeuron[1]);

    // the weights of the correlated neuron j will be updated in the following way taking into account
    // the effect of the neuron j that will be removed:
    // wjk = wjk + a * wj'k
    // bk = bk + wj'k * (_vj' - a * _vj)
    // a = sign(gammajj') * {Vj' / Vj}^1/2
    double myAlpha = (getGamma(aLayer, aNeuron, myCorrelatedNeuron[0], myCorrelatedNeuron[1]) >= 0 ? 1 : -1) *
        Math.sqrt(((double[])variance.get(aLayer))[aNeuron] / ((double[])variance.get(aLayer))[myCorrelatedNeuron[1]]);

    NeuralElement myElement, myElementCorrelation;
    Synapse mySynapse, mySynapseCorrelation = null;
    Matrix myBiases, myWeights, myWeightsCorrelation;
    Layer myOutputLayer, myLayer = (Layer)layers.get(aLayer), myLayerCorrelation = (Layer)layers.get(myCorrelatedNeuron[0]);

    if(myLayer.getAllOutputs().size() != myLayerCorrelation.getAllInputs().size()) {
        throw new org.joone.exception.JooneRuntimeException("Unable to optimize. #output layers for neuron and correlated neuron are not equal.");
    }
    for(int i = 0; i < myLayer.getAllOutputs().size(); i++) {
        myElement = (NeuralElement)myLayer.getAllOutputs().get(i);
        if(!(myElement instanceof Synapse)) {
            // TODO how to deal with outputs that are not synpases?
            throw new org.joone.exception.JooneRuntimeException("Unable to optimize. Output of layer is not a synapse.");
        }
        mySynapse = (Synapse)myElement;
        myOutputLayer = findOutputLayer(mySynapse);

        // find synapse from correlation layer to the same output layer
        for(int j = 0; j < myLayerCorrelation.getAllOutputs().size() && mySynapseCorrelation == null; j++) {
            myElementCorrelation = (NeuralElement)myLayerCorrelation.getAllOutputs().get(j);
            if(myElementCorrelation instanceof Synapse) {
                mySynapseCorrelation = (Synapse)myElementCorrelation;
                if(findOutputLayer(mySynapseCorrelation) != myOutputLayer) {
                    mySynapseCorrelation = null;
                }
            }
        }
        if(mySynapseCorrelation == null) {
            throw new org.joone.exception.JooneRuntimeException("Unable to optimize. Unable to find same output layer for correlated layer.");
        }

        myBiases = myOutputLayer.getBias();
        myWeights = mySynapse.getWeights();
        myWeightsCorrelation = mySynapseCorrelation.getWeights();
        for(int r = 0; r < myOutputLayer.getRows(); r++) {
            // compensate the bias for the contribution the removed neuron made on average
            myBiases.value[r][0] += myWeights.value[myAdjustedNeuron][r] *
                (((double[])averageOutputs.get(aLayer))[aNeuron] - myAlpha * ((double[])averageOutputs.get(myCorrelatedNeuron[0]))[myCorrelatedNeuron[1]]);
            myBiases.delta[r][0] = 0;
            // the surviving neuron absorbs the removed neuron's outgoing weight
            myWeightsCorrelation.value[myAdjustedCorrelatedNeuron][r] += myWeights.value[myAdjustedNeuron][r];
            myWeightsCorrelation.delta[myAdjustedCorrelatedNeuron][r] = 0;
        }
    }
}
/**
 * Gets gammajj' (which is equal to gammaj'j). The gamma table stores a
 * value only for the canonically ordered pair — (layer, neuron) ascending —
 * so the arguments are normalized into (lo, hi) order before the lookup.
 *
 * @param aLayer1 the layer of neuron j.
 * @param aNeuron1 the neuron j.
 * @param aLayer2 the layer of neuron j'.
 * @param aNeuron2 the neuron j'.
 * @return gammajj'.
 */
protected double getGamma(int aLayer1, int aNeuron1, int aLayer2, int aNeuron2) {
    int loLayer = aLayer1;
    int loNeuron = aNeuron1;
    int hiLayer = aLayer2;
    int hiNeuron = aNeuron2;
    boolean outOfOrder = aLayer1 > aLayer2 || (aLayer1 == aLayer2 && aNeuron1 > aNeuron2);
    if (outOfOrder) {
        loLayer = aLayer2;
        loNeuron = aNeuron2;
        hiLayer = aLayer1;
        hiNeuron = aNeuron1;
    }
    return ((double[]) ((List[]) gamma.get(loLayer))[loNeuron].get(hiLayer))[hiNeuron];
}
/**
 * Maps a neuron index from the original network onto the shrunken network by
 * compensating for neurons removed before it in the same layer.
 *
 * @param aStatuses the per-layer status arrays of the neurons.
 * @param aLayer the layer of the neuron.
 * @param aNeuron the index of the neuron, not considering any deletions.
 * @return the index shifted down by the number of earlier removed neurons.
 */
protected int findIndex(List aStatuses, int aLayer, int aNeuron) {
    int[] myLayerStatuses = (int[])aStatuses.get(aLayer);
    int myRemoved = 0;
    for(int i = 0; i < aNeuron; i++) {
        if(myLayerStatuses[i] == REMOVE_DONE) {
            myRemoved++;
        }
    }
    return aNeuron - myRemoved;
}
/**
 * Looks up the partner of a neuron in a list of correlation records. Each
 * record is an int[4]: {layer, neuron, partnerLayer, partnerNeuron}.
 *
 * @param aCorrelations the list holding the correlation records.
 * @param aLayer the layer of the neuron to find the correlation for.
 * @param aNeuron the neuron to find the correlation for.
 * @return {partnerLayer, partnerNeuron} of the first record matching
 * <code>(aLayer, aNeuron)</code>, or <code>null</code> when no record matches.
 */
protected int[] findCorrelation(List aCorrelations, int aLayer, int aNeuron) {
    Iterator myIterator = aCorrelations.iterator();
    while(myIterator.hasNext()) {
        int[] myCorrelation = (int[])myIterator.next();
        if(myCorrelation[0] == aLayer && myCorrelation[1] == aNeuron) {
            return new int[] {myCorrelation[2], myCorrelation[3]};
        }
    }
    return null;
}
/**
* Updates weights before a neuron is removed (because of its low variance).
* The neuron's constant contribution (its average output times each outgoing
* weight) is folded into the biases of the downstream layer, so removing the
* neuron changes the network output as little as possible.
*
* @param aLayer the index of the layer of the neuron.
* @param aNeuronOriginal the index of the neuron to be removed (NOT taking into account
* previous deletions).
* @param aNeuron the index of the neuron to be removed (taking into account previous
* deletions).
*/
protected void weightsUpdateVariance(int aLayer, int aNeuronOriginal, int aNeuron) {
// the biases of the neurons in the output layer will be updated by taking
// the effects of the neuron that will be removed into account:
// bk = bk + wjk * _vj, b = bias, k = index output neuron, j = aNeuron,
// _vj is average output neuron aNeuron
NeuralElement myElement;
Synapse mySynapse;
Matrix myBiases, myWeights;
double myAverageOutput;
Layer myOutputLayer, myLayer = (Layer)layers.get(aLayer);
// apply the compensation through every output synapse of the neuron's layer
for(int i = 0; i < myLayer.getAllOutputs().size(); i++) {
myElement = (NeuralElement)myLayer.getAllOutputs().get(i);
if(!(myElement instanceof Synapse)) {
// TODO how to deal with outputs that are not synpases?
throw new org.joone.exception.JooneRuntimeException("Unable to optimize. Output of layer is not a synapse.");
}
mySynapse = (Synapse)myElement;
myOutputLayer = findOutputLayer(mySynapse);
myBiases = myOutputLayer.getBias();
myWeights = mySynapse.getWeights();
// NOTE: averageOutputs is indexed with the ORIGINAL neuron index, while the
// weight matrix uses the index adjusted for earlier deletions (aNeuron).
myAverageOutput = ((double[])averageOutputs.get(aLayer))[aNeuronOriginal];
for(int r = 0; r < myOutputLayer.getRows(); r++) {
myBiases.value[r][0] += myWeights.value[aNeuron][r] * myAverageOutput;
// reset the pending delta so training does not re-apply a stale update
myBiases.delta[r][0] = 0;
}
}
}
/**
* Removes a neuron. If the neuron is the last one of its layer, the layer's
* input and output synapses are disconnected instead of just shrinking the
* weight matrices (the layer object itself is removed after optimization,
* see the comment in the else-branch).
*
* @param aLayer the index of the layer in which we should remove the neuron.
* @param aNeuron the index of the neuron to be removed (taking into account previous
* deletions).
*/
protected void removeNeuron(int aLayer, int aNeuron) {
Layer myLayer = (Layer)layers.get(aLayer);
NeuralElement myElement;
Synapse mySynapse;
Matrix myWeights;
if(myLayer.getRows() > 1) {
// shrink incoming synapses: drop the weight column feeding this neuron
for(int i = 0; i < myLayer.getAllInputs().size(); i++) {
myElement = (NeuralElement)myLayer.getAllInputs().get(i);
if(!(myElement instanceof Synapse)) {
// TODO how to deal with inputs that are not synpases?
throw new org.joone.exception.JooneRuntimeException("Unable to optimize. Input of layer is not a synapse.");
}
mySynapse = (Synapse)myElement;
myWeights = mySynapse.getWeights();
myWeights.removeColumn(aNeuron);
mySynapse.setOutputDimension(mySynapse.getOutputDimension() - 1);
mySynapse.setWeights(myWeights);
}
// shrink outgoing synapses: drop the weight row leaving this neuron
for(int i = 0; i < myLayer.getAllOutputs().size(); i++) {
myElement = (NeuralElement)myLayer.getAllOutputs().get(i);
if(!(myElement instanceof Synapse)) {
// TODO how to deal with outputs that are not synpases?
throw new org.joone.exception.JooneRuntimeException("Unable to optimize. Output of layer is not a synapse.");
}
mySynapse = (Synapse)myElement;
myWeights = mySynapse.getWeights();
myWeights.removeRow(aNeuron);
mySynapse.setInputDimension(mySynapse.getInputDimension() - 1);
mySynapse.setWeights(myWeights);
}
// finally drop the neuron's bias entry and shrink the layer itself
myWeights = myLayer.getBias();
myWeights.removeRow(aNeuron);
myLayer.setRows(myLayer.getRows() - 1);
myLayer.setBias(myWeights);
} else {
// we are going to remove the last neuron so remove the layer and its input an output synapses
for(int i = 0; i < myLayer.getAllInputs().size(); i++) {
myElement = (NeuralElement)myLayer.getAllInputs().get(i);
if(!(myElement instanceof Synapse)) {
// TODO how to deal with inputs that are not synpases?
throw new org.joone.exception.JooneRuntimeException("Unable to optimize. Input of layer is not a synapse.");
}
mySynapse = (Synapse)myElement;
Layer myInputLayer = findInputLayer(mySynapse);
myInputLayer.removeOutputSynapse(mySynapse);
}
for(int i = 0; i < myLayer.getAllOutputs().size(); i++) {
myElement = (NeuralElement)myLayer.getAllOutputs().get(i);
if(!(myElement instanceof Synapse)) {
// TODO how to deal with outputs that are not synpases?
throw new org.joone.exception.JooneRuntimeException("Unable to optimize. Output of layer is not a synapse.");
}
mySynapse = (Synapse)myElement;
Layer myOutputLayer = findOutputLayer(mySynapse);
myOutputLayer.removeInputSynapse(mySynapse);
}
// if we remove the layer here from the network the index of layers goes out of order,
// after optimization is done we will remove the layer
myWeights = myLayer.getBias();
myWeights.removeRow(aNeuron);
myLayer.setRows(myLayer.getRows() - 1);
myLayer.setBias(myWeights);
}
}
/**
 * Finds the input layer of a synapse, i.e. the layer that lists the synapse
 * among its outputs.
 *
 * @param aSynapse the synapse to find the input layer for.
 * @return the feeding layer, or <code>null</code> when no layer of the
 * network owns the synapse as an output.
 */
protected Layer findInputLayer(Synapse aSynapse) {
    Iterator myIterator = net.getLayers().iterator();
    while(myIterator.hasNext()) {
        Layer myCandidate = (Layer)myIterator.next();
        if(myCandidate.getAllOutputs().contains(aSynapse)) {
            return myCandidate;
        }
    }
    return null;
}
/**
 * Finds the output layer of a synapse, i.e. the layer that lists the synapse
 * among its inputs.
 *
 * @param aSynapse the synapse to find the output layer for.
 * @return the receiving layer, or <code>null</code> when no layer of the
 * network owns the synapse as an input.
 */
protected Layer findOutputLayer(Synapse aSynapse) {
    Iterator myIterator = net.getLayers().iterator();
    while(myIterator.hasNext()) {
        Layer myCandidate = (Layer)myIterator.next();
        if(myCandidate.getAllInputs().contains(aSynapse)) {
            return myCandidate;
        }
    }
    return null;
}
/**
* Evaluates neurons, that is, this function calculates information related to
* the contribution of the activation functions, based on the following three
* criteria: <br>
* <ul>
* <li>Information from neurons to its output layers. </li>
* <li>Variance of the output of the neurons.</li>
* <li>Correlation between outputs of neurons.</li>
* </ul>
* This information will be used in a next stage to select neurons to delete.
* Results are accumulated into the fields <code>information</code>,
* <code>averageOutputs</code>, <code>variance</code>, <code>gamma</code>,
* <code>infoMax</code> and <code>varianceMax</code>.
*/
protected void evaluateNeurons() {
log.debug("Evaluation of neurons...");
Layer myLayer; // help variable
int myNrOfPatterns = net.getMonitor().getTrainingPatterns(); // number of patterns
double [] myInfo; // Ij (for a single layer)
double [] myAvgOutputs; // the average output over all patterns (for a single layer)
// first we will calculate the information from the jth hidden unit to its output layer (Ij)
// Ij = 1/M * sum{p=1:M}(sum{k=1:No}(|wjk*vpj|)), this is equal to
// = 1/M * sum{p=1:M}(|vpj|)) * sum{k=1:No}(|wjk|)
// M is number of patterns, No is number of output units,
// vpj = the output of a neuron j for pattern p, wjk is the weight from neuron j to k
// during this the calculation of Ij we also calculate the average output for each neuron j
// _vj = 1/M sum{p=1:M}(vpj)
for(int i = 0; i < layers.size(); i++) {
myLayer = (Layer)layers.get(i);
myInfo = new double[myLayer.getRows()];
myAvgOutputs = new double[myLayer.getRows()];
for(int n = 0; n < myLayer.getRows(); n++) {
// for all neurons in a layer
double myTempSumWeights = getSumAbsoluteWeights(myLayer, n); // get sum{k=1:No}(|wjk|)
double[] myTempSumOutputs = getSumOutputs(i, n); // get sum{p=1:M}(vpj) and sum{p=1:M}(|vpj|)
myInfo[n] = (myTempSumWeights * myTempSumOutputs[1]) / myNrOfPatterns;
if(myInfo[n] > infoMax) {
// also find max value of the information max-j{Ij}
infoMax = myInfo[n];
}
myAvgOutputs[n] = myTempSumOutputs[0] / myNrOfPatterns;
}
information.add(myInfo);
averageOutputs.add(myAvgOutputs);
}
// at this moment we have calculated (A) information from each neuron j to its output
// layer (Ij and maxj{Ij}) and we have calculated the average of outputs for each neuron
// ((B) _vj)
// In the next step we will calculate the variance (B) Vj
// Vj = sum{p=1:M}(vpj - _vj)^2, _vj the average is calculate above
// we will also store (vpj - _vj) for all neurons, so in the following step it will be easier
// to calculate the correlation
double [] myVariance; // variances for a layer
double [] myTempDifferences; // differences (output-pattern - avg-output <=> vjp - _vj) for a single layer and single pattern
List myDifferences = new ArrayList(); // all differences (all layers and over all patterns)
// vpj - _vj [layer->pattern->neuron], NOTE outputsAfterPattern is [pattern->layer->neuron]
List myDifferencesForLayer; // differences of all patterns for a single layer [pattern->neuron]
for(int i = 0; i < layers.size(); i++) {
myLayer = (Layer)layers.get(i);
myVariance = new double[myLayer.getRows()]; // Vj
myDifferencesForLayer = new ArrayList();
for(int p = 0; p < outputsAfterPattern.size(); p++) {
myTempDifferences = new double[myLayer.getRows()]; // differences for a single pattern for a single layer
for(int n = 0; n < myLayer.getRows(); n++) {
List myOutputs = (List)outputsAfterPattern.get(p);
myTempDifferences[n] = ((double[])myOutputs.get(i))[n]
- ((double[])averageOutputs.get(i))[n]; // vpj - _vj
myVariance[n] += myTempDifferences[n] * myTempDifferences[n];
}
myDifferencesForLayer.add(myTempDifferences);
}
for(int n = 0; n < myLayer.getRows(); n++) {
// also find max variance
if(myVariance[n] > varianceMax) {
varianceMax = myVariance[n];
}
}
myDifferences.add(myDifferencesForLayer);
variance.add(myVariance);
}
// Now we have calculated the variance for each neuron (B) Vj and myDifferences holds the differences
// between the output of a neuron and the average output over all patterns and all layers. Now we will
// calculate gamma (C), which is closely related to the correlation, which will be needed later
// Ajj' = sum{p=1:M}((vpj - _vj) * (vpj' - _vj'))
// Bjj' = sum{p=1:M}(vpj - _vj)^2 * sum{p=1:M}(vpj' - _vj')^2
// = Vj * Vj'
// Gammajj' = Ajj' / Bjj'^1/2
// The correlation between units is defined by:
// Rjj' = 1 - |Gammajj'|, however we will not calculate Rjj' here
Layer myLayer1, myLayer2;
List myTempDifferencesForLayer1, myTempDifferencesForLayer2; // differences between vpj - _vj and vpj' - _vj' (j = n1, j' = n2)
// Pointers to construct the tree to save the gammas [gamma->layer1->neuron1->layer2->neuron2]
List [] myNeurons1Pointer;
double [] myNeurons2Pointer;
// gamma is symmetric, so only the upper triangle (ordered neuron pairs) is stored
for(int l1 = 0; l1 < layers.size(); l1++) { // l1 = layer 1
myLayer1 = (Layer)layers.get(l1);
myNeurons1Pointer = new List[myLayer1.getRows()];
gamma.add(myNeurons1Pointer); // so gamma.get(an index) gets a neuron pointer of layer index
for(int n1 = 0; n1 < myLayer1.getRows(); n1++) {
myNeurons1Pointer[n1] = new ArrayList();
for(int l2 = 0; l2 < layers.size(); l2++) { // l2 = layer 1
myLayer2 = (Layer)layers.get(l2);
if(l2 < l1) {
myNeurons1Pointer[n1].add(new double[0]); // a little waste of memory, but this way it allows us to
// index layers easily later on, because the index of layers
// will be still be matching
} else {
myNeurons2Pointer = new double[myLayer2.getRows()];
myNeurons1Pointer[n1].add(myNeurons2Pointer);
for(int n2 = (l1 == l2 ? n1+1 : 0); n2 < myLayer2.getRows(); n2++) {
double myA = 0, myB = 0;
for(int p = 0; p < myNrOfPatterns; p++) {
myTempDifferencesForLayer1 = (List)myDifferences.get(l1);
myTempDifferencesForLayer2 = (List)myDifferences.get(l2);
myA += ((double[])myTempDifferencesForLayer1.get(p))[n1] *
((double[])myTempDifferencesForLayer2.get(p))[n2];
}
myB = ((double[])variance.get(l1))[n1] * ((double[])variance.get(l2))[n2];
// NOTE(review): if a neuron's variance is zero, myB is 0 and this divides
// by zero, yielding NaN/Infinity for gamma — presumably such neurons are
// removed by the variance criterion first; verify against doOptimize().
myNeurons2Pointer[n2] = myA / Math.sqrt(myB);
}
}
}
}
}
log.debug("Evaluation done.");
}
/**
* Gets the minimum correlation for a certain neuron j. A correlation between neuron
* j and j' is defined as Rjj' = 1 - |Gammajj'|. Rjmin (which this function calculates)
* is defined as Rjmin = min-j{Rjj'}. It also returns the index of the neuron j that is the
* argument to this function as well as j' that is the minimum.
*
* @param aLayer the index of the layer of the neuron <code>aNeuron</code>.
* @param aNeuron the neuron within the layer (j).
* @return the minimum correlation Rjmin, together with the index of the neuron as an argument
* and the neuron of the minimum (layer and neuron). The lower index of the two neurons is at position
* 1, 2 and the higher index is at position 3, 4. The minimum itself is at position 0. Finally at position
* 5 we will indicate if the argument was the lower index neuron (&lt;0, so it is now at position 1, 2) or
* if the argument was the higher index neuron (&gt;0, so now it is at position 3, 4).
*/
protected double[] getMinCorrelation(int aLayer, int aNeuron) {
// position 0 starts above the maximum possible correlation (0 <= R <= 1),
// so the first comparison always succeeds
double [] myReturnValue = new double[] {2, -1, -1, -1, -1, 0}; // 2 => 0 <= min <= 1
List[] myNeurons;
double myCorrelation;
// check neurons before aLayerIndex and aNeuron
// (gamma stores only ordered pairs, so earlier neurons index the argument)
for(int l = 0; l <= aLayer; l++) {
myNeurons = (List[])gamma.get(l);
for(int n = 0; n < (l == aLayer ? aNeuron : myNeurons.length); n++) {
myCorrelation = 1 - Math.abs(((double[])myNeurons[n].get(aLayer))[aNeuron]);
if(myReturnValue[0] > myCorrelation) {
myReturnValue[0] = myCorrelation;
myReturnValue[1] = l; // the lower index neuron
myReturnValue[2] = n;
myReturnValue[3] = aLayer; // the higher index neuron
myReturnValue[4] = aNeuron;
myReturnValue[5] = 1; // argument is higher index neuron
}
}
}
List myLayers;
double[] myNeurons2;
// check neurons after aLayerIndex and aNeuron
// (here the argument is the lower-ordered neuron of each pair)
myLayers = ((List[])gamma.get(aLayer))[aNeuron];
for(int l = aLayer; l < myLayers.size(); l++) {
myNeurons2 = (double[])myLayers.get(l);
for(int n = (l == aLayer ? aNeuron + 1 : 0); n < myNeurons2.length; n++) {
myCorrelation = 1 - Math.abs(myNeurons2[n]);
if(myReturnValue[0] > myCorrelation) {
myReturnValue[0] = myCorrelation;
myReturnValue[1] = aLayer; // the lower index neuron
myReturnValue[2] = aNeuron;
myReturnValue[3] = l; // the higher index neuron
myReturnValue[4] = n;
myReturnValue[5] = -1; // argument is lower neuron
}
}
}
return myReturnValue;
}
/**
 * Sums the outputs of one neuron over all stored patterns, both plainly and
 * in absolute value.
 *
 * @param aLayer the index of the layer holding the neuron.
 * @param aNeuron the index of the neuron within that layer.
 * @return a two-element array: index 0 the plain sum, index 1 the absolute sum.
 */
protected double[] getSumOutputs(int aLayer, int aNeuron) {
    double myPlainSum = 0;
    double myAbsoluteSum = 0;
    for(int p = 0; p < outputsAfterPattern.size(); p++) {
        // outputsAfterPattern is organized as [pattern -> layer -> neuron]
        List myLayerOutputs = (List)outputsAfterPattern.get(p);
        double myValue = ((double[])myLayerOutputs.get(aLayer))[aNeuron];
        myPlainSum += myValue;
        myAbsoluteSum += Math.abs(myValue);
    }
    return new double[] {myPlainSum, myAbsoluteSum};
}
/**
 * Sums the absolute values of all outgoing weights of one neuron within a layer.
 *
 * @param aLayer the layer holding neuron <code>aNeuron</code>.
 * @param aNeuron the index of the neuron within the layer.
 * @return the sum of |w| over every output synapse of the neuron.
 */
protected double getSumAbsoluteWeights(Layer aLayer, int aNeuron) {
    double myTotal = 0;
    for(int i = 0; i < aLayer.getAllOutputs().size(); i++) {
        OutputPatternListener myListener = (OutputPatternListener)aLayer.getAllOutputs().get(i);
        if(!(myListener instanceof Synapse)) {
            // Non-synapse outputs carry no weight matrix; treated as unsupported.
            throw new org.joone.exception.JooneRuntimeException("Unable to optimize. Output of layer is not a synapse.");
        }
        Synapse mySynapse = (Synapse)myListener;
        for(int k = 0; k < mySynapse.getOutputDimension(); k++) {
            myTotal += Math.abs(mySynapse.getWeights().value[aNeuron][k]);
        }
    }
    return myTotal;
}
/** No-op: cycle events are not needed by this optimizer. */
public void cicleTerminated(NeuralNetEvent e) {
}
/** No-op: error changes are not needed by this optimizer. */
public void errorChanged(NeuralNetEvent e) {
}
/** No-op: start events are not needed by this optimizer. */
public void netStarted(NeuralNetEvent e) {
}
/** Training finished: start the validation run that collects neuron outputs. */
public void netStopped(NeuralNetEvent e) {
log.debug("Network stopped.");
runValidation();
}
/** No-op: error stops are not handled by this optimizer. */
public void netStoppedError(NeuralNetEvent e, String error) {
}
/** Validation finished: all per-pattern outputs are collected, run the optimization. */
public void netValidated(NeuralValidationEvent event) {
// validation is finished, so we should have collected all the information
// to optimize the activation functions
log.debug("Network validated.");
doOptimize();
}
/**
 * Removes all the listeners from the neural network (temporarily), saving
 * them in <code>listeners</code> so they can be re-attached after the
 * optimization restarts the network.
 */
protected void removeAllListeners() {
    Vector myListeners = net.getListeners();
    while(!myListeners.isEmpty()) {
        // always detach the last listener; the vector shrinks as we go
        NeuralNetListener myListener = (NeuralNetListener)myListeners.get(myListeners.size() - 1);
        listeners.add(myListener);
        net.removeNeuralNetListener(myListener);
    }
}
/**
 * Re-attaches to the neural network all the listeners previously saved by
 * {@link #removeAllListeners()}, then empties the saved list.
 */
protected void restoreAllListeners() {
    for(int i = 0; i < listeners.size(); i++) {
        net.addNeuralNetListener((NeuralNetListener)listeners.get(i));
    }
    listeners = new Vector(); // clear the list
}
/**
* This method is called after every pattern, so we can retrieve information
* from the network that is related to the pattern that was just forwarded
* through the network. The snapshot of every layer's last outputs is appended
* to <code>outputsAfterPattern</code> for later statistical evaluation.
*/
void patternFinished() {
Layer myLayer;
List myOutputs = new ArrayList(); // the outputs of the neurons after a pattern
// log.debug("Single pattern has been forwarded through the network.");
// in this stage we only need to save the outputs of the neurons after
// each pattern, then later we can calculate all the necessary information
for(int i = 0; i < layers.size(); i++) {
// outputs are read from the validation clone, not the original network
myLayer = findClonedLayer((Layer)layers.get(i));
myOutputs.add(myLayer.getLastOutputs());
}
outputsAfterPattern.add(myOutputs);
}
/**
 * Finds the cloned counterpart of a layer: the layer occupying the same
 * position in <code>clone</code> as <code>aLayer</code> does in <code>net</code>.
 *
 * @param aLayer the layer to find its cloned version in <code>clone</code>.
 * @return the cloned layer, or <code>null</code> when aLayer is not part of net.
 */
private Layer findClonedLayer(Layer aLayer) {
    int myIndex = 0;
    Iterator myIterator = net.getLayers().iterator();
    while(myIterator.hasNext()) {
        if(myIterator.next() == aLayer) {
            // same position in the cloned network
            return (Layer)clone.getLayers().get(myIndex);
        }
        myIndex++;
    }
    return null;
}
/**
* Gets epsilon, the threshold to decide if a neuron should be deleted or not.
*
* @return the threshold epsilon.
*/
public double getEpsilon() {
return epsilon;
}
/**
* Sets epsilon, the threshold to decide if a neuron should be deleted or not.
*
* @param anEpsilon the new epsilon threshold.
*/
public void setEpsilon(double anEpsilon) {
epsilon = anEpsilon;
}
/**
* Triggers the optimization when a convergence observer signals convergence.
* When the optimization does not change anything, further events for the same
* convergence state are suppressed.
*
* @param anEvent the convergence event.
* @param anObserver the observer that fired the event.
*/
public void netConverged(ConvergenceEvent anEvent, ConvergenceObserver anObserver) {
// whenever this object is added to a convegence observer, this method will be called
// when convergence is reached, otherwise the user itself should call optimize()
// based on some criteria
if(!optimize()) {
// the network was not optimized, so the network stayes probably in the same
// convergence state, so new event shouldn't be created until we move out of
// the convergence state
anObserver.disableCurrentConvergence();
}
}
}
/**
* This class/synapse is only used to inform a Nakayama object whenever a single
* pattern has been forwarded through the network. It performs no forward or
* backward transformation of its own.
*/
class PatternForwardedSynapse extends Synapse {
/** The nakayama object that needs to be informed. */
protected Nakayama nakayama;
/**
* Constructor.
*
* @param aNakayama the object that needs to be informed whenever a pattern
* has been forwarded through the network.
*/
public PatternForwardedSynapse(Nakayama aNakayama) {
nakayama = aNakayama;
}
/**
* Notifies the Nakayama object for every real pattern; a count of -1 marks
* the stop pattern and is ignored.
*/
public synchronized void fwdPut(Pattern pattern) {
if(pattern.getCount() > -1) {
nakayama.patternFinished();
items++;
}
}
/** No-op: this synapse does not propagate the backward pass. */
protected void backward(double[] pattern) {
}
/** No-op: this synapse does not transform the forward pass. */
protected void forward(double[] pattern) {
}
/** No-op: no internal arrays are needed. */
protected void setArrays(int rows, int cols) {
}
/** No-op: no dimensions are needed. */
protected void setDimensions(int rows, int cols) {
}
}
| Java |
package org.joone.util;
import org.joone.engine.*;
import org.joone.net.NeuralNet;
/**
* This class represents a generic listener of the net's events.
* Any new listener can be created simply extending this class
* and filling the manageXxxx methods with the necessary code.
*
* @author: Administrator
*/
public abstract class MonitorPlugin implements java.io.Serializable, NeuralNetListener {
private String name;
private static final long serialVersionUID = 951079164859904152L;
private int rate = 1;
private NeuralNet neuralNet;
/**
 * Invoked at the end of each training cycle; delegates to manageCycle only
 * when the rate filter allows the current cycle to be handled.
 */
public void cicleTerminated(NeuralNetEvent e) {
    Monitor myMonitor = (Monitor)e.getSource();
    if (toBeManaged(myMonitor)) {
        manageCycle(myMonitor);
    }
}
/**
* Invoked when the network stops; always forwarded to manageStop.
*/
public void netStopped(NeuralNetEvent e) {
Monitor mon = (Monitor)e.getSource();
manageStop(mon);
}
/** Invoked when the network starts; always forwarded to manageStart. */
public void netStarted(NeuralNetEvent e) {
Monitor mon = (Monitor)e.getSource();
manageStart(mon);
}
/** Invoked when the RMSE changes; forwarded to manageError subject to the rate filter. */
public void errorChanged(NeuralNetEvent e) {
Monitor mon = (Monitor)e.getSource();
if (toBeManaged(mon))
manageError(mon);
}
/** Invoked when the network stops due to an error; always forwarded to manageStopError. */
public void netStoppedError(NeuralNetEvent e,String error) {
Monitor mon = (Monitor)e.getSource();
manageStopError(mon, error);
}
/**
 * Decides whether the current cycle's event must be forwarded to the
 * manageXxxx methods. Events are handled only once every {@link #getRate() rate}
 * cycles; a rate of zero disables event handling completely.
 *
 * @param monitor the Monitor the event originated from.
 * @return true when the current (ascending) cycle number is a multiple of the rate.
 */
protected boolean toBeManaged(Monitor monitor) {
    int myRate = getRate();
    if (myRate == 0) // If rate is zero the events are never managed
        return false;
    // Cycles count down from totCicles, so convert to an ascending 1-based counter.
    int currentCycle = monitor.getTotCicles() - monitor.getCurrentCicle() + 1;
    /* We want manage the events only every rate cycles */
    return currentCycle % myRate == 0;
}
/** Called when the network stops. */
protected abstract void manageStop(Monitor mon);
/** Called at the end of a cycle, subject to the rate filter. */
protected abstract void manageCycle(Monitor mon);
/** Called when the network starts. */
protected abstract void manageStart(Monitor mon);
/** Called when the error changes, subject to the rate filter. */
protected abstract void manageError(Monitor mon);
/** Called when the network stops because of an error. */
protected abstract void manageStopError(Monitor mon, String msgErr);
/** Getter for property name.
* @return Value of property name.
*/
public java.lang.String getName() {
return name;
}
/** Setter for property name.
* @param name New value of property name.
*/
public void setName(java.lang.String name) {
this.name = name;
}
/** Getter for property rate.
* This property represents the interval (# of cycles)
* between two calls to the manageXxxx methods.
* A rate of zero disables the cycle/error events entirely.
* @return Value of property rate.
*/
public int getRate() {
return rate;
}
/** Setter for property rate.
* This property represents the interval (# of cycles)
* between two calls to the manageXxxx methods.
* @param rate New value of property rate.
*/
public void setRate(int rate) {
this.rate = rate;
}
/** Getter for property neuralNet.
* @return the neural network this plugin is attached to.
*/
public NeuralNet getNeuralNet(){
return neuralNet;
}
/** Setter for property neuralNet.
* @param neuralNet the neural network this plugin is attached to.
*/
public void setNeuralNet(NeuralNet neuralNet){
this.neuralNet = neuralNet;
}
} | Java |
package org.joone.util;
import org.joone.engine.*;
import org.joone.net.*;
import java.util.Vector;
import org.joone.util.CSVParser;
import java.util.*;
import java.io.*;
import org.joone.log.*;
/**
* This abstract class must be extended to implement plug-ins for input data
* preprocessing. The objects extending this class can be inserted into objects
* that extend the <code>org.joone.io.StreamInputSynapse</code>.
*/
public abstract class ConverterPlugIn extends AbstractConverterPlugIn {
/** The object used when logging debug, errors, warnings and info. */
private static final ILogger log = LoggerFactory.getLogger(ConverterPlugIn.class);
/** The Vector of input patterns which this converter must process. */
//private transient Vector InputVector;
/** The serial version of this object. */
private static final long serialVersionUID = 1698511686417967414L;
/** Flag indicating if every cycle the data should be preprocesed. */
private boolean applyEveryCycle;
/** The default constructor of the ConverterPlugIn. */
public ConverterPlugIn() {
}
/**
* Constructor of the ConverterPlugIn.
*
* @param anAdvancedSerieSelector the advanced serie selector expression
* identifying which series this plug-in processes.
* @see AbstractConverterPlugIn#AbstractConverterPlugIn(String)
*/
public ConverterPlugIn(String anAdvancedSerieSelector) {
super(anAdvancedSerieSelector);
}
/**
* This method is called at the start of a new cycle, and
* permit to apply the conversion for the components having
* the applyEveryCycle property set to true.
* This different entry point has been added in order to
* avoid to applying the conversion for plugins having
* the applyEveryCycle property set to false.
* The call is propagated along the whole plug-in chain.
*
* @return true if the input buffer is changed
*/
public boolean newCycle() {
boolean retValue = false;
if (isApplyEveryCycle()) {
retValue = apply();
}
if (getNextPlugIn() != null) {
ConverterPlugIn myPlugIn = (ConverterPlugIn)getNextPlugIn();
myPlugIn.setInputVector(getInputVector());
// deliberate non-short-circuit '|': the next plug-in's newCycle() must
// always run, even when retValue is already true
retValue = myPlugIn.newCycle() | retValue;
}
return retValue;
}
/**
* Getter for property applyEveryCycle.
*
* @return true if the conversion is re-applied at the start of every cycle.
*/
public boolean isApplyEveryCycle() {
return applyEveryCycle;
}
/**
* Setter for property applyEveryCycle.
*
* @param anApplyEveryCycle true to re-apply the conversion at the start of every cycle.
*/
public void setApplyEveryCycle(boolean anApplyEveryCycle) {
applyEveryCycle = anApplyEveryCycle;
}
/**
 * Deserializes this plug-in, restoring default values for properties that did
 * not exist when older versions of the class were serialized.
 *
 * @param in the stream to read the object from.
 * @throws IOException if an I/O error occurs while reading the stream.
 * @throws ClassNotFoundException if a serialized class cannot be resolved.
 */
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    // To maintain the compatibility with the old saved classes
    if (getAdvancedSerieSelector() == null)
        setAdvancedSerieSelector("1"); // default: select the first serie
    if (getName() == null)
        setName("InputPlugin 9");
}
} | Java |
package org.joone.util;
import java.beans.*;
/**
* BeanInfo for {@code MovingAveragePlugIn}. NOTE: this class is generated by the
* NetBeans form editor; the GEN-FIRST/GEN-LAST marker comments delimit regenerated
* regions and must be preserved when editing by hand.
*/
public class MovingAveragePlugInBeanInfo extends SimpleBeanInfo {
// Bean descriptor //GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( MovingAveragePlugIn.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers //GEN-FIRST:Properties
private static final int PROPERTY_advancedMovAvgSpec = 0;
private static final int PROPERTY_advancedSerieSelector = 1;
private static final int PROPERTY_name = 2;
// Property array
/*lazy PropertyDescriptor*/
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[3];
try {
properties[PROPERTY_advancedMovAvgSpec] = new PropertyDescriptor ( "advancedMovAvgSpec", MovingAveragePlugIn.class, "getAdvancedMovAvgSpec", "setAdvancedMovAvgSpec" );
properties[PROPERTY_advancedMovAvgSpec].setDisplayName ( "Moving Average" );
properties[PROPERTY_advancedSerieSelector] = new PropertyDescriptor ( "advancedSerieSelector", MovingAveragePlugIn.class, "getAdvancedSerieSelector", "setAdvancedSerieSelector" );
properties[PROPERTY_name] = new PropertyDescriptor ( "name", MovingAveragePlugIn.class, "getName", "setName" );
}
// generated code: introspection of these known getters/setters is not expected to fail
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers //GEN-FIRST:Methods
// Method array
/*lazy MethodDescriptor*/
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[0];//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
* Gets the bean's <code>BeanDescriptor</code>s.
*
* @return BeanDescriptor describing the editable
* properties of this bean. May return null if the
* information should be obtained by automatic analysis.
*/
public BeanDescriptor getBeanDescriptor() {
return getBdescriptor();
}
/**
* Gets the bean's <code>PropertyDescriptor</code>s.
*
* @return An array of PropertyDescriptors describing the editable
* properties supported by this bean. May return null if the
* information should be obtained by automatic analysis.
* <p>
* If a property is indexed, then its entry in the result array will
* belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
* A client of getPropertyDescriptors can use "instanceof" to check
* if a given PropertyDescriptor is an IndexedPropertyDescriptor.
*/
public PropertyDescriptor[] getPropertyDescriptors() {
return getPdescriptor();
}
/**
* Gets the bean's <code>EventSetDescriptor</code>s.
*
* @return An array of EventSetDescriptors describing the kinds of
* events fired by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public EventSetDescriptor[] getEventSetDescriptors() {
return getEdescriptor();
}
/**
* Gets the bean's <code>MethodDescriptor</code>s.
*
* @return An array of MethodDescriptors describing the methods
* implemented by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public MethodDescriptor[] getMethodDescriptors() {
return getMdescriptor();
}
/**
* A bean may have a "default" property that is the property that will
* mostly commonly be initially chosen for update by human's who are
* customizing the bean.
* @return Index of default property in the PropertyDescriptor array
* returned by getPropertyDescriptors.
* <P> Returns -1 if there is no default property.
*/
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
* A bean may have a "default" event that is the event that will
* mostly commonly be used by human's when using the bean.
* @return Index of default event in the EventSetDescriptor array
* returned by getEventSetDescriptors.
* <P> Returns -1 if there is no default event.
*/
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
/*
* ToBinaryPluginBeanInfo.java
*
* Created on 24 settembre 2004, 20.11
*/
package org.joone.util;
import java.beans.*;
/**
* @author paolo
*/
public class ToBinaryPluginBeanInfo extends SimpleBeanInfo {
// NOTE: generated by the NetBeans form editor; the GEN-FIRST/GEN-LAST marker
// comments delimit regenerated regions and must be preserved when editing by hand.
// Bean descriptor //GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( ToBinaryPlugin.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers //GEN-FIRST:Properties
private static final int PROPERTY_advancedSerieSelector = 0;
private static final int PROPERTY_name = 1;
// Property array
/*lazy PropertyDescriptor*/
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[2];
try {
properties[PROPERTY_advancedSerieSelector] = new PropertyDescriptor ( "advancedSerieSelector", ToBinaryPlugin.class, "getAdvancedSerieSelector", "setAdvancedSerieSelector" );
properties[PROPERTY_name] = new PropertyDescriptor ( "name", ToBinaryPlugin.class, "getName", "setName" );
}
// generated code: introspection of these known getters/setters is not expected to fail
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers //GEN-FIRST:Methods
// Method array
/*lazy MethodDescriptor*/
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[0];//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
* Gets the bean's <code>BeanDescriptor</code>s.
*
* @return BeanDescriptor describing the editable
* properties of this bean. May return null if the
* information should be obtained by automatic analysis.
*/
public BeanDescriptor getBeanDescriptor() {
return getBdescriptor();
}
/**
* Gets the bean's <code>PropertyDescriptor</code>s.
*
* @return An array of PropertyDescriptors describing the editable
* properties supported by this bean. May return null if the
* information should be obtained by automatic analysis.
* <p>
* If a property is indexed, then its entry in the result array will
* belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
* A client of getPropertyDescriptors can use "instanceof" to check
* if a given PropertyDescriptor is an IndexedPropertyDescriptor.
*/
public PropertyDescriptor[] getPropertyDescriptors() {
return getPdescriptor();
}
/**
* Gets the bean's <code>EventSetDescriptor</code>s.
*
* @return An array of EventSetDescriptors describing the kinds of
* events fired by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public EventSetDescriptor[] getEventSetDescriptors() {
return getEdescriptor();
}
/**
* Gets the bean's <code>MethodDescriptor</code>s.
*
* @return An array of MethodDescriptors describing the methods
* implemented by this bean. May return null if the information
* should be obtained by automatic analysis.
*/
public MethodDescriptor[] getMethodDescriptors() {
return getMdescriptor();
}
/**
* A bean may have a "default" property that is the property that will
* mostly commonly be initially chosen for update by human's who are
* customizing the bean.
* @return Index of default property in the PropertyDescriptor array
* returned by getPropertyDescriptors.
* <P> Returns -1 if there is no default property.
*/
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
* A bean may have a "default" event that is the event that will
* mostly commonly be used by human's when using the bean.
* @return Index of default event in the EventSetDescriptor array
* returned by getEventSetDescriptors.
* <P> Returns -1 if there is no default event.
*/
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
/*
* ToBinaryPlugin.java
*
* Created on September 24, 2004, 2:36 PM
*/
package org.joone.util;
import java.util.*;
import org.joone.engine.*;
import org.joone.log.*;
/**
 * This plug-in converts base-10 data to binary format. The plug-in ignores
 * the fractional part (the part after the . ) for non-integer numbers. It works
 * correctly for positive as well as for negative numbers: when a converted
 * serie contains both signs, one extra sign bit is appended after the
 * magnitude bits.
 *
 * @author Boris Jansen
 */
public class ToBinaryPlugin extends ConverterPlugIn {
/** The logger for this class. */
private static final ILogger log = LoggerFactory.getLogger(ToBinaryPlugin.class);
/** The sizes of the (binary) arrays of converted series. Each entry is an
 * int[2] holding {original serie index, number of extra columns added}. This
 * way we are able to find the correct position of the serie we have to
 * convert, taking into account any previously converted series. */
private List theConvertedSeries = new ArrayList();
/** The value for the upper bit. */
private double upperBit = 1.0; // default
/** The value for the lower bit. */
private double lowerBit = 0.0; // default
/** Creates a new instance of ToBinaryPlugin */
public ToBinaryPlugin() {
}
/**
 * Creates a new instance of ToBinaryPlugin
 *
 * @param anAdvancedSerieSelector the advanced serie selector to use.
 * @see setAdvancedSerieSelector()
 */
public ToBinaryPlugin(String anAdvancedSerieSelector) {
super(anAdvancedSerieSelector);
}
/**
 * Converts the given serie from base-10 values to their binary expansion,
 * widening every input pattern accordingly and recording the widening in
 * {@link #theConvertedSeries}.
 *
 * @param serie the (original) index of the serie to convert.
 * @return true if at least one pattern was converted.
 */
protected boolean convert(int serie) {
boolean retValue = false;
int mySerie = serie, mySignBitLenght;
boolean myHasPositiveValues = false;
boolean myHasNegativeValues = false;
for(int i = 0; i < theConvertedSeries.size(); i++) {
// get the correct serie, taking into account any previous converted
// series, by which the serie changes (from integer to binary results
// in more columns)
if(((int[])theConvertedSeries.get(i))[0] < serie) {
mySerie += ((int[])theConvertedSeries.get(i))[1];
}
}
int mySize = 0; // the (largest) size of the converted values = binary arrays (#bits)
double[] myArray;
double[][] myBinaries = new double[getInputVector().size()][];
// first pass: convert every value and remember the widest bit pattern,
// noting whether both signs occur (which forces a sign bit)
for(int i = 0; i < getInputVector().size(); i++) {
myArray = ((Pattern)getInputVector().get(i)).getArray();
myBinaries[i] = getBinary(myArray[mySerie]);
if(myBinaries[i].length > mySize) {
mySize = myBinaries[i].length;
}
if(myArray[mySerie] > 0) {
myHasPositiveValues = true;
} else if(myArray[mySerie] < 0) {
myHasNegativeValues = true;
}
}
// if there are positive as well as negative values we should include a sign bit
mySignBitLenght = (myHasPositiveValues && myHasNegativeValues) ? 1 : 0;
// second pass: rebuild each pattern with the binary digits (least
// significant bit first, see getBinary) spliced in at position mySerie,
// padded up to mySize with the lower-bit value
for(int i = 0; i < getInputVector().size(); i++) {
myArray = ((Pattern)getInputVector().get(i)).getArray();
// if we have positive and negative numbers we add an extra bit (the sign bit)
double[] myNewArray = new double[myArray.length -1 + mySize + mySignBitLenght];
for(int j = 0; j < myArray.length; j++) {
// copy myArray into myNewArray, but skip the part where we
// will place the converted (binary) myArray[mySerie]
if(j < mySerie) {
myNewArray[j] = myArray[j];
} else if(j > mySerie) {
myNewArray[j + mySize + mySignBitLenght - 1] = myArray[j]; // -1 added by yccheok
}
}
for(int j = 0; j < mySize + mySignBitLenght; j++) {
// now we will copy the binary part to the array
if(j >= myBinaries[i].length) {
myNewArray[mySerie + j] = getLowerBit();
// if it is the sign bit and the value is negative we will update it
if(j == mySize) {
// this is only possible when mySignBitLenght == 1, else always j < mySize
if(myArray[mySerie] < 0) {
myNewArray[mySerie + j] = getUpperBit();
}
}
} else {
myNewArray[mySerie + j] = myBinaries[i][j];
}
}
((Pattern)getInputVector().get(i)).setArray(myNewArray);
retValue = true;
// debugging (print the original input array and the converted array
/* debug
String myTemp = "";
for(int j = 0; j < myArray.length; j++) {
myTemp += (int)myArray[j] + " ";
}
log.debug(myTemp + " <- original array");
myTemp = "";
for(int j = 0; j < myNewArray.length; j++) {
myTemp += myNewArray[j] + " ";
}
log.debug(myTemp + " <- converted (including binary part) array");
end debug */
}
// we have converted a serie -> so the positions change, to find the original
// position of a serie we save the amount of bits changed
theConvertedSeries.add(new int[] {serie, mySize + mySignBitLenght -1});
return retValue;
}
/**
 * Applies the conversion, resetting the bookkeeping of previously
 * converted series first so positions are computed from scratch.
 */
protected boolean apply() {
// new convertion ->
theConvertedSeries = new ArrayList();
return super.apply();
}
/**
 * Converts a number to a binary number (the part after the . (like 348
 * in 321.348) is ignored).
 *
 * @param aNumber the number to convert.
 * @return the converted number as an array in binary form, least
 * significant bit first (an empty array for zero).
 */
protected double[] getBinary(double aNumber) {
aNumber = Math.floor(aNumber); // throw away the part after the .
// NOTE(review): for negative inputs Math.floor rounds away from zero
// (e.g. -0.5 becomes -1 before the abs below); truncation toward zero
// may have been intended -- confirm.
aNumber = Math.abs(aNumber); // here we ignore the sign part (we deal with this
// in the convert() method)
double myTemp = aNumber;
int mySize = 0;
// count how many bits the magnitude needs
while(myTemp > 0) {
mySize++;
myTemp /= 2;
myTemp = Math.floor(myTemp);
}
double[] myBinary = new double[mySize];
// repeated division by 2: a fractional quotient means the bit is set
for(int i = 0; i < mySize; i++) {
myTemp = aNumber / 2;
aNumber = Math.floor(myTemp);
if(myTemp > aNumber) {
myBinary[i] = getUpperBit();
} else {
myBinary[i] = getLowerBit();
}
}
return myBinary;
}
/**
 * Sets the value for the upper bit. In binary problems it is often better to use 0.3 and 0.7
 * or -0.7 and 0.7 as target instead of 0 and 1 or -1 and 1, because the asymptotes (0 and 1)
 * tend to take a long time to train, worsen generalization and drive the weights to very
 * large values.
 * By using this function you can set a different value for the upper bit.
 *
 * @param aValue the value to use for the upper bit.
 */
public void setUpperBit(double aValue) {
upperBit = aValue;
}
/**
 * Gets the value used for the upper bit.
 *
 * @return the value used for the upper bit.
 */
public double getUpperBit() {
return upperBit;
}
/**
 * Sets the value for the lower bit.
 *
 * @param aValue the value to use for the lower bit.
 */
public void setLowerBit(double aValue) {
lowerBit = aValue;
}
/**
 * Gets the value used for the lower bit.
 *
 * @return the value used for the lower bit.
 */
public double getLowerBit() {
return lowerBit;
}
}
| Java |
package org.joone.util;
import java.beans.*;
/**
 * BeanInfo for {@code LearningSwitch}, exposing its properties (most of
 * them marked expert) to GUI builders.
 * <p>
 * Generated by the NetBeans bean-info editor: the {@code //GEN-*} marker
 * comments delimit regions the generator rewrites, so hand edits belong only
 * between the HEADEREND and LAST markers.
 */
public class LearningSwitchBeanInfo extends SimpleBeanInfo {
// Bean descriptor //GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( LearningSwitch.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers //GEN-FIRST:Properties
private static final int PROPERTY_activeInput = 0;
private static final int PROPERTY_allInputs = 1;
private static final int PROPERTY_defaultInput = 2;
private static final int PROPERTY_monitor = 3;
private static final int PROPERTY_name = 4;
private static final int PROPERTY_trainingSet = 5;
private static final int PROPERTY_validationSet = 6;
// Property array
/*lazy PropertyDescriptor*/
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[7];
try {
properties[PROPERTY_activeInput] = new PropertyDescriptor ( "activeInput", LearningSwitch.class, "getActiveInput", "setActiveInput" );
properties[PROPERTY_activeInput].setExpert ( true );
properties[PROPERTY_allInputs] = new PropertyDescriptor ( "allInputs", LearningSwitch.class, "getAllInputs", "setAllInputs" );
properties[PROPERTY_allInputs].setExpert ( true );
properties[PROPERTY_defaultInput] = new PropertyDescriptor ( "defaultInput", LearningSwitch.class, "getDefaultInput", "setDefaultInput" );
properties[PROPERTY_defaultInput].setExpert ( true );
properties[PROPERTY_monitor] = new PropertyDescriptor ( "monitor", LearningSwitch.class, "getMonitor", "setMonitor" );
properties[PROPERTY_monitor].setExpert ( true );
properties[PROPERTY_name] = new PropertyDescriptor ( "name", LearningSwitch.class, "getName", "setName" );
properties[PROPERTY_trainingSet] = new PropertyDescriptor ( "trainingSet", LearningSwitch.class, "getTrainingSet", "setTrainingSet" );
properties[PROPERTY_trainingSet].setExpert ( true );
properties[PROPERTY_validationSet] = new PropertyDescriptor ( "validationSet", LearningSwitch.class, "getValidationSet", "setValidationSet" );
properties[PROPERTY_validationSet].setExpert ( true );
}
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
// NOTE(review): an IntrospectionException is silently swallowed (generator
// default), which would leave null entries in the returned array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers //GEN-FIRST:Methods
// Method array
/*lazy MethodDescriptor*/
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[0];//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
 * Gets the bean's <code>BeanDescriptor</code>s.
 *
 * @return BeanDescriptor describing the editable
 * properties of this bean. May return null if the
 * information should be obtained by automatic analysis.
 */
public BeanDescriptor getBeanDescriptor() {
return getBdescriptor();
}
/**
 * Gets the bean's <code>PropertyDescriptor</code>s.
 *
 * @return An array of PropertyDescriptors describing the editable
 * properties supported by this bean. May return null if the
 * information should be obtained by automatic analysis.
 * <p>
 * If a property is indexed, then its entry in the result array will
 * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
 * A client of getPropertyDescriptors can use "instanceof" to check
 * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
 */
public PropertyDescriptor[] getPropertyDescriptors() {
return getPdescriptor();
}
/**
 * Gets the bean's <code>EventSetDescriptor</code>s.
 *
 * @return An array of EventSetDescriptors describing the kinds of
 * events fired by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public EventSetDescriptor[] getEventSetDescriptors() {
return getEdescriptor();
}
/**
 * Gets the bean's <code>MethodDescriptor</code>s.
 *
 * @return An array of MethodDescriptors describing the methods
 * implemented by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public MethodDescriptor[] getMethodDescriptors() {
return getMdescriptor();
}
/**
 * A bean may have a "default" property that is the property that will
 * most commonly be initially chosen for update by humans who are
 * customizing the bean.
 * @return Index of default property in the PropertyDescriptor array
 * returned by getPropertyDescriptors.
 * <P> Returns -1 if there is no default property.
 */
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
 * A bean may have a "default" event that is the event that will
 * most commonly be used by humans when using the bean.
 * @return Index of default event in the EventSetDescriptor array
 * returned by getEventSetDescriptors.
 * <P> Returns -1 if there is no default event.
 */
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
/*
* PlugInEvent.java
*
* Created on October 11, 2004, 4:36 PM
*/
package org.joone.util;
import java.util.EventObject;
/**
 * This event is sent by plug-ins indicating that data is changed. Listeners
 * (implementing {@link PlugInListener}) will be notified by this event that data
 * is changed.
 *
 * @author Boris Jansen
 */
public class PlugInEvent extends EventObject {

    /** Serial version UID. EventObject is Serializable, so pin this class's
     * serialization identity explicitly (the other serializable classes in
     * this package, e.g. MovingAveragePlugIn, already declare one). */
    private static final long serialVersionUID = 1L;

    /**
     * Creates a new instance of PlugInEvent
     *
     * @param anObject the object that creates and sends this event to the listeners
     *        (the event source); must not be null, per the EventObject contract.
     */
    public PlugInEvent(Object anObject) {
        super(anObject);
    }
}
| Java |
/*
* PlugInListener.java
*
* Created on October 11, 2004, 4:29 PM
*/
package org.joone.util;
/**
 * This interface defines the methods needed to be implemented by listeners that
 * listen to plug-ins that might send data changed / plug-in events.
 *
 * <!-- This class replaces InputPluginListener and OutputPluginListener -->
 *
 * @author Boris Jansen
 */
public interface PlugInListener {
/**
 * This method is called by plug-ins whenever data is changed.
 *
 * @param anEvent the event that is sent, i.e. the event indicating that the
 * data is changed.
 */
public void dataChanged(PlugInEvent anEvent);
}
| Java |
package org.joone.util;
import java.beans.*;
/**
 * BeanInfo for {@code CenterOnZeroPlugIn}, exposing its editable properties
 * ("advancedSerieSelector" and "name") to GUI builders.
 * <p>
 * Generated by the NetBeans bean-info editor: the {@code //GEN-*} marker
 * comments delimit regions the generator rewrites, so hand edits belong only
 * between the HEADEREND and LAST markers.
 */
public class CenterOnZeroPlugInBeanInfo extends SimpleBeanInfo {
// Bean descriptor //GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( CenterOnZeroPlugIn.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers //GEN-FIRST:Properties
private static final int PROPERTY_advancedSerieSelector = 0;
private static final int PROPERTY_name = 1;
// Property array
/*lazy PropertyDescriptor*/
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[2];
try {
properties[PROPERTY_advancedSerieSelector] = new PropertyDescriptor ( "advancedSerieSelector", CenterOnZeroPlugIn.class, "getAdvancedSerieSelector", "setAdvancedSerieSelector" );
properties[PROPERTY_name] = new PropertyDescriptor ( "name", CenterOnZeroPlugIn.class, "getName", "setName" );
}
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
// NOTE(review): an IntrospectionException is silently swallowed (generator
// default), which would leave null entries in the returned array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers //GEN-FIRST:Methods
// Method array
/*lazy MethodDescriptor*/
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[0];//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
 * Gets the bean's <code>BeanDescriptor</code>s.
 *
 * @return BeanDescriptor describing the editable
 * properties of this bean. May return null if the
 * information should be obtained by automatic analysis.
 */
public BeanDescriptor getBeanDescriptor() {
return getBdescriptor();
}
/**
 * Gets the bean's <code>PropertyDescriptor</code>s.
 *
 * @return An array of PropertyDescriptors describing the editable
 * properties supported by this bean. May return null if the
 * information should be obtained by automatic analysis.
 * <p>
 * If a property is indexed, then its entry in the result array will
 * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
 * A client of getPropertyDescriptors can use "instanceof" to check
 * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
 */
public PropertyDescriptor[] getPropertyDescriptors() {
return getPdescriptor();
}
/**
 * Gets the bean's <code>EventSetDescriptor</code>s.
 *
 * @return An array of EventSetDescriptors describing the kinds of
 * events fired by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public EventSetDescriptor[] getEventSetDescriptors() {
return getEdescriptor();
}
/**
 * Gets the bean's <code>MethodDescriptor</code>s.
 *
 * @return An array of MethodDescriptors describing the methods
 * implemented by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public MethodDescriptor[] getMethodDescriptors() {
return getMdescriptor();
}
/**
 * A bean may have a "default" property that is the property that will
 * most commonly be initially chosen for update by humans who are
 * customizing the bean.
 * @return Index of default property in the PropertyDescriptor array
 * returned by getPropertyDescriptors.
 * <P> Returns -1 if there is no default property.
 */
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
 * A bean may have a "default" event that is the event that will
 * most commonly be used by humans when using the bean.
 * @return Index of default event in the EventSetDescriptor array
 * returned by getEventSetDescriptors.
 * <P> Returns -1 if there is no default event.
 */
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
package org.joone.util;
import java.beans.*;
/**
 * BeanInfo for {@code SnapshotRecorder}, exposing its editable properties
 * ("name", "rate", "filename", "format" -- the latter with a custom
 * {@code SnapshotFormatEditor}) and its NeuralNetListener callback methods
 * to GUI builders.
 * <p>
 * Generated by the NetBeans bean-info editor: the {@code //GEN-*} marker
 * comments delimit regions the generator rewrites, so hand edits belong only
 * between the HEADEREND and LAST markers.
 */
public class SnapshotRecorderBeanInfo extends SimpleBeanInfo {
// Bean descriptor //GEN-FIRST:BeanDescriptor
/*lazy BeanDescriptor*/;
private static BeanDescriptor getBdescriptor(){
BeanDescriptor beanDescriptor = new BeanDescriptor ( SnapshotRecorder.class , null );//GEN-HEADEREND:BeanDescriptor
// Here you can add code for customizing the BeanDescriptor.
return beanDescriptor; }//GEN-LAST:BeanDescriptor
// Property identifiers //GEN-FIRST:Properties
private static final int PROPERTY_name = 0;
private static final int PROPERTY_rate = 1;
private static final int PROPERTY_filename = 2;
private static final int PROPERTY_format = 3;
// Property array
/*lazy PropertyDescriptor*/;
private static PropertyDescriptor[] getPdescriptor(){
PropertyDescriptor[] properties = new PropertyDescriptor[4];
try {
properties[PROPERTY_name] = new PropertyDescriptor ( "name", SnapshotRecorder.class, "getName", "setName" );
properties[PROPERTY_rate] = new PropertyDescriptor ( "rate", SnapshotRecorder.class, "getRate", "setRate" );
properties[PROPERTY_filename] = new PropertyDescriptor ( "filename", SnapshotRecorder.class, "getFilename", "setFilename" );
//properties[PROPERTY_filename].setPropertyEditorClass ( org.joone.edit.JooneFileChooserEditor.class );
properties[PROPERTY_format] = new PropertyDescriptor ( "format", SnapshotRecorder.class, "getFormat", "setFormat" );
properties[PROPERTY_format].setPropertyEditorClass ( SnapshotFormatEditor.class );
}
catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
// Here you can add code for customizing the properties array.
// NOTE(review): an IntrospectionException is silently swallowed (generator
// default), which would leave null entries in the returned array.
return properties; }//GEN-LAST:Properties
// EventSet identifiers//GEN-FIRST:Events
// EventSet array
/*lazy EventSetDescriptor*/;
private static EventSetDescriptor[] getEdescriptor(){
EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
// Here you can add code for customizing the event sets array.
return eventSets; }//GEN-LAST:Events
// Method identifiers //GEN-FIRST:Methods
private static final int METHOD_netStarted0 = 0;
private static final int METHOD_cicleTerminated1 = 1;
private static final int METHOD_netStopped2 = 2;
// Method array
/*lazy MethodDescriptor*/;
private static MethodDescriptor[] getMdescriptor(){
MethodDescriptor[] methods = new MethodDescriptor[3];
try {
methods[METHOD_netStarted0] = new MethodDescriptor ( org.joone.util.SnapshotRecorder.class.getMethod("netStarted", new Class[] {org.joone.engine.NeuralNetEvent.class}));
methods[METHOD_netStarted0].setDisplayName ( "" );
methods[METHOD_cicleTerminated1] = new MethodDescriptor ( org.joone.util.SnapshotRecorder.class.getMethod("cicleTerminated", new Class[] {org.joone.engine.NeuralNetEvent.class}));
methods[METHOD_cicleTerminated1].setDisplayName ( "" );
methods[METHOD_netStopped2] = new MethodDescriptor ( org.joone.util.SnapshotRecorder.class.getMethod("netStopped", new Class[] {org.joone.engine.NeuralNetEvent.class}));
methods[METHOD_netStopped2].setDisplayName ( "" );
}
catch( Exception e) {}//GEN-HEADEREND:Methods
// Here you can add code for customizing the methods array.
// NOTE(review): reflection failures (NoSuchMethodException etc.) are
// silently swallowed (generator default), which would leave null entries.
return methods; }//GEN-LAST:Methods
private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
private static final int defaultEventIndex = -1;//GEN-END:Idx
//GEN-FIRST:Superclass
// Here you can add code for customizing the Superclass BeanInfo.
//GEN-LAST:Superclass
/**
 * Gets the bean's <code>BeanDescriptor</code>s.
 *
 * @return BeanDescriptor describing the editable
 * properties of this bean. May return null if the
 * information should be obtained by automatic analysis.
 */
public BeanDescriptor getBeanDescriptor() {
return getBdescriptor();
}
/**
 * Gets the bean's <code>PropertyDescriptor</code>s.
 *
 * @return An array of PropertyDescriptors describing the editable
 * properties supported by this bean. May return null if the
 * information should be obtained by automatic analysis.
 * <p>
 * If a property is indexed, then its entry in the result array will
 * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
 * A client of getPropertyDescriptors can use "instanceof" to check
 * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
 */
public PropertyDescriptor[] getPropertyDescriptors() {
return getPdescriptor();
}
/**
 * Gets the bean's <code>EventSetDescriptor</code>s.
 *
 * @return An array of EventSetDescriptors describing the kinds of
 * events fired by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public EventSetDescriptor[] getEventSetDescriptors() {
return getEdescriptor();
}
/**
 * Gets the bean's <code>MethodDescriptor</code>s.
 *
 * @return An array of MethodDescriptors describing the methods
 * implemented by this bean. May return null if the information
 * should be obtained by automatic analysis.
 */
public MethodDescriptor[] getMethodDescriptors() {
return getMdescriptor();
}
/**
 * A bean may have a "default" property that is the property that will
 * most commonly be initially chosen for update by humans who are
 * customizing the bean.
 * @return Index of default property in the PropertyDescriptor array
 * returned by getPropertyDescriptors.
 * <P> Returns -1 if there is no default property.
 */
public int getDefaultPropertyIndex() {
return defaultPropertyIndex;
}
/**
 * A bean may have a "default" event that is the event that will
 * most commonly be used by humans when using the bean.
 * @return Index of default event in the EventSetDescriptor array
 * returned by getEventSetDescriptors.
 * <P> Returns -1 if there is no default event.
 */
public int getDefaultEventIndex() {
return defaultEventIndex;
}
}
| Java |
/*
* CSVParser.java
*
* Created on 21 feb 2003, 21.23
*/
package org.joone.util;
import java.util.*;
import org.joone.log.*;
/** Comma Separated Values Parser.
 * Parses a string of comma separated tokens. Each token may hold a single
 * value or (for integer parsing, when enabled) a range of two values
 * separated by '-'. Tokens are trimmed before parsing.
 * @author pmarrone
 */
public class CSVParser {

    /** The raw comma-separated string handed in at construction time. */
    private String m_values;
    /** Whether integer tokens such as "3-5" may denote an inclusive range. */
    private boolean range_allowed = true;

    private static final ILogger log = LoggerFactory.getLogger( CSVParser.class );
    /** Character separating the two endpoints of an integer range. */
    private static final char RANGE_SEPARATOR = '-';

    /** Creates a new instance of CSVParser with ranges allowed.
     * @param values The string containing the values to parse
     */
    public CSVParser(String values) {
        this(values, true);
    }

    /** Creates a new instance of CSVParser.
     * @param values The string containing the values to parse
     * @param range true (default) if ranges allowed
     */
    public CSVParser(String values, boolean range) {
        m_values = values;
        range_allowed = range;
    }

    /** Parses the string and returns every integer value encountered.
     * For instance, the string '1,3-5,8,10-12' yields [1,3,4,5,8,10,11,12].
     * A reversed range such as '5-3' is normalized to ascending order.
     * WARNING: A RANGE CANNOT CONTAIN NEGATIVE NUMBERS (a leading '-' is read
     * as the sign of a single negative value instead).
     * @return an array of integer containing all the values parsed
     * @throws NumberFormatException on a malformed token, or on any range
     *         token when ranges are disabled
     */
    public int[] parseInt() throws NumberFormatException {
        Vector parsed = new Vector();
        for (StringTokenizer st = new StringTokenizer(m_values, ","); st.hasMoreTokens(); ) {
            String token = st.nextToken().trim();
            int sepIdx = token.indexOf(RANGE_SEPARATOR);
            if (sepIdx <= 0) {
                // single value; a '-' at position 0 is just a negative sign
                try {
                    parsed.add(new Integer(Integer.parseInt(token)));
                } catch (NumberFormatException nfe) {
                    throw new NumberFormatException("Error parsing '"+m_values+"' : '"+token+"' is not a valid integer value");
                }
            } else if (!range_allowed) {
                throw new NumberFormatException("Error parsing '"+m_values+"' : range not allowed");
            } else {
                // range token: expand both endpoints inclusively
                try {
                    int lo = Integer.parseInt(token.substring(0, sepIdx));
                    int hi = Integer.parseInt(token.substring(sepIdx + 1));
                    if (lo > hi) {
                        int swap = lo;
                        lo = hi;
                        hi = swap;
                    }
                    for (int n = lo; n <= hi; ++n) {
                        parsed.add(new Integer(n));
                    }
                } catch (NumberFormatException nfe) {
                    throw new NumberFormatException("Error parsing '"+m_values+"' : '"+token+"' contains not valid integer values");
                }
            }
        }
        int[] result = new int[parsed.size()];
        for (int v = 0; v < result.length; ++v) {
            result[v] = ((Integer) parsed.elementAt(v)).intValue();
        }
        return result;
    }

    /** Parses the string and returns every double value encountered.
     * For instance, the string '1.0,-1.0,0.0' yields [1.0,-1.0,0.0].
     * WARNING: RANGE NOT ALLOWED, AS IT MAKES NO SENSE IN THIS CASE.
     * @return an array of double containing all the values parsed
     * @throws NumberFormatException on a malformed token or a range token
     */
    public double[] parseDouble() throws NumberFormatException {
        Vector parsed = new Vector();
        for (StringTokenizer st = new StringTokenizer(m_values, ","); st.hasMoreTokens(); ) {
            String token = st.nextToken().trim();
            if (token.indexOf(RANGE_SEPARATOR) > 0) {
                // ranges are meaningless over the reals
                throw new NumberFormatException("Error parsing '"+m_values+"' : range not allowed for not integer values");
            }
            try {
                parsed.add(new Double(Double.parseDouble(token)));
            } catch (NumberFormatException nfe) {
                throw new NumberFormatException("Error parsing '"+m_values+"' : '"+token+"' is not a valid numeric value");
            }
        }
        double[] result = new double[parsed.size()];
        for (int v = 0; v < result.length; ++v) {
            result[v] = ((Double) parsed.elementAt(v)).doubleValue();
        }
        return result;
    }

    /** Smoke test exercising both parse methods. */
    public static void main(String args[]) {
        CSVParser doubleParser = new CSVParser("1.0,-3.6,1.4,15");
        double[] dd = doubleParser.parseDouble();
        log.debug("Double values:");
        if (dd != null) {
            for (int i = 0; i < dd.length; ++i) {
                log.debug("array["+i+"] = "+dd[i]);
            }
        }
        CSVParser intParser = new CSVParser("1,-3,4-8,11");
        int[] ii = intParser.parseInt();
        log.debug("Integer values:");
        if (ii != null) {
            for (int i = 0; i < ii.length; ++i) {
                log.debug("array["+i+"] = "+ii[i]);
            }
        }
    }
}
| Java |
package org.joone.util;
import org.joone.engine.*;
import org.joone.util.CSVParser;
/** <P>Changes the specified input serie data so that it becomes a moving average of
 * itself. This plugin operates on specified serie/s of data in a vertical fashion.</P>
 * <BR>
 * <P>For example if the serie to be converted contained the following data
 * ....</P>
 * <BR>5<BR>15<BR>5<BR> <P> and the requested moving average was set at 2 then the
 * serie would become <BR>0<BR>10<BR>12.5<BR> <P> Any data prior to the moving
 * average spec is set at 0 as there is not enough data to calculate the actual
 * moving average. The data is NOT
 * normalised. To normalise the data use a {@link
 * org.joone.util.NormalizerConverterPlugIn NormalizerConverterPlugIn}.</P>
 * <P>NOTE(review): tracing the current convert() implementation, every window
 * reads only original (not yet overwritten) values, so the example above
 * yields 0, 10, 10 rather than 0, 10, 12.5 -- the example looks stale;
 * confirm the intended semantics.</P>
 * @author Julien Norman
 */
public class MovingAveragePlugIn extends ConverterPlugIn {
static final long serialVersionUID = -5679399800426091523L;
/** Comma-delimited list of moving-average window lengths, one per serie
 * named in the advanced serie selector (parsed with ranges disabled). */
private String AdvancedMovAvgSpec = new String("");
/**
 * Default MovingAveragePlugIn constructor.
 */
public MovingAveragePlugIn() {
super();
}
/**
 * MovingAveragePlugIn constructor that allows specification of the Advanced Serie Selector
 * and the Moving Average Specification.
 */
public MovingAveragePlugIn(String newAdvSerieSel,String newMovAvgSpec) {
super();
setAdvancedMovAvgSpec(newMovAvgSpec);
setAdvancedSerieSelector(newAdvSerieSel);
}
/**
 * Start the conversion to a moving average for the required serie.
 *
 * @param serie the index of the serie to convert.
 * @return true if the serie was actually converted.
 */
protected boolean convert(int serie) {
boolean retValue = false;
int s = getInputVector().size(); // (unused)
int i;
double CurrentMovingAvgerage = 0;
double Sum = 0;
int MovingAverageSpec = 0;
int CurrentItem = 0; // (unused)
// ranges make no sense for window lengths, hence range_allowed = false
CSVParser MovParse = new CSVParser(AdvancedMovAvgSpec,false);
int [] MovAvgArray = MovParse.parseInt();
int index = getSerieIndexNumber(serie);
Pattern currPE;
if ( index > -1 ) // If the serie was found in the spec list
{
if ( index < MovAvgArray.length ) // Check that we have an appropriate average.
{
MovingAverageSpec = MovAvgArray[index];
}
}
if ( MovingAverageSpec > 0 ) // If we have found a moving average spec for this serie then start to convert
{
// NOTE(review): the strict '>' skips conversion when the data size equals
// the window length, although one full average could be computed -- confirm.
if ( getInputVector().size() > MovingAverageSpec ) // Check that there is enough data
{
Sum = 0;
CurrentMovingAvgerage = 0;
// Loop through data starting at the end, so each window reads only
// values that have not yet been overwritten in this pass.
for (i=getInputVector().size()-1;i>-1;i--) {
currPE = (Pattern) getInputVector().elementAt(i); // Set current pattern
if ( i<MovingAverageSpec-1 ) {
// Set any data less than MovingAverageSpec to 0
CurrentMovingAvgerage = 0;
}
else {
// Sum the window of MovingAverageSpec values ending at i and average
Sum = 0;
for (int j=i;j>i-MovingAverageSpec;j--)
Sum = Sum + getValuePoint(j, serie);
CurrentMovingAvgerage = Sum / MovingAverageSpec;
}
currPE.setValue(serie, CurrentMovingAvgerage );
retValue = true;
}
}
}
//double vMax = getValuePoint(0, serie);
//currPE = (Pattern) getInputVector().elementAt(i);
//currPE.setValue(serie, v);
return retValue;
}
/**
 * Gets the Moving Average value/s requested by the user.
 * @return String the comma-delimited moving average specification.
 */
public String getAdvancedMovAvgSpec() {
return(AdvancedMovAvgSpec);
}
/**
 * Sets the Moving Average value/s requested by the user. It must be a comma delimited list of moving average values.
 * E.g 10,20,12 would request a moving average 10 in the first specified serie as in
 * the Advanced Serie Selector then a moving average of 20 on the second and a
 * moving average of 12 on the third.
 * Fires a data-changed notification only when the value actually changes.
 * @param newAdvancedMovAvgSpec the new comma-delimited specification.
 */
public void setAdvancedMovAvgSpec(String newAdvancedMovAvgSpec ) {
if ( AdvancedMovAvgSpec.compareTo(newAdvancedMovAvgSpec ) != 0 ) {
AdvancedMovAvgSpec = newAdvancedMovAvgSpec ;
this.fireDataChanged();
}
}
}
package org.joone.util;
import java.beans.*;
/**
 * BeanInfo companion for the {@code DynamicAnnealing} plug-in, exposing its
 * editable properties ("name", "step" displayed as "change %", "rate") and
 * its {@code cicleTerminated}/{@code netStopped} event methods to visual
 * bean editors.
 * <p>
 * NOTE(review): this class is generated by the NetBeans GUI builder (see the
 * GEN-* markers); avoid hand-editing inside the generated regions, as the IDE
 * may overwrite such changes on regeneration.
 */
public class DynamicAnnealingBeanInfo extends SimpleBeanInfo {

    // Bean descriptor //GEN-FIRST:BeanDescriptor
    /*lazy BeanDescriptor*/;

    // Builds the descriptor lazily on each call (no caching).
    private static BeanDescriptor getBdescriptor(){
        BeanDescriptor beanDescriptor = new BeanDescriptor ( DynamicAnnealing.class , null );//GEN-HEADEREND:BeanDescriptor
        // Here you can add code for customizing the BeanDescriptor.
        return beanDescriptor; }//GEN-LAST:BeanDescriptor

    // Property identifiers //GEN-FIRST:Properties
    private static final int PROPERTY_name = 0;
    private static final int PROPERTY_step = 1;
    private static final int PROPERTY_rate = 2;

    // Property array
    /*lazy PropertyDescriptor*/;

    private static PropertyDescriptor[] getPdescriptor(){
        PropertyDescriptor[] properties = new PropertyDescriptor[3];
        try {
            properties[PROPERTY_name] = new PropertyDescriptor ( "name", DynamicAnnealing.class, "getName", "setName" );
            properties[PROPERTY_step] = new PropertyDescriptor ( "step", DynamicAnnealing.class, "getStep", "setStep" );
            properties[PROPERTY_step].setDisplayName ( "change %" );
            properties[PROPERTY_rate] = new PropertyDescriptor ( "rate", DynamicAnnealing.class, "getRate", "setRate" );
        }
        // NOTE(review): generated code swallows IntrospectionException silently;
        // a descriptor slot may remain null if a getter/setter name is wrong.
        catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
        // Here you can add code for customizing the properties array.
        return properties; }//GEN-LAST:Properties

    // EventSet identifiers//GEN-FIRST:Events

    // EventSet array
    /*lazy EventSetDescriptor*/;

    private static EventSetDescriptor[] getEdescriptor(){
        EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
        // Here you can add code for customizing the event sets array.
        return eventSets; }//GEN-LAST:Events

    // Method identifiers //GEN-FIRST:Methods
    private static final int METHOD_cicleTerminated0 = 0;
    private static final int METHOD_netStopped1 = 1;

    // Method array
    /*lazy MethodDescriptor*/;

    private static MethodDescriptor[] getMdescriptor(){
        MethodDescriptor[] methods = new MethodDescriptor[2];
        try {
            methods[METHOD_cicleTerminated0] = new MethodDescriptor ( org.joone.util.DynamicAnnealing.class.getMethod("cicleTerminated", new Class[] {org.joone.engine.NeuralNetEvent.class}));
            methods[METHOD_cicleTerminated0].setDisplayName ( "" );
            methods[METHOD_netStopped1] = new MethodDescriptor ( org.joone.util.DynamicAnnealing.class.getMethod("netStopped", new Class[] {org.joone.engine.NeuralNetEvent.class}));
            methods[METHOD_netStopped1].setDisplayName ( "" );
        }
        // NOTE(review): NoSuchMethodException is swallowed here as well.
        catch( Exception e) {}//GEN-HEADEREND:Methods
        // Here you can add code for customizing the methods array.
        return methods; }//GEN-LAST:Methods

    // -1 means "no default property / event" (see the accessors below).
    private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
    private static final int defaultEventIndex = -1;//GEN-END:Idx

    //GEN-FIRST:Superclass
    // Here you can add code for customizing the Superclass BeanInfo.
    //GEN-LAST:Superclass

    /**
     * Gets the bean's <code>BeanDescriptor</code>s.
     *
     * @return BeanDescriptor describing the editable
     * properties of this bean. May return null if the
     * information should be obtained by automatic analysis.
     */
    public BeanDescriptor getBeanDescriptor() {
        return getBdescriptor();
    }

    /**
     * Gets the bean's <code>PropertyDescriptor</code>s.
     *
     * @return An array of PropertyDescriptors describing the editable
     * properties supported by this bean. May return null if the
     * information should be obtained by automatic analysis.
     * <p>
     * If a property is indexed, then its entry in the result array will
     * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
     * A client of getPropertyDescriptors can use "instanceof" to check
     * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
     */
    public PropertyDescriptor[] getPropertyDescriptors() {
        return getPdescriptor();
    }

    /**
     * Gets the bean's <code>EventSetDescriptor</code>s.
     *
     * @return An array of EventSetDescriptors describing the kinds of
     * events fired by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public EventSetDescriptor[] getEventSetDescriptors() {
        return getEdescriptor();
    }

    /**
     * Gets the bean's <code>MethodDescriptor</code>s.
     *
     * @return An array of MethodDescriptors describing the methods
     * implemented by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public MethodDescriptor[] getMethodDescriptors() {
        return getMdescriptor();
    }

    /**
     * A bean may have a "default" property that is the property that will
     * mostly commonly be initially chosen for update by human's who are
     * customizing the bean.
     * @return Index of default property in the PropertyDescriptor array
     * returned by getPropertyDescriptors.
     * <P> Returns -1 if there is no default property.
     */
    public int getDefaultPropertyIndex() {
        return defaultPropertyIndex;
    }

    /**
     * A bean may have a "default" event that is the event that will
     * mostly commonly be used by human's when using the bean.
     * @return Index of default event in the EventSetDescriptor array
     * returned by getEventSetDescriptors.
     * <P> Returns -1 if there is no default event.
     */
    public int getDefaultEventIndex() {
        return defaultEventIndex;
    }
}
| Java |
package org.joone.util;
import java.beans.*;
/**
 * BeanInfo companion for the {@code MonitorPlugin} base class, exposing its
 * editable properties ("name", "rate") and its neural-net event methods
 * ({@code cicleTerminated}, {@code netStopped}, {@code netStarted},
 * {@code errorChanged}) to visual bean editors.
 * <p>
 * NOTE(review): this class is generated by the NetBeans GUI builder (see the
 * GEN-* markers); avoid hand-editing inside the generated regions, as the IDE
 * may overwrite such changes on regeneration.
 */
public class MonitorPluginBeanInfo extends SimpleBeanInfo {

    // Bean descriptor //GEN-FIRST:BeanDescriptor
    /*lazy BeanDescriptor*/;

    // Builds the descriptor lazily on each call (no caching).
    private static BeanDescriptor getBdescriptor(){
        BeanDescriptor beanDescriptor = new BeanDescriptor ( MonitorPlugin.class , null );//GEN-HEADEREND:BeanDescriptor
        // Here you can add code for customizing the BeanDescriptor.
        return beanDescriptor; }//GEN-LAST:BeanDescriptor

    // Property identifiers //GEN-FIRST:Properties
    private static final int PROPERTY_name = 0;
    private static final int PROPERTY_rate = 1;

    // Property array
    /*lazy PropertyDescriptor*/;

    private static PropertyDescriptor[] getPdescriptor(){
        PropertyDescriptor[] properties = new PropertyDescriptor[2];
        try {
            properties[PROPERTY_name] = new PropertyDescriptor ( "name", MonitorPlugin.class, "getName", "setName" );
            properties[PROPERTY_rate] = new PropertyDescriptor ( "rate", MonitorPlugin.class, "getRate", "setRate" );
        }
        // NOTE(review): generated code swallows IntrospectionException silently;
        // a descriptor slot may remain null if a getter/setter name is wrong.
        catch( IntrospectionException e) {}//GEN-HEADEREND:Properties
        // Here you can add code for customizing the properties array.
        return properties; }//GEN-LAST:Properties

    // EventSet identifiers//GEN-FIRST:Events

    // EventSet array
    /*lazy EventSetDescriptor*/;

    private static EventSetDescriptor[] getEdescriptor(){
        EventSetDescriptor[] eventSets = new EventSetDescriptor[0];//GEN-HEADEREND:Events
        // Here you can add code for customizing the event sets array.
        return eventSets; }//GEN-LAST:Events

    // Method identifiers //GEN-FIRST:Methods
    private static final int METHOD_cicleTerminated0 = 0;
    private static final int METHOD_netStopped1 = 1;
    private static final int METHOD_netStarted2 = 2;
    private static final int METHOD_errorChanged3 = 3;

    // Method array
    /*lazy MethodDescriptor*/;

    private static MethodDescriptor[] getMdescriptor(){
        MethodDescriptor[] methods = new MethodDescriptor[4];
        try {
            methods[METHOD_cicleTerminated0] = new MethodDescriptor ( org.joone.util.MonitorPlugin.class.getMethod("cicleTerminated", new Class[] {org.joone.engine.NeuralNetEvent.class}));
            methods[METHOD_cicleTerminated0].setDisplayName ( "" );
            methods[METHOD_netStopped1] = new MethodDescriptor ( org.joone.util.MonitorPlugin.class.getMethod("netStopped", new Class[] {org.joone.engine.NeuralNetEvent.class}));
            methods[METHOD_netStopped1].setDisplayName ( "" );
            methods[METHOD_netStarted2] = new MethodDescriptor ( org.joone.util.MonitorPlugin.class.getMethod("netStarted", new Class[] {org.joone.engine.NeuralNetEvent.class}));
            methods[METHOD_netStarted2].setDisplayName ( "" );
            methods[METHOD_errorChanged3] = new MethodDescriptor ( org.joone.util.MonitorPlugin.class.getMethod("errorChanged", new Class[] {org.joone.engine.NeuralNetEvent.class}));
            methods[METHOD_errorChanged3].setDisplayName ( "" );
        }
        // NOTE(review): NoSuchMethodException is swallowed here as well.
        catch( Exception e) {}//GEN-HEADEREND:Methods
        // Here you can add code for customizing the methods array.
        return methods; }//GEN-LAST:Methods

    // -1 means "no default property / event" (see the accessors below).
    private static final int defaultPropertyIndex = -1;//GEN-BEGIN:Idx
    private static final int defaultEventIndex = -1;//GEN-END:Idx

    //GEN-FIRST:Superclass
    // Here you can add code for customizing the Superclass BeanInfo.
    //GEN-LAST:Superclass

    /**
     * Gets the bean's <code>BeanDescriptor</code>s.
     *
     * @return BeanDescriptor describing the editable
     * properties of this bean. May return null if the
     * information should be obtained by automatic analysis.
     */
    public BeanDescriptor getBeanDescriptor() {
        return getBdescriptor();
    }

    /**
     * Gets the bean's <code>PropertyDescriptor</code>s.
     *
     * @return An array of PropertyDescriptors describing the editable
     * properties supported by this bean. May return null if the
     * information should be obtained by automatic analysis.
     * <p>
     * If a property is indexed, then its entry in the result array will
     * belong to the IndexedPropertyDescriptor subclass of PropertyDescriptor.
     * A client of getPropertyDescriptors can use "instanceof" to check
     * if a given PropertyDescriptor is an IndexedPropertyDescriptor.
     */
    public PropertyDescriptor[] getPropertyDescriptors() {
        return getPdescriptor();
    }

    /**
     * Gets the bean's <code>EventSetDescriptor</code>s.
     *
     * @return An array of EventSetDescriptors describing the kinds of
     * events fired by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public EventSetDescriptor[] getEventSetDescriptors() {
        return getEdescriptor();
    }

    /**
     * Gets the bean's <code>MethodDescriptor</code>s.
     *
     * @return An array of MethodDescriptors describing the methods
     * implemented by this bean. May return null if the information
     * should be obtained by automatic analysis.
     */
    public MethodDescriptor[] getMethodDescriptors() {
        return getMdescriptor();
    }

    /**
     * A bean may have a "default" property that is the property that will
     * mostly commonly be initially chosen for update by human's who are
     * customizing the bean.
     * @return Index of default property in the PropertyDescriptor array
     * returned by getPropertyDescriptors.
     * <P> Returns -1 if there is no default property.
     */
    public int getDefaultPropertyIndex() {
        return defaultPropertyIndex;
    }

    /**
     * A bean may have a "default" event that is the event that will
     * mostly commonly be used by human's when using the bean.
     * @return Index of default event in the EventSetDescriptor array
     * returned by getEventSetDescriptors.
     * <P> Returns -1 if there is no default event.
     */
    public int getDefaultEventIndex() {
        return defaultEventIndex;
    }
}
| Java |
package org.joone.util;
import org.joone.engine.*;
import java.util.Vector;
/**
* <P>UnNormalizes the input data within a predefined range.
* To enable the UnNormalizer to find the min and max within the input data specify zero
* values for setInDataMin and setInDataMax. To set user defined values for the input data
* max and min in a serie then specify non-zero values for setInDataMin and setInDataMax.</P>
* The PlugIn supports two modes - Buffered and UnBuffered. </P
* </BR>
* <P>Buffered Mode</P>
* </BR>
* <P>If the StreamOutputSynapse that this PlugIn is attached to is in buffered mode
* then the PlugIn can either search for input data min/max values
* if getInDataMin()==0 and getInDataMax()==0 or if either of these methods returns a
* non-zero value then it will use these values.</P>
* </BR>
* <P>UnBuffered Mode </P>
* </BR>
* <P>If the StreamOutputSynapse that this PlugIn is attached to is not in buffered mode
* then one of the methods {@link #setInDataMin(double newMin) setInDataMin} or {@link #setInDataMax(double newMax) setInDataMax} should have been
* called with a non-zero value. If this is not the case then this converter will not convert
* any data.</P>
* </BR>
* @author Julien Norman
*/
public class UnNormalizerOutputPlugIn extends OutputConverterPlugIn {
/** The Out Data Min property. */
private double min = 0;
/** The Out Data Max property. */
private double max = 1;
/** The In Data Max property. */
private double datamax = 0;
/** The In Data Min property. */
private double datamin = 0;
private transient double datadiff = 0;
private transient double tmpmin = 0;
private transient double tmpmax = 0;
static final long serialVersionUID = 5322361123972428588L;
/**
* The default UnNormalizerOutputPlugIn constructor.
*/
public UnNormalizerOutputPlugIn() {
super();
}
/** <P>This constructor enables a new UnNormalizerOutputPlugin to be fully constructed during
* initialisation. The format of the Advanced Serie Selector parameter
* newAdvSerieSel can be found in the javadoc documentation for {@link
* org.joone.util.OutputConverterPlugIn#setAdvancedSerieSelector(String newAdvSerie)
* setAdvancedSerieSelector} in the OutputConverterPlugIn class.</P>
* @param newAdvSerieSel The new range of serie that should be converted by this plugin.
* @param newInDataMin The minimum value to be found in the input data.
* @param newInDataMax The maximum value to be found in the input data.
* @param newOutDataMin The minimum value of the unnormalised output data.
* @param newOutDataMax The maximum value of the unnormalised output data.
*/
public UnNormalizerOutputPlugIn(String newAdvSerieSel,double newInDataMin,double newInDataMax,double newOutDataMin,double newOutDataMax) {
super();
setAdvancedSerieSelector(newAdvSerieSel);
setInDataMin(newInDataMin);
setInDataMax(newInDataMax);
setOutDataMin(newOutDataMin);
setOutDataMax(newOutDataMax);
}
/**
* Gets the max output value
* @return double The max output value
*/
public double getOutDataMax() {
return max;
}
/**
* Gets the min output value
* @return double The min output value
*/
public double getOutDataMin() {
return min;
}
/**
* Sets the new max value for the output data set.
* @param newMax double The new max value of the output data serie.
*/
public void setOutDataMax(double newMax) {
if ( max != newMax) {
max = newMax;
super.fireDataChanged();
}
}
/**
* Sets the new min value for the output data set.
* @param newMin double The new min value of the output data serie.
*/
public void setOutDataMin(double newMin) {
if ( min != newMin ) {
min = newMin;
super.fireDataChanged();
}
}
/**
* Gets the max value of the input data set
* @return double The max value of the input data set
*/
public double getInDataMax() {
return datamax;
}
/**
* Gets the min value of the input data set
* @return double The min value of the input data set
*/
public double getInDataMin() {
return datamin;
}
/**
* Sets the max value of the input data set
* @param newMax double The new max value of the input data serie.
*/
public void setInDataMax(double newMax) {
if ( datamax != newMax ) {
datamax = newMax;
super.fireDataChanged();
}
}
/**
* Sets the min value of the input data set
* @param newMin double The new min value of the input data serie.
*/
public void setInDataMin(double newMin) {
if ( datamin != newMin ) {
datamin = newMin;
super.fireDataChanged();
}
}
/** Provides buffer conversion support by converting the patterns in the buffer returned
* by getInputVector(). If both getInDataMax and getInDataMin return 0 then this method will
* search for the min/max values in the input data serie and it will use these values together
* with the methods getOutDataMin and getOutDataMax to UnNormalize the serie.
* @param serie The data serie with in the buffered patterns to convert.
*/
protected boolean convert(int serie) {
boolean retValue = false;
Vector con_pats = getInputVector();
double v = 0;
int i = 0;
int datasize = 0;
datasize = con_pats.size();
// Convert if pattern array not null
if ( con_pats != null) {
// Only convert if serie is positive or 0
if ( serie >= 0 ) {
if ( (datamax == 0) && (datamin == 0) ) // Find the data min max in the Patterns
{
setupMinMax(serie,con_pats);
}
else {
tmpmin = datamin;
tmpmax = datamax;
}
datadiff = tmpmax - tmpmin;
// Do the conversion
for (i = 0; i < datasize; i++) {
if (datadiff != 0.0) {
v = getValuePoint(i, serie);
v = (v - tmpmin) / datadiff;
v = v * (getOutDataMax() - getOutDataMin()) + getOutDataMin();
}
else {
v = getOutDataMin();
}
((Pattern)con_pats.elementAt(i)).setValue(serie,v);
retValue = true;
}
}
}
return retValue;
}
/** Converts a pattern indicated by getPattern() method. Only if one of the methods
* setInDataMin and setInDataMax have been called with non-zero values.
* Note : No conversion will be perfomed if both getInDataMin()==0 and getInDataMax()==0.
* @param serie The data serie with in the buffered patterns to convert.
*/
protected void convert_pattern(int serie) {
Pattern con_pat = getPattern();
double v = 0;
// Convert if pattern not null
if ( con_pat != null ) {
// Only convert if serie is positive or 0
if ( serie >= 0 ) {
if ( (datamax != 0) || (datamin != 0) ) // If user has over ridden these vales then we can do on a pattern basis
{
datadiff = datamax - datamin;
// Do the conversion
if (datadiff != 0.0) {
v = (con_pat.getArray())[serie];
v = (v - datamin) / datadiff;
v = v * (getOutDataMax() - getOutDataMin()) + getOutDataMin();
}
else
v = getOutDataMin();
con_pat.setValue(serie,v);
}
return;
}
}
}
/**
* Find the min and max values for the specified serie in the buffer specified
* by pats_to_convert.
*/
private void setupMinMax(int serie, Vector pats_to_convert) {
int datasize = pats_to_convert.size();
int i;
double v, d;
tmpmax = getValuePoint(0, serie);
tmpmin = tmpmax;
Pattern currPE;
for (i = 0; i < datasize; i++) {
v = getValuePoint(i, serie);
if (v > tmpmax)
tmpmax = v;
else
if (v < tmpmin)
tmpmin = v;
}
}
} | Java |
/*
* AbstractConverterPlugIn.java
*
* Created on October 11, 2004, 3:52 PM
*/
package org.joone.util;
import java.util.*;
import java.io.*;
import org.joone.net.NetCheck;
import org.joone.engine.*;
import org.joone.log.*;
/**
* This abstract class must be extended to implement plug-ins for input or output
* data pre- or post-processing.
*
* <!-- Note
* This class is created to remove differences and duplicated code between
* the ConverterPlugIn and the OutputConverterPlugIn.
* -->
*
* @author Boris Jansen
*/
public abstract class AbstractConverterPlugIn implements java.io.Serializable, PlugInListener {
/** The serial version of this object. */
private static final long serialVersionUID = 5698511686417862414L;
/** The object used when logging debug, errors, warnings and info. */
private static final ILogger log = LoggerFactory.getLogger(AbstractConverterPlugIn.class);
/** The next plugin in this series of cascading plugins. */
private AbstractConverterPlugIn nextPlugIn = null;
/** The name of this plug-in object. */
private String name;
/** This flag indicates if this plug-in is connected. Whenever a plug in is connected
* it cannot be connected / added to another input stream / plug-in. */
private boolean connected;
/** A vector of objects that are listening to this object for plug-in (data changed) events. */
protected Vector pluginListeners;
/** The Vector of input patterns which this converter must process. */
private transient Vector InputVector;
/**
* The <code>AdvancedSerieSelector</code> instructs this plug-in what serie/columns
* it should process. The format of this specification is a common separated list of
* values and ranges. E.g '1,2,5,7' will instruct the converter to convert serie 1
* and 2 and 5 and 7. A range can also be used e.g '2,4,5-8,9' will instruct the
* converter to process serie 2 and 4 and 5 and 6 and 7 and 8 and 9. A range is specifed
* using a '-' character with the number of the serie on either side.
* <P>Note <b>NO</b> negative numbers can be used in the <code>AdvancedSerieSelector</code>.</P>
*/
private String AdvancedSerieSelector = new String("");
/** The series to be converted. */
private transient int [] serieSelected;
/** Creates a new instance of AbstractConverterPlugIn */
public AbstractConverterPlugIn() {
}
/**
* Creates a new instance of AbstractConverterPlugIn
*
* @param anAdvancedSerieSelector the advanced serie selector to use.
* @see setAdvancedSerieSelector()
*/
public AbstractConverterPlugIn(String anAdvancedSerieSelector) {
setAdvancedSerieSelector(anAdvancedSerieSelector);
}
/**
* Converts all the patterns contained by {@link #InputVector} and on the
* serie specifed by the call to {@link setAdvancedSerieSelector#setAdvancedSerieSelector}.
* It cascades also the conversion to the next-plugin connected in the chain.
*/
public void convertPatterns() {
apply();
cascade();
}
/**
* Applies all the conversions on the patterns contained by {@link #InputVector}
* @return true if the input buffer is changed
*/
protected boolean apply() {
boolean retValue = false;
if ((getInputVector() != null) && (getInputVector().size() > 0)) {
retValue = applyOnColumns() | applyOnRows();
} else {
log.warn( getName()+" : Plugin has no input data to convert." );
}
return retValue;
}
/**
* Applies the conversion on the patterns contained by {@link #InputVector} and on the
* columns specifed by the call to {@link setAdvancedSerieSelector#setAdvancedSerieSelector}.
*/
protected boolean applyOnColumns() {
boolean retValue = false;
Pattern currPE = (Pattern) getInputVector().elementAt(0);
int aSize = currPE.getArray().length;
// Use Advanced Serie Selector to select the serie to convert
if ( (getAdvancedSerieSelector() != null ) && (!getAdvancedSerieSelector().equals(new String(""))) ) {
int [] mySerieSelected = getSerieSelected();
for(int i = 0; i < mySerieSelected.length; i++) {
if(mySerieSelected[i]-1 < aSize) { // Check we don't go over array bounds.
retValue = convert(mySerieSelected[i]-1) | retValue;
} else {
log.warn(getName() + " : Advanced Serie Selector contains too many serie. Check the number of columns in the appropriate input synapse.");
}
}
}
return retValue;
}
/**
* Applies the conversion on the patterns contained by {@link #InputVector}
* on all the rows. Override this empty method to apply any change to the
* order of the input vector's rows.
*/
protected boolean applyOnRows() {
return false;
}
/**
* Cascades the <code>convertPatterns()</code> method call to the next plug-in.
*/
protected void cascade() {
if (getNextPlugIn() != null) { // Loop through other cascading plugins
AbstractConverterPlugIn myPlugIn = getNextPlugIn();
myPlugIn.setInputVector(getInputVector());
myPlugIn.convertPatterns();
}
}
/**
* Applies the conversion on the Nth serie of the buffered pattern data. The method is abstract
* and should be overridden by the implementing class. Implementing classes can obtain the
* input patterns by calling the {@link #getInputVector()} method. The result is a
* <code>Vector</code> of <code>Pattern</code> objects which this method should use by converting
* the requested serie.
*
* @param serie the serie to convert
*/
protected abstract boolean convert(int serie);
/**
* Gets the double value at the specified row (point) in the specifed serie /
* column.
*
* @param point The row at which to get the pattern's double value.
* @param serie The serie or column from which to obtain the value.
* @return The value at the specified point in the input vector.
*/
protected double getValuePoint(int point, int serie) {
Pattern currPE = (Pattern) getInputVector().elementAt(point);
return currPE.getArray()[serie];
}
/**
* Gets the name of this plug-in object.
*
* @return The name of this plug-in.
*/
public String getName() {
return name;
}
/**
* Sets the name of this plug-in object.
*
* @param aName New name for this object.
*/
public void setName(String aName) {
name = aName;
}
/**
* Getter for property connected.
* This property is true when this plugin has been
* attached either to a StreamInputSynapse or to
* another plugin.
* @return Value of property connected.
*/
public boolean isConnected() {
return connected;
}
/**
* Setter for property connected.
* This property is true when this plugin has been
* attached either to a StreamInputSynapse or to
* another plugin.
* @param aConnected New value of property connected.
*/
public void setConnected(boolean aConnected) {
connected = aConnected;
}
/**
* Adds an {@link PlugInListener} to this plug-in. Usually this will be the
* previous plug-in in the series of cascading plug-ins or the stream
* input/output synapse.
*
* @param aListener The listener that requires notification of events from
* this plug-in whenever data changes.
*/
public synchronized void addPlugInListener(PlugInListener aListener) {
if(!getPluginListeners().contains(aListener)) {
getPluginListeners().add(aListener);
}
}
/**
* Removes a {@link PlugInListener} that was previously registered to receive
* plugin (data changed) events.
*
* @param aListener The listener that does not want to receive any events
* anymore from this plug-in.
*/
public synchronized void removePlugInListener(PlugInListener aListener) {
if (getPluginListeners().contains(aListener)) {
getPluginListeners().remove(aListener);
}
}
/**
* Gets a vector of all the {@link PlugInListener}s that have been registerd
* to receive events from this plug-in.
*
* @return The vector of <code>PlugInListener</code>s listening to this
* converter plug-in object.
*/
protected Vector getPluginListeners() {
if (pluginListeners == null) {
pluginListeners = new Vector();
}
return pluginListeners;
}
public void dataChanged(PlugInEvent anEvent) {
fireDataChanged();
}
/**
* Fires a data changed event to all {@link PlugInListeners} that are registered
* to receive events from this plug-in object. This method calls the
* {@link InputPlugInListener#dataChanged()} method in all registered listeners.
*/
protected void fireDataChanged() {
Object[] myList;
synchronized (this) {
myList = getPluginListeners().toArray();
}
for (int i=0; i < myList.length; ++i) {
PlugInListener myListener = (PlugInListener)myList[i];
if (myListener != null) {
myListener.dataChanged(new PlugInEvent(this));
}
}
}
/**
* Gets the AdvancedSerieSelector.
*
* @return Value of property AdvancedSerieSelector.
*/
public String getAdvancedSerieSelector() {
return AdvancedSerieSelector;
}
/**
* Sets the AdvancedSerieSelector for this plugin.
* <P>The AdvancedSerieSelector instructs this plug-in what serie/columns it
* should process. The format of this specification is a common seperated list of
* values and ranges. E.g '1,2,5,7' will instruct the converter to convert serie 1
* and 2 and 5 and 7. A range can also be used e.g '2,4,5-8,9' will instruct the
* converter to process serie 2 and 4 and 5 and 6 and 7 and 8 and 9. A range is specifed
* using a '-' character with the number of the serie on either side.
* <P>Note <b>NO</b> negative numbers can be used in the <code>AdvancedSerieSelector</code>.</P>
*
* @param aNewSerieSelector New value for the <code>AdvancedSerieSelector</code>.
*/
public void setAdvancedSerieSelector(String aNewSerieSelector) {
if((AdvancedSerieSelector == null) || (AdvancedSerieSelector.compareTo(aNewSerieSelector) != 0)) {
AdvancedSerieSelector = aNewSerieSelector;
serieSelected = null;
fireDataChanged();
}
}
/**
* Getter for property <code>serieSelected</code>. Returns the list of
* selected columns to elaborate.
*
* @return Value of property <code>serieSelected</code>.
*/
protected int[] getSerieSelected() {
if(serieSelected == null) {
// if the advanced serie selected string is not parsed yet, then parse
// it now to obtain the serie selected
CSVParser myParser = new CSVParser(getAdvancedSerieSelector(), true);
serieSelected = myParser.parseInt();
}
return serieSelected;
}
/**
* Adds a plug-in at the end of the list of plug-ins.
*
* @param aNewPlugIn the new plug in to add at the end of plug ins.
* @return <code>true</code> when the plug in is added succesfully,
* <code>false</code> when the plug in is not added, e.g. in case the
* plug in is already added / connected to another synapse / plug-in.
*/
public boolean addPlugIn(AbstractConverterPlugIn aNewPlugIn) {
if(nextPlugIn == aNewPlugIn) {
return false;
}
// The null parameter is used to detach or delete a plugin
if(aNewPlugIn == null) {
// We need to declare the next plugin, if existing,
// as not more used, so it could be used again.
if (nextPlugIn != null) {
nextPlugIn.setConnected(false);
}
nextPlugIn = null;
fireDataChanged();
return true;
}
if(aNewPlugIn.isConnected()) {
// The new plugin is already connected to another plugin,
// hence cannot be used.
return false;
}
if(nextPlugIn == null) {
aNewPlugIn.setConnected(true);
aNewPlugIn.addPlugInListener(this);
nextPlugIn = aNewPlugIn;
fireDataChanged();
return true;
} else {
return nextPlugIn.addPlugIn(aNewPlugIn);
}
}
/**
* Removes (and disconnects) all (cascading) plug ins.
*/
public void removeAllPlugIns() {
if(nextPlugIn != null) {
nextPlugIn.setConnected(false);
nextPlugIn.removeAllPlugIns();
nextPlugIn = null;
} else {
// this is the last plug-in in a chain of plug ins that are removed
// just only one time it should be notified that these plug-ins are
// not used anymore (the data should not be converted anymore), so
// here we fire a data changed event JUST ONCE.
fireDataChanged();
}
}
/**
* Sets the next plug-in in a cascading series of plugins.
*
* @param aNewNextPlugIn The next plug-in in the series.
* @return <code>true</code> when the plug-in is successfully added,
* <code>false</code> otherwise.
* @deprecated {@link addPlugIn(AbstractConverterPlugIn)}
*/
public boolean setNextPlugin(AbstractConverterPlugIn aNewNextPlugIn) {
if (aNewNextPlugIn == nextPlugIn) {
return false;
}
if (aNewNextPlugIn == null) {
nextPlugIn.setConnected(false);
} else {
if (aNewNextPlugIn.isConnected()) {
return false;
}
aNewNextPlugIn.setConnected(true);
aNewNextPlugIn.addPlugInListener(this);
}
nextPlugIn = aNewNextPlugIn;
fireDataChanged();
return true;
}
/**
* Gets the next converter plug-in within this cascading series of plug-ins.
*
* @return the next plug-in within this cascading series of plug-ins.
*/
public AbstractConverterPlugIn getNextPlugIn() {
return nextPlugIn;
}
/**
* Added for XML serialization
* <p><b> **** DO NOT USE **** </b>
* <p>Use {@link #addPlugIn(AbstractConverterPlugIn)}
*/
public void setNextPlugIn(AbstractConverterPlugIn newNextPlugIn) {
addPlugIn(newNextPlugIn);
}
    /**
     * Sets the input vector of <code>Patterns</code> that this converter plugin should process.
     * No copy is taken: the converter works directly on the given vector.
     * @param newInputVector The vector of Pattern objects to process.
     */
    public void setInputVector(java.util.Vector newInputVector) {
        InputVector = newInputVector;
    }
    /**
     * Gets the input vector of <code>Patterns</code> with which this converter must process.
     *
     * @return the vector with patterns that this converter processes; the
     *         live vector set via {@link #setInputVector}, not a copy.
     */
    protected Vector getInputVector() {
        return InputVector;
    }
/**
* This method is called to perform a check on this converter's properties to
* ensure there are no errors or problems. If there is an error or problem with
* one of the properties then the issues are returned in a <code>TreeSet</code>
* object.
*
* @param checks A <code>TreeSet</code> of issues that should be added to by this
* plug-in.
* @return A <code>TreeSet</code> of errors or problems relating to the setup of
* this converter plug-in object.
* @see Synapse
*/
public TreeSet check(TreeSet checks) {
if(AdvancedSerieSelector == null || AdvancedSerieSelector.equals(new String(""))) {
checks.add(new NetCheck(NetCheck.FATAL, "Advanced Serie Selector should be populated, e.g 1,2,4." , this));
}
// Call next converter plug-in in the chain of converter plug-ins
if(getNextPlugIn() != null) {
getNextPlugIn().check(checks);
}
return checks;
}
/**
* Gets the index of the current serie number.
* @return int -1 if the serie could not be found in the serie specification.
*/
protected int getSerieIndexNumber(int serie) {
CSVParser Parse = new CSVParser(getAdvancedSerieSelector(),true);
int [] checker = Parse.parseInt();
for ( int i=0; i<checker.length;i++) {
if(checker[i] == serie+1)
return(i); // Returns index in array
}
return(-1); // Serie not found
}
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
if (getAdvancedSerieSelector() == null) // To maintain the compatibility with the old saved classes
setAdvancedSerieSelector(new String("1"));
}
}
| Java |
package org.joone.util;
import org.joone.engine.*;
/**
* Normalizes the input data within a predefined range
* Creation date: (23/10/2000 23.23.25)
* @author: Administrator
*
*/
public class NormalizerPlugIn extends ConverterPlugIn {
private double min = 0;
private double max = 1;
private double datamin = 0;
private double datamax = 0;
private static final long serialVersionUID = 4662839350631576461L;
/**
* NormalizerPlugIn constructor
*/
public NormalizerPlugIn() {
super();
}
/**
* Start the convertion
*/
protected boolean convert(int serie) {
boolean retValue = false;
int s = getInputVector().size();
int i;
double v, d;
double vMax = getValuePoint(0, serie);
double vMin = vMax;
Pattern currPE;
// If user has set the datamin and datamax to these special values 0 and 0 respectively then the min max is determined from the data.
if ( (datamin == 0 ) && ( datamax == 0 ) ) {
for (i = 0; i < s; ++i) {
v = getValuePoint(i, serie);
if (v > vMax)
vMax = v;
else
if (v < vMin)
vMin = v;
}
}
else // Otherwise set it to the what the user has requested.
{
vMax = datamax;
vMin = datamin;
}
d = vMax - vMin;
for (i = 0; i < s; ++i) {
if (d != 0.0) {
v = getValuePoint(i, serie);
v = (v - vMin) / d;
v = v * (getMax() - getMin()) + getMin();
}
else {
v = getMin();
}
currPE = (Pattern) getInputVector().elementAt(i);
currPE.setValue(serie, v);
retValue = true;
}
return retValue;
}
/**
* Gets the max value
* Creation date: (23/10/2000 23.25.55)
* @return float
*/
public double getMax() {
return max;
}
/**
* Gets the min value
* Creation date: (23/10/2000 23.25.32)
* @return float
*/
public double getMin() {
return min;
}
/**
* Sets the max value of the normalization range
* Creation date: (23/10/2000 23.25.55)
* @param newMax float
*/
public void setMax(double newMax) {
if (max != newMax ) {
max = newMax;
super.fireDataChanged();
}
}
/**
* Sets the min value of the normalization range
* Creation date: (23/10/2000 23.25.32)
* @param newMin float
*/
public void setMin(double newMin) {
if ( min != newMin ) {
min = newMin;
super.fireDataChanged();
}
}
/** Data Min / Max Params **/
/**
* Gets the max value of the input data
* Creation date: (23/10/2000 23.25.55)
* @return double The maximum value of the input data.
*/
public double getDataMax() {
return datamax;
}
/**
* Gets the min value of the input data
* Creation date: (23/10/2000 23.25.32)
* @return double The minimum value of the input data.
*/
public double getDataMin() {
return datamin;
}
/**
* Sets the max value of the input data.
* Note : The DataMin and DataMax values should be set to 99999 and -99999 respectively if the
* user requires that this plugin uses the min , max values found in the serie.
* Creation date: (23/10/2000 23.25.55)
* @param newMax double The maximum value of the input data.
*/
public void setDataMax(double newDataMax) {
if ( datamax != newDataMax ) {
datamax = newDataMax;
super.fireDataChanged();
}
}
/**
* Sets the min value of the input data
* Note : The DataMin and DataMax values should be set to 99999 and -99999 respectively if the
* user requires that this plugin uses the min , max values found in the serie.
* Creation date: (23/10/2000 23.25.32)
* @param newDataMin double The minimum value of the input data.
*/
public void setDataMin(double newDataMin) {
if ( datamin != newDataMin ) {
datamin = newDataMin;
super.fireDataChanged();
}
}
} | Java |
/*
* NotSerialize.java
*
* Created on 29 marzo 2002, 16.03
*/
package org.joone.util;
/**
 * This marker interface is implemented by all the input/output Synapses that
 * must not be serialized when the NeuralNet is exported.
 * This feature is useful to avoid serializing the GUI I/O components
 * (for instance the ChartOutputSynapse) along with the core NeuralNet object.
 * @author Paolo Marrone
 */
public interface NotSerialize {
}
| Java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.