code stringlengths 3 1.18M | language stringclasses 1 value |
|---|---|
/*
* NullAttributeClassObserver.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.core.attributeclassobservers;
import moa.classifiers.core.AttributeSplitSuggestion;
import moa.classifiers.core.splitcriteria.SplitCriterion;
import moa.core.ObjectRepository;
import moa.options.AbstractOptionHandler;
import moa.tasks.TaskMonitor;
/**
* Class for observing the class data distribution for a null attribute.
* This observer is used to disable the observation of an attribute
* Used in decision trees to monitor data statistics on leaves.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class NullAttributeClassObserver extends AbstractOptionHandler implements
        AttributeClassObserver {

    private static final long serialVersionUID = 1L;

    /** Ignores the observation entirely: a null observer keeps no statistics. */
    @Override
    public void observeAttributeClass(double attVal, int classVal, double weight) {
        // intentionally a no-op
    }

    /** Always reports zero probability, since nothing is ever observed. */
    @Override
    public double probabilityOfAttributeValueGivenClass(double attVal,
            int classVal) {
        return 0.0;
    }

    /** @return always 0.0 — no observation weight is accumulated */
    public double totalWeightOfClassObservations() {
        return 0.0;
    }

    /** @return always 0.0 — missing values are not tracked either */
    public double weightOfObservedMissingValues() {
        return 0.0;
    }

    /** Never proposes a split for the disabled attribute. */
    @Override
    public AttributeSplitSuggestion getBestEvaluatedSplitSuggestion(
            SplitCriterion criterion, double[] preSplitDist, int attIndex,
            boolean binaryOnly) {
        return null;
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // nothing to describe for a null observer
    }

    @Override
    protected void prepareForUseImpl(TaskMonitor monitor, ObjectRepository repository) {
        // no preparation required
    }

    @Override
    public void observeAttributeTarget(double attVal, double target) {
        throw new UnsupportedOperationException("Not supported yet.");
    }
}
| Java |
/*
* AttributeClassObserver.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.core.attributeclassobservers;
import moa.classifiers.core.AttributeSplitSuggestion;
import moa.classifiers.core.splitcriteria.SplitCriterion;
import moa.options.OptionHandler;
/**
* Interface for observing the class data distribution for an attribute.
* This observer monitors the class distribution of a given attribute.
* Used in naive Bayes and decision trees to monitor data statistics on leaves.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public interface AttributeClassObserver extends OptionHandler {

    /**
     * Updates statistics of this observer given an attribute value, a class
     * and the weight of the instance observed
     *
     * @param attVal the value of the attribute
     * @param classVal the class
     * @param weight the weight of the instance
     */
    public void observeAttributeClass(double attVal, int classVal, double weight);

    /**
     * Gets the probability for an attribute value given a class
     *
     * @param attVal the attribute value
     * @param classVal the class
     * @return probability for an attribute value given a class
     */
    public double probabilityOfAttributeValueGivenClass(double attVal,
            int classVal);

    /**
     * Gets the best split suggestion given a criterion and a class distribution
     *
     * @param criterion the split criterion to use
     * @param preSplitDist the class distribution before the split
     * @param attIndex the attribute index
     * @param binaryOnly true to use binary splits
     * @return suggestion of best attribute split
     */
    public AttributeSplitSuggestion getBestEvaluatedSplitSuggestion(
            SplitCriterion criterion, double[] preSplitDist, int attIndex,
            boolean binaryOnly);

    /**
     * Updates statistics of this observer given an attribute value and a
     * numeric target value — presumably for the regression setting; several
     * classification observers in this package throw
     * {@link UnsupportedOperationException} here.
     *
     * @param attVal the value of the attribute
     * @param target the numeric target value of the instance
     */
    public void observeAttributeTarget(double attVal, double target);
}
| Java |
/*
* NominalAttributeClassObserver.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.core.attributeclassobservers;
import moa.classifiers.core.AttributeSplitSuggestion;
import moa.classifiers.core.conditionaltests.NominalAttributeBinaryTest;
import moa.classifiers.core.conditionaltests.NominalAttributeMultiwayTest;
import moa.classifiers.core.splitcriteria.SplitCriterion;
import moa.core.ObjectRepository;
import moa.tasks.TaskMonitor;
import weka.core.Utils;
import moa.core.AutoExpandVector;
import moa.core.DoubleVector;
import moa.options.AbstractOptionHandler;
/**
* Class for observing the class data distribution for a nominal attribute.
* This observer monitors the class distribution of a given attribute.
* Used in naive Bayes and decision trees to monitor data statistics on leaves.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class NominalAttributeClassObserver extends AbstractOptionHandler implements
        DiscreteAttributeClassObserver {

    private static final long serialVersionUID = 1L;

    /** Total weight of all instances seen, including those with a missing value. */
    protected double totalWeightObserved = 0.0;

    /** Weight of instances whose attribute value was missing. */
    protected double missingWeightObserved = 0.0;

    /** Per-class value distributions: outer index = class, inner index = attribute value. */
    public AutoExpandVector<DoubleVector> attValDistPerClass = new AutoExpandVector<DoubleVector>();

    /**
     * Records one observation: missing values only contribute to the missing
     * weight, everything else is counted in the per-class value distribution.
     */
    @Override
    public void observeAttributeClass(double attVal, int classVal, double weight) {
        if (Utils.isMissingValue(attVal)) {
            this.missingWeightObserved += weight;
        } else {
            DoubleVector dist = this.attValDistPerClass.get(classVal);
            if (dist == null) {
                dist = new DoubleVector();
                this.attValDistPerClass.set(classVal, dist);
            }
            dist.addToValue((int) attVal, weight);
        }
        this.totalWeightObserved += weight;
    }

    /**
     * Estimates P(attVal | classVal) with Laplace smoothing (one pseudo-count
     * per observed attribute value); 0.0 when the class was never seen.
     */
    @Override
    public double probabilityOfAttributeValueGivenClass(double attVal,
            int classVal) {
        DoubleVector dist = this.attValDistPerClass.get(classVal);
        if (dist == null) {
            return 0.0;
        }
        return (dist.getValue((int) attVal) + 1.0)
                / (dist.sumOfValues() + dist.numValues());
    }

    /** @return total weight observed, missing values included */
    public double totalWeightOfClassObservations() {
        return this.totalWeightObserved;
    }

    /** @return weight of observations that had a missing attribute value */
    public double weightOfObservedMissingValues() {
        return this.missingWeightObserved;
    }

    /**
     * Evaluates the multiway split (unless binaryOnly) and every one-vs-rest
     * binary split, returning the suggestion with the highest merit. The
     * multiway split is evaluated first, so a binary split must be strictly
     * better to replace it.
     */
    @Override
    public AttributeSplitSuggestion getBestEvaluatedSplitSuggestion(
            SplitCriterion criterion, double[] preSplitDist, int attIndex,
            boolean binaryOnly) {
        AttributeSplitSuggestion best = null;
        int numValues = getMaxAttValsObserved();
        if (!binaryOnly) {
            double[][] splitDists = getClassDistsResultingFromMultiwaySplit(numValues);
            best = new AttributeSplitSuggestion(
                    new NominalAttributeMultiwayTest(attIndex), splitDists,
                    criterion.getMeritOfSplit(preSplitDist, splitDists));
        }
        for (int value = 0; value < numValues; value++) {
            double[][] splitDists = getClassDistsResultingFromBinarySplit(value);
            double merit = criterion.getMeritOfSplit(preSplitDist, splitDists);
            if (best == null || merit > best.merit) {
                best = new AttributeSplitSuggestion(
                        new NominalAttributeBinaryTest(attIndex, value),
                        splitDists, merit);
            }
        }
        return best;
    }

    /** @return the largest number of distinct attribute values seen in any class */
    public int getMaxAttValsObserved() {
        int max = 0;
        for (int c = 0; c < this.attValDistPerClass.size(); c++) {
            DoubleVector dist = this.attValDistPerClass.get(c);
            if (dist != null && dist.numValues() > max) {
                max = dist.numValues();
            }
        }
        return max;
    }

    /**
     * Transposes the stored per-class distributions into per-value class
     * distributions: result[value][class] = observed weight.
     */
    public double[][] getClassDistsResultingFromMultiwaySplit(
            int maxAttValsObserved) {
        DoubleVector[] perValue = new DoubleVector[maxAttValsObserved];
        for (int v = 0; v < perValue.length; v++) {
            perValue[v] = new DoubleVector();
        }
        for (int c = 0; c < this.attValDistPerClass.size(); c++) {
            DoubleVector dist = this.attValDistPerClass.get(c);
            if (dist == null) {
                continue;
            }
            for (int v = 0; v < dist.numValues(); v++) {
                perValue[v].addToValue(c, dist.getValue(v));
            }
        }
        double[][] result = new double[maxAttValsObserved][];
        for (int v = 0; v < result.length; v++) {
            result[v] = perValue[v].getArrayRef();
        }
        return result;
    }

    /**
     * Builds the two class distributions for a one-vs-rest split on valIndex:
     * index 0 holds "value == valIndex", index 1 holds everything else.
     */
    public double[][] getClassDistsResultingFromBinarySplit(int valIndex) {
        DoubleVector matchDist = new DoubleVector();
        DoubleVector restDist = new DoubleVector();
        for (int c = 0; c < this.attValDistPerClass.size(); c++) {
            DoubleVector dist = this.attValDistPerClass.get(c);
            if (dist == null) {
                continue;
            }
            for (int v = 0; v < dist.numValues(); v++) {
                if (v == valIndex) {
                    matchDist.addToValue(c, dist.getValue(v));
                } else {
                    restDist.addToValue(c, dist.getValue(v));
                }
            }
        }
        return new double[][]{matchDist.getArrayRef(),
                restDist.getArrayRef()};
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // nothing to describe
    }

    @Override
    protected void prepareForUseImpl(TaskMonitor monitor, ObjectRepository repository) {
        // no preparation required
    }

    @Override
    public void observeAttributeTarget(double attVal, double target) {
        throw new UnsupportedOperationException("Not supported yet.");
    }
}
| Java |
/*
* BinaryTreeNumericAttributeClassObserver.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz), Andreas Hapfelmeier (Andreas.Hapfelmeier@in.tum.de)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
*/
package moa.classifiers.core.attributeclassobservers;
import java.io.Serializable;
import moa.classifiers.core.AttributeSplitSuggestion;
import moa.classifiers.core.conditionaltests.NumericAttributeBinaryTest;
import moa.classifiers.core.splitcriteria.SplitCriterion;
import moa.core.DoubleVector;
import moa.core.ObjectRepository;
import moa.options.AbstractOptionHandler;
import moa.tasks.TaskMonitor;
/**
* Class for observing the class data distribution for a numeric attribute using a binary tree.
* This observer monitors the class distribution of a given attribute.
* Used in naive Bayes and decision trees to monitor data statistics on leaves.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class BinaryTreeNumericAttributeClassObserver extends AbstractOptionHandler
        implements NumericAttributeClassObserver {

    private static final long serialVersionUID = 1L;

    /**
     * Binary-search-tree node keyed on an observed attribute value. Each node
     * stores, per class, the total weight of observations routed to its left
     * side (values <= cut_point, the cut point itself included) and to its
     * right side (values > cut_point).
     * NOTE(review): non-static inner class — every Node carries a hidden
     * reference to the enclosing observer; it cannot be made static here
     * without changing the serialized form.
     */
    public class Node implements Serializable {

        private static final long serialVersionUID = 1L;

        // The attribute value this node splits on.
        public double cut_point;

        // Per-class weight of observations with value <= cut_point.
        public DoubleVector classCountsLeft = new DoubleVector();

        // Per-class weight of observations with value > cut_point.
        public DoubleVector classCountsRight = new DoubleVector();

        public Node left;

        public Node right;

        public Node(double val, int label, double weight) {
            this.cut_point = val;
            // A node's own value is counted on the "left" (<=) side.
            this.classCountsLeft.addToValue(label, weight);
        }

        /**
         * Routes a new observation down the tree, updating the per-class
         * counts of every node on the path; creates a new leaf when the
         * value falls off the tree.
         */
        public void insertValue(double val, int label, double weight) {
            if (val == this.cut_point) {
                this.classCountsLeft.addToValue(label, weight);
            } else if (val <= this.cut_point) {
                this.classCountsLeft.addToValue(label, weight);
                if (this.left == null) {
                    this.left = new Node(val, label, weight);
                } else {
                    this.left.insertValue(val, label, weight);
                }
            } else { // val > cut_point
                this.classCountsRight.addToValue(label, weight);
                if (this.right == null) {
                    this.right = new Node(val, label, weight);
                } else {
                    this.right.insertValue(val, label, weight);
                }
            }
        }
    }

    // Root of the tree; null until the first non-missing value is observed.
    public Node root = null;

    /**
     * Records one observation. Missing values (NaN) are silently skipped —
     * unlike the nominal observer, no missing-weight statistic is kept.
     */
    @Override
    public void observeAttributeClass(double attVal, int classVal, double weight) {
        if (Double.isNaN(attVal)) { //Instance.isMissingValue(attVal)
            // missing value: deliberately ignored
        } else {
            if (this.root == null) {
                this.root = new Node(attVal, classVal, weight);
            } else {
                this.root.insertValue(attVal, classVal, weight);
            }
        }
    }

    /**
     * Not implemented: always returns 0.0, so naive Bayes predictions that
     * rely on this observer are effectively disabled.
     */
    @Override
    public double probabilityOfAttributeValueGivenClass(double attVal,
            int classVal) {
        // TODO: NaiveBayes broken until implemented
        return 0.0;
    }

    /**
     * Evaluates every cut point stored in the tree and returns the binary
     * split suggestion with the highest merit.
     */
    @Override
    public AttributeSplitSuggestion getBestEvaluatedSplitSuggestion(
            SplitCriterion criterion, double[] preSplitDist, int attIndex,
            boolean binaryOnly) {
        return searchForBestSplitOption(this.root, null, null, null, null, false,
                criterion, preSplitDist, attIndex);
    }

    /**
     * Recursive search over all cut points. For each node it reconstructs the
     * class distributions of "<= cut_point" vs "> cut_point" incrementally
     * from the parent's distributions, evaluates the merit, and recurses into
     * both subtrees.
     *
     * @param currentNode       subtree root being evaluated (null terminates)
     * @param currentBestOption best suggestion found so far
     * @param actualParentLeft  the parent's left class counts (used to isolate
     *                          the weight sitting exactly at the parent's cut)
     * @param parentLeft        left split distribution computed at the parent
     * @param parentRight       right split distribution computed at the parent
     * @param leftChild         true when currentNode is its parent's left child
     */
    protected AttributeSplitSuggestion searchForBestSplitOption(
            Node currentNode, AttributeSplitSuggestion currentBestOption,
            double[] actualParentLeft,
            double[] parentLeft, double[] parentRight, boolean leftChild,
            SplitCriterion criterion, double[] preSplitDist, int attIndex) {
        if (currentNode == null) {
            return currentBestOption;
        }
        DoubleVector leftDist = new DoubleVector();
        DoubleVector rightDist = new DoubleVector();
        if (parentLeft == null) {
            // Root: the node's own counts already partition the data.
            leftDist.addValues(currentNode.classCountsLeft);
            rightDist.addValues(currentNode.classCountsRight);
        } else {
            leftDist.addValues(parentLeft);
            rightDist.addValues(parentRight);
            if (leftChild) {
                //get the exact statistics of the parent value
                DoubleVector exactParentDist = new DoubleVector();
                exactParentDist.addValues(actualParentLeft);
                exactParentDist.subtractValues(currentNode.classCountsLeft);
                exactParentDist.subtractValues(currentNode.classCountsRight);
                // move the subtrees
                leftDist.subtractValues(currentNode.classCountsRight);
                rightDist.addValues(currentNode.classCountsRight);
                // move the exact value from the parent
                rightDist.addValues(exactParentDist);
                leftDist.subtractValues(exactParentDist);
            } else {
                leftDist.addValues(currentNode.classCountsLeft);
                rightDist.subtractValues(currentNode.classCountsLeft);
            }
        }
        double[][] postSplitDists = new double[][]{leftDist.getArrayRef(),
                rightDist.getArrayRef()};
        double merit = criterion.getMeritOfSplit(preSplitDist, postSplitDists);
        if ((currentBestOption == null) || (merit > currentBestOption.merit)) {
            currentBestOption = new AttributeSplitSuggestion(
                    new NumericAttributeBinaryTest(attIndex,
                    currentNode.cut_point, true), postSplitDists, merit);
        }
        // NOTE(review): both recursive calls pass classCountsLeft as
        // actualParentLeft — the right-child call looks asymmetric; confirm
        // against the intended statistics before changing.
        currentBestOption = searchForBestSplitOption(currentNode.left,
                currentBestOption, currentNode.classCountsLeft.getArrayRef(), postSplitDists[0], postSplitDists[1], true,
                criterion, preSplitDist, attIndex);
        currentBestOption = searchForBestSplitOption(currentNode.right,
                currentBestOption, currentNode.classCountsLeft.getArrayRef(), postSplitDists[0], postSplitDists[1], false,
                criterion, preSplitDist, attIndex);
        return currentBestOption;
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // TODO Auto-generated method stub
    }

    @Override
    protected void prepareForUseImpl(TaskMonitor monitor, ObjectRepository repository) {
        // TODO Auto-generated method stub
    }

    @Override
    public void observeAttributeTarget(double attVal, double target) {
        throw new UnsupportedOperationException("Not supported yet.");
    }
}
| Java |
/*
* CusumDM.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Manuel Baena (mbaena@lcc.uma.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
/**
* Drift detection method based on the CUSUM test.
*
*
* @author Manuel Baena (mbaena@lcc.uma.es)
* @version $Revision: 7 $
*/
public class CusumDM extends AbstractChangeDetector {
private static final long serialVersionUID = -3518369648142099719L;
public IntOption minNumInstancesOption = new IntOption(
"minNumInstances",
'n',
"The minimum number of instances before permitting detecting change.",
30, 0, Integer.MAX_VALUE);
public FloatOption deltaOption = new FloatOption("delta", 'd',
"Delta parameter of the Cusum Test", 0.005, 0.0, 1.0);
public FloatOption lambdaOption = new FloatOption("lambda", 'l',
"Threshold parameter of the Cusum Test", 50, 0.0, Float.MAX_VALUE);
private int m_n;
private double sum;
private double x_mean;
private double alpha;
private double delta;
private double lambda;
public CusumDM() {
resetLearning();
}
@Override
public void resetLearning() {
m_n = 1;
x_mean = 0.0;
sum = 0.0;
delta = this.deltaOption.getValue();
lambda = this.lambdaOption.getValue();
}
@Override
public void input(double x) {
// It monitors the error rate
if (this.isChangeDetected == true || this.isInitialized == false) {
resetLearning();
this.isInitialized = true;
}
x_mean = x_mean + (x - x_mean) / (double) m_n;
sum = Math.max(0, sum + x - x_mean - this.delta);
m_n++;
// System.out.print(prediction + " " + m_n + " " + (m_p+m_s) + " ");
this.estimation = x_mean;
this.isChangeDetected = false;
this.isWarningZone = false;
this.delay = 0;
if (m_n < this.minNumInstancesOption.getValue()) {
return;
}
if (sum > this.lambda) {
this.isChangeDetected = true;
}
}
@Override
public void getDescription(StringBuilder sb, int indent) {
// TODO Auto-generated method stub
}
@Override
protected void prepareForUseImpl(TaskMonitor monitor,
ObjectRepository repository) {
// TODO Auto-generated method stub
}
} | Java |
/*
* DDM.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Manuel Baena (mbaena@lcc.uma.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
/**
* Drift detection method based on the DDM method of João Gama (SBIA 2004).
*
* <p>João Gama, Pedro Medas, Gladys Castillo, Pedro Pereira Rodrigues: Learning
* with Drift Detection. SBIA 2004: 286-295 </p>
*
* @author Manuel Baena (mbaena@lcc.uma.es)
* @version $Revision: 7 $
*/
public class DDM extends AbstractChangeDetector {
private static final long serialVersionUID = -3518369648142099719L;
//private static final int DDM_MINNUMINST = 30;
public IntOption minNumInstancesOption = new IntOption(
"minNumInstances",
'n',
"The minimum number of instances before permitting detecting change.",
30, 0, Integer.MAX_VALUE);
private int m_n;
private double m_p;
private double m_s;
private double m_psmin;
private double m_pmin;
private double m_smin;
public DDM() {
resetLearning();
}
@Override
public void resetLearning() {
m_n = 1;
m_p = 1;
m_s = 0;
m_psmin = Double.MAX_VALUE;
m_pmin = Double.MAX_VALUE;
m_smin = Double.MAX_VALUE;
}
@Override
public void input(double prediction) {
// prediction must be 1 or 0
// It monitors the error rate
if (this.isChangeDetected == true || this.isInitialized == false) {
resetLearning();
this.isInitialized = true;
}
m_p = m_p + (prediction - m_p) / (double) m_n;
m_s = Math.sqrt(m_p * (1 - m_p) / (double) m_n);
m_n++;
// System.out.print(prediction + " " + m_n + " " + (m_p+m_s) + " ");
this.estimation = m_p;
this.isChangeDetected = false;
this.isWarningZone = false;
this.delay = 0;
if (m_n < this.minNumInstancesOption.getValue()) {
return;
}
if (m_p + m_s <= m_psmin) {
m_pmin = m_p;
m_smin = m_s;
m_psmin = m_p + m_s;
}
if (m_n > this.minNumInstancesOption.getValue() && m_p + m_s > m_pmin + 3 * m_smin) {
//System.out.println(m_p + ",D");
this.isChangeDetected = true;
//resetLearning();
} else if (m_p + m_s > m_pmin + 2 * m_smin) {
//System.out.println(m_p + ",W");
this.isWarningZone = true;
} else {
this.isWarningZone = false;
//System.out.println(m_p + ",N");
}
}
@Override
public void getDescription(StringBuilder sb, int indent) {
// TODO Auto-generated method stub
}
@Override
protected void prepareForUseImpl(TaskMonitor monitor,
ObjectRepository repository) {
// TODO Auto-generated method stub
}
} | Java |
/*
* ADWIN.java
* Copyright (C) 2008 UPC-Barcelona Tech, Catalonia
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.core.driftdetection;
import moa.AbstractMOAObject;
/**
* ADaptive sliding WINdow method. This method is a change detector and estimator.
* It keeps a variable-length window of recently seen
* items, with the property that the window has the maximal length statistically
* consistent with the hypothesis "there has been no change in the average value
* inside the window".
*
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public class ADWIN extends AbstractMOAObject {
/**
 * Doubly-linked list of bucket rows: the head holds the newest (smallest,
 * size-1) buckets, the tail holds the oldest (largest) ones.
 */
private class List extends AbstractMOAObject {

    // Number of nodes (rows) currently in the list.
    protected int count;

    protected ListItem head;

    protected ListItem tail;

    public List() {
        // post: initializes the list to contain one empty row.
        clear();
        addToHead();
    }

    /* Interface Store Methods */
    public int size() {
        // post: returns the number of elements in the list.
        return this.count;
    }

    public ListItem head() {
        // post: returns the first node of the list.
        return this.head;
    }

    public ListItem tail() {
        // post: returns the last node of the list.
        return this.tail;
    }

    public boolean isEmpty() {
        // post: returns true iff the store is empty.
        return (this.size() == 0);
    }

    public void clear() {
        // post: clears the list so that it contains no elements.
        this.head = null;
        this.tail = null;
        this.count = 0;
    }

    /* Interface List Methods */
    public void addToHead() {
        // post: a fresh empty node is added to the beginning of the list.
        this.head = new ListItem(this.head, null);
        if (this.tail == null) {
            this.tail = this.head;
        }
        this.count++;
    }

    public void removeFromHead() {
        // pre: list is not empty
        // post: the first node is unlinked from the list.
        this.head = this.head.next();
        if (this.head != null) {
            this.head.setPrevious(null);
        } else {
            this.tail = null;
        }
        this.count--;
        return;
    }

    public void addToTail() {
        // post: a fresh empty node is added at the end of the list.
        this.tail = new ListItem(null, this.tail);
        if (this.head == null) {
            this.head = this.tail;
        }
        this.count++;
    }

    public void removeFromTail() {
        // pre: list is not empty
        // post: the last node is unlinked from the list.
        this.tail = this.tail.previous();
        if (this.tail == null) {
            this.head = null;
        } else {
            this.tail.setNext(null);
        }
        this.count--;
        return;
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // no description provided
    }
}
/**
 * One row of the exponential histogram: a doubly-linked node holding up to
 * MAXBUCKETS + 1 buckets, each storing the total and internal variance of
 * the elements it summarizes. Buckets within a row are ordered oldest-first.
 */
private class ListItem extends AbstractMOAObject {

    protected ListItem next;

    protected ListItem previous;

    // Number of buckets currently stored in this row.
    protected int bucketSizeRow = 0;

    // Local copy of the enclosing class constant (array capacity is MAXBUCKETS + 1).
    protected int MAXBUCKETS = ADWIN.MAXBUCKETS;

    // Per-bucket sum of the summarized elements.
    protected double bucketTotal[] = new double[MAXBUCKETS + 1];

    // Per-bucket internal variance of the summarized elements.
    protected double bucketVariance[] = new double[MAXBUCKETS + 1];

    public ListItem() {
        // post: initializes the node as an unlinked, empty row.
        this(null, null);
    }

    public void clear() {
        // Empties every bucket in this row.
        bucketSizeRow = 0;
        for (int k = 0; k <= MAXBUCKETS; k++) {
            clearBucket(k);
        }
    }

    private void clearBucket(int k) {
        setTotal(0, k);
        setVariance(0, k);
    }

    public ListItem(ListItem nextNode, ListItem previousNode) {
        // post: initializes the node and splices it between the given
        // neighbours, fixing their links as well.
        this.next = nextNode;
        this.previous = previousNode;
        if (nextNode != null) {
            nextNode.previous = this;
        }
        if (previousNode != null) {
            previousNode.next = this;
        }
        clear();
    }

    public void insertBucket(double Value, double Variance) {
        // insert a Bucket at the end of this row
        int k = bucketSizeRow;
        bucketSizeRow++;
        //Insert new bucket
        setTotal(Value, k);
        setVariance(Variance, k);
    }

    public void RemoveBucket() {
        // Removes the first (oldest) bucket of this row.
        compressBucketsRow(1);
    }

    public void compressBucketsRow(int NumberItemsDeleted) {
        // Deletes the first NumberItemsDeleted buckets, shifting the rest left
        // and zeroing the freed slots at the end.
        for (int k = NumberItemsDeleted; k <= MAXBUCKETS; k++) {
            bucketTotal[k - NumberItemsDeleted] = bucketTotal[k];
            bucketVariance[k - NumberItemsDeleted] = bucketVariance[k];
        }
        for (int k = 1; k <= NumberItemsDeleted; k++) {
            clearBucket(MAXBUCKETS - k + 1);
        }
        bucketSizeRow -= NumberItemsDeleted;
        //BucketNumber-=NumberItemsDeleted;
    }

    public ListItem previous() {
        // post: returns the previous node.
        return this.previous;
    }

    public void setPrevious(ListItem previous) {
        // post: sets the previous node to be the given node.
        this.previous = previous;
    }

    public ListItem next() {
        // post: returns the next node.
        return this.next;
    }

    public void setNext(ListItem next) {
        // post: sets the next node to be the given node.
        this.next = next;
    }

    public double Total(int k) {
        // post: returns the sum stored in bucket k of this row.
        return bucketTotal[k];
    }

    public double Variance(int k) {
        // post: returns the variance stored in bucket k of this row.
        return bucketVariance[k];
    }

    public void setTotal(double value, int k) {
        // post: sets the sum of bucket k to the given value.
        bucketTotal[k] = value;
    }

    public void setVariance(double value, int k) {
        // post: sets the variance of bucket k to the given value.
        bucketVariance[k] = value;
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // no description provided
    }
}
public static final double DELTA = .002; //.1; default confidence parameter
private static final int mintMinimLongitudWindow = 10; //10 — minimum window width before shrink checks run
private double mdbldelta = .002; //.1; per-instance delta used by setInput(double)
private int mintTime = 0; // total number of inputs processed so far
private int mintClock = 32; // shrink test runs every mintClock inputs
private double mdblWidth = 0; // Mean of Width = mdblWidth/Number of items
//BUCKET
public static final int MAXBUCKETS = 5; // max buckets per row before two are merged into the next row
private int lastBucketRow = 0; // index of the deepest (largest-capacity) row
private double TOTAL = 0; // running sum of all elements in the window
private double VARIANCE = 0; // running variance numerator of the window contents
private int WIDTH = 0; // current window width (number of elements)
private int BucketNumber = 0; // buckets currently allocated
private int Detect = 0; // presumably the timestamp of the last detection — set outside this view; confirm
private int numberDetections = 0; // total changes detected so far
private int DetectTwice = 0; // presumably counts repeated detections — set outside this view; confirm
private boolean blnBucketDeleted = false; // set when the window was shrunk; cleared by resetChange()
private int BucketNumberMAX = 0; // high-water mark of allocated buckets
private int mintMinWinLength = 5; // minimum sub-window length in the cut test
private List listRowBuckets; // exponential histogram: one ListItem per bucket row
/** @return true if a bucket was dropped (window shrunk) since the last resetChange(). */
public boolean getChange() {
    return blnBucketDeleted;
}

/** Clears the bucket-deleted flag. */
public void resetChange() {
    blnBucketDeleted = false;
}

/** @return the maximum number of buckets ever allocated at once. */
public int getBucketsUsed() {
    return BucketNumberMAX;
}

/** @return the current window width (number of elements). */
public int getWidth() {
    return WIDTH;
}

/** Sets how often (in number of inputs) the shrink test runs. */
public void setClock(int intClock) {
    mintClock = intClock;
}

public int getClock() {
    return mintClock;
}

/** ADWIN has no warning zone; always false. */
public boolean getWarning() {
    return false;
}

/** @return true if a change was detected at the current timestamp. */
public boolean getDetect() {
    return (Detect == mintTime);
}

public int getNumberDetections() {
    return numberDetections;
}

/** @return the running sum of the elements in the window. */
public double getTotal() {
    return TOTAL;
}

/** @return the current estimate: the mean of the window (undefined if WIDTH == 0). */
public double getEstimation() {
    return TOTAL / WIDTH;
}

/** @return the variance of the elements in the window. */
public double getVariance() {
    return VARIANCE / WIDTH;
}

/** @return the cumulative width statistic (see mdblWidth). */
public double getWidthT() {
    return mdblWidth;
}
private void initBuckets() {
    // (Re)creates the exponential histogram with a single empty row and
    // zeroes all running window statistics.
    //Init buckets
    listRowBuckets = new List();
    lastBucketRow = 0;
    TOTAL = 0;
    VARIANCE = 0;
    WIDTH = 0;
    BucketNumber = 0;
}
private void insertElement(double Value) {
    // Adds one element at the head of the window and updates the running
    // total and variance incrementally.
    WIDTH++;
    // New elements always enter as a size-1 bucket in the first row.
    insertElementBucket(0, Value, listRowBuckets.head());
    double incVariance = 0;
    if (WIDTH > 1) {
        // Incremental variance update using the mean of the previous
        // WIDTH-1 elements (TOTAL has not been updated yet at this point).
        incVariance = (WIDTH - 1) * (Value - TOTAL / (WIDTH - 1)) * (Value - TOTAL / (WIDTH - 1)) / WIDTH;
    }
    VARIANCE += incVariance;
    TOTAL += Value;
    // Cascade bucket merges if any row overflowed.
    compressBuckets();
}
private void insertElementBucket(double Variance, double Value, ListItem Node) {
    // Note the parameter order here is (variance, value) while
    // ListItem.insertBucket takes (value, variance) — the call below is correct.
    //Insert new bucket
    Node.insertBucket(Value, Variance);
    BucketNumber++;
    if (BucketNumber > BucketNumberMAX) {
        // Track the high-water mark of allocated buckets.
        BucketNumberMAX = BucketNumber;
    }
}
/**
 * Number of elements summarized by one bucket of the given row: buckets in
 * row i hold exactly 2^i elements.
 *
 * @param Row the row index; assumed < 31, which holds for any window whose
 *            width fits in an int
 * @return 2 raised to the power Row
 */
private int bucketSize(int Row) {
    // Bit shift replaces Math.pow(2, Row): exact integer arithmetic with no
    // double round-trip or cast.
    return 1 << Row;
}
public int deleteElement() {
    // Drops the oldest bucket (first bucket of the tail row, i.e. the
    // largest-capacity row) and updates the window statistics.
    //LIST
    //Update statistics
    ListItem Node;
    Node = listRowBuckets.tail();
    int n1 = bucketSize(lastBucketRow); // elements summarized by the dropped bucket
    WIDTH -= n1;
    TOTAL -= Node.Total(0);
    double u1 = Node.Total(0) / n1; // mean of the dropped bucket
    // Grouped-variance correction: remove the bucket's internal variance
    // plus the between-group term w.r.t. the remaining window.
    double incVariance = Node.Variance(0) + n1 * WIDTH * (u1 - TOTAL / WIDTH) * (u1 - TOTAL / WIDTH) / (n1 + WIDTH);
    VARIANCE -= incVariance;
    //Delete Bucket
    Node.RemoveBucket();
    BucketNumber--;
    if (Node.bucketSizeRow == 0) {
        // The row became empty: drop it from the list entirely.
        listRowBuckets.removeFromTail();
        lastBucketRow--;
    }
    return n1;
}
public void compressBuckets() {
    // Walks the rows from the head (smallest buckets). Whenever a row
    // overflows (MAXBUCKETS + 1 buckets), its two oldest buckets are merged
    // into one bucket of the next row; the merge cascades while rows keep
    // overflowing.
    //Traverse the list of buckets in increasing order
    int n1, n2;
    double u2, u1, incVariance;
    ListItem cursor;
    ListItem nextNode;
    cursor = listRowBuckets.head();
    int i = 0; // current row index; buckets in row i summarize 2^i elements
    do {
        //Find the number of buckets in a row
        int k = cursor.bucketSizeRow;
        //If the row is full, merge buckets
        if (k == MAXBUCKETS + 1) {
            nextNode = cursor.next();
            if (nextNode == null) {
                // Need a deeper row to receive the merged bucket.
                listRowBuckets.addToTail();
                nextNode = cursor.next();
                lastBucketRow++;
            }
            n1 = bucketSize(i);
            n2 = bucketSize(i); // both merged buckets come from the same row
            u1 = cursor.Total(0) / n1;
            u2 = cursor.Total(1) / n2;
            // Between-group variance term added when pooling the two buckets.
            incVariance = n1 * n2 * (u1 - u2) * (u1 - u2) / (n1 + n2);
            nextNode.insertBucket(cursor.Total(0) + cursor.Total(1), cursor.Variance(0) + cursor.Variance(1) + incVariance);
            // NOTE(review): BucketNumber is incremented for the merged bucket
            // but not decremented for the two buckets removed below (see the
            // commented-out line in compressBucketsRow) — confirm intended.
            BucketNumber++;
            cursor.compressBucketsRow(2);
            if (nextNode.bucketSizeRow <= MAXBUCKETS) {
                // Next row did not overflow: no further cascading needed.
                break;
            }
        } else {
            break;
        }
        cursor = cursor.next();
        i++;
    } while (cursor != null);
}
/**
 * Adds one value to the window using the default confidence delta.
 *
 * @return true if a change was detected and the window was shrunk
 */
public boolean setInput(double intEntrada) {
return setInput(intEntrada, mdbldelta);
}
/**
 * Core ADWIN step: inserts the new value and, every mintClock instances,
 * scans all two-way splits of the window (from oldest to newest bucket).
 * Whenever the means of the two sub-windows differ by more than the cut
 * threshold (blnCutexpression), the oldest bucket is dropped and the scan
 * restarts, shrinking the window until no split is significant.
 *
 * @param intEntrada the new value to insert
 * @param delta confidence parameter for the cut test
 * @return true if at least one change was detected (window was shrunk)
 */
public boolean setInput(double intEntrada, double delta) {
boolean blnChange = false;
boolean blnExit = false;
ListItem cursor;
mintTime++;
//1,2)Increment window in one element
insertElement(intEntrada);
blnBucketDeleted = false;
//3)Reduce window
if (mintTime % mintClock == 0 && getWidth() > mintMinimLongitudWindow) {
boolean blnReduceWidth = true; // Diference
while (blnReduceWidth) // Diference
{
blnReduceWidth = false; // Diference
blnExit = false;
// n0/u0/v0: size, total and variance of the older (left) sub-window;
// n1/u1/v1: same for the newer (right) sub-window. The split point is
// advanced bucket by bucket from the tail (oldest) toward the head.
int n0 = 0;
int n1 = WIDTH;
double u0 = 0;
double u1 = getTotal();
double v0 = 0;
double v1 = VARIANCE;
double n2 = 0;
double u2 = 0;
cursor = listRowBuckets.tail();
int i = lastBucketRow;
do {
for (int k = 0; k <= (cursor.bucketSizeRow - 1); k++) {
n2 = bucketSize(i);
u2 = cursor.Total(k);
if (n0 > 0) {
v0 += cursor.Variance(k) + (double) n0 * n2 * (u0 / n0 - u2 / n2) * (u0 / n0 - u2 / n2) / (n0 + n2);
}
if (n1 > 0) {
v1 -= cursor.Variance(k) + (double) n1 * n2 * (u1 / n1 - u2 / n2) * (u1 / n1 - u2 / n2) / (n1 + n2);
}
n0 += bucketSize(i);
n1 -= bucketSize(i);
u0 += cursor.Total(k);
u1 -= cursor.Total(k);
if (i == 0 && k == cursor.bucketSizeRow - 1) {
// Reached the newest bucket: no split point left to test.
blnExit = true;
break;
}
double absvalue = (double) (u0 / n0) - (u1 / n1); //n1<WIDTH-mintMinWinLength-1
if ((n1 > mintMinWinLength + 1 && n0 > mintMinWinLength + 1) && // Diference NEGATIVE
//if(
blnCutexpression(n0, n1, u0, u1, v0, v1, absvalue, delta)) {
blnBucketDeleted = true;
Detect = mintTime;
// NOTE(review): Detect was just assigned mintTime above, so this
// branch can only fire when mintTime == 0; the assignment likely
// belongs after the check — confirm against the reference ADWIN.
if (Detect == 0) {
Detect = mintTime;
//blnFirst=true;
//blnWarning=true;
} else if (DetectTwice == 0) {
DetectTwice = mintTime;
//blnDetect=true;
}
blnReduceWidth = true; // Diference
blnChange = true;
if (getWidth() > 0) { //Reduce width of the window
//while (n0>0) // Diference NEGATIVE
n0 -= deleteElement();
blnExit = true;
break;
}
} //End if
}//Next k
cursor = cursor.previous();
i--;
} while (((!blnExit && cursor != null)));
}//End While // Diference
}//End if
mdblWidth += getWidth();
if (blnChange) {
numberDetections++;
}
return blnChange;
}
/**
 * ADWIN cut test: returns true when the absolute difference between the
 * means of the two sub-windows exceeds the epsilon bound derived from the
 * window variance and the confidence parameter delta.
 */
private boolean blnCutexpression(int n0, int n1, double u0, double u1, double v0, double v1, double absvalue, double delta) {
int n = getWidth();
double dd = Math.log(2 * Math.log(n) / delta); // careful: the ln(n) term goes in the numerator
// January 2008 formula
double v = getVariance();
// Harmonic-style term over the two (length-corrected) sub-window sizes.
double m = ((double) 1 / ((n0 - mintMinWinLength + 1))) + ((double) 1 / ((n1 - mintMinWinLength + 1)));
double epsilon = Math.sqrt(2 * m * v * dd) + (double) 2 / 3 * dd * m;
return (Math.abs(absvalue) > epsilon);
}
/**
 * Builds an ADWIN detector with the default confidence value DELTA.
 * Delegates to the main constructor to avoid duplicating the
 * initialization sequence across the three constructors.
 */
public ADWIN() {
    this(DELTA);
}
/**
 * Builds an ADWIN detector with the given confidence value.
 *
 * @param d confidence delta used in the cut test (smaller values make
 *          detection more conservative)
 */
public ADWIN(double d) {
mdbldelta = d;
initBuckets();
Detect = 0;
numberDetections = 0;
DetectTwice = 0;
}
/**
 * Builds an ADWIN detector with the default confidence and a custom
 * clock value (number of instances between window-reduction checks).
 * Delegates the shared initialization to the main constructor instead
 * of duplicating it.
 *
 * @param cl the clock interval, in instances
 */
public ADWIN(int cl) {
    this(DELTA);
    mintClock = cl;
}
/** Returns a short identifier string for this estimator. */
public String getEstimatorInfo() {
return "ADWIN;;";
}
/** No-op: ADWIN sizes its window adaptively, so a fixed width is ignored. */
public void setW(int W0) {
}
/** No textual description is produced for this detector. */
@Override
public void getDescription(StringBuilder sb, int indent) {
}
}
| Java |
/*
* GeometricMovingAverageDM.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Manuel Baena (mbaena@lcc.uma.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
/**
* Drift detection method based on the Geometric Moving Average Test
*
*
* @author Manuel Baena (mbaena@lcc.uma.es)
* @version $Revision: 7 $
*/
public class GeometricMovingAverageDM extends AbstractChangeDetector {
private static final long serialVersionUID = -3518369648142099719L;
public IntOption minNumInstancesOption = new IntOption(
"minNumInstances",
'n',
"The minimum number of instances before permitting detecting change.",
30, 0, Integer.MAX_VALUE);
public FloatOption lambdaOption = new FloatOption("lambda", 'l',
"Threshold parameter of the Geometric Moving Average Test", 1, 0.0, Float.MAX_VALUE);
public FloatOption alphaOption = new FloatOption("alpha", 'a',
"Alpha parameter of the Geometric Moving Average Test", .99, 0.0, 1.0);
private double m_n;
private double sum;
private double x_mean;
private double alpha;
private double delta;
private double lambda;
public GeometricMovingAverageDM() {
resetLearning();
}
@Override
public void resetLearning() {
m_n = 1.0;
x_mean = 0.0;
sum = 0.0;
alpha = this.alphaOption.getValue();
lambda = this.lambdaOption.getValue();
}
@Override
public void input(double x) {
// It monitors the error rate
if (this.isChangeDetected == true || this.isInitialized == false) {
resetLearning();
this.isInitialized = true;
}
x_mean = x_mean + (x - x_mean) / m_n;
sum = alpha * sum + ( 1.0- alpha) * (x - x_mean);
m_n++;
// System.out.print(prediction + " " + m_n + " " + (m_p+m_s) + " ");
this.estimation = x_mean;
this.isChangeDetected = false;
this.isWarningZone = false;
this.delay = 0;
if (m_n < this.minNumInstancesOption.getValue()) {
return;
}
if (sum > this.lambda) {
this.isChangeDetected = true;
}
}
@Override
public void getDescription(StringBuilder sb, int indent) {
// TODO Auto-generated method stub
}
@Override
protected void prepareForUseImpl(TaskMonitor monitor,
ObjectRepository repository) {
// TODO Auto-generated method stub
}
} | Java |
/*
* ChangeDetector.java
* Copyright (C) 2011 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.options.OptionHandler;
/**
* Change Detector interface to implement methods that detect change.
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public interface ChangeDetector extends OptionHandler {
/**
* Resets this change detector. It must be similar to starting a new change
* detector from scratch.
*
*/
public void resetLearning();
/**
* Adding a numeric value to the change detector<br><br>
*
* The output of the change detector is modified after the insertion of a
* new item inside.
*
* @param inputValue the number to insert into the change detector
*/
public void input(double inputValue);
/**
* Gets whether there is change detected.
*
* @return true if there is change
*/
public boolean getChange();
/**
* Gets whether the change detector is in the warning zone, after a warning alert and before a change alert.
*
* @return true if the change detector is in the warning zone
*/
public boolean getWarningZone();
/**
* Gets the prediction of next values.
*
* @return a prediction of the next value
*/
public double getEstimation();
/**
* Gets the length of the delay in the change detected.
*
* @return the length of the delay in the change detected
*/
public double getDelay();
/**
* Gets the output state of the change detection.
*
* @return an array with the number of change detections, number of
* warnings, delay, and estimation.
*/
public double[] getOutput();
/**
* Returns a string representation of the model.
*
* @param sb the stringbuilder to add the description
* @param indent the number of characters to indent
*/
@Override
public void getDescription(StringBuilder sb, int indent);
/**
* Produces a copy of this drift detection method
*
* @return the copy of this drift detection method
*/
@Override
public ChangeDetector copy();
}
/*
* EWMAChartDM.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Manuel Baena (mbaena@lcc.uma.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
/**
* Drift detection method based on the EWMA Charts of Ross, Adams, Tasoulis and Hand
* 2012
*
*
* @author Manuel Baena (mbaena@lcc.uma.es)
* @version $Revision: 7 $
*/
public class EWMAChartDM extends AbstractChangeDetector {
private static final long serialVersionUID = -3518369648142099719L;
//private static final int DDM_MINNUMINST = 30;
public IntOption minNumInstancesOption = new IntOption(
"minNumInstances",
'n',
"The minimum number of instances before permitting detecting change.",
30, 0, Integer.MAX_VALUE);
public FloatOption lambdaOption = new FloatOption("lambda", 'l',
"Lambda parameter of the EWMA Chart Method", 0.2, 0.0, Float.MAX_VALUE);
// Number of observations seen since the last reset.
private double m_n;
// Sum of the inputs since the last reset.
private double m_sum;
// Running mean estimate (m_sum / m_n).
private double m_p;
// Standard deviation of the EWMA statistic under the no-change hypothesis.
private double m_s;
// Cached lambda option value, refreshed on every resetLearning().
private double lambda;
// EWMA statistic: z_t = z_{t-1} + lambda * (x_t - z_{t-1}).
private double z_t;
public EWMAChartDM() {
resetLearning();
}
/** Re-reads the lambda option and restarts the running statistics. */
@Override
public void resetLearning() {
m_n = 1.0;
m_sum = 0.0;
m_p = 0.0;
m_s = 0.0;
z_t = 0.0;
lambda = this.lambdaOption.getValue();
}
/**
 * Feeds one 0/1 outcome (error indicator) to the chart. A change is
 * flagged when the EWMA statistic exceeds the control limit
 * m_p + L_t * m_s; the warning zone starts at half that margin.
 */
@Override
public void input(double prediction) {
// prediction must be 1 or 0
// It monitors the error rate
if (this.isChangeDetected == true || this.isInitialized == false) {
resetLearning();
this.isInitialized = true;
}
m_sum += prediction;
m_p = m_sum/m_n; // m_p + (prediction - m_p) / (double) (m_n+1);
m_s = Math.sqrt( m_p * (1.0 - m_p)* lambda * (1.0 - Math.pow(1.0 - lambda, 2.0 * m_n)) / (2.0 - lambda));
m_n++;
z_t += lambda * (prediction - z_t);
// Polynomial approximation of the control-limit factor L_t as a function
// of the estimated error rate m_p (per Ross et al. 2012).
//double L_t = 2.76 - 6.23 * m_p + 18.12 * Math.pow(m_p, 3) - 312.45 * Math.pow(m_p, 5) + 1002.18 * Math.pow(m_p, 7); //%1 FP
double L_t = 3.97 - 6.56 * m_p + 48.73 * Math.pow(m_p, 3) - 330.13 * Math.pow(m_p, 5) + 848.18 * Math.pow(m_p, 7); //%1 FP
//double L_t = 1.17 + 7.56 * m_p - 21.24 * Math.pow(m_p, 3) + 112.12 * Math.pow(m_p, 5) - 987.23 * Math.pow(m_p, 7); //%1 FP
// System.out.print(prediction + " " + m_n + " " + (m_p+m_s) + " ");
this.estimation = m_p;
this.isChangeDetected = false;
this.isWarningZone = false;
this.delay = 0;
if (m_n < this.minNumInstancesOption.getValue()) {
return;
}
// NOTE(review): when m_n equals the minimum exactly, neither guard above
// nor the condition below fires a change (strict inequalities) — confirm
// whether the boundary case is intentional.
if (m_n > this.minNumInstancesOption.getValue() && z_t > m_p + L_t * m_s) {
//System.out.println(m_p + ",D");
this.isChangeDetected = true;
//resetLearning();
} else if (z_t > m_p + 0.5 * L_t * m_s) {
//System.out.println(m_p + ",W");
this.isWarningZone = true;
} else {
this.isWarningZone = false;
//System.out.println(m_p + ",N");
}
}
/** No textual description is produced for this detector. */
@Override
public void getDescription(StringBuilder sb, int indent) {
// TODO Auto-generated method stub
}
@Override
protected void prepareForUseImpl(TaskMonitor monitor,
ObjectRepository repository) {
// TODO Auto-generated method stub
}
}
/*
* AbstractChangeDetector.java
* Copyright (C) 2011 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.options.AbstractOptionHandler;
/**
* Abstract Change Detector. All change detectors in MOA extend this class.
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public abstract class AbstractChangeDetector extends AbstractOptionHandler
implements ChangeDetector {
/**
* Change was detected
*/
protected boolean isChangeDetected;
/**
* Warning Zone: after a warning and before a change
*/
protected boolean isWarningZone;
/**
* Prediction for the next value based on previously seen values
*/
protected double estimation;
/**
* Delay in detecting change
*/
protected double delay;
/**
* The change detector has been initialized with the option values
*/
protected boolean isInitialized;
/**
* Resets this change detector. It must be similar to starting a new change
* detector from scratch.
*
*/
public void resetLearning() {
this.isChangeDetected = false;
this.isWarningZone = false;
this.estimation = 0.0;
this.delay = 0.0;
this.isInitialized = false;
}
/**
* Adding a numeric value to the change detector<br><br>
*
* The output of the change detector is modified after the insertion of a
* new item inside.
*
* @param inputValue the number to insert into the change detector
*/
public abstract void input(double inputValue);
/**
* Gets whether there is change detected.
*
* @return true if there is change
*/
public boolean getChange() {
return this.isChangeDetected;
}
/**
* Gets whether the change detector is in the warning zone, after a warning
* alert and before a change alert.
*
* @return true if the change detector is in the warning zone
*/
public boolean getWarningZone() {
return this.isWarningZone;
}
/**
* Gets the prediction of next values.
*
* @return a prediction of the next value
*/
public double getEstimation() {
return this.estimation;
}
/**
* Gets the length of the delay in the change detected.
*
* @return the length of the delay in the change detected
*/
public double getDelay() {
return this.delay;
}
/**
* Gets the output state of the change detection.
*
* @return an array with the number of change detections, number of
* warnings, delay, and estimation.
*/
public double[] getOutput() {
double[] res = {this.isChangeDetected ? 1 : 0, this.isWarningZone ? 1 : 0, this.delay, this.estimation};
return res;
}
/**
* Returns a string representation of the model.
*
* @param sb the stringbuilder to add the description
* @param indent the number of characters to indent
*/
@Override
public abstract void getDescription(StringBuilder sb, int indent);
/**
* Produces a copy of this change detector method
*
* @return the copy of this change detector method
*/
@Override
public ChangeDetector copy() {
return (ChangeDetector) super.copy();
}
}
/*
* SeqDrift2ChangeDetector.java
* Copyright (C) 2011 University of Waikato, Hamilton, New Zealand
* @author Sakthithasan Sripirakas sripirakas363 at yahoo dot com
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
import java.util.ArrayList;
import java.util.List;
import moa.AbstractMOAObject;
/**
* SeqDriftChangeDetector.java. This extends Abstract Change Detector
* as required by MOA.
*
* Pears, R., Sakthithasan, S., & Koh, Y. (2014). Detecting concept change in
* dynamic data streams. Machine Learning, 97(3), 259-293. doi:10.1007/s10994-013-5433-9
*
* @author Sakthithasan Sripirakas sripirakas363 at yahoo dot com
* @version $Revision: 7 $
*/
public class SeqDrift2ChangeDetector extends AbstractChangeDetector {
protected SeqDrift2 seqdrift;
public FloatOption deltaSeqDrift2Option = new FloatOption("deltaSeq2Drift", 'd',
"Delta of SeqDrift2 change detection",0.01, 0.0, 1.0);
public IntOption blockSeqDrift2Option = new IntOption("blockSeqDrift2Option",'b',"Block size of SeqDrift2 change detector", 200, 100, 10000);
/**
 * Feeds one value to the underlying SeqDrift2 detector (lazily created on
 * first use) and mirrors its state into the AbstractChangeDetector fields.
 * SeqDrift2 has no warning zone, so isWarningZone is always false.
 */
@Override
public void input(double inputValue) {
if (this.seqdrift == null) {
resetLearning();
}
this.isChangeDetected = seqdrift.setInput(inputValue);
this.isWarningZone = false;
this.delay = 0.0;
this.estimation = seqdrift.getEstimation();
}
/**
 * Recreates the underlying SeqDrift2 detector from the current option
 * values. The option getters already return double / int respectively,
 * so the previous explicit casts were redundant and have been removed.
 */
@Override
public void resetLearning() {
    seqdrift = new SeqDrift2(this.deltaSeqDrift2Option.getValue(), this.blockSeqDrift2Option.getValue());
}
/** No textual description is produced for this detector. */
@Override
public void getDescription(StringBuilder sb, int indent) {
// TODO Auto-generated method stub
}
/** No preparation is required; the detector is created lazily in input(). */
@Override
protected void prepareForUseImpl(TaskMonitor monitor,
ObjectRepository repository) {
// TODO Auto-generated method stub
}
/**
* SeqDrift2 uses reservoir sampling to build a sequential change detection
* model that uses statistically sound guarantees defined using Bernstein Bound
* on false positive and false negative rates. This is a block based approach and checks
* for changes in the data values only at block boundaries as opposed to the
* methods on per instance basis. SeqDrift maintains a reservoir and a repository.
* Repository gathers the new instances and reservoir stores only the data values
* that are statistically not different, in other words from the same distribution.
* If the data values in the repository are consistent with the values in reservoir,
* the data values of the repository are copied to the reservoir applying
* reservoir algorithm. The hypothesis is that the mean values of the reservoir and right repository
* are not statistically different
*
*
*
* Pears, R., Sakthithasan, S., & Koh, Y. (2014). Detecting concept change in
* dynamic data streams. Machine Learning, 97(3), 259-293. doi:10.1007/s10994-013-5433-9
*
* @author Sakthithasan Sripirakas sripirakas363 at yahoo dot com
*/
public class SeqDrift2 extends AbstractMOAObject {
// Newly arriving block of data, hypothesis-tested against the reservoir.
private final Reservoir rightRepository;
// Reservoir sample of the data accepted so far (same distribution).
private final Reservoir leftReservoir;
//parameters
private final int blockSize;
//parameters that are optimized
private final double significanceLevel;
private int leftReservoirSize;
private final int rightRepositorySize;
private final double k;
//variables
private int instanceCount = 0;
private double leftReservoirMean = 0.0;
private double rightRepositoryMean = 0.0;
private double variance = 0.0;
private double total = 0.0;
private double epsilon = 0.0;
// Outcomes of the drift hypothesis test.
private final static int DRIFT = 0;
private final static int NODRIFT = 2;
private final static int INTERNAL_DRIFT = 3;
/**
* SeqDrift change detector requires two parameters: significance level and
* block size. Significance level controls the false positive rate and block
* size sets the interval of two consecutive hypothesis tests
* Block Size is a positive integer and significance level is a double value
* between 0 and 1
* @param _significanceLevel
* @param _blockSize
*/
public SeqDrift2(double _significanceLevel, int _blockSize) {
significanceLevel = _significanceLevel;
blockSize = _blockSize;
leftReservoirSize = _blockSize;
rightRepositorySize = _blockSize;
k = 0.5;
instanceCount = 0;
variance = 0;
total = 0.0;
epsilon = 0.0;
//Data Structures
leftReservoir = new Reservoir(leftReservoirSize, blockSize);
rightRepository = new Reservoir(rightRepositorySize, blockSize);
}
/**
* This method can be used to directly interface with SeqDrift change
* detector. This method requires a numerical value as an input. The return
* value indicates whether there is a change detected or not.
* @param _inputValue numerical value
* @return true if a drift was detected at this block boundary
*/
public boolean setInput(double _inputValue) {
++instanceCount;
//i_numInstances++;
addToRightReservoir(_inputValue);
total = total + _inputValue;
if ((instanceCount % blockSize) == 0) //checking for drift at block boundary
{
int iDriftType = getDriftType();
if (iDriftType == DRIFT) {
clearLeftReservoir();
moveFromRepositoryToReservoir();
return true;
}
/*
else if (iDriftType == INTERNAL_DRIFT) { //Ignoring the changes where
mean values decrease
clearLeftReservoir();
moveFromRepositoryToReservoir();
return false;
}
*/
else //No drift is detected
{
moveFromRepositoryToReservoir();
return false;
}
}
return false;
}
/**
*
* <p>
* This method adds new value to right reservoir
*
* @param _inputValue A double data value
*/
private void addToRightReservoir(double _inputValue) {
rightRepository.addElement(_inputValue);
}
/**
*
* <p>
* This method copies the data values of the repository to the reservoir
* applying reservoir algorithm
*
*/
private void moveFromRepositoryToReservoir() {
leftReservoir.copy(rightRepository);
}
/**
*
* <p>
* This method removes all elements from the reservoir after a drift is
* detected.
*
*/
private void clearLeftReservoir() {
total = total - leftReservoir.getTotal();
leftReservoir.clear();
}
/**
*
* <p>
* This method returns the type of drift detected The possible values are:
* DRIFT, INTERNAL_DRIFT and NODRIFT
*
* @return one of DRIFT, INTERNAL_DRIFT or NODRIFT
*/
private int getDriftType() {
if (getWidth() > blockSize) {
leftReservoirMean = getLeftReservoirMean();
rightRepositoryMean = getRightRepositoryMean();
optimizeEpsilon();
if ((instanceCount > blockSize) && (leftReservoir.getSize() > 0)) {
// Drift when the mean difference exceeds the Bernstein-bound epsilon.
if (epsilon <= Math.abs(rightRepositoryMean - leftReservoirMean)) {
//if (rightRepositoryMean > leftReservoirMean) {
return DRIFT;
//}
/*else {
return INTERNAL_DRIFT;
}
*/
} else {
return NODRIFT;
}
}
return NODRIFT;
} else {
return NODRIFT;
}
}
private double getLeftReservoirMean() {
return leftReservoir.getSampleMean();
}
private double getRightRepositoryMean() {
return rightRepository.getSampleMean();
}
// NOTE(review): this formula is the sample variance assuming 0/1 inputs
// (so that 'total' counts the ones) — confirm inputs are binary.
private double getVariance() {
double mean = getMean();
double meanminum1 = mean - 1;
double size = getWidth();
double x = getTotal() * meanminum1 * meanminum1 + (size - getTotal()) * mean * mean;
double y = size - 1;
return x / y;
}
/**
 * Iteratively shrinks the weighting factor k to minimize the Bernstein
 * drift threshold epsilon, then resizes the left reservoir accordingly.
 */
private void optimizeEpsilon() {
int tests = leftReservoir.getSize() / blockSize;
if (tests >= 1) {
variance = getVariance();
if (variance == 0) {
variance = 0.0001; // to avoid divide by zero exception
}
//Drift epsilon
double ddeltadash = getDriftEpsilon(tests);
double x = Math.log(4.0 / ddeltadash);
double ktemp = this.k;
double previousStepEpsilon;
double currentStepEpsilon;
double squareRootValue = 0.0;
boolean IsNotOptimized = true;
while (IsNotOptimized) {
squareRootValue = Math.sqrt(x * x + 18 * rightRepositorySize * x * variance);
previousStepEpsilon = (1.0 / (3 * rightRepositorySize * (1 - ktemp))) * (x + squareRootValue);
ktemp = 3 * ktemp / 4;
currentStepEpsilon = (1.0 / (3 * rightRepositorySize * (1 - ktemp))) * (x + squareRootValue);
// Stop once further shrinking of k improves epsilon by < 0.01%.
if (((previousStepEpsilon - currentStepEpsilon) / previousStepEpsilon) < 0.0001) {
IsNotOptimized = false;
}
}
ktemp = 4 * ktemp / 3;
ktemp = adjustForDataRate(ktemp);
leftReservoirSize = (int) (rightRepositorySize * (1 - ktemp) / ktemp);
leftReservoir.setMaxSize(leftReservoirSize);
squareRootValue = Math.sqrt(x * x + 18 * rightRepositorySize * x * variance);
currentStepEpsilon = (1.0 / (3 * rightRepositorySize * (1 - ktemp))) * (x + squareRootValue);
epsilon = currentStepEpsilon;
}
}
// Bonferroni-style correction of the significance level over the number
// of hypothesis tests performed so far.
private double getDriftEpsilon(int _inumTests) {
double dSeriesTotal = 2.0 * (1.0 - Math.pow(0.5, _inumTests));
double ddeltadash = significanceLevel / dSeriesTotal;
return ddeltadash;
}
private double getMean() {
return getTotal() / getWidth();
}
private double getTotal() {
return rightRepository.getTotal() + leftReservoir.getTotal();
}
// Shrinks k further when the mean is rising, making the test more sensitive.
private double adjustForDataRate(double _dKr) {
double meanIncrease = (rightRepository.getSampleMean() - leftReservoir.getSampleMean());
double dk = _dKr;
if (meanIncrease > 0) {
dk = dk + ((-1) * (meanIncrease * meanIncrease * meanIncrease * meanIncrease) + 1) * _dKr;
} else if (meanIncrease <= 0) {
dk = _dKr;
}
return dk;
}
// Total number of stored values across both sample structures.
private int getWidth() {
return leftReservoir.getSize() + rightRepository.getSize();
}
/**
* Gets the prediction of next values.
* @return Predicted value of next data value
*/
public double getEstimation() {
int iWidth = getWidth();
if (iWidth != 0) {
return getTotal() / getWidth();
} else {
return 0;
}
}
/** No textual description is produced for this detector. */
public void getDescription(StringBuilder sb, int indent) {
}
}
/**
 * Fixed-capacity sample of double values maintained with reservoir
 * sampling: while below capacity every offered value is stored; once
 * full, a new value replaces a uniformly chosen stored value with
 * probability MAX_SIZE / instanceCount.
 */
class Reservoir {

    private int size;                         // number of values currently stored
    private double total;                     // running sum of the stored values
    private final int blocksize;              // block size of the backing Repository
    private final Repository dataContainer;   // block-organized storage
    private int instanceCount;                // values offered so far (stored or not)
    private int MAX_SIZE;                     // capacity; adjustable via setMaxSize

    public Reservoir(int _iSize, int _iBlockSize) {
        MAX_SIZE = _iSize;
        total = 0;
        blocksize = _iBlockSize;
        instanceCount = 0;
        dataContainer = new Repository(blocksize);
    }

    /** Mean of the stored values. NOTE(review): yields NaN while empty (size == 0). */
    public double getSampleMean() {
        return total / size;
    }

    /**
     * Offers one value to the reservoir, keeping the running total in sync
     * with whatever ends up stored.
     *
     * @param _dValue the value to offer
     */
    public void addElement(double _dValue) {
        try {
            if (size < MAX_SIZE) {
                // Fixed: pass the primitive directly. The original boxed via the
                // deprecated new Double(_dValue), which was immediately unboxed
                // again by Repository.add(double) — a pointless allocation.
                dataContainer.add(_dValue);
                total = total + _dValue;
                size++;
            } else {
                // Classic reservoir-sampling replacement step.
                int irIndex = (int) (Math.random() * instanceCount);
                if (irIndex < MAX_SIZE) {
                    total = total - dataContainer.get(irIndex);
                    dataContainer.addAt(irIndex, _dValue);
                    total = total + _dValue;
                }
            }
            instanceCount++;
        } catch (Exception e) {
            // Best-effort: log and continue; detection proceeds without this value.
            System.out.println("2 Exception" + e);
        }
    }

    public double get(int _iIndex) {
        return dataContainer.get(_iIndex);
    }

    public int getSize() {
        return size;
    }

    /** Empties the reservoir and resets its statistics. */
    public void clear() {
        dataContainer.removeAll();
        total = 0;
        size = 0;
    }

    public double getTotal() {
        return total;
    }

    /** Drains every element of the source reservoir into this one, then clears the source. */
    public void copy(Reservoir _oSource) {
        for (int iIndex = 0; iIndex < _oSource.getSize(); iIndex++) {
            addElement(_oSource.get(iIndex));
        }
        _oSource.clear();
    }

    public void setMaxSize(int _iMaxSize) {
        MAX_SIZE = _iMaxSize;
    }
}
/**
 * Block-organized storage backing a Reservoir: values are appended into
 * fixed-size Blocks so that dropping the oldest block is cheap.
 */
class Repository {

    private final int blockSize;        // capacity of each Block
    private final List<Block> blocks;   // blocks in insertion order
    private int indexOfLastBlock;       // index of the block currently being filled
    private int instanceCount;          // total number of stored values
    private double total;               // running sum (maintained incrementally)

    public Repository(int _iBlockSize) {
        blockSize = _iBlockSize;
        indexOfLastBlock = -1;
        instanceCount = 0;
        total = 0;
        blocks = new ArrayList<Block>();
    }

    /** Appends a value, opening a fresh block at each block boundary. */
    public void add(double _dValue) {
        boolean atBoundary = (instanceCount % blockSize) == 0;
        if (atBoundary) {
            blocks.add(new Block(blockSize));
            indexOfLastBlock++;
        }
        blocks.get(indexOfLastBlock).add(_dValue);
        instanceCount++;
        total = total + _dValue;
    }

    /** Appends a value; a freshly opened block carries the given tested flag. */
    public void add(double _dValue, boolean _isTested) {
        boolean atBoundary = (instanceCount % blockSize) == 0;
        if (atBoundary) {
            blocks.add(new Block(blockSize, _isTested));
            indexOfLastBlock++;
        }
        blocks.get(indexOfLastBlock).add(_dValue);
        instanceCount++;
        total = total + _dValue;
    }

    /** Random access by global element index. */
    public double get(int _iIndex) {
        Block owner = blocks.get(_iIndex / blockSize);
        return owner.data[_iIndex % blockSize];
    }

    /** Overwrites the element at the given global index, keeping block totals in sync. */
    public void addAt(int _iIndex, double _dValue) {
        blocks.get(_iIndex / blockSize).addAt(_iIndex % blockSize, _dValue);
    }

    public int getSize() {
        return instanceCount;
    }

    /** Recomputes the grand total from the per-block totals. */
    public double getTotal() {
        double grandTotal = 0.0;
        for (Block current : blocks) {
            grandTotal = grandTotal + current.total;
        }
        return grandTotal;
    }

    public double getFirstBlockTotal() {
        return blocks.get(0).total;
    }

    /** Flags the most recently added block as already hypothesis-tested. */
    public void markLastAddedBlock() {
        if (!blocks.isEmpty()) {
            blocks.get(blocks.size() - 1).setTested(true);
        }
    }

    /** Drops the oldest block and rolls its contribution out of the running totals. */
    public void removeFirstBlock() {
        total = total - blocks.get(0).total;
        blocks.remove(0);
        instanceCount = instanceCount - blockSize;
        indexOfLastBlock--;
    }

    /** Discards every block and resets all counters. */
    public void removeAll() {
        blocks.clear();
        indexOfLastBlock = -1;
        instanceCount = 0;
        total = 0;
    }

    /** Counts how many blocks have been through a hypothesis test. */
    public int getNumOfTests() {
        int testedBlocks = 0;
        for (Block current : blocks) {
            if (current.IsTested()) {
                testedBlocks++;
            }
        }
        return testedBlocks;
    }
}
/**
 * Fixed-capacity array of double values plus a running total.
 * Slots that have not been written yet hold the sentinel -1.
 */
class Block {

    public double[] data;
    public double total;
    private int indexOfLastValue;   // next free slot
    private boolean b_IsTested;     // true once this block has been hypothesis-tested

    Block(int _iLength) {
        data = new double[_iLength];
        total = 0.0;
        indexOfLastValue = 0;
        int slot = 0;
        while (slot < data.length) {
            data[slot] = -1;   // sentinel for "unused"
            slot++;
        }
    }

    Block(int _iLength, boolean _isTested) {
        data = new double[_iLength];
        total = 0.0;
        indexOfLastValue = 0;
        b_IsTested = _isTested;
        int slot = 0;
        while (slot < data.length) {
            data[slot] = -1;   // sentinel for "unused"
            slot++;
        }
    }

    /** Appends a value; terminates the JVM if the block is already full. */
    public void add(double _dValue) {
        if (indexOfLastValue >= data.length) {
            System.out.println("ERROR in adding to Block. Last Index :" + indexOfLastValue + " Total :" + total + " Array Length :" + data.length);
            System.exit(2);
        }
        data[indexOfLastValue] = _dValue;
        total = total + _dValue;
        indexOfLastValue++;
    }

    /** Replaces the value at a slot, keeping the running total consistent. */
    public void addAt(int _iIndex, double _dNewValue) {
        total = total - data[_iIndex] + _dNewValue;
        data[_iIndex] = _dNewValue;
    }

    public void setTested(boolean _isTested) {
        b_IsTested = _isTested;
    }

    public boolean IsTested() {
        return b_IsTested;
    }
}
}
| Java |
/*
* HDDM_A_Test.java
*
* @author Isvani Frias-Blanco (ifriasb@udg.co.cu)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.options.FloatOption;
import moa.options.MultiChoiceOption;
import moa.tasks.TaskMonitor;
/**
* <p>Online drift detection method based on Hoeffding's bounds.
* HDDM<sub><i>A</i>-test</sub> uses the average as estimator.
* It receives as input a stream of real values and returns the estimated status
* of the stream: STABLE, WARNING or DRIFT.</p>
*
* <p>I. Frias-Blanco, J. del Campo-Avila, G. Ramos-Jimenez, R. Morales-Bueno,
* A. Ortiz-Diaz, and Y. Caballero-Mota, Online and non-parametric drift
* detection methods based on Hoeffding's bound, IEEE Transactions on Knowledge
* and Data Engineering, 2014. DOI 10.1109/TKDE.2014.2345382.</p>
*
* <p>Parameters:</p> <ul> <li>-d : Confidence to the drift</li><li>-w :
* Confidence to the warning</li><li>-t : Option to monitor error increments and
* decrements (two-sided) or only increments (one-sided)</li>
* </ul>
*
* @author Isvani Frias-Blanco (ifriasb@udg.co.cu)
*
*/
/**
 * Online drift detector based on Hoeffding's inequality that uses the running
 * average as estimator (HDDM_A-test). Each call to {@link #input(double)}
 * updates {@code isChangeDetected}, {@code isWarningZone}, {@code estimation}
 * and {@code delay}.
 */
public class HDDM_A_Test extends AbstractChangeDetector {

    /** Significance level for declaring a drift (option -d). */
    public FloatOption driftConfidenceOption = new FloatOption("driftConfidence", 'd',
            "Confidence to the drift",
            0.001, 0, 1);

    /** Significance level for declaring a warning (option -w). */
    public FloatOption warningConfidenceOption = new FloatOption("warningConfidence", 'w',
            "Confidence to the warning",
            0.005, 0, 1);

    /** Test type (option -t); default index 1 selects the two-sided test. */
    public MultiChoiceOption oneSidedTestOption = new MultiChoiceOption(
            "typeOfTest", 't',
            "Monitors error increments and decrements (two-sided) or only increments (one-sided)", new String[]{
                "One-sided", "Two-sided"}, new String[]{
                "One-sided", "Two-sided"},
            1);

    // Sample count and sum frozen at the lowest upper-bound estimate seen so far.
    protected int n_min = 0;
    protected double c_min = 0;
    // Count and sum of every value observed since the last reset.
    protected int total_n = 0;
    protected double total_c = 0;
    // Sample count and sum frozen at the highest lower-bound estimate seen so far.
    protected int n_max = 0;
    protected double c_max = 0;
    // Sum/count carried over a reset, used to seed the post-drift estimation.
    protected double cEstimacion = 0;
    protected int nEstimacion = 0;

    public HDDM_A_Test() {
        resetLearning();
    }

    @Override
    public void input(double value) {
        // Fold the new observation into the global counters.
        total_n++;
        total_c += value;
        if (n_min == 0) {
            n_min = total_n;
            c_min = total_c;
        }
        if (n_max == 0) {
            n_max = total_n;
            c_max = total_c;
        }
        // Hoeffding bounds for the frozen cut-point sample and the full window.
        double cota = Math.sqrt(1.0 / (2 * n_min) * Math.log(1.0 / driftConfidenceOption.getValue())),
                cota1 = Math.sqrt(1.0 / (2 * total_n) * Math.log(1.0 / driftConfidenceOption.getValue()));
        // Move the "minimum" cut point when the window's upper estimate is lower.
        if (c_min / n_min + cota >= total_c / total_n + cota1) {
            c_min = total_c;
            n_min = total_n;
        }
        cota = Math.sqrt(1.0 / (2 * n_max) * Math.log(1.0 / driftConfidenceOption.getValue()));
        // Symmetric update for the "maximum" cut point.
        if (c_max / n_max - cota <= total_c / total_n - cota1) {
            c_max = total_c;
            n_max = total_n;
        }
        if (meanIncr(c_min, n_min, total_c, total_n, driftConfidenceOption.getValue())) {
            // Significant mean increase at drift confidence: flag drift and reset.
            nEstimacion = total_n - n_min;
            cEstimacion = total_c - c_min;
            n_min = n_max = total_n = 0;
            c_min = c_max = total_c = 0;
            this.isChangeDetected = true;
            this.isWarningZone = false;
        } else if (meanIncr(c_min, n_min, total_c, total_n, warningConfidenceOption.getValue())) {
            this.isChangeDetected = false;
            this.isWarningZone = true;
        } else {
            this.isChangeDetected = false;
            this.isWarningZone = false;
        }
        // The two-sided test also resets on a significant mean decrease,
        // without raising the drift flag.
        if (this.oneSidedTestOption.getChosenIndex() == 1
                && meanDecr(c_max, n_max, total_c, total_n)) {
            nEstimacion = total_n - n_max;
            cEstimacion = total_c - c_max;
            n_min = n_max = total_n = 0;
            c_min = c_max = total_c = 0;
        }
        updateEstimations();
    }

    /**
     * Hoeffding-bound test for a significant mean increase relative to the
     * frozen cut-point sample, at the given confidence level.
     */
    private boolean meanIncr(double c_min, int n_min, double total_c, int total_n, double confianzaCambio) {
        if (n_min == total_n) {
            return false;
        }
        double m = (double) (total_n - n_min) / n_min * (1.0 / total_n);
        double cota = Math.sqrt(m / 2 * Math.log(2.0 / confianzaCambio));
        return total_c / total_n - c_min / n_min >= cota;
    }

    /**
     * Hoeffding-bound test for a significant mean decrease. Always evaluated at
     * the drift confidence level.
     */
    private boolean meanDecr(double c_max, int n_max, double total_c, int total_n) {
        if (n_max == total_n) {
            return false;
        }
        double m = (double) (total_n - n_max) / n_max * (1.0 / total_n);
        double cota = Math.sqrt(m / 2 * Math.log(2.0 / driftConfidenceOption.getValue()));
        return c_max / n_max - total_c / total_n >= cota;
    }

    @Override
    public void resetLearning() {
        super.resetLearning();
        n_min = 0;
        c_min = 0;
        total_n = 0;
        total_c = 0;
        n_max = 0;
        c_max = 0;
        cEstimacion = 0;
        nEstimacion = 0;
    }

    /**
     * Refreshes {@code estimation} and {@code delay}.
     * NOTE(review): immediately after a reset both total_n and nEstimacion can
     * be 0, so the first branch computes 0/0 and yields NaN until the next
     * input arrives — confirm this transient is acceptable to callers.
     */
    protected void updateEstimations() {
        if (this.total_n >= this.nEstimacion) {
            this.cEstimacion = this.nEstimacion = 0;
            this.estimation = this.total_c / this.total_n;
            this.delay = this.total_n;
        } else {
            this.estimation = this.cEstimacion / this.nEstimacion;
            this.delay = this.nEstimacion;
        }
    }

    /** @return the current estimate of the stream mean */
    @Override
    public double getEstimation() {
        return this.estimation;
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no textual description is produced.
    }

    @Override
    protected void prepareForUseImpl(TaskMonitor monitor, ObjectRepository repository) {
        resetLearning();
    }
}
| Java |
/*
* EDDM.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Manuel Baena (mbaena@lcc.uma.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.tasks.TaskMonitor;
/**
* Drift detection method based in EDDM method of Manuel Baena et al.
*
* <p>Early Drift Detection Method. Manuel Baena-Garcia, Jose Del Campo-Avila,
* Raúl Fidalgo, Albert Bifet, Ricard Gavalda, Rafael Morales-Bueno. In Fourth
* International Workshop on Knowledge Discovery from Data Streams, 2006.</p>
*
* @author Manuel Baena (mbaena@lcc.uma.es)
* @version $Revision: 7 $
*/
/**
 * Early Drift Detection Method (EDDM). Monitors the distance (number of
 * instances) between consecutive prediction errors: a shrinking distance
 * relative to its historical maximum signals warning and then drift.
 * Input is 1.0 for a prediction error and 0.0 otherwise.
 */
public class EDDM extends AbstractChangeDetector {

    private static final long serialVersionUID = 140980267062162000L;

    // Drift is declared when (mean + 2*std) drops below 90% of its maximum.
    private static final double FDDM_OUTCONTROL = 0.9;

    // Warning is declared below 95% of the maximum.
    private static final double FDDM_WARNING = 0.95;

    // Minimum number of processed instances before any level can be signalled.
    private static final double FDDM_MINNUMINSTANCES = 30;

    // Number of prediction errors seen since the last reset.
    private double m_numErrors;

    // Minimum number of errors before warning/drift can be signalled.
    private int m_minNumErrors = 30;

    // Instances processed since the last reset (starts at 1).
    private int m_n;

    // Index of the most recent error.
    private int m_d;

    // Index of the error before the most recent one.
    private int m_lastd;

    // Running mean of the distance between consecutive errors.
    private double m_mean;

    // Running sum of squared deviations (Welford-style accumulator).
    private double m_stdTemp;

    // Maximum observed value of mean + 2*std.
    private double m_m2smax;

    // NOTE(review): m_lastLevel is never read or written outside commented-out
    // code; it appears to be dead state retained from the DDM-style levels.
    private int m_lastLevel;

    public EDDM() {
        resetLearning();
    }

    @Override
    public void resetLearning() {
        m_n = 1;
        m_numErrors = 0;
        m_d = 0;
        m_lastd = 0;
        m_mean = 0.0;
        m_stdTemp = 0.0;
        m_m2smax = 0.0;
        //m_lastLevel = DDM_INCONTROL_LEVEL;
        this.estimation = 0.0;
    }

    @Override
    public void input(double prediction) {
        // prediction must be 1 or 0
        // It monitors the error rate
        // System.out.print(prediction + " " + m_n + " " + probability + " ");
        if (this.isChangeDetected == true || this.isInitialized == false) {
            resetLearning();
            this.isInitialized = true;
        }
        this.isChangeDetected = false;
        m_n++;
        if (prediction == 1.0) {
            // An error occurred: update the distance-between-errors statistics.
            this.isWarningZone = false;
            this.delay = 0;
            m_numErrors += 1;
            m_lastd = m_d;
            m_d = m_n - 1;
            int distance = m_d - m_lastd;
            double oldmean = m_mean;
            // Incremental mean and Welford-style variance update over distances;
            // the order (mean first, then m_stdTemp using both means) matters.
            m_mean = m_mean + ((double) distance - m_mean) / m_numErrors;
            this.estimation = m_mean;
            m_stdTemp = m_stdTemp + (distance - m_mean) * (distance - oldmean);
            double std = Math.sqrt(m_stdTemp / m_numErrors);
            double m2s = m_mean + 2 * std;
            // System.out.print(m_numErrors + " " + m_mean + " " + std + " " +
            // m2s + " " + m_m2smax + " ");
            if (m2s > m_m2smax) {
                // New maximum: record it once enough instances have been seen.
                if (m_n > FDDM_MINNUMINSTANCES) {
                    m_m2smax = m2s;
                }
                //m_lastLevel = DDM_INCONTROL_LEVEL;
                // System.out.print(1 + " ");
            } else {
                // Ratio of the current statistic to its maximum drives the level.
                double p = m2s / m_m2smax;
                // System.out.print(p + " ");
                if (m_n > FDDM_MINNUMINSTANCES && m_numErrors > m_minNumErrors
                        && p < FDDM_OUTCONTROL) {
                    //System.out.println(m_mean + ",D");
                    this.isChangeDetected = true;
                    //resetLearning();
                    //return DDM_OUTCONTROL_LEVEL;
                } else if (m_n > FDDM_MINNUMINSTANCES
                        && m_numErrors > m_minNumErrors && p < FDDM_WARNING) {
                    //System.out.println(m_mean + ",W");
                    //m_lastLevel = DDM_WARNING_LEVEL;
                    this.isWarningZone = true;
                    //return DDM_WARNING_LEVEL;
                } else {
                    this.isWarningZone = false;
                    //System.out.println(m_mean + ",N");
                    //m_lastLevel = DDM_INCONTROL_LEVEL;
                    //return DDM_INCONTROL_LEVEL;
                }
            }
        } else {
            // Correct prediction: no statistics change (only error distances matter).
            // System.out.print(m_numErrors + " " + m_mean + " " +
            // Math.sqrt(m_stdTemp/m_numErrors) + " " + (m_mean +
            // 2*Math.sqrt(m_stdTemp/m_numErrors)) + " " + m_m2smax + " ");
            // System.out.print(((m_mean +
            // 2*Math.sqrt(m_stdTemp/m_numErrors))/m_m2smax) + " ");
        }
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no textual description is produced.
    }

    @Override
    protected void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        // Nothing to prepare: all state is initialized in resetLearning().
    }
}
/*
* HDDM_W_Test.java
*
* @author Isvani Frias-Blanco (ifriasb@udg.co.cu)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.options.FloatOption;
import moa.options.MultiChoiceOption;
import moa.tasks.TaskMonitor;
/**
* <p>Online drift detection method based on McDiarmid's bounds.
* HDDM<sub><i>W</i>-test</sub> uses the EWMA statistic as estimator.
* It receives as input a stream of real values and returns the estimated status
* of the stream: STABLE, WARNING or DRIFT.</p>
*
* <p>I. Frias-Blanco, J. del Campo-Avila, G. Ramos-Jimenez, R. Morales-Bueno,
* A. Ortiz-Diaz, and Y. Caballero-Mota, Online and non-parametric drift
* detection methods based on Hoeffding's bound, IEEE Transactions on Knowledge
* and Data Engineering, 2014. DOI 10.1109/TKDE.2014.2345382.</p>
*
* <p>Parameters:</p> <ul> <li>-d : Confidence to the drift</li><li>-w :
* Confidence to the warning</li><li>-m : Controls how much weight is given to
* more recent data compared to older data. Smaller values mean less weight
* given to recent data</li><li>-t : Option to monitor error increments and
* decrements (two-sided) or only increments (one-sided)</li>
* </ul>
*
* @author Isvani Frias-Blanco (ifriasb@udg.co.cu)
*
*/
/**
 * Online drift detector based on McDiarmid's bound that uses an EWMA statistic
 * as estimator (HDDM_W-test). Each call to {@link #input(double)} updates
 * {@code isChangeDetected}, {@code isWarningZone}, {@code estimation} and
 * {@code delay}.
 */
public class HDDM_W_Test extends AbstractChangeDetector {

    protected static final long serialVersionUID = 1L;

    /** Significance level for declaring a drift (option -d). */
    public FloatOption driftConfidenceOption = new FloatOption("driftConfidence", 'd',
            "Confidence to the drift",
            0.001, 0, 1);

    /** Significance level for declaring a warning (option -w). */
    public FloatOption warningConfidenceOption = new FloatOption("warningConfidence", 'w',
            "Confidence to the warning",
            0.005, 0, 1);

    /** EWMA weight given to the most recent observation (option -m). */
    public FloatOption lambdaOption = new FloatOption("lambda",
            'm', "Controls how much weight is given to more recent data compared to older data. Smaller values mean less weight given to recent data.",
            0.050, 0, 1);

    /** Test type (option -t); default index 0 selects the one-sided test. */
    public MultiChoiceOption oneSidedTestOption = new MultiChoiceOption(
            "typeOfTest", 't',
            "Monitors error increments and decrements (two-sided) or only increments (one-sided)", new String[]{
                "One-sided", "Two-sided"}, new String[]{
                "One-sided", "Two-sided"},
            0);

    /**
     * EWMA estimate plus the accumulated squared-weight sum used by the
     * McDiarmid bound. An {@code EWMA_Estimator} below zero means "no data yet".
     * BUGFIX: now implements Serializable — the class declared a
     * serialVersionUID without being serializable, so detector state could not
     * actually be persisted.
     */
    public static class SampleInfo implements java.io.Serializable {

        private static final long serialVersionUID = 1L;

        public double EWMA_Estimator;

        public double independentBoundedConditionSum;

        public SampleInfo() {
            this.EWMA_Estimator = -1.0;
        }
    }

    // BUGFIX: these five fields were declared static, so every HDDM_W_Test
    // instance shared (and corrupted) the same monitoring state. They hold
    // per-detector state and must be instance fields.
    private SampleInfo sample1_IncrMonitoring,
            sample2_IncrMonitoring,
            sample1_DecrMonitoring,
            sample2_DecrMonitoring,
            total;

    // Best (lowest upper / highest lower) confidence bounds seen so far.
    protected double incrCutPoint, decrCutPoint;

    // Cached option values, refreshed by resetLearning().
    protected double lambda;

    protected double warningConfidence;

    protected double driftConfidence;

    protected boolean oneSidedTest;

    // Number of observations since the last reset.
    protected int width;

    public HDDM_W_Test() {
        resetLearning();
    }

    @Override
    public void resetLearning() {
        super.resetLearning();
        this.total = new SampleInfo();
        this.sample1_DecrMonitoring = new SampleInfo();
        this.sample1_IncrMonitoring = new SampleInfo();
        this.sample2_DecrMonitoring = new SampleInfo();
        this.sample2_IncrMonitoring = new SampleInfo();
        this.incrCutPoint = Double.MAX_VALUE;
        this.decrCutPoint = Double.MIN_VALUE;
        this.lambda = this.lambdaOption.getValue();
        this.driftConfidence = this.driftConfidenceOption.getValue();
        this.warningConfidence = this.warningConfidenceOption.getValue();
        this.oneSidedTest = this.oneSidedTestOption.getChosenIndex() == 0;
        this.width = 0;
        this.delay = 0;
    }

    /**
     * Convenience entry point: a wrong prediction ({@code false}) is encoded as
     * error value 1.0, a correct one as 0.0.
     */
    public void input(boolean prediction) {
        double value = prediction == false ? 1.0 : 0.0;
        input(value);
    }

    @Override
    public void input(double value) {
        double auxDecayRate = 1.0 - lambda;
        this.width++;
        if (total.EWMA_Estimator < 0) {
            // First observation seeds the estimator directly.
            total.EWMA_Estimator = value;
            total.independentBoundedConditionSum = 1;
        } else {
            total.EWMA_Estimator = lambda * value + auxDecayRate * total.EWMA_Estimator;
            total.independentBoundedConditionSum = lambda * lambda + auxDecayRate * auxDecayRate * total.independentBoundedConditionSum;
        }
        updateIncrStatistics(value, driftConfidence);
        // Drift test first; drift resets all state, warning only raises the flag.
        if (monitorMeanIncr(value, driftConfidence)) {
            resetLearning();
            this.isChangeDetected = true;
            this.isWarningZone = false;
        } else if (monitorMeanIncr(value, warningConfidence)) {
            this.isChangeDetected = false;
            this.isWarningZone = true;
        } else {
            this.isChangeDetected = false;
            this.isWarningZone = false;
        }
        updateDecrStatistics(value, driftConfidence);
        // The two-sided test also resets on a significant decrease, without
        // raising the drift flag.
        if (!oneSidedTest && monitorMeanDecr(value, driftConfidence)) {
            resetLearning();
        }
        this.estimation = this.total.EWMA_Estimator;
    }

    /**
     * McDiarmid-bound test: true when sample2's EWMA exceeds sample1's by more
     * than the bound at the given confidence. Returns false while either sample
     * is still empty.
     */
    public boolean detectMeanIncrement(SampleInfo sample1, SampleInfo sample2, double confidence) {
        if (sample1.EWMA_Estimator < 0 || sample2.EWMA_Estimator < 0) {
            return false;
        }
        double bound = Math.sqrt((sample1.independentBoundedConditionSum + sample2.independentBoundedConditionSum) * Math.log(1 / confidence) / 2);
        return sample2.EWMA_Estimator - sample1.EWMA_Estimator > bound;
    }

    void updateIncrStatistics(double valor, double confidence) {
        // NOTE(review): the confidence parameter is ignored here; the cut point
        // always uses driftConfidence — confirm this is intended.
        double auxDecay = 1.0 - lambda;
        double bound = Math.sqrt(total.independentBoundedConditionSum * Math.log(1.0 / driftConfidence) / 2);
        if (total.EWMA_Estimator + bound < incrCutPoint) {
            // New lowest upper bound: freeze the reference sample here.
            incrCutPoint = total.EWMA_Estimator + bound;
            sample1_IncrMonitoring.EWMA_Estimator = total.EWMA_Estimator;
            sample1_IncrMonitoring.independentBoundedConditionSum = total.independentBoundedConditionSum;
            sample2_IncrMonitoring = new SampleInfo();
            this.delay = 0;
        } else {
            // Keep folding new observations into the post-cut-point sample.
            this.delay++;
            if (sample2_IncrMonitoring.EWMA_Estimator < 0) {
                sample2_IncrMonitoring.EWMA_Estimator = valor;
                sample2_IncrMonitoring.independentBoundedConditionSum = 1;
            } else {
                sample2_IncrMonitoring.EWMA_Estimator = lambda * valor + auxDecay * sample2_IncrMonitoring.EWMA_Estimator;
                sample2_IncrMonitoring.independentBoundedConditionSum = lambda * lambda + auxDecay * auxDecay * sample2_IncrMonitoring.independentBoundedConditionSum;
            }
        }
    }

    protected boolean monitorMeanIncr(double valor, double confidence) {
        return detectMeanIncrement(sample1_IncrMonitoring, sample2_IncrMonitoring, confidence);
    }

    void updateDecrStatistics(double valor, double confidence) {
        // Symmetric to updateIncrStatistics, tracking mean decreases.
        // NOTE(review): as above, the confidence parameter is unused.
        double auxDecay = 1.0 - lambda;
        double epsilon = Math.sqrt(total.independentBoundedConditionSum * Math.log(1.0 / driftConfidence) / 2);
        if (total.EWMA_Estimator - epsilon > decrCutPoint) {
            decrCutPoint = total.EWMA_Estimator - epsilon;
            sample1_DecrMonitoring.EWMA_Estimator = total.EWMA_Estimator;
            sample1_DecrMonitoring.independentBoundedConditionSum = total.independentBoundedConditionSum;
            sample2_DecrMonitoring = new SampleInfo();
        } else {
            if (sample2_DecrMonitoring.EWMA_Estimator < 0) {
                sample2_DecrMonitoring.EWMA_Estimator = valor;
                sample2_DecrMonitoring.independentBoundedConditionSum = 1;
            } else {
                sample2_DecrMonitoring.EWMA_Estimator = lambda * valor + auxDecay * sample2_DecrMonitoring.EWMA_Estimator;
                sample2_DecrMonitoring.independentBoundedConditionSum = lambda * lambda + auxDecay * auxDecay * sample2_DecrMonitoring.independentBoundedConditionSum;
            }
        }
    }

    protected boolean monitorMeanDecr(double valor, double confidence) {
        // Decrease test is the increase test with the samples swapped.
        return detectMeanIncrement(sample2_DecrMonitoring, sample1_DecrMonitoring, confidence);
    }

    @Override
    protected void prepareForUseImpl(TaskMonitor monitor, ObjectRepository repository) {
        resetLearning();
    }

    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no textual description is produced.
    }
}
| Java |
/*
* DDM.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Manuel Baena (mbaena@lcc.uma.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.classifiers.trees.HoeffdingAdaptiveTree;
import moa.core.ObjectRepository;
import moa.options.*;
import moa.streams.filters.StreamFilter;
import moa.tasks.TaskMonitor;
/**
* Ensemble Drift detection method
*
*
* @author Manuel Baena (mbaena@lcc.uma.es)
* @version $Revision: 7 $
*/
/**
 * Ensemble drift detector: feeds every input to a user-configured list of
 * change detectors and combines their accumulated (sticky) votes using a
 * max / min / majority rule.
 */
public class EnsembleDriftDetectionMethods extends AbstractChangeDetector {

    private static final long serialVersionUID = -3518369648142099719L;

    //private static final int DDM_MINNUMINST = 30;
    public IntOption minNumInstancesOption = new IntOption(
            "minNumInstances",
            'n',
            "The minimum number of instances before permitting detecting change.",
            30, 0, Integer.MAX_VALUE);

    /** Comma-separated list of member change detectors (option -c). */
    public ListOption changeDetectorsOption = new ListOption("changeDetectors", 'c',
            "Change Detectors to use.", new ClassOption("driftDetectionMethod", 'd',
            "Drift detection method to use.", ChangeDetector.class, "DDM"),
            new Option[0], ',');

    /** Vote-combination rule (option -l): max, min or majority. */
    public MultiChoiceOption predictionOption = new MultiChoiceOption(
            "prediction", 'l', "Prediction to use.", new String[]{
                "max", "min", "majority"}, new String[]{
                "Maximum",
                "Minimum",
                "Majority"}, 0);

    public EnsembleDriftDetectionMethods() {
        resetLearning();
    }

    /**
     * Clears the accumulated per-detector votes.
     * NOTE(review): the member detectors themselves are not reset here —
     * confirm that is intended after an ensemble-level detection.
     */
    @Override
    public void resetLearning() {
        //if (preds == null) {
        preds = new Boolean[this.changeDetectorsOption.getList().length];
        //}
        for (int i = 0; i < preds.length; i++) {
            preds[i] = false;
        }
    }

    // Materialized member detectors (built in prepareForUseImpl).
    protected ChangeDetector[] cds;

    // Sticky vote per detector: true once it has flagged a change since the
    // last ensemble reset.
    protected Boolean[] preds;

    @Override
    public void input(double prediction) {
        // NOTE(review): cds is only created in prepareForUseImpl; calling
        // input() before prepareForUse() throws NullPointerException — confirm
        // callers always prepare the detector first.
        for (int i = 0; i < cds.length; i++) {
            cds[i].input(prediction);
            if (cds[i].getChange()) {
                preds[i] = true;
            }
        }
        int typePrediction = this.predictionOption.getChosenIndex();
        int numberDetections = 0;
        for (int i = 0; i < cds.length; i++) {
            if (preds[i] == true) {
                numberDetections++;
            }
        }
        if (typePrediction == 0) {
            //Choose Max: every detector must have voted
            this.isChangeDetected = (numberDetections == cds.length);
        } else if (typePrediction == 1) {
            //Choose Min: a single vote suffices
            this.isChangeDetected = (numberDetections > 0);
        } else if (typePrediction == 2) {
            //Choose Majority: strictly more than half of the detectors
            this.isChangeDetected = (numberDetections > cds.length/2) ;
        }
        if (this.isChangeDetected == true) {
            this.resetLearning();
        }
    }

    //public double[] getOutput() {
    //    double[] res = {this.isChangeDetected ? 1 : 0, this.isWarningZone ? 1 : 0, this.delay, this.estimation};
    //    return res;
    //}
    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no textual description is produced.
    }

    @Override
    protected void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        // Materialize and prepare each configured member detector.
        Option[] changeDetectorOptions = this.changeDetectorsOption.getList();
        cds = new ChangeDetector[changeDetectorOptions.length];
        preds = new Boolean[changeDetectorOptions.length];
        for (int i = 0; i < cds.length; i++) {
            //monitor.setCurrentActivity("Materializing change detector " + (i + 1)
            //        + "...", -1.0);
            cds[i] = ((ChangeDetector) ((ClassOption) changeDetectorOptions[i]).materializeObject(monitor, repository)).copy();
            if (monitor.taskShouldAbort()) {
                return;
            }
            if (cds[i] instanceof OptionHandler) {
                monitor.setCurrentActivity("Preparing change detector " + (i + 1)
                        + "...", -1.0);
                ((OptionHandler) cds[i]).prepareForUse(monitor, repository);
                if (monitor.taskShouldAbort()) {
                    return;
                }
            }
            preds[i] = false;
        }
    }
}
/*
* ADWINChangeDetector.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.options.FloatOption;
import moa.tasks.TaskMonitor;
/**
* Drift detection method based in ADWIN. ADaptive sliding WINdow is a change
* detector and estimator. It keeps a variable-length window of recently seen
* items, with the property that the window has the maximal length statistically
* consistent with the hypothesis "there has been no change in the average value
* inside the window".
*
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
/**
 * Thin change-detector wrapper around ADWIN (ADaptive sliding WINdow).
 * Each input is forwarded to the window; a change is reported whenever the
 * window had to shrink to stay statistically consistent.
 */
public class ADWINChangeDetector extends AbstractChangeDetector {

    /** Underlying adaptive-window estimator; created lazily on first use. */
    protected ADWIN adwin;

    /** Significance level handed to ADWIN (option -a). */
    public FloatOption deltaAdwinOption = new FloatOption("deltaAdwin", 'a',
            "Delta of Adwin change detection", 0.002, 0.0, 1.0);

    @Override
    public void input(double inputValue) {
        if (adwin == null) {
            resetLearning();
        }
        // ADWIN flags a change when it drops the older part of its window.
        boolean windowShrank = adwin.setInput(inputValue);
        this.isChangeDetected = windowShrank;
        // This detector has no warning zone and reports detections immediately.
        this.isWarningZone = false;
        this.delay = 0.0;
        this.estimation = adwin.getEstimation();
    }

    @Override
    public void resetLearning() {
        this.adwin = new ADWIN(this.deltaAdwinOption.getValue());
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no textual description is produced.
    }

    @Override
    protected void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        // Nothing to prepare: the ADWIN instance is created lazily in input().
    }
}
| Java |
/*
* SeqDrift1ChangeDetector.java
* Copyright (C) 2011 University of Waikato, Hamilton, New Zealand
* @author Sakthithasan Sripirakas sripirakas363 at yahoo dot com
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
import java.util.ArrayList;
import moa.AbstractMOAObject;
import moa.classifiers.core.driftdetection.SeqDrift2ChangeDetector.Repository;
/**
* SeqDrift1ChangeDetector.java. This extends Abstract Change Detector
* as required by MOA.
*
* Sakthithasan, S., Pears, R., & Koh, Y. (2013). One Pass Concept Change
* Detection for Data Streams. In J. Pei, V. Tseng, L. Cao, H. Motoda, & G. Xu
* (Eds.), Advances in Knowledge Discovery and Data Mining (Vol. 7819, pp.
* 461-472): Springer Berlin Heidelberg.
*
* @author Sakthithasan Sripirakas sripirakas363 at yahoo dot com
* @version $Revision: 7 $
*/
public class SeqDrift1ChangeDetector extends AbstractChangeDetector {
// Detector instance, created lazily on the first call to input().
protected SeqDrift1 seqDrift1;
//protected ADWIN adwin;
/** Drift significance level (option -d). */
public FloatOption deltaOption = new FloatOption("deltaSeqDrift1", 'd',
        "Delta of SeqDrift1 change detection", 0.01, 0.0, 1.0);
/** Warning significance level (option -w). */
public FloatOption deltaWarningOption = new FloatOption("deltaWarningOption",
        'w', "Delta of SeqDrift1 change detector to declare warning state", 0.1, 0.0, 1.0);
/** Block size used by the detector (option -b). */
public IntOption blockSeqDriftOption = new IntOption("blockSeqDrift1Option", 'b', "Block size of SeqDrift1 change detector", 200, 100, 10000);
@Override
public void input(double inputValue) {
    // Build the detector lazily from the current option values.
    if (this.seqDrift1 == null) {
        resetLearning();
    }
    this.isChangeDetected = seqDrift1.setInput(inputValue);
    // NOTE(review): SeqDrift1 keeps an internal warning state that is not
    // surfaced here — confirm that is intended.
    this.isWarningZone = false;
    this.delay = 0.0;
    this.estimation = seqDrift1.getEstimation();
}
@Override
public void resetLearning() {
    // Rebuild the detector from the current option values.
    seqDrift1 = new SeqDrift1((double) this.deltaOption.getValue(), ((int) this.blockSeqDriftOption.getValue()), ((double) this.deltaWarningOption.getValue()));
}
@Override
public void getDescription(StringBuilder sb, int indent) {
    // Intentionally empty: no textual description is produced.
}
@Override
protected void prepareForUseImpl(TaskMonitor monitor,
        ObjectRepository repository) {
    // Nothing to prepare: the detector is created lazily in input().
}
/**
* SeqDrift1 uses sliding window to build a sequential change detection model
* that uses statistically sound guarantees defined using Bernstein Bound on
* false positive and false negative rates. This is a block based approach and
* checks for changes in the data values only at block boundaries as opposed to
* the methods on per instance basis. SeqDrift1 maintains a sliding window and
* repository. Repository gathers the new instances and sliding window stores
* only the data values that are statistically not different, in other words
* from the same distribution. If the data values in the repository are
* consistent with the values in sliding window the data values of the
* repository are copied to the sliding window applying reservoir algorithm. The
* hypothesis is that the mean values of the sliding window and right repository
* are not statistically different. In addition, SeqDrift1 declares a warning
* state depending on warning significance level and increases sample size to
* get a statistically more rigorous mean value
*
*
*
* Sakthithasan, S., Pears, R., & Koh, Y. (2013). One Pass Concept Change
* Detection for Data Streams. In J. Pei, V. Tseng, L. Cao, H. Motoda, & G. Xu
* (Eds.), Advances in Knowledge Discovery and Data Mining (Vol. 7819, pp.
* 461-472): Springer Berlin Heidelberg.
*
* @author Sakthithasan Sripirakas sripirakas363 at yahoo dot com
*/
class SeqDrift1 extends AbstractMOAObject {
// Long-term sample ("repository") and recent sample ("sliding window").
private Repository leftRepository = null;
private Repository rightRepository = null;
// Indices already drawn in the current random-sampling round.
private ArrayList<Integer> uniqueRandomNumbers = null;
//parameters
private double significanceLevel = 0.01; // drift significance (delta)
private int blockSize = 200;
private int sampleSize = 200; // doubled while in warning state
private int slidingWindowBlockCount; // maximum number of blocks kept (1/delta)
private double warningSignificanceLevel = 0.1;
//variables
private int instanceCount = 0;
private double leftRepositoryMean = 0.0;
private double rightRepositoryMean = 0.0;
private int blockCount = 0; // blocks currently held in the left repository
private double variance = 0.0;
private boolean isWarning = false;
private double total = 0.0; // running sum over both repositories
private Epsilon epsilon = null; // warning/drift thresholds
// Possible outcomes of getDriftType().
public final static int DRIFT = 0;
public final static int WARNING = 1;
public final static int HOMOGENEOUS = 2;
public final static int INTERNAL_DRIFT = 3;
/**
 * @param _significanceLevel        drift significance level (delta)
 * @param _blockSize                number of instances per block
 * @param _significanceWarningLevel warning significance level
 */
public SeqDrift1(double _significanceLevel, int _blockSize, double _significanceWarningLevel) {
    //Parameters
    significanceLevel = _significanceLevel;
    blockSize = _blockSize;
    sampleSize = _blockSize;
    slidingWindowBlockCount = (int) (1 / _significanceLevel); //Sliding window size is automatically set as 1/delta
    warningSignificanceLevel = _significanceWarningLevel;
    //Variables
    instanceCount = 0;
    blockCount = 0;
    variance = 0.0;
    isWarning = false;
    total = 0.0;
    epsilon = new Epsilon();
    //Data Structures
    // Repository is a nested class of SeqDrift2ChangeDetector, so an outer
    // instance is required to construct the two repositories.
    SeqDrift2ChangeDetector sd = new SeqDrift2ChangeDetector();
    leftRepository = sd.new Repository(blockSize);
    rightRepository = sd.new Repository(blockSize);
    uniqueRandomNumbers = new ArrayList<Integer>();
}
/**
 * Feeds one value into the detector. Drift is only checked at sample-size
 * boundaries; returns true exactly when a drift is declared.
 *
 * @param _inputValue the new observation
 * @return true if a drift was detected at this boundary, false otherwise
 */
public boolean setInput(double _inputValue) {
    ++instanceCount;
    addToRightRepository(_inputValue);
    total = total + _inputValue;
    if ((instanceCount % sampleSize) == 0) //Drift point check
    {
        rightRepository.markLastAddedBlock();
        if (isWarning) {
            removeExcessRightRepositoryValues();
        }
        //Assume that there is no warning or drift now
        isWarning = false;
        //Detect a drift with warning significance level
        int iDriftType = getDriftType();
        if (iDriftType == DRIFT)
        {
            isWarning = false; //Warning state is set to false as it is an actual drift
            clearLeftRepository();
            moveValuesFromRightToLeft();
            sampleSize = blockSize;
            return true;
        } else if (iDriftType == WARNING) {
            // Warning: double the sample size to gather more evidence.
            isWarning = true;
            sampleSize = sampleSize * 2;
            return false;//In warning state no instance is moved from sliding window to repository. Thus returning now
        } /*
         else if(iDriftType == INTERNAL_DRIFT)
         {
         isWarning = false;
         isDrift = false;
         sampleSize = blockSize;
         clearrepository(); //Due to drift clear the data that belong to the old concept in repository
         movefromsliding windowTorepository();
         return false;
         }
         */
        else
        {
            isWarning = false;
            moveValuesFromRightToLeft(); //All instances in sliding window should be moved to repository if no drift or if drift but not in warning
            return false;
        }
    }
    return false;
}
/**
 * Appends one instance to the sliding window (right repository). The window
 * accepts values while it is below the current sample size, or without limit
 * during the warning state; otherwise the request is logged and dropped.
 *
 * @param _inputValue the new observation
 */
private void addToRightRepository(double _inputValue) {
    if ((rightRepository.getSize() < sampleSize) || isWarning) //By default sliding window should have maximum of blockSize instances except in warning state
    {
        rightRepository.add(new Double(_inputValue));
    } else {
        System.out.println("request to add to sliding window sliding window size :" + rightRepository.getSize() + " Warning :" + isWarning);
    }
}
/**
 * Trims the sliding window down to its maximum size
 * (slidingWindowBlockCount * blockSize), keeping the running total in sync
 * with the removed blocks.
 */
private void removeExcessRightRepositoryValues() {
    int maxRightRepositorySize = slidingWindowBlockCount * blockSize;
    while (rightRepository.getSize() > maxRightRepositorySize) {
        total = total - rightRepository.getFirstBlockTotal();
        rightRepository.removeFirstBlock();
    }
}
/**
 * Copies every instance from the sliding window into the left repository
 * (marking block boundaries), evicts the oldest left-repository blocks beyond
 * the window limit, and finally empties the sliding window. Calling this while
 * in the warning state is an error and aborts the process.
 */
private void moveValuesFromRightToLeft() {
    for (int iIndex = 0; iIndex < rightRepository.getSize(); iIndex++) // Copy all instances from sliding window to repository
    {
        if (((iIndex) % sampleSize) == 0) {
            // Start of a new block boundary in the left repository.
            leftRepository.add(rightRepository.get(iIndex), true);
        } else {
            leftRepository.add(rightRepository.get(iIndex));
        }
    }
    blockCount = blockCount + rightRepository.getSize() / blockSize; //Determine the block counter value after adding the instances
    if (slidingWindowBlockCount > 0) //Sliding is enabled
    {
        while (blockCount > slidingWindowBlockCount) //Remove old instances from repository
        {
            total = total - leftRepository.getFirstBlockTotal();
            leftRepository.removeFirstBlock();
            --blockCount;
        }
    }
    if (!isWarning) //If in warning do not remove the instances from sliding window
    {
        rightRepository.removeAll();
    } else {
        System.out.println("ERROR: requested to move instances from sliding window to repository");
        System.exit(2);
    }
}
/**
 * Empties the left repository after a drift is detected, resetting the block
 * counter and removing the repository's contribution from the running total.
 */
private void clearLeftRepository() {
    blockCount = 0;
    total = total - leftRepository.getTotal();
    leftRepository.removeAll();
}
/**
 * Classifies the current state by comparing the left- and right-repository
 * sample means against the warning and drift epsilons.
 *
 * @return one of {@link #DRIFT}, {@link #WARNING} or {@link #HOMOGENEOUS}
 */
private int getDriftType() {
    if (getWidth() > blockSize) {
        leftRepositoryMean = getLeftRepositorySampleMean(); //Get the subsample mean from repository
        rightRepositoryMean = getRightRepositorySampleMean(); //Get the subsample mean from sliding window
        epsilon = getEpsilon();
        double absValue = Math.abs(rightRepositoryMean - leftRepositoryMean);
        if (instanceCount > sampleSize && leftRepository.getSize() > 0) {
            if (epsilon.d_warningEpsilon <= absValue) //Warning drift is detected
            {
                if (epsilon.d_driftEpsilon <= absValue) //Drift is detected
                {
                    //if(rightRepositoryMean > leftRepositoryMean)
                    //{
                    return DRIFT;
                    //}
                    /*
                     else
                     {
                     return INTERNAL_DRIFT;
                     }
                     */
                } else {
                    return WARNING;
                }
            } else {
                return HOMOGENEOUS;
            }
        }
        return HOMOGENEOUS;
    } else {
        return HOMOGENEOUS;
    }
}
/**
 * Mean of a random subsample (drawn without replacement) of the left
 * repository; small repositories are used in full.
 * NOTE(review): the sum is always divided by sampleSize, even when
 * getPossibleSampleSize() drew fewer elements — confirm intended.
 */
private double getLeftRepositorySampleMean() {
    double leftTotal = 0.0;
    if (leftRepository.getSize() > 0) {
        if (leftRepository.getSize() <= sampleSize) {
            // Repository fits in one sample: use all of it.
            return getLeftResitoryMean();
        } else {
            int iPossibleSampleSize = getPossibleSampleSize();
            for (int iCount = 0; iCount < iPossibleSampleSize; iCount++) {
                int iNextRandomNumber = getNextRandomNumber(leftRepository.getSize() - 1);
                if (isUniqueRandomNumber(iNextRandomNumber)) {
                    leftTotal = leftTotal + leftRepository.get(iNextRandomNumber);
                } else {
                    iCount--; // duplicate index: redraw
                }
            }
            uniqueRandomNumbers.clear();
        }
    }
    return leftTotal / sampleSize;
}
    /**
     * Number of elements to draw for the sub-sample comparison.
     * <p>
     * Returns {@code sampleSize} when both repositories hold at least that
     * many elements. Otherwise it enlarges {@code sampleSize} to the size of
     * the bigger repository and returns 0, so the caller draws no elements.
     * NOTE(review): returning 0 while also growing sampleSize looks
     * suspicious (callers then divide an empty sum by sampleSize) — confirm.
     *
     * @return the usable sub-sample size (possibly 0)
     */
    private int getPossibleSampleSize() {
        int iNumberSampleElements = 0;
        int leftResitorySize = leftRepository.getSize();
        int rightRepositorySize = rightRepository.getSize();
        if (sampleSize <= leftResitorySize && sampleSize <= rightRepositorySize) {
            iNumberSampleElements = sampleSize;
        } else {
            // Side effect: sampleSize is grown to the larger repository size.
            if (leftResitorySize > rightRepositorySize) {
                sampleSize = leftResitorySize;
            } else {
                sampleSize = rightRepositorySize;
            }
        }
        return iNumberSampleElements;
    }
private boolean isUniqueRandomNumber(int _iTrialNum) {
for (int iIndex = 0; iIndex < uniqueRandomNumbers.size(); iIndex++) {
if (uniqueRandomNumbers.get(iIndex) == _iTrialNum) {
return false;
}
}
uniqueRandomNumbers.add(_iTrialNum);
return true;
}
private double getLeftResitoryMean() {
double dTotal = 0.0;
dTotal = dTotal + leftRepository.getTotal();
return dTotal / sampleSize;
}
private double getRightRepositoryMean() {
double dTotal = 0.0;
dTotal = dTotal + rightRepository.getTotal();
return dTotal / sampleSize;
}
    /**
     * Sub-sample mean of the right repository (sliding window); mirrors
     * {@code getLeftRepositorySampleMean()}.
     *
     * @return the accumulated sample total divided by {@code sampleSize}
     */
    private double getRightRepositorySampleMean() {
        double dTotal = 0.0;
        if (rightRepository.getSize() > 0) {
            if (rightRepository.getSize() <= sampleSize) {
                return getRightRepositoryMean();
            } else {
                // NOTE(review): getPossibleSampleSize() may return 0 after
                // mutating sampleSize; see that method.
                int iPossibleSampleSize = getPossibleSampleSize();
                for (int iCount = 0; iCount < iPossibleSampleSize; iCount++) {
                    int iNextRandomNumber = getNextRandomNumber(rightRepository.getSize() - 1);
                    if (isUniqueRandomNumber(iNextRandomNumber)) {
                        dTotal = dTotal + rightRepository.get(iNextRandomNumber);
                    } else {
                        iCount--; // duplicate index: retry this draw
                    }
                }
                uniqueRandomNumbers.clear();
            }
        }
        // Divides by sampleSize even if fewer elements were actually drawn.
        return dTotal / sampleSize;
    }
private int getNextRandomNumber(int _iSize) {
double dRandomNumber = _iSize * Math.random();
long lRoundedNumber = Math.round(dRandomNumber);
return (int) lRoundedNumber;
}
    /**
     * Sample variance of all stored values.
     * <p>
     * The two-term sum assumes the stream values are 0/1 (Bernoulli):
     * {@code total} values equal 1 (deviation {@code 1 - mean}) and the rest
     * equal 0 (deviation {@code mean}) — TODO confirm this assumption.
     *
     * @return the (n-1)-normalised variance; not finite when fewer than two
     *         elements are stored
     */
    private double getVariance() {
        double dMean = getPopulationMean();
        double d1minusMean = dMean - 1;
        double dtotalsize = getWidth();
        double x = total * d1minusMean * d1minusMean + (dtotalsize - total) * dMean * dMean;
        double y = dtotalsize - 1;
        return x / y;
    }
private Epsilon getEpsilon() {
int iNumberOfTests = leftRepository.getNumOfTests() + rightRepository.getNumOfTests();
if (iNumberOfTests > 1) {
variance = getVariance();
//Drift epsilon
double ddeltadash = getDriftEpsilon(iNumberOfTests);
double x = Math.log(4.0 / ddeltadash);
double squareRootValue = Math.sqrt(x * x + 18 * variance * sampleSize * x);
double depsilon = (2.0 / (3 * sampleSize)) * (x + squareRootValue);
epsilon.d_driftEpsilon = depsilon;
//warning epsilon
ddeltadash = getWarningEpsilon(iNumberOfTests);
x = Math.log(4.0 / ddeltadash);
squareRootValue = Math.sqrt(x * x + 18 * variance * sampleSize * x);
depsilon = (2.0 / (3 * sampleSize)) * (x + squareRootValue);
epsilon.d_warningEpsilon = depsilon;
}
return epsilon;
}
private double getDriftEpsilon(int _inumTests) {
double errorValue = 2.0 * (1.0 - Math.pow(0.5, _inumTests));
double ddeltadash = significanceLevel;
ddeltadash = significanceLevel / errorValue;
return ddeltadash;
}
private double getWarningEpsilon(int _inumTests) {
double dTotalError = 2.0 * (1.0 - Math.pow(0.5, _inumTests));
double ddeltadash = warningSignificanceLevel / dTotalError;
return ddeltadash;
}
private double getPopulationMean() {
return getTotal() / getWidth();
}
    /** @return the running sum of all values currently stored. */
    private double getTotal() {
        return total;
    }
    /**
     * Total number of stored elements.
     *
     * @return left repository size plus right repository (sliding window) size
     */
    public int getWidth() {
        return leftRepository.getSize() + rightRepository.getSize();
    }
public double getEstimation() {
int iWidth = getWidth();
if (iWidth != 0) {
return getTotal() / getWidth();
} else {
return 0;
}
}
    /** Intentionally empty: this detector produces no description output. */
    public void getDescription(StringBuilder sb, int indent) {
    }
}
/**
 * Simple mutable holder for the warning- and drift-level epsilon thresholds.
 */
class Epsilon {

    // Threshold at the warning significance level.
    double d_warningEpsilon = 0.0;

    // Threshold at the drift significance level.
    double d_driftEpsilon = 0.0;

    public Epsilon() {
        // Fields are already initialized at declaration; the previous
        // constructor redundantly re-assigned them.
    }

    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no description output.
    }
}
}
| Java |
/*
* PageHinkleyDM.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Manuel Baena (mbaena@lcc.uma.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package moa.classifiers.core.driftdetection;
import moa.core.ObjectRepository;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
/**
* Drift detection method based on the Page-Hinkley Test.
*
*
* @author Manuel Baena (mbaena@lcc.uma.es)
* @version $Revision: 7 $
*/
public class PageHinkleyDM extends AbstractChangeDetector {
private static final long serialVersionUID = -3518369648142099719L;
public IntOption minNumInstancesOption = new IntOption(
"minNumInstances",
'n',
"The minimum number of instances before permitting detecting change.",
30, 0, Integer.MAX_VALUE);
public FloatOption deltaOption = new FloatOption("delta", 'd',
"Delta parameter of the Page Hinkley Test", 0.005, 0.0, 1.0);
public FloatOption lambdaOption = new FloatOption("lambda", 'l',
"Lambda parameter of the Page Hinkley Test", 50, 0.0, Float.MAX_VALUE);
public FloatOption alphaOption = new FloatOption("alpha", 'a',
"Alpha parameter of the Page Hinkley Test", 1 - 0.0001, 0.0, 1.0);
private int m_n;
private double sum;
private double x_mean;
private double alpha;
private double delta;
private double lambda;
public PageHinkleyDM() {
resetLearning();
}
@Override
public void resetLearning() {
m_n = 1;
x_mean = 0.0;
sum = 0.0;
delta = this.deltaOption.getValue();
alpha = this.alphaOption.getValue();
lambda = this.lambdaOption.getValue();
}
@Override
public void input(double x) {
// It monitors the error rate
if (this.isChangeDetected == true || this.isInitialized == false) {
resetLearning();
this.isInitialized = true;
}
x_mean = x_mean + (x - x_mean) / (double) m_n;
sum = this.alpha * sum + (x - x_mean - this.delta);
m_n++;
// System.out.print(prediction + " " + m_n + " " + (m_p+m_s) + " ");
this.estimation = x_mean;
this.isChangeDetected = false;
this.isWarningZone = false;
this.delay = 0;
if (m_n < this.minNumInstancesOption.getValue()) {
return;
}
if (sum > this.lambda) {
this.isChangeDetected = true;
}
}
@Override
public void getDescription(StringBuilder sb, int indent) {
// TODO Auto-generated method stub
}
@Override
protected void prepareForUseImpl(TaskMonitor monitor,
ObjectRepository repository) {
// TODO Auto-generated method stub
}
} | Java |
/*
* SemiSupervisedLearner.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers;
/**
 * Marker interface for incremental semi-supervised models.
 * <p>
 * NOTE(review): the previous javadoc said "used only in the GUI Regression
 * Tab", apparently copied from {@code Regressor}; confirm where this marker
 * interface is actually consulted.
 *
 * @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
 * @version $Revision: 7 $
 */
public interface SemiSupervisedLearner {
}
| Java |
/*
* Regressor.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers;
/**
 * Marker interface for incremental regression models; it is used only in the
 * GUI Regression Tab to identify learners that perform regression.
 *
 * @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
 * @version $Revision: 7 $
 */
public interface Regressor {
}
| Java |
/*
* MEKAClassifier.java
* Copyright (C) 2012 University of Waikato, Hamilton, New Zealand
* @author Jesse Read (jesse@tsc.uc3m.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.multilabel;
import moa.classifiers.meta.WEKAClassifier;
import moa.core.InstancesHeader;
import weka.classifiers.UpdateableClassifier;
import weka.core.Instance;
import weka.core.Instances;
/**
* Class for using a MEKA classifier.
* NOTE: This class only exists to adjust the classIndex by +1
* We can use the standard WEKAClassifier if we set -c L where, L = the number of labels + 1
* (Because MOA understands that L specified on the command line is the (L-1)th index).
*
* @author Jesse Read (jesse@tsc.uc3m.es)
* @version $Revision: 1 $
*/
public class MEKAClassifier extends WEKAClassifier {

    private static final long serialVersionUID = 1L;

    /** Number of labels L; -1 until derived from the context or first instance. */
    protected int m_L = -1;

    @Override
    public void setModelContext(InstancesHeader raw_header) {
        // MOA treats the class index as the (L-1)-th attribute, so L = classIndex + 1.
        m_L = (m_L < 0 ? raw_header.classIndex()+1 : m_L);
        super.setModelContext(raw_header);
    }

    /**
     * Trains the wrapped updateable WEKA/MEKA classifier, adjusting the class
     * index by +1 so the multi-label header is interpreted correctly (see the
     * class javadoc). Exits the JVM on any training failure.
     *
     * @param inst the training instance
     */
    @Override
    public void trainOnInstanceImpl(Instance inst) {
        if (m_L < 0) {
            m_L = inst.classIndex()+1;
        }
        try {
            if (numberInstances < 1) { // INIT: build the classifier on the first instance
                Instances D = inst.dataset();
                D.setClassIndex(m_L);
                this.instancesBuffer = new Instances(D);
                if (classifier instanceof UpdateableClassifier) {
                    this.instancesBuffer.setClassIndex(m_L);
                    this.classifier.buildClassifier(instancesBuffer);
                    this.isClassificationEnabled = true;
                } else {
                    // Fixed typo in the user-facing message ("suports").
                    System.err.println("Only supports UpdateableClassifiers for now.");
                    System.exit(1);
                }
            }
            else { // UPDATE: incremental training
                ((UpdateableClassifier) classifier).updateClassifier(inst);
            }
            numberInstances++;
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }
}
| Java |
/*
* MajorityLabelset.java
* Copyright (C) 2012 University of Waikato, Hamilton, New Zealand
* @author Jesse Read (jesse@tsc.uc3m.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.multilabel;
import java.util.HashMap;
import moa.classifiers.AbstractClassifier;
import moa.core.InstancesHeader;
import moa.core.Measurement;
import weka.core.Instance;
/**
* Majority Labelset classifier. Each labelset combination of relevances, e.g.
* [0,0,1,1,0,0], is treated as a single class value.
*
* @author Jesse Read (jesse@tsc.uc3m.es)
* @version $Revision: 1 $
*/
public class MajorityLabelset extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    // Number of labels L (classIndex + 1); -1 until known.
    private int m_L = -1;

    // Highest weighted labelset frequency observed so far.
    private double maxValue = 0.0;

    // Relevance vector of the currently most frequent labelset.
    private double prediction[] = null;

    // Weighted frequency of each labelset, keyed by its bit string.
    private HashMap<String, Double> classFreqs = new HashMap<String, Double>();

    /**
     * Encodes the first c (label) attributes of an instance as a bit string,
     * e.g. [0,0,1,1] becomes "0011".
     */
    private static final String toBitString(Instance ins, int c) {
        StringBuilder sb = new StringBuilder(c);
        for (int i = 0; i < c; i++) {
            sb.append((int) Math.round(ins.value(i)));
        }
        return sb.toString();
    }

    /**
     * Adds the instance's weight to its labelset's frequency and, when that
     * labelset is now (one of) the most frequent, caches its relevance vector
     * as the prediction.
     */
    protected void updateCount(Instance x, int L) {
        String y = toBitString(x, L);
        if (classFreqs.containsKey(y)) {
            double freq = classFreqs.get(y) + x.weight();
            classFreqs.put(y, freq);
            if (freq >= maxValue) {
                maxValue = freq;
                this.prediction = new double[L];
                for (int j = 0; j < L; j++) {
                    this.prediction[j] = x.value(j);
                }
            }
        } else {
            classFreqs.put(y, x.weight());
        }
    }

    @Override
    public void setModelContext(InstancesHeader raw_header) {
        //set the multilabel model context
        this.modelContext = raw_header;
        m_L = raw_header.classIndex() + 1;
        prediction = new double[m_L];
    }

    @Override
    public void resetLearningImpl() {
        // Fix: previously a no-op, so resetting the learner silently kept all
        // learned labelset statistics. Keep m_L (from the model context) so
        // training can continue immediately after a reset.
        this.classFreqs.clear();
        this.maxValue = 0.0;
        if (this.m_L > 0) {
            this.prediction = new double[this.m_L];
        }
    }

    @Override
    public void trainOnInstanceImpl(Instance x) {
        updateCount(x, m_L);
    }

    /**
     * Returns the relevance vector of the most frequent labelset seen so far,
     * re-sizing the internal state if the label count changed.
     */
    @Override
    public double[] getVotesForInstance(Instance x) {
        int L = x.classIndex() + 1;
        if (m_L != L) {
            System.err.println("set L = " + L);
            m_L = L;
            prediction = new double[m_L];
        }
        return prediction;
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        return new Measurement[]{};
    }

    @Override
    public boolean isRandomizable() {
        return false;
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // Intentionally empty: no model description produced.
    }
}
| Java |
/*
* MultilabelHoeffdingTree.java
* Copyright (C) 2012 University of Waikato, Hamilton, New Zealand
* @author Jesse Read (jesse@tsc.uc3m.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.multilabel;
import java.io.StringReader;
import java.util.List;
import moa.classifiers.Classifier;
import moa.classifiers.core.attributeclassobservers.AttributeClassObserver;
import moa.classifiers.trees.HoeffdingTree;
import moa.core.InstancesHeader;
import moa.core.utils.Converter;
import weka.core.Instance;
import weka.core.Instances;
/**
* Hoeffding Tree for classifying multi-label data.
*
* A Hoeffding tree is an incremental, anytime decision tree induction algorithm
* that is capable of learning from massive data streams, assuming that the
* distribution generating examples does not change over time.
*
*
*/
public class MultilabelHoeffdingTree extends HoeffdingTreeClassifLeaves {
    // Needs to use InfoGainSplitCriterionMultiLabel, since multilabel entropy is calculated in a different way
    // Trains a mlinstance adding statistics of several class values and training node classifiers
    // Get votes from the training node classifier

    private static final long serialVersionUID = 1L;

    // Number of labels L (classIndex + 1); -1 until the first prediction.
    public int m_L = -1;

    // Converts multi-label format to single-label format; built lazily in
    // getVotesForInstance once L is known.
    protected Converter converter = null;

    @Override
    public void setModelContext(InstancesHeader raw_header) {
        //set the multilabel model context
        this.modelContext = raw_header;
    }

    //It uses several class values
    /** Frozen leaf: only updates the class distribution, once per relevant label. */
    public static class MultilabelInactiveLearningNode extends InactiveLearningNode {

        private static final long serialVersionUID = 1L;

        public MultilabelInactiveLearningNode(double[] initialClassObservations) {
            super(initialClassObservations);
        }

        @Override
        public void learnFromInstance(Instance inst, HoeffdingTree ht) {
            // Count the instance weight once for every relevant label.
            List<Integer> labels = ((MultilabelHoeffdingTree) ht).converter.getRelevantLabels(inst);
            for (int l : labels){
                this.observedClassDistribution.addToValue( l, inst.weight());
            }
        }
    }

    // It uses classifier at nodes, and to be able to train with several class values
    /** Active leaf holding its own multi-label classifier for predictions. */
    public class MultilabelLearningNodeClassifier extends LearningNodeClassifier {
        //protected Classifier classifier;

        private static final long serialVersionUID = 1L;

        public MultilabelLearningNodeClassifier(double[] initialClassObservations, Classifier cl, MultilabelHoeffdingTree ht ) {
            super(initialClassObservations);
            if (cl== null) {
                // No parent classifier: start from a fresh copy of the
                // configured leaf learner.
                this.classifier = ((Classifier) getPreparedClassOption(ht.learnerOption)).copy();
                this.classifier.resetLearning();
                InstancesHeader raw_header = ht.getModelContext();
                this.classifier.setModelContext(raw_header);
            }
            else{
                // After a split: inherit a copy of the parent leaf's classifier.
                this.classifier = cl.copy();
            }
        }

        @Override
        public double[] getClassVotes(Instance inst, HoeffdingTree ht) {
            if (this.classifier == null) {
                // No classifier yet: all-zero vote vector of length L.
                return new double[((MultilabelHoeffdingTree) ht).converter.getL()];
            }
            return this.classifier.getVotesForInstance(inst);
        }

        @Override
        public void disableAttribute(int attIndex) {
            // should not disable poor atts - they are used in NB calc
        }

        public Classifier getClassifier() {
            return this.classifier;
        }

        //It uses different class values
        @Override
        public void learnFromInstance(Instance mlinst, HoeffdingTree ht) {
            // Train the leaf classifier on the raw multi-label instance.
            this.classifier.trainOnInstance(mlinst);
            MultilabelHoeffdingTree mht = ((MultilabelHoeffdingTree) ht);
            List<Integer> labels = mht.converter.getRelevantLabels(mlinst);
            for (int l : labels){
                this.observedClassDistribution.addToValue( l, mlinst.weight());
            }
            // Update the per-attribute observers once per relevant label, on
            // the single-label formatted view of the instance.
            Instance inst = mht.converter.formatInstance(mlinst);
            for (int i = 0; i < inst.numAttributes() - 1; i++) {
                //for (int i = 1; i < inst.numAttributes(); i++) {
                int instAttIndex = modelAttIndexToInstanceAttIndex(i, inst);
                AttributeClassObserver obs = this.attributeObservers.get(i);
                if (obs == null) {
                    // Lazily create the right observer type for this attribute.
                    obs = inst.attribute(instAttIndex).isNominal() ? mht.newNominalClassObserver() : mht.newNumericClassObserver();
                    this.attributeObservers.set(i, obs);
                }
                for (int l : labels){
                    obs.observeAttributeClass(inst.value(instAttIndex), l, inst.weight());
                    //obs.observeAttributeClass(inst.value(instAttIndex), 0, inst.weight());
                }
            }
        }
    }

    public MultilabelHoeffdingTree() {
        // Poor-attribute removal is not supported for multi-label leaves.
        this.removePoorAttsOption = null;
    }

    @Override
    protected LearningNode newLearningNode(double[] initialClassObservations) {
        // Create new Learning Node null
        return new MultilabelLearningNodeClassifier(initialClassObservations,null,this);
    }

    //@Override
    protected LearningNode newLearningNode(double[] initialClassObservations, Classifier cl) {
        // Create new Learning Node
        return new MultilabelLearningNodeClassifier(initialClassObservations,cl,this);
    }

    //It uses MultilabelInactiveLearningNode since there are several class values
    @Override
    protected void deactivateLearningNode(ActiveLearningNode toDeactivate,
            SplitNode parent, int parentBranch) {
        Node newLeaf = new MultilabelInactiveLearningNode(toDeactivate
                .getObservedClassDistribution());
        if (parent == null) {
            this.treeRoot = newLeaf;
        } else {
            parent.setChild(parentBranch, newLeaf);
        }
        this.activeLeafNodeCount--;
        this.inactiveLeafNodeCount++;
    }

    @Override
    public double[] getVotesForInstance(Instance inst) {
        int L = inst.classIndex()+1;
        if (m_L != L) {
            // Update class labels
            m_L = L;
            // Create a converter, and its template
            converter = new Converter(m_L);
            try {
                converter.createTemplate(new Instances(new StringReader(this.modelContext.toString()),0));
            } catch(Exception e) {
                System.err.println("Error, failed to create a multi-label Instances template with L = "+m_L);
                System.out.println("Instances: "+this.modelContext.toString());
                e.printStackTrace();
                System.exit(1);
            }
        }
        if (this.treeRoot != null) {
            // Route the instance to its leaf and ask that leaf for votes.
            FoundNode foundNode = this.treeRoot.filterInstanceToLeaf(inst, null, -1);
            Node leafNode = foundNode.node;
            if (leafNode == null) {
                leafNode = foundNode.parent;
            }
            //System.out.println("y[] = "+Arrays.toString(leafNode.getClassVotes(inst,this)));
            return leafNode.getClassVotes(inst, this);
        }
        // Return empty array (this should only happen once! -- before we build the root node).
        return new double[this.m_L];
    }
}
| Java |
/*
* HoeffdingTreeClassifLeaves.java
* Copyright (C) 2012 University of Waikato, Hamilton, New Zealand
* @author Jesse Read (jesse@tsc.uc3m.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.multilabel;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import moa.classifiers.Classifier;
import moa.classifiers.core.AttributeSplitSuggestion;
import moa.classifiers.core.splitcriteria.SplitCriterion;
import moa.classifiers.trees.HoeffdingTree;
import moa.options.ClassOption;
import weka.core.Instance;
/**
* Hoeffding Tree that have a classifier at the leaves.
*
* A Hoeffding tree is an incremental, anytime decision tree induction algorithm
* that is capable of learning from massive data streams, assuming that the
* distribution generating examples does not change over time.
*
*
*/
public class HoeffdingTreeClassifLeaves extends HoeffdingTree {

    private static final long serialVersionUID = 1L;

    /** The classifier trained at each leaf. */
    public ClassOption learnerOption = new ClassOption("learner", 'a',
            "Classifier to train.", Classifier.class, "bayes.NaiveBayes");

    /** Active leaf that additionally trains and queries its own classifier. */
    public class LearningNodeClassifier extends ActiveLearningNode {

        protected Classifier classifier;

        private static final long serialVersionUID = 1L;

        // NOTE(review): this constructor leaves 'classifier' null, which would
        // NPE in learnFromInstance(); confirm it is only reached by code that
        // assigns the field afterwards.
        public LearningNodeClassifier(double[] initialClassObservations) {
            super(initialClassObservations);
        }

        public LearningNodeClassifier(double[] initialClassObservations, Classifier cl, HoeffdingTreeClassifLeaves ht) {
            super(initialClassObservations);
            //public void LearningNodeClassifier1(double[] initialClassObservations, Classifier cl, HoeffdingTreeClassifLeaves ht ) {
            if (cl == null) {
                // NOTE(review): unlike the multi-label subclass, the prepared
                // option instance is used directly (no copy()) -- verify that
                // sharing one classifier object across leaves is intended.
                this.classifier = (Classifier) getPreparedClassOption(ht.learnerOption);
            } else {
                // Inherit a copy of the parent leaf's classifier after a split.
                this.classifier = cl.copy();
            }
        }

        @Override
        public double[] getClassVotes(Instance inst, HoeffdingTree ht) {
            // Use the leaf classifier only once enough weight has been seen;
            // otherwise fall back to the plain majority-class votes.
            if (getWeightSeen() >= ((HoeffdingTreeClassifLeaves) ht).nbThresholdOption.getValue()) {
                return this.classifier.getVotesForInstance(inst);
            }
            return super.getClassVotes(inst, ht);
        }

        @Override
        public void disableAttribute(int attIndex) {
            // should not disable poor atts - they are used in NB calc
        }

        @Override
        public void learnFromInstance(Instance inst, HoeffdingTree ht) {
            // Train both the leaf classifier and the usual leaf statistics.
            this.classifier.trainOnInstance(inst);
            super.learnFromInstance(inst, ht);
        }

        public Classifier getClassifier() {
            return this.classifier;
        }
    }

    public HoeffdingTreeClassifLeaves() {
        // Poor-attribute removal is disabled for classifier leaves.
        this.removePoorAttsOption = null;
    }

    @Override
    protected LearningNode newLearningNode(double[] initialClassObservations) {
        return new LearningNodeClassifier(initialClassObservations, null, this);
    }

    //@Override
    protected LearningNode newLearningNode(double[] initialClassObservations, Classifier cl) {
        return new LearningNodeClassifier(initialClassObservations, cl, this);
    }

    /**
     * Same split logic as HoeffdingTree.attemptToSplit except for one marked
     * line: newly created children inherit the split leaf's classifier.
     */
    @Override
    protected void attemptToSplit(ActiveLearningNode node, SplitNode parent,
            int parentIndex) {
        if (!node.observedClassDistributionIsPure()) {
            SplitCriterion splitCriterion = (SplitCriterion) getPreparedClassOption(this.splitCriterionOption);
            AttributeSplitSuggestion[] bestSplitSuggestions = node.getBestSplitSuggestions(splitCriterion, this);
            Arrays.sort(bestSplitSuggestions);
            boolean shouldSplit = false;
            if (bestSplitSuggestions.length < 2) {
                shouldSplit = bestSplitSuggestions.length > 0;
            } else {
                // Split when the best suggestion beats the runner-up by more
                // than the Hoeffding bound, or the bound is below the tie threshold.
                double hoeffdingBound = computeHoeffdingBound(splitCriterion.getRangeOfMerit(node.getObservedClassDistribution()),
                        this.splitConfidenceOption.getValue(), node.getWeightSeen());
                AttributeSplitSuggestion bestSuggestion = bestSplitSuggestions[bestSplitSuggestions.length - 1];
                AttributeSplitSuggestion secondBestSuggestion = bestSplitSuggestions[bestSplitSuggestions.length - 2];
                if ((bestSuggestion.merit - secondBestSuggestion.merit > hoeffdingBound)
                        || (hoeffdingBound < this.tieThresholdOption.getValue())) {
                    shouldSplit = true;
                }
                // }
                if ((this.removePoorAttsOption != null)
                        && this.removePoorAttsOption.isSet()) {
                    Set<Integer> poorAtts = new HashSet<Integer>();
                    // scan 1 - add any poor to set
                    for (int i = 0; i < bestSplitSuggestions.length; i++) {
                        if (bestSplitSuggestions[i].splitTest != null) {
                            int[] splitAtts = bestSplitSuggestions[i].splitTest.getAttsTestDependsOn();
                            if (splitAtts.length == 1) {
                                if (bestSuggestion.merit
                                        - bestSplitSuggestions[i].merit > hoeffdingBound) {
                                    poorAtts.add(new Integer(splitAtts[0]));
                                }
                            }
                        }
                    }
                    // scan 2 - remove good ones from set
                    for (int i = 0; i < bestSplitSuggestions.length; i++) {
                        if (bestSplitSuggestions[i].splitTest != null) {
                            int[] splitAtts = bestSplitSuggestions[i].splitTest.getAttsTestDependsOn();
                            if (splitAtts.length == 1) {
                                if (bestSuggestion.merit
                                        - bestSplitSuggestions[i].merit < hoeffdingBound) {
                                    poorAtts.remove(new Integer(splitAtts[0]));
                                }
                            }
                        }
                    }
                    for (int poorAtt : poorAtts) {
                        node.disableAttribute(poorAtt);
                    }
                }
            }
            if (shouldSplit) {
                AttributeSplitSuggestion splitDecision = bestSplitSuggestions[bestSplitSuggestions.length - 1];
                if (splitDecision.splitTest == null) {
                    // preprune - null wins
                    deactivateLearningNode(node, parent, parentIndex);
                } else {
                    SplitNode newSplit = newSplitNode(splitDecision.splitTest,
                            node.getObservedClassDistribution());
                    for (int i = 0; i < splitDecision.numSplits(); i++) {
                        //Unique Change of HoeffdingTree: children inherit the parent's classifier
                        Node newChild = newLearningNode(splitDecision.resultingClassDistributionFromSplit(i), ((LearningNodeClassifier) node).getClassifier());
                        newSplit.setChild(i, newChild);
                    }
                    this.activeLeafNodeCount--;
                    this.decisionNodeCount++;
                    this.activeLeafNodeCount += splitDecision.numSplits();
                    if (parent == null) {
                        this.treeRoot = newSplit;
                    } else {
                        parent.setChild(parentIndex, newSplit);
                    }
                }
                // manage memory
                enforceTrackerLimit();
            }
        }
    }
}
| Java |
/*
* MLOzaBagAdwin.java
* Copyright (C) 2012 University of Waikato, Hamilton, New Zealand
* @author Jesse Read (jesse@tsc.uc3m.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.multilabel.meta;
import moa.classifiers.Classifier;
import moa.classifiers.core.driftdetection.ADWIN;
import moa.classifiers.meta.OzaBagAdwin;
import moa.core.InstancesHeader;
import moa.core.MiscUtils;
import weka.core.Instance;
/**
* MLOzaBagAdwin: Changes the way to compute accuracy as an input for Adwin
*
* @author Jesse Read (jesse@tsc.uc3m.es)
* @version $Revision: 1 $
*/
public class MLOzaBagAdwin extends OzaBagAdwin {

    // Number of labels (classIndex + 1); -1 until a model context is seen.
    // NOTE(review): no serialVersionUID is declared in this subclass.
    protected int m_L = -1;

    /** Stores the multi-label context and propagates it to a freshly reset ensemble. */
    @Override
    public void setModelContext(InstancesHeader raw_header) {
        //set the multilabel model context
        this.modelContext = raw_header;
        m_L = raw_header.classIndex() + 1;
        // reset ensemble
        this.resetLearningImpl();
        for (int i = 0; i < this.ensemble.length; i++) {
            this.ensemble[i].setModelContext(raw_header);
            this.ensemble[i].resetLearning();
        }
    }

    /**
     * Trains every ensemble member on a Poisson(1)-weighted copy of the
     * instance, feeds each member's multi-label (intersection/union) accuracy
     * into its ADWIN detector, and replaces the worst member when a change
     * with rising error is flagged.
     */
    @Override
    public void trainOnInstanceImpl(Instance inst) {
        boolean Change = false;
        for (int i = 0; i < this.ensemble.length; i++) {
            int k = MiscUtils.poisson(1.0, this.classifierRandom);
            if (k > 0) {
                Instance weightedInst = (Instance) inst.copy();
                weightedInst.setWeight(inst.weight() * k);
                this.ensemble[i].trainOnInstance(weightedInst);
            }
            double[] prediction = this.ensemble[i].getVotesForInstance(inst);
            //Compute accuracy
            double actual[] = new double[prediction.length];
            for (short j = 0; j < prediction.length; j++) {
                actual[j] = inst.value(j);
            }
            // calculate |prediction AND actual| and |prediction OR actual|
            int p_sum = 0, r_sum = 0;
            int set_union = 0;
            int set_inter = 0;
            double t = 0.01; // relevance threshold on a member's vote
            for (int j = 0; j < prediction.length; j++) {
                int p = (prediction[j] >= t) ? 1 : 0;
                int R = (int) actual[j];
                if (p == 1) {
                    p_sum++;
                    // predt 1, real 1
                    if (R == 1) {
                        set_inter++;
                        set_union++;
                    } // predt 1, real 0
                    else {
                        set_union++;
                    }
                } else {
                    // predt 0, real 1
                    if (R == 1) {
                        set_union++;
                    } // predt 0, real 0
                    else {
                    }
                }
            }
            double accuracy = 0.0;
            if (set_union > 0) //avoid NaN
            {
                accuracy = ((double) set_inter / (double) set_union);
            }
            // Feed the error (1 - accuracy) into ADWIN; only a RISING error
            // estimate counts as a change.
            double ErrEstim = this.ADError[i].getEstimation();
            if (this.ADError[i].setInput(1.0 - accuracy)) {
                if (this.ADError[i].getEstimation() > ErrEstim) {
                    Change = true;
                }
            }
        }
        if (Change) {
            System.err.println("change!");
            // Replace the member with the highest estimated error by a fresh learner.
            double max = 0.0;
            int imax = -1;
            for (int i = 0; i < this.ensemble.length; i++) {
                if (max < this.ADError[i].getEstimation()) {
                    max = this.ADError[i].getEstimation();
                    imax = i;
                }
            }
            if (imax != -1) {
                this.ensemble[imax] = null;
                this.ensemble[imax] = (Classifier) getPreparedClassOption(this.baseLearnerOption);
                this.ensemble[imax].setModelContext(this.modelContext);
                this.ensemble[imax].trainOnInstance(inst);
                this.ADError[imax] = new ADWIN();
            }
        }
    }

    /** Sums the per-label votes of all ensemble members. */
    @Override
    public double[] getVotesForInstance(Instance x) {
        int L = x.classIndex() + 1;
        if (m_L != L) {
            m_L = L;
        }
        double y[] = new double[m_L];
        for (int i = 0; i < this.ensemble.length; i++) {
            double w[] = this.ensemble[i].getVotesForInstance(x);
            for (int j = 0; j < w.length; j++) {
                y[j] += w[j];
            }
        }
        return y;
    }
}
| Java |
/*
* MLOzaBag.java
* Copyright (C) 2012 University of Waikato, Hamilton, New Zealand
* @author Jesse Read (jesse@tsc.uc3m.es)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.multilabel.meta;
import moa.classifiers.Classifier;
import moa.classifiers.meta.OzaBag;
import moa.core.InstancesHeader;
import weka.core.Instance;
/**
* OzaBag for Multi-label data.
*
* @author Jesse Read (jesse@tsc.uc3m.es)
* @version $Revision: 1 $
*/
/**
 * OzaBag bagging ensemble adapted for multi-label data: votes of all members
 * are summed per label.
 *
 * @author Jesse Read (jesse@tsc.uc3m.es)
 * @version $Revision: 1 $
 */
public class MLOzaBag extends OzaBag {

    /** Number of labels (classIndex + 1); -1 until a model context is seen. */
    protected int m_L = -1;

    @Override
    public void resetLearningImpl() {
        // Build a fresh ensemble of copies of the (reset) base learner.
        Classifier base = (Classifier) getPreparedClassOption(this.baseLearnerOption);
        base.resetLearning();
        this.ensemble = new Classifier[this.ensembleSizeOption.getValue()];
        for (int member = 0; member < this.ensemble.length; member++) {
            this.ensemble[member] = base.copy();
        }
    }

    @Override
    public void setModelContext(InstancesHeader raw_header) {
        // Record the multi-label context, then propagate it to a freshly
        // reset ensemble.
        this.modelContext = raw_header;
        m_L = raw_header.classIndex() + 1;
        this.resetLearningImpl();
        for (Classifier member : this.ensemble) {
            member.setModelContext(raw_header);
            member.resetLearning();
        }
    }

    @Override // @note don't need this here
    public boolean isRandomizable() {
        return true;
    }

    /** Sums the per-label votes of all ensemble members. */
    @Override
    public double[] getVotesForInstance(Instance x) {
        int labelCount = x.classIndex() + 1;
        if (m_L != labelCount) {
            m_L = labelCount;
        }
        double[] votes = new double[m_L];
        for (Classifier member : this.ensemble) {
            double[] memberVotes = member.getVotesForInstance(x);
            for (int j = 0; j < memberVotes.length; j++) {
                votes[j] += memberVotes[j];
            }
        }
        return votes;
    }
}
| Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* MedianOfWidestDimension.java
* Copyright (C) 2007-2012 University of Waikato, Hamilton, New Zealand
*/
package moa.classifiers.lazy.neighboursearch.kdtrees;
/**
<!-- globalinfo-start -->
* The class that splits a KDTree node based on the median value of a dimension in which the node's points have the widest spread.<br/>
* <br/>
* For more information see also:<br/>
* <br/>
* Jerome H. Friedman, Jon Luis Bentley, Raphael Ari Finkel (1977). An Algorithm for Finding Best Matches in Logarithmic Expected Time. ACM Transactions on Mathematics Software. 3(3):209-226.
* <p/>
<!-- globalinfo-end -->
*
<!-- technical-bibtex-start -->
* BibTeX:
* <pre>
* @article{Friedman1977,
* author = {Jerome H. Friedman and Jon Luis Bentley and Raphael Ari Finkel},
* journal = {ACM Transactions on Mathematics Software},
* month = {September},
* number = {3},
* pages = {209-226},
* title = {An Algorithm for Finding Best Matches in Logarithmic Expected Time},
* volume = {3},
* year = {1977}
* }
* </pre>
* <p/>
<!-- technical-bibtex-end -->
*
<!-- options-start -->
<!-- options-end -->
*
* @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @version $Revision: 8034 $
*/
public class MedianOfWidestDimension
  extends KDTreeNodeSplitter {

  /** for serialization. */
  private static final long serialVersionUID = 1383443320160540663L;

  /**
   * Returns a string describing this nearest neighbour search algorithm.
   *
   * @return a description of the algorithm for displaying in the
   *         explorer/experimenter gui
   */
  public String globalInfo() {
    return
        "The class that splits a KDTree node based on the median value of "
      + "a dimension in which the node's points have the widest spread.\n\n"
      + "For more information see also:\n\n";
  }

  /**
   * Splits a node into two based on the median value of the dimension
   * in which the points have the widest spread. After splitting two
   * new nodes are created and correctly initialised. And, node.left
   * and node.right are set appropriately.
   *
   * @param node The node to split.
   * @param numNodesCreated The number of nodes that so far have been
   *          created for the tree, so that the newly created nodes are
   *          assigned correct/meaningful node numbers/ids.
   * @param nodeRanges The attributes' range for the points inside
   *          the node that is to be split.
   * @param universe The attributes' range for the whole point-space.
   * @throws Exception If there is some problem in splitting the
   *           given node.
   */
  public void splitNode(KDTreeNode node, int numNodesCreated,
      double[][] nodeRanges, double[][] universe) throws Exception {
    correctlyInitialized();
    // Split on the dimension with the widest spread (inherited helper).
    int splitDim = widestDim(nodeRanges, universe);
    // In this case the median is defined to be either the middle value (in
    // case of an odd number of values) or the left of the two middle values
    // (in case of an even number of values). medianIdxIdx is the position in
    // the master index array where that median belongs after selection.
    int medianIdxIdx = node.m_Start + (node.m_End-node.m_Start)/2;
    // select() finds the k-th smallest element (k is 1-based here) and also
    // re-arranges the node's portion of the master index array so elements
    // left of the median are <= it and those to the right are >= it
    // (with duplicates, values equal to the median may sit on either side).
    int medianIdx = select(splitDim, m_InstList, node.m_Start, node.m_End, (node.m_End-node.m_Start)/2+1);
    node.m_SplitDim = splitDim;
    node.m_SplitValue = m_Instances.instance(m_InstList[medianIdx]).value(splitDim);
    // Left child covers [m_Start, medianIdxIdx], right child covers
    // [medianIdxIdx+1, m_End]; ids are numNodesCreated+1 / +2.
    node.m_Left = new KDTreeNode(numNodesCreated+1, node.m_Start, medianIdxIdx,
        m_EuclideanDistance.initializeRanges(m_InstList, node.m_Start, medianIdxIdx));
    node.m_Right = new KDTreeNode(numNodesCreated+2, medianIdxIdx+1, node.m_End,
        m_EuclideanDistance.initializeRanges(m_InstList, medianIdxIdx+1, node.m_End));
  }

  /**
   * Partitions the instances around a pivot (Hoare-style scheme using the
   * middle element's value as pivot). Used by quicksort and
   * kthSmallestValue.
   *
   * @param attIdx The attribute/dimension based on which the
   *          instances should be partitioned.
   * @param index The master index array containing indices of the
   *          instances.
   * @param l The beginning index of the portion of master index
   *          array that should be partitioned.
   * @param r The end index of the portion of master index array
   *          that should be partitioned.
   * @return the index of the middle element
   */
  protected int partition(int attIdx, int[] index, int l, int r) {
    double pivot = m_Instances.instance(index[(l + r) / 2]).value(attIdx);
    int help;
    while (l < r) {
      // Scan from the left for a value >= pivot ...
      while ((m_Instances.instance(index[l]).value(attIdx) < pivot) && (l < r)) {
        l++;
      }
      // ... and from the right for a value <= pivot.
      while ((m_Instances.instance(index[r]).value(attIdx) > pivot) && (l < r)) {
        r--;
      }
      if (l < r) {
        // Swap the out-of-place pair and keep scanning inwards.
        help = index[l];
        index[l] = index[r];
        index[r] = help;
        l++;
        r--;
      }
    }
    // Ensure the returned boundary element is not greater than the pivot.
    if ((l == r) && (m_Instances.instance(index[r]).value(attIdx) > pivot)) {
      r--;
    }
    return r;
  }

  /**
   * Implements computation of the kth-smallest element according
   * to Manber's "Introduction to Algorithms" (quickselect).
   *
   * @param attIdx The dimension/attribute of the instances in
   *          which to find the kth-smallest element.
   * @param indices The master index array containing indices of
   *          the instances.
   * @param left The beginning index of the portion of the master
   *          index array in which to find the kth-smallest element.
   * @param right The end index of the portion of the master index
   *          array in which to find the kth-smallest element.
   * @param k The value of k (1-based).
   * @return The index of the kth-smallest element
   */
  public int select(int attIdx, int[] indices, int left, int right, int k) {
    if (left == right) {
      return left;
    } else {
      int middle = partition(attIdx, indices, left, right);
      // If the k-th smallest lies in the left part recurse there, otherwise
      // recurse right with k reduced by the size of the left part.
      if ((middle - left + 1) >= k) {
        return select(attIdx, indices, left, middle, k);
      } else {
        return select(attIdx, indices, middle + 1, right, k - (middle - left + 1));
      }
    }
  }
}
| Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* SlidingMidPointOfWidestSide.java
* Copyright (C) 2007-2012 University of Waikato, Hamilton, New Zealand
*/
package moa.classifiers.lazy.neighboursearch.kdtrees;
/**
<!-- globalinfo-start -->
* The class that splits a node into two based on the midpoint value of the dimension in which the node's rectangle is widest. If after splitting one side is empty then it is slided towards the non-empty side until there is at least one point on the empty side.<br/>
* <br/>
* For more information see also:<br/>
* <br/>
* David M. Mount (2006). ANN Programming Manual. College Park, MD, USA.
* <p/>
<!-- globalinfo-end -->
*
<!-- technical-bibtex-start -->
* BibTeX:
* <pre>
* @manual{Mount2006,
* address = {College Park, MD, USA},
* author = {David M. Mount},
* organization = {Department of Computer Science, University of Maryland},
* title = {ANN Programming Manual},
* year = {2006},
* HTTP = {Available from http://www.cs.umd.edu/\~mount/ANN/}
* }
* </pre>
* <p/>
<!-- technical-bibtex-end -->
*
<!-- options-start -->
<!-- options-end -->
*
* @author Ashraf M. Kibriya (amk14@waikato.ac.nz)
* @version $Revision: 8034 $
*/
public class SlidingMidPointOfWidestSide
  extends KDTreeNodeSplitter {

  /** for serialization. */
  private static final long serialVersionUID = 852857628205680562L;

  /** The floating point error to tolerate in finding the widest
   * rectangular side (sides within (1 - ERR) of the widest count as ties). */
  protected static double ERR = 0.001;

  /**
   * Returns a string describing this nearest neighbour search algorithm.
   *
   * @return a description of the algorithm for displaying in the
   *         explorer/experimenter gui
   */
  public String globalInfo() {
    return
        "The class that splits a node into two based on the midpoint value of "
      + "the dimension in which the node's rectangle is widest. If after "
      + "splitting one side is empty then it is slided towards the non-empty "
      + "side until there is at least one point on the empty side.\n\n"
      + "For more information see also:\n\n";
  }

  /**
   * Splits a node into two based on the midpoint value of the dimension
   * in which the node's rectangle is widest. If after splitting one side
   * is empty then it is slided towards the non-empty side until there is
   * at least one point on the empty side. The two nodes created after the
   * whole splitting are correctly initialised. And, node.left and
   * node.right are set appropriately.
   *
   * @param node The node to split.
   * @param numNodesCreated The number of nodes that so far have been
   *          created for the tree, so that the newly created nodes are
   *          assigned correct/meaningful node numbers/ids.
   * @param nodeRanges The attributes' range for the points inside
   *          the node that is to be split.
   * @param universe The attributes' range for the whole point-space.
   * @throws Exception If there is some problem in splitting the
   *           given node (an empty child indicates a programming error).
   */
  public void splitNode(KDTreeNode node, int numNodesCreated,
      double[][] nodeRanges, double[][] universe) throws Exception {
    correctlyInitialized();
    // Lazily initialise the node's rectangle bounds from its point ranges
    // (children created below receive explicit bounds instead).
    if (node.m_NodesRectBounds == null) {
      node.m_NodesRectBounds = new double[2][node.m_NodeRanges.length];
      for (int i = 0; i < node.m_NodeRanges.length; i++) {
        node.m_NodesRectBounds[MIN][i] = node.m_NodeRanges[i][MIN];
        node.m_NodesRectBounds[MAX][i] = node.m_NodeRanges[i][MAX];
      }
    }
    // Pass 1: find the width of the widest side of the hyper rectangle,
    // skipping the class attribute and sides with zero point spread.
    double maxRectWidth = Double.NEGATIVE_INFINITY, maxPtWidth = Double.NEGATIVE_INFINITY, tempval;
    int splitDim = -1, classIdx = m_Instances.classIndex();
    for (int i = 0; i < node.m_NodesRectBounds[0].length; i++) {
      if (i == classIdx)
        continue;
      tempval = node.m_NodesRectBounds[MAX][i] - node.m_NodesRectBounds[MIN][i];
      if (m_NormalizeNodeWidth) {
        // Normalise by the attribute's width over the whole point-space.
        tempval = tempval / universe[i][WIDTH];
      }
      if (tempval > maxRectWidth && node.m_NodeRanges[i][WIDTH] > 0.0)
        maxRectWidth = tempval;
    }
    // Pass 2: among all sides within ERR tolerance of the widest rectangle
    // side, pick the dimension in which the points themselves are spread
    // widest.
    for (int i = 0; i < node.m_NodesRectBounds[0].length; i++) {
      if (i == classIdx)
        continue;
      tempval = node.m_NodesRectBounds[MAX][i] - node.m_NodesRectBounds[MIN][i];
      if (m_NormalizeNodeWidth) {
        tempval = tempval / universe[i][WIDTH];
      }
      if (tempval >= maxRectWidth * (1 - ERR)
          && node.m_NodeRanges[i][WIDTH] > 0.0) {
        if (node.m_NodeRanges[i][WIDTH] > maxPtWidth) {
          maxPtWidth = node.m_NodeRanges[i][WIDTH];
          if (m_NormalizeNodeWidth)
            maxPtWidth = maxPtWidth / universe[i][WIDTH];
          splitDim = i;
        }
      }
    }
    // Split at the midpoint of the chosen rectangle side.
    double splitVal = node.m_NodesRectBounds[MIN][splitDim]
        + (node.m_NodesRectBounds[MAX][splitDim] - node.m_NodesRectBounds[MIN][splitDim])
        * 0.5;
    // Slide the split plane inside the points' range so that neither side
    // ends up empty.
    // might want to try to slide it further to contain more than one point on
    // the
    // side that is resulting empty
    if (splitVal < node.m_NodeRanges[splitDim][MIN])
      splitVal = node.m_NodeRanges[splitDim][MIN];
    else if (splitVal >= node.m_NodeRanges[splitDim][MAX])
      splitVal = node.m_NodeRanges[splitDim][MAX]
          - node.m_NodeRanges[splitDim][WIDTH] * 0.001;
    // Re-arrange the master index array; rightStart is where the right
    // child's points begin.
    int rightStart = rearrangePoints(m_InstList, node.m_Start, node.m_End,
        splitDim, splitVal);
    // After sliding, neither child can be empty; if one is, it is a bug.
    if (rightStart == node.m_Start || rightStart > node.m_End) {
      if (rightStart == node.m_Start)
        throw new Exception("Left child is empty in node " + node.m_NodeNumber
            + ". Not possible with "
            + "SlidingMidPointofWidestSide splitting method. Please "
            + "check code.");
      else
        throw new Exception("Right child is empty in node " + node.m_NodeNumber
            + ". Not possible with "
            + "SlidingMidPointofWidestSide splitting method. Please "
            + "check code.");
    }
    node.m_SplitDim = splitDim;
    node.m_SplitValue = splitVal;
    // Left child: same rectangle with its MAX clipped to the split value.
    double[][] widths = new double[2][node.m_NodesRectBounds[0].length];
    System.arraycopy(node.m_NodesRectBounds[MIN], 0, widths[MIN], 0,
        node.m_NodesRectBounds[MIN].length);
    System.arraycopy(node.m_NodesRectBounds[MAX], 0, widths[MAX], 0,
        node.m_NodesRectBounds[MAX].length);
    widths[MAX][splitDim] = splitVal;
    node.m_Left = new KDTreeNode(numNodesCreated + 1, node.m_Start,
        rightStart - 1, m_EuclideanDistance.initializeRanges(m_InstList,
            node.m_Start, rightStart - 1), widths);
    // Right child: same rectangle with its MIN raised to the split value.
    widths = new double[2][node.m_NodesRectBounds[0].length];
    System.arraycopy(node.m_NodesRectBounds[MIN], 0, widths[MIN], 0,
        node.m_NodesRectBounds[MIN].length);
    System.arraycopy(node.m_NodesRectBounds[MAX], 0, widths[MAX], 0,
        node.m_NodesRectBounds[MAX].length);
    widths[MIN][splitDim] = splitVal;
    node.m_Right = new KDTreeNode(numNodesCreated + 2, rightStart, node.m_End,
        m_EuclideanDistance.initializeRanges(m_InstList, rightStart, node.m_End), widths);
  }

  /**
   * Re-arranges the indices array such that the points &lt;= to the splitVal
   * are on the left of the array and those &gt; the splitVal are on the right
   * (stable single-pass partition).
   *
   * @param indices The master index array.
   * @param startidx The beginning index of portion of indices that needs
   *          re-arranging.
   * @param endidx The end index of portion of indices that needs
   *          re-arranging.
   * @param splitDim The split dimension/attribute.
   * @param splitVal The split value.
   * @return The startIdx of the points &gt; the splitVal (the points
   *         belonging to the right child of the node).
   */
  protected int rearrangePoints(int[] indices, final int startidx,
      final int endidx, final int splitDim, final double splitVal) {
    int tmp, left = startidx - 1;
    for (int i = startidx; i <= endidx; i++) {
      // Values <= splitVal are swapped into the growing left region.
      if (m_EuclideanDistance.valueIsSmallerEqual(m_Instances
          .instance(indices[i]), splitDim, splitVal)) {
        left++;
        tmp = indices[left];
        indices[left] = indices[i];
        indices[i] = tmp;
      }// end valueIsSmallerEqual
    }// endfor
    return left + 1;
  }
}
| Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* KMeansInpiredMethod.java
* Copyright (C) 2007-2012 University of Waikato, Hamilton, New Zealand
*/
package moa.classifiers.lazy.neighboursearch.kdtrees;
import weka.core.Instance;
import weka.core.Instances;
/**
<!-- globalinfo-start -->
* The class that splits a node into two such that the overall sum of squared distances of points to their centres on both sides of the (axis-parallel) splitting plane is minimum.<br/>
* <br/>
* For more information see also:<br/>
* <br/>
* Ashraf Masood Kibriya (2007). Fast Algorithms for Nearest Neighbour Search. Hamilton, New Zealand.
* <p/>
<!-- globalinfo-end -->
*
<!-- technical-bibtex-start -->
* BibTeX:
* <pre>
* @mastersthesis{Kibriya2007,
* address = {Hamilton, New Zealand},
* author = {Ashraf Masood Kibriya},
* school = {Department of Computer Science, School of Computing and Mathematical Sciences, University of Waikato},
* title = {Fast Algorithms for Nearest Neighbour Search},
* year = {2007}
* }
* </pre>
* <p/>
<!-- technical-bibtex-end -->
*
<!-- options-start -->
<!-- options-end -->
*
* @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @version $Revision: 8034 $
*/
// NOTE(review): the class name spells "Inpired" (missing 's'); it is kept
// as-is because renaming would break external references to this class.
public class KMeansInpiredMethod
  extends KDTreeNodeSplitter {

  /** for serialization. */
  private static final long serialVersionUID = -866783749124714304L;

  /**
   * Returns a string describing this nearest neighbour search algorithm.
   *
   * @return a description of the algorithm for displaying in the
   *         explorer/experimenter gui
   */
  public String globalInfo() {
    return
        "The class that splits a node into two such that the overall sum "
      + "of squared distances of points to their centres on both sides "
      + "of the (axis-parallel) splitting plane is minimum.\n\n"
      + "For more information see also:\n\n";
  }

  /**
   * Splits a node into two such that the overall sum of squared distances
   * of points to their centres on both sides of the (axis-parallel)
   * splitting plane is minimum. The two nodes created after the whole
   * splitting are correctly initialised. And, node.left and node.right
   * are set appropriately.
   *
   * For each candidate dimension the points are sorted along it and every
   * split position is evaluated incrementally: running per-attribute sums
   * and squared sums are moved from the "right" side to the "left" side one
   * point at a time, so each candidate's within-side variance is obtained
   * in O(numAttributes) rather than by re-scanning all points.
   *
   * @param node The node to split.
   * @param numNodesCreated The number of nodes that so far have been
   *          created for the tree, so that the newly created nodes are
   *          assigned correct/meaningful node numbers/ids.
   * @param nodeRanges The attributes' range for the points inside
   *          the node that is to be split.
   * @param universe The attributes' range for the whole point-space.
   * @throws Exception If there is some problem in splitting the
   *           given node (an empty child indicates a programming error).
   */
  public void splitNode(KDTreeNode node, int numNodesCreated,
      double[][] nodeRanges, double[][] universe) throws Exception {
    correctlyInitialized();
    int splitDim = -1;
    double splitVal = Double.NEGATIVE_INFINITY;
    // Per-attribute running sums and squared sums for the two sides of the
    // candidate split, plus the best (minimum) total within-side variance.
    double leftAttSum[] = new double[m_Instances.numAttributes()],
        rightAttSum[] = new double[m_Instances.numAttributes()],
        leftAttSqSum[] = new double[m_Instances.numAttributes()],
        rightAttSqSum[] = new double[m_Instances.numAttributes()],
        rightSqMean, leftSqMean, leftSqSum, rightSqSum,
        minSum = Double.POSITIVE_INFINITY, val;
    for (int dim = 0; dim < m_Instances.numAttributes(); dim++) {
      // m_MaxRelativeWidth in KDTree ensure there'll be atleast one dim with
      // width > 0.0
      if (node.m_NodeRanges[dim][WIDTH] == 0.0
          || dim == m_Instances.classIndex())
        continue;
      // Sort the node's portion of the master index array along this dim so
      // each split position corresponds to a contiguous left/right partition.
      quickSort(m_Instances, m_InstList, dim, node.m_Start, node.m_End);
      // Initialise the running sums: everything starts on the right side.
      for (int i = node.m_Start; i <= node.m_End; i++) {
        for (int j = 0; j < m_Instances.numAttributes(); j++) {
          if (j == m_Instances.classIndex())
            continue;
          val = m_Instances.instance(m_InstList[i]).value(j);
          if (m_NormalizeNodeWidth) {
            // Degenerate universe range: treat the value as 0 to avoid NaN.
            if (Double.isNaN(universe[j][MIN])
                || universe[j][MIN] == universe[j][MAX])
              val = 0.0;
            else
              val = ((val - universe[j][MIN]) / universe[j][WIDTH]); // normalizing
            // value
          }
          if (i == node.m_Start) {
            // Reset accumulators at the start of each dimension's scan.
            leftAttSum[j] = rightAttSum[j] = leftAttSqSum[j] = rightAttSqSum[j] = 0.0;
          }
          rightAttSum[j] += val;
          rightAttSqSum[j] += val * val;
        }
      }
      // Evaluate each split position by moving one point from right to left.
      for (int i = node.m_Start; i <= node.m_End - 1; i++) {
        Instance inst = m_Instances.instance(m_InstList[i]);
        leftSqSum = rightSqSum = 0.0;
        for (int j = 0; j < m_Instances.numAttributes(); j++) {
          if (j == m_Instances.classIndex())
            continue;
          val = inst.value(j);
          if (m_NormalizeNodeWidth) {
            if (Double.isNaN(universe[j][MIN])
                || universe[j][MIN] == universe[j][MAX])
              val = 0.0;
            else
              val = ((val - universe[j][MIN]) / universe[j][WIDTH]); // normalizing
            // value
          }
          leftAttSum[j] += val;
          rightAttSum[j] -= val;
          leftAttSqSum[j] += val * val;
          rightAttSqSum[j] -= val * val;
          // Within-side variance via sum(x^2) - n * mean^2 for each side.
          leftSqMean = leftAttSum[j] / (i - node.m_Start + 1);
          leftSqMean *= leftSqMean;
          rightSqMean = rightAttSum[j] / (node.m_End - i);
          rightSqMean *= rightSqMean;
          leftSqSum += leftAttSqSum[j] - (i - node.m_Start + 1) * leftSqMean;
          rightSqSum += rightAttSqSum[j] - (node.m_End - i) * rightSqMean;
        }
        // Keep the split with the smallest total sum of squared distances;
        // the split value is the midpoint between the two adjacent points.
        if (minSum > (leftSqSum + rightSqSum)) {
          minSum = leftSqSum + rightSqSum;
          if (i < node.m_End)
            splitVal = (m_Instances.instance(m_InstList[i]).value(dim) + m_Instances
                .instance(m_InstList[i + 1]).value(dim)) / 2;
          else
            splitVal = m_Instances.instance(m_InstList[i]).value(dim);
          splitDim = dim;
        }
      }// end for instance i
    }// end for attribute dim
    // Re-arrange the master index array around the chosen split; rightStart
    // is where the right child's points begin.
    int rightStart = rearrangePoints(m_InstList, node.m_Start, node.m_End,
        splitDim, splitVal);
    if (rightStart == node.m_Start || rightStart > node.m_End) {
      // Debug dump before failing; an empty child should be impossible here.
      // NOTE(review): the two messages below spell the method name
      // inconsistently ("KMeanInspiredMethod" vs "KMeansInspiredMethod");
      // they are runtime strings and are left untouched.
      System.out.println("node.m_Start: " + node.m_Start + " node.m_End: "
          + node.m_End + " splitDim: " + splitDim + " splitVal: " + splitVal
          + " node.min: " + node.m_NodeRanges[splitDim][MIN] + " node.max: "
          + node.m_NodeRanges[splitDim][MAX] + " node.numInstances: "
          + node.numInstances());
      if (rightStart == node.m_Start)
        throw new Exception("Left child is empty in node " + node.m_NodeNumber
            + ". Not possible with "
            + "KMeanInspiredMethod splitting method. Please " + "check code.");
      else
        throw new Exception("Right child is empty in node " + node.m_NodeNumber
            + ". Not possible with "
            + "KMeansInspiredMethod splitting method. Please " + "check code.");
    }
    node.m_SplitDim = splitDim;
    node.m_SplitValue = splitVal;
    // Left child covers [m_Start, rightStart-1], right child covers
    // [rightStart, m_End]; ids are numNodesCreated+1 / +2.
    node.m_Left = new KDTreeNode(numNodesCreated + 1, node.m_Start,
        rightStart - 1, m_EuclideanDistance.initializeRanges(m_InstList,
            node.m_Start, rightStart - 1));
    node.m_Right = new KDTreeNode(numNodesCreated + 2, rightStart, node.m_End,
        m_EuclideanDistance
            .initializeRanges(m_InstList, rightStart, node.m_End));
  }

  /**
   * Partitions the instances around a pivot (Hoare-style scheme using the
   * middle element's value as pivot). Used by quicksort and
   * kthSmallestValue.
   *
   * @param insts The instances on which the tree is (or is to be) built.
   * @param index The master index array containing indices
   *          of the instances.
   * @param attidx The attribute/dimension based on which
   *          the instances should be partitioned.
   * @param l The beginning index of the portion of master index
   *          array that should be partitioned.
   * @param r The end index of the portion of master index array
   *          that should be partitioned.
   * @return the index of the middle element
   */
  protected static int partition(Instances insts, int[] index, int attidx, int l, int r) {
    double pivot = insts.instance(index[(l + r) / 2]).value(attidx);
    int help;
    while (l < r) {
      // Scan from the left for a value >= pivot ...
      while ((insts.instance(index[l]).value(attidx) < pivot) && (l < r)) {
        l++;
      }
      // ... and from the right for a value <= pivot.
      while ((insts.instance(index[r]).value(attidx) > pivot) && (l < r)) {
        r--;
      }
      if (l < r) {
        // Swap the out-of-place pair and keep scanning inwards.
        help = index[l];
        index[l] = index[r];
        index[r] = help;
        l++;
        r--;
      }
    }
    // Ensure the returned boundary element is not greater than the pivot.
    if ((l == r) && (insts.instance(index[r]).value(attidx) > pivot)) {
      r--;
    }
    return r;
  }

  /**
   * Sorts the instances according to the given attribute/dimension.
   * The sorting is done on the master index array and not on the
   * actual instances object.
   *
   * @param insts The instances on which the tree is (or is to be) built.
   * @param indices The master index array containing indices
   *          of the instances.
   * @param attidx The dimension/attribute based on which
   *          the instances should be sorted.
   * @param left The beginning index of the portion of the master
   *          index array that needs to be sorted.
   * @param right The end index of the portion of the master index
   *          array that needs to be sorted.
   */
  protected static void quickSort(Instances insts, int[] indices, int attidx, int left, int right) {
    if (left < right) {
      int middle = partition(insts, indices, attidx, left, right);
      quickSort(insts, indices, attidx, left, middle);
      quickSort(insts, indices, attidx, middle + 1, right);
    }
  }

  /**
   * Method to validate the sorting done by quickSort(). Debug-only helper;
   * not referenced by any visible code path in this class.
   *
   * @param insts The instances on which the tree is (or is to be) built.
   * @param indices The master index array containing indices
   *          of the instances.
   * @param attidx The dimension/attribute based on which
   *          the instances should be sorted.
   * @param start The start of the portion in master index
   *          array that needs to be sorted.
   * @param end The end of the portion in master index
   *          array that needs to be sorted.
   * @throws Exception If the indices of the instances
   *           are not in sorted order.
   */
  private static void checkSort(Instances insts, int[] indices, int attidx,
      int start, int end) throws Exception {
    for(int i=start+1; i<=end; i++) {
      if( insts.instance(indices[i-1]).value(attidx) >
          insts.instance(indices[i]).value(attidx) ) {
        // Dump diagnostic detail about the out-of-order pair before failing.
        System.out.println("value[i-1]: "+insts.instance(indices[i-1]).value(attidx));
        System.out.println("value[i]: "+insts.instance(indices[i]).value(attidx));
        System.out.println("indices[i-1]: "+indices[i-1]);
        System.out.println("indices[i]: "+indices[i]);
        System.out.println("i: "+i);
        if(insts.instance(indices[i-1]).value(attidx) > insts.instance(indices[i]).value(attidx))
          System.out.println("value[i-1] > value[i]");
        throw new Exception("Indices not sorted correctly.");
      }//end if
    }
  }

  /**
   * Re-arranges the indices array so that in the portion of the array
   * belonging to the node to be split, the points &lt;= to the splitVal
   * are on the left of the portion and those &gt; the splitVal are on the
   * right.
   *
   * @param indices The master index array.
   * @param startidx The beginning index of portion of indices that needs
   *          re-arranging.
   * @param endidx The end index of portion of indices that needs
   *          re-arranging.
   * @param splitDim The split dimension/attribute.
   * @param splitVal The split value.
   * @return The startIdx of the points &gt; the splitVal (the points
   *         belonging to the right child of the node).
   */
  protected int rearrangePoints(int[] indices, final int startidx, final int endidx,
      final int splitDim, final double splitVal) {
    int tmp, left = startidx - 1;
    for (int i = startidx; i <= endidx; i++) {
      // Values <= splitVal are swapped into the growing left region.
      if (m_EuclideanDistance.valueIsSmallerEqual(m_Instances
          .instance(indices[i]), splitDim, splitVal)) {
        left++;
        tmp = indices[left];
        indices[left] = indices[i];
        indices[i] = tmp;
      }// end valueIsSmallerEqual
    }// endfor
    return left + 1;
  }
}
| Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* KDTreeNode.java
* Copyright (C) 2007-2012 University of Waikato, Hamilton, New Zealand
*/
package moa.classifiers.lazy.neighboursearch.kdtrees;
import java.io.Serializable;
import weka.core.RevisionHandler;
import weka.core.RevisionUtils;
/**
* A class representing a KDTree node. A node does not explicitly
* store the instances that it contains. Instead, it only stores
* the start and end index of a portion in a master index array. Each
* node is assigned a portion in the master index array that stores
* the indices of the instances that the node contains. Every time a
* node is split by the KDTree's contruction method, the instances of
* its left child are moved to the left and the instances of its
* right child are moved to the right, in the portion of the master
* index array belonging to the node. The start and end index in each
* of its children are then set accordingly within that portion so
* that each have their own portion which contains their instances.
* P.S.: The master index array is only stored in KDTree class.
*
* @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @version $Revision: 8034 $
*/
public class KDTreeNode
  implements Serializable, RevisionHandler {

  /** for serialization. */
  private static final long serialVersionUID = -3660396067582792648L;

  /** Node number/id (used only for debugging). */
  public int m_NodeNumber;

  /** Left subtree; contains instances with values smaller or equal to the split value. */
  public KDTreeNode m_Left = null;

  /** Right subtree; contains instances with values larger than the split value. */
  public KDTreeNode m_Right = null;

  /** The value this node splits on. */
  public double m_SplitValue;

  /** The attribute/dimension this node splits on. */
  public int m_SplitDim;

  /** Per-dimension lowest value, highest value and width (= high - low)
   * of the points contained in this node. */
  public double[][] m_NodeRanges;

  /** Low and high bounds of the hyper-rectangle this node describes
   * (points inside may span a smaller range). */
  public double[][] m_NodesRectBounds;

  /** Start index (inclusive) of this node's portion of the master index
   * array, which stores the indices of the instances/points the node
   * contains. */
  public int m_Start = 0;

  /** End index (inclusive) of this node's portion of the master index
   * array, which stores the indices of the instances/points the node
   * contains. */
  public int m_End = 0;

  /**
   * Creates an empty node.
   */
  public KDTreeNode() {
  }

  /**
   * Creates a node covering the given portion of the master index array.
   *
   * @param nodeNum The node number/id.
   * @param startidx The start index of node's portion
   *          in master index array.
   * @param endidx The end index of node's portion
   *          in master index array.
   * @param nodeRanges The attribute ranges of the
   *          Instances/points contained in this node.
   */
  public KDTreeNode(int nodeNum, int startidx, int endidx, double[][] nodeRanges) {
    this.m_NodeNumber = nodeNum;
    this.m_Start = startidx;
    this.m_End = endidx;
    this.m_NodeRanges = nodeRanges;
  }

  /**
   * Creates a node covering the given portion of the master index array,
   * with an explicit rectangular region.
   *
   * @param nodeNum The node number/id.
   * @param startidx The start index of node's portion
   *          in master index array.
   * @param endidx The end index of node's portion
   *          in master index array.
   * @param nodeRanges The attribute ranges of the
   *          Instances/points contained in this node.
   * @param rectBounds The range of the rectangular
   *          region in the point space that this node
   *          represents (points inside this rectangular
   *          region can have different range).
   */
  public KDTreeNode(int nodeNum, int startidx, int endidx, double[][] nodeRanges, double[][] rectBounds) {
    // Delegate the shared initialisation to the four-argument constructor.
    this(nodeNum, startidx, endidx, nodeRanges);
    this.m_NodesRectBounds = rectBounds;
  }

  /**
   * Gets the splitting dimension.
   *
   * @return splitting dimension
   */
  public int getSplitDim() {
    return this.m_SplitDim;
  }

  /**
   * Gets the splitting value.
   *
   * @return splitting value
   */
  public double getSplitValue() {
    return this.m_SplitValue;
  }

  /**
   * Checks if node is a leaf.
   *
   * @return true if it is a leaf (no left child implies no right child)
   */
  public boolean isALeaf() {
    return this.m_Left == null;
  }

  /**
   * Returns the number of Instances in the rectangular region defined
   * by this node.
   *
   * @return The number of instances in this KDTreeNode.
   */
  public int numInstances() {
    // Both bounds are inclusive.
    return this.m_End - this.m_Start + 1;
  }

  /**
   * Returns the revision string.
   *
   * @return the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 8034 $");
  }
}
| Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* MidPointOfWidestDimension.java
* Copyright (C) 2007-2012 University of Waikato, Hamilton, New Zealand
*/
package moa.classifiers.lazy.neighboursearch.kdtrees;
/**
<!-- globalinfo-start -->
* The class that splits a KDTree node based on the midpoint value of a dimension in which the node's points have the widest spread.<br/>
* <br/>
* For more information see also:<br/>
* <br/>
* Andrew Moore (1991). A tutorial on kd-trees.
* <p/>
<!-- globalinfo-end -->
*
<!-- technical-bibtex-start -->
* BibTeX:
* <pre>
* @techreport{Moore1991,
* author = {Andrew Moore},
* booktitle = {University of Cambridge Computer Laboratory Technical Report No. 209},
* howpublished = {Extract from PhD Thesis},
* title = {A tutorial on kd-trees},
* year = {1991},
* HTTP = {http://www.autonlab.org/autonweb/14665.html}
* }
* </pre>
* <p/>
<!-- technical-bibtex-end -->
*
<!-- options-start -->
<!-- options-end -->
*
* @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @version $Revision: 8034 $
*/
public class MidPointOfWidestDimension
  extends KDTreeNodeSplitter {

  /** for serialization. */
  private static final long serialVersionUID = -7617277960046591906L;

  /**
   * Returns a string describing this node splitter.
   *
   * @return a description of the splitter for displaying in the
   * explorer/experimenter gui
   */
  public String globalInfo() {
    // FIX: the description used to end with a dangling
    // "For more information see also:" -- the reference is now included.
    return
      "The class that splits a KDTree node based on the midpoint value of "
      + "a dimension in which the node's points have the widest spread.\n\n"
      + "For more information see also:\n\n"
      + "Andrew Moore (1991). A tutorial on kd-trees.";
  }

  /**
   * Splits a node into two based on the midpoint value of the dimension
   * in which the points have the widest spread. After splitting two
   * new nodes are created and correctly initialised. And, node.left
   * and node.right are set appropriately.
   *
   * @param node The node to split.
   * @param numNodesCreated The number of nodes that so far have been
   * created for the tree, so that the newly created nodes are
   * assigned correct/meaningful node numbers/ids.
   * @param nodeRanges The attributes' range for the points inside
   * the node that is to be split.
   * @param universe The attributes' range for the whole
   * point-space.
   * @throws Exception If there is some problem in splitting the
   * given node.
   */
  public void splitNode(KDTreeNode node, int numNodesCreated,
      double[][] nodeRanges, double[][] universe) throws Exception {

    correctlyInitialized();

    // dimension with the widest (possibly normalized) spread
    int splitDim = widestDim(nodeRanges, universe);
    // split at the midpoint of that dimension's range
    double splitVal = m_EuclideanDistance.getMiddle(nodeRanges[splitDim]);

    // partition this node's portion of the master index array around splitVal
    int rightStart = rearrangePoints(m_InstList, node.m_Start, node.m_End,
        splitDim, splitVal);

    // a midpoint split can never leave either side empty; if it does the
    // ranges and the partitioning are inconsistent
    if (rightStart == node.m_Start || rightStart > node.m_End) {
      if (rightStart == node.m_Start)
        throw new Exception("Left child is empty in node "
            + node.m_NodeNumber +
            ". Not possible with " +
            "MidPointofWidestDim splitting method. Please " +
            "check code.");
      else
        throw new Exception("Right child is empty in node " + node.m_NodeNumber +
            ". Not possible with " +
            "MidPointofWidestDim splitting method. Please " +
            "check code.");
    }

    node.m_SplitDim = splitDim;
    node.m_SplitValue = splitVal;

    // left child covers [m_Start, rightStart-1], right child [rightStart, m_End]
    node.m_Left = new KDTreeNode(numNodesCreated + 1, node.m_Start,
        rightStart - 1, m_EuclideanDistance.initializeRanges(m_InstList,
            node.m_Start, rightStart - 1));
    node.m_Right = new KDTreeNode(numNodesCreated + 2, rightStart, node.m_End,
        m_EuclideanDistance
            .initializeRanges(m_InstList, rightStart, node.m_End));
  }

  /**
   * Re-arranges the indices array such that the points &lt;= to the splitVal
   * are on the left of the array and those &gt; the splitVal are on the right.
   *
   * @param indices The master index array.
   * @param startidx The begining index of portion of indices that needs
   * re-arranging.
   * @param endidx The end index of portion of indices that needs
   * re-arranging.
   * @param splitDim The split dimension/attribute.
   * @param splitVal The split value.
   * @return The startIdx of the points &gt; the splitVal (the points
   * belonging to the right child of the node).
   */
  protected int rearrangePoints(int[] indices, final int startidx, final int endidx,
      final int splitDim, final double splitVal) {

    int tmp, left = startidx - 1;
    for (int i = startidx; i <= endidx; i++) {
      if (m_EuclideanDistance.valueIsSmallerEqual(m_Instances
          .instance(indices[i]), splitDim, splitVal)) {
        // swap the qualifying point into the left partition
        left++;
        tmp = indices[left];
        indices[left] = indices[i];
        indices[i] = tmp;
      }//end if valueIsSmallerEqual
    }//end for
    // first index of the right partition (points > splitVal)
    return left + 1;
  }
}
| Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* KDTreeNodeSplitter.java
* Copyright (C) 1999-2012 University of Waikato
*/
package moa.classifiers.lazy.neighboursearch.kdtrees;
import java.io.Serializable;
import java.util.Enumeration;
import java.util.Vector;
import moa.classifiers.lazy.neighboursearch.EuclideanDistance;
import weka.core.Instances;
/**
* Class that splits up a KDTreeNode.
*
* @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @version $Revision: 8034 $
*/
public abstract class KDTreeNodeSplitter
  implements Serializable {

  /** The instances that'll be used for tree construction. */
  protected Instances m_Instances;

  /** The distance function used for building the tree. */
  protected EuclideanDistance m_EuclideanDistance;

  /**
   * The master index array that'll be reshuffled as nodes
   * are split and the tree is constructed.
   */
  protected int[] m_InstList;

  /**
   * Stores whether the width of a KDTree
   * node is normalized or not.
   */
  protected boolean m_NormalizeNodeWidth;

  // Constants
  /** Index of min value in an array of attributes' range. */
  public static final int MIN = EuclideanDistance.R_MIN;

  /** Index of max value in an array of attributes' range. */
  public static final int MAX = EuclideanDistance.R_MAX;

  /** Index of width value (max-min) in an array of attributes' range. */
  public static final int WIDTH = EuclideanDistance.R_WIDTH;

  /**
   * default constructor.
   */
  public KDTreeNodeSplitter() {
  }

  /**
   * Creates a new instance of KDTreeNodeSplitter.
   * @param instList Reference of the master index array.
   * @param insts The set of training instances on which
   * the tree is built.
   * @param e The EuclideanDistance object that is used
   * in tree contruction.
   */
  public KDTreeNodeSplitter(int[] instList, Instances insts, EuclideanDistance e) {
    m_InstList = instList;
    m_Instances = insts;
    m_EuclideanDistance = e;
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options (this base
   * class defines none).
   */
  public Enumeration listOptions() {
    // FIX: parameterize the Vector instead of using the raw type
    return new Vector<String>().elements();
  }

  /**
   * Parses a given list of options.
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    // no options to parse in this base class
  }

  /**
   * Gets the current settings of the object.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    // no options -> empty array
    return new String[0];
  }

  /**
   * Checks whether an object of this class has been correctly
   * initialized. Performs checks to see if all the necessary
   * things (master index array, training instances, distance
   * function) have been supplied or not.
   * @throws Exception If the object has not been correctly
   * initialized.
   */
  protected void correctlyInitialized() throws Exception {
    if (m_Instances == null)
      throw new Exception("No instances supplied.");
    else if (m_InstList == null)
      throw new Exception("No instance list supplied.");
    else if (m_EuclideanDistance == null)
      throw new Exception("No Euclidean distance function supplied.");
    else if (m_Instances.numInstances() != m_InstList.length)
      throw new Exception("The supplied instance list doesn't seem to match " +
          "the supplied instances");
  }

  /**
   * Splits a node into two. After splitting two new nodes are created
   * and correctly initialised. And, node.left and node.right are
   * set appropriately.
   * @param node The node to split.
   * @param numNodesCreated The number of nodes that so far have been
   * created for the tree, so that the newly created nodes are
   * assigned correct/meaningful node numbers/ids.
   * @param nodeRanges The attributes' range for the points inside
   * the node that is to be split.
   * @param universe The attributes' range for the whole
   * point-space.
   * @throws Exception If there is some problem in splitting the
   * given node.
   */
  public abstract void splitNode(KDTreeNode node, int numNodesCreated,
      double[][] nodeRanges, double[][] universe)
      throws Exception;

  /**
   * Sets the training instances on which the tree is (or is
   * to be) built.
   * @param inst The training instances.
   */
  public void setInstances(Instances inst) {
    m_Instances = inst;
  }

  /**
   * Sets the master index array containing indices of the
   * training instances. This array will be rearranged as
   * the tree is built, so that each node is assigned a
   * portion in this array which contain the instances
   * insides the node's region.
   * @param instList The master index array.
   */
  public void setInstanceList(int[] instList) {
    m_InstList = instList;
  }

  /**
   * Sets the EuclideanDistance object to use for
   * splitting nodes.
   * @param func The EuclideanDistance object.
   */
  public void setEuclideanDistanceFunction(EuclideanDistance func) {
    m_EuclideanDistance = func;
  }

  /**
   * Sets whether a node's region is normalized
   * or not. If set to true then, when selecting
   * the widest attribute/dimension for splitting,
   * the width of each attribute/dimension,
   * of the points inside the node's region, is
   * divided by the width of that
   * attribute/dimension for the whole point-space.
   * Thus, each attribute/dimension of that node
   * is normalized.
   *
   * @param normalize Should be true if
   * normalization is required.
   */
  public void setNodeWidthNormalization(boolean normalize) {
    m_NormalizeNodeWidth = normalize;
  }

  /**
   * Returns the widest dimension. The width of each
   * dimension (for the points inside the node) is
   * normalized, if m_NormalizeNodeWidth is set to
   * true.
   * @param nodeRanges The attributes' range of the
   * points inside the node that is to be split.
   * @param universe The attributes' range for the
   * whole point-space.
   * @return The index of the attribute/dimension
   * in which the points of the node have widest
   * spread, or -1 if no suitable dimension was found.
   */
  protected int widestDim(double[][] nodeRanges, double[][] universe) {
    final int classIdx = m_Instances.classIndex();
    double widest = 0.0;
    int w = -1;

    if (m_NormalizeNodeWidth) {
      // compare widths relative to the whole point-space
      for (int i = 0; i < nodeRanges.length; i++) {
        double newWidest = nodeRanges[i][WIDTH] / universe[i][WIDTH];
        if (newWidest > widest) {
          if (i == classIdx)
            continue; // never split on the class attribute
          widest = newWidest;
          w = i;
        }
      }
    } else {
      // compare absolute widths
      for (int i = 0; i < nodeRanges.length; i++) {
        if (nodeRanges[i][WIDTH] > widest) {
          if (i == classIdx)
            continue; // never split on the class attribute
          widest = nodeRanges[i][WIDTH];
          w = i;
        }
      }
    }
    return w;
  }
}
| Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* KDTree.java
* Copyright (C) 2000-2012 University of Waikato
*
*/
package moa.classifiers.lazy.neighboursearch;
import java.util.Enumeration;
import java.util.Vector;
import moa.classifiers.lazy.neighboursearch.kdtrees.KDTreeNode;
import moa.classifiers.lazy.neighboursearch.kdtrees.KDTreeNodeSplitter;
import moa.classifiers.lazy.neighboursearch.kdtrees.SlidingMidPointOfWidestSide;
import weka.core.Instance;
import weka.core.Instances;
/**
<!-- globalinfo-start -->
* Class implementing the KDTree search algorithm for nearest neighbour search.<br/>
* The connection to dataset is only a reference. For the tree structure the indexes are stored in an array. <br/>
* Building the tree:<br/>
* If a node has <maximal-inst-number> (option -L) instances no further splitting is done. Also if the split would leave one side empty, the branch is not split any further even if the instances in the resulting node are more than <maximal-inst-number> instances.<br/>
* **PLEASE NOTE:** The algorithm can not handle missing values, so it is advisable to run ReplaceMissingValues filter if there are any missing values in the dataset.<br/>
* <br/>
* For more information see:<br/>
* <br/>
* Jerome H. Friedman, Jon Louis Bentley, Raphael Ari Finkel (1977). An Algorithm for Finding Best Matches in Logarithmic Expected Time. ACM Transactions on Mathematical Software. 3(3):209-226.<br/>
* <br/>
* Andrew Moore (1991). A tutorial on kd-trees.
* <p/>
<!-- globalinfo-end -->
*
<!-- technical-bibtex-start -->
* BibTeX:
* <pre>
* @article{Friedman1977,
* author = {Jerome H. Friedman and Jon Louis Bentley and Raphael Ari Finkel},
* journal = {ACM Transactions on Mathematical Software},
* month = {September},
* number = {3},
* pages = {209-226},
* title = {An Algorithm for Finding Best Matches in Logarithmic Expected Time},
* volume = {3},
* year = {1977}
* }
*
* @techreport{Moore1991,
* author = {Andrew Moore},
* booktitle = {University of Cambridge Computer Laboratory Technical Report No. 209},
* howpublished = {Extract from PhD Thesis},
* title = {A tutorial on kd-trees},
* year = {1991},
* HTTP = {Available from http://www.autonlab.org/autonweb/14665.html}
* }
* </pre>
* <p/>
<!-- technical-bibtex-end -->
*
<!-- options-start -->
* Valid options are: <p/>
*
* <pre> -S <classname and options>
* Node splitting method to use.
* (default: weka.core.neighboursearch.kdtrees.SlidingMidPointOfWidestSide)</pre>
*
* <pre> -W <value>
* Set minimal width of a box
* (default: 1.0E-2).</pre>
*
* <pre> -L
* Maximal number of instances in a leaf
* (default: 40).</pre>
*
* <pre> -N
* Normalizing will be done
* (Select dimension for split, with normalising to universe).</pre>
*
<!-- options-end -->
*
* @author Gabi Schmidberger (gabi[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @author Malcolm Ware (mfw4[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @version $Revision: 8034 $
*/
public class KDTree
extends NearestNeighbourSearch {
  /** For serialization. */
  private static final long serialVersionUID = 1505717283763272533L;

  /**
   * Array holding the distances of the nearest neighbours. It is filled up both
   * by nearestNeighbour() and kNearestNeighbours().
   */
  protected double[] m_DistanceList;

  /**
   * Indexlist of the instances of this kdtree. Instances get sorted according
   * to the splits. The nodes of the KDTree just hold their start and end
   * indices into this array.
   */
  protected int[] m_InstList;

  /** The root node of the tree. */
  protected KDTreeNode m_Root;

  /** The node splitter. */
  protected KDTreeNodeSplitter m_Splitter = new SlidingMidPointOfWidestSide();

  /** Tree stats: total node count, leaf count and maximum depth. */
  protected int m_NumNodes, m_NumLeaves, m_MaxDepth;

  // Constants
  /** The index of MIN value in attributes' range array. */
  public static final int MIN = EuclideanDistance.R_MIN;

  /** The index of MAX value in attributes' range array. */
  public static final int MAX = EuclideanDistance.R_MAX;

  /** The index of WIDTH (MAX-MIN) value in attributes' range array. */
  public static final int WIDTH = EuclideanDistance.R_WIDTH;
  /**
   * Creates a new instance of KDTree. The tree itself is built
   * later, when setInstances(Instances) is called.
   */
  public KDTree() {
    super();
  }
  /**
   * Creates a new instance of KDTree.
   * It also builds the tree on supplied set of Instances.
   * @param insts The instances/points on which the KDTree
   * should be built on.
   */
  public KDTree(Instances insts) {
    super(insts);
  }
  /**
   * Builds the KDTree on the supplied set of instances/points. It
   * is adviseable to run the replace missing attributes filter
   * on the passed instances first.
   * NOTE: This method should not be called from outside this
   * class. Outside classes should call setInstances(Instances)
   * instead.
   *
   * @param instances The instances to build the tree on
   * @throws Exception if something goes wrong
   */
  protected void buildKDTree(Instances instances) throws Exception {
    checkMissing(instances); // KDTree cannot handle missing values
    // create (or re-target) the Euclidean distance function used throughout
    if (m_EuclideanDistance == null)
      m_DistanceFunction = m_EuclideanDistance = new EuclideanDistance(
          instances);
    else
      m_EuclideanDistance.setInstances(instances);
    m_Instances = instances;
    int numInst = m_Instances.numInstances();

    // Make the global index list; nodes only hold [start, end] slices of it
    m_InstList = new int[numInst];

    for (int i = 0; i < numInst; i++) {
      m_InstList[i] = i;
    }

    double[][] universe = m_EuclideanDistance.getRanges();

    // initializing internal fields of KDTreeSplitter
    m_Splitter.setInstances(m_Instances);
    m_Splitter.setInstanceList(m_InstList);
    m_Splitter.setEuclideanDistanceFunction(m_EuclideanDistance);
    m_Splitter.setNodeWidthNormalization(m_NormalizeNodeWidth);

    // building tree: a single root that covers all instances, then recurse
    m_NumNodes = m_NumLeaves = 1;
    m_MaxDepth = 0;
    m_Root = new KDTreeNode(m_NumNodes, 0, m_Instances.numInstances() - 1,
        universe);

    splitNodes(m_Root, universe, m_MaxDepth + 1);
  }
  /**
   * Recursively splits nodes of a tree starting from the supplied node.
   * The splitting stops for any node for which the number of instances/points
   * falls below a given threshold (given by m_MaxInstInLeaf), or if the
   * maximum relative width/range of the instances/points
   * (i.e. max_i(max(att_i) - min(att_i)) ) falls below a given threshold
   * (given by m_MinBoxRelWidth).
   *
   * @param node The node to start splitting from.
   * @param universe The attribute ranges of the whole dataset.
   * @param depth The depth of the supplied node.
   * @throws Exception If there is some problem
   * splitting.
   */
  protected void splitNodes(KDTreeNode node, double[][] universe,
      int depth) throws Exception {
    double[][] nodeRanges = m_EuclideanDistance.initializeRanges(m_InstList,
        node.m_Start, node.m_End);

    // stop recursion: node small enough, or its box is (relatively) too thin
    if (node.numInstances() <= m_MaxInstInLeaf
        || getMaxRelativeNodeWidth(nodeRanges, universe) <= m_MinBoxRelWidth)
      return;

    // splitting a node so it is no longer a leaf
    m_NumLeaves--;

    if (depth > m_MaxDepth)
      m_MaxDepth = depth;

    // delegate the actual split; this creates node.m_Left and node.m_Right
    m_Splitter.splitNode(node, m_NumNodes, nodeRanges, universe);
    m_NumNodes += 2;  // two new nodes ...
    m_NumLeaves += 2; // ... which both start out as leaves

    splitNodes(node.m_Left, universe, depth + 1);

    splitNodes(node.m_Right, universe, depth + 1);
  }
  /**
   * Returns (in the supplied heap object) the k nearest
   * neighbours of the given instance starting from the give
   * tree node. &gt;k neighbours are returned if there are more than
   * one neighbours at the kth boundary. NOTE: This method should
   * not be used from outside this class. Outside classes should
   * call kNearestNeighbours(Instance, int).
   *
   * @param target The instance to find the nearest neighbours for.
   * @param node The KDTreeNode to start the search from.
   * @param k The number of neighbours to find.
   * @param heap The MyHeap object to store/update the kNNs found
   * during the search.
   * @param distanceToParents The distance of the supplied target
   * to the parents of the supplied tree node.
   * @throws Exception if the nearest neighbour could not be found.
   */
  protected void findNearestNeighbours(Instance target, KDTreeNode node, int k,
      MyHeap heap, double distanceToParents) throws Exception {
    if (node.isALeaf()) {
      double distance;
      // look at all the instances in this leaf
      for (int idx = node.m_Start; idx <= node.m_End; idx++) {
        if (target == m_Instances.instance(m_InstList[idx])) // for
          // hold-one-out
          // cross-validation
          continue;

        if (heap.size() < k) {
          // heap not yet full: always insert
          distance = m_EuclideanDistance.distance(target, m_Instances
              .instance(m_InstList[idx]), Double.POSITIVE_INFINITY);
          heap.put(m_InstList[idx], distance);
        } else {
          MyHeapElement temp = heap.peek();
          // cut the distance computation off at the current kth distance
          distance = m_EuclideanDistance.distance(target, m_Instances
              .instance(m_InstList[idx]), temp.distance);
          if (distance < temp.distance) {
            heap.putBySubstitute(m_InstList[idx], distance);
          } else if (distance == temp.distance) {
            // tie with the current kth neighbour: keep it as well
            heap.putKthNearest(m_InstList[idx], distance);
          }
        }// end else heap.size==k
      }// end for
    } else {
      // internal node: descend into the child on the target's side first
      KDTreeNode nearer, further;
      boolean targetInLeft = m_EuclideanDistance.valueIsSmallerEqual(target,
          node.m_SplitDim, node.m_SplitValue);

      if (targetInLeft) {
        nearer = node.m_Left;
        further = node.m_Right;
      } else {
        nearer = node.m_Right;
        further = node.m_Left;
      }
      findNearestNeighbours(target, nearer, k, heap, distanceToParents);

      // ... now look in further half if maxDist reaches into it
      if (heap.size() < k) { // if haven't found the first k
        double distanceToSplitPlane = distanceToParents
            + m_EuclideanDistance.sqDifference(node.m_SplitDim, target
                .value(node.m_SplitDim), node.m_SplitValue);
        findNearestNeighbours(target, further, k, heap, distanceToSplitPlane);
        return;
      } else { // else see if ball centered at query intersects with the other
        // side.
        double distanceToSplitPlane = distanceToParents
            + m_EuclideanDistance.sqDifference(node.m_SplitDim, target
                .value(node.m_SplitDim), node.m_SplitValue);
        if (heap.peek().distance >= distanceToSplitPlane) {
          findNearestNeighbours(target, further, k, heap, distanceToSplitPlane);
        }
      }// end else
    }// end else_if an internal node
  }
  /**
   * Returns the k nearest neighbours of the supplied instance.
   * &gt;k neighbours are returned if there are more than one
   * neighbours at the kth boundary.
   *
   * @param target The instance to find the nearest neighbours for.
   * @param k The number of neighbours to find.
   * @return The k nearest neighbours (or &gt;k if more there are than
   * one neighbours at the kth boundary).
   * @throws Exception if the nearest neighbour could not be found.
   */
  public Instances kNearestNeighbours(Instance target, int k) throws Exception {
    checkMissing(target);

    // collect candidates in a bounded heap while walking the tree
    MyHeap heap = new MyHeap(k);
    findNearestNeighbours(target, m_Root, k, heap, 0.0);

    // drain the heap into index/distance arrays, back to front, so the
    // final order is nearest first
    Instances neighbours = new Instances(m_Instances, (heap.size() + heap
        .noOfKthNearest()));
    m_DistanceList = new double[heap.size() + heap.noOfKthNearest()];
    int[] indices = new int[heap.size() + heap.noOfKthNearest()];
    int i = indices.length - 1;
    MyHeapElement h;

    // ties at the kth boundary come out last
    while (heap.noOfKthNearest() > 0) {
      h = heap.getKthNearest();
      indices[i] = h.index;
      m_DistanceList[i] = h.distance;
      i--;
    }
    while (heap.size() > 0) {
      h = heap.get();
      indices[i] = h.index;
      m_DistanceList[i] = h.distance;
      i--;
    }
    // post-process (presumably converts squared distances back to true
    // distances -- TODO confirm against EuclideanDistance)
    m_DistanceFunction.postProcessDistances(m_DistanceList);

    for (int idx = 0; idx < indices.length; idx++) {
      neighbours.add(m_Instances.instance(indices[idx]));
    }
    return neighbours;
  }
/**
* Returns the nearest neighbour of the supplied target
* instance.
*
* @param target The instance to find the nearest neighbour for.
* @return The nearest neighbour from among the previously
* supplied training instances.
* @throws Exception if the neighbours could not be found.
*/
public Instance nearestNeighbour(Instance target) throws Exception {
return (kNearestNeighbours(target, 1)).instance(0);
}
/**
* Returns the distances to the kNearest or 1 nearest neighbour currently
* found with either the kNearestNeighbours or the nearestNeighbour method.
*
* @return array containing the distances of the
* nearestNeighbours. The length and ordering of the array
* is the same as that of the instances returned by
* nearestNeighbour functions.
* @throws Exception if called before calling kNearestNeighbours or
* nearestNeighbours.
*/
public double[] getDistances() throws Exception {
if (m_Instances == null || m_DistanceList == null)
throw new Exception("The tree has not been supplied with a set of "
+ "instances or getDistances() has been called "
+ "before calling kNearestNeighbours().");
return m_DistanceList;
}
  /**
   * Builds the KDTree on the given set of instances.
   * @param instances The insts on which the KDTree is to be
   * built.
   * @throws Exception If some error occurs while
   * building the KDTree
   */
  public void setInstances(Instances instances) throws Exception {
    // let the superclass record the instances, then (re)build the tree
    super.setInstances(instances);
    buildKDTree(instances);
  }
  /**
   * Adds one instance to the KDTree. This updates the KDTree structure to take
   * into account the newly added training instance.
   * NOTE(review): the instance index inserted into the tree is
   * m_Instances.numInstances()-1, i.e. this assumes the caller has already
   * appended the instance to the training data -- confirm with callers.
   *
   * @param instance the instance to be added. Usually the newly added instance in the
   * training set.
   * @throws Exception If the instance cannot be added.
   */
  public void update(Instance instance) throws Exception { // better to change
    // to addInstance
    if (m_Instances == null)
      throw new Exception("No instances supplied yet. Have to call "
          + "setInstances(instances) with a set of Instances " + "first.");

    // widen the distance function's ranges, then descend to the right leaf
    addInstanceInfo(instance);
    addInstanceToTree(instance, m_Root);
  }
  /**
   * Recursively adds an instance to the tree starting from
   * the supplied KDTreeNode.
   * NOTE: This should not be called by outside classes,
   * outside classes should instead call update(Instance)
   * method.
   *
   * @param inst The instance to add to the tree
   * @param node The node to start the recursive search
   * from, for the leaf node where the supplied instance
   * would go.
   * @throws Exception If some error occurs while adding
   * the instance.
   */
  protected void addInstanceToTree(Instance inst, KDTreeNode node)
      throws Exception {
    if (node.isALeaf()) {
      // grow the master index array by one and splice the new instance's
      // index in right after this leaf's portion
      int instList[] = new int[m_Instances.numInstances()];
      try {
        System.arraycopy(m_InstList, 0, instList, 0, node.m_End + 1); // m_InstList.squeezeIn(m_End,
        // index);
        if (node.m_End < m_InstList.length - 1)
          System.arraycopy(m_InstList, node.m_End + 1, instList,
              node.m_End + 2, m_InstList.length - node.m_End - 1);
        instList[node.m_End + 1] = m_Instances.numInstances() - 1;
      } catch (ArrayIndexOutOfBoundsException ex) {
        System.err.println("m_InstList.length: " + m_InstList.length
            + " instList.length: " + instList.length + "node.m_End+1: "
            + (node.m_End + 1) + "m_InstList.length-node.m_End+1: "
            + (m_InstList.length - node.m_End - 1));
        throw ex;
      }
      m_InstList = instList;

      node.m_End++;
      node.m_NodeRanges = m_EuclideanDistance.updateRanges(inst,
          node.m_NodeRanges);

      m_Splitter.setInstanceList(m_InstList);

      // split this leaf node if necessary
      double[][] universe = m_EuclideanDistance.getRanges();
      if (node.numInstances() > m_MaxInstInLeaf
          && getMaxRelativeNodeWidth(node.m_NodeRanges, universe) > m_MinBoxRelWidth) {
        // NOTE(review): unlike splitNodes(), m_NumLeaves and m_MaxDepth are
        // not updated here, so tree statistics may drift after updates.
        m_Splitter.splitNode(node, m_NumNodes, node.m_NodeRanges, universe);
        m_NumNodes += 2;
      }
    }// end if node is a leaf
    else {
      if (m_EuclideanDistance.valueIsSmallerEqual(inst, node.m_SplitDim,
          node.m_SplitValue)) {
        addInstanceToTree(inst, node.m_Left);
        // instance went left: shift the index slices of the whole right
        // subtree one position to the right
        afterAddInstance(node.m_Right);
      } else
        addInstanceToTree(inst, node.m_Right);

      node.m_End++;
      node.m_NodeRanges = m_EuclideanDistance.updateRanges(inst,
          node.m_NodeRanges);
    }
  }
/**
* Corrects the start and end indices of a
* KDTreeNode after an instance is added to
* the tree. The start and end indices for
* the master index array (m_InstList)
* stored in the nodes need to be updated
* for all nodes in the subtree on the
* right of a node where the instance
* was added.
* NOTE: No outside class should call this
* method.
*
* @param node KDTreeNode whose start and end indices
* need to be updated.
*/
protected void afterAddInstance(KDTreeNode node) {
node.m_Start++;
node.m_End++;
if (!node.isALeaf()) {
afterAddInstance(node.m_Left);
afterAddInstance(node.m_Right);
}
}
  /**
   * Adds one instance to KDTree loosly. It only changes the ranges in
   * EuclideanDistance, and does not affect the structure of the KDTree.
   *
   * @param instance the new instance. Usually this is the test instance
   * supplied to update the range of attributes in the distance function.
   */
  public void addInstanceInfo(Instance instance) {
    // only widen the attribute ranges of the distance function; the tree
    // structure itself is left untouched
    m_EuclideanDistance.updateRanges(instance);
  }
/**
* Checks if there is any instance with missing values. Throws an exception if
* there is, as KDTree does not handle missing values.
*
* @param instances the instances to check
* @throws Exception if missing values are encountered
*/
protected void checkMissing(Instances instances) throws Exception {
for (int i = 0; i < instances.numInstances(); i++) {
Instance ins = instances.instance(i);
for (int j = 0; j < ins.numValues(); j++) {
if (ins.index(j) != ins.classIndex())
if (ins.isMissingSparse(j)) {
throw new Exception("ERROR: KDTree can not deal with missing "
+ "values. Please run ReplaceMissingValues filter "
+ "on the dataset before passing it on to the KDTree.");
}
}
}
}
/**
* Checks if there is any missing value in the given
* instance.
* @param ins The instance to check missing values in.
* @throws Exception If there is a missing value in the
* instance.
*/
protected void checkMissing(Instance ins) throws Exception {
for (int j = 0; j < ins.numValues(); j++) {
if (ins.index(j) != ins.classIndex())
if (ins.isMissingSparse(j)) {
throw new Exception("ERROR: KDTree can not deal with missing "
+ "values. Please run ReplaceMissingValues filter "
+ "on the dataset before passing it on to the KDTree.");
}
}
}
/**
* Returns the maximum attribute width of instances/points
* in a KDTreeNode relative to the whole dataset.
*
* @param nodeRanges The attribute ranges of the
* KDTreeNode whose maximum relative width is to be
* determined.
* @param universe The attribute ranges of the whole
* dataset (training instances + test instances so
* far encountered).
* @return The maximum relative width
*/
protected double getMaxRelativeNodeWidth(double[][] nodeRanges,
double[][] universe) {
int widest = widestDim(nodeRanges, universe);
if(widest < 0)
return 0.0;
else
return nodeRanges[widest][WIDTH] / universe[widest][WIDTH];
}
/**
* Returns the widest dimension/attribute in a
* KDTreeNode (widest after normalizing).
* @param nodeRanges The attribute ranges of
* the KDTreeNode.
* @param universe The attribute ranges of the
* whole dataset (training instances + test
* instances so far encountered).
* @return The index of the widest
* dimension/attribute.
*/
protected int widestDim(double[][] nodeRanges, double[][] universe) {
final int classIdx = m_Instances.classIndex();
double widest = 0.0;
int w = -1;
if (m_NormalizeNodeWidth) {
for (int i = 0; i < nodeRanges.length; i++) {
double newWidest = nodeRanges[i][WIDTH] / universe[i][WIDTH];
if (newWidest > widest) {
if (i == classIdx)
continue;
widest = newWidest;
w = i;
}
}
} else {
for (int i = 0; i < nodeRanges.length; i++) {
if (nodeRanges[i][WIDTH] > widest) {
if (i == classIdx)
continue;
widest = nodeRanges[i][WIDTH];
w = i;
}
}
}
return w;
}
  /**
   * Returns the size of the tree.
   *
   * @return the size of the tree (total number of nodes)
   */
  public double measureTreeSize() {
    return m_NumNodes;
  }
  /**
   * Returns the number of leaves.
   *
   * @return the number of leaf nodes in the tree
   */
  public double measureNumLeaves() {
    return m_NumLeaves;
  }
  /**
   * Returns the depth of the tree.
   *
   * @return The depth of the tree
   */
  public double measureMaxDepth() {
    return m_MaxDepth;
  }
/**
* Returns an enumeration of the additional measure names.
*
* @return an enumeration of the measure names
*/
public Enumeration enumerateMeasures() {
Vector<String> newVector = new Vector<String>();
newVector.addElement("measureTreeSize");
newVector.addElement("measureNumLeaves");
newVector.addElement("measureMaxDepth");
return newVector.elements();
}
/**
* Returns the value of the named measure.
*
* @param additionalMeasureName the name of
* the measure to query for its value.
* @return The value of the named measure
* @throws IllegalArgumentException If the named measure
* is not supported.
*/
public double getMeasure(String additionalMeasureName) {
if (additionalMeasureName.compareToIgnoreCase("measureMaxDepth") == 0) {
return measureMaxDepth();
} else if (additionalMeasureName.compareToIgnoreCase("measureTreeSize") == 0) {
return measureTreeSize();
} else if (additionalMeasureName.compareToIgnoreCase("measureNumLeaves") == 0) {
return measureNumLeaves();
} else {
throw new IllegalArgumentException(additionalMeasureName
+ " not supported (KDTree)");
}
}
  /**
   * Sets whether to calculate the performance statistics or not.
   * @param measurePerformance Should be true if performance
   * statistics are to be measured.
   */
  public void setMeasurePerformance(boolean measurePerformance) {
    m_MeasurePerformance = measurePerformance;
  }
/**
 * Assigns each instance to its closest center using the KDTree.
 *
 * @param centers the current centers
 * @param assignments output array receiving the center index for each instance
 * @param pc the threshold value for pruning
 * @throws Exception if there is some problem assigning instances to centers
 */
public void centerInstances(Instances centers, int[] assignments, double pc)
  throws Exception {
  // initially every center is a candidate for every instance
  final int numCenters = centers.numInstances();
  int[] candidateList = new int[numCenters];
  for (int c = 0; c < numCenters; c++) {
    candidateList[c] = c;
  }
  determineAssignments(m_Root, centers, candidateList, assignments, pc);
}
/**
 * Recursively assigns the instances under a node to the candidate centers.
 *
 * @param node the node to start assigning the instances from
 * @param centers all the current centers
 * @param candidates the indices of the centers this call works on
 * @param assignments output array: the center index for each instance
 * @param pc the threshold value for pruning
 * @throws Exception if there is some problem assigning instances to centers
 */
protected void determineAssignments(KDTreeNode node, Instances centers,
  int[] candidates, int[] assignments, double pc) throws Exception {
  // prune the candidate list for this node's hyperrectangle
  int[] owners = refineOwners(node, centers, candidates);
  if (owners.length == 1) {
    // a single owner: every instance of this node belongs to it
    for (int idx = node.m_Start; idx <= node.m_End; idx++) {
      assignments[m_InstList[idx]] = owners[0];
    }
  } else if (node.isALeaf()) {
    // leaf with several owners: decide instance by instance
    assignSubToCenters(node, centers, owners, assignments);
  } else {
    // interior node with several owners: recurse into both children
    determineAssignments(node.m_Left, centers, owners, assignments, pc);
    determineAssignments(node.m_Right, centers, owners, assignments, pc);
  }
}
/**
 * Narrows the candidate list down to those centers that can still own at
 * least one point inside the node's hyperrectangle.
 *
 * @param node the current tree node
 * @param centers all centers
 * @param candidates the indices of those centers that are candidates
 * @return the indices of the remaining owners, in candidate order
 * @throws Exception if some problem occurs in refining
 */
protected int[] refineOwners(KDTreeNode node, Instances centers,
  int[] candidates) throws Exception {
  int numCand = candidates.length;
  double[] distance = new double[numCand];
  boolean[] inside = new boolean[numCand];
  double minDistance = Double.POSITIVE_INFINITY;
  int ownerIndex = -1;
  // measure every candidate's distance to the hyperrectangle and
  // remember the closest one (the "owner")
  for (int i = 0; i < numCand; i++) {
    distance[i] = distanceToHrect(node, centers.instance(candidates[i]));
    inside[i] = (distance[i] == 0.0);
    if (distance[i] < minDistance) {
      minDistance = distance[i];
      ownerIndex = i;
    }
  }
  Instance owner = (Instance) centers.instance(candidates[ownerIndex]).copy();
  // collect every candidate that can still compete; the loop also passes
  // over the owner itself, which keeps the candidate order in the result
  int[] owners = new int[numCand];
  int index = 0;
  for (int i = 0; i < numCand; i++) {
    if (inside[i] || distance[i] == distance[ownerIndex]) {
      // 1. centers inside the rectangle are owners;
      // 2. centers tied with the owner's distance stay as well
      owners[index++] = candidates[i];
    } else {
      // 3. a farther center stays only if the owner cannot claim the
      // whole rectangle against it
      Instance competitor = (Instance) centers.instance(candidates[i]).copy();
      if (!candidateIsFullOwner(node, owner, competitor)) {
        owners[index++] = candidates[i];
      }
    }
  }
  int[] result = new int[index];
  System.arraycopy(owners, 0, result, 0, index);
  return result;
}
/**
 * Returns the distance between a point and the node's hyperrectangle;
 * zero if the point lies inside the rectangle.
 *
 * @param node the node whose hyperrectangle the distance is measured to
 * @param x the point
 * @return the distance
 * @throws Exception if some problem occurs in determining the distance
 */
protected double distanceToHrect(KDTreeNode node, Instance x) throws Exception {
  Instance closestPoint = (Instance) x.copy();
  // clip the copy into the rectangle; if nothing moved, x was inside
  if (clipToInsideHrect(node, closestPoint)) {
    return 0.0;
  }
  return m_EuclideanDistance.distance(closestPoint, x);
}
/**
 * Clips a point into the node's hyperrectangle: every coordinate outside
 * the rectangle is moved to the nearest boundary, coordinates already
 * inside are left untouched. The point is modified in place.
 *
 * @param node the KDTreeNode whose hyperrectangle is used
 * @param x the point to clip
 * @return true if the input point stayed unchanged (was already inside)
 */
protected boolean clipToInsideHrect(KDTreeNode node, Instance x) {
  boolean inside = true;
  for (int dim = 0; dim < m_Instances.numAttributes(); dim++) {
    // TODO treat nominals differently!??
    double lo = node.m_NodeRanges[dim][MIN];
    double hi = node.m_NodeRanges[dim][MAX];
    double val = x.value(dim);
    if (val < lo) {
      x.setValue(dim, lo);
      inside = false;
    } else if (val > hi) {
      x.setValue(dim, hi);
      inside = false;
    }
  }
  return inside;
}
/**
 * Tests whether the candidate owns the whole hyperrectangle with respect
 * to a single competitor.
 * <p>
 * The "extreme" point is the corner of the rectangle furthest from the
 * candidate in the direction of the competitor. If the candidate is
 * closer to that corner than the competitor, no point of the rectangle
 * can belong to the competitor and the candidate is full owner of the
 * rectangle with respect to it. See D. Pelleg and A. Moore,
 * 'Accelerating exact k-means Algorithms with Geometric Reasoning'.
 *
 * @param node the current KDTreeNode / hyperrectangle
 * @param candidate instance that is candidate to be owner
 * @param competitor instance that competes against the candidate
 * @return true if candidate is full owner
 * @throws Exception if some problem occurs
 */
protected boolean candidateIsFullOwner(KDTreeNode node, Instance candidate,
  Instance competitor) throws Exception {
  // build the rectangle corner furthest from the candidate towards
  // the competitor
  Instance extreme = (Instance) candidate.copy();
  for (int dim = 0; dim < m_Instances.numAttributes(); dim++) {
    boolean towardsMax = (competitor.value(dim) - candidate.value(dim)) > 0;
    extreme.setValue(dim,
      towardsMax ? node.m_NodeRanges[dim][MAX] : node.m_NodeRanges[dim][MIN]);
  }
  return m_EuclideanDistance.distance(extreme, candidate)
    < m_EuclideanDistance.distance(extreme, competitor);
}
/**
 * Assigns every instance of the given node to the closest of the listed
 * centers, as decided by the Euclidean distance function.
 *
 * @param node the KDTreeNode whose instances are to be assigned
 * @param centers all the input centers
 * @param centList the indices of the centers to consider
 * @param assignments in/out array holding the assigned center index per
 *        instance; must not be null
 * @throws Exception if there is an error assigning the instances
 */
public void assignSubToCenters(KDTreeNode node, Instances centers,
  int[] centList, int[] assignments) throws Exception {
  // todo: undecided situations
  // WARNING: assignments is an "input/output" parameter and should never
  // be null; this guard only avoids an NPE below - the re-allocated array
  // is not visible to the caller.
  if (assignments == null) {
    assignments = new int[m_Instances.numInstances()];
    for (int i = 0; i < assignments.length; i++) {
      assignments[i] = -1;
    }
  }
  // assign each instance of this node to its closest center
  for (int i = node.m_Start; i <= node.m_End; i++) {
    int instIndex = m_InstList[i];
    Instance inst = m_Instances.instance(instIndex);
    assignments[instIndex] =
      m_EuclideanDistance.closestPoint(inst, centers, centList);
  }
}
/**
 * Properties' variables =====================================================
 */
/** Whether node widths are normalized by the width of the universe. */
boolean m_NormalizeNodeWidth = true;
/** The Euclidean distance function to use (kept in sync with m_DistanceFunction). */
protected EuclideanDistance m_EuclideanDistance;
// Instance initializer: guarantees m_DistanceFunction and
// m_EuclideanDistance refer to the same EuclideanDistance object.
{ // to make sure we have only one object of EuclideanDistance
if (m_DistanceFunction instanceof EuclideanDistance)
m_EuclideanDistance = (EuclideanDistance) m_DistanceFunction;
else
m_DistanceFunction = m_EuclideanDistance = new EuclideanDistance();
}
/** Minimal relative width of a KDTree rectangle (leaf-splitting threshold). */
protected double m_MinBoxRelWidth = 1.0E-2;
/** Maximal number of instances allowed in a leaf. */
protected int m_MaxInstInLeaf = 40;
/**
* the GET and SET - functions ===============================================
*/
/**
 * Returns the tip text for the minBoxRelWidth property.
 *
 * @return the tip text for this property
 */
public String minBoxRelWidthTipText() {
  return "The minimum relative width of the box. A node is only made a "
    + "leaf if the width of the split dimension of the instances in a "
    + "node normalized over the width of the split dimension of all "
    + "the instances is less than or equal to this minimum relative width.";
}
/**
 * Sets the minimum relative box width.
 *
 * @param i the minimum relative box width
 */
public void setMinBoxRelWidth(double i) {
  m_MinBoxRelWidth = i;
}
/**
 * Returns the minimum relative box width.
 *
 * @return the minimum relative box width
 */
public double getMinBoxRelWidth() {
  return m_MinBoxRelWidth;
}
/**
 * Returns the tip text for the maxInstInLeaf property.
 *
 * @return the tip text for this property
 */
public String maxInstInLeafTipText() {
  return "The max number of instances in a leaf.";
}
/**
 * Sets the maximum number of instances in a leaf.
 *
 * @param i the maximum number of instances in a leaf
 */
public void setMaxInstInLeaf(int i) {
  m_MaxInstInLeaf = i;
}
/**
 * Returns the maximum number of instances allowed in a leaf.
 *
 * @return the maximum number of instances in a leaf
 */
public int getMaxInstInLeaf() {
  return m_MaxInstInLeaf;
}
/**
 * Returns the tip text for the normalizeNodeWidth property.
 * (Fixed the "Whether if" grammar slip in the user-visible text.)
 *
 * @return the tip text for this property
 */
public String normalizeNodeWidthTipText() {
  return "Whether the widths of the KDTree node should be normalized "
    + "by the width of the universe or not. "
    + "Where, width of the node is the range of the split attribute "
    + "based on the instances in that node, and width of the "
    + "universe is the range of the split attribute based on all the "
    + "instances (default: false).";
}
/**
 * Sets whether the widths of a KDTree node are normalized by the width
 * of the corresponding dimension in the universe.
 *
 * @param n true to use normalizing
 */
public void setNormalizeNodeWidth(boolean n) {
  m_NormalizeNodeWidth = n;
}
/**
 * Returns the normalize flag.
 *
 * @return true if node widths are normalized
 */
public boolean getNormalizeNodeWidth() {
  return m_NormalizeNodeWidth;
}
/**
 * Returns the distance function currently in use.
 *
 * @return the distance function
 */
public DistanceFunction getDistanceFunction() {
  // EuclideanDistance implements DistanceFunction, no cast needed
  return m_EuclideanDistance;
}
/**
 * Sets the distance function to use for nearest neighbour search.
 *
 * @param df the distance function to use
 * @throws Exception if df is not an EuclideanDistance
 */
public void setDistanceFunction(DistanceFunction df) throws Exception {
  if (!(df instanceof EuclideanDistance)) {
    throw new Exception("KDTree currently only works with "
      + "EuclideanDistanceFunction.");
  }
  // keep both references pointing at the same object
  m_EuclideanDistance = (EuclideanDistance) df;
  m_DistanceFunction = m_EuclideanDistance;
}
/**
 * Returns the tip text for the nodeSplitter property.
 * (Fixed the duplicated "The the" in the user-visible text.)
 *
 * @return tip text for this property suitable for
 *         displaying in the explorer/experimenter gui
 */
public String nodeSplitterTipText() {
  return "The splitting method to split the nodes of the KDTree.";
}
/**
 * Returns the splitting method currently used to split the nodes of
 * the KDTree.
 *
 * @return the KDTreeNodeSplitter currently in use
 */
public KDTreeNodeSplitter getNodeSplitter() {
  return m_Splitter;
}
/**
 * Sets the splitting method used to split the nodes of the KDTree.
 *
 * @param splitter the KDTreeNodeSplitter to use
 */
public void setNodeSplitter(KDTreeNodeSplitter splitter) {
  m_Splitter = splitter;
}
/**
 * Returns a string describing this nearest neighbour search algorithm,
 * shown in the explorer/experimenter GUI.
 *
 * @return a description of the algorithm for displaying in the
 * explorer/experimenter gui
 */
public String globalInfo() {
return
"Class implementing the KDTree search algorithm for nearest "
+ "neighbour search.\n"
+ "The connection to dataset is only a reference. For the tree "
+ "structure the indexes are stored in an array. \n"
+ "Building the tree:\n"
+ "If a node has <maximal-inst-number> (option -L) instances no "
+ "further splitting is done. Also if the split would leave one "
+ "side empty, the branch is not split any further even if the "
+ "instances in the resulting node are more than "
+ "<maximal-inst-number> instances.\n"
+ "**PLEASE NOTE:** The algorithm can not handle missing values, so it "
+ "is advisable to run ReplaceMissingValues filter if there are any "
+ "missing values in the dataset.\n\n"
+ "For more information see:\n\n";
}
}
| Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* LinearNNSearch.java
* Copyright (C) 1999-2012 University of Waikato
*/
package moa.classifiers.lazy.neighboursearch;
import weka.core.Instance;
import weka.core.Instances;
//import weka.core.Option;
//import weka.core.RevisionUtils;
//import weka.core.Utils;
/**
<!-- globalinfo-start -->
* Class implementing the brute force search algorithm for nearest neighbour search.
* <p/>
<!-- globalinfo-end -->
*
<!-- options-start -->
* Valid options are: <p/>
*
* <pre> -S
* Skip identical instances (distances equal to zero).
* </pre>
*
<!-- options-end -->
*
* @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @version $Revision: 8034 $
*/
public class LinearNNSearch
  extends NearestNeighbourSearch {

  /** for serialization. */
  private static final long serialVersionUID = 1915484723703917241L;

  /** Array holding the distances of the nearest neighbours. It is filled up
   *  both by nearestNeighbour() and kNearestNeighbours().
   */
  protected double[] m_Distances;

  /** Whether to skip instances from the neighbours that are identical to the query instance. */
  protected boolean m_SkipIdentical = false;

  /**
   * Constructor. Needs setInstances(Instances)
   * to be called before the class is usable.
   */
  public LinearNNSearch() {
    super();
  }

  /**
   * Constructor that uses the supplied set of instances.
   *
   * @param insts the instances to use
   */
  public LinearNNSearch(Instances insts) {
    super(insts);
    m_DistanceFunction.setInstances(insts);
  }

  /**
   * Returns a string describing this nearest neighbour search algorithm.
   *
   * @return a description of the algorithm for displaying in the
   *         explorer/experimenter gui
   */
  public String globalInfo() {
    return
      "Class implementing the brute force search algorithm for nearest "
      + "neighbour search.";
  }

  /**
   * Returns the tip text for the skipIdentical property.
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String skipIdenticalTipText() {
    return "Whether to skip identical instances (with distance 0 to the target)";
  }

  /**
   * Sets the property to skip identical instances (with distance zero from
   * the target) from the set of neighbours returned.
   *
   * @param skip if true, identical instances are skipped
   */
  public void setSkipIdentical(boolean skip) {
    m_SkipIdentical = skip;
  }

  /**
   * Gets whether identical instances are skipped from the neighbourhood.
   *
   * @return true if identical instances are skipped
   */
  public boolean getSkipIdentical() {
    return m_SkipIdentical;
  }

  /**
   * Returns the nearest instance in the current neighbourhood to the supplied
   * instance.
   *
   * @param target the instance to find the nearest neighbour for
   * @return the nearest instance
   * @throws Exception if the nearest neighbour could not be found
   */
  public Instance nearestNeighbour(Instance target) throws Exception {
    return (kNearestNeighbours(target, 1)).instance(0);
  }

  /**
   * Returns k nearest instances in the current neighbourhood to the supplied
   * instance. May return more than k instances when there are distance ties.
   *
   * BUG FIX: the original code inserted the LAST instance twice into the
   * heap when it was identical to the target and m_SkipIdentical was set
   * (the else-branch heap.put fell through to a second, unconditional
   * heap.put). The last identical instance is now inserted exactly once;
   * it is still kept (rather than skipped) so the result cannot be empty.
   *
   * @param target the instance to find the k nearest neighbours for
   * @param kNN the number of nearest neighbours to find
   * @return the k nearest neighbors
   * @throws Exception if the neighbours could not be found
   */
  public Instances kNearestNeighbours(Instance target, int kNN) throws Exception {
    MyHeap heap = new MyHeap(kNN);
    double distance;
    int firstkNN = 0;
    for (int i = 0; i < m_Instances.numInstances(); i++) {
      if (target == m_Instances.instance(i)) // for hold-one-out cross-validation
        continue;
      if (firstkNN < kNN) {
        // heap not yet full: cut-off is infinity
        distance = m_DistanceFunction.distance(target,
          m_Instances.instance(i), Double.POSITIVE_INFINITY);
        // skip identical instances, except the very last one so that the
        // neighbourhood cannot end up empty
        if (distance == 0.0 && m_SkipIdentical
            && i < m_Instances.numInstances() - 1)
          continue;
        heap.put(i, distance);
        firstkNN++;
      } else {
        // heap full: only instances closer than the current kth count
        MyHeapElement temp = heap.peek();
        distance = m_DistanceFunction.distance(target,
          m_Instances.instance(i), temp.distance);
        if (distance == 0.0 && m_SkipIdentical)
          continue;
        if (distance < temp.distance) {
          heap.putBySubstitute(i, distance);
        } else if (distance == temp.distance) {
          heap.putKthNearest(i, distance);
        }
      }
    }
    // drain the heap from farthest to nearest into the result arrays
    Instances neighbours =
      new Instances(m_Instances, (heap.size() + heap.noOfKthNearest()));
    m_Distances = new double[heap.size() + heap.noOfKthNearest()];
    int[] indices = new int[heap.size() + heap.noOfKthNearest()];
    int i = 1;
    MyHeapElement h;
    while (heap.noOfKthNearest() > 0) {
      h = heap.getKthNearest();
      indices[indices.length - i] = h.index;
      m_Distances[indices.length - i] = h.distance;
      i++;
    }
    while (heap.size() > 0) {
      h = heap.get();
      indices[indices.length - i] = h.index;
      m_Distances[indices.length - i] = h.distance;
      i++;
    }
    // convert squared distances back to the real scale
    m_DistanceFunction.postProcessDistances(m_Distances);
    for (int k = 0; k < indices.length; k++) {
      neighbours.add(m_Instances.instance(indices[k]));
    }
    return neighbours;
  }

  /**
   * Returns the distances of the k nearest neighbours. kNearestNeighbours
   * or nearestNeighbour must be called before calling this function,
   * otherwise an exception is thrown. The distances returned belong to the
   * last supplied target instance.
   *
   * @return array containing the distances of the nearestNeighbours;
   *         length and ordering match the instances returned by the
   *         nearestNeighbour functions
   * @throws Exception if called before calling kNearestNeighbours
   *         or nearestNeighbours
   */
  public double[] getDistances() throws Exception {
    if (m_Distances == null)
      throw new Exception("No distances available. Please call either "
        + "kNearestNeighbours or nearestNeighbours first.");
    return m_Distances;
  }

  /**
   * Sets the instances comprising the current neighbourhood.
   *
   * @param insts the set of instances on which the nearest neighbour
   *        search is carried out; usually the training set
   * @throws Exception if setting of instances fails
   */
  public void setInstances(Instances insts) throws Exception {
    m_Instances = insts;
    m_DistanceFunction.setInstances(insts);
  }

  /**
   * Updates the LinearNNSearch to cater for the newly added instance. This
   * implementation only updates the ranges of the DistanceFunction class,
   * since our set of instances is passed by reference and should already
   * contain the newly added instance.
   *
   * BUG FIX: the original exception message concatenated to
   * "...update withoutsupplying..." (missing space).
   *
   * @param ins the instance to add
   * @throws Exception if no instances have been supplied yet
   */
  public void update(Instance ins) throws Exception {
    if (m_Instances == null)
      throw new Exception("No instances supplied yet. Cannot update without "
        + "supplying a set of instances first.");
    m_DistanceFunction.update(ins);
  }

  /**
   * Adds the given instance's info. This implementation updates the range
   * datastructures of the DistanceFunction class.
   *
   * @param ins the instance to add the information of; usually the test
   *        instance supplied to update the range of attributes in the
   *        distance function
   */
  public void addInstanceInfo(Instance ins) {
    if (m_Instances != null)
      try {
        update(ins);
      } catch (Exception ex) {
        ex.printStackTrace();
      }
  }
}
| Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* EuclideanDistance.java
* Copyright (C) 1999-2012 University of Waikato, Hamilton, New Zealand
*
*/
package moa.classifiers.lazy.neighboursearch;
import weka.core.Instance;
import weka.core.Instances;
/**
<!-- globalinfo-start -->
* Implementing Euclidean distance (or similarity) function.<br/>
* <br/>
* One object defines not one distance but the data model in which the distances between objects of that data model can be computed.<br/>
* <br/>
* Attention: For efficiency reasons the use of consistency checks (like are the data models of the two instances exactly the same), is low.<br/>
* <br/>
* For more information, see:<br/>
* <br/>
* Wikipedia. Euclidean distance. URL http://en.wikipedia.org/wiki/Euclidean_distance.
* <p/>
<!-- globalinfo-end -->
*
<!-- technical-bibtex-start -->
* BibTeX:
* <pre>
* @misc{missing_id,
* author = {Wikipedia},
* title = {Euclidean distance},
* URL = {http://en.wikipedia.org/wiki/Euclidean_distance}
* }
* </pre>
* <p/>
<!-- technical-bibtex-end -->
*
<!-- options-start -->
* Valid options are: <p/>
*
* <pre> -D
* Turns off the normalization of attribute
* values in distance calculation.</pre>
*
* <pre> -R <col1,col2-col4,...>
* Specifies list of columns to used in the calculation of the
* distance. 'first' and 'last' are valid indices.
* (default: first-last)</pre>
*
* <pre> -V
* Invert matching sense of column indices.</pre>
*
<!-- options-end -->
*
* @author Gabi Schmidberger (gabi@cs.waikato.ac.nz)
* @author Ashraf M. Kibriya (amk14@cs.waikato.ac.nz)
* @author FracPete (fracpete at waikato dot ac dot nz)
* @version $Revision: 8034 $
*/
public class EuclideanDistance
  extends NormalizableDistance
  implements Cloneable {

  /** for serialization. */
  private static final long serialVersionUID = 1068606253458807903L;

  /**
   * Constructs an Euclidean Distance object; Instances must still be set.
   */
  public EuclideanDistance() {
    super();
  }

  /**
   * Constructs an Euclidean Distance object and automatically initializes
   * the ranges.
   *
   * @param data the instances the distance function should work on
   */
  public EuclideanDistance(Instances data) {
    super(data);
  }

  /**
   * Returns a string describing this object.
   *
   * @return a description of the evaluator suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return
      "Implementing Euclidean distance (or similarity) function.\n\n"
      + "One object defines not one distance but the data model in which "
      + "the distances between objects of that data model can be computed.\n\n"
      + "Attention: For efficiency reasons the use of consistency checks "
      + "(like are the data models of the two instances exactly the same), "
      + "is low.\n\n";
  }

  /**
   * Calculates the distance between two instances.
   *
   * @param first the first instance
   * @param second the second instance
   * @return the distance between the two given instances
   */
  public double distance(Instance first, Instance second) {
    return Math.sqrt(distance(first, second, Double.POSITIVE_INFINITY));
  }

  /**
   * Updates the current distance calculated so far with the new difference
   * between two attributes. The difference between the attributes was
   * calculated with the difference(int,double,double) method.
   *
   * @param currDist the current distance calculated so far
   * @param diff the difference between two new attributes
   * @return the updated distance
   * @see #difference(int, double, double)
   */
  protected double updateDistance(double currDist, double diff) {
    // accumulate the SQUARED difference; callers take the root later
    return currDist + diff * diff;
  }

  /**
   * Does post processing of the distances (if necessary) returned by
   * distance(Instance first, Instance second, double cutOffValue). That
   * function returns the squared distance to avoid inaccuracies arising
   * from floating point comparison, so the square root is taken here.
   *
   * @param distances the distances to post-process; modified in place
   */
  public void postProcessDistances(double distances[]) {
    for (int i = 0; i < distances.length; i++) {
      distances[i] = Math.sqrt(distances[i]);
    }
  }

  /**
   * Returns the squared difference of two values of an attribute.
   *
   * @param index the attribute index
   * @param val1 the first value
   * @param val2 the second value
   * @return the squared difference
   */
  public double sqDifference(int index, double val1, double val2) {
    double val = difference(index, val1, val2);
    return val * val;
  }

  /**
   * Returns the value in the middle of the two parameter values.
   *
   * @param ranges the ranges of this dimension
   * @return the middle value
   */
  public double getMiddle(double[] ranges) {
    return ranges[R_MIN] + ranges[R_WIDTH] * 0.5;
  }

  /**
   * Returns the index of the closest point to the current instance.
   * The index refers to the Instances object that is the second parameter.
   *
   * BUG FIX: minDist was initialized with Integer.MAX_VALUE; a (squared)
   * distance can legitimately exceed that, which would have returned the
   * wrong point. Double.POSITIVE_INFINITY is the correct sentinel.
   *
   * @param instance the instance to assign a cluster to
   * @param allPoints all points
   * @param pointList the list of points
   * @return the index of the closest point
   * @throws Exception if something goes wrong
   */
  public int closestPoint(Instance instance, Instances allPoints,
    int[] pointList) throws Exception {
    double minDist = Double.POSITIVE_INFINITY;
    int bestPoint = 0;
    for (int i = 0; i < pointList.length; i++) {
      double dist = distance(instance, allPoints.instance(pointList[i]),
        Double.POSITIVE_INFINITY);
      if (dist < minDist) {
        minDist = dist;
        bestPoint = i;
      }
    }
    return pointList[bestPoint];
  }

  /**
   * Returns true if the value of the given dimension is smaller than or
   * equal to the value to be compared with.
   *
   * @param instance the instance the value should be taken from
   * @param dim the dimension of the value
   * @param value the value to compare with
   * @return true if value of instance is smaller or equal to value
   */
  public boolean valueIsSmallerEqual(Instance instance, int dim,
    double value) { // This stays
    return instance.value(dim) <= value;
  }
}
| Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* DistanceFunction.java
* Copyright (C) 1999-2012 University of Waikato, Hamilton, New Zealand
*
*/
package moa.classifiers.lazy.neighboursearch;
import weka.core.Instance;
import weka.core.Instances;
/**
* Interface for any class that can compute and return distances between two
* instances.
*
* @author Ashraf M. Kibriya (amk14@cs.waikato.ac.nz)
* @version $Revision: 8034 $
*/
public interface DistanceFunction {
/**
 * Sets the instances (the data model the distances are computed in).
 *
 * @param insts the instances to use
 */
public void setInstances(Instances insts);
/**
 * Returns the instances currently set.
 *
 * @return the current instances
 */
public Instances getInstances();
/**
 * Sets the range of attributes to use in the calculation of the distance.
 * The indices start from 1, 'first' and 'last' are valid as well.
 * E.g.: first-3,5,6-last
 *
 * @param value the new attribute index range
 */
public void setAttributeIndices(String value);
/**
 * Gets the range of attributes used in the calculation of the distance.
 *
 * @return the attribute index range
 */
public String getAttributeIndices();
/**
 * Sets whether the matching sense of attribute indices is inverted or not.
 *
 * @param value if true the matching sense is inverted
 */
public void setInvertSelection(boolean value);
/**
 * Gets whether the matching sense of attribute indices is inverted or not.
 *
 * @return true if the matching sense is inverted
 */
public boolean getInvertSelection();
/**
 * Calculates the distance between two instances.
 *
 * @param first the first instance
 * @param second the second instance
 * @return the distance between the two given instances
 */
public double distance(Instance first, Instance second);
/**
 * Calculates the distance between two instances. Offers speed up (if the
 * distance function class in use supports it) in nearest neighbour search by
 * taking into account the cutOff or maximum distance. Depending on the
 * distance function class, post processing of the distances by
 * postProcessDistances(double []) may be required if this function is used.
 *
 * @param first the first instance
 * @param second the second instance
 * @param cutOffValue If the distance being calculated becomes larger than
 * cutOffValue then the rest of the calculation is
 * discarded.
 * @return the distance between the two given instances or
 * Double.POSITIVE_INFINITY if the distance being
 * calculated becomes larger than cutOffValue.
 */
public double distance(Instance first, Instance second, double cutOffValue);
/**
 * Does post processing of the distances (if necessary) returned by
 * distance(distance(Instance first, Instance second, double cutOffValue). It
 * may be necessary, depending on the distance function, to do post processing
 * to set the distances on the correct scale. Some distance function classes
 * may not return correct distances using the cutOffValue distance function to
 * minimize the inaccuracies resulting from floating point comparison and
 * manipulation.
 *
 * @param distances the distances to post-process; modified in place
 */
public void postProcessDistances(double distances[]);
/**
 * Updates the distance function (if necessary) for the newly added instance.
 *
 * @param ins the instance to add
 */
public void update(Instance ins);
}
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* NormalizableDistance.java
* Copyright (C) 2007-2012 University of Waikato, Hamilton, New Zealand
*
*/
package moa.classifiers.lazy.neighboursearch;
import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
/**
* Represents the abstract ancestor for normalizable distance functions, like
* Euclidean or Manhattan distance.
*
* @author Fracpete (fracpete at waikato dot ac dot nz)
* @author Gabi Schmidberger (gabi@cs.waikato.ac.nz) -- original code from weka.core.EuclideanDistance
* @author Ashraf M. Kibriya (amk14@cs.waikato.ac.nz) -- original code from weka.core.EuclideanDistance
* @version $Revision: 8034 $
*/
public abstract class NormalizableDistance
implements DistanceFunction {
/** Index in ranges for MIN. */
public static final int R_MIN = 0;
/** Index in ranges for MAX. */
public static final int R_MAX = 1;
/** Index in ranges for WIDTH. */
public static final int R_WIDTH = 2;
/** the instances used internally. */
protected Instances m_Data = null;
/** True if normalization is turned off (default false).*/
protected boolean m_DontNormalize = false;
/** The range of the attributes. */
protected double[][] m_Ranges;
/** The range of attributes to use for calculating the distance. */
// protected Range m_AttributeIndices = new Range("first-last");
/** The boolean flags, whether an attribute will be used or not. */
protected boolean[] m_ActiveIndices;
/** Whether all the necessary preparations have been done. */
protected boolean m_Validated;
/**
* Invalidates the distance function, Instances must be still set.
*/
public NormalizableDistance() {
invalidate();
}
/**
* Initializes the distance function and automatically initializes the
* ranges.
*
* @param data the instances the distance function should work on
*/
public NormalizableDistance(Instances data) {
setInstances(data);
}
/**
* Returns a string describing this object.
*
* @return a description of the evaluator suitable for
* displaying in the explorer/experimenter gui
*/
public abstract String globalInfo();
/**
* Returns the tip text for this property.
*
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String dontNormalizeTipText() {
return "Whether if the normalization of attributes should be turned off " +
"for distance calculation (Default: false i.e. attribute values " +
"are normalized). ";
}
/**
* Sets whether if the attribute values are to be normalized in distance
* calculation.
*
* @param dontNormalize if true the values are not normalized
*/
public void setDontNormalize(boolean dontNormalize) {
m_DontNormalize = dontNormalize;
invalidate();
}
/**
* Gets whether if the attribute values are to be normazlied in distance
* calculation. (default false i.e. attribute values are normalized.)
*
* @return false if values get normalized
*/
public boolean getDontNormalize() {
return m_DontNormalize;
}
/**
* Returns the tip text for this property.
*
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String attributeIndicesTipText() {
return
"Specify range of attributes to act on. "
+ "This is a comma separated list of attribute indices, with "
+ "\"first\" and \"last\" valid values. Specify an inclusive "
+ "range with \"-\". E.g: \"first-3,5,6-10,last\".";
}
/**
* Sets the range of attributes to use in the calculation of the distance.
* The indices start from 1, 'first' and 'last' are valid as well.
* E.g.: first-3,5,6-last
*
* @param value the new attribute index range
*/
public void setAttributeIndices(String value) {
//m_AttributeIndices.setRanges(value);
invalidate();
}
/**
* Gets the range of attributes used in the calculation of the distance.
*
* @return the attribute index range
*/
public String getAttributeIndices() {
return null; //m_AttributeIndices.getRanges();
}
/**
* Returns the tip text for this property.
*
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String invertSelectionTipText() {
return
"Set attribute selection mode. If false, only selected "
+ "attributes in the range will be used in the distance calculation; if "
+ "true, only non-selected attributes will be used for the calculation.";
}
/**
* Sets whether the matching sense of attribute indices is inverted or not.
*
* @param value if true the matching sense is inverted
*/
public void setInvertSelection(boolean value) {
//m_AttributeIndices.setInvert(value);
invalidate();
}
/**
* Gets whether the matching sense of attribute indices is inverted or not.
*
* @return true if the matching sense is inverted
*/
public boolean getInvertSelection() {
return false; //m_AttributeIndices.getInvert();
}
/**
* invalidates all initializations.
*/
protected void invalidate() {
m_Validated = false;
}
/**
* performs the initializations if necessary.
*/
protected void validate() {
if (!m_Validated) {
initialize();
m_Validated = true;
}
}
/**
* initializes the ranges and the attributes being used.
*/
protected void initialize() {
initializeAttributeIndices();
initializeRanges();
}
/**
* initializes the attribute indices.
*/
protected void initializeAttributeIndices() {
//m_AttributeIndices.setUpper(m_Data.numAttributes() - 1);
m_ActiveIndices = new boolean[m_Data.numAttributes()];
for (int i = 0; i < m_ActiveIndices.length; i++)
m_ActiveIndices[i] = true; //m_AttributeIndices.isInRange(i);
}
/**
* Sets the instances.
*
* @param insts the instances to use
*/
public void setInstances(Instances insts) {
m_Data = insts;
invalidate();
}
/**
* returns the instances currently set.
*
* @return the current instances
*/
public Instances getInstances() {
return m_Data;
}
/**
* Does nothing, derived classes may override it though.
*
* @param distances the distances to post-process
*/
public void postProcessDistances(double[] distances) {
}
/**
* Update the distance function (if necessary) for the newly added instance.
*
* @param ins the instance to add
*/
public void update(Instance ins) {
validate();
m_Ranges = updateRanges(ins, m_Ranges);
}
/**
* Calculates the distance between two instances.
*
* @param first the first instance
* @param second the second instance
* @return the distance between the two given instances
*/
public double distance(Instance first, Instance second) {
return distance(first, second, Double.POSITIVE_INFINITY);
}
/**
* Calculates the distance between two instances. Offers speed up (if the
* distance function class in use supports it) in nearest neighbour search by
* taking into account the cutOff or maximum distance. Depending on the
* distance function class, post processing of the distances by
* postProcessDistances(double []) may be required if this function is used.
*
* @param first the first instance
* @param second the second instance
* @param cutOffValue If the distance being calculated becomes larger than
* cutOffValue then the rest of the calculation is
* discarded.
* @return the distance between the two given instances or
* Double.POSITIVE_INFINITY if the distance being
* calculated becomes larger than cutOffValue.
*/
public double distance(Instance first, Instance second, double cutOffValue) {
double distance = 0;
int firstI, secondI;
int firstNumValues = first.numValues();
int secondNumValues = second.numValues();
int numAttributes = m_Data.numAttributes();
int classIndex = m_Data.classIndex();
validate();
for (int p1 = 0, p2 = 0; p1 < firstNumValues || p2 < secondNumValues; ) {
if (p1 >= firstNumValues)
firstI = numAttributes;
else
firstI = first.index(p1);
if (p2 >= secondNumValues)
secondI = numAttributes;
else
secondI = second.index(p2);
if (firstI == classIndex) {
p1++;
continue;
}
if ((firstI < numAttributes) && !m_ActiveIndices[firstI]) {
p1++;
continue;
}
if (secondI == classIndex) {
p2++;
continue;
}
if ((secondI < numAttributes) && !m_ActiveIndices[secondI]) {
p2++;
continue;
}
double diff;
if (firstI == secondI) {
diff = difference(firstI,
first.valueSparse(p1),
second.valueSparse(p2));
p1++;
p2++;
}
else if (firstI > secondI) {
diff = difference(secondI,
0, second.valueSparse(p2));
p2++;
}
else {
diff = difference(firstI,
first.valueSparse(p1), 0);
p1++;
}
distance = updateDistance(distance, diff);
if (distance > cutOffValue)
return Double.POSITIVE_INFINITY;
}
return distance;
}
/**
* Updates the current distance calculated so far with the new difference
* between two attributes. The difference between the attributes was
* calculated with the difference(int,double,double) method.
*
* @param currDist the current distance calculated so far
* @param diff the difference between two new attributes
* @return the update distance
* @see #difference(int, double, double)
*/
protected abstract double updateDistance(double currDist, double diff);
/**
* Normalizes a given value of a numeric attribute.
*
* @param x the value to be normalized
* @param i the attribute's index
* @return the normalized value
*/
protected double norm(double x, int i) {
if (Double.isNaN(m_Ranges[i][R_MIN]) || (m_Ranges[i][R_MAX] == m_Ranges[i][R_MIN]))
return 0;
else
return (x - m_Ranges[i][R_MIN]) / (m_Ranges[i][R_WIDTH]);
}
/**
* Computes the difference between two given attribute
* values.
*
* @param index the attribute index
* @param val1 the first value
* @param val2 the second value
* @return the difference
*/
protected double difference(int index, double val1, double val2) {
switch (m_Data.attribute(index).type()) {
case Attribute.NOMINAL:
if (isMissingValue(val1) ||
isMissingValue(val2) ||
((int) val1 != (int) val2)) {
return 1;
}
else {
return 0;
}
case Attribute.NUMERIC:
if (isMissingValue(val1) ||
isMissingValue(val2)) {
if (isMissingValue(val1) &&
isMissingValue(val2)) {
if (!m_DontNormalize) //We are doing normalization
return 1;
else
return (m_Ranges[index][R_MAX] - m_Ranges[index][R_MIN]);
}
else {
double diff;
if (isMissingValue(val2)) {
diff = (!m_DontNormalize) ? norm(val1, index) : val1;
}
else {
diff = (!m_DontNormalize) ? norm(val2, index) : val2;
}
if (!m_DontNormalize && diff < 0.5) {
diff = 1.0 - diff;
}
else if (m_DontNormalize) {
if ((m_Ranges[index][R_MAX]-diff) > (diff-m_Ranges[index][R_MIN]))
return m_Ranges[index][R_MAX]-diff;
else
return diff-m_Ranges[index][R_MIN];
}
return diff;
}
}
else {
return (!m_DontNormalize) ?
(norm(val1, index) - norm(val2, index)) :
(val1 - val2);
}
default:
return 0;
}
}
/**
* Initializes the ranges using all instances of the dataset.
* Sets m_Ranges.
*
* @return the ranges
*/
public double[][] initializeRanges() {
if (m_Data == null) {
m_Ranges = null;
return m_Ranges;
}
int numAtt = m_Data.numAttributes();
double[][] ranges = new double [numAtt][3];
if (m_Data.numInstances() <= 0) {
initializeRangesEmpty(numAtt, ranges);
m_Ranges = ranges;
return m_Ranges;
}
else {
// initialize ranges using the first instance
updateRangesFirst(m_Data.instance(0), numAtt, ranges);
}
// update ranges, starting from the second
for (int i = 1; i < m_Data.numInstances(); i++)
updateRanges(m_Data.instance(i), numAtt, ranges);
m_Ranges = ranges;
return m_Ranges;
}
/**
* Used to initialize the ranges. For this the values of the first
* instance is used to save time.
* Sets low and high to the values of the first instance and
* width to zero.
*
* @param instance the new instance
* @param numAtt number of attributes in the model
* @param ranges low, high and width values for all attributes
*/
public void updateRangesFirst(Instance instance, int numAtt, double[][] ranges) {
for (int j = 0; j < numAtt; j++) {
if (!instance.isMissing(j)) {
ranges[j][R_MIN] = instance.value(j);
ranges[j][R_MAX] = instance.value(j);
ranges[j][R_WIDTH] = 0.0;
}
else { // if value was missing
ranges[j][R_MIN] = Double.POSITIVE_INFINITY;
ranges[j][R_MAX] = -Double.POSITIVE_INFINITY;
ranges[j][R_WIDTH] = Double.POSITIVE_INFINITY;
}
}
}
/**
* Updates the minimum and maximum and width values for all the attributes
* based on a new instance.
*
* @param instance the new instance
* @param numAtt number of attributes in the model
* @param ranges low, high and width values for all attributes
*/
public void updateRanges(Instance instance, int numAtt, double[][] ranges) {
// updateRangesFirst must have been called on ranges
for (int j = 0; j < numAtt; j++) {
double value = instance.value(j);
if (!instance.isMissing(j)) {
if (value < ranges[j][R_MIN]) {
ranges[j][R_MIN] = value;
ranges[j][R_WIDTH] = ranges[j][R_MAX] - ranges[j][R_MIN];
if (value > ranges[j][R_MAX]) { //if this is the first value that is
ranges[j][R_MAX] = value; //not missing. The,0
ranges[j][R_WIDTH] = ranges[j][R_MAX] - ranges[j][R_MIN];
}
}
else {
if (value > ranges[j][R_MAX]) {
ranges[j][R_MAX] = value;
ranges[j][R_WIDTH] = ranges[j][R_MAX] - ranges[j][R_MIN];
}
}
}
}
}
/**
* Used to initialize the ranges.
*
* @param numAtt number of attributes in the model
* @param ranges low, high and width values for all attributes
*/
public void initializeRangesEmpty(int numAtt, double[][] ranges) {
for (int j = 0; j < numAtt; j++) {
ranges[j][R_MIN] = Double.POSITIVE_INFINITY;
ranges[j][R_MAX] = -Double.POSITIVE_INFINITY;
ranges[j][R_WIDTH] = Double.POSITIVE_INFINITY;
}
}
/**
* Updates the ranges given a new instance.
*
* @param instance the new instance
* @param ranges low, high and width values for all attributes
* @return the updated ranges
*/
public double[][] updateRanges(Instance instance, double[][] ranges) {
// updateRangesFirst must have been called on ranges
for (int j = 0; j < ranges.length; j++) {
double value = instance.value(j);
if (!instance.isMissing(j)) {
if (value < ranges[j][R_MIN]) {
ranges[j][R_MIN] = value;
ranges[j][R_WIDTH] = ranges[j][R_MAX] - ranges[j][R_MIN];
} else {
if (instance.value(j) > ranges[j][R_MAX]) {
ranges[j][R_MAX] = value;
ranges[j][R_WIDTH] = ranges[j][R_MAX] - ranges[j][R_MIN];
}
}
}
}
return ranges;
}
/**
* Initializes the ranges of a subset of the instances of this dataset.
* Therefore m_Ranges is not set.
*
* @param instList list of indexes of the subset
* @return the ranges
* @throws Exception if something goes wrong
*/
public double[][] initializeRanges(int[] instList) throws Exception {
if (m_Data == null)
throw new Exception("No instances supplied.");
int numAtt = m_Data.numAttributes();
double[][] ranges = new double [numAtt][3];
if (m_Data.numInstances() <= 0) {
initializeRangesEmpty(numAtt, ranges);
return ranges;
}
else {
// initialize ranges using the first instance
updateRangesFirst(m_Data.instance(instList[0]), numAtt, ranges);
// update ranges, starting from the second
for (int i = 1; i < instList.length; i++) {
updateRanges(m_Data.instance(instList[i]), numAtt, ranges);
}
}
return ranges;
}
/**
* Initializes the ranges of a subset of the instances of this dataset.
* Therefore m_Ranges is not set.
* The caller of this method should ensure that the supplied start and end
* indices are valid (start <= end, end<instList.length etc) and
* correct.
*
* @param instList list of indexes of the instances
* @param startIdx start index of the subset of instances in the indices array
* @param endIdx end index of the subset of instances in the indices array
* @return the ranges
* @throws Exception if something goes wrong
*/
public double[][] initializeRanges(int[] instList, int startIdx, int endIdx) throws Exception {
if (m_Data == null)
throw new Exception("No instances supplied.");
int numAtt = m_Data.numAttributes();
double[][] ranges = new double [numAtt][3];
if (m_Data.numInstances() <= 0) {
initializeRangesEmpty(numAtt, ranges);
return ranges;
}
else {
// initialize ranges using the first instance
updateRangesFirst(m_Data.instance(instList[startIdx]), numAtt, ranges);
// update ranges, starting from the second
for (int i = startIdx+1; i <= endIdx; i++) {
updateRanges(m_Data.instance(instList[i]), numAtt, ranges);
}
}
return ranges;
}
/**
* Update the ranges if a new instance comes.
*
* @param instance the new instance
*/
public void updateRanges(Instance instance) {
validate();
m_Ranges = updateRanges(instance, m_Ranges);
}
/**
* Test if an instance is within the given ranges.
*
* @param instance the instance
* @param ranges the ranges the instance is tested to be in
* @return true if instance is within the ranges
*/
public boolean inRanges(Instance instance, double[][] ranges) {
boolean isIn = true;
// updateRangesFirst must have been called on ranges
for (int j = 0; isIn && (j < ranges.length); j++) {
if (!instance.isMissing(j)) {
double value = instance.value(j);
isIn = value <= ranges[j][R_MAX];
if (isIn) isIn = value >= ranges[j][R_MIN];
}
}
return isIn;
}
/**
* Check if ranges are set.
*
* @return true if ranges are set
*/
public boolean rangesSet() {
return (m_Ranges != null);
}
/**
* Method to get the ranges.
*
* @return the ranges
* @throws Exception if no randes are set yet
*/
public double[][] getRanges() throws Exception {
validate();
if (m_Ranges == null)
throw new Exception("Ranges not yet set.");
return m_Ranges;
}
/**
* Returns an empty string.
*
* @return an empty string
*/
public String toString() {
return "";
}
/**
* Tests if the given value codes "missing".
*
* @param val the value to be tested
* @return true if val codes "missing"
*/
public static boolean isMissingValue(double val) {
return Double.isNaN(val);
}
} | Java |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* NearestNeighbourSearch.java
* Copyright (C) 1999-2012 University of Waikato
*/
package moa.classifiers.lazy.neighboursearch;
import java.io.Serializable;
import weka.core.Instance;
import weka.core.Instances;
/**
* Abstract class for nearest neighbour search. All algorithms (classes) that
* do nearest neighbour search should extend this class.
*
* @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @version $Revision: 8034 $
*/
public abstract class NearestNeighbourSearch
implements Serializable{
/**
 * A class for a heap to store the nearest k neighbours to an instance.
 * The heap also takes care of cases where multiple neighbours are the same
 * distance away.
 * i.e. the minimum size of the heap is k.
 * <p>
 * The heap is a binary MAX-heap ordered on distance: the head (m_heap[1])
 * is the farthest of the stored neighbours, so it is the one evicted when a
 * closer neighbour is found. Ties at the k-th distance are kept on the side
 * in m_KthNearest.
 *
 * @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
 * @version $Revision: 8034 $
 */
protected class MyHeap{
  /**
   * The heap, stored as an implicit binary tree in an array.
   * m_heap[0] is a sentinel whose index field holds the current number of
   * elements; real elements occupy m_heap[1..size()].
   */
  MyHeapElement m_heap[] = null;
  /**
   * constructor.
   *
   * @param maxSize the maximum size of the heap
   */
  public MyHeap(int maxSize) {
    // capacity is forced to be odd (plus the sentinel slot at 0)
    if((maxSize%2)==0)
      maxSize++;
    m_heap = new MyHeapElement[maxSize+1];
    m_heap[0] = new MyHeapElement(0, 0); // sentinel: index == current size
  }
  /**
   * returns the size of the heap.
   *
   * @return the size
   */
  public int size() {
    return m_heap[0].index;
  }
  /**
   * peeks at the first element (the farthest stored neighbour).
   *
   * @return the first element
   */
  public MyHeapElement peek() {
    return m_heap[1];
  }
  /**
   * returns the first element and removes it from the heap.
   *
   * @return the first element
   * @throws Exception if no elements in heap
   */
  public MyHeapElement get() throws Exception {
    if(m_heap[0].index==0)
      throw new Exception("No elements present in the heap");
    MyHeapElement r = m_heap[1];
    // move the last element to the root, shrink, and restore the heap order
    m_heap[1] = m_heap[m_heap[0].index];
    m_heap[0].index--;
    downheap();
    return r;
  }
  /**
   * adds the value to the heap.
   *
   * @param i the index
   * @param d the distance
   * @throws Exception if the heap gets too large
   */
  public void put(int i, double d) throws Exception {
    if((m_heap[0].index+1)>(m_heap.length-1))
      throw new Exception("the number of elements cannot exceed the "+
                          "initially set maximum limit");
    m_heap[0].index++;
    m_heap[m_heap[0].index] = new MyHeapElement(i, d);
    upheap();
  }
  /**
   * Puts an element by substituting it in place of
   * the top most element. If the evicted head is at the same distance as
   * the new head, it is retained in the kth-nearest side list; if it was
   * farther, any stale side list is discarded.
   *
   * @param i the index
   * @param d the distance
   * @throws Exception if distance is smaller than that of the head
   *                   element
   */
  public void putBySubstitute(int i, double d) throws Exception {
    MyHeapElement head = get();
    put(i, d);
    // System.out.println("previous: "+head.distance+" current: "+m_heap[1].distance);
    if(head.distance == m_heap[1].distance) { //Utils.eq(head.distance, m_heap[1].distance)) {
      putKthNearest(head.index, head.distance);
    }
    else if(head.distance > m_heap[1].distance) { //Utils.gr(head.distance, m_heap[1].distance)) {
      // the k-th distance strictly decreased: previous ties no longer qualify
      m_KthNearest = null;
      m_KthNearestSize = 0;
      initSize = 10;
    }
    else if(head.distance < m_heap[1].distance) {
      throw new Exception("The substituted element is smaller than the "+
                          "head element. put() should have been called "+
                          "in place of putBySubstitute()");
    }
  }
  /** the kth nearest ones (ties at the k-th distance, kept outside the heap). */
  MyHeapElement m_KthNearest[] = null;
  /** The number of kth nearest elements. */
  int m_KthNearestSize = 0;
  /** the initial size of the side list (doubles on demand). */
  int initSize=10;
  /**
   * returns the number of k nearest.
   *
   * @return the number of k nearest
   * @see #m_KthNearestSize
   */
  public int noOfKthNearest() {
    return m_KthNearestSize;
  }
  /**
   * Stores kth nearest elements (if there are
   * more than one).
   * @param i the index
   * @param d the distance
   */
  public void putKthNearest(int i, double d) {
    if(m_KthNearest==null) {
      m_KthNearest = new MyHeapElement[initSize];
    }
    if(m_KthNearestSize>=m_KthNearest.length) {
      // grow by doubling, copying the existing elements over
      initSize += initSize;
      MyHeapElement temp[] = new MyHeapElement[initSize];
      System.arraycopy(m_KthNearest, 0, temp, 0, m_KthNearest.length);
      m_KthNearest = temp;
    }
    m_KthNearest[m_KthNearestSize++] = new MyHeapElement(i, d);
  }
  /**
   * returns the kth nearest element or null if none there.
   *
   * @return the kth nearest element
   */
  public MyHeapElement getKthNearest() {
    if(m_KthNearestSize==0)
      return null;
    m_KthNearestSize--;
    return m_KthNearest[m_KthNearestSize];
  }
  /**
   * performs upheap operation for the heap
   * to maintain its properties: sifts the last element up while it is
   * larger (farther) than its parent at i/2.
   */
  protected void upheap() {
    int i = m_heap[0].index;
    MyHeapElement temp;
    while( i > 1 && m_heap[i].distance>m_heap[i/2].distance) {
      temp = m_heap[i];
      m_heap[i] = m_heap[i/2];
      i = i/2;
      m_heap[i] = temp; //this is i/2 done here to avoid another division.
    }
  }
  /**
   * performs downheap operation for the heap
   * to maintain its properties: sifts the root down, swapping with the
   * larger (farther) of its children at 2i and 2i+1.
   */
  protected void downheap() {
    int i = 1;
    MyHeapElement temp;
    while( ( (2*i) <= m_heap[0].index &&
             m_heap[i].distance < m_heap[2*i].distance )
           ||
           ( (2*i+1) <= m_heap[0].index &&
             m_heap[i].distance < m_heap[2*i+1].distance) ) {
      if((2*i+1)<=m_heap[0].index) {
        // two children: swap with the larger one
        if(m_heap[2*i].distance>m_heap[2*i+1].distance) {
          temp = m_heap[i];
          m_heap[i] = m_heap[2*i];
          i = 2*i;
          m_heap[i] = temp;
        }
        else {
          temp = m_heap[i];
          m_heap[i] = m_heap[2*i+1];
          i = 2*i+1;
          m_heap[i] = temp;
        }
      }
      else {
        // only a left child
        temp = m_heap[i];
        m_heap[i] = m_heap[2*i];
        i = 2*i;
        m_heap[i] = temp;
      }
    }
  }
  /**
   * returns the total size: heap elements plus the tied kth-nearest ones.
   *
   * @return the total size
   */
  public int totalSize() {
    return size()+noOfKthNearest();
  }
}
/**
 * A single heap entry: the position of a neighbouring instance in the
 * dataset together with its distance from the query instance.
 *
 * @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
 * @version $Revision: 8034 $
 */
protected class MyHeapElement{

  /** the index of this element. */
  public int index;

  /** the distance of this element. */
  public double distance;

  /**
   * Builds an entry for the instance at position {@code i} lying at
   * distance {@code d}.
   *
   * @param i the index
   * @param d the distance
   */
  public MyHeapElement(int i, double d) {
    index = i;
    distance = d;
  }
}
/**
 * One link of the singly-linked neighbour list: holds a neighbouring
 * instance, its distance from the query instance, and the next link.
 *
 * @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
 * @version $Revision: 8034 $
 */ //better to change this into a heap element
protected class NeighborNode {

  /** The neighbor instance. */
  public Instance m_Instance;

  /** The distance from the current instance to this neighbor. */
  public double m_Distance;

  /** A link to the next neighbor instance. */
  public NeighborNode m_Next;

  /**
   * Builds a node for the given neighbour, chained to the supplied
   * successor node.
   *
   * @param distance the distance to the neighbor
   * @param instance the neighbor instance
   * @param next the next neighbor node
   */
  public NeighborNode(double distance, Instance instance, NeighborNode next) {
    m_Instance = instance;
    m_Distance = distance;
    m_Next = next;
  }

  /**
   * Builds an unchained (tail) node for the given neighbour.
   *
   * @param distance the distance to the neighbor
   * @param instance the neighbor instance
   */
  public NeighborNode(double distance, Instance instance) {
    this(distance, instance, null);
  }
}
/**
 * A class for a linked list to store the nearest k neighbours
 * to an instance. We use a list so that we can take care of
 * cases where multiple neighbours are the same distance away.
 * i.e. the minimum length of the list is k.
 * <p>
 * The list is kept sorted by ascending distance; insertSorted trims it back
 * to m_Length elements, except that ties at the last kept distance are all
 * retained.
 *
 * @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
 * @version $Revision: 8034 $
 */ //better to change this into a heap
protected class NeighborList{
  /** The first node in the list. */
  protected NeighborNode m_First;
  /** The last node in the list. */
  protected NeighborNode m_Last;
  /** The number of nodes to attempt to maintain in the list. */
  protected int m_Length = 1;
  /**
   * Creates the neighborlist with a desired length.
   *
   * @param length the length of list to attempt to maintain
   */
  public NeighborList(int length) {
    m_Length = length;
  }
  /**
   * Gets whether the list is empty.
   *
   * @return true if list is empty
   */
  public boolean isEmpty() {
    return (m_First == null);
  }
  /**
   * Gets the current length of the list (by walking it).
   *
   * @return the current length of the list
   */
  public int currentLength() {
    int i = 0;
    NeighborNode current = m_First;
    while (current != null) {
      i++;
      current = current.m_Next;
    }
    return i;
  }
  /**
   * Inserts an instance neighbor into the list, maintaining the list
   * sorted by distance. Afterwards the list is trimmed back to m_Length
   * elements, keeping any ties at the last retained distance.
   *
   * @param distance the distance to the instance
   * @param instance the neighboring instance
   */
  public void insertSorted(double distance, Instance instance) {
    if (isEmpty()) {
      m_First = m_Last = new NeighborNode(distance, instance);
    } else {
      NeighborNode current = m_First;
      if (distance < m_First.m_Distance) {// Insert at head
        m_First = new NeighborNode(distance, instance, m_First);
      } else { // Insert further down the list
        // advance to the last node whose successor is not farther away
        // (note the intentionally empty loop body)
        for( ;(current.m_Next != null) &&
              (current.m_Next.m_Distance < distance);
             current = current.m_Next);
        current.m_Next = new NeighborNode(distance, instance,
                                          current.m_Next);
        if (current.equals(m_Last)) {
          m_Last = current.m_Next;
        }
      }
      // Trip down the list until we've got k list elements (or more if the
      // distance to the last elements is the same).
      int valcount = 0;
      for(current = m_First; current.m_Next != null;
          current = current.m_Next) {
        valcount++;
        if ((valcount >= m_Length) && (current.m_Distance !=
                                       current.m_Next.m_Distance)) {
          // cut the list here: the next node is strictly farther away
          m_Last = current;
          current.m_Next = null;
          break;
        }
      }
    }
  }
  /**
   * Prunes the list to contain the k nearest neighbors. If there are
   * multiple neighbors at the k'th distance, all will be kept.
   *
   * @param k the number of neighbors to keep in the list.
   */
  public void pruneToK(int k) {
    if (isEmpty()) {
      return;
    }
    if (k < 1) {
      k = 1;
    }
    int currentK = 0;
    double currentDist = m_First.m_Distance;
    NeighborNode current = m_First;
    for(; current.m_Next != null; current = current.m_Next) {
      currentK++;
      currentDist = current.m_Distance;
      if ((currentK >= k) && (currentDist != current.m_Next.m_Distance)) {
        m_Last = current;
        current.m_Next = null;
        break;
      }
    }
  }
  /**
   * Prints out the contents of the neighborlist (debugging aid).
   */
  public void printList() {
    if (isEmpty()) {
      System.out.println("Empty list");
    } else {
      NeighborNode current = m_First;
      while (current != null) {
        System.out.println("Node: instance " + current.m_Instance
                           + ", distance " + current.m_Distance);
        current = current.m_Next;
      }
      System.out.println();
    }
  }
  /**
   * returns the first element in the list.
   *
   * @return the first element
   */
  public NeighborNode getFirst() {
    return m_First;
  }
  /**
   * returns the last element in the list.
   *
   * @return the last element
   */
  public NeighborNode getLast() {
    return m_Last;
  }
}
/** The neighbourhood of instances to find neighbours in. */
protected Instances m_Instances;
/** The number of neighbours to find. */
protected int m_kNN;
/** the distance function used (default: EuclideanDistance). */
protected DistanceFunction m_DistanceFunction = new EuclideanDistance();
/** Should we measure Performance (default: false). */
protected boolean m_MeasurePerformance = false;
/**
 * Constructor. Instances must be supplied later via setInstances().
 */
public NearestNeighbourSearch() {
}
/**
 * Constructor.
 *
 * @param insts The set of instances that constitute the neighbourhood.
 */
public NearestNeighbourSearch(Instances insts) {
  this();
  m_Instances = insts;
}
/**
* Returns a string describing this nearest neighbour search algorithm.
*
* @return a description of the algorithm for displaying in the
* explorer/experimenter gui
*/
public String globalInfo() {
return
"Abstract class for nearest neighbour search. All algorithms (classes) that "
+ "do nearest neighbour search should extend this class.";
}
/**
* Returns the tip text for this property.
*
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String distanceFunctionTipText() {
return "The distance function to use for finding neighbours " +
"(default: weka.core.EuclideanDistance). ";
}
/**
* returns the distance function currently in use.
*
* @return the distance function
*/
public DistanceFunction getDistanceFunction() {
return m_DistanceFunction;
}
/**
* sets the distance function to use for nearest neighbour search.
*
* @param df the new distance function to use
* @throws Exception if instances cannot be processed
*/
public void setDistanceFunction(DistanceFunction df) throws Exception {
m_DistanceFunction = df;
}
/**
* Returns the tip text for this property.
*
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String measurePerformanceTipText() {
return "Whether to calculate performance statistics " +
"for the NN search or not";
}
/**
* Gets whether performance statistics are being calculated or not.
*
* @return true if the measure performance is calculated
*/
public boolean getMeasurePerformance() {
return m_MeasurePerformance;
}
// Core search API to be implemented by concrete subclasses (e.g. a brute-force
// linear scan or a KD-tree — see the callers elsewhere in this file).
/**
* Returns the nearest instance in the current neighbourhood to the supplied
* instance.
*
* @param target The instance to find the nearest neighbour for.
* @return the nearest neighbor
* @throws Exception if the nearest neighbour could not be found.
*/
public abstract Instance nearestNeighbour(Instance target) throws Exception;
/**
* Returns k nearest instances in the current neighbourhood to the supplied
* instance.
*
* @param target The instance to find the k nearest neighbours for.
* @param k The number of nearest neighbours to find.
* @return the k nearest neighbors
* @throws Exception if the neighbours could not be found.
*/
public abstract Instances kNearestNeighbours(Instance target, int k) throws Exception;
/**
* Returns the distances of the k nearest neighbours. The kNearestNeighbours
* or nearestNeighbour needs to be called first for this to work.
*
* @return the distances
* @throws Exception if called before calling kNearestNeighbours
* or nearestNeighbours.
*/
public abstract double[] getDistances() throws Exception;
/**
* Updates the NearNeighbourSearch algorithm for the new added instance.
* P.S.: The method assumes the instance has already been added to the
* m_Instances object by the caller.
*
* @param ins the instance to add
* @throws Exception if updating fails
*/
public abstract void update(Instance ins) throws Exception;
/**
* Adds information from the given instance without modifying the
* datastructure a lot.
*
* @param ins the instance to add the information from
*/
public void addInstanceInfo(Instance ins) {
// Default implementation is a no-op; subclasses may record cheap statistics.
}
/**
* Sets the instances.
*
* @param insts the instances to use
* @throws Exception if setting fails
*/
public void setInstances(Instances insts) throws Exception {
// Stores the reference only — no defensive copy is made here.
m_Instances = insts;
}
/**
* returns the instances currently set.
*
* @return the current instances
*/
public Instances getInstances() {
return m_Instances;
}
/**
 * Sorts arrayToSort in ascending order using comb sort with the classic
 * "combsort11" gap rule, applying every swap to linkedArray as well so that
 * both arrays keep the same relative ordering.
 *
 * @param arrayToSort The array sorting should be based on.
 * @param linkedArray The array that should have the same ordering as
 *                    arrayToSort.
 */
public static void combSort11(double arrayToSort[], int linkedArray[]) {
    int gap = arrayToSort.length;
    boolean swapped;
    do {
        // Shrink the gap by the standard comb-sort factor 1.3; gaps of 9 or
        // 10 are bumped to 11 (the "combsort11" variant) and the gap never
        // drops below 1 (at gap 1 this degenerates to bubble sort).
        gap = (int) (gap / 1.3);
        if (gap == 9 || gap == 10) {
            gap = 11;
        } else if (gap < 1) {
            gap = 1;
        }
        // One comparison pass at the current gap, swapping out-of-order pairs.
        swapped = false;
        int limit = arrayToSort.length - gap;
        for (int lo = 0; lo < limit; lo++) {
            int hi = lo + gap;
            if (arrayToSort[lo] > arrayToSort[hi]) {
                double keyTmp = arrayToSort[lo];
                arrayToSort[lo] = arrayToSort[hi];
                arrayToSort[hi] = keyTmp;
                int tagTmp = linkedArray[lo];
                linkedArray[lo] = linkedArray[hi];
                linkedArray[hi] = tagTmp;
                swapped = true;
            }
        }
    } while (swapped || gap > 1);
}
/**
* Partitions the instances around a pivot. Used by quicksort and
* kthSmallestValue.
*
* @param arrayToSort the array of doubles to be sorted
* @param linkedArray the linked array
* @param l the first index of the subset
* @param r the last index of the subset
* @return the index of the middle element
*/
protected static int partition(double[] arrayToSort, double[] linkedArray, int l, int r) {
// Hoare-style partition around the middle element: walk l and r towards
// each other, swapping elements that are on the wrong side of the pivot.
// Every swap is mirrored in linkedArray to keep the two arrays aligned.
double pivot = arrayToSort[(l + r) / 2];
double help;
while (l < r) {
while ((arrayToSort[l] < pivot) && (l < r)) {
l++;
}
while ((arrayToSort[r] > pivot) && (l < r)) {
r--;
}
if (l < r) {
help = arrayToSort[l];
arrayToSort[l] = arrayToSort[r];
arrayToSort[r] = help;
help = linkedArray[l];
linkedArray[l] = linkedArray[r];
linkedArray[r] = help;
l++;
r--;
}
}
// If the pointers met on an element strictly greater than the pivot, step
// back so everything at index <= r is <= pivot (the quicksort invariant).
if ((l == r) && (arrayToSort[r] > pivot)) {
r--;
}
return r;
}
/**
 * Recursively quicksorts the given range of arrayToSort in ascending order,
 * mirroring every element move in linkedArray (see partition).
 *
 * @param arrayToSort the array to sort
 * @param linkedArray the linked array, permuted in lockstep with arrayToSort
 * @param left the first index of the subset
 * @param right the last index of the subset
 */
public static void quickSort(double[] arrayToSort, double[] linkedArray, int left, int right) {
    // Guard clause: ranges of fewer than two elements are already sorted.
    if (left >= right) {
        return;
    }
    int mid = partition(arrayToSort, linkedArray, left, right);
    quickSort(arrayToSort, linkedArray, left, mid);
    quickSort(arrayToSort, linkedArray, mid + 1, right);
}
}
| Java |
/*
* kNNAdaptive.java
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.lazy;
import java.util.ArrayList;
import moa.classifiers.core.driftdetection.ADWIN;
import weka.core.Instance;
import weka.core.Instances;
/**
* k Nearest Neighbor ADAPTIVE with ADWIN+PAW.<p>
*
* Valid options are:
* <p>
*
* -k number of neighbours <br>
*
* @author Jesse Read (jesse@tsc.uc3m.es)
* @version 03.2012
*/
public class kNNwithPAWandADWIN extends kNN {

    private static final long serialVersionUID = 1L;

    /** ADWIN change detector fed with the 0/1 error of the current window. */
    private ADWIN adwin;

    protected int marker = 0;

    /** Arrival time (value of {@link #time}) of each instance in the window. */
    protected ArrayList<Integer> timeStamp;

    @Override
    public String getPurposeString() {
        return "kNNwithPAWandADWIN: kNN with Probabilistic Approximate Window and ADWIN";
    }

    /** Per-instance survival probability of the probabilistic window. */
    protected double prob;

    @Override
    public void resetLearningImpl() {
        this.window = null;
        // FIX: previously the timestamp list survived a reset, leaving stale
        // entries misaligned with the (cleared) window.
        this.timeStamp = null;
        this.adwin = new ADWIN();
        this.prob = Math.pow(2.0, -1.0 / this.limitOption.getValue());
        this.time = 0;
    }

    /** Logical clock: number of training instances seen so far. */
    protected int time;

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        // Track the largest class index seen so vote arrays are sized correctly.
        if (inst.classValue() > C) {
            C = (int) inst.classValue();
        }
        if (this.window == null) {
            this.window = new Instances(inst.dataset());
        }
        if (this.timeStamp == null) {
            this.timeStamp = new ArrayList<Integer>(10);
        }
        // Probabilistic Approximate Window: drop each stored instance with
        // probability (1 - prob). FIX: decrement i after a removal so the
        // element shifted into slot i is still tested (the original loop
        // skipped every element that followed a deletion).
        for (int i = 0; i < this.window.size(); i++) {
            if (this.classifierRandom.nextDouble() > this.prob) {
                this.window.delete(i);
                this.timeStamp.remove(i);
                i--;
            }
        }
        this.window.add(inst);
        this.timeStamp.add(this.time);
        this.time++;
        // Feed ADWIN with the 0/1 error of the current model on this instance.
        boolean correctlyClassifies = this.correctlyClassifies(inst);
        if (this.adwin.setInput(correctlyClassifies ? 0 : 1)) {
            // Change detected: shrink the window to ADWIN's estimated width by
            // dropping every instance older than (time - width). Same i--
            // fix as above so no survivor is skipped.
            int size = (int) this.adwin.getWidth();
            for (int i = 0; i < this.window.size(); i++) {
                if (this.timeStamp.get(i) < this.time - size) {
                    this.window.delete(i);
                    this.timeStamp.remove(i);
                    i--;
                }
            }
        }
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // Intentionally empty: the model is just the instance window.
    }

    @Override
    public boolean isRandomizable() {
        return true;
    }
}
| Java |
/*
* kNNwithPAW.java
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.lazy;
import moa.classifiers.core.driftdetection.ADWIN;
import weka.core.Instance;
import weka.core.Instances;
/**
* k Nearest Neighbor ADAPTIVE with PAW.<p>
*
* Valid options are:
* <p>
*
* -k number of neighbours <br>
*
* @author Jesse Read (jesse@tsc.uc3m.es)
* @version 03.2012
*/
public class kNNwithPAW extends kNN {

    private static final long serialVersionUID = 1L;

    protected int marker = 0;

    @Override
    public String getPurposeString() {
        return "kNN+PAW: kNN with Probabilistic Approximate Window";
    }

    /** Per-instance survival probability of the probabilistic window. */
    protected double prob;

    @Override
    public void resetLearningImpl() {
        this.window = null;
        // Survival probability chosen so an instance's expected lifetime
        // matches the configured window limit.
        this.prob = Math.pow(2.0, -1.0 / this.limitOption.getValue());
    }

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        // Track the largest class index seen so vote arrays are sized correctly.
        if (inst.classValue() > C) {
            C = (int) inst.classValue();
        }
        if (this.window == null) {
            this.window = new Instances(inst.dataset());
        }
        // Drop each stored instance with probability (1 - prob). FIX:
        // decrement i after a removal so the element shifted into slot i is
        // also tested (the original loop skipped it).
        for (int i = 0; i < this.window.size(); i++) {
            if (this.classifierRandom.nextDouble() > this.prob) {
                this.window.delete(i);
                i--;
            }
        }
        this.window.add(inst);
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // Intentionally empty: the model is just the instance window.
    }

    @Override
    public boolean isRandomizable() {
        return true;
    }
}
| Java |
/*
* kNN.java
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.lazy;
import java.io.StringReader;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.lazy.neighboursearch.KDTree;
import moa.classifiers.lazy.neighboursearch.LinearNNSearch;
import moa.classifiers.lazy.neighboursearch.NearestNeighbourSearch;
import moa.core.InstancesHeader;
import moa.core.Measurement;
import moa.options.IntOption;
import moa.options.MultiChoiceOption;
import weka.core.Instance;
import weka.core.Instances;
/**
* k Nearest Neighbor.<p>
*
* Valid options are:<p>
*
* -k number of neighbours <br> -m max instances <br>
*
* @author Jesse Read (jesse@tsc.uc3m.es)
* @version 03.2012
*/
public class kNN extends AbstractClassifier {
private static final long serialVersionUID = 1L;
public IntOption kOption = new IntOption( "k", 'k', "The number of neighbors", 10, 1, Integer.MAX_VALUE);
public IntOption limitOption = new IntOption( "limit", 'w', "The maximum number of instances to store", 1000, 1, Integer.MAX_VALUE);
public MultiChoiceOption nearestNeighbourSearchOption = new MultiChoiceOption(
"nearestNeighbourSearch", 'n', "Nearest Neighbour Search to use", new String[]{
"LinearNN", "KDTree"},
new String[]{"Brute force search algorithm for nearest neighbour search. ",
"KDTree search algorithm for nearest neighbour search"
}, 0);
int C = 0;
@Override
public String getPurposeString() {
    // FIX: replaced the placeholder text "kNN: special." with a real
    // description of the classifier.
    return "kNN: k Nearest Neighbour classifier over a sliding window of instances.";
}
protected Instances window;
@Override
public void setModelContext(InstancesHeader context) {
// Builds an empty window with the same header (attributes/class index) as
// the stream context by round-tripping the header through its ARFF text.
try {
this.window = new Instances(new StringReader(context.toString()),0);
this.window.setClassIndex(context.classIndex());
} catch(Exception e) {
// NOTE(review): terminating the whole JVM on a bad context is drastic for
// a library class — consider propagating the error instead.
System.err.println("Error: no Model Context available.");
e.printStackTrace();
System.exit(1);
}
}
@Override
public void resetLearningImpl() {
// Forget all stored instances; the window is lazily rebuilt on training.
this.window = null;
}
@Override
public void trainOnInstanceImpl(Instance inst) {
    // Track the largest class index seen so vote arrays are sized correctly.
    if (inst.classValue() > C) {
        C = (int) inst.classValue();
    }
    // Lazily create the window with the same header as the incoming stream.
    if (this.window == null) {
        this.window = new Instances(inst.dataset());
    }
    // Sliding window: evict the oldest instance once the limit is reached,
    // then store the new one.
    if (this.window.numInstances() >= this.limitOption.getValue()) {
        this.window.delete(0);
    }
    this.window.add(inst);
}
@Override
public double[] getVotesForInstance(Instance inst) {
// Votes are raw neighbour counts per class, sized by the largest class
// index observed during training (C).
double v[] = new double[C+1];
try {
// Build the search structure fresh for every query over the current window.
NearestNeighbourSearch search;
if (this.nearestNeighbourSearchOption.getChosenIndex()== 0) {
search = new LinearNNSearch(this.window);
} else {
search = new KDTree();
search.setInstances(this.window);
}
if (this.window.numInstances()>0) {
// Ask for at most k neighbours, capped by the current window size.
Instances neighbours = search.kNearestNeighbours(inst,Math.min(kOption.getValue(),this.window.numInstances()));
for(int i = 0; i < neighbours.numInstances(); i++) {
v[(int)neighbours.instance(i).classValue()]++;
}
}
} catch(Exception e) {
// Deliberate best-effort: on any search failure return all-zero votes
// (interpreted as "no prediction") instead of crashing the stream.
//System.err.println("Error: kNN search failed.");
//e.printStackTrace();
//System.exit(1);
return new double[inst.numClasses()];
}
return v;
}
@Override
protected Measurement[] getModelMeasurementsImpl() {
// No model-specific measurements are reported.
return null;
}
@Override
public void getModelDescription(StringBuilder out, int indent) {
// Intentionally empty: the model is just the instance window.
}
public boolean isRandomizable() {
// The base kNN uses no randomness (probabilistic-window subclasses do).
return false;
}
} | Java |
/*
* SGD.java
* Copyright (C) 2009 University of Waikato, Hamilton, New Zealand
* @author Eibe Frank (eibe{[at]}cs{[dot]}waikato{[dot]}ac{[dot]}nz)
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
/*
* SGD.java
* Copyright (C) 2009 University of Waikato, Hamilton, New Zealand
*
*/
package moa.classifiers.functions;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Regressor;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.StringUtils;
import moa.options.FloatOption;
import moa.options.MultiChoiceOption;
import weka.core.Instance;
import weka.core.Utils;
/**
<!-- globalinfo-start -->
* Implements stochastic gradient descent for learning various linear models (binary class SVM, binary class logistic regression and linear regression).
* <p/>
<!-- globalinfo-end -->
*
*/
public class SGD extends AbstractClassifier implements Regressor{
/** For serialization */
private static final long serialVersionUID = -3732968666673530290L;
@Override
public String getPurposeString() {
    // FIX: typo — the string previously began with "AStochastic".
    return "Stochastic gradient descent for learning various linear models (binary class SVM, binary class logistic regression and linear regression).";
}
/** The regularization parameter */
protected double m_lambda = 0.0001;
public FloatOption lambdaRegularizationOption = new FloatOption("lambdaRegularization",
'l', "Lambda regularization parameter .",
0.0001, 0.00, Integer.MAX_VALUE);
/** The learning rate */
// NOTE(review): the field initializer (0.01) differs from the option default
// (0.0001); the option value wins because resetLearningImpl copies it in.
protected double m_learningRate = 0.01;
public FloatOption learningRateOption = new FloatOption("learningRate",
'r', "Learning rate parameter.",
0.0001, 0.00, Integer.MAX_VALUE);
/** The weight vector. The bias term is kept separately in m_bias; the extra
last slot allocated in trainOnInstanceImpl appears unused — verify. */
protected DoubleVector m_weights;
protected double m_bias;
/** Holds the current iteration number */
protected double m_t;
/** The number of training instances */
// NOTE(review): never assigned in this class, so it stays 0 — see the
// multiplier computation in trainOnInstanceImpl.
protected double m_numInstances;
protected static final int HINGE = 0;
protected static final int LOGLOSS = 1;
protected static final int SQUAREDLOSS = 2;
/** The current loss function to minimize */
protected int m_loss = HINGE;
public MultiChoiceOption lossFunctionOption = new MultiChoiceOption(
"lossFunction", 'o', "The loss function to use.", new String[]{
"HINGE", "LOGLOSS", "SQUAREDLOSS"}, new String[]{
"Hinge loss (SVM)",
"Log loss (logistic regression)",
"Squared loss (regression)"}, 0);
/**
* Set the value of lambda to use
*
* @param lambda the value of lambda to use
*/
public void setLambda(double lambda) {
m_lambda = lambda;
}
/**
* Get the current value of lambda
*
* @return the current value of lambda
*/
public double getLambda() {
return m_lambda;
}
/**
* Set the loss function to use.
*
* @param function the loss function to use.
*/
public void setLossFunction(int function) {
m_loss = function;
}
/**
* Get the current loss function.
*
* @return the current loss function.
*/
public int getLossFunction() {
return m_loss;
}
/**
* Set the learning rate.
*
* @param lr the learning rate to use.
*/
public void setLearningRate(double lr) {
m_learningRate = lr;
}
/**
* Get the learning rate.
*
* @return the learning rate
*/
public double getLearningRate() {
return m_learningRate;
}
/**
* Reset the classifier.
*/
public void reset() {
// m_t is the SGD step counter; restarting it at 1 avoids a division by
// zero in the weight-decay multiplier of trainOnInstanceImpl.
m_t = 1;
m_weights = null;
m_bias = 0.0;
}
/**
 * Derivative (magnitude) of the configured loss function at the
 * margin/residual z, used as the step scale in the SGD update.
 */
protected double dloss(double z) {
    switch (m_loss) {
        case HINGE:
            // Hinge subgradient: 1 inside the margin, 0 outside.
            return (z < 1) ? 1 : 0;
        case LOGLOSS: {
            // Log-loss derivative, split on the sign of z so that exp() is
            // only ever evaluated on a non-positive argument (no overflow).
            if (z < 0) {
                return 1.0 / (Math.exp(z) + 1.0);
            }
            double t = Math.exp(-z);
            return t / (t + 1);
        }
        default:
            // SQUAREDLOSS: derivative of 0.5 * z^2 is simply z.
            return z;
    }
}
// Sparse dot product between an instance and a dense weight vector, skipping
// the class attribute and any missing sparse values. The bias element (beyond
// index n2-1) is excluded via the loop bound... note n2 here is the full
// weight count; the class-index test does the actual exclusion.
protected static double dotProd(Instance inst1, DoubleVector weights, int classIndex) {
double result = 0;
int n1 = inst1.numValues();
int n2 = weights.numValues();
// Merge-style walk over the instance's sparse indices (p1) and the dense
// weight indices (p2); only matching indices contribute to the sum.
for (int p1 = 0, p2 = 0; p1 < n1 && p2 < n2;) {
int ind1 = inst1.index(p1);
int ind2 = p2;
if (ind1 == ind2) {
if (ind1 != classIndex && !inst1.isMissingSparse(p1)) {
result += inst1.valueSparse(p1) * weights.getValue(p2);
}
p1++;
p2++;
} else if (ind1 > ind2) {
p2++;
} else {
p1++;
}
}
return (result);
}
@Override
public void resetLearningImpl() {
// Clear the model, then pull the user-configured option values into the
// working fields.
reset();
setLambda(this.lambdaRegularizationOption.getValue());
setLearningRate(this.learningRateOption.getValue());
setLossFunction(this.lossFunctionOption.getChosenIndex());
}
/**
* Trains the classifier with the given instance.
*
* @param instance the new training instance to include in the model
*/
@Override
public void trainOnInstanceImpl(Instance instance) {
// Lazy init: one weight per attribute plus one spare slot (the bias is in
// fact kept in m_bias, so the last slot appears unused — verify).
if (m_weights == null) {
m_weights = new DoubleVector(new double[instance.numAttributes()+1]);
m_bias = 0.0;
}
if (!instance.classIsMissing()) {
double wx = dotProd(instance, m_weights, instance.classIndex());
double y;
double z;
if (instance.classAttribute().isNominal()) {
// Binary classification: map class 0/1 to -1/+1; z is the margin.
y = (instance.classValue() == 0) ? -1 : 1;
z = y * (wx + m_bias);
} else {
// Regression: z is the residual; y = 1 so the update sign comes from z.
y = instance.classValue();
z = y - (wx + m_bias);
y = 1;
}
// Compute multiplier for weight decay
// NOTE(review): m_numInstances is never assigned in this class, so only
// the m_t branch ever runs — confirm whether that is intended.
double multiplier = 1.0;
if (m_numInstances == 0) {
multiplier = 1.0 - (m_learningRate * m_lambda) / m_t;
} else {
multiplier = 1.0 - (m_learningRate * m_lambda) / m_numInstances;
}
// L2 regularization: shrink every weight towards zero.
for (int i = 0; i < m_weights.numValues(); i++) {
m_weights.setValue(i,m_weights.getValue (i) * multiplier);
}
// Only need to do the following if the loss is non-zero
// (hinge loss has zero gradient outside the margin, i.e. when z >= 1).
if (m_loss != HINGE || (z < 1)) {
// Compute Factor for updates
double factor = m_learningRate * y * dloss(z);
// Update coefficients for attributes
int n1 = instance.numValues();
for (int p1 = 0; p1 < n1; p1++) {
int indS = instance.index(p1);
if (indS != instance.classIndex() && !instance.isMissingSparse(p1)) {
m_weights.addToValue(indS, factor * instance.valueSparse(p1));
}
}
// update the bias
m_bias += factor;
}
m_t++;
}
}
/**
* Calculates the class membership probabilities for the given test
* instance.
*
* @param inst the instance to be classified
* @return predicted class probability distribution (for hinge loss the
* votes are hard 0/1 rather than calibrated probabilities)
*/
@Override
public double[] getVotesForInstance(Instance inst) {
// Untrained model: return an all-zero, class-indexed vote array.
if (m_weights == null) {
return new double[inst.numClasses()];
}
// Two entries for binary classification, one for regression output.
double[] result = (inst.classAttribute().isNominal())
? new double[2]
: new double[1];
double wx = dotProd(inst, m_weights, inst.classIndex());// * m_wScale;
double z = (wx + m_bias);
if (inst.classAttribute().isNumeric()) {
// Regression: the raw linear output is the prediction.
result[0] = z;
return result;
}
if (z <= 0) {
// z = 0;
if (m_loss == LOGLOSS) {
// Logistic regression: sigmoid of the margin gives a probability.
result[0] = 1.0 / (1.0 + Math.exp(z));
result[1] = 1.0 - result[0];
} else {
result[0] = 1;
}
} else {
if (m_loss == LOGLOSS) {
result[1] = 1.0 / (1.0 + Math.exp(-z));
result[0] = 1.0 - result[1];
} else {
result[1] = 1;
}
}
return result;
}
@Override
public void getModelDescription(StringBuilder result, int indent) {
// Delegates to toString() so both description paths stay consistent.
StringUtils.appendIndented(result, indent, toString());
StringUtils.appendNewline(result);
}
/**
* Prints out the classifier.
*
* @return a description of the classifier as a string
*/
public String toString() {
if (m_weights == null) {
return "SGD: No model built yet.\n";
}
StringBuffer buff = new StringBuffer();
buff.append("Loss function: ");
if (m_loss == HINGE) {
buff.append("Hinge loss (SVM)\n\n");
} else if (m_loss == LOGLOSS) {
buff.append("Log loss (logistic regression)\n\n");
} else {
buff.append("Squared loss (linear regression)\n\n");
}
// buff.append(m_data.classAttribute().name() + " = \n\n");
// One line per weight; attribute names are unavailable here (the commented
// m_data references are leftovers from the Weka original).
int printed = 0;
for (int i = 0; i < m_weights.numValues(); i++) {
// if (i != m_data.classIndex()) {
if (printed > 0) {
buff.append(" + ");
} else {
buff.append(" ");
}
buff.append(Utils.doubleToString(m_weights.getValue(i), 12, 4) + " "
// + m_data.attribute(i).name()
+ "\n");
printed++;
//}
}
// Append the bias with an explicit sign.
if (m_bias > 0) {
buff.append(" + " + Utils.doubleToString(m_bias, 12, 4));
} else {
buff.append(" - " + Utils.doubleToString(-m_bias, 12, 4));
}
return buff.toString();
}
@Override
protected Measurement[] getModelMeasurementsImpl() {
// No model-specific measurements are reported.
return null;
}
@Override
public boolean isRandomizable() {
return false;
}
}
| Java |
/*
* SPegasos.java
* Copyright (C) 2009 University of Waikato, Hamilton, New Zealand
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
/*
* SPegasos.java
* Copyright (C) 2009 University of Waikato, Hamilton, New Zealand
*
*/
package moa.classifiers.functions;
import moa.classifiers.AbstractClassifier;
import moa.core.Measurement;
import moa.core.StringUtils;
import moa.options.FloatOption;
import moa.options.MultiChoiceOption;
import weka.core.Instance;
import weka.core.Utils;
/**
* <!-- globalinfo-start --> Implements the stochastic variant of the Pegasos
* (Primal Estimated sub-GrAdient SOlver for SVM) method of Shalev-Shwartz et
* al. (2007). For more information, see<br/> <br/> S. Shalev-Shwartz, Y.
* Singer, N. Srebro: Pegasos: Primal Estimated sub-GrAdient SOlver for SVM. In:
* 24th International Conference on MachineLearning, 807-814, 2007.
* <p/>
* <!-- globalinfo-end -->
* *
<!-- technical-bibtex-start --> BibTeX:
* <pre>
* @inproceedings{Shalev-Shwartz2007,
* author = {S. Shalev-Shwartz and Y. Singer and N. Srebro},
* booktitle = {24th International Conference on MachineLearning},
* pages = {807-814},
* title = {Pegasos: Primal Estimated sub-GrAdient SOlver for SVM},
* year = {2007}
* }
* </pre>
* <p/>
* <!-- technical-bibtex-end -->
*
*/
public class SPegasos extends AbstractClassifier {
/**
* For serialization
*/
private static final long serialVersionUID = -3732968666673530290L;
@Override
public String getPurposeString() {
// Shown in the MOA GUI / CLI help for this learner.
return "Stochastic variant of the Pegasos (Primal Estimated sub-GrAdient SOlver for SVM) method of Shalev-Shwartz et al. (2007).";
}
/**
* The regularization parameter
*/
protected double m_lambda = 0.0001;
public FloatOption lambdaRegularizationOption = new FloatOption("lambdaRegularization",
'l', "Lambda regularization parameter .",
0.0001, 0.00, Integer.MAX_VALUE);
protected static final int HINGE = 0;
protected static final int LOGLOSS = 1;
/**
* The current loss function to minimize
*/
protected int m_loss = HINGE;
public MultiChoiceOption lossFunctionOption = new MultiChoiceOption(
"lossFunction", 'o', "The loss function to use.", new String[]{
"HINGE", "LOGLOSS"}, new String[]{
"Hinge loss (SVM)",
"Log loss (logistic regression)"}, 0);
/**
* Stores the weights (+ bias in the last element)
*/
protected double[] m_weights;
/**
* Holds the current iteration number
*/
protected double m_t;
/**
* Set the value of lambda to use
*
* @param lambda the value of lambda to use
*/
public void setLambda(double lambda) {
m_lambda = lambda;
}
/**
* Get the current value of lambda
*
* @return the current value of lambda
*/
public double getLambda() {
return m_lambda;
}
/**
* Set the loss function to use.
*
* @param function the loss function to use.
*/
public void setLossFunction(int function) {
m_loss = function;
}
/**
* Get the current loss function.
*
* @return the current loss function.
*/
public int getLossFunction() {
return m_loss;
}
/**
* Reset the classifier.
*/
public void reset() {
// m_t starts at 2 (not 1), presumably so the first step size
// 1/(lambda*m_t) and decay 1 - 1/m_t are well-behaved — TODO confirm
// against the Pegasos paper.
m_t = 2;
m_weights = null;
}
// Sparse dot product between an instance and the weight array, skipping the
// class attribute and missing sparse values. The bias (last array element) is
// excluded by the n2 = length - 1 bound.
protected static double dotProd(Instance inst1, double[] weights, int classIndex) {
double result = 0;
int n1 = inst1.numValues();
int n2 = weights.length - 1;
// Merge-style walk over the instance's sparse indices (p1) and the dense
// weight indices (p2); only matching indices contribute.
for (int p1 = 0, p2 = 0; p1 < n1 && p2 < n2;) {
int ind1 = inst1.index(p1);
int ind2 = p2;
if (ind1 == ind2) {
if (ind1 != classIndex && !inst1.isMissingSparse(p1)) {
result += inst1.valueSparse(p1) * weights[p2];
}
p1++;
p2++;
} else if (ind1 > ind2) {
p2++;
} else {
p1++;
}
}
return (result);
}
// Derivative (magnitude) of the current loss at margin z; the log-loss branch
// is split on the sign of z so exp() never overflows.
protected double dloss(double z) {
if (m_loss == HINGE) {
return (z < 1) ? 1 : 0;
}
// log loss
if (z < 0) {
return 1.0 / (Math.exp(z) + 1.0);
} else {
double t = Math.exp(-z);
return t / (t + 1);
}
}
@Override
public void resetLearningImpl() {
// Clear the model, then pull the user-configured option values in.
reset();
setLambda(this.lambdaRegularizationOption.getValue());
setLossFunction(this.lossFunctionOption.getChosenIndex());
}
/**
* Trains the classifier with the given instance.
*
* @param instance the new training instance to include in the model
*/
@Override
public void trainOnInstanceImpl(Instance instance) {
// Lazy init: one weight per attribute plus the bias in the last slot.
if (m_weights == null) {
m_weights = new double[instance.numAttributes() + 1];
}
if (!instance.classIsMissing()) {
// Pegasos step size eta_t = 1 / (lambda * t).
double learningRate = 1.0 / (m_lambda * m_t);
//double scale = 1.0 - learningRate * m_lambda;
double scale = 1.0 - 1.0 / m_t;
// Map class 0/1 to -1/+1; z is the signed margin including the bias.
double y = (instance.classValue() == 0) ? -1 : 1;
double wx = dotProd(instance, m_weights, instance.classIndex());
double z = y * (wx + m_weights[m_weights.length - 1]);
// Regularization decay applied to all (non-class) weights.
for (int j = 0; j < m_weights.length - 1; j++) {
if (j != instance.classIndex()) {
m_weights[j] *= scale;
}
}
// Subgradient step: hinge only updates inside the margin (z < 1);
// log-loss always has a non-zero gradient.
if (m_loss == LOGLOSS || (z < 1)) {
double loss = dloss(z);
int n1 = instance.numValues();
for (int p1 = 0; p1 < n1; p1++) {
int indS = instance.index(p1);
if (indS != instance.classIndex() && !instance.isMissingSparse(p1)) {
double m = learningRate * loss * (instance.valueSparse(p1) * y);
m_weights[indS] += m;
}
}
// update the bias
m_weights[m_weights.length - 1] += learningRate * loss * y;
}
// Optional projection step: rescale the weight vector so its L2 norm
// stays within the Pegasos ball of radius 1/sqrt(lambda).
double norm = 0;
for (int k = 0; k < m_weights.length - 1; k++) {
if (k != instance.classIndex()) {
norm += (m_weights[k] * m_weights[k]);
}
}
double scale2 = Math.min(1.0, (1.0 / (m_lambda * norm)));
if (scale2 < 1.0) {
scale2 = Math.sqrt(scale2);
for (int j = 0; j < m_weights.length - 1; j++) {
if (j != instance.classIndex()) {
m_weights[j] *= scale2;
}
}
}
m_t++;
}
}
/**
 * Calculates the class membership probabilities for the given test
 * instance.
 *
 * @param inst the instance to be classified
 * @return predicted class probability distribution (hinge loss yields hard
 *         0/1 votes rather than calibrated probabilities)
 */
@Override
public double[] getVotesForInstance(Instance inst) {
    if (m_weights == null) {
        // FIX: an untrained model previously returned an array sized by the
        // number of attributes (numAttributes + 1); votes must be indexed by
        // class, matching the trained path below.
        return new double[inst.numClasses()];
    }
    double[] result = new double[2];
    double wx = dotProd(inst, m_weights, inst.classIndex());// * m_wScale;
    // Signed margin; the bias is the last element of the weight array.
    double z = (wx + m_weights[m_weights.length - 1]);
    //System.out.print("" + z + ": ");
    // System.out.println(1.0 / (1.0 + Math.exp(-z)));
    if (z <= 0) {
        // z = 0;
        if (m_loss == LOGLOSS) {
            // Logistic case: sigmoid of the margin gives a probability.
            result[0] = 1.0 / (1.0 + Math.exp(z));
            result[1] = 1.0 - result[0];
        } else {
            result[0] = 1;
        }
    } else {
        if (m_loss == LOGLOSS) {
            result[1] = 1.0 / (1.0 + Math.exp(-z));
            result[0] = 1.0 - result[1];
        } else {
            result[1] = 1;
        }
    }
    return result;
}
@Override
public void getModelDescription(StringBuilder result, int indent) {
// Delegates to toString() so both description paths stay consistent.
StringUtils.appendIndented(result, indent, toString());
StringUtils.appendNewline(result);
}
/**
* Prints out the classifier.
*
* @return a description of the classifier as a string
*/
@Override
public String toString() {
if (m_weights == null) {
return "SPegasos: No model built yet.\n";
}
StringBuffer buff = new StringBuffer();
buff.append("Loss function: ");
if (m_loss == HINGE) {
buff.append("Hinge loss (SVM)\n\n");
} else {
buff.append("Log loss (logistic regression)\n\n");
}
// One line per weight (bias excluded by the length - 1 bound); attribute
// names are unavailable here (the commented m_data references are
// leftovers from the Weka original).
int printed = 0;
for (int i = 0; i < m_weights.length - 1; i++) {
// if (i != m_data.classIndex()) {
if (printed > 0) {
buff.append(" + ");
} else {
buff.append(" ");
}
buff.append(Utils.doubleToString(m_weights[i], 12, 4) + " "
//+ m_data.attribute(i).name()
+ "\n");
printed++;
}
//}
// Append the bias (last weight element) with an explicit sign.
if (m_weights[m_weights.length - 1] > 0) {
buff.append(" + " + Utils.doubleToString(m_weights[m_weights.length - 1], 12, 4));
} else {
buff.append(" - " + Utils.doubleToString(-m_weights[m_weights.length - 1], 12, 4));
}
return buff.toString();
}
@Override
protected Measurement[] getModelMeasurementsImpl() {
// No model-specific measurements are reported.
return null;
}
@Override
public boolean isRandomizable() {
return false;
}
}
| Java |
/*
* MajorityClass.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.functions;
import moa.classifiers.AbstractClassifier;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.StringUtils;
import weka.core.Instance;
/**
* Majority class learner. This is the simplest classifier.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class MajorityClass extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        // FIX: garbled wording — was "...most frequently the in the training data."
        return "Majority class classifier: always predicts the class that has been observed most frequently in the training data.";
    }

    /** Total observed weight per class label. */
    protected DoubleVector observedClassDistribution;

    @Override
    public void resetLearningImpl() {
        this.observedClassDistribution = new DoubleVector();
    }

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        // Accumulate the instance weight under its true class label.
        this.observedClassDistribution.addToValue((int) inst.classValue(), inst.weight());
    }

    @Override // FIX: annotation was missing on this overriding method
    public double[] getVotesForInstance(Instance i) {
        // Votes are the raw observed class weights; callers normalize if needed.
        return this.observedClassDistribution.getArrayCopy();
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        // No model-specific measurements are reported.
        return null;
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // Report the predicted majority label followed by the per-class weights.
        StringUtils.appendIndented(out, indent, "Predicted majority ");
        out.append(getClassNameString());
        out.append(" = ");
        out.append(getClassLabelString(this.observedClassDistribution.maxIndex()));
        StringUtils.appendNewline(out);
        for (int i = 0; i < this.observedClassDistribution.numValues(); i++) {
            StringUtils.appendIndented(out, indent, "Observed weight of ");
            out.append(getClassLabelString(i));
            out.append(": ");
            out.append(this.observedClassDistribution.getValue(i));
            StringUtils.appendNewline(out);
        }
    }

    @Override // FIX: annotation was missing on this overriding method
    public boolean isRandomizable() {
        return false;
    }
}
| Java |
/*
* Perceptron.java
* Copyright (C) 2009 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.functions;
import moa.classifiers.AbstractClassifier;
import moa.core.Measurement;
import moa.options.FloatOption;
import weka.core.Instance;
/**
* Single perceptron classifier.
*
* <p>Performs classic perceptron multiclass learning incrementally.</p>
*
* <p>Parameters:</p> <ul> <li>-r : Learning ratio of the classifier</li> </ul>
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public class Perceptron extends AbstractClassifier {
private static final long serialVersionUID = 221L;
@Override
public String getPurposeString() {
return "Perceptron classifier: Single perceptron classifier.";
}
public FloatOption learningRatioOption = new FloatOption("learningRatio", 'l', "Learning ratio", 1);
// weightAttribute[c][j]: weight of attribute j for class c; the last slot
// (index numAttributes - 1) holds the bias. This layout assumes the class
// attribute is the last attribute — TODO confirm against the stream header.
protected double[][] weightAttribute;
// True until the first training instance initializes the weights.
protected boolean reset;
protected int numberAttributes;
protected int numberClasses;
protected int numberDetections;
@Override
public void resetLearningImpl() {
// Defer (re)initialization until the next training instance arrives.
this.reset = true;
}
@Override
public void trainOnInstanceImpl(Instance inst) {
//Init Perceptron
// Weights start as small random values in [-0.1, 0.1).
if (this.reset == true) {
this.reset = false;
this.numberAttributes = inst.numAttributes();
this.numberClasses = inst.numClasses();
this.weightAttribute = new double[inst.numClasses()][inst.numAttributes()];
for (int i = 0; i < inst.numClasses(); i++) {
for (int j = 0; j < inst.numAttributes(); j++) {
weightAttribute[i][j] = 0.2 * this.classifierRandom.nextDouble()- 0.1;
}
}
}
// One-vs-rest gradient step: for each class, move the sigmoid output
// towards 1 for the true class and 0 for the others. The delta term is
// the derivative of squared error through the sigmoid.
double[] preds = new double[inst.numClasses()];
for (int i = 0; i < inst.numClasses(); i++) {
preds[i] = prediction(inst, i);
}
double learningRatio = learningRatioOption.getValue();
int actualClass = (int) inst.classValue();
for (int i = 0; i < inst.numClasses(); i++) {
double actual = (i == actualClass) ? 1.0 : 0.0;
double delta = (actual - preds[i]) * preds[i] * (1 - preds[i]);
for (int j = 0; j < inst.numAttributes() - 1; j++) {
this.weightAttribute[i][j] += learningRatio * delta * inst.value(j);
}
// Bias update (input implicitly 1), stored in the last weight slot.
this.weightAttribute[i][inst.numAttributes() - 1] += learningRatio * delta;
}
}
public void setWeights(double[][] w) {
//Perceptron Hoeffding Tree
this.weightAttribute = w;
}
public double[][] getWeights() {
//Perceptron Hoeffding Tree
return this.weightAttribute;
}
public int getNumberAttributes() {
//Perceptron Hoeffding Tree
return this.numberAttributes;
}
public int getNumberClasses() {
//Perceptron Hoeffding Tree
return this.numberClasses;
}
// Sigmoid of the linear score for the given class (bias in the last slot).
public double prediction(Instance inst, int classVal) {
double sum = 0.0;
for (int i = 0; i < inst.numAttributes() - 1; i++) {
sum += weightAttribute[classVal][i] * inst.value(i);
}
sum += weightAttribute[classVal][inst.numAttributes() - 1];
return 1.0 / (1.0 + Math.exp(-sum));
}
@Override
public double[] getVotesForInstance(Instance inst) {
// Before the first training instance (reset == true) return all-zero votes.
double[] votes = new double[inst.numClasses()];
if (this.reset == false) {
for (int i = 0; i < votes.length; i++) {
votes[i] = prediction(inst, i);
}
try {
weka.core.Utils.normalize(votes);
} catch (Exception e) {
// ignore all zero votes error
}
}
return votes;
}
@Override
protected Measurement[] getModelMeasurementsImpl() {
// No model-specific measurements are reported.
return null;
}
@Override
public void getModelDescription(StringBuilder out, int indent) {
// Intentionally empty: no compact textual model description is provided.
}
@Override
public boolean isRandomizable() {
// Weight initialization uses classifierRandom.
return true;
}
}
| Java |
/*
* SGDMultiClass.java
* Copyright (C) 2009 University of Waikato, Hamilton, New Zealand
* @author Eibe Frank (eibe{[at]}cs{[dot]}waikato{[dot]}ac{[dot]}nz)
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
/*
* SGDMultiClass.java
* Copyright (C) 2009 University of Waikato, Hamilton, New Zealand
*
*/
package moa.classifiers.functions;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Regressor;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.StringUtils;
import moa.options.FloatOption;
import moa.options.MultiChoiceOption;
import weka.core.Instance;
import weka.core.Utils;
/**
<!-- globalinfo-start -->
* Implements stochastic gradient descent for learning various linear models (binary class SVM, binary class logistic regression and linear regression).
* <p/>
<!-- globalinfo-end -->
*
*/
public class SGDMultiClass extends AbstractClassifier implements Regressor{

    /** For serialization */
    private static final long serialVersionUID = -3732968666673530290L;

    @Override
    public String getPurposeString() {
        // Fixed typo: was "AStochastic gradient descent ...".
        return "Stochastic gradient descent for learning various linear models (binary class SVM, binary class logistic regression and linear regression).";
    }

    /** The regularization parameter */
    protected double m_lambda = 0.0001;

    public FloatOption lambdaRegularizationOption = new FloatOption("lambdaRegularization",
            'l', "Lambda regularization parameter .",
            0.0001, 0.00, Integer.MAX_VALUE);

    /** The learning rate */
    // NOTE(review): the option default below is 0.0001 while this field defaults
    // to 0.01; the option value always wins via resetLearningImpl — confirm the
    // intended default before changing either.
    protected double m_learningRate = 0.01;

    public FloatOption learningRateOption = new FloatOption("learningRate",
            'r', "Learning rate parameter.",
            0.0001, 0.00, Integer.MAX_VALUE);

    /** Stores the weights (+ bias in the last element) */
    protected DoubleVector[] m_weights;

    /** Per-class bias terms, parallel to m_weights. */
    protected double[] m_bias;

    /** Holds the current iteration number */
    protected double m_t;

    /** The number of training instances */
    // NOTE(review): never assigned in this class, so it stays 0 and the
    // m_t-based decay branch in trainOnInstanceImpl is always taken — verify
    // whether a subclass or caller is expected to set it.
    protected double m_numInstances;

    protected static final int HINGE = 0;

    protected static final int LOGLOSS = 1;

    protected static final int SQUAREDLOSS = 2;

    /** The current loss function to minimize */
    protected int m_loss = HINGE;

    public MultiChoiceOption lossFunctionOption = new MultiChoiceOption(
            "lossFunction", 'o', "The loss function to use.", new String[]{
                "HINGE", "LOGLOSS", "SQUAREDLOSS"}, new String[]{
                "Hinge loss (SVM)",
                "Log loss (logistic regression)",
                "Squared loss (regression)"}, 0);

    /**
     * Set the value of lambda to use
     *
     * @param lambda the value of lambda to use
     */
    public void setLambda(double lambda) {
        m_lambda = lambda;
    }

    /**
     * Get the current value of lambda
     *
     * @return the current value of lambda
     */
    public double getLambda() {
        return m_lambda;
    }

    /**
     * Set the loss function to use.
     *
     * @param function the loss function to use.
     */
    public void setLossFunction(int function) {
        m_loss = function;
    }

    /**
     * Get the current loss function.
     *
     * @return the current loss function.
     */
    public int getLossFunction() {
        return m_loss;
    }

    /**
     * Set the learning rate.
     *
     * @param lr the learning rate to use.
     */
    public void setLearningRate(double lr) {
        m_learningRate = lr;
    }

    /**
     * Get the learning rate.
     *
     * @return the learning rate
     */
    public double getLearningRate() {
        return m_learningRate;
    }

    /**
     * Reset the classifier.
     */
    public void reset() {
        m_t = 1;
        m_weights = null;
        m_bias = null; //0.0;
    }

    /**
     * Derivative of the selected loss with respect to the margin/residual z.
     *
     * @param z the margin (classification) or residual (regression)
     * @return the loss derivative used as the update magnitude
     */
    protected double dloss(double z) {
        if (m_loss == HINGE) {
            return (z < 1) ? 1 : 0;
        }
        if (m_loss == LOGLOSS) {
            // log loss; the two branches are the same function written to be
            // numerically stable for large |z|.
            if (z < 0) {
                return 1.0 / (Math.exp(z) + 1.0);
            } else {
                double t = Math.exp(-z);
                return t / (t + 1);
            }
        }
        // squared loss
        return z;
    }

    /**
     * Dot product between a (possibly sparse) instance and a dense weight
     * vector, skipping the class attribute and missing values.
     *
     * @param inst1 the instance (sparse iteration via index/valueSparse)
     * @param weights the dense weight vector
     * @param classIndex index of the class attribute to skip
     * @return the dot product
     */
    protected static double dotProd(Instance inst1, DoubleVector weights, int classIndex) {
        double result = 0;

        int n1 = inst1.numValues();
        int n2 = weights.numValues();

        // Merge-style walk: p1 runs over the instance's stored values, p2 over
        // the dense weight positions.
        for (int p1 = 0, p2 = 0; p1 < n1 && p2 < n2;) {
            int ind1 = inst1.index(p1);
            int ind2 = p2;
            if (ind1 == ind2) {
                if (ind1 != classIndex && !inst1.isMissingSparse(p1)) {
                    result += inst1.valueSparse(p1) * weights.getValue(p2);
                }
                p1++;
                p2++;
            } else if (ind1 > ind2) {
                p2++;
            } else {
                p1++;
            }
        }
        return (result);
    }

    @Override
    public void resetLearningImpl() {
        reset();
        setLambda(this.lambdaRegularizationOption.getValue());
        setLearningRate(this.learningRateOption.getValue());
        setLossFunction(this.lossFunctionOption.getChosenIndex());
    }

    /**
     * Trains the classifier with the given instance.
     *
     * @param instance the new training instance to include in the model
     */
    @Override
    public void trainOnInstanceImpl(Instance instance) {
        if (m_weights == null) {
            // Lazy initialisation: one weight vector per class for nominal
            // targets, a single vector for regression.
            int length;
            if (instance.classAttribute().isNominal()) {
                length = instance.numClasses();
            } else {
                length = 1;
            }
            m_weights = new DoubleVector[length];
            m_bias = new double[length];
            for (int i = 0; i < m_weights.length; i++){
                m_weights[i] = new DoubleVector();
                m_bias[i] = 0.0;
            }
        }
        // One-vs-rest: run an SGD update for every class label.
        for (int i = 0; i < m_weights.length; i++){
            this.trainOnInstanceImpl(instance, i);
        }
        m_t++;
    }

    /**
     * Performs one SGD step for a single class label (one-vs-rest), applying
     * weight decay followed by a gradient update when the loss is non-zero.
     *
     * @param instance the training instance
     * @param classLabel the class treated as the positive class
     */
    public void trainOnInstanceImpl(Instance instance, int classLabel) {
        if (!instance.classIsMissing()) {

            double wx = dotProd(instance, m_weights[classLabel], instance.classIndex());

            double y;
            double z;
            if (instance.classAttribute().isNominal()) {
                // y in {-1, +1}; z is the margin.
                y = (instance.classValue() != classLabel) ? -1 : 1;
                z = y * (wx + m_bias[classLabel]);
            } else {
                // Regression: z is the residual and y acts as a unit multiplier.
                y = instance.classValue();
                z = y - (wx + m_bias[classLabel]);
                y = 1;
            }

            // Compute multiplier for weight decay
            double multiplier = 1.0;
            if (m_numInstances == 0) {
                multiplier = 1.0 - (m_learningRate * m_lambda) / m_t;
            } else {
                multiplier = 1.0 - (m_learningRate * m_lambda) / m_numInstances;
            }
            for (int i = 0; i < m_weights[classLabel].numValues(); i++) {
                m_weights[classLabel].setValue(i,m_weights[classLabel].getValue (i) * multiplier);
            }

            // Only need to do the following if the loss is non-zero
            if (m_loss != HINGE || (z < 1)) {

                // Compute Factor for updates
                double factor = m_learningRate * y * dloss(z);

                // Update coefficients for attributes
                int n1 = instance.numValues();

                for (int p1 = 0; p1 < n1; p1++) {
                    int indS = instance.index(p1);
                    if (indS != instance.classIndex() && !instance.isMissingSparse(p1)) {
                        m_weights[classLabel].addToValue(indS, factor * instance.valueSparse(p1));
                    }
                }

                // update the bias
                m_bias[classLabel] += factor;
            }
        }
    }

    /**
     * Calculates the class membership probabilities for the given test
     * instance.
     *
     * @param inst the instance to be classified
     * @return predicted class probability distribution
     */
    @Override
    public double[] getVotesForInstance(Instance inst) {

        if (m_weights == null) {
            return new double[inst.numClasses()];
        }
        double[] result = (inst.classAttribute().isNominal())
                ? new double[inst.numClasses()]
                : new double[1];

        if (inst.classAttribute().isNumeric()) {
            // Regression: return the raw linear prediction.
            double wx = dotProd(inst, m_weights[0], inst.classIndex());// * m_wScale;
            double z = (wx + m_bias[0]);
            result[0] = z;
            return result;
        }

        for (int i = 0; i < m_weights.length; i++){
            double wx = dotProd(inst, m_weights[i], inst.classIndex());// * m_wScale;
            double z = (wx + m_bias[i]);
            if (z <= 0) {
                //  z = 0;
                if (m_loss == LOGLOSS) {
                    //result[0] = 1.0 / (1.0 + Math.exp(z));
                    //result[1] = 1.0 - result[0];
                    result[i] = 1.0 - 1.0 / (1.0 + Math.exp(z));
                } else {
                    //result[0] = 1;
                    // Hinge/squared: hard 0/1 vote based on the sign of z.
                    result[i] = 0;
                }
            } else {
                if (m_loss == LOGLOSS) {
                    //result[1] = 1.0 / (1.0 + Math.exp(-z));
                    //result[0] = 1.0 - result[1];
                    result[i] = 1.0 / (1.0 + Math.exp(-z));
                } else {
                    //result[1] = 1;
                    result[i] = 1;
                }
            }
        }
        return result;
    }

    @Override
    public void getModelDescription(StringBuilder result, int indent) {
        StringUtils.appendIndented(result, indent, toString());
        StringUtils.appendNewline(result);
    }

    /**
     * Prints out the classifier.
     *
     * @return a description of the classifier as a string
     */
    public String toString() {
        if (m_weights == null) {
            return "SGD: No model built yet.\n";
        }
        StringBuffer buff = new StringBuffer();
        buff.append("Loss function: ");
        if (m_loss == HINGE) {
            buff.append("Hinge loss (SVM)\n\n");
        } else if (m_loss == LOGLOSS) {
            buff.append("Log loss (logistic regression)\n\n");
        } else {
            buff.append("Squared loss (linear regression)\n\n");
        }
        // buff.append(m_data.classAttribute().name() + " = \n\n");
        int printed = 0;

        // Only the first class's weight vector is printed.
        for (int i = 0; i < m_weights[0].numValues(); i++) {
            // if (i != m_data.classIndex()) {
            if (printed > 0) {
                buff.append(" + ");
            } else {
                buff.append("   ");
            }

            buff.append(Utils.doubleToString(m_weights[0].getValue(i), 12, 4) + " "
                    // + m_data.attribute(i).name()
                    + "\n");

            printed++;
            //}
        }

        if (m_bias[0] > 0) {
            buff.append(" + " + Utils.doubleToString(m_bias[0], 12, 4));
        } else {
            buff.append(" - " + Utils.doubleToString(-m_bias[0], 12, 4));
        }

        return buff.toString();
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        // No per-model measurements are exposed.
        return null;
    }

    @Override
    public boolean isRandomizable() {
        return false;
    }
}
| Java |
/*
* NoChange.java
* Copyright (C) 2013 University of Waikato, Hamilton, New Zealand
* @author Bernhard Pfahringer (bernhard@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.functions;
import moa.classifiers.AbstractClassifier;
import moa.core.Measurement;
import weka.core.Instance;
/**
* NoChange class classifier. It always predicts the last class seen.
*
* @author Bernhard Pfahringer (bernhard@cs.waikato.ac.nz)
* @version $Revision: 1 $
*/
public class NoChange extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Weather Forecast class classifier: always predicts the last class seen.";
    }

    /** Class value of the most recent training instance; 0 before any training. */
    protected double lastSeenClass;

    @Override
    public void resetLearningImpl() {
        this.lastSeenClass = 0;
    }

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        // Remember only the latest class label; nothing else is learned.
        this.lastSeenClass = inst.classValue();
    }

    /**
     * Votes 1.0 for the last class seen during training and 0.0 elsewhere.
     *
     * @param i the instance to classify
     * @return the vote array
     */
    @Override // was missing: this overrides Classifier.getVotesForInstance
    public double[] getVotesForInstance(Instance i) {
        double[] votes = new double[i.numClasses()];
        votes[(int) lastSeenClass] = 1.0;
        return votes;
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        // No per-model measurements are exposed.
        return null;
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
    }

    @Override // was missing: this overrides Classifier.isRandomizable
    public boolean isRandomizable() {
        return false;
    }
}
| Java |
/*
* TemporallyAugmentedClassifier.java
* Copyright (C) 2013 University of Waikato, Hamilton, New Zealand
* @author Bernhard Pfahringer (bernhard@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import moa.core.Measurement;
import moa.options.ClassOption;
import moa.options.FlagOption;
import moa.options.IntOption;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;
/**
* Include labels of previous instances into the training data
*
* <p>This enables a classifier to exploit potentially present auto-correlation
* </p>
*
* <p>Parameters:</p> <ul> <li>-l : Classifier to train</li> <li>-n : The number
* of old labels to include</li> </ul>
*
* @author Bernhard Pfahringer (bernhard@cs.waikato.ac.nz)
* @version $Revision: 1 $
*/
public class TemporallyAugmentedClassifier extends AbstractClassifier {

    @Override
    public String getPurposeString() {
        return "Add some old labels to every instance";
    }

    private static final long serialVersionUID = 1L;

    public ClassOption baseLearnerOption = new ClassOption("baseLearner", 'l',
            "Classifier to train.", Classifier.class, "trees.HoeffdingTree");

    public IntOption numOldLabelsOption = new IntOption("numOldLabels", 'n',
            "The number of old labels to add to each example.", 1, 0, Integer.MAX_VALUE);

    /** The wrapped classifier trained on the label-augmented instances. */
    protected Classifier baseLearner;

    /** Sliding window of the most recent label values, oldest first. */
    protected double[] oldLabels;

    /** Header describing the augmented instance format; built lazily. */
    protected Instances header;

    public FlagOption labelDelayOption = new FlagOption("labelDelay", 'd',
            "Labels arrive with Delay. Use predictions instead of true Labels.");

    @Override
    public void resetLearningImpl() {
        this.baseLearner = (Classifier) getPreparedClassOption(this.baseLearnerOption);
        this.oldLabels = new double[this.numOldLabelsOption.getValue()];
        this.header = null;
        baseLearner.resetLearning();
    }

    @Override
    public void trainOnInstanceImpl(Instance instance) {
        this.baseLearner.trainOnInstance(extendWithOldLabels(instance));
        if (this.labelDelayOption.isSet() == false) {
            // Use true old Labels to add attributes to instances
            addOldLabel(instance.classValue());
        }
    }

    /**
     * Pushes a new label into the window, shifting older labels left and
     * dropping the oldest. No-op when the window size is 0.
     *
     * @param newPrediction the label (or predicted label) to record
     */
    public void addOldLabel(double newPrediction) {
        int numLabels = this.oldLabels.length;
        if (numLabels > 0) {
            for (int i = 1; i < numLabels; i++) {
                this.oldLabels[i - 1] = this.oldLabels[i];
            }
            this.oldLabels[ numLabels - 1] = newPrediction;
        }
    }

    /**
     * Builds the augmented header: numOldLabels copies of the class attribute
     * prepended before the original attributes, with the class index shifted
     * accordingly.
     *
     * @param dataset the original stream header
     */
    public void initHeader(Instances dataset) {
        int numLabels = this.numOldLabelsOption.getValue();
        Attribute target = dataset.classAttribute();

        List<String> possibleValues = new ArrayList<String>();
        int n = target.numValues();
        for (int i = 0; i < n; i++) {
            possibleValues.add(target.value(i));
        }

        ArrayList<Attribute> attrs = new ArrayList<Attribute>(numLabels + dataset.numAttributes());
        for (int i = 0; i < numLabels; i++) {
            attrs.add(new Attribute(target.name() + "_" + i, possibleValues));
        }
        for (int i = 0; i < dataset.numAttributes(); i++) {
            attrs.add((Attribute) dataset.attribute(i).copy());
        }
        this.header = new Instances("extended_" + dataset.relationName(), attrs, 0);
        this.header.setClassIndex(numLabels + dataset.classIndex());
    }

    /**
     * Returns a copy of the instance with the old-label window prepended as
     * extra attribute values; returns the instance unchanged when the window
     * size is 0.
     *
     * @param instance the original instance
     * @return the augmented instance, bound to the extended header
     */
    public Instance extendWithOldLabels(Instance instance) {
        if (this.header == null) {
            initHeader(instance.dataset());
        }
        int numLabels = this.oldLabels.length;
        if (numLabels == 0) {
            return instance;
        }
        double[] x = instance.toDoubleArray();
        // copyOfRange with 'to' beyond oldLabels.length zero-pads the tail to
        // the full extended width; the original values are copied in next.
        double[] x2 = Arrays.copyOfRange(this.oldLabels, 0, numLabels + x.length);
        System.arraycopy(x, 0, x2, numLabels, x.length);
        Instance extendedInstance = new DenseInstance(instance.weight(), x2);
        extendedInstance.setDataset(this.header);
        //System.out.println( extendedInstance);
        return extendedInstance;
    }

    @Override
    public double[] getVotesForInstance(Instance instance) {
        double[] prediction = this.baseLearner.getVotesForInstance(extendWithOldLabels(instance));
        if (this.labelDelayOption.isSet() == true) {
            // Use predicted Labels to add attributes to instances
            // NOTE(review): prediction-time call mutates the label window, so
            // evaluation order affects subsequent predictions in delay mode.
            addOldLabel(Utils.maxIndex(prediction));
        }
        return prediction;
    }

    @Override
    public boolean isRandomizable() {
        return false; // ??? this.baseLearner.isRandomizable;
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        // Forward the base learner's measurements unchanged.
        List<Measurement> measurementList = new LinkedList<Measurement>();
        Measurement[] modelMeasurements = ((AbstractClassifier) this.baseLearner).getModelMeasurements();
        if (modelMeasurements != null) {
            for (Measurement measurement : modelMeasurements) {
                measurementList.add(measurement);
            }
        }
        return measurementList.toArray(new Measurement[measurementList.size()]);
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // TODO Auto-generated method stub
    }

    public String toString() {
        return "TemporallyAugmentedClassifier using " + this.numOldLabelsOption.getValue() + " labels\n" + this.baseLearner;
    }
}
| Java |
/*
* OzaBag.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import weka.core.Instance;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.MiscUtils;
import moa.options.ClassOption;
import moa.options.IntOption;
/**
* Incremental on-line bagging of Oza and Russell.
*
* <p>Oza and Russell developed online versions of bagging and boosting for
* Data Streams. They show how the process of sampling bootstrap replicates
* from training data can be simulated in a data stream context. They observe
* that the probability that any individual example will be chosen for a
* replicate tends to a Poisson(1) distribution.</p>
*
* <p>[OR] N. Oza and S. Russell. Online bagging and boosting.
* In Artificial Intelligence and Statistics 2001, pages 105–112.
* Morgan Kaufmann, 2001.</p>
*
* <p>Parameters:</p> <ul>
* <li>-l : Classifier to train</li>
* <li>-s : The number of models in the bag</li> </ul>
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class OzaBag extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Incremental on-line bagging of Oza and Russell.";
    }

    public ClassOption baseLearnerOption = new ClassOption("baseLearner", 'l',
            "Classifier to train.", Classifier.class, "trees.HoeffdingTree");

    public IntOption ensembleSizeOption = new IntOption("ensembleSize", 's',
            "The number of models in the bag.", 10, 1, Integer.MAX_VALUE);

    /** The bagged component classifiers. */
    protected Classifier[] ensemble;

    @Override
    public void resetLearningImpl() {
        // Build the bag from fresh copies of a single reset prototype.
        this.ensemble = new Classifier[this.ensembleSizeOption.getValue()];
        Classifier prototype = (Classifier) getPreparedClassOption(this.baseLearnerOption);
        prototype.resetLearning();
        for (int member = 0; member < this.ensemble.length; member++) {
            this.ensemble[member] = prototype.copy();
        }
    }

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        // Each member sees the instance Poisson(1)-many times, simulated by
        // reweighting a copy instead of repeating the update.
        for (Classifier member : this.ensemble) {
            int repeats = MiscUtils.poisson(1.0, this.classifierRandom);
            if (repeats > 0) {
                Instance reweighted = (Instance) inst.copy();
                reweighted.setWeight(inst.weight() * repeats);
                member.trainOnInstance(reweighted);
            }
        }
    }

    @Override
    public double[] getVotesForInstance(Instance inst) {
        // Sum the normalized vote vectors of all members that produced any vote.
        DoubleVector tally = new DoubleVector();
        for (Classifier member : this.ensemble) {
            DoubleVector memberVote = new DoubleVector(member.getVotesForInstance(inst));
            if (memberVote.sumOfValues() > 0.0) {
                memberVote.normalize();
                tally.addValues(memberVote);
            }
        }
        return tally.getArrayRef();
    }

    @Override
    public boolean isRandomizable() {
        // Poisson sampling uses classifierRandom, so the seed matters.
        return true;
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // TODO Auto-generated method stub
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        double size = this.ensemble != null ? this.ensemble.length : 0;
        return new Measurement[]{new Measurement("ensemble size", size)};
    }

    @Override
    public Classifier[] getSubClassifiers() {
        return this.ensemble.clone();
    }
}
| Java |
/*
* ADACC.java
*
* @author Ghazal Jaber (ghazal.jaber@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package moa.classifiers.meta;
import moa.classifiers.Classifier;
import moa.core.Measurement;
import moa.options.FloatOption;
import moa.options.IntOption;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;
import java.util.Arrays;
import java.util.Collections;
/**
* Anticipative and Dynamic Adaptation to Concept Changes.
* Ensemble method for data streams that adapts to concept changes
* and deals with concept recurrence.
*
* Reference: JABER, G., CORNUEJOLS, A., and TARROUX, P. A New On-Line Learning Method
* for Coping with Recurring Concepts: The ADACC System. In : Neural Information
* Processing. Springer Berlin Heidelberg, 2013. p. 595-604.
*
* @author Ghazal Jaber (ghazal.jaber@gmail.com)
*
*/
public class ADACC extends DACC {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Anticipative and Dynamic Adaptation to Concept Changes for data streams.";
    }

    /**
     * Evaluation window for the stability index computation
     */
    public IntOption tauSizeOption = new IntOption("tau", 't',
            "The size of the evaluation window for the meta-learning.", 100, 1, 10000);

    /**
     * Threshold for the stability index
     */
    public FloatOption stabIndexSizeOption = new FloatOption("StabThr", 'z',
            "The threshold for stability", 0.8, 0, 1);

    /**
     * Threshold for concept equivalence
     */
    public FloatOption equivIndexSizeOption = new FloatOption("CeThr", 'q',
            "The threshold for concept equivalence", 0.7, 0, 1);

    /**
     * Size of the evaluation window to compute the stability index
     */
    protected int tau_size = 0;

    /**
     * Last chunk of data of size (tau_size) to compute the stability index
     */
    protected Instances recentChunk;

    /**
     * Threshold values for the stability index and concept equivalence
     */
    protected double theta_stab, theta_diff;

    /**
     * Current stability index
     */
    protected double index;

    /**
     * Maximum number of snapshots (copies of classifiers kept in case of recurrence)
     */
    protected final static int MAXPERMANENT = 100;

    /**
     * Number of added snapshots
     */
    protected int addedPermanent = 0;

    @Override
    protected void initVariables(){
        this.tau_size = this.tauSizeOption.getValue();
        this.theta_stab = this.stabIndexSizeOption.getValue();
        this.theta_diff = this.equivIndexSizeOption.getValue();
        this.recentChunk = null;
        // The ensemble array holds the adaptive members first, then up to
        // MAXPERMANENT snapshot slots at the end.
        int ensembleSize = (int)this.memberCountOption.getValue() + MAXPERMANENT;
        this.ensemble = new Classifier[ensembleSize];
        this.ensembleAges = new double[ensembleSize];
        this.ensembleWindows = new int[ensembleSize][(int)this.evaluationSizeOption.getValue()];
    }

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        if (recentChunk == null)
            recentChunk = new Instances(this.getModelContext());
        // Fill the window first, then overwrite it ring-buffer style using
        // nbInstances % tau_size as the write position.
        if (recentChunk.size() < this.tau_size)
            recentChunk.add(inst);
        else
            recentChunk.set(this.nbInstances % this.tau_size,inst);
        trainAndClassify(inst);
        // Consider taking a snapshot once per full window.
        if ((this.nbInstances % this.tau_size)==0)
            takeSnapshot();
    }

    /**
     * If the environment is stable enough, take a snapshot
     * (a copy) of the best adaptive classifier and keep it
     * for future use, in case of concept recurrence
     */
    private void takeSnapshot(){
        this.index = computeStabilityIndex();
        // NOTE: the outer if has no braces; the else below pairs with the
        // inner "if (addedPermanent == 0)" (dangling-else), which is the
        // intended structure here.
        if (this.index >= this.theta_stab)
            if (addedPermanent == 0){
                this.ensemble[this.ensemble.length-MAXPERMANENT+addedPermanent] = getBestAdaptiveClassifier().copy();
                addedPermanent++;
            }
            else{
                Classifier candidate = getBestAdaptiveClassifier().copy();
                boolean duplicate = false;
                // Compare the candidate against every stored snapshot; a high
                // kappa agreement on the recent chunk means "same concept".
                for (int j=0;j<Math.min(MAXPERMANENT,addedPermanent);j++){
                    Classifier lastSnapshot=this.ensemble[this.ensemble.length-MAXPERMANENT+j];
                    int[][] votes=new int[2][tau_size];
                    for (int k=0;k<tau_size;k++){
                        votes[0][k]=Utils.maxIndex(candidate.getVotesForInstance(recentChunk.get(k)));
                        votes[1][k]=Utils.maxIndex(lastSnapshot.getVotesForInstance(recentChunk.get(k)));
                    }
                    double kappa=computeKappa(votes[0],votes[1]);
                    if (kappa>=this.theta_diff){
                        duplicate = true; break;
                    }
                }
                if (!duplicate){
                    // Snapshot slots are reused cyclically once full.
                    this.ensemble[this.ensemble.length-MAXPERMANENT+(addedPermanent%MAXPERMANENT)]=candidate;
                    addedPermanent++;
                }
            }
    }

    /**
     * Returns the kappa statistics,
     * a statistical measure of agreement in the predictions
     * of 2 classifiers. Used as a measure of diversity of predictive
     * models: the higher the kappa value, the smaller the diversity
     * @param y1 the predictions of classifier A
     * @param y2 the predictions of classifier B
     * @return the kappa measure
     */
    private double computeKappa(int[] y1,int[] y2){
        int m=y1.length;
        double theta1=0;
        double counts[][]=new double[2][this.modelContext.numClasses()];
        for (int i=0;i<m;i++){
            if (y1[i]==y2[i])
                theta1=theta1+1;
            counts[0][y1[i]]=counts[0][y1[i]]+1;
            counts[1][y2[i]]=counts[1][y2[i]]+1;
        }
        // theta1: observed agreement rate; theta2: agreement expected by chance.
        theta1=theta1/m;
        double theta2=0;
        for(int i=0;i<this.modelContext.numClasses();i++)
            theta2+=counts[0][i]/m*counts[1][i]/m;
        // Guard the 0/0 case when both classifiers agree perfectly on one class.
        if (theta1==theta2 && theta2==1)
            return 1;
        return (theta1-theta2)/(1-theta2);
    }

    /**
     * Returns the stability index of the adaptive ensemble
     * of classifiers. The ensemble is considered stable here
     * if its diversity level and error rates are low.
     * @return the stability measure value
     */
    private double computeStabilityIndex(){
        // m: half of the adaptive (non-snapshot) members, taken from getHalf(true).
        int m = (int)Math.floor((this.ensemble.length-MAXPERMANENT)/2);
        int[][] votes=new int[m][tau_size];
        double errors=0;
        int count=0;
        Pair[] arr = getHalf(true);
        for (int i=0;i<m;i++){
            for (int j=0;j<tau_size;j++){
                votes[i][j]=Utils.maxIndex(this.ensemble[arr[i].index].getVotesForInstance(recentChunk.get(j)));
                errors+=(votes[i][j]==(int) this.recentChunk.get(j).classValue())?0:1;
                count++;
            }
        }
        errors = errors/count;
        // Average pairwise kappa minus the error rate: high agreement and low
        // error means a stable concept.
        // NOTE(review): with m < 2 the pair count stays 0 and res/count is
        // 0/0 (NaN) — confirm the member-count option always keeps m >= 2.
        double res=0; count=0;
        for (int i=0;i<m;i++)
            for (int j=i+1;j<m;j++)
                if (i!=j){
                    res+=computeKappa(votes[i],votes[j]);
                    count++;
                }
        return res/count-errors;
    }

    /**
     * Returns the adaptive classifier with the highest weight
     * @return the best adaptive classifier
     */
    private Classifier getBestAdaptiveClassifier(){
        //take a copy of the ensemble weights (excluding snapshots)
        Pair[] newEnsembleWeights = new Pair[ensembleWeights.length-MAXPERMANENT];
        for (int i = 0 ; i < newEnsembleWeights.length; i++)
            newEnsembleWeights[i]=ensembleWeights[i];
        //sort the weight values
        // NOTE(review): relies on Pair (declared in DACC) implementing
        // Comparable so reverseOrder() sorts by descending weight — confirm.
        Arrays.sort(newEnsembleWeights,Collections.reverseOrder());
        return this.ensemble[newEnsembleWeights[0].index].copy();
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // TODO Auto-generated method stub
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        Measurement[] measurements = new Measurement[10];
        measurements[0] = new Measurement("size ",
                this.ensemble.length-MAXPERMANENT);
        measurements[1] = new Measurement("maturity ",
                this.maturityOption.getValue());
        measurements[2] = new Measurement("evalsize ",
                this.evaluationSizeOption.getValue());
        measurements[3] = new Measurement("cmb ",
                this.combinationOption.getChosenIndex());
        measurements[4] = new Measurement("tau",
                this.tau_size);
        measurements[5] = new Measurement("MaxSnapshotsSize",
                MAXPERMANENT);
        measurements[6] = new Measurement("SnapshotsSize",
                this.addedPermanent);
        measurements[7] = new Measurement("stabilityIndex",
                this.index);
        measurements[8]=new Measurement("stabilityThreshold",
                this.theta_stab);
        measurements[9]=new Measurement("differenceThreshold",
                this.theta_diff);
        return measurements;
    }

    @Override
    protected int getNbActiveClassifiers(){
        // Adaptive members plus however many snapshot slots are filled.
        return ensemble.length-MAXPERMANENT+Math.min(addedPermanent,MAXPERMANENT);
    }

    @Override
    protected int getNbAdaptiveClassifiers(){
        return this.ensemble.length-MAXPERMANENT;
    }
}
| Java |
/*
* AccuracyUpdatedEnsemble.java
* Copyright (C) 2010 Poznan University of Technology, Poznan, Poland
* @author Dariusz Brzezinski (dariusz.brzezinski@cs.put.poznan.pl)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
package moa.classifiers.meta;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import moa.classifiers.trees.HoeffdingTree;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.ObjectRepository;
import moa.options.ClassOption;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
import weka.core.Instance;
import weka.core.Instances;
/**
* The revised version of the Accuracy Updated Ensemble as proposed by
* Brzezinski and Stefanowski in "Reacting to Different Types of Concept Drift:
* The Accuracy Updated Ensemble Algorithm", IEEE Trans. Neural Netw, 2013.
*/
public class AccuracyUpdatedEnsemble extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    /**
     * Type of classifier to use as a component classifier.
     */
    public ClassOption learnerOption = new ClassOption("learner", 'l', "Classifier to train.", Classifier.class,
            "trees.HoeffdingTree -e 2000000 -g 100 -c 0.01");

    /**
     * Number of component classifiers.
     */
    public IntOption memberCountOption = new IntOption("memberCount", 'n',
            "The maximum number of classifiers in an ensemble.", 10, 1, Integer.MAX_VALUE);

    /**
     * Chunk size.
     */
    public IntOption chunkSizeOption = new IntOption("chunkSize", 'c',
            "The chunk size used for classifier creation and evaluation.", 500, 1, Integer.MAX_VALUE);

    /**
     * Determines the maximum size of model (evaluated after every chunk).
     */
    public IntOption maxByteSizeOption = new IntOption("maxByteSize", 'm', "Maximum memory consumed by ensemble.",
            33554432, 0, Integer.MAX_VALUE);

    /**
     * The weights of stored classifiers.
     * weights[x][0] = weight of classifier x,
     * weights[x][1] = index of classifier x in {@link #learners}.
     */
    protected double[][] weights;

    /**
     * Class distribution of the current chunk.
     */
    protected long[] classDistributions;

    /**
     * Ensemble classifiers.
     */
    protected Classifier[] learners;

    /**
     * Number of processed examples.
     */
    protected int processedInstances;

    /**
     * Candidate classifier, trained only on the most recent chunk.
     */
    protected Classifier candidate;

    /**
     * Current chunk of instances.
     */
    protected Instances currentChunk;

    @Override
    public void prepareForUseImpl(TaskMonitor monitor, ObjectRepository repository) {
        this.candidate = (Classifier) getPreparedClassOption(this.learnerOption);
        this.candidate.resetLearning();
        super.prepareForUseImpl(monitor, repository);
    }

    @Override
    public void resetLearningImpl() {
        this.currentChunk = null;
        this.classDistributions = null;
        this.processedInstances = 0;
        this.learners = new Classifier[0];
        this.candidate = (Classifier) getPreparedClassOption(this.learnerOption);
        this.candidate.resetLearning();
    }

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        this.initVariables();
        this.classDistributions[(int) inst.classValue()]++;
        this.currentChunk.add(inst);
        this.processedInstances++;
        // Re-evaluate and rebuild the ensemble after each full chunk.
        if (this.processedInstances % this.chunkSizeOption.getValue() == 0) {
            this.processChunk();
        }
    }

    /**
     * Determines whether the classifier is randomizable.
     */
    public boolean isRandomizable() {
        return false;
    }

    /**
     * Predicts a class for an example by weighted majority voting over the
     * ensemble members.
     */
    public double[] getVotesForInstance(Instance inst) {
        DoubleVector combinedVote = new DoubleVector();
        if (this.trainingWeightSeenByModel > 0.0) {
            for (int i = 0; i < this.learners.length; i++) {
                if (this.weights[i][0] > 0.0) {
                    DoubleVector vote = new DoubleVector(this.learners[(int) this.weights[i][1]].getVotesForInstance(inst));
                    if (vote.sumOfValues() > 0.0) {
                        vote.normalize();
                        // scale weight and prevent overflow
                        vote.scaleValues(this.weights[i][0] / (1.0 * this.learners.length + 1.0));
                        combinedVote.addValues(vote);
                    }
                }
            }
        }
        return combinedVote.getArrayRef();
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
    }

    @Override
    public Classifier[] getSubClassifiers() {
        return this.learners.clone();
    }

    /**
     * Processes a chunk of instances: reweights existing members, adds the
     * candidate (or substitutes the poorest member), then trains all members
     * on the chunk. Called after collecting a full chunk of examples.
     */
    protected void processChunk() {
        Classifier addedClassifier = null;
        double mse_r = this.computeMseR();
        // Compute weights; Double.MIN_VALUE prevents division by zero.
        double candidateClassifierWeight = 1.0 / (mse_r + Double.MIN_VALUE);
        for (int i = 0; i < this.learners.length; i++) {
            this.weights[i][0] = 1.0 / (mse_r + this.computeMse(this.learners[(int) this.weights[i][1]], this.currentChunk) + Double.MIN_VALUE);
        }
        if (this.learners.length < this.memberCountOption.getValue()) {
            // Train and add classifier
            addedClassifier = this.addToStored(this.candidate, candidateClassifierWeight);
        } else {
            // Substitute poorest classifier if the candidate outperforms it
            int poorestClassifier = this.getPoorestClassifierIndex();
            if (this.weights[poorestClassifier][0] < candidateClassifierWeight) {
                this.weights[poorestClassifier][0] = candidateClassifierWeight;
                addedClassifier = this.candidate.copy();
                this.learners[(int) this.weights[poorestClassifier][1]] = addedClassifier;
            }
        }
        // train classifiers on the current chunk
        for (int i = 0; i < this.learners.length; i++) {
            this.trainOnChunk(this.learners[(int) this.weights[i][1]]);
        }
        this.classDistributions = null;
        this.currentChunk = null;
        this.candidate = (Classifier) getPreparedClassOption(this.learnerOption);
        this.candidate.resetLearning();
        this.enforceMemoryLimit();
    }

    /**
     * Checks if the memory limit is exceeded and if so prunes the classifiers
     * in the ensemble. Memory pruning is only supported for HoeffdingTree
     * components; other learner types are left untouched instead of causing a
     * ClassCastException (the original code cast unconditionally, which
     * crashed whenever a non-HoeffdingTree base learner was configured).
     */
    protected void enforceMemoryLimit() {
        double memoryLimit = this.maxByteSizeOption.getValue() / (double) (this.learners.length + 1);
        for (int i = 0; i < this.learners.length; i++) {
            Classifier member = this.learners[(int) this.weights[i][1]];
            if (member instanceof HoeffdingTree) {
                ((HoeffdingTree) member).maxByteSizeOption.setValue((int) Math.round(memoryLimit));
                ((HoeffdingTree) member).enforceTrackerLimit();
            }
        }
    }

    /**
     * Computes the MSEr threshold (the error of a random classifier that
     * predicts according to the chunk's class distribution).
     *
     * @return The MSEr threshold.
     */
    protected double computeMseR() {
        double p_c;
        double mse_r = 0;
        for (int i = 0; i < this.classDistributions.length; i++) {
            p_c = (double) this.classDistributions[i] / (double) this.chunkSizeOption.getValue();
            mse_r += p_c * ((1 - p_c) * (1 - p_c));
        }
        return mse_r;
    }

    /**
     * Computes the MSE of a learner for a given chunk of examples.
     *
     * @param learner classifier to compute error
     * @param chunk chunk of examples
     * @return the computed error.
     */
    protected double computeMse(Classifier learner, Instances chunk) {
        double mse_i = 0;
        double f_ci;
        double voteSum;
        for (int i = 0; i < chunk.numInstances(); i++) {
            try {
                voteSum = 0;
                for (double element : learner.getVotesForInstance(chunk.instance(i))) {
                    voteSum += element;
                }
                if (voteSum > 0) {
                    // f_ci: normalized support for the true class.
                    f_ci = learner.getVotesForInstance(chunk.instance(i))[(int) chunk.instance(i).classValue()]
                            / voteSum;
                    mse_i += (1 - f_ci) * (1 - f_ci);
                } else {
                    // No votes at all counts as a maximal error.
                    mse_i += 1;
                }
            } catch (Exception e) {
                // A failing member (e.g. unseen class index) is treated as a
                // maximal error rather than aborting chunk processing.
                mse_i += 1;
            }
        }
        mse_i /= this.chunkSizeOption.getValue();
        return mse_i;
    }

    /**
     * Adds ensemble weights to the measurements.
     */
    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        Measurement[] measurements = new Measurement[this.memberCountOption.getValue()];
        // Default to -1 for slots that are not filled yet.
        for (int m = 0; m < this.memberCountOption.getValue(); m++) {
            measurements[m] = new Measurement("Member weight " + (m + 1), -1);
        }
        if (this.weights != null) {
            for (int i = 0; i < this.weights.length; i++) {
                measurements[i] = new Measurement("Member weight " + (i + 1), this.weights[i][0]);
            }
        }
        return measurements;
    }

    /**
     * Adds a classifier to the storage.
     *
     * @param newClassifier
     *            The classifier to add.
     * @param newClassifiersWeight
     *            The new classifiers weight.
     * @return the stored copy of the added classifier.
     */
    protected Classifier addToStored(Classifier newClassifier, double newClassifiersWeight) {
        Classifier addedClassifier = null;
        Classifier[] newStored = new Classifier[this.learners.length + 1];
        double[][] newStoredWeights = new double[newStored.length][2];
        for (int i = 0; i < newStored.length; i++) {
            if (i < this.learners.length) {
                newStored[i] = this.learners[i];
                newStoredWeights[i][0] = this.weights[i][0];
                newStoredWeights[i][1] = this.weights[i][1];
            } else {
                newStored[i] = addedClassifier = newClassifier.copy();
                newStoredWeights[i][0] = newClassifiersWeight;
                newStoredWeights[i][1] = i;
            }
        }
        this.learners = newStored;
        this.weights = newStoredWeights;
        return addedClassifier;
    }

    /**
     * Finds the index of the classifier with the smallest weight.
     *
     * @return index into {@link #weights} of the poorest classifier.
     */
    private int getPoorestClassifierIndex() {
        int minIndex = 0;
        for (int i = 1; i < this.weights.length; i++) {
            if (this.weights[i][0] < this.weights[minIndex][0]) {
                minIndex = i;
            }
        }
        return minIndex;
    }

    /**
     * Initiates the current chunk and class distribution variables.
     */
    private void initVariables() {
        if (this.currentChunk == null) {
            this.currentChunk = new Instances(this.getModelContext());
        }
        if (this.classDistributions == null) {
            this.classDistributions = new long[this.getModelContext().classAttribute().numValues()];
            for (int i = 0; i < this.classDistributions.length; i++) {
                this.classDistributions[i] = 0;
            }
        }
    }

    /**
     * Trains a component classifier on the most recent chunk of data.
     *
     * @param classifierToTrain
     *            Classifier being trained.
     */
    private void trainOnChunk(Classifier classifierToTrain) {
        for (int num = 0; num < this.chunkSizeOption.getValue(); num++) {
            classifierToTrain.trainOnInstance(this.currentChunk.instance(num));
        }
    }
}
| Java |
/*
* OzaBagAdwin.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.core.driftdetection.ADWIN;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import weka.core.Instance;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.MiscUtils;
import moa.options.ClassOption;
import moa.options.IntOption;
/**
* Bagging for evolving data streams using ADWIN.
*
* <p>ADWIN is a change detector and estimator that solves in
* a well-specified way the problem of tracking the average of
* a stream of bits or real-valued numbers. ADWIN keeps a
* variable-length window of recently seen items, with the property
* that the window has the maximal length statistically consistent
* with the hypothesis “there has been no change in the average value
* inside the window”.<br />
* More precisely, an older fragment of the window is dropped if and only
* if there is enough evidence that its average value differs from that of
* the rest of the window. This has two consequences: one, that change
* reliably declared whenever the window shrinks; and two, that at any time
* the average over the existing window can be reliably taken as an estimation
* of the current average in the stream (barring a very small or very recent
* change that is still not statistically visible). A formal and quantitative
* statement of these two points (a theorem) appears in<p>
*
* Albert Bifet and Ricard Gavaldà. Learning from time-changing data
* with adaptive windowing. In SIAM International Conference on Data Mining,
* 2007.</p>
* <p>ADWIN is parameter- and assumption-free in the sense that it automatically
* detects and adapts to the current rate of change. Its only parameter is a
* confidence bound δ, indicating how confident we want to be in the algorithm’s
* output, inherent to all algorithms dealing with random processes. Also
* important, ADWIN does not maintain the window explicitly, but compresses it
* using a variant of the exponential histogram technique. This means that it
* keeps a window of length W using only O(log W) memory and O(log W) processing
* time per item.<br />
* ADWIN Bagging is the online bagging method of Oza and Rusell with the
* addition of the ADWIN algorithm as a change detector and as an estimator for
* the weights of the boosting method. When a change is detected, the worst
* classifier of the ensemble of classifiers is removed and a new classifier is
* added to the ensemble.</p>
* <p>See details in:<br />
* [BHPKG] Albert Bifet, Geoff Holmes, Bernhard Pfahringer, Richard Kirkby,
* and Ricard Gavaldà . New ensemble methods for evolving data streams.
* In 15th ACM SIGKDD International Conference on Knowledge Discovery and
* Data Mining, 2009.</p>
* <p>Example:</p>
* <code>OzaBagAdwin -l HoeffdingTreeNBAdaptive -s 10</code>
* <p>Parameters:</p> <ul>
* <li>-l : Classifier to train</li>
* <li>-s : The number of models in the bag</li> </ul>
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public class OzaBagAdwin extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Bagging for evolving data streams using ADWIN.";
    }

    public ClassOption baseLearnerOption = new ClassOption("baseLearner", 'l',
            "Classifier to train.", Classifier.class, "trees.HoeffdingTree");

    public IntOption ensembleSizeOption = new IntOption("ensembleSize", 's',
            "The number of models in the bag.", 10, 1, Integer.MAX_VALUE);

    /** Bagged ensemble members. */
    protected Classifier[] ensemble;

    /** One ADWIN error estimator per ensemble member. */
    protected ADWIN[] ADError;

    @Override
    public void resetLearningImpl() {
        // Materialize one prototype learner and clone it into every slot,
        // pairing each slot with a fresh ADWIN detector.
        Classifier prototype = (Classifier) getPreparedClassOption(this.baseLearnerOption);
        prototype.resetLearning();
        int size = this.ensembleSizeOption.getValue();
        this.ensemble = new Classifier[size];
        this.ADError = new ADWIN[size];
        for (int slot = 0; slot < size; slot++) {
            this.ensemble[slot] = prototype.copy();
            this.ADError[slot] = new ADWIN();
        }
    }

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        boolean driftDetected = false;
        for (int slot = 0; slot < this.ensemble.length; slot++) {
            // Online bagging: each member sees the instance Poisson(1) times.
            int timesSeen = MiscUtils.poisson(1.0, this.classifierRandom);
            if (timesSeen > 0) {
                Instance replicatedInst = (Instance) inst.copy();
                replicatedInst.setWeight(inst.weight() * timesSeen);
                this.ensemble[slot].trainOnInstance(replicatedInst);
            }
            // Feed the 0/1 error of this member into its ADWIN; a window cut
            // that raises the error estimate signals concept drift.
            boolean correct = this.ensemble[slot].correctlyClassifies(inst);
            double previousError = this.ADError[slot].getEstimation();
            if (this.ADError[slot].setInput(correct ? 0 : 1)
                    && this.ADError[slot].getEstimation() > previousError) {
                driftDetected = true;
            }
        }
        if (!driftDetected) {
            return;
        }
        // On drift, reset the member with the highest estimated error.
        int worstIndex = -1;
        double worstError = 0.0;
        for (int slot = 0; slot < this.ensemble.length; slot++) {
            double estimation = this.ADError[slot].getEstimation();
            if (estimation > worstError) {
                worstError = estimation;
                worstIndex = slot;
            }
        }
        if (worstIndex != -1) {
            this.ensemble[worstIndex].resetLearning();
            this.ADError[worstIndex] = new ADWIN();
        }
    }

    @Override
    public double[] getVotesForInstance(Instance inst) {
        // Sum the normalized vote vectors of all members.
        DoubleVector combinedVote = new DoubleVector();
        for (Classifier member : this.ensemble) {
            DoubleVector memberVote = new DoubleVector(member.getVotesForInstance(inst));
            if (memberVote.sumOfValues() > 0.0) {
                memberVote.normalize();
                combinedVote.addValues(memberVote);
            }
        }
        return combinedVote.getArrayRef();
    }

    @Override
    public boolean isRandomizable() {
        return true;
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // No model description output.
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        int size = this.ensemble != null ? this.ensemble.length : 0;
        return new Measurement[]{new Measurement("ensemble size", size)};
    }

    @Override
    public Classifier[] getSubClassifiers() {
        return this.ensemble.clone();
    }
}
| Java |
/*
* WeightedMajorityAlgorithm.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.ObjectRepository;
import moa.options.ClassOption;
import moa.options.FlagOption;
import moa.options.FloatOption;
import moa.options.ListOption;
import moa.options.Option;
import moa.tasks.TaskMonitor;
import weka.core.Instance;
import weka.core.Utils;
/**
* Weighted majority algorithm for data streams.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class WeightedMajorityAlgorithm extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Weighted majority algorithm for data streams.";
    }

    public ListOption learnerListOption = new ListOption(
            "learners",
            'l',
            "The learners to combine.",
            new ClassOption("learner", ' ', "", Classifier.class,
            "trees.HoeffdingTree"),
            new Option[]{
                new ClassOption("", ' ', "", Classifier.class,
                "trees.HoeffdingTree -l MC"),
                new ClassOption("", ' ', "", Classifier.class,
                "trees.HoeffdingTree -l NB"),
                new ClassOption("", ' ', "", Classifier.class,
                "trees.HoeffdingTree -l NBAdaptive"),
                new ClassOption("", ' ', "", Classifier.class, "bayes.NaiveBayes")},
            ',');

    public FloatOption betaOption = new FloatOption("beta", 'b',
            "Factor to punish mistakes by.", 0.9, 0.0, 1.0);

    public FloatOption gammaOption = new FloatOption("gamma", 'g',
            "Minimum fraction of weight per model.", 0.01, 0.0, 0.5);

    public FlagOption pruneOption = new FlagOption("prune", 'p',
            "Prune poorly performing models from ensemble.");

    /** Combined heterogeneous ensemble members. */
    protected Classifier[] ensemble;

    /** Current (normalized) weight of each member; parallel to {@link #ensemble}. */
    protected double[] ensembleWeights;

    @Override
    public void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        Option[] learnerOptions = this.learnerListOption.getList();
        this.ensemble = new Classifier[learnerOptions.length];
        for (int i = 0; i < learnerOptions.length; i++) {
            monitor.setCurrentActivity("Materializing learner " + (i + 1)
                    + "...", -1.0);
            this.ensemble[i] = (Classifier) ((ClassOption) learnerOptions[i]).materializeObject(monitor, repository);
            if (monitor.taskShouldAbort()) {
                return;
            }
            monitor.setCurrentActivity("Preparing learner " + (i + 1) + "...",
                    -1.0);
            this.ensemble[i].prepareForUse(monitor, repository);
            if (monitor.taskShouldAbort()) {
                return;
            }
        }
        super.prepareForUseImpl(monitor, repository);
    }

    @Override
    public void resetLearningImpl() {
        this.ensembleWeights = new double[this.ensemble.length];
        for (int i = 0; i < this.ensemble.length; i++) {
            this.ensemble[i].resetLearning();
            this.ensembleWeights[i] = 1.0;
        }
    }

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        double totalWeight = 0.0;
        for (int i = 0; i < this.ensemble.length; i++) {
            boolean prune = false;
            if (!this.ensemble[i].correctlyClassifies(inst)) {
                if (this.ensembleWeights[i] > this.gammaOption.getValue()
                        / this.ensembleWeights.length) {
                    // Punish the mistake multiplicatively.
                    this.ensembleWeights[i] *= this.betaOption.getValue()
                            * inst.weight();
                } else if (this.pruneOption.isSet()) {
                    // Weight fell below the floor: drop the model entirely.
                    prune = true;
                    discardModel(i);
                    i--;
                }
            }
            if (!prune) {
                totalWeight += this.ensembleWeights[i];
                this.ensemble[i].trainOnInstance(inst);
            }
        }
        // Normalize weights. Guard against totalWeight == 0 (e.g. when every
        // model was pruned or all weights decayed to zero); dividing by zero
        // would turn every weight into NaN and break the ensemble for good.
        if (totalWeight > 0.0) {
            for (int i = 0; i < this.ensembleWeights.length; i++) {
                this.ensembleWeights[i] /= totalWeight;
            }
        }
    }

    /**
     * Predicts a class by weighted majority voting over the members.
     */
    @Override
    public double[] getVotesForInstance(Instance inst) {
        DoubleVector combinedVote = new DoubleVector();
        if (this.trainingWeightSeenByModel > 0.0) {
            for (int i = 0; i < this.ensemble.length; i++) {
                if (this.ensembleWeights[i] > 0.0) {
                    DoubleVector vote = new DoubleVector(this.ensemble[i].getVotesForInstance(inst));
                    if (vote.sumOfValues() > 0.0) {
                        vote.normalize();
                        vote.scaleValues(this.ensembleWeights[i]);
                        combinedVote.addValues(vote);
                    }
                }
            }
        }
        return combinedVote.getArrayRef();
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // No model description output.
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        Measurement[] measurements = null;
        if (this.ensembleWeights != null) {
            measurements = new Measurement[this.ensembleWeights.length];
            for (int i = 0; i < this.ensembleWeights.length; i++) {
                measurements[i] = new Measurement("member weight " + (i + 1),
                        this.ensembleWeights[i]);
            }
        }
        return measurements;
    }

    @Override
    public boolean isRandomizable() {
        return false;
    }

    @Override
    public Classifier[] getSubClassifiers() {
        return this.ensemble.clone();
    }

    /**
     * Removes the model (and its weight) at the given index, compacting both
     * parallel arrays.
     *
     * @param index position of the model to discard.
     */
    public void discardModel(int index) {
        Classifier[] newEnsemble = new Classifier[this.ensemble.length - 1];
        double[] newEnsembleWeights = new double[newEnsemble.length];
        int oldPos = 0;
        for (int i = 0; i < newEnsemble.length; i++) {
            if (oldPos == index) {
                oldPos++;
            }
            newEnsemble[i] = this.ensemble[oldPos];
            newEnsembleWeights[i] = this.ensembleWeights[oldPos];
            oldPos++;
        }
        this.ensemble = newEnsemble;
        this.ensembleWeights = newEnsembleWeights;
    }

    /**
     * Discards the lowest-weight model and reports its size in bytes, for use
     * by memory-management callers.
     *
     * @return byte size of the removed model.
     */
    protected int removePoorestModelBytes() {
        int poorestIndex = Utils.minIndex(this.ensembleWeights);
        int byteSize = this.ensemble[poorestIndex].measureByteSize();
        discardModel(poorestIndex);
        return byteSize;
    }
}
| Java |
/*
* WEKAClassifier.java
* Copyright (C) 2009 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @author FracPete (fracpete at waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.AbstractClassifier;
import moa.core.Measurement;
import moa.options.IntOption;
import moa.options.WEKAClassOption;
import weka.classifiers.Classifier;
import weka.classifiers.UpdateableClassifier;
import weka.core.Instance;
import weka.core.Instances;
/**
* Class for using a classifier from WEKA.
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @author FracPete (fracpete at waikato dot ac dot nz)
* @version $Revision$
*/
public class WEKAClassifier
        extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Classifier from Weka";
    }

    public WEKAClassOption baseLearnerOption = new WEKAClassOption("baseLearner", 'l',
            "Classifier to train.", weka.classifiers.Classifier.class, "weka.classifiers.bayes.NaiveBayesUpdateable");

    public IntOption widthOption = new IntOption("width",
            'w', "Size of Window for training learner.", 0, 0, Integer.MAX_VALUE);

    public IntOption widthInitOption = new IntOption("widthInit",
            'i', "Size of first Window for training learner.", 1000, 0, Integer.MAX_VALUE);

    public IntOption sampleFrequencyOption = new IntOption("sampleFrequency",
            'f',
            "How many instances between samples of the learning performance.",
            0, 0, Integer.MAX_VALUE);

    /** Wrapped WEKA classifier. */
    protected Classifier classifier;

    /** Number of instances seen so far. */
    protected int numberInstances;

    /** Buffer used to (re)build non-updateable classifiers in batch. */
    protected Instances instancesBuffer;

    /** True once the wrapped classifier can produce predictions. */
    protected boolean isClassificationEnabled;

    /** True while instances are being collected into the buffer. */
    protected boolean isBufferStoring;

    @Override
    public void resetLearningImpl() {
        try {
            String[] options = weka.core.Utils.splitOptions(baseLearnerOption.getValueAsCLIString());
            createWekaClassifier(options);
        } catch (Exception e) {
            System.err.println("Creating a new classifier: " + e.getMessage());
        }
        numberInstances = 0;
        isClassificationEnabled = false;
        this.isBufferStoring = true;
    }

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        try {
            if (numberInstances == 0) {
                this.instancesBuffer = new Instances(inst.dataset());
                if (classifier instanceof UpdateableClassifier) {
                    // Incremental learners are initialized on an empty header.
                    classifier.buildClassifier(instancesBuffer);
                    this.isClassificationEnabled = true;
                } else {
                    this.isBufferStoring = true;
                }
            }
            numberInstances++;
            if (classifier instanceof UpdateableClassifier) {
                // Incremental learner: update with every instance.
                ((UpdateableClassifier) classifier).updateClassifier(inst);
            } else {
                // Batch learner: collect instances and rebuild periodically.
                if (numberInstances == widthInitOption.getValue()) {
                    // Build the classifier for the first time.
                    buildClassifier();
                    isClassificationEnabled = true;
                    // Continue to store instances
                    if (sampleFrequencyOption.getValue() != 0) {
                        isBufferStoring = true;
                    }
                }
                if (widthOption.getValue() == 0) {
                    // Used from SingleClassifierDrift
                    if (isBufferStoring == true) {
                        instancesBuffer.add(inst);
                    }
                } else {
                    // Used from WekaClassifier without using SingleClassifierDrift.
                    // Check the divisor BEFORE taking the modulo: the original
                    // code computed (numberInstances % sampleFrequency) first,
                    // which threw ArithmeticException whenever the default
                    // sample frequency of 0 was combined with a nonzero width,
                    // silently disabling training via the catch below.
                    int numInstances;
                    if (sampleFrequencyOption.getValue() == 0) {
                        numInstances = numberInstances;
                    } else {
                        numInstances = numberInstances % sampleFrequencyOption.getValue();
                    }
                    if (numInstances == 0) {
                        // Begin to store instances
                        isBufferStoring = true;
                    }
                    if (isBufferStoring == true && numInstances <= widthOption.getValue()) {
                        // Store instances
                        instancesBuffer.add(inst);
                    }
                    if (numInstances == widthOption.getValue()) {
                        // Rebuild the classifier on the collected window.
                        buildClassifier();
                        isClassificationEnabled = true;
                        this.instancesBuffer = new Instances(inst.dataset());
                    }
                }
            }
        } catch (Exception e) {
            System.err.println("Training: " + e.getMessage());
        }
    }

    /**
     * Rebuilds a non-updateable classifier from the buffered instances; a copy
     * is trained first so the live model stays usable if building fails.
     */
    public void buildClassifier() {
        try {
            if ((classifier instanceof UpdateableClassifier) == false) {
                Classifier auxclassifier = weka.classifiers.AbstractClassifier.makeCopy(classifier);
                auxclassifier.buildClassifier(instancesBuffer);
                classifier = auxclassifier;
                isBufferStoring = false;
            }
        } catch (Exception e) {
            System.err.println("Building WEKA Classifier: " + e.getMessage());
        }
    }

    @Override
    public double[] getVotesForInstance(Instance inst) {
        double[] votes = new double[inst.numClasses()];
        if (isClassificationEnabled == false) {
            // Not built yet: return a uniform distribution.
            for (int i = 0; i < inst.numClasses(); i++) {
                votes[i] = 1.0 / inst.numClasses();
            }
        } else {
            try {
                votes = this.classifier.distributionForInstance(inst);
            } catch (Exception e) {
                System.err.println(e.getMessage());
            }
        }
        return votes;
    }

    @Override
    public boolean isRandomizable() {
        return false;
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        if (classifier != null) {
            out.append(classifier.toString());
        }
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        return new Measurement[0];
    }

    /**
     * Instantiates the wrapped WEKA classifier from a CLI-style option array
     * whose first element is the class name.
     *
     * @param options class name followed by classifier options.
     * @throws Exception if the classifier cannot be instantiated.
     */
    public void createWekaClassifier(String[] options) throws Exception {
        String classifierName = options[0];
        String[] newoptions = options.clone();
        newoptions[0] = "";
        this.classifier = weka.classifiers.AbstractClassifier.forName(classifierName, newoptions);
    }
}
| Java |
/*
* OzaBoostAdwin.java
* Copyright (C) 2010 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.core.driftdetection.ADWIN;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.MiscUtils;
import moa.options.ClassOption;
import moa.options.FlagOption;
import moa.options.FloatOption;
import moa.options.IntOption;
import weka.core.Instance;
/**
* Boosting for evolving data streams using ADWIN.
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public class OzaBoostAdwin extends AbstractClassifier {
private static final long serialVersionUID = 1L;
@Override
public String getPurposeString() {
return "Boosting for evolving data streams using ADWIN.";
}
public ClassOption baseLearnerOption = new ClassOption("baseLearner", 'l',
"Classifier to train.", Classifier.class, "trees.HoeffdingTree");
public IntOption ensembleSizeOption = new IntOption("ensembleSize", 's',
"The number of models to boost.", 10, 1, Integer.MAX_VALUE);
public FlagOption pureBoostOption = new FlagOption("pureBoost", 'p',
"Boost with weights only; no poisson.");
public FloatOption deltaAdwinOption = new FloatOption("deltaAdwin", 'a',
"Delta of Adwin change detection", 0.002, 0.0, 1.0);
public FlagOption outputCodesOption = new FlagOption("outputCodes", 'o',
"Use Output Codes to use binary classifiers.");
public FlagOption sammeOption = new FlagOption("same", 'e',
"Use Samme Algorithm.");
protected Classifier[] ensemble;
protected double[] scms;
protected double[] swms;
protected ADWIN[] ADError;
protected int numberOfChangesDetected;
protected int[][] matrixCodes;
protected boolean initMatrixCodes = false;
protected double logKm1 = 0.0;
protected int Km1 = 1;
protected boolean initKm1 = false;
@Override
public void resetLearningImpl() {
this.ensemble = new Classifier[this.ensembleSizeOption.getValue()];
Classifier baseLearner = (Classifier) getPreparedClassOption(this.baseLearnerOption);
baseLearner.resetLearning();
for (int i = 0; i < this.ensemble.length; i++) {
this.ensemble[i] = baseLearner.copy();
}
this.scms = new double[this.ensemble.length];
this.swms = new double[this.ensemble.length];
this.ADError = new ADWIN[this.ensemble.length];
for (int i = 0; i < this.ensemble.length; i++) {
this.ADError[i] = new ADWIN((double) this.deltaAdwinOption.getValue());
}
this.numberOfChangesDetected = 0;
if (this.outputCodesOption.isSet()) {
this.initMatrixCodes = true;
}
if (this.sammeOption.isSet()) {
this.initKm1 = true;
}
}
@Override
public void trainOnInstanceImpl(Instance inst) {
int numClasses = inst.numClasses();
// Set log (k-1) and (k-1) for SAMME Method
if (this.sammeOption.isSet()) {
this.Km1 = numClasses - 1;
this.logKm1 = Math.log(this.Km1);
this.initKm1 = false;
}
//Output Codes
if (this.initMatrixCodes == true) {
this.matrixCodes = new int[this.ensemble.length][inst.numClasses()];
for (int i = 0; i < this.ensemble.length; i++) {
int numberOnes;
int numberZeros;
do { // until we have the same number of zeros and ones
numberOnes = 0;
numberZeros = 0;
for (int j = 0; j < numClasses; j++) {
int result = 0;
if (j == 1 && numClasses == 2) {
result = 1 - this.matrixCodes[i][0];
} else {
result = (this.classifierRandom.nextBoolean() ? 1 : 0);
}
this.matrixCodes[i][j] = result;
if (result == 1) {
numberOnes++;
} else {
numberZeros++;
}
}
} while ((numberOnes - numberZeros) * (numberOnes - numberZeros) > (this.ensemble.length % 2));
}
this.initMatrixCodes = false;
}
boolean Change = false;
double lambda_d = 1.0;
Instance weightedInst = (Instance) inst.copy();
for (int i = 0; i < this.ensemble.length; i++) {
double k = this.pureBoostOption.isSet() ? lambda_d : MiscUtils.poisson(lambda_d * this.Km1, this.classifierRandom);
if (k > 0.0) {
if (this.outputCodesOption.isSet()) {
weightedInst.setClassValue((double) this.matrixCodes[i][(int) inst.classValue()]);
}
weightedInst.setWeight(inst.weight() * k);
this.ensemble[i].trainOnInstance(weightedInst);
}
boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(weightedInst);
if (correctlyClassifies) {
this.scms[i] += lambda_d;
lambda_d *= this.trainingWeightSeenByModel / (2 * this.scms[i]);
} else {
this.swms[i] += lambda_d;
lambda_d *= this.trainingWeightSeenByModel / (2 * this.swms[i]);
}
double ErrEstim = this.ADError[i].getEstimation();
if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1)) {
if (this.ADError[i].getEstimation() > ErrEstim) {
Change = true;
}
}
}
if (Change) {
numberOfChangesDetected++;
double max = 0.0;
int imax = -1;
for (int i = 0; i < this.ensemble.length; i++) {
if (max < this.ADError[i].getEstimation()) {
max = this.ADError[i].getEstimation();
imax = i;
}
}
if (imax != -1) {
this.ensemble[imax].resetLearning();
//this.ensemble[imax].trainOnInstance(inst);
this.ADError[imax] = new ADWIN((double) this.deltaAdwinOption.getValue());
this.scms[imax] = 0;
this.swms[imax] = 0;
}
}
}
protected double getEnsembleMemberWeight(int i) {
double em = this.swms[i] / (this.scms[i] + this.swms[i]);
if ((em == 0.0) || (em > 0.5)) {
return this.logKm1;
}
return Math.log((1.0 - em) / em) + this.logKm1;
}
@Override
public double[] getVotesForInstance(Instance inst) {
    // Output-code mode has its own voting scheme.
    if (this.outputCodesOption.isSet()) {
        return getVotesForInstanceBinary(inst);
    }
    DoubleVector result = new DoubleVector();
    for (int memberIndex = 0; memberIndex < this.ensemble.length; memberIndex++) {
        double weight = getEnsembleMemberWeight(memberIndex);
        if (weight <= 0.0) {
            // Members are consulted in order; a non-positive weight stops the vote.
            break;
        }
        DoubleVector memberVote = new DoubleVector(
                this.ensemble[memberIndex].getVotesForInstance(inst));
        if (memberVote.sumOfValues() > 0.0) {
            memberVote.normalize();
            memberVote.scaleValues(weight);
            result.addValues(memberVote);
        }
    }
    return result.getArrayRef();
}
/**
 * Votes using the binary output-code matrix: each member predicts a bit, and
 * every class whose code bit matches that prediction is credited with the
 * member's weight.
 *
 * @param inst the instance to classify
 * @return one accumulated vote per class (all zeros until the code matrix
 *         has been initialized)
 */
public double[] getVotesForInstanceBinary(Instance inst) {
    double[] votes = new double[(int) inst.numClasses()];
    Instance codedInst = (Instance) inst.copy();
    // initMatrixCodes == false means the matrix HAS been built already.
    if (this.initMatrixCodes == false) {
        for (int m = 0; m < this.ensemble.length; m++) {
            // Re-label the instance with member m's binary output code.
            codedInst.setClassValue((double) this.matrixCodes[m][(int) inst.classValue()]);
            double[] memberVotes = this.ensemble[m].getVotesForInstance(codedInst);
            // Binary decision of member m (defaults to 0 if votes are not binary).
            int predictedBit = 0;
            if (memberVotes.length == 2) {
                predictedBit = memberVotes[1] > memberVotes[0] ? 1 : 0;
            }
            // Credit every class whose code bit matches the predicted bit.
            for (int c = 0; c < inst.numClasses(); c++) {
                if (this.matrixCodes[m][c] == predictedBit) {
                    votes[c] += getEnsembleMemberWeight(m);
                }
            }
        }
    }
    return votes;
}
@Override
public boolean isRandomizable() {
    // Uses classifierRandom (Poisson sampling and output-code generation),
    // so the learner must accept a seed.
    return true;
}
@Override
public void getModelDescription(StringBuilder sb, int indent) {
    // Intentionally empty: this ensemble produces no human-readable
    // model description.
}
@Override
protected Measurement[] getModelMeasurementsImpl() {
    // Report ensemble size (0 before initialization) and drift detections.
    int ensembleSize = (this.ensemble == null) ? 0 : this.ensemble.length;
    return new Measurement[]{
        new Measurement("ensemble size", ensembleSize),
        new Measurement("change detections", this.numberOfChangesDetected)
    };
}
@Override
public Classifier[] getSubClassifiers() {
    // Return a copy so callers cannot mutate the internal ensemble array.
    Classifier[] copy = this.ensemble.clone();
    return copy;
}
}
| Java |
/*
* OnlineAccuracyUpdatedEnsemble.java
* Copyright (C) 2013 Poznan University of Technology, Poznan, Poland
* @author Dariusz Brzezinski (dariusz.brzezinski@cs.put.poznan.pl)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import moa.classifiers.trees.HoeffdingTree;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.ObjectRepository;
import moa.options.ClassOption;
import moa.options.FlagOption;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
import weka.core.Instance;
/**
* The online version of the Accuracy Updated Ensemble as proposed by
* Brzezinski and Stefanowski in "Combining block-based and online methods
* in learning ensembles from concept drifting data streams", Information Sciences, 2014.
*/
public class OnlineAccuracyUpdatedEnsemble extends AbstractClassifier {
private static final long serialVersionUID = 1L;
/**
* Type of classifier to use as a component classifier.
*/
public ClassOption learnerOption = new ClassOption("learner", 'l', "Classifier to train.", Classifier.class,
"trees.HoeffdingTree -e 2000000 -g 100 -c 0.01");
/**
* Number of component classifiers.
*/
public IntOption memberCountOption = new IntOption("memberCount", 'n',
"The maximum number of classifiers in an ensemble.", 10, 1, Integer.MAX_VALUE);
/**
* Chunk size.
*/
public FloatOption windowSizeOption = new FloatOption("windowSize", 'w',
"The window size used for classifier creation and evaluation.", 500, 1, Integer.MAX_VALUE);
/**
* Determines the maximum size of model (evaluated after every chunk).
*/
public IntOption maxByteSizeOption = new IntOption("maxByteSize", 'm', "Maximum memory consumed by ensemble.",
33554432, 0, Integer.MAX_VALUE);
/**
* Determines whether additional information should be sent to the output.
*/
public FlagOption verboseOption = new FlagOption("verbose", 'v', "When checked the algorithm outputs additional information about component classifier weights.");
/**
* Determines whether additional information should be sent to the output.
*/
public FlagOption linearOption = new FlagOption("linearFunction", 'f', "When checked the algorithm uses a linear weighting function.");
/**
* The weights of stored classifiers.
* weights[x][0] = weight
* weights[x][1] = classifier number in learners
*/
protected double[][] weights;
/**
* Class distributions.
*/
protected long[] classDistributions;
/**
* Ensemble classifiers.
*/
protected ClassifierWithMemory[] ensemble;
/**
* Number of processed examples.
*/
protected int processedInstances;
/**
* Candidate classifier.
*/
protected ClassifierWithMemory candidate;
/**
* Current window of instance class values.
*/
protected int[] currentWindow;
/**
* The mean square residual in a given moment, based on a window of latest examples.
*/
protected double mse_r = 0;
/**
* Window size.
*/
protected int windowSize = 0;
@Override
public void prepareForUseImpl(TaskMonitor monitor, ObjectRepository repository) {
this.windowSize = (int)this.windowSizeOption.getValue();
this.candidate = new ClassifierWithMemory(((Classifier) getPreparedClassOption(this.learnerOption)).copy(), this.windowSize);
this.candidate.classifier.resetLearning();
super.prepareForUseImpl(monitor, repository);
}
@Override
public void resetLearningImpl() {
this.currentWindow = null;
this.windowSize = (int)this.windowSizeOption.getValue();
this.classDistributions = null;
this.processedInstances = 0;
this.ensemble = new ClassifierWithMemory[0];
this.candidate = new ClassifierWithMemory(((Classifier) getPreparedClassOption(this.learnerOption)).copy(), this.windowSize);
this.candidate.classifier.resetLearning();
}
@Override
public void trainOnInstanceImpl(Instance inst) {
this.initVariables();
if(this.processedInstances < this.windowSize)
{
this.classDistributions[(int) inst.classValue()]++;
}
else
{
this.classDistributions[this.currentWindow[processedInstances % this.windowSize]]--;
this.classDistributions[(int) inst.classValue()]++;
}
this.currentWindow[processedInstances % this.windowSize] = (int)inst.classValue();
this.processedInstances++;
this.computeMseR();
if (this.processedInstances % this.windowSize == 0) {
this.createNewClassifier(inst);
} else {
this.candidate.classifier.trainOnInstance(inst);
for (int i = 0; i < this.ensemble.length; i++) {
this.weights[i][0] = this.computeWeight(i, inst);
}
}
for (int i = 0; i < this.ensemble.length; i++) {
this.ensemble[i].classifier.trainOnInstance(inst);
}
}
/**
* Determines whether the classifier is randomizable.
*/
public boolean isRandomizable() {
return false;
}
/**
* Predicts a class for an example.
*/
public double[] getVotesForInstance(Instance inst) {
DoubleVector combinedVote = new DoubleVector();
if (this.trainingWeightSeenByModel > 0.0) {
for (int i = 0; i < this.ensemble.length; i++) {
if (this.weights[i][0] > 0.0) {
DoubleVector vote = new DoubleVector(this.ensemble[(int) this.weights[i][1]].classifier.getVotesForInstance(inst));
if (vote.sumOfValues() > 0.0) {
vote.normalize();
// scale weight and prevent overflow
vote.scaleValues(this.weights[i][0] / (1.0 * this.ensemble.length + 1.0));
combinedVote.addValues(vote);
}
}
}
}
//combinedVote.normalize();
return combinedVote.getArrayRef();
}
@Override
public void getModelDescription(StringBuilder out, int indent) {
}
@Override
public Classifier[] getSubClassifiers() {
Classifier[] subClassifiers = new Classifier[this.ensemble.length];
for (int i = 0; i < this.ensemble.length; i++) {
subClassifiers[i] = this.ensemble[i].classifier;
}
return subClassifiers;
}
/**
* Processes a chunk.
*
* @param inst New example
*/
protected void createNewClassifier(Instance inst) {
// Compute weights
double candidateClassifierWeight = 1.0 / (this.mse_r + Double.MIN_VALUE);
if(linearOption.isSet())
{
candidateClassifierWeight = java.lang.Math.max(this.mse_r, Double.MIN_VALUE);
}
for (int i = 0; i < this.ensemble.length; i++) {
this.weights[i][0] = this.computeWeight(i, inst);
}
// Configure candidate classifier
// The candidate classifier has been trained on the last window of examples
this.candidate.birthday = this.processedInstances;
if (this.ensemble.length < this.memberCountOption.getValue()) {
// Add candidate classifier
this.addToStored(this.candidate, candidateClassifierWeight);
} else {
// Substitute poorest classifier
int poorestClassifier = this.getPoorestClassifierIndex();
if (this.weights[poorestClassifier][0] < candidateClassifierWeight) {
this.weights[poorestClassifier][0] = candidateClassifierWeight;
this.candidate.classifier = this.candidate.classifier;
this.ensemble[(int) this.weights[poorestClassifier][1]] = this.candidate;
}
}
this.candidate = new ClassifierWithMemory(((Classifier) getPreparedClassOption(this.learnerOption)).copy(), this.windowSize);
this.candidate.classifier.resetLearning();
this.enforceMemoryLimit();
}
/**
* Checks if the memory limit is exceeded and if so prunes the classifiers in the ensemble.
*/
protected void enforceMemoryLimit() {
double memoryLimit = this.maxByteSizeOption.getValue() / (double) (this.ensemble.length + 1);
for (int i = 0; i < this.ensemble.length; i++) {
((HoeffdingTree) this.ensemble[(int) this.weights[i][1]].classifier).maxByteSizeOption.setValue((int) Math
.round(memoryLimit));
((HoeffdingTree) this.ensemble[(int) this.weights[i][1]].classifier).enforceTrackerLimit();
}
}
/**
* Computes the MSEr threshold.
*
* @return The MSEr threshold.
*/
protected void computeMseR() {
double p_c;
this.mse_r = 0;
for (int i = 0; i < this.classDistributions.length; i++) {
p_c = (double) this.classDistributions[i] / (double) this.windowSize;
this.mse_r += p_c * ((1 - p_c) * (1 - p_c));
}
}
/**
* Computes the weight of a learner before training a given example.
* @param i the identifier (in terms of array learners)
* of the classifier for which the weight is supposed to be computed
* @param example the newest example
* @return the computed weight.
*/
protected double computeWeight(int i, Instance example) {
int d = this.windowSize;
int t = this.processedInstances - this.ensemble[i].birthday;
double e_it = 0;
double mse_it = 0;
double voteSum = 0;
try{
double[] votes = this.ensemble[i].classifier.getVotesForInstance(example);
for (double element : votes) {
voteSum += element;
}
if (voteSum > 0) {
double f_it = 1 - (votes[(int) example.classValue()] / voteSum);
e_it = f_it * f_it;
} else {
e_it = 1;
}
} catch (Exception e) {
e_it = 1;
}
if(t > d)
{
mse_it = this.ensemble[i].mse_it + e_it/(double)d - this.ensemble[i].squareErrors[t % d]/(double)d;
}
else
{
mse_it = this.ensemble[i].mse_it*(t-1)/t + e_it/(double)t;
}
this.ensemble[i].squareErrors[t % d] = e_it;
this.ensemble[i].mse_it = mse_it;
if(linearOption.isSet())
{
return java.lang.Math.max(mse_r - mse_it, Double.MIN_VALUE);
}
else
{
return 1.0 / (this.mse_r + mse_it + Double.MIN_VALUE);
}
}
/**
* Adds ensemble weights to the measurements.
*/
@Override
protected Measurement[] getModelMeasurementsImpl() {
if(this.verboseOption.isSet())
{
Measurement[] measurements = new Measurement[(int) this.memberCountOption.getValue()];
for (int m = 0; m < this.memberCountOption.getValue(); m++) {
measurements[m] = new Measurement("Member weight " + (m + 1), -1);
}
if (this.weights != null) {
for (int i = 0; i < this.weights.length; i++) {
measurements[i] = new Measurement("Member weight " + (i + 1), this.weights[i][0]);
}
}
return measurements;
}
else
{
return null;
}
}
/**
* Adds a classifier to the storage.
*
* @param newClassifier
* The classifier to add.
* @param newClassifiersWeight
* The new classifiers weight.
*/
protected void addToStored(ClassifierWithMemory newClassifier, double newClassifiersWeight) {
ClassifierWithMemory[] newStored = new ClassifierWithMemory[this.ensemble.length + 1];
double[][] newStoredWeights = new double[newStored.length][2];
for (int i = 0; i < newStored.length; i++) {
if (i < this.ensemble.length) {
newStored[i] = this.ensemble[i];
newStoredWeights[i][0] = this.weights[i][0];
newStoredWeights[i][1] = this.weights[i][1];
} else {
newStored[i] = newClassifier;
newStoredWeights[i][0] = newClassifiersWeight;
newStoredWeights[i][1] = i;
}
}
this.ensemble = newStored;
this.weights = newStoredWeights;
}
/**
* Finds the index of the classifier with the smallest weight.
* @return
*/
private int getPoorestClassifierIndex() {
int minIndex = 0;
for (int i = 1; i < this.weights.length; i++) {
if(this.weights[i][0] < this.weights[minIndex][0]){
minIndex = i;
}
}
return minIndex;
}
/**
* Initiates the current chunk and class distribution variables.
*/
private void initVariables() {
if (this.currentWindow == null) {
this.currentWindow = new int[this.windowSize];
}
if (this.classDistributions == null) {
this.classDistributions = new long[this.getModelContext().classAttribute().numValues()];
}
}
protected class ClassifierWithMemory
{
private Classifier classifier;
private int birthday;
private double[] squareErrors;
private double mse_it;
protected ClassifierWithMemory(Classifier classifier, int windowSize)
{
this.classifier = classifier;
this.squareErrors = new double[windowSize];
this.mse_it = 0;
}
}
}
| Java |
/*
* RandomRules.java
* Copyright (C) 2014 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import moa.classifiers.Regressor;
import weka.core.Instance;
import moa.core.DoubleVector;
import moa.core.InstancesHeader;
import moa.core.Measurement;
import moa.core.MiscUtils;
import moa.options.ClassOption;
import moa.options.FlagOption;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.streams.InstanceStream;
import weka.core.DenseInstance;
import weka.core.FastVector;
import weka.core.Instances;
/**
 * Ensemble of rule-based learners, each trained on a random subset of the
 * input attributes (a random-subspace scheme), optionally combined with
 * online bagging.
 */
public class RandomRules extends AbstractClassifier implements Regressor {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "RandomRules";
    }

    public ClassOption baseLearnerOption = new ClassOption("baseLearner", 'l',
            "Classifier to train.", Classifier.class, "rules.AMRules");

    public IntOption ensembleSizeOption = new IntOption("ensembleSize", 's',
            "The number of models in the bag.", 10, 1, Integer.MAX_VALUE);

    public FloatOption numAttributesPercentageOption = new FloatOption("numAttributesPercentage", 'n',
            "The number of attributes to use per model.", 63.2, 0, 100);

    public FlagOption useBaggingOption = new FlagOption("useBagging", 'p',
            "Use Bagging.");

    /** The ensemble of base models, each with its own attribute subset. */
    protected Classifier[] ensemble;

    /** True when the base learner is a regressor (votes averaged, not normalized). */
    protected boolean isRegression;

    @Override
    public void resetLearningImpl() {
        this.ensemble = new Classifier[this.ensembleSizeOption.getValue()];
        Classifier baseLearner = (Classifier) getPreparedClassOption(this.baseLearnerOption);
        baseLearner.resetLearning();
        for (int i = 0; i < this.ensemble.length; i++) {
            this.ensemble[i] = baseLearner.copy();
        }
        this.isRegression = (baseLearner instanceof Regressor);
    }

    /**
     * Trains every member on the instance projected onto its attribute
     * subset; with bagging enabled, each member sees the instance a
     * Poisson(1)-distributed number of times (possibly zero).
     */
    @Override
    public void trainOnInstanceImpl(Instance inst) {
        for (int i = 0; i < this.ensemble.length; i++) {
            int k = 1;
            if (this.useBaggingOption.isSet()) {
                k = MiscUtils.poisson(1.0, this.classifierRandom);
            }
            if (k > 0) {
                Instance weightedInst = transformInstance(inst, i);
                weightedInst.setWeight(inst.weight() * k);
                this.ensemble[i].trainOnInstance(weightedInst);
            }
        }
    }

    /**
     * Combines the member votes: normalized and summed for classification,
     * averaged for regression.
     */
    @Override
    public double[] getVotesForInstance(Instance inst) {
        DoubleVector combinedVote = new DoubleVector();
        for (int i = 0; i < this.ensemble.length; i++) {
            DoubleVector vote = new DoubleVector(this.ensemble[i].getVotesForInstance(transformInstance(inst, i)));
            if (vote.sumOfValues() != 0.0) {
                if (this.isRegression == false) {
                    vote.normalize();
                }
                combinedVote.addValues(vote);
            }
        }
        if (this.isRegression == true) {
            combinedVote.scaleValues(1.0 / this.ensemble.length);
        }
        return combinedVote.getArrayRef();
    }

    @Override
    public boolean isRandomizable() {
        return true;
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // Intentionally empty: no human-readable model description is produced.
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        return new Measurement[]{new Measurement("ensemble size",
                    this.ensemble != null ? this.ensemble.length : 0)};
    }

    @Override
    public Classifier[] getSubClassifiers() {
        // Return a copy so callers cannot mutate the internal ensemble array.
        return this.ensemble.clone();
    }

    /** Per-model attribute subsets: listAttributes[a][m] is the original index
     *  of attribute a in model m's reduced view. */
    protected int[][] listAttributes;

    /** Number of attributes kept per model (excluding the class attribute). */
    protected int numAttributes;

    /** Reduced-view headers, one per ensemble member. */
    protected InstancesHeader[] dataset;

    /**
     * Projects an instance onto the attribute subset of the given ensemble
     * member, lazily building the random subsets and reduced headers on the
     * first call.
     *
     * @param inst the original instance
     * @param classifierIndex the ensemble member the instance is intended for
     * @return the instance restricted to that member's attributes plus the class
     */
    private Instance transformInstance(Instance inst, int classifierIndex) {
        if (this.listAttributes == null) {
            this.numAttributes = (int) (this.numAttributesPercentageOption.getValue() * inst.numAttributes() / 100.0);
            // Candidates are drawn from {0, ..., numAttributes()-2} (the class
            // attribute is assumed to be last), so cap the subset size —
            // otherwise the uniqueness search below can never terminate when
            // the percentage asks for more attributes than exist.
            if (this.numAttributes > inst.numAttributes() - 1) {
                this.numAttributes = inst.numAttributes() - 1;
            }
            this.listAttributes = new int[this.numAttributes][this.ensemble.length];
            this.dataset = new InstancesHeader[this.ensemble.length];
            for (int ensembleIndex = 0; ensembleIndex < this.ensemble.length; ensembleIndex++) {
                for (int attributeIndex = 0; attributeIndex < this.numAttributes; attributeIndex++) {
                    // Rejection-sample until the index is unique within this model.
                    boolean isUnique = false;
                    while (isUnique == false) {
                        this.listAttributes[attributeIndex][ensembleIndex] = this.classifierRandom.nextInt(inst.numAttributes() - 1);
                        isUnique = true;
                        for (int k = 0; k < attributeIndex; k++) {
                            if (this.listAttributes[attributeIndex][ensembleIndex] == this.listAttributes[k][ensembleIndex]) {
                                isUnique = false;
                                break;
                            }
                        }
                    }
                }
                // Build the reduced header: chosen attributes plus the class attribute.
                // (Leftover System.out debug prints were removed from this loop.)
                FastVector attributes = new FastVector();
                for (int attributeIndex = 0; attributeIndex < this.numAttributes; attributeIndex++) {
                    attributes.addElement(inst.attribute(this.listAttributes[attributeIndex][ensembleIndex]));
                }
                attributes.addElement(inst.classAttribute());
                this.dataset[ensembleIndex] = new InstancesHeader(new Instances(
                        getCLICreationString(InstanceStream.class), attributes, 0));
                this.dataset[ensembleIndex].setClassIndex(this.numAttributes);
                this.ensemble[ensembleIndex].setModelContext(this.dataset[ensembleIndex]);
            }
        }
        // Copy the selected attribute values; the class value goes last.
        double[] attVals = new double[this.numAttributes + 1];
        for (int attributeIndex = 0; attributeIndex < this.numAttributes; attributeIndex++) {
            attVals[attributeIndex] = inst.value(this.listAttributes[attributeIndex][classifierIndex]);
        }
        Instance instance = new DenseInstance(1.0, attVals);
        instance.setDataset(dataset[classifierIndex]);
        instance.setClassValue(inst.classValue());
        return instance;
    }
}
| Java |
/*
* OzaBoost.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import weka.core.Instance;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.MiscUtils;
import moa.options.ClassOption;
import moa.options.FlagOption;
import moa.options.IntOption;
/**
* Incremental on-line boosting of Oza and Russell.
*
* <p>See details in:<br /> N. Oza and S. Russell. Online bagging and boosting.
* In Artificial Intelligence and Statistics 2001, pages 105–112. Morgan
* Kaufmann, 2001.</p> <p>For the boosting method, Oza and Russell note that the
* weighting procedure of AdaBoost actually divides the total example weight
* into two halves – half of the weight is assigned to the correctly classified
* examples, and the other half goes to the misclassified examples. They use the
* Poisson distribution for deciding the random probability that an example is
* used for training, only this time the parameter changes according to the
* boosting weight of the example as it is passed through each model in
* sequence.</p>
*
* <p>Parameters:</p> <ul> <li>-l : Classifier to train</li> <li>-s : The number
* of models to boost</li> <li>-p : Boost with weights only; no poisson</li>
* </ul>
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class OzaBoost extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Incremental on-line boosting of Oza and Russell.";
    }

    public ClassOption baseLearnerOption = new ClassOption("baseLearner", 'l',
            "Classifier to train.", Classifier.class, "trees.HoeffdingTree");

    public IntOption ensembleSizeOption = new IntOption("ensembleSize", 's',
            "The number of models to boost.", 10, 1, Integer.MAX_VALUE);

    public FlagOption pureBoostOption = new FlagOption("pureBoost", 'p',
            "Boost with weights only; no poisson.");

    /** The boosted ensemble members. */
    protected Classifier[] ensemble;

    /** Per-member sum of weight mass classified correctly. */
    protected double[] scms;

    /** Per-member sum of weight mass classified incorrectly. */
    protected double[] swms;

    @Override
    public void resetLearningImpl() {
        this.ensemble = new Classifier[this.ensembleSizeOption.getValue()];
        Classifier baseLearner = (Classifier) getPreparedClassOption(this.baseLearnerOption);
        baseLearner.resetLearning();
        for (int i = 0; i < this.ensemble.length; i++) {
            this.ensemble[i] = baseLearner.copy();
        }
        this.scms = new double[this.ensemble.length];
        this.swms = new double[this.ensemble.length];
    }

    /**
     * Sequentially trains every member: the instance's boosting weight
     * lambda_d is raised for members that misclassify it and lowered for
     * members that classify it correctly, so later members focus on the
     * examples earlier members got wrong.
     */
    @Override
    public void trainOnInstanceImpl(Instance inst) {
        double lambda_d = 1.0;
        for (int i = 0; i < this.ensemble.length; i++) {
            // Pure boosting uses the weight directly; otherwise sample a
            // Poisson(lambda_d) replication count.
            double k = this.pureBoostOption.isSet() ? lambda_d : MiscUtils.poisson(lambda_d, this.classifierRandom);
            if (k > 0.0) {
                Instance weightedInst = (Instance) inst.copy();
                weightedInst.setWeight(inst.weight() * k);
                this.ensemble[i].trainOnInstance(weightedInst);
            }
            if (this.ensemble[i].correctlyClassifies(inst)) {
                this.scms[i] += lambda_d;
                lambda_d *= this.trainingWeightSeenByModel / (2 * this.scms[i]);
            } else {
                this.swms[i] += lambda_d;
                lambda_d *= this.trainingWeightSeenByModel / (2 * this.swms[i]);
            }
        }
    }

    /**
     * Computes the AdaBoost-style vote weight log(1/beta) of member {@code i}
     * from its boosting statistics; members with zero or worse-than-chance
     * error get no vote.
     */
    protected double getEnsembleMemberWeight(int i) {
        double em = this.swms[i] / (this.scms[i] + this.swms[i]);
        if ((em == 0.0) || (em > 0.5)) {
            return 0.0;
        }
        double Bm = em / (1.0 - em);
        return Math.log(1.0 / Bm);
    }

    @Override
    public double[] getVotesForInstance(Instance inst) {
        DoubleVector combinedVote = new DoubleVector();
        for (int i = 0; i < this.ensemble.length; i++) {
            double memberWeight = getEnsembleMemberWeight(i);
            if (memberWeight > 0.0) {
                DoubleVector vote = new DoubleVector(this.ensemble[i].getVotesForInstance(inst));
                if (vote.sumOfValues() > 0.0) {
                    vote.normalize();
                    vote.scaleValues(memberWeight);
                    combinedVote.addValues(vote);
                }
            } else {
                // Members are consulted in order; a zero weight stops the vote.
                break;
            }
        }
        return combinedVote.getArrayRef();
    }

    @Override
    public boolean isRandomizable() {
        return true;
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // Intentionally empty: no human-readable model description is produced.
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        return new Measurement[]{new Measurement("ensemble size",
                    this.ensemble != null ? this.ensemble.length : 0)};
    }

    @Override
    public Classifier[] getSubClassifiers() {
        return this.ensemble.clone();
    }
}
| Java |
/*
* DACC.java
*
* @author Ghazal Jaber (ghazal.jaber@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package moa.classifiers.meta;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.options.ClassOption;
import moa.options.FloatOption;
import moa.options.MultiChoiceOption;
import weka.core.Instance;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
/**
* Dynamic Adaptation to Concept Changes.
* Ensemble method for data streams that adapts to concept changes.
*
* Reference: JABER, G., CORNUEJOLS, A., and TARROUX, P. A New On-Line Learning Method
* for Coping with Recurring Concepts: The ADACC System. In : Neural Information
* Processing. Springer Berlin Heidelberg, 2013. p. 595-604.
*
* @author Ghazal Jaber (ghazal.jaber@gmail.com)
*
*/
public class DACC extends AbstractClassifier {
private static final long serialVersionUID = 1L;
@Override
public String getPurposeString() {
return "Dynamic Adaptation to Concept Changes for data streams.";
}
/**
* Base classifier
*/
public ClassOption learnerOption = new ClassOption("baseLearner", 'l',
"Classifier to train.", Classifier.class, "bayes.NaiveBayes");
/**
* Ensemble size
*/
public FloatOption memberCountOption = new FloatOption("ensembleSize", 'n', "The maximum number of classifiers in an ensemble.", 20, 1, Integer.MAX_VALUE);
/**
* Maturity age of classifiers
*/
public FloatOption maturityOption = new FloatOption("maturity", 'a',
"The maturity age.", 20, 0, 100);
/**
* Size of the evaluation window for weights computing
*/
public FloatOption evaluationSizeOption = new FloatOption("evalSize", 'e',
"The size of the evaluation window.", 20, 1, 1000);
/**
* Combination functions: MAX and WVD (MAX leads to a faster reactivity to the change, WVD is more robust to noise)
*/
public MultiChoiceOption combinationOption= new MultiChoiceOption("cmb", 'c', "The combination function.",
new String[]{"MAX","WVD"} , new String[] {"Maximum","Weighted Vote of the best"},
0);
/**
* Ensemble of classifiers
*/
protected Classifier[] ensemble;
/**
* Weights of classifiers
*/
protected Pair[] ensembleWeights;
/**
* Age of classifiers (to compare with maturity age)
*/
protected double[] ensembleAges;
/**
* Evaluation windows (recent classification errors)
*/
protected int[][] ensembleWindows;
/**
* Number of instances from the stream
*/
protected int nbInstances = 0;
/**
* Initializes the method variables
*/
protected void initVariables() {
    // Allocate the ensemble and its per-member bookkeeping arrays.
    int size = (int) this.memberCountOption.getValue();
    int windowLength = (int) this.evaluationSizeOption.getValue();
    this.ensemble = new Classifier[size];
    this.ensembleAges = new double[size];
    this.ensembleWindows = new int[size][windowLength];
}
@Override
public void resetLearningImpl() {
    // Prototype learner: every member starts as a fresh copy of it.
    Classifier prototype = (Classifier) getPreparedClassOption(this.learnerOption);
    prototype.resetLearning();
    initVariables();
    int windowLength = (int) this.evaluationSizeOption.getValue();
    this.ensembleWeights = new Pair[this.ensemble.length];
    for (int i = 0; i < this.ensemble.length; i++) {
        this.ensemble[i] = prototype.copy();
        this.ensembleAges[i] = 0;
        this.ensembleWeights[i] = new Pair(0.0, i);
        this.ensembleWindows[i] = new int[windowLength];
    }
}
@Override
public void trainOnInstanceImpl(Instance inst) {
    // All training logic (ageing, weighting, pruning) lives in trainAndClassify.
    trainAndClassify(inst);
}
@Override
public double[] getVotesForInstance(Instance inst) {
    DoubleVector combined = new DoubleVector();
    // Pick the voting committee according to the chosen combination rule:
    // index 0 = MAX, otherwise WVD.
    ArrayList<Integer> voters = (this.combinationOption.getChosenIndex() == 0)
            ? getMAXIndexes()
            : getWVDIndexes();
    if (this.trainingWeightSeenByModel > 0.0) {
        for (Integer idx : voters) {
            double w = this.ensembleWeights[idx].val;
            if (w > 0.0) {
                DoubleVector vote = new DoubleVector(this.ensemble[idx].getVotesForInstance(inst));
                if (vote.sumOfValues() > 0.0) {
                    vote.normalize();
                    vote.scaleValues(w);
                    combined.addValues(vote);
                }
            }
        }
    }
    return combined.getArrayRef();
}
/**
* Receives a training instance from the stream and
* updates the adaptive classifiers accordingly
* @param inst the instance from the stream
*/
protected void trainAndClassify(Instance inst){
    nbInstances++;
    // mature:   remains true only if every adaptive learner has reached the maturity age.
    // unmature: remains true only if no adaptive learner has reached it yet.
    boolean mature = true;
    boolean unmature = true;
    for (int i = 0; i < getNbActiveClassifiers(); i++) {
        // check if all adaptive learners are mature
        if (this.ensembleAges[i] < this.maturityOption.getValue() && i<getNbAdaptiveClassifiers())
            mature = false;
        // check if all adaptive learners are not mature
        if (this.ensembleAges[i] >= this.maturityOption.getValue() && i<getNbAdaptiveClassifiers())
            unmature = false;
        // NOTE(review): this gate compares the instance counter against the
        // classifier's stored Pair.index — presumably a staggered start so
        // classifier i only becomes active after i+1 instances; confirm intent.
        if (this.nbInstances >= this.ensembleWeights[i].index + 1){
            // train adaptive learners
            if (i < getNbAdaptiveClassifiers())
                this.ensemble[i].trainOnInstance(inst);
            // Record this prediction (1 = correct) and refresh the weight
            // from the sliding evaluation window.
            int val = this.ensemble[i].correctlyClassifies(inst)?1:0;
            double sum = updateEvaluationWindow(i, val);
            this.ensembleWeights[i].val = sum;
            this.ensembleAges[i] = this.ensembleAges[i]+1;
        }
    }
    // if all adaptive learners are not mature --> set weights to one
    if (unmature)
        for (int i = 0; i < getNbAdaptiveClassifiers(); i++)
            this.ensembleWeights[i].val=1;
    // if all adaptive learners are mature --> delete one learner,
    // chosen uniformly at random from the worst half of the adaptive ensemble.
    if (mature){
        Pair[] learners = getHalf(false);
        if (learners.length > 0){
            double rand = classifierRandom.nextInt(learners.length);
            discardModel(learners[(int)rand].index);
        }
    }
}
/**
* Resets a classifier in the ensemble
* @param index the index of the classifier in the ensemble
*/
public void discardModel(int index) {
    // Reset the learner and clear every statistic tied to it.
    this.ensemble[index].resetLearning();
    this.ensembleAges[index] = 0;
    this.ensembleWeights[index].val = 0;
    this.ensembleWindows[index] = new int[(int) this.evaluationSizeOption.getValue()];
}
/**
* Updates the evaluation window of a classifier and returns the
* updated weight value.
* @param index the index of the classifier in the ensemble
* @param val the last evaluation record of the classifier
* @return the updated weight value of the classifier
*/
protected double updateEvaluationWindow(int index,int val){
    // Shift-register update: slot 0 holds the newest record, older records
    // move one position toward the end; anything past the window is dropped.
    int[] newEnsembleWindows = new int[this.ensembleWindows[index].length];
    // Effective window length: capped by the classifier's age while it is
    // younger than the evaluation window.
    int wsize = (int)Math.min(this.evaluationSizeOption.getValue(),this.ensembleAges[index]+1);
    int sum = 0;
    for (int i = 0; i < wsize-1 ; i++){
        newEnsembleWindows[i+1] = this.ensembleWindows[index][i];
        sum = sum + this.ensembleWindows[index][i];
    }
    newEnsembleWindows[0] = val;
    this.ensembleWindows[index] = newEnsembleWindows;
    // Immature classifiers get weight 0; mature ones get their windowed
    // accuracy (fraction of correct predictions over the window).
    if (this.ensembleAges[index] >= this.maturityOption.getValue())
        return (sum + val) * 1.0/wsize;
    else
        return 0;
}
/**
 * Returns the best (or worst) half of classifiers in the adaptive ensemble.
 * The best classifiers are used to compute the stability index in ADACC. The worst
 * classifiers are returned in order to select a classifier for deletion.
 * @param bestHalf boolean value set to true (false) if we want to return
 * the best (worst) half of adaptive classifiers.
 * @return an array containing the weight values of the corresponding classifiers
 * and their indexes in the ensemble.
 */
protected Pair[] getHalf(boolean bestHalf) {
    // Sort a copy so the ensemble's own weight array keeps its ordering.
    Pair[] sortedWeights = new Pair[getNbAdaptiveClassifiers()];
    System.arraycopy(ensembleWeights, 0, sortedWeights, 0, sortedWeights.length);
    if (bestHalf) {
        Arrays.sort(sortedWeights, Collections.reverseOrder());
    } else {
        Arrays.sort(sortedWeights);
    }
    // Integer division already truncates toward zero (= floor for non-negative
    // lengths), so the previous (int) Math.floor(...) was redundant.
    Pair[] result = new Pair[sortedWeights.length / 2];
    System.arraycopy(sortedWeights, 0, result, 0, result.length);
    return result;
}
/**
 * Returns the classifiers that vote for the final prediction
 * when the MAX combination function is selected.
 * @return the indexes of every classifier whose weight is tied for the
 * highest weight value in the ensemble
 */
protected ArrayList<Integer> getMAXIndexes() {
    ArrayList<Integer> result = new ArrayList<Integer>();
    int count = getNbActiveClassifiers();
    Pair[] sorted = new Pair[count];
    System.arraycopy(ensembleWeights, 0, sorted, 0, count);
    Arrays.sort(sorted);
    // Walk down from the top of the sorted array, collecting every pair
    // that still matches the maximum weight.
    double top = sorted[count - 1].val;
    int pos = count - 1;
    while (pos >= 0 && sorted[pos].val == top) {
        result.add(sorted[pos].index);
        pos--;
    }
    return result;
}
/**
 * Returns the classifiers that vote for the final prediction
 * when the WVD combination function is selected.
 * @return the indexes of the classifiers whose weights lie in the higher half
 * of the ensemble's weight interval, i.e. weight >= (min + max) / 2.
 */
protected ArrayList<Integer> getWVDIndexes() {
    ArrayList<Integer> maxWIndex = new ArrayList<Integer>();
    Pair[] sortedWeights = new Pair[getNbActiveClassifiers()];
    System.arraycopy(ensembleWeights, 0, sortedWeights, 0, sortedWeights.length);
    Arrays.sort(sortedWeights);
    double minWVal = sortedWeights[0].val;
    double maxWVal = sortedWeights[sortedWeights.length - 1].val;
    // Midpoint of the interval [min, max]. The previous code computed
    // (max - min) / 2, i.e. half the RANGE, which only equals the interval
    // midpoint when the minimum weight happens to be 0 — contradicting the
    // documented "higher half of the ensemble's weight interval".
    double med = minWVal + (maxWVal - minWVal) / 2.0;
    // Collect from the heaviest classifier down until the threshold is crossed.
    for (int i = sortedWeights.length - 1; i >= 0; i--) {
        if (sortedWeights[i].val < med) {
            break;
        }
        maxWIndex.add(sortedWeights[i].index);
    }
    return maxWIndex;
}
/**
 * Returns the number of classifiers used for prediction
 * which includes the adaptive learners and the snapshots in ADACC
 * @return the number of classifiers used for prediction
 */
protected int getNbActiveClassifiers(){
    return this.ensemble.length;
}
/**
 * Returns the number of adaptive classifiers in the ensemble
 * which excludes the static snapshots in ADACC
 * @return the number of adaptive classifiers
 */
protected int getNbAdaptiveClassifiers(){
    // NOTE(review): here both counts equal ensemble.length; presumably a
    // subclass (ADACC) overrides one of them to exclude snapshots — confirm.
    return this.ensemble.length;
}
@Override
public void getModelDescription(StringBuilder out, int indent) {
    // Intentionally left empty: no textual model description is produced.
    // TODO Auto-generated method stub
}
// Reports ensemble size plus the configured maturity age, evaluation-window
// size and combination-function index as MOA measurements.
@Override
protected Measurement[] getModelMeasurementsImpl() {
    Measurement[] measurements = new Measurement[4];
    measurements[0] = new Measurement("size ",
            this.ensemble.length);
    measurements[1] = new Measurement("maturity ",
            this.maturityOption.getValue());
    measurements[2] = new Measurement("evalsize ",
            this.evaluationSizeOption.getValue());
    measurements[3] = new Measurement("cmb ",
            this.combinationOption.getChosenIndex());
    return measurements;
}
// Uses classifierRandom (resampling / deletion choices), so it must report
// itself as randomizable.
@Override
public boolean isRandomizable() {
    return true;
}
// Shallow copy: the returned array is fresh, but the Classifier elements are
// the live ensemble members, not copies.
@Override
public Classifier[] getSubClassifiers() {
    return this.ensemble.clone();
}
/**
 * This helper class is used to sort an array of pairs of integers: val and index.
 * The array is sorted based on the val field.
 * @author Ghazal Jaber
 *
 */
protected class Pair implements Comparable<Pair>, Serializable {

    private static final long serialVersionUID = 1L;

    // weight value, used as the sort key
    double val;
    // position of the classifier in the ensemble
    int index;

    public Pair(double d, int i) {
        this.val = d;
        this.index = i;
    }

    /**
     * Orders pairs by ascending {@code val}.
     * Uses {@link Double#compare} instead of subtraction/equality tests:
     * the old code returned -1 for BOTH argument orders when a NaN was
     * involved, violating the Comparable contract and risking undefined
     * behavior in Arrays.sort.
     */
    @Override
    public int compareTo(Pair other) {
        return Double.compare(this.val, other.val);
    }

    public double getValue() {
        return val;
    }
}
}
/*
* OzaBagASHT.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.Classifier;
import moa.classifiers.trees.ASHoeffdingTree;
import moa.core.DoubleVector;
import moa.core.MiscUtils;
import moa.options.IntOption;
import moa.options.FlagOption;
import weka.core.Instance;
import weka.core.Utils;
/**
* Bagging using trees of different size.
* The Adaptive-Size Hoeffding Tree (ASHT) is derived from the Hoeffding Tree
* algorithm with the following differences:
* <ul>
* <li> it has a maximum number of split nodes, or size
* <li> after one node splits, if the number of split nodes of the ASHT tree
* is higher than the maximum value, then it deletes some nodes to reduce its size
* </ul>
* The intuition behind this method is as follows: smaller trees adapt
* more quickly to changes, and larger trees do better during periods with
* no or little change, simply because they were built on more data. Trees
* limited to size s will be reset about twice as often as trees with a size
* limit of 2s. This creates a set of different reset-speeds for an ensemble of such
* trees, and therefore a subset of trees that are a good approximation for the
* current rate of change. It is important to note that resets will happen all
* the time, even for stationary datasets, but this behaviour should not have
* a negative impact on the ensemble’s predictive performance.
* When the tree size exceeds the maximun size value, there are two different
* delete options: <ul>
* <li> delete the oldest node, the root, and all of its children except the one
* where the split has been made. After that, the root of the child not
* deleted becomes the new root
* <li> delete all the nodes of the tree, i.e., restart from a new root.
* </ul>
* The maximum allowed size for the n-th ASHT tree is twice the maximum
* allowed size for the (n-1)-th tree. Moreover, each tree has a weight
* proportional to the inverse of the square of its error, and it monitors its
* error with an exponential weighted moving average (EWMA) with alpha = .01.
* The size of the first tree is 2.
* <br/><br/>
* With this new method, it is attempted to improve bagging performance
* by increasing tree diversity. It has been observed that boosting tends to
* produce a more diverse set of classifiers than bagging, and this has been
* cited as a factor in increased performance.<br/>
* See more details in:<br/><br/>
* Albert Bifet, Geoff Holmes, Bernhard Pfahringer, Richard Kirkby,
* and Ricard Gavaldà. New ensemble methods for evolving data
* streams. In 15th ACM SIGKDD International Conference on Knowledge
* Discovery and Data Mining, 2009.<br/><br/>
* The learner must be ASHoeffdingTree, a Hoeffding Tree with a maximum
* size value.<br/><br/>
* Example:<br/><br/>
* <code>OzaBagASHT -l ASHoeffdingTree -s 10 -u -r </code>
* Parameters:<ul>
* <li>Same parameters as <code>OzaBag</code>
* <li>-f : the size of first classifier in the bag.
* <li>-u : Enable weight classifiers
* <li>-e : Reset trees when size is higher than the max
* </ul>
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public class OzaBagASHT extends OzaBag {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Bagging using trees of different size.";
    }

    /** Maximum size granted to the first tree; tree i is allowed size * 2^i. */
    public IntOption firstClassifierSizeOption = new IntOption("firstClassifierSize", 'f',
            "The size of first classifier in the bag.", 1, 1, Integer.MAX_VALUE);

    /** If set, each tree's vote is weighted by the inverse of its squared error. */
    public FlagOption useWeightOption = new FlagOption("useWeight",
            'u', "Enable weight classifiers.");

    /** If set, a tree is rebuilt from scratch when it exceeds its maximum size. */
    public FlagOption resetTreesOption = new FlagOption("resetTrees",
            'e', "Reset trees when size is higher than the max.");

    /** EWMA estimate of each ensemble member's error. */
    protected double[] error;

    /** Smoothing factor of the error EWMA. */
    protected double alpha = 0.01;

    /**
     * Rebuilds the ensemble: each member is a copy of the base learner with a
     * doubling maximum size, starting at firstClassifierSizeOption.
     */
    @Override
    public void resetLearningImpl() {
        this.ensemble = new Classifier[this.ensembleSizeOption.getValue()];
        this.error = new double[this.ensembleSizeOption.getValue()];
        Classifier baseLearner = (Classifier) getPreparedClassOption(this.baseLearnerOption);
        baseLearner.resetLearning();
        int pow = this.firstClassifierSizeOption.getValue(); //EXTENSION TO ASHT
        for (int i = 0; i < this.ensemble.length; i++) {
            this.ensemble[i] = baseLearner.copy();
            this.error[i] = 0.0;
            ((ASHoeffdingTree) this.ensemble[i]).setMaxSize(pow); //EXTENSION TO ASHT
            if ((this.resetTreesOption != null)
                    && this.resetTreesOption.isSet()) {
                ((ASHoeffdingTree) this.ensemble[i]).setResetTree();
            }
            pow *= 2; //EXTENSION TO ASHT
        }
    }

    /**
     * Trains each member on a Poisson(1)-weighted copy of the instance and
     * updates its EWMA error (prequential: the member is tested on the
     * instance before being trained on it).
     */
    @Override
    public void trainOnInstanceImpl(Instance inst) {
        int trueClass = (int) inst.classValue();
        for (int i = 0; i < this.ensemble.length; i++) {
            int k = MiscUtils.poisson(1.0, this.classifierRandom);
            if (k > 0) {
                Instance weightedInst = (Instance) inst.copy();
                weightedInst.setWeight(inst.weight() * k);
                if (Utils.maxIndex(this.ensemble[i].getVotesForInstance(inst)) == trueClass) {
                    this.error[i] += alpha * (0.0 - this.error[i]); //EWMA
                } else {
                    this.error[i] += alpha * (1.0 - this.error[i]); //EWMA
                }
                this.ensemble[i].trainOnInstance(weightedInst);
            }
        }
    }

    /**
     * Combines the members' normalized vote vectors, optionally scaling each
     * by 1 / error^2 when useWeightOption is set.
     */
    @Override // was missing: this overrides the superclass implementation
    public double[] getVotesForInstance(Instance inst) {
        DoubleVector combinedVote = new DoubleVector();
        for (int i = 0; i < this.ensemble.length; i++) {
            DoubleVector vote = new DoubleVector(this.ensemble[i].getVotesForInstance(inst));
            if (vote.sumOfValues() > 0.0) {
                vote.normalize();
                if ((this.useWeightOption != null)
                        && this.useWeightOption.isSet()) {
                    // Clamp the EWMA error away from zero: with error == 0 the
                    // old code scaled by 1/0 = Infinity, and 0 * Infinity = NaN
                    // poisoned the combined vote. A so-far-perfect tree now gets
                    // a very large but finite weight instead.
                    double err = Math.max(this.error[i], 1e-10);
                    vote.scaleValues(1.0 / (err * err));
                }
                combinedVote.addValues(vote);
            }
        }
        return combinedVote.getArrayRef();
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // Intentionally left empty: no textual model description is produced.
    }
}
| Java |
/*
* LeveragingBag.java
* Copyright (C) 2010 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.core.driftdetection.ADWIN;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import weka.core.Instance;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.MiscUtils;
import moa.options.*;
/**
* Leveraging Bagging for evolving data streams using ADWIN. Leveraging Bagging
* and Leveraging Bagging MC using Random Output Codes ( -o option).
*
* <p>See details in:<br /> Albert Bifet, Geoffrey Holmes, Bernhard Pfahringer.
* Leveraging Bagging for Evolving Data Streams Machine Learning and Knowledge
* Discovery in Databases, European Conference, ECML PKDD}, 2010.</p>
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public class LeveragingBag extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Leveraging Bagging for evolving data streams using ADWIN.";
    }

    // Base learner each ensemble member is copied from.
    public ClassOption baseLearnerOption = new ClassOption("baseLearner", 'l',
            "Classifier to train.", Classifier.class, "trees.HoeffdingTree");

    // Number of ensemble members.
    public IntOption ensembleSizeOption = new IntOption("ensembleSize", 's',
            "The number of models in the bag.", 10, 1, Integer.MAX_VALUE);

    // Poisson lambda used to resample instance weights (default 6 per the
    // Leveraging Bagging paper).
    public FloatOption weightShrinkOption = new FloatOption("weightShrink", 'w',
            "The number to use to compute the weight of new instances.", 6, 0.0, Float.MAX_VALUE);

    // Confidence parameter of each member's ADWIN drift detector.
    public FloatOption deltaAdwinOption = new FloatOption("deltaAdwin", 'a',
            "Delta of Adwin change detection", 0.002, 0.0, 1.0);

    // Leveraging Bagging MC: uses this option to use Output Codes
    public FlagOption outputCodesOption = new FlagOption("outputCodes", 'o',
            "Use Output Codes to use binary classifiers.");

    // Selects which of the five Leveraging Bagging variants computes the
    // per-member training weight k in trainOnInstanceImpl.
    public MultiChoiceOption leveraginBagAlgorithmOption = new MultiChoiceOption(
            "leveraginBagAlgorithm", 'm', "Leveraging Bagging to use.", new String[]{
                "LeveragingBag", "LeveragingBagME", "LeveragingBagHalf", "LeveragingBagWT", "LeveragingSubag"},
            new String[]{"Leveraging Bagging for evolving data streams using ADWIN",
                "Leveraging Bagging ME using weight 1 if misclassified, otherwise error/(1-error)",
                "Leveraging Bagging Half using resampling without replacement half of the instances",
                "Leveraging Bagging WT without taking out all instances.",
                "Leveraging Subagging using resampling without replacement."
            }, 0);

    // The ensemble members.
    protected Classifier[] ensemble;

    // One ADWIN drift detector per member, fed 0/1 error indicators.
    protected ADWIN[] ADError;

    // Count of drift detections that triggered a member replacement.
    protected int numberOfChangesDetected;

    // Random output-code matrix: one binary code word (row) per member,
    // one bit per class. Built lazily on the first training instance.
    protected int[][] matrixCodes;

    // True while the output-code matrix still needs to be generated.
    protected boolean initMatrixCodes = false;
/**
 * Rebuilds the ensemble: fresh copies of the base learner, each paired
 * with a new ADWIN drift detector.
 */
@Override
public void resetLearningImpl() {
    int size = this.ensembleSizeOption.getValue();
    Classifier baseLearner = (Classifier) getPreparedClassOption(this.baseLearnerOption);
    baseLearner.resetLearning();
    this.ensemble = new Classifier[size];
    this.ADError = new ADWIN[size];
    // Create every member and its drift detector in a single pass.
    for (int i = 0; i < size; i++) {
        this.ensemble[i] = baseLearner.copy();
        this.ADError[i] = new ADWIN((double) this.deltaAdwinOption.getValue());
    }
    this.numberOfChangesDetected = 0;
    if (this.outputCodesOption.isSet()) {
        // The output-code matrix needs the class count, so it is built
        // lazily on the first training instance.
        this.initMatrixCodes = true;
    }
}
/**
 * Trains every ensemble member on a re-weighted copy of the instance
 * (weight chosen by the selected Leveraging Bagging variant), feeds each
 * member's 0/1 error into its ADWIN detector, and on drift detection
 * resets the member with the highest estimated error.
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    int numClasses = inst.numClasses();
    //Output Codes
    // Lazily generate the random output-code matrix on the first instance:
    // one binary code word per member, balanced between zeros and ones.
    if (this.initMatrixCodes == true) {
        this.matrixCodes = new int[this.ensemble.length][inst.numClasses()];
        for (int i = 0; i < this.ensemble.length; i++) {
            int numberOnes;
            int numberZeros;
            do { // until we have the same number of zeros and ones
                numberOnes = 0;
                numberZeros = 0;
                for (int j = 0; j < numClasses; j++) {
                    int result = 0;
                    if (j == 1 && numClasses == 2) {
                        // Binary problems: force the two bits to be complementary.
                        result = 1 - this.matrixCodes[i][0];
                    } else {
                        result = (this.classifierRandom.nextBoolean() ? 1 : 0);
                    }
                    this.matrixCodes[i][j] = result;
                    if (result == 1) {
                        numberOnes++;
                    } else {
                        numberZeros++;
                    }
                }
            } while ((numberOnes - numberZeros) * (numberOnes - numberZeros) > (this.ensemble.length % 2));
        }
        this.initMatrixCodes = false;
    }
    boolean Change = false;
    Instance weightedInst = (Instance) inst.copy();
    double w = this.weightShrinkOption.getValue();
    //Train ensemble of classifiers
    for (int i = 0; i < this.ensemble.length; i++) {
        // k is the (possibly random) weight this member trains with.
        double k = 0.0;
        switch (this.leveraginBagAlgorithmOption.getChosenIndex()) {
            case 0: //LeveragingBag
                k = MiscUtils.poisson(w, this.classifierRandom);
                break;
            case 1: //LeveragingBagME
                // Weight 1 if misclassified; otherwise keep the instance with
                // probability error/(1-error).
                double error = this.ADError[i].getEstimation();
                k = !this.ensemble[i].correctlyClassifies(weightedInst) ? 1.0 : (this.classifierRandom.nextDouble() < (error / (1.0 - error)) ? 1.0 : 0.0);
                break;
            case 2: //LeveragingBagHalf
                w = 1.0;
                k = this.classifierRandom.nextBoolean() ? 0.0 : w;
                break;
            case 3: //LeveragingBagWT
                w = 1.0;
                k = 1.0 + MiscUtils.poisson(w, this.classifierRandom);
                break;
            case 4: //LeveragingSubag
                w = 1.0;
                k = MiscUtils.poisson(1, this.classifierRandom);
                k = (k > 0) ? w : 0;
                break;
        }
        if (k > 0) {
            if (this.outputCodesOption.isSet()) {
                // Replace the class label by this member's code-word bit.
                weightedInst.setClassValue((double) this.matrixCodes[i][(int) inst.classValue()]);
            }
            weightedInst.setWeight(inst.weight() * k);
            this.ensemble[i].trainOnInstance(weightedInst);
        }
        // Feed the 0/1 error into ADWIN; a window cut with a rising error
        // estimate counts as a detected change.
        boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(weightedInst);
        double ErrEstim = this.ADError[i].getEstimation();
        if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1)) {
            if (this.ADError[i].getEstimation() > ErrEstim) {
                Change = true;
            }
        }
    }
    if (Change) {
        numberOfChangesDetected++;
        // Reset the single member with the worst estimated error.
        double max = 0.0;
        int imax = -1;
        for (int i = 0; i < this.ensemble.length; i++) {
            if (max < this.ADError[i].getEstimation()) {
                max = this.ADError[i].getEstimation();
                imax = i;
            }
        }
        if (imax != -1) {
            this.ensemble[imax].resetLearning();
            //this.ensemble[imax].trainOnInstance(inst);
            this.ADError[imax] = new ADWIN((double) this.deltaAdwinOption.getValue());
        }
    }
}
/**
 * Combines the normalized votes of all members; delegates to the binary
 * output-code combination when output codes are enabled.
 */
@Override
public double[] getVotesForInstance(Instance inst) {
    if (this.outputCodesOption.isSet()) {
        return getVotesForInstanceBinary(inst);
    }
    DoubleVector total = new DoubleVector();
    for (Classifier member : this.ensemble) {
        DoubleVector memberVote = new DoubleVector(member.getVotesForInstance(inst));
        // Only members that produced a usable (non-zero) vote contribute.
        if (memberVote.sumOfValues() > 0.0) {
            memberVote.normalize();
            total.addValues(memberVote);
        }
    }
    return total.getArrayRef();
}
/**
 * Vote combination for the output-codes (Leveraging Bagging MC) mode:
 * each member makes a binary prediction on its code-word bit, and every
 * class whose code-word agrees with that bit receives one vote.
 * Returns all zeros while the code matrix has not been initialized yet.
 */
public double[] getVotesForInstanceBinary(Instance inst) {
    double combinedVote[] = new double[(int) inst.numClasses()];
    Instance weightedInst = (Instance) inst.copy();
    if (this.initMatrixCodes == false) {
        for (int i = 0; i < this.ensemble.length; i++) {
            //Replace class by OC
            weightedInst.setClassValue((double) this.matrixCodes[i][(int) inst.classValue()]);
            double vote[];
            vote = this.ensemble[i].getVotesForInstance(weightedInst);
            //Binary Case
            // Member's binary decision: 1 iff it favors bit value 1.
            int voteClass = 0;
            if (vote.length == 2) {
                voteClass = (vote[1] > vote[0] ? 1 : 0);
            }
            //Update votes
            // One vote for every class whose code word matches the decision.
            for (int j = 0; j < inst.numClasses(); j++) {
                if (this.matrixCodes[i][j] == voteClass) {
                    combinedVote[j] += 1;
                }
            }
        }
    }
    return combinedVote;
}
// Uses classifierRandom for Poisson resampling and output-code generation.
@Override
public boolean isRandomizable() {
    return true;
}
@Override
public void getModelDescription(StringBuilder out, int indent) {
    // Intentionally left empty: no textual model description is produced.
    // TODO Auto-generated method stub
}
// Reports the ensemble size and the number of ADWIN-triggered resets.
@Override
protected Measurement[] getModelMeasurementsImpl() {
    return new Measurement[]{new Measurement("ensemble size",
        this.ensemble != null ? this.ensemble.length : 0),
        new Measurement("change detections", this.numberOfChangesDetected)
    };
}
// Shallow copy: fresh array, but the elements are the live members.
@Override
public Classifier[] getSubClassifiers() {
    return this.ensemble.clone();
}
}
| Java |
/*
* LimAttClassifier.java
* Copyright (C) 2010 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @author Eibe Frank (eibe{[at]}cs{[dot]}waikato{[dot]}ac{[dot]}nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.trees.LimAttHoeffdingTree;
import weka.core.Instance;
import weka.core.Utils;
import java.math.BigInteger;
import java.util.Arrays;
import moa.classifiers.core.driftdetection.ADWIN;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import moa.core.Measurement;
import moa.options.ClassOption;
import moa.options.FlagOption;
import moa.options.FloatOption;
import moa.options.IntOption;
/**
* Ensemble Combining Restricted Hoeffding Trees using Stacking.
* It produces a classification model based on an
* ensemble of restricted decision trees, where each tree is built from a
* distinct subset of the attributes. The overall model is formed by
* combining the log-odds of the predicted class probabilities of these trees
* using sigmoid perceptrons, with one perceptron per class.
* In contrast to the standard boosting approach,
* which forms an ensemble classifier in a greedy fashion, building each tree in
* sequence and assigning corresponding weights as a by-product, our
* method generates each tree in parallel and combines them using perceptron
* classifiers by adopting the stacking approach.
*
* For more information see,<br/>
* <br/>
* Albert Bifet, Eibe Frank, Geoffrey Holmes, Bernhard Pfahringer: Accurate
* Ensembles for Data Streams: Combining Restricted Hoeffding Trees using Stacking.
* Journal of Machine Learning Research - Proceedings Track 13: 225-240 (2010)
*
<!-- technical-bibtex-start -->
* BibTeX:
* <pre>
* @article{BifetFHP10,
* author = {Albert Bifet and
* Eibe Frank and
* Geoffrey Holmes and
* Bernhard Pfahringer},
* title = {Accurate Ensembles for Data Streams: Combining Restricted
* Hoeffding Trees using Stacking},
* journal = {Journal of Machine Learning Research - Proceedings Track},
* volume = {13},
* year = {2010},
* pages = {225-240}
* }
* </pre>
* <p/>
<!-- technical-bibtex-end -->
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @author Eibe Frank (eibe{[at]}cs{[dot]}waikato{[dot]}ac{[dot]}nz)
* @version $Revision: 7 $
*/
public class LimAttClassifier extends AbstractClassifier {

    // Short description shown in the MOA GUI / CLI help.
    @Override
    public String getPurposeString() {
        return "Ensemble Combining Restricted Hoeffding Trees using Stacking";
    }
/*
 * Class that generates all combinations of n elements, taken
 * r at a time. The algorithm is described by
 *
 * Kenneth H. Rosen, Discrete Mathematics and Its Applications,
 * 2nd edition (NY: McGraw-Hill, 1991), pp. 284-286.
 *
 * @author Michael Gilleland (megilleland@yahoo.com)
 */
public class CombinationGenerator {

    private int[] a;             // current combination, kept in increasing order
    private int n;               // number of elements to choose from
    private int r;               // size of each combination
    private BigInteger numLeft;  // combinations not yet returned by getNext()
    private BigInteger total;    // total number of combinations C(n, r)

    //------------
    // Constructor
    //------------
    /**
     * Builds a generator for all C(n, r) combinations of {@code n} elements
     * taken {@code r} at a time.
     *
     * @param n the number of elements (must be >= 1)
     * @param r the combination size (must not exceed n)
     * @throws IllegalArgumentException if r > n or n < 1
     */
    public CombinationGenerator(int n, int r) {
        if (r > n) {
            throw new IllegalArgumentException();
        }
        if (n < 1) {
            throw new IllegalArgumentException();
        }
        this.n = n;
        this.r = r;
        a = new int[r];
        BigInteger nFact = getFactorial(n);
        BigInteger rFact = getFactorial(r);
        BigInteger nminusrFact = getFactorial(n - r);
        total = nFact.divide(rFact.multiply(nminusrFact));
        reset();
    }

    //------
    // Reset
    //------
    /** Restarts the enumeration from the first combination {0, 1, ..., r-1}. */
    public void reset() {
        for (int i = 0; i < a.length; i++) {
            a[i] = i;
        }
        // BigInteger is immutable, so the counter can simply share the
        // instance — no need to round-trip through a String as before.
        numLeft = total;
    }

    //------------------------------------------------
    // Return number of combinations not yet generated
    //------------------------------------------------
    public BigInteger getNumLeft() {
        return numLeft;
    }

    //-----------------------------
    // Are there more combinations?
    //-----------------------------
    public boolean hasMore() {
        // Test the sign rather than "compareTo(...) == 1": compareTo only
        // guarantees the SIGN of its result, not the exact value.
        return numLeft.signum() > 0;
    }

    //------------------------------------
    // Return total number of combinations
    //------------------------------------
    public BigInteger getTotal() {
        return total;
    }

    //------------------
    // Compute factorial
    //------------------
    private BigInteger getFactorial(int n) {
        BigInteger fact = BigInteger.ONE;
        for (int i = n; i > 1; i--) {
            fact = fact.multiply(BigInteger.valueOf(i));
        }
        return fact;
    }

    //--------------------------------------------------------
    // Generate next combination (algorithm from Rosen p. 286)
    //--------------------------------------------------------
    /**
     * @return a fresh copy of the next combination: r sorted indexes in [0, n).
     * Must only be called while {@link #hasMore()} is true.
     */
    public int[] getNext() {
        if (numLeft.equals(total)) {
            // First call after reset(): return the initial combination.
            numLeft = numLeft.subtract(BigInteger.ONE);
            return a.clone();
        }
        // Find the rightmost position that can still be incremented ...
        int i = r - 1;
        while (a[i] == n - r + i) {
            i--;
        }
        // ... bump it, and restart every position to its right.
        a[i] = a[i] + 1;
        for (int j = i + 1; j < r; j++) {
            a[j] = a[i] + j - i;
        }
        numLeft = numLeft.subtract(BigInteger.ONE);
        return a.clone();
    }
}
private static final long serialVersionUID = 1L;

// Base learner each restricted tree is copied from.
public ClassOption baseLearnerOption = new ClassOption("baseLearner", 'l',
        "Classifier to train.", Classifier.class, "trees.LimAttHoeffdingTree");

// Number of attributes each restricted tree sees; the ensemble contains one
// tree per attribute combination.
public IntOption numAttributesOption = new IntOption("numAttributes", 'n',
        "The number of attributes to use per model.", 1, 1, Integer.MAX_VALUE);

public FloatOption weightShrinkOption = new FloatOption("weightShrink", 'w',
        "The number to multiply the weight misclassified counts.", 0.5, 0.0, Float.MAX_VALUE);

// Confidence parameter of each member's ADWIN drift detector.
public FloatOption deltaAdwinOption = new FloatOption("deltaAdwin", 'a',
        "Delta of Adwin change detection", 0.002, 0.0, 1.0);

// Additive smoothing applied before taking log-odds, so probabilities of
// exactly 0 or 1 cannot blow up the logarithm.
public FloatOption oddsOffsetOption = new FloatOption("oddsOffset", 'o',
        "Offset for odds to avoid probabilities that are zero.", 0.001, 0.0, Float.MAX_VALUE);

public FlagOption pruneOption = new FlagOption("prune", 'x',
        "Enable pruning.");

// If set, each tree uses (numAttributes - 1 - n) attributes instead of n.
public FlagOption bigTreesOption = new FlagOption("bigTrees", 'b',
        "Use m-n attributes on the trees.");

public IntOption numEnsemblePruningOption = new IntOption("numEnsemblePruning", 'm',
        "The pruned number of classifiers to use to predict.", 10, 1, Integer.MAX_VALUE);

public FlagOption adwinReplaceWorstClassifierOption = new FlagOption("adwinReplaceWorstClassifier", 'z',
        "When one Adwin detects change, replace worst classifier.");

// The restricted trees, one per attribute combination.
protected Classifier[] ensemble;

// One ADWIN drift detector per member, fed 0/1 error indicators.
protected ADWIN[] ADError;

protected int numberOfChangesDetected;

protected int[][] matrixCodes;

protected boolean initMatrixCodes = false;

// True until the ensemble has been built (deferred to the first instance,
// when the attribute count is known).
protected boolean initClassifiers = false;

// Attributes per tree, resolved from the options on the first instance.
protected int numberAttributes = 1;

// Virtual instance counter driving the perceptron's learning-rate decay.
protected int numInstances = 0;
@Override
public void resetLearningImpl() {
    // Defer ensemble construction to the first training instance, when the
    // number of attributes (and hence of combinations) is known.
    this.initClassifiers = true;
    // Also re-initialize the stacking perceptron on the next training call.
    this.reset = true;
}
/**
 * Builds the ensemble lazily on the first instance (one restricted tree per
 * attribute combination), converts each tree's class probabilities to
 * smoothed log-odds, updates the per-member ADWIN drift detectors
 * (resetting members on detected drift), trains the stacking perceptrons on
 * the log-odds, and finally trains every tree on the raw instance.
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    int numClasses = inst.numClasses();
    //Init Ensemble
    if (this.initClassifiers == true) {
        numberAttributes = numAttributesOption.getValue();
        if (bigTreesOption.isSet()) {
            // "Big trees": use all but n of the predictive attributes.
            numberAttributes = inst.numAttributes() - 1 - numAttributesOption.getValue();
        }
        // One ensemble member per combination of numberAttributes attributes
        // (class attribute excluded).
        CombinationGenerator x = new CombinationGenerator(inst.numAttributes() - 1, this.numberAttributes);
        int numberClassifiers = x.getTotal().intValue();
        this.ensemble = new Classifier[numberClassifiers];
        Classifier baseLearner = (Classifier) getPreparedClassOption(this.baseLearnerOption);
        baseLearner.resetLearning();
        for (int i = 0; i < this.ensemble.length; i++) {
            this.ensemble[i] = baseLearner.copy();
        }
        this.ADError = new ADWIN[this.ensemble.length];
        for (int i = 0; i < this.ensemble.length; i++) {
            this.ADError[i] = new ADWIN((double) this.deltaAdwinOption.getValue());
        }
        this.numberOfChangesDetected = 0;
        //Prepare combinations
        int i = 0;
        if (baseLearner instanceof LimAttHoeffdingTree) {
            while (x.hasMore()) {
                // Restrict each tree to its own attribute subset.
                ((LimAttHoeffdingTree) this.ensemble[i]).setlistAttributes(x.getNext());
                i++;
            }
        }
        this.initClassifiers = false;
    }
    boolean Change = false;
    Instance weightedInst = (Instance) inst.copy();
    //Train Perceptron
    // votes[i][j]: member i's smoothed log-odds for class j; the extra last
    // row stays zero and corresponds to the perceptron's bias slot.
    double[][] votes = new double[this.ensemble.length + 1][numClasses];
    for (int i = 0; i < this.ensemble.length; i++) {
        double[] v = new double[numClasses];
        for (int j = 0; j < v.length; j++) {
            v[j] = (double) this.oddsOffsetOption.getValue();
        }
        double[] vt = this.ensemble[i].getVotesForInstance(inst);
        double sum = Utils.sum(vt);
        if (!Double.isNaN(sum) && (sum > 0)) {
            for (int j = 0; j < vt.length; j++) {
                vt[j] /= sum;
            }
        } else {
            // Just in case the base learner returns NaN
            for (int k = 0; k < vt.length; k++) {
                vt[k] = 0.0;
            }
        }
        sum = numClasses * (double) this.oddsOffsetOption.getValue();
        for (int j = 0; j < vt.length; j++) {
            v[j] += vt[j];
            sum += vt[j];
        }
        // Log-odds of the smoothed probability of each class.
        for (int j = 0; j < vt.length; j++) {
            votes[i][j] = Math.log(v[j] / (sum - v[j]));
        }
    }
    if (adwinReplaceWorstClassifierOption.isSet() == false) {
        //Train ensemble of classifiers
        // Default policy: every member whose ADWIN window cut with a rising
        // error estimate is reset individually.
        for (int i = 0; i < this.ensemble.length; i++) {
            boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(weightedInst);
            double ErrEstim = this.ADError[i].getEstimation();
            if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1)) {
                // Restart the perceptron's learning-rate decay after a cut.
                numInstances = initialNumInstancesOption.getValue();
                if (this.ADError[i].getEstimation() > ErrEstim) {
                    Change = true;
                    //Replace classifier if ADWIN has detected change
                    numberOfChangesDetected++;
                    this.ensemble[i].resetLearning();
                    this.ADError[i] = new ADWIN((double) this.deltaAdwinOption.getValue());
                    // Zero the perceptron weights attached to the reset member.
                    for (int ii = 0; ii < inst.numClasses(); ii++) {
                        weightAttribute[ii][i] = 0.0;// 0.2 * Math.random() - 0.1;
                    }
                }
            }
        }
    } else {
        //Train ensemble of classifiers
        // Alternative policy (-z): only mark that a change happened ...
        for (int i = 0; i < this.ensemble.length; i++) {
            boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(weightedInst);
            double ErrEstim = this.ADError[i].getEstimation();
            if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1)) {
                if (this.ADError[i].getEstimation() > ErrEstim) {
                    Change = true;
                }
            }
        }
        //Replace classifier with higher error if ADWIN has detected change
        // ... then reset only the single member with the worst error estimate.
        if (Change) {
            numberOfChangesDetected++;
            double max = 0.0;
            int imax = -1;
            for (int i = 0; i < this.ensemble.length; i++) {
                if (max < this.ADError[i].getEstimation()) {
                    max = this.ADError[i].getEstimation();
                    imax = i;
                }
            }
            if (imax != -1) {
                this.ensemble[imax].resetLearning();
                this.ADError[imax] = new ADWIN((double) this.deltaAdwinOption.getValue());
                for (int ii = 0; ii < inst.numClasses(); ii++) {
                    weightAttribute[ii][imax] = 0.0;
                }
            }
        }
    }
    trainOnInstanceImplPerceptron(inst.numClasses(), (int) inst.classValue(), votes);
    for (int i = 0; i < this.ensemble.length; i++) {
        this.ensemble[i].trainOnInstance(inst);
    }
}
/**
 * Computes the stacked prediction: collects each (possibly pruned)
 * member's smoothed log-odds votes and feeds them through the per-class
 * sigmoid perceptrons. Returns an empty vote while the ensemble has not
 * been initialized yet.
 */
@Override
public double[] getVotesForInstance(Instance inst) {
    if (this.initClassifiers == true) {
        return new double[0];
    }
    int numClasses = inst.numClasses();
    int sizeEnsemble = this.ensemble.length;
    if (pruneOption.isSet()) {
        sizeEnsemble = this.numEnsemblePruningOption.getValue();
    }
    double[][] votes = new double[sizeEnsemble + 1][numClasses];
    int[] bestClassifiers = new int[sizeEnsemble];
    if (pruneOption.isSet()) {
        //Check for the best classifiers
        // Score each member by the sum of its perceptron weights over classes.
        double[] weight = new double[this.ensemble.length];
        for (int i = 0; i < numClasses; i++) {
            for (int j = 0; j < this.ensemble.length; j++) {
                weight[j] += weightAttribute[i][j];
            }
        }
        // BUG FIX: the original sorted weight[] in place and then indexed it
        // by classifier position j, but after sorting weight[j] no longer
        // corresponds to classifier j. Sort a copy to find the cut value and
        // keep the original array for the per-classifier comparison.
        double[] sortedWeight = weight.clone();
        Arrays.sort(sortedWeight);
        double cutValue = sortedWeight[this.ensemble.length - sizeEnsemble]; //reverse order
        int ii = 0;
        for (int j = 0; j < this.ensemble.length; j++) {
            if (weight[j] >= cutValue && ii < sizeEnsemble) {
                bestClassifiers[ii] = j;
                ii++;
            }
        }
    } else { //Not pruning: all classifiers
        for (int ii = 0; ii < sizeEnsemble; ii++) {
            bestClassifiers[ii] = ii;
        }
    }
    // Same smoothed log-odds transformation as in training.
    for (int ii = 0; ii < sizeEnsemble; ii++) {
        int i = bestClassifiers[ii];
        double[] v = new double[numClasses];
        for (int j = 0; j < v.length; j++) {
            v[j] = (double) this.oddsOffsetOption.getValue();
        }
        double[] vt = this.ensemble[i].getVotesForInstance(inst);
        double sum = Utils.sum(vt);
        if (!Double.isNaN(sum) && (sum > 0)) {
            for (int j = 0; j < vt.length; j++) {
                vt[j] /= sum;
            }
        } else {
            // Just in case the base learner returns NaN
            for (int k = 0; k < vt.length; k++) {
                vt[k] = 0.0;
            }
        }
        sum = numClasses * (double) this.oddsOffsetOption.getValue();
        for (int j = 0; j < vt.length; j++) {
            v[j] += vt[j];
            sum += vt[j];
        }
        for (int j = 0; j < vt.length; j++) {
            votes[ii][j] = Math.log(v[j] / (sum - v[j]));
            // votes[i][j] = vt[j];
        }
    }
    return getVotesForInstancePerceptron(votes, bestClassifiers, inst.numClasses());
}
@Override
public boolean isRandomizable() {
    return true;
}
@Override
public void getModelDescription(StringBuilder out, int indent) {
    // Intentionally left empty: no textual model description is produced.
    // TODO Auto-generated method stub
}
// Reports the ensemble size and the number of ADWIN-triggered resets.
@Override
protected Measurement[] getModelMeasurementsImpl() {
    return new Measurement[]{new Measurement("ensemble size",
        this.ensemble != null ? this.ensemble.length : 0),
        new Measurement("change detections", this.numberOfChangesDetected)
    };
}
// Shallow copy: fresh array, but the elements are the live members.
@Override
public Classifier[] getSubClassifiers() {
    return this.ensemble.clone();
}
//Perceptron
// Base learning rate of the stacking perceptrons (decayed over numInstances).
public FloatOption learningRatioOption = new FloatOption("learningRatio", 'e', "Learning ratio", 1);
// L2 weight-decay penalty applied during perceptron updates.
public FloatOption penaltyFactorOption = new FloatOption("lambda", 'p', "Lambda", 0.0);
// Starting value of the virtual instance counter used for rate decay.
public IntOption initialNumInstancesOption = new IntOption("initialNumInstances", 'i', "initialNumInstances", 10);
// weightAttribute[class][member]: perceptron weights, one row per class;
// the last column (index ensemble.length) holds the bias weight.
protected double[][] weightAttribute;
// True until the perceptron weights have been initialized.
protected boolean reset;
/**
 * One gradient step of the per-class sigmoid stacking perceptrons on the
 * members' log-odds votes, with a decaying learning rate and optional L2
 * weight decay.
 * @param numClasses number of classes in the stream
 * @param actualClass true class index of the current instance
 * @param votes votes[m][c] = member m's log-odds for class c; the extra
 *        last row corresponds to the bias slot
 */
public void trainOnInstanceImplPerceptron(int numClasses, int actualClass, double[][] votes) {
    //Init Perceptron
    if (this.reset == true) {
        this.reset = false;
        this.weightAttribute = new double[numClasses][votes.length];
        // Uniform initial weights summing to 1 per class; bias starts at 0.
        for (int i = 0; i < numClasses; i++) {
            for (int j = 0; j < votes.length - 1; j++) {
                weightAttribute[i][j] = 1.0 / (votes.length - 1.0);
            }
        }
        numInstances = initialNumInstancesOption.getValue();
    }
    // Weight decay
    // Learning rate shrinks as more instances are seen.
    double learningRatio = learningRatioOption.getValue() * 2.0 / (numInstances + (votes.length - 1) + 2.0);
    double lambda = penaltyFactorOption.getValue();
    numInstances++;
    double[] preds = new double[numClasses];
    for (int i = 0; i < numClasses; i++) {
        preds[i] = prediction(votes, i);
    }
    for (int i = 0; i < numClasses; i++) {
        double actual = (i == actualClass) ? 1.0 : 0.0;
        // Gradient of the squared error through the sigmoid.
        double delta = (actual - preds[i]) * preds[i] * (1 - preds[i]);
        for (int j = 0; j < this.ensemble.length; j++) {
            this.weightAttribute[i][j] += learningRatio * (delta * votes[j][i] - lambda * this.weightAttribute[i][j]);
        }
        // Bias weight lives in the last column.
        this.weightAttribute[i][this.ensemble.length] += learningRatio * delta;
    }
}
/**
 * Sigmoid-combined score of the selected (pruned) ensemble members for one
 * class: sigma(sum_i w[classVal][best_i] * votes[i][classVal] + bias).
 *
 * @param votes           per-member vote matrix; the loop stops at
 *                        votes.length - 1, so the last row is not summed
 * @param bestClassifiers indices of the members kept by pruning; maps each
 *                        row of {@code votes} to its weight column
 * @param classVal        class index being scored
 * @return a value in (0, 1)
 */
public double predictionPruning(double[][] votes, int[] bestClassifiers, int classVal) {
    double sum = 0.0;
    for (int i = 0; i < votes.length - 1; i++) {
        sum += (double) weightAttribute[classVal][bestClassifiers[i]] * votes[i][classVal];
    }
    // Last weight column acts as the bias term -- presumably; layout should be
    // confirmed against trainOnInstanceImplPerceptron, which writes the bias
    // at index this.ensemble.length.
    sum += weightAttribute[classVal][votes.length - 1];
    return 1.0 / (1.0 + Math.exp(-sum));
}
/**
 * Sigmoid-combined score of all ensemble members for one class:
 * sigma(sum_i w[classVal][i] * votes[i][classVal] + bias).
 *
 * @param votes    per-member vote matrix; the last row is not summed
 * @param classVal class index being scored
 * @return a value in (0, 1)
 */
public double prediction(double[][] votes, int classVal) {
    double activation = 0.0;
    int memberCount = votes.length - 1;
    // Weighted sum of each member's vote for this class.
    for (int member = 0; member < memberCount; member++) {
        activation += weightAttribute[classVal][member] * votes[member][classVal];
    }
    // Bias term stored in the last weight column.
    activation += weightAttribute[classVal][memberCount];
    // Logistic squashing to (0, 1).
    return 1.0 / (1.0 + Math.exp(-activation));
}
/**
 * Produces class votes by pushing the ensemble's votes through the pruned
 * perceptron combiner.
 *
 * @param votesEnsemble   per-member vote matrix
 * @param bestClassifiers indices of the members selected by pruning
 * @param numClasses      number of classes to score
 * @return one perceptron score per class, or all zeros while a reset is pending
 */
public double[] getVotesForInstancePerceptron(double[][] votesEnsemble, int[] bestClassifiers, int numClasses) {
    double[] classVotes = new double[numClasses];
    // Until the perceptron has been (re)initialized, the zero vector stands in.
    if (!this.reset) {
        for (int classIndex = 0; classIndex < numClasses; classIndex++) {
            classVotes[classIndex] = predictionPruning(votesEnsemble, bestClassifiers, classIndex);
        }
    }
    return classVotes;
}
}
| Java |
/*
* OCBoost.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import weka.core.Instance;
import weka.core.Utils;
import moa.core.Measurement;
import moa.options.ClassOption;
import moa.options.FloatOption;
import moa.options.IntOption;
/**
* Online Coordinate boosting for two classes evolving data streams.
*
* <p>Pelossof et al. presented Online Coordinate Boosting, a new online
* boosting algorithm for adapting the weights of a boosted classifier,
* which yields a closer approximation to Freund and Schapire’s AdaBoost
* algorithm. The weight update procedure is derived by minimizing AdaBoost’s
* loss when viewed in an incremental form. This boosting method may be reduced
* to a form similar to Oza and Russell’s algorithm.</p>
*
* <p>See details in:<br />
* Raphael Pelossof, Michael Jones, Ilia Vovsha, and Cynthia Rudin.
* Online coordinate boosting. 2008.</p>
* <p>Example:</p>
* <code>OCBoost -l HoeffdingTreeNBAdaptive -e 0.5</code>
* <p>Parameters:</p><ul>
* <li>-l : Classifier to train</li>
* <li>-s : The number of models to boost</li>
* <li>-e : Smoothing parameter</li></ul>
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public class OCBoost extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Online Coordinate boosting for two classes evolving data streams.";
    }

    /** Base learner copied to create every boosted ensemble member. */
    public ClassOption baseLearnerOption = new ClassOption("baseLearner", 'l',
            "Classifier to train.", Classifier.class, "trees.HoeffdingTree");

    /** Number of models to boost. */
    public IntOption ensembleSizeOption = new IntOption("ensembleSize", 's',
            "The number of models to boost.", 10, 1, Integer.MAX_VALUE);

    /** Additive smoothing used to initialize the weight sums wpos/wneg. */
    public FloatOption smoothingOption = new FloatOption("smoothingParameter", 'e',
            "Smoothing parameter.", 0.5, 0.0, 100.0);

    /** Boosted ensemble members. */
    protected Classifier[] ensemble;

    /** Member voting weights: alpha[j] = 0.5 * ln(wpos[j][j] / wneg[j][j]). */
    protected double[] alpha;

    /** Change of each alpha[j] since the previous example (used in the pi products). */
    protected double[] alphainc;

    // Correction products applied to the weight sums: pipos for examples
    // member j classifies correctly, pineg for examples it gets wrong.
    // Recomputed for every training example.
    protected double[] pipos;

    protected double[] pineg;

    // Smoothed weight sums: wpos[j][k] accumulates the weight of examples that
    // members k and j BOTH classify correctly; wneg[j][k] the weight of examples
    // both get wrong (see the updates in trainOnInstanceImpl).
    protected double[][] wpos;

    protected double[][] wneg;

    @Override
    public void resetLearningImpl() {
        // (Re)allocate all boosting state and seed the weight sums with the
        // smoothing parameter, which keeps the wpos/wneg ratios finite.
        this.ensemble = new Classifier[this.ensembleSizeOption.getValue()];
        this.alpha = new double[this.ensemble.length];
        this.alphainc = new double[this.ensemble.length];
        this.pipos = new double[this.ensemble.length];
        this.pineg = new double[this.ensemble.length];
        this.wpos = new double[this.ensemble.length][this.ensemble.length];
        this.wneg = new double[this.ensemble.length][this.ensemble.length];
        Classifier baseLearner = (Classifier) getPreparedClassOption(this.baseLearnerOption);
        baseLearner.resetLearning();
        for (int i = 0; i < this.ensemble.length; i++) {
            this.ensemble[i] = baseLearner.copy();
            alpha[i] = 0.0;
            alphainc[i] = 0.0;
            for (int j = 0; j < this.ensemble.length; j++) {
                wpos[i][j] = this.smoothingOption.getValue();
                wneg[i][j] = this.smoothingOption.getValue();
            }
        }
    }

    /**
     * One Online Coordinate Boosting update: refreshes the weight sums and
     * member weights alpha, then trains each member on the example weighted
     * by the running boosting weight d.
     */
    @Override
    public void trainOnInstanceImpl(Instance inst) {
        // d is the example's boosting weight; it decays/grows as members
        // classify the example correctly/incorrectly.
        double d = 1.0;
        // m[j] is member j's margin on this example: +1 correct, -1 wrong.
        int[] m = new int[this.ensemble.length];
        for (int j = 0; j < this.ensemble.length; j++) {
            // NOTE(review): the window lower bound is fixed at 0; the sliding
            // window of size K suggested by the comment is not implemented.
            int j0 = 0; //max(0,j-K)
            pipos[j] = 1.0;
            pineg[j] = 1.0;
            m[j] = -1;
            if (this.ensemble[j].correctlyClassifies(inst) == true) {
                m[j] = 1;
            }
            // Correction products accounting for the alpha changes of all
            // earlier members since the last update.
            for (int k = j0; k <= j - 1; k++) {
                pipos[j] *= wpos[j][k] / wpos[j][j] * Math.exp(-alphainc[k])
                        + (1.0 - wpos[j][k] / wpos[j][j]) * Math.exp(alphainc[k]);
                pineg[j] *= wneg[j][k] / wneg[j][j] * Math.exp(-alphainc[k])
                        + (1.0 - wneg[j][k] / wneg[j][j]) * Math.exp(alphainc[k]);
            }
            // Fold the current example into the weight sums: it contributes to
            // wpos[j][k] only when both members k and j got it right, and to
            // wneg[j][k] only when both got it wrong.
            for (int k = 0; k <= j; k++) {
                wpos[j][k] = wpos[j][k] * pipos[j] + d * (m[k] == 1 ? 1 : 0) * (m[j] == 1 ? 1 : 0);
                wneg[j][k] = wneg[j][k] * pineg[j] + d * (m[k] == -1 ? 1 : 0) * (m[j] == -1 ? 1 : 0);
            }
            // Recompute alpha[j] (AdaBoost-style log-odds) and remember the
            // increment for the next example's correction products.
            alphainc[j] = -alpha[j];
            alpha[j] = 0.5 * Math.log(wpos[j][j] / wneg[j][j]);
            alphainc[j] += alpha[j];
            // Update the example weight for the next member.
            d = d * Math.exp(-alpha[j] * m[j]);
            // d is a product of exponentials and smoothing-seeded ratios, so it
            // should always be positive; the guard is presumably defensive.
            if (d > 0.0) {
                Instance weightedInst = (Instance) inst.copy();
                weightedInst.setWeight(inst.weight() * d);
                this.ensemble[j].trainOnInstance(weightedInst);
            }
        }
    }

    /** Voting weight of member i (its current alpha). */
    protected double getEnsembleMemberWeight(int i) {
        return alpha[i];
    }

    /**
     * Two-class weighted majority vote: each member votes -1 (class 0) or +1
     * (class 1), scaled by its alpha. Ties (combined vote == 0) resolve to
     * class 0. The returned array always has length 2.
     */
    @Override
    public double[] getVotesForInstance(Instance inst) {
        double[] output = new double[2];
        int vote;
        double combinedVote = 0.0;
        for (int i = 0; i < this.ensemble.length; i++) {
            vote = Utils.maxIndex(this.ensemble[i].getVotesForInstance(inst));
            if (vote == 0) {
                vote = -1;
            }
            combinedVote += (double) vote * getEnsembleMemberWeight(i);
        }
        output[0] = 0;
        output[1] = 0;
        output[combinedVote > 0 ? 1 : 0] = 1;
        return output;
    }

    @Override
    public boolean isRandomizable() {
        // Declared randomizable; no explicit use of randomness is visible in
        // this class itself -- presumably inherited behavior relies on it.
        return true;
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // Intentionally empty: no human-readable model description is produced.
        // TODO Auto-generated method stub
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        // Only the ensemble size is reported.
        return new Measurement[]{new Measurement("ensemble size",
                this.ensemble != null ? this.ensemble.length : 0)};
    }

    @Override
    public Classifier[] getSubClassifiers() {
        // Shallow copy: the array is cloned, the members are shared.
        return this.ensemble.clone();
    }
}
| Java |
/*
* OnlineSmoothBoost.java
* Copyright (C) 2013 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.options.ClassOption;
import moa.options.FloatOption;
import moa.options.IntOption;
import weka.core.Instance;
/**
* Incremental on-line boosting with Theoretical Justifications of Shang-Tse Chen,
* Hsuan-Tien Lin and Chi-Jen Lu.
*
* <p>See details in:<br /> </p>
*
* <p>Parameters:</p> <ul> <li>-l : Classifier to train</li> <li>-s : The number
* of models to boost</li>
* </ul>
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class OnlineSmoothBoost extends AbstractClassifier {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "Incremental on-line boosting of Shang-Tse Chen, Hsuan-Tien Lin and Chi-Jen Lu.";
    }

    /** Base learner copied to create every boosted ensemble member. */
    public ClassOption baseLearnerOption = new ClassOption("baseLearner", 'l',
            "Classifier to train.", Classifier.class, "trees.HoeffdingTree");

    /** Number of models to boost. */
    public IntOption ensembleSizeOption = new IntOption("ensembleSize", 's',
            "The number of models to boost.", 10, 1, Integer.MAX_VALUE);

    //public FlagOption pureBoostOption = new FlagOption("pureBoost", 'p',
    //        "Boost with weights only; no poisson.");

    /** The weak learners' assumed advantage over random guessing. */
    public FloatOption gammaOption = new FloatOption("gamma",
            'g',
            "The value of the gamma parameter.",
            0.1, 0.0, 1.0);

    /** Boosted ensemble members. */
    protected Classifier[] ensemble;

    /** Uniform member voting weights (1 / ensemble size). */
    protected double[] alpha;

    /** Cached value of gammaOption. */
    protected double gamma;

    /** Margin threshold theta = gamma / (2 + gamma). */
    protected double theta;

    @Override
    public void resetLearningImpl() {
        this.ensemble = new Classifier[this.ensembleSizeOption.getValue()];
        Classifier baseLearner = (Classifier) getPreparedClassOption(this.baseLearnerOption);
        baseLearner.resetLearning();
        this.alpha = new double[this.ensemble.length];
        for (int i = 0; i < this.ensemble.length; i++) {
            this.ensemble[i] = baseLearner.copy();
            // All members vote with equal weight.
            this.alpha[i] = 1.0 / (double) this.ensemble.length;
        }
        this.gamma = this.gammaOption.getValue();
        this.theta = this.gamma / (2.0 + this.gamma);
    }

    /**
     * Trains every member on the instance, weighting it for member i by how
     * well the earlier members (0..i-1) already handle it: the weight is 1
     * while the running margin zt is non-positive and decays as
     * (1 - gamma)^(zt/2) once zt grows.
     */
    @Override
    public void trainOnInstanceImpl(Instance inst) {
        // zt accumulates (margin - theta) over the members seen so far, where
        // margin is +1 for a correct prediction and -1 otherwise.
        double zt = 0.0;
        double weight = 1.0;
        for (int i = 0; i < this.ensemble.length; i++) {
            zt += (this.ensemble[i].correctlyClassifies(inst) ? 1 : -1) - theta;
            //normalized_predict(ex.x) * ex.y - theta;
            Instance weightedInst = (Instance) inst.copy();
            // Note: member i is trained with the weight derived from members
            // 0..i-1 (zt is updated first but `weight` lags one iteration).
            weightedInst.setWeight(weight);
            this.ensemble[i].trainOnInstance(weightedInst);
            // Weight for the NEXT member, capped at 1.
            weight = (zt <= 0) ? 1.0 : Math.pow(1.0 - gamma, zt / 2.0);
        }
    }

    /** Voting weight of member i (uniform for this algorithm). */
    protected double getEnsembleMemberWeight(int i) {
        return this.alpha[i];
    }

    /**
     * Weighted, normalized vote over all members with positive weight.
     */
    // FIX: added the missing @Override annotation (this overrides
    // AbstractClassifier), matching every other overriding method here.
    @Override
    public double[] getVotesForInstance(Instance inst) {
        DoubleVector combinedVote = new DoubleVector();
        for (int i = 0; i < this.ensemble.length; i++) {
            double memberWeight = getEnsembleMemberWeight(i);
            if (memberWeight > 0.0) {
                DoubleVector vote = new DoubleVector(this.ensemble[i].getVotesForInstance(inst));
                if (vote.sumOfValues() > 0.0) {
                    vote.normalize();
                    vote.scaleValues(memberWeight);
                    combinedVote.addValues(vote);
                }
            } else {
                // Weights are uniform and positive after reset, so this break
                // is defensive only.
                break;
            }
        }
        return combinedVote.getArrayRef();
    }

    // FIX: added the missing @Override annotation for consistency.
    @Override
    public boolean isRandomizable() {
        return true;
    }

    @Override
    public void getModelDescription(StringBuilder out, int indent) {
        // Intentionally empty: no human-readable model description is produced.
        // TODO Auto-generated method stub
    }

    @Override
    protected Measurement[] getModelMeasurementsImpl() {
        // Only the ensemble size is reported.
        return new Measurement[]{new Measurement("ensemble size",
                this.ensemble != null ? this.ensemble.length : 0)};
    }

    @Override
    public Classifier[] getSubClassifiers() {
        // Shallow copy: the array is cloned, the members are shared.
        return this.ensemble.clone();
    }
}
| Java |
/*
* AccuracyWeightedEnsemble.java
* Copyright (C) 2010 Poznan University of Technology, Poznan, Poland
* @author Dariusz Brzezinski (dariusz.brzezinski@cs.put.poznan.pl)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.classifiers.meta;
import java.util.Random;
import moa.classifiers.AbstractClassifier;
import moa.classifiers.Classifier;
import moa.core.DoubleVector;
import moa.core.Measurement;
import moa.core.ObjectRepository;
import moa.options.ClassOption;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;
/**
* The Accuracy Weighted Ensemble classifier as proposed by Wang et al. in
* "Mining concept-drifting data streams using ensemble classifiers", KDD 2003.
*/
public class AccuracyWeightedEnsemble extends AbstractClassifier {
@Override
public String getPurposeString() {
return "Accuracy Weighted Ensemble classifier as proposed by Wang et al. in 'Mining concept-drifting data streams using ensemble classifiers', KDD 2003";
}
/**
* Simple weight comparator. Needed for sorting component classifiers.
*/
private static final class ClassifierWeightComparator implements java.util.Comparator<double[]> {
@Override
public int compare(double[] o1, double[] o2) {
if (o1[0] > o2[0]) {
return 1;
} else if (o1[0] < o2[0]) {
return -1;
} else {
return 0;
}
}
}
private static final long serialVersionUID = 1L;
/**
* Simple weight comparator.
*/
protected static java.util.Comparator<double[]> weightComparator = new ClassifierWeightComparator();
/**
* Type of classifier to use as a component classifier.
*/
public ClassOption learnerOption = new ClassOption("learner", 'l', "Classifier to train.", Classifier.class, "trees.HoeffdingTree -l NB -e 1000 -g 100 -c 0.01");
/**
* Number of component classifiers.
*/
public FloatOption memberCountOption = new FloatOption("memberCount", 'n', "The maximum number of classifier in an ensemble.", 15, 1, Integer.MAX_VALUE);
/**
* Number of classifiers remembered and available for ensemble construction.
*/
public FloatOption storedCountOption = new FloatOption("storedCount", 'r', "The maximum number of classifiers to store and choose from when creating an ensemble.", 30, 1, Integer.MAX_VALUE);
/**
* Chunk size.
*/
public IntOption chunkSizeOption = new IntOption("chunkSize", 'c', "The chunk size used for classifier creation and evaluation.", 500, 1, Integer.MAX_VALUE);
/**
* Number of folds in candidate classifier cross-validation.
*/
public IntOption numFoldsOption = new IntOption("numFolds", 'f', "Number of cross-validation folds for candidate classifier testing.", 10, 1, Integer.MAX_VALUE);
protected long[] classDistributions;
protected Classifier[] ensemble;
protected Classifier[] storedLearners;
protected double[] ensembleWeights;
/**
* The weights of stored classifiers. storedWeights[x][0] = weight
* storedWeights[x][1] = classifier
*/
protected double[][] storedWeights;
protected int processedInstances;
protected int chunkSize;
protected int numFolds;
protected int maxMemberCount;
protected int maxStoredCount;
protected Classifier candidateClassifier;
protected Instances currentChunk;
@Override
public void prepareForUseImpl(TaskMonitor monitor, ObjectRepository repository) {
this.maxMemberCount = (int) memberCountOption.getValue();
this.maxStoredCount = (int) storedCountOption.getValue();
if (this.maxMemberCount > this.maxStoredCount) {
this.maxStoredCount = this.maxMemberCount;
}
this.chunkSize = this.chunkSizeOption.getValue();
this.numFolds = this.numFoldsOption.getValue();
this.candidateClassifier = (Classifier) getPreparedClassOption(this.learnerOption);
this.candidateClassifier.resetLearning();
super.prepareForUseImpl(monitor, repository);
}
@Override
public void resetLearningImpl() {
this.currentChunk = null;
this.classDistributions = null;
this.processedInstances = 0;
this.ensemble = new Classifier[0];
this.storedLearners = new Classifier[0];
this.candidateClassifier = (Classifier) getPreparedClassOption(this.learnerOption);
this.candidateClassifier.resetLearning();
}
@Override
public void trainOnInstanceImpl(Instance inst) {
this.initVariables();
this.classDistributions[(int) inst.classValue()]++;
this.currentChunk.add(inst);
this.processedInstances++;
if (this.processedInstances % this.chunkSize == 0) {
this.processChunk();
}
}
/**
* Initiates the current chunk and class distribution variables.
*/
private void initVariables() {
if (this.currentChunk == null) {
this.currentChunk = new Instances(this.getModelContext());
}
if (this.classDistributions == null) {
this.classDistributions = new long[this.getModelContext().classAttribute().numValues()];
for (int i = 0; i < this.classDistributions.length; i++) {
this.classDistributions[i] = 0;
}
}
}
/**
* Processes a chunk.
*
* @param useMseR Determines whether to use the MSEr threshold.
*/
protected void processChunk() {
// Compute weights
double candidateClassifierWeight = this.computeCandidateWeight(this.candidateClassifier, this.currentChunk, this.numFolds);
for (int i = 0; i < this.storedLearners.length; i++) {
this.storedWeights[i][0] = this.computeWeight(this.storedLearners[(int) this.storedWeights[i][1]], this.currentChunk);
}
if (this.storedLearners.length < this.maxStoredCount) {
// Train and add classifier
for (int num = 0; num < this.chunkSize; num++) {
this.candidateClassifier.trainOnInstance(this.currentChunk.instance(num));
}
this.addToStored(this.candidateClassifier, candidateClassifierWeight);
} else {
// Substitute poorest classifier
java.util.Arrays.sort(this.storedWeights, weightComparator);
if (this.storedWeights[0][0] < candidateClassifierWeight) {
for (int num = 0; num < this.chunkSize; num++) {
this.candidateClassifier.trainOnInstance(this.currentChunk.instance(num));
}
this.storedWeights[0][0] = candidateClassifierWeight;
this.storedLearners[(int) this.storedWeights[0][1]] = this.candidateClassifier.copy();
}
}
int ensembleSize = java.lang.Math.min(this.storedLearners.length, this.maxMemberCount);
this.ensemble = new Classifier[ensembleSize];
this.ensembleWeights = new double[ensembleSize];
// Sort learners according to their weights
java.util.Arrays.sort(this.storedWeights, weightComparator);
// Select top k classifiers to construct the ensemble
int storeSize = this.storedLearners.length;
for (int i = 0; i < ensembleSize; i++) {
this.ensembleWeights[i] = this.storedWeights[storeSize - i - 1][0];
this.ensemble[i] = this.storedLearners[(int) this.storedWeights[storeSize - i - 1][1]];
}
this.classDistributions = null;
this.currentChunk = null;
this.candidateClassifier = (Classifier) getPreparedClassOption(this.learnerOption);
this.candidateClassifier.resetLearning();
}
/**
* Computes the weight of a candidate classifier.
*
* @param candidate Candidate classifier.
* @param chunk Data chunk of examples.
* @param numFolds Number of folds in candidate classifier cross-validation.
* @param useMseR Determines whether to use the MSEr threshold.
* @return Candidate classifier weight.
*/
protected double computeCandidateWeight(Classifier candidate, Instances chunk, int numFolds) {
double candidateWeight = 0.0;
Random random = new Random(1);
Instances randData = new Instances(chunk);
randData.randomize(random);
if (randData.classAttribute().isNominal()) {
randData.stratify(numFolds);
}
for (int n = 0; n < numFolds; n++) {
Instances train = randData.trainCV(numFolds, n, random);
Instances test = randData.testCV(numFolds, n);
Classifier learner = candidate.copy();
for (int num = 0; num < train.numInstances(); num++) {
learner.trainOnInstance(train.instance(num));
}
candidateWeight += computeWeight(learner, test);
}
double resultWeight = candidateWeight / numFolds;
if (Double.isInfinite(resultWeight)) {
return Double.MAX_VALUE;
} else {
return resultWeight;
}
}
/**
* Computes the weight of a given classifie.
*
* @param learner Classifier to calculate weight for.
* @param chunk Data chunk of examples.
* @param useMseR Determines whether to use the MSEr threshold.
* @return The given classifier's weight.
*/
protected double computeWeight(Classifier learner, Instances chunk) {
double mse_i = 0;
double mse_r = 0;
double f_ci;
double voteSum;
for (int i = 0; i < chunk.numInstances(); i++) {
try {
voteSum = 0;
for (double element : learner.getVotesForInstance(chunk.instance(i))) {
voteSum += element;
}
if (voteSum > 0) {
f_ci = learner.getVotesForInstance(chunk.instance(i))[(int) chunk.instance(i).classValue()] / voteSum;
mse_i += (1 - f_ci) * (1 - f_ci);
} else {
mse_i += 1;
}
} catch (Exception e) {
mse_i += 1;
}
}
mse_i /= this.chunkSize;
mse_r = this.computeMseR();
return java.lang.Math.max(mse_r - mse_i, 0);
}
/**
* Computes the MSEr threshold.
*
* @return The MSEr threshold.
*/
protected double computeMseR() {
double p_c;
double mse_r = 0;
for (int i = 0; i < this.classDistributions.length; i++) {
p_c = (double) this.classDistributions[i] / (double) this.chunkSize;
mse_r += p_c * ((1 - p_c) * (1 - p_c));
}
return mse_r;
}
/**
* Predicts a class for an example.
*/
public double[] getVotesForInstance(Instance inst) {
DoubleVector combinedVote = new DoubleVector();
if (this.trainingWeightSeenByModel > 0.0) {
for (int i = 0; i < this.ensemble.length; i++) {
if (this.ensembleWeights[i] > 0.0) {
DoubleVector vote = new DoubleVector(this.ensemble[i].getVotesForInstance(inst));
if (vote.sumOfValues() > 0.0) {
vote.normalize();
//scale weight and prevent overflow
vote.scaleValues(this.ensembleWeights[i] / (1.0 * this.ensemble.length + 1));
combinedVote.addValues(vote);
}
}
}
}
combinedVote.normalize();
return combinedVote.getArrayRef();
}
@Override
public void getModelDescription(StringBuilder out, int indent) {
}
/**
* Adds ensemble weights to the measurements.
*/
@Override
protected Measurement[] getModelMeasurementsImpl() {
Measurement[] measurements = new Measurement[this.maxStoredCount];
for (int m = 0; m < this.maxMemberCount; m++) {
measurements[m] = new Measurement("Member weight " + (m + 1), -1);
}
for (int s = this.maxMemberCount; s < this.maxStoredCount; s++) {
measurements[s] = new Measurement("Stored member weight " + (s + 1), -1);
}
if (this.storedWeights != null) {
int storeSize = this.storedWeights.length;
for (int i = 0; i < storeSize; i++) {
if (i < this.ensemble.length) {
measurements[i] = new Measurement("Member weight " + (i + 1), this.storedWeights[storeSize - i - 1][0]);
} else {
measurements[i] = new Measurement("Stored member weight " + (i + 1), this.storedWeights[storeSize - i - 1][0]);
}
}
}
return measurements;
}
/**
* Determines whether the classifier is randomizable.
*/
public boolean isRandomizable() {
return false;
}
@Override
public Classifier[] getSubClassifiers() {
return this.ensemble.clone();
}
/**
* Adds a classifier to the storage.
*
* @param newClassifier The classifier to add.
* @param newClassifiersWeight The new classifiers weight.
*/
protected Classifier addToStored(Classifier newClassifier, double newClassifiersWeight) {
Classifier addedClassifier = null;
Classifier[] newStored = new Classifier[this.storedLearners.length + 1];
double[][] newStoredWeights = new double[newStored.length][2];
for (int i = 0; i < newStored.length; i++) {
if (i < this.storedLearners.length) {
newStored[i] = this.storedLearners[i];
newStoredWeights[i][0] = this.storedWeights[i][0];
newStoredWeights[i][1] = this.storedWeights[i][1];
} else {
newStored[i] = addedClassifier = newClassifier.copy();
newStoredWeights[i][0] = newClassifiersWeight;
newStoredWeights[i][1] = i;
}
}
this.storedLearners = newStored;
this.storedWeights = newStoredWeights;
return addedClassifier;
}
/**
* Removes the poorest classifier from the model, thus decreasing the models
* size.
*
* @return the size of the removed classifier.
*/
protected int removePoorestModelBytes() {
int poorestIndex = Utils.minIndex(this.ensembleWeights);
int byteSize = this.ensemble[poorestIndex].measureByteSize();
discardModel(poorestIndex);
return byteSize;
}
/**
* Removes the classifier at a given index from the model, thus decreasing
* the models size.
*
* @param index
*/
protected void discardModel(int index) {
Classifier[] newEnsemble = new Classifier[this.ensemble.length - 1];
double[] newEnsembleWeights = new double[newEnsemble.length];
int oldPos = 0;
for (int i = 0; i < newEnsemble.length; i++) {
if (oldPos == index) {
oldPos++;
}
newEnsemble[i] = this.ensemble[oldPos];
newEnsembleWeights[i] = this.ensembleWeights[oldPos];
oldPos++;
}
this.ensemble = newEnsemble;
this.ensembleWeights = newEnsembleWeights;
}
}
| Java |
/*
* MultiChoiceOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import javax.swing.JComponent;
import moa.gui.MultiChoiceOptionEditComponent;
/**
* Multi choice option.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class MultiChoiceOption extends AbstractOption {

    private static final long serialVersionUID = 1L;

    /** Labels the user can choose between; also the CLI representation. */
    protected String[] optionLabels;

    /** Human-readable description paired with each label. */
    protected String[] optionDescriptions;

    /** Index selected when the option is reset to its default. */
    protected int defaultOptionIndex;

    /** Index of the currently selected label. */
    protected int chosenOptionIndex;

    /**
     * Creates a multi-choice option.
     *
     * @param name               option name
     * @param cliChar            single-character CLI flag
     * @param purpose            human-readable purpose string
     * @param optionLabels       selectable labels
     * @param optionDescriptions one description per label
     * @param defaultOptionIndex index selected by default
     * @throws IllegalArgumentException when labels and descriptions differ in length
     */
    public MultiChoiceOption(String name, char cliChar, String purpose,
            String[] optionLabels, String[] optionDescriptions,
            int defaultOptionIndex) {
        super(name, cliChar, purpose);
        if (optionLabels.length != optionDescriptions.length) {
            throw new IllegalArgumentException("Labels/descriptions mismatch.");
        }
        // Defensive copies: callers cannot mutate our state afterwards.
        this.optionLabels = optionLabels.clone();
        this.optionDescriptions = optionDescriptions.clone();
        this.defaultOptionIndex = defaultOptionIndex;
        resetToDefault();
    }

    @Override
    public String getDefaultCLIString() {
        return this.optionLabels[this.defaultOptionIndex];
    }

    @Override
    public String getValueAsCLIString() {
        return getChosenLabel();
    }

    @Override
    public void setValueViaCLIString(String s) {
        try {
            // A purely numeric string selects by position ...
            setChosenIndex(Integer.parseInt(s.trim()));
        } catch (NumberFormatException nfe) {
            // ... anything else is matched against the labels.
            setChosenLabel(s);
        }
    }

    /**
     * Selects the choice whose label equals the given (trimmed) string.
     *
     * @throws IllegalArgumentException when no label matches
     */
    public void setChosenLabel(String label) {
        String wanted = label.trim();
        int found = -1;
        for (int i = 0; i < this.optionLabels.length && found < 0; i++) {
            if (this.optionLabels[i].equals(wanted)) {
                found = i;
            }
        }
        if (found < 0) {
            throw new IllegalArgumentException("Label not recognised: " + wanted);
        }
        this.chosenOptionIndex = found;
    }

    /**
     * Selects the choice at the given position.
     *
     * @throws IndexOutOfBoundsException when the index is not a valid position
     */
    public void setChosenIndex(int index) {
        boolean inRange = (index >= 0) && (index < this.optionLabels.length);
        if (!inRange) {
            throw new IndexOutOfBoundsException();
        }
        this.chosenOptionIndex = index;
    }

    /** Returns a copy of the selectable labels. */
    public String[] getOptionLabels() {
        return this.optionLabels.clone();
    }

    /** Returns the label of the current selection. */
    public String getChosenLabel() {
        return this.optionLabels[this.chosenOptionIndex];
    }

    /** Returns the index of the current selection. */
    public int getChosenIndex() {
        return this.chosenOptionIndex;
    }

    @Override
    public JComponent getEditComponent() {
        return new MultiChoiceOptionEditComponent(this);
    }
}
| Java |
/*
* ClassOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import java.io.File;
import javax.swing.JComponent;
import moa.gui.ClassOptionEditComponent;
import moa.tasks.Task;
/**
* Class option.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class ClassOption extends AbstractClassOption {
private static final long serialVersionUID = 1L;
public ClassOption(String name, char cliChar, String purpose,
Class<?> requiredType, String defaultCLIString) {
super(name, cliChar, purpose, requiredType, defaultCLIString);
}
public ClassOption(String name, char cliChar, String purpose,
Class<?> requiredType, String defaultCLIString, String nullString) {
super(name, cliChar, purpose, requiredType, defaultCLIString, nullString);
}
@Override
public String getValueAsCLIString() {
if ((this.currentValue == null) && (this.nullString != null)) {
return this.nullString;
} else if (this.currentValue == null) {
return "None";
}
return objectToCLIString(this.currentValue, this.requiredType);
}
@Override
public void setValueViaCLIString(String s) {
if ((this.nullString != null) && ((s == null) || (s.length() == 0) || s.equals(this.nullString))) {
this.currentValue = null;
} else if ((s != null) && (s.equalsIgnoreCase("None"))) {
this.currentValue = null;
} else {
try {
this.currentValue = cliStringToObject(s, this.requiredType, null);
} catch (Exception e) {
throw new IllegalArgumentException("Problems with option: " + getName(), e);
}
}
}
public static String objectToCLIString(Object obj, Class<?> requiredType) {
if (obj == null) {
return "";
}
if (obj instanceof File) {
return (FILE_PREFIX_STRING + ((File) obj).getPath());
}
if (obj instanceof String) {
return (INMEM_PREFIX_STRING + obj);
}
String className = classToCLIString(obj.getClass(), requiredType);
if (obj instanceof OptionHandler) {
String subOptions = ((OptionHandler) obj).getOptions().getAsCLIString();
if (subOptions.length() > 0) {
return (className + " " + subOptions);
}
}
return className;
}
/**
 * Decodes a CLI string into an object instance.
 * "file:&lt;path&gt;" decodes to a File handle; "inmem:&lt;name&gt;" decodes to the
 * raw name string; anything else is treated as "ClassName [options]" and
 * instantiated reflectively.
 *
 * @param cliString the CLI text to decode
 * @param requiredType the type the decoded object must conform to
 * @param externalOptions extra options to honour while parsing, or null
 * @return the decoded object
 * @throws Exception if the class cannot be found/instantiated, its options
 *         are invalid, or the instance is not of the required type
 */
public static Object cliStringToObject(String cliString,
        Class<?> requiredType, Option[] externalOptions) throws Exception {
    if (cliString.startsWith(FILE_PREFIX_STRING)) {
        return new File(cliString.substring(FILE_PREFIX_STRING.length()));
    }
    if (cliString.startsWith(INMEM_PREFIX_STRING)) {
        return cliString.substring(INMEM_PREFIX_STRING.length());
    }
    cliString = cliString.trim();
    // Everything before the first space is the class name; the rest (if any)
    // are that class's own options.
    int firstSpaceIndex = cliString.indexOf(' ', 0);
    String className;
    String classOptions;
    if (firstSpaceIndex > 0) {
        className = cliString.substring(0, firstSpaceIndex);
        classOptions = cliString.substring(firstSpaceIndex + 1, cliString.length());
        classOptions = classOptions.trim();
    } else {
        className = cliString;
        classOptions = "";
    }
    // Resolve the class: first as given, then relative to the required
    // type's package, then relative to the task package.
    Class<?> classObject;
    try {
        classObject = Class.forName(className);
    } catch (Throwable t1) {
        try {
            // try prepending default package
            classObject = Class.forName(requiredType.getPackage().getName()
                    + "." + className);
        } catch (Throwable t2) {
            try {
                // try prepending task package
                classObject = Class.forName(Task.class.getPackage().getName()
                        + "." + className);
            } catch (Throwable t3) {
                throw new Exception("Class not found: " + className);
            }
        }
    }
    Object classInstance;
    try {
        classInstance = classObject.newInstance();
    } catch (Exception ex) {
        throw new Exception("Problem creating instance of class: "
                + className, ex);
    }
    // Accept either a direct instance of the required type, or a Task whose
    // result type conforms to it.
    if (requiredType.isInstance(classInstance)
            || ((classInstance instanceof Task) && requiredType.isAssignableFrom(((Task) classInstance).getTaskResultType()))) {
        // Parse the remaining CLI text against the union of the external
        // options and the instance's own options.
        Options options = new Options();
        if (externalOptions != null) {
            for (Option option : externalOptions) {
                options.addOption(option);
            }
        }
        if (classInstance instanceof OptionHandler) {
            Option[] objectOptions = ((OptionHandler) classInstance).getOptions().getOptionArray();
            for (Option option : objectOptions) {
                options.addOption(option);
            }
        }
        try {
            options.setViaCLIString(classOptions);
        } catch (Exception ex) {
            // Surface the valid-options help text alongside the failure.
            throw new Exception("Problem with options to '"
                    + className
                    + "'."
                    + "\n\nValid options for "
                    + className
                    + ":\n"
                    + ((OptionHandler) classInstance).getOptions().getHelpString(), ex);
        } finally {
            options.removeAllOptions(); // clean up listener refs
        }
    } else {
        throw new Exception("Class named '" + className
                + "' is not an instance of " + requiredType.getName() + ".");
    }
    return classInstance;
}
@Override
public JComponent getEditComponent() {
    // GUI editor widget used to edit this option's value.
    return new ClassOptionEditComponent(this);
}
}
| Java |
/*
* AbstractClassOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import java.io.File;
import javax.swing.JComponent;
import moa.core.ObjectRepository;
import moa.core.SerializeUtils;
import moa.tasks.Task;
import moa.tasks.TaskMonitor;
/**
* Abstract class option.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision$
*/
public abstract class AbstractClassOption extends AbstractOption {

    private static final long serialVersionUID = 1L;

    /** The prefix text to use to indicate file. */
    public static final String FILE_PREFIX_STRING = "file:";

    /** The prefix text to use to indicate inmem. */
    public static final String INMEM_PREFIX_STRING = "inmem:";

    /** The current object held by this option (may be null). */
    protected Object currentValue;

    /** The class type that a materialized value must conform to. */
    protected Class<?> requiredType;

    /** The default command line interface text. */
    protected String defaultCLIString;

    /** The CLI text that maps to a null value, or null if unsupported. */
    protected String nullString;

    /**
     * Creates a new instance of an abstract option given its class name,
     * command line interface text, its purpose, its class type and its default
     * command line interface text.
     *
     * @param name the name of this option
     * @param cliChar the command line interface character
     * @param purpose the text describing the purpose of this option
     * @param requiredType the class type
     * @param defaultCLIString the default command line interface text
     */
    public AbstractClassOption(String name, char cliChar, String purpose,
            Class<?> requiredType, String defaultCLIString) {
        this(name, cliChar, purpose, requiredType, defaultCLIString, null);
    }

    /**
     * Creates a new instance of an abstract option given its class name,
     * command line interface text, its purpose, its class type, default
     * command line interface text, and its null text.
     *
     * @param name the name of this option
     * @param cliChar the command line interface character
     * @param purpose the text describing the purpose of this option
     * @param requiredType the class type
     * @param defaultCLIString the default command line interface text
     * @param nullString the null text
     */
    public AbstractClassOption(String name, char cliChar, String purpose,
            Class<?> requiredType, String defaultCLIString, String nullString) {
        super(name, cliChar, purpose);
        this.requiredType = requiredType;
        this.defaultCLIString = defaultCLIString;
        this.nullString = nullString;
        resetToDefault();
    }

    /**
     * Sets current object. Accepts null (when a null string is configured),
     * a direct instance of the required type, a String (in-memory name),
     * a File (serialized object), or a Task whose result type conforms.
     *
     * @param obj the object to set as current
     * @throws IllegalArgumentException if the object is of none of the
     *         accepted kinds
     */
    public void setCurrentObject(Object obj) {
        if (((obj == null) && (this.nullString != null))
                || this.requiredType.isInstance(obj)
                || (obj instanceof String)
                || (obj instanceof File)
                || ((obj instanceof Task) && this.requiredType.isAssignableFrom(((Task) obj).getTaskResultType()))) {
            this.currentValue = obj;
        } else {
            throw new IllegalArgumentException("Object not of required type.");
        }
    }

    /**
     * Returns the current object, before any materialization step.
     *
     * @return the current object
     */
    public Object getPreMaterializedObject() {
        return this.currentValue;
    }

    /**
     * Gets the class type of this option.
     *
     * @return the class type of this option
     */
    public Class<?> getRequiredType() {
        return this.requiredType;
    }

    /**
     * Gets the null string of this option.
     *
     * @return the null string of this option
     */
    public String getNullString() {
        return this.nullString;
    }

    /**
     * Gets a materialized object of this option: in-memory names are looked
     * up in the repository, Tasks are executed, and Files are deserialized.
     *
     * @param monitor the task monitor to use
     * @param repository the object repository to use
     * @return the materialized object (may be null if the current value is null)
     * @throws RuntimeException if the value cannot be materialized
     */
    public Object materializeObject(TaskMonitor monitor,
            ObjectRepository repository) {
        if ((this.currentValue == null)
                || this.requiredType.isInstance(this.currentValue)) {
            return this.currentValue;
        } else if (this.currentValue instanceof String) {
            if (repository != null) {
                Object inmemObj = repository.getObjectNamed((String) this.currentValue);
                if (inmemObj == null) {
                    throw new RuntimeException("No object named "
                            + this.currentValue + " found in repository.");
                }
                return inmemObj;
            }
            throw new RuntimeException("No object repository available.");
        } else if (this.currentValue instanceof Task) {
            Task task = (Task) this.currentValue;
            Object result = task.doTask(monitor, repository);
            return result;
        } else if (this.currentValue instanceof File) {
            File inputFile = (File) this.currentValue;
            Object result = null;
            try {
                result = SerializeUtils.readFromFile(inputFile);
            } catch (Exception ex) {
                throw new RuntimeException("Problem loading "
                        + this.requiredType.getName() + " object from file '"
                        + inputFile.getName() + "':\n" + ex.getMessage(), ex);
            }
            return result;
        } else {
            throw new RuntimeException(
                    "Could not materialize object of required type "
                    + this.requiredType.getName() + ", found "
                    + this.currentValue.getClass().getName()
                    + " instead.");
        }
    }

    @Override
    public String getDefaultCLIString() {
        return this.defaultCLIString;
    }

    /**
     * Gets the command line interface text of the class, abbreviated by
     * stripping the required type's (or task) package prefix.
     *
     * @param aClass the class
     * @param requiredType the class type
     * @return the command line interface text of the class
     */
    public static String classToCLIString(Class<?> aClass, Class<?> requiredType) {
        String className = aClass.getName();
        String packageName = requiredType.getPackage().getName();
        // Require the separating dot so a sibling package that merely shares
        // the textual prefix (e.g. "moa.options" vs "moa.optionsx") is not
        // incorrectly stripped.
        if (className.startsWith(packageName + ".")) {
            // cut off package name
            className = className.substring(packageName.length() + 1);
        } else if (Task.class.isAssignableFrom(aClass)) {
            packageName = Task.class.getPackage().getName();
            if (className.startsWith(packageName + ".")) {
                // cut off task package name
                className = className.substring(packageName.length() + 1);
            }
        }
        return className;
    }

    @Override
    public abstract String getValueAsCLIString();

    @Override
    public abstract void setValueViaCLIString(String s);

    @Override
    public abstract JComponent getEditComponent();

    /**
     * Gets the class name without its package name prefix.
     *
     * @param className the name of the class
     * @param expectedType the type whose package is stripped
     * @return the class name without its package name prefix
     */
    public static String stripPackagePrefix(String className, Class<?> expectedType) {
        String packageName = expectedType.getPackage().getName();
        // Match the full package segment (dot included) to avoid mangling
        // names from sibling packages sharing the same prefix.
        if (className.startsWith(packageName + ".")) {
            return className.substring(packageName.length() + 1);
        }
        return className;
    }
}
| Java |
/*
* WEKAClassOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @author FracPete (fracpete at waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import weka.core.Utils;
import java.io.File;
import java.util.Enumeration;
import java.util.Vector;
import javax.swing.JComponent;
import moa.gui.WEKAClassOptionEditComponent;
import moa.tasks.Task;
/**
* WEKA class option. This option is used to access options in WEKA.
* For example, WEKAClassifier uses it to set the base learner.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @author FracPete (fracpete at waikato dot ac dot nz)
* @version $Revision$
*/
/**
 * WEKA variant of a class option: values are WEKA objects configured via
 * weka.core.OptionHandler-style string arrays rather than MOA Options.
 */
public class WEKAClassOption extends AbstractClassOption {

    private static final long serialVersionUID = 1L;

    /** Creates a WEKA class option without a null string. */
    public WEKAClassOption(String name, char cliChar, String purpose,
            Class<?> requiredType, String defaultCLIString) {
        super(name, cliChar, purpose, requiredType, defaultCLIString);
    }

    /** Creates a WEKA class option with a null string. */
    public WEKAClassOption(String name, char cliChar, String purpose,
            Class<?> requiredType, String defaultCLIString, String nullString) {
        super(name, cliChar, purpose, requiredType, defaultCLIString, nullString);
    }

    @Override
    public String getValueAsCLIString() {
        if ((this.currentValue == null) && (this.nullString != null)) {
            return this.nullString;
        }
        // NOTE(review): if currentValue is null and no null string is
        // configured, the next line throws a NullPointerException.
        String result = currentValue.getClass().getName();
        // Append the WEKA object's own options, joined in WEKA CLI form.
        if (currentValue instanceof weka.core.OptionHandler)
            result += " " + Utils.joinOptions(((weka.core.OptionHandler) currentValue).getOptions());
        result = result.trim();
        return result;
    }

    @Override
    public void setValueViaCLIString(String s) {
        // Null/empty/nullString input clears the option. (Unlike ClassOption,
        // there is no special handling of the literal "None" here.)
        if ((this.nullString != null)
                && ((s == null) || (s.length() == 0) || s
                .equals(this.nullString))) {
            this.currentValue = null;
        } else {
            try {
                this.currentValue = cliStringToObject(s, this.requiredType, null);
            } catch (Exception e) {
                throw new IllegalArgumentException("Problems with option: " + getName(), e);
            }
        }
    }

    /**
     * Encodes an object as its CLI representation: "" for null, prefixed
     * forms for File/String, else "ClassName [weka options]".
     *
     * @param obj the object to encode (may be null)
     * @param requiredType the type used to abbreviate the class name
     * @return the CLI representation
     */
    public static String objectToCLIString(Object obj, Class<?> requiredType) {
        if (obj == null) {
            return "";
        }
        if (obj instanceof File) {
            return (FILE_PREFIX_STRING + ((File) obj).getPath());
        }
        if (obj instanceof String) {
            return (INMEM_PREFIX_STRING + obj);
        }
        String className = classToCLIString(obj.getClass(), requiredType);
        if (obj instanceof weka.core.OptionHandler) {
            String subOptions = Utils.joinOptions(((weka.core.OptionHandler) obj).getOptions());
            if (subOptions.length() > 0) {
                return new String(className + " " + subOptions).trim();
            }
        }
        return className;
    }

    /**
     * Decodes a CLI string into a WEKA object, resolving the class as given,
     * then relative to the required type's package, then the task package,
     * and finally applying any options via weka.core.OptionHandler.
     *
     * @param cliString the CLI text to decode
     * @param requiredType the type the decoded object must conform to
     * @param externalOptions extra options whose CLI values are passed to the
     *        WEKA object, or null to parse options out of cliString itself
     * @return the decoded object
     * @throws Exception if the class cannot be found/instantiated, its
     *         options are invalid, or the instance is not of the required type
     */
    public static Object cliStringToObject(String cliString,
            Class<?> requiredType, Option[] externalOptions) throws Exception {
        if (cliString.startsWith(FILE_PREFIX_STRING)) {
            return new File(cliString.substring(FILE_PREFIX_STRING.length()));
        }
        if (cliString.startsWith(INMEM_PREFIX_STRING)) {
            return cliString.substring(INMEM_PREFIX_STRING.length());
        }
        cliString = cliString.trim();
        int firstSpaceIndex = cliString.indexOf(' ', 0);
        String className;
        String WEKAClassOptions;
        // NOTE(review): WEKAClassOptions is computed but never used below —
        // the option text is re-derived from cliString via Utils.splitOptions
        // in the externalOptions == null branch instead.
        if (firstSpaceIndex > 0) {
            className = cliString.substring(0, firstSpaceIndex);
            WEKAClassOptions = cliString.substring(firstSpaceIndex + 1, cliString.length());
            WEKAClassOptions = WEKAClassOptions.trim();
        } else {
            className = cliString;
            WEKAClassOptions = "";
        }
        Class<?> classObject;
        try {
            classObject = Class.forName(className);
        } catch (Throwable t1) {
            try {
                // try prepending default package
                classObject = Class.forName(requiredType.getPackage().getName() + "." + className);
            } catch (Throwable t2) {
                try {
                    // try prepending task package
                    classObject = Class.forName(Task.class.getPackage().getName() + "." + className);
                } catch (Throwable t3) {
                    throw new Exception("Class not found: " + className);
                }
            }
        }
        Object classInstance;
        try {
            classInstance = classObject.newInstance();
        } catch (Exception ex) {
            throw new Exception("Problem creating instance of class: "
                    + className, ex);
        }
        if (requiredType.isInstance(classInstance)
                || ((classInstance instanceof Task) && requiredType.isAssignableFrom(((Task) classInstance).getTaskResultType()))) {
            // Collect the WEKA-style option tokens to apply.
            Vector<String> options = new Vector<String>();
            if (externalOptions != null) {
                for (Option option : externalOptions) {
                    options.add(option.getValueAsCLIString());
                }
            }
            else {
                // Split the full CLI string; element 0 is the class name, so
                // it is skipped.
                String[] optionsTmp = Utils.splitOptions(cliString);
                for (int i = 1; i < optionsTmp.length; i++)
                    options.add(optionsTmp[i]);
            }
            if (classInstance instanceof weka.core.OptionHandler) {
                try {
                    ((weka.core.OptionHandler) classInstance).setOptions(options.toArray(new String[options.size()]));
                } catch (Exception ex) {
                    // Build a help text listing the valid WEKA options.
                    Enumeration enm = ((weka.core.OptionHandler) classInstance).listOptions();
                    StringBuffer optionsText = new StringBuffer();
                    while (enm.hasMoreElements()) {
                        weka.core.Option option = (weka.core.Option) enm.nextElement();
                        optionsText.append(option.synopsis() + '\n');
                        optionsText.append(option.description() + "\n");
                    }
                    throw new Exception("Problem with options to '"
                            + className
                            + "'."
                            + "\n\nValid options for "
                            + className
                            + ":\n"
                            + optionsText.toString(), ex);
                }
            }
        } else {
            throw new Exception("Class named '" + className
                    + "' is not an instance of " + requiredType.getName() + ".");
        }
        return classInstance;
    }

    @Override
    public JComponent getEditComponent() {
        // GUI editor widget used to edit this option's value.
        return new WEKAClassOptionEditComponent(this);
    }
}
| Java |
/*
* Option.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import javax.swing.JComponent;
import moa.MOAObject;
/**
* Interface representing an option or parameter.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public interface Option extends MOAObject {

    /**
     * Gets the name of this option.
     *
     * @return the name of this option
     */
    public String getName();

    /**
     * Gets the command line interface character of this option.
     *
     * @return the character identifying this option on the command line
     */
    public char getCLIChar();

    /**
     * Gets the purpose of this option.
     *
     * @return the purpose of this option
     */
    public String getPurpose();

    /**
     * Gets the default command line interface text of this option.
     *
     * @return the default command line interface text
     */
    public String getDefaultCLIString();

    /**
     * Sets the value of this option via its command line interface text.
     *
     * @param s the command line interface text
     */
    public void setValueViaCLIString(String s);

    /**
     * Gets the current value of this option as command line interface text.
     *
     * @return the string with the value of this option
     */
    public String getValueAsCLIString();

    /**
     * Resets this option to its default value.
     *
     */
    public void resetToDefault();

    /**
     * Gets the state of this option in human readable form.
     *
     * @return the string with the state of this option in human readable form
     */
    public String getStateString();

    /**
     * Gets a copy of this option.
     *
     * @return the copy of this option
     */
    public Option copy();

    /**
     * Gets the GUI component used to edit this option.
     *
     * @return the component to edit
     */
    public JComponent getEditComponent();
}
| Java |
/*
* FloatOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import javax.swing.JComponent;
import moa.gui.FloatOptionEditComponent;
/**
* Float option.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
/**
 * Float option: holds a double value constrained to an inclusive
 * [minVal, maxVal] range.
 */
public class FloatOption extends AbstractOption {

    private static final long serialVersionUID = 1L;

    /** Value currently held by this option. */
    protected double currentVal;

    /** Value restored when resetToDefault() is invoked. */
    protected double defaultVal;

    /** Inclusive lower bound on the value. */
    protected double minVal;

    /** Inclusive upper bound on the value. */
    protected double maxVal;

    /**
     * Creates an unbounded float option (range is the whole double line).
     */
    public FloatOption(String name, char cliChar, String purpose,
            double defaultVal) {
        this(name, cliChar, purpose, defaultVal, Double.NEGATIVE_INFINITY,
                Double.POSITIVE_INFINITY);
    }

    /**
     * Creates a float option restricted to [minVal, maxVal].
     */
    public FloatOption(String name, char cliChar, String purpose,
            double defaultVal, double minVal, double maxVal) {
        super(name, cliChar, purpose);
        this.defaultVal = defaultVal;
        this.minVal = minVal;
        this.maxVal = maxVal;
        resetToDefault();
    }

    /**
     * Sets the current value, rejecting values outside the allowed range.
     * (Note: NaN compares false against both bounds and is therefore
     * accepted — same as the original behavior.)
     *
     * @param v the new value
     * @throws IllegalArgumentException if v is out of range
     */
    public void setValue(double v) {
        String violation = null;
        if (v < this.minVal) {
            violation = " cannot be less than " + this.minVal;
        } else if (v > this.maxVal) {
            violation = " cannot be greater than " + this.maxVal;
        }
        if (violation != null) {
            throw new IllegalArgumentException(
                    "Option " + getName() + violation + ", out of range: " + v);
        }
        this.currentVal = v;
    }

    /** Returns the current value. */
    public double getValue() {
        return this.currentVal;
    }

    /** Returns the inclusive lower bound. */
    public double getMinValue() {
        return this.minVal;
    }

    /** Returns the inclusive upper bound. */
    public double getMaxValue() {
        return this.maxVal;
    }

    @Override
    public String getDefaultCLIString() {
        return doubleToCLIString(this.defaultVal);
    }

    @Override
    public String getValueAsCLIString() {
        return doubleToCLIString(this.currentVal);
    }

    @Override
    public void setValueViaCLIString(String s) {
        setValue(cliStringToDouble(s));
    }

    /** Parses a (possibly whitespace-padded) decimal string into a double. */
    public static double cliStringToDouble(String s) {
        String trimmed = s.trim();
        return Double.parseDouble(trimmed);
    }

    /** Renders a double as its canonical decimal string. */
    public static String doubleToCLIString(double d) {
        return String.valueOf(d);
    }

    @Override
    public JComponent getEditComponent() {
        // GUI editor widget used to edit this option's value.
        return new FloatOptionEditComponent(this);
    }
}
| Java |
/*
* StringOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
/**
* String option.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
/**
 * String option: holds an arbitrary, unvalidated string value.
 */
public class StringOption extends AbstractOption {

    private static final long serialVersionUID = 1L;

    /** Value currently held by this option. */
    protected String currentVal;

    /** Value restored when resetToDefault() is invoked. */
    protected String defaultVal;

    /**
     * Creates a new string option with the given default value.
     */
    public StringOption(String name, char cliChar, String purpose,
            String defaultVal) {
        super(name, cliChar, purpose);
        this.defaultVal = defaultVal;
        resetToDefault();
    }

    /** Sets the current value of this option. */
    public void setValue(String v) {
        this.currentVal = v;
    }

    /** Returns the current value of this option. */
    public String getValue() {
        return this.currentVal;
    }

    @Override
    public String getDefaultCLIString() {
        return this.defaultVal;
    }

    @Override
    public String getValueAsCLIString() {
        // The value IS its CLI representation.
        return getValue();
    }

    @Override
    public void setValueViaCLIString(String s) {
        setValue(s);
    }
}
| Java |
/*
* OptionHandler.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import moa.MOAObject;
import moa.core.ObjectRepository;
import moa.tasks.TaskMonitor;
/**
* Interface representing an object that handles options or parameters.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public interface OptionHandler extends MOAObject {

    /**
     * Gets the purpose of this object.
     *
     * @return the string with the purpose of this object
     */
    public String getPurposeString();

    /**
     * Gets the options of this object.
     *
     * @return the options of this object
     */
    public Options getOptions();

    /**
     * This method prepares this object for use.
     *
     */
    public void prepareForUse();

    /**
     * This method prepares this object for use, with an explicit monitor
     * and repository.
     *
     * @param monitor the TaskMonitor to use
     * @param repository the ObjectRepository to use
     */
    public void prepareForUse(TaskMonitor monitor, ObjectRepository repository);

    /**
     * This method produces a copy of this object.
     *
     * @return a copy of this object
     */
    public OptionHandler copy();

    /**
     * Gets the command line interface text needed to re-create this object.
     *
     * @param expectedType the type the creation string will be resolved
     *        against (used to abbreviate the class name)
     * @return the command line interface text to create the object
     */
    public String getCLICreationString(Class<?> expectedType);
}
| Java |
/**
* [ClassOptionWithNames.java]
*
 * A variation of [ClassOption] that restricts the available choices to a specific set of classes.
 * Pass the list of class names to offer (String[] classNames) as a constructor argument.
*
* @author Yunsu Kim
* based on the implementation of Richard Kirkby
* Data Management and Data Exploration Group, RWTH Aachen University
*/
package moa.options;
import java.io.File;
import javax.swing.JComponent;
import moa.gui.ClassOptionWithNamesEditComponent;
import moa.tasks.Task;
public class ClassOptionWithNames extends AbstractClassOption {

    private static final long serialVersionUID = 1L;

    // Class names offered as choices. NOTE(review): these are only exposed
    // via getClassNames() (presumably consumed by the GUI edit component);
    // cliStringToObject below does NOT enforce this list — TODO confirm.
    private String[] names;

    /** Creates the option without a null string. */
    public ClassOptionWithNames(String name, char cliChar, String purpose,
            Class<?> requiredType, String defaultCLIString, String[] classNames) {
        super(name, cliChar, purpose, requiredType, defaultCLIString);
        this.names = classNames;
    }

    /** Creates the option with a null string. */
    public ClassOptionWithNames(String name, char cliChar, String purpose,
            Class<?> requiredType, String defaultCLIString, String nullString, String[] classNames) {
        super(name, cliChar, purpose, requiredType, defaultCLIString, nullString);
        this.names = classNames;
    }

    @Override
    public String getValueAsCLIString() {
        if ((this.currentValue == null) && (this.nullString != null)) {
            return this.nullString;
        }
        return objectToCLIString(this.currentValue, this.requiredType);
    }

    @Override
    public void setValueViaCLIString(String s) {
        // Null/empty/nullString input clears the option. (Unlike ClassOption,
        // there is no special handling of the literal "None" here.)
        if ((this.nullString != null)
                && ((s == null) || (s.length() == 0) || s.equals(this.nullString))) {
            this.currentValue = null;
        } else {
            try {
                this.currentValue = cliStringToObject(s, this.requiredType,
                        null);
            } catch (Exception e) {
                throw new IllegalArgumentException("Problems with option: " + getName(), e);
            }
        }
    }

    /**
     * Encodes an object as its CLI representation: "" for null, prefixed
     * forms for File/String, else "ClassName [sub-options]".
     *
     * @param obj the object to encode (may be null)
     * @param requiredType the type used to abbreviate the class name
     * @return the CLI representation
     */
    public static String objectToCLIString(Object obj, Class<?> requiredType) {
        if (obj == null) {
            return "";
        }
        if (obj instanceof File) {
            return (FILE_PREFIX_STRING + ((File) obj).getPath());
        }
        if (obj instanceof String) {
            return (INMEM_PREFIX_STRING + obj);
        }
        String className = classToCLIString(obj.getClass(), requiredType);
        if (obj instanceof OptionHandler) {
            String subOptions = ((OptionHandler) obj).getOptions().getAsCLIString();
            if (subOptions.length() > 0) {
                return (className + " " + subOptions);
            }
        }
        return className;
    }

    /**
     * Decodes a CLI string into an object instance. Identical logic to
     * ClassOption.cliStringToObject: resolves the class as given, then
     * relative to the required type's package, then the task package, and
     * applies the remaining text as the instance's options.
     *
     * @param cliString the CLI text to decode
     * @param requiredType the type the decoded object must conform to
     * @param externalOptions extra options to honour while parsing, or null
     * @return the decoded object
     * @throws Exception if the class cannot be found/instantiated, its
     *         options are invalid, or the instance is not of the required type
     */
    public static Object cliStringToObject(String cliString,
            Class<?> requiredType, Option[] externalOptions) throws Exception {
        if (cliString.startsWith(FILE_PREFIX_STRING)) {
            return new File(cliString.substring(FILE_PREFIX_STRING.length()));
        }
        if (cliString.startsWith(INMEM_PREFIX_STRING)) {
            return cliString.substring(INMEM_PREFIX_STRING.length());
        }
        cliString = cliString.trim();
        // Everything before the first space is the class name; the rest are
        // that class's own options.
        int firstSpaceIndex = cliString.indexOf(' ', 0);
        String className;
        String classOptions;
        if (firstSpaceIndex > 0) {
            className = cliString.substring(0, firstSpaceIndex);
            classOptions = cliString.substring(firstSpaceIndex + 1, cliString.length());
            classOptions = classOptions.trim();
        } else {
            className = cliString;
            classOptions = "";
        }
        Class<?> classObject;
        try {
            classObject = Class.forName(className);
        } catch (Throwable t1) {
            try {
                // try prepending default package
                classObject = Class.forName(requiredType.getPackage().getName()
                        + "." + className);
            } catch (Throwable t2) {
                try {
                    // try prepending task package
                    classObject = Class.forName(Task.class.getPackage().getName()
                            + "." + className);
                } catch (Throwable t3) {
                    throw new Exception("Class not found: " + className);
                }
            }
        }
        Object classInstance;
        try {
            classInstance = classObject.newInstance();
        } catch (Exception ex) {
            throw new Exception("Problem creating instance of class: "
                    + className, ex);
        }
        // Accept either a direct instance of the required type, or a Task
        // whose result type conforms to it.
        if (requiredType.isInstance(classInstance)
                || ((classInstance instanceof Task) && requiredType.isAssignableFrom(((Task) classInstance).getTaskResultType()))) {
            Options options = new Options();
            if (externalOptions != null) {
                for (Option option : externalOptions) {
                    options.addOption(option);
                }
            }
            if (classInstance instanceof OptionHandler) {
                Option[] objectOptions = ((OptionHandler) classInstance).getOptions().getOptionArray();
                for (Option option : objectOptions) {
                    options.addOption(option);
                }
            }
            try {
                options.setViaCLIString(classOptions);
            } catch (Exception ex) {
                // Surface the valid-options help text alongside the failure.
                throw new Exception("Problem with options to '"
                        + className
                        + "'."
                        + "\n\nValid options for "
                        + className
                        + ":\n"
                        + ((OptionHandler) classInstance).getOptions().getHelpString(), ex);
            } finally {
                options.removeAllOptions(); // clean up listener refs
            }
        } else {
            throw new Exception("Class named '" + className
                    + "' is not an instance of " + requiredType.getName() + ".");
        }
        return classInstance;
    }

    @Override
    public JComponent getEditComponent() {
        // GUI editor widget used to edit this option's value.
        return new ClassOptionWithNamesEditComponent(this);
    }

    /** Returns the class names configured for this option. */
    public String[] getClassNames() {
        return this.names;
    }
}
| Java |
/*
* ListOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
/**
* List option.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
/**
 * List option: holds an ordered list of sub-options, all of the same
 * expected type, encoded on the CLI as values joined by a separator char.
 */
public class ListOption extends AbstractOption {

    private static final long serialVersionUID = 1L;

    /** The sub-options currently held. */
    protected Option[] currentList;

    /** Prototype option cloned for every list entry. */
    protected Option expectedType;

    /** List restored when resetToDefault() is invoked. */
    protected Option[] defaultList;

    /** Character separating entries in the CLI representation. */
    protected char separatorChar;

    /**
     * Creates a new list option.
     */
    public ListOption(String name, char cliChar, String purpose,
            Option expectedType, Option[] defaultList, char separatorChar) {
        super(name, cliChar, purpose);
        this.expectedType = expectedType;
        this.defaultList = defaultList.clone();
        this.separatorChar = separatorChar;
        resetToDefault();
    }

    /**
     * Replaces the current list with copies of the given options' values,
     * each re-materialized through the prototype option.
     */
    public void setList(Option[] optList) {
        Option[] copies = new Option[optList.length];
        int i = 0;
        for (Option source : optList) {
            Option copy = this.expectedType.copy();
            copy.setValueViaCLIString(source.getValueAsCLIString());
            copies[i++] = copy;
        }
        this.currentList = copies;
    }

    /** Returns a defensive copy of the current list. */
    public Option[] getList() {
        return this.currentList.clone();
    }

    @Override
    public String getDefaultCLIString() {
        return optionArrayToCLIString(this.defaultList, this.separatorChar);
    }

    @Override
    public String getValueAsCLIString() {
        return optionArrayToCLIString(this.currentList, this.separatorChar);
    }

    @Override
    public void setValueViaCLIString(String s) {
        this.currentList = cliStringToOptionArray(s, this.separatorChar,
                this.expectedType);
    }

    /**
     * Splits a CLI string on the separator and materializes one option per
     * piece; a null or empty string yields an empty array.
     */
    public static Option[] cliStringToOptionArray(String s, char separator,
            Option expectedType) {
        if (s == null || s.length() < 1) {
            return new Option[0];
        }
        String[] pieces = s.split(Character.toString(separator));
        Option[] result = new Option[pieces.length];
        int i = 0;
        for (String piece : pieces) {
            Option entry = expectedType.copy();
            entry.setValueViaCLIString(piece);
            result[i++] = entry;
        }
        return result;
    }

    /** Joins the options' CLI values with the separator character. */
    public static String optionArrayToCLIString(Option[] os, char separator) {
        StringBuilder joined = new StringBuilder();
        boolean first = true;
        for (Option o : os) {
            if (!first) {
                joined.append(separator);
            }
            joined.append(o.getValueAsCLIString());
            first = false;
        }
        return joined.toString();
    }
}
| Java |
/*
* AbstractOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import javax.swing.JComponent;
import moa.AbstractMOAObject;
import moa.gui.StringOptionEditComponent;
/**
* Abstract option.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public abstract class AbstractOption extends AbstractMOAObject implements
        Option {

    /** Array of characters not valid to use in option names. */
    public static final char[] illegalNameCharacters = new char[]{' ', '-',
        '(', ')'};

    /** Name of this option. */
    protected String name;

    /** Command line interface character of this option. */
    protected char cliChar;

    /** Text of the purpose of this option. */
    protected String purpose;

    /**
     * Gets whether the name is valid or not.
     *
     * @param optionName the name of the option
     * @return true if the name does not contain any illegal character
     */
    public static boolean nameIsLegal(String optionName) {
        for (char illegalChar : illegalNameCharacters) {
            if (optionName.indexOf(illegalChar) >= 0) {
                return false;
            }
        }
        return true;
    }

    /**
     * Creates a new instance of an abstract option given its class name,
     * command line interface text and its purpose.
     *
     * @param name the name of this option (must pass nameIsLegal)
     * @param cliChar the command line interface character
     * @param purpose the text describing the purpose of this option
     * @throws IllegalArgumentException if the name contains an illegal character
     */
    public AbstractOption(String name, char cliChar, String purpose) {
        if (!nameIsLegal(name)) {
            throw new IllegalArgumentException("Illegal option name: " + name);
        }
        this.name = name;
        this.cliChar = cliChar;
        this.purpose = purpose;
    }

    @Override
    public String getName() {
        return this.name;
    }

    @Override
    public char getCLIChar() {
        return this.cliChar;
    }

    @Override
    public String getPurpose() {
        return this.purpose;
    }

    @Override
    public void resetToDefault() {
        // Subclasses define both sides of this round-trip.
        setValueViaCLIString(getDefaultCLIString());
    }

    @Override
    public String getStateString() {
        // Human-readable state defaults to the CLI representation.
        return getValueAsCLIString();
    }

    @Override
    public Option copy() {
        return (Option) super.copy();
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: options describe themselves via getPurpose().
    }

    @Override
    public JComponent getEditComponent() {
        // Default GUI editor treats the value as a plain string; subclasses
        // override with type-specific editors.
        return new StringOptionEditComponent(this);
    }
}
| Java |
/*
* FlagOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import javax.swing.JComponent;
import moa.gui.FlagOptionEditComponent;
/**
* Flag option.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class FlagOption extends AbstractOption {

    private static final long serialVersionUID = 1L;

    /** Whether this flag is currently switched on. */
    protected boolean isSet = false;

    /**
     * Creates a boolean flag option that defaults to "off".
     *
     * @param name the option name
     * @param cliChar the command-line interface character
     * @param purpose human-readable purpose text
     */
    public FlagOption(String name, char cliChar, String purpose) {
        super(name, cliChar, purpose);
    }

    /** Sets the flag state explicitly. */
    public void setValue(boolean v) {
        this.isSet = v;
    }

    /** Switches the flag on. */
    public void set() {
        setValue(true);
    }

    /** Switches the flag off. */
    public void unset() {
        setValue(false);
    }

    /** Returns whether the flag is currently on. */
    public boolean isSet() {
        return this.isSet;
    }

    @Override
    public String getDefaultCLIString() {
        // A flag has no default CLI representation: absence means "off".
        return null;
    }

    @Override
    public String getValueAsCLIString() {
        // Presence of an (empty) value on the command line means "on".
        if (this.isSet) {
            return "";
        }
        return null;
    }

    @Override
    public void setValueViaCLIString(String s) {
        // Any non-null string (including "") switches the flag on.
        this.isSet = s != null;
    }

    @Override
    public String getStateString() {
        // String.valueOf(boolean) yields "true"/"false".
        return String.valueOf(this.isSet);
    }

    @Override
    public JComponent getEditComponent() {
        return new FlagOptionEditComponent(this);
    }
}
| Java |
/*
* FileOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import java.io.File;
import javax.swing.JComponent;
import moa.gui.FileOptionEditComponent;
/**
* File option.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class FileOption extends StringOption {

    private static final long serialVersionUID = 1L;

    /** File extension associated with this option (as passed to the constructor). */
    protected String defaultFileExtension;

    /** Whether this option denotes an output file (as passed to the constructor). */
    protected boolean isOutputFile;

    /**
     * Creates a file option.
     *
     * @param name the option name
     * @param cliChar the command-line interface character
     * @param purpose human-readable purpose text
     * @param defaultFileName the default file name value
     * @param defaultExtension the associated file extension
     * @param isOutput whether the file is an output file
     */
    public FileOption(String name, char cliChar, String purpose,
            String defaultFileName, String defaultExtension, boolean isOutput) {
        super(name, cliChar, purpose, defaultFileName);
        this.defaultFileExtension = defaultExtension;
        this.isOutputFile = isOutput;
    }

    /** Returns the associated file extension. */
    public String getDefaultFileExtension() {
        return this.defaultFileExtension;
    }

    /** Returns true if this option denotes an output file. */
    public boolean isOutputFile() {
        return this.isOutputFile;
    }

    /**
     * Returns the currently configured file, or null when no file name
     * (or an empty one) has been set.
     */
    public File getFile() {
        String fileName = getValue();
        if (fileName == null || fileName.length() == 0) {
            return null;
        }
        return new File(fileName);
    }

    @Override
    public JComponent getEditComponent() {
        return new FileOptionEditComponent(this);
    }
}
| Java |
/*
* IntOption.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import javax.swing.JComponent;
import moa.gui.IntOptionEditComponent;
/**
* Int option.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class IntOption extends AbstractOption {

    private static final long serialVersionUID = 1L;

    /** The current value. */
    protected int currentVal;

    /** The default value. */
    protected int defaultVal;

    /** Smallest accepted value (inclusive). */
    protected int minVal;

    /** Largest accepted value (inclusive). */
    protected int maxVal;

    /**
     * Creates an integer option with an unrestricted range.
     *
     * @param name the option name
     * @param cliChar the command-line interface character
     * @param purpose human-readable purpose text
     * @param defaultVal the default value
     */
    public IntOption(String name, char cliChar, String purpose, int defaultVal) {
        this(name, cliChar, purpose, defaultVal, Integer.MIN_VALUE,
                Integer.MAX_VALUE);
    }

    /**
     * Creates a range-restricted integer option, initialized to its default.
     *
     * @param name the option name
     * @param cliChar the command-line interface character
     * @param purpose human-readable purpose text
     * @param defaultVal the default value
     * @param minVal smallest accepted value (inclusive)
     * @param maxVal largest accepted value (inclusive)
     * @throws IllegalArgumentException if the default lies outside the range
     */
    public IntOption(String name, char cliChar, String purpose, int defaultVal,
            int minVal, int maxVal) {
        super(name, cliChar, purpose);
        this.defaultVal = defaultVal;
        this.minVal = minVal;
        this.maxVal = maxVal;
        resetToDefault();
    }

    /**
     * Sets the current value, enforcing the configured range.
     *
     * @param newValue the value to set
     * @throws IllegalArgumentException if the value is out of range
     */
    public void setValue(int newValue) {
        if (newValue < this.minVal) {
            throw new IllegalArgumentException("Option " + getName()
                    + " cannot be less than " + this.minVal
                    + ", out of range: " + newValue);
        }
        if (newValue > this.maxVal) {
            throw new IllegalArgumentException("Option " + getName()
                    + " cannot be greater than " + this.maxVal
                    + ", out of range: " + newValue);
        }
        this.currentVal = newValue;
    }

    /** Returns the current value. */
    public int getValue() {
        return currentVal;
    }

    /** Returns the smallest accepted value (inclusive). */
    public int getMinValue() {
        return minVal;
    }

    /** Returns the largest accepted value (inclusive). */
    public int getMaxValue() {
        return maxVal;
    }

    @Override
    public String getDefaultCLIString() {
        return intToCLIString(defaultVal);
    }

    @Override
    public String getValueAsCLIString() {
        return intToCLIString(currentVal);
    }

    @Override
    public void setValueViaCLIString(String s) {
        setValue(cliStringToInt(s));
    }

    /**
     * Parses a (possibly whitespace-padded) decimal string into an int.
     *
     * @throws NumberFormatException if the string is not a valid integer
     */
    public static int cliStringToInt(String s) {
        String trimmed = s.trim();
        return Integer.parseInt(trimmed);
    }

    /** Renders an int in its decimal CLI form. */
    public static String intToCLIString(int i) {
        return String.valueOf(i);
    }

    @Override
    public JComponent getEditComponent() {
        return new IntOptionEditComponent(this);
    }
}
| Java |
/*
* AbstractOptionHandler.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import moa.AbstractMOAObject;
import moa.clusterers.AbstractClusterer;
import moa.core.ObjectRepository;
import moa.tasks.NullMonitor;
import moa.tasks.TaskMonitor;
/**
* Abstract Option Handler. All classes that have options in
* MOA extend this class.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public abstract class AbstractOptionHandler extends AbstractMOAObject implements
OptionHandler {
private static final long serialVersionUID = 1L;
/** Options to handle */
protected Options options;
/** Dictionary with option texts and objects */
protected Map<String, Object> classOptionNamesToPreparedObjects;
@Override
public String getPurposeString() {
return "Anonymous object: purpose undocumented.";
}
@Override
public Options getOptions() {
if (this.options == null) {
this.options = new Options();
Option[] myOptions = discoverOptionsViaReflection();
for (Option option : myOptions) {
this.options.addOption(option);
}
}
return this.options;
}
@Override
public void prepareForUse() {
prepareForUse(new NullMonitor(), null);
}
@Override
public void prepareForUse(TaskMonitor monitor, ObjectRepository repository) {
prepareClassOptions(monitor, repository);
prepareForUseImpl(monitor, repository);
}
/**
* This method describes the implementation of how to prepare this object for use.
* All classes that extends this class have to implement <code>prepareForUseImpl</code>
* and not <code>prepareForUse</code> since
* <code>prepareForUse</code> calls <code>prepareForUseImpl</code>.
*
* @param monitor the TaskMonitor to use
* @param repository the ObjectRepository to use
*/
protected abstract void prepareForUseImpl(TaskMonitor monitor,
ObjectRepository repository);
@Override
public String getCLICreationString(Class<?> expectedType) {
return ClassOption.stripPackagePrefix(this.getClass().getName(),
expectedType)
+ " " + getOptions().getAsCLIString();
}
@Override
public OptionHandler copy() {
return (OptionHandler) super.copy();
}
/**
* Gets the options of this class via reflection.
*
* @return an array of options
*/
protected Option[] discoverOptionsViaReflection() {
Class<? extends AbstractOptionHandler> c = this.getClass();
Field[] fields = c.getFields();
List<Option> optList = new LinkedList<Option>();
for (Field field : fields) {
String fName = field.getName();
Class<?> fType = field.getType();
if (fName.endsWith("Option")) {
if (Option.class.isAssignableFrom(fType)) {
Option oVal = null;
try {
field.setAccessible(true);
oVal = (Option) field.get(this);
} catch (IllegalAccessException ignored) {
// cannot access this field
}
if (oVal != null) {
optList.add(oVal);
}
}
}
}
return optList.toArray(new Option[optList.size()]);
}
/**
* Prepares the options of this class.
*
* @param monitor the TaskMonitor to use
* @param repository the ObjectRepository to use
*/
protected void prepareClassOptions(TaskMonitor monitor,
ObjectRepository repository) {
this.classOptionNamesToPreparedObjects = null;
Option[] optionArray = getOptions().getOptionArray();
for (Option option : optionArray) {
if (option instanceof ClassOption) {
ClassOption classOption = (ClassOption) option;
monitor.setCurrentActivity("Materializing option "
+ classOption.getName() + "...", -1.0);
Object optionObj = classOption.materializeObject(monitor,
repository);
if (monitor.taskShouldAbort()) {
return;
}
if (optionObj instanceof OptionHandler) {
monitor.setCurrentActivity("Preparing option "
+ classOption.getName() + "...", -1.0);
((OptionHandler) optionObj).prepareForUse(monitor,
repository);
if (monitor.taskShouldAbort()) {
return;
}
}
if (this.classOptionNamesToPreparedObjects == null) {
this.classOptionNamesToPreparedObjects = new HashMap<String, Object>();
}
this.classOptionNamesToPreparedObjects.put(option.getName(),
optionObj);
} else if (option instanceof ClassOptionWithNames) {
ClassOptionWithNames classOption = (ClassOptionWithNames) option;
monitor.setCurrentActivity("Materializing option "
+ classOption.getName() + "...", -1.0);
Object optionObj = classOption.materializeObject(monitor,
repository);
if (monitor.taskShouldAbort()) {
return;
}
if (optionObj instanceof OptionHandler) {
monitor.setCurrentActivity("Preparing option "
+ classOption.getName() + "...", -1.0);
((OptionHandler) optionObj).prepareForUse(monitor,
repository);
if (monitor.taskShouldAbort()) {
return;
}
}
if (this.classOptionNamesToPreparedObjects == null) {
this.classOptionNamesToPreparedObjects = new HashMap<String, Object>();
}
this.classOptionNamesToPreparedObjects.put(option.getName(),
optionObj);
}
}
}
/**
* Gets a prepared option of this class.
*
* @param opt the class option to get
* @return an option stored in the dictionary
*/
protected Object getPreparedClassOption(ClassOption opt) {
return this.classOptionNamesToPreparedObjects.get(opt.getName());
}
/**
* Gets a prepared option of this class.
*
* @param opt - ClassOptionWithNames
* @return an option stored in the dictionary
*/
protected Object getPreparedClassOption(ClassOptionWithNames opt) {
return this.classOptionNamesToPreparedObjects.get(opt.getName());
}
}
| Java |
/*
* Options.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.options;
import java.util.LinkedList;
import java.util.List;
import moa.AbstractMOAObject;
import moa.core.StringUtils;
/**
 * Collection of options that belong to one option handler; supports
 * parsing from and rendering to a command-line string.
 *
 * @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
 * @version $Revision: 7 $
 */
public class Options extends AbstractMOAObject {

    private static final long serialVersionUID = 1L;

    /** The options held, in insertion order. */
    protected List<Option> optionList = new LinkedList<Option>();

    /**
     * Adds an option, rejecting duplicates by name or by CLI character.
     *
     * @param opt the option to add
     * @throws IllegalArgumentException if an option with the same name or
     *             command-line character is already present
     */
    public void addOption(Option opt) {
        if (getOption(opt.getName()) != null) {
            throw new IllegalArgumentException("Duplicate option name: "
                    + opt.getName());
        }
        if (getOption(opt.getCLIChar()) != null) {
            throw new IllegalArgumentException(
                    "Duplicate option command line character: "
                    + opt.getCLIChar());
        }
        this.optionList.add(opt);
    }

    /** Returns the number of options held. */
    public int numOptions() {
        return this.optionList.size();
    }

    /**
     * Looks up an option by name.
     *
     * @param optName the option name
     * @return the matching option, or null if none
     */
    public Option getOption(String optName) {
        for (Option option : this.optionList) {
            if (optName.equals(option.getName())) {
                return option;
            }
        }
        return null;
    }

    /**
     * Looks up an option by its command-line character.
     *
     * @param cliChar the command-line character
     * @return the matching option, or null if none
     */
    public Option getOption(char cliChar) {
        for (Option option : this.optionList) {
            if (option.getCLIChar() == cliChar) {
                return option;
            }
        }
        return null;
    }

    /** Returns the held options as an array. */
    public Option[] getOptionArray() {
        return this.optionList.toArray(new Option[this.optionList.size()]);
    }

    /** Removes the option with the given name (no-op if absent). */
    public void removeOption(String optName) {
        removeOption(getOption(optName));
    }

    /** Removes the given option (no-op if absent or null). */
    public void removeOption(Option opt) {
        this.optionList.remove(opt);
    }

    /** Discards all held options. */
    public void removeAllOptions() {
        this.optionList = new LinkedList<Option>();
    }

    /** Resets every held option to its default value. */
    public void resetToDefaults() {
        for (Option option : this.optionList) {
            option.resetToDefault();
        }
    }

    /**
     * Parses a command-line string of the form "-a value -b (nested) -xyz"
     * and sets the corresponding options. A multi-character token is first
     * tried as a long option name; if unknown, it is interpreted as a
     * cluster of single-character flag options (e.g. "-xyz" sets flags
     * x, y and z).
     *
     * @param cliString the command-line fragment to parse
     * @throws IllegalArgumentException on unknown options or malformed input
     */
    public void setViaCLIString(String cliString) {
        cliString = cliString.trim();
        while (cliString.length() > 0) {
            if (cliString.startsWith("-")) {
                boolean flagClusterFound = false;
                String optionString = null;
                int nextSpaceIndex = cliString.indexOf(' ', 1);
                Option opt;
                if (nextSpaceIndex > 0) {
                    optionString = cliString.substring(1, nextSpaceIndex);
                } else {
                    // Last token on the line: consume up to the end.
                    optionString = cliString.substring(1, cliString.length());
                    nextSpaceIndex = cliString.length() - 1;
                }
                if (optionString.length() == 1) {
                    opt = getOption(optionString.charAt(0));
                } else {
                    opt = getOption(optionString);
                    if (opt == null) {
                        // check for cluster of flags
                        flagClusterFound = true;
                        for (int i = 0; i < optionString.length(); i++) {
                            opt = getOption(optionString.charAt(i));
                            if (!(opt instanceof FlagOption)) {
                                // Any non-flag char invalidates the whole cluster.
                                flagClusterFound = false;
                                opt = null;
                                break;
                            }
                        }
                        if (flagClusterFound) {
                            // Second pass: actually set each flag in the cluster.
                            for (int i = 0; i < optionString.length(); i++) {
                                opt = getOption(optionString.charAt(i));
                                opt.setValueViaCLIString("");
                            }
                            cliString = cliString.substring(nextSpaceIndex + 1,
                                    cliString.length());
                        }
                    }
                }
                if (!flagClusterFound) {
                    if (opt != null) {
                        String parameters = cliString.substring(
                                nextSpaceIndex + 1, cliString.length());
                        if (opt instanceof FlagOption) {
                            // Flags take no parameter; presence means "on".
                            opt.setValueViaCLIString("");
                            cliString = parameters;
                        } else {
                            String[] paramSplit = splitParameterFromRemainingOptions(parameters);
                            opt.setValueViaCLIString(paramSplit[0]);
                            cliString = paramSplit[1];
                        }
                    } else {
                        throw new IllegalArgumentException("Unknown option: -"
                                + optionString);
                    }
                }
            } else {
                throw new IllegalArgumentException("Expecting option, found: '"
                        + cliString + "'.");
            }
            cliString = cliString.trim();
        }
    }

    /**
     * Renders all non-default option values as a single command-line string.
     * Values containing spaces are wrapped in parentheses.
     *
     * @return the command-line string (empty if every option is at its default)
     */
    public String getAsCLIString() {
        StringBuilder commandLine = new StringBuilder();
        for (Option option : this.optionList) {
            String value = option.getValueAsCLIString();
            if ((value != null) && !value.equals(option.getDefaultCLIString())) {
                if (commandLine.length() > 0) {
                    commandLine.append(" ");
                }
                commandLine.append("-" + option.getCLIChar());
                if (value.length() > 0) {
                    if (value.indexOf(' ') < 0) {
                        commandLine.append(" " + value);
                    } else {
                        // Parenthesize values containing spaces so they
                        // round-trip through setViaCLIString.
                        commandLine.append(" (" + value + ")");
                    }
                }
            }
        }
        return commandLine.toString();
    }

    /** Returns the help text for all options as a single string. */
    public String getHelpString() {
        StringBuilder sb = new StringBuilder();
        getHelp(sb, 0);
        return sb.toString();
    }

    /**
     * Appends help text (char, name, default, purpose) for every option.
     *
     * @param sb the builder to append to
     * @param indent the indentation level
     */
    public void getHelp(StringBuilder sb, int indent) {
        if (optionList.size() > 0) {
            for (Option option : optionList) {
                StringUtils.appendIndent(sb, indent);
                sb.append('-');
                sb.append(option.getCLIChar());
                sb.append(' ');
                sb.append(option.getName());
                String defaultString = option.getDefaultCLIString();
                if (defaultString != null && defaultString.length() > 0) {
                    sb.append(" (default: ");
                    sb.append(defaultString);
                    sb.append(')');
                }
                StringUtils.appendNewline(sb);
                StringUtils.appendIndent(sb, indent);
                sb.append(option.getPurpose());
                StringUtils.appendNewline(sb);
            }
        } else {
            StringUtils.appendIndented(sb, indent, "No options.");
        }
    }

    /**
     * Internal method that splits a string into two parts - the parameter for
     * the current option, and the remaining options. Parameters may be
     * quoted ('...' or "...") or wrapped in (possibly nested) parentheses;
     * otherwise they end at the first space.
     *
     * @param cliString
     *            the command line string, beginning at an option parameter
     * @return an array of two strings - the first is the option parameter, the
     *         second is the remaining cli string
     * @throws IllegalArgumentException on unterminated quotes or unbalanced
     *             parentheses
     */
    protected static String[] splitParameterFromRemainingOptions(
            String cliString) {
        String[] paramSplit = new String[2];
        cliString = cliString.trim();
        if (cliString.startsWith("\"") || cliString.startsWith("'")) {
            // Quoted parameter: same quote character must terminate it.
            int endQuoteIndex = cliString.indexOf(cliString.charAt(0), 1);
            if (endQuoteIndex < 0) {
                throw new IllegalArgumentException(
                        "Quotes not terminated correctly.");
            }
            paramSplit[0] = cliString.substring(1, endQuoteIndex);
            paramSplit[1] = cliString.substring(endQuoteIndex + 1, cliString.length());
        } else if (cliString.startsWith("(")) {
            // Parenthesized parameter: scan for the matching close bracket,
            // tracking nesting depth.
            int bracketsOpen = 1;
            int currPos = 1;
            int nextCloseIndex = cliString.indexOf(")", currPos);
            int nextOpenIndex = cliString.indexOf("(", currPos);
            while (bracketsOpen != 0) {
                if (nextCloseIndex < 0) {
                    throw new IllegalArgumentException("Brackets do not match.");
                } else if ((nextOpenIndex < 0)
                        || (nextCloseIndex < nextOpenIndex)) {
                    bracketsOpen--;
                    currPos = nextCloseIndex + 1;
                    nextCloseIndex = cliString.indexOf(")", currPos);
                } else {
                    bracketsOpen++;
                    currPos = nextOpenIndex + 1;
                    nextOpenIndex = cliString.indexOf("(", currPos);
                }
            }
            paramSplit[0] = cliString.substring(1, currPos - 1);
            paramSplit[1] = cliString.substring(currPos, cliString.length());
        } else {
            // Unquoted parameter: ends at the first space (or end of string).
            int firstSpaceIndex = cliString.indexOf(" ", 0);
            if (firstSpaceIndex >= 0) {
                paramSplit[0] = cliString.substring(0, firstSpaceIndex);
                paramSplit[1] = cliString.substring(firstSpaceIndex + 1,
                        cliString.length());
            } else {
                paramSplit[0] = cliString;
                paramSplit[1] = "";
            }
        }
        return paramSplit;
    }

    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no textual description is produced.
    }
}
| Java |
package moa.options;
/**
 * Checked exception signalling that a required option was not specified.
 * NOTE(review): inferred from the class name only — no throw/catch site is
 * visible in this chunk; the exception carries no message or cause, so
 * callers can detect it purely by type.
 */
public class RequiredOptionNotSpecifiedException extends Exception {

    private static final long serialVersionUID = 1L;
}
| Java |
/*
* MultiFilteredStream.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.streams;
import moa.core.InstancesHeader;
import moa.core.ObjectRepository;
import moa.options.AbstractOptionHandler;
import moa.options.ClassOption;
import moa.options.ListOption;
import moa.options.Option;
import moa.options.OptionHandler;
import moa.streams.filters.StreamFilter;
import moa.tasks.TaskMonitor;
import weka.core.Instance;
/**
* Class for representing a stream that is filtered.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class MultiFilteredStream extends AbstractOptionHandler implements
        InstanceStream {

    private static final long serialVersionUID = 1L;

    @Override
    public String getPurposeString() {
        return "A stream that is filtered.";
    }

    /** The source stream to be filtered. */
    public ClassOption streamOption = new ClassOption("stream", 's',
            "Stream to filter.", InstanceStream.class,
            "generators.RandomTreeGenerator");

    /** The list of filters, applied in the order given. */
    public ListOption filtersOption = new ListOption("filters", 'f',
            "Filters to apply.", new ClassOption("filter", ' ',
            "Stream filter.", StreamFilter.class, "AddNoiseFilter"),
            new Option[0], ',');

    /** The last element of the assembled chain; all stream calls delegate here. */
    protected InstanceStream filterChain;

    @Override
    public void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        // Materialize and prepare every configured filter, in order.
        Option[] filterOptions = this.filtersOption.getList();
        StreamFilter[] filters = new StreamFilter[filterOptions.length];
        for (int idx = 0; idx < filters.length; idx++) {
            monitor.setCurrentActivity("Materializing filter " + (idx + 1)
                    + "...", -1.0);
            filters[idx] = (StreamFilter) ((ClassOption) filterOptions[idx]).materializeObject(monitor, repository);
            if (monitor.taskShouldAbort()) {
                return;
            }
            if (filters[idx] instanceof OptionHandler) {
                monitor.setCurrentActivity("Preparing filter " + (idx + 1)
                        + "...", -1.0);
                ((OptionHandler) filters[idx]).prepareForUse(monitor,
                        repository);
                if (monitor.taskShouldAbort()) {
                    return;
                }
            }
        }
        // Wire the chain: source stream -> filter 1 -> ... -> filter n.
        InstanceStream tail = (InstanceStream) getPreparedClassOption(this.streamOption);
        for (StreamFilter filter : filters) {
            filter.setInputStream(tail);
            tail = filter;
        }
        this.filterChain = tail;
    }

    @Override
    public long estimatedRemainingInstances() {
        return filterChain.estimatedRemainingInstances();
    }

    @Override
    public InstancesHeader getHeader() {
        return filterChain.getHeader();
    }

    @Override
    public boolean hasMoreInstances() {
        return filterChain.hasMoreInstances();
    }

    @Override
    public boolean isRestartable() {
        return filterChain.isRestartable();
    }

    @Override
    public Instance nextInstance() {
        return filterChain.nextInstance();
    }

    @Override
    public void restart() {
        filterChain.restart();
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no textual description is produced.
    }
}
| Java |
/*
* ClusteringStream.java
* Copyright (C) 2010 RWTH Aachen University, Germany
* @author Jansen (moa@cs.rwth-aachen.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package moa.streams.clustering;
import moa.options.AbstractOptionHandler;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.streams.InstanceStream;
/**
 * Base class for clustering data streams, providing the options common to
 * all of them (decay horizon/threshold, evaluation frequency, attribute
 * count). NOTE(review): defines no serialVersionUID of its own although its
 * superclass is serializable.
 */
public abstract class ClusteringStream extends AbstractOptionHandler implements InstanceStream{

    // Decay horizon in number of instances (default 1000).
    public IntOption decayHorizonOption = new IntOption("decayHorizon", 'h',
        "Decay horizon", 1000, 0, Integer.MAX_VALUE);

    // Threshold applied with the decay horizon (default 0.01, range [0,1]).
    public FloatOption decayThresholdOption = new FloatOption("decayThreshold", 't',
        "Decay horizon threshold", 0.01, 0, 1);

    // How often (in instances) evaluation takes place (default 1000).
    public IntOption evaluationFrequencyOption = new IntOption("evaluationFrequency", 'e',
        "Evaluation frequency", 1000, 0, Integer.MAX_VALUE);

    // Number of attributes to generate (default 2). Subclasses may replace
    // or null this field (e.g. FileStream derives it from the file instead).
    public IntOption numAttsOption = new IntOption("numAtts", 'a',
        "The number of attributes to generate.", 2, 0, Integer.MAX_VALUE);

    /** Returns the configured decay horizon. */
    public int getDecayHorizon(){
        return decayHorizonOption.getValue();
    }

    /** Returns the configured decay threshold. */
    public double getDecayThreshold(){
        return decayThresholdOption.getValue();
    }

    /** Returns the configured evaluation frequency. */
    public int getEvaluationFrequency(){
        return evaluationFrequencyOption.getValue();
    }
}
| Java |
/**
* [FileStream.java]
*
* @author Timm Jansen
* @editor Yunsu Kim
*
* Last Edited: 2013/06/27
* Data Management and Data Exploration Group, RWTH Aachen University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package moa.streams.clustering;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import moa.core.InputStreamProgressMonitor;
import moa.core.InstancesHeader;
import moa.core.ObjectRepository;
import moa.options.FileOption;
import moa.options.FlagOption;
import moa.options.IntOption;
import moa.options.ListOption;
import moa.options.Option;
import moa.tasks.TaskMonitor;
import weka.core.Instance;
import weka.core.Instances;
public class FileStream extends ClusteringStream{
@Override
public String getPurposeString() {
    return "A stream read from an ARFF file. HINT: Visualization only works correctly with numerical 0-1 normalized attributes!";
}

private static final long serialVersionUID = 1L;

// NOTE(review): hard-coded developer-local path used only as the option's
// default value; consider shipping a neutral default.
String defaultfile = "/Users/kokomo40/Dropbox/BT Kim/Datasets/KDDCUP99/KDDCup99.arff";

// The ARFF file to read the stream from.
public FileOption arffFileOption = new FileOption("arffFile", 'f',
    "ARFF file to load.", defaultfile, "arff", false);

// Class index: -1 selects the last attribute, 0 means no class attribute,
// k > 0 selects attribute k (1-based).
public IntOption classIndexOption = new IntOption(
    "classIndex",
    'c',
    "Class index of data. 0 for none or -1 for last attribute in file.",
    -1, -1, Integer.MAX_VALUE);

// When set, numeric attributes are scaled to [0,1]; requires a full
// upfront pass over the file (see readMinMaxDiffValues).
public FlagOption normalizeOption =
    new FlagOption("normalize", 'n',
    "Numerical data will be normalized to 0-1 " +
    "for the visualization to work. The complete arff file needs to be read upfront.");

// 1-based attribute indices to drop from every instance.
public ListOption removeAttributesOption = new ListOption("removeAttributes", 'r',
    "Attributes to remove. Enter comma seperated list, " +
    "starting with 1 for first attribute.",
    new IntOption("removeAttribute", ' ', "Attribute to remove.",-1),
    new Option[0], ',');

// By default non-numeric attributes (except the class) are filtered out;
// set this flag to keep them.
public FlagOption keepNonNumericalAttrOption =
    new FlagOption("keepNonNumericalAttr", 'K',
    "Non-numerical attributes are being filtered by default " +
    "(except the class attribute). " +
    "Check to keep all attributes. This option is being " +
    "overwritten by the manual attribute removal filter.");

// Incremental one-instance dataset used to read the ARFF file.
protected Instances instances;
// Reader over the ARFF file; null once end of file has been reached.
protected Reader fileReader;
// True once the lookahead read has hit end of file.
protected boolean hitEndOfFile;
// One-instance lookahead buffer returned by nextInstance().
protected Instance lastInstanceRead;
// Count of instances read since the last restart().
protected int numInstancesRead;
// Tracks read progress for estimatedRemainingInstances().
protected InputStreamProgressMonitor fileProgressMonitor;
// Sorted 0-based attribute indices to delete (set in restart()).
private Integer[] removeAttributes = null;
// Header template after attribute removal (set in restart()).
private Instances filteredDataset = null;
// Per remaining attribute: {min, max, diff = max - min}; null unless normalizing.
private ArrayList<Double[]> valuesMinMaxDiff = null;

public FileStream(){
    //remove numAttritube Option from ClusteringStream as that is being set internally for Filestream
    numAttsOption = null;
}
@Override
public void prepareForUseImpl(TaskMonitor monitor,
        ObjectRepository repository) {
    // All setup (opening the file, building the filtered header) lives in restart().
    restart();
}

/** Returns the header of the attribute-filtered dataset. */
public InstancesHeader getHeader() {
    return new InstancesHeader(this.filteredDataset);
}

/**
 * Extrapolates the remaining instance count from the fraction of the file
 * read so far.
 *
 * @return the estimate, or -1 while no estimate is possible yet
 */
public long estimatedRemainingInstances() {
    double progressFraction = this.fileProgressMonitor
            .getProgressFraction();
    if ((progressFraction > 0.0) && (this.numInstancesRead > 0)) {
        return (long) ((this.numInstancesRead / progressFraction) - this.numInstancesRead);
    }
    return -1;
}

/** Returns true while the lookahead read has not hit end of file. */
public boolean hasMoreInstances() {
    return !this.hitEndOfFile;
}

/**
 * Returns the buffered instance and pre-reads the next one; the
 * one-instance lookahead keeps {@code hasMoreInstances()} accurate.
 */
public Instance nextInstance() {
    Instance prevInstance = this.lastInstanceRead;
    this.hitEndOfFile = !readNextInstanceFromFile();
    return prevInstance;
}

/** This stream can always be restarted by rereading the file. */
public boolean isRestartable() {
    return true;
}
/**
 * (Re)opens the ARFF file, determines which attributes to remove,
 * optionally computes normalization statistics, builds the filtered
 * dataset header and pre-reads the first instance.
 *
 * @throws RuntimeException if the file cannot be (re)opened or read
 */
public void restart() {
    try {
        if (fileReader != null) {
            fileReader.close();
        }
        InputStream fileStream = new FileInputStream(arffFileOption.getFile());
        fileProgressMonitor = new InputStreamProgressMonitor(fileStream);
        fileReader = new BufferedReader(new InputStreamReader(fileProgressMonitor));
        instances = new Instances(fileReader, 1);
        // classIndex option: -1 -> last attribute, 0 -> none, k > 0 -> attribute k (1-based).
        if (classIndexOption.getValue() < 0) {
            instances.setClassIndex(instances.numAttributes() - 1);
        } else if (classIndexOption.getValue() > 0) {
            instances.setClassIndex(classIndexOption.getValue() - 1);
        }
        // Use a hash set to drop duplicates and attribute numbers that aren't valid.
        HashSet<Integer> attributes = new HashSet<Integer>();
        Option[] rawAttributeList = removeAttributesOption.getList();
        for (int i = 0; i < rawAttributeList.length; i++) {
            int attribute = ((IntOption) rawAttributeList[i]).getValue();
            if (1 <= attribute && attribute <= instances.numAttributes()) {
                attributes.add(attribute - 1);
            } else {
                System.out.println("Found invalid attribute removal description: " +
                        "Attribute option " + attribute
                        + " will be ignored. Filestream only has "
                        + instances.numAttributes() + " attributes.");
            }
        }
        // Remove all non-numeric attributes except the class attribute.
        if (!keepNonNumericalAttrOption.isSet()) {
            for (int i = 0; i < instances.numAttributes(); i++) {
                if (!instances.attribute(i).isNumeric() && i != instances.classIndex()) {
                    attributes.add(i);
                }
            }
        }
        // Read min/max values in case we need to normalize.
        if (normalizeOption.isSet()) {
            valuesMinMaxDiff = readMinMaxDiffValues(attributes);
        }
        // Convert the set to a sorted array so attributes can be deleted back to front.
        removeAttributes = attributes.toArray(new Integer[0]);
        Arrays.sort(removeAttributes);
        // Set updated number of attributes (class attribute included).
        numAttsOption = new IntOption("numAtts", 'a', "", instances.numAttributes() - removeAttributes.length);
        if (removeAttributes.length > 0) {
            System.out.println("Removing the following attributes:");
            for (int i = 0; i < removeAttributes.length; i++) {
                System.out.println((removeAttributes[i] + 1) + " "
                        + instances.attribute(removeAttributes[i]).name());
            }
        }
        // Create the filtered dataset header; delete back to front so the
        // remaining indices stay valid. (A dead "if(true){}" no-op block
        // that used to sit inside this loop has been removed.)
        filteredDataset = new Instances(instances);
        for (int i = removeAttributes.length - 1; i >= 0; i--) {
            filteredDataset.deleteAttributeAt(removeAttributes[i]);
        }
        this.numInstancesRead = 0;
        this.lastInstanceRead = null;
        // Prime the one-instance lookahead buffer.
        this.hitEndOfFile = !readNextInstanceFromFile();
    } catch (IOException ioe) {
        throw new RuntimeException("ArffFileStream restart failed.", ioe);
    }
}
/**
 * Reads the next instance from the open ARFF reader into
 * {@code lastInstanceRead}, applying attribute removal and (when the
 * normalize option is set) 0-1 normalization. Closes the reader on end
 * of file.
 *
 * @return true if an instance was read, false on end of file
 * @throws RuntimeException if an I/O error occurs while reading
 */
protected boolean readNextInstanceFromFile() {
    try {
        if (this.instances.readInstance(this.fileReader)) {
            Instance rawInstance = this.instances.instance(0);
            //remove dataset from instance so we can delete attributes
            rawInstance.setDataset(null);
            // Delete back to front so the remaining indices stay valid.
            for (int i = removeAttributes.length-1; i >= 0 ; i--) {
                rawInstance.deleteAttributeAt(removeAttributes[i]);
            }
            //set adjusted dataset for instance
            rawInstance.setDataset(filteredDataset);
            if (normalizeOption.isSet() && valuesMinMaxDiff != null) {
                // valuesMinMaxDiff.get(i) = {min, max, diff = max - min}.
                for (int i = 0; i < rawInstance.numAttributes() ; i++) {
                    if (valuesMinMaxDiff.get(i)[2] != 1 && // Already normalized
                            valuesMinMaxDiff.get(i)[2] != 0 && // Max. value is 0 (unable to be normalized)
                            i != rawInstance.classIndex()) { // Class label is not subject to be normalized
                        double v = rawInstance.value(i);
                        // Scale into [0,1]: (v - min) / (max - min).
                        v = (v - valuesMinMaxDiff.get(i)[0]) / valuesMinMaxDiff.get(i)[2];
                        rawInstance.setValue(i, v);
                    }
                }
            }
            this.lastInstanceRead = rawInstance;
            this.instances.delete(); // keep instances clean
            this.numInstancesRead++;
            return true;
        }
        // End of file: release the reader.
        if (this.fileReader != null) {
            this.fileReader.close();
            this.fileReader = null;
        }
        return false;
    } catch (IOException ioe) {
        throw new RuntimeException(
                "ArffFileStream failed to read instance from stream.", ioe);
    }
}
/**
* @param ignoredAttributes Attributes that will be ignored
* @return A list with min/max and diff=max-min values per attribute of the arff file
*/
/**
 * Scans the whole ARFF file once and collects, per retained (non-ignored)
 * attribute, its minimum, maximum and diff = max - min. The result is used by
 * {@code readNextInstanceFromFile} to normalize values when the normalize
 * option is set.
 *
 * Fix: the previous version only closed the reader on the success path, so
 * the underlying FileInputStream leaked whenever reading threw an
 * IOException. The reader is now always closed in a finally block.
 *
 * @param ignoredAttributes attribute indices to skip; may be null (treated as empty)
 * @return a list of {min, max, diff} triples, one per retained attribute
 * @throws RuntimeException wrapping any IOException raised while reading the file
 */
protected ArrayList<Double[]> readMinMaxDiffValues(HashSet<Integer> ignoredAttributes) {
    ArrayList<Double[]> valuesMinMaxDiff = null;
    if (ignoredAttributes == null) {
        ignoredAttributes = new HashSet<Integer>();
    }
    Reader fileReader = null;
    try {
        InputStream fileStream = new FileInputStream(arffFileOption.getFile());
        InputStreamProgressMonitor fileProgressMonitor = new InputStreamProgressMonitor(fileStream);
        fileReader = new BufferedReader(new InputStreamReader(fileProgressMonitor));
        Instances instances = new Instances(fileReader, 1);
        // One {min, max, diff} slot per attribute that is NOT ignored.
        // NOTE(review): assumes every entry of ignoredAttributes is a valid
        // attribute index of this file - confirm against the caller.
        valuesMinMaxDiff = new ArrayList<Double[]>();
        for (int i = 0; i < instances.numAttributes() - ignoredAttributes.size(); i++) {
            Double[] values = {Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0.0};
            valuesMinMaxDiff.add(values);
        }
        System.out.print("Reading arff file for normalization...");
        int counter = 0;
        while (instances.readInstance(fileReader)) {
            Instance instance = instances.instance(0);
            // 'a' indexes the compacted statistics list (ignored attributes skipped)
            int a = 0;
            for (int i = 0; i < instances.numAttributes(); i++) {
                if (!ignoredAttributes.contains(i)) {
                    double value = instance.value(i);
                    if (value < valuesMinMaxDiff.get(a)[0])
                        valuesMinMaxDiff.get(a)[0] = value;
                    if (value > valuesMinMaxDiff.get(a)[1])
                        valuesMinMaxDiff.get(a)[1] = value;
                    a++;
                }
            }
            instances.delete();
            // show some progress: one dot per 10000 instances
            counter++;
            if (counter >= 10000) {
                counter = 0;
                System.out.print(".");
            }
        }
        System.out.println("done!");
        // derive diff = max - min for each retained attribute
        for (int i = 0; i < valuesMinMaxDiff.size(); i++) {
            valuesMinMaxDiff.get(i)[2] = valuesMinMaxDiff.get(i)[1] - valuesMinMaxDiff.get(i)[0];
        }
        return valuesMinMaxDiff;
    } catch (IOException ioe) {
        throw new RuntimeException(
                "ArffFileStream failed to read instance from stream.", ioe);
    } finally {
        // always release the file handle, even when reading failed
        if (fileReader != null) {
            try {
                fileReader.close();
            } catch (IOException ignored) {
                // best-effort close; the primary result/exception takes precedence
            }
        }
    }
}
/** Intentionally empty: this stream contributes no textual description. */
public void getDescription(StringBuilder sb, int indent) {
    // no description generated for this stream
}
}
| Java |
/*
* ClusterEvent.java
* Copyright (C) 2010 RWTH Aachen University, Germany
* @author Jansen (moa@cs.rwth-aachen.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package moa.streams.clustering;
import java.util.EventObject;
/**
 * Immutable event object fired by clustering streams when the cluster model
 * changes (e.g. merge, split, create, delete events).
 *
 * Fix: {@link EventObject} is {@code Serializable}, but this class declared
 * no {@code serialVersionUID}; one is added to pin the serialized form.
 * The fields are now {@code final}, making the event immutable and safe to
 * share between listeners.
 */
public class ClusterEvent extends EventObject {

    private static final long serialVersionUID = 1L;

    /** Event category, e.g. "Merge" or "Split". */
    private final String type;

    /** Human-readable description of the event. */
    private final String message;

    /** Stream time at which the event occurred. */
    private final long timestamp;

    /**
     * Creates a new cluster event.
     *
     * @param source    the object on which the event initially occurred
     * @param timestamp stream time at which the event occurred
     * @param type      event category, e.g. "Merge"
     * @param message   human-readable description of the event
     */
    public ClusterEvent(Object source, long timestamp, String type, String message) {
        super(source);
        this.type = type;
        this.message = message;
        this.timestamp = timestamp;
    }

    /** @return the human-readable event description */
    public String getMessage() {
        return message;
    }

    /** @return the stream time at which the event occurred */
    public long getTimestamp() {
        return timestamp;
    }

    /** @return the event category, e.g. "Merge" */
    public String getType() {
        return type;
    }
}
| Java |
/*
* ClusterEventListener.java
* Copyright (C) 2010 RWTH Aachen University, Germany
* @author Jansen (moa@cs.rwth-aachen.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package moa.streams.clustering;
import java.util.EventListener;
/**
 * Listener interface for receiving {@link ClusterEvent}s fired by clustering
 * streams when the cluster model changes.
 */
public interface ClusterEventListener extends EventListener {

    /**
     * Invoked when the observed stream's clustering changes
     * (e.g. clusters merge, split, appear or disappear).
     *
     * @param e the event describing the change
     */
    public void changeCluster(ClusterEvent e);
}
| Java |
/**
* RandomRBFGeneratorEvents.java
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz) - RandomRBFGenerator
* Timm Jansen (moa@cs.rwth-aachen.de) - Events
* @editor Yunsu Kim
*
* Last edited: 2013/06/02
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package moa.streams.clustering;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.Random;
import java.util.Vector;
import moa.cluster.Clustering;
import moa.cluster.SphereCluster;
import moa.core.AutoExpandVector;
import moa.core.InstancesHeader;
import moa.core.ObjectRepository;
import moa.gui.visualization.DataPoint;
import moa.options.FlagOption;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.streams.InstanceStream;
import moa.tasks.TaskMonitor;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
public class RandomRBFGeneratorEvents extends ClusteringStream {
private transient Vector listeners;
private static final long serialVersionUID = 1L;
public IntOption modelRandomSeedOption = new IntOption("modelRandomSeed",
'm', "Seed for random generation of model.", 1);
public IntOption instanceRandomSeedOption = new IntOption(
"instanceRandomSeed", 'i',
"Seed for random generation of instances.", 5);
public IntOption numClusterOption = new IntOption("numCluster", 'K',
"The average number of centroids in the model.", 5, 1, Integer.MAX_VALUE);
public IntOption numClusterRangeOption = new IntOption("numClusterRange", 'k',
"Deviation of the number of centroids in the model.", 3, 0, Integer.MAX_VALUE);
public FloatOption kernelRadiiOption = new FloatOption("kernelRadius", 'R',
"The average radii of the centroids in the model.", 0.07, 0, 1);
public FloatOption kernelRadiiRangeOption = new FloatOption("kernelRadiusRange", 'r',
"Deviation of average radii of the centroids in the model.", 0, 0, 1);
public FloatOption densityRangeOption = new FloatOption("densityRange", 'd',
"Offset of the average weight a cluster has. Value of 0 means all cluster " +
"contain the same amount of points.", 0, 0, 1);
public IntOption speedOption = new IntOption("speed", 'V',
"Kernels move a predefined distance of 0.01 every X points", 500, 1, Integer.MAX_VALUE);
public IntOption speedRangeOption = new IntOption("speedRange", 'v',
"Speed/Velocity point offset", 0, 0, Integer.MAX_VALUE);
public FloatOption noiseLevelOption = new FloatOption("noiseLevel", 'N',
"Noise level", 0.1, 0, 1);
public FlagOption noiseInClusterOption = new FlagOption("noiseInCluster", 'n',
"Allow noise to be placed within a cluster");
public IntOption eventFrequencyOption = new IntOption("eventFrequency", 'E',
"Event frequency. Enable at least one of the events below and set numClusterRange!", 30000, 0, Integer.MAX_VALUE);
public FlagOption eventMergeSplitOption = new FlagOption("eventMergeSplitOption", 'M',
"Enable merging and splitting of clusters. Set eventFrequency and numClusterRange!");
public FlagOption eventDeleteCreateOption = new FlagOption("eventDeleteCreate", 'C',
"Enable emering and disapperaing of clusters. Set eventFrequency and numClusterRange!");
private double merge_threshold = 0.7;
private int kernelMovePointFrequency = 10;
private double maxDistanceMoveThresholdByStep = 0.01;
private int maxOverlapFitRuns = 50;
private double eventFrequencyRange = 0;
private boolean debug = false;
private AutoExpandVector<GeneratorCluster> kernels;
protected Random instanceRandom;
protected InstancesHeader streamHeader;
private int numGeneratedInstances;
private int numActiveKernels;
private int nextEventCounter;
private int nextEventChoice = -1;
private int clusterIdCounter;
private GeneratorCluster mergeClusterA;
private GeneratorCluster mergeClusterB;
private boolean mergeKernelsOverlapping = false;
private class GeneratorCluster{
//TODO: points is redundant to microclusterpoints, we need to come
//up with a good strategy that microclusters get updated and
//rebuild if needed. Idea: Sort microclusterpoints by timestamp and let
// microclusterdecay hold the timestamp for when the last point in a
//microcluster gets kicked, then we rebuild... or maybe not... could be
//same as searching for point to be kicked. more likely is we rebuild
//fewer times then insert.
SphereCluster generator;
int kill = -1;
boolean merging = false;
double[] moveVector;
int totalMovementSteps;
int currentMovementSteps;
boolean isSplitting = false;
LinkedList<DataPoint> points = new LinkedList<DataPoint>();
ArrayList<SphereCluster> microClusters = new ArrayList<SphereCluster>();
ArrayList<ArrayList<DataPoint>> microClustersPoints = new ArrayList();
ArrayList<Integer> microClustersDecay = new ArrayList();
public GeneratorCluster(int label) {
boolean outofbounds = true;
int tryCounter = 0;
while(outofbounds && tryCounter < maxOverlapFitRuns){
tryCounter++;
outofbounds = false;
double[] center = new double [numAttsOption.getValue()];
double radius = kernelRadiiOption.getValue()+(instanceRandom.nextBoolean()?-1:1)*kernelRadiiRangeOption.getValue()*instanceRandom.nextDouble();
while(radius <= 0){
radius = kernelRadiiOption.getValue()+(instanceRandom.nextBoolean()?-1:1)*kernelRadiiRangeOption.getValue()*instanceRandom.nextDouble();
}
for (int j = 0; j < numAttsOption.getValue(); j++) {
center[j] = instanceRandom.nextDouble();
if(center[j]- radius < 0 || center[j] + radius > 1){
outofbounds = true;
break;
}
}
generator = new SphereCluster(center, radius);
}
if(tryCounter < maxOverlapFitRuns){
generator.setId(label);
double avgWeight = 1.0/numClusterOption.getValue();
double weight = avgWeight + (instanceRandom.nextBoolean()?-1:1)*avgWeight*densityRangeOption.getValue()*instanceRandom.nextDouble();
generator.setWeight(weight);
setDesitnation(null);
}
else{
generator = null;
kill = 0;
System.out.println("Tried "+maxOverlapFitRuns+" times to create kernel. Reduce average radii." );
}
}
public GeneratorCluster(int label, SphereCluster cluster) {
this.generator = cluster;
cluster.setId(label);
setDesitnation(null);
}
public int getWorkID(){
for(int c = 0; c < kernels.size(); c++){
if(kernels.get(c).equals(this))
return c;
}
return -1;
}
private void updateKernel(){
if(kill == 0){
kernels.remove(this);
}
if(kill > 0){
kill--;
}
//we could be lot more precise if we would keep track of timestamps of points
//then we could remove all old points and rebuild the cluster on up to date point base
//BUT worse the effort??? so far we just want to avoid overlap with this, so its more
//konservative as needed. Only needs to change when we need a thighter representation
for (int m = 0; m < microClusters.size(); m++) {
if(numGeneratedInstances-microClustersDecay.get(m) > decayHorizonOption.getValue()){
microClusters.remove(m);
microClustersPoints.remove(m);
microClustersDecay.remove(m);
}
}
if(!points.isEmpty() && numGeneratedInstances-points.getFirst().getTimestamp() >= decayHorizonOption.getValue()){
// if(debug)
// System.out.println("Cleaning up macro cluster "+generator.getId());
points.removeFirst();
}
}
private void addInstance(Instance instance){
DataPoint point = new DataPoint(instance, numGeneratedInstances);
points.add(point);
int minMicroIndex = -1;
double minHullDist = Double.MAX_VALUE;
boolean inserted = false;
//we favour more recently build clusters so we can remove earlier cluster sooner
for (int m = microClusters.size()-1; m >=0 ; m--) {
SphereCluster micro = microClusters.get(m);
double hulldist = micro.getCenterDistance(point)-micro.getRadius();
//point fits into existing cluster
if(hulldist <= 0){
microClustersPoints.get(m).add(point);
microClustersDecay.set(m, numGeneratedInstances);
inserted = true;
break;
}
//if not, check if its at least the closest cluster
else{
if(hulldist < minHullDist){
minMicroIndex = m;
minHullDist = hulldist;
}
}
}
//Reseting index choice for alternative cluster building
int alt = 1;
if(alt == 1)
minMicroIndex = -1;
if(!inserted){
//add to closest cluster and expand cluster
if(minMicroIndex!=-1){
microClustersPoints.get(minMicroIndex).add(point);
//we should keep the miniball instances and just check in
//new points instead of rebuilding the whole thing
SphereCluster s = new SphereCluster(microClustersPoints.get(minMicroIndex),numAttsOption.getValue());
//check if current microcluster is bigger then generating cluster
if(s.getRadius() > generator.getRadius()){
//remove previously added point
microClustersPoints.get(minMicroIndex).remove(microClustersPoints.get(minMicroIndex).size()-1);
minMicroIndex = -1;
}
else{
microClusters.set(minMicroIndex, s);
microClustersDecay.set(minMicroIndex, numGeneratedInstances);
}
}
//minMicroIndex might have been reset above
//create new micro cluster
if(minMicroIndex == -1){
ArrayList<DataPoint> microPoints = new ArrayList<DataPoint>();
microPoints.add(point);
SphereCluster s;
if(alt == 0)
s = new SphereCluster(microPoints,numAttsOption.getValue());
else
s = new SphereCluster(generator.getCenter(),generator.getRadius(),1);
microClusters.add(s);
microClustersPoints.add(microPoints);
microClustersDecay.add(numGeneratedInstances);
int id = 0;
while(id < kernels.size()){
if(kernels.get(id) == this)
break;
id++;
}
s.setGroundTruth(id);
}
}
}
private void move(){
if(currentMovementSteps < totalMovementSteps){
currentMovementSteps++;
if( moveVector == null){
return;
}
else{
double[] center = generator.getCenter();
boolean outofbounds = true;
while(outofbounds){
double radius = generator.getRadius();
outofbounds = false;
center = generator.getCenter();
for ( int d = 0; d < center.length; d++ ) {
center[d]+= moveVector[d];
if(center[d]- radius < 0 || center[d] + radius > 1){
outofbounds = true;
setDesitnation(null);
break;
}
}
}
generator.setCenter(center);
}
}
else{
if(!merging){
setDesitnation(null);
isSplitting = false;
}
}
}
void setDesitnation(double[] destination){
if(destination == null){
destination = new double [numAttsOption.getValue()];
for (int j = 0; j < numAttsOption.getValue(); j++) {
destination[j] = instanceRandom.nextDouble();
}
}
double[] center = generator.getCenter();
int dim = center.length;
double[] v = new double[dim];
for ( int d = 0; d < dim; d++ ) {
v[d]=destination[d]-center[d];
}
setMoveVector(v);
}
void setMoveVector(double[] vector){
//we are ignoring the steps, otherwise we have to change
//speed of the kernels, do we want that?
moveVector = vector;
int speedInPoints = speedOption.getValue();
if(speedRangeOption.getValue() > 0)
speedInPoints +=(instanceRandom.nextBoolean()?-1:1)*instanceRandom.nextInt(speedRangeOption.getValue());
if(speedInPoints < 1) speedInPoints = speedOption.getValue();
double length = 0;
for ( int d = 0; d < moveVector.length; d++ ) {
length+=Math.pow(vector[d],2);
}
length = Math.sqrt(length);
totalMovementSteps = (int)(length/(maxDistanceMoveThresholdByStep*kernelMovePointFrequency)*speedInPoints);
for ( int d = 0; d < moveVector.length; d++ ) {
moveVector[d]/=(double)totalMovementSteps;
}
currentMovementSteps = 0;
// if(debug){
// System.out.println("Setting new direction for C"+generator.getId()+": distance "
// +length+" in "+totalMovementSteps+" steps");
// }
}
private String tryMerging(GeneratorCluster merge){
String message = "";
double overlapDegree = generator.overlapRadiusDegree(merge.generator);
if(overlapDegree > merge_threshold){
SphereCluster mcluster = merge.generator;
double radius = Math.max(generator.getRadius(), mcluster.getRadius());
generator.combine(mcluster);
// //adjust radius, get bigger and bigger with high dim data
generator.setRadius(radius);
// double[] center = generator.getCenter();
// double[] mcenter = mcluster.getCenter();
// double weight = generator.getWeight();
// double mweight = generator.getWeight();
//// for (int i = 0; i < center.length; i++) {
//// center[i] = (center[i] * weight + mcenter[i] * mweight) / (mweight + weight);
//// }
// generator.setWeight(weight + mweight);
message = "Clusters merging: "+mergeClusterB.generator.getId()+" into "+mergeClusterA.generator.getId();
//clean up and restet merging stuff
//mark kernel so it gets killed when it doesn't contain any more instances
merge.kill = decayHorizonOption.getValue();
//set weight to 0 so no new instances will be created in the cluster
mcluster.setWeight(0.0);
normalizeWeights();
numActiveKernels--;
mergeClusterB = mergeClusterA = null;
merging = false;
mergeKernelsOverlapping = false;
}
else{
if(overlapDegree > 0 && !mergeKernelsOverlapping){
mergeKernelsOverlapping = true;
message = "Merge overlapping started";
}
}
return message;
}
private String splitKernel(){
isSplitting = true;
//todo radius range
double radius = kernelRadiiOption.getValue();
double avgWeight = 1.0/numClusterOption.getValue();
double weight = avgWeight + avgWeight*densityRangeOption.getValue()*instanceRandom.nextDouble();
SphereCluster spcluster = null;
double[] center = generator.getCenter();
spcluster = new SphereCluster(center, radius, weight);
if(spcluster !=null){
GeneratorCluster gc = new GeneratorCluster(clusterIdCounter++, spcluster);
gc.isSplitting = true;
kernels.add(gc);
normalizeWeights();
numActiveKernels++;
return "Split from Kernel "+generator.getId();
}
else{
System.out.println("Tried to split new kernel from C"+generator.getId()+
". Not enough room for new cluster, decrease average radii, number of clusters or enable overlap.");
return "";
}
}
private String fadeOut(){
kill = decayHorizonOption.getValue();
generator.setWeight(0.0);
numActiveKernels--;
normalizeWeights();
return "Fading out C"+generator.getId();
}
}
public RandomRBFGeneratorEvents() {
noiseInClusterOption.set();
// eventDeleteCreateOption.set();
// eventMergeSplitOption.set();
}
public InstancesHeader getHeader() {
return streamHeader;
}
public long estimatedRemainingInstances() {
return -1;
}
public boolean hasMoreInstances() {
return true;
}
public boolean isRestartable() {
return true;
}
@Override
public void prepareForUseImpl(TaskMonitor monitor, ObjectRepository repository) {
monitor.setCurrentActivity("Preparing random RBF...", -1.0);
generateHeader();
restart();
}
public void restart() {
instanceRandom = new Random(instanceRandomSeedOption.getValue());
nextEventCounter = eventFrequencyOption.getValue();
nextEventChoice = getNextEvent();
numActiveKernels = 0;
numGeneratedInstances = 0;
clusterIdCounter = 0;
mergeClusterA = mergeClusterB = null;
kernels = new AutoExpandVector<GeneratorCluster>();
initKernels();
}
protected void generateHeader() { // 2013/06/02: Noise label
ArrayList<Attribute> attributes = new ArrayList<Attribute>();
for (int i = 0; i < this.numAttsOption.getValue(); i++) {
attributes.add(new Attribute("att" + (i + 1)));
}
ArrayList<String> classLabels = new ArrayList<String>();
for (int i = 0; i < this.numClusterOption.getValue(); i++) {
classLabels.add("class" + (i + 1));
}
if (noiseLevelOption.getValue() > 0) classLabels.add("noise"); // The last label = "noise"
attributes.add(new Attribute("class", classLabels));
streamHeader = new InstancesHeader(new Instances(getCLICreationString(InstanceStream.class), attributes, 0));
streamHeader.setClassIndex(streamHeader.numAttributes() - 1);
}
protected void initKernels() {
for (int i = 0; i < numClusterOption.getValue(); i++) {
kernels.add(new GeneratorCluster(clusterIdCounter));
numActiveKernels++;
clusterIdCounter++;
}
normalizeWeights();
}
public Instance nextInstance() {
numGeneratedInstances++;
eventScheduler();
//make room for the classlabel
double[] values_new = new double [numAttsOption.getValue()+1];
double[] values = null;
int clusterChoice = -1;
if(instanceRandom.nextDouble() > noiseLevelOption.getValue()){
clusterChoice = chooseWeightedElement();
values = kernels.get(clusterChoice).generator.sample(instanceRandom).toDoubleArray();
}
else{
//get ranodm noise point
values = getNoisePoint();
}
if(Double.isNaN(values[0])){
System.out.println("Instance corrupted:"+numGeneratedInstances);
}
System.arraycopy(values, 0, values_new, 0, values.length);
Instance inst = new DenseInstance(1.0, values_new);
inst.setDataset(getHeader());
if(clusterChoice == -1){
// 2013/06/02 (Yunsu Kim)
// Noise instance has the last class value instead of "-1"
// Preventing ArrayIndexOutOfBoundsException in WriteStreamToARFFFile
inst.setClassValue(numClusterOption.getValue());
}
else{
inst.setClassValue(kernels.get(clusterChoice).generator.getId());
//Do we need micro cluster representation if have overlapping clusters?
//if(!overlappingOption.isSet())
kernels.get(clusterChoice).addInstance(inst);
}
// System.out.println(numGeneratedInstances+": Overlap is"+updateOverlaps());
return inst;
}
public Clustering getGeneratingClusters(){
Clustering clustering = new Clustering();
for (int c = 0; c < kernels.size(); c++) {
clustering.add(kernels.get(c).generator);
}
return clustering;
}
public Clustering getMicroClustering(){
Clustering clustering = new Clustering();
int id = 0;
for (int c = 0; c < kernels.size(); c++) {
for (int m = 0; m < kernels.get(c).microClusters.size(); m++) {
kernels.get(c).microClusters.get(m).setId(id);
kernels.get(c).microClusters.get(m).setGroundTruth(kernels.get(c).generator.getId());
clustering.add(kernels.get(c).microClusters.get(m));
id++;
}
}
//System.out.println("numMicroKernels "+clustering.size());
return clustering;
}
/**************************** EVENTS ******************************************/
private void eventScheduler(){
for ( int i = 0; i < kernels.size(); i++ ) {
kernels.get(i).updateKernel();
}
nextEventCounter--;
//only move kernels every 10 points, performance reasons????
//should this be randomized as well???
if(nextEventCounter%kernelMovePointFrequency == 0){
//move kernels
for ( int i = 0; i < kernels.size(); i++ ) {
kernels.get(i).move();
//overlapControl();
}
}
if(eventFrequencyOption.getValue() == 0){
return;
}
String type ="";
String message ="";
boolean eventFinished = false;
switch(nextEventChoice){
case 0:
if(numActiveKernels > 1 && numActiveKernels > numClusterOption.getValue() - numClusterRangeOption.getValue()){
message = mergeKernels(nextEventCounter);
type = "Merge";
}
if(mergeClusterA==null && mergeClusterB==null && message.startsWith("Clusters merging")){
eventFinished = true;
}
break;
case 1:
if(nextEventCounter<=0){
if(numActiveKernels < numClusterOption.getValue() + numClusterRangeOption.getValue()){
type = "Split";
message = splitKernel();
}
eventFinished = true;
}
break;
case 2:
if(nextEventCounter<=0){
if(numActiveKernels > 1 && numActiveKernels > numClusterOption.getValue() - numClusterRangeOption.getValue()){
message = fadeOut();
type = "Delete";
}
eventFinished = true;
}
break;
case 3:
if(nextEventCounter<=0){
if(numActiveKernels < numClusterOption.getValue() + numClusterRangeOption.getValue()){
message = fadeIn();
type = "Create";
}
eventFinished = true;
}
break;
}
if (eventFinished){
nextEventCounter = (int)(eventFrequencyOption.getValue()+(instanceRandom.nextBoolean()?-1:1)*eventFrequencyOption.getValue()*eventFrequencyRange*instanceRandom.nextDouble());
nextEventChoice = getNextEvent();
//System.out.println("Next event choice: "+nextEventChoice);
}
if(!message.isEmpty()){
message+=" (numKernels = "+numActiveKernels+" at "+numGeneratedInstances+")";
if(!type.equals("Merge") || message.startsWith("Clusters merging"))
fireClusterChange(numGeneratedInstances, type, message);
}
}
private int getNextEvent() {
int choice = -1;
boolean lowerLimit = numActiveKernels <= numClusterOption.getValue() - numClusterRangeOption.getValue();
boolean upperLimit = numActiveKernels >= numClusterOption.getValue() + numClusterRangeOption.getValue();
if(!lowerLimit || !upperLimit){
int mode = -1;
if(eventDeleteCreateOption.isSet() && eventMergeSplitOption.isSet()){
mode = instanceRandom.nextInt(2);
}
if(mode==0 || (mode==-1 && eventMergeSplitOption.isSet())){
//have we reached a limit? if not free choice
if(!lowerLimit && !upperLimit)
choice = instanceRandom.nextInt(2);
else
//we have a limit. if lower limit, choose split
if(lowerLimit)
choice = 1;
//otherwise we reached upper level, choose merge
else
choice = 0;
}
if(mode==1 || (mode==-1 && eventDeleteCreateOption.isSet())){
//have we reached a limit? if not free choice
if(!lowerLimit && !upperLimit)
choice = instanceRandom.nextInt(2)+2;
else
//we have a limit. if lower limit, choose create
if(lowerLimit)
choice = 3;
//otherwise we reached upper level, choose delete
else
choice = 2;
}
}
return choice;
}
private String fadeOut(){
int id = instanceRandom.nextInt(kernels.size());
while(kernels.get(id).kill!=-1)
id = instanceRandom.nextInt(kernels.size());
String message = kernels.get(id).fadeOut();
return message;
}
private String fadeIn(){
GeneratorCluster gc = new GeneratorCluster(clusterIdCounter++);
kernels.add(gc);
numActiveKernels++;
normalizeWeights();
return "Creating new cluster";
}
private String changeWeight(boolean increase){
double changeRate = 0.1;
int id = instanceRandom.nextInt(kernels.size());
while(kernels.get(id).kill!=-1)
id = instanceRandom.nextInt(kernels.size());
int sign = 1;
if(!increase)
sign = -1;
double weight_old = kernels.get(id).generator.getWeight();
double delta = sign*numActiveKernels*instanceRandom.nextDouble()*changeRate;
kernels.get(id).generator.setWeight(weight_old+delta);
normalizeWeights();
String message;
if(increase)
message = "Increase ";
else
message = "Decrease ";
message+=" weight on Cluster "+id+" from "+weight_old+" to "+(weight_old+delta);
return message;
}
private String changeRadius(boolean increase){
double maxChangeRate = 0.1;
int id = instanceRandom.nextInt(kernels.size());
while(kernels.get(id).kill!=-1)
id = instanceRandom.nextInt(kernels.size());
int sign = 1;
if(!increase)
sign = -1;
double r_old = kernels.get(id).generator.getRadius();
double r_new =r_old+sign*r_old*instanceRandom.nextDouble()*maxChangeRate;
if(r_new >= 0.5) return "Radius to big";
kernels.get(id).generator.setRadius(r_new);
String message;
if(increase)
message = "Increase ";
else
message = "Decrease ";
message+=" radius on Cluster "+id+" from "+r_old+" to "+r_new;
return message;
}
private String splitKernel(){
int id = instanceRandom.nextInt(kernels.size());
while(kernels.get(id).kill!=-1)
id = instanceRandom.nextInt(kernels.size());
String message = kernels.get(id).splitKernel();
return message;
}
private String mergeKernels(int steps){
if(numActiveKernels >1 && ((mergeClusterA == null && mergeClusterB == null))){
//choose clusters to merge
double diseredDist = steps / speedOption.getValue() * maxDistanceMoveThresholdByStep;
double minDist = Double.MAX_VALUE;
// System.out.println("DisredDist:"+(2*diseredDist));
for(int i = 0; i < kernels.size(); i++){
for(int j = 0; j < i; j++){
if(kernels.get(i).kill!=-1 || kernels.get(j).kill!=-1){
continue;
}
else{
double kernelDist = kernels.get(i).generator.getCenterDistance(kernels.get(j).generator);
double d = kernelDist-2*diseredDist;
// System.out.println("Dist:"+i+" / "+j+" "+d);
if(Math.abs(d) < minDist &&
(minDist != Double.MAX_VALUE || d>0 || Math.abs(d) < 0.001)){
minDist = Math.abs(d);
mergeClusterA = kernels.get(i);
mergeClusterB = kernels.get(j);
}
}
}
}
if(mergeClusterA!=null && mergeClusterB!=null){
double[] merge_point = mergeClusterA.generator.getCenter();
double[] v = mergeClusterA.generator.getDistanceVector(mergeClusterB.generator);
for (int i = 0; i < v.length; i++) {
merge_point[i]= merge_point[i]+v[i]*0.5;
}
mergeClusterA.merging = true;
mergeClusterB.merging = true;
mergeClusterA.setDesitnation(merge_point);
mergeClusterB.setDesitnation(merge_point);
if(debug){
System.out.println("Center1"+Arrays.toString(mergeClusterA.generator.getCenter()));
System.out.println("Center2"+Arrays.toString(mergeClusterB.generator.getCenter()));
System.out.println("Vector"+Arrays.toString(v));
System.out.println("Try to merge cluster "+mergeClusterA.generator.getId()+
" into "+mergeClusterB.generator.getId()+
" at "+Arrays.toString(merge_point)+
" time "+numGeneratedInstances);
}
return "Init merge";
}
}
if(mergeClusterA != null && mergeClusterB != null){
//movekernels will move the kernels close to each other,
//we just need to check and merge here if they are close enough
return mergeClusterA.tryMerging(mergeClusterB);
}
return "";
}
/************************* TOOLS **************************************/
public void getDescription(StringBuilder sb, int indent) {
}
private double[] getNoisePoint(){
double [] sample = new double [numAttsOption.getValue()];
boolean incluster = true;
int counter = 20;
while(incluster){
for (int j = 0; j < numAttsOption.getValue(); j++) {
sample[j] = instanceRandom.nextDouble();
}
incluster = false;
if(!noiseInClusterOption.isSet() && counter > 0){
counter--;
for(int c = 0; c < kernels.size(); c++){
for(int m = 0; m < kernels.get(c).microClusters.size(); m++){
Instance inst = new DenseInstance(1, sample);
if(kernels.get(c).microClusters.get(m).getInclusionProbability(inst) > 0){
incluster = true;
break;
}
}
if(incluster)
break;
}
}
}
// double [] sample = new double [numAttsOption.getValue()];
// for (int j = 0; j < numAttsOption.getValue(); j++) {
// sample[j] = instanceRandom.nextDouble();
// }
return sample;
}
private int chooseWeightedElement() {
double r = instanceRandom.nextDouble();
// Determine index of choosen element
int i = 0;
while (r > 0.0) {
r -= kernels.get(i).generator.getWeight();
i++;
}
--i; // Overcounted once
//System.out.println(i);
return i;
}
private void normalizeWeights(){
double sumWeights = 0.0;
for (int i = 0; i < kernels.size(); i++) {
sumWeights+=kernels.get(i).generator.getWeight();
}
for (int i = 0; i < kernels.size(); i++) {
kernels.get(i).generator.setWeight(kernels.get(i).generator.getWeight()/sumWeights);
}
}
/*************** EVENT Listener *********************/
// should go into the superclass of the generator, create new one for cluster streams?
/** Add a listener */
synchronized public void addClusterChangeListener(ClusterEventListener l) {
if (listeners == null)
listeners = new Vector();
listeners.addElement(l);
}
/** Remove a listener */
synchronized public void removeClusterChangeListener(ClusterEventListener l) {
if (listeners == null)
listeners = new Vector();
listeners.removeElement(l);
}
/**
 * Fires a {@code ClusterEvent} carrying the given timestamp, type and
 * message to every registered listener. Does nothing when no listener
 * is registered.
 */
protected void fireClusterChange(long timestamp, String type, String message) {
    if (listeners == null || listeners.isEmpty()) {
        return; // nobody to notify
    }
    ClusterEvent event =
            new ClusterEvent(this, timestamp, type, message);
    // Snapshot the listener list under the lock so listeners added or
    // removed during delivery cannot disturb the iteration.
    Vector snapshot;
    synchronized (this) {
        snapshot = (Vector) listeners.clone();
    }
    // Deliver the event to each listener in registration order.
    for (Enumeration e = snapshot.elements(); e.hasMoreElements();) {
        ClusterEventListener l = (ClusterEventListener) e.nextElement();
        l.changeCluster(event);
    }
}
@Override
public String getPurposeString() {
// Short description shown by the MOA GUI/CLI for this generator.
return "Generates a random radial basis function stream.";
}
// No extra parameters beyond the declared options, so nothing to report.
public String getParameterString(){
return "";
}
}
| Java |
/*
* FilteredStream.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.streams;
import moa.core.InstancesHeader;
import moa.core.ObjectRepository;
import moa.options.AbstractOptionHandler;
import moa.options.ClassOption;
import moa.options.OptionHandler;
import moa.streams.filters.StreamFilter;
import moa.tasks.TaskMonitor;
import weka.core.Instance;
/**
* Class for representing a stream that is filtered.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class FilteredStream extends AbstractOptionHandler implements
        InstanceStream {

    private static final long serialVersionUID = 1L;

    /** Source stream whose instances are passed through the filter. */
    public ClassOption streamOption = new ClassOption("stream", 's',
            "Stream to filter.", InstanceStream.class,
            "generators.RandomTreeGenerator");

    /** Filter applied to every instance of the source stream. */
    public ClassOption filtersOption = new ClassOption("filters", 'f',
            "Filters to apply.", StreamFilter.class,
            "AddNoiseFilter");

    // Head of the materialized chain; every InstanceStream call delegates here.
    protected InstanceStream filterChain;

    @Override
    public String getPurposeString() {
        return "A stream that is filtered.";
    }

    @Override
    public void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        // Materialize the filter, prepare it if it takes options, then
        // wire it onto the prepared source stream.
        monitor.setCurrentActivity("Materializing filter "
                + "...", -1.0);
        StreamFilter filter = (StreamFilter) getPreparedClassOption(this.filtersOption);
        if (monitor.taskShouldAbort()) {
            return;
        }
        if (filter instanceof OptionHandler) {
            monitor.setCurrentActivity("Preparing filter "
                    + "...", -1.0);
            ((OptionHandler) filter).prepareForUse(monitor, repository);
            if (monitor.taskShouldAbort()) {
                return;
            }
        }
        InstanceStream source = (InstanceStream) getPreparedClassOption(this.streamOption);
        filter.setInputStream(source);
        this.filterChain = filter;
    }

    @Override
    public long estimatedRemainingInstances() {
        return this.filterChain.estimatedRemainingInstances();
    }

    @Override
    public InstancesHeader getHeader() {
        return this.filterChain.getHeader();
    }

    @Override
    public boolean hasMoreInstances() {
        return this.filterChain.hasMoreInstances();
    }

    @Override
    public boolean isRestartable() {
        return this.filterChain.isRestartable();
    }

    @Override
    public Instance nextInstance() {
        return this.filterChain.nextInstance();
    }

    @Override
    public void restart() {
        this.filterChain.restart();
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no description implemented.
    }
}
| Java |
/*
* AbstractConceptDriftGenerator.java
* Copyright (C) 2010 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
package moa.streams.generators.cd;
import java.util.ArrayList;
import java.util.Random;
import moa.core.InstancesHeader;
import moa.core.ObjectRepository;
import moa.options.AbstractOptionHandler;
import moa.options.FlagOption;
import moa.options.IntOption;
import moa.streams.InstanceStream;
import moa.streams.clustering.ClusterEvent;
import moa.tasks.TaskMonitor;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
public abstract class AbstractConceptDriftGenerator extends AbstractOptionHandler implements
        ConceptDriftGenerator {

    @Override
    public String getPurposeString() {
        return "Generates a stream problem of predicting concept drift.";
    }

    // Ground-truth change events recorded while generating the stream.
    protected ArrayList<ClusterEvent> clusterEvents;

    /** Returns the list of ground-truth change events recorded so far. */
    public ArrayList<ClusterEvent> getEventsList() {
        return this.clusterEvents;
    }

    private static final long serialVersionUID = 1L;

    public IntOption instanceRandomSeedOption = new IntOption(
            "instanceRandomSeed", 'i',
            "Seed for random generation of instances.", 1);

    public FlagOption notBinaryStreamOption = new FlagOption("notBinaryStream",
            'b', "Don't convert to a binary stream of 0 and 1.");

    public IntOption numInstancesConceptOption = new IntOption("numInstancesConcept", 'p',
            "The number of instances for each concept.", 500, 0, Integer.MAX_VALUE);

    protected InstancesHeader streamHeader;

    protected Random instanceRandom;

    // Number of instances per concept, copied from the option on prepare.
    protected int period;

    // Instances generated so far; subclasses derive the drift position
    // from this counter in nextValue().
    protected int numInstances;

    // True when the most recently generated instance coincided with a change.
    protected boolean change;

    @Override
    protected void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        this.period = numInstancesConceptOption.getValue();
        restart();
        // Generate the header: one input attribute (binary nominal unless
        // notBinaryStream is set), the ground-truth change flag, and the raw
        // ground-truth input value, which is the class attribute.
        FastVector attributes = new FastVector();
        FastVector binaryLabels = new FastVector();
        binaryLabels.addElement("0");
        binaryLabels.addElement("1");
        if (!this.notBinaryStreamOption.isSet()) {
            attributes.addElement(new Attribute("input", binaryLabels));
        } else {
            attributes.addElement(new Attribute("input"));
        }
        // Ground Truth
        attributes.addElement(new Attribute("change", binaryLabels));
        attributes.addElement(new Attribute("ground truth input"));
        this.streamHeader = new InstancesHeader(new Instances(
                getCLICreationString(InstanceStream.class), attributes, 0));
        this.streamHeader.setClassIndex(this.streamHeader.numAttributes() - 1);
        this.clusterEvents = new ArrayList<ClusterEvent>();
    }

    public long estimatedRemainingInstances() {
        return -1; // unbounded stream
    }

    public InstancesHeader getHeader() {
        return this.streamHeader;
    }

    public boolean hasMoreInstances() {
        return true;
    }

    public boolean isRestartable() {
        return true;
    }

    /** Returns the ground-truth value for the next instance. */
    protected abstract double nextValue();

    // Draws 1 with probability num, otherwise 0.
    private int nextBinaryValue(double num) {
        return this.instanceRandom.nextDouble() <= num ? 1 : 0;
    }

    public boolean getChange() {
        return this.change;
    }

    public Instance nextInstance() {
        this.numInstances++;
        InstancesHeader header = getHeader();
        Instance inst = new DenseInstance(header.numAttributes());
        inst.setDataset(header);
        double nextValue = this.nextValue();
        if (this.notBinaryStreamOption.isSet()) {
            inst.setValue(0, nextValue);
        } else {
            inst.setValue(0, this.nextBinaryValue(nextValue));
        }
        // Ground truth: whether this instance coincides with a concept change.
        inst.setValue(1, this.getChange() ? 1 : 0);
        //if (this.getChange()) {
        //    this.clusterEvents.add(new ClusterEvent(this, this.numInstances, "Change", "Drift"));
        //}
        inst.setValue(2, nextValue);
        return inst;
    }

    public void restart() {
        // Reset ALL per-run state so a restarted stream reproduces the same
        // sequence. The previous implementation only re-seeded the RNG and
        // left numInstances running, shifting the drift positions after a
        // restart even though isRestartable() returns true.
        this.instanceRandom = new Random(this.instanceRandomSeedOption.getValue());
        this.numInstances = 0;
        this.change = false;
    }

    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no description implemented.
    }
}
| Java |
/*
 * GradualChangeGenerator.java
* Copyright (C) 2012 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.streams.generators.cd;
import moa.options.FloatOption;
public class GradualChangeGenerator extends AbstractConceptDriftGenerator {

    /** Slope of the gradual drift applied after the change point. */
    public FloatOption changeDriftOption = new FloatOption("changeDrift", 'c',
            "The magnitude of change.", .001, 0.0, Double.MAX_VALUE);

    @Override
    protected double nextValue() {
        // The ground-truth value stays at 0.2 until the change point, then
        // ramps up linearly at the changeDrift rate, capped at 1.0.
        double t = this.numInstances;
        this.change = (t == this.period);
        if (t < this.period) {
            return .2;
        }
        double ramp = .2 + (t - this.period) * changeDriftOption.getValue();
        return Math.min(ramp, 1.0);
    }
}
| Java |
/*
* NoChangeGenerator.java
* Copyright (C) 2012 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.streams.generators.cd;
public class NoChangeGenerator extends AbstractConceptDriftGenerator {

    @Override
    protected double nextValue() {
        // Constant ground-truth value: this stream never drifts.
        return .2;
    }
}
| Java |
/*
* ConceptDriftGenerator.java
* Copyright (C) 2010 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
package moa.streams.generators.cd;
import java.util.ArrayList;
import moa.streams.InstanceStream;
import moa.streams.clustering.ClusterEvent;
/**
 * Interface for instance streams that generate concept drift and expose the
 * ground-truth list of change events.
 */
public interface ConceptDriftGenerator extends InstanceStream {
/** Returns the ground-truth change events recorded by the generator. */
public ArrayList<ClusterEvent> getEventsList();
}
| Java |
/*
* AbruptChangeGenerator.java
* Copyright (C) 2012 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.streams.generators.cd;
public class AbruptChangeGenerator extends AbstractConceptDriftGenerator {

    @Override
    protected double nextValue() {
        // Position within the current concept; the ground-truth value jumps
        // abruptly from 0.2 to 0.8 at the midpoint of every period.
        double t = this.numInstances % this.period;
        double midpoint = this.period / 2;
        this.change = (t == midpoint);
        return t < midpoint ? .2 : .8;
    }
}
| Java |
/*
* RandomTreeGenerator.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.streams.generators;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Random;
import moa.core.InstancesHeader;
import moa.core.ObjectRepository;
import moa.options.AbstractOptionHandler;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.streams.InstanceStream;
import moa.tasks.TaskMonitor;
/**
 * Stream generator for a stream based on a randomly generated tree.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class RandomTreeGenerator extends AbstractOptionHandler implements
        InstanceStream {

    @Override
    public String getPurposeString() {
        return "Generates a stream based on a randomly generated tree.";
    }

    private static final long serialVersionUID = 1L;

    public IntOption treeRandomSeedOption = new IntOption("treeRandomSeed",
            'r', "Seed for random generation of tree.", 1);

    public IntOption instanceRandomSeedOption = new IntOption(
            "instanceRandomSeed", 'i',
            "Seed for random generation of instances.", 1);

    public IntOption numClassesOption = new IntOption("numClasses", 'c',
            "The number of classes to generate.", 2, 2, Integer.MAX_VALUE);

    public IntOption numNominalsOption = new IntOption("numNominals", 'o',
            "The number of nominal attributes to generate.", 5, 0,
            Integer.MAX_VALUE);

    public IntOption numNumericsOption = new IntOption("numNumerics", 'u',
            "The number of numeric attributes to generate.", 5, 0,
            Integer.MAX_VALUE);

    public IntOption numValsPerNominalOption = new IntOption(
            "numValsPerNominal", 'v',
            "The number of values to generate per nominal attribute.", 5, 2,
            Integer.MAX_VALUE);

    public IntOption maxTreeDepthOption = new IntOption("maxTreeDepth", 'd',
            "The maximum depth of the tree concept.", 5, 0, Integer.MAX_VALUE);

    public IntOption firstLeafLevelOption = new IntOption(
            "firstLeafLevel",
            'l',
            "The first level of the tree above maxTreeDepth that can have leaves.",
            3, 0, Integer.MAX_VALUE);

    public FloatOption leafFractionOption = new FloatOption("leafFraction",
            'f',
            "The fraction of leaves per level from firstLeafLevel onwards.",
            0.15, 0.0, 1.0);

    /** Node of the randomly generated concept tree. */
    protected static class Node implements Serializable {

        private static final long serialVersionUID = 1L;

        // Class assigned at a leaf; only meaningful when children == null.
        public int classLabel;

        // Index of the attribute tested at an internal node.
        public int splitAttIndex;

        // Split threshold for numeric attributes (unused for nominal splits).
        public double splitAttValue;

        // null marks a leaf; a nominal split has one child per value,
        // a numeric split has exactly two (below threshold / at-or-above).
        public Node[] children;
    }

    protected Node treeRoot;

    protected InstancesHeader streamHeader;

    protected Random instanceRandom;

    @Override
    public void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        monitor.setCurrentActivity("Preparing random tree...", -1.0);
        generateHeader();
        generateRandomTree();
        restart();
    }

    @Override
    public long estimatedRemainingInstances() {
        return -1; // unbounded stream
    }

    @Override
    public boolean isRestartable() {
        return true;
    }

    @Override
    public void restart() {
        // Re-seeding the instance RNG is sufficient: the concept tree itself
        // is deterministic given treeRandomSeed and is rebuilt on prepare.
        this.instanceRandom = new Random(this.instanceRandomSeedOption.getValue());
    }

    @Override
    public InstancesHeader getHeader() {
        return this.streamHeader;
    }

    @Override
    public boolean hasMoreInstances() {
        return true;
    }

    /**
     * Draws uniformly random attribute values and labels them by routing
     * them through the concept tree.
     */
    @Override
    public Instance nextInstance() {
        double[] attVals = new double[this.numNominalsOption.getValue()
                + this.numNumericsOption.getValue()];
        InstancesHeader header = getHeader();
        Instance inst = new DenseInstance(header.numAttributes());
        for (int i = 0; i < attVals.length; i++) {
            // Nominal attributes come first: a random value index; numeric
            // attributes are uniform in [0, 1).
            attVals[i] = i < this.numNominalsOption.getValue() ? this.instanceRandom.nextInt(this.numValsPerNominalOption.getValue())
                    : this.instanceRandom.nextDouble();
            inst.setValue(i, attVals[i]);
        }
        inst.setDataset(header);
        inst.setClassValue(classifyInstance(this.treeRoot, attVals));
        return inst;
    }

    /** Recursively routes the attribute vector to a leaf and returns its class. */
    protected int classifyInstance(Node node, double[] attVals) {
        if (node.children == null) {
            return node.classLabel;
        }
        if (node.splitAttIndex < this.numNominalsOption.getValue()) {
            // Nominal split: the attribute value indexes the child directly.
            return classifyInstance(
                    node.children[(int) attVals[node.splitAttIndex]], attVals);
        }
        // Numeric split: child 0 below the threshold, child 1 at or above.
        return classifyInstance(
                node.children[attVals[node.splitAttIndex] < node.splitAttValue ? 0
                        : 1], attVals);
    }

    /** Builds the stream header: nominals, then numerics, then the class. */
    protected void generateHeader() {
        FastVector attributes = new FastVector();
        FastVector nominalAttVals = new FastVector();
        for (int i = 0; i < this.numValsPerNominalOption.getValue(); i++) {
            nominalAttVals.addElement("value" + (i + 1));
        }
        for (int i = 0; i < this.numNominalsOption.getValue(); i++) {
            attributes.addElement(new Attribute("nominal" + (i + 1),
                    nominalAttVals));
        }
        for (int i = 0; i < this.numNumericsOption.getValue(); i++) {
            attributes.addElement(new Attribute("numeric" + (i + 1)));
        }
        FastVector classLabels = new FastVector();
        for (int i = 0; i < this.numClassesOption.getValue(); i++) {
            classLabels.addElement("class" + (i + 1));
        }
        attributes.addElement(new Attribute("class", classLabels));
        this.streamHeader = new InstancesHeader(new Instances(
                getCLICreationString(InstanceStream.class), attributes, 0));
        this.streamHeader.setClassIndex(this.streamHeader.numAttributes() - 1);
    }

    /** Generates the random concept tree from the tree seed. */
    protected void generateRandomTree() {
        Random treeRand = new Random(this.treeRandomSeedOption.getValue());
        ArrayList<Integer> nominalAttCandidates = new ArrayList<Integer>(
                this.numNominalsOption.getValue());
        for (int i = 0; i < this.numNominalsOption.getValue(); i++) {
            nominalAttCandidates.add(i);
        }
        // Numeric attributes start with the full [0, 1] range; the range is
        // narrowed along each branch as numeric splits are introduced.
        double[] minNumericVals = new double[this.numNumericsOption.getValue()];
        double[] maxNumericVals = new double[this.numNumericsOption.getValue()];
        for (int i = 0; i < this.numNumericsOption.getValue(); i++) {
            minNumericVals[i] = 0.0;
            maxNumericVals[i] = 1.0;
        }
        this.treeRoot = generateRandomTreeNode(0, nominalAttCandidates,
                minNumericVals, maxNumericVals, treeRand);
    }

    /**
     * Recursively builds one node of the random tree.
     *
     * A leaf is produced at maxTreeDepth, or (from firstLeafLevel onwards)
     * with probability leafFraction. Nominal attributes may only be split
     * once per branch, so candidates are removed as they are used; numeric
     * attributes may be split repeatedly with narrowing ranges.
     */
    protected Node generateRandomTreeNode(int currentDepth,
            ArrayList<Integer> nominalAttCandidates, double[] minNumericVals,
            double[] maxNumericVals, Random treeRand) {
        if ((currentDepth >= this.maxTreeDepthOption.getValue())
                || ((currentDepth >= this.firstLeafLevelOption.getValue()) && (this.leafFractionOption.getValue() >= (1.0 - treeRand.nextDouble())))) {
            Node leaf = new Node();
            leaf.classLabel = treeRand.nextInt(this.numClassesOption.getValue());
            return leaf;
        }
        Node node = new Node();
        int chosenAtt = treeRand.nextInt(nominalAttCandidates.size()
                + this.numNumericsOption.getValue());
        if (chosenAtt < nominalAttCandidates.size()) {
            node.splitAttIndex = nominalAttCandidates.get(chosenAtt);
            node.children = new Node[this.numValsPerNominalOption.getValue()];
            ArrayList<Integer> newNominalCandidates = new ArrayList<Integer>(
                    nominalAttCandidates);
            // Integer.valueOf (not the deprecated new Integer(...)) forces the
            // remove(Object) overload; a bare int would call remove(int index).
            newNominalCandidates.remove(Integer.valueOf(node.splitAttIndex));
            newNominalCandidates.trimToSize();
            for (int i = 0; i < node.children.length; i++) {
                node.children[i] = generateRandomTreeNode(currentDepth + 1,
                        newNominalCandidates, minNumericVals, maxNumericVals,
                        treeRand);
            }
        } else {
            int numericIndex = chosenAtt - nominalAttCandidates.size();
            node.splitAttIndex = this.numNominalsOption.getValue()
                    + numericIndex;
            double minVal = minNumericVals[numericIndex];
            double maxVal = maxNumericVals[numericIndex];
            // Threshold drawn uniformly from the branch's remaining range.
            node.splitAttValue = ((maxVal - minVal) * treeRand.nextDouble())
                    + minVal;
            node.children = new Node[2];
            double[] newMaxVals = maxNumericVals.clone();
            newMaxVals[numericIndex] = node.splitAttValue;
            node.children[0] = generateRandomTreeNode(currentDepth + 1,
                    nominalAttCandidates, minNumericVals, newMaxVals, treeRand);
            double[] newMinVals = minNumericVals.clone();
            newMinVals[numericIndex] = node.splitAttValue;
            node.children[1] = generateRandomTreeNode(currentDepth + 1,
                    nominalAttCandidates, newMinVals, maxNumericVals, treeRand);
        }
        return node;
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // Intentionally empty: no description implemented.
    }
}
| Java |
/*
* AgrawalGenerator.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.streams.generators;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import java.util.Random;
import moa.core.InstancesHeader;
import moa.core.ObjectRepository;
import moa.options.AbstractOptionHandler;
import moa.options.FlagOption;
import moa.options.FloatOption;
import moa.options.IntOption;
import moa.streams.InstanceStream;
import moa.tasks.TaskMonitor;
/**
* Stream generator for Agrawal dataset.
* Generator described in paper:<br/>
* Rakesh Agrawal, Tomasz Imielinksi, and Arun Swami,
* "Database Mining: A Performance Perspective",
* IEEE Transactions on Knowledge and Data Engineering,
* 5(6), December 1993. <br/><br/>
*
* Public C source code available at:<br/>
* <a href="http://www.almaden.ibm.com/cs/projects/iis/hdb/Projects/data_mining/datasets/syndata.html">
* http://www.almaden.ibm.com/cs/projects/iis/hdb/Projects/data_mining/datasets/syndata.html</a><br/><br/>
*
* Notes:<br/>
* The built in functions are based on the paper (page 924),
* which turn out to be functions pred20 thru pred29 in the public C implementation.
* Perturbation function works like C implementation rather than description in paper.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class AgrawalGenerator extends AbstractOptionHandler implements
InstanceStream {
@Override
public String getPurposeString() {
return "Generates one of ten different pre-defined loan functions.";
}
private static final long serialVersionUID = 1L;
public IntOption functionOption = new IntOption("function", 'f',
"Classification function used, as defined in the original paper.",
1, 1, 10);
public IntOption instanceRandomSeedOption = new IntOption(
"instanceRandomSeed", 'i',
"Seed for random generation of instances.", 1);
public FloatOption peturbFractionOption = new FloatOption("peturbFraction",
'p',
"The amount of peturbation (noise) introduced to numeric values.",
0.05, 0.0, 1.0);
public FlagOption balanceClassesOption = new FlagOption("balanceClasses",
'b', "Balance the number of instances of each class.");
protected interface ClassFunction {
public int determineClass(double salary, double commission, int age,
int elevel, int car, int zipcode, double hvalue, int hyears,
double loan);
}
protected static ClassFunction[] classificationFunctions = {
// function 1
new ClassFunction() {
@Override
public int determineClass(double salary, double commission,
int age, int elevel, int car, int zipcode,
double hvalue, int hyears, double loan) {
return ((age < 40) || (60 <= age)) ? 0 : 1;
}
},
// function 2
new ClassFunction() {
@Override
public int determineClass(double salary, double commission,
int age, int elevel, int car, int zipcode,
double hvalue, int hyears, double loan) {
if (age < 40) {
return ((50000 <= salary) && (salary <= 100000)) ? 0
: 1;
} else if (age < 60) {// && age >= 40
return ((75000 <= salary) && (salary <= 125000)) ? 0
: 1;
} else {// age >= 60
return ((25000 <= salary) && (salary <= 75000)) ? 0 : 1;
}
}
},
// function 3
new ClassFunction() {
@Override
public int determineClass(double salary, double commission,
int age, int elevel, int car, int zipcode,
double hvalue, int hyears, double loan) {
if (age < 40) {
return ((elevel == 0) || (elevel == 1)) ? 0 : 1;
} else if (age < 60) { // && age >= 40
return ((elevel == 1) || (elevel == 2) || (elevel == 3)) ? 0
: 1;
} else { // age >= 60
return ((elevel == 2) || (elevel == 3) || (elevel == 4)) ? 0
: 1;
}
}
},
// function 4
new ClassFunction() {
@Override
public int determineClass(double salary, double commission,
int age, int elevel, int car, int zipcode,
double hvalue, int hyears, double loan) {
if (age < 40) {
if ((elevel == 0) || (elevel == 1)) {
return ((25000 <= salary) && (salary <= 75000)) ? 0
: 1;
}
return ((50000 <= salary) && (salary <= 100000)) ? 0
: 1;
} else if (age < 60) {// && age >= 40
if ((elevel == 1) || (elevel == 2) || (elevel == 3)) {
return ((50000 <= salary) && (salary <= 100000)) ? 0
: 1;
}
return ((75000 <= salary) && (salary <= 125000)) ? 0
: 1;
} else {// age >= 60
if ((elevel == 2) || (elevel == 3) || (elevel == 4)) {
return ((50000 <= salary) && (salary <= 100000)) ? 0
: 1;
}
return ((25000 <= salary) && (salary <= 75000)) ? 0 : 1;
}
}
},
// function 5
new ClassFunction() {
@Override
public int determineClass(double salary, double commission,
int age, int elevel, int car, int zipcode,
double hvalue, int hyears, double loan) {
if (age < 40) {
if ((50000 <= salary) && (salary <= 100000)) {
return ((100000 <= loan) && (loan <= 300000)) ? 0
: 1;
}
return ((200000 <= loan) && (loan <= 400000)) ? 0 : 1;
} else if (age < 60) {// && age >= 40
if ((75000 <= salary) && (salary <= 125000)) {
return ((200000 <= loan) && (loan <= 400000)) ? 0
: 1;
}
return ((300000 <= loan) && (loan <= 500000)) ? 0 : 1;
} else {// age >= 60
if ((25000 <= salary) && (salary <= 75000)) {
return ((300000 <= loan) && (loan <= 500000)) ? 0
: 1;
}
return ((100000 <= loan) && (loan <= 300000)) ? 0 : 1;
}
}
},
// function 6
new ClassFunction() {
@Override
public int determineClass(double salary, double commission,
int age, int elevel, int car, int zipcode,
double hvalue, int hyears, double loan) {
double totalSalary = salary + commission;
if (age < 40) {
return ((50000 <= totalSalary) && (totalSalary <= 100000)) ? 0
: 1;
} else if (age < 60) {// && age >= 40
return ((75000 <= totalSalary) && (totalSalary <= 125000)) ? 0
: 1;
} else {// age >= 60
return ((25000 <= totalSalary) && (totalSalary <= 75000)) ? 0
: 1;
}
}
},
// function 7
new ClassFunction() {
@Override
public int determineClass(double salary, double commission,
int age, int elevel, int car, int zipcode,
double hvalue, int hyears, double loan) {
double disposable = (2.0 * (salary + commission) / 3.0
- loan / 5.0 - 20000.0);
return disposable > 0 ? 0 : 1;
}
},
// function 8
new ClassFunction() {
@Override
public int determineClass(double salary, double commission,
int age, int elevel, int car, int zipcode,
double hvalue, int hyears, double loan) {
double disposable = (2.0 * (salary + commission) / 3.0
- 5000.0 * elevel - 20000.0);
return disposable > 0 ? 0 : 1;
}
},
// function 9
new ClassFunction() {
@Override
public int determineClass(double salary, double commission,
int age, int elevel, int car, int zipcode,
double hvalue, int hyears, double loan) {
double disposable = (2.0 * (salary + commission) / 3.0
- 5000.0 * elevel - loan / 5.0 - 10000.0);
return disposable > 0 ? 0 : 1;
}
},
// function 10
new ClassFunction() {
@Override
public int determineClass(double salary, double commission,
int age, int elevel, int car, int zipcode,
double hvalue, int hyears, double loan) {
double equity = 0.0;
if (hyears >= 20) {
equity = hvalue * (hyears - 20.0) / 10.0;
}
double disposable = (2.0 * (salary + commission) / 3.0
- 5000.0 * elevel + equity / 5.0 - 10000.0);
return disposable > 0 ? 0 : 1;
}
}};
protected InstancesHeader streamHeader;
protected Random instanceRandom;
protected boolean nextClassShouldBeZero;
@Override
protected void prepareForUseImpl(TaskMonitor monitor,
ObjectRepository repository) {
// generate header
FastVector attributes = new FastVector();
attributes.addElement(new Attribute("salary"));
attributes.addElement(new Attribute("commission"));
attributes.addElement(new Attribute("age"));
FastVector elevelLabels = new FastVector();
for (int i = 0; i < 5; i++) {
elevelLabels.addElement("level" + i);
}
attributes.addElement(new Attribute("elevel", elevelLabels));
FastVector carLabels = new FastVector();
for (int i = 0; i < 20; i++) {
carLabels.addElement("car" + (i + 1));
}
attributes.addElement(new Attribute("car", carLabels));
FastVector zipCodeLabels = new FastVector();
for (int i = 0; i < 9; i++) {
zipCodeLabels.addElement("zipcode" + (i + 1));
}
attributes.addElement(new Attribute("zipcode", zipCodeLabels));
attributes.addElement(new Attribute("hvalue"));
attributes.addElement(new Attribute("hyears"));
attributes.addElement(new Attribute("loan"));
FastVector classLabels = new FastVector();
classLabels.addElement("groupA");
classLabels.addElement("groupB");
attributes.addElement(new Attribute("class", classLabels));
this.streamHeader = new InstancesHeader(new Instances(
getCLICreationString(InstanceStream.class), attributes, 0));
this.streamHeader.setClassIndex(this.streamHeader.numAttributes() - 1);
restart();
}
@Override
public long estimatedRemainingInstances() {
return -1;
}
@Override
public InstancesHeader getHeader() {
return this.streamHeader;
}
@Override
public boolean hasMoreInstances() {
return true;
}
@Override
public boolean isRestartable() {
return true;
}
/**
 * Generates the next instance of the stream.
 *
 * <p>Nine attributes (salary, commission, age, education level, car,
 * zipcode, house value, years owned, loan) are drawn pseudo-randomly,
 * the class is assigned by the selected classification function, and —
 * when class balancing is enabled — instances are rejection-sampled so
 * that classes 0 and 1 strictly alternate. Optionally, numeric values
 * are perturbed afterwards.
 *
 * <p>NOTE: the exact sequence of {@code instanceRandom} draws (including
 * the conditional draw for commission) defines the stream; do not reorder
 * statements in this method.
 *
 * @return the generated instance, with its dataset set to this stream's header
 */
@Override
public Instance nextInstance() {
    double salary = 0, commission = 0, hvalue = 0, loan = 0;
    int age = 0, elevel = 0, car = 0, zipcode = 0, hyears = 0, group = 0;
    boolean desiredClassFound = false;
    while (!desiredClassFound) {
        // generate attributes
        salary = 20000.0 + 130000.0 * this.instanceRandom.nextDouble();
        // Commission is only drawn (consuming a random value) when salary < 75000.
        commission = (salary >= 75000.0) ? 0
                : (10000.0 + 65000.0 * this.instanceRandom.nextDouble());
        // true to c implementation:
        // if (instanceRandom.nextDouble() < 0.5 && salary < 75000.0)
        //   commission = 10000.0 + 65000.0 * instanceRandom.nextDouble();
        age = 20 + this.instanceRandom.nextInt(61); // 20..80 inclusive
        elevel = this.instanceRandom.nextInt(5);    // education level 0..4
        car = this.instanceRandom.nextInt(20);      // car make 0..19
        zipcode = this.instanceRandom.nextInt(9);   // 0..8 (nine zip codes)
        // House value scales inversely with zipcode, with a random factor in [0.5, 1.5).
        hvalue = (9.0 - zipcode) * 100000.0
                * (0.5 + this.instanceRandom.nextDouble());
        hyears = 1 + this.instanceRandom.nextInt(30); // 1..30
        loan = this.instanceRandom.nextDouble() * 500000.0;
        // determine class via the 1-based function option
        group = classificationFunctions[this.functionOption.getValue() - 1].determineClass(salary, commission, age, elevel, car,
                zipcode, hvalue, hyears, loan);
        if (!this.balanceClassesOption.isSet()) {
            desiredClassFound = true;
        } else {
            // balance the classes: only accept the instance if its class
            // matches the expected alternating class, then flip expectation
            if ((this.nextClassShouldBeZero && (group == 0))
                    || (!this.nextClassShouldBeZero && (group == 1))) {
                desiredClassFound = true;
                this.nextClassShouldBeZero = !this.nextClassShouldBeZero;
            } // else keep searching
        }
    }
    // perturb values (noise applied AFTER the class label is fixed)
    if (this.peturbFractionOption.getValue() > 0.0) {
        salary = perturbValue(salary, 20000, 150000);
        if (commission > 0) {
            commission = perturbValue(commission, 10000, 75000);
        }
        age = (int) Math.round(perturbValue(age, 20, 80));
        // hvalue uses the 4-arg overload: range depends on zipcode, but the
        // clamp interval is fixed at [0, 135000] — NOTE(review): the max looks
        // low compared to the generated range; matches the original paper's
        // C implementation, presumably intentional — confirm against it.
        hvalue = perturbValue(hvalue, (9.0 - zipcode) * 100000.0, 0, 135000);
        hyears = (int) Math.round(perturbValue(hyears, 1, 30));
        loan = perturbValue(loan, 0, 500000);
    }
    // construct instance
    InstancesHeader header = getHeader();
    Instance inst = new DenseInstance(header.numAttributes());
    inst.setValue(0, salary);
    inst.setValue(1, commission);
    inst.setValue(2, age);
    inst.setValue(3, elevel);
    inst.setValue(4, car);
    inst.setValue(5, zipcode);
    inst.setValue(6, hvalue);
    inst.setValue(7, hyears);
    inst.setValue(8, loan);
    inst.setDataset(header);
    inst.setClassValue(group);
    return inst;
}
/**
 * Perturbs {@code val} and clamps it to [{@code min}, {@code max}].
 * Convenience overload that uses {@code max - min} as the perturbation range.
 *
 * @param val the value to perturb
 * @param min lower clamp bound
 * @param max upper clamp bound
 * @return the perturbed, clamped value
 */
protected double perturbValue(double val, double min, double max) {
    return perturbValue(val, max - min, min, max);
}
/**
 * Adds uniform random noise to a value and clamps the result.
 *
 * <p>The noise is drawn uniformly from
 * {@code (-range * fraction, +range * fraction)}, where the fraction comes
 * from the perturbation option, and the result is clamped to
 * [{@code min}, {@code max}].
 *
 * @param val   the value to perturb
 * @param range the base magnitude of the perturbation
 * @param min   lower clamp bound
 * @param max   upper clamp bound
 * @return the perturbed value, clamped to the given interval
 */
protected double perturbValue(double val, double range, double min,
        double max) {
    // Symmetric uniform offset scaled by the configured perturbation fraction.
    double offset = range * (2.0 * (this.instanceRandom.nextDouble() - 0.5))
            * this.peturbFractionOption.getValue();
    double perturbed = val + offset;
    if (perturbed < min) {
        return min;
    }
    if (perturbed > max) {
        return max;
    }
    return perturbed;
}
/**
 * Restarts the stream: re-seeds the random generator from the seed option
 * and resets the class-balancing state, so the exact same instance
 * sequence is reproduced.
 */
@Override
public void restart() {
    this.instanceRandom = new Random(this.instanceRandomSeedOption.getValue());
    this.nextClassShouldBeZero = false;
}
/**
 * {@inheritDoc}
 *
 * <p>Intentionally a no-op: this generator emits no textual description.
 */
@Override
public void getDescription(StringBuilder sb, int indent) {
    // No description implemented for this generator.
}
}
| Java |
/*
* STAGGERGenerator.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.streams.generators;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import java.util.Random;
import moa.core.InstancesHeader;
import moa.core.ObjectRepository;
import moa.options.AbstractOptionHandler;
import moa.options.FlagOption;
import moa.options.IntOption;
import moa.streams.InstanceStream;
import moa.tasks.TaskMonitor;
/**
* Stream generator for STAGGER Concept functions.
*
* Generator described in the paper:<br/>
* Jeffrey C. Schlimmer and Richard H. Granger Jr.
* "Incremental Learning from Noisy Data",
* Machine Learning 1: 317-354 1986.<br/><br/>
*
* Notes:<br/>
* The built in functions are based on the paper (page 341).
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public class STAGGERGenerator extends AbstractOptionHandler implements
        InstanceStream {

    @Override
    public String getPurposeString() {
        return "Generates STAGGER Concept functions.";
    }

    private static final long serialVersionUID = 1L;

    /** Seed controlling the pseudo-random instance sequence; reproducible via {@link #restart()}. */
    public IntOption instanceRandomSeedOption = new IntOption(
            "instanceRandomSeed", 'i',
            "Seed for random generation of instances.", 1);

    /** Selects one of the three STAGGER target concepts (1-3) from the original paper. */
    public IntOption functionOption = new IntOption("function", 'f',
            "Classification function used, as defined in the original paper.",
            1, 1, 3);

    /** When set, rejection-samples instances so classes 0 and 1 strictly alternate. */
    public FlagOption balanceClassesOption = new FlagOption("balanceClasses",
            'b', "Balance the number of instances of each class.");

    /**
     * A STAGGER target concept: maps the three nominal attribute indices
     * (size, color, shape) to a binary class — 1 if the concept holds,
     * 0 otherwise.
     */
    protected interface ClassFunction {

        public int determineClass(int size, int color, int shape);
    }

    /**
     * The three built-in concepts (paper, page 341). Attribute encodings:
     * size {0=small, 1=medium, 2=large}, color {0=red, 1=blue, 2=green},
     * shape {0=circle, 1=square, 2=triangle}.
     */
    protected static ClassFunction[] classificationFunctions = {
        // function 1
        new ClassFunction() {

            @Override
            public int determineClass(int size, int color, int shape) {
                return (size == 0 && color == 0) ? 1 : 0; //size==small && color==red
            }
        },
        // function 2
        new ClassFunction() {

            @Override
            public int determineClass(int size, int color, int shape) {
                return (color == 2 || shape == 0) ? 1 : 0; //color==green || shape==circle
            }
        },
        // function 3
        new ClassFunction() {

            @Override
            public int determineClass(int size, int color, int shape) {
                return (size == 1 || size == 2) ? 1 : 0; // size==medium || size==large
            }
        }
    };

    // Header describing the three nominal attributes plus the boolean class.
    protected InstancesHeader streamHeader;

    // Source of all randomness; re-seeded by restart().
    protected Random instanceRandom;

    // Class-balancing state: which class the next accepted instance must have.
    protected boolean nextClassShouldBeZero;

    /**
     * Builds the stream header (size/color/shape nominal attributes and a
     * false/true class) and restarts the stream.
     */
    @Override
    protected void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        // generate header
        FastVector attributes = new FastVector();
        FastVector sizeLabels = new FastVector();
        sizeLabels.addElement("small");
        sizeLabels.addElement("medium");
        sizeLabels.addElement("large");
        attributes.addElement(new Attribute("size", sizeLabels));
        FastVector colorLabels = new FastVector();
        colorLabels.addElement("red");
        colorLabels.addElement("blue");
        colorLabels.addElement("green");
        attributes.addElement(new Attribute("color", colorLabels));
        FastVector shapeLabels = new FastVector();
        shapeLabels.addElement("circle");
        shapeLabels.addElement("square");
        shapeLabels.addElement("triangle");
        attributes.addElement(new Attribute("shape", shapeLabels));
        FastVector classLabels = new FastVector();
        classLabels.addElement("false");
        classLabels.addElement("true");
        attributes.addElement(new Attribute("class", classLabels));
        this.streamHeader = new InstancesHeader(new Instances(
                getCLICreationString(InstanceStream.class), attributes, 0));
        this.streamHeader.setClassIndex(this.streamHeader.numAttributes() - 1);
        restart();
    }

    /**
     * {@inheritDoc}
     *
     * @return always -1: the stream is unbounded
     */
    @Override
    public long estimatedRemainingInstances() {
        return -1;
    }

    /**
     * Gets the header of this stream.
     *
     * @return the header built during preparation; {@code null} before preparation
     */
    @Override
    public InstancesHeader getHeader() {
        return this.streamHeader;
    }

    /**
     * {@inheritDoc}
     *
     * @return always {@code true}; instances are generated on demand
     */
    @Override
    public boolean hasMoreInstances() {
        return true;
    }

    /**
     * {@inheritDoc}
     *
     * @return always {@code true}
     */
    @Override
    public boolean isRestartable() {
        return true;
    }

    /**
     * Generates the next instance: draws size/color/shape uniformly, labels
     * it with the selected concept, and — when balancing is enabled —
     * rejection-samples until the class alternates.
     *
     * @return the generated instance, with its dataset set to this stream's header
     */
    @Override
    public Instance nextInstance() {
        int size = 0, color = 0, shape = 0, group = 0;
        boolean desiredClassFound = false;
        while (!desiredClassFound) {
            // generate attributes uniformly over the three values each
            size = this.instanceRandom.nextInt(3);
            color = this.instanceRandom.nextInt(3);
            shape = this.instanceRandom.nextInt(3);
            // determine class via the 1-based function option
            group = classificationFunctions[this.functionOption.getValue() - 1].determineClass(size, color, shape);
            if (!this.balanceClassesOption.isSet()) {
                desiredClassFound = true;
            } else {
                // balance the classes: accept only the expected alternating class
                if ((this.nextClassShouldBeZero && (group == 0))
                        || (!this.nextClassShouldBeZero && (group == 1))) {
                    desiredClassFound = true;
                    this.nextClassShouldBeZero = !this.nextClassShouldBeZero;
                } // else keep searching
            }
        }
        // construct instance
        InstancesHeader header = getHeader();
        Instance inst = new DenseInstance(header.numAttributes());
        inst.setValue(0, size);
        inst.setValue(1, color);
        inst.setValue(2, shape);
        inst.setDataset(header);
        inst.setClassValue(group);
        return inst;
    }

    /**
     * Restarts the stream: re-seeds the random generator and resets the
     * class-balancing state so the identical sequence is reproduced.
     */
    @Override
    public void restart() {
        this.instanceRandom = new Random(this.instanceRandomSeedOption.getValue());
        this.nextClassShouldBeZero = false;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Intentionally a no-op: this generator emits no textual description.
     */
    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // No description implemented for this generator.
    }
}
| Java |
/*
* WaveformGenerator.java
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.streams.generators;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import java.util.Random;
import moa.core.InstancesHeader;
import moa.core.ObjectRepository;
import moa.options.AbstractOptionHandler;
import moa.options.FlagOption;
import moa.options.IntOption;
import moa.streams.InstanceStream;
import moa.tasks.TaskMonitor;
/**
* Stream generator for the problem of predicting one of three waveform types.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
public class WaveformGenerator extends AbstractOptionHandler implements
        InstanceStream {

    @Override
    public String getPurposeString() {
        return "Generates a problem of predicting one of three waveform types.";
    }

    private static final long serialVersionUID = 1L;

    public static final int NUM_CLASSES = 3;

    public static final int NUM_BASE_ATTRIBUTES = 21;

    public static final int TOTAL_ATTRIBUTES_INCLUDING_NOISE = 40;

    /**
     * The three triangular base wave shapes; each class is a random convex
     * combination of a fixed pair of these, plus Gaussian noise.
     */
    protected static final int hFunctions[][] = {
        {0, 1, 2, 3, 4, 5, 6, 5, 4, 3, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0},
        {0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 5, 4, 3, 2, 1, 0},
        {0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 5, 4, 3, 2, 1, 0, 0, 0, 0, 0}};

    /** Seed controlling the pseudo-random instance sequence; reproducible via {@link #restart()}. */
    public IntOption instanceRandomSeedOption = new IntOption(
            "instanceRandomSeed", 'i',
            "Seed for random generation of instances.", 1);

    /** When set, 19 extra pure-noise attributes are appended (40 attributes total). */
    public FlagOption addNoiseOption = new FlagOption("addNoise", 'n',
            "Adds noise, for a total of 40 attributes.");

    // Header describing the numeric attributes plus the nominal class.
    protected InstancesHeader streamHeader;

    // Source of all randomness; re-seeded by restart().
    protected Random instanceRandom;

    /**
     * Builds the stream header (21 or 40 numeric attributes and a
     * three-valued class) and restarts the stream.
     */
    @Override
    protected void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        // Build the header: one numeric attribute per waveform position,
        // optionally followed by pure-noise attributes, then the class.
        int attributeCount = this.addNoiseOption.isSet()
                ? TOTAL_ATTRIBUTES_INCLUDING_NOISE
                : NUM_BASE_ATTRIBUTES;
        FastVector attributes = new FastVector();
        for (int attIndex = 0; attIndex < attributeCount; attIndex++) {
            attributes.addElement(new Attribute("att" + (attIndex + 1)));
        }
        FastVector classLabels = new FastVector();
        for (int classIndex = 0; classIndex < NUM_CLASSES; classIndex++) {
            classLabels.addElement("class" + (classIndex + 1));
        }
        attributes.addElement(new Attribute("class", classLabels));
        this.streamHeader = new InstancesHeader(new Instances(
                getCLICreationString(InstanceStream.class), attributes, 0));
        this.streamHeader.setClassIndex(this.streamHeader.numAttributes() - 1);
        restart();
    }

    /**
     * {@inheritDoc}
     *
     * @return always -1: the stream is unbounded
     */
    @Override
    public long estimatedRemainingInstances() {
        return -1;
    }

    /**
     * Gets the header of this stream.
     *
     * @return the header built during preparation; {@code null} before preparation
     */
    @Override
    public InstancesHeader getHeader() {
        return this.streamHeader;
    }

    /**
     * {@inheritDoc}
     *
     * @return always {@code true}; instances are generated on demand
     */
    @Override
    public boolean hasMoreInstances() {
        return true;
    }

    /**
     * {@inheritDoc}
     *
     * @return always {@code true}
     */
    @Override
    public boolean isRestartable() {
        return true;
    }

    /**
     * Generates the next instance: picks one of the three waveform classes,
     * mixes its two base wave shapes with random convex weights, adds unit
     * Gaussian noise per attribute, and optionally fills the extra
     * attributes with pure Gaussian noise.
     *
     * <p>NOTE: the order of {@code instanceRandom} draws (class, weight,
     * then per-attribute Gaussians) defines the stream; do not reorder.
     *
     * @return the generated instance, with its dataset set to this stream's header
     */
    @Override
    public Instance nextInstance() {
        InstancesHeader header = getHeader();
        Instance inst = new DenseInstance(header.numAttributes());
        inst.setDataset(header);
        int waveform = this.instanceRandom.nextInt(NUM_CLASSES);
        // Each class combines a fixed pair of base waves: 0->(0,1), 1->(0,2), 2->(1,2).
        final int[][] wavePairs = {{0, 1}, {0, 2}, {1, 2}};
        int choiceA = wavePairs[waveform][0];
        int choiceB = wavePairs[waveform][1];
        // Convex combination weights summing to 1.
        double multiplierA = this.instanceRandom.nextDouble();
        double multiplierB = 1.0 - multiplierA;
        for (int i = 0; i < NUM_BASE_ATTRIBUTES; i++) {
            double value = (multiplierA * hFunctions[choiceA][i])
                    + (multiplierB * hFunctions[choiceB][i])
                    + this.instanceRandom.nextGaussian();
            inst.setValue(i, value);
        }
        if (this.addNoiseOption.isSet()) {
            // Irrelevant attributes: pure unit Gaussian noise.
            for (int i = NUM_BASE_ATTRIBUTES; i < TOTAL_ATTRIBUTES_INCLUDING_NOISE; i++) {
                inst.setValue(i, this.instanceRandom.nextGaussian());
            }
        }
        inst.setClassValue(waveform);
        return inst;
    }

    /**
     * Restarts the stream by re-seeding the random generator, so the
     * identical instance sequence is reproduced.
     */
    @Override
    public void restart() {
        this.instanceRandom = new Random(this.instanceRandomSeedOption.getValue());
    }

    /**
     * {@inheritDoc}
     *
     * <p>Intentionally a no-op: this generator emits no textual description.
     */
    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // No description implemented for this generator.
    }
}
| Java |
/*
* LEDGeneratorDrift.java
* Copyright (C) 2008 University of Waikato, Hamilton, New Zealand
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package moa.streams.generators;
import weka.core.DenseInstance;
import weka.core.Instance;
import moa.core.InstancesHeader;
import moa.core.ObjectRepository;
import moa.options.IntOption;
import moa.tasks.TaskMonitor;
/**
* Stream generator for the problem of predicting the digit displayed on a 7-segment LED display with drift.
*
* @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
* @version $Revision: 7 $
*/
public class LEDGeneratorDrift extends LEDGenerator {

    @Override
    public String getPurposeString() {
        return "Generates a problem of predicting the digit displayed on a 7-segment LED display with drift.";
    }

    private static final long serialVersionUID = 1L;

    /** How many of the 7 relevant segment attributes are swapped with irrelevant ones to simulate drift. */
    public IntOption numberAttributesDriftOption = new IntOption("numberAttributesDrift",
            'd', "Number of attributes with drift.", 1, 0, 7);

    // Permutation mapping logical attribute index -> physical column in the
    // generated instance; identity except for the drifted swaps below.
    protected int[] numberAttribute;

    /**
     * Prepares the generator: after the superclass builds the header, sets
     * up the attribute permutation and swaps {@code numberAttributesDrift}
     * relevant attributes (indices 0-6) with irrelevant ones (indices 7+)
     * to realize the drift.
     */
    @Override
    protected void prepareForUseImpl(TaskMonitor monitor,
            ObjectRepository repository) {
        super.prepareForUseImpl(monitor, repository);
        // Start from the identity permutation over all attributes.
        this.numberAttribute = new int[7 + NUM_IRRELEVANT_ATTRIBUTES];
        for (int i = 0; i < 7 + NUM_IRRELEVANT_ATTRIBUTES; i++) {
            this.numberAttribute[i] = i;
        }
        //Change atributes
        if (!this.suppressIrrelevantAttributesOption.isSet() && this.numberAttributesDriftOption.getValue() > 0) {
            // NOTE(review): the random starting offsets are hard-coded to 0
            // (the nextInt calls are commented out), so the same attribute
            // positions are always swapped — presumably for reproducibility;
            // confirm this is intentional.
            int randomInt = 0;//this.instanceRandom.nextInt(7);
            int offset = 0;//this.instanceRandom.nextInt(NUM_IRRELEVANT_ATTRIBUTES);
            for (int i = 0; i < this.numberAttributesDriftOption.getValue(); i++) {
                // Swap relevant position value1 with irrelevant position value2.
                int value1 = (i + randomInt) % 7;
                int value2 = 7 + ((i + offset) % (NUM_IRRELEVANT_ATTRIBUTES));
                this.numberAttribute[value1] = value2;
                this.numberAttribute[value2] = value1;
            }
        }
    }

    /**
     * Generates the next instance: picks a digit 0-9, writes its 7 segment
     * values (each flipped with the configured noise probability) through
     * the drift permutation, optionally fills the irrelevant attributes
     * with random bits, and labels the instance with the digit.
     *
     * @return the generated instance, with its dataset set to this stream's header
     */
    @Override
    public Instance nextInstance() {
        InstancesHeader header = getHeader();
        Instance inst = new DenseInstance(header.numAttributes());
        inst.setDataset(header);
        int selected = this.instanceRandom.nextInt(10);
        for (int i = 0; i < 7; i++) {
            // With probability noisePercentage/100, invert the segment bit.
            if ((1 + (this.instanceRandom.nextInt(100))) <= this.noisePercentageOption.getValue()) {
                inst.setValue(this.numberAttribute[i], originalInstances[selected][i] == 0 ? 1 : 0);
            } else {
                inst.setValue(this.numberAttribute[i], originalInstances[selected][i]);
            }
        }
        if (!this.suppressIrrelevantAttributesOption.isSet()) {
            // Irrelevant attributes carry uniformly random 0/1 values.
            for (int i = 0; i < NUM_IRRELEVANT_ATTRIBUTES; i++) {
                inst.setValue(this.numberAttribute[i + 7], this.instanceRandom.nextInt(2));
            }
        }
        inst.setClassValue(selected);
        return inst;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Intentionally a no-op: this generator emits no textual description.
     */
    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // No description implemented for this generator.
    }
}
| Java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.