repo stringlengths 1 191 ⌀ | file stringlengths 23 351 | code stringlengths 0 5.32M | file_length int64 0 5.32M | avg_line_length float64 0 2.9k | max_line_length int64 0 288k | extension_type stringclasses 1 value |
|---|---|---|---|---|---|---|
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/BestIndividual.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import weka.core.Instance;
/**
*
* @author James Large james.large@uea.ac.uk
*/
public abstract class BestIndividual extends ModuleVotingScheme {

    /**
     * Indices of every 'best' module this instance of 'BestIndividual' has
     * 'been involved with'. Purely for experimental/analysis purposes and
     * lazyness: in a single hesca run only one will be chosen (stored in the
     * bestModule int), but these accumulate the best potentially over multiple
     * folds on multiple datasets. Stored as a simple list here; it's up to the
     * experimental code to divide them up by fold/dataset.
     */
    protected ArrayList<Integer> bestModulesInds = new ArrayList<>();

    /** Names of the selected modules, kept in lockstep with bestModulesInds. */
    protected ArrayList<String> bestModulesNames = new ArrayList<>();

    /** Index of the best module on this particular run of the ensemble. */
    public int bestModule;

    public BestIndividual() {
    }

    public BestIndividual(int numClasses) {
        this.numClasses = numClasses;
    }

    public ArrayList<Integer> getBestModulesInds() {
        return bestModulesInds;
    }

    public ArrayList<String> getBestModulesNames() {
        return bestModulesNames;
    }

    /**
     * Splits the accumulated best-module names into per-dataset lists, assuming
     * entries were recorded dataset-major, fold-minor.
     *
     * @return map&lt;dset, bestclassifierperfold&gt;
     * @throws Exception if (numBestIndsStored != dsets.length * folds)
     */
    public Map<String, ArrayList<String>> splitBestIndividualString(String[] dsets, int numFolds) throws Exception {
        if (bestModulesNames.size() != dsets.length * numFolds)
            throw new Exception("not all folds present");

        Map<String, ArrayList<String>> res = new HashMap<>(dsets.length);

        int globalIndex = 0;
        for (int d = 0; d < dsets.length; d++) {
            ArrayList<String> fs = new ArrayList<>(numFolds);
            for (int f = 0; f < numFolds; f++)
                fs.add(bestModulesNames.get(globalIndex++));
            res.put(dsets[d], fs);
        }

        return res;
    }

    /**
     * Splits the accumulated best-module indices into per-dataset lists, assuming
     * entries were recorded dataset-major, fold-minor.
     *
     * @return map&lt;dset, bestclassifierindexperfold&gt;
     * @throws Exception if (numBestIndsStored != dsets.length * folds)
     */
    public Map<String, ArrayList<Integer>> splitBestIndividualIndex(String[] dsets, int numFolds) throws Exception {
        // Fixed: validate the size of the list actually being read below
        // (bestModulesInds). Previously this checked bestModulesNames; the two
        // are added in lockstep so sizes normally agree, but the guard should
        // protect the list it indexes into.
        if (bestModulesInds.size() != dsets.length * numFolds)
            throw new Exception("not all folds present");

        Map<String, ArrayList<Integer>> res = new HashMap<>(dsets.length);

        int globalIndex = 0;
        for (int d = 0; d < dsets.length; d++) {
            ArrayList<Integer> fs = new ArrayList<>(numFolds);
            for (int f = 0; f < numFolds; f++)
                fs.add(bestModulesInds.get(globalIndex++));
            res.put(dsets[d], fs);
        }

        return res;
    }

    /** Ensemble's train distribution is just the chosen module's stored train distribution. */
    @Override
    public double[] distributionForTrainInstance(EnsembleModule[] modules, int trainInstanceIndex) {
        return modules[bestModule].trainResults.getProbabilityDistribution(trainInstanceIndex);
    }

    /** Ensemble's test distribution is just the chosen module's stored test distribution. */
    @Override
    public double[] distributionForTestInstance(EnsembleModule[] modules, int testInstanceIndex) {
        return modules[bestModule].testResults.getProbabilityDistribution(testInstanceIndex);
    }

    /** For unseen instances, delegate directly to the chosen module's classifier. */
    @Override
    public double[] distributionForInstance(EnsembleModule[] modules, Instance testInstance) throws Exception {
        return modules[bestModule].getClassifier().distributionForInstance(testInstance);
    }
}
| 4,260 | 36.377193 | 116 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/BestIndividualOracle.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
* TODO what if there's tie for best? UNTESTED
*
* The ensemble's distribution for an instance is equal to the single 'best' individual,
* as defined by THEIR TEST ACCURACY. Results must have been read from file (i.e test preds
* already exist at train time) Weighting scheme is irrelevant, only considers accuracy.
*
* Mostly just written so that I can do the best individual within the existing framework for
* later testing
*
* @author James Large james.large@uea.ac.uk
*/
public class BestIndividualOracle extends BestIndividual {

    public BestIndividualOracle() {
        super();
    }

    public BestIndividualOracle(int numClasses) {
        super(numClasses);
    }

    /**
     * Selects the module with the highest TEST accuracy as this run's best
     * individual, records it in the analysis lists, and debug-prints its name.
     * Ties are resolved in favour of the earlier module in the array.
     */
    @Override
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) throws Exception {
        super.trainVotingScheme(modules, numClasses);

        double topAcc = -1;
        for (int idx = 0; idx < modules.length; ++idx) {
            double acc = modules[idx].testResults.getAcc();
            if (acc > topAcc) {
                topAcc = acc;
                bestModule = idx;
            }
        }

        bestModulesInds.add(bestModule);
        bestModulesNames.add(modules[bestModule].getModuleName());

        printlnDebug(modules[bestModule].getModuleName());
    }
}
| 2,239 | 34.555556 | 94 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/BestIndividualTrain.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
*
* TODO what if there's tie for best?
*
* The ensemble's distribution for an instance is equal to the single 'best' individual,
* as defined by whatever (uniform) weighting scheme is being used.
*
* Mostly just written so that I can do the best individual within the existing framework for
* later testing
*
* @author James Large james.large@uea.ac.uk
*/
public class BestIndividualTrain extends BestIndividual {

    public BestIndividualTrain() {
        super();
    }

    public BestIndividualTrain(int numClasses) {
        super(numClasses);
    }

    /**
     * Selects the module with the highest (uniform) posterior weight as this
     * run's best individual, records it in the analysis lists, and debug-prints
     * its name. Ties are resolved in favour of the earlier module.
     */
    @Override
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) throws Exception {
        super.trainVotingScheme(modules, numClasses);

        double topWeight = -1;
        for (int idx = 0; idx < modules.length; ++idx) {
            // sanity check: this scheme is only meaningful when each module has a
            // single weight repeated across all classes
            double first = modules[idx].posteriorWeights[0];
            for (int c = 1; c < numClasses; ++c)
                if (modules[idx].posteriorWeights[c] != first)
                    throw new Exception("BestIndividualTrain cannot be used with non-uniform weighting schemes");

            if (first > topWeight) {
                topWeight = first;
                bestModule = idx;
            }
        }

        bestModulesInds.add(bestModule);
        bestModulesNames.add(modules[bestModule].getModuleName());

        printlnDebug(modules[bestModule].getModuleName());
    }
}
| 2,597 | 34.589041 | 113 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/MajorityConfidence.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import weka.core.Instance;
/**
* Uses the weighted confidences of each module that the instance is in EACH class (not just the most likely)
*
* i.e in a 2-class problem, a module's distforinst maybe be .6 / .4,
* .6 * weight_c1 will be added to class 1
* .4 * weight_c2 will be added to class 2 as well
*
* @author James Large
*/
public class MajorityConfidence extends ModuleVotingScheme {

    public MajorityConfidence() {
    }

    public MajorityConfidence(int numClasses) {
        this.numClasses = numClasses;
    }

    @Override
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) {
        this.numClasses = numClasses;
    }

    /**
     * Adds a module's full weighted distribution into the running vote totals:
     * every class receives priorWeight * posteriorWeight[c] * dist[c], not just
     * the module's most likely class.
     */
    private void accumulate(double[] votes, EnsembleModule module, double[] dist) {
        for (int c = 0; c < numClasses; c++)
            votes[c] += module.priorWeight * module.posteriorWeights[c] * dist[c];
    }

    @Override
    public double[] distributionForTrainInstance(EnsembleModule[] modules, int trainInstanceIndex) {
        double[] votes = new double[numClasses];
        for (EnsembleModule module : modules)
            accumulate(votes, module, module.trainResults.getProbabilityDistribution(trainInstanceIndex));
        return normalise(votes);
    }

    @Override
    public double[] distributionForTestInstance(EnsembleModule[] modules, int testInstanceIndex) {
        double[] votes = new double[numClasses];
        for (EnsembleModule module : modules)
            accumulate(votes, module, module.testResults.getProbabilityDistribution(testInstanceIndex));
        return normalise(votes);
    }

    @Override
    public double[] distributionForInstance(EnsembleModule[] modules, Instance testInstance) throws Exception {
        double[] votes = new double[numClasses];
        for (EnsembleModule module : modules)
            accumulate(votes, module, distributionForNewInstance(module, testInstance));
        return normalise(votes);
    }
}
| 3,335 | 34.115789 | 111 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/MajorityVote.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import static utilities.GenericTools.indexOfMax;
import weka.core.Instance;
/**
* Simple majority vote, gets the prediction of each module and adds it's weight
* to that class' overall weight
*
* @author James Large
*/
public class MajorityVote extends ModuleVotingScheme {

    public MajorityVote() {
    }

    public MajorityVote(int numClasses) {
        this.numClasses = numClasses;
    }

    @Override
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) {
        this.numClasses = numClasses;
    }

    /** Each module casts its weight for the class it predicted on this train instance. */
    @Override
    public double[] distributionForTrainInstance(EnsembleModule[] modules, int trainInstanceIndex) {
        double[] votes = new double[numClasses];
        for (EnsembleModule module : modules) {
            int vote = (int) module.trainResults.getPredClassValue(trainInstanceIndex);
            votes[vote] += module.priorWeight * module.posteriorWeights[vote];
        }
        return normalise(votes);
    }

    /** Each module casts its weight for the class it predicted on this test instance. */
    @Override
    public double[] distributionForTestInstance(EnsembleModule[] modules, int testInstanceIndex) {
        double[] votes = new double[numClasses];
        for (EnsembleModule module : modules) {
            int vote = (int) module.testResults.getPredClassValue(testInstanceIndex);
            votes[vote] += module.priorWeight * module.posteriorWeights[vote];
        }
        return normalise(votes);
    }

    /** Each module is queried for a fresh prediction, then casts its weight for that class. */
    @Override
    public double[] distributionForInstance(EnsembleModule[] modules, Instance testInstance) throws Exception {
        double[] votes = new double[numClasses];
        for (EnsembleModule module : modules) {
            double[] dist = distributionForNewInstance(module, testInstance);
            int vote = (int) indexOfMax(dist);
            votes[vote] += module.priorWeight * module.posteriorWeights[vote];
        }
        return normalise(votes);
    }
}
| 3,042 | 32.811111 | 111 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/MajorityVoteByConfidence.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import java.util.Arrays;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import static utilities.GenericTools.indexOfMax;
import weka.core.Instance;
/**
* Majority vote, however classifiers' vote is weighted by the confidence in their prediction,
* i.e distForInst[pred]
*
* @author James Large
*/
public class MajorityVoteByConfidence extends ModuleVotingScheme {

    public MajorityVoteByConfidence() {
    }

    public MajorityVoteByConfidence(int numClasses) {
        this.numClasses = numClasses;
    }

    @Override
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) {
        this.numClasses = numClasses;
    }

    /**
     * Each module votes for its predicted class, with a vote weight of
     * priorWeight * posteriorWeight[pred] * distForInst[pred], i.e. scaled by
     * the module's confidence in its own prediction.
     */
    @Override
    public double[] distributionForTrainInstance(EnsembleModule[] modules, int trainInstanceIndex) {
        double[] preds = new double[numClasses];

        int pred;
        for(int m = 0; m < modules.length; m++){
            pred = (int) modules[m].trainResults.getPredClassValue(trainInstanceIndex);
            preds[pred] += modules[m].priorWeight *
                    modules[m].posteriorWeights[pred] *
                    modules[m].trainResults.getProbabilityDistribution(trainInstanceIndex)[pred];
        }

        //debug start
        // unweighted tally of raw votes, printed alongside the weighted totals
        double[] unweightedPreds = new double[numClasses];

        for(int m = 0; m < modules.length; m++){
            pred = (int) modules[m].trainResults.getPredClassValue(trainInstanceIndex);
            unweightedPreds[pred]++;
        }

        for(int m = 0; m < modules.length; m++) {
            printlnDebug(modules[m].getModuleName() + " distForInst: " + Arrays.toString(modules[m].trainResults.getProbabilityDistribution(trainInstanceIndex)));
            printlnDebug(modules[m].getModuleName() + " priorweights: " + modules[m].priorWeight);
            printlnDebug(modules[m].getModuleName() + " postweights: " + Arrays.toString(modules[m].posteriorWeights));
            printlnDebug(modules[m].getModuleName() + " voteweight: " + (modules[m].priorWeight *
                    modules[m].posteriorWeights[(int) modules[m].trainResults.getPredClassValue(trainInstanceIndex)] *
                    modules[m].trainResults.getProbabilityDistribution(trainInstanceIndex)[(int) modules[m].trainResults.getPredClassValue(trainInstanceIndex)]));
        }

        printlnDebug("Ensemble Votes: " + Arrays.toString(unweightedPreds));
        printlnDebug("Ensemble Dist: " + Arrays.toString(preds));
        // NOTE(review): normalise() mutates preds in place, and this argument is
        // evaluated regardless of whether debug output is enabled. The final
        // return below then normalises the already-normalised array, which is a
        // no-op (the values already sum to one), so behaviour is unaffected.
        printlnDebug("Normed: " + Arrays.toString(normalise(preds)));
        printlnDebug("");
        //debug end

        return normalise(preds);
    }

    /** Same weighting as the train variant, drawing predictions from stored test results. */
    @Override
    public double[] distributionForTestInstance(EnsembleModule[] modules, int testInstanceIndex) {
        double[] preds = new double[numClasses];

        int pred;
        for(int m = 0; m < modules.length; m++){
            pred = (int) modules[m].testResults.getPredClassValue(testInstanceIndex);
            preds[pred] += modules[m].priorWeight *
                    modules[m].posteriorWeights[pred] *
                    modules[m].testResults.getProbabilityDistribution(testInstanceIndex)[pred];
        }

        return normalise(preds);
    }

    /** Same weighting, but each module is queried for a fresh distribution on the new instance. */
    @Override
    public double[] distributionForInstance(EnsembleModule[] modules, Instance testInstance) throws Exception {
        double[] preds = new double[numClasses];

        int pred;
        double[] dist;
        for(int m = 0; m < modules.length; m++){
            dist = distributionForNewInstance(modules[m], testInstance);
            pred = (int)indexOfMax(dist);
            preds[pred] += modules[m].priorWeight *
                    modules[m].posteriorWeights[pred] *
                    dist[pred];
        }

        return normalise(preds);
    }
}
| 4,805 | 39.05 | 170 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/MajorityVoteByCorrectedConfidence.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import static utilities.GenericTools.indexOfMax;
import weka.core.Instance;
/**
* Individuals vote based on their weight * (confidence in prediction - 1/C), where
* C is the number of classes. Subtracting 1/C effectively removes the aspect of random
* guessing. In a 2 class case, if a classifier's distforinst is .9,.1, it is very confident
* that the class is 0. However if its dist is .55,.45, it may as well just be guessing, however
* the value .55 by itself does not reflect that, because the range of values is 0-1
*
* The 'corrected' confidences would instead be in the range 0-0.5, and in the two cases above
* would then be .4 and .05. Thus this voting system disfavours more heavily those classifiers
* that are unsure of their decision
*
*
* @author James Large james.large@uea.ac.uk
*/
public class MajorityVoteByCorrectedConfidence extends ModuleVotingScheme {

    public MajorityVoteByCorrectedConfidence() {
    }

    public MajorityVoteByCorrectedConfidence(int numClasses) {
        this.numClasses = numClasses;
    }

    @Override
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) {
        this.numClasses = numClasses;
    }

    /**
     * Each module votes for its predicted class, weighted by
     * priorWeight * posteriorWeight[pred] * (confidence - 1/numClasses).
     * Subtracting 1/numClasses removes the random-guessing baseline, so a
     * module that is barely more confident than chance contributes almost
     * nothing to the vote.
     */
    @Override
    public double[] distributionForTrainInstance(EnsembleModule[] modules, int trainInstanceIndex) {
        final double chanceLevel = 1.0 / numClasses;
        double[] votes = new double[numClasses];

        for (EnsembleModule module : modules) {
            int vote = (int) module.trainResults.getPredClassValue(trainInstanceIndex);
            double conf = module.trainResults.getProbabilityDistribution(trainInstanceIndex)[vote];
            votes[vote] += module.priorWeight * module.posteriorWeights[vote] * (conf - chanceLevel);
        }

        return normalise(votes);
    }

    /** Same chance-corrected weighting, using the stored test-set predictions. */
    @Override
    public double[] distributionForTestInstance(EnsembleModule[] modules, int testInstanceIndex) {
        final double chanceLevel = 1.0 / numClasses;
        double[] votes = new double[numClasses];

        for (EnsembleModule module : modules) {
            int vote = (int) module.testResults.getPredClassValue(testInstanceIndex);
            double conf = module.testResults.getProbabilityDistribution(testInstanceIndex)[vote];
            votes[vote] += module.priorWeight * module.posteriorWeights[vote] * (conf - chanceLevel);
        }

        return normalise(votes);
    }

    /** Same chance-corrected weighting, querying each module afresh on the new instance. */
    @Override
    public double[] distributionForInstance(EnsembleModule[] modules, Instance testInstance) throws Exception {
        final double chanceLevel = 1.0 / numClasses;
        double[] votes = new double[numClasses];

        for (EnsembleModule module : modules) {
            double[] dist = distributionForNewInstance(module, testInstance);
            int vote = (int) indexOfMax(dist);
            votes[vote] += module.priorWeight * module.posteriorWeights[vote] * (dist[vote] - chanceLevel);
        }

        return normalise(votes);
    }
}
| 5,656 | 41.216418 | 186 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/MajorityVoteByPoweredConfidence.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import static utilities.GenericTools.indexOfMax;
import weka.core.Instance;
/**
* Individuals vote based on their weight * (confidence^power). The power scales the
* relative differences between different confidences, effectively up-weighting those
* individuals that are more confident in their vote
*
*
* @author James Large james.large@uea.ac.uk
*/
public class MajorityVoteByPoweredConfidence extends ModuleVotingScheme {

    /** Exponent applied to each module's confidence; values > 1 up-weight confident voters. */
    private double power = 2.0;

    public MajorityVoteByPoweredConfidence() {
    }

    public MajorityVoteByPoweredConfidence(double power) {
        this.power = power;
    }

    public MajorityVoteByPoweredConfidence(int numClasses) {
        this.numClasses = numClasses;
    }

    public MajorityVoteByPoweredConfidence(int numClasses, double power) {
        this.power = power;
        this.numClasses = numClasses;
    }

    public double getPower() {
        return power;
    }

    public void setPower(double power) {
        this.power = power;
    }

    @Override
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) {
        this.numClasses = numClasses;
    }

    /**
     * Each module votes for its predicted class with weight
     * priorWeight * posteriorWeight[pred] * confidence^power, scaling up the
     * relative differences between confident and unconfident modules.
     */
    @Override
    public double[] distributionForTrainInstance(EnsembleModule[] modules, int trainInstanceIndex) {
        double[] votes = new double[numClasses];

        for (EnsembleModule module : modules) {
            int vote = (int) module.trainResults.getPredClassValue(trainInstanceIndex);
            double conf = module.trainResults.getProbabilityDistribution(trainInstanceIndex)[vote];
            votes[vote] += module.priorWeight * module.posteriorWeights[vote] * Math.pow(conf, power);
        }

        return normalise(votes);
    }

    /** Same powered-confidence weighting, using the stored test-set predictions. */
    @Override
    public double[] distributionForTestInstance(EnsembleModule[] modules, int testInstanceIndex) {
        double[] votes = new double[numClasses];

        for (EnsembleModule module : modules) {
            int vote = (int) module.testResults.getPredClassValue(testInstanceIndex);
            double conf = module.testResults.getProbabilityDistribution(testInstanceIndex)[vote];
            votes[vote] += module.priorWeight * module.posteriorWeights[vote] * Math.pow(conf, power);
        }

        return normalise(votes);
    }

    /** Same powered-confidence weighting, querying each module afresh on the new instance. */
    @Override
    public double[] distributionForInstance(EnsembleModule[] modules, Instance testInstance) throws Exception {
        double[] votes = new double[numClasses];

        for (EnsembleModule module : modules) {
            double[] dist = distributionForNewInstance(module, testInstance);
            int vote = (int) indexOfMax(dist);
            votes[vote] += module.priorWeight * module.posteriorWeights[vote] * Math.pow(dist[vote], power);
        }

        return normalise(votes);
    }
}
| 4,013 | 33.307692 | 124 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/ModuleVotingScheme.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import utilities.DebugPrinting;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import evaluation.storage.ClassifierResults;
import java.io.Serializable;
import java.util.concurrent.TimeUnit;
import static utilities.GenericTools.indexOfMax;
import weka.core.Instance;
/**
* Base class for methods on combining ensemble members' ouputs into a single classification/distribution
*
* @author James Large
*/
public abstract class ModuleVotingScheme implements DebugPrinting, Serializable {

    /** Number of classes in the problem; set via trainVotingScheme or subclass constructors. */
    protected int numClasses;

    // presumably signals to the owning ensemble that this scheme needs each
    // module's train-set predictions available — TODO confirm against AbstractEnsemble
    public boolean needTrainPreds = false;

    /**
     * Prepares the scheme for voting. Base implementation only records
     * numClasses; subclasses override to compute weights or select modules.
     */
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) throws Exception {
        this.numClasses = numClasses;
    }

    /** Combined class distribution for the train instance at the given index. */
    public abstract double[] distributionForTrainInstance(EnsembleModule[] modules, int trainInstanceIndex) throws Exception;

    /** Predicted class for a train instance: argmax of the combined distribution. */
    public double classifyTrainInstance(EnsembleModule[] modules, int trainInstanceIndex) throws Exception {
        double[] dist = distributionForTrainInstance(modules, trainInstanceIndex);
        return indexOfMax(dist);
    }

    /** Combined class distribution for the test instance at the given index. */
    public abstract double[] distributionForTestInstance(EnsembleModule[] modules, int testInstanceIndex) throws Exception;

    /** Predicted class for a test instance: argmax of the combined distribution. */
    public double classifyTestInstance(EnsembleModule[] modules, int testInstanceIndex) throws Exception {
        double[] dist = distributionForTestInstance(modules, testInstanceIndex);
        return indexOfMax(dist);
    }

    /** Combined class distribution for a previously unseen instance. */
    public abstract double[] distributionForInstance(EnsembleModule[] modules, Instance testInstance) throws Exception;

    /** Predicted class for an unseen instance: argmax of the combined distribution. */
    public double classifyInstance(EnsembleModule[] modules, Instance testInstance) throws Exception {
        double[] dist = distributionForInstance(modules, testInstance);
        return indexOfMax(dist);
    }

//    protected static double indexOfMax(double[] dist) throws Exception {
//        double bsfWeight = -(Double.MAX_VALUE);
//        ArrayList<Integer> bsfClassVals = null;
//
//        for (int c = 0; c < dist.length; c++) {
//            if(dist[c] > bsfWeight){
//                bsfWeight = dist[c];
//                bsfClassVals = new ArrayList<>();
//                bsfClassVals.add(c);
//            }else if(dist[c] == bsfWeight){
//                bsfClassVals.add(c);
//            }
//        }
//
//        if(bsfClassVals == null)
//            throw new Exception("bsfClassVals == null, NaN problem");
//
//        double pred;
//        //if there's a tie for highest voted class after all module have voted, settle randomly
//        if(bsfClassVals.size()>1)
//            pred = bsfClassVals.get(new Random(0).nextInt(bsfClassVals.size()));
//        else
//            pred = bsfClassVals.get(0);
//
//        return pred;
//    }

    /**
     * Makes the array sum to 1, dividing in place and returning the same array.
     * If the total is exactly zero, falls back to a uniform distribution.
     */
    public double[] normalise(double[] dist) {
        //normalise so all sum to one
        double sum=dist[0];
        for(int i = 1; i < dist.length; i++)
            sum += dist[i];

        if (sum == 0.0)
            for(int i = 0; i < dist.length; i++)
                dist[i] = 1.0/dist.length;
        else
            for(int i = 0; i < dist.length; i++)
                dist[i] /= sum;

        return dist;
    }

    /**
     * Queries the module's classifier on a new instance, timing the prediction
     * in nanoseconds and recording it into the module's test results.
     */
    protected double[] distributionForNewInstance(EnsembleModule module, Instance inst) throws Exception {
        long startTime = System.nanoTime();
        double[] dist = module.getClassifier().distributionForInstance(inst);
        long predTime = System.nanoTime() - startTime;

        storeModuleTestResult(module, dist, predTime);

        return dist;
    }

    /**
     * Appends a prediction (distribution, argmax class, and prediction time) to
     * the module's test results, lazily creating the ClassifierResults object —
     * copying across the module's train build time — on first use.
     */
    public void storeModuleTestResult(EnsembleModule module, double[] dist, long predTime) throws Exception {
        if (module.testResults == null) {
            module.testResults = new ClassifierResults();
            module.testResults.setTimeUnit(TimeUnit.NANOSECONDS);
            module.testResults.setBuildTime(module.trainResults.getBuildTime());
        }

        module.testResults.addPrediction(dist, indexOfMax(dist), predTime, "");
    }

    @Override
    public String toString() {
        return this.getClass().getSimpleName();
    }
}
| 5,073 | 36.308824 | 126 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/NP_MAX.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
*
* krawczyk16combination
* http://www.sciencedirect.com/science/article/pii/S0925231216002757
*
* the optional pruning parameter/capability is not implemented
*
* @author James Large
*/
public class NP_MAX extends MajorityVote {

    //width of the gaussian used to transform the supports. The paper lists
    //sigma as a parameter but never discusses a value for it; in a quick test
    //between sigma = 0.5, 1, 2 there were seemingly only 1 or 2 differences in
    //predictions out of millions, likely down to double precision errors.
    //Defaults to 1.
    protected double sigma = 1.0;

    public NP_MAX() {
        super();
    }

    public NP_MAX(int numClasses) {
        super(numClasses);
    }

    public NP_MAX(double sigma) {
        super();
        this.sigma = sigma;
    }

    public NP_MAX(double sigma,int numClasses) {
        super(numClasses);
        this.sigma = sigma;
    }

    /**
     * Re-weights each module's per-class posterior weight by a gaussian
     * centred on the maximum support for that class over all modules, then
     * normalises so the weights for each class sum to one.
     *
     * Note: the previous implementation computed
     *   pow(1/sigma * sqrt(2*PI), (-w - max) / 2*sigma^2)
     * which is not the gaussian density this method describes (the original
     * author left a TODO acknowledging this). This version applies the
     * intended density g(w) = exp(-(w - max)^2 / (2*sigma^2)) / (sigma*sqrt(2*PI)),
     * so numeric weights will differ from results produced by the old formula.
     *
     * @param modules the ensemble modules whose posteriorWeights are updated in place
     * @param numClasses number of classes in the problem
     */
    @Override
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) {
        this.numClasses = numClasses;

        for (int c = 0; c < numClasses; c++) {
            double norm = .0;

            //find max of the support functions for this class
            double np_max = modules[0].posteriorWeights[c];
            for (int m = 1; m < modules.length; m++)
                if (modules[m].posteriorWeights[c] > np_max)
                    np_max = modules[m].posteriorWeights[c];

            double[] newWeights = new double[modules.length];
            for (int m = 0; m < modules.length; m++) {
                //gaussian of this module's support, centred on the max support
                double diff = modules[m].posteriorWeights[c] - np_max;
                newWeights[m] = Math.exp(-(diff * diff) / (2 * sigma * sigma))
                                / (sigma * Math.sqrt(2 * Math.PI));
                norm += newWeights[m];
            }

            //the optional pruning step from the paper is not implemented

            //norm is always > 0: the max-support module contributes exp(0)/(sigma*sqrt(2*PI))
            for (int m = 0; m < modules.length; m++)
                modules[m].posteriorWeights[c] = newWeights[m]/norm;
        }
    }
}
| 2,951 | 34.142857 | 136 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/NaiveBayesCombiner.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import static utilities.GenericTools.indexOfMax;
import weka.core.Instance;
/**
*
*
*
* @author James Large james.large@uea.ac.uk
*/
public class NaiveBayesCombiner extends ModuleVotingScheme {

    //postProbs[actual][module][predicted] estimates
    //P(module predicts 'predicted' | true class is 'actual'),
    //taken from each module's train confusion matrix (rows normalised).
    //With the laplace correction enabled, 1 is added to every confusion matrix cell
    //before normalising so no estimated probability is ever exactly zero. In principle
    //this could skew results on very small or heavily class-unbalanced train sets.
    protected double[/*actual class*/][/*module*/][/*predictedclass*/] postProbs;

    //priorClassProbs[c] = estimated P(class c) on the train set
    protected double[] priorClassProbs;

    protected boolean laplaceCorrection;

    public NaiveBayesCombiner() {
        this.needTrainPreds = true;
        this.laplaceCorrection = true;
    }

    public NaiveBayesCombiner(int numClasses) {
        this.numClasses = numClasses;
        this.needTrainPreds = true;
        this.laplaceCorrection = true;
    }

    public NaiveBayesCombiner(boolean laplaceCorrection) {
        this.needTrainPreds = true;
        this.laplaceCorrection = laplaceCorrection;
    }

    public NaiveBayesCombiner(boolean laplaceCorrection, int numClasses) {
        this.numClasses = numClasses;
        this.needTrainPreds = true;
        this.laplaceCorrection = laplaceCorrection;
    }

    /**
     * Estimates the class priors and the per-module conditional prediction
     * probabilities from the modules' train confusion matrices.
     */
    @Override
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) throws Exception {
        this.numClasses = numClasses;

        postProbs = new double[numClasses][modules.length][numClasses];
        priorClassProbs = new double[numClasses];

        int correction = laplaceCorrection ? 1 : 0;

        for (int ac = 0; ac < numClasses; ac++) {
            //row sum of the (corrected) confusion matrix = number of train insts of this true class
            double numInClass = 0;
            for (int pc = 0; pc < numClasses; pc++)
                numInClass += (modules[0].trainResults.confusionMatrix[ac][pc] + correction);

            //NOTE(review): with the correction applied, numInClass is inflated by numClasses
            //while numInstances() is not, so the priors may not sum exactly to one. The
            //final normalise() in combine() makes this a small relative-weighting effect only.
            priorClassProbs[ac] = numInClass / modules[0].trainResults.numInstances();

            for (int m = 0; m < modules.length; m++)
                for (int pc = 0; pc < numClasses; pc++)
                    postProbs[ac][m][pc] = (modules[m].trainResults.confusionMatrix[ac][pc] + correction) / numInClass;
        }
    }

    /**
     * Naive Bayes combination of the modules' predictions:
     * P(class | predictions) is proportional to
     * P(class) * product over modules of P(module's prediction | class),
     * additionally scaled by each module's prior/posterior ensemble weights.
     *
     * Previously the train and new-instance variants DIVIDED by the class prior
     * while the test variant multiplied (with a "TODO double check" comment).
     * Since postProbs are likelihoods P(pred | class), multiplying by the prior
     * is the correct naive bayes form; all three entry points now share it.
     *
     * @param preds preds[m] is module m's predicted class index
     */
    private double[] combine(EnsembleModule[] modules, int[] preds) {
        double[] dist = new double[numClasses];
        for (int ac = 0; ac < numClasses; ac++) {
            dist[ac] = 1;
            for (int m = 0; m < modules.length; m++)
                dist[ac] *= postProbs[ac][m][preds[m]] *
                            modules[m].priorWeight *
                            modules[m].posteriorWeights[preds[m]];
            dist[ac] *= priorClassProbs[ac];
        }
        return normalise(dist);
    }

    @Override
    public double[] distributionForTrainInstance(EnsembleModule[] modules, int trainInstanceIndex) {
        int[] preds = new int[modules.length];
        for (int m = 0; m < modules.length; m++)
            preds[m] = (int) modules[m].trainResults.getPredClassValue(trainInstanceIndex);
        return combine(modules, preds);
    }

    @Override
    public double[] distributionForTestInstance(EnsembleModule[] modules, int testInstanceIndex) {
        int[] preds = new int[modules.length];
        for (int m = 0; m < modules.length; m++)
            preds[m] = (int) modules[m].testResults.getPredClassValue(testInstanceIndex);
        return combine(modules, preds);
    }

    @Override
    public double[] distributionForInstance(EnsembleModule[] modules, Instance testInstance) throws Exception {
        int[] preds = new int[modules.length];
        for (int m = 0; m < modules.length; m++) {
            //times the prediction and stores it in the module's test results
            double[] mdist = distributionForNewInstance(modules[m], testInstance);
            preds[m] = (int) indexOfMax(mdist);
        }
        return combine(modules, preds);
    }
}
| 5,971 | 36.559748 | 120 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/stacking/AbstractStacking.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting.stacking;
import java.util.ArrayList;
import weka.classifiers.Classifier;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import machine_learning.classifiers.ensembles.voting.ModuleVotingScheme;
import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
/**
*
* A given classifier is trained on a set of instances where each inst is formed by
* concatenating the weighted distforinst output of each module for this instance. For
* unweighted distforinsts to be considered, can use of course just EqualWeighting()
*
* @author James Large
*/
public abstract class AbstractStacking extends ModuleVotingScheme {

    //the meta-classifier trained on the modules' weighted outputs
    protected Classifier classifier;
    //number of attributes in the meta-level dataset, including the class attribute
    protected int numOutputAtts;
    //empty header describing the meta-level dataset's format
    protected Instances instsHeader;

    public AbstractStacking(Classifier classifier) {
        this.classifier = classifier;
    }

    public AbstractStacking(Classifier classifier, int numClasses) {
        this.classifier = classifier;
        this.numClasses = numClasses;
    }

    public Classifier getClassifier() {
        return classifier;
    }

    /**
     * Builds the meta-level train set from the modules' stored train
     * distributions and trains the meta-classifier on it.
     */
    @Override
    public void trainVotingScheme(EnsembleModule[] modules, int numClasses) throws Exception {
        this.numClasses = numClasses;
        setNumOutputAttributes(modules);
        int numInsts = modules[0].trainResults.numInstances();
        initInstances();

        Instances insts = new Instances(this.instsHeader, numInsts);
        for (int i = 0; i < numInsts; i++)
            insts.add(buildInst(modules, true, i));

        classifier.buildClassifier(insts);
    }

    /**
     * Subclasses set numOutputAtts: the number of meta-level attributes they
     * produce, including the class attribute.
     */
    protected abstract void setNumOutputAttributes(EnsembleModule[] modules) throws Exception;

    /**
     * Builds a single meta-level instance from the given per-module (weighted)
     * distributions. classVal may be null when the true class is unknown.
     */
    protected abstract Instance buildInst(double[][] dists, Double classVal) throws Exception;

    /**
     * Builds the meta-level instance for the given train/test instance index,
     * weighting each module's stored distribution by its prior and per-class
     * posterior ensemble weights.
     */
    protected Instance buildInst(EnsembleModule[] modules, boolean train, int instIndex) throws Exception {
        double[][] dists = new double[modules.length][];

        for (int m = 0; m < modules.length; m++) {
            double[] storedDist;
            if (train)
                storedDist = modules[m].trainResults.getProbabilityDistribution(instIndex);
            else //test
                storedDist = modules[m].testResults.getProbabilityDistribution(instIndex);

            //copy before weighting: if getProbabilityDistribution returns a reference
            //to the stored array, weighting in place would corrupt the stored results
            //and compound on repeated calls
            dists[m] = storedDist.clone();

            for (int c = 0; c < numClasses; c++)
                dists[m][c] *= modules[m].priorWeight * modules[m].posteriorWeights[c];
        }

        Double classVal = train ? modules[0].trainResults.getTrueClassValue(instIndex) : null;
        return buildInst(dists, classVal);
    }

    /**
     * Initialises the meta-level dataset header: numOutputAtts-1 numeric
     * attributes plus a nominal class attribute with one value per class.
     */
    protected void initInstances() {
        ArrayList<Attribute> atts = new ArrayList<>(numOutputAtts);
        for (int i = 0; i < numOutputAtts-1; i++)
            atts.add(new Attribute(""+i));

        ArrayList<String> classVals = new ArrayList<>(numClasses);
        for (int i = 0; i < numClasses; i++)
            classVals.add("" + i);
        atts.add(new Attribute("class", classVals));

        instsHeader = new Instances("", atts, 1);
        instsHeader.setClassIndex(numOutputAtts-1);
    }

    @Override
    public double[] distributionForTrainInstance(EnsembleModule[] modules, int trainInstanceIndex) throws Exception {
        Instance inst = buildInst(modules, true, trainInstanceIndex);
        return classifier.distributionForInstance(inst);
    }

    @Override
    public double[] distributionForTestInstance(EnsembleModule[] modules, int testInstanceIndex) throws Exception {
        Instance inst = buildInst(modules, false, testInstanceIndex);
        return classifier.distributionForInstance(inst);
    }

    @Override
    public double[] distributionForInstance(EnsembleModule[] modules, Instance testInstance) throws Exception {
        double[][] dists = new double[modules.length][];
        for(int m = 0; m < modules.length; m++){
            long startTime = System.nanoTime();
            double[] rawDist = modules[m].getClassifier().distributionForInstance(testInstance);
            long predTime = System.nanoTime() - startTime;

            //store the unweighted distribution, then weight a copy so the array
            //handed to storeModuleTestResult is not mutated by the loop below
            storeModuleTestResult(modules[m], rawDist, predTime);
            dists[m] = rawDist.clone();

            for (int c = 0; c < numClasses; c++)
                dists[m][c] *= modules[m].priorWeight * modules[m].posteriorWeights[c];
        }

        Instance inst = buildInst(dists, null);
        return classifier.distributionForInstance(inst);
    }

    public String toString() {
        return super.toString() + "(" + classifier.getClass().getSimpleName() + ")";
    }
}
| 5,426 | 37.21831 | 117 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/stacking/StackingOnDists.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting.stacking;
import weka.classifiers.Classifier;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import weka.core.DenseInstance;
import weka.core.Instance;
/**
*
* @author James Large james.large@uea.ac.uk
*/
public class StackingOnDists extends AbstractStacking {

    public StackingOnDists(Classifier classifier) {
        super(classifier);
    }

    public StackingOnDists(Classifier classifier, int numClasses) {
        super(classifier, numClasses);
    }

    /**
     * One meta-level attribute per module per class, plus the class attribute.
     */
    @Override
    protected void setNumOutputAttributes(EnsembleModule[] modules) {
        this.numOutputAtts = modules.length*numClasses + 1; //each dist + class val
    }

    /**
     * Concatenates every module's distribution into a single meta-level
     * instance, appending the class value when it is known.
     */
    @Override
    protected Instance buildInst(double[][] dists, Double classVal) {
        double[] attValues = new double[numOutputAtts];

        int att = 0;
        for (int m = 0; m < dists.length; m++)
            for (int c = 0; c < numClasses; c++)
                attValues[att++] = dists[m][c];

        assert(att == numOutputAtts-1);

        if (classVal != null)
            attValues[att] = classVal;
        //else the class attribute's value is irrelevant

        //add to the (empty) header so the instance picks up a dataset
        //reference, then take it straight back out
        instsHeader.add(new DenseInstance(1.0, attValues));
        return instsHeader.remove(0);
    }
}
| 2,081 | 31.53125 | 83 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/stacking/StackingOnExtendedSetOfFeatures.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting.stacking;
import weka.classifiers.Classifier;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import weka.core.DenseInstance;
import weka.core.Instance;
/**
* Stacking on dists, dists multiplied by the max of the probabilities, and the entropies of the dists
*
* From section 3.2 of
*
(
@article{dvzeroski2004combining,
title={Is combining classifiers with stacking better than selecting the best one?},
author={D{\v{z}}eroski, Saso and {\v{Z}}enko, Bernard},
journal={Machine learning},
volume={54},
number={3},
pages={255--273},
year={2004},
publisher={Springer}
}
*
* @author James Large james.large@uea.ac.uk
*/
public class StackingOnExtendedSetOfFeatures extends AbstractStacking {

    public StackingOnExtendedSetOfFeatures(Classifier classifier) {
        super(classifier);
    }

    public StackingOnExtendedSetOfFeatures(Classifier classifier, int numClasses) {
        super(classifier, numClasses);
    }

    /**
     * Per module: the distribution, the distribution scaled by its maximum
     * probability, and its entropy; plus the class attribute.
     */
    @Override
    protected void setNumOutputAttributes(EnsembleModule[] modules) {
        this.numOutputAtts = modules.length*(numClasses*2 + 1) + 1; //each dist twice and their entropies + class val
    }

    /**
     * Builds the extended meta-level instance (section 3.2 of Dzeroski and
     * Zenko, 2004): each module contributes its distribution, the distribution
     * multiplied by its maximum probability, and the distribution's entropy.
     *
     * Zero probabilities are skipped in the entropy sum, taking 0*log(0) as 0
     * (the standard convention); previously they produced 0 * -Infinity = NaN,
     * poisoning the entropy feature handed to the meta-classifier.
     */
    @Override
    protected Instance buildInst(double[][] dists, Double classVal) {
        double[] instData = new double[numOutputAtts];

        int i = 0;
        for (int m = 0; m < dists.length; m++) {
            for (int c = 0; c < numClasses; c++)
                instData[i++] = dists[m][c];

            double maxProbability = utilities.GenericTools.max(dists[m]);
            for (int c = 0; c < numClasses; c++)
                instData[i++] = dists[m][c] * maxProbability;

            double entropy = 0;
            for (int c = 0; c < numClasses; c++)
                if (dists[m][c] > 0) //0*log(0) := 0
                    entropy -= dists[m][c] * (Math.log(dists[m][c]) / Math.log(2)); //change of base formula
            instData[i++] = entropy;
        }

        assert(i == numOutputAtts-1);

        if (classVal != null)
            instData[i] = classVal;
        //else the class attribute's value is irrelevant

        //add to the (empty) header so the instance picks up a dataset
        //reference, then take it straight back out
        instsHeader.add(new DenseInstance(1.0, instData));
        return instsHeader.remove(0);
    }
}
| 3,066 | 33.460674 | 116 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/voting/stacking/StackingOnPreds.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.voting.stacking;
import weka.classifiers.Classifier;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import static utilities.GenericTools.indexOfMax;
import weka.core.DenseInstance;
import weka.core.Instance;
/**
*
* @author James Large james.large@uea.ac.uk
*/
public class StackingOnPreds extends AbstractStacking {

    public StackingOnPreds(Classifier classifier) {
        super(classifier);
    }

    public StackingOnPreds(Classifier classifier, int numClasses) {
        super(classifier, numClasses);
    }

    /**
     * One meta-level attribute per module (its predicted class), plus the
     * class attribute.
     */
    @Override
    protected void setNumOutputAttributes(EnsembleModule[] modules) {
        this.numOutputAtts = modules.length + 1; //each pred + class val
    }

    /**
     * Builds a meta-level instance holding each module's predicted class
     * (argmax of its distribution), appending the true class when known.
     */
    @Override
    protected Instance buildInst(double[][] dists, Double classVal) throws Exception {
        double[] attValues = new double[numOutputAtts];

        for (int m = 0; m < dists.length; m++)
            attValues[m] = indexOfMax(dists[m]);

        if (classVal != null)
            attValues[numOutputAtts-1] = classVal;
        //else the class attribute's value is irrelevant

        //add to the (empty) header so the instance picks up a dataset
        //reference, then take it straight back out
        instsHeader.add(new DenseInstance(1.0, attValues));
        return instsHeader.remove(0);
    }
}
| 2,037 | 32.409836 | 86 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightedvoters/CAWPE_MajorityVote.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightedvoters;
import machine_learning.classifiers.ensembles.voting.MajorityVote;
import machine_learning.classifiers.ensembles.weightings.EqualWeighting;
import machine_learning.classifiers.ensembles.CAWPE;
/**
* Implemented as separate classifier for explicit comparison, from Kuncheva and Rodríguez (2014)
*
* @author James Large (james.large@uea.ac.uk)
*/
public class CAWPE_MajorityVote extends CAWPE {

    /**
     * Configures the base CAWPE ensemble as an unweighted majority vote:
     * equal module weighting, each module casting a single vote.
     */
    public CAWPE_MajorityVote() {
        super(); //sets up the default base classifiers etc

        //then override the parts that define this variant
        votingScheme = new MajorityVote();
        weightingScheme = new EqualWeighting();
        ensembleName = "CAWPE_MajorityVote";
    }
}
| 1,504 | 37.589744 | 97 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightedvoters/CAWPE_NaiveBayesCombiner.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightedvoters;
import machine_learning.classifiers.ensembles.voting.NaiveBayesCombiner;
import machine_learning.classifiers.ensembles.weightings.EqualWeighting;
import machine_learning.classifiers.ensembles.CAWPE;
/**
* Implemented as separate classifier for explicit comparison, from Kuncheva and Rodríguez (2014)
*
* @author James Large (james.large@uea.ac.uk)
*/
public class CAWPE_NaiveBayesCombiner extends CAWPE {

    /**
     * Configures the base CAWPE ensemble to combine its modules' votes with a
     * naive bayes combiner over equally-weighted modules.
     */
    public CAWPE_NaiveBayesCombiner() {
        super(); //sets up the default base classifiers etc

        //then override the parts that define this variant
        votingScheme = new NaiveBayesCombiner();
        weightingScheme = new EqualWeighting();
        ensembleName = "CAWPE_NaiveBayesCombiner";
    }
}
| 1,532 | 37.325 | 97 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightedvoters/CAWPE_PickBest.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightedvoters;
import machine_learning.classifiers.ensembles.voting.BestIndividualTrain;
import machine_learning.classifiers.ensembles.weightings.EqualWeighting;
import machine_learning.classifiers.ensembles.CAWPE;
/**
*
* @author James Large (james.large@uea.ac.uk)
*/
public class CAWPE_PickBest extends CAWPE {

    /**
     * Configures the base CAWPE ensemble to simply copy the predictions of
     * the individual with the best train performance.
     */
    public CAWPE_PickBest() {
        super(); //sets up the default base classifiers etc

        //then override the parts that define this variant
        votingScheme = new BestIndividualTrain();
        weightingScheme = new EqualWeighting(); //weighting is irrelevant here
        ensembleName = "CAWPE_PickBest";
    }
}
| 1,407 | 36.052632 | 76 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightedvoters/CAWPE_RecallCombiner.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightedvoters;
import machine_learning.classifiers.ensembles.voting.MajorityVote;
import machine_learning.classifiers.ensembles.weightings.RecallByClass;
import machine_learning.classifiers.ensembles.CAWPE;
/**
* Implemented as separate classifier for explicit comparison, from Kuncheva and Rodríguez (2014)
*
* @author James Large (james.large@uea.ac.uk)
*/
public class CAWPE_RecallCombiner extends CAWPE {

    /**
     * Configures the base CAWPE ensemble as a majority vote with each module
     * weighted by its per-class recall on the train set.
     */
    public CAWPE_RecallCombiner() {
        super(); //sets up the default base classifiers etc

        //then override the parts that define this variant
        votingScheme = new MajorityVote();
        weightingScheme = new RecallByClass();
        ensembleName = "CAWPE_RecallCombiner";
    }
}
| 1,505 | 37.615385 | 97 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightedvoters/CAWPE_TunedAlpha.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightedvoters;
import experiments.data.DatasetLists;
import fileIO.OutFile;
import java.io.FileNotFoundException;
import java.util.concurrent.TimeUnit;
import machine_learning.classifiers.ensembles.voting.BestIndividualTrain;
import machine_learning.classifiers.ensembles.voting.MajorityConfidence;
import machine_learning.classifiers.ensembles.weightings.EqualWeighting;
import machine_learning.classifiers.ensembles.weightings.TrainAcc;
import evaluation.storage.ClassifierResults;
import utilities.StatisticalUtilities;
import machine_learning.classifiers.ensembles.CAWPE;
import weka.core.Instances;
import weka.filters.Filter;
/**
* Tunes the value of alpha for a given dataset. Not much slower than normal hesca
* anyway if the base classifier results are given, since we're just playing with cached results.
*
* @author James Large (james.large@uea.ac.uk)
*/
public class CAWPE_TunedAlpha extends CAWPE {

    //candidate values for the TrainAcc weighting exponent alpha.
    //where Integer.MAX_VALUE is a marker for pick best, as alpha tends to infinity
    //0 = equal vote
    //1 = regular weighted vote
    public int[] alphaParaRange = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, Integer.MAX_VALUE };
    //the alpha value selected on the last buildClassifier call (4 until then)
    public int alpha = 4;
    //train accuracy achieved by each candidate alpha, aligned with alphaParaRange;
    //null until buildClassifier has run
    public double[] alphaParaAccs = null;

    public CAWPE_TunedAlpha() {
        super(); //sets default classifiers etc
        //overwriting relevant parts
        //NOTE(review): ensembleName does not match the class name; results files are
        //read/written under "HESCA_TunedAlpha" (see buildParaAnalysisFiles), so confirm
        //no stored results depend on it before renaming for consistency with siblings
        ensembleName = "HESCA_TunedAlpha";
    }

    /**
     * Builds the ensemble once, then evaluates every candidate alpha on the
     * cached module results and keeps the combination scheme (and train
     * results) of the best-performing alpha.
     */
    @Override
    public void buildClassifier(Instances data) throws Exception {
        printlnDebug("**HESCA TRAIN**");

        //housekeeping
        if (resultsFilesParametersInitialised) {
            if (readResultsFilesDirectories.length > 1)
                if (readResultsFilesDirectories.length != modules.length)
                    throw new Exception("HESCA.buildClassifier: more than one results path given, but number given does not align with the number of classifiers/modules.");

            if (writeResultsFilesDirectory == null)
                writeResultsFilesDirectory = readResultsFilesDirectories[0];
        }

        long startTime = System.nanoTime();

        //transform data if specified
        if(this.transform==null){
            this.trainInsts = new Instances(data);
        }else{
            this.trainInsts = transform.transform(data);
        }

        //init
        this.numTrainInsts = trainInsts.numInstances();
        this.numClasses = trainInsts.numClasses();
        this.numAttributes = trainInsts.numAttributes();

        //set up modules
        initialiseModules();

        //evaluate every candidate alpha against the (cached) module results
        ClassifierResults[] alphaResults = new ClassifierResults[alphaParaRange.length];
        alphaParaAccs = new double[alphaParaRange.length];

        double maxAcc = -1;
        int maxAccInd = -1;
        //in case of ties, keeps earliest intentionally, i.e favours more evenly weighted ensemble
        //(less chance of overfitting) than going towards pick best
        for (int i = 0; i < alphaParaRange.length; i++) {
            initCombinationSchemes(alphaParaRange[i]);
            alphaResults[i] = estimateEnsemblePerformance(data);
            alphaParaAccs[i] = alphaResults[i].getAcc();

            if (alphaResults[i].getAcc() > maxAcc) {
                maxAcc = alphaResults[i].getAcc();
                maxAccInd = i;
            }
        }

        //commit to the winning alpha: rebuild its combination scheme and
        //adopt its train results as this ensemble's train results
        this.alpha = alphaParaRange[maxAccInd];
        initCombinationSchemes(alpha);
        trainResults = alphaResults[maxAccInd];

        long buildTime = System.nanoTime() - startTime;
        trainResults.setTimeUnit(TimeUnit.NANOSECONDS);
        trainResults.setBuildTime(buildTime);

        this.testInstCounter = 0; //prep for start of testing
    }

    /**
     * Installs the weighting/voting scheme pair corresponding to the given
     * alpha value (0 = equal vote, MAX_VALUE = pick best, otherwise
     * TrainAcc^alpha weighting) and trains them on the current modules.
     */
    protected void initCombinationSchemes(int alphaVal) throws Exception {
        if (alphaVal == 0) {
            weightingScheme = new EqualWeighting();
            votingScheme = new MajorityConfidence();
        }
        else if (alphaVal == Integer.MAX_VALUE) {
            weightingScheme = new EqualWeighting(); //actual weighting is irrelevant
            votingScheme = new BestIndividualTrain(); //just copy over the results of the best individual
        } else {
            weightingScheme = new TrainAcc(alphaVal);
            votingScheme = new MajorityConfidence();
        }

        weightingScheme.defineWeightings(modules, numClasses);
        votingScheme.trainVotingScheme(modules, numClasses);
    }

    /**
     * Reports build time, train accuracy, the chosen schemes, each module's
     * weights, and the accuracy of every candidate alpha as a comma-separated
     * string. buildParaAnalysisFiles below parses this exact format back in.
     */
    @Override
    public String getParameters(){
        StringBuilder out = new StringBuilder();

        if (trainResults != null) //cv performed
            out.append("BuildTime,").append(trainResults.getBuildTime()).append(",Trainacc,").append(trainResults.getAcc()).append(",");
        else
            out.append("BuildTime,").append("-1").append(",Trainacc,").append("-1").append(",");

        out.append(weightingScheme.toString()).append(",").append(votingScheme.toString()).append(",");

        //one entry per module: name(priorWeight/posteriorWeight0/posteriorWeight1/...)
        for(int m = 0; m < modules.length; m++){
            out.append(modules[m].getModuleName()).append("(").append(modules[m].priorWeight);
            for (int j = 0; j < modules[m].posteriorWeights.length; ++j)
                out.append("/").append(modules[m].posteriorWeights[j]);
            out.append("),");
        }

        //final field: alphaParaAccs=alpha0:acc0/alpha1:acc1/...
        out.append("alphaParaAccs=").append(alphaParaRange[0]).append(":").append(alphaParaAccs[0]);
        for (int i = 1; i < alphaParaRange.length; i++)
            out.append("/").append(alphaParaRange[i]).append(":").append(alphaParaAccs[i]);

        return out.toString();
    }

    public static void main(String[] args) throws Exception {
//        buildParaAnalysisFiles();
    }

    /**
     * Experimental analysis: reads back the per-fold results files written for
     * "HESCA_TunedAlpha" on the UCI datasets, parses the parameter string
     * produced by getParameters(), and writes csv summaries of the selected
     * alpha values and the accuracy of each candidate alpha per dataset/fold.
     *
     * Assumes results exist at the hard-coded resPath for all 30 folds of
     * every dataset in DatasetLists.UCIContinuousFileNames.
     */
    public static void buildParaAnalysisFiles() throws FileNotFoundException, Exception {
        String resPath = "C:/JamesLPHD/HESCA/UCI/UCIResults/";
        int numfolds = 30;
        String[] dsets = DatasetLists.UCIContinuousFileNames;
        String classifier = "HESCA_TunedAlpha";

        //both dset by fold
        OutFile outAlphaSelected = new OutFile(resPath + classifier + "/alphaParaValues.csv");
        OutFile outDsetStdDevOverAlpha = new OutFile(resPath + classifier + "/alphaParaStdDevOverAlphaAccForEachFold.csv");

        //are dset (or dset_foldid) by alpha value
        OutFile outTSSAlphaAccs = new OutFile(resPath + classifier + "/alphaParaAccsByFold.csv");
        OutFile outDsetAvgAlphaAccs = new OutFile(resPath + classifier + "/alphaParasAvgOverDataset.csv");
        OutFile outDsetStdDevOverFolds = new OutFile(resPath + classifier + "/alphaParaStdDevInAccOverFoldsForEachPara.csv");

        //header row: one column per candidate alpha
        for (int alpha : new CAWPE_TunedAlpha().alphaParaRange) {
            outTSSAlphaAccs.writeString("," + alpha);
            outDsetStdDevOverFolds.writeString("," + alpha);
            outDsetAvgAlphaAccs.writeString("," + alpha);
        }
        outTSSAlphaAccs.writeLine("");
        outDsetStdDevOverFolds.writeLine("");
        outDsetAvgAlphaAccs.writeLine("");

        System.out.println("\t" + classifier);

        for (String dset : dsets) {
            System.out.println(dset);

            outAlphaSelected.writeString(dset);
            outDsetStdDevOverAlpha.writeString(dset);
            outDsetAvgAlphaAccs.writeString(dset);
            outDsetStdDevOverFolds.writeString(dset);

            double[][] alphaByFoldAccs = new double[new CAWPE_TunedAlpha().alphaParaRange.length][numfolds];

            for (int fold = 0; fold < numfolds; fold++) {
                String predictions = resPath+classifier+"/Predictions/"+dset;

                ClassifierResults cr = new ClassifierResults(predictions+"/testFold"+fold+".csv");
                //paras format is produced by getParameters() above
                String[] paraParts = cr.getParas().split(",");

                //handling outTSSAlphaAccs
                outTSSAlphaAccs.writeString(dset + "_" + fold);
                //last field is "alphaParaAccs=a0:acc0/a1:acc1/..."
                String[] alphaParaAccStrings = paraParts[paraParts.length-1].split("/");

                double[] alphaAccsOnThisFold = new double[alphaParaAccStrings.length];

                for (int i = 0; i < alphaParaAccStrings.length; ++i) {
                    String paraAcc = alphaParaAccStrings[i];
                    double acc = Double.parseDouble(paraAcc.split(":")[1]);

                    outTSSAlphaAccs.writeString("," + acc);
                    alphaAccsOnThisFold[i] = acc;
                    alphaByFoldAccs[i][fold] = acc;
                }
                outTSSAlphaAccs.writeLine("");
                outDsetStdDevOverAlpha.writeString("," + StatisticalUtilities.standardDeviation(alphaAccsOnThisFold, false, StatisticalUtilities.mean(alphaAccsOnThisFold, false)));

                //handling outAlphaSelected: recover the chosen scheme from the
                //weighting scheme's toString in the paras (field index 2)
                String weightToString = paraParts[2];
                if (weightToString.equals("EqualWeighting"))
                    outAlphaSelected.writeString(",equal");
                else if(weightToString.equals("BestIndividualTrain"))
                    outAlphaSelected.writeString(",pickbest");
                else {
                    int alphaSelected = 1;
                    if (weightToString.contains("("))
                        alphaSelected = (int)Double.parseDouble(weightToString.split("\\(")[1].split("\\)")[0]); // TrainAcc(4.0) => 4

                    outAlphaSelected.writeString("," + alphaSelected);
                }
            }

            //per-dataset summaries over the folds, one column per alpha
            for (int i = 0; i < alphaByFoldAccs.length; i++) {
                double meanAlphaAccOnDset = StatisticalUtilities.mean(alphaByFoldAccs[i], false);

                outDsetAvgAlphaAccs.writeString("," + meanAlphaAccOnDset);
                outDsetStdDevOverFolds.writeString("," + StatisticalUtilities.standardDeviation(alphaByFoldAccs[i], false, meanAlphaAccOnDset));
            }

            outAlphaSelected.writeLine("");
            outDsetStdDevOverAlpha.writeLine("");
            outDsetAvgAlphaAccs.writeLine("");
            outDsetStdDevOverFolds.writeLine("");
        }

        outAlphaSelected.closeFile();
        outTSSAlphaAccs.closeFile();
        outDsetAvgAlphaAccs.closeFile();
        outDsetStdDevOverFolds.closeFile();
        outDsetStdDevOverAlpha.closeFile();
    }
}
| 11,386 | 42.628352 | 180 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightedvoters/CAWPE_WeightedMajorityVote.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightedvoters;
import machine_learning.classifiers.ensembles.voting.MajorityVote;
import machine_learning.classifiers.ensembles.weightings.TrainAcc;
import machine_learning.classifiers.ensembles.CAWPE;
/**
* Implemented as separate classifier for explicit comparison, from Kuncheva and Rodríguez (2014)
*
* @author James Large (james.large@uea.ac.uk)
*/
public class CAWPE_WeightedMajorityVote extends CAWPE {

    /**
     * Builds the default CAWPE ensemble, then swaps in the components that
     * define this variant: train-accuracy weights combined by majority vote.
     */
    public CAWPE_WeightedMajorityVote() {
        super(); // default classifiers/settings from CAWPE

        // overwrite only the parts that distinguish this variant
        votingScheme = new MajorityVote();
        weightingScheme = new TrainAcc();
        ensembleName = "HESCA_WeightedMajorityVote";
    }
}
| 1,514 | 37.846154 | 97 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/AUROC.java | /*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
* Will call findMeanAuroc() on each module's results, therefore not necessary to call
* it within HESCA/whatever ensemble
*
* @author James Large (james.large@uea.ac.uk)
*/
public class AUROC extends ModuleWeightingScheme {

    /** Exponent applied to the mean AUROC, to exaggerate/reduce relative differences. */
    private double power = 1.0;

    public AUROC() {
        uniformWeighting = true;
        needTrainPreds = true;
    }

    public AUROC(double power) {
        this();
        this.power = power;
    }

    public double getPower() {
        return power;
    }

    public void setPower(double power) {
        this.power = power;
    }

    /**
     * Weights the module uniformly across classes by its mean train AUROC
     * raised to {@code power}; findMeanAUROC() is invoked here, so the
     * ensemble need not call it beforehand.
     */
    @Override
    public double[] defineWeighting(EnsembleModule module, int numClasses) {
        double meanAuroc = module.trainResults.findMeanAUROC();
        return makeUniformWeighting(Math.pow(meanAuroc, power), numClasses);
    }

    @Override
    public String toString() {
        String suffix = (power == 1.0) ? "" : "(" + power + ")";
        return this.getClass().getSimpleName() + suffix;
    }
}
| 1,814 | 29.25 | 102 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/AvgCorrectedTrainAcc.java | /*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
 * Sets each module's weighting to Math.max(0.01, module.trainResults.acc - 1.0/numClasses),
 * i.e. the train accuracy corrected by the accuracy expected from random guessing
 * under a uniform class distribution. (Contrast with MaxCorrectedTrainAcc, which
 * subtracts the majority-class proportion instead.)
 *
 * @author James Large (james.large@uea.ac.uk)
 */
public class AvgCorrectedTrainAcc extends ModuleWeightingScheme {

    public AvgCorrectedTrainAcc() {
        uniformWeighting = true;
        needTrainPreds = true;
    }

    // Uniform weight across classes: corrected train acc, floored at 0.01.
    @Override
    public double[] defineWeighting(EnsembleModule module, int numClasses) {
        //made non zero (effectively 1% accuracy) in weird case that all classifiers get less than expected acc
        return makeUniformWeighting(Math.max(0.01, module.trainResults.getAcc() - (1.0/numClasses)), numClasses);
    }

    @Override
    public String toString() {
        return this.getClass().getSimpleName();
    }
}
| 1,731 | 38.363636 | 113 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/BalancedAccuracy.java | /*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
* Will call findBalancedAcc() on each module's results, therefore not necessary to call
* it within HESCA/whatever ensemble
*
* @author James Large (james.large@uea.ac.uk)
*/
public class BalancedAccuracy extends ModuleWeightingScheme {

    /** Exponent applied to the balanced accuracy, to stretch/shrink relative differences. */
    private double power = 1.0;

    public BalancedAccuracy() {
        uniformWeighting = true;
        needTrainPreds = true;
    }

    public BalancedAccuracy(double power) {
        this();
        this.power = power;
    }

    public double getPower() {
        return power;
    }

    public void setPower(double power) {
        this.power = power;
    }

    /**
     * Weights the module uniformly across classes by its balanced train accuracy
     * raised to {@code power}. Calls findAllStats() itself, since balancedAcc
     * (and countsPerClass) are not populated until that is invoked.
     */
    @Override
    public double[] defineWeighting(EnsembleModule module, int numClasses) {
        module.trainResults.findAllStats(); //countsPerClass not initialised without this call
        double weight = Math.pow(module.trainResults.balancedAcc, power);
        return makeUniformWeighting(weight, numClasses);
    }

    @Override
    public String toString() {
        String suffix = (power == 1.0) ? "" : "(" + power + ")";
        return this.getClass().getSimpleName() + suffix;
    }
}
| 1,939 | 31.333333 | 98 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/ConfusionEntropy.java | /*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
*
* Uses Confusion Entropy (CEN) to weight modules, which is a measure related to the
* entropy of a confusion matrix
*
* Reportedly unreliable for 2 class matrices in some cases, implemented for completeness
*
* http://cs.tju.edu.cn/szdw/jsfjs/huqinghua/papers/A%20novel%20measure%20for%20evaluating%20classifiers.pdf
*
* @author James Large
*/
public class ConfusionEntropy extends ModuleWeightingScheme {

    public ConfusionEntropy() {
        uniformWeighting = true;
        needTrainPreds = true;
    }

    // NOTE(review): CEN is 0 for a perfect classifier and grows with confusion, yet
    // it is used directly as the weight here (a CEN of exactly 0 is then bumped to 1
    // by makeUniformWeighting). That appears to reward *more* confused modules --
    // confirm whether 1-CEN was intended; the class javadoc notes this scheme was
    // implemented for completeness.
    @Override
    public double[] defineWeighting(EnsembleModule module, int numClasses) {
        return makeUniformWeighting(computeCEN(module.trainResults.confusionMatrix), numClasses);
    }

    /**
     * Overall confusion entropy: CEN = sum_j P_j * CEN_j, where P_j is the
     * misclassification-mass proportion of class j and CEN_j its per-class entropy.
     */
    protected double computeCEN(double[][] confMat) {
        double cen = .0;
        int n = confMat.length;

        double cen_j = .0;
        double p_j = .0;
        for (int j = 0; j < n; j++) {
            p_j = p_j(confMat, j, n);
            cen_j = cen_j(confMat, j, n);

            cen += p_j * cen_j;
        }

        return cen;
    }

    /**
     * Per-class confusion entropy of class j: the entropy (base 2(n-1)) of the
     * misclassification probabilities involving j, summed over all other classes k.
     * Zero-probability terms contribute 0 (the 0*log(0) = 0 convention).
     */
    protected double cen_j(double[][] confMat, int j, int n) {
        double logbase = Math.log(2*(n-1)); // divisor for change of base to 2(n-1)

        double cen_j = 0.0;
        double pK_kj = .0, pJ_kj = .0;
        for (int k = 0; k < n; k++) {
            if (k != j) {
                pK_kj = pK_kj(confMat, j, k, n);
                pJ_kj = pJ_kj(confMat, j, k, n);

                //using logb(n) = log(n) / log(b) identity to get to correct base
                cen_j -= pK_kj == .0 ? .0 : (pK_kj * (Math.log(pK_kj) / logbase));
                cen_j -= pJ_kj == .0 ? .0 : (pJ_kj * (Math.log(pJ_kj) / logbase));
            }
        }

        return cen_j;
    }

    /**
     * P_j: class j's share of the total confusion-matrix mass, counting both
     * row j (true class j) and column j (predicted class j), normalised by
     * twice the matrix total.
     */
    protected double p_j(double[][] confMat, int j, int n) {
        double pj = 0.0;
        double den = 0.0;
        for (int k = 0; k < n; k++) {
            pj += confMat[j][k] + confMat[k][j];

            for (int l = 0; l < n; l++)
                den += confMat[k][l];
        }

        return pj / (2*den);
    }

    /**
     * P^K_{kj}: probability of confusing classes k and j "subject to class k",
     * i.e. C[k][j] over the total mass of row k plus column k.
     */
    protected double pK_kj(double[][] confMat, int j, int k, int n) {
        double C_kj = confMat[k][j];

        double den = 0.0;
        for (int l = 0; l < n; l++)
            den += confMat[k][l] + confMat[l][k];

        return C_kj / den;
    }

    /**
     * P^J_{kj}: probability of confusing classes k and j "subject to class j",
     * i.e. C[k][j] over the total mass of row j plus column j.
     */
    protected double pJ_kj(double[][] confMat, int j, int k, int n) {
        double C_kj = confMat[k][j];

        double den = 0.0;
        for (int l = 0; l < n; l++)
            den += confMat[j][l] + confMat[l][j];

        return C_kj / den;
    }
}
| 3,512 | 30.088496 | 108 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/EqualWeighting.java | /*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
*
* Gives equal weights to all modules, i.e simple majority vote
*
* @author James Large
*/
public class EqualWeighting extends ModuleWeightingScheme {

    public EqualWeighting() {
        uniformWeighting = true;
        // a simple majority vote needs no train estimates at all
        needTrainPreds = false;
    }

    /** Every module gets the same unit weight for every class. */
    @Override
    public double[] defineWeighting(EnsembleModule trainPredictions, int numClasses) {
        return makeUniformWeighting(1.0, numClasses);
    }
}
| 1,251 | 31.102564 | 86 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/FScore.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
* Non uniform weighting scheme, uses F-measure to give a weighting composed of
* the classifier's precision and recall *for each class*
*
* @author James Large
*/
public class FScore extends ModuleWeightingScheme {

    /** Recall-vs-precision trade-off parameter of the F-measure (1.0 = balanced F1). */
    private double beta = 1.0;
    /** Exponent applied to each class's F-score, to stretch/shrink relative differences. */
    private double power = 1.0;

    public FScore() {
        uniformWeighting = false;
        needTrainPreds = true;
    }

    public FScore(double power) {
        this();
        this.power = power;
    }

    public FScore(double power, double beta) {
        this(power);
        this.beta = beta;
    }

    public double getBeta() {
        return beta;
    }

    public void setBeta(double beta) {
        this.beta = beta;
    }

    public double getPower() {
        return power;
    }

    public void setPower(double power) {
        this.power = power;
    }

    /**
     * Non-uniform weighting: one F-score per class, computed from the module's
     * train confusion matrix and raised to {@code power}.
     */
    @Override
    public double[] defineWeighting(EnsembleModule module, int numClasses) {
        double[][] confMat = module.trainResults.confusionMatrix;
        double[] weights = new double[numClasses];
        for (int c = 0; c < numClasses; c++)
            weights[c] = Math.pow(computeFScore(confMat, c), power);
        return weights;
    }

    /**
     * F-beta score of class c from the confusion matrix (indexed
     * [actual class][predicted class]).
     */
    protected double computeFScore(double[][] confMat, int c) {
        double tp = confMat[c][c]; //[actual class][predicted class]
        if (tp == .0)
            return .0000001;
            // some very small non-zero value, for the extreme case that no classifier
            // in the entire ensemble classified cases of this class correctly
            // (happened once on adiac, 37 classes)

        double fp = 0.0, fn = 0.0;
        for (int i = 0; i < confMat.length; i++) {
            if (i == c)
                continue;
            fp += confMat[i][c]; // predicted c, actually i
            fn += confMat[c][i]; // actually c, predicted i
        }

        double precision = tp / (tp+fp);
        double recall = tp / (tp+fn);
        double b2 = beta * beta;

        return (1+b2) * (precision*recall) / (b2*precision + recall);
    }

    @Override
    public String toString() {
        return this.getClass().getSimpleName() + "(" + power + "," + beta + ")";
    }
}
| 3,202 | 29.504762 | 85 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/MCCWeighting.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
* Uses the Matthews Correlation Coefficient (MCC) to define the weighting of a module
* MCC is a score calculated from the confusion matrix of the module's predictions
*
* @author James Large
*/
public class MCCWeighting extends ModuleWeightingScheme {

    /** Exponent applied to the rescaled MCC, to stretch/shrink relative differences. */
    private double power = 1.0;

    public MCCWeighting() {
        uniformWeighting = true;
        // Fix: this scheme reads module.trainResults.mcc, so train predictions are
        // required; previously only the (double) constructor set this flag, leaving
        // the default-constructed scheme inconsistent with every sibling scheme.
        needTrainPreds = true;
    }

    public MCCWeighting(double power) {
        this.power = power;
        uniformWeighting = true;
        needTrainPreds = true;
    }

    public double getPower() {
        return power;
    }

    public void setPower(double power) {
        this.power = power;
    }

    /**
     * Weights the module uniformly across classes by its train-results MCC,
     * rescaled from [-1,1] to [0,1] and raised to {@code power}. Assumes
     * trainResults.mcc has already been computed (e.g. via findAllStats).
     */
    @Override
    public double[] defineWeighting(EnsembleModule module, int numClasses) {
        //mcc returns vals in range -1,1, need it in range 0,1, so (w + 1)/2
        double weight = (module.trainResults.mcc + 1) / 2;
        return makeUniformWeighting(Math.pow(weight, power), numClasses);
    }

    // (removed a large block of commented-out, unused computeMCC code; the MCC
    // value consumed here is taken pre-computed from the module's train results)

    @Override
    public String toString() {
        return this.getClass().getSimpleName() + (power==1.0 ? "" : "(" + power + ")");
    }
}
| 3,636 | 33.311321 | 87 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/MaxCorrectedTrainAcc.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
* Sets each module's weighting to Math.max(0.01, module.trainResults.acc - maxClassWeighting),
* where maxClassWeighting is the proportion of cases belonging to the most common class,
* i.e the accuracy expected from a completely biased classifier
*
* @author James Large (james.large@uea.ac.uk)
*/
public class MaxCorrectedTrainAcc extends ModuleWeightingScheme {

    /** Proportion (in [0,1]) of train cases belonging to the most common class. */
    double maxClassWeighting;

    public MaxCorrectedTrainAcc() {
        uniformWeighting = true;
        needTrainPreds = true;
    }

    /**
     * Computes the majority-class proportion from the first module's train results
     * (all modules share the same train set), then weights each module independently
     * via defineWeighting.
     */
    public void defineWeightings(EnsembleModule[] modules, int numClasses) {
        double[] trueClassVals = modules[0].trainResults.getTrueClassValsAsArray();

        double[] classCounts = new double[numClasses];
        for (int i = 0; i < trueClassVals.length; i++)
            classCounts[(int) trueClassVals[i]]++;

        double maxCount = classCounts[0];
        for (int i = 1; i < classCounts.length; i++)
            if (classCounts[i] > maxCount)
                maxCount = classCounts[i];

        // Fix: previously the raw *count* of the majority class was used, but the
        // weighting formula (and this class's javadoc) requires the *proportion*;
        // acc - rawCount is negative for any non-trivial train set, which floored
        // every module's weight at 0.01 and made the scheme equivalent to equal
        // weighting. Normalise by the number of train cases.
        maxClassWeighting = trueClassVals.length == 0 ? 0.0 : maxCount / trueClassVals.length;

        for (EnsembleModule m : modules) //by default, sets weights independently for each module
            m.posteriorWeights = defineWeighting(m, numClasses);
    }

    /**
     * Uniform weight across classes: train acc minus the majority-class proportion,
     * i.e. the accuracy expected from a completely biased classifier.
     */
    @Override
    public double[] defineWeighting(EnsembleModule module, int numClasses) {
        //made non zero (effectively 1% accuracy) in weird case that all classifiers get less than expected acc
        return makeUniformWeighting(Math.max(0.01, module.trainResults.getAcc() - maxClassWeighting), numClasses);
    }

    @Override
    public String toString() {
        return this.getClass().getSimpleName();
    }
}
| 2,686 | 39.712121 | 114 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/ModuleWeightingScheme.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
import java.io.Serializable;
/**
* Base class for defining the weighting of a classifiers votes in ensemble classifiers
*
* @author James Large
*/
public abstract class ModuleWeightingScheme implements Serializable {

    /** True if the scheme assigns a single weight replicated across all classes. */
    public boolean uniformWeighting = true;
    /** True if the scheme requires train predictions/estimates to compute weights. */
    public boolean needTrainPreds = true;

    /**
     * Assigns posterior weights to every module. The default treats modules
     * independently; schemes that weight modules relative to one another
     * override this method.
     */
    public void defineWeightings(EnsembleModule[] modules, int numClasses) {
        for (EnsembleModule module : modules)
            module.posteriorWeights = defineWeighting(module, numClasses);
    }

    /** Computes the per-class weight vector for a single module. */
    protected abstract double[] defineWeighting(EnsembleModule trainPredictions, int numClasses);

    /** Replicates a single weight across all classes; a weight of exactly 0 is
     *  bumped to 1 so that a module is never silenced outright (e.g. Fungi). */
    protected double[] makeUniformWeighting(double weight, int numClasses) {
        double w = (weight == 0) ? 1 : weight;
        double[] weights = new double[numClasses];
        for (int c = 0; c < numClasses; c++)
            weights[c] = w;
        return weights;
    }

    @Override
    public String toString() {
        return this.getClass().getSimpleName();
    }
}
| 2,182 | 35.383333 | 97 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/NLL.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
*
* @author James Large (james.large@uea.ac.uk)
*/
public class NLL extends ModuleWeightingScheme {
private double power = 1.0;
public NLL() {
uniformWeighting = true;
needTrainPreds = true;
}
public NLL(double power) {
this.power = power;
uniformWeighting = true;
needTrainPreds = true;
}
public double getPower() {
return power;
}
public void setPower(double power) {
this.power = power;
}
public void defineWeightings(EnsembleModule[] modules, int numClasses) {
double[] nlls = new double[modules.length];
double sum = .0;
for (int i = 0; i < nlls.length; i++) {
nlls[i] = Math.pow(modules[i].trainResults.findNLL(), power);
sum += nlls[i];
}
for (int i = 0; i < nlls.length; i++) {
nlls[i] /= sum;
nlls[i] = 1 - nlls[i];
modules[i].posteriorWeights = makeUniformWeighting(nlls[i], numClasses);
}
}
@Override
public double[] defineWeighting(EnsembleModule module, int numClasses) {
throw new UnsupportedOperationException("NLL weighting cannot be defined for a single module, "
+ "only in relation to the rest, call defineWeighings(...)");
}
@Override
public String toString() {
return this.getClass().getSimpleName() + (power==1.0 ? "" : "(" + power + ")");
}
} | 2,393 | 30.92 | 103 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/ProportionalTrainAcc.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
* Each modules weighting is defined as it's accuracy proportional to the other modules,
* i.e weight_i = (acc_i) / (sum of all accs)
*
* @author James Large james.large@uea.ac.uk
*/
public class ProportionalTrainAcc extends ModuleWeightingScheme {

    /** Sum of all modules' train accs, the normalising denominator for the weights. */
    protected double sumOfTrainAccs = 0.;

    public ProportionalTrainAcc() {
        uniformWeighting = true;
        needTrainPreds = true;
    }

    /**
     * Weights each module by its train accuracy as a proportion of the sum of
     * all modules' train accuracies: weight_i = acc_i / (sum of all accs).
     */
    public void defineWeightings(EnsembleModule[] modules, int numClasses) {
        // Fix: reset the accumulator. Previously it was only ever incremented,
        // so a second call on the same scheme instance would double-count the
        // accuracies and deflate every weight.
        sumOfTrainAccs = 0.;
        for (EnsembleModule m : modules)
            sumOfTrainAccs += m.trainResults.getAcc();

        for (EnsembleModule m : modules)
            m.posteriorWeights = defineWeighting(m, numClasses);
    }

    @Override
    public double[] defineWeighting(EnsembleModule module, int numClasses) {
        // Robustness fix: if every module has zero train acc the division is 0/0 = NaN;
        // fall back to equal weights (makeUniformWeighting's 0->1 bump cannot catch NaN).
        if (sumOfTrainAccs == 0.)
            return makeUniformWeighting(1.0, numClasses);
        return makeUniformWeighting(module.trainResults.getAcc() / sumOfTrainAccs, numClasses);
    }
}
| 1,821 | 34.72549 | 95 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/RecallByClass.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
* Non uniform weighting scheme, calculates the recall for each class and uses those
* as the classifier's weighting
*
* @author James Large
*/
public class RecallByClass extends ModuleWeightingScheme {

    public RecallByClass() {
        uniformWeighting = false;
        needTrainPreds = true;
    }

    /**
     * Non-uniform weighting: one recall value per class, computed from the
     * module's train confusion matrix.
     */
    @Override
    protected double[] defineWeighting(EnsembleModule module, int numClasses) {
        double[][] confMat = module.trainResults.confusionMatrix;
        double[] weights = new double[numClasses];
        for (int c = 0; c < numClasses; c++)
            weights[c] = computeRecall(confMat, c);
        return weights;
    }

    /** Recall of class c: tp / (tp + fn), with the matrix indexed [actual][predicted]. */
    protected double computeRecall(double[][] confMat, int c) {
        double tp = confMat[c][c]; //[actual class][predicted class]
        if (tp == .0)
            return .0000001;
            // some very small non-zero value, for the extreme case that no classifier
            // in the entire ensemble classified cases of this class correctly
            // (happened once on adiac, 37 classes)

        double fn = 0.0;
        for (int i = 0; i < confMat.length; i++)
            if (i != c)
                fn += confMat[c][i];

        return tp / (tp+fn);
    }
}
| 2,149 | 33.126984 | 85 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/TrainAcc.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
* Simply uses the modules train acc as it's weighting. Extension: can raise the accuracy
* to some power in order to scale the relative difference in accuracy between modules
*
* e.g, if raising all accuracies to power of 2, weights (0.7, 0.8, 0.9) become (0.49, 0.64, 0.81)
*
* @author James Large
*/
public class TrainAcc extends ModuleWeightingScheme {

    /** Exponent applied to the train accuracy, to scale relative differences
     *  between modules, e.g. power 2 maps (0.7, 0.8, 0.9) to (0.49, 0.64, 0.81). */
    private double power = 1.0;

    public TrainAcc() {
        uniformWeighting = true;
        needTrainPreds = true;
    }

    public TrainAcc(double power) {
        this();
        this.power = power;
    }

    public double getPower() {
        return power;
    }

    public void setPower(double power) {
        this.power = power;
    }

    /** Uniform weight across classes: the module's train accuracy raised to {@code power}. */
    @Override
    public double[] defineWeighting(EnsembleModule module, int numClasses) {
        double acc = module.trainResults.getAcc();
        return makeUniformWeighting(Math.pow(acc, power), numClasses);
    }

    @Override
    public String toString() {
        String suffix = (power == 1.0) ? "" : "(" + power + ")";
        return this.getClass().getSimpleName() + suffix;
    }
}
| 2,037 | 30.84375 | 98 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/TrainAccByClass.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
* Simply calculates this classifier's accuracy on each class
*
* @author James Large
*/
public class TrainAccByClass extends ModuleWeightingScheme {

    public TrainAccByClass() {
        uniformWeighting = false;
        needTrainPreds = true;
    }

    /**
     * Weights each class by this module's train accuracy on that class alone,
     * read from the row of the train confusion matrix.
     */
    @Override
    public double[] defineWeighting(EnsembleModule module, int numClasses) {
        double[] weights = new double[numClasses];
        for (int c = 0; c < numClasses; c++)
            weights[c] = calcClassAcc(module.trainResults.confusionMatrix, c);
        return weights;
    }

    /**
     * Proportion of instances of true class c that were predicted correctly,
     * i.e. confMat[c][c] / (row c total).
     */
    protected double calcClassAcc(double [][] confMat, int c) {
        double total = .0;
        for (int i = 0; i < confMat[c].length; i++)
            total += confMat[c][i];
        //Bug fix: a class with no train instances previously produced 0/0 = NaN,
        //which would poison any weighted vote downstream. Give it zero weight instead.
        if (total == .0)
            return .0;
        return confMat[c][c] / total;
    }
}
| 1,777 | 31.925926 | 78 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/ensembles/weightings/TrainAccOrMCC.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.ensembles.weightings;
import machine_learning.classifiers.ensembles.AbstractEnsemble.EnsembleModule;
/**
 * Will define a module's weighting as its train accuracy by default, however in
* cases where the class distribution of the dataset is 'uneven', it will instead use MCC
*
* Currently, 'uneven' is (arbitrarily) defined as one class having four times as many train instances
* as another, e.g in the 2 class case one class having 80% of the train insts would lead to
* the MCC weighting being used.
*
* @author James Large james.large@uea.ac.uk
*/
public class TrainAccOrMCC extends ModuleWeightingScheme {

    protected double unevenProp = 4.0; //how much bigger must max class dist be than min to use MCC

    public TrainAccOrMCC() {
        uniformWeighting = true;
        needTrainPreds = true;
    }

    /**
     * @param minPropToUseMCC how much bigger must max class dist be than the min dist to use MCC, defaults to 4,
     * i.e in a two class case MCC will be used if one class has 80% of the insts, and the other has 20%
     */
    public TrainAccOrMCC(double minPropToUseMCC) {
        uniformWeighting = true;
        //Bug fix: this constructor previously failed to set needTrainPreds, unlike the
        //default constructor. Both delegate schemes (TrainAcc, MCCWeighting) read the
        //modules' train results, so train predictions are always required.
        needTrainPreds = true;
        this.unevenProp = minPropToUseMCC;
    }

    //chosen in defineWeightings(): MCCWeighting if the class distribution is 'uneven', else TrainAcc
    private ModuleWeightingScheme scheme = null;

    @Override
    public void defineWeightings(EnsembleModule[] modules, int numClasses) {
        //class values are the same for every module, so inspect the first
        double[] dist = classDistribution(modules[0].trainResults.getTrueClassValsAsArray(), numClasses);

        double max = dist[0], min = dist[0];
        for (int c = 1; c < dist.length; c++) {
            if (dist[c] > max)
                max = dist[c];
            else if (dist[c] < min)
                min = dist[c];
        }

        if (max >= min*this.unevenProp)
            scheme = new MCCWeighting();
        else
            scheme = new TrainAcc();

        for (EnsembleModule module : modules)
            module.posteriorWeights = defineWeighting(module, numClasses);
    }

    @Override
    protected double[] defineWeighting(EnsembleModule trainPredictions, int numClasses) {
        return scheme.defineWeighting(trainPredictions, numClasses);
    }

    /** Returns the proportion of instances in each class, indexed by class value. */
    protected double[] classDistribution(double[] classVals, int numClasses) {
        double[] dist = new double[numClasses];
        for (double c : classVals)
            ++dist[(int)c];

        for (int i = 0; i < numClasses; i++)
            dist[i] /= classVals.length;

        return dist;
    }

    @Override
    public String toString() {
        return this.getClass().getSimpleName() + "(" + unevenProp + ")";
    }
}
| 3,452 | 34.96875 | 114 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/tuned/TunedClassifier.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.tuned;
import evaluation.evaluators.CrossValidationEvaluator;
import evaluation.evaluators.InternalEstimateEvaluator;
import evaluation.tuning.ParameterResults;
import evaluation.tuning.ParameterSet;
import evaluation.tuning.ParameterSpace;
import evaluation.tuning.Tuner;
import experiments.ClassifierExperiments;
import experiments.ExperimentalArguments;
import tsml.classifiers.ParameterSplittable;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.functions.SMO;
import weka.classifiers.functions.supportVector.PolyKernel;
import weka.core.Instance;
import weka.core.Instances;
import java.util.Arrays;
import tsml.classifiers.EnhancedAbstractClassifier;
import tsml.classifiers.Checkpointable;
import tsml.classifiers.TrainTimeContractable;
import machine_learning.classifiers.SaveEachParameter;
import tsml.classifiers.Tuneable;
/**
* Given
* 1) a tuning method,
* - DEFAULT: tuner set to gridsearch + 10foldcv
* - settable via setTuner(...)
* 2) a base classifier with a well formed setOptions(String[]) method
* (that must extend AbstractClassifier, the Classifier interface
* alone does not provide setOptions())
* - DEFAULT: none
* - settable via setClassifier(...)
* 3) a parameter space expressed as keys (that correspond to flags in the setOptions() method)
* to lists of values that these parameters can take.
* - DEFAULT: none
* - settable via setParameterSpace(...)
*
* For a basic example of the above, see setupTestTunedClassifier()
*
* This class will select the best parameter set on the given dataset according to the
* selection and evaluation methods described by the tuner, and build the base classifier
* with the best parameters found
*
* @author James Large (james.large@uea.ac.uk)
*/
public class TunedClassifier extends EnhancedAbstractClassifier
        implements SaveEachParameter,ParameterSplittable,Checkpointable, TrainTimeContractable {

    //the space to search, the tuner that searches it, and the classifier whose paras are tuned
    ParameterSpace space = null;
    Tuner tuner = null;
    AbstractClassifier classifier = null;

    //populated by buildClassifier(): the winning parameter set, also as a weka options array
    ParameterSet bestParas = null;
    String[] bestOptions = null;

    ////////// start interface variables

    //we're implementing CheckpointClassifier AND SaveEachParameter for now, however for this classifier checkpointing is
    //identical to SaveEachParamter, we just implicitely checkpoint after each parameterSet evaluation
    String SEP_CP_PS_paraWritePath; //SaveEachParameter //CheckpointClassifier //ParameterSplittable
    boolean SEP_CP_savingAllParameters = false; //SaveEachParameter //CheckpointClassifier

    long trainContractTimeNanos; //TrainTimeContractClassifier //note, leaving in nanos for max fidelity, max val of long = 2^64-1 = 586 years in nanoseconds
    boolean trainTimeContract = false; //TrainTimeContractClassifier

    boolean PS_parameterSplitting = false; //ParameterSplittable
    int PS_paraSetID = -1; //ParameterSplittable

    ////////// end interface variables

    /**
     * Creates an empty TunedClassifier. Tuner has a default value, however at minimum the classifier and parameter space
     * shall need to be provided later via set...() methods
     */
    public TunedClassifier() {
        this(null, null, new Tuner());
    }

    /**
     * If the classifier is able to estimate its own performance while building, the tuner shall default to using that
     * as the evaluation method. Otherwise defaults to an external 10fold cv
     */
    public TunedClassifier(AbstractClassifier classifier, ParameterSpace space) {
        this(classifier, space,
                EnhancedAbstractClassifier.classifierAbleToEstimateOwnPerformance(classifier) ?
                    new Tuner(new InternalEstimateEvaluator()) :
                    new Tuner(new CrossValidationEvaluator())
        );
    }

    public TunedClassifier(AbstractClassifier classifier, ParameterSpace space, Tuner tuner) {
        super(CAN_ESTIMATE_OWN_PERFORMANCE);
        this.classifier = classifier;
        this.space = space;
        this.tuner = tuner;
    }

    //NOTE(review): assumes classifier has been set; throws NPE if called while classifier is null
    @Override
    public String toString() {
        return super.toString() +
                classifier.getClass().getSimpleName() +
                (bestParas != null ? "_"+bestParas.toString() : "");
    }

    /**
     * PRE: Classifier must be set, if not, nothing happens
     * @return true if successful in turning on internal estimate
     */
    public boolean useInternalEstimates(){
        if(classifier==null)
            return false;
        if(EnhancedAbstractClassifier.classifierAbleToEstimateOwnPerformance(classifier) ){
            tuner=new Tuner(new InternalEstimateEvaluator());
            return true;
        }
        return false;
    }

    //seeds both this wrapper and the tuner; the base classifier itself is NOT seeded here
    public void setSeed(int seed) {
        super.setSeed(seed);
        tuner.setSeed(seed);
        //no setSeed in abstractclassifier. i imagine most define it via setOptions,
        //so could add it as a parameter with only one possible value, or just set the seed
        //before giving the classifier to this tunedclassifier instance
    }

    public boolean getCloneClassifierForEachParameterEval() {
        return tuner.getCloneClassifierForEachParameterEval();
    }

    public void setCloneClassifierForEachParameterEval(boolean clone) {
        tuner.setCloneClassifierForEachParameterEval(clone);
    }

    //null until buildClassifier() has completed a full tune
    public String[] getBestOptions() { return bestOptions; }

    public ParameterSpace getSpace() {
        return space;
    }

    public void setParameterSpace(ParameterSpace space) {
        this.space = space;
    }

    public Tuner getTuner() {
        return tuner;
    }

    public void setTuner(Tuner tuner) {
        this.tuner = tuner;
    }

    public AbstractClassifier getClassifier() {
        return classifier;
    }

    public void setClassifier(AbstractClassifier classifier) {
        this.classifier = classifier;
    }

    /**
     * Example setup: an SMO tuned over 13 values of C via the default
     * grid-search + 10-fold CV tuner. Intended for testing/demonstration.
     */
    public void setupTestTunedClassifier() {
        //setup classifier. in this example, if we wanted to tune the kernel as well,
        //we'd have to extend smo and override the setOptions to allow specific options
        //for kernal settings... see SMO.setOptions()
        SMO svm = new SMO();
        PolyKernel p=new PolyKernel();
        p.setExponent(2);
        svm.setKernel(p);
        this.classifier = new SMO();

        //setup tuner, defaults to 10foldCV grid-search
        this.tuner = new Tuner();

        //setup para space
        int size = 13;
        double[] cs = new double[size];
        for (int i = 0; i < cs.length; i++)
            cs[i] = Math.pow(10.0, (i-size/2));

        this.space = new ParameterSpace();
        this.space.addParameter("C", cs);
    }

    /**
     * Tunes the base classifier over the parameter space using the tuner,
     * then builds it on the full train data with the best parameters found.
     * In parameter-splitting mode, evaluates and saves a single parameter
     * set instead, then returns without building a final model.
     *
     * @throws Exception if tuner/classifier/space are not all set, or the
     *         underlying tuning/building fails
     */
    @Override
    public void buildClassifier(Instances data) throws Exception {
        //check everything's here/init
        boolean somethingMissing = false;
        String msg = "";
        if (tuner == null) {
            msg += "Tuner not setup. ";
            somethingMissing = true;
        }
        if (classifier == null) {
            msg += "No classifier specified. ";
            somethingMissing = true;
        }
        if (space == null) {
            //fall back to the classifier's own declared search space if it has one
            if (classifier instanceof Tuneable)
                space = ((Tuneable)classifier).getDefaultParameterSearchSpace();
            else {
                msg += "Parameter space not setup. ";
                somethingMissing = true;
            }
        }
        if (somethingMissing)
            throw new Exception("TunedClassifier: " + msg);

        applyInterfaceFlagsToTuner(); //apply any interface flags onto the tuner itself

        //special case: if we've been set up to evaluate a particular parameter set in this execution
        //instead of search the full space, evaluate that parameter, write it, and quit
        if (PS_parameterSplitting && PS_paraSetID >= 0) {
            trainResults = tuner.evaluateParameterSetByIndex(classifier, data, space, PS_paraSetID);
            tuner.saveParaResults(PS_paraSetID, trainResults);
            return;
            //todo think that's it?
        }

        //actual work if normal run
        ParameterResults best = tuner.tune(classifier, data, space);
        bestParas = best.paras;
        trainResults = best.results;

        //apply best paras and build final classifier on full train data
        String[] options = best.paras.toOptionsList();
        bestOptions = Arrays.copyOf(options, options.length);
        classifier.setOptions(options);
        classifier.buildClassifier(data);
        trainResults.setParas(getParameters());
    }

    //delegates straight to the tuned-and-built base classifier
    @Override
    public double[] distributionForInstance(Instance inst) throws Exception {
        return classifier.distributionForInstance(inst);
    }

    public static void main(String[] args) throws Exception {
//        String dataset = "hayes-roth";
//
//        TunedClassifier tcGrid = new TunedClassifier();
//        tcGrid.setupTestTunedClassifier();
//        tcGrid.setCloneClassifierForEachParameterEval(false);
//
//        TunedClassifier tcRand = new TunedClassifier();
//        tcRand.setupTestTunedClassifier();
//        tcRand.getTuner().setSearcher(new RandomSearcher(3));
//        tcRand.getTuner().setEvaluator(new StratifiedResamplesEvaluator());
//        tcRand.setCloneClassifierForEachParameterEval(false);
//
//
//        Classifier[] cs = new Classifier[] { tcRand, new SMO(), tcGrid };
//
//        int numFolds = 10;
//
//        for (Classifier c : cs) {
//            Instances all = ClassifierTools.loadData("Z:\\Data\\UCIDelgado\\"+dataset+"\\"+dataset+".arff");
//            double mean =.0;
//
//            for (int f = 0; f < numFolds; f++) {
//                Instances[] data = InstanceTools.resampleInstances(all, f, 0.5);
//
//                try {
//                    ((TunedClassifier)c).setSeed(f);
//                }catch (Exception e){ }
//
//                c.buildClassifier(data[0]);
//                double t = ClassifierTools.accuracy(data[1], c);
//                mean += t;
//                System.out.print(t + ", ");
//            }
//
//            mean /= numFolds;
//            System.out.println("\nmean = " + mean);
//        }

        //NOTE(review): hard-coded local paths; this main is a developer smoke test, not a runnable example
        ExperimentalArguments exp = new ExperimentalArguments();
        exp.checkpointing = true;
        exp.estimatorName = "TunedSMO";
        exp.datasetName = "hayes-roth";
        exp.foldId = 1;
        exp.generateErrorEstimateOnTrainSet = true;
        exp.dataReadLocation = "Z:\\Data\\UCIDelgado\\";
        exp.resultsWriteLocation = "C:\\Temp\\TunerTests\\t\\";
//        exp.singleParameterID = 1;

        ClassifierExperiments.setupAndRunExperiment(exp);
    }

    //  METHODS FOR:     TrainAccuracyEstimate,SaveEachParameter,ParameterSplittable,CheckpointClassifier,TrainTimeContractClassifier

    //summary string for results files: classifier name, best paras (if tuned), and the search space
    @Override
    public String getParameters() {
        StringBuilder sb = new StringBuilder(classifier.getClass().getSimpleName());
        if (bestParas != null)
            sb.append(",BESTPARAS:").append(bestParas.toString().replace("\n", ","));
        sb.append(",SPACE:").append(space.toString().replace("\n", ","));
//        if(classifier instanceof EnhancedAbstractClassifier)
//            str+=","+((EnhancedAbstractClassifier)classifier).getParameters();
        return sb.toString();
    }

    @Override //SaveEachParameter
    public void setPathToSaveParameters(String r) {
        this.SEP_CP_PS_paraWritePath = r;
        this.SEP_CP_savingAllParameters = true;
    }

    @Override //SaveEachParameter
    public void setSaveEachParaAcc(boolean b) {
        this.SEP_CP_savingAllParameters = b;
    }

    @Override //ParameterSplittable
    public void setParamSearch(boolean b) {
        throw new UnsupportedOperationException("-------This was intended to turn off the tuning "
                + "of parameters while evaluating a particular parameter set in the original tuned classifiers. "
                + "Now that we're in a general tunedClassifier specifically, this doesnt make sense. Part of the ParameterSplittable interface");
    }

    @Override //ParameterSplittable
    public void setParametersFromIndex(int x) {
        this.PS_paraSetID = x;
        this.PS_parameterSplitting = true;
    }

//    @Override //ParameterSplittable
//    public String getParas() {
//        return bestParas.toClassifierResultsParaLine(true);
//    }

    @Override //Checkpointable
    public boolean setCheckpointPath(String path) {
        boolean validPath=Checkpointable.super.createDirectories(path);
        if(validPath){
            this.SEP_CP_PS_paraWritePath = path;
            this.SEP_CP_savingAllParameters = true;
        }
        return validPath;
    }

    @Override //CheckpointClassifier
    public void copyFromSerObject(Object obj) throws Exception {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    @Override
    public void setTrainTimeLimit(long amount) {
        trainContractTimeNanos =amount;
        trainTimeContract = true;
    }

    @Override
    public boolean withinTrainContract(long start) {
        return tuner.withinTrainContract(start);
    }

    /**
     * To be called at start of buildClassifier
     *
     * Simple helper method to transfer necessary interface variable changes over to the tuner
     * in case user e.g sets up interface variables THEN sets a new tuner, or sets a new tuner
     * (or sticks with default) THEN sets these variables, etc
     */
    private void applyInterfaceFlagsToTuner() {
        if (SEP_CP_savingAllParameters || PS_parameterSplitting)
            tuner.setPathToSaveParameters(this.SEP_CP_PS_paraWritePath);

        if (trainTimeContract)
            tuner.setTrainTimeLimit(this.trainContractTimeNanos);
    }
}
| 15,121 | 35.350962 | 158 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/tuned/TunedRandomForest.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.tuned;
import experiments.CollateResults;
import fileIO.OutFile;
import java.io.File;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import tsml.classifiers.ParameterSplittable;
import utilities.ClassifierTools;
import evaluation.evaluators.CrossValidationEvaluator;
import utilities.InstanceTools;
import tsml.classifiers.SaveParameterInfo;
import weka.classifiers.meta.Bagging;
import evaluation.storage.ClassifierResults;
import experiments.data.DatasetLoading;
import static utilities.GenericTools.indexOfMax;
import weka.classifiers.trees.RandomForest;
import weka.classifiers.trees.RandomTree;
import weka.core.Instances;
import weka.core.Utils;
import machine_learning.classifiers.SaveEachParameter;
/**
*This classifier is enhanced so that classifier builds a random forest with the
facility to build by forward selection addition of trees to minimize OOB error,
by far the fastest way.
 As far as tuning is concerned, RandomForest has three parameters
m_MaxDepth: defaults to 0 (no limit on depth)
m_numFeatures: defaults to log(m)+1 (not sqrt(m) as most implementations do
m_numTrees: defaults to 10
Further enhanced to allow for selection through OOB error estimates and predictions
Further changes:
1. set number of trees (m_numTrees) via grid search on a range (using OOB) that
defaults to
{10 [Weka Default],100,200,.., 500 [R default],...,1000} (11 values)
2. set number of features (max value m==numAttributes without class)
per tree (m_numFeatures) and m_numTrees through grid
search on a range
1, 10, sqrt(m) [R default], log_2(m)+1 [Weka default], m [full set]}
(4 values)+add an option to choose randomly for each tree?
grid search is then just 55 values and because it uses OOB no CV is required
* @author ajb
*
*
*
*
*
*
* NOTE jamesl: this classifier is now out of step with the current intended purpose usage of
* EnhancedAbstractClassifier (in that, it extend RandomForest directly
* which does not extend that)
*
* Simple usage with ClassifierExperiments may not be guaranteed to work, especially in trainfile writing
*/
public class TunedRandomForest extends RandomForest implements SaveParameterInfo,SaveEachParameter,ParameterSplittable{
    //whether to grid-search the parameters below, or just build with the current settings
    boolean tuneParameters=true;
    int[] paraSpace1;//Maximum tree depth, m_MaxDepth
    int[] paraSpace2;//Number of features per tree,m_numFeatures
    int[] paraSpace3;//Number of trees, m_numTrees
    int[] paras;    //the chosen values: [maxDepth, numFeatures, numTrees]
    int maxPerPara=10;  //grid resolution per parameter (clamped to num attributes if smaller)
    String trainPath="";    //if non-empty, full train results are written here after building
    int seed; //need this to seed cver/the forests for consistency in meta-classification/ensembling purposes
    Random rng; //legacy, 'seed' still (and always has) seeds this for any other rng purposes, e.g tie resolution
    ArrayList<Double> accuracy; //CV accuracy of every parameter combination evaluated during tuning
    boolean crossValidate=true; //if true, estimate train acc by CV; otherwise by OOB error
    boolean estimateAcc=true;  //If there is no tuning, this will find the estimate with the fixed values
    private long combinedBuildTime;    //sum of per-parameter build times when results are read back from file
    protected String resultsPath;   //directory for per-parameter result files (SaveEachParameter/checkpointing)
    protected boolean saveEachParaAcc=false;
//Need to know this before build if the parameters are going to be set by index
//It is only used in the method setParametersFromIndex, which throws an
//Exception if it is zero
    private int numFeaturesInProblem=0;
    private static int MAX_FOLDS=10;    //upper bound on CV folds during tuning
    private ClassifierResults res =new ClassifierResults();     //train estimate produced while building
    //must be called before setParametersFromIndex, which needs the attribute count
    public void setNumFeaturesInProblem(int m){
        numFeaturesInProblem=m;
    }
    //directly sets m_numFeatures, the number of candidate features per split
    public void setNumFeaturesForEachTree(int m){
        m_numFeatures=m;
    }
    /**
     * Determines whether an estimate of the accuracy is to be obtained from the train data
     * by 10x cross validation
     * @param b
     */
    public void setCrossValidate(boolean b){
        if(b)
            setEstimateAcc(b);  //CV implies we want a train estimate at all
        crossValidate=b;
    }
    public void setEstimateAcc(boolean b){
        estimateAcc=b;
    }

//methods from SaveEachParameter
    @Override
    public void setPathToSaveParameters(String r){
        resultsPath=r;
        setSaveEachParaAcc(true);   //setting a path implies per-parameter saving is on
    }
    @Override
    public void setSaveEachParaAcc(boolean b){
        saveEachParaAcc=b;
    }
@Override
public void setParametersFromIndex(int x) {
tuneParameters=false;
//Three paras, evenly distributed, 1 to maxPerPara.
//Note that if maxPerPara > numFeaturesInProblem, we have a problem, so it will throw an exception later
paras=new int[3];
if(x<1 || x>maxPerPara*maxPerPara*maxPerPara)//Error, invalid range
throw new UnsupportedOperationException("ERROR parameter index "+x+" out of range for PolyNomialKernel"); //To change body of generated methods, choose Tools | Templates.
int numLevelsIndex=(x-1)/(maxPerPara*maxPerPara);
int numFeaturesIndex=((x-1)/(maxPerPara))%maxPerPara;
int numTreesIndex=x%maxPerPara;
//Need to know number of attributes
if(numFeaturesInProblem==0)
throw new RuntimeException("Error in TunedRandomForest in set ParametersFromIndex: we do not know the number of attributes, need to call setNumFeaturesInProblem before this call");
//Para 1. Maximum tree depth, m_MaxDepth
if(numLevelsIndex==0)
paras[0]=0;
else
paras[0]=numLevelsIndex*(numFeaturesInProblem/maxPerPara);
//Para 2. Num features
if(numFeaturesIndex==0)
paras[1]=(int)Math.sqrt(numFeaturesInProblem);
else if(numFeaturesIndex==1)
paras[1]=(int) Utils.log2(numFeaturesInProblem)+1;
else
paras[1]=((numFeaturesIndex-1)*numFeaturesInProblem)/maxPerPara;
if(numTreesIndex==0)
paras[2]=10; //Weka default
else
paras[2]=100*numTreesIndex;
setMaxDepth(paras[0]);
setNumFeaturesForEachTree(paras[1]);
setNumTrees(paras[2]);
if(m_Debug)
System.out.println("Index ="+x+" Num Features ="+numFeaturesInProblem+" Max Depth="+paras[0]+" Num Features ="+paras[1]+" Num Trees ="+paras[2]);
}
//SaveParameterInfo
    //summary line for results files: build time, train estimate acc, and the three tuned parameters
    @Override
    public String getParameters() {
        String result="BuildTime,"+res.getBuildTime()+",CVAcc,"+res.getAcc()+",";
        result+="MaxDepth,"+this.getMaxDepth()+",NumFeatures,"+this.getNumFeatures()+",NumTrees,"+this.getNumTrees();
        return result;
    }

    //ParameterSplittable: toggles whether buildClassifier performs the full grid search
    @Override
    public void setParamSearch(boolean b) {
        tuneParameters=b;
    }
    //defaults: 500 trees (not Weka's 10), single execution slot, OOB-capable bagger
    public TunedRandomForest(){
        super();
        m_numTrees=500;
        m_numExecutionSlots=1;
        m_bagger=new EnhancedBagging();
        rng=new Random();
        seed=0;
        accuracy=new ArrayList<>();
    }
    //seeds both the forest (via super) and the auxiliary rng used for tie resolution
    @Override
    public void setSeed(int s){
        super.setSeed(s);
        seed = s;
        rng=new Random();
        rng.setSeed(seed);
    }
    public void debug(boolean b){
        m_Debug=b;
    }
    public void tuneParameters(boolean b){
        tuneParameters=b;
    }
public void setNumTreesRange(int[] d){
paraSpace1=d;
}
public void setNumFeaturesRange(int[] d){
paraSpace2=d;
}
public void writeTrainEstimatesToFile(String train) {
trainPath=train;
estimateAcc=true;
}
public void setFindTrainAccuracyEstimate(boolean setCV){
estimateAcc=setCV;
}
public boolean findsTrainAccuracyEstimate(){ return estimateAcc;}
protected final void setStandardParaSearchSpace(int m){
//Need to know the number of features to do this
//Does 1000 parameter searches on a 10x10x10 grid
if(m<maxPerPara)
maxPerPara=m;
if(m_Debug){
System.out.println("Number of features ="+m+" max para values ="+maxPerPara);
System.out.println("Setting defaults ....");
}
//Para 1. Maximum tree depth, m_MaxDepth
paraSpace1=new int[maxPerPara];
paraSpace1[0]=0; // No limit
for(int i=1;i<paraSpace1.length;i++)
paraSpace1[i]=paraSpace1[i-1]+m/(paraSpace1.length-1);
//Para 2. Num features
paraSpace2=new int[maxPerPara];
paraSpace2[0]=(int)Math.sqrt(m);
paraSpace2[1]=(int) Utils.log2(m)+1;
for(int i=2;i<maxPerPara;i++)
paraSpace2[i]=((i-1)*m)/maxPerPara;
//Para 3. Num trees
paraSpace3=new int[10];//Num trees
paraSpace3[0]=10; //Weka default
for(int i=1;i<paraSpace3.length;i++)
paraSpace3[i]=100*i;
if(m_Debug){
System.out.print(" m ="+m);
System.out.print("Para 1 (Num levels) : ");
for(int i:paraSpace1)
System.out.print(i+", ");
System.out.print("\nPara 2 (Num features) : ");
for(int i:paraSpace2)
System.out.print(i+", ");
System.out.print("\nPara 3 (Num trees) : ");
for(int i:paraSpace3)
System.out.print(i+", ");
}
}
public void tuneRandomForest(Instances train) throws Exception {
paras=new int[3];
int folds=MAX_FOLDS;
if(folds>train.numInstances())
folds=train.numInstances();
double minErr=1;
this.setSeed(rng.nextInt());
Instances trainCopy=new Instances(train);
CrossValidationEvaluator cv = new CrossValidationEvaluator();
cv.setSeed(seed);
cv.setNumFolds(folds);
cv.buildFolds(trainCopy);
ArrayList<TunedSVM.ResultsHolder> ties=new ArrayList<>();
ClassifierResults tempResults;
int count=0;
OutFile temp=null;
for(int p1:paraSpace1){//Maximum tree depth, m_MaxDepth
for(int p2:paraSpace2){//Num features
for(int p3:paraSpace3){//Num trees
count++;
if(saveEachParaAcc){// check if para value already done
File f=new File(resultsPath+count+".csv");
if(f.exists()){
if(CollateResults.validateSingleFoldFile(resultsPath+count+".csv")==false){
System.out.println("Deleting file "+resultsPath+count+".csv because size ="+f.length());
}
else
continue;//If done, ignore skip this iteration
}
}
TunedRandomForest model = new TunedRandomForest();
model.setMaxDepth(p1);
model.setNumFeatures(p2);
model.setNumTrees(p3);
model.tuneParameters=false;
model.estimateAcc=false;
model.setSeed(count);
tempResults=cv.crossValidateWithStats(model,trainCopy);
tempResults.setEstimatorName("RandFPara"+count);
tempResults.setDatasetName(train.relationName());
tempResults.setFoldID(seed);
tempResults.setSplit("train");
tempResults.setParas("maxDepth,"+p1+",numFeatures,"+p2+",numTrees,"+p3);
double e=1-tempResults.getAcc();
if(m_Debug)
System.out.println("Depth="+p1+",Features"+p2+",Trees="+p3+" Acc = "+(1-e));
accuracy.add(tempResults.getAcc());
if(saveEachParaAcc){// Save to file and close
tempResults.writeFullResultsToFile(resultsPath+count+".csv");
File f=new File(resultsPath+count+".csv");
if(f.exists())
f.setWritable(true, false);
}
else{
if(e<minErr){
minErr=e;
ties=new ArrayList<>();//Remove previous ties
ties.add(new TunedSVM.ResultsHolder(p1,p2,p3,tempResults));
}
else if(e==minErr){//Sort out ties
ties.add(new TunedSVM.ResultsHolder(p1,p2,p3,tempResults));
}
}
}
}
}
int bestNumLevels;
int bestNumFeatures;
int bestNumTrees;
minErr=1;
if(saveEachParaAcc){
// Check they are all there first.
int missing=0;
for(int p1:paraSpace1){
for(int p2:paraSpace2){
for(int p3:paraSpace3){
File f=new File(resultsPath+count+".csv");
if(!(f.exists() && f.length()>0))
missing++;
}
}
}
if(missing==0)//All present
{
combinedBuildTime=0;
// If so, read them all from file, pick the best
count=0;
for(int p1:paraSpace1){//C
for(int p2:paraSpace2){//Exponent
for(int p3:paraSpace3){//B
count++;
tempResults = new ClassifierResults();
tempResults.loadResultsFromFile(resultsPath+count+".csv");
combinedBuildTime+=tempResults.getBuildTime();
double e=1-tempResults.getAcc();
if(e<minErr){
minErr=e;
ties=new ArrayList<>();//Remove previous ties
ties.add(new TunedSVM.ResultsHolder(p1,p2,p3,tempResults));
}
else if(e==minErr){//Sort out ties
ties.add(new TunedSVM.ResultsHolder(p1,p2,p3,tempResults));
}
//Delete the files here to clean up.
File f= new File(resultsPath+count+".csv");
if(!f.delete())
System.out.println("DELETE FAILED "+resultsPath+count+".csv");
}
}
}
TunedSVM.ResultsHolder best=ties.get(rng.nextInt(ties.size()));
bestNumLevels=(int)best.x;
bestNumFeatures=(int)best.y;
bestNumTrees=(int)best.z;
paras[0]=bestNumLevels;
paras[1]=bestNumFeatures;
paras[2]=bestNumTrees;
this.setMaxDepth(bestNumLevels);
this.setNumFeatures(bestNumFeatures);
this.setNumTrees(bestNumTrees);
res=best.res;
if(m_Debug)
System.out.println("Bestnum levels ="+bestNumLevels+" best num features = "+bestNumFeatures+" best num trees ="+bestNumTrees+" best train acc = "+res.getAcc());
}else//Not all present, just ditch
System.out.println(resultsPath+" error: missing ="+missing+" parameter values");
}
else{
TunedSVM.ResultsHolder best=ties.get(rng.nextInt(ties.size()));
bestNumLevels=(int)best.x;
bestNumFeatures=(int)best.y;
bestNumTrees=(int)best.z;
paras[0]=bestNumLevels;
paras[1]=bestNumFeatures;
paras[2]=bestNumTrees;
this.setMaxDepth(bestNumLevels);
this.setNumFeatures(bestNumFeatures);
this.setNumTrees(bestNumTrees);
res=best.res;
}
}
@Override
public void buildClassifier(Instances data) throws Exception{
long startTime=System.nanoTime();
//********* 1: Set up the main classifier with standard Weka calls ***************/
// can classifier handle the data?
getCapabilities().testWithFail(data);
// remove instances with missing class
data = new Instances(data);
data.deleteWithMissingClass();
//this is only used if CV is used to find parameters or estimate acc from train data
int folds=10;
if(folds>data.numInstances())
folds=data.numInstances();
super.setSeed(seed);
super.setNumFeatures((int)Math.sqrt(data.numAttributes()-1));
/******* 2. Tune parameters if required:
*
* NOTE: the number of trees could be found incrementally, just start with the smallest
* number and add in each time rather than rebuild. It would massively speed up the search
* this has been implemented for the EnhancedBagger,but is not yet used.
* Obviously cannot do this for the number of attributes
*/
if(tuneParameters){
if(paraSpace1==null)
setStandardParaSearchSpace(data.numAttributes()-1);
tuneRandomForest(data);
}
else //Override WEKA's default which is worse than sqrt(m)
setNumFeatures(Math.max(1,(int)Math.sqrt(data.numAttributes()-1)));
/******** 3. Build final classifier ***************/
/*Cant call super.buildClassifier as it resets the bagger to Bagging instead of
EnhancedBagging so instead straight cut and paste from RandomForest, with
Bagging changed to EnhancedBagging and default size changed
*/
m_bagger = new EnhancedBagging();
//Test with C4.5 rather than RT
RandomTree rTree = new RandomTree();
// J48 rTree = new J48();
// set up the random tree options
if(m_numFeatures>data.numAttributes()-1)
m_numFeatures=data.numAttributes()-1;
if(m_MaxDepth>data.numAttributes()-1)
m_MaxDepth=0;
m_KValue = m_numFeatures;
//the value in m_numFeatures is not actually used
//its only role is setting m_KValue
rTree.setKValue(m_KValue);
rTree.setMaxDepth(getMaxDepth());
// set up the bagger and build the forest
m_bagger.setClassifier(rTree);
m_bagger.setSeed(seed);
m_bagger.setNumIterations(m_numTrees);
m_bagger.setCalcOutOfBag(true);
m_bagger.setNumExecutionSlots(m_numExecutionSlots);
m_bagger.buildClassifier(data);
/*** 4. Find the estimates of the train acc, either through CV or OOB ****/
// do this after the main build in case OOB is used, because we need the main
//classifier for that
//NOTE IF THE CLASSIFIER IS TUNED THIS WILL BE A POSSIBLE SOURCE OF BIAS
//It should really be nested up a level.
if(estimateAcc){ //Need find train acc, either through CV or OOB
if(crossValidate){
RandomForest t= new RandomForest();
t.setNumFeatures(this.getNumFeatures());
t.setNumTrees(this.getNumTrees());
t.setSeed(seed);
CrossValidationEvaluator cv = new CrossValidationEvaluator();
cv.setSeed(seed);
cv.setNumFolds(folds);
cv.buildFolds(data);
res = cv.crossValidateWithStats(t, data);
if(m_Debug){
System.out.println("In cross validate");
System.out.println(getParameters());
}
}
else{
res.setAcc(1-this.measureOutOfBagError());
//Get OOB probabilities. This is not possible with the standard
//random forest bagger, hence the use of EnhancedBagger
System.out.println("BAGGER CLASS = "+m_bagger.getClass().getName());
((EnhancedBagging)m_bagger).findOOBProbabilities();
double[][] OOBPredictions=((EnhancedBagging)m_bagger).OOBProbabilities;
for(int i=0;i<data.numInstances();i++)
res.addPrediction(data.instance(i).classValue(),OOBPredictions[i],indexOfMax(OOBPredictions[i]), -1, "");
}
}
res.setTimeUnit(TimeUnit.NANOSECONDS);
res.setBuildTime(System.nanoTime()-startTime);
if(trainPath!=""){ //Save basic train results
res.setEstimatorName("TunedRandF");
res.setDatasetName(data.relationName());
res.setFoldID(seed);
res.setSplit("train");
res.setParas(getParameters());
res.writeFullResultsToFile(trainPath);
}
}
/**
 * Grows {@code n} additional trees on {@code data} and merges them into the
 * existing forest, refreshing the out-of-bag statistics afterwards.
 *
 * @param n    number of new trees to add to the forest
 * @param data training instances used to bag the new trees
 * @throws Exception if the underlying bagger fails to build
 */
public void addTrees(int n, Instances data) throws Exception{
    EnhancedBagging newTrees =new EnhancedBagging();
    RandomTree rTree = new RandomTree();
    // set up the random tree options to mirror the main forest
    m_KValue = m_numFeatures;
    rTree.setKValue(m_KValue);
    rTree.setMaxDepth(getMaxDepth());
    // FIX (was the TODO "Change this so that it is reproducable"): seed
    // deterministically from the classifier seed plus the current forest size,
    // so repeated runs reproduce the same trees while successive addTrees
    // calls still draw different seeds.
    Random r= new Random(seed+m_numTrees);
    newTrees.setSeed(r.nextInt());
    newTrees.setClassifier(rTree);
    newTrees.setNumIterations(n);
    newTrees.setCalcOutOfBag(true);
    newTrees.setNumExecutionSlots(m_numExecutionSlots);
    newTrees.buildClassifier(data);
    newTrees.findOOBProbabilities();
    //Merge with previous forest
    m_bagger.aggregate(newTrees);
    m_bagger.finalizeAggregation();
    //Update OOB Error, as this is seemingly not done in the bagger
    m_numTrees+=n;
    m_bagger.setNumIterations(m_numTrees);
    ((EnhancedBagging)m_bagger).mergeBaggers(newTrees);
}
/** @return bag size, as a percentage of the training set size, used by the internal bagger */
public double getBaggingPercent(){
  return m_bagger.getBagSizePercent();
}
/**
 * Bagging extension that retains a reference to the training data so that
 * out-of-bag (OOB) probability estimates can be recovered for every train
 * instance, and that supports merging two baggers built on the same data.
 */
static protected class EnhancedBagging extends Bagging{
    @Override
    public void buildClassifier(Instances data)throws Exception {
        super.buildClassifier(data);
        m_data=data;    // keep the data: needed later to compute OOB estimates
    }
    double[][] OOBProbabilities;    // [instance][class] averaged OOB probability estimates
    int[] counts;                   // number of classifiers for which each instance was out of bag
    /**
     * Merges the OOB statistics and in-bag records of {@code other} into this
     * bagger (both must have been built on the same data) and recomputes the
     * OOB error.
     */
    public void mergeBaggers(EnhancedBagging other){
        for (int i = 0; i < m_data.numInstances(); i++) {
            int total=counts[i]+other.counts[i];
            for (int j = 0; j < m_data.numClasses(); j++) {
                OOBProbabilities[i][j]=counts[i]*OOBProbabilities[i][j]+other.counts[i]*other.OOBProbabilities[i][j];
                // guard: an instance may have been in every bag of both baggers
                if(total>0)
                    OOBProbabilities[i][j]/=total;
            }
            counts[i]=total;
        }
        //Merge m_inBag: index i is classifier, j the instance
        boolean[][] inBags = new boolean[m_inBag.length+other.m_inBag.length][];
        for(int i=0;i<m_inBag.length;i++)
            inBags[i]=m_inBag[i];
        for(int i=0;i<other.m_inBag.length;i++)
            inBags[m_inBag.length+i]=other.m_inBag[i];
        m_inBag=inBags;
        findOOBError();
    }
    /**
     * Computes the OOB probability estimate for every training instance by
     * averaging the distributions of the classifiers whose bags excluded it.
     * Instances that appeared in every bag keep all-zero probabilities
     * (previously this divided by zero and produced NaN).
     */
    public void findOOBProbabilities() throws Exception{
        OOBProbabilities=new double[m_data.numInstances()][m_data.numClasses()];
        counts=new int[m_data.numInstances()];
        for (int i = 0; i < m_data.numInstances(); i++) {
            for (int j = 0; j < m_Classifiers.length; j++) {
                if (m_inBag[j][i])
                    continue;
                counts[i]++;
                double[] newProbs = m_Classifiers[j].distributionForInstance(m_data.instance(i));
                // accumulate the probability estimates
                for (int k = 0; k < m_data.numClasses(); k++) {
                    OOBProbabilities[i][k] += newProbs[k];
                }
            }
            // guard against divide by zero: with few trees an instance can be in every bag
            if(counts[i]>0){
                for (int k = 0; k < m_data.numClasses(); k++) {
                    OOBProbabilities[i][k] /= counts[i];
                }
            }
        }
    }
    /**
     * Majority-vote OOB error over the training data; requires
     * {@link #findOOBProbabilities()} to have been called first.
     * @return the recomputed OOB error, also stored in m_OutOfBagError
     */
    public double findOOBError(){
        double correct = 0.0;
        for (int i = 0; i < m_data.numInstances(); i++) {
            double[] probs = OOBProbabilities[i];
            int vote =0;    // ties resolve to the lowest class index
            for (int j = 1; j < probs.length; j++) {
                if(probs[vote]<probs[j])
                    vote=j;
            }
            if(m_data.instance(i).classValue()==vote)
                correct++;
        }
        m_OutOfBagError=1- correct/(double)m_data.numInstances();
        return m_OutOfBagError;
    }
}
/**
 * Recomputes and returns the out-of-bag error of the forest.
 * @throws Exception if an OOB distribution cannot be computed
 */
public double findOOBError() throws Exception{
    ((EnhancedBagging)m_bagger).findOOBProbabilities();
    return ((EnhancedBagging)m_bagger).findOOBError();
}
/**
 * Recomputes and returns the per-instance OOB probability estimates,
 * indexed [instance][class].
 */
public double[][] findOOBProbabilities() throws Exception{
    ((EnhancedBagging)m_bagger).findOOBProbabilities();
    return ((EnhancedBagging)m_bagger).OOBProbabilities;
}
/**
 * Returns the cached OOB probabilities without recomputing them; callers
 * must have triggered findOOBProbabilities() first or this may be null.
 */
public double[][] getOBProbabilities() throws Exception{
    return ((EnhancedBagging)m_bagger).OOBProbabilities;
}
/**
 * Experimental regression check (JL): confirms that the cross-validation
 * changes did not alter results by running 50 resamples of ItalyPowerDemand
 * and printing mean train/test accuracy. The comment tables below record the
 * pre/post-change per-fold accuracies for reference.
 */
public static void jamesltests() {
    //tests to confirm correctness of cv changes
    //summary: pre/post change avg accs over 50 folds
    // train: 0.9689552238805973 vs 0.9680597014925374
    // test: 0.9590670553935859 vs 0.9601943634596699
    //post change trainaccs/testaccs on 50 folds of italypowerdemand:
    // trainacc=0.9680597014925374
    // folds:
    // [0.9552238805970149, 0.9552238805970149, 1.0, 1.0, 0.9552238805970149,
    // 0.9701492537313433, 0.9552238805970149, 0.9402985074626866, 0.9552238805970149, 1.0,
    // 0.9552238805970149, 0.9701492537313433, 0.9701492537313433, 0.9701492537313433, 0.9850746268656716,
    // 0.9552238805970149, 0.9402985074626866, 0.9850746268656716, 1.0, 0.9552238805970149,
    // 0.9850746268656716, 0.9701492537313433, 0.9701492537313433, 0.9552238805970149, 1.0,
    // 0.9701492537313433, 0.9701492537313433, 0.9552238805970149, 0.9552238805970149, 0.9850746268656716,
    // 0.9402985074626866, 0.9850746268656716, 1.0, 0.9850746268656716, 0.9850746268656716,
    // 0.9402985074626866, 0.9552238805970149, 0.9253731343283582, 0.9701492537313433, 0.9701492537313433,
    // 0.9701492537313433, 1.0, 0.9402985074626866, 0.9701492537313433, 0.9552238805970149,
    // 0.9402985074626866, 0.9701492537313433, 0.9552238805970149, 0.9850746268656716, 0.9701492537313433]
    //
    // testacc=0.9601943634596699
    // folds:
    // [0.9543245869776482, 0.9271137026239067, 0.9582118561710399, 0.966958211856171, 0.9718172983479106,
    // 0.9650145772594753, 0.9582118561710399, 0.9689018464528668, 0.9494655004859086, 0.966958211856171,
    // 0.9620991253644315, 0.9698736637512148, 0.9659863945578231, 0.9659863945578231, 0.966958211856171,
    // 0.9718172983479106, 0.9416909620991254, 0.9640427599611273, 0.9368318756073858, 0.9698736637512148,
    // 0.966958211856171, 0.9494655004859086, 0.9582118561710399, 0.9698736637512148, 0.9620991253644315,
    // 0.9650145772594753, 0.9640427599611273, 0.9601554907677357, 0.9319727891156463, 0.967930029154519,
    // 0.9523809523809523, 0.967930029154519, 0.9591836734693877, 0.9727891156462585, 0.9572400388726919,
    // 0.9329446064139941, 0.9718172983479106, 0.9620991253644315, 0.9689018464528668, 0.9514091350826045,
    // 0.9630709426627794, 0.966958211856171, 0.9543245869776482, 0.9718172983479106, 0.9698736637512148,
    // 0.9552964042759962, 0.9727891156462585, 0.9329446064139941, 0.9630709426627794, 0.9650145772594753]
    //pre change trainaccs/testaccs on 50 folds of italypowerdemand:
    // trainacc=0.9689552238805973
    // folds:
    // [0.9402985074626866, 0.9701492537313433, 1.0, 1.0, 0.9253731343283582,
    // 0.9850746268656716, 0.9850746268656716, 0.9552238805970149, 0.9552238805970149, 1.0,
    // 0.9402985074626866, 0.9701492537313433, 0.9701492537313433, 0.9850746268656716, 0.9850746268656716,
    // 0.9701492537313433, 0.9253731343283582, 0.9850746268656716, 1.0, 0.9701492537313433,
    // 0.9850746268656716, 0.9850746268656716, 0.9701492537313433, 0.9701492537313433, 1.0,
    // 0.9552238805970149, 0.9552238805970149, 0.9552238805970149, 0.9701492537313433, 0.9850746268656716,
    // 0.9552238805970149, 0.9850746268656716, 1.0, 0.9850746268656716, 0.9850746268656716,
    // 0.9701492537313433, 0.9552238805970149, 0.9402985074626866, 0.9701492537313433, 0.9552238805970149,
    // 0.9850746268656716, 1.0, 0.9402985074626866, 0.9701492537313433, 0.9402985074626866,
    // 0.9253731343283582, 0.9701492537313433, 0.9552238805970149, 0.9552238805970149, 0.9552238805970149]
    //
    // testacc=0.9590670553935859
    // folds:
    // [0.9514091350826045, 0.9290573372206026, 0.9591836734693877, 0.967930029154519, 0.9708454810495627,
    // 0.9689018464528668, 0.9650145772594753, 0.9708454810495627, 0.9358600583090378, 0.967930029154519,
    // 0.9640427599611273, 0.9640427599611273, 0.9630709426627794, 0.9659863945578231, 0.9543245869776482,
    // 0.9689018464528668, 0.9514091350826045, 0.9659863945578231, 0.9659863945578231, 0.9611273080660836,
    // 0.9689018464528668, 0.9504373177842566, 0.9504373177842566, 0.9698736637512148, 0.9630709426627794,
    // 0.9620991253644315, 0.9582118561710399, 0.966958211856171, 0.9543245869776482, 0.9640427599611273,
    // 0.9514091350826045, 0.9533527696793003, 0.9659863945578231, 0.9689018464528668, 0.9572400388726919,
    // 0.967930029154519, 0.9689018464528668, 0.9698736637512148, 0.9698736637512148, 0.9582118561710399,
    // 0.9601554907677357, 0.966958211856171, 0.9378036929057337, 0.9689018464528668, 0.9650145772594753,
    // 0.8794946550048591, 0.9737609329446064, 0.9319727891156463, 0.9484936831875608, 0.9689018464528668]
    System.out.println("ranftestsWITHCHANGES");
    String dataset = "ItalyPowerDemand";
    Instances train = DatasetLoading.loadDataNullable("c:/tsc problems/"+dataset+"/"+dataset+"_TRAIN");
    Instances test = DatasetLoading.loadDataNullable("c:/tsc problems/"+dataset+"/"+dataset+"_TEST");
    int rs = 50;    // number of resamples
    double[] trainAccs = new double[rs];
    double[] testAccs = new double[rs];
    double trainAcc =0;
    double testAcc =0;
    for (int r = 0; r < rs; r++) {
        Instances[] data = InstanceTools.resampleTrainAndTestInstances(train, test, r);
        TunedRandomForest ranF = new TunedRandomForest();
        ranF.setCrossValidate(true);
        ranF.setEstimateAcc(true);
        try {
            ranF.buildClassifier(data[0]);
        } catch (Exception ex) {
            Logger.getLogger(TunedRandomForest.class.getName()).log(Level.SEVERE, null, ex);
        }
        trainAccs[r] = ranF.res.getAcc();
        trainAcc+=trainAccs[r];
        testAccs[r] = ClassifierTools.accuracy(data[1], ranF);
        testAcc+=testAccs[r];
        System.out.print(".");
    }
    trainAcc/=rs;
    testAcc/=rs;
    System.out.println("\nacc="+trainAcc);
    System.out.println(Arrays.toString(trainAccs));
    System.out.println("\nacc="+testAcc);
    System.out.println(Arrays.toString(testAccs));
}
/**
 * Ad-hoc experimental entry point; currently runs cheatOnMNIST() and exits.
 */
public static void main(String[] args) {
    cheatOnMNIST();
    TunedRandomForest randF=new TunedRandomForest();
    randF.m_Debug=true;
    randF.setStandardParaSearchSpace(200);
//        randF.setNumFeaturesInProblem(3);
//        for(int i=1;i<=1000;i++)
//            randF.setParametersFromIndex(i);
//        jamesltests();
//        testBinMaker();
    System.exit(0);
    // NOTE(review): everything below is unreachable while the exit above remains;
    // kept for reference as experimental code.
    DecimalFormat df = new DecimalFormat("##.###");
    try{
        String dset = "balloons";
        Instances all=DatasetLoading.loadDataNullable("C:\\Users\\ajb\\Dropbox\\UCI Problems\\"+dset+"\\"+dset);
        Instances[] split=InstanceTools.resampleInstances(all,1,0.5);
        TunedRandomForest rf=new TunedRandomForest();
        rf.debug(true);
        rf.tuneParameters(true);
        rf.buildClassifier(split[0]);
        System.out.println(" bag percent ="+rf.getBaggingPercent()+" OOB error "+rf.measureOutOfBagError());
        /*
        for(int i=0;i<5;i++){
            System.out.println(" Number f trees ="+rf.getNumTrees()+" num elements ="+rf.numElements());
            System.out.println(" bag percent ="+rf.getBaggingPercent()+" OOB error "+rf.measureOutOfBagError());
            double[][] probs=rf.findOOBProbabilities();
/*s
            for (int j = 0; j < probs.length; j++) {
                double[] prob = probs[j];
                for (int k = 0; k < prob.length; k++) {
                    System.out.print(","+prob[k]);
                }
                System.out.println("");
            }
            rf.addTrees(50, train);
        }
        int correct=0;
        for(Instance ins:test){
            double[] pred=rf.distributionForInstance(ins);
            double cls=rf.classifyInstance(ins);
            if(cls==ins.classValue())
                correct++;
        }
        System.out.println(" ACC = "+((double)correct)/test.numInstances());
//        System.out.println(" calc out of bag? ="+rf.m_bagger.m_CalcOutOfBag);
       System.exit(0);
        double a =ClassifierTools.singleTrainTestSplitAccuracy(rf, train, test);
        System.out.println(" error ="+df.format(1-a));
//        tsbf.buildClassifier(train);
//        double c=tsbf.classifyInstance(test.instance(0));
//        System.out.println(" Class ="+c);
*/
    }catch(Exception e){
        System.out.println("Exception "+e);
        e.printStackTrace();
        System.exit(0);
    }
}
/**
 * Quick sanity experiment: accuracy of a plain RandomForest on the MNIST
 * train/test split for an increasing number of trees (50 to 1000 in steps
 * of 50, after a first run with the default forest size).
 */
public static void cheatOnMNIST(){
    String path="\\\\cmptscsvr.cmp.uea.ac.uk\\ueatsc\\Data\\LargeProblems\\MNIST\\MNIST_";
    Instances mnistTrain=DatasetLoading.loadDataNullable(path+"TRAIN");
    Instances mnistTest=DatasetLoading.loadDataNullable(path+"TEST");
    RandomForest forest=new RandomForest();
    System.out.println("Data loaded ......");
    double acc =ClassifierTools.singleTrainTestSplitAccuracy(forest, mnistTrain, mnistTest);
    System.out.println("Trees ="+10+" acc = "+acc);
    int trees=50;
    while(trees<=1000){
        forest.setNumTrees(trees);
        acc =ClassifierTools.singleTrainTestSplitAccuracy(forest, mnistTrain, mnistTest);
        System.out.println("Trees ="+trees+" acc = "+acc);
        trees+=50;
    }
}
}
| 35,954 | 42.847561 | 192 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/tuned/TunedSVM.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.classifiers.tuned;
import experiments.CollateResults;
import fileIO.OutFile;
import java.io.File;
import java.io.FileNotFoundException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import tsml.classifiers.ParameterSplittable;
import utilities.ClassifierTools;
import evaluation.evaluators.CrossValidationEvaluator;
import utilities.InstanceTools;
import tsml.classifiers.SaveParameterInfo;
import weka.classifiers.functions.SMO;
import weka.classifiers.functions.supportVector.Kernel;
import weka.classifiers.functions.supportVector.PolyKernel;
import weka.classifiers.functions.supportVector.RBFKernel;
import evaluation.storage.ClassifierResults;
import experiments.data.DatasetLoading;
import weka.core.*;
import machine_learning.classifiers.SaveEachParameter;
/*
Tony's attempt to see the effect of parameter setting on SVM.
Two parameters:
kernel para: for polynomial this is the weighting given to lower order terms
k(x,x')=(<x'.x>+b)^d
regularisation parameter, used in the SMO
m_C
*/
/**
*
* @author ajb
*
*
*
*
* NOTE jamesl: this classifier is now out of step with the current intended purpose usage of
* EnhancedAbstractClassifier (in that, it extend RandomForest directly
* which does not extend that)
*
* Simple usage with ClassifierExperiments may not be guaranteed to work, especially in trainfile writing
TunedSVM sets the margin c through b ten fold cross validation.
If the kernel type is RBF, also set sigma through CV, same values as c
NOTE:
1. CV could be done faster?
2. Could use libSVM instead
*
*/
public class TunedSVM extends SMO implements SaveParameterInfo,SaveEachParameter,ParameterSplittable{
boolean setSeed=false;  // true once setSeed(int) has been called
int seed;               // seed for the CV folds and the internal rng
int minC=-16;//These search values are used for all kernels with C. It is also used for Gamma in RBF, but not for the Polynomial exponent search
int maxC=16;
int minExponent=1;//These values are also used for Gamma in RBF, but not for the Polynomial exponent search
int maxExponent=6;
int minB=0;//These are for the constant value in the Polynomial Kernel
int maxB=5;
double IncrementB=1;    // step for the B grid - appears unused in the visible tuning code, TODO confirm
double[] paraSpace1;//For fixed polynomial (LINEAR and QUADRATIC) there is just one range of parameters
double[] paraSpace2;//For RBF this is gamma, for POLYNOMIAL it is exponent.
double[] paraSpace3;//For POLYNOMIAL this is the constant term b in the kernel.
private static int MAX_FOLDS=10;    // upper bound on CV folds (reduced when there are fewer instances)
private double[] paras;//Stored final parameter values after search
String trainPath="";    // destination for train results, if requested
boolean debug=false;
protected boolean findTrainAcc=true;    // estimate train accuracy (via CV) during build?
Random rng;             // randomness for tie-breaking and internal seeds
ArrayList<Double> accuracy;     // CV accuracy of every parameter setting tried
private boolean kernelOptimise=false; //Choose between linear, quadratic and RBF kernel
private boolean tuneParameters=true;
private ClassifierResults res =new ClassifierResults();     // results of the chosen parameter setting
private long combinedBuildTime;     // summed build time when results are read back from per-parameter files
private boolean buildFromFile=false;
protected String resultsPath;   // path prefix for the per-parameter result files
protected boolean saveEachParaAcc=false;
//HARD CODED FLAG that allows a build from partials
private boolean buildFromPartial=false;
/**
 * Sets the path prefix for per-parameter result files and switches on
 * saving each parameter evaluation to disk.
 */
@Override
public void setPathToSaveParameters(String r){
    resultsPath=r;
    setSaveEachParaAcc(true);
}
/** Toggles writing each parameter setting's CV results to file. */
@Override
public void setSaveEachParaAcc(boolean b){
    saveEachParaAcc=b;
}
/**
 * Default configuration: an RBF kernel whose C and gamma are tuned by
 * cross-validation, with logistic models fitted so probability estimates
 * are available.
 */
public TunedSVM(){
    super();
    // tune C (and gamma) by CV, but do not search over kernel families
    tuneParameters=true;
    kernelOptimise=false;
    // default kernel is RBF
    kernel=KernelType.RBF;
    setKernel(new RBFKernel());
    setBuildLogisticModels(true);
    rng=new Random();
    accuracy=new ArrayList<>();
}
/** Toggles whether train accuracy is estimated (by CV or equivalent) during the build. */
public void estimateAccFromTrain(boolean b){
    this.findTrainAcc=b;
}
/**
 * Fixes the random seed so tuning and fold construction are reproducible.
 * @param s seed for the internal random number generator
 */
public void setSeed(int s){
    this.setSeed=true;
    seed=s;
    // constructing with the seed is equivalent to new Random() then setSeed(s)
    rng=new Random(s);
}
/**
 * Requests that a train-accuracy estimate is found and written to the given
 * file path.
 */
public void writeTrainEstimatesToFile(String train) {
    findTrainAcc=true;
    trainPath=train;
}
/** Toggles finding a train accuracy estimate (without setting an output file). */
public void setFindTrainAccuracyEstimate(boolean setCV){
    findTrainAcc=setCV;
}
//Think this always does para search?
// @Override
// public boolean findsTrainAccuracyEstimate(){ return findTrainAcc;}
/**
 * Summarises the tuning outcome: build time, CV accuracy, the chosen
 * parameter values (kernel dependent) and the accuracy of every parameter
 * setting evaluated, as a comma-separated string.
 */
@Override
public String getParameters() {
    StringBuilder out=new StringBuilder();
    out.append("BuildTime,").append(res.getBuildTimeInNanos());
    out.append(",CVAcc,").append(res.getAcc());
    out.append(",C,").append(paras[0]);
    if(paras.length>1){
        if(kernel==KernelType.RBF)
            out.append(",Gamma,").append(paras[1]);
        else if(paras.length>2 && kernel==KernelType.POLYNOMIAL)
            out.append(",Power,").append(paras[1]).append(",b,").append(paras[2]);
    }
    for(double acc:accuracy)
        out.append(",").append(acc);
    return out.toString();
}
/** Enables/disables the internal parameter search performed at build time. */
@Override
public void setParamSearch(boolean b) {
    tuneParameters=b;
}
/**
 * Configures the classifier directly from a 1-based index into the parameter
 * grid, for distributed tuning where each job evaluates one setting. The
 * mapping depends on the kernel: LINEAR/QUADRATIC index only C;
 * RBF indexes the (C,gamma) grid; POLYNOMIAL indexes the (C,exponent,b) grid.
 * Internal tuning is disabled as a side effect.
 *
 * @param x 1-based index into the parameter grid of the current kernel
 * @throws UnsupportedOperationException if x is outside the grid range
 */
@Override
public void setParametersFromIndex(int x) {
    kernelOptimise=false; //Choose between linear, quadratic and RBF kernel
    tuneParameters=false;
    int numCParas=maxC-minC+1;
    if(kernel==KernelType.LINEAR || kernel==KernelType.QUADRATIC){//Single parameter for C between 1 and 33
        if(x<1 || x>numCParas)//Error, invalid range
            throw new UnsupportedOperationException("ERROR parameter index "+x+" out of range "+minC+" to "+ "max"); //To change body of generated methods, choose Tools | Templates.
        paras=new double[1];
        paras[0]=Math.pow(2,minC+(x-1));
        setC(paras[0]);
    }
    else if(kernel==KernelType.RBF){//Two parameters, same range for both
        if(x<1 || x>numCParas*numCParas)//Error, invalid range
            throw new UnsupportedOperationException("ERROR parameter index "+x+" out of range "+minC+" to "+ "max"); //To change body of generated methods, choose Tools | Templates.
        paras=new double[2];
        // row-major mapping: (x-1)/numCParas gives the C exponent, (x-1)%numCParas the gamma exponent
        int temp=minC+(x-1)/numCParas;
        paras[0]=Math.pow(2,temp);
        temp=minC+(x-1)%numCParas;
        paras[1]=Math.pow(2,temp);
        setC(paras[0]);
        ((RBFKernel)m_kernel).setGamma(paras[1]);
        System.out.println("");
    }
    else if(kernel==KernelType.POLYNOMIAL){
        //Three paras, not evenly distributed. C [1 to 33] exponent =[1 to 6], b=[0 to 5]
        paras=new double[3];
        int numExpParas=maxExponent-minExponent+1;
        int numBParas=maxB-minB+1;
        if(x<1 || x>numCParas*numExpParas*numBParas)//Error, invalid range
            throw new UnsupportedOperationException("ERROR parameter index "+x+" out of range for PolyNomialKernel"); //To change body of generated methods, choose Tools | Templates.
        // C varies fastest, then b, then the exponent
        int cPara=minC+(x-1)%numCParas;
        int expPara=minExponent+(x-1)/(numBParas*numCParas);
        int bPara=minB+((x-1)/numCParas)%numBParas;
        paras[0]=Math.pow(2,cPara);
        paras[1]=expPara;
        paras[2]=bPara;
        PolynomialKernel kern = new PolynomialKernel();
        kern.setExponent(paras[1]);
        kern.setB(paras[2]);
        setKernel(kern);
        setC(paras[0]);
        System.out.println("Index "+x+" maps to "+cPara+","+expPara+","+bPara);
    }
}
/** Kernel families the tuner can configure. */
public enum KernelType {LINEAR,QUADRATIC,POLYNOMIAL,RBF};
KernelType kernel;  // currently selected kernel family
/** Toggles verbose output during tuning. */
public void debug(boolean b){
    this.debug=b;
}
/**
 * Sets the kernel family used by the SVM. LINEAR and POLYNOMIAL both start
 * with a polynomial kernel of exponent 1 (POLYNOMIAL's exponent is tuned
 * later), QUADRATIC fixes the exponent at 2, and RBF installs a fresh RBF
 * kernel.
 */
public void setKernelType(KernelType type) {
    kernel = type;
    switch (type) {
        case RBF:
            setKernel(new RBFKernel());
            break;
        case QUADRATIC:
            PolynomialKernel quad=new PolynomialKernel();
            quad.setExponent(2);
            setKernel(quad);
            break;
        default:    // LINEAR and POLYNOMIAL both begin at exponent 1
            PolynomialKernel poly=new PolynomialKernel();
            poly.setExponent(1);
            setKernel(poly);
            break;
    }
}
/** Overrides the first (C) parameter search grid with a caller-supplied one. */
public void setParaSpace(double[] p){
    paraSpace1=p;
}
/**
 * Builds the default search grids: C over 2^minC..2^maxC; for RBF, gamma
 * over the same exponential grid; for POLYNOMIAL, the exponent grid
 * minExponent..maxExponent and the constant-term grid minB..maxB.
 */
public void setStandardParaSearchSpace(){
    int cRange=maxC-minC+1;
    paraSpace1=new double[cRange];
    for(int j=0;j<cRange;j++)
        paraSpace1[j]=Math.pow(2,minC+j);
    if(kernel==KernelType.RBF){
        // gamma shares the exponential grid used for C
        paraSpace2=new double[cRange];
        for(int j=0;j<cRange;j++)
            paraSpace2[j]=Math.pow(2,minC+j);
    }
    else if(kernel==KernelType.POLYNOMIAL){
        paraSpace2=new double[maxExponent-minExponent+1];
        for(int j=0;j<paraSpace2.length;j++)
            paraSpace2[j]=minExponent+j;
        paraSpace3=new double[maxB-minB+1];
        for(int j=0;j<paraSpace3.length;j++)
            paraSpace3[j]=minB+j;
    }
}
/**
 * Builds a C search grid of n values spread exponentially between 2^minC and
 * 2^maxC.
 *
 * Fix: the exponent step was previously computed with integer division,
 * (maxC-minC)/(n-1), which truncated the interval so the grid failed to span
 * up to 2^maxC for most n; it is now computed in floating point. A request
 * for a single value (n&lt;=1) yields just 2^minC instead of dividing by zero.
 *
 * @param n number of parameter values to try, spread across 2^minC and 2^maxC
 * on an exponential scale
 */
public void setLargePolynomialParameterSpace(int n){
    if(n<1)
        n=1;    // guard: at least one grid point
    paraSpace1=new double[n];
    double interval= n>1 ? (maxC-minC)/(double)(n-1) : 0;
    double exp=minC;
    for(int i=0;i<n;i++){
        paraSpace1[i]= Math.pow(2,exp);
        exp+=interval;
    }
}
/** Toggles searching over kernel families (linear/quadratic/RBF) at build time. */
public void optimiseKernel(boolean b){kernelOptimise=b;}
/** @return true if the kernel family itself will be selected by CV */
public boolean getOptimiseKernel(){ return kernelOptimise;}
/** Toggles tuning the kernel parameters (C, gamma, etc.) at build time. */
public void optimiseParas(boolean b){tuneParameters=b;}
/**
 * Record of one evaluated parameter setting: up to three parameter values
 * (x,y,z; unused slots are zero) and the cross-validation results they
 * produced. Used for random tie-breaking between equally accurate settings.
 */
static class ResultsHolder{
    double x,y,z;
    ClassifierResults res;
    ResultsHolder(double a, double b,ClassifierResults r){
        this(a,b,0,r);  // two-parameter settings leave z at zero
    }
    ResultsHolder(double a, double b,double c,ClassifierResults r){
        x=a;
        y=b;
        z=c;
        res=r;
    }
}
/**
 * Tunes C and the RBF gamma jointly over paraSpace1 x paraSpace2 by
 * cross-validation on train, breaking ties at random. If saveEachParaAcc is
 * set, each combination's results are written to - and later reloaded from -
 * resultsPath&lt;index&gt;.csv so the grid search can be distributed and resumed.
 *
 * Fixes over the original: the per-parameter file now contains this
 * combination's CV results (tempResults) rather than the still-empty field
 * res; the completeness check walks every file index instead of repeatedly
 * testing the last one; and the check/reload loops iterate paraSpace2 for
 * gamma instead of reusing paraSpace1.
 *
 * @param train training data used for the CV grid search
 * @throws Exception if cross-validation or file IO fails
 */
public void tuneRBF(Instances train) throws Exception {
    paras=new double[2];
    int folds=MAX_FOLDS;
    if(folds>train.numInstances())
        folds=train.numInstances();
    double minErr=1;
    this.setSeed(rng.nextInt());
    Instances trainCopy=new Instances(train);
    CrossValidationEvaluator cv = new CrossValidationEvaluator();
    if (setSeed)
        cv.setSeed(seed);
    cv.setNumFolds(folds);
    cv.buildFolds(trainCopy);
    ArrayList<ResultsHolder> ties=new ArrayList<>();
    ClassifierResults tempResults;
    int count=0;
    for(double p1:paraSpace1){//C
        for(double p2:paraSpace2){//gamma
            count++;
            if(saveEachParaAcc){// check if para value already done
                File f=new File(resultsPath+count+".csv");
                if(f.exists()){
                    if(f.length()==0){//Empty, delete
                        f.delete();
                    }
                    else
                        continue;//Done already, skip this combination
                }
            }
            SMO model = new SMO();
            RBFKernel kern = new RBFKernel();
            kern.setGamma(p2);
            model.setKernel(kern);
            model.setC(p1);
            model.setBuildLogisticModels(true);
            tempResults=cv.crossValidateWithStats(model,trainCopy);
            tempResults.setEstimatorName("TunedSVM"+kernel);
            tempResults.setDatasetName(train.relationName());
            tempResults.setFoldID(seed);
            tempResults.setSplit("train");
            tempResults.setParas("C,"+p1+",Gamma,"+p2);
            double e=1-tempResults.getAcc();
            accuracy.add(tempResults.getAcc());
            if(debug)
                System.out.println(" C= "+p1+" Gamma = "+p2+" Acc = "+(1-e));
            if(saveEachParaAcc){// Save results for this combination and close
                //BUG FIX: write tempResults (this combination's results), not the unused field res
                tempResults.writeFullResultsToFile(resultsPath+count+".csv");
                File f=new File(resultsPath+count+".csv");
                if(f.exists())
                    f.setWritable(true, false);
            }
            else{
                if(e<minErr){
                    minErr=e;
                    ties=new ArrayList<>();//Remove previous ties
                    ties.add(new ResultsHolder(p1,p2,tempResults));
                }
                else if(e==minErr){//Sort out ties
                    ties.add(new ResultsHolder(p1,p2,tempResults));
                }
            }
        }
    }
    double bestC;
    double bestSigma;
    minErr=1;
    if(saveEachParaAcc){
        // Check every parameter combination has a non-empty results file first.
        int missing=0;
        int checkCount=0;
        for(double p1:paraSpace1){
            for(double p2:paraSpace2){
                checkCount++;
                //BUG FIX: check each file index in turn (previously the stale
                //final value of count was reused, so only the last file was tested)
                File f=new File(resultsPath+checkCount+".csv");
                if(!(f.exists() && f.length()>0))
                    missing++;
            }
        }
        if(missing==0)//All present
        {
            combinedBuildTime=0;
            // Read them all back from file and pick the best, tie-breaking at random
            count=0;
            for(double p1:paraSpace1){
                for(double p2:paraSpace2){
                    count++;
                    tempResults = new ClassifierResults();
                    tempResults.loadResultsFromFile(resultsPath+count+".csv");
                    combinedBuildTime+=tempResults.getBuildTimeInNanos();
                    double e=1-tempResults.getAcc();
                    if(e<minErr){
                        minErr=e;
                        ties=new ArrayList<>();//Remove previous ties
                        ties.add(new ResultsHolder(p1,p2,tempResults));
                    }
                    else if(e==minErr){//Sort out ties
                        ties.add(new ResultsHolder(p1,p2,tempResults));
                    }
                    //Delete the files here to clean up.
                    File f= new File(resultsPath+count+".csv");
                    if(!f.delete())
                        System.out.println("DELETE FAILED "+resultsPath+count+".csv");
                }
            }
            ResultsHolder best=ties.get(rng.nextInt(ties.size()));
            bestC=best.x;
            bestSigma=best.y;
            paras[0]=bestC;
            setC(bestC);
            ((RBFKernel)m_kernel).setGamma(bestSigma);
            paras[1]=bestSigma;
            res=best.res;
            if(debug)
                System.out.println("Best C ="+bestC+" best Gamma = "+bestSigma+" best train acc = "+res.getAcc());
        }else//Not all present, just ditch
            System.out.println(resultsPath+" error: missing ="+missing+" parameter values");
    }
    else{
        ResultsHolder best=ties.get(rng.nextInt(ties.size()));
        bestC=best.x;
        bestSigma=best.y;
        paras[0]=bestC;
        setC(bestC);
        ((RBFKernel)m_kernel).setGamma(bestSigma);
        paras[1]=bestSigma;
        res=best.res;
    }
}
/**
 * Tunes C, the polynomial exponent and the constant term b jointly over
 * paraSpace1 x paraSpace2 x paraSpace3 by cross-validation on train,
 * breaking ties at random. With saveEachParaAcc set, per-combination results
 * are written to and reloaded from resultsPath&lt;index&gt;.csv.
 *
 * Fixes over the original: invalid partial result files are now actually
 * deleted (the old code only printed that it would); the per-parameter file
 * contains this combination's CV results (tempResults) rather than the
 * still-empty field res; results metadata is recorded consistently with
 * tuneRBF; and the completeness check walks every file index instead of
 * re-testing the last one.
 *
 * @param train training data for the grid search
 * @throws Exception if cross-validation or file IO fails
 */
public void tunePolynomial(Instances train) throws Exception {
    paras=new double[3];
    int folds=MAX_FOLDS;
    if(folds>train.numInstances())
        folds=train.numInstances();
    double minErr=1;
    this.setSeed(rng.nextInt());
    Instances trainCopy=new Instances(train);
    CrossValidationEvaluator cv = new CrossValidationEvaluator();
    if (setSeed)
        cv.setSeed(seed);
    cv.setNumFolds(folds);
    cv.buildFolds(trainCopy);
    ArrayList<ResultsHolder> ties=new ArrayList<>();
    ClassifierResults tempResults;
    int count=0;
    for(double p1:paraSpace1){//C
        for(double p2:paraSpace2){//Exponent
            for(double p3:paraSpace3){//B
                count++;
                if(saveEachParaAcc){// check if para value already done
                    File f=new File(resultsPath+count+".csv");
                    if(f.exists()){
                        if(CollateResults.validateSingleFoldFile(resultsPath+count+".csv")==false){
                            System.out.println("Deleting file "+resultsPath+count+".csv because incomplete, size ="+f.length());
                            f.delete();//BUG FIX: previously only the message was printed
                        }
                        else
                            continue;//Done already, skip this combination
                    }
                }
                SMO model = new SMO();
                PolynomialKernel kern = new PolynomialKernel();
                kern.setExponent(p2);
                kern.setB(p3);
                model.setKernel(kern);
                model.setC(p1);
                model.setBuildLogisticModels(true);
                tempResults=cv.crossValidateWithStats(model,trainCopy);
                // record metadata, consistent with tuneRBF
                tempResults.setEstimatorName("TunedSVM"+kernel);
                tempResults.setDatasetName(train.relationName());
                tempResults.setFoldID(seed);
                tempResults.setSplit("train");
                tempResults.setParas("C,"+p1+",Exp,"+p2+",B,"+p3);
                double e=1-tempResults.getAcc();
                accuracy.add(tempResults.getAcc());
                if(debug)
                    System.out.println("C="+p1+",Exp="+p2+",B="+p3+", Acc = "+(1-e));
                if(saveEachParaAcc){// Save results for this combination and close
                    //BUG FIX: write tempResults, not the still-empty field res
                    tempResults.writeFullResultsToFile(resultsPath+count+".csv");
                }
                else{
                    if(e<minErr){
                        minErr=e;
                        ties=new ArrayList<>();//Remove previous ties
                        ties.add(new ResultsHolder(p1,p2,p3,tempResults));
                    }
                    else if(e==minErr){//Sort out ties
                        ties.add(new ResultsHolder(p1,p2,p3,tempResults));
                    }
                }
            }
        }
    }
    double bestC;
    double bestExponent;
    double bestB;
    minErr=1;
    if(saveEachParaAcc){
        // Check every parameter combination has a non-empty results file first.
        int missing=0;
        int checkCount=0;
        for(double p1:paraSpace1){
            for(double p2:paraSpace2){
                for(double p3:paraSpace3){
                    checkCount++;
                    //BUG FIX: check each file index (count was previously stale here)
                    File f=new File(resultsPath+checkCount+".csv");
                    if(!(f.exists() && f.length()>0))
                        missing++;
                }
            }
        }
        if(missing==0)//All present
        {
            combinedBuildTime=0;
            // Read them all back and pick the best, tie-breaking at random
            count=0;
            for(double p1:paraSpace1){//C
                for(double p2:paraSpace2){//Exponent
                    for(double p3:paraSpace3){//B
                        count++;
                        tempResults = new ClassifierResults();
                        tempResults.loadResultsFromFile(resultsPath+count+".csv");
                        combinedBuildTime+=tempResults.getBuildTimeInNanos();
                        double e=1-tempResults.getAcc();
                        if(e<minErr){
                            minErr=e;
                            ties=new ArrayList<>();//Remove previous ties
                            ties.add(new ResultsHolder(p1,p2,p3,tempResults));
                        }
                        else if(e==minErr){//Sort out ties
                            ties.add(new ResultsHolder(p1,p2,p3,tempResults));
                        }
                        //Delete the files here to clean up.
                        File f= new File(resultsPath+count+".csv");
                        if(!f.delete())
                            System.out.println("DELETE FAILED "+resultsPath+count+".csv");
                    }
                }
            }
            ResultsHolder best=ties.get(rng.nextInt(ties.size()));
            bestC=best.x;
            bestExponent=best.y;
            bestB=best.z;
            paras[0]=bestC;
            paras[1]=bestExponent;
            paras[2]=bestB;
            PolynomialKernel kern = new PolynomialKernel();
            kern.setExponent(bestExponent);
            kern.setB(bestB);
            setKernel(kern);
            setC(bestC);
            res=best.res;
            if(debug)
                System.out.println("Best C ="+bestC+" best Exponent = "+bestExponent+" best train acc = "+res.getAcc());
        }else//Not all present, just ditch
            System.out.println(resultsPath+" error: missing ="+missing+" parameter values");
    }
    else{
        ResultsHolder best=ties.get(rng.nextInt(ties.size()));
        bestC=best.x;
        bestExponent=best.y;
        bestB=best.z;
        paras[0]=bestC;
        paras[1]=bestExponent;
        paras[2]=bestB;
        PolynomialKernel kern = new PolynomialKernel();
        kern.setExponent(bestExponent);
        kern.setB(bestB);
        setKernel(kern);
        setC(bestC);
        res=best.res;
    }
}
/**
 * Tunes C alone over paraSpace1 for the already-configured (fixed-exponent)
 * polynomial kernel, by cross-validation with random tie-breaking. With
 * saveEachParaAcc set, per-parameter results are written to and reloaded
 * from resultsPath&lt;index&gt;.csv. This is kept separate from the general
 * exponent search deliberately (see original note about bug risk).
 *
 * Fixes over the original: the per-parameter file contains this setting's CV
 * results (tempResults) rather than the still-empty field res; the
 * completeness check walks every file index instead of re-testing the last
 * one; and the ties/minErr accumulators are reset before the reload pass so
 * in-memory and reloaded results are not mixed.
 *
 * @param train training data for the C search
 * @throws Exception if cross-validation or file IO fails
 */
public void tuneCForFixedPolynomial(Instances train) throws Exception {
    paras=new double[1];
    int folds=MAX_FOLDS;
    if(folds>train.numInstances())
        folds=train.numInstances();
    double minErr=1;
    this.setSeed(rng.nextInt());
    Instances trainCopy=new Instances(train);
    CrossValidationEvaluator cv = new CrossValidationEvaluator();
    if (setSeed)
        cv.setSeed(seed);
    cv.setNumFolds(folds);
    cv.buildFolds(trainCopy);
    ArrayList<ResultsHolder> ties=new ArrayList<>();
    ClassifierResults tempResults;
    int count=0;
    for(double d: paraSpace1){
        count++;
        if(saveEachParaAcc){// check if para value already done
            File f=new File(resultsPath+count+".csv");
            if(f.exists() && f.length()>0)
                continue;//Done already, skip this value
            if(debug)
                System.out.println("PARA COUNT ="+count);
        }
        SMO model = new SMO();
        model.setKernel(m_kernel);  //NOTE(review): kernel object shared across models - confirm SMO copies it internally
        model.setC(d);
        model.setBuildLogisticModels(true);
        tempResults=cv.crossValidateWithStats(model,trainCopy);
        double e=1-tempResults.getAcc();
        accuracy.add(tempResults.getAcc());
        if(saveEachParaAcc){// Save results for this parameter value
            //BUG FIX: write tempResults, not the still-empty field res
            tempResults.writeFullResultsToFile(resultsPath+count+".csv");
        }
        if(e<minErr){
            minErr=e;
            ties=new ArrayList<>();//Remove previous ties
            ties.add(new ResultsHolder(d,0.0,tempResults));
        }
        else if(e==minErr){//Sort out ties
            ties.add(new ResultsHolder(d,0.0,tempResults));
        }
    }
    if(saveEachParaAcc){// Read them all from file; if all done pick the best
        int missing=0;
        int checkCount=0;
        for(double p1:paraSpace1){
            checkCount++;
            //BUG FIX: check each file index (count was previously stale here)
            File f=new File(resultsPath+checkCount+".csv");
            if(!(f.exists() && f.length()>0))
                missing++;
        }
        if(missing==0)//All present
        {
            combinedBuildTime=0;
            // reset the accumulators: the reloaded files are authoritative
            minErr=1;
            ties=new ArrayList<>();
            count=0;
            for(double p1:paraSpace1){
                count++;
                tempResults = new ClassifierResults();
                tempResults.loadResultsFromFile(resultsPath+count+".csv");
                combinedBuildTime+=tempResults.getBuildTimeInNanos();
                double e=1-tempResults.getAcc();
                if(e<minErr){
                    minErr=e;
                    ties=new ArrayList<>();//Remove previous ties
                    ties.add(new ResultsHolder(p1,0.0,tempResults));
                }
                else if(e==minErr){//Sort out ties
                    ties.add(new ResultsHolder(p1,0.0,tempResults));
                }
                //Delete the files here to clean up.
                File f= new File(resultsPath+count+".csv");
                if(!f.delete())
                    System.out.println("DELETE FAILED "+resultsPath+count+".csv");
            }
            ResultsHolder best=ties.get(rng.nextInt(ties.size()));
            setC(best.x);
            res=best.res;
            paras[0]=best.x;
        }
        else{
            System.out.println(resultsPath+" error: missing ="+missing+" parameter values");
        }
    }
    else{
        ResultsHolder best=ties.get(rng.nextInt(ties.size()));
        setC(best.x);
        res=best.res;
        paras[0]=best.x;
    }
}
/**
 * Chooses between linear, quadratic and RBF kernels by fully tuning each on
 * train and keeping the one with the best CV accuracy; the winner's kernel
 * and parameters are installed on this classifier.
 * NOTE(review): the loop runs over all KernelType values, but POLYNOMIAL has
 * no case in the switch and is therefore silently skipped; ties between the
 * families fall through to the RBF branch.
 */
public void selectKernel(Instances train) throws Exception {
    KernelType[] ker=KernelType.values();
    double[] rbfParas=new double[2];
    double rbfCVAcc=0;
    double linearBestC=0;
    double linearCVAcc=0;
    double quadraticBestC=0;
    double quadraticCVAcc=0;
    for(KernelType k:ker){
        TunedSVM temp=new TunedSVM();
        Kernel kernel;
        switch(k){
            case LINEAR:
                PolyKernel p=new PolyKernel();
                p.setExponent(1);
                temp.setKernel(p);
                temp.setStandardParaSearchSpace();
                temp.tuneCForFixedPolynomial(train);
                linearCVAcc=temp.res.getAcc();
                linearBestC=temp.getC();
                break;
            case QUADRATIC:
                PolyKernel p2=new PolyKernel();
                p2.setExponent(2);
                temp.setKernel(p2);
                temp.setStandardParaSearchSpace();
                temp.tuneCForFixedPolynomial(train);
                quadraticCVAcc=temp.res.getAcc();
                quadraticBestC=temp.getC();
                break;
            case RBF:
                RBFKernel kernel2 = new RBFKernel();
                temp.setKernel(kernel2);
                temp.setStandardParaSearchSpace();
                temp.tuneRBF(train);
                rbfCVAcc=temp.res.getAcc();
                rbfParas[0]=temp.getC();
                rbfParas[1]=((RBFKernel)temp.m_kernel).getGamma();
                break;
        }
    }
    //Choose the best family; ties default to RBF
    if(linearCVAcc> rbfCVAcc && linearCVAcc> quadraticCVAcc){//Linear best
        PolyKernel p=new PolyKernel();
        p.setExponent(1);
        setKernel(p);
        setC(linearBestC);
        paras=new double[1];
        paras[0]=linearBestC;
        res.setAcc(linearCVAcc);
    }else if(quadraticCVAcc> linearCVAcc && quadraticCVAcc> rbfCVAcc){ //Quad best
        PolyKernel p=new PolyKernel();
        p.setExponent(2);
        setKernel(p);
        setC(quadraticBestC);
        paras=new double[1];
        paras[0]=quadraticBestC;
        res.setAcc(quadraticCVAcc);
    }else{ //RBF
        RBFKernel kernel = new RBFKernel();
        kernel.setGamma(rbfParas[1]);
        setKernel(kernel);
        setC(rbfParas[0]);
        paras=rbfParas;
        res.setAcc(rbfCVAcc);
    }
}
//TO DO: add the option to build from an incomplete parameter set,
// without deleting
public void buildFromFile() throws FileNotFoundException, Exception{
combinedBuildTime=0;
int count=0;
ArrayList<ResultsHolder> ties=new ArrayList<>();
ClassifierResults tempResults;
double minErr=1;
if(kernel==KernelType.LINEAR || kernel==KernelType.QUADRATIC){
for(double p1:paraSpace1){
count++;
tempResults = new ClassifierResults();
File f= new File(resultsPath+count+".csv");
if(f.exists() && f.length()>0){
tempResults.loadResultsFromFile(resultsPath+count+".csv");
combinedBuildTime+=tempResults.getBuildTimeInNanos();
double e=1-tempResults.getAcc();
if(e<minErr){
minErr=e;
ties=new ArrayList<>();//Remove previous ties
ties.add(new ResultsHolder(p1,0.0,tempResults));
}
else if(e==minErr){//Sort out ties
ties.add(new ResultsHolder(p1,0.0,tempResults));
}
}
}
}else if(kernel==KernelType.RBF){
for(double p1:paraSpace1){
for(double p2:paraSpace2){
count++;
tempResults = new ClassifierResults();
File f= new File(resultsPath+count+".csv");
if(f.exists() && f.length()>0){
tempResults.loadResultsFromFile(resultsPath+count+".csv");
combinedBuildTime+=tempResults.getBuildTimeInNanos();
double e=1-tempResults.getAcc();
if(e<minErr){
minErr=e;
ties=new ArrayList<>();//Remove previous ties
ties.add(new ResultsHolder(p1,p2,tempResults));
}
else if(e==minErr){//Sort out ties
ties.add(new ResultsHolder(p1,p2,tempResults));
}
}
}
}
}else if(kernel==KernelType.POLYNOMIAL){
for(double p1:paraSpace1){
for(double p2:paraSpace2){
for(double p3:paraSpace3){
count++;
tempResults = new ClassifierResults();
File f= new File(resultsPath+count+".csv");
if(f.exists() && f.length()>0){
tempResults.loadResultsFromFile(resultsPath+count+".csv");
combinedBuildTime+=tempResults.getBuildTimeInNanos();
double e=1-tempResults.getAcc();
if(e<minErr){
minErr=e;
ties=new ArrayList<>();//Remove previous ties
ties.add(new ResultsHolder(p1,p2,p3,tempResults));
}
else if(e==minErr){//Sort out ties
ties.add(new ResultsHolder(p1,p2,p3,tempResults));
}
}
}
}
}
}
ResultsHolder best=ties.get(rng.nextInt(ties.size()));
setC(best.x);
res=best.res;
paras[0]=best.x;
if(kernel==KernelType.RBF){
paras[1]=best.y;
//Set Gamma
}else if(kernel==KernelType.POLYNOMIAL){
paras[1]=best.y;
paras[2]=best.z;
}
}
    /**
     * Recovers the best (C, gamma) pair for the RBF kernel from however many
     * per-parameter result files a previous distributed search managed to write.
     * Unlike the complete-search path, any missing files are tolerated: the best
     * setting among the files that ARE present is selected (ties broken at
     * random), applied to this classifier's C and kernel gamma, and its
     * ClassifierResults adopted as the train results.
     *
     * @throws Exception if no parameter result file at all exists at resultsPath
     */
    private void setRBFParasFromPartiallyCompleteSearch() throws Exception{
        paras=new double[2];
        combinedBuildTime=0;
        ArrayList<TunedSVM.ResultsHolder> ties=new ArrayList<>();
        // Read whichever result files exist, tracking the lowest CV error
        int count=0;
        int present=0;
        double minErr=1;
        for(double p1:paraSpace1){//C
            for(double p2:paraSpace2){//GAMMA
                ClassifierResults tempResults = new ClassifierResults();
                count++;    //file numbering follows this loop ordering, starting at 1
                if(new File(resultsPath+count+".csv").exists()){
                    present++;
                    tempResults.loadResultsFromFile(resultsPath+count+".csv");
                    combinedBuildTime+=tempResults.getBuildTimeInNanos();
                    double e=1-tempResults.getAcc();
                    if(e<minErr){
                        minErr=e;
                        ties=new ArrayList<>();//Remove previous ties
                        ties.add(new TunedSVM.ResultsHolder(p1,p2,tempResults));
                    }
                    else if(e==minErr){//Sort out ties
                        ties.add(new TunedSVM.ResultsHolder(p1,p2,tempResults));
                    }
                }
            }
        }
        //Set the parameters from a randomly chosen tie for the best error
        if(present>0){
            System.out.println("Number of paras = "+present);
            System.out.println("Number of best = "+ties.size());
            TunedSVM.ResultsHolder best=ties.get(rng.nextInt(ties.size()));
            double bestC;
            double bestSigma;
            bestC=best.x;
            bestSigma=best.y;
            paras[0]=bestC;
            paras[1]=bestSigma;
            setC(bestC);
            ((RBFKernel)m_kernel).setGamma(bestSigma);
            res=best.res;
        }
        else
            throw new Exception("Error, no parameter files for "+resultsPath);
    }
private void setPolynomialParasFromPartiallyCompleteSearch() throws Exception{
paras=new double[3];
combinedBuildTime=0;
ArrayList<TunedSVM.ResultsHolder> ties=new ArrayList<>();
// If so, read them all from file, pick the best
int count=0;
int present=0;
double minErr=1;
for(double p1:paraSpace1){//
for(double p2:paraSpace2){//
for(double p3:paraSpace3){//
ClassifierResults tempResults = new ClassifierResults();
count++;
if(new File(resultsPath+count+".csv").exists()){
present++;
tempResults.loadResultsFromFile(resultsPath+count+".csv");
combinedBuildTime+=tempResults.getBuildTimeInNanos();
double e=1-tempResults.getAcc();
if(e<minErr){
minErr=e;
ties=new ArrayList<>();//Remove previous ties
ties.add(new TunedSVM.ResultsHolder(p1,p2,p3,tempResults));
}
else if(e==minErr){//Sort out ties
ties.add(new TunedSVM.ResultsHolder(p1,p2,p3,tempResults));
}
}
}
}
}
//Set the parameters
if(present>0){
System.out.println("Number of paras = "+present);
System.out.println("Number of best = "+ties.size());
TunedSVM.ResultsHolder best=ties.get(rng.nextInt(ties.size()));
double bestC;
double bestB;
bestC=best.x;
bestB=best.y;
paras[0]=bestC;
paras[1]=bestB;
setC(bestC);
((PolynomialKernel)m_kernel).setB(bestB);
res=best.res;
}
else
throw new Exception("Error, no parameter files for "+resultsPath);
}
@Override
public void buildClassifier(Instances train) throws Exception {
res =new ClassifierResults();
long t=System.nanoTime();
// if(kernelOptimise)
// selectKernel(train);
if(buildFromPartial){
if(paraSpace1==null)
setStandardParaSearchSpace();
if(kernel==KernelType.RBF)
setRBFParasFromPartiallyCompleteSearch();
// else if(kernel==KernelType.LINEAR || kernel==KernelType.QUADRATIC)
// setFixedPolynomialParasFromPartiallyCompleteSearch();
else if(kernel==KernelType.POLYNOMIAL)
setPolynomialParasFromPartiallyCompleteSearch();
}
else if(tuneParameters){
if(paraSpace1==null)
setStandardParaSearchSpace();
if(buildFromFile){
throw new Exception("Build from file in TunedSVM Not implemented yet");
}else{
if(kernel==KernelType.RBF)
tuneRBF(train); //Tunes two parameters
else if(kernel==KernelType.LINEAR || kernel==KernelType.QUADRATIC)
tuneCForFixedPolynomial(train);//Tunes one parameter
else if(kernel==KernelType.POLYNOMIAL)
tunePolynomial(train);
}
}
/*If there is no parameter search, then there is no train CV available.
this gives the option of finding one using 10xCV
*/
else if(findTrainAcc){
int folds=10;
if(folds>train.numInstances())
folds=train.numInstances();
SMO model = new SMO();
model.setKernel(this.m_kernel);
model.setC(this.getC());
model.setBuildLogisticModels(true);
model.setRandomSeed(seed);
CrossValidationEvaluator cv = new CrossValidationEvaluator();
cv.setSeed(seed); //trying to mimick old seeding behaviour below
cv.setNumFolds(folds);
cv.buildFolds(train);
res = cv.crossValidateWithStats(model, train);
}
//If both kernelOptimise and tuneParameters are false, it just builds and SVM
//With whatever the parameters are set to
super.buildClassifier(train);
res.setTimeUnit(TimeUnit.NANOSECONDS);
if(saveEachParaAcc)
res.setBuildTime(combinedBuildTime);
else
res.setBuildTime(System.nanoTime()-t);
if(trainPath!=null && trainPath!=""){ //Save basic train results
res.setEstimatorName("TunedSVM"+kernel);
res.setDatasetName(train.relationName());
res.setFoldID(seed);
res.setSplit("train");
res.setParas(getParameters());
res.writeFullResultsToFile(trainPath);
File x=new File(trainPath);
x.setWritable(true, false);
}
}
public static void jamesltest() {
try{
String dset = "zoo";
// int fold = 0;
Instances all=DatasetLoading.loadDataNullable("C:/UCI Problems/"+dset+"/"+dset);
for (int fold = 0; fold < 30; fold++) {
Instances[] split=InstanceTools.resampleInstances(all,fold,0.5);
Instances train=split[0];
Instances test=split[1];
TunedSVM svml = new TunedSVM();
svml.optimiseParas(true);
svml.optimiseKernel(false);
svml.setBuildLogisticModels(true);
svml.setSeed(fold);
svml.setKernelType(TunedSVM.KernelType.LINEAR);
//
// TunedSVM svmq = new TunedSVM();
// svmq.optimiseParas(true);
// svmq.optimiseKernel(false);
// svmq.setBuildLogisticModels(true);
// svmq.setSeed(fold);
// svmq.setKernelType(TunedSVM.KernelType.QUADRATIC);
//
// TunedSVM svmrbf = new TunedSVM();
// svmrbf.optimiseParas(true);
// svmrbf.optimiseKernel(false);
// svmrbf.setBuildLogisticModels(true);
// svmrbf.setSeed(fold);
// svmrbf.setKernelType(TunedSVM.KernelType.RBF);
System.out.println("\n\nTSVM_L:");
svml.buildClassifier(train);
System.out.println("C ="+svml.getC());
System.out.println("Train: " + svml.res.getAcc() + " " + svml.res.stddev);
double accL=ClassifierTools.accuracy(test, svml);
System.out.println("Test: " + accL);
//
//
// System.out.println("\n\nTSVM_Q:");
// svmq.buildClassifier(train);
// System.out.println("C ="+svmq.getC());
// System.out.println("Train: " + svmq.res.acc + " " + svmq.res.stddev);
// double accQ=ClassifierTools.accuracy(test, svmq);
// System.out.println("Test: " + accQ);
//
// System.out.println("\n\nTSVM_RBF:");
// svmrbf.buildClassifier(train);
// System.out.println("C ="+svmrbf.getC());
// System.out.println("Train: " + svmrbf.res.acc + " " + svmrbf.res.stddev);
// double accRBF=ClassifierTools.accuracy(test, svmrbf);
// System.out.println("Test: " + accRBF);
}
}catch(Exception e){
System.out.println("ffsjava");
System.out.println(e);
e.printStackTrace();
}
}
public static void testKernel() throws Exception{
TunedSVM svm= new TunedSVM();
svm.setKernelType(KernelType.POLYNOMIAL);
svm.setParamSearch(false);
svm.setBuildLogisticModels(true);
String dset = "balloons";
svm.setSeed(0);
Instances all=DatasetLoading.loadDataNullable("C:\\Users\\ajb\\Dropbox\\UCI Problems\\"+dset+"\\"+dset);
Instances[] split=InstanceTools.resampleInstances(all,1,0.5);
svm.buildClassifier(split[0]);
}
    /**
     * Scratch entry point for manual experiments. NOTE(review): only the first
     * two statements are reachable — cheatOnMNIST() then System.exit(0); the
     * remainder is dead code kept around for ad-hoc runs.
     */
    public static void main(String[] args){
        cheatOnMNIST();
        System.exit(0);
        //-------- everything below is unreachable unless the exit above is removed --------
        try {
            testKernel();
        } catch (Exception ex) {
            Logger.getLogger(TunedSVM.class.getName()).log(Level.SEVERE, null, ex);
        }
        System.exit(0);
        //prints the (C, gamma) grid mapping used by parameter-index splitting
        int min=-16, max=16;
        int numParas=max-min;
        if(max*min<0)   //range spans zero: one extra value
            numParas++;
        for(int x=1;x<=1089;x++){
            int temp=min+(x-1)/numParas;
            double c=Math.pow(2,temp);
            int temp2=min+(x-1)%numParas;
            double gamma=Math.pow(2,temp2);
            System.out.println("c count ="+temp+" gamma count = "+ temp2+" c="+c+" gamma ="+gamma);
        }
        System.exit(0);
//        jamesltest();
        //tuned-RBF run on ItalyPowerDemand with per-parameter checkpointing to C:\Temp
        String sourcePath="C:\\Users\\ajb\\Dropbox\\TSC Problems\\";
        String problemFile="ItalyPowerDemand";
        DecimalFormat df = new DecimalFormat("###.###");
        Instances all=DatasetLoading.loadDataNullable(sourcePath+problemFile+"/"+problemFile+"_TRAIN");
        Instances[] split=InstanceTools.resampleInstances(all,0,0.5);
        Instances train=split[0];
        Instances test=split[1];
        try{
            TunedSVM svml=new TunedSVM();
            svml.setPathToSaveParameters("C:\\Temp\\fold1_");
            svml.optimiseParas(true);
            svml.optimiseKernel(false);
            svml.setBuildLogisticModels(true);
            svml.setSeed(0);
            svml.setKernelType(TunedSVM.KernelType.RBF);
            svml.debug=true;
/*            TunedSVM svmq=new TunedSVM();
            kernel = new PolyKernel();
            kernel.setExponent(2);
            svmq.setKernel(kernel);
            TunedSVM svmrbf=new TunedSVM();
            RBFKernel kernel2 = new RBFKernel();
            kernel2.setGamma(1/(double)(all.numAttributes()-1));
            svmrbf.setKernel(kernel2);
            svmq.buildClassifier(train);
            System.out.println("BUILT QUAD");
            System.out.println(" Optimal C ="+svmq.getC());
            svmrbf.buildClassifier(train);
            System.out.println("BUILT RBF");
            System.out.println(" Optimal C ="+svmrbf.getC());
            double accL=0,accQ=0,accRBF=0;
            accQ=ClassifierTools.accuracy(test, svmq);
            accRBF=ClassifierTools.accuracy(test,svmrbf);
  */
            svml.buildClassifier(train);
            System.out.println("BUILT LINEAR = "+svml);
            System.out.println(" Optimal C ="+svml.getC());
            double accL=ClassifierTools.accuracy(test, svml);
            System.out.println("ACC on "+problemFile+": Linear = "+df.format(accL)); //+", Quadratic = "+df.format(accQ)+", RBF = "+df.format(accRBF));
        }catch(Exception e){
            System.out.println(" Exception building a classifier = "+e);
            e.printStackTrace();
            System.exit(0);
        }
    }
    /**
     * Polynomial kernel of the form (x.y + b)^exponent. Wraps weka's PolyKernel
     * purely to add the tunable additive constant term b: the stock PolyKernel
     * only supports a fixed 0/1 "lower order" term, which (as far as the original
     * author could tell) cannot be parameterised through weka itself.
     */
    protected static class PolynomialKernel extends PolyKernel {
        //additive constant term b in (x.y + b)^m_exponent; default 0 matches PolyKernel with the lower-order term off
        double b=0;
        public void setB(double x){b=x;}
        protected void setConstantTerm(double x){ b=x;}
        @Override
        protected double evaluate(int id1, int id2, Instance inst1)
                throws Exception {
            double result;
            if (id1 == id2) {
                result = dotProd(inst1, inst1);
            } else {
                result = dotProd(inst1, m_data.instance(id2));
            }
            //Only change from the base class implementation: add the constant
            //term b here, instead of the 0/1 m_lowerOrder term PolyKernel uses
            result += b;
            if (m_exponent != 1.0) {
                result = Math.pow(result, m_exponent);
            }
            return result;
        }
    }
    /**
     * Grid-searches an RBF SVM over MNIST using TEST accuracy (hence "cheat") —
     * for exploratory use only, never for reporting results. Loads the data
     * from a hard-coded UEA network share, prints default-parameter accuracy,
     * then sweeps log2(C) and log2(gamma) over [1,6] printing accuracy for each.
     */
    public static void cheatOnMNIST(){
        Instances train=DatasetLoading.loadDataNullable("\\\\cmptscsvr.cmp.uea.ac.uk\\ueatsc\\Data\\LargeProblems\\MNIST\\MNIST_TRAIN");
        Instances test=DatasetLoading.loadDataNullable("\\\\cmptscsvr.cmp.uea.ac.uk\\ueatsc\\Data\\LargeProblems\\MNIST\\MNIST_TEST");
        SMO svm=new SMO();
        RBFKernel k=new RBFKernel();
        svm.setKernel(k);
        System.out.println("Data loaded ......");
        double a =ClassifierTools.singleTrainTestSplitAccuracy(svm, train, test);
        System.out.println("Default acc = "+a);
        int min=1;//These search values are used for all kernels with C. It is also used for Gamma in RBF, but not for the Polynomial exponent search
        int max=6;
        for(double c=min;c<=max;c++)
            for(double r=min;r<=max;r++){
                svm.setC(Math.pow(2, c));
                k.setGamma(Math.pow(2, r));
                svm.setKernel(k);//Just in case the kernel reference was copied internally
                a =ClassifierTools.singleTrainTestSplitAccuracy(svm, train, test);
                System.out.println("logC ="+c+" logGamma = "+r+" acc = "+a);
            }
    }
}
| 49,007 | 38.17506 | 186 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/classifiers/tuned/TunedXGBoost.java | /*
Copyright (c) 2014 by Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package machine_learning.classifiers.tuned;
import evaluation.evaluators.CrossValidationEvaluator;
import evaluation.storage.ClassifierResults;
import fileIO.OutFile;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import ml.dmlc.xgboost4j.java.Booster;
import ml.dmlc.xgboost4j.java.DMatrix;
import ml.dmlc.xgboost4j.java.XGBoost;
import ml.dmlc.xgboost4j.java.XGBoostError;
import tsml.classifiers.ParameterSplittable;
import utilities.DebugPrinting;
import weka.classifiers.AbstractClassifier;
import weka.core.Instance;
import weka.core.Instances;
import experiments.CollateResults;
import experiments.data.DatasetLists;
import experiments.ClassifierExperiments;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import tsml.classifiers.EnhancedAbstractClassifier;
import tsml.classifiers.SaveParameterInfo;
import machine_learning.classifiers.SaveEachParameter;
/**
* Original code repo, around which this class wraps: https://github.com/dmlc/xgboost
* Paper:
@inproceedings{chen2016xgboost,
title={Xgboost: A scalable tree boosting system},
author={Chen, Tianqi and Guestrin, Carlos},
booktitle={Proceedings of the 22nd acm sigkdd international conference on knowledge discovery and data mining},
pages={785--794},
year={2016},
organization={ACM}
}
*
* Wrapping around the public xgboost API for multiclass classification, with automatic grid search parameter tuning
* as an option. Would search over the learning rate, num iterations, max tree depth, and min child weighting.
*
* TODOS:
* - Thorough testing of the tuning checkpointing/para splitting for evaluation
* - Potentially tweaking the para spaces depending on observed behaviour
* - Any extra software engineering-type things required
* - Look for speedups, esp early abandons on grid search with num iters
*
* @author James Large (james.large@uea.ac.uk)
*/
public class TunedXGBoost extends EnhancedAbstractClassifier implements SaveParameterInfo, DebugPrinting, SaveEachParameter, ParameterSplittable {
    //data info, cached from the train set in buildClassifier
    int numTrainInsts = -1;
    int numAtts = -1;
    int numClasses = -1;
    Instances trainInsts = null;        //copy of the training data, kept for test-time header compatibility
    DMatrix trainDMat = null;           //training data in xgboost4j's native matrix form
    //model
    HashMap<String, DMatrix> watches = null;    //datasets xgboost reports eval metrics on during training
    HashMap<String, Object> params = null;      //xgboost parameter map passed to XGBoost.train
    Booster booster = null;                     //the trained xgboost model
    //hyperparameters - fixed (not searched over)
    float rowSubsampling = 0.8f; //aka subsample (row subsampling per tree)
    float colSubsampling = 0.8f; //aka colsample_bytree
    int minChildWeight = 1; //aka min_child_weight. NO LONGER TUNABLE, LEFT AS DEFAULT ( 1 ), on advice from rotf paper reviewer
    //old parameters, kept for reference
//    //hyperparameter settings informed by a mix of these, but also restricted in certain situations
//    //to bring in line with the amount of tuning provided to other classifiers for fairness.
//    //subject to change
//    //  https://www.analyticsvidhya.com/blog/2016/03/complete-guide-parameter-tuning-xgboost-with-codes-python/
//    //  https://www.slideshare.net/odsc/owen-zhangopen-sourcetoolsanddscompetitions1 (slide 12)
//    //  https://cambridgespark.com/content/tutorials/hyperparameter-tuning-in-xgboost/index.html
//    //hyperparameters - tunable through cv (6*5*5*7 = 1050 possible paras)
//    float learningRate = 0.1f; //aka eta
//    static float[] learningRateParaRange = { 0.001f, 0.01f, 0.05f, 0.1f, 0.2f, 0.3f };
//    int maxTreeDepth = 4; //aka max_depth
//    static int[] maxTreeDepthParaRange = { 2,4,6,8,10 };
//    int minChildWeight = 1; //aka min_child_weight
//    static int[] minChildWeightParaRange = { 1,3,5,7,9 };
//    int numIterations = 500; //aka rounds
//    static int[] numIterationsParaRange = { 50, 100, 250, 500, 1000, 1500, 2000 };
    //new parameters, on advice from rotf paper reviewer.
    //NOTE: the *ParaRange arrays must stay sorted ascending — XGBoostParamResultsHolder
    //uses Arrays.binarySearch on them to rank parameter values
    float learningRate = 0.1f; //aka eta
//    static float[] learningRateParaRange = { 0.01f, 0.1f, 0.2f };
    static float[] learningRateParaRange = { 0.00001f, 0.0001f, 0.001f, 0.01f, 0.05f, 0.1f, 0.15f, 0.2f, 0.25f, 0.3f };
    int maxTreeDepth = 4; //aka max_depth
//    static int[] maxTreeDepthParaRange = { 1,3,5 };
    static int[] maxTreeDepthParaRange = { 1,2,3,4,5,6,7,8,9,10 };
    int numIterations = 500; //aka rounds
//    static int[] numIterationsParaRange = { 10, 25, 50};
    static int[] numIterationsParaRange = { 10, 25, 50, 100, 250, 500, 750, 1000, 1250, 1500 };
    //tuning/cv/jobsplitting
    int cvFolds = 10;                           //folds for internal CV (clamped to numTrainInsts)
    boolean tuneParameters=false;
    protected String resultsPath;               //prefix for per-parameter checkpoint files
    protected boolean saveEachParaAcc=false;    //write each parameter set's CV results to its own file
    ArrayList<Double> paramAccuracies;          //CV accuracy of every parameter set tried, in search order
    private long combinedBuildTime;             //sum of per-parameter build times when rebuilding from checkpoint files
    boolean runSingleThreaded = false;
    /** Declares to the superclass that this classifier can estimate its own train performance (via internal CV). */
    public TunedXGBoost() {
        super(CAN_ESTIMATE_OWN_PERFORMANCE);
    }
    /** Restores the default 10x10x10 (=1000 combination) search space. Arrays must stay sorted ascending. */
    public static void setDefaultParaSearchSpace_1000paras() {
        learningRateParaRange = new float[] { 0.00001f, 0.0001f, 0.001f, 0.01f, 0.05f, 0.1f, 0.15f, 0.2f, 0.25f, 0.3f };
        maxTreeDepthParaRange = new int[] { 1,2,3,4,5,6,7,8,9,10 };
        numIterationsParaRange = new int[] { 10, 25, 50, 100, 250, 500, 750, 1000, 1250, 1500 };
    }
    /** Shrinks the search space to 4x4x4 (=64 combinations) for cheaper tuning. Arrays must stay sorted ascending. */
    public static void setSmallParaSearchSpace_64paras() {
        learningRateParaRange = new float[] { 0.001f, 0.01f, 0.1f, 0.2f };
        maxTreeDepthParaRange = new int[] { 1,3,5,7 };
        numIterationsParaRange = new int[] { 250, 500, 1000, 1500};
    }
    /** @return whether a grid search over the parameter spaces is run during buildClassifier */
    public boolean getTuneParameters() {
        return tuneParameters;
    }
    /** Enables/disables the hyperparameter grid search in buildClassifier. */
    public void setTuneParameters(boolean tuneParameters) {
        this.tuneParameters = tuneParameters;
    }
    /** @return the learning rate (xgboost 'eta') */
    public float getLearningRate() {
        return learningRate;
    }
    /** Sets the learning rate (xgboost 'eta'); ignored if tuning is enabled. */
    public void setLearningRate(float learningRate) {
        this.learningRate = learningRate;
    }
    /** @return the maximum tree depth (xgboost 'max_depth') */
    public int getMaxTreeDepth() {
        return maxTreeDepth;
    }
    /** Sets the maximum tree depth (xgboost 'max_depth'); ignored if tuning is enabled. */
    public void setMaxTreeDepth(int maxTreeDepth) {
        this.maxTreeDepth = maxTreeDepth;
    }
    /** @return the minimum child weight (xgboost 'min_child_weight'); fixed, not searched over */
    public int getMinChildWeight() {
        return minChildWeight;
    }
    /** Sets the minimum child weight (xgboost 'min_child_weight'). */
    public void setMinChildWeight(int minChildWeight) {
        this.minChildWeight = minChildWeight;
    }
    /** @return the number of boosting rounds */
    public int getNumIterations() {
        return numIterations;
    }
    /** Sets the number of boosting rounds; ignored if tuning is enabled. */
    public void setNumIterations(int numIterations) {
        this.numIterations = numIterations;
    }
    /** @return whether xgboost is forced to use a single thread */
    public boolean getRunSingleThreaded() {
        return runSingleThreaded;
    }
    /** Forces xgboost to a single thread (needed on some cluster environments). */
    public void setRunSingleThreaded(boolean runSingleThreaded) {
        this.runSingleThreaded = runSingleThreaded;
    }
//copied over/refactored from tunedsvm/randf/rotf
public static class XGBoostParamResultsHolder implements Comparable<XGBoostParamResultsHolder> {
float learningRate;
int maxTreeDepth;
int numIterations;
int conservedness;
ClassifierResults results;
XGBoostParamResultsHolder(float learningRate, int maxTreeDepth, int numIterations,ClassifierResults r){
this.learningRate=learningRate;
this.maxTreeDepth=maxTreeDepth;
this.numIterations=numIterations;
conservedness = computeConservedness();
results=r;
}
@Override
public String toString() {
return "learningRate="+learningRate+",maxTreeDepth="+maxTreeDepth+",numIterations="+numIterations+",conservedness="+conservedness+",acc="+results.getAcc();
}
/**
* This values wants to be minimised, higher values = potentially more prone to overfitting
*/
public int computeConservedness() {
return (1 + Arrays.binarySearch(TunedXGBoost.learningRateParaRange, learningRate))
* (1 + Arrays.binarySearch(TunedXGBoost.maxTreeDepthParaRange, maxTreeDepth))
* (1 + Arrays.binarySearch(TunedXGBoost.numIterationsParaRange, numIterations));
}
/**
* Implements a fairly naive way of determining if this param set is more conservative than the other,
* based on the total 'ranking' of each of the param values within the 4 param spaces.
*
* Returns less than zero if this is LESS conservative than other (i.e this.computeConservedness() > other.computeConservedness())
* Returns greater than zero if this is MORE conservative than other (i.e this.computeConservedness() < other.computeConservedness())
*
* Therefore to find most conservative in list of params, use max();
*/
@Override
public int compareTo(XGBoostParamResultsHolder other) {
return other.conservedness - this.conservedness;
}
}
//copied over/refactored from vector_classifiers.tunedsvm/randf/rotf
    /**
     * Grid-searches learningRate x maxTreeDepth x numIterations via internal
     * cross-validation (copied over/refactored from vector_classifiers.tunedsvm/randf/rotf).
     *
     * Two modes:
     *  - in-memory (saveEachParaAcc==false): tracks the lowest CV error as it
     *    goes, collecting equal-error settings as ties;
     *  - checkpointed (saveEachParaAcc==true): writes each parameter set's
     *    results to resultsPath+count+".csv", skips sets already completed by a
     *    previous run, and only selects a winner once ALL files are present
     *    (then deletes them). This supports distributed/resumable tuning.
     *
     * The winner among ties is the MOST conservative setting (Collections.max,
     * see XGBoostParamResultsHolder), whose parameters are installed on this
     * classifier and whose results become trainResults.
     *
     * NOTE(review): the "Best learning rate..." debug prints read
     * trainResults.getAcc() BEFORE trainResults=best.results is assigned, so
     * they report the stale/empty results object's accuracy — confirm intent.
     *
     * @throws Exception from CV evaluation or checkpoint file IO
     */
    public void tuneHyperparameters() throws Exception {
        printlnDebug("tuneHyperparameters()");
        double minErr=1;
        paramAccuracies=new ArrayList<>();
        Instances trainCopy=new Instances(trainInsts);
        CrossValidationEvaluator cv = new CrossValidationEvaluator();
        cv.setSeed(seed);
        cv.setNumFolds(cvFolds);
        cv.buildFolds(trainCopy);
        ArrayList<XGBoostParamResultsHolder> ties=new ArrayList<>();
        ClassifierResults tempResults;
        int count=0;    //1-based id of the current parameter set, also names the checkpoint file
        OutFile temp=null;
        for(float p1:learningRateParaRange){
            for(int p2:maxTreeDepthParaRange){
//                TuningXGBoostCrossValidationWrapper cvmodels = new TuningXGBoostCrossValidationWrapper(p1, p2);
//                cvmodels.setSeed(seed);
                for(int p4:numIterationsParaRange){
                    count++;
                    if(saveEachParaAcc){// check if para value already done by a previous run
                        File f=new File(resultsPath+count+".csv");
                        if(f.exists()){
                            if(CollateResults.validateSingleFoldFile(resultsPath+count+".csv")==false){
                                System.out.println("Deleting file "+resultsPath+count+".csv because size ="+f.length());
                            }
                            else
                                continue;//If done, skip this iteration
                        }
                    }
                    //evaluate this parameter set with a fresh, non-tuning clone
                    TunedXGBoost model = new TunedXGBoost();
                    model.setLearningRate(p1);
                    model.setMaxTreeDepth(p2);
                    model.setMinChildWeight(minChildWeight);
                    model.setNumIterations(p4);
                    model.tuneParameters=false;
                    model.setEstimateOwnPerformance(false);
                    model.setSeed(seed);
                    tempResults=cv.crossValidateWithStats(model,trainCopy);
//                    cvmodels.setNextNumIterations(p4);
//                    tempResults=cv.crossValidateWithStats(cvmodels,trainCopy);
                    tempResults.setEstimatorName("XGBoostPara"+count);
                    tempResults.setParas("learningRate,"+p1+",maxTreeDepth,"+p2+",numIterations="+p4);
                    double e=1-tempResults.getAcc();
                    printlnDebug("learningRate="+p1+",maxTreeDepth"+p2+",numIterations="+p4+" Acc = "+(1-e));
                    paramAccuracies.add(tempResults.getAcc());
                    if(saveEachParaAcc){// Save to file; winner is chosen later once all files exist
                        tempResults.writeFullResultsToFile(resultsPath+count+".csv");
                        File f=new File(resultsPath+count+".csv");
                        if(f.exists())
                            f.setWritable(true, false);
                    }
                    else{
                        if(e<minErr){
                            minErr=e;
                            ties=new ArrayList<>();//Remove previous ties
                            ties.add(new XGBoostParamResultsHolder(p1,p2,p4,tempResults));
                        }
                        else if(e==minErr)//Sort out ties
                            ties.add(new XGBoostParamResultsHolder(p1,p2,p4,tempResults));
                    }
                }
            }
        }
        minErr=1;
        if(saveEachParaAcc){
// Check they are all there first.
            int missing=0;
            count=1;
            for(float p1:learningRateParaRange){
                for(int p2:maxTreeDepthParaRange){
                    for(int p4:numIterationsParaRange){
                        File f=new File(resultsPath+count+".csv");
                        if(!(f.exists() && f.length()>0))
                            missing++;
                        count++;
                    }
                }
            }
            if(missing==0)//All present
            {
                //rebuild the accuracies list
                //if we had checkpointing on, but managed to perform the tuning in a single execution,
                //this will be a waste of time (but functionally makes no difference), however if we had
                //to rerun multiple times or got here via para splitting, the list will be empty/incomplete,
                //so start from scratch and repopulate it
                paramAccuracies=new ArrayList<>();
                combinedBuildTime=0;
    // If so, read them all from file, pick the best
                count=0;
                for(float p1:learningRateParaRange){
                    for(int p2:maxTreeDepthParaRange){
                        for(int p4:numIterationsParaRange){
                            count++;
                            tempResults = new ClassifierResults();
                            tempResults.loadResultsFromFile(resultsPath+count+".csv");
                            combinedBuildTime+=tempResults.getBuildTime();
                            paramAccuracies.add(tempResults.getAcc());
                            double e=1-tempResults.getAcc();
                            if(e<minErr){
                                minErr=e;
                                ties=new ArrayList<>();//Remove previous ties
                                ties.add(new XGBoostParamResultsHolder(p1,p2,p4,tempResults));
                            }
                            else if(e==minErr){//Sort out ties
                                ties.add(new XGBoostParamResultsHolder(p1,p2,p4,tempResults));
                            }
                            //Delete the files here to clean up.
                            File f= new File(resultsPath+count+".csv");
                            if(!f.delete())
                                System.out.println("DELETE FAILED "+resultsPath+count+".csv");
                        }
                    }
                }
//                XGBoostParamResultsHolder best=ties.get(rng.nextInt(ties.size()));
                XGBoostParamResultsHolder best=Collections.max(ties); //get the most conservative (see XGBoostParamResultsHolder.computeConservedness())
                printlnDebug("Best learning rate ="+best.learningRate+" best max depth = "+best.maxTreeDepth+" best num iterations ="+best.numIterations+ " acc = " + trainResults.getAcc() + " (num ties = " + ties.size() + ")");
                this.setLearningRate(best.learningRate);
                this.setMaxTreeDepth(best.maxTreeDepth);
                this.setNumIterations(best.numIterations);
                trainResults=best.results;
            }else//Not all present, just ditch; another job/run must finish the remaining sets
                System.out.println(resultsPath+" error: missing ="+missing+" parameter values");
        }
        else{
            printlnDebug("\nTies Handling: ");
            for (XGBoostParamResultsHolder tie : ties) {
                printlnDebug(tie.toString());
            }
            printlnDebug("\n");
//            XGBoostParamResultsHolder best=ties.get(rng.nextInt(ties.size()));
            XGBoostParamResultsHolder best=Collections.max(ties); //get the most conservative (see XGBoostParamResultsHolder.computeConservedness())
            printlnDebug("Best learning rate ="+best.learningRate+" best max depth = "+best.maxTreeDepth+" best num iterations ="+best.numIterations+" acc = " + trainResults.getAcc() + " (num ties = " + ties.size() + ")");
            this.setLearningRate(best.learningRate);
            this.setMaxTreeDepth(best.maxTreeDepth);
            this.setNumIterations(best.numIterations);
            trainResults=best.results;
        }
    }
    /**
     * Does the 'actual' initialising and building of the model, as opposed to
     * experimental code setup etc: optionally tunes hyperparameters, converts
     * the train data to DMatrix form, populates the xgboost parameter map and
     * trains the Booster.
     *
     * @throws Exception from tuning or the native xgboost training call
     */
    public void buildActualClassifer() throws Exception {
        if(tuneParameters)
            tuneHyperparameters();
        String objective = "multi:softprob"; //always multiclass softprob, even for 2-class problems
//        String objective = numClasses == 2 ? "binary:logistic" : "multi:softprob";
        trainDMat = wekaInstancesToDMatrix(trainInsts);
        params = new HashMap<String, Object>();
        //todo: this is a mega hack to enforce 1 thread only on cluster (else bad juju).
        //fix some how at some point.
        if (runSingleThreaded || System.getProperty("os.name").toLowerCase().contains("linux"))
            params.put("nthread", 1);
        //  else == num processors by default
        //fixed params
        params.put("silent", 1);
        params.put("objective", objective);
        if(objective.contains("multi"))
            params.put("num_class", numClasses); //required with multiclass problems
        params.put("seed", seed);
        params.put("subsample", rowSubsampling);
        params.put("colsample_bytree", colSubsampling);
        //tunable params (numIterations passed directly to XGBoost.train(...))
        params.put("learning_rate", learningRate);
        params.put("max_depth", maxTreeDepth);
        params.put("min_child_weight", minChildWeight);
        watches = new HashMap<String, DMatrix>(); //left empty: no per-round eval reporting
//        if (getDebugPrinting() || getDebug())
//            watches.put("train", trainDMat);
//        int earlyStopping = (int) Math.ceil(numIterations / 10.0);
        //e.g numIts == 25 => stop after 3 increases in err
        //    numIts == 250 => stop after 25 increases in err
//        booster = XGBoost.train(trainDMat, params, numIterations, watches, null, null, null, earlyStopping);
        booster = XGBoost.train(trainDMat, params, numIterations, watches, null, null);
    }
public ClassifierResults estimateTrainAcc(Instances insts) throws Exception {
printlnDebug("estimateTrainAcc()");
TunedXGBoost xg = new TunedXGBoost();
xg.setLearningRate(learningRate);
xg.setMaxTreeDepth(maxTreeDepth);
xg.setMinChildWeight(minChildWeight);
xg.setNumIterations(numIterations);
xg.tuneParameters=false;
xg.setEstimateOwnPerformance(false);
xg.setSeed(seed);
CrossValidationEvaluator cv = new CrossValidationEvaluator();
cv.setSeed(seed);
cv.setNumFolds(cvFolds);
cv.buildFolds(insts);
return cv.evaluate(xg, insts);
}
    /**
     * Builds the (optionally tuned) xgboost model: caches dataset info, clamps
     * the CV fold count, trains via buildActualClassifer, optionally estimates
     * train accuracy, and records timing/metadata into trainResults.
     *
     * @param insts the training data
     * @throws Exception from tuning, training or CV estimation
     */
    @Override
    public void buildClassifier(Instances insts) throws Exception {
//        long startTime=System.nanoTime();
        long startTime=System.nanoTime();
        booster = null;
        trainResults =new ClassifierResults();
        trainInsts = new Instances(insts);  //copy kept for test-time header compatibility
        numTrainInsts = insts.numInstances();
        numAtts = insts.numAttributes();
        numClasses = insts.numClasses();
        if(cvFolds>numTrainInsts)   //cannot have more folds than instances
            cvFolds=numTrainInsts;
//        rng = new Random(seed); //for tie resolution etc if needed
        buildActualClassifer();
        if(getEstimateOwnPerformance()&& !tuneParameters) //if tuneparas, will take the cv results of the best para set
            trainResults = estimateTrainAcc(trainInsts);
        if(saveEachParaAcc)
            trainResults.setBuildTime(combinedBuildTime);   //sum of per-parameter times read back from checkpoint files
        else
            trainResults.setBuildTime(System.nanoTime()-startTime);
//        trainResults.buildTime=System.nanoTime()-startTime;
        trainResults.setTimeUnit(TimeUnit.NANOSECONDS);
        trainResults.setEstimatorName(tuneParameters ? "TunedXGBoost" : "XGBoost");
        trainResults.setDatasetName(trainInsts.relationName());
        trainResults.setParas(getParameters());
    }
/**
 * Predicts class membership probabilities for a single instance by converting
 * it to XGBoost's DMatrix format and querying the trained booster.
 *
 * @param inst the instance to predict (with the training data's header format)
 * @return a probability distribution over the numClasses classes
 */
@Override
public double[] distributionForInstance(Instance inst) {
    // Wrap the single instance in an Instances object built from the training
    // header so it can be converted to DMatrix form.
    Instances instHolder = new Instances(trainInsts, 0);
    instHolder.add(inst);
    DMatrix testInstMat;
    try {
        testInstMat = wekaInstancesToDMatrix(instHolder);
    } catch (XGBoostError ex) {
        // BUG FIX: previously this printed the error and called System.exit(0),
        // killing the whole JVM (with a success status) on failure. Propagate
        // an unchecked exception instead so callers can handle it.
        throw new RuntimeException("Error converting test inst to DMatrix form", ex);
    }
    double[] dist = new double[numClasses];
    try {
        // predict() returns one row per instance; we supplied exactly one.
        float[][] predicts = booster.predict(testInstMat);
        for (int c = 0; c < numClasses; c++)
            dist[c] = predicts[0][c];
    } catch (XGBoostError ex) {
        throw new RuntimeException("Error predicting test inst", ex);
    }
    return dist;
}
/**
 * Converts a set of Weka instances into XGBoost's DMatrix format. The class
 * attribute (assumed to be the final attribute) is excluded from the feature
 * matrix; class values become the DMatrix labels.
 *
 * @param insts instances to convert
 * @return a row-major DMatrix of feature values with labels set
 * @throws XGBoostError if the native DMatrix construction fails
 */
public static DMatrix wekaInstancesToDMatrix(Instances insts) throws XGBoostError {
    final int numRows = insts.numInstances();
    final int numCols = insts.numAttributes() - 1;
    float[] data = new float[numRows * numCols];
    float[] labels = new float[numRows];
    for (int row = 0; row < numRows; row++) {
        // Row-major layout: each row occupies a contiguous numCols-wide slice.
        int offset = row * numCols;
        for (int col = 0; col < numCols; col++)
            data[offset + col] = (float) insts.instance(row).value(col);
        labels[row] = (float) insts.instance(row).classValue();
    }
    DMatrix dmat = new DMatrix(data, numRows, numCols);
    dmat.setLabel(labels);
    return dmat;
}
/**
 * Sets the directory used to write out results for each parameter set
 * evaluated during tuning, and switches that per-parameter saving on.
 *
 * @param r path to write per-parameter results to
 */
@Override
public void setPathToSaveParameters(String r){
    this.resultsPath = r;
    this.setSaveEachParaAcc(true);
}
/**
 * Toggles whether the evaluation of each parameter set during tuning is
 * written out to disk.
 *
 * @param bln true to save each parameter set's evaluation
 */
@Override
public void setSaveEachParaAcc(boolean bln) {
    this.saveEachParaAcc = bln;
}
/**
 * Enables or disables the hyper-parameter search performed at build time.
 *
 * @param bln true to tune parameters when buildClassifier is called
 */
@Override
public void setParamSearch(boolean bln) {
    this.tuneParameters = bln;
}
/**
 * Configures this classifier from a single 1-based index into the parameter
 * grid, decoding it as a mixed-radix number whose fastest-varying digit is
 * numIterations, then maxTreeDepth, then learningRate. Tuning is disabled
 * since an explicit parameter set has been chosen.
 *
 * @param x 1-based index into the parameter grid
 * @throws UnsupportedOperationException if x is outside the grid's range
 */
@Override
public void setParametersFromIndex(int x) {
    tuneParameters = false;
    final int totalCombos = numIterationsParaRange.length * learningRateParaRange.length * maxTreeDepthParaRange.length;
    if (x < 1 || x > totalCombos) //Error, invalid range
        throw new UnsupportedOperationException("ERROR parameter index "+x+" out of range for TunedXGBoost");
    //x starts counting from 1 in parameter splittable for some reason, get it back to 0 in here
    x -= 1;
    setNumIterations(numIterationsParaRange[x % numIterationsParaRange.length]);
    x /= numIterationsParaRange.length;
    setMaxTreeDepth(maxTreeDepthParaRange[x % maxTreeDepthParaRange.length]);
    x /= maxTreeDepthParaRange.length;
    setLearningRate(learningRateParaRange[x]);
    printlnDebug("Index ="+x+" LearningRate="+learningRate+" MaxTreeDepth="+maxTreeDepth+" NumIterations ="+numIterations);
}
/**
 * SaveParameterInfo interface: summarises the build time, train accuracy and
 * current hyper-parameters as a comma-separated string. When tuning, the
 * searched parameter spaces ('/'-separated so the summary stays
 * comma-parsable) and the accuracy of each parameter set are appended.
 */
@Override
public String getParameters() {
    StringBuilder sb = new StringBuilder("BuildTime,");
    sb.append(trainResults.getBuildTime());
    sb.append(",CVAcc,").append(trainResults.getAcc());
    sb.append(",learningRate,").append(learningRate);
    sb.append(",maxTreeDepth,").append(maxTreeDepth);
    sb.append(",numIterations,").append(numIterations);
    if (tuneParameters) {
        sb.append(",learningRateSpace,").append(Arrays.toString(learningRateParaRange).replace(",", "/").replace(" ", ""));
        sb.append(",maxTreeDepthSpace,").append(Arrays.toString(maxTreeDepthParaRange).replace(",", "/").replace(" ", ""));
        sb.append(",numIterationsSpace,").append(Arrays.toString(numIterationsParaRange).replace(",", "/").replace(" ", ""));
        for (double acc : paramAccuracies)
            sb.append(",").append(acc);
    }
    return sb.toString();
}
/**
 * Provides a smallish speedup when crossvalidating to tune hyperparameters.
 * At current, will just speed up the search for the num iterations for a given set
 * of the other 3 params, storing the models built on each of the cv folds for a
 * number of iterations, and continuing to build from those when evaluating higher number of iterations.
 *
 * It's definitely imaginable in concept that this same process could be applied to the other params,
 * but would require going into the xgboost library code. nah.
 */
private static class TuningXGBoostCrossValidationWrapper extends AbstractClassifier {
    // One stored model per cv fold.
    final int numModels = 10;
    // Index of the fold/model the next buildClassifier call will train/predict with.
    int modelIndex;
    TunedXGBoost[] models;
    float learningRate;
    int maxTreeDepth;
    // Iteration count being evaluated now, and the count already trained to.
    int newNumIterations;
    int numIterations;

    public TuningXGBoostCrossValidationWrapper(float learningRate, int maxTreeDepth) {
        this.learningRate = learningRate;
        this.maxTreeDepth = maxTreeDepth;
        this.newNumIterations = 0;
        this.numIterations = 0;
        // BUG FIX: the original declared a local 'int modelIndex = 0;' here,
        // shadowing the field and leaving it to its default initialisation
        // (which only worked because fields default to 0). Assign the field.
        this.modelIndex = 0;
        models = new TunedXGBoost[numModels];
        for (int i = 0; i < numModels; i++) {
            models[i] = new TunedXGBoost();
            models[i].setTuneParameters(false);
            models[i].setEstimateOwnPerformance(false);
            models[i].setLearningRate(learningRate);
            models[i].setMaxTreeDepth(maxTreeDepth);
            models[i].setNumIterations(newNumIterations);
        }
    }

    public void setSeed(int seed) {
        for (int i = 0; i < numModels; i++)
            models[i].setSeed(seed);
    }

    /** Moves on to the next numIterations value and resets the fold counter. */
    public void setNextNumIterations(int newNumIts) {
        numIterations = newNumIterations;
        newNumIterations = newNumIts;
        modelIndex = -1;
    }

    @Override
    public void buildClassifier(Instances data) throws Exception {
        //instead of (on a high level) calling build classifier on the same thing 10 times,
        //with each subsequent call overwriting the training done in the last,
        //we'll instead build each classifier in the models[] once, storing the traind model for each cv fold
        //when we move to the next num iterations, instead of building from scratch
        //we'll continue iterating from the stored models, which we can do since the
        //cv folds will be identical.
        // so for a given para set, this build classifier will essentially be called 10 times,
        //once for each cv fold
        modelIndex++; //going to use this model for this fold
        TunedXGBoost model = models[modelIndex];
        if (numIterations == 0) {
            //first of the 'numiterations' paras, i.e first build of each model. just build normally
            // - including the initialisation of all the meta info
            model.buildClassifier(data);
        } else {
            //continuing on from an already build model with less iterations
            //dont call normal build classifier, since that'll reinitialise
            //a bunch of stuff, including the booster itself. instead just
            //continue with a modified call to the trainer function
            model.booster = XGBoost.train(model.trainDMat, model.params, newNumIterations - numIterations, model.watches, null, null, null, 0, model.booster);
        }
    }

    @Override
    public double[] distributionForInstance(Instance inst) {
        return models[modelIndex].distributionForInstance(inst);
    }
}
/**
 * Example/experimental entry point: runs a single hard-coded experiment via
 * ClassifierExperiments. The commented-out blocks are previous experiment
 * configurations kept for reference; all paths are machine-specific.
 */
public static void main(String[] args) throws Exception {
//        for (int fold = 0; fold < 15; fold++) {
//            for (String dataset : DatasetLists.UCIContinuousFileNames) {
//                ClassifierExperiments.main(new String[] { "Z:/Data/UCIContinuous/", "Z:/CawpeResubmissionDump/XGBoostTimingsForHESCA/", "true", "XGBoostSingleThread", dataset, ""+(fold+1) });
//            }
//        }
//        for (int fold = 15; fold < 30; fold++) {
//            for (String dataset : DatasetLists.UCIContinuousFileNames) {
//                ClassifierExperiments.main(new String[] { "Z:/Data/UCIContinuous/", "Z:/CawpeResubmissionDump/XGBoostTimingsForHESCA/", "true", "XGBoostSingleThread", dataset, ""+(fold+1) });
//            }
//        }
//para split
//        for (int para = 1; para <= 27; para++)
//            ClassifierExperiments.main(new String[] { "Z:/Data/UCIDelgado/", "C:/Temp/XGBoostParaSplitTest/", "true", "TunedXGBoost", "hayes-roth", "1", "false", ""+para});
//        ClassifierExperiments.main(new String[] { "Z:/Data/UCIDelgado/", "C:/Temp/XGBoostParaSplitTest/", "true", "TunedXGBoost", "hayes-roth", "1", "true"});
//end para split
//checkpoint
//        ClassifierExperiments.main(new String[] { "Z:/Data/UCIDelgado/", "C:/Temp/XGBoostCheckpointTest/", "true", "TunedXGBoost", "hayes-roth", "1", "true"});
//standard
ClassifierExperiments.main(new String[] { "Z:/Data/UCIDelgado/", "C:/Temp/XGBoostStraightUpTest/", "true", "TunedXGBoost", "hayes-roth", "1", });
}
/**
 * Recursively walks the directory at base and prints the path of every file
 * that cannot be parsed as a ClassifierResults object.
 *
 * @param base directory to search
 * @param sb   collector for invalid paths (currently unused; the commented-out
 *             line shows the intended use — printing is used instead)
 */
public static void listInvalidFiles(String base, StringBuilder sb){
    File[] files = (new File(base)).listFiles();
    // BUG FIX: listFiles() returns null when base does not exist or is not a
    // directory; the original dereferenced it unconditionally (NPE).
    if (files == null || files.length == 0)
        return;
    for (File file : files) {
        if (file.isDirectory())
            // BUG FIX: the original recursed with base + file.getName(), which
            // drops the path separator, so recursion below the first level
            // built nonsense paths (e.g. "C:/x/d" + "e" -> "C:/x/de").
            listInvalidFiles(file.getAbsolutePath(), sb);
        else {
            try {
                new ClassifierResults(file.getAbsolutePath());
            }catch (Exception e) {
                System.out.println(file.getAbsolutePath());
//                    sb.append(file.getAbsolutePath()).append("\n");
            }
        }
    }
}
/**
 * Maintenance utility: for every dataset/fold whose train results file fails
 * to load, renames the corresponding test results file (after backing it up)
 * so that experiments will treat the fold as missing and redo it.
 * Paths are machine-specific.
 */
public static void editTestFilesWithoutCorrespondingTrain() throws Exception {
String path = "Z:\\Results\\TunedXGBoost\\Predictions\\";
String safetyWritePath = "C:/Temp/XGBoostTestBackups/";
for (String dataset : DatasetLists.UCIContinuousWithoutBigFour) {
for (int fold = 0; fold < 30; fold++) {
File trainFile = new File(path + dataset + "/trainFold" + fold + ".csv");
try {
new ClassifierResults(trainFile.getAbsolutePath());
} catch (Exception e) {
//couldnt load the train file, so edit the test file as missing so that experiments
//can redo it
File testFile = new File(path + dataset + "/testFold" + fold + ".csv");
if (testFile.exists()) {
// Back up the test file before renaming it out of the way.
ClassifierResults tempRes = new ClassifierResults(testFile.getAbsolutePath());
(new File(safetyWritePath + dataset)).mkdirs();
tempRes.writeFullResultsToFile(safetyWritePath + dataset + "/testFold" + fold + ".csv");
if (!testFile.renameTo(new File(testFile.getAbsolutePath().replace(".csv", "EDITNOTRAIN.csv"))))
throw new Exception("couldn't rename: " + testFile.getAbsolutePath());
else {
System.out.println("Edited: " + testFile.getAbsolutePath());
}
}
}
}
}
}
}
| 32,325 | 41.534211 | 227 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/clusterers/CAST.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.clusterers;
import experiments.data.DatasetLoading;
import weka.core.Instances;
import java.util.ArrayList;
import java.util.Arrays;
import static utilities.ClusteringUtilities.createDistanceMatrix;
import static utilities.InstanceTools.deleteClassAttribute;
import static utilities.Utilities.maxIndex;
import static utilities.Utilities.minIndex;
/**
* Implementation of the CAST clustering algorithm.
*
* @author Matthew Middlehurst
*/
public class CAST extends DistanceBasedVectorClusterer {
//Ben-Dor, Amir, Ron Shamir, and Zohar Yakhini.
//"Clustering gene expression patterns."
//Journal of computational biology 6.3-4 (1999): 281-297.
// A point may join a cluster while its summed normalised distance (its
// "affinity") to the cluster members is within affinityThreshold * size.
private double affinityThreshold = 0.1;
// When true, the threshold is recomputed per cluster via the E-CAST heuristic.
private boolean dynamicAffinityThreshold = false;
private double eCastThreshold = 0.25;
// Pairwise distances over the training data; only the lower triangle
// (distanceMatrix[i][n] with n < i) is read.
private double[][] distanceMatrix;
// True when a precomputed distance matrix was supplied to the constructor.
private boolean hasDistances = false;
// For each discovered cluster, the affinity of each member to its own cluster.
private ArrayList<double[]> clusterAffinities;
public CAST() {
}
// NOTE(review): the supplied matrix is assumed to be lower-triangular,
// matching createDistanceMatrix's output — confirm with callers.
public CAST(double[][] distanceMatrix) {
this.distanceMatrix = distanceMatrix;
this.hasDistances = true;
}
@Override
public int numberOfClusters() {
return clusters.length;
}
public ArrayList<double[]> getClusterAffinities() {
return clusterAffinities;
}
public void setAffinityThreshold(double d) {
affinityThreshold = d;
}
public void setDynamicAffinityThreshold(boolean b) {
dynamicAffinityThreshold = b;
}
/**
 * Builds the clusterer: computes (or reuses) the distance matrix, min-max
 * normalises it, runs the main CAST loop and records cluster assignments.
 */
@Override
public void buildClusterer(Instances data) throws Exception {
super.buildClusterer(data);
if (!hasDistances) {
distanceMatrix = createDistanceMatrix(train, distFunc);
}
normaliseDistanceMatrix();
//Main CAST loop
ArrayList<ArrayList<Integer>> subclusters = runCAST();
//Create and store an ArrayList for each cluster containing indexes of
//points inside the cluster
assignments = new double[train.size()];
clusters = new ArrayList[subclusters.size()];
for (int i = 0; i < subclusters.size(); i++) {
clusters[i] = new ArrayList();
for (int n = 0; n < subclusters.get(i).size(); n++) {
clusters[i].add(subclusters.get(i).get(n));
assignments[subclusters.get(i).get(n)] = i;
}
}
}
// Core CAST procedure: repeatedly open a new cluster seeded by the closest
// remaining pair, then alternately add low-affinity outside points and remove
// high-affinity members until the cluster stabilises.
private ArrayList<ArrayList<Integer>> runCAST() {
ArrayList<ArrayList<Integer>> subclusters = new ArrayList();
ArrayList<Integer> indicies = new ArrayList(distanceMatrix.length);
for (int i = 0; i < distanceMatrix.length; i++) {
indicies.add(i);
}
clusterAffinities = new ArrayList();
double[] subclusterAffinities = null;
while (indicies.size() > 0) {
ArrayList<Integer> subcluster = new ArrayList();
boolean change = true;
//E-cast
if (dynamicAffinityThreshold) {
computeThreshold(indicies);
}
subcluster.add(indicies.remove(initialiseCluster(indicies)));
//While changes still happen continue to add and remove items from the cluster.
while (change) {
change = false;
double[] indiciesAffinities = getAffinities(indicies, subcluster);
int minIdx = minIndex(indiciesAffinities);
//Addition step
while (indiciesAffinities.length > 0 && indiciesAffinities[minIdx] <=
affinityThreshold * subcluster.size()) {
subcluster.add(indicies.remove(minIdx));
indiciesAffinities = getAffinities(indicies, subcluster);
minIdx = minIndex(indiciesAffinities);
if (!change) {
change = true;
}
}
subclusterAffinities = getAffinities(subcluster, subcluster);
int maxIdx = maxIndex(subclusterAffinities);
//Removal step
while (subclusterAffinities[maxIdx] > affinityThreshold * (subcluster.size() - 1)) {
indicies.add(subcluster.remove(maxIdx));
subclusterAffinities = getAffinities(subcluster, subcluster);
maxIdx = maxIndex(subclusterAffinities);
if (!change) {
change = true;
}
}
}
//Add the cluster and the affinities of each member to itself.
//Items in the subcluster are removed from the indicies pool
clusterAffinities.add(subclusterAffinities);
subclusters.add(subcluster);
}
return subclusters;
}
// Sums, for each index in indicies, its distance to every member of
// subcluster (skipping self-pairs). Lower totals mean closer to the cluster.
private double[] getAffinities(ArrayList<Integer> indicies, ArrayList<Integer> subcluster) {
double[] affinities = new double[indicies.size()];
for (int n = 0; n < affinities.length; n++) {
for (int i = 0; i < subcluster.size(); i++) {
if (indicies.get(n).equals(subcluster.get(i))) continue;
// Distance matrix is lower-triangular: index with the larger value first.
if (indicies.get(n) > subcluster.get(i)) {
affinities[n] += distanceMatrix[indicies.get(n)][subcluster.get(i)];
} else {
affinities[n] += distanceMatrix[subcluster.get(i)][indicies.get(n)];
}
}
}
return affinities;
}
// Picks the seed for a new cluster: (one end of) the smallest pairwise
// distance among the remaining indices.
public int initialiseCluster(ArrayList<Integer> indicies) {
double minDist = Double.MAX_VALUE;
int minIdx = 0;
for (int n = 0; n < indicies.size(); n++) {
for (int i = 0; i < indicies.size(); i++) {
if (indicies.get(n).equals(indicies.get(i))) continue;
double dist;
if (indicies.get(n) > indicies.get(i)) {
dist = distanceMatrix[indicies.get(n)][indicies.get(i)];
} else {
dist = distanceMatrix[indicies.get(i)][indicies.get(n)];
}
if (dist < minDist) {
minDist = dist;
minIdx = n;
}
}
}
return minIdx;
}
// Min-max normalises the lower triangle of the distance matrix into [0,1],
// so affinityThreshold is comparable across datasets.
private void normaliseDistanceMatrix() {
double maxDist = -99999999;
double minDist = Double.MAX_VALUE;
for (int i = 0; i < distanceMatrix.length; i++) {
for (int n = 0; n < i; n++) {
if (distanceMatrix[i][n] > maxDist) {
maxDist = distanceMatrix[i][n];
}
if (distanceMatrix[i][n] < minDist) {
minDist = distanceMatrix[i][n];
}
}
}
for (int i = 0; i < distanceMatrix.length; i++) {
for (int n = 0; n < i; n++) {
distanceMatrix[i][n] = (distanceMatrix[i][n] - minDist) / (maxDist - minDist);
}
}
}
//Bellaachia, Abdelghani, et al.
//"E-CAST: a data mining algorithm for gene expression data."
//Proceedings of the 2nd International Conference on Data Mining in Bioinformatics. Springer-Verlag, 2002.
// Recomputes affinityThreshold from the remaining points: the mean shortfall
// below eCastThreshold over all close pairs, added back onto eCastThreshold.
private void computeThreshold(ArrayList<Integer> indicies) {
double a = 0;
int count = 0;
for (int n = 0; n < indicies.size(); n++) {
for (int i = 0; i < indicies.size(); i++) {
if (indicies.get(n).equals(indicies.get(i))) continue;
double dist;
if (indicies.get(n) > indicies.get(i)) {
dist = distanceMatrix[indicies.get(n)][indicies.get(i)];
} else {
dist = distanceMatrix[indicies.get(i)][indicies.get(n)];
}
if (dist < eCastThreshold) {
a += dist - eCastThreshold;
count++;
}
}
}
affinityThreshold = (a / count) + eCastThreshold;
// No close pairs found: a/count is 0/0 (NaN), fall back to the base threshold.
if (Double.isNaN(affinityThreshold)) affinityThreshold = eCastThreshold;
}
/**
 * Example usage on local test datasets; prints MATLAB plotting commands when
 * output is enabled. Paths are machine-specific.
 */
public static void main(String[] args) throws Exception {
String[] datasets = {"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\aggregation.arff",
"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\clustersynth.arff",
"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\dptest1k.arff",
"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\dptest4k.arff",
"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\flame.arff",
"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\spiral.arff"};
String[] names = {"aggre", "synth", "dptest1k", "dptest4k", "flame", "spiral"};
boolean output = true;
if (output) {
System.out.println("cd('Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\" +
"DensityPeakVector')");
System.out.println("load('matlabCluster.mat')");
System.out.println("k = [1,2,3,4,5,6,7,8,9,10]");
}
for (int i = 0; i < datasets.length; i++) {
Instances inst = DatasetLoading.loadDataNullable(datasets[i]);
inst.setClassIndex(inst.numAttributes() - 1);
CAST cast = new CAST();
cast.setDynamicAffinityThreshold(true);
cast.setSeed(0);
cast.buildClusterer(inst);
if (output) {
System.out.println(names[i] + "c = " + Arrays.toString(cast.assignments));
System.out.println("figure");
System.out.println("scatter(" + names[i] + "x," + names[i] + "y,[],scatterColours(" + names[i] + "c))");
}
}
}
}
| 10,591 | 34.905085 | 120 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/clusterers/DensityPeaks.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.clusterers;
import experiments.data.DatasetLoading;
import utilities.GenericTools;
import weka.core.Instances;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import static utilities.ClusteringUtilities.createDistanceMatrix;
/**
* Implementation of the Density Peaks algorithm described in "Clustering by
* fast search and find of density peaks.".
*
* @author Matthew Middlehurst
*/
public class DensityPeaks extends DistanceBasedVectorClusterer {
//Rodriguez, Alex, and Alessandro Laio.
//"Clustering by fast search and find of density peaks."
//Science 344.6191 (2014): 1492-1496.
// Use a gaussian kernel (true) or a hard-cutoff neighbour count (false)
// when estimating local densities.
private boolean gaussianKernel = true;
// Outlier handling modes: density-cutoff based, halo based, or neither.
private boolean cutoffOutlierSelection = false;
private boolean haloOutlierSelection = false;
// Kernel/cutoff distance d_c; negative means "choose automatically".
private double distC = -1;
// Threshold on density*distance used to pick cluster centers; negative = auto.
private double clusterCenterCutoff = -1;
// Density threshold below which points are marked as outliers; negative = auto.
private double outlierCutoff = -1;
// Pairwise distances; only the lower triangle (i > n) is read.
private double[][] distanceMatrix;
// rho: local density of each instance.
private double[] localDensities;
// delta: distance from each point to its nearest higher-density neighbour.
private double[] shortestDist;
// Index of that nearest higher-density neighbour (-1 for the densest point).
private int[] nearestNeighbours;
private int numInstances;
// Instance indices sorted by descending local density.
private Integer[] sortedDensitiesIndex;
// True when a precomputed distance matrix was supplied to the constructor.
private boolean hasDistances = false;
// Indices of the instances chosen as cluster centers.
private ArrayList<Integer> clusterCenters;
public DensityPeaks() {
}
public DensityPeaks(double[][] distanceMatrix) {
this.distanceMatrix = distanceMatrix;
this.hasDistances = true;
}
public ArrayList<Integer> getClusterCenters() {
return clusterCenters;
}
@Override
public int numberOfClusters() {
return clusterCenters.size();
}
public void setGaussianKernel(boolean b) {
this.gaussianKernel = b;
}
public void setCutoffOutlierSelection(boolean b) {
this.cutoffOutlierSelection = b;
}
public void setHaloOutlierSelection(boolean b) {
this.haloOutlierSelection = b;
}
public void setDistC(double distC) {
this.distC = distC;
}
public void setClusterCenterCutoff(double cutoff) {
this.clusterCenterCutoff = cutoff;
}
public void setOutlierCutoff(double cutoff) {
this.outlierCutoff = cutoff;
}
/**
 * Builds the clusterer: estimates local densities, computes distances to
 * higher-density neighbours, selects centers, then assigns the remaining
 * points and records the final cluster membership lists.
 */
@Override
public void buildClusterer(Instances data) throws Exception {
super.buildClusterer(data);
numInstances = train.size();
if (!hasDistances) {
distanceMatrix = createDistanceMatrix(train, distFunc);
}
if (distC < 0) {
distC = getDistCDefault();
}
if (gaussianKernel) {
generateLocalDensitiesGuassian();
} else {
generateLocalDensitiesCutoff();
}
generateHighDensDistance();
findClusterCentres();
assignClusters();
//Create and store an ArrayList for each cluster containing indexes of
//points inside the cluster
// Assignments currently hold center indices; remap them to 0..k-1 here.
clusters = new ArrayList[clusterCenters.size()];
for (int i = 0; i < clusterCenters.size(); i++) {
clusters[i] = new ArrayList();
for (int n = 0; n < numInstances; n++) {
if (clusterCenters.get(i) == assignments[n]) {
clusters[i].add(n);
assignments[n] = i;
}
}
}
}
//Method used in the original implementation to set distC so that the
//average number of neighbors is around 1 to 2% of the total number of
//points in the dataset
// NOTE(review): for very small datasets 'position' can be 0, making the
// index below -1 — confirm the expected minimum dataset size.
private double getDistCDefault() {
ArrayList<Double> sortedDistances = new ArrayList<>(numInstances);
for (int i = 0; i < numInstances; i++) {
for (int n = 0; n < i; n++) {
sortedDistances.add(distanceMatrix[i][n]);
}
}
Collections.sort(sortedDistances);
double percent = 2.0;
int position = (int) (sortedDistances.size() * percent / 100);
return sortedDistances.get(position - 1);
}
//Gets the local density for each instance i with the density defined as the
//number of points closer than distC to i
private void generateLocalDensitiesCutoff() {
localDensities = new double[numInstances];
for (int i = 0; i < numInstances; i++) {
for (int n = 0; n < i; n++) {
// Each qualifying pair increments the density of both endpoints.
if (distanceMatrix[i][n] - distC < 0) {
localDensities[i]++;
localDensities[n]++;
}
}
}
}
//Gets the local density for each instance i with the density estimated
//using a gaussian kernel
private void generateLocalDensitiesGuassian() {
localDensities = new double[numInstances];
for (int i = 0; i < numInstances; i++) {
for (int n = 0; n < i; n++) {
// Kernel contribution exp(-(d/distC)^2), added to both endpoints.
double j = distanceMatrix[i][n] / distC;
j = Math.exp(-(j * j));
localDensities[i] += j;
localDensities[n] += j;
}
}
}
private void generateHighDensDistance() {
//Find the indexes of the local densities sorted in descending order
sortedDensitiesIndex = new Integer[numInstances];
for (int i = 0; i < numInstances; i++) {
sortedDensitiesIndex[i] = i;
}
GenericTools.SortIndexDescending sort = new GenericTools.SortIndexDescending(localDensities);
Arrays.sort(sortedDensitiesIndex, sort);
shortestDist = new double[numInstances];
nearestNeighbours = new int[numInstances];
//Find the shortest distance/nearest neigbour from points with a higher
//local density for each point
for (int i = 1; i < numInstances; i++) {
shortestDist[sortedDensitiesIndex[i]] = Double.MAX_VALUE;
for (int n = 0; n < i; n++) {
// Lower-triangular matrix: index with the larger instance index first.
if (sortedDensitiesIndex[n] > sortedDensitiesIndex[i]) {
if (distanceMatrix[sortedDensitiesIndex[n]][sortedDensitiesIndex[i]] < shortestDist[sortedDensitiesIndex[i]]) {
shortestDist[sortedDensitiesIndex[i]] = distanceMatrix[sortedDensitiesIndex[n]][sortedDensitiesIndex[i]];
nearestNeighbours[sortedDensitiesIndex[i]] = sortedDensitiesIndex[n];
}
} else {
if (distanceMatrix[sortedDensitiesIndex[i]][sortedDensitiesIndex[n]] < shortestDist[sortedDensitiesIndex[i]]) {
shortestDist[sortedDensitiesIndex[i]] = distanceMatrix[sortedDensitiesIndex[i]][sortedDensitiesIndex[n]];
nearestNeighbours[sortedDensitiesIndex[i]] = sortedDensitiesIndex[n];
}
}
}
}
//Set the shortest distance of the point with the highest local density
//to the max of the distances from other points
double maxDensDist = 0;
for (int i = 0; i < shortestDist.length; i++) {
if (shortestDist[i] > maxDensDist) {
maxDensDist = shortestDist[i];
}
}
shortestDist[sortedDensitiesIndex[0]] = maxDensDist;
nearestNeighbours[sortedDensitiesIndex[0]] = -1;
}
// Selects cluster centers: points whose density*distance product exceeds the
// cutoff become centers; other points are temporarily left unassigned (-1).
private void findClusterCentres() {
clusterCenters = new ArrayList<>();
assignments = new double[numInstances];
//Get the cluster center estimates.
double[] estimates = new double[numInstances];
double sum = 0;
for (int i = 0; i < numInstances; i++) {
estimates[i] = localDensities[i] * shortestDist[i];
sum += estimates[i];
}
//Find the indexes of the estimates sorted in ascending order
Integer[] estIndexes = new Integer[numInstances];
for (int i = 0; i < numInstances; i++) {
estIndexes[i] = i;
}
GenericTools.SortIndexAscending sort = new GenericTools.SortIndexAscending(estimates);
Arrays.sort(estIndexes, sort);
double mean = sum / numInstances;
boolean threshholdFound = false;
//If no estimate cutoff value is set find a cutoff point
boolean findCutoff = clusterCenterCutoff < 0;
for (int i = 0; i < numInstances; i++) {
if (findCutoff) {
clusterCenterCutoff = estimates[estIndexes[i]] + mean;
}
//If a value above the cutoff is found set it and the following
//points as cluster centers
if (threshholdFound || i == numInstances - 1) {
clusterCenters.add(estIndexes[i]);
assignments[estIndexes[i]] = estIndexes[i];
} else if (clusterCenterCutoff < estimates[estIndexes[i + 1]]) {
threshholdFound = true;
assignments[estIndexes[i]] = -1;
} else {
assignments[estIndexes[i]] = -1;
}
}
}
//Assigns each point to a cluster by setting each to the cluster of its
//nearest neighbour, iterating through the sorted local densities
private void assignClusters() {
// Descending-density order guarantees the neighbour is already assigned.
for (int i = 0; i < numInstances; i++) {
if (!clusterCenters.contains(sortedDensitiesIndex[i])) {
assignments[sortedDensitiesIndex[i]] = assignments[nearestNeighbours[sortedDensitiesIndex[i]]];
}
}
if (cutoffOutlierSelection) {
cutoffOutliers();
} else if (haloOutlierSelection) {
haloOutliers();
}
}
//Sets points as not belonging to a cluster using a cutoff for its local
//density
// NOTE(review): numInstances / 20 is integer division before widening to
// double — presumably the intended 5% default, but confirm.
private void cutoffOutliers() {
if (outlierCutoff < 0) {
outlierCutoff = numInstances / 20;
}
for (int i = 0; i < numInstances; i++) {
if (localDensities[i] < outlierCutoff) {
assignments[i] = -1;
}
}
}
//Sets points as not belonging to a cluster using the original
//implementations halo method, setting points outside the found clusters
//border as clusterless.
private void haloOutliers() {
if (clusterCenters.size() > 0) {
// Indexed by center index (assignments still hold center indices here).
double[] border = new double[numInstances]; //larger than it should be
for (int i = 0; i < numInstances; i++) {
for (int n = 0; n < i; n++) {
if (assignments[i] != assignments[n] && distanceMatrix[i][n] <= distC) {
double ldAvg = (localDensities[i] + localDensities[n]) / 2;
if (ldAvg > border[(int) assignments[i]]) {
border[(int) assignments[i]] = ldAvg;
}
if (ldAvg > border[(int) assignments[n]]) {
border[(int) assignments[n]] = ldAvg;
}
}
}
}
for (int i = 0; i < numInstances; i++) {
if (localDensities[i] < border[(int) assignments[i]]) {
assignments[i] = -1;
}
}
}
}
/**
 * Example usage on local test datasets; prints MATLAB plotting commands when
 * output is enabled. Paths are machine-specific.
 */
public static void main(String[] args) throws Exception {
String[] datasets = {"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\aggregation.arff",
"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\clustersynth.arff",
"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\dptest1k.arff",
"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\dptest4k.arff",
"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\flame.arff",
"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\spiral.arff"};
String[] names = {"aggre", "synth", "dptest1k", "dptest4k", "flame", "spiral"};
double[] cutoffs = {0.75, 1.5, 1, 4, 2, 0.3};
//        String[] datasets = {"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\aggregation.arff"};
//        String[] names = {"aggre"};
boolean output = true;
if (output) {
System.out.println("cd('Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\)");
System.out.println("load('matlabCluster.mat')");
}
for (int i = 0; i < datasets.length; i++) {
Instances inst = DatasetLoading.loadDataNullable(datasets[i]);
inst.setClassIndex(inst.numAttributes() - 1);
DensityPeaks dp = new DensityPeaks();
dp.setClusterCenterCutoff(cutoffs[i]);
dp.setGaussianKernel(true);
dp.setHaloOutlierSelection(false);
dp.setSeed(0);
dp.buildClusterer(inst);
if (output) {
System.out.println(names[i] + "c = " + Arrays.toString(dp.assignments));
System.out.println("figure");
System.out.println("scatter(" + names[i] + "x," + names[i] + "y,[],scatterColours(" + names[i] + "c))");
}
}
}
}
| 13,718 | 35.102632 | 131 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/clusterers/DistanceBasedVectorClusterer.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.clusterers;
import tsml.clusterers.EnhancedAbstractClusterer;
import weka.core.*;
import static utilities.InstanceTools.deleteClassAttribute;
/**
* Abstract class for vector based clusterers.
*
* @author Matthew Middlehurst
*/
public abstract class DistanceBasedVectorClusterer extends EnhancedAbstractClusterer {
// Distance measure used for clustering and for assigning new instances.
protected DistanceFunction distFunc = new EuclideanDistance();
protected boolean symmetricDistance = true;
// Whether to z-normalise each attribute of the training data before clustering.
protected boolean normaliseData = true;
//mean and stdev of each attribute for normalisation.
protected double[] attributeMeans;
protected double[] attributeStdDevs;
/**
 * Common build steps for distance-based clusterers: optionally normalises the
 * training data, then initialises the distance function on it.
 */
@Override
public void buildClusterer(Instances data) throws Exception {
super.buildClusterer(data);
if (normaliseData)
normaliseData(train);
distFunc.setInstances(train);
}
//Find the closest train instance and return its cluster
@Override
public int clusterInstance(Instance inst) throws Exception {
Instance newInst = copyInstances ? new DenseInstance(inst) : inst;
int clsIdx = inst.classIndex();
// Strip the class attribute so it does not contribute to the distance.
if (clsIdx >= 0){
newInst.setDataset(null);
newInst.deleteAttributeAt(clsIdx);
}
// Apply the same normalisation used on the training data.
if (normaliseData)
normaliseData(newInst);
double minDist = Double.MAX_VALUE;
int closestCluster = 0;
for (int i = 0; i < train.size(); ++i) {
double dist = distFunc.distance(newInst, train.get(i));
if (dist < minDist) {
minDist = dist;
closestCluster = (int) assignments[i];
}
}
return closestCluster;
}
public void setDistanceFunction(DistanceFunction distFunc) {
this.distFunc = distFunc;
}
public void setSymmetricDistance(boolean b) { this.symmetricDistance = b; }
public void setNormaliseData(boolean b) {
this.normaliseData = b;
}
//Normalise instances and save the means and standard deviations.
protected void normaliseData(Instances data) throws Exception {
if (data.classIndex() >= 0 && data.classIndex() != data.numAttributes() - 1) {
throw new Exception("Class attribute is available and not the final attribute.");
}
int cls = data.classIndex() >= 0 ? 1 : 0;
attributeMeans = new double[data.numAttributes() - cls];
attributeStdDevs = new double[data.numAttributes() - cls];
for (int i = 0; i < data.numAttributes() - cls; i++) {
attributeMeans[i] = data.attributeStats(i).numericStats.mean;
attributeStdDevs[i] = data.attributeStats(i).numericStats
.stdDev;
// Guard against zero variance to avoid dividing by zero below.
if (attributeStdDevs[i] == 0) {
attributeStdDevs[i] = 0.0000001;
}
for (int n = 0; n < data.size(); n++) {
Instance instance = data.get(n);
instance.setValue(i, (instance.value(i) - attributeMeans[i]) / attributeStdDevs[i]);
}
}
}
// Normalises a single instance using the stored training means/std devs.
protected void normaliseData(Instance inst){
int cls = inst.classIndex() >= 0 ? 1 : 0;
for (int i = 0; i < inst.numAttributes() - cls; i++) {
inst.setValue(i, (inst.value(i) - attributeMeans[i]) / attributeStdDevs[i]);
}
}
}
| 4,079 | 32.719008 | 100 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/clusterers/KMeans.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.clusterers;
import experiments.data.DatasetLoading;
import weka.clusterers.NumberOfClustersRequestable;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Random;
import static utilities.ClusteringUtilities.createDistanceMatrix;
/**
* Implementation of the K-Means algorithm with options for finding a value
* for k and a refined initial cluster center selection.
*
* @author Matthew Middlehurst
*/
public class KMeans extends DistanceBasedVectorClusterer implements NumberOfClustersRequestable {
//MacQueen, James.
//"Some methods for classification and analysis of multivariate observations."
//Proceedings of the fifth Berkeley symposium on mathematical statistics and probability. Vol. 1. No. 14. 1967.
private int k = 2;
private boolean findBestK = false;
private boolean refinedInitialCenters = false;
private int numSubsamples = 10;
private int maxIterations = 200;
private int numInstances;
private double[][] centerDistances;
private boolean hasInitialCenters = false;
private Instances clusterCenters;
    public KMeans() {
    }
    //Used when selecting refined initial centers.
    //Copies the supplied centres so later updates do not alter the caller's data.
    private KMeans(Instances initialCenters) {
        super();
        this.clusterCenters = new Instances(initialCenters);
        this.hasInitialCenters = true;
    }
    /** @return the current cluster centres, one Instance per cluster. */
    public Instances getClusterCenters() {
        return clusterCenters;
    }
    /** @return the number of clusters, k. */
    @Override
    public int numberOfClusters() {
        return k;
    }
    /** Sets k. Effectively overridden when findBestK is enabled. */
    @Override
    public void setNumClusters(int numClusters) throws Exception {
        k = numClusters;
    }
    //If true, k is selected automatically via the silhouette method in findBestK.
    public void setFindBestK(boolean b) {
        this.findBestK = b;
    }
    //If true, initial centres are chosen by the Bradley & Fayyad refinement.
    public void setRefinedInitialCenters(boolean b) {
        this.refinedInitialCenters = b;
    }
    //Number of subsamples used by the refined initial-centre selection.
    public void setNumSubsamples(int n) {
        this.numSubsamples = n;
    }
    //Upper bound on k-means iterations in buildClusterer.
    public void setMaxIterations(int n) {
        this.maxIterations = n;
    }
    /**
     * Builds the k-means clustering over the training data.
     * Populates clusterCenters, assignments and clusters.
     *
     * @param data the training instances
     * @throws Exception if the base class or a sub-step fails
     */
    @Override
    public void buildClusterer(Instances data) throws Exception {
        // Base class prepares 'train' (copy / class removal / normalisation).
        super.buildClusterer(data);
        numInstances = train.size();
        assignments = new double[numInstances];
        // Degenerate case: no more instances than clusters -- each instance
        // becomes its own cluster (some of the k clusters stay empty).
        if (numInstances <= k) {
            clusterCenters = new Instances(train);
            for (int i = 0; i < numInstances; i++) {
                assignments[i] = i;
            }
            clusters = new ArrayList[k];
            for (int i = 0; i < k; i++) {
                clusters[i] = new ArrayList<>();
            }
            for (int i = 0; i < numInstances; i++) {
                for (int n = 0; n < k; n++) {
                    if (n == assignments[i]) {
                        clusters[n].add(i);
                        break;
                    }
                }
            }
            return;
        }
        if (findBestK) {
            //Builds clusters using multiple values of k and keeps the best one
            findBestK(train);
        } else {
            //Pick initial cluster centers.
            if (!hasInitialCenters) {
                if (refinedInitialCenters) {
                    initialClusterCentersRefined(train);
                } else {
                    initialClusterCenters(train);
                }
            }
            boolean finished = false;
            int iterations = 0;
            //Change cluster centers until cluster membership no longer changes
            // (or maxIterations is reached).
            while (!finished) {
                centerDistances = createCenterDistances(train);
                //If no clusters changed membership.
                if (!calculateClusterMembership() || iterations == maxIterations) {
                    finished = true;
                } else {
                    selectClusterCenters(train);
                }
                iterations++;
            }
        }
    }
@Override
public int clusterInstance(Instance inst) throws Exception {
Instance newInst = copyInstances ? new DenseInstance(inst) : inst;
int clsIdx = inst.classIndex();
if (clsIdx >= 0){
newInst.setDataset(null);
newInst.deleteAttributeAt(clsIdx);
}
if (normaliseData)
normaliseData(newInst);
double minDist = Double.MAX_VALUE;
int closestCluster = 0;
for (int i = 0; i < clusterCenters.size(); ++i) {
double dist = distFunc.distance(inst, clusterCenters.get(i));
if (dist < minDist) {
minDist = dist;
closestCluster = i;
}
}
return closestCluster;
}
//Returns the sum of the squared distance from each point to its cluster
//center
public double clusterSquaredDistance(Instances data) {
distFunc.setInstances(data);
double distSum = 0;
for (int i = 0; i < k; i++) {
for (int n = 0; n < clusters[i].size(); n++) {
double dist = distFunc.distance(clusterCenters.get(i), data.get(clusters[i].get(n)));
distSum += dist * dist;
}
}
return distSum;
}
//Create distances to cluster centers
private double[][] createCenterDistances(Instances data) {
distFunc.setInstances(data);
double[][] centerDists = new double[k][numInstances];
for (int i = 0; i < numInstances; i++) {
Instance first = data.get(i);
for (int n = 0; n < k; n++) {
centerDists[n][i] = distFunc.distance(first, clusterCenters.get(n));
}
}
return centerDists;
}
//Randomly select initial cluster centers
private void initialClusterCenters(Instances data) {
ArrayList<Integer> indexes = new ArrayList<>(numInstances);
for (int i = 0; i < numInstances; i++) {
indexes.add(i);
}
Random rand;
if (!seedClusterer) {
rand = new Random();
} else {
rand = new Random(seed);
}
clusterCenters = new Instances(data, 0);
Collections.shuffle(indexes, rand);
for (int i = 0; i < k; i++) {
clusterCenters.add(data.get(indexes.get(i)));
}
}
//Bradley, Paul S., and Usama M. Fayyad.
//"Refining Initial Points for K-Means Clustering."
//ICML. Vol. 98. 1998.
//Refined selection on initial cluster centers using the method above,
//running k-means over multiple subsamples then again on the resulting
//centers selecting the best performing one
    private void initialClusterCentersRefined(Instances data) throws Exception {
        // Subsample roughly 10% of the data, but never fewer than k instances.
        int subsampleSize = numInstances / 10;
        if (subsampleSize < k) {
            subsampleSize = k;
        }
        ArrayList<Integer> indexes = new ArrayList<>(numInstances);
        for (int i = 0; i < numInstances; i++) {
            indexes.add(i);
        }
        Random rand;
        if (!seedClusterer) {
            rand = new Random();
        } else {
            rand = new Random(seed);
        }
        //Randomly select centers for the subsample data
        Instances initialClusterCenters = new Instances(data, k);
        Collections.shuffle(indexes, rand);
        for (int i = 0; i < k; i++) {
            initialClusterCenters.add(data.get(indexes.get(i)));
        }
        Instances[] subsampleCenters = new Instances[numSubsamples];
        //Get the resulting centers from running k-means on multiple random
        //subsamples of the data
        for (int i = 0; i < numSubsamples; i++) {
            Collections.shuffle(indexes, rand);
            Instances subsample = new Instances(data, subsampleSize);
            for (int n = 0; n < subsampleSize; n++) {
                subsample.add(data.get(indexes.get(n)));
            }
            // Re-run until no cluster is left empty on this subsample.
            boolean finished = false;
            while (!finished) {
                KMeans kmeans = new KMeans(initialClusterCenters);
                kmeans.setDistanceFunction(distFunc);
                kmeans.setNumClusters(k);
                kmeans.setNormaliseData(false);
                kmeans.setFindBestK(false);
                kmeans.setRefinedInitialCenters(false);
                if (seedClusterer)
                    kmeans.setSeed(seed + (i + 1) * 37);
                kmeans.buildClusterer(subsample);
                boolean emptyCluster = false;
                //If any cluster is empty set the initial cluster centre to the
                //point with the max distance from its centre
                for (int n = 0; n < k; n++) {
                    if (kmeans.clusters[n].isEmpty()) {
                        emptyCluster = true;
                        double maxDist = 0;
                        int maxIndex = -1;
                        for (int g = 0; g < subsampleSize; g++) {
                            double dist = kmeans.centerDistances[(int) kmeans.assignments[g]][g];
                            if (dist > maxDist) {
                                // Skip points already used as an initial centre.
                                boolean contains = false;
                                for (int j = 0; j < k; j++) {
                                    if (Arrays.equals(initialClusterCenters.get(j).toDoubleArray(), subsample.get(g).toDoubleArray())) {
                                        contains = true;
                                        break;
                                    }
                                }
                                if (!contains) {
                                    maxDist = dist;
                                    maxIndex = g;
                                }
                            }
                        }
                        initialClusterCenters.set(n, subsample.get(maxIndex));
                    }
                }
                subsampleCenters[i] = kmeans.clusterCenters;
                if (!emptyCluster) {
                    finished = true;
                }
            }
        }
        //Create Instance object for subsample centers
        Instances centers = new Instances(data, numSubsamples * k);
        for (int i = 0; i < numSubsamples; i++) {
            for (int n = 0; n < k; n++) {
                centers.add(subsampleCenters[i].get(n));
            }
        }
        //Cluster again using subsample centers as data and find the solution
        //with the lowest distortion using each set of centers as the initial
        //set
        double minDist = Double.MAX_VALUE;
        int minIndex = -1;
        for (int i = 0; i < numSubsamples; i++) {
            KMeans kmeans = new KMeans(subsampleCenters[i]);
            kmeans.setDistanceFunction(distFunc);
            kmeans.setNumClusters(k);
            kmeans.setNormaliseData(false);
            kmeans.setFindBestK(false);
            kmeans.setRefinedInitialCenters(false);
            if (seedClusterer)
                kmeans.setSeed(seed + (i + 1) * 137);
            kmeans.buildClusterer(centers);
            double dist = kmeans.clusterSquaredDistance(centers);
            if (dist < minDist) {
                minDist = dist;
                minIndex = i;
            }
        }
        clusterCenters = subsampleCenters[minIndex];
    }
    /**
     * Reassigns every training instance to its closest cluster centre using
     * the precomputed centerDistances matrix, then rebuilds the clusters
     * index lists.
     *
     * @return true if at least one instance changed cluster (so the k-means
     *         loop should continue), false if membership is stable
     */
    private boolean calculateClusterMembership() {
        boolean membershipChange = false;
        //Set membership of each point to the closest cluster center
        for (int i = 0; i < numInstances; i++) {
            double minDist = Double.MAX_VALUE;
            int minIndex = -1;
            for (int n = 0; n < k; n++) {
                if (centerDistances[n][i] < minDist) {
                    minDist = centerDistances[n][i];
                    minIndex = n;
                }
            }
            //If membership of any point changed return true to keep
            //looping
            if (minIndex != assignments[i]) {
                assignments[i] = minIndex;
                membershipChange = true;
            }
        }
        //Create and store an ArrayList for each cluster containing indexes of
        //points inside the cluster
        clusters = new ArrayList[k];
        for (int i = 0; i < k; i++) {
            clusters[i] = new ArrayList<>();
        }
        for (int i = 0; i < numInstances; i++) {
            clusters[(int) assignments[i]].add(i);
        }
        return membershipChange;
    }
//Select the new cluster centers for each cluster to be the mean of each
//attribute of points in the cluster.
    private void selectClusterCenters(Instances data) {
        for (int i = 0; i < k; i++) {
            Instance center = clusterCenters.get(i);
            // NOTE(review): iterates numAttributes() - 1 attributes, i.e. it
            // assumes a trailing attribute should be skipped. Other methods in
            // this class guard on classIndex() instead -- confirm how 'train'
            // is prepared upstream before relying on this.
            for (int n = 0; n < data.numAttributes() - 1; n++) {
                double sum = 0;
                for (Integer g : clusters[i]) {
                    sum += data.get(g).value(n);
                }
                // Mean of this attribute over the cluster's members; yields
                // NaN (0/0) if the cluster is empty.
                center.setValue(n, sum / clusters[i].size());
            }
        }
    }
//Lletı, R., et al.
//"Selecting variables for k-means cluster analysis by using a genetic algorithm that optimises the silhouettes."
//Analytica Chimica Acta 515.1 (2004): 87-100.
//Method of finding the best value for k based on the silhouette method
//above
    /**
     * Tries k = 2..10, scoring each clustering by its total silhouette value,
     * and keeps the centres/assignments/clusters of the best-scoring k.
     */
    private void findBestK(Instances data) throws Exception {
        int maxK = 10;
        // NOTE(review): initialised to 0, so if every candidate k produces a
        // negative total silhouette no result is copied back -- confirm this
        // is the intended behaviour.
        double bestSilVal = 0;
        double[][] distMatrix = createDistanceMatrix(data, distFunc);
        //For each value of K
        for (int i = 2; i <= maxK; i++) {
            KMeans kmeans = new KMeans();
            kmeans.setDistanceFunction(distFunc);
            kmeans.setNumClusters(i);
            kmeans.setNormaliseData(false);
            kmeans.setFindBestK(false);
            kmeans.setRefinedInitialCenters(refinedInitialCenters);
            if (seedClusterer)
                kmeans.setSeed(seed + (i + 1) * 237);
            kmeans.buildClusterer(data);
            double totalSilVal = 0;
            //For each cluster created by k-means
            for (int n = 0; n < i; n++) {
                //For each point in the cluster
                for (int g = 0; g < kmeans.clusters[n].size(); g++) {
                    double clusterDist = 0;
                    double minOtherClusterDist = Double.MAX_VALUE;
                    int index = kmeans.clusters[n].get(g);
                    //Find mean distance of the point to other points in its
                    //cluster
                    // distMatrix is lower-triangular: index [larger][smaller].
                    for (int j = 0; j < kmeans.clusters[n].size(); j++) {
                        if (index > kmeans.clusters[n].get(j)) {
                            clusterDist += distMatrix[index][kmeans.clusters[n].get(j)];
                        } else {
                            clusterDist += distMatrix[kmeans.clusters[n].get(j)][index];
                        }
                    }
                    clusterDist /= kmeans.clusters[n].size();
                    //Find the minimum distance of the point to other clusters
                    for (int m = 0; m < i; m++) {
                        if (m == n) {
                            continue;
                        }
                        double otherClusterDist = 0;
                        for (int j = 0; j < kmeans.clusters[m].size(); j++) {
                            if (index > kmeans.clusters[m].get(j)) {
                                otherClusterDist += distMatrix[index][kmeans.clusters[m].get(j)];
                            } else {
                                otherClusterDist += distMatrix[kmeans.clusters[m].get(j)][index];
                            }
                        }
                        otherClusterDist /= kmeans.clusters[m].size();
                        if (otherClusterDist < minOtherClusterDist) {
                            minOtherClusterDist = otherClusterDist;
                        }
                    }
                    //Calculate the silhoutte value for the point and add it
                    //to the total.
                    double silVal = minOtherClusterDist - clusterDist;
                    double div = Math.max(minOtherClusterDist, clusterDist);
                    silVal /= div;
                    totalSilVal += silVal;
                }
            }
            if (totalSilVal > bestSilVal) {
                bestSilVal = totalSilVal;
                clusterCenters = kmeans.clusterCenters;
                assignments = kmeans.assignments;
                clusters = kmeans.clusters;
                k = kmeans.k;
            }
        }
    }
    // Developer test harness: clusters a set of hard-coded local .arff files
    // and prints MATLAB commands for visualising the results. Not part of the
    // clusterer's public API.
    public static void main(String[] args) throws Exception {
        String[] datasets = {"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\aggregation.arff",
                "Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\clustersynth.arff",
                "Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\dptest1k.arff",
                "Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\dptest4k.arff",
                "Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\flame.arff",
                "Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\spiral.arff"};
        String[] names = {"aggre", "synth", "dptest1k", "dptest4k", "flame", "spiral"};
        boolean output = true;
        if (output) {
            System.out.println("cd('Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\" +
                    "DensityPeakVector')");
            System.out.println("load('matlabCluster.mat')");
            System.out.println("k = [1,2,3,4,5,6,7,8,9,10]");
        }
        for (int i = 0; i < datasets.length; i++) {
            Instances inst = DatasetLoading.loadDataNullable(datasets[i]);
            inst.setClassIndex(inst.numAttributes() - 1);
            KMeans kmeans = new KMeans();
            kmeans.setFindBestK(true);
            kmeans.setRefinedInitialCenters(true);
            kmeans.setSeed(0);
            kmeans.buildClusterer(inst);
            if (output) {
                System.out.println(names[i] + "c = " + Arrays.toString(kmeans.assignments));
                System.out.println("figure");
                System.out.println("scatter(" + names[i] + "x," + names[i] + "y,[],scatterColours(" + names[i] + "c))");
            }
        }
    }
}
| 19,192 | 33.151246 | 136 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/clusterers/KMedoids.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package machine_learning.clusterers;
import experiments.data.DatasetLoading;
import weka.clusterers.NumberOfClustersRequestable;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Random;
import static utilities.ClusteringUtilities.createDistanceMatrix;
import static utilities.InstanceTools.deleteClassAttribute;
/**
* Implementation of the KMedoids algorithm with
* options for finding a value for k and a refined initial medoid selection.
*
* @author Matthew Middlehurst
*/
public class KMedoids extends DistanceBasedVectorClusterer implements NumberOfClustersRequestable {
private int k = 2;
private boolean findBestK = false;
private boolean refinedInitialMedoids = false;
private int numSubsamples = 10;
private double[][] distanceMatrix;
private int numInstances;
private boolean hasInitialMedoids = false;
private boolean hasDistances = false;
private int[] medoids;
    public KMedoids() {
    }
    //Used when finding best value for k to avoid recalculating distances
    public KMedoids(double[][] distanceMatrix) {
        this.distanceMatrix = distanceMatrix;
        this.hasDistances = true;
    }
    //Used when selecting refined initial medoids.
    private KMedoids(int[] initialMedoids) {
        super();
        this.medoids = initialMedoids;
        this.hasInitialMedoids = true;
    }
    /** @return training-set indexes of the current medoids, one per cluster. */
    public int[] getMedoids() {
        return medoids;
    }
    /** @return the number of clusters, k. */
    @Override
    public int numberOfClusters() {
        return k;
    }
    /** Sets k. Effectively overridden when findBestK is enabled. */
    @Override
    public void setNumClusters(int numClusters) throws Exception {
        k = numClusters;
    }
    //If true, k is selected automatically via the silhouette method in findBestK.
    public void setFindBestK(boolean b) {
        this.findBestK = b;
    }
    //If true, initial medoids are chosen via the subsample-based refinement.
    public void setRefinedInitialMedoids(boolean b) {
        this.refinedInitialMedoids = b;
    }
    //Number of subsamples used by the refined initial-medoid selection.
    public void setNumSubsamples(int n) {
        this.numSubsamples = n;
    }
    /**
     * Builds the k-medoids clustering over the training data.
     * Populates medoids, assignments and clusters.
     *
     * @param data the training instances
     * @throws Exception if the base class or a sub-step fails
     */
    @Override
    public void buildClusterer(Instances data) throws Exception {
        // Base class prepares 'train' (copy / class removal / normalisation).
        super.buildClusterer(data);
        numInstances = train.size();
        assignments = new double[numInstances];
        // Degenerate case: no more instances than clusters -- each instance
        // becomes its own medoid (some of the k clusters stay empty).
        if (numInstances <= k) {
            medoids = new int[numInstances];
            for (int i = 0; i < numInstances; i++) {
                assignments[i] = i;
                medoids[i] = i;
            }
            clusters = new ArrayList[k];
            for (int i = 0; i < k; i++) {
                clusters[i] = new ArrayList();
            }
            for (int i = 0; i < numInstances; i++) {
                for (int n = 0; n < k; n++) {
                    if (n == assignments[i]) {
                        clusters[n].add(i);
                        break;
                    }
                }
            }
            return;
        }
        if (!hasDistances) {
            distanceMatrix = createDistanceMatrix(train, distFunc);
        }
        if (findBestK) {
            //Build clusters using multiple values of k and uses the best one.
            findBestK(train);
        } else {
            //Pick initial medoids.
            if (!hasInitialMedoids) {
                if (refinedInitialMedoids) {
                    initialMedoidsRefined(train);
                } else {
                    initialMedoids();
                }
            }
            boolean finished = false;
            //Change medoids until medoid location no longer changes.
            while (!finished) {
                calculateClusterMembership();
                finished = selectMedoids();
            }
        }
        // Up to here assignments hold medoid (training-set) indexes; remap
        // them to cluster indexes 0..k-1 for the public interface.
        for (int n = 0; n < numInstances; n++) {
            for (int i = 0; i < medoids.length; i++) {
                if (medoids[i] == assignments[n]) {
                    assignments[n] = i;
                    break;
                }
            }
        }
    }
@Override
public int clusterInstance(Instance inst) throws Exception {
Instance newInst = copyInstances ? new DenseInstance(inst) : inst;
int clsIdx = inst.classIndex();
if (clsIdx >= 0){
newInst.setDataset(null);
newInst.deleteAttributeAt(clsIdx);
}
if (normaliseData)
normaliseData(newInst);
double minDist = Double.MAX_VALUE;
int closestCluster = 0;
for (int i = 0; i < medoids.length; ++i) {
double dist = distFunc.distance(inst, train.get(medoids[i]));
if (dist < minDist) {
minDist = dist;
closestCluster = i;
}
}
return closestCluster;
}
//Returns the sum of the squared distance from each point to its cluster medoid
public double clusterSquaredDistance() {
double distSum = 0;
for (int i = 0; i < k; i++) {
for (int n = 0; n < clusters[i].size(); n++) {
if (medoids[i] == clusters[i].get(n)) continue;
if (medoids[i] > clusters[i].get(n)) {
distSum += distanceMatrix[medoids[i]][clusters[i].get(n)]
* distanceMatrix[medoids[i]][clusters[i].get(n)];
} else {
distSum += distanceMatrix[clusters[i].get(n)][medoids[i]]
+ distanceMatrix[clusters[i].get(n)][medoids[i]];
}
}
}
return distSum;
}
//Randomly select initial medoids
private void initialMedoids() {
medoids = new int[k];
ArrayList<Integer> indexes = new ArrayList();
for (int i = 0; i < numInstances; i++) {
indexes.add(i);
}
Random rand;
if (!seedClusterer) {
rand = new Random();
} else {
rand = new Random(seed);
}
Collections.shuffle(indexes, rand);
for (int i = 0; i < k; i++) {
medoids[i] = indexes.get(i);
}
}
//Bradley, Paul S., and Usama M. Fayyad.
//"Refining Initial Points for K-Means Clustering."
//ICML. Vol. 98. 1998.
//Refined selection on initial medoids using a modified version of the
//method above, running KMedoids over multiple subsamples then again on the
//resulting medoids selecting the best performing one
    private void initialMedoidsRefined(Instances data) throws Exception {
        // Subsample roughly 10% of the data, but never fewer than k instances.
        int subsampleSize = numInstances / 10;
        if (subsampleSize < k) {
            subsampleSize = k;
        }
        ArrayList<Integer> indexes = new ArrayList(numInstances);
        for (int i = 0; i < numInstances; i++) {
            indexes.add(i);
        }
        Random rand;
        if (!seedClusterer) {
            rand = new Random();
        } else {
            rand = new Random(seed);
        }
        int[][] subsampleMedoids = new int[numSubsamples][];
        //Get the resulting medoids from running KMedoids on multiple random
        //subsamples of the data
        for (int i = 0; i < numSubsamples; i++) {
            Collections.shuffle(indexes, rand);
            Instances subsample = new Instances(data, subsampleSize);
            for (int n = 0; n < subsampleSize; n++) {
                subsample.add(data.get(indexes.get(n)));
            }
            KMedoids kmedoids = new KMedoids();
            kmedoids.setDistanceFunction(distFunc);
            kmedoids.setNumClusters(k);
            kmedoids.setNormaliseData(false);
            kmedoids.setRefinedInitialMedoids(false);
            if (seedClusterer)
                kmedoids.setSeed(seed + (i + 1) * 37);
            kmedoids.buildClusterer(subsample);
            subsampleMedoids[i] = kmedoids.medoids;
        }
        //Create Instance object for subsample medoids.
        Instances medoidInsts = new Instances(data, numSubsamples * k);
        for (int i = 0; i < numSubsamples; i++) {
            for (int n = 0; n < k; n++) {
                medoidInsts.add(data.get(subsampleMedoids[i][n]));
            }
        }
        //Cluster again using subsample medoids as data and find the solution
        //with the lowest distortion using each set of medoids as the initial
        //set
        double minDist = Double.MAX_VALUE;
        int minIndex = -1;
        for (int i = 0; i < numSubsamples; i++) {
            // Indexes into medoidInsts for subsample i's k medoids.
            int[] initialMedoids = new int[k];
            for (int n = 0; n < k; n++) {
                initialMedoids[n] = n + (i * k);
            }
            KMedoids kmedoids = new KMedoids(initialMedoids);
            kmedoids.setDistanceFunction(distFunc);
            kmedoids.setNumClusters(k);
            kmedoids.setNormaliseData(false);
            kmedoids.setRefinedInitialMedoids(false);
            if (seedClusterer)
                kmedoids.setSeed(seed + (i + 1) * 137);
            kmedoids.buildClusterer(medoidInsts);
            double dist = kmedoids.clusterSquaredDistance();
            if (dist < minDist) {
                minDist = dist;
                minIndex = i;
            }
        }
        medoids = subsampleMedoids[minIndex];
    }
    /**
     * Assigns each training instance to its closest medoid and rebuilds the
     * clusters index lists. At this stage assignments hold MEDOID indexes
     * (training-set positions), not 0..k-1 cluster ids.
     */
    private void calculateClusterMembership() {
        //Set membership of each point to the closest medoid.
        for (int i = 0; i < numInstances; i++) {
            double minDist = Double.MAX_VALUE;
            for (int n = 0; n < k; n++) {
                // distanceMatrix is lower-triangular: index [larger][smaller].
                if (medoids[n] > i) {
                    if (distanceMatrix[medoids[n]][i] < minDist) {
                        minDist = distanceMatrix[medoids[n]][i];
                        assignments[i] = medoids[n];
                    }
                }
                //If a point is a medoid set it to its own cluster.
                else if (medoids[n] == i) {
                    assignments[i] = medoids[n];
                    break;
                } else {
                    if (distanceMatrix[i][medoids[n]] < minDist) {
                        minDist = distanceMatrix[i][medoids[n]];
                        assignments[i] = medoids[n];
                    }
                }
            }
        }
        //Create and store an ArrayList for each cluster containing indexes of
        //points inside the cluster.
        clusters = new ArrayList[k];
        for (int i = 0; i < k; i++) {
            clusters[i] = new ArrayList();
            for (int n = 0; n < numInstances; n++) {
                if (medoids[i] == assignments[n]) {
                    clusters[i].add(n);
                }
            }
        }
    }
//Select new medoids by finding the point with the lowest distance sum for
//each cluster
    /**
     * For each cluster, picks as the new medoid the member with the smallest
     * total distance to the other members.
     *
     * @return true if NO medoid changed (the algorithm has converged),
     *         false if at least one medoid moved
     */
    private boolean selectMedoids() {
        boolean changedMedoid = false;
        for (int i = 0; i < k; i++) {
            double minDist = Double.MAX_VALUE;
            int bestMedoid = -1;
            for (int n = 0; n < clusters[i].size(); n++) {
                double clusterDist = 0;
                for (int g = 0; g < clusters[i].size(); g++) {
                    // Distance to self is 0; distanceMatrix is lower-triangular.
                    if (clusters[i].get(n).equals(clusters[i].get(g))) continue;
                    if (clusters[i].get(n) > clusters[i].get(g)) {
                        clusterDist += distanceMatrix[clusters[i].get(n)][clusters[i].get(g)];
                    } else {
                        clusterDist += distanceMatrix[clusters[i].get(g)][clusters[i].get(n)];
                    }
                }
                if (clusterDist < minDist) {
                    minDist = clusterDist;
                    bestMedoid = clusters[i].get(n);
                }
            }
            //If a medoid changes return false to keep looping
            if (bestMedoid != medoids[i]) {
                changedMedoid = true;
                medoids[i] = bestMedoid;
            }
        }
        return !changedMedoid;
    }
//Lletı, R., et al.
//"Selecting variables for k-means cluster analysis by using a genetic algorithm that optimises the silhouettes."
//Analytica Chimica Acta 515.1 (2004): 87-100.
//Method of finding the best value for k based on the silhouette method
//above
    /**
     * Tries k = 2..10, scoring each clustering by its total silhouette value,
     * and keeps the medoids/assignments/clusters of the best-scoring k.
     */
    private void findBestK(Instances data) throws Exception {
        int maxK = 10;
        // NOTE(review): initialised to 0, so if every candidate k produces a
        // negative total silhouette no result is copied back -- confirm this
        // is the intended behaviour.
        double bestSilVal = 0;
        //For each value of K.
        for (int i = 2; i <= maxK; i++) {
            // Reuse the already-computed distance matrix for each candidate k.
            KMedoids kmedoids = new KMedoids(distanceMatrix);
            kmedoids.setDistanceFunction(distFunc);
            kmedoids.setNumClusters(i);
            kmedoids.setNormaliseData(false);
            kmedoids.setRefinedInitialMedoids(refinedInitialMedoids);
            if (seedClusterer)
                kmedoids.setSeed(seed + (i + 1) * 237);
            kmedoids.buildClusterer(data);
            double totalSilVal = 0;
            //For each cluster created by k-means.
            for (int n = 0; n < i; n++) {
                //For each point in the cluster.
                for (int g = 0; g < kmedoids.clusters[n].size(); g++) {
                    double clusterDist = 0;
                    double minOtherClusterDist = Double.MAX_VALUE;
                    int index = kmedoids.clusters[n].get(g);
                    //Find mean distance of the point to other points in its
                    //cluster.
                    // distanceMatrix is lower-triangular: index [larger][smaller].
                    for (int j = 0; j < kmedoids.clusters[n].size(); j++) {
                        if (index == kmedoids.clusters[n].get(j)) continue;
                        if (index > kmedoids.clusters[n].get(j)) {
                            clusterDist += distanceMatrix[index][kmedoids.clusters[n].get(j)];
                        } else {
                            clusterDist += distanceMatrix[kmedoids.clusters[n].get(j)][index];
                        }
                    }
                    clusterDist /= kmedoids.clusters[n].size();
                    //Find the minimum distance of the point to other clusters.
                    for (int m = 0; m < i; m++) {
                        if (m == n) {
                            continue;
                        }
                        double otherClusterDist = 0;
                        for (int j = 0; j < kmedoids.clusters[m].size(); j++) {
                            if (index > kmedoids.clusters[m].get(j)) {
                                otherClusterDist += distanceMatrix[index][kmedoids.clusters[m].get(j)];
                            } else {
                                otherClusterDist += distanceMatrix[kmedoids.clusters[m].get(j)][index];
                            }
                        }
                        otherClusterDist /= kmedoids.clusters[m].size();
                        if (otherClusterDist < minOtherClusterDist) {
                            minOtherClusterDist = otherClusterDist;
                        }
                    }
                    //Calculate the silhoutte value for the point and add it
                    //to the total
                    double silVal = minOtherClusterDist - clusterDist;
                    double div = clusterDist;
                    if (minOtherClusterDist > clusterDist) {
                        div = minOtherClusterDist;
                    }
                    silVal /= div;
                    totalSilVal += silVal;
                }
            }
            if (totalSilVal > bestSilVal) {
                bestSilVal = totalSilVal;
                medoids = kmedoids.medoids;
                assignments = kmedoids.assignments;
                clusters = kmedoids.clusters;
                k = kmedoids.k;
            }
        }
    }
    // Developer test harness: clusters a set of hard-coded local .arff files
    // and prints MATLAB commands for visualising the results. Not part of the
    // clusterer's public API.
    public static void main(String[] args) throws Exception {
        String[] datasets = {"Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\aggregation.arff",
                "Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\clustersynth.arff",
                "Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\dptest1k.arff",
                "Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\dptest4k.arff",
                "Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\flame.arff",
                "Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\spiral.arff"};
        String[] names = {"aggre", "synth", "dptest1k", "dptest4k", "flame", "spiral"};
        boolean output = true;
        if (output) {
            System.out.println("cd('Z:\\Data Working Area\\ClusteringTestDatasets\\DensityPeakVector\\" +
                    "DensityPeakVector')");
            System.out.println("load('matlabCluster.mat')");
            System.out.println("k = [1,2,3,4,5,6,7,8,9,10]");
        }
        for (int i = 0; i < datasets.length; i++) {
            Instances inst = DatasetLoading.loadDataNullable(datasets[i]);
            inst.setClassIndex(inst.numAttributes() - 1);
            KMedoids kmedoids = new KMedoids();
            kmedoids.setFindBestK(true);
            kmedoids.setRefinedInitialMedoids(true);
            kmedoids.setSeed(0);
            kmedoids.buildClusterer(inst);
            if (output) {
                System.out.println(names[i] + "c = " + Arrays.toString(kmedoids.assignments));
                System.out.println("figure");
                System.out.println("scatter(" + names[i] + "x," + names[i] + "y,[],scatterColours(" + names[i] + "c))");
            }
        }
    }
}
| 18,142 | 32.975655 | 120 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/clusterers/consensus/ACE.java | package machine_learning.clusterers.consensus;
import evaluation.storage.ClustererResults;
import experiments.data.DatasetLoading;
import machine_learning.clusterers.KMeans;
import tsml.clusterers.EnhancedAbstractClusterer;
import utilities.GenericTools;
import weka.clusterers.NumberOfClustersRequestable;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Random;
import static utilities.ArrayUtilities.sum;
import static utilities.ArrayUtilities.unique;
import static utilities.ClusteringUtilities.randIndex;
import static utilities.GenericTools.max;
import static utilities.Utilities.argMax;
public class ACE extends ConsensusClusterer implements LoadableConsensusClusterer, NumberOfClustersRequestable {
    // Stage-1 merge threshold: cluster pairs with set correlation >= alpha merge.
    private double alpha = 0.8;
    // NOTE(review): not referenced in the visible portion of this class --
    // confirm usage in buildEnsemble before documenting further.
    private double alphaIncrement = 0.1;
    // Stage-2 stopping point: merging halts once max similarity falls below this.
    private double alphaMin = 0.6;
    // NOTE(review): secondary threshold; usage not visible here -- confirm.
    private double alpha2 = 0.7;
    // Number of consensus clusters.
    private int k = 2;
    // Maps each ensemble member's flattened cluster index to its merged label.
    private int[] newLabels;
    private Random rand;
    public ACE(EnhancedAbstractClusterer[] clusterers) {
        super(clusterers);
    }
    public ACE(ArrayList<EnhancedAbstractClusterer> clusterers) {
        super(clusterers);
    }
    /** @return the number of consensus clusters, k. */
    @Override
    public int numberOfClusters() throws Exception {
        return k;
    }
    /** Sets the number of consensus clusters. */
    @Override
    public void setNumClusters(int numClusters) throws Exception {
        k = numClusters;
    }
    /**
     * Builds the consensus: optionally trains each ensemble member, then
     * collects every member's clusters and merges them via buildEnsemble.
     *
     * @param data the training instances
     * @throws Exception if a member clusterer fails to build
     */
    @Override
    public void buildClusterer(Instances data) throws Exception {
        super.buildClusterer(data);
        if (buildClusterers){
            for (EnhancedAbstractClusterer clusterer: clusterers){
                clusterer.buildClusterer(data);
            }
        }
        // One entry per ensemble member, each holding that member's clusters
        // as lists of training-instance indexes.
        ArrayList<Integer>[][] ensembleClusters = new ArrayList[clusterers.length][];
        for (int i = 0; i < ensembleClusters.length; i++) {
            ensembleClusters[i] = clusterers[i].getClusters();
        }
        buildEnsemble(ensembleClusters, data.numInstances());
    }
    /**
     * Builds the consensus from ensemble member results saved on file rather
     * than from live clusterers. Reads each member's trainFold&lt;seed&gt;.csv.
     *
     * @param directoryPaths one results directory per ensemble member
     * @throws Exception if a results file cannot be read
     */
    @Override
    public void buildFromFile(String[] directoryPaths) throws Exception {
        ArrayList<Integer>[][] ensembleClusters = new ArrayList[directoryPaths.length][];
        int numInstances = -1;
        for (int i = 0; i < directoryPaths.length; i++) {
            ClustererResults r = new ClustererResults(directoryPaths[i] + "trainFold" + seed + ".csv");
            // All members are assumed to cover the same instances; take the
            // count from the first file.
            if (i == 0)
                numInstances = r.numInstances();
            ensembleClusters[i] = new ArrayList[r.getNumClusters()];
            for (int n = 0; n < ensembleClusters[i].length; n++) {
                ensembleClusters[i][n] = new ArrayList();
            }
            // Invert the per-instance assignments into per-cluster index lists.
            int[] fileAssignments = r.getClusterValuesAsIntArray();
            for (int n = 0; n < fileAssignments.length; n++) {
                ensembleClusters[i][fileAssignments[n]].add(n);
            }
        }
        buildEnsemble(ensembleClusters, numInstances);
    }
@Override
public int clusterInstance(Instance inst) throws Exception {
double[] dist = distributionForInstance(inst);
return argMax(dist, rand);
}
    /**
     * Returns the consensus distribution over the k merged clusters for an
     * instance: each ensemble member casts one vote for the merged label of
     * the cluster it assigns the instance to.
     *
     * @param inst the instance to cluster
     * @return probability distribution of length k (votes / number of members)
     * @throws Exception if a member clusterer fails
     */
    @Override
    public double[] distributionForInstance(Instance inst) throws Exception {
        Instance newInst = copyInstances ? new DenseInstance(inst) : inst;
        int clsIdx = inst.classIndex();
        if (clsIdx >= 0){
            newInst.setDataset(null);
            newInst.deleteAttributeAt(clsIdx);
        }
        //todo checkout with certainty stuff
        double[] dist = new double[k];
        int offset = 0;
        // NOTE(review): members are queried with the raw 'inst' even though a
        // preprocessed copy (newInst) was just built -- confirm whether member
        // clusterers strip the class attribute themselves.
        for (EnhancedAbstractClusterer clusterer : clusterers) {
            dist[newLabels[offset + clusterer.clusterInstance(inst)]]++;
            offset += clusterer.numberOfClusters();
        }
        for (int i = 0; i < dist.length; i++){
            dist[i] /= clusterers.length;
        }
        return dist;
    }
@Override
public int[] clusterFromFile(String[] directoryPaths) throws Exception {
double[][] dists = distributionFromFile(directoryPaths);
int[] arr = new int[dists.length];
for (int i = 0; i < dists.length; i++) {
arr[i] = argMax(dists[i], rand);
}
return arr;
}
@Override
public double[][] distributionFromFile(String[] directoryPaths) throws Exception {
int[][] ensembleAssignments = new int[directoryPaths.length][];
int offset = 0;
for (int i = 0; i < directoryPaths.length; i++) {
ClustererResults r = new ClustererResults(directoryPaths[i] + "testFold" + seed + ".csv");
ensembleAssignments[i] = r.getClusterValuesAsIntArray();
for (int n = 0; n < ensembleAssignments[i].length; n++) {
ensembleAssignments[i][n] = newLabels[offset + ensembleAssignments[i][n]];
}
offset += r.getNumClusters();
}
double[][] dists = new double[ensembleAssignments[0].length][k];
for (int i = 0; i < dists.length; i++) {
for (int[] clusterAssignments : ensembleAssignments) {
dists[i][clusterAssignments[i]]++;
}
for (int n = 0; n < dists[n].length; n++){
dists[i][n] /= ensembleAssignments.length;
}
}
return dists;
}
    /**
     * Core ACE consensus-building routine. Each member cluster is encoded as a binary
     * membership vector over the cases; clusters from different members are repeatedly merged
     * by set-correlation similarity (stage 1 raises alpha until at least k clusters survive a
     * merge pass, stage 2 lowers it to the current max similarity) until exactly k consensus
     * clusters remain. Remaining clusters are ranked by certainty, surplus clusters folded into
     * their most similar kept cluster, and finally each case is assigned to its most similar
     * cluster, with low-certainty cases placed by a minimum quality-change criterion.
     *
     * @param ensembleClusters per-member arrays of clusters, each cluster a list of case indexes
     * @param numInstances number of cases in the training data
     * @throws Exception if k exceeds the total number of clusters in the ensemble
     */
    private void buildEnsemble(ArrayList<Integer>[][] ensembleClusters, int numInstances) throws Exception {
        if (!seedClusterer) {
            rand = new Random();
        } else {
            rand = new Random(seed);
        }
        // total cluster count across all members; clusters are addressed by a flat index below
        int clusterCount = 0;
        // one list of binary membership vectors (1 = case in cluster) per ensemble member
        ArrayList<ArrayList<double[]>> binaryClusterMembership = new ArrayList<>(ensembleClusters.length);
        for (ArrayList<Integer>[] memberClusters: ensembleClusters){
            clusterCount += memberClusters.length;
            ArrayList<double[]> binaryClusters = new ArrayList<>(memberClusters.length);
            binaryClusterMembership.add(binaryClusters);
            for (ArrayList<Integer> memberCluster : memberClusters) {
                double[] binaryCluster = new double[numInstances];
                binaryClusters.add(binaryCluster);
                for (int n : memberCluster) {
                    binaryCluster[n] = 1;
                }
            }
        }
        if (k > clusterCount){
            throw new Exception("K is greater than the total number of clusters in the ensemble.");
        }
        double newAlpha = alpha;
        // newLabels[flatClusterIndex] tracks which surviving cluster each original cluster maps to
        newLabels = new int[clusterCount];
        for (int i = 1; i < clusterCount; i++){
            newLabels[i] = i;
        }
        boolean stage1 = true;
        double[][] clusterSimilarities = null;
        while (true) {
            // find the similarity of each cluster from different ensemble members
            // (lower-triangular matrix over flat indexes; same-member pairs are never compared)
            int countI = 0;
            clusterSimilarities = new double[clusterCount][];
            for (int i = 0; i < binaryClusterMembership.size(); i++) {
                for (int n = 0; n < binaryClusterMembership.get(i).size(); n++) {
                    clusterSimilarities[countI + n] = new double[countI];
                }
                int countN = 0;
                for (int n = 0; n < i; n++) {
                    for (int j = 0; j < binaryClusterMembership.get(i).size(); j++) {
                        for (int k = 0; k < binaryClusterMembership.get(n).size(); k++) {
                            clusterSimilarities[countI + j][countN + k] = setCorrelation(
                                    binaryClusterMembership.get(i).get(j), binaryClusterMembership.get(n).get(k),
                                    numInstances);
                        }
                    }
                    countN += binaryClusterMembership.get(n).size();
                }
                countI += binaryClusterMembership.get(i).size();
            }
            // update alpha to the max similarity value if not using initial clusters
            if (!stage1){
                newAlpha = maxSimilarity(clusterSimilarities);
                if (newAlpha < alphaMin){
                    break;
                }
            }
            int tempClusterCount = clusterCount;
            int[] tempNewLabels = Arrays.copyOf(newLabels, newLabels.length);
            // merge clusters with a similarity greater than alpha; the absorbing cluster's
            // membership vector accumulates the absorbed cluster's memberships
            boolean[] newCluster = new boolean[clusterCount];
            boolean[] merged = new boolean[clusterCount];
            countI = 0;
            for (int i = 0; i < binaryClusterMembership.size(); i++) {
                int countN = 0;
                for (int n = 0; n < i; n++) {
                    for (int j = 0; j < binaryClusterMembership.get(i).size(); j++) {
                        for (int k = 0; k < binaryClusterMembership.get(n).size(); k++) {
                            if (!merged[countI + j] && !merged[countN + k] &&
                                    clusterSimilarities[countI + j][countN + k] >= newAlpha) {
                                // redirect every label pointing at the absorbed cluster
                                for (int g = 0; g < tempNewLabels.length; g++){
                                    if (tempNewLabels[g] == countI + j){
                                        tempNewLabels[g] = countN + k;
                                    }
                                }
                                tempNewLabels[countI + j] = tempNewLabels[countN + k];
                                merged[countI + j] = true;
                                newCluster[countN + k] = true;
                                tempClusterCount--;
                                for (int v = 0; v < numInstances; v++) {
                                    binaryClusterMembership.get(n).get(k)[v] +=
                                            binaryClusterMembership.get(i).get(j)[v];
                                }
                            }
                        }
                    }
                    countN += binaryClusterMembership.get(n).size();
                }
                countI += binaryClusterMembership.get(i).size();
            }
            // using the initial clusters, keep going and incrementing alpha until the number of
            // clusters is greater than k
            if (stage1) {
                if (tempClusterCount >= k) {
                    clusterCount = tempClusterCount;
                    binaryClusterMembership = removeMerged(binaryClusterMembership, merged, newCluster);
                    newLabels = relabel(tempNewLabels);
                    stage1 = false;
                } else {
                    newAlpha += alphaIncrement;
                }
            }
            // no longer using the initial clusters, keep going and lowering alpha to the max similarity
            // until the number of clusters is less than or equal to k or less than the minimum alpha
            else{
                if (tempClusterCount == k){
                    clusterCount = tempClusterCount;
                    binaryClusterMembership = removeMerged(binaryClusterMembership, merged, newCluster);
                    newLabels = relabel(tempNewLabels);
                    break;
                }
                else if (tempClusterCount < k){
                    break;
                }
                else{
                    clusterCount = tempClusterCount;
                    binaryClusterMembership = removeMerged(binaryClusterMembership, merged, newCluster);
                    newLabels = relabel(tempNewLabels);
                }
            }
        }
        // calculate how certain each cluster is for each case
        // (flatten the per-member grouping into a single array of accumulated membership counts)
        double[][] membershipCounts = new double[clusterCount][];
        int clusterIdx = 0;
        for (ArrayList<double[]> clusterGroup : binaryClusterMembership) {
            for (double[] cluster : clusterGroup) {
                membershipCounts[clusterIdx++] = cluster;
            }
        }
        // membershipSimilarities[c][i] = share of case i's total membership mass held by cluster c
        double[][] membershipSimilarities = new double[clusterCount][numInstances];
        for (int i = 0; i < numInstances; i++){
            double sum = 0;
            for (int n = 0; n < clusterCount; n++){
                sum += membershipCounts[n][i];
            }
            for (int n = 0; n < clusterCount; n++){
                membershipSimilarities[n][i] = membershipCounts[n][i] / sum;
            }
        }
        // a cluster is "certain" if it holds at least one case with similarity above alpha2
        int[] certainClusters = new int[clusterCount];
        for (int i = 0; i < clusterCount; i++){
            for (int n = 0; n < numInstances; n++){
                if (membershipSimilarities[i][n] > alpha2){
                    certainClusters[i] = 1;
                    break;
                }
            }
        }
        // if we dont have k clusters with at least one certain (member similarity > alpha2) case, find the k most
        // certain clusters
        Integer[] clusterRanks = new Integer[clusterCount];
        double ncc = sum(certainClusters);
        double newAlpha2 = alpha2;
        if (ncc != k){
            double[] clusterCertainties = new double[clusterCount];
            for (int i = 0; i < clusterCount; i++) {
                int numObjects = 0;
                for (int n = 0; n < numInstances; n++) {
                    if (membershipSimilarities[i][n] > 0) {
                        clusterCertainties[i] += membershipSimilarities[i][n];
                        numObjects++;
                    }
                    // NOTE(review): this divide runs on EVERY iteration of the n loop, repeatedly
                    // shrinking the running sum — it looks like it was meant to execute once after
                    // the loop (mean similarity of member cases); confirm against the ACE paper
                    clusterCertainties[i] /= numObjects;
                }
            }
            for (int i = 0; i < clusterCount; i++) {
                clusterRanks[i] = i;
            }
            GenericTools.SortIndexDescending sort = new GenericTools.SortIndexDescending(clusterCertainties);
            Arrays.sort(clusterRanks, sort);
            // NOTE(review): the two newAlpha2 branches below look inconsistent (one uses the
            // certainty of the k-th ranked cluster, the other the max pairwise similarity) —
            // inherited as-is, flagged for verification
            newAlpha2 = -1;
            if (ncc < 1) {
                newAlpha2 = clusterCertainties[clusterRanks[k - 1]];
            }
            else {
                for (int i = 1; i < clusterCount; i++) {
                    double m = max(clusterSimilarities[i]);
                    if (m > newAlpha2) {
                        newAlpha2 = m;
                    }
                }
            }
        }
        // exactly k certain clusters: rank them first, uncertain clusters after
        else{
            int n = 0;
            for (int i = 0; i < clusterCount; i++) {
                if (certainClusters[i] == 1) {
                    clusterRanks[n++] = i;
                }
            }
            for (int i = 0; i < clusterCount; i++) {
                if (certainClusters[i] == 0) {
                    clusterRanks[n++] = i;
                }
            }
        }
        // rewrite consensus labels to rank positions (0..clusterCount-1, kept clusters first)
        for (int i = 0; i < newLabels.length; i++){
            for (int n = 0; n < clusterCount; n++){
                if (newLabels[i] == clusterRanks[n]){
                    newLabels[i] = n;
                    break;
                }
            }
        }
        // calculate similarities of remaining clusters to removed ones to determine labels for new cases
        clusterSimilarities = new double[k][clusterCount - k];
        for (int i = k; i < clusterCount; i++){
            double max = -2;
            int maxIdx = -1;
            for (int n = 0; n < k; n++){
                double similarity = setCorrelation(membershipSimilarities[clusterRanks[i]],
                        membershipSimilarities[clusterRanks[n]], numInstances);
                clusterSimilarities[n][i - k] = similarity;
                if (similarity > max){
                    max = similarity;
                    maxIdx = n;
                }
            }
            // fold the dropped cluster into its most similar kept cluster
            for (int n = 0; n < newLabels.length; n++){
                if (newLabels[n] == i){
                    newLabels[n] = maxIdx;
                }
            }
            for (int n = 0; n < numInstances; n++){
                membershipCounts[maxIdx][n] += membershipCounts[clusterRanks[i]][n];
            }
        }
        assignments = new double[numInstances];
        clusters = new ArrayList[k];
        for (int i = 0; i < k; i++) {
            clusters[i] = new ArrayList();
        }
        // renormalise similarities after the fold-in above (only needed if clusters were dropped)
        if (clusterCount > k) {
            for (int i = 0; i < numInstances; i++) {
                double sum = 0;
                for (int n = 0; n < clusterCount; n++) {
                    sum += membershipCounts[clusterRanks[n]][i];
                }
                for (int n = 0; n < clusterCount; n++) {
                    membershipSimilarities[clusterRanks[n]][i] = membershipCounts[clusterRanks[n]][i] / sum;
                }
            }
        }
        // assign similarities to any cases which have no similarity with any of the current clusters
        int numUnclustered = 0;
        for (int i = 0; i < numInstances; i++) {
            double max = 0;
            int maxIdx = -1;
            double sum = 0;
            for (int n = 0; n < k; n++) {
                sum += membershipSimilarities[clusterRanks[n]][i];
                if (membershipSimilarities[clusterRanks[n]][i] > max){
                    max = membershipSimilarities[clusterRanks[n]][i];
                    maxIdx = n;
                }
            }
            // case has no mass in any kept cluster: borrow similarity through the dropped
            // clusters it does belong to, weighted by cluster-to-cluster similarity
            if (sum == 0){
                for (int n = 0; n < k; n++) {
                    for (int j = k; j < clusterCount; j++) {
                        if (membershipSimilarities[clusterRanks[j]][i] > 0) {
                            membershipSimilarities[clusterRanks[n]][i] += clusterSimilarities[n][j - k] *
                                    membershipSimilarities[clusterRanks[j]][i];
                        }
                    }
                    if (membershipSimilarities[clusterRanks[n]][i] > max){
                        max = membershipSimilarities[clusterRanks[n]][i];
                        maxIdx = n;
                    }
                }
            }
            // this skips stage 3.
            if (max > newAlpha2) {
                assignments[i] = maxIdx;
                clusters[maxIdx].add(i);
            }
            else {
                assignments[i] = -1;
                numUnclustered++;
            }
        }
        // assign clusters to uncertain cases: each goes to the cluster whose quality (mean
        // squared deviation from the cluster's mean similarity) degrades least by adding it
        if (numUnclustered > 0) {
            double[] clusterQualities = new double[k];
            double[] membershipSums = new double[k];
            for (int n = 0; n < k; n++){
                int numAboveZero = 0;
                for (int i = 0; i < numInstances; i++) {
                    if (membershipSimilarities[clusterRanks[n]][i] > 0) {
                        membershipSums[n] += membershipSimilarities[clusterRanks[n]][i];
                        numAboveZero++;
                    }
                }
                membershipSums[n] /= numAboveZero;
                double sum = 0;
                for (int c: clusters[n]){
                    sum += Math.pow(membershipSimilarities[clusterRanks[n]][c] - membershipSums[n], 2);
                }
                clusterQualities[n] = sum / clusters[n].size();
            }
            for (int i = 0; i < numInstances; i++){
                if (assignments[i] == -1){
                    double minQualityChange = Double.MAX_VALUE;
                    double newClusterQuality = -1;
                    int minIdx = -1;
                    for (int n = 0; n < k; n++){
                        double sum = 0;
                        for (int c: clusters[n]){
                            sum += Math.pow(membershipSimilarities[clusterRanks[n]][c] - membershipSums[n], 2);
                        }
                        sum += Math.pow(membershipSimilarities[clusterRanks[n]][i] - membershipSums[n], 2);
                        double quality = sum / (clusters[n].size() + 1);
                        double qualityChange = quality - clusterQualities[n];
                        if (qualityChange < minQualityChange){
                            minQualityChange = qualityChange;
                            newClusterQuality = quality;
                            minIdx = n;
                        }
                    }
                    clusterQualities[minIdx] = newClusterQuality;
                    assignments[i] = minIdx;
                    clusters[minIdx].add(i);
                }
            }
        }
    }
private double setCorrelation(double[] c1, double[] c2, int n){
double c1Size = 0;
double c2Size = 0;
double intersection = 0;
for (int i = 0; i < n; i ++){
if (c1[i] > 0) {
c1Size++;
if (c2[i] > 0) {
c2Size++;
intersection++;
}
}
else if (c2[i] > 0) {
c2Size++;
}
}
double multSize = c1Size * c2Size;
double numerator = intersection - multSize / n;
double denominator = Math.sqrt(multSize * (1 - c1Size / n) * (1 - c2Size / n));
return numerator/denominator;
}
private double maxSimilarity(double[][] clusterSimilarities){
double max = -1;
for (double[] clusterSimilarity : clusterSimilarities) {
for (double v : clusterSimilarity) {
if (v > max) {
max = v;
}
}
}
return max;
}
    /**
     * Rebuilds the per-member cluster grouping after a merge pass: clusters that were merged
     * away are dropped, clusters that absorbed others are promoted into their own
     * single-element group, and untouched clusters stay grouped with their original member.
     *
     * @param binaryClusterMembership per-member lists of binary membership vectors
     * @param merged flag per flat cluster index: true if the cluster was merged into another
     * @param newCluster flag per flat cluster index: true if the cluster absorbed at least one other
     * @return the regrouped membership lists
     */
    private ArrayList<ArrayList<double[]>> removeMerged(ArrayList<ArrayList<double[]>> binaryClusterMembership,
                                                        boolean[] merged, boolean[] newCluster){
        ArrayList<ArrayList<double[]>> newBinaryClusterMembership = new ArrayList<>();
        // i walks the flat cluster index, in member order
        int i = 0;
        for (ArrayList<double[]> clusterGroup : binaryClusterMembership) {
            ArrayList<double[]> newGroup = new ArrayList<>();
            for (int j = 0; j < clusterGroup.size(); j++) {
                if (newCluster[i]) {
                    if (clusterGroup.size() > 1) {
                        ArrayList<double[]> newSingleGroup = new ArrayList<>(1);
                        newSingleGroup.add(clusterGroup.get(j));
                        newBinaryClusterMembership.add(newSingleGroup);
                    } else {
                        // singleton group is reused as-is (shared reference, as original)
                        newBinaryClusterMembership.add(clusterGroup);
                    }
                } else if (!merged[i]) {
                    newGroup.add(clusterGroup.get(j));
                }
                // clusters with merged[i] set and newCluster[i] clear are dropped entirely
                i++;
            }
            if (newGroup.size() > 0) {
                newBinaryClusterMembership.add(newGroup);
            }
        }
        return newBinaryClusterMembership;
    }
private int[] relabel(int[] labels){
Integer[] unique = unique(labels).toArray(new Integer[0]);
for (int i = 0; i < labels.length; i++){
for (int n = 0; n < unique.length; n++){
if (labels[i] == unique[n]){
labels[i] = n;
break;
}
}
}
return labels;
}
    /**
     * Ad-hoc demo: clusters the merged train+test Trace dataset with an ACE ensemble of three
     * differently seeded k-means members and prints the resulting clusters and Rand index.
     * NOTE: uses a hard-coded local Windows dataset path — not portable.
     */
    public static void main(String[] args) throws Exception {
        String dataset = "Trace";
        Instances inst = DatasetLoading.loadDataNullable("D:\\CMP Machine Learning\\Datasets\\UnivariateARFF\\" + dataset + "/" +
                dataset + "_TRAIN.arff");
        Instances inst2 = DatasetLoading.loadDataNullable("D:\\CMP Machine Learning\\Datasets\\UnivariateARFF\\" + dataset + "/" +
                dataset + "_TEST.arff");
        inst.setClassIndex(inst.numAttributes() - 1);
        inst.addAll(inst2);
        // three k-means base clusterers, seeded differently for diversity
        ArrayList<EnhancedAbstractClusterer> clusterers = new ArrayList<>();
        for (int i = 0; i < 3; i++){
            KMeans c = new KMeans();
            c.setNumClusters(inst.numClasses());
            c.setSeed(i);
            clusterers.add(c);
        }
        ACE k = new ACE(clusterers);
        k.setNumClusters(inst.numClasses());
        k.setSeed(0);
        k.buildClusterer(inst);
        System.out.println(k.clusters.length);
        System.out.println(Arrays.toString(k.assignments));
        System.out.println(Arrays.toString(k.clusters));
        System.out.println(randIndex(k.assignments, inst));
    }
}
| 24,094 | 35.898928 | 130 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/clusterers/consensus/ConsensusClusterer.java | package machine_learning.clusterers.consensus;
import tsml.clusterers.EnhancedAbstractClusterer;
import java.util.ArrayList;
/**
 * Base class for consensus (ensemble) clusterers: holds the set of base clusterers whose
 * assignments are combined by subclasses, plus a flag controlling whether the subclass should
 * build those base clusterers itself.
 */
public abstract class ConsensusClusterer extends EnhancedAbstractClusterer {

    // base clusterers whose outputs are combined by subclasses
    protected EnhancedAbstractClusterer[] clusterers;
    // when true, subclasses build the base clusterers during buildClusterer
    protected boolean buildClusterers = true;

    /** Creates a consensus clusterer over the given array of base clusterers. */
    public ConsensusClusterer(EnhancedAbstractClusterer[] clusterers){
        this.clusterers = clusterers;
    }

    /** Creates a consensus clusterer over the given list of base clusterers. */
    public ConsensusClusterer(ArrayList<EnhancedAbstractClusterer> clusterers){
        this(clusterers.toArray(new EnhancedAbstractClusterer[0]));
    }

    /** Sets whether the base clusterers should be built when the ensemble is built. */
    public void setBuildClusterers(boolean build){
        buildClusterers = build;
    }
}
| 675 | 26.04 | 79 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/clusterers/consensus/IVC.java | package machine_learning.clusterers.consensus;
import evaluation.storage.ClustererResults;
import experiments.data.DatasetLoading;
import machine_learning.clusterers.KMeans;
import tsml.clusterers.EnhancedAbstractClusterer;
import weka.clusterers.NumberOfClustersRequestable;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import java.util.*;
import static utilities.ClusteringUtilities.randIndex;
import static utilities.Utilities.argMax;
public class IVC extends ConsensusClusterer implements LoadableConsensusClusterer, NumberOfClustersRequestable {
// https://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=4470298
private int k = 2;
private int maxIterations = 200;
private double[][] clusterCenters;
private Random rand;
public IVC(EnhancedAbstractClusterer[] clusterers) {
super(clusterers);
}
public IVC(ArrayList<EnhancedAbstractClusterer> clusterers) {
super(clusterers);
}
@Override
public int numberOfClusters() throws Exception {
return k;
}
@Override
public void setNumClusters(int numClusters) throws Exception {
k = numClusters;
}
@Override
public void buildClusterer(Instances data) throws Exception {
super.buildClusterer(data);
if (buildClusterers){
for (EnhancedAbstractClusterer clusterer: clusterers){
clusterer.buildClusterer(data);
}
}
double[][] ensembleClusters = new double[clusterers.length][];
for (int i = 0; i < ensembleClusters.length; i++) {
ensembleClusters[i] = clusterers[i].getAssignments();
}
buildEnsemble(ensembleClusters);
}
@Override
public void buildFromFile(String[] directoryPaths) throws Exception {
double[][] ensembleAssignments = new double[directoryPaths.length][];
for (int i = 0; i < directoryPaths.length; i++) {
ClustererResults r = new ClustererResults(directoryPaths[i] + "trainFold" + seed + ".csv");
ensembleAssignments[i] = r.getClusterValuesAsArray();
}
buildEnsemble(ensembleAssignments);
}
@Override
public int clusterInstance(Instance inst) throws Exception {
Instance newInst = copyInstances ? new DenseInstance(inst) : inst;
int clsIdx = inst.classIndex();
if (clsIdx >= 0){
newInst.setDataset(null);
newInst.deleteAttributeAt(clsIdx);
}
double minDist = Double.MAX_VALUE;
int minIndex = -1;
for (int i = 0; i < k; i++) {
double dist = 0;
for (int n = 0; n < clusterCenters[i].length; n++) {
if (clusterers[n].clusterInstance(inst) == clusterCenters[i][n]) {
dist++;
}
}
if (dist < minDist) {
minDist = dist;
minIndex = i;
}
}
return minIndex;
}
@Override
public int[] clusterFromFile(String[] directoryPaths) throws Exception {
int[][] ensembleAssignments = new int[directoryPaths.length][];
for (int i = 0; i < directoryPaths.length; i++) {
ClustererResults r = new ClustererResults(directoryPaths[i] + "testFold" + seed + ".csv");
ensembleAssignments[i] = r.getClusterValuesAsIntArray();
}
int[] cluserings = new int[ensembleAssignments[0].length];
for (int i = 0; i < ensembleAssignments[0].length; i++) {
double minDist = Double.MAX_VALUE;
int minIndex = -1;
for (int n = 0; n < k; n++) {
double dist = 0;
for (int j = 0; j < clusterCenters[n].length; j++) {
if (ensembleAssignments[j][i] == clusterCenters[n][j]) {
dist++;
}
}
if (dist < minDist) {
minDist = dist;
minIndex = n;
}
}
cluserings[i] = minIndex;
}
return cluserings;
}
@Override
public double[][] distributionFromFile(String[] directoryPaths) throws Exception {
int[] clusterings = clusterFromFile(directoryPaths);
double[][] dists = new double[clusterings.length][k];
for (int i = 0; i < clusterings.length; i++) {
dists[i][clusterings[i]] = 1;
}
return dists;
}
private void buildEnsemble(double[][] ensembleClusters) throws Exception {
if (!seedClusterer) {
rand = new Random();
} else {
rand = new Random(seed);
}
assignments = new double[ensembleClusters[0].length];
initialClusterCenters(ensembleClusters);
calculateClusterMembership(ensembleClusters);
boolean finished = false;
int iterations = 0;
//Change cluster centers until cluster membership no longer changes
while (!finished) {
if (!calculateClusterMembership(ensembleClusters) || iterations == maxIterations) {
finished = true;
} else {
selectClusterCenters(ensembleClusters);
}
iterations++;
}
}
private void initialClusterCenters(double[][] ensembleClusters) {
ArrayList<Integer> indexes = new ArrayList<>(ensembleClusters[0].length);
for (int i = 0; i < ensembleClusters[0].length; i++) {
indexes.add(i);
}
clusterCenters = new double[k][ensembleClusters.length];
Collections.shuffle(indexes, rand);
for (int i = 0; i < k; i++) {
for (int n = 0; n < ensembleClusters.length; n++) {
clusterCenters[i][n] = ensembleClusters[n][indexes.get(i)];
}
}
}
private boolean calculateClusterMembership(double[][] ensembleClusters) {
boolean membershipChange = false;
//Set membership of each point to the closest cluster center
for (int i = 0; i < ensembleClusters[0].length; i++) {
double minDist = Double.MAX_VALUE;
int minIndex = -1;
for (int n = 0; n < k; n++) {
double dist = 0;
for (int j = 0; j < clusterCenters[n].length; j++) {
if (ensembleClusters[j][i] == clusterCenters[n][j]) {
dist++;
}
}
if (dist < minDist) {
minDist = dist;
minIndex = n;
}
}
//If membership of any point changed return true to keep
//looping
if (minIndex != assignments[i]) {
assignments[i] = minIndex;
membershipChange = true;
}
}
if (membershipChange) {
//Create and store an ArrayList for each cluster containing indexes of
//points inside the cluster.
clusters = new ArrayList[k];
for (int i = 0; i < k; i++) {
clusters[i] = new ArrayList<>();
}
for (int i = 0; i < train.numInstances(); i++) {
clusters[(int) assignments[i]].add(i);
}
}
return membershipChange;
}
private void selectClusterCenters(double[][] ensembleClusters) {
for (int i = 0; i < k; i++) {
int halfPoint = clusters[i].size() / 2;
for (int n = 0; n < ensembleClusters.length; n++) {
HashMap<Double, Integer> map = new HashMap<>();
boolean foundMajority = false;
for (int j = 0; j < clusters[i].size(); j++) {
double v = ensembleClusters[n][clusters[i].get(j)];
if (map.containsKey(v)) {
int count = map.get(v) + 1;
if (count > halfPoint) {
clusterCenters[i][n] = v;
foundMajority = true;
break;
} else {
map.put(v, count);
}
} else {
map.put(v, 1);
}
}
if (!foundMajority) {
int maxCount = -1;
for (Map.Entry<Double, Integer> entry : map.entrySet()) {
if (entry.getValue() > maxCount || (entry.getValue() == maxCount && rand.nextBoolean())) {
maxCount = entry.getValue();
clusterCenters[i][n] = entry.getKey();
}
}
}
}
}
}
public static void main(String[] args) throws Exception {
String dataset = "Trace";
Instances inst = DatasetLoading.loadDataNullable("D:\\CMP Machine Learning\\Datasets\\UnivariateARFF\\" + dataset + "/" +
dataset + "_TRAIN.arff");
Instances inst2 = DatasetLoading.loadDataNullable("D:\\CMP Machine Learning\\Datasets\\UnivariateARFF\\" + dataset + "/" +
dataset + "_TEST.arff");
inst.setClassIndex(inst.numAttributes() - 1);
inst.addAll(inst2);
ArrayList<EnhancedAbstractClusterer> clusterers = new ArrayList<>();
for (int i = 0; i < 3; i++){
KMeans c = new KMeans();
c.setNumClusters(inst.numClasses());
c.setSeed(i);
clusterers.add(c);
}
IVC k = new IVC(clusterers);
k.setNumClusters(inst.numClasses());
k.setSeed(0);
k.buildClusterer(inst);
System.out.println(k.clusters.length);
System.out.println(Arrays.toString(k.assignments));
System.out.println(Arrays.toString(k.clusters));
System.out.println(randIndex(k.assignments, inst));
}
}
| 10,009 | 31.083333 | 130 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/clusterers/consensus/LoadableConsensusClusterer.java | package machine_learning.clusterers.consensus;
/**
 * Implemented by consensus clusterers that can be assembled from the saved results files of
 * their ensemble members (one directory per member) rather than from built clusterer objects.
 */
public interface LoadableConsensusClusterer {
    /** Builds the consensus from each member's saved train results. */
    void buildFromFile(String[] directoryPaths) throws Exception;
    /** Returns consensus cluster assignments for the test data in each member's results. */
    int[] clusterFromFile(String[] directoryPaths) throws Exception;
    /** Returns consensus cluster membership distributions for the test data in each member's results. */
    double[][] distributionFromFile(String[] directoryPaths) throws Exception;
}
| 313 | 27.545455 | 78 | java |
tsml-java | tsml-java-master/src/main/java/machine_learning/clusterers/consensus/SimpleVote.java | package machine_learning.clusterers.consensus;
import blogspot.software_and_algorithms.stern_library.optimization.HungarianAlgorithm;
import evaluation.storage.ClustererResults;
import experiments.data.DatasetLoading;
import machine_learning.clusterers.KMeans;
import tsml.clusterers.EnhancedAbstractClusterer;
import weka.clusterers.NumberOfClustersRequestable;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Random;
import static utilities.ClusteringUtilities.randIndex;
import static utilities.InstanceTools.deleteClassAttribute;
import static utilities.Utilities.argMax;
/**
 * Majority-vote consensus clusterer: every base clusterer must produce exactly k clusters;
 * their labellings are aligned to the first member's label space via the Hungarian algorithm,
 * then each case is assigned the cluster most members voted for.
 */
public class SimpleVote extends ConsensusClusterer implements LoadableConsensusClusterer, NumberOfClustersRequestable {

    // number of clusters; every base clusterer must produce exactly this many
    private int k = 2;

    // newLabels[i - 1] maps member i's cluster IDs onto member 0's label space
    private int[][] newLabels;

    private Random rand;

    public SimpleVote(EnhancedAbstractClusterer[] clusterers) {
        super(clusterers);
    }

    public SimpleVote(ArrayList<EnhancedAbstractClusterer> clusterers) {
        super(clusterers);
    }

    @Override
    public int numberOfClusters() throws Exception {
        return k;
    }

    @Override
    public void setNumClusters(int numClusters) throws Exception {
        k = numClusters;
    }

    /**
     * Builds the ensemble: trains members if required (requesting k clusters where supported),
     * verifies each produces k clusters, then aligns labellings and votes per case.
     */
    @Override
    public void buildClusterer(Instances data) throws Exception {
        super.buildClusterer(data);

        if (buildClusterers){
            for (EnhancedAbstractClusterer clusterer: clusterers){
                if (clusterer instanceof NumberOfClustersRequestable)
                    ((NumberOfClustersRequestable) clusterer).setNumClusters(k);
                clusterer.buildClusterer(train);
            }
        }

        for (EnhancedAbstractClusterer clusterer: clusterers){
            if (clusterer.numberOfClusters() != k)
                throw new Exception("SimpleVote base clusterer number of clusters must match k.");
        }

        double[][] ensembleAssignments = new double[clusterers.length][];
        for (int i = 0; i < ensembleAssignments.length; i++) {
            ensembleAssignments[i] = clusterers[i].getAssignments();
        }

        buildEnsemble(ensembleAssignments);
    }

    /** Builds the consensus from each member's saved train results instead of built clusterers. */
    @Override
    public void buildFromFile(String[] directoryPaths) throws Exception {
        double[][] ensembleAssignments = new double[directoryPaths.length][];
        for (int i = 0; i < directoryPaths.length; i++) {
            ClustererResults r = new ClustererResults(directoryPaths[i] + "trainFold" + seed + ".csv");
            if (r.getNumClusters() != k)
                throw new Exception("SimpleVote base clusterer number of clusters must match k.");
            ensembleAssignments[i] = r.getClusterValuesAsArray();
        }

        buildEnsemble(ensembleAssignments);
    }

    @Override
    public int clusterInstance(Instance inst) throws Exception {
        double[] dist = distributionForInstance(inst);
        return argMax(dist, rand);
    }

    /**
     * Vote distribution over the k clusters for an unseen instance: each member's prediction
     * (remapped through newLabels) contributes one vote.
     */
    @Override
    public double[] distributionForInstance(Instance inst) throws Exception {
        Instance newInst = copyInstances ? new DenseInstance(inst) : inst;

        int clsIdx = inst.classIndex();
        if (clsIdx >= 0){
            newInst.setDataset(null);
            newInst.deleteAttributeAt(clsIdx);
        }
        // NOTE(review): newInst is prepared but, as in the original code, members are queried
        // with the original inst — confirm members strip the class attribute themselves

        double[] dist = new double[k];
        // member 0 defines the reference label space
        dist[clusterers[0].clusterInstance(inst)]++;
        for (int i = 1; i < clusterers.length; i++) {
            dist[newLabels[i - 1][clusterers[i].clusterInstance(inst)]]++;
        }

        for (int i = 0; i < dist.length; i++){
            dist[i] /= clusterers.length;
        }

        return dist;
    }

    @Override
    public int[] clusterFromFile(String[] directoryPaths) throws Exception {
        double[][] dists = distributionFromFile(directoryPaths);
        int[] arr = new int[dists.length];
        for (int i = 0; i < dists.length; i++) {
            arr[i] = argMax(dists[i], rand);
        }
        return arr;
    }

    /**
     * Vote distributions for the test data loaded from each member's saved test results,
     * remapped through newLabels and averaged over the ensemble size.
     */
    @Override
    public double[][] distributionFromFile(String[] directoryPaths) throws Exception {
        int[][] ensembleAssignments = new int[directoryPaths.length][];
        for (int i = 0; i < directoryPaths.length; i++) {
            ClustererResults r = new ClustererResults(directoryPaths[i] + "testFold" + seed + ".csv");
            if (r.getNumClusters() != k)
                throw new Exception("SimpleVote base clusterer number of clusters must match k.");
            ensembleAssignments[i] = r.getClusterValuesAsIntArray();

            if (i > 0){
                for (int n = 0; n < ensembleAssignments[i].length; n++) {
                    ensembleAssignments[i][n] = newLabels[i - 1][ensembleAssignments[i][n]];
                }
            }
        }

        double[][] dists = new double[ensembleAssignments[0].length][k];
        for (int i = 0; i < dists.length; i++) {
            for (int[] clusterAssignments : ensembleAssignments) {
                dists[i][clusterAssignments[i]]++;
            }
            // bug fix: previously bounded by dists[n].length while iterating n, indexing the
            // wrong row (and overflowing when numInstances < k); normalise row i instead
            for (int n = 0; n < dists[i].length; n++){
                dists[i][n] /= ensembleAssignments.length;
            }
        }

        return dists;
    }

    /**
     * Aligns each member's labelling to member 0's label space (Hungarian assignment over the
     * negated co-occurrence table, so minimum cost maximises agreement), then majority-votes
     * each case's cluster with random tie-breaking.
     */
    private void buildEnsemble(double[][] ensembleAssignments){
        if (!seedClusterer) {
            rand = new Random();
        } else {
            rand = new Random(seed);
        }

        newLabels = new int[ensembleAssignments.length -1][];
        for (int i = 1; i < ensembleAssignments.length; i++) {
            // counts are negated so the Hungarian minimum-cost assignment maximises label overlap
            double[][] contingencyTable = new double[k][k];
            for (int n = 0; n < ensembleAssignments[i].length; n++) {
                contingencyTable[(int)ensembleAssignments[0][n]][(int)ensembleAssignments[i][n]]--;
            }

            newLabels[i - 1] = new HungarianAlgorithm(contingencyTable).execute();

            for (int n = 0; n < ensembleAssignments[i].length; n++) {
                ensembleAssignments[i][n] = newLabels[i - 1][(int)ensembleAssignments[i][n]];
            }
        }

        assignments = new double[train.numInstances()];
        for (int i = 0; i < assignments.length; i++) {
            double[] votes = new double[k];
            for (double[] clusterAssignments : ensembleAssignments) {
                votes[(int) clusterAssignments[i]]++;
            }
            assignments[i] = argMax(votes, rand);
        }

        // create and store an ArrayList for each cluster containing indexes of
        // points inside the cluster
        clusters = new ArrayList[k];
        for (int i = 0; i < k; i++) {
            clusters[i] = new ArrayList();
        }
        for (int i = 0; i < train.numInstances(); i++) {
            clusters[(int) assignments[i]].add(i);
        }
    }

    /**
     * Ad-hoc demo on the Trace dataset (hard-coded local path) using three seeded k-means
     * members; prints clusters and Rand index.
     */
    public static void main(String[] args) throws Exception {
        String dataset = "Trace";
        Instances inst = DatasetLoading.loadDataNullable("D:\\CMP Machine Learning\\Datasets\\UnivariateARFF\\" + dataset + "/" +
                dataset + "_TRAIN.arff");
        Instances inst2 = DatasetLoading.loadDataNullable("D:\\CMP Machine Learning\\Datasets\\UnivariateARFF\\" + dataset + "/" +
                dataset + "_TEST.arff");
        inst.setClassIndex(inst.numAttributes() - 1);
        inst.addAll(inst2);

        ArrayList<EnhancedAbstractClusterer> clusterers = new ArrayList<>();
        for (int i = 0; i < 3; i++){
            KMeans c = new KMeans();
            c.setNumClusters(inst.numClasses());
            c.setSeed(i);
            clusterers.add(c);
        }

        SimpleVote k = new SimpleVote(clusterers);
        k.setNumClusters(inst.numClasses());
        k.setSeed(0);
        k.buildClusterer(inst);

        System.out.println(k.clusters.length);
        System.out.println(Arrays.toString(k.assignments));
        System.out.println(Arrays.toString(k.clusters));
        System.out.println(randIndex(k.assignments, inst));
    }
}
| 7,969 | 33.059829 | 130 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/BernoulliDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**
 * The Bernoulli distribution with success probability p: a binomial distribution restricted to
 * a single trial.
 */
public class BernoulliDistribution extends BinomialDistribution{

    /** Creates a Bernoulli distribution with the specified success probability. */
    public BernoulliDistribution(double p){
        super(1, p);
    }

    /** Creates a Bernoulli distribution with success probability p = 0.5. */
    public BernoulliDistribution(){
        this(0.5);
    }

    /** Overrides the binomial version so the number of trials stays fixed at 1. */
    public void setTrials(int n){
        super.setTrials(1);
    }

    /** Returns the maximum value of the density function; fixed at 1 here. */
    public double getMaxDensity(){
        return 1;
    }
}
| 1,482 | 33.488372 | 125 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/BetaDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**A Java implementation of the beta distribution with specified left and right parameters*/
/**
 * The beta distribution on [0, 1] with specified left and right shape parameters.
 */
public class BetaDistribution extends Distribution{
    //Parameters: left/right shape parameters and the log normalizing constant c
    private double left, right, c;

    /**General constructor: creates a beta distribution with specified left and right
    parameters*/
    public BetaDistribution(double a, double b){
        setParameters(a, b);
    }

    /**Default constructor: creates a beta distribution with left and right parameters
    equal to 1 (the uniform distribution)*/
    public BetaDistribution(){
        this(1, 1);
    }

    /**Sets the parameters, computes the normalizing constant c, and specifies the
    interval and partition. Non-positive parameters are corrected to 1.*/
    public void setParameters(double a, double b){
        //Correct parameters that are out of bounds
        if (a <= 0) a = 1;
        if (b <= 0) b = 1;
        //Assign parameters
        left = a; right = b;
        //Compute the log of the normalizing constant 1 / B(left, right)
        c = logGamma(left + right) - logGamma(left) - logGamma(right);
        //Specify the interval and partition
        super.setParameters(0, 1, 0.001, CONTINUOUS);
    }

    /**Sets the left parameter*/
    public void setLeft(double a){
        setParameters(a, right);
    }

    /**Sets the right parameter*/
    public void setRight(double b){
        setParameters(left, b);
    }

    /**Gets the left parameter*/
    public double getLeft(){
        return left;
    }

    /**Gets the right parameter*/
    public double getRight(){
        return right;
    }

    /**Defines the beta density function. The boundary cases at x = 0 and x = 1 are handled
    explicitly, where the density may be finite, zero or infinite depending on the shape
    parameters. (Idiom fix: short-circuit || and && replace the bitwise | and & operators;
    results are identical for boolean operands.)*/
    public double getDensity(double x){
        if ((x < 0) || (x > 1)) return 0;
        else if ((x == 0) && (left == 1)) return right;
        else if ((x == 0) && (left < 1)) return Double.POSITIVE_INFINITY;
        else if ((x == 0) && (left > 1)) return 0;
        else if ((x == 1) && (right == 1)) return left;
        else if ((x == 1) && (right < 1)) return Double.POSITIVE_INFINITY;
        else if ((x == 1) && (right > 1)) return 0;
        else return Math.exp(c + (left - 1) * Math.log(x) + (right - 1) * Math.log(1 - x));
    }

    /**Computes the maximum density, evaluating at the mode (approximated by 0.01 or 0.99 when
    the true mode sits on an unbounded boundary)*/
    public double getMaxDensity(){
        double mode;
        if (left < 1) mode = 0.01;
        else if (right <= 1) mode = 0.99;
        else mode = (left - 1) / (left + right - 2);
        return getDensity(mode);
    }

    /**Computes the mean in closed form*/
    public double getMean(){
        return left / (left + right);
    }

    /**Computes the variance in closed form*/
    public double getVariance(){
        return left * right / ((left + right) * (left + right) * (left + right + 1));
    }

    /**Computes the cumulative distribution function. The beta CDF is built into
    the superclass Distribution*/
    public double getCDF(double x){
        return betaCDF(x, left, right);
    }
}
| 3,306 | 29.62037 | 91 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/BinomialDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**The binomial distribution with specified parameters:
the number of trials and the probability of success*/
public class BinomialDistribution extends Distribution{
    //Parameters: number of trials n and success probability p
    private int trials;
    private double probability;

    /**General constructor: creates the binomial distribution with specified
    number of trials and probability of success.*/
    public BinomialDistribution(int n, double p){
        setParameters(n, p);
    }

    /**Default constructor: creates the binomial distribution with 10 trials
    and probability of success 1/2.*/
    public BinomialDistribution(){
        this(10, 0.5);
    }

    /**Set the parameters, silently correcting invalid values: n is forced to
    at least 1 and p is clamped to [0, 1]. The domain is {0, 1, ..., n}.*/
    public void setParameters(int n, double p){
        //Correct invalid parameters
        if (n < 1) n = 1;
        if (p < 0) p = 0; else if (p > 1) p = 1;
        trials = n; probability = p;
        super.setParameters(0, trials, 1, DISCRETE);
    }

    /**Set the number of trials.*/
    public void setTrials(int n){
        setParameters(n, probability);
    }

    /**Get the number of trials.*/
    public int getTrials(){
        return trials;
    }

    /**Set the probability of success.*/
    public void setProbability(double p){
        setParameters(trials, p);
    }

    /**Get the probability of success.*/
    public double getProbability(){
        return probability;
    }

    /**The binomial density f(k) = C(n, k) p^k (1 - p)^(n - k). The argument is
    rounded to the nearest integer; the degenerate cases p = 0 and p = 1 are
    handled separately to avoid raising 0 to the power 0.*/
    public double getDensity(double x){
        int k = (int)Math.rint(x);
        //Short-circuit || replaces the original bitwise | on booleans
        if (k < 0 || k > trials) return 0;
        if (probability == 0){
            if (k == 0) return 1;
            else return 0;
        }
        else if (probability == 1){
            if (k == trials) return 1;
            else return 0;
        }
        else return comb(trials, k) * Math.pow(probability, k) * Math.pow( 1 - probability, trials - k);
    }

    /**The maximum density, taken at the mode floor((n + 1) p), capped at n.*/
    public double getMaxDensity(){
        double mode = Math.min(Math.floor((trials + 1) * probability), trials);
        return getDensity(mode);
    }

    /**The mean n p in closed form.*/
    public double getMean(){
        return trials * probability;
    }

    /**The variance n p (1 - p) in closed form.*/
    public double getVariance(){
        return trials * probability * (1 - probability);
    }

    /**The CDF expressed via the beta CDF (regularized incomplete beta
    function) supplied by the superclass: P(X &lt;= x) = 1 - I_p(x + 1, n - x).*/
    public double getCDF(double x){
        if (x < 0) return 0;
        else if (x >= trials) return 1;
        else return 1 - betaCDF(probability, x + 1, trials - x);
    }

    /**Simulate the binomial distribution as the number of successes in n
    independent Bernoulli trials.*/
    public double simulate(){
        int successes = 0;
        for (int i = 1; i <= trials; i++){
            if (Math.random() < probability) successes++;
        }
        return successes;
    }
}
| 3,189 | 26.982456 | 98 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/BinomialRandomNDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**The binomial distribution with a random number of trials*/
public class BinomialRandomNDistribution extends Distribution{
    //Variables
    //probability: the fixed success probability p;
    //sum: scratch accumulator reused by getDensity (not part of the state proper)
    double probability, sum;
    //dist: the distribution of the (random) number of trials; assumed discrete
    //with a finite domain -- TODO confirm against the Distribution/Domain contract
    Distribution dist;
    /**This general constructor creates a new randomized binomial distribution with a specified probability of success and a specified distribution for
    the number of trials*/
    public BinomialRandomNDistribution(Distribution d, double p){
        setParameters(d, p);
    }
    /**Special constructor: creates a new randomized binomial distribution with a specified probability of success and the uniform distribution on {1, 2, 3, 4, 5, 6} for the number of trials*/
    public BinomialRandomNDistribution(double p){
        this(new DiscreteUniformDistribution(1, 6, 1), p);
    }
    /**This default constructor: creates a new randomized binomial distribution with probability of success 0.5 and the uniform distribution on {1, 2, 3, 4, 5, 6} for the number of trials*/
    public BinomialRandomNDistribution(){
        this(new DiscreteUniformDistribution(1, 6, 1), 0.5);
    }
    /**Set the parameters: the distribution for the number of trials and the
    probability of success. The domain runs from 0 to the largest possible
    number of trials.*/
    public void setParameters(Distribution d, double p){
        dist = d;
        probability = p;
        super.setParameters(0, dist.getDomain().getUpperValue(), 1, DISCRETE);
    }
    /**Density function, obtained by conditioning on the number of trials N:
    P(X = k) = sum over n of P(N = n) C(n, k) p^k (1 - p)^(n - k).
    The degenerate cases p = 0 (no successes ever) and p = 1 (successes equal
    trials, so X has the same distribution as N) are handled separately.*/
    public double getDensity(double x){
        int k = (int)Math.rint(x);
        double trials;
        if (probability == 0){
            if (k == 0) return 1;
            else return 0;
        }
        else if (probability == 1) return dist.getDensity(k);
        else{
            sum = 0;
            //Sum the conditional binomial density weighted by the density of N
            for(int i = 0; i < dist.getDomain().getSize(); i++){
                trials = dist.getDomain().getValue(i);
                sum = sum + dist.getDensity(trials) *
                    comb(trials, k) * Math.pow(probability, k) * Math.pow(1 - probability, trials - k);
            }
            return sum;
        }
    }
    /**Mean: E(X) = E(N) p, by conditioning on N.*/
    public double getMean(){
        return dist.getMean() * probability;
    }
    /**Variance: var(X) = E(N) p (1 - p) + var(N) p^2, by conditioning on N.*/
    public double getVariance(){
        return dist.getMean() * probability * (1 - probability) + dist.getVariance() * probability * probability;
    }
    /**Simulate a value: first draw the number of trials from dist, then count
    successes in that many Bernoulli trials.*/
    public double simulate(){
        int trials = (int)dist.simulate();
        int successes = 0;
        for (int i = 1; i <= trials; i++){
            if (Math.random() < probability) successes++;
        }
        return successes;
    }
}
| 2,970 | 32.382022 | 189 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/BirthdayDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the distribution of the number of distinct sample values
when a sample of a specified size is chosen with replacement from a finite
population of a specified size*/
public class BirthdayDistribution extends Distribution{
    private int popSize, sampleSize;
    //prob[j][m]: probability of m distinct values after j draws, built recursively
    private double[][] prob;

    /**This general constructor creates a new birthday distribution with
    a specified population size and sample size.*/
    public BirthdayDistribution(int n, int k){
        setParameters(n, k);
    }

    /**This default constructor creates a new birthday distribution with
    population size 365 and sample size 20.*/
    public BirthdayDistribution(){
        this(365, 20);
    }

    /**This method sets the parameters: the population size and the sample size
    (invalid values are replaced by 1). The probabilities are computed by the
    recursion: after one more draw, the number of distinct values either stays
    at m (the draw repeats one of the m seen values, probability m/n) or grows
    from m - 1 (probability (n - m + 1)/n).*/
    public void setParameters(int n, int k){
        //Correct for invalid parameters
        if (n < 1) n = 1;
        if (k < 1) k = 1;
        int upperIndex;
        popSize = n; sampleSize = k;
        super.setParameters(1, Math.min(popSize, sampleSize), 1, DISCRETE);
        prob = new double[sampleSize + 1][popSize + 1];
        prob[0][0] = 1; prob[1][1] = 1;
        for (int j = 1; j < sampleSize; j++){
            //After j draws there can be at most min(j, popSize) distinct values
            if (j < popSize) upperIndex = j + 1; else upperIndex = popSize;
            for (int m = 1; m <= upperIndex; m++){
                prob[j+1][m] = prob[j][m] * ((double)m / popSize)
                    + prob[j][m - 1] * ((double)(popSize - m + 1) / popSize);
            }
        }
    }

    /**The density function: the probability of the given number of distinct values.*/
    public double getDensity(double x){
        int m = (int)(Math.rint(x));
        return prob[sampleSize][m];
    }

    /**The mean in closed form: n (1 - (1 - 1/n)^k).*/
    public double getMean(){
        return popSize * (1 - Math.pow(1 - 1.0 / popSize, sampleSize));
    }

    /**The variance in closed form.*/
    public double getVariance(){
        return popSize * (popSize - 1) * Math.pow(1 - 2.0 / popSize, sampleSize)
            + popSize * Math.pow(1 - 1.0 / popSize, sampleSize)
            - popSize * popSize * Math.pow(1 - 1.0 / popSize, 2 * sampleSize);
    }

    /**This method returns the population size.*/
    public double getPopSize(){
        return popSize;
    }

    /**This method sets the population size.*/
    public void setPopSize(int n){
        setParameters(n, sampleSize);
    }

    /**This method returns the sample size.*/
    public double getSampleSize(){
        return sampleSize;
    }

    /**This method sets the sample size.*/
    public void setSampleSize(int k){
        setParameters(popSize, k);
    }

    /**Simulate a value from the distribution, as the number of distinct values
    in a sample drawn with replacement.*/
    public double simulate(){
        int[] count = new int[popSize];
        double distinct = 0;
        for (int i = 1; i <= sampleSize; i++){
            int j = (int)(popSize * Math.random());
            if (count[j] == 0) distinct++;
            //Bug fix: the original "count[j] = count[j]++" was a no-op (the
            //post-increment's result was overwritten by the assignment), so a
            //cell was never marked as seen and every draw looked distinct.
            count[j]++;
        }
        return distinct;
    }
}
| 3,497 | 31.091743 | 77 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/CauchyDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the Cauchy distribution*/
public class CauchyDistribution extends StudentDistribution{
    /**Default constructor: the Cauchy distribution is the Student t
    distribution with a single degree of freedom.*/
    public CauchyDistribution(){
        super(1);
    }
    /**The degrees of freedom are pinned at 1, regardless of the value requested.*/
    public void setDegrees(int n){
        super.setDegrees(1);
    }
    /**The Cauchy CDF in closed form; overrides the corresponding method in
    StudentDistribution.*/
    public double getCDF(double x){
        double arc = Math.atan(x) / Math.PI;
        return 0.5 + arc;
    }
    /**The Cauchy quantile function in closed form; overrides the corresponding
    method in StudentDistribution.*/
    public double getQuantile(double p){
        double angle = Math.PI * (p - 0.5);
        return Math.tan(angle);
    }
}
| 1,421 | 32.857143 | 99 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/ChiSquareDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class defines the chi-square distribution with a specifed degrees of
freedom*/
public class ChiSquareDistribution extends GammaDistribution{
    int degrees;
    /**General constructor: creates a new chi-square distribution with a
    specified degrees of freedom parameter.*/
    public ChiSquareDistribution(int n){
        setDegrees(n);
    }
    /**Default constructor: one degree of freedom.*/
    public ChiSquareDistribution(){
        this(1);
    }
    /**Set the degrees of freedom (invalid values are replaced by 1) and pass
    the equivalent gamma parameters, shape n/2 and scale 2, to the superclass.*/
    public void setDegrees(int n){
        if (n <= 0) n = 1;  //correct invalid parameter
        degrees = n;
        super.setParameters(0.5 * degrees, 2);
    }
    /**Return the degrees of freedom.*/
    public int getDegrees(){
        return degrees;
    }
    /**Simulate a value from the distribution as the sum of squares of
    independent standard normal variables, each produced by the Box-Muller
    transform.*/
    public double simulate(){
        double total = 0;
        for (int i = 1; i <= degrees; i++){
            double radius = Math.sqrt(-2 * Math.log(Math.random()));
            double angle = 2 * Math.PI * Math.random();
            double normal = radius * Math.cos(angle);
            total += normal * normal;
        }
        return total;
    }
}
| 1,821 | 28.868852 | 78 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/CircleDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the crcle distribution with parameter a. This is the distribution of X
and Y when (X, Y) has the uniform distribution on a circular region with a specified radius.*/
public class CircleDistribution extends Distribution{
    private double radius;

    /**This general constructor creates a new circle distribution with a specified radius.*/
    public CircleDistribution(double r){
        setRadius(r);
    }

    /**This special constructor creates a new circle distribution with radius 1.*/
    public CircleDistribution(){
        this(1);
    }

    /**This method sets the radius parameter; non-positive values are replaced by 1.*/
    public void setRadius(double r){
        if (r <= 0) r = 1;
        radius = r;
        super.setParameters(-radius, radius, 0.02 * radius, CONTINUOUS);
    }

    /**The density function: a semicircle shape on [-r, r].
    Short-circuit && replaces the original bitwise & in the range test.*/
    public double getDensity(double x){
        if (-radius <= x && x <= radius)
            return 2 * Math.sqrt(radius * radius - x * x) / (Math.PI * radius * radius);
        else return 0;
    }

    /**The density is largest at the center, x = 0.*/
    public double getMaxDensity(){
        return getDensity(0);
    }

    /**The mean is 0 by symmetry.*/
    public double getMean(){
        return 0;
    }

    /**The variance in closed form: r^2 / 4.*/
    public double getVariance(){
        return radius * radius / 4;
    }

    /**The median is 0 by symmetry.*/
    public double getMedian(){
        return 0;
    }

    /**This method returns the radius parameter.*/
    public double getRadius(){
        return radius;
    }

    /**Simulate a value: draw a point uniformly from the disk (radius as the
    max of two uniforms, angle uniform) and return its x-coordinate.*/
    public double simulate(){
        double u = radius * Math.random();
        double v = radius * Math.random();
        double r = Math.max(u, v);
        double theta = 2 * Math.PI * Math.random();
        return r * Math.cos(theta);
    }

    /**The cumulative distribution function in closed form.
    NOTE(review): only valid for x in [-r, r]; outside that range asin yields NaN.*/
    public double getCDF(double x){
        return 0.5 + Math.asin(x / radius) / Math.PI
            + x * Math.sqrt(1 - x * x / (radius * radius)) / (Math.PI * radius);
    }
}
| 2,698 | 29.670455 | 94 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/ContinuousUniformDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the uniform distribution on a specified interval.*/
public class ContinuousUniformDistribution extends Distribution{
    private double minValue, maxValue;

    /**This general constructor creates a new uniform distribution on a specified interval.*/
    public ContinuousUniformDistribution(double a, double b){
        setParameters(a, b);
    }

    /**This default constructor creates a new uniform distribution on (0, 1).*/
    public ContinuousUniformDistribution(){
        this(0, 1);
    }

    /**This method sets the parameters: the minimum and maximum values of the
    interval. NOTE(review): a &lt; b is not validated here -- confirm callers
    always pass a proper interval.*/
    public void setParameters(double a, double b){
        minValue = a; maxValue = b;
        double step = 0.01 * (maxValue - minValue);
        super.setParameters(minValue, maxValue, step, CONTINUOUS);
    }

    /**The density function: constant 1 / (b - a) on the interval.
    Short-circuit && replaces the original bitwise & in the range test.*/
    public double getDensity(double x){
        if (minValue <= x && x <= maxValue) return 1 / (maxValue - minValue);
        else return 0;
    }

    /**The maximum value of the density function, 1 / (b - a).*/
    public double getMaxDensity(){
        return 1 / (maxValue - minValue);
    }

    /**The mean: the midpoint of the interval.*/
    public double getMean(){
        return (minValue + maxValue) / 2;
    }

    /**The variance in closed form: (b - a)^2 / 12.*/
    public double getVariance(){
        return (maxValue - minValue) * (maxValue - minValue) / 12;
    }

    /**The cumulative distribution function: linear on the interval.*/
    public double getCDF(double x){
        if (x < minValue) return 0;
        else if (x >= maxValue) return 1;
        else return (x - minValue) / (maxValue - minValue);
    }

    /**The quantile function; p is clamped to [0, 1].*/
    public double getQuantile(double p){
        if (p < 0) p = 0; else if (p > 1) p = 1;
        return minValue + (maxValue - minValue) * p;
    }

    /**This method gets the minimum value.*/
    public double getMinValue(){
        return minValue;
    }

    /**This method returns the maximum value.*/
    public double getMaxValue(){
        return maxValue;
    }

    /**Simulate a value from the distribution.*/
    public double simulate(){
        return minValue + Math.random() * (maxValue - minValue);
    }
}
| 2,843 | 30.955056 | 90 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/Convolution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class creates the n-fold convolution of a given distribution*/
public class Convolution extends Distribution{
    //distribution: the base distribution; power: the number of i.i.d. copies summed
    private Distribution distribution;
    private int power;
    //pdf[k][j]: density of the (k + 1)-fold convolution at the j'th domain point
    private double[][] pdf;
    /**This general constructor creates a new convolution distribution corresponding
    to a specified distribution and convolution power.*/
    public Convolution(Distribution d, int n){
        setParameters(d, n);
    }
    /**This default constructor creates a new convolution distribution corresponding to the
    uniform distribution on (0,1), with convolution power 5.*/
    public Convolution(){
        this(new ContinuousUniformDistribution(0, 1), 5);
    }
    /**This method sets the parameters: the distribution and convolution power.
    The density values of each partial convolution are computed iteratively and
    stored: the k-fold density is the discrete convolution of the (k-1)-fold
    density with the base density over the shared grid of domain points.*/
    public void setParameters(Distribution d, int n){
        //Correct for invalid parameters
        if (n < 1) n = 1;
        distribution = d; power = n;
        Domain domain = distribution.getDomain();
        double l = domain.getLowerValue(), u = domain.getUpperValue(), w = domain.getWidth(), p, dx;
        int t = distribution.getType();
        //NOTE(review): dx is computed here but never used below
        if (t == DISCRETE) dx = 1; else dx = w;
        //The n-fold sum ranges over [n*l, n*u] with the same grid width
        super.setParameters(power * l, power * u, w, t);
        int m = domain.getSize();
        pdf = new double[power][];
        //The (k + 1)-fold convolution has (k + 1) * m - k grid points
        for (int k = 0; k < n; k++) pdf[k] = new double[(k + 1) * m - k];
        //Base case: the density of the distribution itself
        for (int j = 0; j < m; j++) pdf[0][j] = distribution.getDensity(domain.getValue(j));
        //Inductive step: convolve the previous level with the base density;
        //the i-range keeps both indices inside their respective arrays
        for (int k = 1; k < n; k++){
            for (int j = 0; j < (k + 1) * m - k; j++){
                p = 0;
                for (int i = Math.max(0, j - m + 1); i < Math.min(j+1, k * m - k + 1); i++){
                    p = p + pdf[k - 1][i] * pdf[0][j - i];
                }
                pdf[k][j] = p;
            }
        }
    }
    /**Density function: look up the precomputed value for the domain index of x.*/
    public double getDensity(double x){
        return pdf[power - 1][getDomain().getIndex(x)];
    }
    /**Mean: n times the mean of the base distribution.*/
    public double getMean(){
        return power * distribution.getMean();
    }
    /**Variance: n times the variance of the base distribution (the summands
    are independent).*/
    public double getVariance(){
        return power * distribution.getVariance();
    }
    /**Simulate a value from the distribution as the sum of n independent
    values from the base distribution.*/
    public double simulate(){
        double sum = 0;
        for (int i = 0; i < power; i++) sum = sum + distribution.simulate();
        return sum;
    }
    /**This method sets the convolution power.*/
    public void setPower(int n){
        setParameters(distribution, n);
    }
    /**This method returns the convolution power.*/
    public int getPower(){
        return power;
    }
    /**This method sets the distribution.*/
    public void setDistribution(Distribution d){
        setParameters(d, power);
    }
    /**This method returns the distribution.*/
    public Distribution getDistribution(){
        return distribution;
    }
}
| 3,307 | 29.915888 | 96 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/CouponDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/** This class models the distribution of the sample size needed to get a specified number of distinct sample values when sampling with replacement from a finite population of a specified size*/
public class CouponDistribution extends Distribution{
    int popSize, distinctValues, upperValue;
    //prob[i][n]: probability of n distinct values after i draws
    double[][] prob;

    /**This general constructor creates a new coupon distribution with specified
    population size and distinct sample size.*/
    public CouponDistribution(int m, int k){
        setParameters(m, k);
    }

    /**This default constructor creates a new coupon distribution with population
    size 10 and distinct sample size 10.*/
    public CouponDistribution(){
        this(10, 10);
    }

    /**This method sets the parameters: the population size and the number of
    distinct values needed (invalid values corrected). The domain is truncated
    at mean + 4 standard deviations; the occupancy probabilities are computed
    by the same recursion used in the birthday problem.*/
    public void setParameters(int m, int k){
        int upperIndex, maxIndex;
        //Correct for invalid parameters
        if (m < 1) m = 1;
        if (k < 1) k = 1; else if (k > m) k = m;
        popSize = m; distinctValues = k;
        upperValue = (int)Math.ceil(getMean() + 4 * getSD());
        super.setParameters(distinctValues, upperValue, 1, DISCRETE);
        prob = new double[upperValue + 1][popSize + 1];
        prob[0][0] = 1; prob[1][1] = 1;
        for (int i = 1; i < upperValue; i++){
            if (i < popSize) upperIndex = i + 1; else upperIndex = popSize;
            for (int n = 1; n <= upperIndex; n++){
                prob[i + 1][n] = prob[i][n] * ((double)n / popSize) + prob[i][n - 1] * ((double)(popSize - n + 1) / popSize);
            }
        }
    }

    /**Density function: the k'th draw completes the collection exactly when
    k - 1 draws produced distinctValues - 1 distinct values and the k'th draw
    is one of the popSize - distinctValues + 1 unseen values.
    Short-circuit || replaces the original bitwise | in the range test.*/
    public double getDensity(double x){
        int k = (int)(Math.rint(x));
        if (k < distinctValues || k > upperValue) return 0;
        else return ((double)(popSize - distinctValues + 1) / popSize) * prob[k - 1][distinctValues - 1];
    }

    /**Mean: a sum of expected waiting times m / (m - i + 1), one per new value.*/
    public double getMean(){
        double sum = 0;
        for (int i = 1; i <= distinctValues; i++) sum = sum + (double)popSize / (popSize - i + 1);
        return sum;
    }

    /**Variance: a sum of geometric waiting-time variances.*/
    public double getVariance(){
        double sum = 0;
        for (int i = 1; i <= distinctValues; i++) sum = sum + (double)(popSize * (i - 1)) / ((popSize - i + 1) * (popSize - i + 1));
        return sum;
    }

    /**Get the population size.*/
    public double getPopSize(){
        return popSize;
    }

    /**Set the population size.*/
    public void setPopSize(int m){
        setParameters(m, distinctValues);
    }

    /**Get the number of distinct values.*/
    public double getDistinctValues(){
        return distinctValues;
    }

    /**Set the number of distinct values.*/
    public void setDistinctValues(int k){
        setParameters(popSize, k);
    }

    /**Simulate a value from the distribution: draw with replacement until the
    required number of distinct values has been seen, and return the number of
    draws. Bug fixes relative to the original: (1) "cellCount[i] = cellCount[i]++"
    was a no-op (the post-increment's result was overwritten), so every draw
    counted as a new distinct value; (2) the loop condition used &lt;=, which kept
    sampling past the target (and, with fix 1 applied, would loop forever when
    distinctValues == popSize).*/
    public double simulate(){
        int[] cellCount = new int[popSize];
        int occupiedCells = 0;
        int ballCount = 0;
        while (occupiedCells < distinctValues){
            ballCount++;
            int ballIndex = (int)(popSize * Math.random());
            if (cellCount[ballIndex] == 0) occupiedCells++;
            cellCount[ballIndex]++;
        }
        return ballCount;
    }
}
| 3,643 | 32.127273 | 194 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/Data.java | package statistics.distributions;
import java.util.*;
/**A simple implementation of a data distribution*/
public class Data{
    //Variables
    private Vector values = new Vector();   //the data values, kept in increasing order
    private int size;                       //number of values added since the last reset
    //value: the most recently added value; mean/meanSquare: running moments;
    //mode: currently unused, retained for interface stability
    private double value, mean, meanSquare, mode;
    private String name;

    /**This general constructor creates a new data set with a prescribed name.*/
    public Data(String n){
        setName(n);
    }

    /**This default constructor creates a new data set with the name "X".*/
    public Data(){
        this("X");
    }

    /**This method adds a new number to the data set and re-computes the running
    mean and mean square. The value is inserted so that the vector stays sorted
    in increasing order, which makes the minimum, maximum, and order statistics
    available by index.
    Bug fixes relative to the original: the size counter is now incremented (it
    never was, so the running-mean update divided by zero), and the sorted-insert
    scan uses a correct comparison (the original tested "x >= b" instead of
    "x <= b" and could insert the same value at several positions).*/
    public void setValue(double x){
        value = x;
        size++;
        //Find the first position whose element exceeds x, and insert there
        int i = 0;
        while (i < values.size() && ((Double)values.elementAt(i)).doubleValue() <= x) i++;
        values.insertElementAt(new Double(x), i);
        //Re-compute the running mean and mean square; when size == 1 the old
        //moments get weight 0, so no explicit re-initialization after reset is needed
        mean = ((double)(size - 1) / size) * mean + value / size;
        meanSquare = ((double)(size - 1) / size) * meanSquare + value * value / size;
    }

    /**Get the current (most recently added) value of the data set.*/
    public double getValue(){
        return value;
    }

    /**This method returns the i'th order statistic (values are kept sorted).*/
    public double getValue(int i){
        return ((Double)values.elementAt(i)).doubleValue();
    }

    /**Get the mean.*/
    public double getMean(){
        return mean;
    }

    /**Get the population variance (clamped at 0 against rounding error).*/
    public double getPVariance(){
        double var = meanSquare - mean * mean;
        if (var < 0) var = 0;
        return var;
    }

    /**Get the population standard deviation.*/
    public double getPSD(){
        return Math.sqrt(getPVariance());
    }

    /**Get the sample variance of the data set.
    NOTE(review): undefined (division by zero) when fewer than 2 values are present.*/
    public double getVariance(){
        return ((double)size / (size - 1)) * getPVariance();
    }

    /**Get the sample standard deviation of the data set.*/
    public double getSD(){
        return Math.sqrt(getVariance());
    }

    /**Get the minimum value of the data set (first element of the sorted vector).*/
    public double getMinValue(){
        return getValue(0);
    }

    /**Get the maximum value of the data set (last element of the sorted vector).*/
    public double getMaxValue(){
        return getValue(size - 1);
    }

    /**Reset the data set.*/
    public void reset(){
        values.removeAllElements();
        size = 0;
    }

    /**Get the number of points in the data set.*/
    public int getSize(){
        return size;
    }

    /**Set the name of the data set.*/
    public void setName(String name){
        this.name = name;
    }

    /**Get the name of the data set.*/
    public String getName(){
        return name;
    }
}
| 2,631 | 22.711712 | 87 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/DieDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**Distribution for a standard 6-sided die*/
public class DieDistribution extends FiniteDistribution{
    public final static int FAIR = 0, FLAT16 = 1, FLAT25 = 2, FLAT34 = 3, LEFT = 4, RIGHT = 5;
    /**General constructor: creates a new die distribution with the given face probabilities.*/
    public DieDistribution(double[] p){
        super(1, 6, 1, p);
    }
    /**Special constructor: creates a new die distribution of one of the named special types.*/
    public DieDistribution(int n){
        super(1, 6, 1);
        setProbabilities(n);
    }
    /**Default constructor: creates a new fair die distribution.*/
    public DieDistribution(){
        this(FAIR);
    }
    /**Assign the face probabilities for one of the named special types; any
    unrecognized type falls back to the fair die.*/
    public void setProbabilities(int n){
        double[] faces;
        switch (n){
            case FLAT16:
                faces = new double[] {1.0 / 4, 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 4};
                break;
            case FLAT25:
                faces = new double[] {1.0 / 8, 1.0 / 4, 1.0 / 8, 1.0 / 8, 1.0 / 4, 1.0 / 8};
                break;
            case FLAT34:
                faces = new double[] {1.0 / 8, 1.0 / 8, 1.0 / 4, 1.0 / 4, 1.0 / 8, 1.0 / 8};
                break;
            case LEFT:
                faces = new double[] {1.0 / 21, 2.0 / 21, 3.0 / 21, 4.0 / 21, 5.0 / 21, 6.0 / 21};
                break;
            case RIGHT:
                faces = new double[] {6.0 / 21, 5.0 / 21, 4.0 / 21, 3.0 / 21, 2.0 / 21, 1.0 / 21};
                break;
            default:
                faces = new double[] {1.0 / 6, 1.0 / 6, 1.0 / 6, 1.0 / 6, 1.0 / 6, 1.0 / 6};
        }
        setProbabilities(faces);
    }
}
| 2,132 | 37.781818 | 95 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/DiscreteArcsineDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the discrete arcsine distribution that governs the last zero in
a symmetric random walk on an interval.*/
public class DiscreteArcsineDistribution extends Distribution{
    //Parameters
    private int parameter;
    /**General constructor: creates a discrete arcsine distribution for a walk
    with the specified number of steps.*/
    public DiscreteArcsineDistribution(int n){
        setParameter(n);
    }
    /**Default constructor: a walk of 10 steps.*/
    public DiscreteArcsineDistribution(){
        this(10);
    }
    /**Set the number of steps; the domain consists of the even integers
    0, 2, ..., n (the walk can only be at 0 at even times).*/
    public void setParameter(int n){
        parameter = n;
        setParameters(0, parameter, 2, DISCRETE);
    }
    /**The density function, in terms of binomial coefficients.*/
    public double getDensity(double x){
        int k = (int)x;
        return comb(k, k / 2) * comb(parameter - k, (parameter - k) / 2) / Math.pow(2, parameter);
    }
    /**The density is largest at 0.*/
    public double getMaxDensity(){
        return getDensity(0);
    }
    /**Get the number of steps.*/
    public int getParameter(){
        return parameter;
    }
    /**Simulate a value by running the symmetric random walk and recording the
    time of the last visit to 0.*/
    public double simulate(){
        int position = 0, lastZero = 0;
        for (int time = 1; time <= parameter; time++){
            position += (Math.random() < 0.5) ? 1 : -1;
            if (position == 0) lastZero = time;
        }
        return lastZero;
    }
}
| 2,281 | 31.140845 | 92 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/DiscreteUniformDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/** The discrete uniform distribution on a finite set of equally spaced points. */
public class DiscreteUniformDistribution extends Distribution{
    // NOTE(review): never read or written in this class — possibly vestigial;
    // confirm no external code touches it before removing
    double values;

    /** General constructor: the uniform distribution on {a, a+w, ..., b}. */
    public DiscreteUniformDistribution(double a, double b, double w){
        setParameters(a, b, w);
    }

    /** Default constructor: the fair-die distribution on {1, ..., 6}. */
    public DiscreteUniformDistribution(){
        this(1, 6, 1);
    }

    /** Sets the endpoints and step width; the type is always DISCRETE. */
    public void setParameters(double a, double b, double w){
        super.setParameters(a, b, w, DISCRETE);
    }

    /** Density: the constant 1/size on the domain, 0 elsewhere. */
    public double getDensity(double x){
        Domain d = getDomain();
        boolean inside = (d.getLowerValue() <= x) & (x <= d.getUpperValue());
        return inside ? 1.0 / d.getSize() : 0;
    }

    /** Every point carries the maximum density 1/size. */
    public double getMaxDensity(){
        return 1.0 / getDomain().getSize();
    }

    /** Simulates by scaling a uniform [0, 1) variable onto the domain's extent. */
    public double simulate(){
        Domain d = getDomain();
        return d.getLowerValue() + Math.random() * d.getSize() * d.getWidth();
    }
}
| 1,541 | 31.125 | 110 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/Distribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
import java.util.*;
import statistics.distributions.Domain;
import statistics.distributions.MersenneTwister;
/** Distribution: an abstract implementation of a real probability distribution.
Concrete subclasses must supply getDensity; every other quantity (max density,
mean, variance, CDF, quantile, simulation) falls back to a numerical
approximation over a finite default domain, and should be overridden whenever a
closed form is known. Also hosts the shared special-function helpers
(logGamma, incomplete gamma/beta CDFs) used across the package. */
public abstract class Distribution{
    // Constants: distribution type codes
    public final static int DISCRETE = 0, CONTINUOUS = 1, MIXED = 2;
    // Variables
    private int type;
    // Objects: finite default domain used by the approximate computations
    private Domain domain;
    // A MersenneTwister was tried here and rejected ("Dont trust this");
    // the shared source is a plain java.util.Random. Note it is STATIC:
    // one stream shared by all distribution instances.
    public static Random RNG= new Random();

    /** Seeds the random source shared by ALL distributions (RNG is static). */
    public static void setDistributionSeed(int r){
        RNG.setSeed(r);
    }

    /** Instance-level alias for seeding the same shared static random source. */
    public void setRandomSeed(int r){
        RNG.setSeed(r);
    }

    /** The getDensity method is abstract and must be overridden for any
    specific distribution. */
    public abstract double getDensity(double x);

    /** Defines a partition of an interval that acts as a default domain for the
    distribution, for purposes of data collection and default computations.
    For a discrete distribution, the specified parameters define the midpoints of
    the partition (typically the values on which the distribution is defined,
    truncated if the true set of values is infinite). For a continuous
    distribution, the parameters define the boundary points of the interval on
    which the distribution is defined (truncated if the true interval is infinite).
    @param a lower endpoint/value  @param b upper endpoint/value
    @param w partition width       @param t type code (DISCRETE, CONTINUOUS, MIXED) */
    public void setParameters(double a, double b, double w, int t){
        if (t < 0) t = 0; else if (t > 2) t = 2;
        type = t;
        // For a discrete domain, shift by half a width so a..b become midpoints
        if (type == DISCRETE) domain = new Domain(a - 0.5 * w, b + 0.5 * w, w);
        else domain = new Domain(a, b, w);
    }

    /** Returns the domain of the distribution. */
    public Domain getDomain(){
        return domain;
    }

    /** Returns the type of the distribution (discrete or continuous). */
    public final int getType(){
        return type;
    }

    /** Returns the largest (finite) value of the density over the finite set of
    domain values; infinite densities are skipped. Override when the maximum is
    known in closed form. */
    public double getMaxDensity(){
        double max = 0, d;
        for (int i = 0; i < domain.getSize(); i++){
            d = getDensity(domain.getValue(i));
            // '&' (not '&&') is deliberate-looking but harmless on booleans
            if (d > max & d < Double.POSITIVE_INFINITY) max = d;
        }
        return max;
    }

    /** Returns an approximate mean: a Riemann/probability-weighted sum over the
    domain values (weight 1 for discrete, the cell width for continuous).
    Override when the mean is known in closed form. */
    public double getMean(){
        double sum = 0, x, w;
        if (type == DISCRETE) w = 1; else w = domain.getWidth();
        for (int i = 0; i < domain.getSize(); i++){
            x = domain.getValue(i);
            sum = sum + x * getDensity(x) * w;
        }
        return sum;
    }

    /** Returns an approximate variance, using getMean() as the center.
    Override when the variance is known in closed form. */
    public double getVariance(){
        double sum = 0, mu = getMean(), x, w;
        if (type == DISCRETE) w = 1; else w = domain.getWidth();
        for (int i = 0; i < domain.getSize(); i++){
            x = domain.getValue(i);
            sum = sum + (x - mu) * (x - mu) * getDensity(x) * w;
        }
        return sum;
    }

    /** Returns the standard deviation, as the square root of the variance. */
    public double getSD(){
        return Math.sqrt(getVariance());
    }

    /** Returns an approximate CDF by summing density over domain cells up to x,
    plus (for continuous types) a correction for the partial last cell.
    Override when the CDF is known in closed form. */
    public double getCDF(double x){
        double sum = 0, w, y;
        if (type == DISCRETE) w = 1; else w = domain.getWidth();
        int j = domain.getIndex(x);
        if (j < 0) return 0;
        else if (j >= domain.getSize()) return 1;
        else{
            for(int i = 0; i <= j; i++) sum = sum + getDensity(domain.getValue(i)) * w;
            if (type == CONTINUOUS){
                // midpoint-rule correction on the partial cell [y, x]
                y = domain.getValue(j) - 0.5 * w;
                sum = sum + getDensity((x + y) / 2) * (x - y);
            }
        }
        return sum;
    }

    /** Computes an approximate quantile by accumulating density until the
    cumulative sum reaches p. Override when the quantile function is known in
    closed form. */
    public double getQuantile(double p){
        double sum = 0, x, w;
        if (type == DISCRETE) w = 1; else w = domain.getWidth();
        if (p <= 0) return domain.getLowerValue();
        else if (p >= 1) return domain.getUpperValue();
        else{
            int n = domain.getSize(), i = 0;
            x = domain.getValue(i);
            sum = getDensity(x) * w;
            // NOTE(review): the guard allows i to reach n, so x may step one
            // cell past the last midpoint when the accumulated mass stays < p
            while ((sum < p) & (i < n)){
                i++;
                x = domain.getValue(i);
                sum = sum + getDensity(x) * w;
            }
            return x;
        }
    }

    /** Default simulation: a random quantile (inverse-CDF method using the
    shared RNG). Override when a better simulation method is known. */
    public double simulate(){
        return getQuantile(RNG.nextDouble());
    }

    /** Default approximate median: the 0.5 quantile. Override when the median
    is known in closed form. */
    public double getMedian(){
        return getQuantile(0.5);
    }

    /** Computes the failure (hazard) rate function f(x) / (1 - F(x)). */
    public double getFailureRate(double x){
        return getDensity(x) / (1 - getCDF(x));
    }

    //Class methods
    /** Computes the number of permutations of k objects chosen from a
    population of n objects (the falling factorial n(n-1)...(n-k+1)). */
    public static double perm(double n, int k){
        double prod;
        // '|' (not '||') evaluates both operands; same result here
        if (k > n | k < 0) return 0;
        else{
            prod = 1;
            for (int i = 1; i <= k; i++) prod = prod * (n - i + 1);
            return prod;
        }
    }

    /** Computes k!, the number of permutations of k objects. */
    public static double factorial(int k){
        return perm(k, k);
    }

    /** Computes the number of combinations of k objects chosen from a
    population of n objects. */
    public static double comb(double n, int k){
        return perm(n, k) / factorial(k);
    }

    /** Computes the log of the gamma function via a 6-term Lanczos-style series
    (the coefficients match the classic Numerical Recipes gammln routine). */
    public static double logGamma(double x){
        double coef[] = {76.18009173, -86.50532033, 24.01409822, -1.231739516, 0.00120858003, -0.00000536382};
        // NOTE(review): the local 'logGamma' variable is declared but never used
        double step = 2.50662827465, fpf = 5.5, t, tmp, ser, logGamma;
        t = x - 1;
        tmp = t + fpf;
        tmp = (t + 0.5) * Math.log(tmp) - tmp;
        ser = 1;
        for (int i = 1; i <= 6; i++){
            t = t + 1;
            ser = ser + coef[i - 1] / t;
        }
        return tmp + Math.log(step * ser);
    }

    /** Computes the gamma function as exp(logGamma). */
    public static double gamma(double x){
        return Math.exp(logGamma(x));
    }

    /** Computes the CDF of the gamma distribution with shape parameter a and
    scale parameter 1, choosing series or continued-fraction evaluation by the
    standard x < a + 1 split. */
    public static double gammaCDF(double x, double a){
        if (x <= 0) return 0;
        else if (x < a + 1) return gammaSeries(x, a);
        else return 1 - gammaCF(x, a);
    }

    /** Series expansion of the (regularized) incomplete gamma function, used by
    gammaCDF for x < a + 1. Iterates to a fixed tolerance or maxit terms. */
    private static double gammaSeries(double x, double a){
        //Constants
        int maxit = 100;
        double eps = 0.0000003;
        //Variables
        double sum = 1.0 / a, ap = a, gln = logGamma(a), del = sum;
        for (int n = 1; n <= maxit; n++){
            ap++;
            del = del * x / ap;
            sum = sum + del;
            if (Math.abs(del) < Math.abs(sum) * eps) break;
        }
        return sum * Math.exp(-x + a * Math.log(x) - gln);
    }

    /** Continued-fraction expansion of the complementary incomplete gamma
    function, used by gammaCDF for x >= a + 1. */
    private static double gammaCF(double x, double a){
        //Constants
        int maxit = 100;
        double eps = 0.0000003;
        //Variables
        double gln = logGamma(a), g = 0, gOld = 0, a0 = 1, a1 = x, b0 = 0, b1 = 1, fac = 1;
        double an, ana, anf;
        for (int n = 1; n <= maxit; n++){
            an = 1.0 * n;
            ana = an - a;
            a0 = (a1 + a0 * ana) * fac;
            b0 = (b1 + b0 * ana) * fac;
            anf = an * fac;
            a1 = x * a0 + anf * a1;
            b1 = x * b0 + anf * b1;
            if (a1 != 0){
                // renormalize to avoid overflow of the partial numerators/denominators
                fac = 1.0 / a1;
                g = b1 * fac;
                if (Math.abs((g - gOld) / g) < eps) break;
                gOld = g;
            }
        }
        return Math.exp(-x + a * Math.log(x) - gln) * g;
    }

    /** Computes the beta CDF (regularized incomplete beta function), using the
    symmetry relation I_x(a,b) = 1 - I_{1-x}(b,a) for faster convergence. */
    public static double betaCDF(double x, double a, double b){
        double bt;
        if ((x == 0) | (x == 1)) bt = 0;
        else bt = Math.exp(logGamma(a + b) - logGamma(a) - logGamma(b) + a * Math.log(x) + b * Math.log(1 - x));
        if (x < (a + 1) / (a + b + 2)) return bt * betaCF(x, a, b) / a;
        else return 1 - bt * betaCF(1 - x, b, a) / b;
    }

    /** Continued-fraction expansion used by betaCDF; iterates to a fixed
    tolerance or maxit terms. */
    private static double betaCF(double x, double a, double b){
        int maxit = 100;
        double eps = 0.0000003, am = 1, bm = 1, az = 1, qab = a + b,
        qap = a + 1, qam = a - 1, bz = 1 - qab * x / qap, tem, em, d, bpp, bp, app, aOld, ap;
        for (int m = 1; m <= maxit; m++){
            em = m;
            tem = em + em;
            d = em * (b - m) * x / ((qam + tem) * (a + tem));
            ap = az + d * am;
            bp = bz + d * bm;
            d = -(a + em) *(qab + em) * x / ((a + tem) * (qap + tem));
            app = ap + d * az;
            bpp = bp + d * bz;
            aOld = az;
            am = ap / bpp;
            bm = bp / bpp;
            az = app / bpp;
            bz = 1;
            if (Math.abs(az - aOld) < eps * Math.abs(az)) break;
        }
        return az;
    }
}
| 9,584 | 32.28125 | 163 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/Domain.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/** A partition of an interval [a, b] into subintervals of equal width, used as a
default domain for distributions. A finite domain is modeled by the midpoints of
the subintervals: the boundary points are a + i*w for i = 0, ..., n and the
midpoints (values) are a + (i + 1/2)*w for i = 0, ..., n - 1, where n is the
number of subintervals. */
public class Domain{
    // Interval endpoints, subinterval width, and the extreme midpoints
    private double lowerBound, upperBound, width, lowerValue, upperValue;
    // Number of subintervals in the partition
    private int size;

    /** General constructor: partitions [a, b] into subintervals of width w.
    A non-positive width is replaced by 1, and b is raised if necessary so the
    interval contains at least one subinterval. */
    public Domain(double a, double b, double w){
        if (w <= 0) w = 1;
        if (b < a + w) b = a + w;
        width = w;
        lowerBound = a;
        upperBound = b;
        lowerValue = a + 0.5 * w;
        upperValue = b - 0.5 * w;
        size = (int)Math.rint((b - a) / w);
    }

    /** Special constructor: partitions [0, b] into 10 equal subintervals. */
    public Domain(double b){
        this(0, b, 0.1 * b);
    }

    /** Default constructor: partitions [0, 1] into 10 equal subintervals. */
    public Domain(){
        this(1);
    }

    /** Returns the index of the subinterval containing x: -1 below the
    interval, size above it, otherwise the index of the nearest midpoint. */
    public int getIndex(double x){
        if (x < lowerBound) return -1;
        if (x > upperBound) return size;
        return (int)Math.rint((x - lowerValue) / width);
    }

    /** Returns the boundary point with the given index. */
    public double getBound(int i){
        return lowerBound + i * width;
    }

    /** Returns the midpoint of the subinterval with the given index. */
    public double getValue(int i){
        return lowerValue + i * width;
    }

    /** Returns the lower bound of the interval. */
    public double getLowerBound(){
        return lowerBound;
    }

    /** Returns the upper bound of the interval. */
    public double getUpperBound(){
        return upperBound;
    }

    /** Returns the smallest midpoint. */
    public double getLowerValue(){
        return lowerValue;
    }

    /** Returns the largest midpoint. */
    public double getUpperValue(){
        return upperValue;
    }

    /** Returns the subinterval width. */
    public double getWidth(){
        return width;
    }

    /** Returns the number of subintervals. */
    public int getSize(){
        return size;
    }
}
| 3,121 | 32.212766 | 410 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/ExponentialDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/** The exponential distribution with rate parameter r, a special case of the
gamma distribution with shape 1 and scale 1/r. */
public class ExponentialDistribution extends GammaDistribution{
    // Rate parameter (reciprocal of the mean)
    double rate;

    /** General constructor: an exponential distribution with the given rate.
    @param r the rate; non-positive values are replaced by 1 */
    public ExponentialDistribution(double r){
        setRate(r);
    }

    /** Default constructor: an exponential distribution with rate 1. */
    public ExponentialDistribution(){
        this(1);
    }

    /** Sets the rate parameter and reconfigures the underlying gamma
    distribution with shape 1 and scale 1/rate. */
    public void setRate(double r){
        if (r <= 0) r = 1;
        rate = r;
        super.setParameters(1, 1 / rate);
    }

    /** Returns the rate parameter. */
    public double getRate(){
        return rate;
    }

    /** Density: f(x) = r e^{-rx} for x >= 0, and 0 otherwise. */
    public double getDensity(double x){
        if (x < 0) return 0;
        else return rate * Math.exp(-rate * x);
    }

    /** CDF: F(x) = 1 - e^{-rx} for x >= 0.
    Fixed: previously returned negative values for x < 0; the CDF of a
    nonnegative variable must be 0 there. */
    public double getCDF(double x){
        if (x < 0) return 0;
        return 1 - Math.exp(-rate * x);
    }

    /** Quantile function: F^{-1}(p) = -ln(1 - p) / r for p in [0, 1). */
    public double getQuantile(double p){
        return -Math.log(1 - p) / rate;
    }
}
| 1,871 | 28.714286 | 83 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/FiniteDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/** A basic discrete distribution on a finite set of equally spaced points, with
specified probabilities. */
public class FiniteDistribution extends Distribution{
    // Number of points in the domain
    private int n;
    // Normalized probability of each domain point
    private double[] prob;

    /** General constructor: a distribution on {a, a+w, ..., b} with the given
    (unnormalized) probability weights. */
    public FiniteDistribution(double a, double b, double w, double[] p){
        setParameters(a, b, w, p);
    }

    /** Constructs the uniform distribution on {a, a+w, ..., b}.
    (An empty weight array makes setParameters fall back to uniform weights,
    exactly as the previous hand-rolled loop did.) */
    public FiniteDistribution(double a, double b, double w){
        this(a, b, w, new double[0]);
    }

    /** Special constructor: the uniform distribution on {1, 2, ..., 10}. */
    public FiniteDistribution(){
        this(1, 10, 1);
    }

    /** Sets the parameters: the domain and the probabilities. A weight array of
    the wrong length, or with non-positive total, yields the uniform distribution.
    Refactored: validation/normalization is delegated to setProbabilities, which
    previously duplicated this code line for line. */
    public void setParameters(double a, double b, double w, double[] p){
        super.setParameters(a, b, w, DISCRETE);
        n = getDomain().getSize();
        prob = new double[n];
        setProbabilities(p);
    }

    /** Density: the stored probability of the domain point nearest x,
    and 0 off the domain. */
    public double getDensity(double x){
        int j = getDomain().getIndex(x);
        if (0 <= j & j < n) return prob[j];
        else return 0;
    }

    /** Sets (and normalizes) the probabilities. Negative entries are zeroed
    in place; if the weights are unusable (wrong length or zero total), the
    distribution becomes uniform. */
    public void setProbabilities(double[] p){
        if (p.length != n) p = new double[n];
        double sum = 0;
        for (int i = 0; i < n; i++){
            if (p[i] < 0) p[i] = 0;
            sum = sum + p[i];
        }
        if (sum == 0) for (int i = 0; i < n; i++) prob[i] = 1.0 / n;
        else for (int i = 0; i < n; i++) prob[i] = p[i] / sum;
    }

    /** Returns the probability at a given index, clamped into range. */
    public double getProbability(int i){
        if (i < 0) i = 0; else if (i >= n) i = n - 1;
        return prob[i];
    }

    /** Returns the (internal, mutable) probability vector. */
    public double[] getProbabilities(){
        return prob;
    }
}
| 2,848 | 30.307692 | 85 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/FiniteOrderStatisticDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/** The distribution of the k'th order statistic of a sample of size n chosen
without replacement from {1, 2, ..., N}. */
public class FiniteOrderStatisticDistribution extends Distribution{
    // NOTE(review): never used in this class — confirm before removing
    Distribution dist;
    // Sample size n, population size N, and order k
    private int sampleSize, populationSize, order;

    /** General constructor: population size N, sample size n, and order k. */
    public FiniteOrderStatisticDistribution(int N, int n, int k){
        setParameters(N, n, k);
    }

    /** Default constructor: population size 50, sample size 10, order 5. */
    public FiniteOrderStatisticDistribution(){
        this(50, 10, 5);
    }

    /** Sets the parameters and rebuilds the default domain {k, ..., N-n+k}. */
    public void setParameters(int N, int n, int k){
        populationSize = N;
        sampleSize = n;
        order = k;
        super.setParameters(order, populationSize - sampleSize + order, 1, Distribution.DISCRETE);
    }

    /** Density: C(i-1, k-1) C(N-i, n-k) / C(N, n) at the integer nearest x. */
    public double getDensity(double x){
        int i = (int)Math.rint(x);
        double below = comb(i - 1, order - 1);
        double above = comb(populationSize - i, sampleSize - order);
        return below * above / comb(populationSize, sampleSize);
    }

    /** Mean: k (N + 1) / (n + 1). */
    public double getMean(){
        return (double)order * (populationSize + 1) / (sampleSize + 1);
    }

    /** Variance: (N+1)(N-n) k (n+1-k) / ((n+1)^2 (n+2)). */
    public double getVariance(){
        int np1 = sampleSize + 1;
        return (double)(populationSize + 1) * (populationSize - sampleSize)
            * order * (np1 - order) / (np1 * np1 * (sampleSize + 2));
    }

    /** Sets the population size. */
    public void setPopulationSize(int N){
        setParameters(N, sampleSize, order);
    }

    /** Returns the population size. */
    public int getPopulationSize(){
        return populationSize;
    }

    /** Sets the sample size. */
    public void setSampleSize(int n){
        setParameters(populationSize, n, order);
    }

    /** Returns the sample size. */
    public int getSampleSize(){
        return sampleSize;
    }

    /** Sets the order. */
    public void setOrder(int k){
        setParameters(populationSize, sampleSize, k);
    }

    /** Returns the order. */
    public int getOrder(){
        return order;
    }
}
| 3,031 | 31.255319 | 97 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/FisherDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/** The Fisher F distribution with a specified number of degrees of freedom in
the numerator and denominator. */
public class FisherDistribution extends Distribution{
    // Degrees of freedom of the numerator and denominator chi-square variables
    private int nDegrees, dDegrees;
    // Log of the normalizing constant of the density
    private double c;

    /** General constructor: an F distribution with n and d degrees of freedom
    in the numerator and denominator respectively. */
    public FisherDistribution(int n, int d){
        setParameters(n, d);
    }

    /** Default constructor: 5 degrees of freedom in numerator and denominator. */
    public FisherDistribution(){
        this(5, 5);
    }

    /** Sets the degrees of freedom (values below 1 are replaced by 1), then
    recomputes the normalizing constant and the default interval. */
    public void setParameters(int n, int d){
        double upper, width;
        //Correct invalid parameters
        if (n < 1) n = 1;
        if (d < 1) d = 1;
        nDegrees = n;
        dDegrees = d;
        //Compute normalizing constant:
        //ln Γ((n+d)/2) - ln Γ(n/2) - ln Γ(d/2) + (n/2) ln(n/d)
        c = logGamma(0.5 * (nDegrees + dDegrees)) - logGamma(0.5 * nDegrees)
            - logGamma(0.5 * dDegrees) + 0.5 * nDegrees * (Math.log(nDegrees)
            - Math.log(dDegrees));
        //Compute interval: mean + 4 SD when the variance exists, else [0, 20]
        if (dDegrees <= 4) upper = 20; else upper = getMean() + 4 * getSD();
        width = 0.01 * upper;
        super.setParameters(0, upper, width, CONTINUOUS);
    }

    /** Density function of the F(n, d) distribution. */
    public double getDensity(double x){
        if (x < 0) return 0;
        else if (x == 0 & nDegrees == 1) return Double.POSITIVE_INFINITY;
        else return Math.exp(c + (0.5 * nDegrees - 1) * Math.log(x)
            - 0.5 * (nDegrees + dDegrees) * Math.log(1 + nDegrees * x / dDegrees));
    }

    /** Maximum value of the density: the density at the mode. */
    public double getMaxDensity(){
        double mode;
        if (nDegrees == 1) mode = getDomain().getLowerValue();
        else mode = (double)((nDegrees - 2) * dDegrees) / (nDegrees * (dDegrees + 2));
        return getDensity(mode);
    }

    /** Mean: d / (d - 2) for d > 2, infinite otherwise. */
    public double getMean(){
        if (dDegrees <= 2) return Double.POSITIVE_INFINITY;
        else return (double)dDegrees / (dDegrees - 2);
    }

    /** Variance: 2 (d/(d-2))^2 (d+n-2) / (n (d-4)), defined only for d > 4;
    NaN for d <= 2 and infinite for 2 < d <= 4.
    Fixed: the ratio d/(d-2) was previously computed with INTEGER division
    (e.g. d = 5 gave 1 instead of 5/3), underestimating the variance. */
    public double getVariance(){
        if (dDegrees <= 2) return Double.NaN;
        else if (dDegrees <= 4) return Double.POSITIVE_INFINITY;
        double ratio = (double)dDegrees / (dDegrees - 2);
        return 2.0 * ratio * ratio
            * (dDegrees + nDegrees - 2) / (nDegrees * (dDegrees - 4));
    }

    /** CDF in terms of the beta CDF. */
    public double getCDF(double x){
        double u = dDegrees / (dDegrees + nDegrees * x);
        if (x < 0) return 0;
        else return 1 - betaCDF(u, 0.5 * dDegrees, 0.5 * nDegrees);
    }

    /** Returns the numerator degrees of freedom. */
    public double getNDegrees(){
        return nDegrees;
    }

    /** Sets the numerator degrees of freedom. */
    public void setNDegrees(int n){
        setParameters(n, dDegrees);
    }

    /** Returns the denominator degrees of freedom. */
    public double getDDegrees(){
        return dDegrees;
    }

    /** Sets the denominator degrees of freedom. */
    public void setDDegrees(int d){
        setParameters(nDegrees, d);
    }

    /** Simulates a value as a ratio of scaled chi-square variables.
    Fixed: the numerator chi-square previously summed dDegrees squared normals
    instead of nDegrees, producing the wrong distribution whenever n != d. */
    public double simulate(){
        double U = chiSquareSim(nDegrees);
        double V = chiSquareSim(dDegrees);
        return (U / nDegrees) / (V / dDegrees);
    }

    /** Simulates a chi-square variable with df degrees of freedom as a sum of
    squared standard normal variables (Box-Muller transform). */
    private double chiSquareSim(int df){
        double sum = 0;
        for (int i = 1; i <= df; i++){
            double r = Math.sqrt(-2 * Math.log(Math.random()));
            double theta = 2 * Math.PI * Math.random();
            double z = r * Math.cos(theta);
            sum = sum + z * z;
        }
        return sum;
    }
}
| 4,442 | 32.406015 | 80 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/GammaDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/** The gamma distribution with a specified shape parameter k and scale
parameter b. */
public class GammaDistribution extends Distribution{
    // Shape k, scale b, and the log normalizing constant c = k ln(b) + ln Γ(k)
    private double shape, scale, c;

    /** General constructor: a gamma distribution with shape k and scale b. */
    public GammaDistribution(double k, double b){
        setParameters(k, b);
    }

    /** Default constructor: shape 1 and scale 1 (standard exponential). */
    public GammaDistribution(){
        this(1, 1);
    }

    /** Sets the parameters and assigns the default partition [0, mean + 4 SD].
    Fixed: non-positive parameters are now replaced by 1 (previously only
    strictly NEGATIVE values were corrected, so shape 0 or scale 0 slipped
    through, producing a degenerate domain and division by zero in getCDF;
    this also matches ExponentialDistribution's r <= 0 convention). */
    public void setParameters(double k, double b){
        double upperBound;
        //Correct invalid parameters
        if (k <= 0) k = 1;
        if (b <= 0) b = 1;
        shape = k;
        scale = b;
        //Log of the normalizing constant of the density
        c = shape * Math.log(scale) + logGamma(shape);
        //Assign default partition
        upperBound = getMean() + 4 * getSD();
        super.setParameters(0, upperBound, 0.01 * upperBound, CONTINUOUS);
    }

    /** Returns the shape parameter. */
    public double getShape(){
        return shape;
    }

    /** Returns the scale parameter. */
    public double getScale(){
        return scale;
    }

    /** Density function; the boundary x = 0 is handled per the shape
    (infinite for k < 1, 1/b for k = 1, 0 for k > 1). */
    public double getDensity(double x){
        if (x < 0) return 0;
        else if (x == 0 & shape < 1) return Double.POSITIVE_INFINITY;
        else if (x == 0 & shape == 1) return Math.exp(-c);
        else if (x == 0 & shape > 1) return 0;
        else return Math.exp(-c + (shape - 1) * Math.log(x) - x / scale);
    }

    /** Maximum value of the density: at the mode b(k - 1), or near 0 for k < 1. */
    public double getMaxDensity(){
        double mode;
        if (shape < 1) mode = 0.01; else mode = scale * (shape - 1);
        return getDensity(mode);
    }

    /** Mean: kb. */
    public double getMean(){
        return shape * scale;
    }

    /** Variance: kb². */
    public double getVariance(){
        return shape * scale * scale;
    }

    /** CDF via the regularized incomplete gamma function. */
    public double getCDF(double x){
        return gammaCDF(x / scale, shape);
    }

    /** Simulates a value. For integer shape k, simulates the k'th arrival time
    of a Poisson process (sum of k exponentials); otherwise falls back to the
    generic quantile method. */
    public double simulate(){
        if (shape == Math.rint(shape)){
            double sum = 0;
            for (int i = 1; i <= shape; i++){
                sum = sum - scale * Math.log(1 - Math.random());
            }
            return sum;
        }
        else return super.simulate();
    }
}
| 2,982 | 26.878505 | 78 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/GeometricDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/** The geometric distribution with success parameter p — the negative binomial
distribution specialized to a single success. */
public class GeometricDistribution extends NegativeBinomialDistribution{
    /** General constructor: a geometric distribution with success probability p. */
    public GeometricDistribution(double p){
        super(1, p);
    }

    /** Default constructor: success probability 0.5. */
    public GeometricDistribution(){
        this(0.5);
    }

    /** Overridden so the success count stays pinned at 1 regardless of k. */
    public void setParameters(int k, double p){
        super.setParameters(1, p);
    }

    /** Intentionally a no-op: the success count of a geometric distribution
    cannot be changed. */
    public void setSuccesses(int k){}
}
| 1,340 | 30.928571 | 82 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/HypergeometricDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**
 * This class models the hypergeometric distribution with parameters m (population
 * size), r (number of type 1 objects), and n (sample size): the distribution of
 * the number of type 1 objects in a sample of size n drawn without replacement.
 */
public class HypergeometricDistribution extends Distribution{
    private int populationSize, sampleSize, type1Size;
    /** Number of ways to draw the sample, C(populationSize, sampleSize); cached by setParameters. */
    double c;

    /** General constructor: creates a new hypergeometric distribution with the specified parameter values. */
    public HypergeometricDistribution(int m, int r, int n){
        setParameters(m, r, n);
    }

    /** Default constructor: creates a new hypergeometric distribution with m = 100, r = 50, n = 10. */
    public HypergeometricDistribution(){
        this(100, 50, 10);
    }

    /**
     * Sets the parameters of the distribution.
     * @param m population size (forced to be at least 1)
     * @param r number of type 1 objects (clamped to [0, m])
     * @param n sample size (clamped to [0, m])
     */
    public void setParameters(int m, int r, int n){
        //Correct for invalid parameters
        if (m < 1) m = 1;
        if (r < 0) r = 0; else if (r > m) r = m;
        if (n < 0) n = 0; else if (n > m) n = m;
        //Assign parameter values
        populationSize = m;
        type1Size = r;
        sampleSize = n;
        c = comb(populationSize, sampleSize);
        //Support is max(0, n - (m - r)) .. min(r, n), step 1
        super.setParameters(Math.max(0, sampleSize - populationSize + type1Size), Math.min(type1Size, sampleSize), 1, DISCRETE);
    }

    /** Density function: P(X = k) = C(r, k) C(m - r, n - k) / C(m, n). */
    public double getDensity(double x){
        int k = (int)Math.rint(x);
        return comb(type1Size, k) * comb(populationSize - type1Size, sampleSize - k) / c;
    }

    /** Maximum value of the density function, attained at the mode floor((n + 1)(r + 1) / (m + 2)). */
    public double getMaxDensity(){
        double mode = Math.floor(((double)(sampleSize + 1) * (type1Size + 1)) / (populationSize + 2));
        return getDensity(mode);
    }

    /** Mean: n r / m. */
    public double getMean(){
        return (double)sampleSize * type1Size / populationSize;
    }

    /** Variance: n (r/m) (1 - r/m) (m - n) / (m - 1). */
    public double getVariance(){
        //BUG FIX: the denominator is now computed in double precision; the
        //original int product m * m * (m - 1) overflowed for m above ~1290.
        return (double)sampleSize * type1Size * (populationSize - type1Size) *
            (populationSize - sampleSize) / ((double)populationSize * populationSize * (populationSize - 1));
    }

    /** Set population size */
    public void setPopulationSize(int m){
        setParameters(m, type1Size, sampleSize);
    }

    /** Get population size */
    public int getPopulationSize(){
        return populationSize;
    }

    /** Set sub-population size */
    public void setType1Size(int r){
        setParameters(populationSize, r, sampleSize);
    }

    /** Get sub-population size */
    public int getType1Size(){
        return type1Size;
    }

    /** Set sample size */
    public void setSampleSize(int n){
        setParameters(populationSize, type1Size, n);
    }

    /** Get sample size */
    public int getSampleSize(){
        return sampleSize;
    }

    /**
     * Simulates a value by sampling n objects without replacement from a
     * population array in which objects 0 .. r-1 are the type 1 objects,
     * using a partial Fisher-Yates shuffle.
     */
    public double simulate(){
        int j, k, u, m0;
        double x = 0;
        m0 = populationSize;
        int[] b = new int[m0];
        for (int i = 0; i < m0; i++) b[i] = i;
        for (int i = 0; i < sampleSize; i++){
            k = m0 - i;                    //number of objects not yet drawn
            u = (int)(k * Math.random());  //position of the object drawn this step
            //BUG FIX: test the drawn object b[u], not the raw index u; after the
            //first swap the array is no longer the identity permutation, so the
            //original test (u < type1Size) over-counted type 1 draws.
            if (b[u] < type1Size) x = x + 1;
            //Swap the drawn object out of the active range
            j = b[k - 1];
            b[k - 1] = b[u];
            b[u] = j;
        }
        return x;
    }
}
| 3,566 | 28.237705 | 122 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/IntervalData.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class defines a simple implementation of an interval data distribution. The data
distribution is based on a specified domain (that is, a partition of an interval). When values
are added, frequency counts for the subintervals are computed and various statistics (mean,
mean square, min, max, mode) are updated incrementally.*/
public class IntervalData{
//Variables
//size: number of data points added; maxFreq: largest subinterval frequency so far
private int size, maxFreq;
//value: last value added; mean/meanSquare: running moments; mode is NaN when not unique
private double value, minValue, maxValue, mean, meanSquare, mode;
//freq[i]: count of values that fell in subinterval i of the domain
private int[] freq;
//Objects
private Domain domain;
private String name;
/**This general constructor creates a new data distribution with a specified domain
and a specified name*/
public IntervalData(Domain d, String n){
name = n;
setDomain(d);
}
/**This general constructor creates a new data distribution with a specified domain and a
specified name.*/
public IntervalData(double a, double b, double w, String n){
this(new Domain(a, b, w), n);
}
/**This special constructor creates a new data distribution with a specified domain and the
default name "X".*/
public IntervalData(Domain d){
this(d, "X");
}
/**This special constructor creates a new data distribution with a specified domain and
the name "X"*/
public IntervalData(double a, double b, double w){
this(a, b, w, "X");
}
/**This default constructor creates a new data distribution on the interval [0, 1] with
subintervals of length 0.1, and the default name "X".*/
public IntervalData(){
this(0, 1, 0.1);
}
/**This method sets the domain of the data set. Resets all statistics, since the
old frequency counts are meaningless for the new partition.*/
public void setDomain(Domain d){
domain = d;
reset();
}
/**This method returns the domain.*/
public Domain getDomain(){
return domain;
}
/**This method sets the name of the data set.*/
public void setName(String n){
name = n;
}
/**This method gets the name of the data set.*/
public String getName(){
return name;
}
/**This method resets the data set. Note that min and max are initialized to the
opposite bounds of the domain so that the first value added replaces both.*/
public void reset(){
freq = new int[domain.getSize()];
size = 0;
minValue = domain.getUpperBound();
maxValue = domain.getLowerBound();
maxFreq = 0;
}
/**This method adds a new number to the data set and re-computes the mean, mean square,
minimum and maximum values, the frequency distribution, and the mode*/
public void setValue(double x){
value = x;
//Update the size of the data set:
size++;
//Re-compute mean and mean square with the standard running-average recurrence:
//m_n = ((n-1)/n) m_{n-1} + x/n
mean = ((double)(size - 1) / size) * mean + value / size;
meanSquare = ((double)(size - 1) / size) * meanSquare + value * value / size;
//Recompute minimum and maximum values
if (value < minValue) minValue = value;
if (value > maxValue) maxValue = value;
//Update frequency distribution
int i = domain.getIndex(x);
//non-short-circuit & is harmless here: both operands are cheap and side-effect free
if (i >= 0 & i < domain.getSize()){
freq[i]++;
//Re-compute mode
if (freq[i] > maxFreq){
maxFreq = freq[i];
mode = domain.getValue(i);
}
else if (freq[i] == maxFreq) mode = Double.NaN; //There are two or more modes
}
}
/**This method returns the current value of the data set*/
public double getValue(){
return value;
}
/**This method returns the domain value (midpoint) closest to given value of x*/
public double getDomainValue(double x){
return domain.getValue(domain.getIndex(x));
}
/**This method returns the frequency of the class containing a given value
of x. Returns 0 for values outside the domain.*/
public int getFreq(double x){
int i = domain.getIndex(x);
if (i < 0 | i >= domain.getSize()) return 0;
else return freq[i];
}
/**This method returns the relative frequency of the class containing
a given value. Returns 0 for an empty data set.*/
public double getRelFreq(double x){
if (size > 0) return (double)(getFreq(x)) / size;
else return 0;
}
/**This method returns the getDensity for a given value: relative frequency
divided by the subinterval width.*/
public double getDensity(double x){
return getRelFreq(x) / domain.getWidth();
}
/**This method returns the mean of the data set.*/
public double getMean(){
return mean;
}
/**This method returns the mean of the frequency distribution. The interval
mean is an approximation to the true mean of the data set, computed from the
subinterval midpoints weighted by frequency.*/
public double getIntervalMean(){
double sum = 0;
for (int i = 0; i < domain.getSize(); i++) sum = sum + domain.getValue(i) * freq[i];
return sum / size;
}
/**This method returns the population variance. The clamp at 0 guards against
small negative results caused by floating-point rounding.*/
public double getVarianceP(){
double var = meanSquare - mean * mean;
if (var < 0) var = 0;
return var;
}
/**This method returns the population standard deviation.*/
public double getSDP(){
return Math.sqrt(getVarianceP());
}
/**This method returns the sample variance. Note: only meaningful for size > 1;
with a single data point the factor size/(size - 1) divides by zero.*/
public double getVariance(){
return ((double)size / (size - 1)) * getVarianceP();
}
/**This method returns the sample standard deviation.*/
public double getSD(){
return Math.sqrt(getVariance());
}
/**This method returns the interval variance, computed from the subinterval
midpoints weighted by frequency.*/
public double getIntervalVariance(){
double m = getIntervalMean(), sum = 0, x;
for (int i = 0; i < domain.getSize(); i++){
x = domain.getValue(i);
sum = sum + (x - m) * (x - m) * freq[i];
}
return sum / size;
}
/**This method returns the interval standard deviation.*/
public double getIntervalSD(){
return Math.sqrt(getIntervalVariance());
}
/**This method returns the minimum value of the data set*/
public double getMinValue(){
return minValue;
}
/**This method returns the maximum value of the data set*/
public double getMaxValue(){
return maxValue;
}
/**This method computes the median of the values in the data set between two specified values.
It walks the subinterval midpoints from a to b in steps of the subinterval width;
the 0.5 * w slack on the upper limit absorbs floating-point drift in the stepping.*/
public double getMedian(double a, double b){
int sumFreq = 0, numValues = 0, lRank, uRank;
//lValue/uValue start at sentinel values outside [a, b] so the first rank hit sets them
double lValue = a - 1, uValue = b + 1, w = domain.getWidth();
//Compute sum of frequencies between a and b
for (double x = a; x <= b + 0.5 * w; x = x + w) numValues = numValues + getFreq(x);
//Determine parity and ranks: even count averages the two middle values,
//odd count uses the single middle value twice
if (2 * (numValues / 2) == numValues) {
lRank = numValues / 2;
uRank = lRank + 1;
}
else {
lRank = (numValues + 1) / 2;
uRank = lRank;
}
//Determine values
for (double x = a; x <= b + 0.5 * w; x = x + w) {
sumFreq = sumFreq + getFreq(x);
if ((lValue == a - 1) & (sumFreq >= lRank)) lValue = x;
if ((uValue == b + 1) & (sumFreq >= uRank)) uValue = x;
}
//Return average of upper and lower values
return (uValue + lValue) / 2;
}
/**This method computes the median of the entire data set*/
public double getMedian(){
return getMedian(domain.getLowerValue(), domain.getUpperValue());
}
/**This method returns the quartiles of the data set: i = 1, 2, 3 give the
first quartile, median, and third quartile (i is clamped to [1, 3]).*/
public double getQuartile(int i){
if (i < 1) i = 1; else if (i > 3) i = 3;
if (i == 1) return getMedian(domain.getLowerValue(), getMedian());
else if (i == 2) return getMedian();
else return getMedian(getMedian(), domain.getUpperValue());
}
/**This method computes the mean absolute deviation about the median,
using the subinterval midpoints weighted by relative frequency.*/
public double getMAD(){
double mad = 0, x;
double m = getMedian();
for (int i = 0; i < domain.getSize(); i++){
x = domain.getValue(i);
mad = mad + getRelFreq(x) * Math.abs(x - m);
}
return mad;
}
/**This method returns the number of pointCount in the data set*/
public int getSize(){
return size;
}
/**This method returns the maximum frequency*/
public int getMaxFreq(){
return maxFreq;
}
/**This method returns the maximum relative frequency.*/
public double getMaxRelFreq(){
if (size > 0) return (double)maxFreq / size;
else return 0;
}
/**This method returns the maximum getDensity.*/
public double getMaxDensity(){
return getMaxRelFreq() / domain.getWidth();
}
/**This method returns the mode of the distribution. Returns Double.NaN when
the mode is not unique (two or more classes tie for the maximum frequency).*/
public double getMode(){
return mode;
}
}
| 8,329 | 28.75 | 278 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/LocationScaleDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class applies a location-scale transformation to a given distribution. In terms of
* the corresponding random variable X, the transformation is Y = a + bX*/
public class LocationScaleDistribution extends Distribution{
//The underlying distribution of X, and the location (a) and scale (b) parameters
private Distribution dist;
private double location, scale;
/**This general constructor creates a new location-scale transformation on
a given distribution with given location and scale parameters*/
public LocationScaleDistribution(Distribution d, double a, double b){
setParameters(d, a, b);
}
/**This method sets the parameters: the distribution and the location and
scale parameters. The transformed domain is computed from the underlying domain;
a negative scale reverses the endpoints, and scale 0 degenerates to a point mass
at the location.*/
public void setParameters(Distribution d, double a, double b){
dist = d; location = a; scale = b;
Domain domain = dist.getDomain();
double l, u, w = domain.getWidth();
int t = dist.getType();
if (t == DISCRETE){
l = domain.getLowerValue(); u = domain.getUpperValue();
}
else{
l = domain.getLowerBound(); u = domain.getUpperBound();
}
if (scale == 0) super.setParameters(location, location, 1, DISCRETE);
else if (scale < 0) super.setParameters(location + scale * u, location + scale * l, -scale * w, t);
else super.setParameters(location + scale * l, location + scale * u, scale * w, t);
}
/**This method defines the getDensity function.
NOTE(review): for a continuous underlying distribution this returns f((x - a) / b)
without the usual 1/|b| Jacobian factor, so it does not integrate to 1 over the
transformed domain — confirm whether the framework compensates via the scaled
step width before relying on this as a true density.*/
public double getDensity(double x){
if (scale == 0){
//Degenerate case: point mass at the location
if (x == location) return 1;
else return 0;
}
else return dist.getDensity((x - location) / scale);
}
/**This method returns the maximum value of the getDensity function
(same caveat as getDensity regarding the missing 1/|scale| factor)*/
public double getMaxDensity(){
return dist.getMaxDensity();
}
/**This method returns the mean: a + b E(X)*/
public double getMean(){
return location + scale * dist.getMean();
}
/**This method returns the variance: b^2 Var(X)*/
public double getVariance(){
return (scale * scale) * dist.getVariance();
}
/**This method returns a simulated value from the distribution*/
public double simulate(){
return location + scale * dist.simulate();
}
/**This method returns the cumulative distribution function; a negative scale
reverses orientation, so the complementary CDF of X is used*/
public double getCDF(double x){
if (scale > 0) return dist.getCDF((x - location) / scale);
else return 1 - dist.getCDF((x - location) / scale);
}
/**This method returns the getQuantile function; for a negative scale the
p-quantile of Y corresponds to the (1 - p)-quantile of X*/
public double getQuantile(double p){
if (scale > 0) return location + scale * dist.getQuantile(p);
else return location + scale * dist.getQuantile(1 - p);
}
}
| 3,173 | 34.266667 | 101 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/LogNormalDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**
 * The lognormal distribution: the distribution of exp(Z), where Z is normally
 * distributed with mean mu and standard deviation sigma.
 */
public class LogNormalDistribution extends Distribution{
    /** The normalizing constant sqrt(2 pi). */
    public final static double C = Math.sqrt(2 * Math.PI);
    private double mu, sigma;

    /** General constructor: a lognormal distribution with the specified parameters. */
    public LogNormalDistribution(double m, double s){
        setParameters(m, s);
    }

    /** Default constructor: the standard lognormal distribution (mu = 0, sigma = 1). */
    public LogNormalDistribution(){
        this(0, 1);
    }

    /**
     * Sets the parameters and recomputes the default interval [0, mean + 3 sd].
     * A non-positive sigma is replaced by 1.
     */
    public void setParameters(double m, double s){
        if (s <= 0) s = 1;
        mu = m; sigma = s;
        double upper = getMean() + 3 * getSD();
        super.setParameters(0, upper, 0.01 * upper, CONTINUOUS);
    }

    /** Density: f(x) = exp(-z^2 / 2) / (x sigma sqrt(2 pi)), where z = (ln x - mu) / sigma. */
    public double getDensity(double x){
        double std = (Math.log(x) - mu) / sigma;
        return Math.exp(- std * std / 2) / (x * C * sigma);
    }

    /** Maximum of the density, attained at the mode exp(mu - sigma^2). */
    public double getMaxDensity(){
        double mode = Math.exp(mu - sigma * sigma);
        return getDensity(mode);
    }

    /** Mean: exp(mu + sigma^2 / 2). */
    public double getMean(){
        return Math.exp(mu + sigma * sigma / 2);
    }

    /** Variance: exp(2 mu + 2 sigma^2) - exp(2 mu + sigma^2). */
    public double getVariance(){
        double a = mu + sigma * sigma;
        return Math.exp(2 * a) - Math.exp(mu + a);
    }

    /** Simulates a value via the Box-Muller transform applied in the exponent. */
    public double simulate(){
        double radius = Math.sqrt(-2 * Math.log(Math.random()));
        double angle = 2 * Math.PI * Math.random();
        return Math.exp(mu + sigma * radius * Math.cos(angle));
    }

    /** Returns mu, the mean of the underlying normal distribution. */
    public double getMu(){
        return mu;
    }

    /** Sets mu. */
    public void setMu(double m){
        setParameters(m, sigma);
    }

    /** Returns sigma, the standard deviation of the underlying normal distribution. */
    public double getSigma(){
        return sigma;
    }

    /** Sets sigma. */
    public void setSigma(double s){
        setParameters(mu, s);
    }

    /** CDF: Phi((ln x - mu) / sigma), computed from the incomplete gamma function. */
    public double getCDF(double x){
        double std = (Math.log(x) - mu) / sigma;
        if (std >= 0) return 0.5 + 0.5 * gammaCDF(std * std / 2, 0.5);
        else return 0.5 - 0.5 * gammaCDF(std * std / 2, 0.5);
    }
}
| 3,005 | 28.762376 | 75 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/LogisticDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**
 * The standard logistic distribution.
 */
public class LogisticDistribution extends Distribution{

    /** Default constructor: a standard logistic distribution on [-7, 7] with step 0.14. */
    public LogisticDistribution(){
        super.setParameters(-7, 7, 0.14, CONTINUOUS);
    }

    /** Density: f(x) = e^x / (1 + e^x)^2. */
    public double getDensity(double x){
        double expX = Math.exp(x);
        return expX / ((1 + expX) * (1 + expX));
    }

    /** Maximum of the density, attained at 0: f(0) = 1/4. */
    public double getMaxDensity(){
        return 0.25;
    }

    /** CDF: F(x) = e^x / (1 + e^x). */
    public double getCDF(double x){
        double expX = Math.exp(x);
        return expX / (1 + expX);
    }

    /** Quantile function: the logit, F^{-1}(p) = ln(p / (1 - p)). */
    public double getQuantile(double p){
        return Math.log(p / (1 - p));
    }

    /** Mean: 0 by symmetry. */
    public double getMean(){
        return 0;
    }

    /** Variance: pi^2 / 3. */
    public double getVariance(){
        return Math.PI * Math.PI / 3;
    }
}
| 1,757 | 28.79661 | 75 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/MatchDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**
 * The distribution of the number of matches (fixed points) in a uniformly
 * random permutation of {1, ..., n}.
 */
public class MatchDistribution extends Distribution{
    //parameter: the permutation size n; b: scratch array reused by simulate
    int parameter;
    int[] b;

    /** General constructor: a matching distribution for permutations of size n. */
    public MatchDistribution(int n){
        setParameter(n);
    }

    /** Default constructor: a matching distribution with parameter 5. */
    public MatchDistribution(){
        this(5);
    }

    /** Sets the permutation size (values below 1 are clamped to 1). */
    public void setParameter(int n){
        if (n < 1) n = 1;
        parameter = n;
        super.setParameters(0, parameter, 1, DISCRETE);
        b = new int[n];
    }

    /**
     * Density: P(X = k) = (1 / k!) * sum_{j=0}^{n-k} (-1)^j / j!, the
     * inclusion-exclusion formula for permutations with exactly k fixed points.
     */
    public double getDensity(double x){
        int k = (int)Math.rint(x);
        double total = 0;
        int parity = -1;
        for (int j = 0; j <= parameter - k; j++){
            parity = -parity;  //+1 for even j, -1 for odd j
            total = total + parity / factorial(j);
        }
        return total / factorial(k);
    }

    /** Maximum of the density: at 0 when n = 2, otherwise at 1. */
    public double getMaxDensity(){
        if (parameter == 2) return getDensity(0);
        else return getDensity(1);
    }

    /** Mean: 1, regardless of n. */
    public double getMean(){
        return 1;
    }

    /** Variance: 1, regardless of n. */
    public double getVariance(){
        return 1;
    }

    /** Returns the permutation size n. */
    public int getParameter(){
        return parameter;
    }

    /**
     * Simulates a value by generating a random permutation with a Fisher-Yates
     * style shuffle and counting the positions whose drawn value matches i + 1.
     */
    public double simulate(){
        double matches = 0;
        for (int i = 0; i < parameter; i++) b[i] = i + 1;
        for (int i = 0; i < parameter; i++){
            int remaining = parameter - i;
            int u = (int)(remaining * Math.random());
            if (b[u] == i + 1) matches = matches + 1;
            //Swap the drawn value out of the active range
            int tmp = b[remaining - 1];
            b[remaining - 1] = b[u];
            b[u] = tmp;
        }
        return matches;
    }
}
| 2,614 | 26.526316 | 75 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/MersenneTwister.java | package statistics.distributions;
import java.io.*;
/**
* Mersenne Twister and MersenneTwisterFast:
* <P>
* <b>MersenneTwister</b> is a drop-in subclass replacement
* for java.util.Random. It is properly synchronized and
* can be used in a multithreaded environment.
*
* <p><b>MersenneTwisterFast</b> is not a subclass of java.util.Random. It has
* the same public methods as Random does, however, and it is
* algorithmically identical to MersenneTwister. MersenneTwisterFast
* has hard-code inlined all of its methods directly, and made all of them
* final (well, the ones of consequence anyway). Further, these
* methods are <i>not</i> synchronized, so the same MersenneTwisterFast
* instance cannot be shared by multiple threads. But all this helps
* MersenneTwisterFast achieve over twice the speed of MersenneTwister.
*
* <p><b>About the Mersenne Twister. </b>
* This is a Java version of the C-program for MT19937: Integer version.
* next(32) generates one pseudorandom unsigned integer (32bit)
* which is uniformly distributed among 0 to 2^32-1 for each
* call. next(int bits) >>>'s by (32-bits) to get a value ranging
* between 0 and 2^bits-1 long inclusive; hope that's correct.
* setSeed(seed) set initial values to the working area
* of 624 words. For setSeed(seed), seed is any 32-bit integer
* <b>except for 0</b>.
*
* <p>Originally coded by Takuji Nishimura, considering the suggestions by
* Topher Cooper and Marc Rieffel in July-Aug. 1997.
* More information can be found
* <A HREF="http://www.math.keio.ac.jp/matumoto/emt.html">
* here. </a>
* <P>
* Translated to Java by Michael Lecuyer January 30, 1999
* Copyright (C) 1999 Michael Lecuyer
* <P>
* This library is free software; you can redistribute it and or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later
* version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Library General Public License for more details.
* You should have received a copy of the GNU Library General
* Public License along with this library; if not, write to the
* Free Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
* 02111-1307 USA
* <P>
* Makoto Matsumoto and Takuji Nishimura, the original authors
* ask "When you use this, send an email to: matumoto@math.keio.ac.jp
* with an appropriate reference to your work" You might also point
* out this was a translation.
* <P>
* <b>Reference. </b>
* M. Matsumoto and T. Nishimura,
* "Mersenne Twister: A 623-Dimensionally Equidistributed Uniform
* Pseudo-Random Number Generator",
* <i>ACM Transactions on Modeling and Computer Simulation,</i>
* Vol. 8, No. 1, January 1998, pp 3--30.
*
* <p><b>About this version. </b> This is a modification of the
* <a href="http://www.theorem.com/java/index.htm#Mersenne">original
* code</a> made to conform to proper java.util.Random format by
* <a href="http://www.cs.umd.edu/users/seanl/">Sean Luke,</a>
* August 7, 1999.
*
* <p><b>Bug Fixes. </b>This implementation implements the bug fixes made
* in Java 1.2's version of Random, which means it can be used with
* earlier versions of Java. See
* <a href="http://www.javasoft.com/products/jdk/1.2/docs/api/java/util/Random.html">
* the JDK 1.2 java.util.Random documentation</a> for further documentation
* on the random-number generation contracts made. Additionally, there's
* an undocumented bug in the JDK java.util.Random.nextBytes() method,
* which this code fixes.
*
* <p><b>Important Note. </b> Just like java.util.Random, this
* generator accepts a long seed but doesn't use all of it. java.util.Random
* uses 48 bits. The Mersenne Twister instead uses 32 bits (int size).
* So it's best if your seed does not exceed the int range.
*/
public class MersenneTwister extends java.util.Random implements Serializable
{
// Period parameters
private static final int N = 624;
private static final int M = 397;
private static final int MATRIX_A = 0x9908b0df; // private static final * constant vector a
private static final int UPPER_MASK = 0x80000000; // most significant w-r bits
private static final int LOWER_MASK = 0x7fffffff; // least significant r bits
// Tempering parameters
private static final int TEMPERING_MASK_B = 0x9d2c5680;
private static final int TEMPERING_MASK_C = 0xefc60000;
// #define TEMPERING_SHIFT_U(y) (y >>> 11)
// #define TEMPERING_SHIFT_S(y) (y << 7)
// #define TEMPERING_SHIFT_T(y) (y << 15)
// #define TEMPERING_SHIFT_L(y) (y >>> 18)
private int mt[]; // the array for the state vector
private int mti; // mti==N+1 means mt[N] is not initialized
private int mag01[];
// a good initial seed (of int size, though stored in a long)
private static final long GOOD_SEED = 4357;
/**
* Constructor using the default seed (4357, the value suggested by the
* original MT19937 C code).
*/
public MersenneTwister()
{
super(GOOD_SEED);
setSeed(GOOD_SEED);
}
/**
* Constructor using a given seed. Though you pass this seed in
* as a long, it's best to make sure it's actually an integer:
* setSeed only uses the low 32 bits.
*
* @param seed generator starting number, often the time of day.
*/
public MersenneTwister(long seed)
{
super(seed); /* just in case */
setSeed(seed);
}
/**
* Initialize the pseudo random number generator.
* The Mersenne Twister only uses an integer for its seed;
* it's best that you don't pass in a long that's bigger
* than an int (only the low 32 bits are used).
*
* Note that for very old versions of jdk (like 1.0.2),
* setSeed will not properly reset the gaussian mechanism,
* so nextGaussian() may return <i>one</i> more extra
* gaussian drawn from the old seed rather than the new one.
*
* @param seed from constructor
*
*/
synchronized public void setSeed(long seed)
{
// this lets java.util.Random clear its nextNextGaussian field
// Note this is broken in older jdks like 1.0.2. -- nextNextGaussian
// will not be cleared so the very next gaussian you get *may* be drawn
// from the old seed's generation.
super.setSeed(seed);
mt = new int[N];
// setting initial seeds to mt[N] using
// the generator Line 25 of Table 1 in
// [KNUTH 1981, The Art of Computer Programming
// Vol. 2 (2nd Ed.), pp102]
// the 0xffffffff is commented out because in Java
// ints are always 32 bits; hence i & 0xffffffff == i
mt[0]= ((int)seed); // & 0xffffffff;
for (mti = 1; mti < N; mti++)
mt[mti] = (69069 * mt[mti-1]); //& 0xffffffff;
// mag01[x] = x * MATRIX_A for x=0,1; lets next() XOR in the twist
// constant conditionally without a branch
mag01 = new int[2];
mag01[0] = 0x0;
mag01[1] = MATRIX_A;
}
/**
* Returns an integer with <i>bits</i> bits filled with a random number.
* This is the core MT19937 generator: when the 624-word state block is
* exhausted it is regenerated in one pass, then the next word is tempered
* and shifted down to the requested width.
*/
synchronized protected int next(int bits)
{
int y;
if (mti >= N) // generate N words at one time
{
int kk;
// combine the high bit of mt[kk] with the low 31 bits of mt[kk+1],
// then twist with the word M positions ahead (wrapping around)
for (kk = 0; kk < N - M; kk++)
{
y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK);
mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1];
}
for (; kk < N-1; kk++)
{
y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK);
mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1];
}
// last word wraps around to mt[0]
y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK);
mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1];
mti = 0;
}
y = mt[mti++];
// tempering transform improves equidistribution of the raw state word
y ^= y >>> 11; // TEMPERING_SHIFT_U(y)
y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y)
y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y)
y ^= (y >>> 18); // TEMPERING_SHIFT_L(y)
return y >>> (32 - bits); // keep the high-order 'bits' bits
}
/* If you've got a truly old version of Java, you can omit these
two next methods. They exist only so that serialization happens
under the object's lock, keeping a concurrently running generator
from being captured mid-update. */
private synchronized void writeObject(ObjectOutputStream out)
throws IOException
{
// just so we're synchronized.
out.defaultWriteObject();
}
// Counterpart of writeObject: default deserialization performed while
// synchronized, so no caller can observe a half-restored state.
private synchronized void readObject (ObjectInputStream in)
throws IOException, ClassNotFoundException
{
// just so we're synchronized.
in.defaultReadObject();
}
/** This method is missing from jdk 1.0.x and below. JDK 1.1
includes this for us, but what the heck. Returns a single random
bit as a boolean. */
public boolean nextBoolean() {return next(1) != 0;}
/** This method is missing from JDK 1.1 and below. JDK 1.2
includes this for us, but what the heck. Returns a uniformly
distributed int in [0, n); throws IllegalArgumentException if
n is not positive. */
public int nextInt(int n) {
    if (n <= 0)
        throw new IllegalArgumentException("n must be positive");
    // Power of two: take the high-order bits of a single draw.
    if ((n & -n) == n)
        return (int)((n * (long)next(31)) >> 31);
    // General case: rejection sampling to avoid modulo bias.
    int draw, result;
    do {
        draw = next(31);
        result = draw % n;
    } while (draw - result + (n - 1) < 0);
    return result;
}
/** A bug fix for versions of JDK 1.1 and below. JDK 1.2 fixes
this for us, but what the heck. Builds a 53-bit random mantissa
(26 high bits and 27 low bits) so the result is uniform on [0, 1). */
public double nextDouble()
{
return (((long)next(26) << 27) + next(27))
/ (double)(1L << 53);
}
/** A bug fix for versions of JDK 1.1 and below. JDK 1.2 fixes
this for us, but what the heck. Uses 24 random bits (a float
mantissa's worth) so the result is uniform on [0, 1). */
public float nextFloat()
{
return next(24) / ((float)(1 << 24));
}
/** A bug fix for all versions of the JDK. The JDK appears to
use all four bytes in an integer as independent byte values!
Totally wrong. I've submitted a bug report. Fills the array
with one fresh 8-bit draw per byte. */
public void nextBytes(byte[] bytes)
{
    for (int i = 0; i < bytes.length; i++)
        bytes[i] = (byte)next(8);
}
/** For completeness' sake, though it's not in java.util.Random.
Returns a uniformly random 16-bit char. */
public char nextChar()
{
// chars are 16-bit UniCode values
return (char)(next(16));
}
/** For completeness' sake, though it's not in java.util.Random.
Returns a uniformly random 16-bit short. */
public short nextShort()
{
return (short)(next(16));
}
/** For completeness' sake, though it's not in java.util.Random.
Returns a uniformly random 8-bit byte. */
public byte nextByte()
{
return (byte)(next(8));
}
    /**
     * Ad-hoc manual test harness for the generator.  Every test section below
     * is commented out: uncomment the block of interest, recompile and run.
     * The locals j and r look unused, but they are referenced by those
     * commented-out sections and must remain declared.
     */
    public static void main(String args[])
    {
        int j;

        MersenneTwister r;

        // UNCOMMENT THIS TO TEST FOR PROPER GAUSSIAN STATE INITIALIZATION
        /*
        System.out.println("If the gaussian state is properly initialized when setSeed() is called,\nthen #1 != #2, but #1 == #3\nIt's known that java 1.0.2 doesn't do gaussian initialization right,\nso setSeed() may result in one last gaussian drawn from the *previous* seed.");
        r = new MersenneTwister(1);
        r.nextGaussian(); // loads the later gaussian into the state
        System.out.println("1: " + r.nextGaussian());
        r = new MersenneTwister(1);
        r.nextGaussian(); // loads the later gaussian into the state
        r.setSeed(1); // should reset the gaussian state
        System.out.println("2: " + r.nextGaussian());
        System.out.println("3: " + r.nextGaussian());
        */

        // UNCOMMENT THIS TO TEST FOR CORRECTNESS
        // COMPARE WITH http://www.math.keio.ac.jp/~nisimura/random/int/mt19937int.out
        /*
        r = new MersenneTwister(4357);
        System.out.println("Output of MersenneTwister.java");
        for (j=0;j<1000;j++)
        {
            // first, convert the int from signed to "unsigned"
            long l = (long)r.nextInt();
            if (l < 0 ) l += 4294967296L; // max int value
            String s = String.valueOf(l);
            while(s.length() < 10) s = " " + s; // buffer
            System.out.print(s + " ");
            if (j%8==7) System.out.println();
        }
        */

        // UNCOMMENT THIS TO TEST FOR SPEED
        /*
        r = new MersenneTwister();
        System.out.println("\nTime to test grabbing 10000000 ints");
        long ms = System.currentTimeMillis();
        int xx=0;
        for (j = 0; j < 10000000; j++)
            xx += r.nextInt();
        System.out.println("Mersenne Twister: " + (System.currentTimeMillis()-ms + " Ignore this: " + xx));
        Random rr = new Random(1);
        xx = 0;
        ms = System.currentTimeMillis();
        for (j = 0; j < 10000000; j++)
            xx += rr.nextInt();
        System.out.println("java.util.Random: " + (System.currentTimeMillis()-ms + " Ignore this: " + xx));
        */

        // UNCOMMENT THIS TO DO TEST DIFFERENT TYPE OUTPUTS
        // THIS CAN BE USED TO COMPARE THE DIFFERENCE BETWEEN
        // MersenneTwisterFast.java AND MersenneTwister.java
        /*
        System.out.println("\nGrab the first 1000 booleans");
        r = new MersenneTwister();
        for (j = 0; j < 1000; j++)
        {
            System.out.print(r.nextBoolean() + " ");
            if (j%8==7) System.out.println();
        }
        if (!(j%8==7)) System.out.println();

        byte[] bytes = new byte[1000];
        System.out.println("\nGrab the first 1000 bytes using nextBytes");
        r = new MersenneTwister();
        r.nextBytes(bytes);
        for (j = 0; j < 1000; j++)
        {
            System.out.print(bytes[j] + " ");
            if (j%16==15) System.out.println();
        }
        if (!(j%16==15)) System.out.println();

        byte b;
        System.out.println("\nGrab the first 1000 bytes -- must be same as nextBytes");
        r = new MersenneTwister();
        for (j = 0; j < 1000; j++)
        {
            System.out.print((b = r.nextByte()) + " ");
            if (b!=bytes[j]) System.out.print("BAD ");
            if (j%16==15) System.out.println();
        }
        if (!(j%16==15)) System.out.println();

        System.out.println("\nGrab the first 1000 shorts");
        r = new MersenneTwister();
        for (j = 0; j < 1000; j++)
        {
            System.out.print(r.nextShort() + " ");
            if (j%8==7) System.out.println();
        }
        if (!(j%8==7)) System.out.println();

        System.out.println("\nGrab the first 1000 ints");
        r = new MersenneTwister();
        for (j = 0; j < 1000; j++)
        {
            System.out.print(r.nextInt() + " ");
            if (j%4==3) System.out.println();
        }
        if (!(j%4==3)) System.out.println();

        System.out.println("\nGrab the first 1000 ints of different sizes");
        r = new MersenneTwister();
        for (j = 0; j < 1000; j++)
        {
            System.out.print(r.nextInt(j+1) + " ");
            if (j%4==3) System.out.println();
        }
        if (!(j%4==3)) System.out.println();

        System.out.println("\nGrab the first 1000 longs");
        r = new MersenneTwister();
        for (j = 0; j < 1000; j++)
        {
            System.out.print(r.nextLong() + " ");
            if (j%3==2) System.out.println();
        }
        if (!(j%3==2)) System.out.println();

        System.out.println("\nGrab the first 1000 floats");
        r = new MersenneTwister();
        for (j = 0; j < 1000; j++)
        {
            System.out.print(r.nextFloat() + " ");
            if (j%4==3) System.out.println();
        }
        if (!(j%4==3)) System.out.println();

        System.out.println("\nGrab the first 1000 doubles");
        r = new MersenneTwister();
        for (j = 0; j < 1000; j++)
        {
            System.out.print(r.nextDouble() + " ");
            if (j%3==2) System.out.println();
        }
        if (!(j%3==2)) System.out.println();

        System.out.println("\nGrab the first 1000 gaussian doubles");
        r = new MersenneTwister();
        for (j = 0; j < 1000; j++)
        {
            System.out.print(r.nextGaussian() + " ");
            if (j%3==2) System.out.println();
        }
        if (!(j%3==2)) System.out.println();
        */
    }
} | 16,579 | 33.541667 | 279 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/MixtureDistribution.java | // Mixture
package statistics.distributions;
public class MixtureDistribution extends Distribution{
    // Component distributions and their mixing weights; prob is assumed to sum to 1.
    Distribution[] dist;
    int n, type;
    double minValue, maxValue, lowerValue, upperValue, stepSize;
    double[] prob;

    //Constructors
    /** Creates a mixture of the given component distributions with the given weights. */
    public MixtureDistribution(Distribution[] d, double[] p){
        setParameters(d, p);
    }

    /** Creates a two-component mixture: d0 with weight 1 - a, d1 with weight a. */
    public MixtureDistribution(Distribution d0, Distribution d1, double a){
        setParameters(d0, d1, a);
    }

    /** Assigns components and weights, and derives the mixture's domain:
        the union of the component supports with the finest component step.
        The mixture is DISCRETE/CONTINUOUS only if every component agrees;
        otherwise the type is set to 2 (mixed). */
    public void setParameters(Distribution[] d, double[] p){
        double minLower = Double.POSITIVE_INFINITY, maxUpper = Double.NEGATIVE_INFINITY, minWidth = Double.POSITIVE_INFINITY;
        double a, b, w;
        dist = d;
        prob = p;
        int t0 = dist[0].getType(), t;
        n = dist.length;
        boolean mixed = false;
        for (int i = 0; i < n; i++){
            t = dist[i].getType();
            // discrete domains expose values, continuous domains expose bounds
            if (t == DISCRETE) a = dist[i].getDomain().getLowerValue(); else a = dist[i].getDomain().getLowerBound();
            if (a < minLower) minLower = a;
            if (t == DISCRETE) b = dist[i].getDomain().getUpperValue(); else b = dist[i].getDomain().getUpperBound();
            if (b > maxUpper) maxUpper = b;
            w = dist[i].getDomain().getWidth();
            if (w < minWidth) minWidth = w;
            if (t != t0) mixed = true;
        }
        if (mixed) t = 2; else t = t0;
        super.setParameters(minLower, maxUpper, minWidth, t);
    }

    /** Convenience form of setParameters for a two-component mixture. */
    public void setParameters(Distribution d0, Distribution d1, double a){
        setParameters(new Distribution[]{d0, d1}, new double[]{1 - a, a});
    }

    //Density
    /** Density: the weighted sum of the component densities. */
    public double getDensity(double x){
        double density = 0;
        for (int i = 0; i < n; i++) density = density + prob[i] * dist[i].getDensity(x);
        return density;
    }

    //Mean
    /** Mean: the weighted sum of the component means. */
    public double getMean(){
        double sum = 0;
        for (int i = 0; i < n; i++) sum = sum + prob[i] * dist[i].getMean();
        return sum;
    }

    //Variance
    /** Variance: sum p_i (var_i + mean_i^2) minus the squared mixture mean. */
    public double getVariance(){
        double sum = 0, mu = getMean(), m;
        for (int i = 0; i < n; i++){
            m = dist[i].getMean();
            sum = sum + prob[i] * (dist[i].getVariance() + m * m);
        }
        return sum - mu * mu;
    }

    //Simulate
    /** Simulates a value: picks a component by its weight, then samples it.
        BUG FIX: the original started the cumulative scan at index -1 and read
        prob[-1], throwing ArrayIndexOutOfBoundsException on every call. */
    public double simulate(){
        double p = Math.random();
        int i = 0;
        double cumulative = prob[0];
        // walk the cumulative weights until the random draw is covered;
        // the i < n - 1 guard keeps rounding error from running past the end
        while (cumulative < p && i < n - 1){
            i++;
            cumulative = cumulative + prob[i];
        }
        return dist[i].simulate();
    }
}
| 2,123 | 25.222222 | 119 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/NegativeBinomialDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the negative binomial distribution with specified successes
parameter and probability parameter: the distribution of the number of
Bernoulli trials needed to obtain a fixed number of successes.*/
public class NegativeBinomialDistribution extends Distribution{
    //Parameters
    private int successes;       // number of successes to wait for, k >= 1
    private double probability;  // success probability per trial, in (0, 1]

    /**General Constructor: creates a new negative binomial distribution with
    given parameter values.
    @param k the successes parameter
    @param p the probability parameter*/
    public NegativeBinomialDistribution(int k, double p){
        setParameters(k, p);
    }

    /**Default Constructor: creates a new negative binomial distribution with
    successes parameter 1 and probability parameter 0.5.*/
    public NegativeBinomialDistribution(){
        this(1, 0.5);
    }

    /**This method sets the parameters and the set of values.  Invalid
    arguments are silently corrected: k < 1 becomes 1, p <= 0 becomes 0.05,
    and p > 1 becomes 1.*/
    public void setParameters(int k, double p){
        //Correct for invalid parameters
        if(k < 1) k = 1;
        if(p <= 0) p = 0.05;
        if(p > 1) p = 1;
        //Assign parameters
        successes = k;
        probability = p;
        //Truncate the support at mean + 4 standard deviations
        super.setParameters(successes, Math.ceil(getMean() + 4 * getSD()), 1, DISCRETE);
    }

    /**Set the successes parameter*/
    public void setSuccesses(int k){
        setParameters(k, probability);
    }

    /**Get the successes parameter*/
    public int getSuccesses(){
        return successes;
    }

    /**Get the probability parameter*/
    public double getProbability(){
        return probability;
    }

    /**Set the probability parameter*/
    public void setProbability(double p){
        setParameters(successes, p);
    }

    /**Density function: C(n-1, k-1) p^k (1-p)^(n-k) for n >= k,
    where x is rounded to the nearest integer n.*/
    public double getDensity(double x){
        int n = (int)Math.rint(x);
        if(n < successes) return 0;
        else return comb(n - 1, successes - 1) * Math.pow(probability, successes)
            * Math.pow(1 - probability, n - successes);
    }

    /**Maximum value of the density function, evaluated at the mode
    (k - 1) / p + 1.*/
    public double getMaxDensity(){
        double mode = (successes - 1) / probability + 1;
        return getDensity(mode);
    }

    /**Mean: k / p*/
    public double getMean(){
        return successes / probability;
    }

    /**Variance: k (1 - p) / p^2*/
    public double getVariance(){
        return (successes * (1 - probability)) / (probability * probability);
    }

    /**Simulates a value: runs Bernoulli trials until the required number of
    successes is reached and returns the number of trials used.
    (A leftover debug println on every call has been removed.)*/
    public double simulate(){
        int count = 0, trials = 0;
        while (count < successes){
            if (Math.random() < probability) count++;
            trials++;
        }
        return trials;
    }
}
| 3,058 | 27.588785 | 102 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/NormalDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class encapsulates the normal distribution with specified mean and
standard deviation.*/
public class NormalDistribution extends Distribution{
    /** Normalizing constant sqrt(2 pi). */
    public final static double C = Math.sqrt(2 * Math.PI);
    private double mu, sigma, cSigma;

    /** Creates a normal distribution with the given mean and standard deviation. */
    public NormalDistribution(double mu, double sigma){
        setParameters(mu, sigma);
    }

    /** Creates a standard normal distribution (mean 0, standard deviation 1). */
    public NormalDistribution(){
        this(0, 1);
    }

    /** Sets the mean and standard deviation; a negative sigma is replaced by 1.
        The domain covers mean +/- 4 standard deviations in 100 steps. */
    public void setParameters(double m, double s){
        if (s < 0) s = 1;      // correct invalid scale
        mu = m;
        sigma = s;
        cSigma = C * sigma;    // cached density denominator
        double lo = mu - 4 * sigma;
        double hi = mu + 4 * sigma;
        super.setParameters(lo, hi, (hi - lo) / 100, CONTINUOUS);
    }

    /** Density of the normal distribution at x. */
    public double getDensity(double x){
        double z = (x - mu) / sigma;
        return Math.exp(-z * z / 2) / cSigma;
    }

    /** The density peaks at the mean. */
    public double getMaxDensity(){
        return getDensity(mu);
    }

    /** Median equals the mean by symmetry. */
    public double getMedian(){
        return mu;
    }

    /** Returns the mean. */
    public double getMean(){
        return mu;
    }

    /** Returns the variance, sigma squared. */
    public double getVariance(){
        return sigma * sigma;
    }

    /** Simulates a value via the Box-Muller polar form, consuming two
        uniform draws from the shared RNG. */
    public double simulate(){
        double radius = Math.sqrt(-2 * Math.log(RNG.nextDouble()));
        double angle = 2 * Math.PI * RNG.nextDouble();
        return mu + sigma * radius * Math.cos(angle);
    }

    /** Returns the location parameter. */
    public double getMu(){
        return mu;
    }

    /** Sets the location parameter. */
    public void setMu(double m){
        setParameters(m, sigma);
    }

    /** Returns the scale parameter. */
    public double getSigma(){
        return sigma;
    }

    /** Sets the scale parameter. */
    public void setSigma(double s){
        setParameters(mu, s);
    }

    /** CDF expressed through the gamma CDF via the half-normal relation. */
    public double getCDF(double x){
        double z = (x - mu) / sigma;
        double half = 0.5 * gammaCDF(z * z / 2, 0.5);
        if (z >= 0) return 0.5 + half;
        else return 0.5 - half;
    }

    /** Appends the mean and sigma to the superclass description. */
    public String toString()
    {
        return super.toString() + " mean " + mu + " sigma = " + sigma;
    }
}
| 3,204 | 26.869565 | 78 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/OrderStatisticDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**The distribution of the order statistic of a specified order from a
random sample of a specified size from a specified sampling distribution.
For a sample of size n, the k'th order statistic is the k'th smallest value.*/
public class OrderStatisticDistribution extends Distribution{
    Distribution dist;       // the sampling distribution
    int sampleSize, order;   // n and k
    /**General constructor: creates a new order statistic distribution
    corresponding to a specified sampling distribution, sample size, and
    order*/
    public OrderStatisticDistribution(Distribution d, int n, int k){
        setParameters(d, n, k);
    }
    /**Set the parameters: the sampling distribution, sample size n, and
    order k.  Invalid arguments are corrected: n < 1 becomes 1, and k is
    clamped into [1, n].  The domain is inherited from the sampling
    distribution.*/
    public void setParameters(Distribution d, int n, int k){
        //Correct for invalid parameters
        if (n < 1) n = 1;
        if (k < 1) k = 1; else if (k > n) k = n;
        //Assign parameters
        dist = d;
        sampleSize = n;
        order = k;
        int t = dist.getType();
        Domain domain = dist.getDomain();
        // discrete domains expose values, continuous domains expose bounds
        if (t == DISCRETE) super.setParameters(domain.getLowerValue(), domain.getUpperValue(), domain.getWidth(), t);
        else super.setParameters(domain.getLowerBound(), domain.getUpperBound(), domain.getWidth(), t);
    }
    /**Density function.  In the discrete case the density is computed as a
    difference of consecutive CDF values; in the continuous case the
    standard closed form k C(n,k) p^(k-1) (1-p)^(n-k) f(x) is used, where
    p = F(x) is the sampling CDF.*/
    public double getDensity(double x){
        double p = dist.getCDF(x);
        if (dist.getType() == DISCRETE) return getCDF(x) - getCDF(x - getDomain().getWidth());
        else return order * comb(sampleSize, order) * Math.pow(p, order - 1) * Math.pow(1 - p, sampleSize - order) * dist.getDensity(x);
    }
    /**Cumulative distribution function: P(X_(k) <= x) is the probability of
    at least k sample values falling at or below x, a binomial tail sum in
    p = F(x).*/
    public double getCDF(double x){
        double sum = 0;
        double p = dist.getCDF(x);
        for (int j = order; j <= sampleSize; j++) sum = sum + comb(sampleSize, j) * Math.pow(p, j) * Math.pow(1 - p, sampleSize - j);
        return sum;
    }
}
| 2,415 | 37.967742 | 130 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/ParetoDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the Pareto distribution with a specified shape parameter,
supported on [1, infinity).*/
public class ParetoDistribution extends Distribution{
    //Variable: the shape parameter a > 0
    private double parameter;

    /**This general constructor creates a new Pareto distribution with a
    specified shape parameter.*/
    public ParetoDistribution(double a){
        setParameter(a);
    }

    /**The default constructor creates a new Pareto distribution with parameter 1*/
    public ParetoDistribution(){
        this(1);
    }

    /**This method sets the parameter and computes the default interval.
    Non-positive arguments are replaced by 1.*/
    public void setParameter(double a){
        if (a <= 0) a = 1;
        parameter = a;
        double upper = 20 / parameter;
        double width = (upper - 1) / 100;
        super.setParameters(1, upper, width, CONTINUOUS);
    }

    /**This method returns the shape parameter*/
    public double getParameter(){
        return parameter;
    }

    /**Density: a / x^(a+1) for x >= 1, and 0 below the support.*/
    public double getDensity(double x){
        if (x < 1) return 0;
        else return parameter / Math.pow(x, parameter + 1);
    }

    /**The density is maximized at the left endpoint x = 1, where it equals a.*/
    public double getMaxDensity(){
        return parameter;
    }

    /**Mean: a / (a - 1) for a > 1; infinite otherwise.*/
    public double getMean(){
        if (parameter > 1) return parameter / (parameter - 1);
        else return Double.POSITIVE_INFINITY;
    }

    /**Variance: finite only for a > 2; infinite for 1 < a <= 2;
    undefined (NaN) otherwise.*/
    public double getVariance(){
        if (parameter > 2) return parameter / ((parameter - 1) * (parameter - 1) * (parameter - 2));
        else if (parameter > 1) return Double.POSITIVE_INFINITY;
        else return Double.NaN;
    }

    /**Cumulative distribution function: 1 - (1/x)^a on the support.
    BUG FIX: the original applied the formula everywhere, returning negative
    values for x < 1; below the support the CDF is 0.*/
    public double getCDF(double x){
        if (x < 1) return 0;
        return 1 - Math.pow(1 / x, parameter);
    }

    /**Quantile function: the inverse of the CDF, (1 - p)^(-1/a).*/
    public double getQuantile(double p){
        return 1 / Math.pow(1 - p, 1 / parameter);
    }
}
| 2,576 | 30.048193 | 94 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/PointMassDistribution.java | // Point mass at x0
package statistics.distributions;
public class PointMassDistribution extends Distribution{
    //Parameter: the single support point
    double x0;

    //Constructors
    /** Creates a point mass (degenerate) distribution at the given value. */
    public PointMassDistribution(double x0){
        setParameters(x0);
    }

    /** Creates a point mass at 0. */
    public PointMassDistribution(){
        this(0);
    }

    /** Sets the support point and registers the one-point discrete domain. */
    public void setParameters(double x0){
        this.x0 = x0;
        super.setParameters(x0, x0, 1, DISCRETE);
    }

    /** Density: 1 at the support point, 0 everywhere else. */
    public double getDensity(double x){
        if (x == x0) return 1;
        else return 0;
    }

    public double getMaxDensity(){
        return 1;
    }

    /** The mean is the support point itself. */
    public double getMean(){
        return x0;
    }

    /** A point mass has no spread. */
    public double getVariance(){
        return 0;
    }

    /** Every parameter index maps to the single support point. */
    public double getParameter(int i){
        return x0;
    }

    /** Simulation is deterministic: always the support point. */
    public double simulate(){
        return x0;
    }

    /** Every quantile is the support point. */
    public double getQuantile(double p){
        return x0;
    }

    /** Cumulative distribution function: 0 below x0, 1 at or above it.
    Added under the getCDF name for consistency with the sibling
    distribution classes, which all use getCDF. */
    public double getCDF(double x){
        if (x < x0) return 0;
        else return 1;
    }

    /** Backward-compatible alias for {@link #getCDF(double)}; the original
    class exposed the CDF under this non-standard name. */
    public double CDF(double x){
        return getCDF(x);
    }

    public String name(){
        return "Point Mass Distribution";
    }
}
| 882 | 13.47541 | 56 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/PoissonDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**The Poisson distribution with a specified rate parameter*/
public class PoissonDistribution extends Distribution{
    //Variables
    double parameter;

    /** Creates a Poisson distribution with the given rate. */
    public PoissonDistribution(double r){
        setParameter(r);
    }

    /** Creates a Poisson distribution with rate 1. */
    public PoissonDistribution(){
        this(1);
    }

    /** Sets the rate; a negative rate is replaced by 1.  The support is
        truncated to mean +/- 4 standard deviations, floored at 0. */
    public void setParameter(double r){
        if (r < 0) r = 1;
        parameter = r;
        double lo = Math.ceil(getMean() - 4 * getSD());
        double hi = Math.ceil(getMean() + 4 * getSD());
        if (lo < 0) lo = 0;
        super.setParameters(lo, hi, 1, DISCRETE);
    }

    /** Returns the rate parameter. */
    public double getParameter(){
        return parameter;
    }

    /** Density: e^(-r) r^k / k! for nonnegative integers k (x is rounded). */
    public double getDensity(double x){
        int k = (int)Math.rint(x);
        return (k < 0) ? 0 : Math.exp(-parameter) * (Math.pow(parameter, k) / factorial(k));
    }

    /** The density is maximized at the mode floor(r). */
    public double getMaxDensity(){
        return getDensity(Math.floor(parameter));
    }

    /** CDF expressed through the gamma CDF. */
    public double getCDF(double x){
        return 1 - gammaCDF(parameter, x + 1);
    }

    /** Mean equals the rate. */
    public double getMean(){
        return parameter;
    }

    /** Variance equals the rate. */
    public double getVariance(){
        return parameter;
    }

    /** Simulates by counting unit-rate exponential arrivals occurring
        before time r. */
    public double simulate(){
        int count = 0;
        double time = -Math.log(1 - Math.random());
        while (time <= parameter){
            count++;
            time -= Math.log(1 - Math.random());
        }
        return count;
    }
}
| 2,411 | 25.8 | 88 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/PokerDiceDistribution.java | // Poker Dice Distribution
package statistics.distributions;
public class PokerDiceDistribution extends Distribution{
    /** Total number of ordered outcomes when rolling five dice: 6^5. */
    final static int c = 7776;
    // Outcome counts by hand type, indexed 0..6 (no pair up to five of a kind).
    private static final double[] HAND_COUNTS = {720, 3600, 1800, 1200, 300, 150, 6};

    /** Creates the poker dice distribution on the hand types 0..6. */
    public PokerDiceDistribution(){
        setParameters(0, 6, 1, DISCRETE);
    }

    /** Density: the count of hands of type x divided by 6^5; 0 outside 0..6. */
    public double getDensity(double x){
        int k = (int)x;
        if (k < 0 || k > 6) return 0;
        return HAND_COUNTS[k] / c;
    }

    public String name(){
        return "Poker Dice Distribution";
    }
}
| 657 | 13.622222 | 56 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/RandomVariable.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
public class RandomVariable{
    private Distribution distribution;   // the probability model
    private IntervalData intervalData;   // empirical data gathered from samples
    private String name;

    /**General constructor: create a new random variable with a specified
    probability distribution and name*/
    public RandomVariable(Distribution d, String n){
        this.distribution = d;
        this.name = n;
        this.intervalData = new IntervalData(d.getDomain(), n);
    }

    /**Special constructor: create a new random variable with a specified
    probability distribution and the name X*/
    public RandomVariable(Distribution d){
        this(d, "X");
    }

    /**Replace the probability distribution and reset the data distribution
    to its domain*/
    public void setDistribution(Distribution d){
        this.distribution = d;
        this.intervalData.setDomain(d.getDomain());
    }

    /**Get the probability distribution*/
    public Distribution getDistribution(){
        return this.distribution;
    }

    /**Get the data distribution*/
    public IntervalData getIntervalData(){
        return this.intervalData;
    }

    /**Record a value for the random variable*/
    public void setValue(double x){
        this.intervalData.setValue(x);
    }

    /**Get the most recently recorded value of the random variable*/
    public double getValue(){
        return this.intervalData.getValue();
    }

    /**Draw a value from the probability distribution and record it in the
    data distribution*/
    public void sample(){
        this.intervalData.setValue(this.distribution.simulate());
    }

    /**Draw a value from the probability distribution, record it in the data
    distribution, and return it*/
    public double simulate(){
        double value = this.distribution.simulate();
        this.intervalData.setValue(value);
        return value;
    }

    /**Clear the data distribution by resetting it to the probability
    distribution's domain*/
    public void reset(){
        this.intervalData.setDomain(this.distribution.getDomain());
    }

    /**Get the name of the random variable*/
    public String getName(){
        return this.name;
    }

    /**Rename the random variable and its data distribution*/
    public void setName(String n){
        this.name = n;
        this.intervalData.setName(this.name);
    }
}
| 2,695 | 27.378947 | 95 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/StudentDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the student t distribution with a specified degrees of
freedom parameter*/
public class StudentDistribution extends Distribution{
    private int degrees;   // degrees of freedom, n >= 1
    private double c;      // cached log of the normalizing constant
    /**This general constructor creates a new student distribution with a specified
    degrees of freedom*/
    public StudentDistribution(int n){
        setDegrees(n);
    }
    /**This default constructor creates a new student distribution with 1 degree
    of freedom*/
    public StudentDistribution(){
        this(1);
    }
    /**This method sets the degrees of freedom (values below 1 become 1),
    recomputes the log normalizing constant, and chooses a symmetric domain.
    For n = 1 and n = 2 the mean/SD are undefined or infinite, so fixed
    bounds are used instead of mean + 4 SD.*/
    public void setDegrees(int n){
        //Correct invalid parameter
        if (n < 1) n = 1;
        //Assign parameter
        degrees = n;
        //Compute normalizing constant in log space:
        //log Gamma((n+1)/2) - (1/2) log n - (1/2) log pi - log Gamma(n/2)
        c = logGamma(0.5 * (degrees + 1)) - 0.5 * Math.log(degrees) - 0.5 * Math.log(Math.PI) - logGamma(0.5 * degrees);
        //Compute upper bound
        double upper;
        if (n == 1) upper = 8;
        else if (n == 2) upper = 7;
        else upper = Math.ceil(getMean() + 4 * getSD());
        super.setParameters(-upper, upper, upper / 50, CONTINUOUS);
    }
    /**This method computes the getDensity function, evaluated in log space
    for numerical stability.*/
    public double getDensity(double x){
        return Math.exp(c - 0.5 * (degrees + 1) * Math.log(1 + x * x / degrees));
    }
    /**This method returns the maximum value of the getDensity function,
    attained at 0 by symmetry.*/
    public double getMaxDensity(){
        return getDensity(0);
    }
    /**This method returns the mean: 0, except undefined (NaN) for 1 degree
    of freedom.*/
    public double getMean(){
        if (degrees == 1) return Double.NaN;
        else return 0;
    }
    /**This method returns the variance: n/(n-2) for n > 2, infinite for
    n = 2, undefined (NaN) for n = 1.*/
    public double getVariance(){
        if (degrees == 1) return Double.NaN;
        else if (degrees == 2) return Double.POSITIVE_INFINITY;
        else return (double)degrees / (degrees - 2);
    }
    /**This method computes the cumulative distribution function in terms of the
    beta CDF, using the substitution u = n / (n + x^2) and symmetry about 0.*/
    public double getCDF(double x){
        double u = degrees / (degrees + x * x);
        if (x > 0) return 1 - 0.5 * betaCDF(u, 0.5 * degrees, 0.5);
        else return 0.5 * betaCDF(u, 0.5 * degrees, 0.5);
    }
    /**This method returns the degrees of freedom*/
    public double getDegrees(){
        return degrees;
    }
    /**This method simulates a value of the distribution as Z / sqrt(V/n),
    where Z is standard normal and V is a chi-square sum of n squared
    standard normals, each generated by the Box-Muller transform.*/
    public double simulate(){
        double v, z, r, theta;
        v = 0;
        for (int i = 1; i <= degrees; i++){
            r = Math.sqrt(-2 * Math.log(Math.random()));
            theta = 2 * Math.PI * Math.random();
            z = r * Math.cos(theta);
            v = v + z * z;
        }
        r = Math.sqrt(-2 * Math.log(Math.random()));
        theta = 2 * Math.PI * Math.random();
        z = r * Math.cos(theta);
        return z / Math.sqrt(v / degrees);
    }
}
| 3,247 | 29.933333 | 114 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/TriangleDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the triangle distribution on a specified interval. If (X, Y) is
uniformly distributed on a triangular region, then X and Y have triangular distributions.*/
public class TriangleDistribution extends Distribution{
    private int orientation;
    private double c, minValue, maxValue;  // c = (max - min)^2, the normalizing constant
    public final static int UP = 0, DOWN = 1;

    /**This general constructor creates a new triangle distribution on a specified
    interval and with a specified orientation (UP = increasing density,
    DOWN = decreasing density).*/
    public TriangleDistribution(double a, double b, int i){
        setParameters(a, b, i);
    }

    /**This default constructor creates a new triangle distribution on the interval
    (0, 1) with positive slope*/
    public TriangleDistribution(){
        this(0, 1, UP);
    }

    /**This method sets the parameters: the minimum value, maximum value, and
    orientation.*/
    public void setParameters(double a, double b, int i){
        minValue = a;
        maxValue = b;
        orientation = i;
        double stepSize = (maxValue - minValue) / 100;
        super.setParameters(minValue, maxValue, stepSize, CONTINUOUS);
        //Compute normalizing constant
        c = (maxValue - minValue) * (maxValue - minValue);
    }

    /**This method computes the density: linear in x on the support, 0 outside.*/
    public double getDensity(double x){
        if (minValue <= x && x <= maxValue){
            if (orientation == UP) return 2 * (x - minValue) / c;
            else return 2 * (maxValue - x) / c;
        }
        else return 0;
    }

    /**This method computes the maximum value of the density function, attained
    at the endpoint the slope rises toward.*/
    public double getMaxDensity(){
        double mode;
        if (orientation == UP) mode = maxValue;
        else mode = minValue;
        return getDensity(mode);
    }

    /**This method computes the mean.*/
    public double getMean(){
        if (orientation == UP) return minValue / 3 + 2 * maxValue / 3;
        else return 2 * minValue / 3 + maxValue / 3;
    }

    /**This method computes the variance, (max - min)^2 / 18 for either
    orientation.*/
    public double getVariance(){
        return (maxValue - minValue) * (maxValue - minValue) / 18;
    }

    /**This method returns the minimum value.*/
    public double getMinValue(){
        return minValue;
    }

    /**This method returns the maximum value.*/
    public double getMaxValue(){
        return maxValue;
    }

    /**This method returns the orientation.*/
    public int getOrientation(){
        return orientation;
    }

    /**This method simulates a value from the distribution: the max (UP) or
    min (DOWN) of two independent uniforms on the interval.*/
    public double simulate(){
        double u = minValue + (maxValue - minValue) * Math.random();
        double v = minValue + (maxValue - minValue) * Math.random();
        if (orientation == UP) return Math.max(u, v);
        else return Math.min(u, v);
    }

    /**This method computes the cumulative distribution function.
    BUG FIX: the original applied the quadratic formula everywhere, returning
    positive values below the support and values above 1 beyond it; the CDF
    is now clamped to 0 below minValue and 1 above maxValue.*/
    public double getCDF(double x){
        if (x <= minValue) return 0;
        if (x >= maxValue) return 1;
        if (orientation == UP) return (x - minValue) * (x - minValue) / c;
        else return 1 - (maxValue - x) * (maxValue - x) / c;
    }
}
| 3,475 | 31.485981 | 91 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/UniformDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**The continuous uniform distribution on a bounded interval.
(NOTE(review): the original comment described this as "the discrete uniform
distribution on a finite set", but the class registers itself as CONTINUOUS
in setParameters below.)*/
public class UniformDistribution extends Distribution{
    // NOTE(review): this field is never assigned or read anywhere in the
    // class - apparently dead; kept for binary compatibility.
    double values;
    /**Creates a uniform distribution on the interval [a, b].*/
    public UniformDistribution(double a, double b){
        setParameters(a, b);
    }
    /**Creates a uniform distribution on the unit interval [0, 1].*/
    public UniformDistribution(){
        setParameters(0, 1);
    }
    /**Sets the interval endpoints; the domain uses a fixed step of 0.01.*/
    public void setParameters(double a, double b){
        super.setParameters(a, b, 0.01, CONTINUOUS);
    }
    /**Density: constant 1/size on the interval, 0 outside.
    (Assumes Domain.getSize() is the interval length for a continuous
    domain - TODO confirm against the Domain class.)*/
    public double getDensity(double x){
        if (getDomain().getLowerValue() <= x & x <= getDomain().getUpperValue())
            return 1.0 / getDomain().getSize();
        else return 0;
    }
    /**Maximum of the density: the constant value 1/size.*/
    public double getMaxDensity(){
        return 1.0 / getDomain().getSize();
    }
    /**Simulates a value uniformly between the domain's lower and upper values.*/
    public double simulate(){
        return getDomain().getLowerValue() + RNG.nextDouble()*(getDomain().getUpperValue()-getDomain().getLowerValue());
    }
}
| 1,521 | 30.061224 | 114 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/WalkMaxDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the distribution of the maximum value of a symmetric random walk on the interval
[0, n].*/
public class WalkMaxDistribution extends Distribution{
    //Number of steps in the walk (the time parameter)
    private int steps;

    /**Creates a walk-max distribution with the specified number of steps.*/
    public WalkMaxDistribution(int n){
        setSteps(n);
    }

    /**Creates a walk-max distribution with the default of 10 steps.*/
    public WalkMaxDistribution(){
        this(10);
    }

    /**Sets the number of steps (coerced to at least 1) and rebuilds the
    discrete domain [0, steps].*/
    public void setSteps(int n){
        steps = (n < 1) ? 1 : n;
        super.setParameters(0, steps, 1, DISCRETE);
    }

    /**Density function: comb(steps, m) / 2^steps, where m is the round-up
    of (k + steps) / 2 for the integer value k nearest x.*/
    public double getDensity(double x){
        int k = (int)Math.rint(x);
        int s = k + steps;
        int m = (s % 2 == 0) ? s / 2 : (s + 1) / 2;
        return comb(steps, m) / Math.pow(2, steps);
    }

    /**Maximum of the density function, attained at 0.*/
    public double getMaxDensity(){
        return getDensity(0);
    }

    /**Returns the number of steps.*/
    public double getSteps(){
        return steps;
    }

    /**Simulates one value: run the walk and track the running maximum.*/
    public double simulate(){
        int position = 0, max = 0;
        for (int i = 1; i <= steps; i++){
            position += (Math.random() < 0.5) ? 1 : -1;
            if (position > max) max = position;
        }
        return max;
    }
}
| 2,196 | 29.09589 | 101 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/WalkPositionDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
/**This class models the distribution of the position at time n for a random walk
on the interval [0, n].*/
package statistics.distributions;
public class WalkPositionDistribution extends Distribution{
    //Number of steps, and the probability of a step to the right
    private int steps;
    private double probability;

    /**Creates a distribution with the given number of steps and step probability.*/
    public WalkPositionDistribution(int n, double p){
        setParameters(n, p);
    }

    /**Creates a distribution with 10 steps and probability 0.5.*/
    public WalkPositionDistribution(){
        this(10, 0.5);
    }

    /**Sets the parameters: n is floored at 0 and p clamped to [0, 1];
    the discrete domain is [-steps, steps] with step size 2.*/
    public void setParameters(int n, double p){
        steps = Math.max(n, 0);
        probability = Math.min(1, Math.max(0, p));
        super.setParameters(-steps, steps, 2, DISCRETE);
    }

    /**Density: position k corresponds to m = (k + steps) / 2 right-steps,
    so P = comb(steps, m) p^m (1 - p)^(steps - m).*/
    public double getDensity(double x){
        int k = (int)Math.rint(x);
        int m = (k + steps) / 2;
        return comb(steps, m) * Math.pow(probability, m) * Math.pow(1 - probability, steps - m);
    }

    /**Maximum of the density function, evaluated at the mode.*/
    public double getMaxDensity(){
        double mode = 2 * Math.min(Math.floor((steps + 1) * probability), steps) - steps;
        return getDensity(mode);
    }

    /**Mean of the final position: 2np - n.*/
    public double getMean(){
        return 2 * steps * probability - steps;
    }

    /**Variance of the final position: 4np(1 - p).*/
    public double getVariance(){
        return 4 * steps * probability * (1 - probability);
    }

    /**Returns the number of steps.*/
    public double getSteps(){
        return steps;
    }

    /**Returns the probability of a step to the right.*/
    public double getProbability(){
        return probability;
    }

    /**Simulates one value by running the walk to completion.*/
    public double simulate(){
        int position = 0;
        for (int i = 1; i <= steps; i++)
            position += (Math.random() < probability) ? 1 : -1;
        return position;
    }
}
| 2,786 | 29.626374 | 90 | java |
tsml-java | tsml-java-master/src/main/java/statistics/distributions/WeibullDistribution.java | /*
Copyright (C) 2001 Kyle Siegrist, Dawn Duehring
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This program is distributed in the hope that it will be useful, but without
any warranty; without even the implied warranty of merchantability or
fitness for a particular purpose. See the GNU General Public License for
more details. You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package statistics.distributions;
/**This class models the Weibull distribution with specified shape and scale
parameters*/
public class WeibullDistribution extends Distribution{
    //Shape parameter k, scale parameter b, and the normalizing constant c = k / b^k
    double shape, scale, c;

    /**General constructor: a Weibull distribution with the given shape and
    scale parameters.*/
    public WeibullDistribution(double k, double b){
        setParameters(k, b);
    }

    /**Default constructor: shape 1, scale 1 (the standard exponential).*/
    public WeibullDistribution(){
        this(1, 1);
    }

    /**Sets the shape and scale (non-positive values are replaced with 1),
    recomputes the normalizing constant and defines the default interval
    [0, ceil(mean + 4 sd)] in 100 subdivisions.*/
    public void setParameters(double k, double b){
        if (k <= 0) k = 1;
        if (b <= 0) b = 1;
        shape = k;
        scale = b;
        c = shape / Math.pow(scale, shape);
        double upper = Math.ceil(getMean() + 4 * getSD());
        super.setParameters(0, upper, upper / 100, CONTINUOUS);
    }

    /**Density function: c x^(k-1) exp(-(x/b)^k).*/
    public double getDensity(double x){
        return c * Math.pow(x, shape - 1) * Math.exp(-Math.pow(x / scale, shape));
    }

    /**Maximum of the density, evaluated at the mode (the lower endpoint
    when shape < 1, where the density is unbounded).*/
    public double getMaxDensity(){
        double mode = (shape < 1)
            ? getDomain().getLowerValue()
            : scale * Math.pow((shape - 1) / shape, 1 / shape);
        return getDensity(mode);
    }

    /**Mean: b Gamma(1 + 1/k).*/
    public double getMean(){
        return scale * gamma(1 + 1 / shape);
    }

    /**Variance: b^2 Gamma(1 + 2/k) - mean^2.*/
    public double getVariance(){
        double mean = getMean();
        return scale * scale * gamma(1 + 2 / shape) - mean * mean;
    }

    /**Cumulative distribution function: 1 - exp(-(x/b)^k).*/
    public double getCDF(double x){
        return 1 - Math.exp(-Math.pow(x / scale, shape));
    }

    /**Quantile function (inverse CDF): b (-ln(1-p))^(1/k).*/
    public double getQuantile(double p){
        return scale * Math.pow(-Math.log(1 - p), 1 / shape);
    }

    /**Failure (hazard) rate: k x^(k-1) / b^k.*/
    public double getFailureRate(double x){
        return shape * Math.pow(x, shape - 1) / Math.pow(scale, shape);
    }

    /**Returns the shape parameter.*/
    public double getShape(){
        return shape;
    }

    /**Sets the shape parameter, keeping the current scale.*/
    public void setShape(double k){
        setParameters(k, scale);
    }

    /**Returns the scale parameter.*/
    public double getScale(){
        return scale;
    }

    /**Sets the scale parameter, keeping the current shape.*/
    public void setScale(double b){
        setParameters(shape, b);
    }
}
| 3,352 | 28.672566 | 77 | java |
tsml-java | tsml-java-master/src/main/java/statistics/simulators/ArmaModel.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package statistics.simulators;
//Model ARMA series naively as Box Jenkins representation
// Note really need infinite AR representation
import fileIO.OutFile;
import statistics.distributions.NormalDistribution;
public class ArmaModel extends Model{
    //AR coefficients (length p) and MA coefficients (length q)
    double[] ar;
    double[] ma;
    //Not sure how to map one set onto the other tho!
    //Why include this, just use the ar parameters instead
    int p, q;       //AR order and MA order
    double[] xRecord;   //circular buffer holding the last p observations
    double[] eRecord;   //circular buffer holding the last q error terms
    //Default error variance applied to every newly constructed model
    public static double GLOBALVAR=1;
    public static void setGlobalVariance(double v){GLOBALVAR=v;}
    double sigma=GLOBALVAR;
    /** Creates an ARMA(p,q) model with zero coefficients and N(0, sigma) errors. */
    public ArmaModel(int p, int q)
    {
        this.p=p;
        this.q=q;
        if(p>0)
        {
            ar=new double[p];
            xRecord=new double[p];
        }
        if(q>0)
        {
            ma=new double[q];
            eRecord=new double[q];
        }
        t=0;
        sigma=GLOBALVAR;
        error = new NormalDistribution(0,sigma);
    }
    /** Creates a pure AR(p) model (q = 0). */
    public ArmaModel(int p)
    {
        this.p=p;
        this.q=0;
        ar=new double[p];
        xRecord=new double[p];
        t=0;
        sigma=GLOBALVAR;
        error = new NormalDistribution(0,sigma);
    }
    /** Creates an AR model whose coefficients are taken from pi. */
    public ArmaModel(double[] pi)
    {
        this(pi.length);
        setParas(pi);
    }
    /** Creates an ARMA model with the given AR and MA coefficient arrays. */
    public ArmaModel(double[]ar, double[]ma)
    {
        this(ar.length,ma.length);
        setParas(ar,ma);
    }
    /** Sets AR coefficients only; resets q to 0 but note the ma/eRecord arrays
     * themselves are not cleared. Duplicates setParas(double[]). */
    @Override
    public void setParameters(double[]pi)
    {
        if(pi.length!=p)
        {
            p=pi.length;
            ar=new double[p];
            xRecord=new double[p];
        }
        q=0;
        for(int i=0;i<p;i++)
            this.ar[i]=pi[i];
    }
    /** Identical to setParameters(double[]) — retained for compatibility. */
    public void setParas(double[]pi)
    {
        if(pi.length!=p)
        {
            p=pi.length;
            ar=new double[p];
            xRecord=new double[p];
        }
        q=0;
        for(int i=0;i<p;i++)
            this.ar[i]=pi[i];
    }
    /** Sets both coefficient arrays, reallocating the buffers if the orders change.
     * NOTE(review): the length checks are guarded by p>0 / q>0, so a model built
     * with order 0 cannot grow via this method — confirm intended. */
    public void setParas(double[]ar, double[]ma)
    {
        if(p>0 && ar.length!=p)
        {
            p=ar.length;
            this.ar=new double[p];
            xRecord=new double[p];
        }
        if(q>0 && ma.length!=q)
        {
            q=ma.length;
            this.ma=new double[q];
            eRecord=new double[q];
        }
        for(int i=0;i<p;i++)
            this.ar[i]=ar[i];
        for(int i=0;i<q;i++)
            this.ma[i]=ma[i];
    }
    /** Returns the AR coefficient array (a direct reference, not a copy). */
    public double[] getParas(){return ar;}
    /** Sets the error standard deviation on this model and its error distribution. */
    public void setSigma(double s){
        sigma=s;
        ((NormalDistribution)error).setSigma(s);
    }
    /** Seeds the observation and error buffers with explicit starting values. */
    public void setInitialValues(double[]initX, double[]initE)
    {
        for(int i=0;i<p;i++)
            this.xRecord[i]=initX[i];
        for(int i=0;i<q;i++)
            this.eRecord[i]=initE[i];
    }
    /** Not meaningful for an ARMA process (depends on past values, not a single x);
     * always returns -1. */
    public double generate(double x){
        return -1;
    }
    /** Draws a single error term from the error distribution. */
    public double generateError()
    {
        return error.simulate();
    }
    /** Resets time to zero and randomises the buffers. */
    public void reset(){
        t=0;
        randomise();
    }
    /** Fills both buffers with values drawn uniformly from [-2*sigma, 2*sigma]. */
    public void randomise()
    {
        for(int i=0;i<p;i++)
            xRecord[i]=-2*sigma+4*sigma*error.RNG.nextDouble();
        for(int i=0;i<q;i++)
            eRecord[i]=-2*sigma+4*sigma*error.RNG.nextDouble();
    }
    /** Resets time to zero and zeroes both buffers. */
    public void resetToZero()
    {
        t=0;
        for(int i=0;i<p;i++)
            xRecord[i]=0;
        for(int i=0;i<q;i++)
            eRecord[i]=0;
    }
    /** Generates the next observation. For the first p steps the stored initial
     * values are returned unchanged (warm-up); afterwards the AR and MA sums are
     * formed from the circular buffers, a new error is drawn and the buffers updated. */
    public double generate()
    {
        double x=0,e;
        int t =(int) (this.t);  //local step index shadows the double-valued field
        if(t<p)
        {
            this.t++;
            return xRecord[t];
        }
        for(int i=0;i<p;i++)
            x+=ar[p-i-1]*xRecord[(t+i)%p];
        //NOTE(review): the MA coefficient index (t+i)%q is asymmetric with the AR
        //term's ar[p-i-1] — looks like it should be ma[q-i-1]; confirm before relying
        //on the MA part of this model.
        for(int i=0;i<q;i++)
            x+=ma[(t+i)%q]*eRecord[(t+i)%q];
        e=error.simulate();
        x+=e;
        if(p>0)
            xRecord[t%p]=x;
        if(q>0)
            eRecord[t%q]=e;
        this.t++;
        return x;
    }
    /** Returns an empty string — parameter printing was never implemented. */
    public String toString()
    {
        String str="";
        return str;
    }
    /** Returns the first-difference series d[i+1]-d[i] (length one less than input). */
    public static double[] differenceData(double[] d)
    {
        double[] newD = new double[d.length-1];
        for(int i=0;i<d.length-1;i++)
            newD[i]=d[i+1]-d[i];
        return newD;
    }
    /** Demo: builds an ARMA(p,q) with all coefficients 1, initial values 10,
     * and prints t generated observations to stdout. */
    public static void simulateData(int p, int q, int t)
    {
        ArmaModel a = new ArmaModel(p,q);
        double[] ar = new double[p];
        double[] ma = new double[q];
        for(int i=0;i<p;i++)
            ar[i]=1;
        for(int i=0;i<q;i++)
            ma[i]=1;
        a.setParas(ar,ma);
        a.setSigma(GLOBALVAR);
        for(int i=0;i<p;i++)
            ar[i]=10;
        for(int i=0;i<q;i++)
            ma[i]=10;
        a.setInitialValues(ar,ma);
        System.out.println("Model = ARMA("+p+","+q+")");
        for(int i=0;i<t;i++)
            System.out.println("Data = "+a.generate());
// Compare to "anytime" Vas?? and Keogh == ??
// Clustering method Implement EM and Hierarchical
// Experiments
// 1: Use Marahajs models
// 2: if ineffective, find some models it works on
// 3: Generate a class of random models
// Perform experiments, write paper and send
// to Journal of Classification
// Get other data sources
    }
    /** Ad-hoc entry point. Everything after System.exit(0) is dead at runtime
     * (kept, presumably, for quick manual switching between demos). */
    static public void main(String[] args){
        simulateDataForForecastingWithMP();
        System.exit(0);
        System.out.println("Testing Arma Models");
        System.out.println("Generating Data");
        simulateData(0,1,30);
    }
    /** Writes 10000 observations from AR(1) models with phi=0.5 and phi=-0.5
     * to a hard-coded CSV path (C:\Temp\ARSeries.csv). */
    public static void simulateDataForForecastingWithMP(){
        OutFile ex= new OutFile("C:\\Temp\\ARSeries.csv");
        double[][] p={{0.5},{-0.5}};
        ArmaModel ar1= new ArmaModel(p[0]);
        ArmaModel ar2= new ArmaModel(p[1]);
        for(int i=0;i<10000;i++)
            ex.writeLine(ar1.generate()+"");
        for(int i=0;i<10000;i++)
            ex.writeLine(ar2.generate()+"");
    }
}
tsml-java | tsml-java-master/src/main/java/statistics/simulators/DataSimulator.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
/*
Base class for data simulator. Three use cases.
1. Set up models externally then call generateData
ArrayList<Model> m = ....
DataSimulator ds = new DataSimulator(m);
Instances data=ds.generateData();
2. Use a subclass of DataSimulator
DataSimulator ds = new SimulateShapeletDataset();
Instances data=ds.generateData();
*/
package statistics.simulators;
import java.util.ArrayList;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import tsml.transformers.RowNormalizer;
/**
 * Generates weka Instances datasets from a list of simulator Models,
 * one model per class (see the use cases described in the file header).
 * @author ajb
 */
public class DataSimulator {
    int nosClasses=2;
    ArrayList<Model> models;    //one generating model per class
// * @param seriesLength Length of each series, assumed same for all instances
    int seriesLength=100;
    int nosPerClass=50;
// * @param casesPerClass. nosCases.length specifies the number of cases (which should already be stored), casesPerClass[i] gives the number of cases in class i
    int[] casesPerClass;
    /** Sets the length of every generated series. */
    public void setSeriesLength(int s){
        seriesLength=s;
    }
    /** Sets the per-class case counts (reference kept, not copied). */
    public void setCasesPerClass(int[] c){
        casesPerClass=c;
        nosPerClass=c[0]; //Assumed equal number per class
    }
    //Override this to define a default random model
    protected DataSimulator(){
        models=new ArrayList<Model>();
    }
    /** Sets the class count from the parameter matrix; models must be added later. */
    protected DataSimulator(double[][] paras){
        nosClasses=paras.length;
        models=new ArrayList<Model>(nosClasses);
    }
    /**
     * So you can either load the models like this, or you can subclass
     * DataSimulator and redefine the constructor
     * to create the base Models. This is a shallow copy of the list contents.
     * @param m one model per class
     */
    public DataSimulator(ArrayList<Model> m){
        models=new ArrayList<>();
        nosClasses=m.size();
        models.addAll(m);
    }
    @SuppressWarnings("ManualArrayToCollectionCopy")
    public DataSimulator(Model[] m){
        nosClasses=m.length;
        models=new ArrayList<Model>(nosClasses);
        for(int i=0;i<m.length;i++)
            models.add(m[i]);
    }
    /** Appends models from the list. NOTE(review): existing models are not
     * cleared first, so repeated calls accumulate — confirm intended. */
    public void setModel(ArrayList<Model> m){
        nosClasses=m.size();
        models.addAll(m);
    }
    public ArrayList<Model> getModels(){ return models;}
    @SuppressWarnings("ManualArrayToCollectionCopy")
    public void setModel(Model[] m){
        nosClasses=m.length;
        for(int i=0;i<m.length;i++)
            models.add(m[i]);
    }
    /**
     * @PRE: All parameters of the model have been set through other means
     * @POST: no change to the model, no instances are stored
     *
     * @return Set of n=sum(casesPerClass[i]) instances, each seriesLength+1 attributes, the last of which is the class label,
     */
    public Instances generateDataSet() {
        Instances data;
        //Default to equal class sizes if no explicit counts were provided
        if(casesPerClass==null){
            casesPerClass=new int[nosClasses];
            for(int i=0;i<casesPerClass.length;i++)
                casesPerClass[i]=nosPerClass;
        }
        ArrayList<Attribute> atts=new ArrayList<>();
        nosClasses=casesPerClass.length;
        int totalCases=casesPerClass[0];
        for(int i=1;i<casesPerClass.length;i++)
            totalCases+=casesPerClass[i];
        //One numeric attribute per time point, named after the model type
        for(int i=1;i<=seriesLength;i++){
            atts.add(new Attribute(models.get(0).getAttributeName()+i));
        }
        //Nominal class attribute with values "0".."nosClasses-1"
        ArrayList<String> fv=new ArrayList<>();
        for(int i=0;i<nosClasses;i++)
            fv.add(""+i);
        atts.add(new Attribute("Target",fv));
        data = new Instances(models.get(0).getModelType(),atts,totalCases);
        double[] d;
        for(int i=0;i<nosClasses;i++){
            for(int j=0;j<casesPerClass[i];j++){
                //Generate the series
                initialise();
                d=generate(seriesLength,i);
                //Add to an instance
                Instance in= new DenseInstance(data.numAttributes());
                for(int k=0;k<d.length;k++)
                    in.setValue(k,d[k]);
                //Add to all instances; the class value must be set on the copy
                //stored in the dataset (lastInstance), not the detached one
                data.add(in);
                in=data.lastInstance();
                in.setValue(d.length,""+i);
            }
        }
        data.setClassIndex(seriesLength);
        return data;
    }
    /**
     * @PRE: All parameters of the model have been set through other means
     * @POST: no change to the model, no instances are stored
     * @return ARFF-style comment header describing each class's model
     **/
    public String generateHeader(){
        String header="%"+" "+models.get(0).getModelType()+"\n";
        for(int i=0;i<models.size();i++){
            header+="%Class "+i;
            header+="\n"+models.get(i).getHeader()+"\n";
        }
        return header;
    }
    /** Generates a train and test set (index 0 and 1), both row-normalised. */
    public Instances[] generateTrainTest() throws Exception{
        Instances[] data=new Instances[2];
        data[0]=generateDataSet();
// initialise();//Rest models? depends if the model is deterministic! might cause some problems either way
        data[1]=generateDataSet();
        //Normalise
        RowNormalizer nc= new RowNormalizer();
        data[0]=nc.transform(data[0]);
        data[1]=nc.transform(data[1]);
        return data;
    }
    /** Generates one raw series of the given length from the model for class modelNos. */
    public double[] generate(int length, int modelNos){
        Model a=models.get(modelNos);
        double[] d=a.generateSeries(length);
        return d;
    }
    /**
     * Resets every model to its initial state (delegates to Model.reset()).
     */
    public void initialise(){
        for(Model a:models)
            a.reset();
    }
    public void setNosPerClass(int x){
        nosPerClass=x;
    }
    public void setLength(int l){
        seriesLength=l;
    }
    /**
     * @return String with all parameter names and values
     */
    public String getParameters(){
        String str=nosClasses+"\n";
        for(Model m:models)
            str+=m.toString()+"\n";
        return str;
    }
}
| 6,534 | 30.570048 | 161 | java |
tsml-java | tsml-java-master/src/main/java/statistics/simulators/DictionaryModel.java | /*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
/*
AJB Oct 2016
Model to simulate data where dictionary approach should be optimal.
A single shapelet is common to all series. The discriminatory feature is the
number of times it appears in a series.
*/
package statistics.simulators;
import fileIO.OutFile;
import java.util.*;
import java.io.*;
public class DictionaryModel extends Model {
public enum ShapeType {TRIANGLE,HEADSHOULDERS,SINE, STEP, SPIKE}
private static int DEFAULTNUMSHAPELETS=5;
private static int DEFAULTSERIESLENGTH=1000;
public static int DEFAULTSHAPELETLENGTH=29;
protected Shape shape1;
protected Shape shape2;
protected int numShape1=DEFAULTNUMSHAPELETS;
protected int[] shape1Locations;
protected int numShape2=DEFAULTNUMSHAPELETS;
protected int[] shape2Locations;
protected int totalNumShapes;
protected int seriesLength=DEFAULTSERIESLENGTH;
protected int shapeletLength=DEFAULTSHAPELETLENGTH;
    //Default Constructor, max start should be at least 29 less than the length
    // of the series if using the default shapelet length of 29
    /** Creates a model with all class defaults: series length, shape counts and shapelet length. */
    public DictionaryModel()
    {
        this(new double[]{DEFAULTSERIESLENGTH,DEFAULTNUMSHAPELETS,DEFAULTNUMSHAPELETS,DEFAULTSHAPELETLENGTH});
    }
    /**
     * Creates a model from a parameter array (any prefix may be supplied):
     * [seriesLength, numShape1, numShape2, shapeletLength].
     * Two distinct random shape types are chosen, and shape placements are
     * generated; if the shapes cannot all fit, the counts are reduced until
     * they do.
     */
    public DictionaryModel(double[] param)
    {
        super();
        setDefaults();
        //PARAMETER LIST: seriesLength, numShape1, numShape2, shapeletLength
        if(param!=null){
            //Deliberate fall-through: a length-k array sets the first k parameters
            switch(param.length){
                default:
                case 4: shapeletLength=(int)param[3];
                case 3: numShape2=(int)param[2];
                case 2: numShape1=(int)param[1];
                case 1: seriesLength=(int)param[0];
            }
        }
        totalNumShapes=numShape1+numShape2;
        shape1Locations=new int[numShape1];
        shape2Locations=new int[numShape2];
        shape1=new Shape(shapeletLength);
//        shape1.type=ShapeType.TRIANGLE;
        shape1.length=shapeletLength;
        shape1.randomiseShape();
        //shape2 must end up with a different type to shape1
        shape2=new Shape(shapeletLength);
        shape2.randomiseShape();
        while(shape2.type==shape1.type)
            shape2.randomiseShape();
/*
Enforce non-overlapping, only occurs if there is not enough room for
all the shapes. Locations split randomly between the two classes
This is reset with a call to generateSeries
*/
        while(!setNonOverlappingLocations()){
            totalNumShapes--;
            if(numShape1>numShape2)
                numShape1--;
            else
                numShape2--;
        }
    }
    /** Restores seriesLength, numShape1 and shapeletLength to the class defaults.
     * NOTE(review): numShape2 is not reset here — confirm intended. */
    public final void setDefaults(){
        seriesLength=DEFAULTSERIESLENGTH;
        numShape1=DEFAULTNUMSHAPELETS;
        shapeletLength=DEFAULTSHAPELETLENGTH;
    }
    /** Returns the type of the first (class-discriminating) shape. */
    public ShapeType getShape1(){
        return shape1.type;
    }
    /** Returns the type of the second shape. */
    public ShapeType getShape2(){
        return shape2.type;
    }
    /** Sets the first shape's type and resets its length to the model's shapelet length. */
    public void setShape1Type(ShapeType st){
        shape1.setType(st);
        shape1.setLength(shapeletLength);
    }
    /** Sets the second shape's type and resets its length to the model's shapelet length. */
    public void setShape2Type(ShapeType st){
        shape2.setType(st);
        shape2.setLength(shapeletLength);
    }
    /** Sets the count of shape-1 occurrences.
     * NOTE(review): totalNumShapes is not updated by this setter — confirm callers
     * refresh placements afterwards. */
    public void setNumShape1(int n){
        numShape1=n;
    }
    /** Sets the count of shape-2 occurrences (same caveat as setNumShape1). */
    public void setNumShape2(int n){
        numShape2=n;
    }
final public boolean setNonOverlappingLocations(){
if(seriesLength-shapeletLength*totalNumShapes<totalNumShapes) //Cannot fit them in, not enough spaces
return false;
ArrayList<Integer> startPos= new ArrayList<>();
for(int i=0;i<seriesLength-shapeletLength-1;i++)
startPos.add(i);
ArrayList<Integer> locations=new ArrayList<>();
for(int i=0;i<totalNumShapes;i++) {
if(startPos.size() == 0)
throw new UnsupportedOperationException(" In DictionaryModel and there are no valid start points left!");
int x=rand.nextInt(startPos.size());
int val=startPos.get(x);
// System.out.println(" Start point ="+val);
locations.add(val);
for(int j=x;j<startPos.size() && j<x+shapeletLength;j++)
startPos.remove(j);
// System.out.println(" Remaining valid start points ="+startPos.size());
}
/*
for(int i=0;i<totalNumShapes;i++){
boolean ok=false;
int l=shapeletLength/2;
while(!ok){
ok=true;
//Search mid points to level the distribution up somewhat
l=rand.nextInt(seriesLength-shapeletLength)+shapeletLength/2;
// System.out.println("trying "+l);
for(int in:locations){
if((l>=in-shapeletLength && l<in+shapeletLength) //l inside ins
||(l<in-shapeletLength && l+shapeletLength>in) ){ //ins inside l
ok=false;
// System.out.println(l+" overlaps with "+in);
break;
}
}
}
// System.out.println("Adding "+l);
locations.add(l);
}
//Revert to start points
for(int i=0;i<locations.size();i++){
//Just in case ..
int val=locations.get(i);
locations.set(i, val-shapeletLength/2);
}
// System.out.println("Location ="+(l-shapeletLength/2));
*/
shape1Locations=new int[numShape1];
for(int i=0;i<numShape1;i++)
shape1Locations[i]=locations.get(i);
shape2Locations=new int[numShape2];
for(int i=0;i<numShape2;i++)
shape2Locations[i]=locations.get(i+numShape1);
Arrays.sort(shape1Locations);
Arrays.sort(shape2Locations);
return true;
}
    /**
     * Legacy placement algorithm, superseded by setNonOverlappingLocations().
     * Distributes the free space between shapes, then shuffles the resulting
     * start positions between the two shape types. Retained for reference only.
     * NOTE(review): uses an unseeded new Random() inside the loop, so results
     * are NOT reproducible under Model's global seed — do not reinstate as-is.
     */
    final public boolean setNonOverlappingLocationsOld(){
//        if(seriesLength-shapeletLength*totalNumShapes<totalNumShapes) //Cannot fit them in, not enough spaces
//            return false;
        int s=0;
//        while(s)
        //http://stackoverflow.com/questions/33831442/random-placement-of-non-overlapping-intervals
        //Find non overlapping locations.
        //1. Specify how many spaces there are
        int spaces=seriesLength-shapeletLength*totalNumShapes+1;
        //We now need to randomly distribute these spaces between each shapelet.
        int nosIntervals=totalNumShapes+1;
        //Split spaces into nosIntervals
        ArrayList<Integer> intervals=new ArrayList<>();
/*        for(int i=0;i<nosIntervals;i++){
            int r=rand.nextInt(spaces-(nosIntervals-i));
            intervals.add(r);
            spaces-=r;
        }
*/
        int[] temp=new int[nosIntervals];
        for(int i=0;i<spaces;i++)
            temp[new Random().nextInt(nosIntervals)]++;
        for(int i=0;i<nosIntervals;i++)
            intervals.add(temp[i]);
        //Randomize intervals
//        Collections.shuffle(intervals, rand);
        //Add back into place
        ArrayList<Integer> shapeLocations=new ArrayList<> (totalNumShapes);
        shapeLocations.add(intervals.get(0));
        int current=shapeLocations.get(0)+shapeletLength;
        for(int i=1;i<totalNumShapes;i++){
            shapeLocations.add(current+intervals.get(i));
            current=shapeLocations.get(i)+shapeletLength;
        }
        //Randomize locations
        Collections.shuffle(shapeLocations, rand);
        //Split randomised locations for two types of shape
        for(int i=0;i<numShape1;i++)
            shape1Locations[i]=shapeLocations.get(i);
        for(int i=0;i<numShape2;i++)
            shape2Locations[i]=shapeLocations.get(i+numShape1);
        Arrays.sort(shape1Locations);
        Arrays.sort(shape2Locations);
/*
//Sort them
        Arrays.sort(shapeLocations);
//Shift forward if necessary
        for(int i=1;i<shapeLocations.length;i++){
            if(shapeLocations[i]<shapeLocations[i-1]+shapeletLength)
                shapeLocations[i]=shapeLocations[i-1]+shapeletLength;
        }
//Randomise again
        intervals=new ArrayList<Integer>();
        for(int i:shapeLocations)
            intervals.add(i);
        Collections.shuffle(intervals, rand);
//Split randomised locations for two types of shape
        for(int i=0;i<numShape1;i++)
            shape1Locations[i]=intervals.get(i);
        for(int i=0;i<numShape2;i++)
            shape2Locations[i]=intervals.get(i+numShape1);
        Arrays.sort(shape1Locations);
        Arrays.sort(shape2Locations);
//Find the positions by reflating
/*
        int count1=0;
        int count2=0;
//Try without alwayspushing forward
        for(int i=0;i<totalNumShapes;i++){
            if(count2==shape2Locations.length) //Finished shape2, must do shape1
                shape1Locations[count1++]+=i*shapeletLength;
            else if(count1==shape1Locations.length) //Finished shape1, must do shape1
                shape2Locations[count2++]+=i*shapeletLength;
            else if(shape1Locations[count1]<shape2Locations[count2])//Shape 1 before Shape 2, inflate shape 1
                shape1Locations[count1++]+=i*shapeletLength;
            else //Inflate shape2
                shape2Locations[count2++]+=i*shapeletLength;
        }
*/
        return true;
    }
/*Generate a single data
//Assumes a model independent of previous observations. As
//such will not be relevant for ARMA or HMM models, which just return -1.
 * Should probably remove.
 */
    /**
     * Generates the observation at time x (truncated to int): Gaussian noise,
     * plus the shape value if time t falls inside a placed occurrence of
     * shape 1 or shape 2. Scans the (sorted) location arrays linearly.
     */
    @Override
    public double generate(double x){
        //Noise
        int t=(int)x;
        double value=error.simulate();
//Shape: Check if in a shape1
/*        int insertionPoint=Arrays.binarySearch(shapeLocations,t);
        if(insertionPoint<0)//Not a start pos: in
            insertionPoint=-(1+insertionPoint);
//Too much grief, just doing a linear scan!
*/
        //Find the first shape-1 occurrence whose window could contain t
        int insertionPoint=0;
        while(insertionPoint<shape1Locations.length && shape1Locations[insertionPoint]+shapeletLength<t)
            insertionPoint++;
        if(insertionPoint>=shape1Locations.length){ //Bigger than all the start points, set to last
            insertionPoint=shape1Locations.length-1;
        }
        if(shape1Locations[insertionPoint]<=t && shape1Locations[insertionPoint]+shapeletLength>t){//in shape1
            value+=shape1.generateWithinShapelet(t-shape1Locations[insertionPoint]);
//            System.out.println(" IN SHAPE 1 occurence "+insertionPoint+" Time "+t);
        }else{ //Check if in shape 2
            insertionPoint=0;
            while(insertionPoint<shape2Locations.length && shape2Locations[insertionPoint]+shapeletLength<t)
                insertionPoint++;
            if(insertionPoint>=shape2Locations.length){ //Bigger than all the start points, set to last
                insertionPoint=shape2Locations.length-1;
            }
            if(shape2Locations[insertionPoint]<=t && shape2Locations[insertionPoint]+shapeletLength>t){//in shape2
                value+=shape2.generateWithinShapelet(t-shape2Locations[insertionPoint]);
//                System.out.println(" IN SHAPE 2 occurence "+insertionPoint+" Time "+t);
            }
        }
        return value;
    }
//This will generateWithinShapelet the next sequence after currently stored t value
@Override
public double generate()
{
// System.out.println("t ="+t);
double value=generate(t);
t++;
return value;
}
@Override
public double[] generateSeries(int n)
{
t=0;
//Resets the starting locations each time this is called
setNonOverlappingLocations();
double[] d = new double[n];
for(int i=0;i<n;i++)
d[i]=generate();
return d;
}
    /**
     * Subclasses must implement this, how they take them out of the array is their business.
     * Accepts any prefix of [seriesLength, numShape1, numShape2, shapeletLength].
     * NOTE(review): unlike the constructor, this does not recompute
     * totalNumShapes or the location arrays — confirm callers re-place shapes.
     * @param param parameters
     */
    @Override
    public void setParameters(double[] param){
        if(param!=null){
            //Deliberate fall-through: a length-k array sets the first k parameters
            switch(param.length){
                default:
                case 4: shapeletLength=(int)param[3];
                case 3: numShape2=(int)param[2];
                case 2: numShape1=(int)param[1];
                case 1: seriesLength=(int)param[0];
            }
        }
    }
    /** Model-type tag used as the Instances relation name. */
    @Override
    public String getModelType(){ return "DictionarySimulator";}
    /** Prefix used to name the generated attributes. */
    @Override
    public String getAttributeName(){return "Dict";}
    /** ARFF-comment description of this model's key parameters. */
    @Override
    public String getHeader(){
        String header=super.getHeader();
        header+="% \t Shapelet Length ="+shapeletLength;
        header+="\n% \t Series Length ="+seriesLength;
        header+="\n% \t Number of Shapelets ="+numShape1;
        header+="\n% \t Shape = "+shape1.type;
        return header;
    }
// Inner class determining the shape inserted into the shapelet model
    public static class Shape{
// Type: head and shoulders, spike, step, triangle, or sine wave.
        public ShapeType type;
        //Length of the shape in time steps
        public int length;
        //Vertical placement: base is the lowest point, amp the height above it
        private double base;
        private double amp;
//The position in the series at which the shape begins.
//        private int location;
        public void setBase(double b){ base=b;}
        public void setAmp(double a){amp=a;}
        public double getBase(){ return base;}
        public double getAmp(){return amp;}
        public static double DEFAULTBASE=-2;
        public static double DEFAULTAMP=4;
        //Default constructor, call randomise shape to get a random instance
        // The default length is 29, the shape extends from -2 to +2, is of
        // type head and shoulders, and is located at index 0.
        // NOTE(review): the delegated constructor already halves base for
        // HEADSHOULDERS, so this halves it a second time — confirm intended.
        public Shape(){
            this(ShapeType.HEADSHOULDERS,DEFAULTSHAPELETLENGTH,DEFAULTBASE,DEFAULTAMP);
            if(type==ShapeType.HEADSHOULDERS)
                base=base/2;
        }
        //Set length only, default for the others
        // NOTE(review): same double-halving of base as the no-arg constructor.
        public Shape(int length){
            this(ShapeType.HEADSHOULDERS,length,DEFAULTBASE,DEFAULTAMP);
            if(type==ShapeType.HEADSHOULDERS)
                base=base/2;
        }
        //Random type, explicit length/base/amplitude
        public Shape(int l, double b, double a){
            randomiseShape();
            length=l;
            base=b;
            amp=a;
        }
// This constructor produces a completely specified shape
        public Shape(ShapeType t,int l, double b, double a){
            type=t;
            length=l;
            base=b;
            amp=a;
            if(type==ShapeType.HEADSHOULDERS)
                base=base/2;
        }
        public void setLength(int newLength){
            this.length=newLength;
        }
        /**
         * Returns the shape's value at the given offset (0..length-1) from its
         * start, as a piecewise function of the shape type. SINE is centred on
         * zero (amp/2 swing) and does not use base.
         */
        public double generateWithinShapelet(int offset){
            double value=0;
            int lower,mid,upper;
//            if(offset==0)
//                System.out.println("LENGTH ="+length+" TYPE ="+type);
            switch(type){
                case TRIANGLE:
                    //Linear ramp up to amp+base at the midpoint, then back down
                    mid=length/2;
                    if(offset<=mid) {
                        if(offset==0)
                            value=base;
                        else
                            value=((offset/(double)(mid))*(amp))+base;
                    }
                    else
                    {
                        if(offset>=length)
                            value=base;
                        else if(length%2==1)
                            value=((length-offset-1)/(double)(mid)*(amp))+base;
                        else
                            value=((length-offset)/(double)(mid)*(amp))+base;
                    }
                    break;
                case HEADSHOULDERS:
//Need to properly set the boundaries for shapelets of length not divisible by 3.
                    lower=length/3;
                    upper=2*lower;
// Do something about uneven split.
                    if(length%3==2) //Add two the middle hump, or one each to the sides?
                        upper+=2;
                    if(offset<lower)//First small hump.
                        value = ((amp/2)*Math.sin(((2*Math.PI)/((length/3-1)*2))*offset))+base;
                    else if(offset>=upper)//last small hump
                        value = ((amp/2)*Math.sin(((2*Math.PI)/((length/3-1)*2))*(offset-upper)))+base;
                    else // middle big hump. Need to rescale for middle interval
                        value = ((amp)*Math.sin(((2*Math.PI)/(((upper-lower)-1)*2))*(offset-length/3)))+base;
                    if(value< base)//Case for length%3==1
                        value=base;
                    break;
                case SINE:
                    //One full cycle over the shape; note base is ignored here
                    value=amp*Math.sin(((2*Math.PI)/(length-1))*offset)/2;
                    break;
                case STEP:
                    //Low for the first half, high for the second
                    if(offset<length/2)
                        value=base;
                    else
                        value=base+amp;
                    break;
                case SPIKE:
                    //Down-ramp, sharp rise through zero, then decay back
                    lower=length/4;
                    upper=3*lower;
                    if(offset<=lower) //From 0 to base
                    {
                        if(offset==0)
                            value=0;
                        else
                            value=(-amp/2)*(offset/(double)(lower));
                    }
                    else if(offset>lower && offset<upper)
                    {
                        value=-amp/2+(amp)*((offset-lower)/(double)(upper-lower-1));
//                        if(offset>length/2&&offset<=length/4*3)
//                            value=(offset-length/2)/(double)(length/4)*(amp/2);
                    }
                    else{
                        value=amp/2-amp/2*((offset-upper+1)/(double)(length-upper));
                    }
                    break;
            }
            return value;
        }
        /** Sets the type; HEADSHOULDERS gets half the default base, others the full default. */
        public void setType(ShapeType newType){
            this.type=newType;
            if(newType==ShapeType.HEADSHOULDERS)
                base=DEFAULTBASE/2;
            else
                base=DEFAULTBASE;
        }
        @Override
        public String toString(){
            String shp = ""+this.type+",length,"+this.length+",this.base,"+base+",amp,"+this.amp;
            return shp;
        }
        //gives a shape a random type (uses the shared Model RNG for reproducibility)
        public void randomiseShape(){
            ShapeType [] types = ShapeType.values();
            int ranType = Model.rand.nextInt(types.length);
            setType(types[ranType]);
        }
        //NOTE(review): equality is by type only, and hashCode is not overridden
        //to match — unsafe as a hash key. Confirm before using in hashed collections.
        @Override
        public boolean equals(Object o){
            if(! (o instanceof Shape)) return false;
            if(type == ((Shape)o).type) return true;
            return false;
        }
    }
//1. CHECK PROPERLY REPRODUCABLE RANDOM
public static void testRandSeed(){
Model.setDefaultSigma(.1);
Model.setGlobalRandomSeed(0);
DEFAULTSHAPELETLENGTH=29;
DEFAULTSERIESLENGTH=100;
double[][] d=new double[ShapeType.values().length][DEFAULTSERIESLENGTH];
int j=0;
for(ShapeType s:ShapeType.values()){
// DEFAULTSHAPELETLENGTH+=2;
//seriesLength, numShape1, numShape2, shapeletLength
DictionaryModel shape = new DictionaryModel(new double[]{DEFAULTSERIESLENGTH,1,1,DEFAULTSHAPELETLENGTH});
shape.setShape1Type(s);
shape.setShape2Type(s);
System.out.println(" SHAPE ="+s);
for(int i=0;i<DEFAULTSERIESLENGTH;i++)
d[j][i]=shape.generate(i);
j++;
}
OutFile out=new OutFile("C:\\temp\\dictNoNoiseRep1.csv");
for(int i=0;i<DEFAULTSERIESLENGTH;i++){
for(j=0;j<d.length;j++)
out.writeString(d[j][i]+",");
out.writeString("\n");
}
}
public static void testRandomisedPlacement(){
//2. CHECK LOCATIONS ARE RANDOM
Model.setDefaultSigma(.1);
Model.setGlobalRandomSeed(0);
DEFAULTSHAPELETLENGTH=29;
DEFAULTSERIESLENGTH=100;
OutFile of = new OutFile("C:\\temp\\locationsDict.csv");
of.writeString(",Shape1,,Shape2");
for(int i=0;i<10000;i++){
DictionaryModel shape = new DictionaryModel(new double[]{DEFAULTSERIESLENGTH,1,1,DEFAULTSHAPELETLENGTH});
shape.setNonOverlappingLocations();
for(int j=0;j<shape.numShape1;j++){
of.writeString(","+shape.shape1Locations[j]);
}
of.writeString(",");
for(int j=0;j<shape.numShape2;j++){
of.writeString(","+shape.shape2Locations[j]);
}
of.writeString("\n");
}
}
public static void main (String[] args) throws IOException
{
//3. CHECK RESIZED SHAPELETS WORKS
Model.setDefaultSigma(0);
Model.setGlobalRandomSeed(0);
int sLength=30;
int length=100;
Shape s=new Shape(sLength);
DictionaryModel shape;
int c=0;
double[][] data=new double[10][];
for( sLength=10;sLength<=10;sLength+=4){
// length=2*sLength+2;
OutFile of = new OutFile("C:\\temp\\headShoulders"+sLength+".csv");
System.out.println("SLength = "+sLength+" Length ="+length);
shape = new DictionaryModel(new double[]{length,2,1,sLength});
// shape.setShape1Type(ShapeType.HEADSHOULDERS);
// shape.setShape2Type(ShapeType.HEADSHOULDERS);
Model.setGlobalRandomSeed(4);
for(int i=0;i<data.length;i++){
Model.setGlobalRandomSeed(i);
data[i]=shape.generateSeries(length);
}
for(int j=0;j<length;j++){
for(int i=0;i<data.length;i++)
of.writeString(data[i][j]+",");
of.writeString("\n");
}
}
//4. Check multiple shapelet placement
//5. To do in Generator class: CHECK RESTART WORKS PROPERLY, generating new random positions
// shape1.reset();
// for(int i=0;i<200;i++)
// System.out.println(shape1.generate(i));
}
}
/*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
/*
Elastic model for simulators.
*/
package statistics.simulators;
import fileIO.OutFile;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import statistics.simulators.DictionaryModel.Shape;
import statistics.simulators.DictionaryModel.ShapeType;
import static statistics.simulators.Model.rand;
/**
*
* @author ajb
*/
public class ElasticModel extends Model{
    /** Proportion of the series length by which the shape's start/end may be
     *  randomly shifted. NOTE: static, shared by every ElasticModel instance. */
    private static double warpPercent=0.1;
    private int seriesLength=200; // Need to set intervals, maybe allow different lengths?
    private static double base=-1;
    private static double amplitude=2;
    /** The underlying shape, stretched to fill [start,end) of each series. */
    private Shape baseShape;
    int start =0;
    int end=seriesLength;
    public ElasticModel(){
        baseShape=new Shape();
        Shape.DEFAULTAMP=amplitude;
        Shape.DEFAULTBASE=base;
    }
    public void setSeriesLength(int n){
        seriesLength=n;
    }
    public void setBaseShapeType(ShapeType st){
        baseShape.setType(st);
    }
    public static void setWarpPercent(double b){
        warpPercent=b;
    }
    /** Sets base/amplitude for this model and as the Shape defaults. */
    public static void setBaseAndAmp(double b, double a){
        amplitude=a;
        base=b;
        Shape.DEFAULTAMP=amplitude;
        Shape.DEFAULTBASE=base;
    }
    /**
     * Next point of the current series: observation noise plus, while t lies
     * inside [start,end), the shape's value at the corresponding offset.
     * Advances the time counter t.
     */
    @Override
    public double generate(){
        double value=error.simulate();
        if(t>=start && t<end)
            value+=baseShape.generateWithinShapelet((int)(t-start));
        t++;
        return value;
    }
    /**
     * Generates an n-point series, randomising the shape's start and end
     * within a warp window of warpPercent*seriesLength points and rescaling
     * the shape to fit.
     * BUG FIX: the window is clamped to at least 1, since Random.nextInt
     * throws IllegalArgumentException for a non-positive bound — previously
     * any warpPercent*seriesLength < 1 crashed. nextInt(1) is always 0
     * (no warping), and larger windows behave exactly as before.
     * NOTE(review): start/end use the seriesLength field while the output
     * length is the parameter n; callers appear to pass n==seriesLength —
     * confirm.
     */
    @Override
    public double[] generateSeries(int n){
        int warpWindow=Math.max(1,(int)(warpPercent*seriesLength));
        start=Model.rand.nextInt(warpWindow);
        end=seriesLength-Model.rand.nextInt(warpWindow);
        baseShape.setLength(end-start);
        double[] series=new double[n];
        t=0;
        for(int i=0;i<n;i++)
            series[i]=generate();
        return series;
    }
    /** p[0] is the warp proportion; no other entries are read. */
    @Override
    public void setParameters(double[] p) {
        warpPercent=p[0];
    }
    /** Randomises the shape type, rejecting the given type m when non-null. */
    public void randomiseShape(DictionaryModel.ShapeType m){
        baseShape.randomiseShape();
        if(m!=null){
            while(baseShape.type==m)
                baseShape.randomiseShape();
        }
    }
    /** Randomises the shape type, rejecting m's type when m is non-null
     *  (Shape.equals compares type only). */
    public void randomiseShape(ElasticModel m){
        baseShape.randomiseShape();
        if(m!=null){
            while(baseShape.equals(m.baseShape))
                baseShape.randomiseShape();
        }
    }
    public DictionaryModel.ShapeType getShape(){
        return baseShape.type;
    }
    public void setShape(DictionaryModel.ShapeType sh){
        baseShape.type=sh;
    }
    public static void main(String[] args){
    }
    @Override
    public String toString(){
        return baseShape.toString()+" warp ="+warpPercent;
    }
}
/*
* This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
*
* The UEA TSML toolbox is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The UEA TSML toolbox is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
*/
package statistics.simulators;
/** HHM Model generator: NOT IMPLEMENTED
*
*/
import statistics.distributions.NormalDistribution;
import fileIO.*;
import java.util.*;
import statistics.distributions.Distribution;
public class HMM_Model extends Model{
    //Number of generating distributions, i.e. hidden states
    int N;
    // family of generating distributions, one per hidden state
    Distribution[] f;
    //Discrete transition matrix: T[i][j] = P(next state j | current state i)
    double[][] T;
    //Discrete initial values distribution over the N states
    double[] pi;
    //Current hidden state, an index into f, T and pi
    int state;
    //Stats recorded for debugging
    int n=200; //Default series length
    // NOTE(review): raw ArrayLists, allocated in initialise() but never
    // populated anywhere in this class — presumably intended debug traces.
    ArrayList stateRecord;
    ArrayList dataRecord;
//Private constructors to stop anyone using this unless it is implemented!
    private HMM_Model()
    {
        // Deliberately unusable: this class is an unimplemented stub, so the
        // no-arg constructor only flags accidental use and sets no state.
        System.out.println("SHOULD NOT BE HERE!!");
    }
    private HMM_Model(int models)
    {
        // Allocates structures for 'models' hidden states; the emission
        // distributions, transition matrix and initial distribution are left
        // for the caller to fill in (arrays default to zero/null).
        N=models;
        f=new NormalDistribution[N];
        T=new double[N][N];
        pi = new double[N];
    }
    private HMM_Model(int models, double[] means, double[] stDevs, double[][]trans, double[] inits)
    //Default constructor assumes normal distribution
    {
        // models: number of hidden states. means/stDevs parameterise one
        // Normal emission distribution per state; trans is the NxN transition
        // matrix; inits is the initial state distribution.
        // Assumes every array has length/dimension >= models — TODO confirm.
        N=models;
        f=new NormalDistribution[N];
        T=new double[N][N];
        pi = new double[N];
        for(int i=0;i<N;i++)
        {
            f[i]= new NormalDistribution(means[i],stDevs[i]);
            for(int j=0;j<N;j++)
                T[i][j]=trans[i][j];
            pi[i]=inits[i];
        }
        //Set initial state by sampling from pi
        initialise();
    }
public void initialise(){
double r=error.RNG.nextDouble();
state = 0;
double s=0;
boolean finished=false;
do{
s+=pi[state];
if(s>r)
finished=true;
else
state++;
}while(!finished && state<N);
stateRecord = new ArrayList(n);
dataRecord = new ArrayList(n);
}
    public double generate(double x){
        // Not supported: HMM generation is stateful, not a function of x.
        // NOTE(review): terminates the whole JVM instead of throwing — the
        // return below is unreachable in practice.
        System.out.println("Error, generate not implemented for HMM");
        System.exit(0);
        return -1;
    }
public double generate(){
//Determine state
double r=error.RNG.nextDouble();
double s=0;
int i=0;
boolean finished=false;
do{
s+=T[state][i];
if(s>r)
finished=true;
else
i++;
}while(!finished && i<N);
state=i;
//Sample Distribution
return f[state].simulate();
}
public String toString()
{
String str="";
return str;
}
    static public void main(String[] args){
        // Smoke test: announces itself and creates the output file. The
        // actual data generation is commented out pending GenerateData
        // support, so the file is created (side effect on disk) but never
        // written to or closed.
        System.out.println("Testing HMM Models");
        System.out.println("Generating Data: from 2 normal dists, variance 1");
        OutFile f = new OutFile("SmytheHMMData.csv");
/*        double[][] d = GenerateData.simulateSmytheData(3,200);
        for(int j=0;j<d[0].length;j++)
        {
            for(int i=0;i<d.length;i++)
                f.writeString(d[i][j]+",");
            f.writeString("\n");
        }
*/
    }
    @Override
    public void setParameters(double[] p) {
        // Parameterising the HMM (means/transitions) is not implemented yet;
        // fail loudly rather than silently ignoring the request.
        throw new UnsupportedOperationException("Not supported yet.");
    }
}