repo
stringlengths
1
191
file
stringlengths
23
351
code
stringlengths
0
5.32M
file_length
int64
0
5.32M
avg_line_length
float64
0
2.9k
max_line_length
int64
0
288k
extension_type
stringclasses
1 value
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/memory/MemoryWatcher.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.distance_based.utils.system.memory; import com.sun.management.GarbageCollectionNotificationInfo; import javax.management.ListenerNotFoundException; import javax.management.Notification; import javax.management.NotificationEmitter; import javax.management.NotificationListener; import javax.management.openmbean.CompositeData; import java.io.IOException; import java.io.ObjectInputStream; import java.lang.management.GarbageCollectorMXBean; import java.lang.management.MemoryUsage; import java.util.*; import org.nd4j.linalg.io.Assert; import tsml.classifiers.distance_based.utils.system.timing.Stated; import utilities.Utilities; /** * Purpose: watch the memory whilst enabled, tracking the mean, std dev, count, gc time and max mem usage. * * Note, most methods in this class are synchronized as the garbage collection updates come from another thread, * therefore all memory updates come from another thread and must be synced. 
* * Contributors: goastler */ public class MemoryWatcher extends Stated implements MemoryWatchable { public static void main(String[] args) { final MemoryWatcher memoryWatcher = new MemoryWatcher(); memoryWatcher.start(); final LinkedList<double[]> list = new LinkedList<>(); int i = 0; while(true) { i++; Utilities.busyWait(1000000); list.add(new double[1000]); // System.out.println(list.size()); if(i % 10 == 0) { list.remove(0); } if(i % 10000 == 0) { System.out.println(memoryWatcher.getMaxMemoryUsage()); } } } public synchronized void update() { // deliberately update memory usage using the used memory AT THE TIME OF INVOCATION. I.e. not necessarily the time of max memory usage! // we do this to work around the cases where the gc hasn't run, therefore we don't know the max memory over time // instead, we poll the memory usage at the current time if(maxMemoryUsage < 0) { maxMemoryUsage = Math.max(Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(), maxMemoryUsage); } } public synchronized long getMaxMemoryUsage() { update(); return maxMemoryUsage; } private long maxMemoryUsage = -1; private transient NotificationListener listener = this::handleNotification; private boolean activeListener = false; public MemoryWatcher() {} @Override public void start() { addListener(); MemoryWatchable.gc(); // do a gc sweep to try and prompt memory readings at start super.start(); } @Override public void stop() { MemoryWatchable.gc(); // clean up memory before stopping removeListener(); super.stop(); } private void addListener() { // emitters are used to listen to each memory pool (usually young / old gen). 
// garbage collector for old and young gen listener = this::handleNotification; List<GarbageCollectorMXBean> garbageCollectorBeans = java.lang.management.ManagementFactory.getGarbageCollectorMXBeans(); for (GarbageCollectorMXBean garbageCollectorBean : garbageCollectorBeans) { // to log // listen to notification from the emitter NotificationEmitter emitter = (NotificationEmitter) garbageCollectorBean; /** * the memory update listener */ emitter.addNotificationListener(listener, null, null); } if(activeListener) throw new IllegalStateException("listener already active"); activeListener = true; } private void removeListener() { // emitters are used to listen to each memory pool (usually young / old gen). // garbage collector for old and young gen List<GarbageCollectorMXBean> garbageCollectorBeans = java.lang.management.ManagementFactory.getGarbageCollectorMXBeans(); for (GarbageCollectorMXBean garbageCollectorBean : garbageCollectorBeans) { // to log // listen to notification from the emitter NotificationEmitter emitter = (NotificationEmitter) garbageCollectorBean; try { emitter.removeNotificationListener(listener); } catch(ListenerNotFoundException ignored) { // nevermind, already been removed System.out.println("failed to remove listener"); } } if(!activeListener) throw new IllegalStateException("listener already inactive"); activeListener = false; } private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException { // default deserialization ois.defaultReadObject(); // stop if already started. 
Any memory watcher read from serialization should default to being stopped, like StopWatch if(isStarted()) { super.stop(); activeListener = false; } Assert.isNull(listener); } private synchronized void handleNotification(final Notification notification, final Object handback) { if(notification.getType() .equals(GarbageCollectionNotificationInfo.GARBAGE_COLLECTION_NOTIFICATION)) { GarbageCollectionNotificationInfo info = GarbageCollectionNotificationInfo .from((CompositeData) notification.getUserData()); for(Map.Entry<String, MemoryUsage> entry : info.getGcInfo().getMemoryUsageAfterGc().entrySet()) { MemoryUsage memoryUsageSnapshot = entry.getValue(); long memoryUsage = memoryUsageSnapshot.getUsed(); maxMemoryUsage = Math.max(memoryUsage, maxMemoryUsage); } for(Map.Entry<String, MemoryUsage> entry : info.getGcInfo().getMemoryUsageAfterGc().entrySet()) { MemoryUsage memoryUsageSnapshot = entry.getValue(); long memoryUsage = memoryUsageSnapshot.getUsed(); maxMemoryUsage = Math.max(memoryUsage, maxMemoryUsage); } } } @Override public String toString() { return "maxMemory: " + getMaxMemoryUsage(); } public synchronized void onReset() { maxMemoryUsage = -1; } }
7,193
39.189944
143
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/parallel/BlockingExecutor.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.distance_based.utils.system.parallel; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.FutureTask; import java.util.concurrent.Semaphore; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; /** * Purpose: run several tasks concurrently with no queuing / queue overflow * <p> * Contributors: goastler */ public class BlockingExecutor implements ExecutorService { final Semaphore semaphore; final ExecutorService service; public Executor getService() { return service; } public BlockingExecutor(final int concurrentTasksLimit, final ExecutorService service) { semaphore = new Semaphore(concurrentTasksLimit); this.service = service; } public BlockingExecutor(ThreadPoolExecutor service) { int maximumPoolSize = service.getMaximumPoolSize(); this.semaphore = new Semaphore(maximumPoolSize); this.service = service; } private void acquire() { 
try { semaphore.acquire(); } catch (InterruptedException e) { IllegalStateException ex = new IllegalStateException(); ex.addSuppressed(e); throw ex; } } private boolean tryAcquire(long time, TimeUnit unit) { try { return semaphore.tryAcquire(time, unit); } catch (InterruptedException e) { IllegalStateException ex = new IllegalStateException(); ex.addSuppressed(e); throw ex; } } private <A> Callable<A> acquireThenReleaseWrap(Callable<A> callable) { acquire(); return releaseWrap(callable); } private <A> Callable<A> releaseWrap(Callable<A> callable) { return () -> { A result; try { result = callable.call(); } finally { semaphore.release(); } return result; }; } @Override public void execute(final Runnable runnable) { submit(runnable); } @Override public void shutdown() { service.shutdown(); } @Override public List<Runnable> shutdownNow() { return service.shutdownNow(); } @Override public boolean isShutdown() { return service.isShutdown(); } @Override public boolean isTerminated() { return service.isTerminated(); } @Override public boolean awaitTermination(final long l, final TimeUnit timeUnit) throws InterruptedException { return service.awaitTermination(l, timeUnit); } @Override public <T> Future<T> submit(final Callable<T> callable) { return service.submit(acquireThenReleaseWrap(callable)); } @Override public <T> Future<T> submit(final Runnable runnable, final T t) { return service.submit(acquireThenReleaseWrap(() -> { runnable.run(); return t; })); } @Override public Future<?> submit(final Runnable runnable) { return service.submit(acquireThenReleaseWrap(() -> { runnable.run(); return null; })); } @Override public <T> List<Future<T>> invokeAll(final Collection<? 
extends Callable<T>> collection) throws InterruptedException { List<Future<T>> list = new ArrayList<>(); for(Callable<T> callable : collection) { list.add(submit(callable)); } for(Future<T> future : list) { try { future.get(); } catch(ExecutionException ignored) { } } return list; } // only for internal use, returns an empty future in the case a permit couldn't be acquired in the time interval private <A> Future<A> submit(Callable<A> callable, long time, TimeUnit unit) throws TimeoutException { boolean acquired; if(time > 0) { acquired = tryAcquire(time, unit); if(acquired) { callable = releaseWrap(callable); return submit(callable); } } throw new TimeoutException(); } @Override public <T> List<Future<T>> invokeAll(final Collection<? extends Callable<T>> collection, final long l, final TimeUnit timeUnit) throws InterruptedException { long timestamp = System.nanoTime(); long timeLimit = TimeUnit.NANOSECONDS.convert(l, timeUnit); List<Future<T>> futures = new ArrayList<>(); for(Callable<T> callable : collection) { Future<T> future = null; try { future = submit(callable, timeLimit - (System.nanoTime() - timestamp), TimeUnit.NANOSECONDS); } catch(TimeoutException e) { future = new FutureTask<>(callable); future.cancel(true); } futures.add(future); } for(Future<T> future : futures) { long remainingTime = timeLimit - (System.nanoTime() - timestamp); if(remainingTime > 0 && !future.isCancelled()) { try { future.get(timeLimit - (System.nanoTime() - timestamp), TimeUnit.NANOSECONDS); } catch(CancellationException | InterruptedException | ExecutionException | TimeoutException ignored) { } } future.cancel(true); } return futures; } @Override public <T> T invokeAny(final Collection<? 
extends Callable<T>> collection) throws InterruptedException, ExecutionException { for(Callable<T> callable : collection) { try { Future<T> future = submit(callable); T result = future.get(); if(!future.isCancelled() && future.isDone()) { return result; } } catch(CancellationException | ExecutionException ignored) {} } throw new ExecutionException(new IllegalStateException("no task completed.")); } @Override public <T> T invokeAny(final Collection<? extends Callable<T>> collection, final long l, final TimeUnit timeUnit) throws InterruptedException, ExecutionException, TimeoutException { long timestamp = System.nanoTime(); long timeLimit = TimeUnit.NANOSECONDS.convert(l, timeUnit); for(Callable<T> callable : collection) { try { Future<T> future = submit(callable, timeLimit - (System.nanoTime() - timestamp), TimeUnit.NANOSECONDS); T result = future.get(timeLimit - (System.nanoTime() - timestamp), TimeUnit.NANOSECONDS); if(!future.isCancelled() && future.isDone()) { return result; } } catch(CancellationException | ExecutionException ignored) {} } throw new ExecutionException(new IllegalStateException("no task completed.")); } }
8,090
32.572614
119
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/random/DebuggingRandom.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.distance_based.utils.system.random; import java.util.Random; import java.util.stream.DoubleStream; import java.util.stream.IntStream; import java.util.stream.LongStream; /** * Purpose: print off every single random call. * <p> * Contributors: goastler */ public class DebuggingRandom extends Random { public DebuggingRandom() { } public DebuggingRandom(final long l) { super(l); } private int i = 0; @Override public double nextDouble() { double v = super.nextDouble(); System.out.println(i++ + ": nextDouble: " + v); return v; } @Override public int nextInt() { System.out.println(i++ + ": nextInt()"); return super.nextInt(); } @Override public int nextInt(final int i) { int v = super.nextInt(i); System.out.println(this.i++ + ": nextInt(" + i + "): " + v); return v; } @Override public synchronized void setSeed(final long l) { System.out.println(i++ + ": setSeed(" + l + ")"); super.setSeed(l); } @Override public void nextBytes(final byte[] bytes) { System.out.println(i++ + ": nextBytes"); super.nextBytes(bytes); } @Override public long nextLong() { System.out.println(i++ + ": nextLong()"); return super.nextLong(); } @Override public boolean nextBoolean() { System.out.println(i++ + ": nextLong()"); return 
super.nextBoolean(); } @Override public float nextFloat() { System.out.println(i++ + ": nextFloat()"); return super.nextFloat(); } @Override public synchronized double nextGaussian() { System.out.println(i++ + ": nextGaussian()"); return super.nextGaussian(); } }
2,587
25.408163
76
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/random/RandomSource.java
/*
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */

package tsml.classifiers.distance_based.utils.system.random;

import java.io.Serializable;
import java.util.Random;

/**
 * Purpose: allow the setting / getting of the random source. Implementors expose their
 * random number generator and the seed it was built from. Serializable so randomised
 * components can be persisted alongside their RNG state.
 * <p>
 * Contributors: goastler
 */
public interface RandomSource extends Serializable {

    /** @return the random number generator backing this source */
    Random getRandom();

    /** @return the seed used to initialise the random number generator */
    int getSeed();
}
1,082
30.852941
76
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/random/RandomUtils.java
/*
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */

package tsml.classifiers.distance_based.utils.system.random;

import java.util.*;
import java.util.stream.Collectors;
import tsml.classifiers.distance_based.utils.collections.iteration.BaseRandomIterator;
import tsml.classifiers.distance_based.utils.collections.iteration.RandomIterator;
import tsml.classifiers.distance_based.utils.collections.lists.IndexList;
import utilities.Utilities;

import static tsml.classifiers.distance_based.utils.collections.CollectionUtils.newArrayList;

/**
 * Purpose: static helpers for random choice / shuffle / removal over index ranges and lists,
 * with and without replacement. All randomness is drawn from a caller-supplied {@link Random}
 * so results are reproducible given a seed.
 * <p>
 * Contributors: goastler
 */
public class RandomUtils {

    /**
     * Draw the next numChoices elements from a random iterator.
     *
     * @param iterator the source of randomly-ordered elements
     * @param numChoices how many elements to take
     * @return the chosen elements, in the order the iterator produced them
     * @throws IllegalStateException if the iterator runs out before numChoices elements
     */
    public static <A> ArrayList<A> choice(RandomIterator<A> iterator, int numChoices) {
        final ArrayList<A> choices = new ArrayList<>(numChoices);
        for(int i = 0; i < numChoices; i++) {
            if(!iterator.hasNext()) throw new IllegalStateException("iterator has no items remaining at iteration step " + i);
            choices.add(iterator.next());
        }
        return choices;
    }

    /**
     * Choose a single index in [0, size).
     *
     * @param size the exclusive upper bound; assumed >= 1 — TODO confirm callers never pass 0
     * @param random the random source
     */
    public static int choiceIndex(int size, Random random) {
        if(size == 1) {
            // only 1 element, no need to randomly choose (also skips an RNG draw)
            return 0;
        } else {
            // multiple elements, randomly choose
            return random.nextInt(size);
        }
    }

    /**
     * choice several indices from a set range.
     * @param size the max size (i.e. max index is size-1, min index is 0)
     * @param random the random source
     * @param numChoices the number of choices to make
     * @param withReplacement whether to allow indices to be picked more than once
     * @return the chosen indices
     * @throws IllegalArgumentException if more choices than indices are requested without replacement
     */
    public static List<Integer> choiceIndex(int size, Random random, int numChoices, boolean withReplacement) {
        if(numChoices == 1) {
            // single choice: avoid building the iterator machinery
            return newArrayList(choiceIndex(size, random));
        }
        if(numChoices > size && !withReplacement) {
            // too many choices given size
            throw new IllegalArgumentException("cannot choose " + numChoices + " from 0.." + size + " without replacement");
        }
        // IndexList is a view of 0..size-1; the random iterator walks it in random order
        final List<Integer> indices = new IndexList(size);
        final RandomIterator<Integer> iterator = new BaseRandomIterator<>();
        iterator.setWithReplacement(withReplacement);
        iterator.setRandom(random);
        iterator.buildIterator(indices);
        return choice(iterator, numChoices);
    }

    // choice elements by index

    /**
     * Avoids a span of numbers when choosing an index. Draws from the reduced range
     * [0, size - |exceptions|) and shifts the result up past every excluded index at or
     * below it, so each non-excluded index is equally likely.
     *
     * @param size the exclusive upper bound of the full index range
     * @param random the random source
     * @param exceptions indices that must not be chosen; assumed to lie in [0, size) — TODO confirm
     * @return a uniformly chosen index not in exceptions
     */
    public static Integer choiceIndexExcept(int size, Random random, Collection<Integer> exceptions) {
        int index = choiceIndex(size - exceptions.size(), random);
        // if the chosen index lies within the exception zone, then the index needs to be shifted by the zone length to avoid these indices
        // (dedupe + sort so the shift walks the exclusions in ascending order)
        exceptions = exceptions.stream().distinct().sorted().collect(Collectors.toList());
        for(Integer exception : exceptions) {
            if(index >= exception) {
                index++;
            } else {
                break;
            }
        }
        return index;
    }

    /** Single-exception convenience overload of {@link #choiceIndexExcept(int, Random, Collection)}. */
    public static Integer choiceIndexExcept(int size, Random random, int exception) {
        return choiceIndexExcept(size, random, Collections.singletonList(exception));
    }

    /** Choose numChoices indices in [0, size), allowing repeats. */
    public static List<Integer> choiceIndexWithReplacement(int size, Random random, int numChoices) {
        return choiceIndex(size, random, numChoices, true);
    }

    /** Choose numChoices distinct indices in [0, size). */
    public static List<Integer> choiceIndex(int size, Random random, int numChoices) {
        return choiceIndex(size, random, numChoices, false);
    }

    /** Shuffle 0..size-1, i.e. choose all indices without replacement. */
    public static List<Integer> shuffleIndices(int size, Random random) {
        return choiceIndex(size, random, size);
    }

    // choose elements directly

    /** Return the list's elements in a random order (the source list is not modified). */
    public static <A> List<A> shuffle(List<A> list, Random random) {
        return Utilities.apply(shuffleIndices(list.size(), random), list::get);
    }

    /** Choose numChoices elements from the list, optionally with replacement; the list is not modified. */
    public static <A> List<A> choice(List<A> list, Random random, int numChoices, boolean withReplacement) {
        return Utilities.apply(choiceIndex(list.size(), random, numChoices, withReplacement), list::get);
    }

    /** Choose a single element uniformly from the list. */
    public static <A> A choice(List<A> list, Random random) {
        final int i = choiceIndex(list.size(), random);
        return list.get(i);
    }

    /** Choose numChoices elements allowing repeats; the list is not modified. */
    public static <A> List<A> choiceWithReplacement(List<A> list, Random random, int numChoices) {
        return choice(list, random, numChoices, true);
    }

    /** Choose numChoices distinct elements; the list is not modified. */
    public static <A> List<A> choice(List<A> list, Random random, int numChoices) {
        return choice(list, random, numChoices, false);
    }

    // pick elements from list as well as choosing (i.e. make choice of elements and remove from the source list)

    /**
     * Choose numChoices elements and REMOVE them from the source list.
     *
     * @param list the source list; mutated — chosen elements (each distinct index once) are removed
     * @param random the random source
     * @param numChoices the number of choices to make
     * @param withReplacement whether an element may be chosen more than once (it is still removed once)
     * @return the chosen elements, in choice order
     */
    public static <A> List<A> remove(List<A> list, Random random, int numChoices, boolean withReplacement) {
        List<Integer> indices = choiceIndex(list.size(), random, numChoices, withReplacement);
        final ArrayList<A> chosen = Utilities.apply(indices, list::get);
        // remove in descending index order so earlier removals don't shift later indices
        indices = indices.stream().distinct().sorted(Comparator.reverseOrder()).collect(Collectors.toList());
        for(int index : indices) {
            list.remove(index);
        }
        return chosen;
    }

    /** Choose one element uniformly and remove it from the list. */
    public static <A> A remove(List<A> list, Random random) {
        final int i = choiceIndex(list.size(), random);
        return list.remove(i);
    }

    /** Choose numChoices distinct elements and remove them from the list. */
    public static <A> List<A> remove(List<A> list, Random random, int numChoices) {
        return remove(list, random, numChoices, false);
    }

    /** Choose numChoices elements with replacement, removing each distinct pick from the list. */
    public static <A> List<A> pickWithReplacement(List<A> list, Random random, int numChoices) {
        return remove(list, random, numChoices, true);
    }
}
6,741
36.876404
139
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/random/RandomUtilsTest.java
/*
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */

package tsml.classifiers.distance_based.utils.system.random;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * Tests for {@link RandomUtils}. Every test draws from Random(0) over the list 1..10, so the
 * expected values below are the exact sequence produced by that seed — do not reorder the
 * calls within a test, as each call advances the shared RNG state.
 */
public class RandomUtilsTest {

    // fresh fixture per test: the list 1..10 and a deterministically seeded RNG
    private List<Integer> list;
    private Random random;

    @Before
    public void before() {
        random = new Random(0);
        list = IntStream.rangeClosed(1,10).boxed().collect(Collectors.toList());
    }

    @Test
    public void testRandomChoiceSingle() {
        // repeated single choice must not modify the source list
        int choice = RandomUtils.choice(list, random);
        Assert.assertEquals(1, choice);
        choice = RandomUtils.choice(list, random);
        Assert.assertEquals(9, choice);
        choice = RandomUtils.choice(list, random);
        Assert.assertEquals(10, choice);
        choice = RandomUtils.choice(list, random);
        Assert.assertEquals(8, choice);
        choice = RandomUtils.choice(list, random);
        Assert.assertEquals(6, choice);
        Assert.assertEquals(10, list.size());
    }

    @Test
    public void testRandomPickSingle() {
        // each single remove shrinks the list by one and the element must be gone
        int choice = RandomUtils.remove(list, random);
        Assert.assertEquals(1, choice);
        Assert.assertFalse(list.contains(choice));
        Assert.assertEquals(9, list.size());
        choice = RandomUtils.remove(list, random);
        Assert.assertEquals(9, choice);
        Assert.assertFalse(list.contains(choice));
        Assert.assertEquals(8, list.size());
        choice = RandomUtils.remove(list, random);
        Assert.assertEquals(3, choice);
        Assert.assertFalse(list.contains(choice));
        Assert.assertEquals(7, list.size());
        choice = RandomUtils.remove(list, random);
        Assert.assertEquals(5, choice);
        Assert.assertFalse(list.contains(choice));
        Assert.assertEquals(6, list.size());
        choice = RandomUtils.remove(list, random);
        Assert.assertEquals(10, choice);
        Assert.assertFalse(list.contains(choice));
        Assert.assertEquals(5, list.size());
    }

    @Test
    public void testRandomChoiceMultiple() {
        // multi-choice without replacement: distinct elements, source list untouched
        final List<Integer> choice = RandomUtils.choice(list, random, 5);
        Assert.assertEquals(5, choice.size());
        Assert.assertEquals(new Integer(1), choice.get(0));
        Assert.assertEquals(new Integer(8), choice.get(1));
        Assert.assertEquals(new Integer(2), choice.get(2));
        Assert.assertEquals(new Integer(3), choice.get(3));
        Assert.assertEquals(new Integer(6), choice.get(4));
        Assert.assertEquals(10, list.size());
    }

    @Test
    public void testRandomPickMultiple() {
        // multi-remove: chosen elements leave the list, the rest remain
        final List<Integer> choice = RandomUtils.remove(list, random, 5);
        Assert.assertEquals(5, choice.size());
        Assert.assertEquals(new Integer(1), choice.get(0));
        Assert.assertEquals(new Integer(8), choice.get(1));
        Assert.assertEquals(new Integer(2), choice.get(2));
        Assert.assertEquals(new Integer(3), choice.get(3));
        Assert.assertEquals(new Integer(6), choice.get(4));
        Assert.assertTrue(list.contains(4));
        Assert.assertTrue(list.contains(5));
        Assert.assertTrue(list.contains(7));
        Assert.assertTrue(list.contains(9));
        Assert.assertTrue(list.contains(10));
        Assert.assertTrue(list.size() == 5);
    }

    @Test
    public void testRandomChoiceAll() {
        // choosing all 10 without replacement is a full shuffle; source untouched
        final List<Integer> choice = RandomUtils.choice(list, random, 10);
        Assert.assertEquals(list.size(), choice.size());
        Assert.assertEquals(new Integer(1), choice.get(0));
        Assert.assertEquals(new Integer(8), choice.get(1));
        Assert.assertEquals(new Integer(2), choice.get(2));
        Assert.assertEquals(new Integer(3), choice.get(3));
        Assert.assertEquals(new Integer(6), choice.get(4));
        Assert.assertEquals(new Integer(4), choice.get(5));
        Assert.assertEquals(new Integer(7), choice.get(6));
        Assert.assertEquals(new Integer(10), choice.get(7));
        Assert.assertEquals(new Integer(9), choice.get(8));
        Assert.assertEquals(new Integer(5), choice.get(9));
        Assert.assertEquals(10, list.size());
    }

    @Test
    public void testRandomPickAll() {
        // removing all 10 drains the list entirely, in the same shuffled order
        final List<Integer> choice = RandomUtils.remove(list, random, 10);
        Assert.assertEquals(0, list.size());
        Assert.assertEquals(10, choice.size());
        Assert.assertEquals(new Integer(1), choice.get(0));
        Assert.assertEquals(new Integer(8), choice.get(1));
        Assert.assertEquals(new Integer(2), choice.get(2));
        Assert.assertEquals(new Integer(3), choice.get(3));
        Assert.assertEquals(new Integer(6), choice.get(4));
        Assert.assertEquals(new Integer(4), choice.get(5));
        Assert.assertEquals(new Integer(7), choice.get(6));
        Assert.assertEquals(new Integer(10), choice.get(7));
        Assert.assertEquals(new Integer(9), choice.get(8));
        Assert.assertEquals(new Integer(5), choice.get(9));
        Assert.assertTrue(list.isEmpty());
    }

    @Test
    public void testRandomChoiceWithReplacement() {
        // 100 draws with replacement from 10 elements should hit every element at least once
        final List<Integer> choice = RandomUtils.choiceWithReplacement(list, random, list.size() * 10);
        final Set<Integer> set = new HashSet<>(choice);
        Assert.assertEquals(list.size(), set.size());
        Assert.assertEquals(10, list.size());
    }

    @Test
    public void testRandomChoiceWithoutReplacement() {
        // without replacement every chosen element is distinct
        final List<Integer> choice = RandomUtils.choice(list, random, 10);
        final Set<Integer> set = new HashSet<>(choice);
        Assert.assertEquals(choice.size(), set.size());
        Assert.assertEquals(10, list.size());
    }

    @Test
    public void testRandomPickWithReplacement() {
        // pick-with-replacement still removes each distinct element, draining the list
        final List<Integer> choice = RandomUtils.pickWithReplacement(list, random, list.size() * 10);
        final Set<Integer> set = new HashSet<>(choice);
        Assert.assertEquals(10, set.size());
        Assert.assertEquals(0, list.size());
    }

    @Test
    public void testRandomPickWithoutReplacement() {
        // removing all distinct elements empties the list
        final List<Integer> choice = RandomUtils.remove(list, random, 10);
        final Set<Integer> set = new HashSet<>(choice);
        Assert.assertEquals(choice.size(), set.size());
        Assert.assertEquals(0, list.size());
    }

    @Test(expected = IllegalArgumentException.class)
    public void testRandomChoiceTooMany() {
        // asking for more than size without replacement must fail fast
        RandomUtils.choice(list, random, list.size() + 1);
    }
}
7,286
37.760638
103
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/random/Randomised.java
package tsml.classifiers.distance_based.utils.system.random;

import weka.core.Randomizable;

import java.util.Random;

/**
 * Purpose: allow the setting / getting of the random source. Extends {@link RandomSource}
 * (getters) with setters, and weka's {@link Randomizable} so seeds interoperate with weka
 * components.
 * <p>
 * Contributors: goastler
 */
public interface Randomised extends RandomSource, Randomizable {

    /** Set the random number generator to use. */
    void setRandom(Random random);

    /** Set the seed; per the class contract this also builds the RNG from that seed. */
    void setSeed(int seed);

    /**
     * Fail fast if no RNG has been configured yet.
     *
     * @throws IllegalStateException if neither setRandom nor setSeed has been called
     */
    default void checkRandom() {
        if(getRandom() == null) {
            // random should be set by either calling setRandom or setSeed, the latter of which will automatically build a random with the specified seed
            throw new IllegalStateException("random not set");
        }
    }

    /**
     * Copy the random config (i.e. rng object and the seed) onto another instance which requires randomisation.
     * Note the receiving Randomised instance SHARES this instance's Random object afterwards.
     *
     * @param obj the target; ignored if it uses no randomisation
     */
    default void copyRandomTo(Object obj) {
        copySeedTo(obj);
        if(obj instanceof Randomised) {
            // pass on the already initialised (with the seed) random
            // note that the seed has already been passed on because Randomised extends Randomizable
            ((Randomised) obj).setRandom(getRandom());
        }
        // else not a user of randomisation so don't worry about setting random / seed
    }

    /** Copy just the seed onto obj if it is weka-{@link Randomizable}; otherwise do nothing. */
    default void copySeedTo(Object obj) {
        if(obj instanceof Randomizable) {
            ((Randomizable) obj).setSeed(getSeed());
        }
    }
}
1,406
30.977273
153
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/serial/SerSupplier.java
package tsml.classifiers.distance_based.utils.system.serial;

import java.io.Serializable;
import java.util.function.Supplier;

/**
 * A {@link Supplier} that is also {@link Serializable}, so supplier lambdas can be stored
 * in serializable classes without breaking serialization.
 *
 * @param <A> the type of value supplied
 */
public interface SerSupplier<A> extends Serializable, Supplier<A> {

}
198
23.875
67
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/timing/Stated.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.distance_based.utils.system.timing; import java.io.Serializable; /** * Purpose: template for managing simple boolean start / stopped state. * <p> * Contributors: goastler */ public class Stated implements Serializable { private boolean started; public Stated() { this(false); } public Stated(boolean start) { reset(); if(start) { start(); } } public boolean isStarted() { return started; } public boolean isStopped() { return !started; } public void start() { if(!started) { started = true; } else { throw new IllegalStateException("already started"); } } public void optionalStart() { if(!isStarted()) { start(); } } public void optionalStop() { if(!isStopped()) { stop(); } } public void stop() { if(started) { started = false; } else { throw new IllegalStateException("already stopped"); } } public void reset() { } public void resetAndStart() { reset(); optionalStart(); } public void stopAndReset() { optionalStop(); reset(); } public void checkStopped() { if(started) { throw new IllegalStateException("not stopped"); } } public void checkStarted() { if(!started) { throw new IllegalStateException("not started"); } } @Override public String toString() { return "started=" + started; } }
2,459
21.568807
76
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/timing/StopWatch.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.distance_based.utils.system.timing; import tsml.classifiers.distance_based.utils.experiment.TimeSpan; import tsml.classifiers.distance_based.utils.system.logging.LogUtils; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.time.Duration; /** * Purpose: track time, ability to pause and add on time from another stop watch * * Contributors: goastler */ public class StopWatch extends Stated { // track the last time the elapsedTime was updated private long lastUpdateTimeStamp; // the cumulative elapsed time private long elapsedTime; public StopWatch() { super(false); } public StopWatch(boolean start) { super(start); } public StopWatch(long startTimeStamp) { super(false); start(startTimeStamp); } /** * Get the elapsed time. If the StopWatch is started, this will update the elapsedTime with the difference since start() or this method were last called. * @return */ public long elapsedTime() { return elapsedTime(System.nanoTime()); } /** * Update and get the elapsed time. 
If the stopwatch is started, this will update the elapsed time with the difference between the given time stamp and the last recorded timestamp (either the start time stamp or the time stamp from the most recent call to this method). * @param timeStamp * @return */ public long elapsedTime(long timeStamp) { if(isStarted()) { if(lastUpdateTimeStamp > timeStamp) { throw new IllegalStateException("last update time stamp is from the future: " + lastUpdateTimeStamp + " > " + timeStamp); } final long diff = timeStamp - lastUpdateTimeStamp; elapsedTime += diff; lastUpdateTimeStamp = timeStamp; } return elapsedTime; } /** * Start the StopWatch at the current time. */ public void start() { start(System.nanoTime()); } /** * Start the StopWatch from the given time. * @param startTimeStamp */ public void start(long startTimeStamp) { super.start(); setStartTimeStamp(startTimeStamp); } public void stop(long timeStamp) { elapsedTime(timeStamp); super.stop(); } public void stop() { stop(System.nanoTime()); } /** * Set the start time irrelevant of current state. * @param startTimeStamp */ private void setStartTimeStamp(long startTimeStamp) { if(startTimeStamp > System.nanoTime()) { throw new IllegalArgumentException("cannot set start time in the future"); } lastUpdateTimeStamp = startTimeStamp; } /** * Set the elapsed time. * @param elapsedTime */ public void setElapsedTime(long elapsedTime) { if(elapsedTime < 0) { throw new IllegalArgumentException("elapsed time cannot be less than 0"); } this.elapsedTime = elapsedTime; } public void resetElapsedTime() { setElapsedTime(0); } /** * Reset the elapsed time to zero and invalidate the start time. 
*/ public void reset() { optionalStop(); resetElapsedTime(); setStartTimeStamp(System.nanoTime()); } /** * add time to the elapsed time * @param nanos */ public void add(long nanos) { elapsedTime += nanos; } public void add(long startTimeStamp, long stopTimeStamp) { if(stopTimeStamp < startTimeStamp) { throw new IllegalArgumentException("start before stop"); } add(stopTimeStamp - startTimeStamp); } @Override public String toString() { return "StopWatch{" + "elapsedTime=" + elapsedTime() + ", " + super.toString() + '}'; } /** * Get the time stamp when the elapsed time was last updated. * @return */ public long timeStamp() { return lastUpdateTimeStamp; } private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException { ois.defaultReadObject(); // any stopwatch read from file should begin in a stopped state setStartTimeStamp(0); if(isStarted()) { super.stop(); } } private void writeObject(ObjectOutputStream oos) throws ClassNotFoundException, IOException { // update the elapsed time elapsedTime(); oos.defaultWriteObject(); } public TimeSpan toTimeSpan() { return new TimeSpan(elapsedTime()); } }
5,492
28.532258
273
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/timing/StopWatchTest.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.distance_based.utils.system.timing; import java.util.concurrent.TimeUnit; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import tsml.classifiers.distance_based.utils.system.copy.CopierUtils; import static utilities.Utilities.busyWait; public class StopWatchTest { private StopWatch stopWatch; @Before public void before() { stopWatch = new StopWatch(); } @Test public void testStartSpecificTime() { stopWatch.start(0); Assert.assertEquals(0, stopWatch.timeStamp()); final long timeStamp = System.nanoTime(); Assert.assertEquals(timeStamp, stopWatch.elapsedTime(), TimeUnit.NANOSECONDS.convert(10, TimeUnit.MILLISECONDS)); } @Test public void testElapsedTime() { stopWatch.start(); busyWait(delay); stopWatch.stop(); final long l = stopWatch.elapsedTime(); // should be at least delay of time elapsed Assert.assertTrue(l >= delay); // should be somewhere less than delay*2 elapsed. 
This varies from machine to machine, so the test is not very // stringent on tolerance Assert.assertTrue(l <= delay * 2); } @Test public void testSerialisation() { stopWatch.start(); long timeStamp = stopWatch.timeStamp(); StopWatch other = CopierUtils.deserialise(CopierUtils.serialise(stopWatch)); Assert.assertTrue(other.isStopped()); // make sure the clock / timeStamp gets reset post ser Assert.assertTrue(other.timeStamp() == 0); Assert.assertTrue(other.elapsedTime() > 0); stopWatch.resetAndStart(); busyWait(delay); stopWatch.stop(); timeStamp = stopWatch.timeStamp(); other = CopierUtils.deserialise(CopierUtils.serialise(stopWatch)); Assert.assertTrue(other.isStopped()); // make sure the clock / timeStamp gets reset post ser Assert.assertEquals(other.timeStamp(), 0); Assert.assertEquals(other.elapsedTime(), stopWatch.elapsedTime()); } @Test(expected = IllegalStateException.class) public void testGetStartTimeWhenStopped() { stopWatch.optionalStop(); Assert.assertFalse(stopWatch.isStarted()); stopWatch.stop(); } @Test public void testGetStartTimeWhenStarted() { long timeStamp = System.nanoTime(); stopWatch.start(); Assert.assertTrue(stopWatch.isStarted()); long startTime = stopWatch.timeStamp(); Assert.assertTrue(startTime > timeStamp); Assert.assertTrue(startTime < timeStamp + TimeUnit.NANOSECONDS.convert(10, TimeUnit.MILLISECONDS)); } @Test public void testReset() { stopWatch.start(); stopWatch.stop(); stopWatch.reset(); long timeStamp = System.nanoTime(); Assert.assertEquals(stopWatch.elapsedTime(), 0); Assert.assertEquals(stopWatch.timeStamp(), timeStamp, tolerance); } @Test public void testResetTime() { stopWatch.start(); busyWait(delay); Assert.assertNotEquals(stopWatch.elapsedTime(), 0); stopWatch.stop(); stopWatch.resetElapsedTime(); Assert.assertEquals(stopWatch.elapsedTime(), 0); } @Test public void testResetClock() { stopWatch.start(); long startTime = stopWatch.timeStamp(); stopWatch.reset(); Assert.assertTrue(stopWatch.timeStamp() > startTime); 
Assert.assertEquals(stopWatch.timeStamp(), stopWatch.timeStamp()); } @Test public void testStop() { stopWatch.start(); long startTime = stopWatch.elapsedTime(); busyWait(delay); Assert.assertTrue(stopWatch.isStarted()); stopWatch.stop(); long stopTime = stopWatch.elapsedTime(); Assert.assertTrue(stopTime > 0); Assert.assertFalse(stopWatch.isStarted()); Assert.assertTrue(stopWatch.isStopped()); stopWatch.stopAndReset(); Assert.assertFalse(stopWatch.isStarted()); Assert.assertTrue(stopWatch.isStopped()); Assert.assertEquals(0, stopWatch.elapsedTime()); } @Test public void testDoubleStop() { stopWatch.start(); Assert.assertTrue(stopWatch.isStarted()); stopWatch.stop(); Assert.assertTrue(stopWatch.isStopped()); stopWatch.optionalStop(); Assert.assertTrue(stopWatch.isStopped()); try { stopWatch.stop(); Assert.fail(); } catch(IllegalStateException e) { } Assert.assertTrue(stopWatch.isStopped()); } @Test public void testDoubleStart() { stopWatch.start(); Assert.assertTrue(stopWatch.isStarted()); stopWatch.optionalStart(); Assert.assertTrue(stopWatch.isStarted()); try { stopWatch.start(); Assert.fail(); } catch(IllegalStateException e) { } Assert.assertTrue(stopWatch.isStarted()); } @Test public void testAdd() { stopWatch.start(); stopWatch.stop(); long time = stopWatch.elapsedTime(); long addend = 10; stopWatch.add(addend); Assert.assertEquals(addend + time, stopWatch.elapsedTime()); long prevTime = stopWatch.elapsedTime(); stopWatch.add(stopWatch.elapsedTime()); Assert.assertEquals(prevTime * 2, stopWatch.elapsedTime()); } private final long tolerance = TimeUnit.NANOSECONDS.convert(10, TimeUnit.MILLISECONDS); private final long delay = TimeUnit.NANOSECONDS.convert(100, TimeUnit.MILLISECONDS); }
6,431
32.675393
121
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/distance_based/utils/system/timing/TimeAmount.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.distance_based.utils.system.timing; import java.util.concurrent.TimeUnit; import tsml.classifiers.distance_based.utils.strings.StrUtils; /** * Purpose: // todo - docs - type the purpose of the code here * <p> * Contributors: goastler */ public class TimeAmount implements Comparable<TimeAmount> { private long amount; private TimeUnit unit; public TimeAmount() { this(0, TimeUnit.NANOSECONDS); } public TimeAmount(long amount, TimeUnit unit) { setAmount(amount); setUnit(unit); } public enum ShortTimeUnit { S(TimeUnit.SECONDS), SEC(TimeUnit.SECONDS), M(TimeUnit.MINUTES), MIN(TimeUnit.MINUTES), H(TimeUnit.HOURS), HR(TimeUnit.HOURS), D(TimeUnit.DAYS), ; private final TimeUnit alias; ShortTimeUnit(final TimeUnit unit) { this.alias = unit; } public TimeUnit getAlias() { return alias; } } public static TimeAmount parse(String amount, String unit) { unit = unit.trim(); amount = amount.trim(); unit = unit.toUpperCase(); unit = StrUtils.depluralise(unit); TimeUnit timeUnit; try { timeUnit = ShortTimeUnit.valueOf(unit).getAlias(); } catch(Exception e) { timeUnit = TimeUnit.valueOf(unit); } return new TimeAmount(Long.parseLong(amount), timeUnit); } public static TimeAmount parse(String str) { String[] parts = 
StrUtils.extractAmountAndUnit(str); return parse(parts[0], parts[1]); } @Override public String toString() { return getAmount() + " " + getUnit(); } public long getAmount() { return amount; } public TimeAmount setAmount(final long amount) { this.amount = amount; return this; } public TimeUnit getUnit() { return unit; } public TimeAmount setUnit(final TimeUnit unit) { this.unit = unit; return this; } public TimeAmount convert(TimeUnit unit) { return new TimeAmount(unit.convert(getAmount(), getUnit()), unit); } @Override public int compareTo(final TimeAmount other) { TimeAmount otherNanos = other.convert(TimeUnit.NANOSECONDS); TimeAmount nanos = convert(TimeUnit.NANOSECONDS); return Long.compare(nanos.getAmount(), otherNanos.getAmount()); } }
3,239
26.692308
76
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/early_classification/AbstractEarlyClassifier.java
/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package tsml.classifiers.early_classification; import weka.classifiers.AbstractClassifier; /** * Abstract classifier class for early classification classifiers. * * @author Matthew Middlehurst */ public abstract class AbstractEarlyClassifier extends AbstractClassifier { /** Time point thresholds for classifiers to make predictions at */ protected int[] thresholds; protected boolean normalise = false; public int[] getThresholds(){ return thresholds; } public void setThresholds(int[] t){ thresholds = t; } public void setNormalise(boolean b) { normalise = b; } }
1,274
33.459459
74
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/early_classification/ECEC.java
package tsml.classifiers.early_classification; import evaluation.evaluators.CrossValidationEvaluator; import evaluation.storage.ClassifierResults; import tsml.classifiers.EnhancedAbstractClassifier; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.core.Instances; import weka.core.Randomizable; import java.util.ArrayList; import java.util.List; import java.util.Random; import static utilities.ArrayUtilities.unique; import static utilities.InstanceTools.truncateInstances; import static utilities.InstanceTools.zNormaliseWithClass; import static utilities.Utilities.argMax; public class ECEC extends EarlyDecisionMaker implements Randomizable, LoadableEarlyDecisionMaker { private double ratio = 0.8; private double confidenceThreshold; private int finalIndex; private int[] labels; private int[][] predCount; private int[][] correctCount; private int seed; private Random rand; public ECEC() { } @Override public void setSeed(int s) { seed = s; } @Override public int getSeed() { return seed; } @Override public void fit(Instances data, Classifier[] classifiers, int[] thresholds) throws Exception { double[][][] trainProbabilities = new double[thresholds.length][][]; for (int i = 0; i < thresholds.length; i++) { if (classifiers[i] instanceof EnhancedAbstractClassifier && ((EnhancedAbstractClassifier) classifiers[i]).ableToEstimateOwnPerformance() && ((EnhancedAbstractClassifier) classifiers[i]).getEstimateOwnPerformance()) { trainProbabilities[i] = ((EnhancedAbstractClassifier) classifiers[i]).getTrainResults() .getProbabilityDistributionsAsArray(); } else { Instances truncatedData = truncateInstances(data, data.numAttributes() - 1, thresholds[i]); if (normalise) zNormaliseWithClass(truncatedData); CrossValidationEvaluator cv = new CrossValidationEvaluator(); cv.setSeed(seed); cv.setNumFolds(5); ClassifierResults r = cv.crossValidateWithStats(AbstractClassifier.makeCopy(classifiers[i]), truncatedData); trainProbabilities[i] = 
r.getProbabilityDistributionsAsArray(); } } fitECEC(data, thresholds, trainProbabilities); } @Override public void loadFromFile(Instances data, String directoryPath, int[] thresholds) throws Exception { double[][][] trainProbabilities = new double[thresholds.length][][]; for (int i = 0; i < thresholds.length; i++) { ClassifierResults r = new ClassifierResults(directoryPath + thresholds[i] + "trainFold" + seed + ".csv"); trainProbabilities[i] = r.getProbabilityDistributionsAsArray(); } fitECEC(data, thresholds, trainProbabilities); } @Override public boolean decide(int thresholdIndex, double[] probabilities) throws Exception { if (thresholdIndex == finalIndex) return true; if (thresholdIndex == 0) labels = new int[finalIndex + 1]; labels[thresholdIndex] = argMax(probabilities, rand); double mod = 1; for(int j = 0; j <= thresholdIndex; j++) { if (labels[j] == labels[thresholdIndex]) { double correct = (double) correctCount[j][labels[thresholdIndex]] / predCount[j][labels[thresholdIndex]]; mod *= 1 - correct; } } double confidence = 1 - mod; return confidence >= confidenceThreshold; } public void fitECEC(Instances data, int[] thresholds, double[][][] trainProbabilities) throws Exception { rand = new Random(seed); finalIndex = thresholds.length - 1; int seriesLength = data.numAttributes() - 1; int[][] trainPred = new int[thresholds.length][data.numInstances()]; predCount = new int[thresholds.length][data.numClasses()]; correctCount = new int[thresholds.length][data.numClasses()]; for (int n = 0; n < thresholds.length; n++) { for (int i = 0; i < data.numInstances(); i++) { trainPred[n][i] = argMax(trainProbabilities[n][i], rand); predCount[n][trainPred[n][i]]++; if (trainPred[n][i] == data.get(i).classIndex()) { correctCount[n][trainPred[n][i]]++; } } } double[][] confidences = new double[thresholds.length][data.numInstances()]; double[] allConfidences = new double[thresholds.length * data.numInstances()]; ArrayList<Double>[] classCondifences = new 
ArrayList[data.numClasses()]; for (int i = 0; i < classCondifences.length; i++) { classCondifences[i] = new ArrayList<>(); } int p = 0; for (int i = 0; i < data.numInstances(); i++) { for (int n = 0; n < thresholds.length; n++) { double mod = 1; for (int j = 0; j <= n; j++) { if (trainPred[j][i] == trainPred[n][i]) { double correct = (double) correctCount[j][trainPred[n][i]] / predCount[j][trainPred[n][i]]; mod *= 1 - correct; } } confidences[n][i] = 1 - mod; allConfidences[p++] = confidences[n][i]; classCondifences[(int) data.get(i).classValue()].add(confidences[n][i]); } } List<Double> uniqueConfidences = unique(allConfidences); double[] confidenceThresholds = new double[uniqueConfidences.size() - 1]; for (int i = 0; i < confidenceThresholds.length; i++) { confidenceThresholds[i] = (uniqueConfidences.get(i) + uniqueConfidences.get(i + 1)) / 2; } double minCost = Double.MAX_VALUE; for (double threshold : confidenceThresholds) { int success = 0; double earliness = 0; for (int n = 0; n < data.numInstances(); n++) { for (int j = 0; j < thresholds.length; j++) { if (confidences[n][j] > threshold || j == finalIndex) { earliness += (double) thresholds[j] / seriesLength; if (trainPred[n][j] == (int) data.get(n).classValue()) { success++; } break; } } } double cost = ratio * (data.numInstances() - success) + (1 - ratio) * earliness; if (cost < minCost) { minCost = cost; confidenceThreshold = threshold; } } } }
6,891
37.077348
115
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/early_classification/EarlyDecisionMaker.java
/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package tsml.classifiers.early_classification; import weka.classifiers.Classifier; import weka.core.Instances; import java.util.Arrays; import java.util.TreeSet; /** * Class for early classification decision makers. * When presented with a series to predict, decide on whether to make a prediction or delay until more of the series * is available. * * @author Matthew Middlehurst */ public abstract class EarlyDecisionMaker { protected boolean normalise = false; public void setNormalise(boolean b) { normalise = b; } public abstract void fit(Instances data, Classifier[] classifiers, int[] thresholds) throws Exception; public void fit(Instances data, Classifier classifier, int[] thresholds) throws Exception { Classifier[] classifiers = new Classifier[thresholds.length]; Arrays.fill(classifiers, classifier); fit(data, classifiers, thresholds); } public void fit(Instances data, Classifier classifier) throws Exception { fit(data, classifier, defaultTimeStamps(data.numAttributes()-1)); } /** * Many algorithms require data from previous decisions form an instance. The way this is currently set up got many * is that it will create a clean slate each time the thresholdIndex is 0. Calling this on a new instance before * the final decision has been made on a previous one is a good way to break everything. 
* * This is obviously not ideal for real use cases, but there arent being used for that currently. */ public abstract boolean decide(int thresholdIndex, double[] probabilities) throws Exception; public int[] defaultTimeStamps(int length) { TreeSet<Integer> ts = new TreeSet<>(); for (double i = 0.05; i < 0.99; i += 0.05) { i = Math.round(i * 100.0) / 100.0; int v = (int) Math.round(i * length); if (v >= 3) ts.add(v); } ts.add(length); int[] arr = new int[ts.size()]; int i = 0; for (Integer v: ts) arr[i++] = v; return arr; } }
2,767
36.405405
119
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/early_classification/EarlyDecisionMakerClassifier.java
/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package tsml.classifiers.early_classification; import evaluation.storage.ClassifierResults; import experiments.data.DatasetLoading; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.classifiers.interval_based.TSF; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.core.Instance; import weka.core.Instances; import weka.core.Randomizable; import java.util.Arrays; import java.util.Random; import static utilities.ArrayUtilities.mean; import static utilities.InstanceTools.*; import static utilities.Utilities.argMax; /** * Early classification classifier which makes use of a tsml/weka classifier and decision maker. * Can load from test and train files. 
* * @author Matthew Middlehurst */ public class EarlyDecisionMakerClassifier extends AbstractEarlyClassifier implements Randomizable { private Classifier classifier; private EarlyDecisionMaker decisionMaker; private Classifier[] classifiers; private boolean useOwnTrainEstimates = true; private int seed = 0; private Random rand; private boolean loadFromFile = false; private String loadPath; private ClassifierResults[] loadedResults; private int testInstanceCounter = 0; private int lastIdx = Integer.MIN_VALUE; public EarlyDecisionMakerClassifier(Classifier classifier, EarlyDecisionMaker decisionMaker){ this.classifier = classifier; this.decisionMaker = decisionMaker; } @Override public int getSeed() { return 0; } public Classifier getClassifier() { return classifier; } public EarlyDecisionMaker getDecisionMaker() { return decisionMaker; } @Override public void setSeed(int i) { seed = i; } public void setLoadFromFilePath(String path) { loadFromFile = true; loadPath = path; } public void setUseOwnTrainEstimates(boolean b){ useOwnTrainEstimates = b; } @Override public void buildClassifier(Instances data) throws Exception { if (thresholds == null) thresholds = decisionMaker.defaultTimeStamps(data.numAttributes()-1); classifiers = new Classifier[thresholds.length]; if (classifier instanceof Randomizable) ((Randomizable) classifier).setSeed(seed); if (decisionMaker instanceof Randomizable) ((Randomizable) decisionMaker).setSeed(seed); decisionMaker.setNormalise(normalise); rand = new Random(seed); boolean estimatingOwnPerformance = false; if (classifier instanceof EnhancedAbstractClassifier && ((EnhancedAbstractClassifier)classifier).ableToEstimateOwnPerformance()){ ((EnhancedAbstractClassifier)classifier).setEstimateOwnPerformance(useOwnTrainEstimates); estimatingOwnPerformance = true; } if (loadFromFile) { loadedResults = new ClassifierResults[thresholds.length]; for (int i = 0; i < thresholds.length; i++) { loadedResults[i] = new ClassifierResults(loadPath + 
thresholds[i] + "testFold" + seed + ".csv"); } if (decisionMaker instanceof LoadableEarlyDecisionMaker){ ((LoadableEarlyDecisionMaker) decisionMaker).loadFromFile(data, loadPath, thresholds); } else{ decisionMaker.fit(data, classifier, thresholds); } } else { int length = data.numAttributes() - 1; for (int i = 0; i < thresholds.length; i++) { Instances newData = truncateInstances(data, length, thresholds[i]); if (normalise) newData = zNormaliseWithClass(newData); classifiers[i] = AbstractClassifier.makeCopy(classifier); classifiers[i].buildClassifier(newData); } if (estimatingOwnPerformance) { decisionMaker.fit(data, classifiers, thresholds); } else{ Classifier[] blankClassifiers = new Classifier[thresholds.length]; for (int i = 0; i < blankClassifiers.length; i++) { blankClassifiers[i] = AbstractClassifier.makeCopy(classifier); } decisionMaker.fit(data, blankClassifiers, thresholds); } } } @Override public double classifyInstance(Instance instance) throws Exception { double[] probs = distributionForInstance(instance); return probs == null ? -1 : argMax(probs, rand); } @Override public double[] distributionForInstance(Instance instance) throws Exception { int idx = -1; for (int i = 0; i < thresholds.length; i++){ if (instance.numAttributes()-1 == thresholds[i]){ idx = i; break; } } if (idx == -1) throw new Exception("Input instance length does not match any given timestamps."); double[] probs; boolean decision; Instance newData = instance; if (normalise) newData = zNormaliseWithClass(instance); if (loadFromFile) { if (idx <= lastIdx) testInstanceCounter++; probs = loadedResults[idx].getProbabilityDistribution(testInstanceCounter); decision = decisionMaker.decide(idx, probs); lastIdx = idx; } else { probs = classifiers[idx].distributionForInstance(newData); decision = decisionMaker.decide(idx, probs); } return decision ? 
probs : null; } public static void main(String[] args) throws Exception { int fold = 0; String dataset = "ItalyPowerDemand"; Instances train = DatasetLoading.loadDataNullable("Z:\\ArchiveData\\Univariate_arff\\" + dataset + "\\" + dataset + "_TRAIN.arff"); Instances test = DatasetLoading.loadDataNullable("Z:\\ArchiveData\\Univariate_arff\\" + dataset + "\\" + dataset + "_TEST.arff"); Instances[] data = resampleTrainAndTestInstances(train, test, fold); train = data[0]; test = data[1]; Random r = new Random(fold); Classifier c = new TSF(); EarlyDecisionMaker dm = new TEASER(); EarlyDecisionMakerClassifier cls = new EarlyDecisionMakerClassifier(c, dm); cls.normalise = true; cls.buildClassifier(train); int length = test.numAttributes() - 1; double[][] testProbs = new double[test.numInstances()][]; double[] testPreds = new double[test.numInstances()]; double[] testEarliness = new double[test.numInstances()]; for (int i = 0; i < cls.thresholds.length; i++) { Instances newData = truncateInstances(test, length, cls.thresholds[i]); for (int n = 0; n < test.numInstances(); n++) { if (testProbs[n] == null) { Instance inst = newData.get(n); double[] probs = cls.distributionForInstance(inst); if (probs != null) { testProbs[n] = probs; testPreds[n] = argMax(probs, r); testEarliness[n] = cls.thresholds[i] / (double) length; } } } } double[] trueClassVals = test.attributeToDoubleArray(test.classIndex()); String[] stringEarliness = new String[test.numInstances()]; for (int n = 0; n < testEarliness.length; n++) { stringEarliness[n] = Double.toString(testEarliness[n]); } ClassifierResults cr = new ClassifierResults(); cr.addAllPredictions(trueClassVals, testPreds, testProbs, new long[test.numInstances()], stringEarliness); double accuracy = cr.getAcc(); double earliness = mean(testEarliness); System.out.println("Accuracy " + accuracy); System.out.println("Earliness " + earliness); System.out.println("HM " + (2 * accuracy * (1 - earliness)) / (accuracy + (1 - earliness))); } }
8,760
36.926407
114
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/early_classification/LoadableEarlyDecisionMaker.java
/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package tsml.classifiers.early_classification; import weka.core.Instances; /** * Interface for decision makers which can use train files to fit. * * @author Matthew Middlehurst */ public interface LoadableEarlyDecisionMaker { void loadFromFile(Instances data, String directoryPath, int[] thresholds) throws Exception; }
1,004
34.892857
95
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/early_classification/ProbabilityThreshold.java
/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package tsml.classifiers.early_classification; import com.carrotsearch.hppc.IntIntHashMap; import weka.classifiers.Classifier; import weka.core.Instances; import weka.core.Randomizable; import java.util.Random; import static utilities.Utilities.argMax; /** * Probability threshold decision maker. * Only makes a prediction if the highest probability is past a set threshold or the full series has been seen. 
* * @author Matthew Middlehurst */ public class ProbabilityThreshold extends EarlyDecisionMaker implements Randomizable { private double threshold = 0.85; private int consecutivePredictions = 1; private IntIntHashMap predCounts; private int finalIndex; private int seed = 0; private Random rand; public ProbabilityThreshold() { } public void setThreshold(double d) { threshold = d; } public void setConsecutivePredictions(int i) { consecutivePredictions = i; } public void setSeed(int i) { seed = i; } @Override public int getSeed() { return seed; } @Override public void fit(Instances data, Classifier[] classifiers, int[] thresholds) { finalIndex = thresholds.length - 1; rand = new Random(seed); } @Override public boolean decide(int thresholdIndex, double[] probabilities) { if (thresholdIndex == finalIndex) return true; if (thresholdIndex == 0) predCounts = new IntIntHashMap(); int pred = argMax(probabilities, rand); if (probabilities[pred] > threshold) { if (consecutivePredictions < 2) { return true; } int count = predCounts.get(pred); if (count == 0) { predCounts.clear(); predCounts.put(pred, 1); } else { count++; if (count >= consecutivePredictions) { predCounts.clear(); return true; } else { predCounts.put(pred, count); return false; } } } else{ predCounts.clear(); } return false; } }
2,835
29.494624
111
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/early_classification/SR1CF1.java
/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package tsml.classifiers.early_classification; import evaluation.evaluators.CrossValidationEvaluator; import evaluation.storage.ClassifierResults; import tsml.classifiers.EnhancedAbstractClassifier; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.core.Instances; import weka.core.Randomizable; import java.util.Arrays; import java.util.Random; import static utilities.GenericTools.linSpace; import static utilities.InstanceTools.*; import static utilities.Utilities.argMax; /** * Modified SR1CF1 decision maker. Uses gridsearch for parameter values instead of the genetic algorithm used in the * paper. * Tunes parameters to find a suitable accuracy and earliness balance, one can be favoured over the other using the * alpha parameter. * * Mori, Usue, et al. "Early classification of time series by simultaneously optimizing the accuracy and earliness." * IEEE transactions on neural networks and learning systems 29.10 (2017): 4569-4578. 
* * @author Matthew Middlehurst */ public class SR1CF1 extends EarlyDecisionMaker implements Randomizable, LoadableEarlyDecisionMaker { private double alpha = 0.8; private int[] timeStamps; private int numParamValues = 200; private int fullLength; private int numInstances; private double[][][] cvProbabilities; private double[] classValues; private double[] p; private int seed = 0; private Random rand; public SR1CF1() {} @Override public int getSeed() { return seed; } public void setAlpha(double d) { alpha = d; } @Override public void setSeed(int i) { seed = i; } @Override public void fit(Instances data, Classifier[] classifiers, int[] thresholds) throws Exception { fullLength = data.numAttributes()-1; numInstances = data.numInstances(); timeStamps = thresholds; rand = new Random(seed); cvProbabilities = new double[timeStamps.length][][]; classValues = data.attributeToDoubleArray(data.classIndex()); p = new double[3]; for (int i = 0; i < timeStamps.length; i++) { if (classifiers[i] instanceof EnhancedAbstractClassifier && ((EnhancedAbstractClassifier) classifiers[i]).ableToEstimateOwnPerformance() && ((EnhancedAbstractClassifier) classifiers[i]).getEstimateOwnPerformance()) { cvProbabilities[i] = ((EnhancedAbstractClassifier) classifiers[i]).getTrainResults() .getProbabilityDistributionsAsArray(); } else { Instances truncatedData = truncateInstances(data, fullLength, timeStamps[i]); if (normalise) zNormaliseWithClass(truncatedData); CrossValidationEvaluator cv = new CrossValidationEvaluator(); cv.setSeed(seed); cv.setNumFolds(5); ClassifierResults r = cv.crossValidateWithStats(AbstractClassifier.makeCopy(classifiers[i]), truncatedData); cvProbabilities[i] = r.getProbabilityDistributionsAsArray(); } } findP(); } @Override public void loadFromFile(Instances data, String directoryPath, int[] thresholds) throws Exception { fullLength = data.numAttributes()-1; numInstances = data.numInstances(); timeStamps = thresholds; rand = new Random(seed); cvProbabilities = new 
double[timeStamps.length][][]; classValues = data.attributeToDoubleArray(data.classIndex()); p = new double[3]; for (int i = 0; i < timeStamps.length; i++) { ClassifierResults r = new ClassifierResults(directoryPath + thresholds[i] + "trainFold" + seed + ".csv"); cvProbabilities[i] = r.getProbabilityDistributionsAsArray(); } findP(); } @Override public boolean decide(int thresholdIndex, double[] probabilities) { return thresholdIndex == timeStamps.length - 1 || stoppingRule(probabilities, timeStamps[thresholdIndex]); } private void findP(){ double[] pVals = linSpace(numParamValues, -1.0, 1.0); double bestGain = 0; double[] bestP = null; for (double v : pVals) { for (double b : pVals) { for (double n : pVals) { p[0] = v; p[1] = b; p[2] = n; double gain = gainFunction(); if (gain > bestGain || (gain == bestGain && rand.nextBoolean())) { bestGain = gain; bestP = Arrays.copyOf(p, p.length); } } } } p = bestP; } private boolean stoppingRule(double[] probs, int length) { double largestVal = -1; double secondLargestVal = -1; for (double prob : probs) { if (prob > largestVal) { secondLargestVal = largestVal; largestVal = prob; } else if (prob > secondLargestVal) { secondLargestVal = prob; } } return (p[0] * largestVal + p[1] * (largestVal - secondLargestVal) + p[2] * length / fullLength) > 0; } private double gainFunction() { double gain = 0; for (int i = 0; i < numInstances; i++) { for (int n = 0; n < timeStamps.length; n++) { if (n == timeStamps.length - 1 || stoppingRule(cvProbabilities[n][i], timeStamps[n])) { gain += alpha * accuracyGain(classValues[i], cvProbabilities[n][i]) + (1 - alpha) * earlinessGain(timeStamps[n]); break; } } } return gain; } private double accuracyGain(double actualClass, double[] probs) { int predClass = argMax(probs, rand); return actualClass == predClass ? 1 : 0; } private double earlinessGain(int length) { return 1 - length / (double) fullLength; } }
6,792
34.941799
116
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/early_classification/ShapeletTransformEarlyClassifier.java
/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package tsml.classifiers.early_classification; import evaluation.storage.ClassifierResults; import experiments.data.DatasetLoading; import tsml.transformers.ShapeletTransform; import tsml.transformers.shapelet_tools.ShapeletTransformFactory; import tsml.transformers.shapelet_tools.ShapeletTransformFactoryOptions; import tsml.transformers.shapelet_tools.ShapeletTransformTimingUtilities; import tsml.transformers.shapelet_tools.distance_functions.ShapeletDistance; import tsml.transformers.shapelet_tools.quality_measures.ShapeletQuality; import tsml.transformers.shapelet_tools.search_functions.ShapeletSearch; import tsml.transformers.shapelet_tools.search_functions.ShapeletSearchOptions; import utilities.InstanceTools; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.classifiers.meta.RotationForest; import weka.core.Instance; import weka.core.Instances; import java.util.Random; import static utilities.ArrayUtilities.mean; import static utilities.InstanceTools.*; import static utilities.Utilities.argMax; /** * Early classification classifier using the shapelet transform. * Extracts shapelets from the full series and uses them and a decision maker to classify. 
* * @author Matthew Middlehurst */ public class ShapeletTransformEarlyClassifier extends AbstractEarlyClassifier { private Classifier classifier; private EarlyDecisionMaker decisionMaker = new ProbabilityThreshold(); private ShapeletTransform transform; private Instances shapeletData; private int[] redundantFeatures; private int seed; private Random rand; public ShapeletTransformEarlyClassifier() { } @Override public void buildClassifier(Instances data) throws Exception { rand = new Random(seed); decisionMaker.setNormalise(normalise); int n = data.numInstances(); int m = data.numAttributes()-1; Instances newData = data; if (normalise) newData = zNormaliseWithClass(data); ShapeletSearch.SearchType searchType = ShapeletSearch.SearchType.RANDOM; ShapeletTransformFactoryOptions.ShapeletTransformOptions transformOptions = new ShapeletTransformFactoryOptions.ShapeletTransformOptions(); transformOptions.setDistanceType(ShapeletDistance.DistanceType.IMPROVED_ONLINE); transformOptions.setQualityMeasure(ShapeletQuality.ShapeletQualityChoice.INFORMATION_GAIN); transformOptions.setRescalerType(ShapeletDistance.RescalerType.NORMALISATION); transformOptions.setRoundRobin(true); transformOptions.setCandidatePruning(true); transformOptions.setMinLength(3); transformOptions.setMaxLength(data.numAttributes()-1); if(data.numClasses() > 2) { transformOptions.setBinaryClassValue(true); transformOptions.setClassBalancing(true); }else{ transformOptions.setBinaryClassValue(false); transformOptions.setClassBalancing(false); } int numShapeletsInTransform= Math.min(10 * n, ShapeletTransform.MAXTRANSFORMSIZE); transformOptions.setKShapelets(numShapeletsInTransform); long numShapeletsInProblem = ShapeletTransformTimingUtilities.calculateNumberOfShapelets(n, m, 3, m); long numShapeletsToEvaluate = 100000; //hardcoded for now if (numShapeletsToEvaluate < n) { numShapeletsToEvaluate = n; } if (numShapeletsToEvaluate >= numShapeletsInProblem){ numShapeletsToEvaluate = numShapeletsInProblem; 
searchType = ShapeletSearch.SearchType.FULL; } ShapeletSearchOptions.Builder searchBuilder = new ShapeletSearchOptions.Builder(); searchBuilder.setSeed(2*seed); searchBuilder.setMin(transformOptions.getMinLength()); searchBuilder.setMax(transformOptions.getMaxLength()); searchBuilder.setSearchType(searchType); searchBuilder.setNumShapeletsToEvaluate(numShapeletsToEvaluate/n); transformOptions.setSearchOptions(searchBuilder.build()); transform = new ShapeletTransformFactory(transformOptions.build()).getTransform(); shapeletData = transform.fitTransform(newData); redundantFeatures = InstanceTools.removeRedundantTrainAttributes(shapeletData); RotationForest rotf = new RotationForest(); rotf.setNumIterations(200); rotf.setSeed(seed); classifier = rotf; classifier.buildClassifier(shapeletData); thresholds = decisionMaker.defaultTimeStamps(data.numAttributes()-1); decisionMaker.fit(newData, classifier, thresholds); shapeletData = new Instances(data,0); } @Override public double classifyInstance(Instance instance) throws Exception { double[] probs = distributionForInstance(instance); return probs == null ? -1 : argMax(probs, rand); } @Override public double[] distributionForInstance(Instance instance) throws Exception { int idx = -1; for (int i = 0; i < thresholds.length; i++){ if (instance.numAttributes()-1 == thresholds[i]){ idx = i; break; } } if (idx == -1) throw new Exception("Input instance length does not match any given timestamps."); Instance newData = instance; if (normalise) newData = zNormaliseWithClass(instance); shapeletData = new Instances(instance.dataset(),0); shapeletData.add(newData); Instances temp = transform.transform(shapeletData); for (int del: redundantFeatures) temp.deleteAttributeAt(del); double[] probs = classifier.distributionForInstance(temp.get(0)); boolean decision = decisionMaker.decide(idx, probs); return decision ? 
probs : null; } public static void main(String[] args) throws Exception{ int fold = 0; String dataset = "ItalyPowerDemand"; Instances train = DatasetLoading.loadDataNullable("Z:\\ArchiveData\\Univariate_arff\\" + dataset + "\\" + dataset + "_TRAIN.arff"); Instances test = DatasetLoading.loadDataNullable("Z:\\ArchiveData\\Univariate_arff\\" + dataset + "\\" + dataset + "_TEST.arff"); Instances[] data = resampleTrainAndTestInstances(train, test, fold); train = data[0]; test = data[1]; Random r = new Random(fold); ShapeletTransformEarlyClassifier cls = new ShapeletTransformEarlyClassifier(); cls.seed = fold; cls.normalise = true; cls.buildClassifier(train); int length = test.numAttributes()-1; double[][] testProbs = new double[test.numInstances()][]; double[] testPreds = new double[test.numInstances()]; double[] testEarliness = new double[test.numInstances()]; for (int i = 0; i < 20; i++){ int newLength = (int)Math.round((i+1)*0.05 * length); Instances newData = truncateInstances(test, length, newLength); for (int n = 0; n < test.numInstances(); n++){ if (testProbs[n] == null) { Instance inst = newData.get(n); double[] probs = cls.distributionForInstance(inst); if (probs != null) { testProbs[n] = probs; testPreds[n] = argMax(probs, r); testEarliness[n] = newLength/(double)length; } } } } double[] trueClassVals = test.attributeToDoubleArray(test.classIndex()); String[] stringEarliness = new String[test.numInstances()]; for (int n = 0; n < testEarliness.length; n++){ stringEarliness[n] = Double.toString(testEarliness[n]); } ClassifierResults cr = new ClassifierResults(); cr.addAllPredictions(trueClassVals, testPreds, testProbs, new long[test.numInstances()], stringEarliness); System.out.println(mean(testEarliness)); System.out.println(cr.getAcc()); } }
8,791
40.276995
114
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/early_classification/TEASER.java
/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package tsml.classifiers.early_classification; import com.carrotsearch.hppc.IntIntHashMap; import evaluation.evaluators.CrossValidationEvaluator; import evaluation.storage.ClassifierResults; import tsml.classifiers.EnhancedAbstractClassifier; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.classifiers.functions.LibSVM; import weka.core.*; import java.util.ArrayList; import java.util.Arrays; import java.util.Random; import static utilities.InstanceTools.truncateInstances; import static utilities.InstanceTools.zNormaliseWithClass; import static utilities.Utilities.argMax; /** * TEASER early classification decision maker. * Trains a 1-class SVM for each threshold, requires v positive decisions in a row to return a true decision. * * Schäfer, Patrick, and Ulf Leser. "Teaser: Early and accurate time series classification." * Data Mining and Knowledge Discovery (2020): 1-27. 
* https://link.springer.com/article/10.1007/s10618-020-00690-z * * @author Matthew Middlehurst */ public class TEASER extends EarlyDecisionMaker implements Randomizable, LoadableEarlyDecisionMaker { private static final double[] SVM_GAMMAS = new double[]{100, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1.5, 1}; private int maxV = 5; private LibSVM[] svm; private int finalIndex; private Instances probDataHeader; private IntIntHashMap predCounts; private int v; private int seed; private Random rand; public TEASER() {} @Override public void setSeed(int s) { seed = s; } @Override public int getSeed() { return seed; } @Override public void fit(Instances data, Classifier[] classifiers, int[] thresholds) throws Exception { double[][][] trainProbabilities = new double[thresholds.length][][]; for (int i = 0; i < thresholds.length; i++) { if (classifiers[i] instanceof EnhancedAbstractClassifier && ((EnhancedAbstractClassifier) classifiers[i]).ableToEstimateOwnPerformance() && ((EnhancedAbstractClassifier) classifiers[i]).getEstimateOwnPerformance()) { trainProbabilities[i] = ((EnhancedAbstractClassifier) classifiers[i]).getTrainResults() .getProbabilityDistributionsAsArray(); } else { Instances truncatedData = truncateInstances(data, data.numAttributes() - 1, thresholds[i]); if (normalise) zNormaliseWithClass(truncatedData); CrossValidationEvaluator cv = new CrossValidationEvaluator(); cv.setSeed(seed); cv.setNumFolds(5); ClassifierResults r = cv.crossValidateWithStats(AbstractClassifier.makeCopy(classifiers[i]), truncatedData); trainProbabilities[i] = r.getProbabilityDistributionsAsArray(); } } fitTEASER(data, thresholds, trainProbabilities); } @Override public void loadFromFile(Instances data, String directoryPath, int[] thresholds) throws Exception { double[][][] trainProbabilities = new double[thresholds.length][][]; for (int i = 0; i < thresholds.length; i++) { ClassifierResults r = new ClassifierResults(directoryPath + thresholds[i] + "trainFold" + seed + ".csv"); 
trainProbabilities[i] = r.getProbabilityDistributionsAsArray(); } fitTEASER(data, thresholds, trainProbabilities); } @Override public boolean decide(int thresholdIndex, double[] probabilities) throws Exception { if (thresholdIndex == finalIndex) return true; int pred = argMax(probabilities, rand); double minDiff = 1; for (int g = 0; g < probabilities.length; g++) { if (pred != g) { minDiff = Math.min(minDiff, probabilities[pred] - probabilities[g]); } } double[] arr = Arrays.copyOf(probabilities, probabilities.length + 2); arr[arr.length - 2] = minDiff; Instance inst = new DenseInstance(1, arr); inst.setDataset(probDataHeader); if (thresholdIndex == 0) predCounts = new IntIntHashMap(); if (svm[thresholdIndex] != null && svm[thresholdIndex].distributionForInstance(inst)[0] == 1) { if (v < 2) { return true; } int count = predCounts.get(pred); if (count == 0) { predCounts.clear(); predCounts.put(pred, 1); } else { count++; if (count >= v) { predCounts.clear(); return true; } else { predCounts.put(pred, count); return false; } } } else { predCounts.clear(); } return false; } public void fitTEASER(Instances data, int[] thresholds, double[][][] trainProbabilities) throws Exception { rand = new Random(seed); libsvm.svm.rand.setSeed(seed); // Disables svm output libsvm.svm.svm_set_print_string_function(s -> { }); finalIndex = thresholds.length - 1; svm = new LibSVM[thresholds.length]; ArrayList<Attribute> atts = new ArrayList<>(); for (int i = 1; i <= data.numClasses()+1; i++) { atts.add(new Attribute("att" + i)); } ArrayList<String> cls = new ArrayList<>(1); cls.add("1"); atts.add(new Attribute("cls", cls)); probDataHeader = new Instances("probData", atts, 0); probDataHeader.setClassIndex(probDataHeader.numAttributes()-1); Instances[] trainData = new Instances[thresholds.length]; int[][] trainPred = new int[thresholds.length][data.numInstances()]; for (int i = 0; i < thresholds.length; i++) { trainData[i] = new Instances(probDataHeader, data.numInstances()); Instances 
probData = new Instances(probDataHeader, data.numInstances()); for (int n = 0; n < data.numInstances(); n++){ trainPred[i][n] = argMax(trainProbabilities[i][n], rand); double minDiff = 1; for (int g = 0; g < trainProbabilities[i][n].length; g++) { if (trainPred[i][n] != g) { minDiff = Math.min(minDiff, trainProbabilities[i][n][trainPred[i][n]] - trainProbabilities[i][n][g]); } } double[] arr = Arrays.copyOf(trainProbabilities[i][n], trainProbabilities[i][n].length + 2); arr[arr.length-2] = minDiff; Instance inst = new DenseInstance(1, arr); trainData[i].add(inst); if (trainPred[i][n] == data.get(n).classValue()) { probData.add(inst); } } if (probData.numInstances() > 1) { int noFolds = Math.min(probData.numInstances(), 10); probData.randomize(rand); probData.stratify(noFolds); double bestAccuracy = -1; for (double svmGamma : SVM_GAMMAS) { LibSVM svmCandidate = new LibSVM(); svmCandidate.setSVMType(new SelectedTag(LibSVM.SVMTYPE_ONE_CLASS_SVM, LibSVM.TAGS_SVMTYPE)); svmCandidate.setEps(1e-4); svmCandidate.setGamma(svmGamma); svmCandidate.setNu(0.05); svmCandidate.setKernelType(new SelectedTag(LibSVM.KERNELTYPE_RBF, LibSVM.TAGS_KERNELTYPE)); svmCandidate.setCacheSize(40); double correct = 0; for (int n = 0; n < noFolds; n++) { Instances cvTrain = probData.trainCV(noFolds, n); Instances cvTest = probData.testCV(noFolds, n); LibSVM svmCV = (LibSVM) AbstractClassifier.makeCopy(svmCandidate); svmCV.buildClassifier(cvTrain); for (Instance inst : cvTest) { if (svmCV.distributionForInstance(inst)[0] == 1) { correct++; } } } double accuracy = correct / probData.numInstances(); if (accuracy > bestAccuracy) { svm[i] = svmCandidate; bestAccuracy = accuracy; } } svm[i].buildClassifier(probData); } } double bestHM = -1; for (int g = 2; g <= maxV; g++) { double correctSum = 0; double earlinessSum = 0; for (int n = 0; n < data.numInstances(); n++){ IntIntHashMap counts = new IntIntHashMap(); for (int i = 0; i < thresholds.length; i++){ if (svm[i] != null && 
(svm[i].distributionForInstance(trainData[i].get(n))[0] == 1 || i == thresholds.length-1)){ int count = counts.get(trainPred[i][n]); if (count == 0 && i < thresholds.length-1){ counts.clear(); counts.put(trainPred[i][n], 1); } else{ count++; if (count >= g || i == thresholds.length-1){ if (trainPred[i][n] == data.get(n).classValue()) correctSum++; earlinessSum += thresholds[i] / (data.numAttributes()-1.0); break; } else { counts.put(trainPred[i][n], count); } } } } } double accuracy = correctSum / data.numInstances(); double earliness = 1.0 - earlinessSum / data.numInstances(); double hm = (2 * accuracy * earliness) / (accuracy + earliness); if (hm > bestHM) { bestHM = hm; v = g; } } } }
11,132
38.760714
119
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/hybrids/Catch22Classifier.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.hybrids; import experiments.data.DatasetLoading; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.transformers.Catch22; import utilities.ClassifierTools; import weka.classifiers.Classifier; import weka.classifiers.trees.RandomForest; import weka.core.*; import java.util.ArrayList; import java.util.concurrent.TimeUnit; import static utilities.InstanceTools.resampleTrainAndTestInstances; import static utilities.multivariate_tools.MultivariateInstanceTools.*; /** * Classifier built using catch22 features. * * C.H. Lubba, S.S. Sethi, P. Knaute, S.R. Schultz, B.D. Fulcher, N.S. Jones. * catch22: CAnonical Time-series CHaracteristics. 
* Data Mining and Knowledge Discovery (2019) * * Implementation based on C and Matlab code provided on authors github: * https://github.com/chlubba/catch22 * * @author Matthew Middlehurst */ public class Catch22Classifier extends EnhancedAbstractClassifier { //z-norm before transform private boolean norm = false; //specifically normalise for the outlier stats, which can take a long time with large positive/negative values private boolean outlierNorm = false; private Classifier cls = new RandomForest(); private Catch22 c22; private Instances header; private int numColumns; public Catch22Classifier(){ super(CANNOT_ESTIMATE_OWN_PERFORMANCE); ((RandomForest)cls).setNumTrees(500); } @Override public Capabilities getCapabilities() { Capabilities result = super.getCapabilities(); result.disableAll(); result.setMinimumNumberInstances(2); // attributes result.enable(Capabilities.Capability.RELATIONAL_ATTRIBUTES); result.enable(Capabilities.Capability.NUMERIC_ATTRIBUTES); // class result.enable(Capabilities.Capability.NOMINAL_CLASS); return result; } public void setClassifier(Classifier cls){ this.cls = cls; } public void setNormalise(boolean b) { this.norm = b; } public void setOutlierNormalise(boolean b) { this.outlierNorm = b; } @Override public void buildClassifier(Instances data) throws Exception { super.buildClassifier(data); trainResults.setBuildTime(System.nanoTime()); getCapabilities().testWithFail(data); Instances[] columns; //Multivariate if (data.checkForAttributeType(Attribute.RELATIONAL)) { columns = splitMultivariateInstances(data); numColumns = numDimensions(data); } //Univariate else{ columns = new Instances[]{data}; numColumns = 1; } c22 = new Catch22(); c22.setNormalise(norm); c22.setOutlierNormalise(outlierNorm); ArrayList<Attribute> atts = new ArrayList<>(); for (int i = 1; i <= 22*numColumns; i++){ atts.add(new Attribute("att" + i)); } atts.add(data.classAttribute()); Instances transformedData = new Instances("Catch22Transform", atts, 
data.numInstances()); transformedData.setClassIndex(transformedData.numAttributes()-1); header = new Instances(transformedData,0); //transform each dimension using the catch22 transformer into a sincle vector for (int i = 0 ; i < data.numInstances(); i++){ double[] d = new double[transformedData.numAttributes()]; for (int n = 0 ; n < numColumns; n++){ Instance inst = (c22.transform(columns[n].get(i))); for (int j = 0; j < 22; j++){ d[n * 22 + j] = inst.value(j); } } d[transformedData.numAttributes()-1] = data.get(i).classValue(); transformedData.add(new DenseInstance(1, d)); } if (cls instanceof Randomizable){ ((Randomizable) cls).setSeed(seed); } cls.buildClassifier(transformedData); trainResults.setTimeUnit(TimeUnit.NANOSECONDS); trainResults.setBuildTime(System.nanoTime() - trainResults.getBuildTime()); } @Override public double classifyInstance(Instance instance) throws Exception { return cls.classifyInstance(predictionTransform(instance)); } public double[] distributionForInstance(Instance instance) throws Exception { return cls.distributionForInstance(predictionTransform(instance)); } public Instance predictionTransform(Instance instance){ Instance[] columns; //Multivariate if (numColumns > 1) { columns = splitMultivariateInstance(instance); } //Univariate else{ columns = new Instance[]{instance}; } //transform each dimension using the catch22 transformer into a sincle vector double[] d = new double[header.numAttributes()]; for (int n = 0 ; n < numColumns; n++){ Instance inst = (c22.transform(columns[n])); for (int j = 0; j < 22; j++){ d[n * 22 + j] = inst.value(j); } } d[header.numAttributes()-1] = instance.classValue(); Instance transformedInst = new DenseInstance(1, d); transformedInst.setDataset(header); return transformedInst; } public static void main(String[] args) throws Exception { int fold = 0; String dataset = "ItalyPowerDemand"; Instances train = DatasetLoading.loadDataNullable("Z:\\ArchiveData\\Univariate_arff\\" + dataset + "\\" + dataset + 
"_TRAIN.arff"); Instances test = DatasetLoading.loadDataNullable("Z:\\ArchiveData\\Univariate_arff\\" + dataset + "\\" + dataset + "_TEST.arff"); Instances[] data = resampleTrainAndTestInstances(train, test, fold); train = data[0]; test = data[1]; Catch22Classifier c; double accuracy; c = new Catch22Classifier(); c.buildClassifier(train); accuracy = ClassifierTools.accuracy(test, c); System.out.println("Catch22Classifier accuracy on " + dataset + " fold " + fold + " = " + accuracy); } }
6,935
33.854271
114
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/hybrids/HIVE_COTE.java
/* * Copyright (C) 2019 xmw13bzu * * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.hybrids; import evaluation.evaluators.CrossValidationEvaluator; import evaluation.tuning.ParameterSpace; import machine_learning.classifiers.ensembles.AbstractEnsemble; import machine_learning.classifiers.ensembles.voting.MajorityConfidence; import machine_learning.classifiers.ensembles.weightings.TrainAcc; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.classifiers.TrainTimeContractable; import tsml.classifiers.Tuneable; import tsml.classifiers.dictionary_based.BOSS; import tsml.classifiers.dictionary_based.TDE; import tsml.classifiers.dictionary_based.cBOSS; import tsml.classifiers.distance_based.ElasticEnsemble; import tsml.classifiers.interval_based.DrCIF; import tsml.classifiers.interval_based.RISE; import tsml.classifiers.interval_based.TSF; import tsml.classifiers.kernel_based.Arsenal; import tsml.classifiers.shapelet_based.ShapeletTransformClassifier; import tsml.data_containers.TimeSeriesInstances; import tsml.data_containers.utilities.Converter; import tsml.transformers.Resizer; import utilities.ClassifierTools; import weka.classifiers.Classifier; import weka.core.*; import java.util.concurrent.TimeUnit; /** * * @author James Large (james.large@uea.ac.uk), 
Tony Bagnall * @maintainer Tony Bagnall * * This classifier is the latest version of the Hierarchical Vote Ensemble Collective of Transformation-based * Ensembles (HIVE-COTE). * * The original classifier, now called HiveCote 0.1, described in [1] has been moved to legacy_cote. * This new one * * 1. Threadable * 2. Contractable * 3. Tuneable * * Version 1.0: */ public class HIVE_COTE extends AbstractEnsemble implements TechnicalInformationHandler, TrainTimeContractable, Tuneable { //TrainTimeContractable protected boolean trainTimeContract = false; protected long trainContractTimeNanos = TimeUnit.DAYS.toNanos(7); // if contracting with no time limit given, default to 7 days. protected TimeUnit contractTrainTimeUnit = TimeUnit.NANOSECONDS; /** * Utility if we want to be conservative while contracting with the overhead * of the ensemble and any variance with the base classifiers' abilities to adhere * to the contract. Give the base classifiers a (very large not not full) proportion * of the contract time given, and allow some extra time for the ensemble overhead, * potential threading overhead, etc */ protected final double BASE_CLASSIFIER_CONTRACT_PROP = 0.99; //if e.g 1 day contract, 864 seconds grace time protected double alpha=4.0; // Weighting parameter for voting method private int defaultSettings = 2; private Resizer resizer; @Override public TechnicalInformation getTechnicalInformation() { TechnicalInformation result; result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE); result.setValue(TechnicalInformation.Field.AUTHOR, "J. Lines, S. Taylor and A. 
Bagnall"); result.setValue(TechnicalInformation.Field.TITLE, "Time Series Classification with HIVE-COTE: The Hierarchical Vote Collective of Transformation-Based Ensembles"); result.setValue(TechnicalInformation.Field.JOURNAL, "ACM Transactions on Knowledge Discovery from Data"); result.setValue(TechnicalInformation.Field.VOLUME, "12"); result.setValue(TechnicalInformation.Field.NUMBER, "5"); result.setValue(TechnicalInformation.Field.PAGES, "52"); result.setValue(TechnicalInformation.Field.YEAR, "2018"); return result; } public HIVE_COTE() { super(); } @Override public void setupDefaultEnsembleSettings() { defaultSettings = 2; } public void setupHIVE_COTE_0_1() { defaultSettings = 0; } private void buildHC0() { //copied over/adapted from HiveCote.setDefaultEnsembles() //TODO jay/tony review this.ensembleName = "HIVE-COTE 0.1"; if (this.weightingScheme == null) this.weightingScheme = new TrainAcc(alpha); if (this.votingScheme == null) this.votingScheme = new MajorityConfidence(); if (this.trainEstimator == null) { CrossValidationEvaluator cv = new CrossValidationEvaluator(seed, false, false, false, false); cv.setNumFolds(10); this.trainEstimator = cv; } if (modules == null) { Classifier[] classifiers = new Classifier[5]; String[] classifierNames = new String[5]; EnhancedAbstractClassifier ee = new ElasticEnsemble(); ee.setEstimateOwnPerformance(true); classifiers[0] = ee; classifierNames[0] = "EE"; // CAWPE st_classifier = new CAWPE(); // DefaultShapeletTransformPlaceholder st_transform= new DefaultShapeletTransformPlaceholder(); // st_classifier.setTransform(st_transform); ShapeletTransformClassifier stc = new ShapeletTransformClassifier(); if (trainTimeContract) stc.setTrainTimeLimit(contractTrainTimeUnit, trainContractTimeNanos); stc.setEstimateOwnPerformance(true); classifiers[1] = stc; classifierNames[1] = "STC"; classifiers[2] = new RISE(); classifierNames[2] = "RISE"; BOSS boss = new BOSS(); boss.setEstimateOwnPerformance(true); classifiers[3] = boss; 
classifierNames[3] = "BOSS"; TSF tsf = new TSF(); tsf.setEstimateOwnPerformance(true); classifiers[4] = tsf; classifierNames[4] = "TSF"; try { setClassifiers(classifiers, classifierNames, null); } catch (Exception e) { System.out.println("Exception thrown when setting up DEFAULT settings of " + this.getClass().getSimpleName() + ". Should " + "be fixed before continuing"); System.exit(1); } } for (EnsembleModule module : modules) if(module.getClassifier() instanceof Randomizable) ((Randomizable)module.getClassifier()).setSeed(seed); if(trainTimeContract) setTrainTimeLimit(contractTrainTimeUnit, trainContractTimeNanos); } public void setupHIVE_COTE_1_0() { defaultSettings = 1; } private void buildHC1() { this.ensembleName = "HIVE-COTE 1.0"; if (this.weightingScheme == null) this.weightingScheme = new TrainAcc(alpha); if (this.votingScheme == null) this.votingScheme = new MajorityConfidence(); if (this.trainEstimator == null) { CrossValidationEvaluator cv = new CrossValidationEvaluator(seed, false, false, false, false); cv.setNumFolds(10); this.trainEstimator = cv; } if (modules == null) { Classifier[] classifiers = new Classifier[4]; String[] classifierNames = new String[4]; ShapeletTransformClassifier stc = new ShapeletTransformClassifier(); stc.setEstimateOwnPerformance(true); classifiers[0] = stc; classifierNames[0] = "STC"; classifiers[1] = new RISE(); classifierNames[1] = "RISE"; cBOSS boss = new cBOSS(); boss.setEstimateOwnPerformance(true); classifiers[2] = boss; classifierNames[2] = "cBOSS"; TSF tsf = new TSF(); classifiers[3] = tsf; classifierNames[3] = "TSF"; tsf.setEstimateOwnPerformance(true); try { setClassifiers(classifiers, classifierNames, null); } catch (Exception e) { System.out.println("Exception thrown when setting up DEFAULT settings of " + this.getClass().getSimpleName() + ". 
Should " + "be fixed before continuing"); System.exit(1); } } for (EnsembleModule module : modules) if(module.getClassifier() instanceof Randomizable) ((Randomizable)module.getClassifier()).setSeed(seed); if(trainTimeContract) setTrainTimeLimit(contractTrainTimeUnit, trainContractTimeNanos); } public void setupHIVE_COTE_2_0() { defaultSettings = 2; } private void buildHC2() { this.ensembleName = "HIVE-COTE 2.0"; if (this.weightingScheme == null) this.weightingScheme = new TrainAcc(alpha); if (this.votingScheme == null) this.votingScheme = new MajorityConfidence(); if (this.trainEstimator == null) { CrossValidationEvaluator cv = new CrossValidationEvaluator(seed, false, false, false, false); cv.setNumFolds(10); this.trainEstimator = cv; } if (modules == null) { ShapeletTransformClassifier stc = new ShapeletTransformClassifier(); DrCIF cif = new DrCIF(); Arsenal afc = new Arsenal(); TDE tde = new TDE(); String[] classifierNames = new String[4]; classifierNames[0] = "STC"; classifierNames[1] = "DrCIF"; classifierNames[2] = "Arsenal"; classifierNames[3] = "TDE"; EnhancedAbstractClassifier[] classifiers = new EnhancedAbstractClassifier[4]; classifiers[0] = stc; classifiers[1] = cif; classifiers[2] = afc; classifiers[3] = tde; for (EnhancedAbstractClassifier cls : classifiers) { cls.setEstimateOwnPerformance(true); cls.setTrainEstimateMethod(TrainEstimateMethod.OOB); } try { setClassifiers(classifiers, classifierNames, null); } catch (Exception e) { System.out.println("Exception thrown when setting up DEFAULT settings of " + this.getClass().getSimpleName() + ". 
Should " + "be fixed before continuing"); System.exit(1); } } for (EnsembleModule module : modules) if(module.getClassifier() instanceof Randomizable) ((Randomizable)module.getClassifier()).setSeed(seed); if(trainTimeContract) setTrainTimeLimit(contractTrainTimeUnit, trainContractTimeNanos); } @Override public void buildClassifier(TimeSeriesInstances data) throws Exception { if (defaultSettings == 0){ buildHC0(); } else if (defaultSettings == 1){ buildHC1(); } else if (defaultSettings == 2){ buildHC2(); } getCapabilities().testWithFail(Converter.toArff(data)); if (!data.isEqualLength()) { // pad with 0s resizer = new Resizer(new Resizer.MaxResizeMetric(), new Resizer.FlatPadMetric(0)); TimeSeriesInstances padded = resizer.fitTransform(data); data = padded; } if(debug) { printDebug(" Building HIVE-COTE with components: "); for (EnsembleModule module : modules){ if (module.getClassifier() instanceof EnhancedAbstractClassifier) ((EnhancedAbstractClassifier) module.getClassifier()).setDebug(debug); printDebug(module.getModuleName()+" "); } printDebug(" \n "); } if (trainTimeContract){ printLineDebug(" In build of HC2: contract time = "+trainContractTimeNanos/1000000000/60/60+" hours "); setupContracting(); } super.buildClassifier(Converter.toArff(data)); trainResults.setParas(getParameters()); printLineDebug("*************** Finished HIVE-COTE Build with train time " + (trainResults.getBuildTime()/1000000000/60/60.0) + " hours, Train+Estimate time = "+(trainResults.getBuildPlusEstimateTime()/1000000000/60/60.0)+" hours ***************"); } @Override public void buildClassifier(Instances data) throws Exception { if (defaultSettings == 0){ buildHC0(); } else if (defaultSettings == 1){ buildHC1(); } else if (defaultSettings == 2){ buildHC2(); } if(debug) { printDebug(" Building HIVE-COTE with components: "); for (EnsembleModule module : modules){ if (module.getClassifier() instanceof EnhancedAbstractClassifier) ((EnhancedAbstractClassifier) 
module.getClassifier()).setDebug(debug); printDebug(module.getModuleName()+" "); } printDebug(" \n "); } if (trainTimeContract){ printLineDebug(" In build of HC2: contract time = "+trainContractTimeNanos/1000000000/60/60+" hours "); setupContracting(); } super.buildClassifier(data); trainResults.setParas(getParameters()); printLineDebug("*************** Finished HIVE-COTE Build with train time " + (trainResults.getBuildTime()/1000000000/60/60.0) + " hours, Train+Estimate time = "+(trainResults.getBuildPlusEstimateTime()/1000000000/60/60.0)+" hours ***************"); } /** * Returns default capabilities of the classifier. These are that the * data must be numeric, with no missing and a nominal class * @return the capabilities of this classifier */ @Override public Capabilities getCapabilities() { Capabilities result = super.getCapabilities(); result.disableAll(); // attributes must be numeric result.enable(Capabilities.Capability.NUMERIC_ATTRIBUTES); // Can only handle discrete class result.enable(Capabilities.Capability.NOMINAL_CLASS); // instances result.setMinimumNumberInstances(1); if(readIndividualsResults)//Can handle all data sets result.enableAll(); return result; } /** * Will split time given evenly among the contractable base classifiers. * * This is currently very naive, and likely innaccurate. Consider these TODOs * * 1) If there are any non-contractable base classifiers, these are ignored in * the contract setting. The full time is allocated among the contractable * base classifiers, instead of trying to do any wonky guessing of how long the * non-contractable ones might take * 2) Currently, generating accuracy estimates is not considered in the contract. * If there are any non-TrainAccuracyEstimating classifiers, the estimation procedure (e.g. * a 10fold cv) will very likely overshoot the contract, since the classifier would be * trying to keep to contract on each fold and the full build individually, not in total. 
* This is an active research question moreso than an implementation question * 3) The contract currently does not consider whether the ensemble is being threaded, * i.e. even if it can run the building of two or more classifiers in parallel, * this will still naively set the contract per classifier as amount/numClassifiers */ /** * Overriding TrainTimeContract methods * @param amount of time in nanos */ @Override //TrainTimeContractable public void setTrainTimeLimit(long amount) { trainTimeContract = true; trainContractTimeNanos = amount; contractTrainTimeUnit = TimeUnit.NANOSECONDS; } @Override public boolean withinTrainContract(long start) { return start<trainContractTimeNanos; } /** * Sets up the ensemble for contracting, to be called at the start of build classifier, * i.e. when parameters can no longer be changed. */ protected void setupContracting() { //splits the ensemble contract time between this many classifiers int numContractableClassifiers = 0; //in future, the number of classifiers we need to separately eval and custom-contract for int numNonTrainEstimatingClassifiers = 0; printLineDebug(" Setting up contracting. Number of modules = "+modules.length); for (EnsembleModule module : modules) { if(module.isTrainTimeContractable()) numContractableClassifiers++; else System.out.println("WARNING: trying to contract " + ensembleName + ", but base classifier " + module.getModuleName() + " is not contractable, " + "and is therefore not considered in the contract. The ensemble as a whole will very likely not meet the contract."); if(!module.isAbleToEstimateOwnPerformance()) { numNonTrainEstimatingClassifiers++; System.out.println("WARNING: trying to contract " + ensembleName + ", but base classifier " + module.getModuleName() + " does not estimate its own accuracy. 
" + "Performing a separate evaluation on the train set currently is not considered in the contract, and therefore the ensemble as a whole will very " + "likely not meet the contract."); } } //force nanos in setting base classifier contracts in case e.g. 1 hour was passed, 1/5 = 0... TimeUnit highFidelityUnit = TimeUnit.NANOSECONDS; long conservativeBaseClassifierContract = (long) (BASE_CLASSIFIER_CONTRACT_PROP * highFidelityUnit.convert(trainContractTimeNanos, contractTrainTimeUnit)); long highFidelityTimePerClassifier; if(multiThread) highFidelityTimePerClassifier= (conservativeBaseClassifierContract); else highFidelityTimePerClassifier= (conservativeBaseClassifierContract) / numContractableClassifiers; printLineDebug(" Setting up contract\nTotal Contract = "+(trainContractTimeNanos/1000000000/60/60)+" hours"); printLineDebug(" Per Classifier = "+highFidelityTimePerClassifier+" Nanos"); for (EnsembleModule module : modules) if(module.isTrainTimeContractable()) ((TrainTimeContractable) module.getClassifier()).setTrainTimeLimit(highFidelityUnit, highFidelityTimePerClassifier); } public void setAlpha(double alpha){ this.alpha = alpha; this.weightingScheme = new TrainAcc(this.alpha); } @Override //EnhancedAbstractClassifier public void setSeed(int seed) { super.setSeed(seed); } @Override //AbstractClassifier public void setOptions(String[] options) throws Exception { // System.out.print("TSF para sets "); // for (String str:options) // System.out.print(","+str); // System.out.print("\n"); String a = Utils.getOption('A', options); alpha=Double.parseDouble(a); this.weightingScheme = new TrainAcc(alpha); } /** *TUNED TSF Classifiers. 
Method for interface Tuneable * Valid options are: <p/> * <pre> -T Number of trees.</pre> * <pre> -I Number of intervals to fit.</pre> * * * @return ParameterSpace object */ @Override //Tuneable public ParameterSpace getDefaultParameterSearchSpace(){ ParameterSpace ps=new ParameterSpace(); String[] alphaRange={"1.0","2.0","3.0","4.0","5.0","6.0","7.0","8.0","9.0","10.0"}; ps.addParameter("A", alphaRange); return ps; } @Override public String getParameters() { String str="WeightingScheme,"+weightingScheme+","+"VotingScheme,"+votingScheme+",alpha,"+alpha+ ",seedClassifier,"+seedClassifier+",seed,"+seed; if (trainTimeContract) str += ",contractTime(hrs),"+trainContractTimeNanos/1000000000/60/60.0; for (EnsembleModule module : modules) str+=","+module.getModuleName()+","+module.posteriorWeights[0]; //This gets really long and it only really used for debugging if (readIndividualsResults) for (EnsembleModule module : modules) str += module.getParameters() + ",,"; return str; } public static void main(String[] args) throws Exception { System.out.println(ClassifierTools.testUtils_getIPDAcc(new HIVE_COTE())); } }
21,236
39.761996
187
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/interval_based/CIF.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.interval_based; import evaluation.evaluators.CrossValidationEvaluator; import evaluation.storage.ClassifierResults; import evaluation.tuning.ParameterSpace; import experiments.data.DatasetLoading; import fileIO.OutFile; import machine_learning.classifiers.ContinuousIntervalTree; import machine_learning.classifiers.ContinuousIntervalTree.Interval; import tsml.classifiers.*; import tsml.data_containers.TSCapabilities; import tsml.data_containers.TimeSeriesInstance; import tsml.data_containers.TimeSeriesInstances; import tsml.data_containers.utilities.Converter; import tsml.transformers.Catch22; import utilities.ClassifierTools; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.core.*; import java.io.BufferedReader; import java.io.File; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; import java.util.Random; import java.util.concurrent.*; import java.util.function.Function; import static tsml.classifiers.interval_based.DrCIF.*; import static utilities.Utilities.argMax; /** * Implementation of the catch22 Interval Forest (CIF) algorithm * * @author Matthew Middlehurst **/ public class CIF extends EnhancedAbstractClassifier implements 
TechnicalInformationHandler, TrainTimeContractable, Checkpointable, Tuneable, MultiThreadable, Visualisable, Interpretable { /** * Paper defining CIF. * * @return TechnicalInformation for CIF */ @Override //TechnicalInformationHandler public TechnicalInformation getTechnicalInformation() { TechnicalInformation result; result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE); result.setValue(TechnicalInformation.Field.AUTHOR, "M. Middlehurst, J. Large and A. Bagnall"); result.setValue(TechnicalInformation.Field.TITLE, "The Canonical Interval Forest (CIF) Classifier for " + "Time Series Classifciation"); result.setValue(TechnicalInformation.Field.JOURNAL, "IEEE International Conference on Big Data"); result.setValue(TechnicalInformation.Field.YEAR, "2020"); return result; } /** * Primary parameters potentially tunable */ private int numClassifiers = 500; /** * Amount of attributes to be subsampled and related data storage. */ private int attSubsampleSize = 8; private int numAttributes = 25; private int startNumAttributes; private ArrayList<int[]> subsampleAtts; /** * Normalise outlier catch22 features which break on data not normalised */ private boolean outlierNorm = true; /** * Use mean,stdev,slope as well as catch22 features */ private boolean useSummaryStats = true; /** IntervalsFinders sets parameter values in buildClassifier if -1. 
*/ /** * Num intervals selected per tree built */ private int numIntervals = -1; private transient Function<Integer, Integer> numIntervalsFinder; /** Secondary parameters */ /** Mainly there to avoid single item intervals, which have no slope or std dev */ /** * Min defaults to 3, Max defaults to m/2 */ private int minIntervalLength = -1; private transient Function<Integer, Integer> minIntervalLengthFinder; private int maxIntervalLength = -1; private transient Function<Integer, Integer> maxIntervalLengthFinder; /** * Ensemble members of base classifier, default to TimeSeriesTree */ private ArrayList<Classifier> trees; private Classifier base = new ContinuousIntervalTree(); /** * for each classifier [i] interval j starts at intervals.get(i)[j][0] and * ends at intervals.get(i)[j][1] */ private ArrayList<int[][]> intervals; /** * Holding variable for test classification in order to retain the header info */ private Instances testHolder; /** * Flags and data required if Bagging */ private boolean bagging = false; private int[] oobCounts; private double[][] trainDistributions; /** * Flags and data required if Checkpointing */ private boolean checkpoint = false; private String checkpointPath; private long checkpointTime = 0; private long lastCheckpointTime = 0; private long checkpointTimeDiff = 0; private boolean internalContractCheckpointHandling = false; /** * Flags and data required if Contracting */ private boolean trainTimeContract = false; private long contractTime = 0; private int maxClassifiers = 500; /** * Multithreading */ private int numThreads = 1; private boolean multiThread = false; private ExecutorService ex; /** * Visualisation and interpretability */ private String visSavePath; private int visNumTopAtts = 3; private String interpSavePath; private ArrayList<ArrayList<double[]>> interpData; private ArrayList<Integer> interpTreePreds; private int interpCount = 0; private double[] interpSeries; private int interpPred; /** * data information */ private int 
seriesLength; private int numInstances; /** * Multivariate */ private int numDimensions; private ArrayList<int[]> intervalDimensions; /** * Transformer used to obtain catch22 features */ private transient Catch22 c22; protected static final long serialVersionUID = 1L; /** * Default constructor for CIF. Can estimate own performance. */ public CIF() { super(CAN_ESTIMATE_OWN_PERFORMANCE); } /** * Set the number of trees to be built. * * @param t number of trees */ public void setNumTrees(int t) { numClassifiers = t; } /** * Set the number of attributes to be subsampled per tree. * * @param a number of attributes sumsampled */ public void setAttSubsampleSize(int a) { attSubsampleSize = a; } /** * Set whether to use the original TSF statistics as well as catch22 features. * * @param b boolean to use summary stats */ public void setUseSummaryStats(boolean b) { useSummaryStats = b; } /** * Set a function for finding the number of intervals randomly selected per tree. * * @param f a function for the number of intervals */ public void setNumIntervalsFinder(Function<Integer, Integer> f) { numIntervalsFinder = f; } /** * Set a function for finding the min interval length for randomly selected intervals. * * @param f a function for min interval length */ public void setMinIntervalLengthFinder(Function<Integer, Integer> f) { minIntervalLengthFinder = f; } /** * Set a function for finding the max interval length for randomly selected intervals. * * @param f a function for max interval length */ public void setMaxIntervalLengthFinder(Function<Integer, Integer> f) { maxIntervalLengthFinder = f; } /** * Set whether to normalise the outlier catch22 features. * * @param b boolean to set outlier normalisation */ public void setOutlierNorm(boolean b) { outlierNorm = b; } /** * Sets the base classifier for the ensemble. 
* * @param c a base classifier constructed elsewhere and cloned into ensemble */ public void setBaseClassifier(Classifier c) { base = c; } /** * Set whether to perform bagging with replacement. * * @param b boolean to set bagging */ public void setBagging(boolean b) { bagging = b; } /** * Set the number of attributes to show when creating visualisations. * * @param i number of attributes */ public void setVisNumTopAtts(int i) { visNumTopAtts = i; } /** * Outputs CIF parameters information as a String. * * @return String written to results files */ @Override //SaveParameterInfo public String getParameters() { int nt = numClassifiers; if (trees != null) nt = trees.size(); return super.getParameters() + ",numTrees," + nt + ",attSubsampleSize," + attSubsampleSize + ",outlierNorm," + outlierNorm + ",basicSummaryStats," + useSummaryStats + ",numIntervals," + numIntervals + ",minIntervalLength," + minIntervalLength + ",maxIntervalLength," + maxIntervalLength + ",baseClassifier," + base.getClass().getSimpleName() + ",bagging," + bagging + ",estimator," + trainEstimateMethod.name() + ",contractTime," + contractTime; } /** * Returns the capabilities for CIF. These are that the * data must be numeric or relational, with no missing and a nominal class * * @return the capabilities of CIF */ @Override //AbstractClassifier public Capabilities getCapabilities() { Capabilities result = super.getCapabilities(); result.disableAll(); result.setMinimumNumberInstances(2); // attributes result.enable(Capabilities.Capability.RELATIONAL_ATTRIBUTES); result.enable(Capabilities.Capability.NUMERIC_ATTRIBUTES); // class result.enable(Capabilities.Capability.NOMINAL_CLASS); return result; } /** * Returns the time series capabilities for CIF. 
These are that the * data must be equal length, with no missing values * * @return the time series capabilities of CIF */ public TSCapabilities getTSCapabilities() { TSCapabilities capabilities = new TSCapabilities(); capabilities.enable(TSCapabilities.EQUAL_LENGTH) .enable(TSCapabilities.MULTI_OR_UNIVARIATE) .enable(TSCapabilities.NO_MISSING_VALUES) .enable(TSCapabilities.MIN_LENGTH(3)); return capabilities; } /** * Build the CIF classifier. * * @param data TimeSeriesInstances object * @throws Exception unable to train model */ @Override //TSClassifier public void buildClassifier(TimeSeriesInstances data) throws Exception { /** Build Stage: * Builds the final classifier with or without bagging. */ trainResults = new ClassifierResults(); rand.setSeed(seed); numClasses = data.numClasses(); trainResults.setEstimatorName(getClassifierName()); trainResults.setBuildTime(System.nanoTime()); // can classifier handle the data? getTSCapabilities().test(data); File file = new File(checkpointPath + "CIF" + seed + ".ser"); //if checkpointing and serialised files exist load said files if (checkpoint && file.exists()) { //path checkpoint files will be saved to if (debug) System.out.println("Loading from checkpoint file"); loadFromFile(checkpointPath + "CIF" + seed + ".ser"); } //initialise variables else { seriesLength = data.getMaxLength(); numInstances = data.numInstances(); numDimensions = data.getMaxNumDimensions(); if (numIntervalsFinder == null) { numIntervals = (int) (Math.sqrt(seriesLength) * Math.sqrt(numDimensions)); } else { numIntervals = numIntervalsFinder.apply(seriesLength); } if (minIntervalLengthFinder == null) { minIntervalLength = 3; } else { minIntervalLength = minIntervalLengthFinder.apply(seriesLength); } if (minIntervalLength < 3) { minIntervalLength = 3; } if (seriesLength <= minIntervalLength) { minIntervalLength = seriesLength / 2; } if (maxIntervalLengthFinder == null) { maxIntervalLength = seriesLength / 2; } else { maxIntervalLength = 
maxIntervalLengthFinder.apply(seriesLength); } if (maxIntervalLength > seriesLength) { maxIntervalLength = seriesLength; } if (maxIntervalLength < minIntervalLength) { maxIntervalLength = minIntervalLength; } if (!useSummaryStats) { numAttributes = 22; } startNumAttributes = numAttributes; subsampleAtts = new ArrayList<>(); if (attSubsampleSize < numAttributes) { numAttributes = attSubsampleSize; } //Set up for Bagging if required if (bagging && getEstimateOwnPerformance()) { trainDistributions = new double[numInstances][numClasses]; oobCounts = new int[numInstances]; } //cancel loop using time instead of number built. if (trainTimeContract) { numClassifiers = maxClassifiers; trees = new ArrayList<>(); intervals = new ArrayList<>(); } else { trees = new ArrayList<>(numClassifiers); intervals = new ArrayList<>(numClassifiers); } intervalDimensions = new ArrayList<>(); } if (multiThread) { ex = Executors.newFixedThreadPool(numThreads); if (checkpoint) System.out.println("Unable to checkpoint until end of build when multi threading."); } c22 = new Catch22(); c22.setOutlierNormalise(outlierNorm); //Set up instances size and format. 
ArrayList<Attribute> atts = new ArrayList<>(); String name; for (int j = 0; j < numIntervals * numAttributes; j++) { name = "F" + j; atts.add(new Attribute(name)); } //Get the class values as an array list ArrayList<String> vals = new ArrayList<>(numClasses); for (int j = 0; j < numClasses; j++) vals.add(Integer.toString(j)); atts.add(new Attribute("cls", vals)); //create blank instances with the correct class value Instances result = new Instances("Tree", atts, numInstances); result.setClassIndex(result.numAttributes() - 1); for (int i = 0; i < numInstances; i++) { DenseInstance in = new DenseInstance(result.numAttributes()); in.setValue(result.numAttributes() - 1, data.get(i).getLabelIndex()); result.add(in); } testHolder = new Instances(result, 1); DenseInstance in = new DenseInstance(testHolder.numAttributes()); in.setValue(testHolder.numAttributes() - 1, -1); testHolder.add(in); if (multiThread) { multiThreadBuildCIF(data, result); } else { buildCIF(data, result); } if (trees.size() == 0) {//Not enough time to build a single classifier throw new Exception((" ERROR in CIF, no trees built, contract time probably too low. Contract time = " + contractTime)); } if (checkpoint) { saveToFile(checkpointPath); } trainResults.setTimeUnit(TimeUnit.NANOSECONDS); trainResults.setBuildTime(System.nanoTime() - trainResults.getBuildTime() - checkpointTimeDiff - trainResults.getErrorEstimateTime()); if (getEstimateOwnPerformance()) { long est1 = System.nanoTime(); estimateOwnPerformance(data); long est2 = System.nanoTime(); trainResults.setErrorEstimateTime(est2 - est1 + trainResults.getErrorEstimateTime()); } trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime() + trainResults.getErrorEstimateTime()); trainResults.setParas(getParameters()); printLineDebug("*************** Finished CIF Build with " + trees.size() + " Trees built in " + trainResults.getBuildTime() / 1000000000 + " Seconds ***************"); } /** * Build the CIF classifier. 
* * @param data weka Instances object * @throws Exception unable to train model */ @Override //AbstractClassifier public void buildClassifier(Instances data) throws Exception { buildClassifier(Converter.fromArff(data)); } /** * Build the CIF classifier * For each base classifier * generate random intervals * do the transfrorms * build the classifier * * @param data TimeSeriesInstances data * @param result Instances object formatted for transformed data * @throws Exception unable to build CIF */ public void buildCIF(TimeSeriesInstances data, Instances result) throws Exception { double[][][] dimensions = data.toValueArray(); while (withinTrainContract(trainResults.getBuildTime()) && trees.size() < numClassifiers) { int i = trees.size(); //1. Select random intervals for tree i int[][] interval = new int[numIntervals][2]; //Start and end for (int j = 0; j < numIntervals; j++) { if (rand.nextBoolean()) { if (seriesLength - minIntervalLength > 0) interval[j][0] = rand.nextInt(seriesLength - minIntervalLength); //Start point int range = Math.min(seriesLength - interval[j][0], maxIntervalLength); int length; if (range - minIntervalLength == 0) length = minIntervalLength; else length = rand.nextInt(range - minIntervalLength) + minIntervalLength; interval[j][1] = interval[j][0] + length; } else { if (seriesLength - minIntervalLength > 0) interval[j][1] = rand.nextInt(seriesLength - minIntervalLength) + minIntervalLength; //End point int range = Math.min(interval[j][1], maxIntervalLength); int length; if (range - minIntervalLength == 0) length = minIntervalLength; else length = rand.nextInt(range - minIntervalLength) + minIntervalLength; interval[j][0] = interval[j][1] - length; } } //If bagging find instances with replacement int[] instInclusions = null; boolean[] inBag = null; if (bagging) { inBag = new boolean[numInstances]; instInclusions = new int[numInstances]; for (int n = 0; n < numInstances; n++) { instInclusions[rand.nextInt(numInstances)]++; } for (int n = 0; n < 
numInstances; n++) { if (instInclusions[n] > 0) { inBag[n] = true; } } } //find attributes to subsample ArrayList<Integer> arrl = new ArrayList<>(startNumAttributes); for (int n = 0; n < startNumAttributes; n++) { arrl.add(n); } int[] subsampleAtt = new int[numAttributes]; for (int n = 0; n < numAttributes; n++) { subsampleAtt[n] = arrl.remove(rand.nextInt(arrl.size())); } //find dimensions for each interval int[] intervalDimension = new int[numIntervals]; for (int n = 0; n < numIntervals; n++) { intervalDimension[n] = rand.nextInt(numDimensions); } Arrays.sort(intervalDimension); //For bagging int instIdx = 0; int lastIdx = -1; //2. Generate and store attributes for (int k = 0; k < numInstances; k++) { //For each instance if (bagging) { boolean sameInst = false; while (true) { if (instInclusions[instIdx] == 0) { instIdx++; } else { instInclusions[instIdx]--; if (instIdx == lastIdx) { result.set(k, new DenseInstance(result.instance(k - 1))); sameInst = true; } else { lastIdx = instIdx; } break; } } if (sameInst) continue; result.instance(k).setValue(result.classIndex(), data.get(instIdx).getLabelIndex()); } else { instIdx = k; } for (int j = 0; j < numIntervals; j++) { //extract the interval double[] series = dimensions[instIdx][intervalDimension[j]]; double[] intervalArray = Arrays.copyOfRange(series, interval[j][0], interval[j][1] + 1); //process features for (int g = 0; g < numAttributes; g++) { if (subsampleAtt[g] < 22) { result.instance(k).setValue(j * numAttributes + g, c22.getSummaryStatByIndex(subsampleAtt[g], j, intervalArray)); } else { result.instance(k).setValue(j * numAttributes + g, FeatureSet.calcFeatureByIndex(subsampleAtt[g], interval[j][0], interval[j][1], series)); } } } } //3. Create and build tree using all the features. 
Feature selection Classifier tree = AbstractClassifier.makeCopy(base); if (seedClassifier && tree instanceof Randomizable) ((Randomizable) tree).setSeed(seed * (i + 1)); tree.buildClassifier(result); if (bagging && getEstimateOwnPerformance()) { long t1 = System.nanoTime(); if (base instanceof ContinuousIntervalTree) { for (int n = 0; n < numInstances; n++) { if (inBag[n]) continue; double[] newProbs = ((ContinuousIntervalTree) tree).distributionForInstance(dimensions[n], functions, interval, subsampleAtt, intervalDimension); oobCounts[n]++; for (int k = 0; k < newProbs.length; k++) trainDistributions[n][k] += newProbs[k]; } } else { for (int n = 0; n < numInstances; n++) { if (inBag[n]) continue; for (int j = 0; j < numIntervals; j++) { double[] series = dimensions[n][intervalDimension[j]]; double[] intervalArray = Arrays.copyOfRange(series, interval[j][0], interval[j][1] + 1); for (int g = 0; g < numAttributes; g++) { if (subsampleAtt[g] < 22) { testHolder.instance(0).setValue(j * numAttributes + g, c22.getSummaryStatByIndex(subsampleAtt[g], j, intervalArray)); } else { testHolder.instance(0).setValue(j * numAttributes + g, FeatureSet.calcFeatureByIndex(subsampleAtt[g], interval[j][0], interval[j][1], series)); } } } double[] newProbs = tree.distributionForInstance(testHolder.instance(0)); oobCounts[n]++; for (int k = 0; k < newProbs.length; k++) trainDistributions[n][k] += newProbs[k]; } } trainResults.setErrorEstimateTime(trainResults.getErrorEstimateTime() + (System.nanoTime() - t1)); } trees.add(tree); intervals.add(interval); subsampleAtts.add(subsampleAtt); intervalDimensions.add(intervalDimension); //Timed checkpointing if enabled, else checkpoint every 100 trees if (checkpoint && ((checkpointTime > 0 && System.nanoTime() - lastCheckpointTime > checkpointTime) || trees.size() % 100 == 0)) { saveToFile(checkpointPath); } } } /** * Build the CIF classifier using multiple threads. 
    * Unable to checkpoint until after the build process while using multiple threads.
    * For each base classifier
    *     generate random intervals
    *     do the transforms
    *     build the classifier
    *
    * @param data TimeSeriesInstances data
    * @param result Instances object formatted for transformed data
    * @throws Exception unable to build CIF
    */
    private void multiThreadBuildCIF(TimeSeriesInstances data, Instances result) throws Exception {
        double[][][] dimensions = data.toValueArray();
        int[] classVals = data.getClassIndexes();
        // Under a time contract, submit one batch of numThreads tasks per loop iteration so the
        // contract can be re-checked between batches; otherwise submit all remaining trees at once.
        int buildStep = trainTimeContract ? numThreads : numClassifiers;

        while (withinTrainContract(trainResults.getBuildTime()) && trees.size() < numClassifiers) {
            ArrayList<Future<MultiThreadBuildHolder>> futures = new ArrayList<>(buildStep);

            int end = trees.size() + buildStep;
            for (int i = trees.size(); i < end; ++i) {
                // Each task gets its own copy of the transform header, pre-filled with class values only;
                // the worker fills in the interval features.
                Instances resultCopy = new Instances(result, numInstances);
                for (int n = 0; n < numInstances; n++) {
                    DenseInstance in = new DenseInstance(result.numAttributes());
                    in.setValue(result.numAttributes() - 1, result.instance(n).classValue());
                    resultCopy.add(in);
                }

                futures.add(ex.submit(new TreeBuildThread(i, dimensions, classVals, resultCopy)));
            }

            for (Future<MultiThreadBuildHolder> f : futures) {
                MultiThreadBuildHolder h = f.get();
                trees.add(h.tree);
                intervals.add(h.interval);
                subsampleAtts.add(h.subsampleAtts);
                intervalDimensions.add(h.intervalDimensions);

                if (bagging && getEstimateOwnPerformance()) {
                    trainResults.setErrorEstimateTime(trainResults.getErrorEstimateTime() + h.errorTime);
                    // Merge each worker's OOB vote counts/distributions into the shared arrays.
                    for (int n = 0; n < numInstances; n++) {
                        oobCounts[n] += h.oobCounts[n];
                        for (int k = 0; k < numClasses; k++)
                            trainDistributions[n][k] += h.trainDistribution[n][k];
                    }
                }
            }
        }
    }

    /**
     * Estimate accuracy stage: Three scenarios
     * 1. If we bagged the full build (bagging ==true), we estimate using the full build OOB.
     * If we built on all data (bagging ==false) we estimate either:
     * 2. With a 10 fold CV.
     * 3. Build a bagged model simply to get the estimate.
     *
     * @param data TimeSeriesInstances to estimate with
     * @throws Exception unable to obtain estimate
     */
    private void estimateOwnPerformance(TimeSeriesInstances data) throws Exception {
        if (bagging) {
            // Use bag data, counts normalised to probabilities
            double[] preds = new double[data.numInstances()];
            double[] actuals = new double[data.numInstances()];
            long[] predTimes = new long[data.numInstances()]; //Dummy variable, need something
            for (int j = 0; j < data.numInstances(); j++) {
                long predTime = System.nanoTime();
                if (oobCounts[j] == 0)
                    // Instance was in every bag, so it has no OOB votes: fall back to a uniform distribution.
                    Arrays.fill(trainDistributions[j], 1.0 / trainDistributions[j].length);
                else
                    for (int k = 0; k < trainDistributions[j].length; k++)
                        trainDistributions[j][k] /= oobCounts[j];
                preds[j] = findIndexOfMax(trainDistributions[j], rand);
                actuals[j] = data.get(j).getLabelIndex();
                predTimes[j] = System.nanoTime() - predTime;
            }

            trainResults.addAllPredictions(actuals, preds, trainDistributions, predTimes, null);
            trainResults.setEstimatorName("CIFBagging");
            trainResults.setDatasetName(data.getProblemName());
            trainResults.setSplit("train");
            trainResults.setFoldID(seed);
            trainResults.setErrorEstimateMethod("OOB");
            trainResults.finaliseResults(actuals);
        }
        //Either do a CV, or bag and get the estimates
        else if (trainEstimateMethod == TrainEstimateMethod.CV) {
            /** Defaults to 10 or numInstances, whichever is smaller.
             * Interface TrainAccuracyEstimate
             * Could this be handled better?
             */
            int numFolds = Math.min(data.numInstances(), 10);
            CrossValidationEvaluator cv = new CrossValidationEvaluator();
            if (seedClassifier)
                cv.setSeed(seed * 5);
            cv.setNumFolds(numFolds);
            // Evaluate an unseeded-estimate clone of this classifier so the CV does not recurse.
            CIF cif = new CIF();
            cif.copyParameters(this);
            if (seedClassifier)
                cif.setSeed(seed * 100);
            cif.setEstimateOwnPerformance(false);
            long tt = trainResults.getBuildTime();
            trainResults = cv.evaluate(cif, Converter.toArff(data));
            // Keep the real build time rather than the CV clone's.
            trainResults.setBuildTime(tt);
            trainResults.setEstimatorName("CIFCV");
            trainResults.setErrorEstimateMethod("CV_" + numFolds);
        } else if (trainEstimateMethod == TrainEstimateMethod.OOB || trainEstimateMethod == TrainEstimateMethod.NONE ||
                trainEstimateMethod == TrainEstimateMethod.TRAIN) {
            /** Build a single new TSF using Bagging, and extract the estimate from this */
            CIF cif = new CIF();
            cif.copyParameters(this);
            cif.setSeed(seed);
            cif.setEstimateOwnPerformance(true);
            cif.bagging = true;
            cif.multiThread = multiThread;
            cif.numThreads = numThreads;
            cif.buildClassifier(data);
            long tt = trainResults.getBuildTime();
            trainResults = cif.trainResults;
            trainResults.setBuildTime(tt);
            trainResults.setEstimatorName("CIFOOB");
            trainResults.setErrorEstimateMethod("OOB");
        }
    }

    /**
     * Copy the parameters of a CIF object to this.
     *
     * @param other A CIF object
     */
    private void copyParameters(CIF other) {
        this.numClassifiers = other.numClassifiers;
        this.attSubsampleSize = other.attSubsampleSize;
        this.outlierNorm = other.outlierNorm;
        this.useSummaryStats = other.useSummaryStats;
        this.numIntervals = other.numIntervals;
        this.numIntervalsFinder = other.numIntervalsFinder;
        this.minIntervalLength = other.minIntervalLength;
        this.minIntervalLengthFinder = other.minIntervalLengthFinder;
        this.maxIntervalLength = other.maxIntervalLength;
        this.maxIntervalLengthFinder = other.maxIntervalLengthFinder;
        // NOTE(review): shared reference, not a deep copy of the base classifier — presumably fine
        // because makeCopy is called per tree; confirm.
        this.base = other.base;
        this.bagging = other.bagging;
        this.trainTimeContract = other.trainTimeContract;
        this.contractTime = other.contractTime;
    }

    /**
     * Find class probabilities of an instance using the trained model.
     *
     * @param ins TimeSeriesInstance object
     * @return array of doubles: probability of each class
     * @throws Exception failure to classify
     */
    @Override //TSClassifier
    public double[] distributionForInstance(TimeSeriesInstance ins) throws Exception {
        double[] d = new double[numClasses];

        double[][] dimensions = ins.toValueArray();

        if (interpSavePath != null) {
            interpData = new ArrayList<>();
            interpTreePreds = new ArrayList<>();
        }

        if (multiThread) {
            ArrayList<Future<MultiThreadPredictionHolder>> futures = new ArrayList<>(trees.size());

            for (int i = 0; i < trees.size(); ++i) {
                // Each prediction task gets its own single-row copy of the transform header.
                Instances testCopy = new Instances(testHolder, 1);
                DenseInstance in = new DenseInstance(testHolder.numAttributes());
                in.setValue(testHolder.numAttributes() - 1, -1);
                testCopy.add(in);

                futures.add(ex.submit(new TreePredictionThread(i, dimensions, trees.get(i), testCopy)));
            }

            for (Future<MultiThreadPredictionHolder> f : futures) {
                MultiThreadPredictionHolder h = f.get();
                d[h.c]++; // one majority vote per tree

                if (interpSavePath != null && base instanceof ContinuousIntervalTree) {
                    interpData.add(h.al);
                    interpTreePreds.add(h.c);
                }
            }
        } else if (base instanceof ContinuousIntervalTree) {
            // ContinuousIntervalTree can classify straight from the raw series, no transform row needed.
            for (int i = 0; i < trees.size(); i++) {
                int c;
                if (interpSavePath != null) {
                    ArrayList<double[]> al = new ArrayList<>();
                    c = (int) ((ContinuousIntervalTree) trees.get(i)).classifyInstance(dimensions, functions,
                            intervals.get(i), subsampleAtts.get(i), intervalDimensions.get(i), al);
                    interpData.add(al);
                    interpTreePreds.add(c);
                } else {
                    c = (int) ((ContinuousIntervalTree) trees.get(i)).classifyInstance(dimensions, functions,
                            intervals.get(i), subsampleAtts.get(i), intervalDimensions.get(i));
                }
                d[c]++;
            }
        } else {
            //Build transformed instance
            for (int i = 0; i < trees.size(); i++) {
                Catch22 c22 = new Catch22();
                c22.setOutlierNormalise(outlierNorm);

                for (int j = 0; j < numIntervals; j++) {
                    // extract the interval from this tree's chosen dimension
                    double[] series = dimensions[intervalDimensions.get(i)[j]];
                    double[] intervalArray = Arrays.copyOfRange(series, intervals.get(i)[j][0],
                            intervals.get(i)[j][1] + 1);

                    for (int g = 0; g < numAttributes; g++) {
                        // indices < 22 are Catch22 features; 22-24 are the simple mean/stdev/slope stats
                        if (subsampleAtts.get(i)[g] < 22) {
                            testHolder.instance(0).setValue(j * numAttributes + g,
                                    c22.getSummaryStatByIndex(subsampleAtts.get(i)[g], j, intervalArray));
                        } else {
                            testHolder.instance(0).setValue(j * numAttributes + g,
                                    FeatureSet.calcFeatureByIndex(subsampleAtts.get(i)[g], intervals.get(i)[j][0],
                                            intervals.get(i)[j][1], series));
                        }
                    }
                }

                int c = (int) trees.get(i).classifyInstance(testHolder.instance(0));
                d[c]++;
            }
        }

        // Normalise tree vote counts into a probability distribution.
        double sum = 0;
        for (double x : d) sum += x;
        for (int i = 0; i < d.length; i++) d[i] = d[i] / sum;

        if (interpSavePath != null) {
            interpSeries = dimensions[0];
            interpPred = argMax(d, rand);
        }

        return d;
    }

    /**
     * Find class probabilities of an instance using the trained model.
     *
     * @param ins weka Instance object
     * @return array of doubles: probability of each class
     * @throws Exception failure to classify
     */
    @Override //AbstractClassifier
    public double[] distributionForInstance(Instance ins) throws Exception {
        return distributionForInstance(Converter.fromArff(ins));
    }

    /**
     * Classify an instance using the trained model.
* * @param ins TimeSeriesInstance object * @return predicted class value * @throws Exception failure to classify */ @Override //TSClassifier public double classifyInstance(TimeSeriesInstance ins) throws Exception { double[] probs = distributionForInstance(ins); return findIndexOfMax(probs, rand); } /** * Classify an instance using the trained model. * * @param ins weka Instance object * @return predicted class value * @throws Exception failure to classify */ @Override //AbstractClassifier public double classifyInstance(Instance ins) throws Exception { return classifyInstance(Converter.fromArff(ins)); } /** * Set the train time limit for a contracted classifier. * * @param amount contract time in nanoseconds */ @Override //TrainTimeContractable public void setTrainTimeLimit(long amount) { contractTime = amount; trainTimeContract = true; } /** * Check if a contracted classifier is within its train time limit. * * @param start classifier build start time * @return true if within the contract or not contracted, false otherwise. */ @Override //TrainTimeContractable public boolean withinTrainContract(long start) { if (contractTime <= 0) return true; //Not contracted return System.nanoTime() - start - checkpointTimeDiff < contractTime; } /** * Set the path to save checkpoint files to. * * @param path string for full path for the directory to store checkpointed files * @return true if valid path, false otherwise */ @Override //Checkpointable public boolean setCheckpointPath(String path) { boolean validPath = Checkpointable.super.createDirectories(path); if (validPath) { checkpointPath = path; checkpoint = true; } return validPath; } /** * Set the time between checkpoints in hours. * * @param t number of hours between checkpoints * @return true */ @Override //Checkpointable public boolean setCheckpointTimeHours(int t) { checkpointTime = TimeUnit.NANOSECONDS.convert(t, TimeUnit.HOURS); return true; } /** * Serialises this CIF object to the specified path. 
     *
     * @param path save path for object
     * @throws Exception object fails to save
     */
    @Override //Checkpointable
    public void saveToFile(String path) throws Exception {
        lastCheckpointTime = System.nanoTime();
        // Write to a temp file first, then swap it in, so a crash mid-write cannot corrupt the
        // existing checkpoint. The delete is required because File.renameTo fails on an existing
        // target on some platforms (Windows).
        Checkpointable.super.saveToFile(path + "CIF" + seed + "temp.ser");
        File file = new File(path + "CIF" + seed + "temp.ser");
        File file2 = new File(path + "CIF" + seed + ".ser");
        file2.delete();
        file.renameTo(file2);
        // Time spent checkpointing is excluded from the train time contract.
        if (internalContractCheckpointHandling) checkpointTimeDiff += System.nanoTime() - lastCheckpointTime;
    }

    /**
     * Copies values from a loaded CIF object into this object.
     *
     * @param obj a CIF object
     * @throws Exception if obj is not an instance of CIF
     */
    @Override //Checkpointable
    public void copyFromSerObject(Object obj) throws Exception {
        if (!(obj instanceof CIF))
            // NOTE(review): message says "TSF" but the check is for CIF — likely copied from TSF.java.
            throw new Exception("The SER file is not an instance of TSF");
        CIF saved = ((CIF) obj);
        System.out.println("Loading CIF" + seed + ".ser");

        try {
            // Commented-out fields below are deliberately NOT restored from the checkpoint
            // (runtime-only state such as thread pools, interp buffers and checkpoint config).
            numClassifiers = saved.numClassifiers;
            attSubsampleSize = saved.attSubsampleSize;
            numAttributes = saved.numAttributes;
            startNumAttributes = saved.startNumAttributes;
            subsampleAtts = saved.subsampleAtts;
            outlierNorm = saved.outlierNorm;
            useSummaryStats = saved.useSummaryStats;
            numIntervals = saved.numIntervals;
            //numIntervalsFinder = saved.numIntervalsFinder;
            minIntervalLength = saved.minIntervalLength;
            //minIntervalLengthFinder = saved.minIntervalLengthFinder;
            maxIntervalLength = saved.maxIntervalLength;
            //maxIntervalLengthFinder = saved.maxIntervalLengthFinder;
            trees = saved.trees;
            base = saved.base;
            intervals = saved.intervals;
            //testHolder = saved.testHolder;
            bagging = saved.bagging;
            oobCounts = saved.oobCounts;
            trainDistributions = saved.trainDistributions;
            //checkpoint = saved.checkpoint;
            //checkpointPath = saved.checkpointPath
            //checkpointTime = saved.checkpointTime;
            //lastCheckpointTime = saved.lastCheckpointTime;
            //checkpointTimeDiff = saved.checkpointTimeDiff;
            //internalContractCheckpointHandling = saved.internalContractCheckpointHandling;
            trainTimeContract = saved.trainTimeContract;
            if (internalContractCheckpointHandling) contractTime = saved.contractTime;
            maxClassifiers = saved.maxClassifiers;
            //numThreads = saved.numThreads;
            //multiThread = saved.multiThread;
            //ex = saved.ex;
            visSavePath = saved.visSavePath;
            visNumTopAtts = saved.visNumTopAtts;
            interpSavePath = saved.interpSavePath;
            //interpData = saved.interpData;
            //interpTreePreds = saved.interpTreePreds;
            //interpCount = saved.interpCount;
            //interpSeries = saved.interpSeries;
            //interpPred = saved.interpPred;
            seriesLength = saved.seriesLength;
            numInstances = saved.numInstances;
            numDimensions = saved.numDimensions;
            intervalDimensions = saved.intervalDimensions;
            //c22 = saved.c22;
            trainResults = saved.trainResults;
            if (!internalContractCheckpointHandling) trainResults.setBuildTime(System.nanoTime());
            seedClassifier = saved.seedClassifier;
            seed = saved.seed;
            rand = saved.rand;
            estimateOwnPerformance = saved.estimateOwnPerformance;
            trainEstimateMethod = saved.trainEstimateMethod;
            numClasses = saved.numClasses;

            // Credit the time spent serialised/paused back to the contract clock.
            if (internalContractCheckpointHandling) checkpointTimeDiff = saved.checkpointTimeDiff
                    + (System.nanoTime() - saved.lastCheckpointTime);
            lastCheckpointTime = System.nanoTime();
        } catch (Exception ex) {
            // NOTE(review): swallowing the exception leaves the classifier half-loaded;
            // consider rethrowing or at least logging ex itself.
            System.out.println("Unable to assign variables when loading serialised file");
        }
    }

    /**
     * Returns the default set of possible parameter values for use in setOptions when tuning.
     *
     * @return default parameter space for tuning
     */
    @Override //Tunable
    public ParameterSpace getDefaultParameterSearchSpace() {
        ParameterSpace ps = new ParameterSpace();
        String[] numAtts = {"8", "16", "25"};
        ps.addParameter("-A", numAtts);
        String[] maxIntervalLengths = {"0.5", "0.75", "1"};
        ps.addParameter("-L", maxIntervalLengths);
        return ps;
    }

    /**
     * Parses a given list of options. Valid options are:
     * <p>
     * -A  The number of attributes to subsample as an integer from 1-25.
     * -L  Max interval length as a proportion of series length as a double from 0-1.
     *
     * @param options the list of options as an array of strings
     * @throws Exception if an option value is invalid
     */
    @Override //AbstractClassifier
    public void setOptions(String[] options) throws Exception {
        // NOTE(review): the System.out.println calls below look like leftover debug output — confirm
        // before removing, as tuning scripts may parse stdout.
        System.out.println(Arrays.toString(options));

        String numAttsString = Utils.getOption("-A", options);
        System.out.println(numAttsString);
        if (numAttsString.length() != 0)
            attSubsampleSize = Integer.parseInt(numAttsString);

        String maxIntervalLengthsString = Utils.getOption("-L", options);
        System.out.println(maxIntervalLengthsString);
        if (maxIntervalLengthsString.length() != 0)
            // NOTE(review): despite the name "numAtts", the lambda argument appears to be the series
            // length (the -L doc says "proportion of series length") — confirm against callers.
            maxIntervalLengthFinder = (numAtts) -> (int) (numAtts * Double.parseDouble(maxIntervalLengthsString));

        System.out.println(attSubsampleSize + " " + maxIntervalLengthFinder.apply(100));
    }

    /**
     * Enables multi threading with a set number of threads to use.
     *
     * @param numThreads number of threads available for multi threading
     */
    @Override //MultiThreadable
    public void enableMultiThreading(int numThreads) {
        if (numThreads > 1) {
            this.numThreads = numThreads;
            multiThread = true;
        } else {
            // Anything <= 1 falls back to single-threaded building/prediction.
            this.numThreads = 1;
            multiThread = false;
        }
    }

    /**
     * Creates and stores a path to save visualisation files to.
     *
     * @param path String directory path
     * @return true if path is valid, false otherwise.
     */
    @Override //Visualisable
    public boolean setVisualisationSavePath(String path) {
        boolean validPath = Visualisable.super.createVisualisationDirectories(path);
        if (validPath) {
            visSavePath = path;
        }
        return validPath;
    }

    /**
     * Finds the temporal importance curves for model. Outputs a matplotlib figure using the visCIF.py file using the
     * generated curves.
     *
     * @return true if python file to create visualisation ran, false if no path set or invalid classifier
     * @throws Exception if failure to set path or create visualisation
     */
    @Override //Visualisable
    public boolean createVisualisation() throws Exception {
        if (!(base instanceof ContinuousIntervalTree)) {
            System.err.println("CIF temporal importance curve only available for ContinuousIntervalTree.");
            return false;
        }

        if (visSavePath == null) {
            System.err.println("CIF visualisation save path not set.");
            return false;
        }

        boolean isMultivariate = numDimensions > 1;
        int[] dimCount = null;
        if (isMultivariate) dimCount = new int[numDimensions];

        //get information gain from all tree node splits for each attribute/time point
        double[][][] curves = new double[startNumAttributes][numDimensions][seriesLength];
        for (int i = 0; i < trees.size(); i++) {
            ContinuousIntervalTree tree = (ContinuousIntervalTree) trees.get(i);
            ArrayList<Double>[] sg = tree.getTreeSplitsGain();

            for (int n = 0; n < sg[0].size(); n++) {
                double split = sg[0].get(n);
                double gain = sg[1].get(n);
                // Decode the flat split attribute index back into (interval, attribute, dimension).
                int interval = (int) (split / numAttributes);
                int att = subsampleAtts.get(i)[(int) (split % numAttributes)];
                int dim = intervalDimensions.get(i)[interval];
                if (isMultivariate) dimCount[dim]++;

                // Spread the split's gain across every time point covered by the interval.
                for (int j = intervals.get(i)[interval][0]; j <= intervals.get(i)[interval][1]; j++) {
                    curves[att][dim][j] += gain;
                }
            }
        }

        if (isMultivariate) {
            OutFile of = new OutFile(visSavePath + "/dims" + seed + ".txt");
            of.writeLine(Arrays.toString(dimCount));
            of.closeFile();
        }

        OutFile of = new OutFile(visSavePath + "/vis" + seed + ".txt");
        for (int i = 0; i < numDimensions; i++) {
            for (int n = 0; n < startNumAttributes; n++) {
                // Indices 22-24 are the three simple summary stats; everything below is a Catch22 feature.
                switch (n) {
                    case 22:
                        of.writeLine("Mean");
                        break;
                    case 23:
                        of.writeLine("Standard Deviation");
                        break;
                    case 24:
                        of.writeLine("Slope");
                        break;
                    default:
                        of.writeLine(Catch22.getSummaryStatNameByIndex(n));
                }
                of.writeLine(Integer.toString(i));
                of.writeLine(Arrays.toString(curves[n][i]));
            }
        }
        of.closeFile();

        //run python file to output temporal importance curves graph
        // NOTE(review): command built by string concatenation; ProcessBuilder with an argument list
        // would be safer if visSavePath can contain quotes/spaces.
        Process p = Runtime.getRuntime().exec("py src/main/python/visualisation/visCIF.py \"" +
                visSavePath.replace("\\", "/") + "\" " + seed + " " + startNumAttributes + " " + numDimensions +
                " " + visNumTopAtts);

        if (debug) {
            System.out.println("CIF vis python output:");
            BufferedReader out = new BufferedReader(new InputStreamReader(p.getInputStream()));
            BufferedReader err = new BufferedReader(new InputStreamReader(p.getErrorStream()));

            System.out.println("output : ");
            String outLine = out.readLine();
            while (outLine != null) {
                System.out.println(outLine);
                outLine = out.readLine();
            }

            System.out.println("error : ");
            String errLine = err.readLine();
            while (errLine != null) {
                System.out.println(errLine);
                errLine = err.readLine();
            }
        }

        return true;
    }

    /**
     * Stores a path to save interpretability files to.
     *
     * @param path String directory path
     * @return true if path is valid, false otherwise.
     */
    @Override //Interpretable
    public boolean setInterpretabilitySavePath(String path) {
        boolean validPath = Interpretable.super.createInterpretabilityDirectories(path);
        if (validPath) {
            interpSavePath = path;
        }
        return validPath;
    }

    /**
     * Outputs a summary/visualisation of how the last classifier prediction was made to a set path. Runs
     * interpretabilityCIF.py for visualisations.
     *
     * @return true if python file to create visualisation ran, false if no path set or invalid classifier
     * @throws Exception if failure to set path or output files
     */
    @Override //Interpretable
    public boolean lastClassifiedInterpretability() throws Exception {
        if (!(base instanceof ContinuousIntervalTree)) {
            System.err.println("CIF interpretability output only available for ContinuousIntervalTree.");
            return false;
        }

        if (interpSavePath == null) {
            System.err.println("CIF interpretability output save path not set.");
            return false;
        }

        OutFile of = new OutFile(interpSavePath + "pred" + seed + "-" + interpCount + ".txt");
        //output test series
        of.writeLine("Series");
        of.writeLine(Arrays.toString(interpSeries));

        //output the nodes visited for each tree
        for (int i = 0; i < interpData.size(); i++) {
            of.writeLine("Tree " + i + " - " + interpData.get(i).size() + " nodes - pred " + interpTreePreds.get(i));
            for (int n = 0; n < interpData.get(i).size(); n++) {
                if (n == interpData.get(i).size() - 1) {
                    // Final entry per tree is the leaf data, written as-is.
                    of.writeLine(Arrays.toString(interpData.get(i).get(n)));
                } else {
                    ContinuousIntervalTree tree = (ContinuousIntervalTree) trees.get(i);
                    double[] arr = new double[5];
                    double[] nodeData = interpData.get(i).get(n);
                    // Decode the flat attribute index into interval number and subsampled attribute.
                    int interval = (int) (nodeData[0] / numAttributes);
                    int att = (int) (nodeData[0] % numAttributes);
                    att = subsampleAtts.get(i)[att];

                    arr[0] = att;
                    arr[1] = intervals.get(i)[interval][0];
                    arr[2] = intervals.get(i)[interval][1];
                    arr[3] = nodeData[1];
                    arr[4] = nodeData[2];

                    of.writeLine(Arrays.toString(arr));
                }
            }
        }
        of.closeFile();

        //run python file to output graph displaying important attributes and intervals for test series
        Process p = Runtime.getRuntime().exec("py src/main/python/visualisation/interpretabilityCIF.py \"" +
                interpSavePath.replace("\\", "/") + "\" " + seed + " " + interpCount
                + " " + trees.size() + " " + seriesLength + " " + startNumAttributes + " " + interpPred);

        interpCount++;

        if (debug) {
            System.out.println("CIF interp python output:");
            BufferedReader out = new BufferedReader(new InputStreamReader(p.getInputStream()));
            BufferedReader err = new BufferedReader(new InputStreamReader(p.getErrorStream()));

            System.out.println("output : ");
            String outLine = out.readLine();
            while (outLine != null) {
                System.out.println(outLine);
                outLine = out.readLine();
            }

            System.out.println("error : ");
            String errLine = err.readLine();
            while (errLine != null) {
                System.out.println(errLine);
                errLine = err.readLine();
            }
        }

        return true;
    }

    /**
     * Get a unique indentifier for the last prediction made, used for filenames etc.
     *
     * @return int ID for the last prediction
     */
    @Override //Interpretable
    public int getPredID() {
        return interpCount;
    }

    /**
     * Nested class to find and store three simple summary features for an interval
     */
    private static class FeatureSet {
        // Maps flat feature indices 22/23/24 to mean/stdev/slope over data[start..end] (inclusive).
        public static double calcFeatureByIndex(int idx, int start, int end, double[] data) {
            switch (idx) {
                case 22:
                    return calcMean(start, end, data);
                case 23:
                    return calcStandardDeviation(start, end, data);
                case 24:
                    return calcSlope(start, end, data);
                default:
                    return Double.NaN;
            }
        }

        public static double calcMean(int start, int end, double[] data) {
            double sumY = 0;
            for (int i = start; i <= end; i++) {
                sumY += data[i];
            }

            int length = end - start + 1;
            return sumY / length;
        }

        // NOTE(review): despite the name, this returns the sample VARIANCE (no sqrt is taken) —
        // confirm before "fixing", as any trained/serialised model depends on the value produced here.
        public static double calcStandardDeviation(int start, int end, double[] data) {
            double sumY = 0;
            double sumYY = 0;
            for (int i = start; i <= end; i++) {
                sumY += data[i];
                sumYY += data[i] * data[i];
            }

            int length = end - start + 1;
            return (sumYY - (sumY * sumY) / length) / (length - 1);
        }

        // Least-squares slope of data[start..end] against 0-based position within the interval;
        // returns 0 for a degenerate (constant-x) denominator.
        public static double calcSlope(int start, int end, double[] data) {
            double sumY = 0;
            double sumX = 0, sumXX = 0, sumXY = 0;
            for (int i = start; i <= end; i++) {
                sumY += data[i];
                sumX += (i - start);
                sumXX += (i - start) * (i - start);
                sumXY += data[i] * (i - start);
            }

            int length = end - start + 1;
            double slope = (sumXY - (sumX * sumY) / length);
            double denom = sumXX - (sumX * sumX) / length;
            slope = denom == 0 ? 0 : slope / denom;
            return slope;
        }
    }

    /**
     * Class to hold data about a CIF tree when multi threading.
     */
    private static class MultiThreadBuildHolder {
        int[] subsampleAtts;
        int[] intervalDimensions;
        Classifier tree;
        int[][] interval;

        double[][] trainDistribution;
        int[] oobCounts;
        long errorTime;

        public MultiThreadBuildHolder() { }
    }

    /**
     * Class to build a CIF tree when multi threading.
     */
    private class TreeBuildThread implements Callable<MultiThreadBuildHolder> {
        int i;
        double[][][] dimensions;
        int[] classVals;
        Instances result;

        public TreeBuildThread(int i, double[][][] dimensions, int[] classVals, Instances result) {
            this.i = i;
            this.dimensions = dimensions;
            this.classVals = classVals;
            this.result = result;
        }

        /**
         * generate random intervals
         * do the transforms
         * build the classifier
         **/
        @Override
        public MultiThreadBuildHolder call() throws Exception {
            MultiThreadBuildHolder h = new MultiThreadBuildHolder();
            // Per-tree RNG (shadows the field) so each task is deterministic regardless of scheduling.
            Random rand = new Random(seed + i * numClassifiers);

            Catch22 c22 = new Catch22();
            c22.setOutlierNormalise(outlierNorm);

            //1. Select random intervals for tree i
            int[][] interval = new int[numIntervals][2]; //Start and end

            for (int j = 0; j < numIntervals; j++) {
                if (rand.nextBoolean()) {
                    // Anchor the start point, then pick a length forwards.
                    if (seriesLength - minIntervalLength > 0)
                        interval[j][0] = rand.nextInt(seriesLength - minIntervalLength); //Start point

                    int range = Math.min(seriesLength - interval[j][0], maxIntervalLength);
                    int length = rand.nextInt(range - minIntervalLength) + minIntervalLength;
                    interval[j][1] = interval[j][0] + length;
                } else {
                    // Anchor the end point, then pick a length backwards.
                    if (seriesLength - minIntervalLength > 0)
                        interval[j][1] = rand.nextInt(seriesLength - minIntervalLength) +
                                minIntervalLength; //End point

                    int range = Math.min(interval[j][1], maxIntervalLength);
                    int length;
                    if (range - minIntervalLength == 0) length = 3;
                    else length = rand.nextInt(range - minIntervalLength) + minIntervalLength;
                    interval[j][0] = interval[j][1] - length;
                }
            }

            //If bagging find instances with replacement
            int[] instInclusions = null;
            boolean[] inBag = null;
            if (bagging) {
                inBag = new boolean[numInstances];
                instInclusions = new int[numInstances];

                for (int n = 0; n < numInstances; n++) {
                    instInclusions[rand.nextInt(numInstances)]++;
                }

                for (int n = 0; n < numInstances; n++) {
                    if (instInclusions[n] > 0) {
                        inBag[n] = true;
                    }
                }
            }

            //find attributes to subsample
            ArrayList<Integer> arrl = new ArrayList<>(startNumAttributes);
            for (int n = 0; n < startNumAttributes; n++) {
                arrl.add(n);
            }

            // Sample numAttributes of the startNumAttributes features without replacement.
            int[] subsampleAtts = new int[numAttributes];
            for (int n = 0; n < numAttributes; n++) {
                subsampleAtts[n] = arrl.remove(rand.nextInt(arrl.size()));
            }

            //find dimensions for each interval
            int[] intervalDimensions = new int[numIntervals];
            for (int n = 0; n < numIntervals; n++) {
                intervalDimensions[n] = rand.nextInt(numDimensions);
            }
            Arrays.sort(intervalDimensions);

            h.subsampleAtts = subsampleAtts;
            h.intervalDimensions = intervalDimensions;

            //For bagging
            int instIdx = 0;
            int lastIdx = -1;

            //2. Generate and store attributes
            for (int k = 0; k < numInstances; k++) {
                //For each instance
                if (bagging) {
                    boolean sameInst = false;

                    // Advance to the next instance with remaining bag inclusions; a duplicate draw of
                    // the same instance copies the previously transformed row instead of recomputing.
                    while (true) {
                        if (instInclusions[instIdx] == 0) {
                            instIdx++;
                        } else {
                            instInclusions[instIdx]--;

                            if (instIdx == lastIdx) {
                                result.set(k, new DenseInstance(result.instance(k - 1)));
                                sameInst = true;
                            } else {
                                lastIdx = instIdx;
                            }

                            break;
                        }
                    }

                    if (sameInst) continue;

                    result.instance(k).setValue(result.classIndex(), classVals[instIdx]);
                } else {
                    instIdx = k;
                }

                for (int j = 0; j < numIntervals; j++) {
                    //extract the interval
                    double[] series = dimensions[instIdx][intervalDimensions[j]];
                    double[] intervalArray = Arrays.copyOfRange(series, interval[j][0], interval[j][1] + 1);

                    for (int g = 0; g < numAttributes; g++) {
                        //process features: indices < 22 are Catch22, 22-24 are mean/stdev/slope
                        if (subsampleAtts[g] < 22) {
                            result.instance(k).setValue(j * numAttributes + g,
                                    c22.getSummaryStatByIndex(subsampleAtts[g], j, intervalArray));
                        } else {
                            result.instance(k).setValue(j * numAttributes + g,
                                    FeatureSet.calcFeatureByIndex(subsampleAtts[g], interval[j][0],
                                            interval[j][1], series));
                        }
                    }
                }
            }

            //3. Create and build tree using all the features. Feature selection
            Classifier tree = AbstractClassifier.makeCopy(base);
            if (seedClassifier && tree instanceof Randomizable)
                ((Randomizable) tree).setSeed(seed * (i + 1));

            tree.buildClassifier(result);

            if (bagging && getEstimateOwnPerformance()) {
                long t1 = System.nanoTime();
                // Local accumulators; merged into the shared arrays by the submitting thread.
                int[] oobCounts = new int[numInstances];
                double[][] trainDistributions = new double[numInstances][numClasses];

                if (base instanceof ContinuousIntervalTree) {
                    for (int n = 0; n < numInstances; n++) {
                        if (inBag[n])
                            continue;

                        double[] newProbs = ((ContinuousIntervalTree) tree).distributionForInstance(dimensions[n],
                                functions, interval, subsampleAtts, intervalDimensions);
                        oobCounts[n]++;
                        for (int k = 0; k < newProbs.length; k++)
                            trainDistributions[n][k] += newProbs[k];
                    }
                } else {
                    for (int n = 0; n < numInstances; n++) {
                        if (inBag[n])
                            continue;

                        // NOTE(review): row 0 of this task's private training copy is reused as scratch
                        // space for the OOB transform — safe only because the tree is already built.
                        for (int j = 0; j < numIntervals; j++) {
                            double[] series = dimensions[n][intervalDimensions[j]];
                            double[] intervalArray = Arrays.copyOfRange(series, interval[j][0], interval[j][1] + 1);

                            for (int g = 0; g < numAttributes; g++) {
                                if (subsampleAtts[g] < 22) {
                                    result.instance(0).setValue(j * numAttributes + g,
                                            c22.getSummaryStatByIndex(subsampleAtts[g], j, intervalArray));
                                } else {
                                    result.instance(0).setValue(j * numAttributes + g,
                                            FeatureSet.calcFeatureByIndex(subsampleAtts[g], interval[j][0],
                                                    interval[j][1], series));
                                }
                            }
                        }

                        double[] newProbs = tree.distributionForInstance(result.instance(0));
                        oobCounts[n]++;
                        for (int k = 0; k < newProbs.length; k++)
                            trainDistributions[n][k] += newProbs[k];
                    }
                }

                h.oobCounts = oobCounts;
                h.trainDistribution = trainDistributions;
                h.errorTime = System.nanoTime() - t1;
            }

            h.tree = tree;
            h.interval = interval;

            return h;
        }
    }

    /**
     * Class to hold data about a CIF tree when multi threading.
     */
    private static class MultiThreadPredictionHolder {
        int c;
        ArrayList<double[]> al;

        public MultiThreadPredictionHolder() { }
    }

    /**
     * Class to make a class prediction using a CIF tree when multi threading.
     */
    private class TreePredictionThread implements Callable<MultiThreadPredictionHolder> {
        int i;
        double[][] dimensions;
        Classifier tree;
        Instances testHolder;

        public TreePredictionThread(int i, double[][] dimensions, Classifier tree, Instances testHolder) {
            this.i = i;
            this.dimensions = dimensions;
            this.tree = tree;
            this.testHolder = testHolder;
        }

        @Override
        public MultiThreadPredictionHolder call() throws Exception {
            MultiThreadPredictionHolder h = new MultiThreadPredictionHolder();

            if (base instanceof ContinuousIntervalTree) {
                // ContinuousIntervalTree classifies straight from the raw series; record the visited
                // nodes as well when interpretability output is enabled.
                if (interpSavePath != null) {
                    ArrayList<double[]> al = new ArrayList<>();
                    h.c = (int) ((ContinuousIntervalTree) trees.get(i)).classifyInstance(dimensions, functions,
                            intervals.get(i), subsampleAtts.get(i), intervalDimensions.get(i), al);
                    h.al = al;
                } else {
                    h.c = (int) ((ContinuousIntervalTree) trees.get(i)).classifyInstance(dimensions, functions,
                            intervals.get(i), subsampleAtts.get(i), intervalDimensions.get(i));
                }
            } else {
                //Build transformed instance
                Catch22 c22 = new Catch22();
                c22.setOutlierNormalise(outlierNorm);

                for (int j = 0; j < numIntervals; j++) {
                    double[] series = dimensions[intervalDimensions.get(i)[j]];
                    double[] intervalArray = Arrays.copyOfRange(series, intervals.get(i)[j][0],
                            intervals.get(i)[j][1] + 1);

                    for (int g = 0; g < numAttributes; g++) {
                        // indices < 22 are Catch22 features; 22-24 are mean/stdev/slope
                        if (subsampleAtts.get(i)[g] < 22) {
                            testHolder.instance(0).setValue(j * numAttributes + g,
                                    c22.getSummaryStatByIndex(subsampleAtts.get(i)[g], j, intervalArray));
                        } else {
                            testHolder.instance(0).setValue(j * numAttributes + g,
                                    FeatureSet.calcFeatureByIndex(subsampleAtts.get(i)[g], intervals.get(i)[j][0],
                                            intervals.get(i)[j][1], series));
                        }
                    }
                }

                h.c = (int) tree.classifyInstance(testHolder.instance(0));
            }

            return h;
        }
    }

    /**
     * CIF attributes as functions
     **/
    // NOTE(review): raw Function[] assigned to a generic array type — generates an unchecked warning,
    // but generic array creation is not otherwise possible in Java.
    public static final Function<Interval, Double>[] functions = new Function[]{c22_0, c22_1, c22_2, c22_3, c22_4,
            c22_5, c22_6, c22_7, c22_8, c22_9, c22_10, c22_11, c22_12, c22_13, c22_14, c22_15, c22_16, c22_17,
            c22_18, c22_19, c22_20, c22_21, mean, stdev, slope};

    /**
     * Development tests for the CIF classifier.
     *
     * @param arg arguments, unused
     * @throws Exception if tests fail
     */
    public static void main(String[] arg) throws Exception {
        Instances[] data = DatasetLoading.sampleItalyPowerDemand(0);
        Instances train = data[0];
        Instances test = data[1];

        CIF c = new CIF();
        c.setSeed(0);
        c.estimateOwnPerformance = true;
        c.trainEstimateMethod = TrainEstimateMethod.OOB;
        double a;

        long t1 = System.nanoTime();
        c.buildClassifier(train);
        System.out.println("Train time=" + (System.nanoTime() - t1) * 1e-9);
        System.out.println("build ok: original atts = " + (train.numAttributes() - 1) + " new atts = " +
                (c.testHolder.numAttributes() - 1) + " num trees = " + c.trees.size() + " num intervals = " +
                c.numIntervals);
        System.out.println("recorded times: train time = " + (c.trainResults.getBuildTime() * 1e-9) + " estimate time = "
                + (c.trainResults.getErrorEstimateTime() * 1e-9) + " both = " +
                (c.trainResults.getBuildPlusEstimateTime() * 1e-9));

        a = ClassifierTools.accuracy(test, c);
        System.out.println("Test Accuracy = " + a);
        System.out.println("Train Accuracy = " + c.trainResults.getAcc());

        // Expected results for seed 0:
        //Test Accuracy = 0.9650145772594753
        //Train Accuracy = 0.9701492537313433
    }
}
70,123
37.849861
128
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/interval_based/DrCIF.java
/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package tsml.classifiers.interval_based; import evaluation.evaluators.CrossValidationEvaluator; import evaluation.storage.ClassifierResults; import evaluation.tuning.ParameterSpace; import experiments.data.DatasetLoading; import machine_learning.classifiers.ContinuousIntervalTree; import machine_learning.classifiers.ContinuousIntervalTree.Interval; import tsml.classifiers.*; import tsml.data_containers.TSCapabilities; import tsml.data_containers.TimeSeriesInstance; import tsml.data_containers.TimeSeriesInstances; import tsml.data_containers.utilities.Converter; import tsml.transformers.Catch22; import tsml.transformers.Differences; import tsml.transformers.Fast_FFT; import utilities.ClassifierTools; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.core.*; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Random; import java.util.concurrent.*; import java.util.function.Function; import static utilities.ArrayUtilities.sum; import static utilities.StatisticalUtilities.median; /** * Diverse Representation catch22 Interval Forest (DrCIF) * Implementation of the catch22 Interval Forest (CIF) algorithm with extra representations and summary stats. 
 * @author Matthew Middlehurst
 **/
public class DrCIF extends EnhancedAbstractClassifier implements TechnicalInformationHandler, TrainTimeContractable,
        Checkpointable, Tuneable, MultiThreadable {

    /**
     * Paper defining DrCIF.
     *
     * @return TechnicalInformation for DrCIF
     */
    @Override //TechnicalInformationHandler
    public TechnicalInformation getTechnicalInformation() {
        //TODO update
        // NOTE(review): returns null until a citation is available; callers that
        // print technical information must tolerate a null return — confirm.
//        TechnicalInformation result;
//        result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE);
//        result.setValue(TechnicalInformation.Field.AUTHOR, "M. Middlehurst, J. Large and A. Bagnall");
//        result.setValue(TechnicalInformation.Field.TITLE, "The Canonical Interval Forest (CIF) Classifier for " +
//                "Time Series Classifciation");
//        result.setValue(TechnicalInformation.Field.YEAR, "2020");
        return null;
    }

    /**
     * Primary parameters potentially tunable
     */
    private int numClassifiers = 500;

    /**
     * Amount of attributes to be subsampled and related data storage.
     */
    private int attSubsampleSize = 10;
    // Total candidate attributes: 22 catch22 features + 7 summary stats.
    private int numAttributes = 29;
    // numAttributes before subsampling is applied (set in buildClassifier).
    private int startNumAttributes;
    // Per-tree subsampled attribute indices.
    private ArrayList<int[]> subsampleAtts;

    /**
     * Normalise outlier catch22 features which break on data not normalised
     */
    private boolean outlierNorm = true;

    /**
     * Use STSF features as well as catch22 features
     */
    private boolean useSummaryStats = true;

    /** IntervalsFinders sets parameter values in buildClassifier if -1.
     */
    /**
     * Num intervals selected per representation per tree built
     */
    private int[] numIntervals;
    private transient Function<Integer, Integer> numIntervalsFinder;

    /** Secondary parameters */
    /** Mainly there to avoid single item intervals, which have no slope or std dev */
    /**
     * Min defaults to 3, Max defaults to m/2
     */
    private int[] minIntervalLength;
    private transient Function<Integer, Integer> minIntervalLengthFinder;
    private int[] maxIntervalLength;
    private transient Function<Integer, Integer> maxIntervalLengthFinder;

    /**
     * Ensemble members of base classifier, default to TimeSeriesTree
     */
    private ArrayList<Classifier> trees;
    private Classifier base = new ContinuousIntervalTree();

    /**
     * for each classifier i representation r attribute a interval j starts at intervals[i][r][a][j][0] and
     * ends at intervals[i][r][j][1]
     */
    private ArrayList<int[][][]> intervals;

    /**
     * Holding variable for test classification in order to retain the header info
     */
    private Instances testHolder;

    /**
     * Flags and data required if bagging, only OOB is available for train estimates in this case
     */
    private boolean bagging = false;
    private int[] oobCounts;
    private double[][] trainDistributions;

    /**
     * Flags and data required if Checkpointing
     */
    private boolean checkpoint = false;
    private String checkpointPath;
    private long checkpointTime = 0;
    private long lastCheckpointTime = 0;
    // Cumulative time spent checkpointing/loading, subtracted from build time.
    private long checkpointTimeDiff = 0;
    private boolean internalContractCheckpointHandling = false;

    /**
     * Flags and data required if Contracting
     */
    private boolean trainTimeContract = false;
    private long contractTime = 0;
    private boolean underContractTime = true;
    private int maxClassifiers = 500;

    /**
     * Multithreading
     */
    private int numThreads = 1;
    private boolean multiThread = false;
    private ExecutorService ex;

    /**
     * data information
     */
    private int numInstances;

    /**
     * Multivariate
     */
    private int numDimensions;
    // Per-tree, per-representation dimension index chosen for each interval.
    private ArrayList<int[][]> intervalDimensions;

    /**
     * Transformer used to obtain catch22 features
     */
    private transient Catch22 c22;

    /**
     * Transformers used for other representations
     */
    private transient Fast_FFT fft;
    private transient Differences di;

    protected static final long serialVersionUID = 1L;

    /**
     * Default constructor for DrCIF. Can estimate own performance.
     */
    public DrCIF() {
        super(CAN_ESTIMATE_OWN_PERFORMANCE);
    }

    /**
     * Set the number of trees to be built.
     *
     * @param t number of trees
     */
    public void setNumTrees(int t) {
        numClassifiers = t;
    }

    /**
     * Set the number of attributes to be subsampled per tree.
     *
     * @param a number of attributes sumsampled
     */
    public void setAttSubsampleSize(int a) {
        attSubsampleSize = a;
    }

    /**
     * Set whether to use the original TSF statistics as well as catch22 features.
     *
     * @param b boolean to use summary stats
     */
    public void setUseSummaryStats(boolean b) {
        useSummaryStats = b;
    }

    /**
     * Set a function for finding the number of intervals randomly selected per tree.
     *
     * @param f a function for the number of intervals
     */
    public void setNumIntervalsFinder(Function<Integer, Integer> f) {
        numIntervalsFinder = f;
    }

    /**
     * Set a function for finding the min interval length for randomly selected intervals.
     *
     * @param f a function for min interval length
     */
    public void setMinIntervalLengthFinder(Function<Integer, Integer> f) {
        minIntervalLengthFinder = f;
    }

    /**
     * Set a function for finding the max interval length for randomly selected intervals.
     *
     * @param f a function for max interval length
     */
    public void setMaxIntervalLengthFinder(Function<Integer, Integer> f) {
        maxIntervalLengthFinder = f;
    }

    /**
     * Set whether to normalise the outlier catch22 features.
     *
     * @param b boolean to set outlier normalisation
     */
    public void setOutlierNorm(boolean b) {
        outlierNorm = b;
    }

    /**
     * Sets the base classifier for the ensemble.
     *
     * @param c a base classifier constructed elsewhere and cloned into ensemble
     */
    public void setBaseClassifier(Classifier c) {
        base = c;
    }

    /**
     * Set whether to perform bagging with replacement.
     *
     * @param b boolean to set bagging
     */
    public void setBagging(boolean b) {
        bagging = b;
    }

    /**
     * Outputs DrCIF parameters information as a String.
     *
     * @return String written to results files
     */
    @Override //SaveParameterInfo
    public String getParameters() {
        int nt = numClassifiers;
        if (trees != null) nt = trees.size();
        // Commas inside array values are replaced with ';' so the output stays a
        // valid single CSV field in results files.
        return super.getParameters() + ",numTrees," + nt + ",attSubsampleSize," + attSubsampleSize
                + ",outlierNorm," + outlierNorm + ",basicSummaryStats," + useSummaryStats + ",numIntervals,"
                + Arrays.toString(numIntervals).replace(',', ';') + ",minIntervalLength,"
                + Arrays.toString(minIntervalLength).replace(',', ';') + ",maxIntervalLength,"
                + Arrays.toString(maxIntervalLength).replace(',', ';') + ",baseClassifier,"
                + base.getClass().getSimpleName() + ",bagging," + bagging + ",estimator," + trainEstimateMethod.name()
                + ",contractTime," + contractTime;
    }

    /**
     * Returns the capabilities for DrCIF. These are that the
     * data must be numeric or relational, with no missing and a nominal class
     *
     * @return the capabilities of DrCIF
     */
    @Override //AbstractClassifier
    public Capabilities getCapabilities() {
        Capabilities result = super.getCapabilities();
        result.disableAll();

        result.setMinimumNumberInstances(2);

        // attributes
        result.enable(Capabilities.Capability.RELATIONAL_ATTRIBUTES);
        result.enable(Capabilities.Capability.NUMERIC_ATTRIBUTES);

        // class
        result.enable(Capabilities.Capability.NOMINAL_CLASS);

        return result;
    }

    /**
     * Returns the time series capabilities for DrCIF. These are that the
     * data must be equal length, with no missing values
     *
     * @return the time series capabilities of DrCIF
     */
    public TSCapabilities getTSCapabilities() {
        TSCapabilities capabilities = new TSCapabilities();
        capabilities.enable(TSCapabilities.EQUAL_LENGTH)
                .enable(TSCapabilities.MULTI_OR_UNIVARIATE)
                .enable(TSCapabilities.NO_MISSING_VALUES)
                .enable(TSCapabilities.MIN_LENGTH(4));
        return capabilities;
    }

    /**
     * Build the DrCIF classifier.
* * @param data TimeSeriesInstances object * @throws Exception unable to train model */ @Override //TSClassifier public void buildClassifier(TimeSeriesInstances data) throws Exception { /** Build Stage: * Builds the final classifier with or without bagging. */ trainResults = new ClassifierResults(); rand.setSeed(seed); numClasses = data.numClasses(); trainResults.setEstimatorName(getClassifierName()); trainResults.setBuildTime(System.nanoTime()); // can classifier handle the data? getTSCapabilities().test(data); c22 = new Catch22(); c22.setOutlierNormalise(outlierNorm); TimeSeriesInstances[] representations = new TimeSeriesInstances[3]; representations[0] = data; fft = new Fast_FFT(); fft.nearestPowerOF2(representations[0].getMaxLength()); representations[1] = fft.transform(representations[0]); di = new Differences(); di.setSubtractFormerValue(true); representations[2] = di.transform(representations[0]); File file = new File(checkpointPath + "DrCIF" + seed + ".ser"); //if checkpointing and serialised files exist load said files if (checkpoint && file.exists()) { //path checkpoint files will be saved to if (debug) System.out.println("Loading from checkpoint file"); loadFromFile(checkpointPath + "DrCIF" + seed + ".ser"); } //initialise variables else { numInstances = data.numInstances(); numDimensions = data.getMaxNumDimensions(); numIntervals = new int[representations.length]; minIntervalLength = new int[representations.length]; maxIntervalLength = new int[representations.length]; for (int r = 0; r < 3; r++) { if (numIntervalsFinder == null) { numIntervals[r] = (int) (4 + (Math.sqrt(representations[r].getMaxLength()) * Math.sqrt(numDimensions)) / 3); } else { numIntervals[r] = numIntervalsFinder.apply(representations[0].getMaxLength()); } if (minIntervalLengthFinder == null) { minIntervalLength[r] = 4; } else { minIntervalLength[r] = minIntervalLengthFinder.apply(representations[r].getMaxLength()); } if (minIntervalLength[r] < 4) { minIntervalLength[r] = 4; } if 
(representations[r].getMaxLength() <= minIntervalLength[r]) { minIntervalLength[r] = representations[r].getMaxLength() / 2; } if (maxIntervalLengthFinder == null) { maxIntervalLength[r] = representations[r].getMaxLength() / 2; } else { maxIntervalLength[r] = maxIntervalLengthFinder.apply(representations[r].getMaxLength()); } if (maxIntervalLength[r] > representations[r].getMaxLength()) { maxIntervalLength[r] = representations[r].getMaxLength(); } if (maxIntervalLength[r] < minIntervalLength[r]) { maxIntervalLength[r] = minIntervalLength[r]; } } if (!useSummaryStats) { numAttributes = 22; } startNumAttributes = numAttributes; subsampleAtts = new ArrayList<>(); if (attSubsampleSize < numAttributes) { numAttributes = attSubsampleSize; } //Set up for Bagging if required if (getEstimateOwnPerformance() && (bagging || trainEstimateMethod != TrainEstimateMethod.OOB)) { trainDistributions = new double[numInstances][numClasses]; if (bagging) oobCounts = new int[numInstances]; } //cancel loop using time instead of number built. if (trainTimeContract) { numClassifiers = maxClassifiers; trees = new ArrayList<>(); intervals = new ArrayList<>(); } else { trees = new ArrayList<>(numClassifiers); intervals = new ArrayList<>(numClassifiers); } intervalDimensions = new ArrayList<>(); } if (multiThread) { ex = Executors.newFixedThreadPool(numThreads); if (checkpoint) System.out.println("Unable to checkpoint until end of build when multi threading."); } //Set up instances size and format. 
ArrayList<Attribute> atts = new ArrayList<>(); String name; for (int j = 0; j < sum(numIntervals) * numAttributes; j++) { name = "F" + j; atts.add(new Attribute(name)); } //Get the class values as an array list ArrayList<String> vals = new ArrayList<>(numClasses); for (int j = 0; j < numClasses; j++) vals.add(Integer.toString(j)); atts.add(new Attribute("cls", vals)); //create blank instances with the correct class value Instances result = new Instances("Tree", atts, numInstances); result.setClassIndex(result.numAttributes() - 1); for (int i = 0; i < numInstances; i++) { DenseInstance in = new DenseInstance(result.numAttributes()); in.setValue(result.numAttributes() - 1, data.get(i).getLabelIndex()); result.add(in); } testHolder = new Instances(result, 1); DenseInstance in = new DenseInstance(testHolder.numAttributes()); in.setValue(testHolder.numAttributes() - 1, -1); testHolder.add(in); if (multiThread) { multiThreadBuildDrCIF(representations, result); } else { buildDrCIF(representations, result); } if (trees.size() == 0) {//Not enough time to build a single classifier throw new Exception((" ERROR in DrCIF, no trees built, contract time probably too low. 
Contract time = " + contractTime)); } if (checkpoint) { saveToFile(checkpointPath); } trainResults.setTimeUnit(TimeUnit.NANOSECONDS); trainResults.setBuildTime(System.nanoTime() - trainResults.getBuildTime() - checkpointTimeDiff - trainResults.getErrorEstimateTime()); if (getEstimateOwnPerformance()) { long est1 = System.nanoTime(); estimateOwnPerformance(data); long est2 = System.nanoTime(); trainResults.setErrorEstimateTime(est2 - est1 + trainResults.getErrorEstimateTime()); } trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime() + trainResults.getErrorEstimateTime()); trainResults.setParas(getParameters()); printLineDebug("*************** Finished DrCIF Build with " + trees.size() + "trees, train time = " + (trainResults.getBuildTime()/1000000000/60/60.0) + " hours, Train+Estimate time = "+(trainResults.getBuildPlusEstimateTime()/1000000000/60/60.0)+" hours ***************"); } /** * Build the DrCIF classifier. * * @param data weka Instances object * @throws Exception unable to train model */ @Override //AbstractClassifier public void buildClassifier(Instances data) throws Exception { buildClassifier(Converter.fromArff(data)); } /** * Build the DrCIF classifier * For each base classifier: * generate random intervals * do the transfrorms * build the classifier * * @throws Exception unable to build DrCIF */ public void buildDrCIF(TimeSeriesInstances[] representations, Instances result) throws Exception { double[][][][] dimensions = new double[numInstances][representations.length][][]; for (int r = 0; r < representations.length; r++) { double[][][] arr = representations[r].toValueArray(); for (int n = 0; n < numInstances; n++) { dimensions[n][r] = arr[n]; } } while (underContractTime && trees.size() < numClassifiers) { int i = trees.size(); //1. 
Select random intervals for tree i int[][][] interval = new int[representations.length][][]; for (int r = 0; r < representations.length; r++) { interval[r] = new int[numIntervals[r]][2]; for (int j = 0; j < numIntervals[r]; j++) { if (rand.nextBoolean()) { if (representations[r].getMaxLength() - minIntervalLength[r] > 0) interval[r][j][0] = rand.nextInt(representations[r].getMaxLength() - minIntervalLength[r]); //Start point int range = Math.min(representations[r].getMaxLength() - interval[r][j][0], maxIntervalLength[r]); int length; if (range - minIntervalLength[r] == 0) length = minIntervalLength[r]; else length = rand.nextInt(range - minIntervalLength[r]) + minIntervalLength[r]; interval[r][j][1] = interval[r][j][0] + length; } else { if (representations[r].getMaxLength() - minIntervalLength[r] > 0) interval[r][j][1] = rand.nextInt(representations[r].getMaxLength() - minIntervalLength[r]) + minIntervalLength[r]; //End point int range = Math.min(interval[r][j][1], maxIntervalLength[r]); int length; if (range - minIntervalLength[r] == 0) length = minIntervalLength[r]; else length = rand.nextInt(range - minIntervalLength[r]) + minIntervalLength[r]; interval[r][j][0] = interval[r][j][1] - length; } } } //If bagging find instances with replacement int[] instInclusions = null; boolean[] inBag = null; if (bagging) { inBag = new boolean[numInstances]; instInclusions = new int[numInstances]; for (int n = 0; n < numInstances; n++) { instInclusions[rand.nextInt(numInstances)]++; } for (int n = 0; n < numInstances; n++) { if (instInclusions[n] > 0) { inBag[n] = true; } } } //find attributes to subsample ArrayList<Integer> arrl = new ArrayList<>(startNumAttributes); for (int n = 0; n < startNumAttributes; n++) { arrl.add(n); } int[] subsampleAtt = new int[numAttributes]; for (int n = 0; n < numAttributes; n++) { subsampleAtt[n] = arrl.remove(rand.nextInt(arrl.size())); } //find dimensions for each interval int[][] intervalDimension = new int[representations.length][]; for 
(int r = 0; r < representations.length; r++) { intervalDimension[r] = new int[numIntervals[r]]; for (int n = 0; n < numIntervals[r]; n++) { intervalDimension[r][n] = rand.nextInt(numDimensions); } Arrays.sort(intervalDimension[r]); } //For bagging int instIdx = 0; int lastIdx = -1; //2. Generate and store attributes for (int k = 0; k < numInstances; k++) { //For each instance if (bagging) { boolean sameInst = false; while (true) { if (instInclusions[instIdx] == 0) { instIdx++; } else { instInclusions[instIdx]--; if (instIdx == lastIdx) { result.set(k, new DenseInstance(result.instance(k - 1))); sameInst = true; } else { lastIdx = instIdx; } break; } } if (sameInst) continue; result.instance(k).setValue(result.classIndex(), representations[0].get(instIdx).getLabelIndex()); } else { instIdx = k; } int p = 0; for (int r = 0; r < representations.length; r++) { for (int j = 0; j < numIntervals[r]; j++) { //extract the interval double[] series = dimensions[instIdx][r][intervalDimension[r][j]]; double[] intervalArray = Arrays.copyOfRange(series, interval[r][j][0], interval[r][j][1] + 1); //process features for (int a = 0; a < numAttributes; a++) { if (subsampleAtt[a] < 22) { result.instance(k).setValue(p, c22.getSummaryStatByIndex(subsampleAtt[a], j, intervalArray)); } else { result.instance(k).setValue(p, FeatureSet.calcFeatureByIndex(subsampleAtt[a], interval[r][j][0], interval[r][j][1], series)); } p++; } } } } //3. Create and build tree using all the features. 
Feature selection Classifier tree = AbstractClassifier.makeCopy(base); if (seedClassifier && tree instanceof Randomizable) ((Randomizable) tree).setSeed(seed * (i + 1)); tree.buildClassifier(result); if (getEstimateOwnPerformance() && bagging) { long t1 = System.nanoTime(); if (base instanceof ContinuousIntervalTree) { for (int n = 0; n < numInstances; n++) { if (inBag[n]) continue; double[] newProbs = ((ContinuousIntervalTree) tree).distributionForInstance(dimensions[n], functions, interval, subsampleAtt, intervalDimension); oobCounts[n]++; for (int k = 0; k < newProbs.length; k++) trainDistributions[n][k] += newProbs[k]; } } else { for (int n = 0; n < numInstances; n++) { if (inBag[n]) continue; int p = 0; for (int r = 0; r < representations.length; r++) { for (int j = 0; j < numIntervals[r]; j++) { double[] series = dimensions[n][r][intervalDimension[r][j]]; double[] intervalArray = Arrays.copyOfRange(series, interval[r][j][0], interval[r][j][1] + 1); for (int a = 0; a < numAttributes; a++) { if (subsampleAtt[a] < 22) { testHolder.instance(0).setValue(p, c22.getSummaryStatByIndex(subsampleAtt[a], j, intervalArray)); } else { testHolder.instance(0).setValue(p, FeatureSet.calcFeatureByIndex(subsampleAtt[a], interval[r][j][0], interval[r][j][1], series)); } p++; } } } double[] newProbs = tree.distributionForInstance(testHolder.instance(0)); oobCounts[n]++; for (int k = 0; k < newProbs.length; k++) trainDistributions[n][k] += newProbs[k]; } } trainResults.setErrorEstimateTime(trainResults.getErrorEstimateTime() + (System.nanoTime() - t1)); } else if (getEstimateOwnPerformance() && trainEstimateMethod == TrainEstimateMethod.TRAIN) { long t1 = System.nanoTime(); for (int n = 0; n < numInstances; n++) { double[] newProbs = tree.distributionForInstance(result.instance(n)); for (int k = 0; k < newProbs.length; k++) trainDistributions[n][k] += newProbs[k]; } trainResults.setErrorEstimateTime(trainResults.getErrorEstimateTime() + (System.nanoTime() - t1)); } else if 
(getEstimateOwnPerformance() && trainEstimateMethod == TrainEstimateMethod.CV) { long t1 = System.nanoTime(); int numFolds = Math.min(result.numInstances(), 10); CrossValidationEvaluator cv = new CrossValidationEvaluator(); if (seedClassifier) cv.setSeed(seed * 5 * i); cv.setNumFolds(numFolds); Classifier cvtree = AbstractClassifier.makeCopy(base); if (seedClassifier && cvtree instanceof Randomizable) ((Randomizable) cvtree).setSeed(seed * (i + 1)); ClassifierResults cvResults = cv.evaluate(cvtree, result); for (int g = 0; g < result.numInstances(); g++) { double[] dist = cvResults.getProbabilityDistribution(g); for (int n = 0; n < trainDistributions[g].length; n++) { trainDistributions[g][n] += dist[n]; } } trainResults.setErrorEstimateTime(trainResults.getErrorEstimateTime() + (System.nanoTime() - t1)); } trees.add(tree); intervals.add(interval); subsampleAtts.add(subsampleAtt); intervalDimensions.add(intervalDimension); //Timed checkpointing if enabled, else checkpoint every 100 trees if (checkpoint && ((checkpointTime > 0 && System.nanoTime() - lastCheckpointTime > checkpointTime) || trees.size() % 100 == 0)) { saveToFile(checkpointPath); } underContractTime = withinTrainContract(trainResults.getBuildTime()); } } /** * Build the DrCIF classifier using multiple threads. * Unable to checkpoint until after the build process while using multiple threads. 
* For each base classifier * generate random intervals * do the transfrorms * build the classifier * * @param representations TimeSeriesInstances data * @param result Instances object formatted for transformed data * @throws Exception unable to build DrCIF */ private void multiThreadBuildDrCIF(TimeSeriesInstances[] representations, Instances result) throws Exception { double[][][][] dimensions = new double[numInstances][representations.length][][]; for (int r = 0; r < representations.length; r++) { double[][][] arr = representations[r].toValueArray(); for (int n = 0; n < numInstances; n++) { dimensions[n][r] = arr[n]; } } int[] classVals = representations[0].getClassIndexes(); int buildStep = trainTimeContract ? numThreads : numClassifiers; while (withinTrainContract(trainResults.getBuildTime()) && trees.size() < numClassifiers) { ArrayList<Future<MultiThreadBuildHolder>> futures = new ArrayList<>(buildStep); int end = trees.size() + buildStep; for (int i = trees.size(); i < end; ++i) { Instances resultCopy = new Instances(result, numInstances); for (int n = 0; n < numInstances; n++) { DenseInstance in = new DenseInstance(result.numAttributes()); in.setValue(result.numAttributes() - 1, result.instance(n).classValue()); resultCopy.add(in); } futures.add(ex.submit(new TreeBuildThread(i, dimensions, classVals, resultCopy))); } for (Future<MultiThreadBuildHolder> f : futures) { MultiThreadBuildHolder h = f.get(); trees.add(h.tree); intervals.add(h.interval); subsampleAtts.add(h.subsampleAtts); intervalDimensions.add(h.intervalDimensions); if (getEstimateOwnPerformance() && (bagging || trainEstimateMethod != TrainEstimateMethod.OOB)) { trainResults.setErrorEstimateTime(trainResults.getErrorEstimateTime() + h.errorTime); for (int n = 0; n < numInstances; n++) { if (bagging) oobCounts[n] += h.oobCounts[n]; for (int k = 0; k < numClasses; k++) trainDistributions[n][k] += h.trainDistribution[n][k]; } } } } } /** * Estimate accuracy stage: Three scenarios * 1. 
If we bagged the full build (bagging ==true), we estimate using the full build OOB. * If we built on all data (bagging ==false) we estimate either: * 2. With a 10 fold CV. * 3. Build a bagged model simply to get the estimate. * * @param data TimeSeriesInstances to estimate with * @throws Exception unable to obtain estimate */ private void estimateOwnPerformance(TimeSeriesInstances data) throws Exception { if (bagging) { // Use bag data, counts normalised to probabilities double[] preds = new double[data.numInstances()]; double[] actuals = new double[data.numInstances()]; long[] predTimes = new long[data.numInstances()];//Dummy variable, need something for (int j = 0; j < data.numInstances(); j++) { long predTime = System.nanoTime(); if (oobCounts[j] == 0) Arrays.fill(trainDistributions[j], 1.0 / trainDistributions[j].length); else for (int k = 0; k < trainDistributions[j].length; k++) trainDistributions[j][k] /= oobCounts[j]; preds[j] = findIndexOfMax(trainDistributions[j], rand); actuals[j] = data.get(j).getLabelIndex(); predTimes[j] = System.nanoTime() - predTime; } trainResults.addAllPredictions(actuals, preds, trainDistributions, predTimes, null); trainResults.setEstimatorName("DrCIFBagging"); trainResults.setDatasetName(data.getProblemName()); trainResults.setSplit("train"); trainResults.setFoldID(seed); trainResults.setErrorEstimateMethod("OOB"); trainResults.finaliseResults(actuals); } else if (trainEstimateMethod == TrainEstimateMethod.TRAIN) { double[] preds = new double[data.numInstances()]; double[] actuals = new double[data.numInstances()]; long[] predTimes = new long[data.numInstances()];//Dummy variable, need something for (int j = 0; j < data.numInstances(); j++) { long predTime = System.nanoTime(); for (int k = 0; k < trainDistributions[j].length; k++) trainDistributions[j][k] /= trees.size(); preds[j] = findIndexOfMax(trainDistributions[j], rand); actuals[j] = data.get(j).getLabelIndex(); predTimes[j] = System.nanoTime() - predTime; } 
trainResults.addAllPredictions(actuals, preds, trainDistributions, predTimes, null); trainResults.setEstimatorName("SCIFTrainPreds"); trainResults.setDatasetName(data.getProblemName()); trainResults.setSplit("train"); trainResults.setFoldID(seed); trainResults.setErrorEstimateMethod("TrainPred"); trainResults.finaliseResults(actuals); } else if (trainEstimateMethod == TrainEstimateMethod.CV) { double[] preds = new double[data.numInstances()]; double[] actuals = new double[data.numInstances()]; long[] predTimes = new long[data.numInstances()];//Dummy variable, need something for (int j = 0; j < data.numInstances(); j++) { long predTime = System.nanoTime(); for (int k = 0; k < trainDistributions[j].length; k++) trainDistributions[j][k] /= trees.size(); preds[j] = findIndexOfMax(trainDistributions[j], rand); actuals[j] = data.get(j).getLabelIndex(); predTimes[j] = System.nanoTime() - predTime; } trainResults.addAllPredictions(actuals, preds, trainDistributions, predTimes, null); trainResults.setEstimatorName("SCIFInternalCV"); trainResults.setDatasetName(data.getProblemName()); trainResults.setSplit("train"); trainResults.setFoldID(seed); trainResults.setErrorEstimateMethod("InternalCV"); trainResults.finaliseResults(actuals); //todo doubles the transform, make more like STC and test (same with Arsenal) } else if (trainEstimateMethod == TrainEstimateMethod.OOB || trainEstimateMethod == TrainEstimateMethod.NONE) { /** Build a single new DrCIF using Bagging, and extract the estimate from this */ DrCIF cif = new DrCIF(); cif.copyParameters(this); cif.setSeed(seed * 5); cif.setEstimateOwnPerformance(true); cif.bagging = true; cif.numThreads = numThreads; cif.multiThread = multiThread; cif.buildClassifier(data); long tt = trainResults.getBuildTime(); trainResults = cif.trainResults; trainResults.setBuildTime(tt); trainResults.setEstimatorName("DrCIFOOB"); trainResults.setErrorEstimateMethod("OOB"); } } /** * Copy the parameters of a DrCIF object to this. 
* * @param other A DrCIF object */ private void copyParameters(DrCIF other) { this.numClassifiers = other.numClassifiers; this.attSubsampleSize = other.attSubsampleSize; this.outlierNorm = other.outlierNorm; this.useSummaryStats = other.useSummaryStats; this.numIntervals = other.numIntervals; this.numIntervalsFinder = other.numIntervalsFinder; this.minIntervalLength = other.minIntervalLength; this.minIntervalLengthFinder = other.minIntervalLengthFinder; this.maxIntervalLength = other.maxIntervalLength; this.maxIntervalLengthFinder = other.maxIntervalLengthFinder; this.base = other.base; this.bagging = other.bagging; this.trainTimeContract = other.trainTimeContract; this.contractTime = other.contractTime; } /** * Find class probabilities of an instance using the trained model. * * @param ins TimeSeriesInstance object * @return array of doubles: probability of each class * @throws Exception failure to classify */ @Override //TSClassifier public double[] distributionForInstance(TimeSeriesInstance ins) throws Exception { double[] d = new double[numClasses]; double[][][] dimensions = new double[3][][]; dimensions[0] = ins.toValueArray(); dimensions[1] = fft.transform(ins).toValueArray(); dimensions[2] = di.transform(ins).toValueArray(); if (multiThread) { ArrayList<Future<MultiThreadPredictionHolder>> futures = new ArrayList<>(trees.size()); for (int i = 0; i < trees.size(); ++i) { Instances testCopy = new Instances(testHolder, 1); DenseInstance in = new DenseInstance(testHolder.numAttributes()); in.setValue(testHolder.numAttributes() - 1, -1); testCopy.add(in); futures.add(ex.submit(new TreePredictionThread(i, dimensions, trees.get(i), testCopy))); } for (Future<MultiThreadPredictionHolder> f : futures) { MultiThreadPredictionHolder h = f.get(); d[h.c]++; } } else if (base instanceof ContinuousIntervalTree) { for (int i = 0; i < trees.size(); i++) { int c = (int) ((ContinuousIntervalTree) trees.get(i)).classifyInstance(dimensions, functions, intervals.get(i), 
subsampleAtts.get(i), intervalDimensions.get(i)); d[c]++; } } else { //Build transformed instance for (int i = 0; i < trees.size(); i++) { Catch22 c22 = new Catch22(); c22.setOutlierNormalise(outlierNorm); int p = 0; for (int r = 0; r < dimensions.length; r++) { for (int j = 0; j < intervals.get(i)[r].length; j++) { double[] series = dimensions[r][intervalDimensions.get(i)[r][j]]; double[] intervalArray = Arrays.copyOfRange(series, intervals.get(i)[r][j][0], intervals.get(i)[r][j][1] + 1); for (int a = 0; a < numAttributes; a++) { if (subsampleAtts.get(i)[a] < 22) { testHolder.instance(0).setValue(p, c22.getSummaryStatByIndex(subsampleAtts.get(i)[a], j, intervalArray)); } else { testHolder.instance(0).setValue(p, FeatureSet.calcFeatureByIndex(subsampleAtts.get(i)[a], intervals.get(i)[r][j][0], intervals.get(i)[r][j][1], series)); } p++; } } } int c = (int) trees.get(i).classifyInstance(testHolder.instance(0)); d[c]++; } } double sum = 0; for (double x : d) sum += x; for (int i = 0; i < d.length; i++) d[i] = d[i] / sum; return d; } /** * Find class probabilities of an instance using the trained model. * * @param ins weka Instance object * @return array of doubles: probability of each class * @throws Exception failure to classify */ @Override //AbstractClassifier public double[] distributionForInstance(Instance ins) throws Exception { return distributionForInstance(Converter.fromArff(ins)); } /** * Classify an instance using the trained model. * * @param ins TimeSeriesInstance object * @return predicted class value * @throws Exception failure to classify */ @Override //TSClassifier public double classifyInstance(TimeSeriesInstance ins) throws Exception { double[] probs = distributionForInstance(ins); return findIndexOfMax(probs, rand); } /** * Classify an instance using the trained model. 
 *
 * @param ins weka Instance object
 * @return predicted class value
 * @throws Exception failure to classify
 */
@Override //AbstractClassifier
public double classifyInstance(Instance ins) throws Exception {
    return classifyInstance(Converter.fromArff(ins));
}

/**
 * Set the train time limit for a contracted classifier.
 *
 * @param amount contract time in nanoseconds
 */
@Override //TrainTimeContractable
public void setTrainTimeLimit(long amount) {
    contractTime = amount;
    trainTimeContract = true;
}

/**
 * Check if a contracted classifier is within its train time limit.
 *
 * @param start classifier build start time
 * @return true if within the contract or not contracted, false otherwise.
 */
@Override //TrainTimeContractable
public boolean withinTrainContract(long start) {
    if (contractTime <= 0) return true; //Not contracted
    // Reserve half the contract when a separate train estimate (anything other
    // than TRAIN, i.e. reusing train predictions) must also be produced.
    int d = getEstimateOwnPerformance() && trainEstimateMethod != TrainEstimateMethod.TRAIN ? 2 : 1;
    // checkpointTimeDiff discounts time spent saving/loading checkpoints.
    return System.nanoTime() - start - checkpointTimeDiff < contractTime / d;
}

/**
 * Set the path to save checkpoint files to.
 *
 * @param path string for full path for the directory to store checkpointed files
 * @return true if valid path, false otherwise
 */
@Override //Checkpointable
public boolean setCheckpointPath(String path) {
    boolean validPath = Checkpointable.super.createDirectories(path);
    if (validPath) {
        checkpointPath = path;
        checkpoint = true;
    }
    return validPath;
}

/**
 * Set the time between checkpoints in hours.
 *
 * @param t number of hours between checkpoints
 * @return true
 */
@Override //Checkpointable
public boolean setCheckpointTimeHours(int t) {
    checkpointTime = TimeUnit.NANOSECONDS.convert(t, TimeUnit.HOURS);
    return true;
}

/**
 * Serialises this DrCIF object to the specified path.
 *
 * @param path save path for object
 * @throws Exception object fails to save
 */
@Override //Checkpointable
public void saveToFile(String path) throws Exception {
    lastCheckpointTime = System.nanoTime();
    // Write to a temp file then swap it in, so an interrupted save never leaves
    // a truncated checkpoint as the only copy.
    Checkpointable.super.saveToFile(path + "DrCIF" + seed + "temp.ser");
    File file = new File(path + "DrCIF" + seed + "temp.ser");
    File file2 = new File(path + "DrCIF" + seed + ".ser");
    file2.delete();
    file.renameTo(file2);
    // Exclude checkpointing time from the contract accounting.
    if (internalContractCheckpointHandling) checkpointTimeDiff += System.nanoTime() - lastCheckpointTime;
}

/**
 * Copies values from a loaded DrCIF object into this object.
 *
 * @param obj a DrCIF object
 * @throws Exception if obj is not an instance of DrCIF
 */
@Override //Checkpointable
public void copyFromSerObject(Object obj) throws Exception {
    if (!(obj instanceof DrCIF))
        throw new Exception("The SER file is not an instance of TSF");
    DrCIF saved = ((DrCIF) obj);
    System.out.println("Loading DrCIF" + seed + ".ser");

    try {
        // Restore learned state. The commented-out assignments deliberately keep
        // the current (loading) object's values instead of the checkpoint's.
        numClassifiers = saved.numClassifiers;
        attSubsampleSize = saved.attSubsampleSize;
        numAttributes = saved.numAttributes;
        startNumAttributes = saved.startNumAttributes;
        subsampleAtts = saved.subsampleAtts;
        outlierNorm = saved.outlierNorm;
        useSummaryStats = saved.useSummaryStats;
        numIntervals = saved.numIntervals;
        //numIntervalsFinder = saved.numIntervalsFinder;
        minIntervalLength = saved.minIntervalLength;
        //minIntervalLengthFinder = saved.minIntervalLengthFinder;
        maxIntervalLength = saved.maxIntervalLength;
        //maxIntervalLengthFinder = saved.maxIntervalLengthFinder;
        trees = saved.trees;
        base = saved.base;
        intervals = saved.intervals;
        //testHolder = saved.testHolder;
        bagging = saved.bagging;
        oobCounts = saved.oobCounts;
        trainDistributions = saved.trainDistributions;
        //checkpoint = saved.checkpoint;
        //checkpointPath = saved.checkpointPath
        //checkpointTime = saved.checkpointTime;
        //lastCheckpointTime = saved.lastCheckpointTime;
        //checkpointTimeDiff = saved.checkpointTimeDiff;
        //internalContractCheckpointHandling = saved.internalContractCheckpointHandling;
        trainTimeContract = saved.trainTimeContract;
        if (internalContractCheckpointHandling) contractTime = saved.contractTime;
        if (internalContractCheckpointHandling) underContractTime = saved.underContractTime;
        maxClassifiers = saved.maxClassifiers;
        //numThreads = saved.numThreads;
        //multiThread = saved.multiThread;
        //ex = saved.ex;
        numInstances = saved.numInstances;
        numDimensions = saved.numDimensions;
        intervalDimensions = saved.intervalDimensions;
        //c22 = saved.c22;
        //fft = saved.fft;
        //di = saved.di;
        trainResults = saved.trainResults;
        if (!internalContractCheckpointHandling) trainResults.setBuildTime(System.nanoTime());
        seedClassifier = saved.seedClassifier;
        seed = saved.seed;
        rand = saved.rand;
        estimateOwnPerformance = saved.estimateOwnPerformance;
        trainEstimateMethod = saved.trainEstimateMethod;
        numClasses = saved.numClasses;

        // Carry over accumulated checkpoint overhead, plus the gap since the
        // checkpoint was written.
        if (internalContractCheckpointHandling) checkpointTimeDiff = saved.checkpointTimeDiff
                + (System.nanoTime() - saved.lastCheckpointTime);
        lastCheckpointTime = System.nanoTime();
    } catch (Exception ex) {
        // NOTE(review): failures here are swallowed after a console message,
        // leaving the object partially restored — confirm this is intended.
        System.out.println("Unable to assign variables when loading serialised file");
    }
}

/**
 * Returns the default set of possible parameter values for use in setOptions when tuning.
 *
 * @return default parameter space for tuning
 */
@Override //Tunable
public ParameterSpace getDefaultParameterSearchSpace() {
    ParameterSpace ps = new ParameterSpace();
    // -A: number of attributes to subsample per tree.
    String[] numAtts = {"8", "16", "25"};
    ps.addParameter("-A", numAtts);
    // -L: max interval length as a proportion of series length.
    String[] maxIntervalLengths = {"0.5", "0.75", "1"};
    ps.addParameter("-L", maxIntervalLengths);
    return ps;
}

/**
 * Parses a given list of options. Valid options are:
 * <p>
 * -A The number of attributes to subsample as an integer from 1-25.
 * -L Max interval length as a proportion of series length as a double from 0-1.
* * @param options the list of options as an array of strings * @throws Exception if an option value is invalid */ @Override //AbstractClassifier public void setOptions(String[] options) throws Exception { System.out.println(Arrays.toString(options)); String numAttsString = Utils.getOption("-A", options); System.out.println(numAttsString); if (numAttsString.length() != 0) attSubsampleSize = Integer.parseInt(numAttsString); String maxIntervalLengthsString = Utils.getOption("-L", options); System.out.println(maxIntervalLengthsString); if (maxIntervalLengthsString.length() != 0) maxIntervalLengthFinder = (numAtts) -> (int) (numAtts * Double.parseDouble(maxIntervalLengthsString)); System.out.println(attSubsampleSize + " " + maxIntervalLengthFinder.apply(100)); } /** * Enables multi threading with a set number of threads to use. * * @param numThreads number of threads available for multi threading */ @Override //MultiThreadable public void enableMultiThreading(int numThreads) { if (numThreads > 1) { this.numThreads = numThreads; multiThread = true; } else { this.numThreads = 1; multiThread = false; } } /** * Nested class to find and store seven simple summary features for an interval */ private static class FeatureSet { public static double calcFeatureByIndex(int idx, int start, int end, double[] data) { switch (idx) { case 22: return calcMean(start, end, data); case 23: return calcMedian(start, end, data); case 24: return calcStandardDeviation(start, end, data); case 25: return calcSlope(start, end, data); case 26: return calcInterquartileRange(start, end, data); case 27: return calcMin(start, end, data); case 28: return calcMax(start, end, data); default: return Double.NaN; } } public static double calcMean(int start, int end, double[] data) { double sumY = 0; for (int i = start; i <= end; i++) { sumY += data[i]; } int length = end - start + 1; return sumY / length; } public static double calcMedian(int start, int end, double[] data) { ArrayList<Double> sortedData = 
new ArrayList<>(end - start + 1); for (int i = start; i <= end; i++) { sortedData.add(data[i]); } return median(sortedData, false); //sorted in function } public static double calcStandardDeviation(int start, int end, double[] data) { double sumY = 0; double sumYY = 0; for (int i = start; i <= end; i++) { sumY += data[i]; sumYY += data[i] * data[i]; } int length = end - start + 1; return (sumYY - (sumY * sumY) / length) / (length - 1); } public static double calcSlope(int start, int end, double[] data) { double sumY = 0; double sumX = 0, sumXX = 0, sumXY = 0; for (int i = start; i <= end; i++) { sumY += data[i]; sumX += (i - start); sumXX += (i - start) * (i - start); sumXY += data[i] * (i - start); } int length = end - start + 1; double slope = (sumXY - (sumX * sumY) / length); double denom = sumXX - (sumX * sumX) / length; slope = denom == 0 ? 0 : slope / denom; return slope; } public static double calcInterquartileRange(int start, int end, double[] data) { ArrayList<Double> sortedData = new ArrayList<>(end - start + 1); for (int i = start; i <= end; i++) { sortedData.add(data[i]); } Collections.sort(sortedData); int length = end - start + 1; ArrayList<Double> left = new ArrayList<>(length / 2 + 1); ArrayList<Double> right = new ArrayList<>(length / 2 + 1); if (length % 2 == 1) { for (int i = 0; i <= length / 2; i++) { left.add(sortedData.get(i)); } } else { for (int i = 0; i < length / 2; i++) { left.add(sortedData.get(i)); } } for (int i = length / 2; i < sortedData.size(); i++) { right.add(sortedData.get(i)); } return median(right, false) - median(left, false); } public static double calcMin(int start, int end, double[] data) { double min = Double.MAX_VALUE; for (int i = start; i <= end; i++) { if (data[i] < min) min = data[i]; } return min; } public static double calcMax(int start, int end, double[] data) { double max = -999999999; for (int i = start; i <= end; i++) { if (data[i] > max) max = data[i]; } return max; } } /** * Class to hold data about a DrCIF 
 * tree when multi threading.
 */
private static class MultiThreadBuildHolder {
    // Per-tree results passed back from a TreeBuildThread.
    int[] subsampleAtts;          // attribute indices sampled for this tree
    int[][] intervalDimensions;   // dimension chosen per interval, per representation
    Classifier tree;
    int[][][] interval;           // [representation][interval][{start, end}] (inclusive)
    double[][] trainDistribution; // accumulated train-estimate probabilities, if produced
    int[] oobCounts;
    long errorTime;               // time spent producing the train estimate

    public MultiThreadBuildHolder() {
    }
}

/**
 * Class to build a DrCIF tree when multi threading.
 */
private class TreeBuildThread implements Callable<MultiThreadBuildHolder> {
    int i;
    double[][][][] dimensions; // [instance][representation][dimension][time point]
    int[] classVals;
    Instances result;

    public TreeBuildThread(int i, double[][][][] dimensions, int[] classVals, Instances result) {
        this.i = i;
        this.dimensions = dimensions;
        this.classVals = classVals;
        this.result = result;
    }

    /**
     * generate random intervals
     * do the transforms
     * build the classifier
     **/
    @Override
    public MultiThreadBuildHolder call() throws Exception {
        MultiThreadBuildHolder h = new MultiThreadBuildHolder();
        // Thread-local RNG seeded per tree index so results are reproducible
        // regardless of thread scheduling.
        Random rand = new Random(seed + i * numClassifiers);

        Catch22 c22 = new Catch22();
        c22.setOutlierNormalise(outlierNorm);

        //1. Select random intervals for tree i
        int[][][] interval = new int[dimensions[0].length][][];
        for (int r = 0; r < dimensions[0].length; r++) {
            interval[r] = new int[numIntervals[r]][2];
            for (int j = 0; j < numIntervals[r]; j++) {
                if (rand.nextBoolean()) {
                    // Anchor the start point, then draw a length in
                    // [minIntervalLength, min(maxIntervalLength, room remaining)].
                    if (dimensions[0][r][0].length - minIntervalLength[r] > 0)
                        interval[r][j][0] = rand.nextInt(dimensions[0][r][0].length
                                - minIntervalLength[r]); //Start point

                    int range = Math.min(dimensions[0][r][0].length - interval[r][j][0], maxIntervalLength[r]);
                    int length;
                    if (range - minIntervalLength[r] == 0) length = minIntervalLength[r];
                    else length = rand.nextInt(range - minIntervalLength[r]) + minIntervalLength[r];
                    interval[r][j][1] = interval[r][j][0] + length;
                } else {
                    // Anchor the end point instead and grow the interval backwards.
                    if (dimensions[0][r][0].length - minIntervalLength[r] > 0)
                        interval[r][j][1] = rand.nextInt(dimensions[0][r][0].length
                                - minIntervalLength[r]) + minIntervalLength[r]; //Start point

                    int range = Math.min(interval[r][j][1], maxIntervalLength[r]);
                    int length;
                    if (range - minIntervalLength[r] == 0) length = minIntervalLength[r];
                    else length = rand.nextInt(range - minIntervalLength[r]) + minIntervalLength[r];
                    interval[r][j][0] = interval[r][j][1] - length;
                }
            }
        }

        //If bagging find instances with replacement
        int[] instInclusions = null;
        boolean[] inBag = null;
        if (bagging) {
            inBag = new boolean[numInstances];
            instInclusions = new int[numInstances];

            for (int n = 0; n < numInstances; n++) {
                instInclusions[rand.nextInt(numInstances)]++;
            }

            for (int n = 0; n < numInstances; n++) {
                if (instInclusions[n] > 0) {
                    inBag[n] = true;
                }
            }
        }

        //find attributes to subsample
        ArrayList<Integer> arrl = new ArrayList<>(startNumAttributes);
        for (int n = 0; n < startNumAttributes; n++) {
            arrl.add(n);
        }

        // Sample numAttributes distinct feature indices without replacement.
        int[] subsampleAtts = new int[numAttributes];
        for (int n = 0; n < numAttributes; n++) {
            subsampleAtts[n] = arrl.remove(rand.nextInt(arrl.size()));
        }

        //find dimensions for each interval
        int[][] intervalDimensions = new int[dimensions[0].length][];
        for (int r = 0; r < dimensions[0].length; r++) {
            intervalDimensions[r] = new int[numIntervals[r]];
            for (int n = 0; n < numIntervals[r]; n++) {
                intervalDimensions[r][n] = rand.nextInt(numDimensions);
            }
            Arrays.sort(intervalDimensions[r]);
        }

        h.subsampleAtts = subsampleAtts;
        h.intervalDimensions = intervalDimensions;

        //For bagging
        int instIdx = 0;
        int lastIdx = -1;

        //2. Generate and store attributes
        for (int k = 0; k < numInstances; k++) {
            //For each instance
            if (bagging) {
                boolean sameInst = false;

                // Walk to the next instance with remaining bag inclusions; repeated
                // draws of the same instance copy the previous row instead of
                // recomputing the features.
                while (true) {
                    if (instInclusions[instIdx] == 0) {
                        instIdx++;
                    } else {
                        instInclusions[instIdx]--;

                        if (instIdx == lastIdx) {
                            result.set(k, new DenseInstance(result.instance(k - 1)));
                            sameInst = true;
                        } else {
                            lastIdx = instIdx;
                        }

                        break;
                    }
                }

                if (sameInst) continue;

                result.instance(k).setValue(result.classIndex(), classVals[instIdx]);
            } else {
                instIdx = k;
            }

            int p = 0;
            for (int r = 0; r < dimensions[0].length; r++) {
                for (int j = 0; j < numIntervals[r]; j++) {
                    //extract the interval
                    double[] series = dimensions[instIdx][r][intervalDimensions[r][j]];

                    double[] intervalArray = Arrays.copyOfRange(series, interval[r][j][0], interval[r][j][1] + 1);

                    //process features
                    for (int a = 0; a < numAttributes; a++) {
                        // Indices 0-21 are Catch22 features, 22+ the simple summary stats.
                        if (subsampleAtts[a] < 22) {
                            result.instance(k).setValue(p,
                                    c22.getSummaryStatByIndex(subsampleAtts[a], j, intervalArray));
                        } else {
                            result.instance(k).setValue(p, FeatureSet.calcFeatureByIndex(subsampleAtts[a],
                                    interval[r][j][0], interval[r][j][1], series));
                        }
                        p++;
                    }
                }
            }
        }

        //3. Create and build tree using all the features. Feature selection
        Classifier tree = AbstractClassifier.makeCopy(base);
        if (seedClassifier && tree instanceof Randomizable)
            ((Randomizable) tree).setSeed(seed * (i + 1));

        tree.buildClassifier(result);

        if (getEstimateOwnPerformance() && bagging) {
            // Out-of-bag estimate: predict only the instances this tree never saw.
            long t1 = System.nanoTime();
            int[] oobCounts = new int[numInstances];
            double[][] trainDistributions = new double[numInstances][numClasses];

            if (base instanceof ContinuousIntervalTree) {
                for (int n = 0; n < numInstances; n++) {
                    if (inBag[n]) continue;

                    double[] newProbs = ((ContinuousIntervalTree) tree).distributionForInstance(dimensions[n],
                            functions, interval, subsampleAtts, intervalDimensions);
                    oobCounts[n]++;
                    for (int k = 0; k < newProbs.length; k++)
                        trainDistributions[n][k] += newProbs[k];
                }
            } else {
                for (int n = 0; n < numInstances; n++) {
                    if (inBag[n]) continue;

                    int p = 0;
                    for (int r = 0; r < dimensions[0].length; r++) {
                        for (int j = 0; j < numIntervals[r]; j++) {
                            double[] series = dimensions[n][r][intervalDimensions[r][j]];

                            double[] intervalArray = Arrays.copyOfRange(series, interval[r][j][0],
                                    interval[r][j][1] + 1);

                            // NOTE(review): features are written into result.instance(0)
                            // but the prediction below reads testHolder.instance(0) —
                            // these look mismatched (and testHolder is shared across
                            // threads); confirm the intended target instance.
                            for (int a = 0; a < numAttributes; a++) {
                                if (subsampleAtts[a] < 22) {
                                    result.instance(0).setValue(p,
                                            c22.getSummaryStatByIndex(subsampleAtts[a], j, intervalArray));
                                } else {
                                    result.instance(0).setValue(p, FeatureSet.calcFeatureByIndex(subsampleAtts[a],
                                            interval[r][j][0], interval[r][j][1], series));
                                }
                                p++;
                            }
                        }
                    }

                    double[] newProbs = tree.distributionForInstance(testHolder.instance(0));
                    oobCounts[n]++;
                    for (int k = 0; k < newProbs.length; k++)
                        trainDistributions[n][k] += newProbs[k];
                }
            }

            h.oobCounts = oobCounts;
            h.trainDistribution = trainDistributions;
            h.errorTime = System.nanoTime() - t1;
        } else if (getEstimateOwnPerformance() && trainEstimateMethod == TrainEstimateMethod.TRAIN) {
            // Resubstitution estimate: predict the (transformed) train data itself.
            long t1 = System.nanoTime();
            double[][] trainDistributions = new double[numInstances][numClasses];

            for (int n = 0; n < numInstances; n++) {
                double[] newProbs = tree.distributionForInstance(result.instance(n));
                for (int k = 0; k < newProbs.length; k++)
                    trainDistributions[n][k] += newProbs[k];
            }

            h.trainDistribution = trainDistributions;
            h.errorTime = System.nanoTime() - t1;
        } else if (getEstimateOwnPerformance() && trainEstimateMethod == TrainEstimateMethod.CV) {
            // Cross-validation estimate on this tree's transformed data, using a
            // fresh copy of the base classifier.
            long t1 = System.nanoTime();
            double[][] trainDistributions = new double[numInstances][numClasses];
            int numFolds = Math.min(result.numInstances(), 10);
            CrossValidationEvaluator cv = new CrossValidationEvaluator();
            if (seedClassifier) cv.setSeed(seed * 5 * i);
            cv.setNumFolds(numFolds);
            Classifier cvtree = AbstractClassifier.makeCopy(base);
            if (seedClassifier && cvtree instanceof Randomizable)
                ((Randomizable) cvtree).setSeed(seed * (i + 1));

            ClassifierResults cvResults = cv.evaluate(cvtree, result);

            for (int g = 0; g < result.numInstances(); g++) {
                double[] dist = cvResults.getProbabilityDistribution(g);
                for (int n = 0; n < trainDistributions[g].length; n++) {
                    trainDistributions[g][n] += dist[n];
                }
            }

            h.trainDistribution = trainDistributions;
            h.errorTime = System.nanoTime() - t1;
        }

        h.tree = tree;
        h.interval = interval;
        return h;
    }
}

/**
 * Class to hold data about a DrCIF tree when multi threading.
 */
private static class MultiThreadPredictionHolder {
    int c; // predicted class index

    public MultiThreadPredictionHolder() {
    }
}

/**
 * Class to make a class prediction using a DrCIF tree when multi threading.
 */
private class TreePredictionThread implements Callable<MultiThreadPredictionHolder> {
    int i;
    double[][][] dimensions; // [representation][dimension][time point] for one instance
    Classifier tree;
    Instances testHolder;    // private single-row copy, safe to mutate in this thread

    public TreePredictionThread(int i, double[][][] dimensions, Classifier tree, Instances testHolder) {
        this.i = i;
        this.dimensions = dimensions;
        this.tree = tree;
        this.testHolder = testHolder;
    }

    @Override
    public MultiThreadPredictionHolder call() throws Exception {
        MultiThreadPredictionHolder h = new MultiThreadPredictionHolder();

        if (base instanceof ContinuousIntervalTree) {
            // ContinuousIntervalTree computes the features it needs on demand.
            h.c = (int) ((ContinuousIntervalTree) trees.get(i)).classifyInstance(dimensions, functions,
                    intervals.get(i), subsampleAtts.get(i), intervalDimensions.get(i));
        } else {
            // Otherwise build tree i's transformed feature vector, then classify it.
            Catch22 c22 = new Catch22();
            c22.setOutlierNormalise(outlierNorm);

            int p = 0;
            for (int r = 0; r < dimensions.length; r++) {
                for (int j = 0; j < intervals.get(i)[r].length; j++) {
                    double[] series = dimensions[r][intervalDimensions.get(i)[r][j]];

                    double[] intervalArray = Arrays.copyOfRange(series, intervals.get(i)[r][j][0],
                            intervals.get(i)[r][j][1] + 1);

                    for (int a = 0; a < numAttributes; a++) {
                        // Indices 0-21 are Catch22 features, 22+ the simple summary stats.
                        if (subsampleAtts.get(i)[a] < 22) {
                            testHolder.instance(0).setValue(p,
                                    c22.getSummaryStatByIndex(subsampleAtts.get(i)[a], j, intervalArray));
                        } else {
                            testHolder.instance(0).setValue(p, FeatureSet.calcFeatureByIndex(subsampleAtts.get(i)[a],
                                    intervals.get(i)[r][j][0], intervals.get(i)[r][j][1], series));
                        }
                        p++;
                    }
                }
            }

            h.c = (int) tree.classifyInstance(testHolder.instance(0));
        }

        return h;
    }
}

// The 29 DrCIF features wrapped as Interval -> Double functions, used by
// ContinuousIntervalTree. c22_0..c22_21 are the Catch22 features (computed on the
// interval with outlier normalisation); the remainder are FeatureSet stats.
public static final Function<Interval, Double> c22_0 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(0, intervalArray, true);
};

public static final Function<Interval, Double> c22_1 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(1, intervalArray, true);
};

public static final Function<Interval, Double> c22_2 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(2, intervalArray, true);
};

public static final Function<Interval, Double> c22_3 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(3, intervalArray, true);
};

public static final Function<Interval, Double> c22_4 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(4, intervalArray, true);
};

public static final Function<Interval, Double> c22_5 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(5, intervalArray, true);
};

public static final Function<Interval, Double> c22_6 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(6, intervalArray, true);
};

public static final Function<Interval, Double> c22_7 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(7, intervalArray, true);
};

public static final Function<Interval, Double> c22_8 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(8, intervalArray, true);
};

public static final Function<Interval, Double> c22_9 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(9, intervalArray, true);
};

public static final Function<Interval, Double> c22_10 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(10, intervalArray, true);
};

public static final Function<Interval, Double> c22_11 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(11, intervalArray, true);
};

public static final Function<Interval, Double> c22_12 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(12, intervalArray, true);
};

public static final Function<Interval, Double> c22_13 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(13, intervalArray, true);
};

public static final Function<Interval, Double> c22_14 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(14, intervalArray, true);
};

public static final Function<Interval, Double> c22_15 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(15, intervalArray, true);
};

public static final Function<Interval, Double> c22_16 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(16, intervalArray, true);
};

public static final Function<Interval, Double> c22_17 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(17, intervalArray, true);
};

public static final Function<Interval, Double> c22_18 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(18, intervalArray, true);
};

public static final Function<Interval, Double> c22_19 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(19, intervalArray, true);
};

public static final Function<Interval, Double> c22_20 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(20, intervalArray, true);
};

public static final Function<Interval, Double> c22_21 = (Interval i) -> {
    double[] intervalArray = Arrays.copyOfRange(i.series, i.start, i.end + 1);
    return Catch22.getSummaryStatByIndex(21, intervalArray, true);
};

public static final Function<Interval, Double> mean =
        (Interval i) -> FeatureSet.calcFeatureByIndex(22, i.start, i.end, i.series);
public static final Function<Interval, Double> median =
        (Interval i) -> FeatureSet.calcFeatureByIndex(23, i.start, i.end, i.series);
public static final Function<Interval, Double> stdev =
        (Interval i) -> FeatureSet.calcFeatureByIndex(24, i.start, i.end, i.series);
public static final Function<Interval, Double> slope =
        (Interval i) -> FeatureSet.calcFeatureByIndex(25, i.start, i.end, i.series);
public static final Function<Interval, Double> iqr =
        (Interval i) -> FeatureSet.calcFeatureByIndex(26, i.start, i.end, i.series);
public static final Function<Interval, Double> min =
        (Interval i) -> FeatureSet.calcFeatureByIndex(27, i.start, i.end, i.series);
public static final Function<Interval, Double> max =
        (Interval i) -> FeatureSet.calcFeatureByIndex(28, i.start, i.end, i.series);

/**
 * DrCIF attributes as functions
 **/
public static final Function<Interval, Double>[] functions =
        new Function[]{c22_0, c22_1, c22_2, c22_3, c22_4, c22_5, c22_6, c22_7, c22_8, c22_9, c22_10, c22_11,
                c22_12, c22_13, c22_14, c22_15, c22_16, c22_17, c22_18, c22_19, c22_20, c22_21, mean, median,
                stdev, slope, iqr, min, max};

/**
 * Development tests for the DrCIF classifier.
 *
 * @param arg arguments, unused
 * @throws Exception if tests fail
 */
public static void main(String[] arg) throws Exception {
    // Smoke test on ItalyPowerDemand, resample 0, with an OOB train estimate.
    Instances[] data = DatasetLoading.sampleItalyPowerDemand(0);
    Instances train = data[0];
    Instances test = data[1];

    DrCIF c = new DrCIF();
    c.setSeed(0);
    c.estimateOwnPerformance = true;
    c.trainEstimateMethod = TrainEstimateMethod.OOB;
    double a;

    long t1 = System.nanoTime();
    c.buildClassifier(train);
    System.out.println("Train time=" + (System.nanoTime() - t1) * 1e-9);
    System.out.println("build ok: original atts = " + (train.numAttributes() - 1) + " new atts = "
            + (c.testHolder.numAttributes() - 1) + " num trees = " + c.trees.size() + " num intervals = "
            + Arrays.toString(c.numIntervals));
    System.out.println("recorded times: train time = " + (c.trainResults.getBuildTime() * 1e-9)
            + " estimate time = " + (c.trainResults.getErrorEstimateTime() * 1e-9) + " both = "
            + (c.trainResults.getBuildPlusEstimateTime() * 1e-9));

    a = ClassifierTools.accuracy(test, c);
    System.out.println("Test Accuracy = " + a);
    System.out.println("Train Accuracy = " + c.trainResults.getAcc());

    // Expected results for seed 0 (regression reference):
    //Test Accuracy = 0.9650145772594753
    //Train Accuracy = 0.9552238805970149
}
}
77,886
40.829753
187
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/interval_based/LPS.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.interval_based; import experiments.data.DatasetLoading; import fileIO.OutFile; import java.io.FileInputStream; import java.io.ObjectInputStream; import java.io.Serializable; import java.text.DecimalFormat; import java.util.*; import java.util.concurrent.TimeUnit; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.classifiers.ParameterSplittable; import utilities.ClassifierTools; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.classifiers.Evaluation; import weka.core.Instances; import weka.core.Attribute; import weka.core.Capabilities; import weka.core.ContingencyTables; import weka.core.DenseInstance; import weka.core.Drawable; import weka.core.Instance; import weka.core.Option; import weka.core.OptionHandler; import weka.core.PartitionGenerator; import weka.core.Randomizable; import weka.core.RevisionUtils; import weka.core.TechnicalInformation; import weka.core.TechnicalInformationHandler; import weka.core.Utils; import weka.core.WeightedInstancesHandler; /** * * @author ajb. Implementation of the learned pattern similarity algorithm * by M. 
Baydogan
 * article{baydogan15lps,
 *   title={Time series representation and similarity based on local autopatterns},
 *   author={M. Baydogan and G. Runger},
 *   journal={Data Mining and Knowledge Discovery},
 *   volume = {30},
 *   number = {2},
 *   pages = {476--509},
 *   year = {2016}
 * }
 */
public class LPS extends EnhancedAbstractClassifier implements ParameterSplittable, TechnicalInformationHandler {

    // One random regression tree per ensemble member.
    RandomRegressionTree[] trees;
    public static final int PARASEARCH_NOS_TREES = 25;
    public static final int DEFAULT_NOS_TREES = 200;
    int nosTrees = DEFAULT_NOS_TREES;
    int nosSegments = 20;
    // Candidate values for the tuned parameters (leaf ratio and tree depth).
    double[] ratioLevels = {0.01, 0.1, 0.25, 0.5};
    double[] segmentProps = {0.05, 0.1, 0.25, 0.5, 0.75, 0.95};
    double segmentProp = segmentProps[0];
    double ratioLevel = ratioLevels[0];
    int[] treeDepths = {2, 4, 6};
    int treeDepth = treeDepths[2];
    // Per-tree learned structure: segment lengths/starts and leaf-node counts.
    int[] segLengths;
    int[][] segStarts;
    int[][] segDiffStarts;
    Instances sequences;
    int[] nosLeafNodes;
    int[][][] leafNodeCounts; // [train instance][tree][leaf node]
    double[] trainClassVals;
    int[] classAtt;           // randomly chosen regression target column per tree
    boolean paramSearch = true;
    double acc = 0;

    public LPS() {
        super(CANNOT_ESTIMATE_OWN_PERFORMANCE);
        trees = new RandomRegressionTree[nosTrees];
    }

    public String globalInfo() {
        return "Blah";
    }

    @Override
    public TechnicalInformation getTechnicalInformation() {
        TechnicalInformation result;
        result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE);
        result.setValue(TechnicalInformation.Field.AUTHOR, "M. Baydogan and G. Runger");
        result.setValue(TechnicalInformation.Field.YEAR, "2016");
        result.setValue(TechnicalInformation.Field.TITLE,
                "Time series representation and similarity based on local\n" + "autopatterns");
        result.setValue(TechnicalInformation.Field.JOURNAL, "Data Mining and Knowledge Discovery");
        result.setValue(TechnicalInformation.Field.VOLUME, "30");
        result.setValue(TechnicalInformation.Field.NUMBER, "2");
        result.setValue(TechnicalInformation.Field.PAGES, "476-509");
        return result;
    }

    //<editor-fold defaultstate="collapsed" desc="problems used in DAMI paper">
    public static String[] problems = {
            "Adiac",
            "ArrowHead",
//            "ARSim",
            "Beef",
            "BeetleFly",
            "BirdChicken",
            "Car",
            "CBF",
            "ChlorineConcentration",
            "CinCECGtorso",
            "Coffee",
            "Computers",
            "CricketX",
            "CricketY",
            "CricketZ",
            "DiatomSizeReduction",
            "DistalPhalanxOutlineAgeGroup",
            "DistalPhalanxOutlineCorrect",
            "DistalPhalanxTW",
            "Earthquakes",
            "ECGFiveDays",
            "ElectricDevices",
            "FaceAll",
            "FaceFour",
            "FacesUCR",
            "Fiftywords",
            "Fish",
            "FordA",
            "FordB",
            "GunPoint",
            "Haptics",
            "Herring",
            "InlineSkate",
            "ItalyPowerDemand",
            "LargeKitchenAppliances",
            "Lightning2",
            "Lightning7",
            "Mallat",
            "MedicalImages",
            "MiddlePhalanxOutlineAgeGroup",
            "MiddlePhalanxOutlineCorrect",
            "MiddlePhalanxTW",
            "MoteStrain",
            "NonInvasiveFatalECGThorax1",
            "NonInvasiveFatalECGThorax2",
            "OliveOil",
            "OSULeaf",
            "PhalangesOutlinesCorrect",
            "Plane",
            "ProximalPhalanxOutlineAgeGroup",
            "ProximalPhalanxOutlineCorrect",
            "ProximalPhalanxTW",
            "RefrigerationDevices",
            "ScreenType",
            "ShapeletSim",
            "ShapesAll",
            "SmallKitchenAppliances",
            "SonyAIBORobotSurface1",
            "SonyAIBORobotSurface2",
            "StarLightCurves",
            "SwedishLeaf",
            "Symbols",
            "SyntheticControl",
            "ToeSegmentation1",
            "ToeSegmentation2",
            "Trace",
            "TwoLeadECG",
            "TwoPatterns",
            "UWaveGestureLibraryX",
            "UWaveGestureLibraryY",
            "UWaveGestureLibraryZ",
            "UWaveGestureLibraryAll",
            "Wafer",
            "WordSynonyms",
            "Yoga"};
    //</editor-fold>

    //<editor-fold defaultstate="collapsed" desc="results reported in DAMI paper (errors)">
    // Error rates aligned index-for-index with the problems array above.
    static double[] reportedErrorResults = {
            0.211,
            0.2,
//            0.004,
            0.367,
            0.15,
            0.05,
            0.183,
            0.002,
            0.352,
            0.064,
            0.071,
            0.136,
            0.282,
            0.208,
            0.305,
            0.049,
            0.237,
            0.234,
            0.327,
            0.331,
            0.155,
            0.273,
            0.242,
            0.04,
            0.098,
            0.213,
            0.094,
            0.09,
            0.223,
            0,
            0.562,
            0.398,
            0.494,
            0.053,
            0.157,
            0.197,
            0.411,
            0.093,
            0.297,
            0.523,
            0.208,
            0.497,
            0.114,
            0.183,
            0.147,
            0.133,
            0.134,
            0.226,
            0,
            0.112,
            0.172,
            0.278,
            0.329,
            0.44,
            0.006,
            0.218,
            0.225,
            0.225,
            0.123,
            0.033,
            0.072,
            0.03,
            0.027,
            0.077,
            0.1,
            0.02,
            0.061,
            0.014,
            0.189,
            0.263,
            0.253,
            0.025,
            0.001,
            0.27,
            0.136
    };
    //</editor-fold>

    /**
     * Rebuilds LPS on each problem and compares the obtained error rate to the
     * value published in the DAMI paper, writing a CSV of both.
     * NOTE(review): assumes Windows-style paths ("\\" separators) — confirm.
     */
    public static void compareToPublished(String datasetPath, String resultsPath) throws Exception {
        DecimalFormat df = new DecimalFormat("###.###");
        OutFile res = new OutFile(resultsPath + "recreatedLPS.csv");
        int b = 0;
        int t = 0;
        System.out.println("problem,recreated,published");
        for (int i = 0; i < problems.length; i++) {
            String s = problems[i];
            System.out.print(s + ",");
            Instances train = DatasetLoading.loadDataNullable(datasetPath + s + "\\" + s + "_TRAIN.arff");
            Instances test = DatasetLoading.loadDataNullable(datasetPath + s + "\\" + s + "_TEST.arff");
            LPS l = new LPS();
            l.setParamSearch(false);
            l.buildClassifier(train);
            double a = ClassifierTools.accuracy(test, l);
            System.out.println(df.format(1 - a) + "," + df.format(reportedErrorResults[i]) + ","
                    + df.format(1 - a - reportedErrorResults[i]));
            if ((1 - a) < reportedErrorResults[i]) b++;
            if ((1 - a) == reportedErrorResults[i]) t++;
            res.writeLine(s + "," + (1 - a) + "," + reportedErrorResults[i]);
        }
        System.out.println("Reported better =" + (problems.length - t - b) + " ties =" + t + " ours better = " + b);
    }

    @Override
    public void setParamSearch(boolean b) {
        paramSearch = b;
    }

    @Override
    public void setParametersFromIndex(int x) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
} public String getParas() { return ratioLevel+","+treeDepth; } @Override public void buildClassifier(Instances data) throws Exception { long startTime=System.nanoTime(); //determine minimum and maximum possible segment length if(paramSearch){ double bestErr=1; int bestRatio=0; int bestTreeDepth=0; LPS trainer=new LPS(); if(seedClassifier) trainer.setSeed(seed*42); trainer.nosTrees=50; trainer.setParamSearch(false); int folds=10; for(int i=0;i<ratioLevels.length;i++){ trainer.ratioLevel=ratioLevels[i]; for(int j=0;j<treeDepths.length;j++){ trainer.treeDepth=treeDepths[j]; Evaluation eval=new Evaluation(data); eval.crossValidateModel(trainer, data, folds,rand); double e=eval.errorRate(); if(e<bestErr){ bestErr=e; bestTreeDepth=j; bestRatio=i; } } } ratioLevel=ratioLevels[bestRatio]; treeDepth=treeDepths[bestTreeDepth]; if(debug) System.out.println("Best ratio level ="+ratioLevel+" best tree depth ="+treeDepth+" with CV error ="+bestErr); } int seriesLength=data.numAttributes()-1; int minSegment=(int)(seriesLength*0.1); int maxSegment=(int)(seriesLength*0.9); segLengths=new int[nosTrees]; nosLeafNodes=new int[nosTrees]; segStarts=new int[nosTrees][nosSegments]; segDiffStarts=new int[nosTrees][nosSegments]; leafNodeCounts=new int[data.numInstances()][nosTrees][]; trainClassVals=new double[data.numInstances()]; for(int i=0;i<data.numInstances();i++) trainClassVals[i]=data.instance(i).classValue(); classAtt=new int[nosTrees]; //For each tree 1 to N for(int i=0;i<nosTrees;i++){ // %select random segment length for each tree segLengths[i]=minSegment+rand.nextInt(maxSegment-minSegment); // %select target segments randomly for each tree // %ind=1:(2*nsegment); // int target=r.nextInt(2*nosSegments); //times 2 for diffs // %construct segment matrix (both observed and difference) // stx=randsample(tlen-segmentlen(i),nsegment,true); // stxdiff=randsample(tlen-segmentlen(i)-1,nsegment,true); //Sample with replacement. 
for(int j=0;j<nosSegments;j++){ segStarts[i][j]=rand.nextInt(seriesLength-segLengths[i]); segDiffStarts[i][j]=rand.nextInt(seriesLength-segLengths[i]-1); } //Set up the instances for this tree //2- Generate segments for each time series and // concatenate these segments rowwise, let resulting matrix be M ArrayList<Attribute> atts=new ArrayList<>(); String name; for(int j=0;j<2*nosSegments;j++){ name = "SegFeature"+j; atts.add(new Attribute(name)); } sequences = new Instances("SubsequenceIntervals",atts,segLengths[i]*data.numInstances()); for(int j=0;j<data.numInstances();j++){ Instance series=data.instance(j); for(int k=0;k<segLengths[i];k++){ DenseInstance in=new DenseInstance(sequences.numAttributes()); for(int m=0;m<nosSegments;m++) in.setValue(m, series.value(segStarts[i][m]+k)); for(int m=0;m<nosSegments;m++) in.setValue(nosSegments+m, series.value(segDiffStarts[i][m]+k)-series.value(segDiffStarts[i][m]+k+1)); sequences.add(in); // System.out.println(" TRAIN INS ="+in+" CLASS ="+series.classValue()); } } //3- Choose a random target column from M, let this target column be t classAtt[i]=rand.nextInt(sequences.numAttributes());// sequences.setClassIndex(classAtt[i]); trees[i]= new RandomRegressionTree(); trees[i].setMaxDepth(treeDepth); trees[i].setKValue(1); // System.out.println("Min Num ="+(int)(sequences.numInstances()*ratioLevel)); trees[i].setMinNum((int)(sequences.numInstances()*ratioLevel));//leafratio*size(segments,1) trees[i].buildClassifier(sequences); nosLeafNodes[i]=trees[i].nosLeafNodes; // System.out.println("Num of leaf nodes ="+trees[i].nosLeafNodes); for(int j=0;j<data.numInstances();j++){ leafNodeCounts[j][i]=new int[trees[i].nosLeafNodes]; for(int k=0;k<segLengths[i];k++){ trees[i].distributionForInstance(sequences.instance(j*segLengths[i]+k)); int leafID=RandomRegressionTree.lastNode; // System.out.println("Seq Number ="+(j*segLengths[i]+k)); leafNodeCounts[j][i][leafID]++; } } //Set up no pruning, minimum number at leaf nodes to 
leafratio*size(segments,1), //nvartosample means only single variable considered at each node. // splitting consider only one random column, namely r and find the split value. // tree = classregtree(segments(:,ind~=target(i)),segments(:,target(i)),'method','regression', ... // 'prune','off','minleaf',leafratio*size(segments,1),'nvartosample',1); } // System.out.println(" Nos Sequence Cases ="+sequences.numInstances()); /* for (int i = 0; i < data.numInstances(); i++) { //Find the leaf node of every subsequence belonging to instance i for every tree System.out.print("Instance "+i+" HIST: "); for(int j=0;j<leafNodeCounts[i].length;j++) for(int k=0;k<leafNodeCounts[i][j].length;k++) System.out.print(leafNodeCounts[i][j][k]+" "); System.out.print(" CLASS ="+data.instance(i).classValue()+" \n "); } */ sequences=null; trainResults.setTimeUnit(TimeUnit.NANOSECONDS); trainResults.setBuildTime(System.nanoTime()-startTime); System.gc(); } public double distance(int[][] test, int[][] train){ double d=0; for(int i=0;i<test.length;i++) for(int j=0;j<test[i].length;j++){ double x=(test[i][j]-train[i][j]); if(x>0) d+=x; else d+=-x; } return d; } public double classifyInstance(Instance ins) throws Exception{ int[][] testNodeCounts=new int[nosTrees][]; //Extract sequences, shove them into instances. 
// concatenate these segments rowwise, let resulting matrix be M for(int i=0;i<nosTrees;i++){ ArrayList<Attribute> atts=new ArrayList<>(); String name; for(int j=0;j<2*nosSegments;j++){ name = "SegFeature"+j; atts.add(new Attribute(name)); } sequences = new Instances("SubsequenceIntervals",atts,segLengths[i]); for(int k=0;k<segLengths[i];k++){ DenseInstance in=new DenseInstance(sequences.numAttributes()); for(int m=0;m<nosSegments;m++) in.setValue(m, ins.value(segStarts[i][m]+k)); for(int m=0;m<nosSegments;m++) in.setValue(nosSegments+m, ins.value(segDiffStarts[i][m]+k)-ins.value(segDiffStarts[i][m]+k+1)); sequences.add(in); // System.out.println(" TEST INS ="+in+" CLASS ="+ins.classValue()); } sequences.setClassIndex(classAtt[i]); testNodeCounts[i]=new int[trees[i].nosLeafNodes]; for(int k=0;k<sequences.numInstances();k++){ trees[i].distributionForInstance(sequences.instance(k)); int leafID=RandomRegressionTree.lastNode; // System.out.println("Seq Number ="+(j*segLengths[i]+k)); testNodeCounts[i][leafID]++; } } // System.out.println(" TEST NODE COUNTS ="); // for(int i=0;i<testNodeCounts.length;i++){ // for(int j=0;j<testNodeCounts[i].length;j++) // System.out.print(" "+testNodeCounts[i][j]); // System.out.println(""); // } // System.out.println(" TRAIN NODE COUNTS ="); // for(int k=0;k<leafNodeCounts.length;k++){ // for(int i=0;i<leafNodeCounts[k].length;i++){ // for(int j=0;j<leafNodeCounts[k][i].length;j++) // System.out.print(" "+leafNodeCounts[k][i][j]); // System.out.println(""); // } // } //1-NN on the counts double minDist=Double.MAX_VALUE; int closest=0; for(int i=0;i<leafNodeCounts.length;i++){ double d=distance(testNodeCounts,leafNodeCounts[i]); if(d<minDist){ minDist=d; closest=i; } } return trainClassVals[closest]; } public static Object readFromFile(String filename) { Object obj=null; try{ FileInputStream fis = new FileInputStream(filename); ObjectInputStream in = new ObjectInputStream(fis); obj =in.readObject(); in.close(); } catch(Exception ex){ 
ex.printStackTrace(); } return obj; } public static void main(String[] args) throws Exception { // LPS lps = new LPS(); // lps.setSeed(0); // System.out.println(ClassifierTools.testUtils_getIPDAcc(lps)); // System.out.println(ClassifierTools.testUtils_confirmIPDReproduction(lps, 0.9339164237123421, "2019_09_26")); // compareToPublished(); // System.exit(0); LPS l=new LPS(); l.setParamSearch(false); String prob="ItalyPowerDemand"; double mean=0; Instances train = DatasetLoading.loadDataNullable("C:\\Users\\ajb\\Dropbox\\TSC Problems\\"+prob+"\\"+prob+"_TRAIN.arff"); Instances test = DatasetLoading.loadDataNullable("C:\\Users\\ajb\\Dropbox\\TSC Problems\\"+prob+"\\"+prob+"_TEST.arff"); // Instances train = ClassifierTools.loadDataThrowable("C:\\Users\\ajb\\Dropbox\\Big TSC Bake Off\\Code\\Baydogan LPS\\Train.arff"); // Instances test = ClassifierTools.loadDataThrowable("C:\\Users\\ajb\\Dropbox\\Big TSC Bake Off\\Code\\Baydogan LPS\\Test.arff"); // train.setClassIndex(train.numAttributes()-1); // test.setClassIndex(test.numAttributes()-1); // System.out.println("Train = "+train); // System.out.println("Test = "+test); l.buildClassifier(train); double a=ClassifierTools.accuracy(test, l); System.out.println( "test prob accuracy = "+a); } /** * After obtaining the ensemble, what I do is to find out which rows of M goes * in to what terminal node of each tree. * * Let's consider one tree. Rows of M extracted from time series S are residing * * in particular nodes of this tree. * I characterize each time series by the number of rows residing in each * terminal node. * * * * When I do the same for all trees in the ensemble, it is * all about combining these terminal node distribution vectors into one * long vector and compute similarity over this single vector. * Without loss of generality, suppose I have 16 terminal nodes for each tree * in my ensemble of 10 trees. That will result in a representation vector of * length 16x10=160. 
Then I compute the similarity (actually dissimilarity) * by taking the sum of absolute differences. 1,2,3,4,5,6,7,8 8,7,6,5,4,3,2,1 Let l=3, nsegs =2, start pos be 2 and 4 Series 1 Seg 1: 2,3,4 Seg 2: 4,5,6 Series 2 Seg 1: 7,6,5 Seg 2: 5,4,3 M equals 2,4 3,5 4,6 7,5 6,4 5,3 **/ public void debugFeatureExtraction(){ //determine minimum and maximum possible segment length ArrayList<Attribute> atts2=new ArrayList<>(); for(int j=0;j<9;j++){ atts2.add(new Attribute("SegFeature"+j)); } double[] t1={1,2,3,4,5,6,7,8}; double[] t2={8,7,6,5,4,3,2,1}; Instances data= new Instances("SubsequenceIntervals",atts2,2); DenseInstance ins=new DenseInstance(data.numAttributes()); for (int i = 0; i < t1.length; i++) { ins.setValue(i, t1[i]); } data.add(ins); ins=new DenseInstance(data.numAttributes()); for (int i = 0; i < t2.length; i++) { ins.setValue(i, t2[i]); } data.add(ins); System.out.println("TEST DATA ="+data); nosSegments=2; nosTrees=1; int seriesLength=data.numAttributes()-1; int minSegment=(int)(seriesLength*0.1); int maxSegment=(int)(seriesLength*0.9); segLengths=new int[nosTrees]; segStarts=new int[nosTrees][nosSegments]; segDiffStarts=new int[nosTrees][nosSegments]; //For each tree 1 to N for(int i=0;i<nosTrees;i++){ // %select random segment length for each tree segLengths[i]=minSegment+rand.nextInt(maxSegment-minSegment); segLengths[i]=3; System.out.println("SEG LENGTH ="+segLengths[i]); // %select target segments randomly for each tree // %ind=1:(2*nsegment); int target=rand.nextInt(2*nosSegments); //times 2 for diffs // %construct segment matrix (both observed and difference) // stx=randsample(tlen-segmentlen(i),nsegment,true); // stxdiff=randsample(tlen-segmentlen(i)-1,nsegment,true); //Sample with replacement. 
for(int j=0;j<nosSegments;j++){ segStarts[i][j]=rand.nextInt(seriesLength-segLengths[i]); segDiffStarts[i][j]=rand.nextInt(seriesLength-segLengths[i]-1); System.out.println("SEG START ="+segStarts[i][j]); System.out.println("SEG DIFF START ="+segDiffStarts[i][j]); } //Set up the instances for this tree Instances tr=null; ArrayList<Attribute> atts=new ArrayList<>(); String name; for(int j=0;j<2*nosSegments;j++){ name = "SegFeature"+j; atts.add(new Attribute(name)); } Instances result = new Instances("SubsequenceIntervals",atts,segLengths[i]*data.numInstances()); for(int j=0;j<data.numInstances();j++){ Instance series=data.instance(j); for(int k=0;k<segLengths[i];k++){ DenseInstance in=new DenseInstance(result.numAttributes()); for(int m=0;m<nosSegments;m++) in.setValue(m, series.value(segStarts[i][m]+k)); for(int m=0;m<nosSegments;m++) in.setValue(nosSegments+m, series.value(segDiffStarts[i][m]+k)-series.value(segDiffStarts[i][m]+k+1)); result.add(in); } } System.out.println("DESIRED OUTPUT : "); System.out.println("2,4\n" + "3,5\n" + "4,6\n" + "7,5\n" + "6,4\n" + "5,3"); System.out.println("TRANSFORMED INSTANCES ="+result); } } /* * RandomRegressionTree.java * Copyright (C) 2001-2012 University of Waikato, Hamilton, New Zealand * <!-- options-end --> * * @author Eibe Frank (eibe@cs.waikato.ac.nz) * @author Richard Kirkby (rkirkby@cs.waikato.ac.nz) * @version $Revision: 11907 $ */ static public class RandomRegressionTree extends AbstractClassifier implements OptionHandler, WeightedInstancesHandler, Randomizable, Drawable, PartitionGenerator { /** for serialization */ private static final long serialVersionUID = -9051119597407396024L; /** The Tree object */ protected Tree m_Tree = null; /** The header information. */ protected Instances m_Info = null; /** Minimum number of instances for leaf. */ protected double m_MinNum = 1.0; /** The number of attributes considered for a split. */ protected int m_KValue = 0; /** The random seed to use. 
*/ protected int m_randomSeed = 1; /** The maximum depth of the tree (0 = unlimited) */ protected int m_MaxDepth = 0; /** Determines how much data is used for backfitting */ protected int m_NumFolds = 0; /** Whether unclassified instances are allowed */ protected boolean m_AllowUnclassifiedInstances = false; /** Whether to break ties randomly. */ protected boolean m_BreakTiesRandomly = false; /** a ZeroR model in case no model can be built from the data */ protected Classifier m_zeroR; /** * The minimum proportion of the total variance (over all the data) required * for split. */ protected double m_MinVarianceProp = 1e-3; public int nosLeafNodes=0; /** * Returns a string describing classifier * * @return a description suitable for displaying in the explorer/experimenter * gui */ public String globalInfo() { return "Class for constructing a tree that considers K randomly " + " chosen attributes at each node. Performs no pruning. Also has" + " an option to allow estimation of class probabilities (or target mean " + "in the regression case) based on a hold-out set (backfitting)."; } /** * Returns the tip text for this property * * @return tip text for this property suitable for displaying in the * explorer/experimenter gui */ public String minNumTipText() { return "The minimum total weight of the instances in a leaf."; } /** * Get the value of MinNum. * * @return Value of MinNum. */ public double getMinNum() { return m_MinNum; } /** * Set the value of MinNum. * * @param newMinNum Value to assign to MinNum. */ public void setMinNum(double newMinNum) { m_MinNum = newMinNum; } /** * Returns the tip text for this property * * @return tip text for this property suitable for displaying in the * explorer/experimenter gui */ public String minVariancePropTipText() { return "The minimum proportion of the variance on all the data " + "that needs to be present at a node in order for splitting to " + "be performed in regression trees."; } /** * Get the value of MinVarianceProp. 
* * @return Value of MinVarianceProp. */ public double getMinVarianceProp() { return m_MinVarianceProp; } /** * Set the value of MinVarianceProp. * * @param newMinVarianceProp Value to assign to MinVarianceProp. */ public void setMinVarianceProp(double newMinVarianceProp) { m_MinVarianceProp = newMinVarianceProp; } /** * Returns the tip text for this property * * @return tip text for this property suitable for displaying in the * explorer/experimenter gui */ public String KValueTipText() { return "Sets the number of randomly chosen attributes. If 0, int(log_2(#predictors) + 1) is used."; } /** * Get the value of K. * * @return Value of K. */ public int getKValue() { return m_KValue; } /** * Set the value of K. * * @param k Value to assign to K. */ public void setKValue(int k) { m_KValue = k; } /** * Returns the tip text for this property * * @return tip text for this property suitable for displaying in the * explorer/experimenter gui */ public String seedTipText() { return "The random number seed used for selecting attributes."; } /** * Set the seed for random number generation. * * @param seed the seed */ @Override public void setSeed(int seed) { m_randomSeed = seed; } /** * Gets the seed for the random number generations * * @return the seed for the random number generation */ @Override public int getSeed() { return m_randomSeed; } /** * Returns the tip text for this property * * @return tip text for this property suitable for displaying in the * explorer/experimenter gui */ public String maxDepthTipText() { return "The maximum depth of the tree, 0 for unlimited."; } /** * Get the maximum depth of trh tree, 0 for unlimited. * * @return the maximum depth. */ public int getMaxDepth() { return m_MaxDepth; } /** * Set the maximum depth of the tree, 0 for unlimited. * * @param value the maximum depth. 
*/ public void setMaxDepth(int value) { m_MaxDepth = value; } /** * Returns the tip text for this property * * @return tip text for this property suitable for displaying in the * explorer/experimenter gui */ public String numFoldsTipText() { return "Determines the amount of data used for backfitting. One fold is used for " + "backfitting, the rest for growing the tree. (Default: 0, no backfitting)"; } /** * Get the value of NumFolds. * * @return Value of NumFolds. */ public int getNumFolds() { return m_NumFolds; } /** * Set the value of NumFolds. * * @param newNumFolds Value to assign to NumFolds. */ public void setNumFolds(int newNumFolds) { m_NumFolds = newNumFolds; } /** * Returns the tip text for this property * * @return tip text for this property suitable for displaying in the * explorer/experimenter gui */ public String allowUnclassifiedInstancesTipText() { return "Whether to allow unclassified instances."; } /** * Gets whether tree is allowed to abstain from making a prediction. * * @return true if tree is allowed to abstain from making a prediction. */ public boolean getAllowUnclassifiedInstances() { return m_AllowUnclassifiedInstances; } /** * Set the value of AllowUnclassifiedInstances. * * @param newAllowUnclassifiedInstances true if tree is allowed to abstain from making a prediction */ public void setAllowUnclassifiedInstances(boolean newAllowUnclassifiedInstances) { m_AllowUnclassifiedInstances = newAllowUnclassifiedInstances; } /** * Returns the tip text for this property * * @return tip text for this property suitable for displaying in the * explorer/experimenter gui */ public String breakTiesRandomlyTipText() { return "Break ties randomly when several attributes look equally good."; } /** * Get whether to break ties randomly. * * @return true if ties are to be broken randomly. */ public boolean getBreakTiesRandomly() { return m_BreakTiesRandomly; } /** * Set whether to break ties randomly. 
* * @param newBreakTiesRandomly true if ties are to be broken randomly */ public void setBreakTiesRandomly(boolean newBreakTiesRandomly) { m_BreakTiesRandomly = newBreakTiesRandomly; } /** * Lists the command-line options for this classifier. * * @return an enumeration over all possible options */ @Override public Enumeration<Option> listOptions() { Vector<Option> newVector = new Vector<Option>(); newVector.addElement(new Option( "\tNumber of attributes to randomly investigate.\t(default 0)\n" + "\t(<0 = int(log_2(#predictors)+1)).", "K", 1, "-K <number of attributes>")); newVector.addElement(new Option( "\tSet minimum number of instances per leaf.\n\t(default 1)", "M", 1, "-M <minimum number of instances>")); newVector.addElement(new Option( "\tSet minimum numeric class variance proportion\n" + "\tof train variance for split (default 1e-3).", "V", 1, "-V <minimum variance for split>")); newVector.addElement(new Option("\tSeed for random number generator.\n" + "\t(default 1)", "S", 1, "-S <num>")); newVector.addElement(new Option( "\tThe maximum depth of the tree, 0 for unlimited.\n" + "\t(default 0)", "depth", 1, "-depth <num>")); newVector.addElement(new Option("\tNumber of folds for backfitting " + "(default 0, no backfitting).", "N", 1, "-N <num>")); newVector.addElement(new Option("\tAllow unclassified instances.", "U", 0, "-U")); newVector.addElement(new Option("\t" + breakTiesRandomlyTipText(), "B", 0, "-B")); newVector.addAll(Collections.list(super.listOptions())); return newVector.elements(); } /** * Gets options from this classifier. 
* * @return the options for the current setup */ @Override public String[] getOptions() { Vector<String> result = new Vector<String>(); result.add("-K"); result.add("" + getKValue()); result.add("-M"); result.add("" + getMinNum()); result.add("-V"); result.add("" + getMinVarianceProp()); result.add("-S"); result.add("" + getSeed()); if (getMaxDepth() > 0) { result.add("-depth"); result.add("" + getMaxDepth()); } if (getNumFolds() > 0) { result.add("-N"); result.add("" + getNumFolds()); } if (getAllowUnclassifiedInstances()) { result.add("-U"); } if (getBreakTiesRandomly()) { result.add("-B"); } Collections.addAll(result, super.getOptions()); return result.toArray(new String[result.size()]); } /** * Parses a given list of options. * <p/> * <!-- options-start --> * Valid options are: <p> * * <pre> -K &lt;number of attributes&gt; * Number of attributes to randomly investigate. (default 0) * (&lt;0 = int(log_2(#predictors)+1)).</pre> * * <pre> -M &lt;minimum number of instances&gt; * Set minimum number of instances per leaf. * (default 1)</pre> * * <pre> -V &lt;minimum variance for split&gt; * Set minimum numeric class variance proportion * of train variance for split (default 1e-3).</pre> * * <pre> -S &lt;num&gt; * Seed for random number generator. * (default 1)</pre> * * <pre> -depth &lt;num&gt; * The maximum depth of the tree, 0 for unlimited. 
* (default 0)</pre> * * <pre> -N &lt;num&gt; * Number of folds for backfitting (default 0, no backfitting).</pre> * * <pre> -U * Allow unclassified instances.</pre> * * <pre> -B * Break ties randomly when several attributes look equally good.</pre> * * <pre> -output-debug-info * If set, classifier is run in debug mode and * may output additional info to the console</pre> * * <pre> -do-not-check-capabilities * If set, classifier capabilities are not checked before classifier is built * (use with caution).</pre> * * <pre> -num-decimal-places * The number of decimal places for the output of numbers in the model (default 2).</pre> * <!-- options-end --> * * @param options the list of options as an array of strings * @throws Exception if an option is not supported */ @Override public void setOptions(String[] options) throws Exception { String tmpStr; tmpStr = Utils.getOption('K', options); if (tmpStr.length() != 0) { m_KValue = Integer.parseInt(tmpStr); } else { m_KValue = 0; } tmpStr = Utils.getOption('M', options); if (tmpStr.length() != 0) { m_MinNum = Double.parseDouble(tmpStr); } else { m_MinNum = 1; } String minVarString = Utils.getOption('V', options); if (minVarString.length() != 0) { m_MinVarianceProp = Double.parseDouble(minVarString); } else { m_MinVarianceProp = 1e-3; } tmpStr = Utils.getOption('S', options); if (tmpStr.length() != 0) { setSeed(Integer.parseInt(tmpStr)); } else { setSeed(1); } tmpStr = Utils.getOption("depth", options); if (tmpStr.length() != 0) { setMaxDepth(Integer.parseInt(tmpStr)); } else { setMaxDepth(0); } String numFoldsString = Utils.getOption('N', options); if (numFoldsString.length() != 0) { m_NumFolds = Integer.parseInt(numFoldsString); } else { m_NumFolds = 0; } setAllowUnclassifiedInstances(Utils.getFlag('U', options)); setBreakTiesRandomly(Utils.getFlag('B', options)); super.setOptions(options); Utils.checkForRemainingOptions(options); } /** * Returns default capabilities of the classifier. 
* * @return the capabilities of this classifier */ @Override public Capabilities getCapabilities() { Capabilities result = super.getCapabilities(); result.disableAll(); // attributes result.enable(Capabilities.Capability.NOMINAL_ATTRIBUTES); result.enable(Capabilities.Capability.NUMERIC_ATTRIBUTES); result.enable(Capabilities.Capability.DATE_ATTRIBUTES); result.enable(Capabilities.Capability.MISSING_VALUES); // class result.enable(Capabilities.Capability.NOMINAL_CLASS); result.enable(Capabilities.Capability.NUMERIC_CLASS); result.enable(Capabilities.Capability.MISSING_CLASS_VALUES); return result; } /** * Builds classifier. * * @param data the data to train with * @throws Exception if something goes wrong or the data doesn't fit */ @Override public void buildClassifier(Instances data) throws Exception { nodeCount=0; nosLeafNodes=0; // Make sure K value is in range if (m_KValue > data.numAttributes() - 1) { m_KValue = data.numAttributes() - 1; } if (m_KValue < 1) { m_KValue = (int) Utils.log2(data.numAttributes() - 1) + 1; } // can classifier handle the data? getCapabilities().testWithFail(data); // remove instances with missing class data = new Instances(data); data.deleteWithMissingClass(); // only class? 
-> build ZeroR model if (data.numAttributes() == 1) { System.err .println("Cannot build model (only class attribute present in data!), " + "using ZeroR model instead!"); m_zeroR = new weka.classifiers.rules.ZeroR(); m_zeroR.buildClassifier(data); return; } else { m_zeroR = null; } // Figure out appropriate datasets Instances train = null; Instances backfit = null; Random rand = data.getRandomNumberGenerator(m_randomSeed); if (m_NumFolds <= 0) { train = data; } else { data.randomize(rand); data.stratify(m_NumFolds); train = data.trainCV(m_NumFolds, 1, rand); backfit = data.testCV(m_NumFolds, 1); } // Create the attribute indices window int[] attIndicesWindow = new int[data.numAttributes() - 1]; int j = 0; for (int i = 0; i < attIndicesWindow.length; i++) { if (j == data.classIndex()) { j++; // do not include the class } attIndicesWindow[i] = j++; } double totalWeight = 0; double totalSumSquared = 0; // Compute initial class counts double[] classProbs = new double[train.numClasses()]; for (int i = 0; i < train.numInstances(); i++) { Instance inst = train.instance(i); if (data.classAttribute().isNominal()) { classProbs[(int) inst.classValue()] += inst.weight(); totalWeight += inst.weight(); } else { classProbs[0] += inst.classValue() * inst.weight(); totalSumSquared += inst.classValue() * inst.classValue() * inst.weight(); totalWeight += inst.weight(); } } double trainVariance = 0; if (data.classAttribute().isNumeric()) { trainVariance = RandomRegressionTree.singleVariance(classProbs[0], totalSumSquared, totalWeight) / totalWeight; classProbs[0] /= totalWeight; } // Build tree m_Tree = new Tree(); m_Info = new Instances(data, 0); m_Tree.buildTree(train, classProbs, attIndicesWindow, totalWeight, rand, 0, m_MinVarianceProp * trainVariance); // Backfit if required if (backfit != null) { m_Tree.backfitData(backfit); } } /** * Computes class distribution of an instance using the tree. 
* * @param instance the instance to compute the distribution for * @return the computed class probabilities * @throws Exception if computation fails */ @Override public double[] distributionForInstance(Instance instance) throws Exception { if (m_zeroR != null) { return m_zeroR.distributionForInstance(instance); } else { return m_Tree.distributionForInstance(instance); } } /** * Outputs the decision tree. * * @return a string representation of the classifier */ @Override public String toString() { // only ZeroR model? if (m_zeroR != null) { StringBuffer buf = new StringBuffer(); buf.append(this.getClass().getName().replaceAll(".*\\.", "") + "\n"); buf.append(this.getClass().getName().replaceAll(".*\\.", "") .replaceAll(".", "=") + "\n\n"); buf .append("Warning: No model could be built, hence ZeroR model is used:\n\n"); buf.append(m_zeroR.toString()); return buf.toString(); } if (m_Tree == null) { return "RandomTree: no model has been built yet."; } else { return "\nRandomTree\n==========\n" + m_Tree.toString(0) + "\n" + "\nSize of the tree : " + m_Tree.numNodes() + (getMaxDepth() > 0 ? ("\nMax depth of tree: " + getMaxDepth()) : ("")); } } /** * Returns graph describing the tree. * * @return the graph describing the tree * @throws Exception if graph can't be computed */ @Override public String graph() throws Exception { if (m_Tree == null) { throw new Exception("RandomTree: No model built yet."); } StringBuffer resultBuff = new StringBuffer(); m_Tree.toGraph(resultBuff, 0, null); String result = "digraph RandomTree {\n" + "edge [style=bold]\n" + resultBuff.toString() + "\n}\n"; return result; } /** * Returns the type of graph this classifier represents. * * @return Drawable.TREE */ @Override public int graphType() { return Drawable.TREE; } /** * Builds the classifier to generate a partition. */ @Override public void generatePartition(Instances data) throws Exception { buildClassifier(data); } /** * Computes array that indicates node membership. 
Array locations are * allocated based on breadth-first exploration of the tree. */ @Override public double[] getMembershipValues(Instance instance) throws Exception { if (m_zeroR != null) { double[] m = new double[1]; m[0] = instance.weight(); return m; } else { // Set up array for membership values double[] a = new double[numElements()]; // Initialize queues Queue<Double> queueOfWeights = new LinkedList<Double>(); Queue<Tree> queueOfNodes = new LinkedList<Tree>(); queueOfWeights.add(instance.weight()); queueOfNodes.add(m_Tree); int index = 0; // While the queue is not empty while (!queueOfNodes.isEmpty()) { a[index++] = queueOfWeights.poll(); Tree node = queueOfNodes.poll(); // Is node a leaf? if (node.m_Attribute <= -1) { continue; } // Compute weight distribution double[] weights = new double[node.m_Successors.length]; if (instance.isMissing(node.m_Attribute)) { System.arraycopy(node.m_Prop, 0, weights, 0, node.m_Prop.length); } else if (m_Info.attribute(node.m_Attribute).isNominal()) { weights[(int) instance.value(node.m_Attribute)] = 1.0; } else { if (instance.value(node.m_Attribute) < node.m_SplitPoint) { weights[0] = 1.0; } else { weights[1] = 1.0; } } for (int i = 0; i < node.m_Successors.length; i++) { queueOfNodes.add(node.m_Successors[i]); queueOfWeights.add(a[index - 1] * weights[i]); } } return a; } } /** * Returns the number of elements in the partition. */ @Override public int numElements() throws Exception { if (m_zeroR != null) { return 1; } return m_Tree.numNodes(); } /** * The inner class for dealing with the tree. */ public static int nodeCount=0; // reset in RegressionTree buildClassifier public static int lastNode=0; protected class Tree implements Serializable { public int leafNodeID; /** For serialization */ private static final long serialVersionUID = 3549573538656522569L; /** The subtrees appended to this tree. */ protected Tree[] m_Successors; /** The attribute to split on. */ protected int m_Attribute = -1; /** The split point. 
*/ protected double m_SplitPoint = Double.NaN; /** The proportions of training instances going down each branch. */ protected double[] m_Prop = null; /** * Class probabilities from the training data in the nominal case. Holds the * mean in the numeric case. */ protected double[] m_ClassDistribution = null; /** * Holds the sum of squared errors and the weight in the numeric case. */ protected double[] m_Distribution = null; /** * Backfits the given data into the tree. */ public void backfitData(Instances data) throws Exception { double totalWeight = 0; double totalSumSquared = 0; // Compute initial class counts double[] classProbs = new double[data.numClasses()]; for (int i = 0; i < data.numInstances(); i++) { Instance inst = data.instance(i); if (data.classAttribute().isNominal()) { classProbs[(int) inst.classValue()] += inst.weight(); totalWeight += inst.weight(); } else { classProbs[0] += inst.classValue() * inst.weight(); totalSumSquared += inst.classValue() * inst.classValue() * inst.weight(); totalWeight += inst.weight(); } } double trainVariance = 0; if (data.classAttribute().isNumeric()) { trainVariance = RandomRegressionTree.singleVariance(classProbs[0], totalSumSquared, totalWeight) / totalWeight; classProbs[0] /= totalWeight; } // Fit data into tree backfitData(data, classProbs, totalWeight); } /** * Computes class distribution of an instance using the decision tree. 
 *
     * @param instance the instance to compute the distribution for
     * @return the computed class distribution
     * @throws Exception if computation fails
     */
    public double[] distributionForInstance(Instance instance) throws Exception {

      double[] returnedDist = null;

      if(m_Attribute > -1) {
        // Node is not a leaf
        if (instance.isMissing(m_Attribute)) {
          // Value is missing: fractionally send the instance down every
          // branch and combine the children's distributions by m_Prop.
          returnedDist = new double[m_Info.numClasses()];

          // Split instance up
          for (int i = 0; i < m_Successors.length; i++) {
            double[] help = m_Successors[i].distributionForInstance(instance);
            // A null child result means the child is an empty node; its
            // share of the weight is simply dropped here.
            if (help != null) {
              for (int j = 0; j < help.length; j++) {
                returnedDist[j] += m_Prop[i] * help[j];
              }
            }
          }
        } else if (m_Info.attribute(m_Attribute).isNominal()) {
          // For nominal attributes: follow the branch for the value.
          returnedDist = m_Successors[(int) instance.value(m_Attribute)]
            .distributionForInstance(instance);
        } else {
          // For numeric attributes: left if below the split point.
          if (instance.value(m_Attribute) < m_SplitPoint) {
            returnedDist = m_Successors[0].distributionForInstance(instance);
          } else {
            returnedDist = m_Successors[1].distributionForInstance(instance);
          }
        }
      }

      // Node is a leaf or successor is empty?
      if ((m_Attribute == -1) || (returnedDist == null)) {
        // Record which leaf answered this query in the class-level static.
        lastNode=leafNodeID;
//        System.out.println("Setting last node ="+leafNodeID);
        // Is node empty?
        if (m_ClassDistribution == null) {
          if (getAllowUnclassifiedInstances()) {
            double[] result = new double[m_Info.numClasses()];
            if (m_Info.classAttribute().isNumeric()) {
              result[0] = Utils.missingValue();
            }
            return result;
          } else {
            // Signal "no prediction" to the caller / parent recursion.
            return null;
          }
        }

        // Else return normalized distribution (normalisation only applies
        // to nominal classes; numeric leaves hold the mean directly).
        double[] normalizedDistribution = m_ClassDistribution.clone();
        if (m_Info.classAttribute().isNominal()) {
          Utils.normalize(normalizedDistribution);
        }
        return normalizedDistribution;
      } else {
        return returnedDist;
      }
    }

    /**
     * Outputs one node for graph.
* * @param text the buffer to append the output to * @param num unique node id * @return the next node id * @throws Exception if generation fails */ public int toGraph(StringBuffer text, int num) throws Exception { int maxIndex = Utils.maxIndex(m_ClassDistribution); String classValue = m_Info.classAttribute().isNominal() ? m_Info .classAttribute().value(maxIndex) : Utils.doubleToString( m_ClassDistribution[0], 2); num++; if (m_Attribute == -1) { text.append("N" + Integer.toHexString(hashCode()) + " [label=\"" + num + ": " + classValue + "\"" + "shape=box]\n"); } else { text.append("N" + Integer.toHexString(hashCode()) + " [label=\"" + num + ": " + classValue + "\"]\n"); for (int i = 0; i < m_Successors.length; i++) { text.append("N" + Integer.toHexString(hashCode()) + "->" + "N" + Integer.toHexString(m_Successors[i].hashCode()) + " [label=\"" + m_Info.attribute(m_Attribute).name()); if (m_Info.attribute(m_Attribute).isNumeric()) { if (i == 0) { text.append(" < " + Utils.doubleToString(m_SplitPoint, 2)); } else { text.append(" >= " + Utils.doubleToString(m_SplitPoint, 2)); } } else { text.append(" = " + m_Info.attribute(m_Attribute).value(i)); } text.append("\"]\n"); num = m_Successors[i].toGraph(text, num); } } return num; } /** * Outputs a leaf. 
* * @return the leaf as string * @throws Exception if generation fails */ protected String leafString() throws Exception { double sum = 0, maxCount = 0; int maxIndex = 0; double classMean = 0; double avgError = 0; if (m_ClassDistribution != null) { if (m_Info.classAttribute().isNominal()) { sum = Utils.sum(m_ClassDistribution); maxIndex = Utils.maxIndex(m_ClassDistribution); maxCount = m_ClassDistribution[maxIndex]; } else { classMean = m_ClassDistribution[0]; if (m_Distribution[1] > 0) { avgError = m_Distribution[0] / m_Distribution[1]; } } } if (m_Info.classAttribute().isNumeric()) { return " : " + Utils.doubleToString(classMean, 2) + " (" + Utils.doubleToString(m_Distribution[1], 2) + "/" + Utils.doubleToString(avgError, 2) + ")"; } return " : " + m_Info.classAttribute().value(maxIndex) + " (" + Utils.doubleToString(sum, 2) + "/" + Utils.doubleToString(sum - maxCount, 2) + ")"; } /** * Recursively outputs the tree. * * @param level the current level of the tree * @return the generated subtree */ protected String toString(int level) { try { StringBuffer text = new StringBuffer(); if (m_Attribute == -1) { // Output leaf info return leafString(); } else if (m_Info.attribute(m_Attribute).isNominal()) { // For nominal attributes for (int i = 0; i < m_Successors.length; i++) { text.append("\n"); for (int j = 0; j < level; j++) { text.append("| "); } text.append(m_Info.attribute(m_Attribute).name() + " = " + m_Info.attribute(m_Attribute).value(i)); text.append(m_Successors[i].toString(level + 1)); } } else { // For numeric attributes text.append("\n"); for (int j = 0; j < level; j++) { text.append("| "); } text.append(m_Info.attribute(m_Attribute).name() + " < " + Utils.doubleToString(m_SplitPoint, 2)); text.append(m_Successors[0].toString(level + 1)); text.append("\n"); for (int j = 0; j < level; j++) { text.append("| "); } text.append(m_Info.attribute(m_Attribute).name() + " >= " + Utils.doubleToString(m_SplitPoint, 2)); text.append(m_Successors[1].toString(level + 
1)); } return text.toString(); } catch (Exception e) { e.printStackTrace(); return "RandomTree: tree can't be printed"; } } /** * Recursively backfits data into the tree. * * @param data the data to work with * @param classProbs the class distribution * @throws Exception if generation fails */ protected void backfitData(Instances data, double[] classProbs, double totalWeight) throws Exception { // Make leaf if there are no training instances if (data.numInstances() == 0) { m_Attribute = -1; m_ClassDistribution = null; if (data.classAttribute().isNumeric()) { m_Distribution = new double[2]; } m_Prop = null; return; } double priorVar = 0; if (data.classAttribute().isNumeric()) { // Compute prior variance double totalSum = 0, totalSumSquared = 0, totalSumOfWeights = 0; for (int i = 0; i < data.numInstances(); i++) { Instance inst = data.instance(i); totalSum += inst.classValue() * inst.weight(); totalSumSquared += inst.classValue() * inst.classValue() * inst.weight(); totalSumOfWeights += inst.weight(); } priorVar = RandomRegressionTree.singleVariance(totalSum, totalSumSquared, totalSumOfWeights); } // Check if node doesn't contain enough instances or is pure // or maximum depth reached m_ClassDistribution = classProbs.clone(); /* * if (Utils.sum(m_ClassDistribution) < 2 * m_MinNum || * Utils.eq(m_ClassDistribution[Utils.maxIndex(m_ClassDistribution)], * Utils .sum(m_ClassDistribution))) { * * // Make leaf m_Attribute = -1; m_Prop = null; return; } */ // Are we at an inner node if (m_Attribute > -1) { // Compute new weights for subsets based on backfit data m_Prop = new double[m_Successors.length]; for (int i = 0; i < data.numInstances(); i++) { Instance inst = data.instance(i); if (!inst.isMissing(m_Attribute)) { if (data.attribute(m_Attribute).isNominal()) { m_Prop[(int) inst.value(m_Attribute)] += inst.weight(); } else { m_Prop[(inst.value(m_Attribute) < m_SplitPoint) ? 
0 : 1] += inst .weight(); } } } // If we only have missing values we can make this node into a leaf if (Utils.sum(m_Prop) <= 0) { m_Attribute = -1; m_Prop = null; if (data.classAttribute().isNumeric()) { m_Distribution = new double[2]; m_Distribution[0] = priorVar; m_Distribution[1] = totalWeight; } return; } // Otherwise normalize the proportions Utils.normalize(m_Prop); // Split data Instances[] subsets = splitData(data); // Go through subsets for (int i = 0; i < subsets.length; i++) { // Compute distribution for current subset double[] dist = new double[data.numClasses()]; double sumOfWeights = 0; for (int j = 0; j < subsets[i].numInstances(); j++) { if (data.classAttribute().isNominal()) { dist[(int) subsets[i].instance(j).classValue()] += subsets[i] .instance(j).weight(); } else { dist[0] += subsets[i].instance(j).classValue() * subsets[i].instance(j).weight(); sumOfWeights += subsets[i].instance(j).weight(); } } if (sumOfWeights > 0) { dist[0] /= sumOfWeights; } // Backfit subset m_Successors[i].backfitData(subsets[i], dist, totalWeight); } // If unclassified instances are allowed, we don't need to store the // class distribution if (getAllowUnclassifiedInstances()) { m_ClassDistribution = null; return; } for (int i = 0; i < subsets.length; i++) { if (m_Successors[i].m_ClassDistribution == null) { return; } } m_ClassDistribution = null; // If we have a least two non-empty successors, we should keep this tree /* * int nonEmptySuccessors = 0; for (int i = 0; i < subsets.length; i++) * { if (m_Successors[i].m_ClassDistribution != null) { * nonEmptySuccessors++; if (nonEmptySuccessors > 1) { return; } } } * * // Otherwise, this node is a leaf or should become a leaf * m_Successors = null; m_Attribute = -1; m_Prop = null; return; */ } } /** * Recursively generates a tree. 
* * @param data the data to work with * @param classProbs the class distribution * @param attIndicesWindow the attribute window to choose attributes from * @param random random number generator for choosing random attributes * @param depth the current depth * @throws Exception if generation fails */ protected void buildTree(Instances data, double[] classProbs, int[] attIndicesWindow, double totalWeight, Random random, int depth, double minVariance) throws Exception { // Make leaf if there are no training instances if (data.numInstances() == 0) { m_Attribute = -1; m_ClassDistribution = null; m_Prop = null; if (data.classAttribute().isNumeric()) { m_Distribution = new double[2]; } leafNodeID=nosLeafNodes++; return; } double priorVar = 0; if (data.classAttribute().isNumeric()) { // Compute prior variance double totalSum = 0, totalSumSquared = 0, totalSumOfWeights = 0; for (int i = 0; i < data.numInstances(); i++) { Instance inst = data.instance(i); totalSum += inst.classValue() * inst.weight(); totalSumSquared += inst.classValue() * inst.classValue() * inst.weight(); totalSumOfWeights += inst.weight(); } priorVar = RandomRegressionTree.singleVariance(totalSum, totalSumSquared, totalSumOfWeights); } // Check if node doesn't contain enough instances or is pure // or maximum depth reached if (data.classAttribute().isNominal()) { totalWeight = Utils.sum(classProbs); } // System.err.println("Total weight " + totalWeight); // double sum = Utils.sum(classProbs); if (totalWeight < 2 * m_MinNum || // Nominal case (data.classAttribute().isNominal() && Utils.eq( classProbs[Utils.maxIndex(classProbs)], Utils.sum(classProbs))) || // Numeric case (data.classAttribute().isNumeric() && priorVar / totalWeight < minVariance) || // check tree depth ((getMaxDepth() > 0) && (depth >= getMaxDepth()))) { // Make leaf m_Attribute = -1; m_ClassDistribution = classProbs.clone(); if (data.classAttribute().isNumeric()) { m_Distribution = new double[2]; m_Distribution[0] = priorVar; 
m_Distribution[1] = totalWeight; } leafNodeID=nosLeafNodes++; m_Prop = null; return; } // Compute class distributions and value of splitting // criterion for each attribute double val = -Double.MAX_VALUE; double split = -Double.MAX_VALUE; double[][] bestDists = null; double[] bestProps = null; int bestIndex = 0; // Handles to get arrays out of distribution method double[][] props = new double[1][0]; double[][][] dists = new double[1][0][0]; double[][] totalSubsetWeights = new double[data.numAttributes()][0]; // Investigate K random attributes int attIndex = 0; int windowSize = attIndicesWindow.length; int k = m_KValue; boolean gainFound = false; double[] tempNumericVals = new double[data.numAttributes()]; while ((windowSize > 0) && (k-- > 0 || !gainFound)) { int chosenIndex = random.nextInt(windowSize); attIndex = attIndicesWindow[chosenIndex]; // shift chosen attIndex out of window attIndicesWindow[chosenIndex] = attIndicesWindow[windowSize - 1]; attIndicesWindow[windowSize - 1] = attIndex; windowSize--; double currSplit = data.classAttribute().isNominal() ? distribution( props, dists, attIndex, data) : numericDistribution(props, dists, attIndex, totalSubsetWeights, data, tempNumericVals); double currVal = data.classAttribute().isNominal() ? gain(dists[0], priorVal(dists[0])) : tempNumericVals[attIndex]; if (Utils.gr(currVal, 0)) { gainFound = true; } if ((currVal > val) || ((!getBreakTiesRandomly()) && (currVal == val) && (attIndex < bestIndex))) { val = currVal; bestIndex = attIndex; split = currSplit; bestProps = props[0]; bestDists = dists[0]; } } // Find best attribute m_Attribute = bestIndex; // Any useful split found? 
if (Utils.gr(val, 0)) { // Build subtrees m_SplitPoint = split; m_Prop = bestProps; Instances[] subsets = splitData(data); m_Successors = new Tree[bestDists.length]; double[] attTotalSubsetWeights = totalSubsetWeights[bestIndex]; for (int i = 0; i < bestDists.length; i++) { m_Successors[i] = new Tree(); m_Successors[i].buildTree(subsets[i], bestDists[i], attIndicesWindow, data.classAttribute().isNominal() ? 0 : attTotalSubsetWeights[i], random, depth + 1, minVariance); } // If all successors are non-empty, we don't need to store the class // distribution boolean emptySuccessor = false; for (int i = 0; i < subsets.length; i++) { if (m_Successors[i].m_ClassDistribution == null) { emptySuccessor = true; break; } } if (emptySuccessor) { m_ClassDistribution = classProbs.clone(); } } else { // Make leaf m_Attribute = -1; m_ClassDistribution = classProbs.clone(); if (data.classAttribute().isNumeric()) { m_Distribution = new double[2]; m_Distribution[0] = priorVar; m_Distribution[1] = totalWeight; } } } /** * Computes size of the tree. * * @return the number of nodes */ public int numNodes() { if (m_Attribute == -1) { return 1; } else { int size = 1; for (Tree m_Successor : m_Successors) { size += m_Successor.numNodes(); } return size; } } /** * Splits instances into subsets based on the given split. * * @param data the data to work with * @return the subsets of instances * @throws Exception if something goes wrong */ protected Instances[] splitData(Instances data) throws Exception { // Allocate array of Instances objects Instances[] subsets = new Instances[m_Prop.length]; for (int i = 0; i < m_Prop.length; i++) { subsets[i] = new Instances(data, data.numInstances()); } // Go through the data for (int i = 0; i < data.numInstances(); i++) { // Get instance Instance inst = data.instance(i); // Does the instance have a missing value? 
if (inst.isMissing(m_Attribute)) { // Split instance up for (int k = 0; k < m_Prop.length; k++) { if (m_Prop[k] > 0) { Instance copy = (Instance) inst.copy(); copy.setWeight(m_Prop[k] * inst.weight()); subsets[k].add(copy); } } // Proceed to next instance continue; } // Do we have a nominal attribute? if (data.attribute(m_Attribute).isNominal()) { subsets[(int) inst.value(m_Attribute)].add(inst); // Proceed to next instance continue; } // Do we have a numeric attribute? if (data.attribute(m_Attribute).isNumeric()) { subsets[(inst.value(m_Attribute) < m_SplitPoint) ? 0 : 1].add(inst); // Proceed to next instance continue; } // Else throw an exception throw new IllegalArgumentException("Unknown attribute type"); } // Save memory for (int i = 0; i < m_Prop.length; i++) { subsets[i].compactify(); } // Return the subsets return subsets; } /** * Computes numeric class distribution for an attribute * * @param props * @param dists * @param att * @param subsetWeights * @param data * @param vals * @return * @throws Exception if a problem occurs */ protected double numericDistribution(double[][] props, double[][][] dists, int att, double[][] subsetWeights, Instances data, double[] vals) throws Exception { double splitPoint = Double.NaN; Attribute attribute = data.attribute(att); double[][] dist = null; double[] sums = null; double[] sumSquared = null; double[] sumOfWeights = null; double totalSum = 0, totalSumSquared = 0, totalSumOfWeights = 0; int indexOfFirstMissingValue = data.numInstances(); if (attribute.isNominal()) { sums = new double[attribute.numValues()]; sumSquared = new double[attribute.numValues()]; sumOfWeights = new double[attribute.numValues()]; int attVal; for (int i = 0; i < data.numInstances(); i++) { Instance inst = data.instance(i); if (inst.isMissing(att)) { // Skip missing values at this stage if (indexOfFirstMissingValue == data.numInstances()) { indexOfFirstMissingValue = i; } continue; } attVal = (int) inst.value(att); sums[attVal] += 
inst.classValue() * inst.weight(); sumSquared[attVal] += inst.classValue() * inst.classValue() * inst.weight(); sumOfWeights[attVal] += inst.weight(); } totalSum = Utils.sum(sums); totalSumSquared = Utils.sum(sumSquared); totalSumOfWeights = Utils.sum(sumOfWeights); } else { // For numeric attributes sums = new double[2]; sumSquared = new double[2]; sumOfWeights = new double[2]; double[] currSums = new double[2]; double[] currSumSquared = new double[2]; double[] currSumOfWeights = new double[2]; // Sort data data.sort(att); // Move all instances into second subset for (int j = 0; j < data.numInstances(); j++) { Instance inst = data.instance(j); if (inst.isMissing(att)) { // Can stop as soon as we hit a missing value indexOfFirstMissingValue = j; break; } currSums[1] += inst.classValue() * inst.weight(); currSumSquared[1] += inst.classValue() * inst.classValue() * inst.weight(); currSumOfWeights[1] += inst.weight(); } totalSum = currSums[1]; totalSumSquared = currSumSquared[1]; totalSumOfWeights = currSumOfWeights[1]; sums[1] = currSums[1]; sumSquared[1] = currSumSquared[1]; sumOfWeights[1] = currSumOfWeights[1]; // Try all possible split points double currSplit = data.instance(0).value(att); double currVal, bestVal = Double.MAX_VALUE; for (int i = 0; i < indexOfFirstMissingValue; i++) { Instance inst = data.instance(i); if (inst.value(att) > currSplit) { currVal = RandomRegressionTree.variance(currSums, currSumSquared, currSumOfWeights); if (currVal < bestVal) { bestVal = currVal; splitPoint = (inst.value(att) + currSplit) / 2.0; // Check for numeric precision problems if (splitPoint <= currSplit) { splitPoint = inst.value(att); } for (int j = 0; j < 2; j++) { sums[j] = currSums[j]; sumSquared[j] = currSumSquared[j]; sumOfWeights[j] = currSumOfWeights[j]; } } } currSplit = inst.value(att); double classVal = inst.classValue() * inst.weight(); double classValSquared = inst.classValue() * classVal; currSums[0] += classVal; currSumSquared[0] += classValSquared; 
currSumOfWeights[0] += inst.weight(); currSums[1] -= classVal; currSumSquared[1] -= classValSquared; currSumOfWeights[1] -= inst.weight(); } } // Compute weights props[0] = new double[sums.length]; for (int k = 0; k < props[0].length; k++) { props[0][k] = sumOfWeights[k]; } if (!(Utils.sum(props[0]) > 0)) { for (int k = 0; k < props[0].length; k++) { props[0][k] = 1.0 / props[0].length; } } else { Utils.normalize(props[0]); } // Distribute weights for instances with missing values for (int i = indexOfFirstMissingValue; i < data.numInstances(); i++) { Instance inst = data.instance(i); for (int j = 0; j < sums.length; j++) { sums[j] += props[0][j] * inst.classValue() * inst.weight(); sumSquared[j] += props[0][j] * inst.classValue() * inst.classValue() * inst.weight(); sumOfWeights[j] += props[0][j] * inst.weight(); } totalSum += inst.classValue() * inst.weight(); totalSumSquared += inst.classValue() * inst.classValue() * inst.weight(); totalSumOfWeights += inst.weight(); } // Compute final distribution dist = new double[sums.length][data.numClasses()]; for (int j = 0; j < sums.length; j++) { if (sumOfWeights[j] > 0) { dist[j][0] = sums[j] / sumOfWeights[j]; } else { dist[j][0] = totalSum / totalSumOfWeights; } } // Compute variance gain double priorVar = singleVariance(totalSum, totalSumSquared, totalSumOfWeights); double var = variance(sums, sumSquared, sumOfWeights); double gain = priorVar - var; // Return distribution and split point subsetWeights[att] = sumOfWeights; dists[0] = dist; vals[att] = gain; return splitPoint; } /** * Computes class distribution for an attribute. 
* * @param props * @param dists * @param att the attribute index * @param data the data to work with * @throws Exception if something goes wrong */ protected double distribution(double[][] props, double[][][] dists, int att, Instances data) throws Exception { double splitPoint = Double.NaN; Attribute attribute = data.attribute(att); double[][] dist = null; int indexOfFirstMissingValue = data.numInstances(); if (attribute.isNominal()) { // For nominal attributes dist = new double[attribute.numValues()][data.numClasses()]; for (int i = 0; i < data.numInstances(); i++) { Instance inst = data.instance(i); if (inst.isMissing(att)) { // Skip missing values at this stage if (indexOfFirstMissingValue == data.numInstances()) { indexOfFirstMissingValue = i; } continue; } dist[(int) inst.value(att)][(int) inst.classValue()] += inst.weight(); } } else { // For numeric attributes double[][] currDist = new double[2][data.numClasses()]; dist = new double[2][data.numClasses()]; // Sort data data.sort(att); // Move all instances into second subset for (int j = 0; j < data.numInstances(); j++) { Instance inst = data.instance(j); if (inst.isMissing(att)) { // Can stop as soon as we hit a missing value indexOfFirstMissingValue = j; break; } currDist[1][(int) inst.classValue()] += inst.weight(); } // Value before splitting double priorVal = priorVal(currDist); // Save initial distribution for (int j = 0; j < currDist.length; j++) { System.arraycopy(currDist[j], 0, dist[j], 0, dist[j].length); } // Try all possible split points double currSplit = data.instance(0).value(att); double currVal, bestVal = -Double.MAX_VALUE; for (int i = 0; i < indexOfFirstMissingValue; i++) { Instance inst = data.instance(i); double attVal = inst.value(att); // Can we place a sensible split point here? if (attVal > currSplit) { // Compute gain for split point currVal = gain(currDist, priorVal); // Is the current split point the best point so far? 
if (currVal > bestVal) { // Store value of current point bestVal = currVal; // Save split point splitPoint = (attVal + currSplit) / 2.0; // Check for numeric precision problems if (splitPoint <= currSplit) { splitPoint = attVal; } // Save distribution for (int j = 0; j < currDist.length; j++) { System.arraycopy(currDist[j], 0, dist[j], 0, dist[j].length); } } // Update value currSplit = attVal; } // Shift over the weight int classVal = (int) inst.classValue(); currDist[0][classVal] += inst.weight(); currDist[1][classVal] -= inst.weight(); } } // Compute weights for subsets props[0] = new double[dist.length]; for (int k = 0; k < props[0].length; k++) { props[0][k] = Utils.sum(dist[k]); } if (Utils.eq(Utils.sum(props[0]), 0)) { for (int k = 0; k < props[0].length; k++) { props[0][k] = 1.0 / props[0].length; } } else { Utils.normalize(props[0]); } // Distribute weights for instances with missing values for (int i = indexOfFirstMissingValue; i < data.numInstances(); i++) { Instance inst = data.instance(i); if (attribute.isNominal()) { // Need to check if attribute value is missing if (inst.isMissing(att)) { for (int j = 0; j < dist.length; j++) { dist[j][(int) inst.classValue()] += props[0][j] * inst.weight(); } } } else { // Can be sure that value is missing, so no test required for (int j = 0; j < dist.length; j++) { dist[j][(int) inst.classValue()] += props[0][j] * inst.weight(); } } } // Return distribution and split point dists[0] = dist; return splitPoint; } /** * Computes value of splitting criterion before split. * * @param dist the distributions * @return the splitting criterion */ protected double priorVal(double[][] dist) { return ContingencyTables.entropyOverColumns(dist); } /** * Computes value of splitting criterion after split. 
* * @param dist the distributions * @param priorVal the splitting criterion * @return the gain after the split */ protected double gain(double[][] dist, double priorVal) { return priorVal - ContingencyTables.entropyConditionedOnRows(dist); } /** * Returns the revision string. * * @return the revision */ public String getRevision() { return RevisionUtils.extract("$Revision: 11907 $"); } /** * Outputs one node for graph. * * @param text the buffer to append the output to * @param num the current node id * @param parent the parent of the nodes * @return the next node id * @throws Exception if something goes wrong */ protected int toGraph(StringBuffer text, int num, Tree parent) throws Exception { num++; if (m_Attribute == -1) { text.append("N" + Integer.toHexString(Tree.this.hashCode()) + " [label=\"" + num + Utils.backQuoteChars(leafString()) + "\"" + " shape=box]\n"); } else { text.append("N" + Integer.toHexString(Tree.this.hashCode()) + " [label=\"" + num + ": " + Utils.backQuoteChars(m_Info.attribute(m_Attribute).name()) + "\"]\n"); for (int i = 0; i < m_Successors.length; i++) { text.append("N" + Integer.toHexString(Tree.this.hashCode()) + "->" + "N" + Integer.toHexString(m_Successors[i].hashCode()) + " [label=\""); if (m_Info.attribute(m_Attribute).isNumeric()) { if (i == 0) { text.append(" < " + Utils.doubleToString(m_SplitPoint, 2)); } else { text.append(" >= " + Utils.doubleToString(m_SplitPoint, 2)); } } else { text.append(" = " + Utils.backQuoteChars(m_Info.attribute(m_Attribute).value(i))); } text.append("\"]\n"); num = m_Successors[i].toGraph(text, num, this); } } return num; } } /** * Computes variance for subsets. 
* * @param s * @param sS * @param sumOfWeights * @return the variance */ protected static double variance(double[] s, double[] sS, double[] sumOfWeights) { double var = 0; for (int i = 0; i < s.length; i++) { if (sumOfWeights[i] > 0) { var += singleVariance(s[i], sS[i], sumOfWeights[i]); } } return var; } /** * Computes the variance for a single set * * @param s * @param sS * @param weight the weight * @return the variance */ protected static double singleVariance(double s, double sS, double weight) { return sS - ((s * s) / weight); } /** * Main method for this class. * * @param argv the commandline parameters */ public static void main(String[] argv) { runClassifier(new RandomRegressionTree(), argv); } } }
81,141
30.401703
147
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/interval_based/RISE.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.interval_based; import evaluation.evaluators.CrossValidationEvaluator; import evaluation.evaluators.SingleSampleEvaluator; import evaluation.storage.ClassifierResults; import evaluation.tuning.ParameterSpace; import experiments.ClassifierLists; import experiments.data.DatasetLists; import fileIO.FullAccessOutFile; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.classifiers.Tuneable; import tsml.transformers.*; import tsml.transformers.FFT; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.classifiers.trees.RandomTree; import weka.core.*; import java.io.*; import java.util.ArrayList; import java.util.Collections; import java.util.Random; import java.util.concurrent.TimeUnit; import tsml.classifiers.Checkpointable; import tsml.classifiers.TrainTimeContractable; import static experiments.data.DatasetLoading.loadDataNullable; /** <!-- globalinfo-start --> * Variation of Random Interval Spectral Ensemble [lines2018time]. 
* * This implementation extends the original to include: * down sampling * stabilisation (constraining interval length to within some distance of previous length) * check pointing * contracting * * Overview: Input n series length m * for each tree * sample interval of random size * transform interval into ACF and PS features * build tree on concatenated features * ensemble the trees with majority vote <!-- globalinfo-end --> <!-- technical-bibtex-start --> * Bibtex * <pre> * @article{lines2018time, * title={Time series classification with HIVE-COTE: The hierarchical vote collective of transformation-based ensembles}, * author={Lines, Jason and Taylor, Sarah and Bagnall, Anthony}, * journal={ACM Transactions on Knowledge Discovery from Data (TKDD)}, * volume={12}, * number={5}, * pages={52}, * year={2018}, * publisher={ACM} * } * </pre> <!-- technical-bibtex-end --> <!-- options-start --> <!-- options-end --> * @author Michael Flynn and Tony Bagnall * @date 19/02/19 * updated 4/3/20 to conform to tsml standards * updated 10/3/20 to allow for internal CV estimate of train acc, same structure as TSF **/ public class RISE extends EnhancedAbstractClassifier implements TrainTimeContractable, TechnicalInformationHandler, Checkpointable, Tuneable { boolean tune = false; TransformType[] transforms = {TransformType.ACF_FFT}; //maxIntervalLength is used when contract is set. Via the timer the interval space is constricted to prevent breach // on contract. private int maxIntervalLength = 0; private int minIntervalLength = 16; private int numClassifiers = 500; //Global variable due to serialisation. private int classifiersBuilt = 0; //Used in conjunction with contract to enforce a minimum number of trees. private int minNumTrees = 0; //Enable random downsampling of intervals. private boolean downSample = false; private boolean loadedFromFile = false; //stabilise can be used to limit the neighbourhood of potential interval sizes (based on previous interval size). 
This // increases the robustness of the timing model and subsequently improves contract adherence. private int stabilise = 0; //Used in ACF. private final int DEFAULT_MAXLAG = 100; private final int DEFAULT_MINLAG = 1; //Given a contract and the need to obtain train accuracy, perForBag defines what percentage of the contract is assigned //bagging (Excess time added onto time remaining to complete full build). private double perForBag = 0.5; private Timer timer = null; private boolean trainTimeContract = false; private long trainContractTimeNanos = 0; private Classifier classifier = new RandomTree(); private ArrayList<Classifier> baseClassifiers = null; //A list of: rawIntervalLength[0], startIndex[1], downSampleFactor[2]; for each interval. private ArrayList<int[]> intervalsInfo = null; //The indexs of each interval (after any downsampling). private ArrayList<ArrayList<Integer>> intervalsAttIndexes = null; private ArrayList<Integer> rawIntervalIndexes = null; private PowerSpectrum PS; private TransformType transformType = TransformType.ACF_FFT; private Instances data = null; /**** Checkpointing variables *****/ private boolean checkpoint = false; private String checkpointPath = null; private long checkpointTime = 0; //Time between checkpoints in nanosecs private long lastCheckpointTime = 0; //Time since last checkpoint in nanos. //Updated work public boolean printStartEndPoints = false; private ArrayList<int[]> startEndPoints = null; private int intervalMethod = 3; private int partitions = 1; /** * Constructor * @param seed */ public RISE(long seed){ super(CAN_ESTIMATE_OWN_PERFORMANCE); super.setSeed((int)seed); timer = new Timer(); this.setTransformType(TransformType.ACF_FFT); } public RISE(){ this(0); } public enum TransformType {ACF, FFT, MFCC, SPEC, AF, ACF_FFT, MFCC_FFT, MFCC_ACF, SPEC_MFCC, AF_MFCC, AF_FFT, AF_FFT_MFCC} /** * Function used to reset internal state of classifier. * Is called at beginning of buildClassifier. 
Can subsequently call buildClassifier multiple times per instance of RISE.
     */
    private void initialise(){
        // Clear the timing model and all per-build ensemble state so a
        // fresh build starts from scratch.
        timer.reset();
        baseClassifiers = new ArrayList<>();
        intervalsInfo = new ArrayList<>();
        intervalsAttIndexes = new ArrayList<>();
        rawIntervalIndexes = new ArrayList<>();
        startEndPoints = new ArrayList<>();
        PS = new PowerSpectrum();
        classifiersBuilt = 0;
    }

    /**
     * Sets number of trees.
     * @param numClassifiers
     */
    public void setNumClassifiers(int numClassifiers){
        this.numClassifiers = numClassifiers;
    }

    /**
     * Sets the strategy used to select interval start/end points.
     * NOTE(review): the valid values and their meanings are not visible in
     * this chunk — confirm against the code that reads intervalMethod.
     * @param x interval selection method identifier
     */
    public void setIntervalMethod(int x){
        this.intervalMethod = x;
    }

    /**
     * Sets minimum number of trees RISE will build if contracted.
     * @param minNumTrees
     */
    public void setMinNumTrees(int minNumTrees){
        this.minNumTrees = minNumTrees;
    }

    /**
     * Boolean to set downSample.
     * If true down sample rate is randomly selected per interval.
     * @param bool
     */
    public void setDownSample(boolean bool){
        this.downSample = bool;
    }

    /**
     * Parameter to control width of interval space with prior interval length centered.
     * e.g. priorIntervalLength = 53
     *      width = 7
     *      possibleWidths = 50 < x < 56 (inclusive)
     * Has the effect of constraining the space around the previous interval length, contributing to a more robust
     * timing model via preventing leveraging in large problems.
     * @param width
     */
    public void setStabilise(int width){
        this.stabilise = width;
    }

    /**
     * Location of folder in which to save timing model information.
     * @param modelOutPath
     */
    public void setModelOutPath(String modelOutPath){
        timer.modelOutPath = modelOutPath;
    }

    /**
     * Default transform combined ACF+PS
     * @param transformType
     */
    public void setTransformType(TransformType transformType){
        this.transformType = transformType;
    }

    /**
     * Pass in instance of {@code weka.classifiers.trees} to replace default base classifier.
     * @param classifier
     */
    public void setBaseClassifier(Classifier classifier){
        this.classifier = classifier;
    }

    // Proportion of a train-time contract reserved for the OOB/bagging train estimate.
    public void setPercentageOfContractForBagging(double x){
        perForBag = x;
    }

    /**
     * RISE will attempt to load serialisation file on method call using the seed set on instantiation as file identifier.
     * If successful this object is returned to state in which it was at creation of serialisation file.
     * @param path Path to folder in which to save serialisation files.
     */
    @Override //Checkpointable
    public boolean setCheckpointPath(String path) {
        boolean validPath=Checkpointable.super.createDirectories(path);
        printLineDebug(" Writing checkpoint to "+path);
        if(validPath){
            this.checkpointPath = path;
            checkpoint = true;
        }
        return validPath;
    }

    // Maximum ACF lag: a quarter of the series length, capped at DEFAULT_MAXLAG (100).
    public int getMaxLag(Instances instances){
        int maxLag = (instances.numAttributes()-1)/4;
        if(DEFAULT_MAXLAG < maxLag)
            maxLag = DEFAULT_MAXLAG;
        return maxLag;
    }

    public TransformType getTransformType(){
        return this.transformType;
    }

    /**
     * Method controlling interval length, interval start position and down sample factor (if set).
     * Takes into account stabilisation parameter if set.
     * @param maxIntervalLength maximum length interval can be in order to adhere to minimum number of trees and contract constraints.
     * @param instanceLength
     * @return int[] of size three:
     * int[0] = rawIntervalLength
     * int[1] = startIndex
     * int[2] = downSampleFactor
     */
    private int[] selectIntervalAttributes(int maxIntervalLength, int instanceLength){
        //rawIntervalLength[0], startIndex[1], downSampleFactor[2];
        int[] intervalInfo = new int[3];
        //Produce powers of 2 ArrayList for interval selection.
        ArrayList<Integer> powersOf2 = new ArrayList<>();
        for (int j = maxIntervalLength; j >= 1; j--) {
            // If j is a power of 2
            if ((j & (j - 1)) == 0){
                powersOf2.add(j);
            }
        }
        // Sort ascending: the loop above collected them in descending order.
        Collections.reverse(powersOf2);
        int index = 0;
        //If stabilise is set.
        if(stabilise > 0 && !rawIntervalIndexes.isEmpty()){
            //Check stabilise is valid value.
            // Clamp stabilise into [2, powersOf2.size()-1] and force it odd so the
            // window is symmetric around the previous interval-length index.
            if(stabilise > powersOf2.size()-1){
                stabilise = powersOf2.size()-1;
                while(stabilise % 2 == 0){
                    stabilise --;
                }
            }else if(stabilise < 2){
                stabilise = 2;
                while(stabilise % 2 == 0){
                    stabilise ++;
                }
            }else{
                while(stabilise % 2 == 0){
                    stabilise ++;
                }
            }
            //Select random value between 0 - (stabilise - 1)
            //Map value onto valid interval length based on previous length, correcting for occasions in which previous
            //length = 0 | length = maxLength.
            int option = rand.nextInt(stabilise - 1);
            if(rawIntervalIndexes.get(rawIntervalIndexes.size()-1) - ((stabilise - 1)/2) <= 2){
                index = option + 2;
            }
            if (rawIntervalIndexes.get(rawIntervalIndexes.size()-1) - ((stabilise - 1)/2) > 2
                    && rawIntervalIndexes.get(rawIntervalIndexes.size()-1) + ((stabilise - 1)/2) < powersOf2.size() - 1) {
                option = option - ((stabilise - 1)/2);
                index = rawIntervalIndexes.get(rawIntervalIndexes.size()-1) + option;
            }
            if(rawIntervalIndexes.get(rawIntervalIndexes.size()-1) + ((stabilise - 1)/2) >= powersOf2.size() - 1) {
                index = (powersOf2.size() - 1) - option;
            }
        }else{
            //If stabilise is not set.
            //Select a new interval length at random (Selects in linear space and maps onto closest power of two).
            int temp = rand.nextInt(powersOf2.get(powersOf2.size() - 1)) + 1;
            // Round temp up to the next power of two.
            while((temp & (temp - 1)) != 0)
                temp++;
            for (int i = 0; i < powersOf2.size() && temp != powersOf2.get(i); i++) {
                index = i;
            }
            index++;
        }
        //If this tree is one of first four trees use tree number as powersOf2 index. Establishes robust foundation for
        //timing model. However, logic should be refactored to check this before executing prior code.
        try{
            if(classifiersBuilt < 4){
                index = (classifiersBuilt + 2) < powersOf2.size()-1 ? (classifiersBuilt + 2) : powersOf2.size()-1;
            }
            if(classifiersBuilt == 4){
                index = powersOf2.size()-1;
            }
            intervalInfo[0] = powersOf2.get(index);
        }catch(Exception e){
            // NOTE(review): swallows any out-of-range index and leaves intervalInfo[0]
            // at 0 — confirm this fallback is intended rather than a hidden failure.
            System.out.println(e);
        }
        //Select random start index to take interval from.
        if ((instanceLength - intervalInfo[0]) != 0 ) {
            intervalInfo[1] = rand.nextInt(instanceLength - intervalInfo[0]);
        }else{
            // Interval spans the whole series; only valid start is 0.
            intervalInfo[1] = 0;
        }
        //Select down sample factor such that it is a smaller or equal power of 2 whilst ensuring resulting interval
        //length is also a power of 2.
        //e.g. if length is 8 down sample factor can be 1, 2, 4 or 8. Results in step lengths of, 8(8/1), 4(8/2), 2(8/4) or 1(8/8)
        //and total interval lengths of 1, 2, 4 or 8.
        if (downSample) {
            intervalInfo[2] = powersOf2.get(rand.nextInt(index) + 1);
        }else{
            intervalInfo[2] = intervalInfo[0];
        }
        this.intervalsInfo.add(intervalInfo);
        this.rawIntervalIndexes.add(index);
        return intervalInfo;
    }

    // Builds a single-instance Instances object holding classifier classifierNum's
    // interval slice of testInstance, padding with placeholder attributes (value 0.0)
    // where the interval runs past the end of the series.
    private Instances produceIntervalInstance(Instance testInstance, int classifierNum){
        Instances intervalInstances = null;
        ArrayList<Attribute>attributes = new ArrayList<>();
        int nearestPowerOfTwo = startEndPoints.get(classifierNum)[1] - startEndPoints.get(classifierNum)[0];
        for (int i = 0; i < nearestPowerOfTwo; i ++) {
            // Reuse the original attribute while in range; otherwise pad with a new one.
            Attribute att = i + startEndPoints.get(classifierNum)[0] < testInstance.numAttributes() - 1
                    ? testInstance.attribute(i + startEndPoints.get(classifierNum)[0])
                    : new Attribute("att"+ (i + 1 + startEndPoints.get(classifierNum)[0]));
            attributes.add(att);
        }
        attributes.add(testInstance.attribute(testInstance.numAttributes()-1));
        intervalInstances = new Instances(testInstance.dataset().relationName(), attributes, 1);
        double[] intervalInstanceValues = new double[nearestPowerOfTwo + 1];
        for (int j = 0; j < nearestPowerOfTwo; j++) {
            double value = j + startEndPoints.get(classifierNum)[0] < testInstance.numAttributes() - 1 ?
                    testInstance.value(j + startEndPoints.get(classifierNum)[0]) : 0.0;
            intervalInstanceValues[j] = value;
        }
        DenseInstance intervalInstance = new DenseInstance(intervalInstanceValues.length);
        intervalInstance.replaceMissingValues(intervalInstanceValues);
        intervalInstance.setValue(intervalInstanceValues.length-1, testInstance.classValue());
        intervalInstances.add(intervalInstance);
        intervalInstances.setClassIndex(intervalInstances.numAttributes() - 1);
        return intervalInstances;
    }

    /**
     * Transforms instances into either PS ACF or concatenation based on {@code setTransformType}
     * @param instances
     * @return transformed instances.
     */
    private Instances transformInstances(Instances instances, TransformType transformType){
        Instances temp = null;
        switch(transformType){
            case ACF:
                ACF acf = new ACF();
                acf.setNormalized(false);
                try {
                    temp = acf.transform(instances);
                } catch (Exception e) {
                    // NOTE(review): "-1/4" binds as -(1/4) == 0, so this prints numAttributes()
                    // unchanged rather than (numAttributes()-1)/4 — message only, no behaviour impact.
                    System.out.println(" Exception in Combo="+e+" max lag =" + (instances.get(0).numAttributes()-1/4));
                }
                break;
            case FFT:
                Fast_FFT Fast_FFT = new Fast_FFT();
                try {
                    // Pad to twice the nearest power of two above the series length.
                    int nfft = (int) FFT.MathsPower2.roundPow2(instances.numAttributes()-1) * 2;
                    Fast_FFT.setNFFT(nfft);
                    temp = Fast_FFT.transform(instances);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                break;
            case MFCC:
                MFCC MFCC= new MFCC();
                try {
                    temp = MFCC.transform(instances);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                break;
            case SPEC:
                Spectrogram spec = new Spectrogram();
                try{
                    temp = spec.transform(instances);
                }catch(Exception e){
                    e.printStackTrace();
                }
                break;
            case AF:
                AudioFeatures af = new AudioFeatures();
                try{
                    temp = af.transform(instances);
                }catch(Exception e){
                    e.printStackTrace();
                }
                break;
            case MFCC_FFT:
                // Compound cases: transform separately, strip the first copy's class
                // attribute, merge side by side, then restore the class index.
                temp = transformInstances(instances, TransformType.MFCC);
                temp.setClassIndex(-1);
                temp.deleteAttributeAt(temp.numAttributes()-1);
                temp = Instances.mergeInstances(temp, transformInstances(instances, TransformType.FFT));
                temp.setClassIndex(temp.numAttributes()-1);
                break;
            case MFCC_ACF:
                temp = transformInstances(instances, TransformType.MFCC);
                temp.setClassIndex(-1);
                temp.deleteAttributeAt(temp.numAttributes()-1);
                temp = Instances.mergeInstances(temp, transformInstances(instances, TransformType.ACF));
                temp.setClassIndex(temp.numAttributes()-1);
                break;
            case ACF_FFT:
                temp = transformInstances(instances, TransformType.FFT);
                temp.setClassIndex(-1);
                temp.deleteAttributeAt(temp.numAttributes()-1);
                temp = Instances.mergeInstances(temp, transformInstances(instances, TransformType.ACF));
                temp.setClassIndex(temp.numAttributes()-1);
                break;
            case SPEC_MFCC:
                temp = transformInstances(instances, TransformType.SPEC);
                temp.setClassIndex(-1);
                temp.deleteAttributeAt(temp.numAttributes()-1);
                temp = Instances.mergeInstances(temp, transformInstances(instances, TransformType.MFCC));
                temp.setClassIndex(temp.numAttributes()-1);
                break;
            case AF_MFCC:
                temp = transformInstances(instances, TransformType.AF);
                temp.setClassIndex(-1);
                temp.deleteAttributeAt(temp.numAttributes()-1);
                temp = Instances.mergeInstances(temp, transformInstances(instances, TransformType.MFCC));
                temp.setClassIndex(temp.numAttributes()-1);
                break;
            case AF_FFT:
                temp = transformInstances(instances, TransformType.AF);
                temp.setClassIndex(-1);
                temp.deleteAttributeAt(temp.numAttributes()-1);
                temp = Instances.mergeInstances(temp, transformInstances(instances, TransformType.FFT));
                temp.setClassIndex(temp.numAttributes()-1);
                break;
            case AF_FFT_MFCC:
                // Three-way concatenation: AF + FFT first, then MFCC appended.
                temp = transformInstances(instances, TransformType.AF);
                temp.setClassIndex(-1);
                temp.deleteAttributeAt(temp.numAttributes()-1);
                temp = Instances.mergeInstances(temp, transformInstances(instances, TransformType.FFT));
                temp.setClassIndex(-1);
                temp.deleteAttributeAt(temp.numAttributes()-1);
                temp = Instances.mergeInstances(temp, transformInstances(instances, TransformType.MFCC));
                temp.setClassIndex(temp.numAttributes()-1);
                break;
        }
        return temp;
    }

    // NOTE(review): the filename argument is ignored — the checkpoint is always written
    // to checkpointPath + "RISE" + seed + ".ser" (via a temp file) — confirm intended.
    @Override // Checkpointable
    public void saveToFile(String filename) throws Exception{
        Checkpointable.super.saveToFile(checkpointPath + "RISE" + seed + "temp.ser");
        // Write to a temp file first, then replace the previous checkpoint so a crash
        // mid-write cannot corrupt the only checkpoint on disk.
        File file = new File(checkpointPath + "RISE" + seed + "temp.ser");
        File file2 = new File(checkpointPath + "RISE" + seed + ".ser");
        file2.delete();
        file.renameTo(file2);
    }

    /**
     * Legacy method for old version of serialise
     * @param seed
     */
    private void saveToFile(long seed){
        try{
            System.out.println("Serialising classifier.");
            File file = new File(checkpointPath + (checkpointPath.isEmpty()? "SERIALISE_cRISE_" : "\\SERIALISE_cRISE_") + seed + ".txt");
            file.setWritable(true, false);
            file.setExecutable(true, false);
            file.setReadable(true, false);
            FileOutputStream f = new FileOutputStream(file);
            ObjectOutputStream o = new ObjectOutputStream(f);
            // Record elapsed time so a reloaded run can resume its contract mid-way.
            this.timer.forestElapsedTime = System.nanoTime() - this.timer.forestStartTime;
            o.writeObject(this);
            o.close();
            f.close();
            System.out.println("Serialisation completed: " + classifiersBuilt + " trees");
        } catch (IOException ex) {
            System.out.println("Serialisation failed: " + ex);
        }
    }

    // Legacy counterpart to saveToFile(long): deserialises a RISE object previously
    // written with the same seed, or returns null on failure.
    private RISE readSerialise(long seed){
        ObjectInputStream oi = null;
        RISE temp = null;
        try {
            FileInputStream fi = new FileInputStream(new File( checkpointPath + (checkpointPath.isEmpty()? "SERIALISE_cRISE_" : "\\SERIALISE_cRISE_") + seed + ".txt"));
            oi = new ObjectInputStream(fi);
            temp = (RISE)oi.readObject();
            oi.close();
            fi.close();
            System.out.println("File load successful: " + ((RISE)temp).classifiersBuilt + " trees.");
        } catch (IOException | ClassNotFoundException ex) {
            System.out.println("File load: failed.");
        }
        return temp;
    }

    // Copies all build state from a deserialised RISE checkpoint into this object so
    // buildClassifier can resume where the previous run stopped.
    @Override
    public void copyFromSerObject(Object temp){
        RISE rise;
        try{
            rise=(RISE)temp;
        }catch(Exception ex){
            throw new RuntimeException(" Trying to load from ser object thar is not a RISE object. QUITING at copyFromSerObject");
        }
        this.baseClassifiers = rise.baseClassifiers;
        this.classifier = rise.classifier;
        this.data = rise.data;
        this.downSample = rise.downSample;
        this.PS = rise.PS;
        this.intervalsAttIndexes = rise.intervalsAttIndexes;
        this.intervalsInfo = rise.intervalsInfo;
        this.maxIntervalLength = rise.maxIntervalLength;
        this.minIntervalLength = rise.minIntervalLength;
        this.numClassifiers = rise.numClassifiers;
        this.rand = rise.rand;
        this.rawIntervalIndexes = rise.rawIntervalIndexes;
        this.checkpointPath = rise.checkpointPath;
        this.stabilise = rise.stabilise;
        this.timer = rise.timer;
        this.transformType = rise.transformType;
        this.classifiersBuilt = rise.classifiersBuilt;
        this.startEndPoints = rise.startEndPoints;
        this.loadedFromFile = true;
        printDebug("Variable assignment: successful.");
        printDebug("Classifiers built = "+classifiersBuilt);
    }

    /**
     * Method to maintain timing, takes into consideration that object may have been read from file and therefore be
     * mid way through a contract.
     * @return nanoseconds already consumed by a resumed run (0 for a fresh build).
     */
    private long getTime(){
        long time = 0;
        if(loadedFromFile){
            time = timer.forestElapsedTime;
        }else{
            time = 0;
        }
        return time;
    }

    /**
     * Build classifier
     * @param trainingData whole training set.
     * @throws Exception
     */
    @Override
    public void buildClassifier(Instances trainingData) throws Exception {
        // Can classifier handle the data?
        getCapabilities().testWithFail(trainingData);
        //Start forest timer.
        timer.forestStartTime = System.nanoTime();
        long startTime=timer.forestStartTime;
        File file = new File(checkpointPath + "RISE" + seed + ".ser");
        //if checkpointing and serialised files exist load said files
        if (checkpoint && file.exists()){
            //path checkpoint files will be saved to
            printLineDebug("Loading from checkpoint file");
            loadFromFile(checkpointPath + "RISE" + seed + ".ser");
            //        checkpointTimeElapsed -= System.nanoTime()-t1;
            this.loadedFromFile = true;
        }
        //If not loaded from file e.g. Starting fresh experiment.
        if (!loadedFromFile) {
            //Just used for getParameters.
            data = trainingData;
            //(re)Initialise all variables to account for multiple calls of buildClassifier.
            initialise();
            //Check min & max interval lengths are valid.
            if(maxIntervalLength > trainingData.numAttributes()-1 || maxIntervalLength <= 0){
                maxIntervalLength = trainingData.numAttributes()-1;
            }
            if(minIntervalLength >= trainingData.numAttributes()-1 || minIntervalLength <= 0){
                minIntervalLength = (trainingData.numAttributes()-1)/2;
            }
            lastCheckpointTime=timer.forestStartTime;
            printLineDebug("Building RISE: minIntervalLength = " + minIntervalLength+" max number of trees ="+numClassifiers);
        }
        if(getTuneTransform()){
            tuneTransform(data);
        }
        if (getEstimateOwnPerformance()) {
            long est1 = System.nanoTime();
            estimateOwnPerformance(data);
            long est2 = System.nanoTime();
            trainResults.setErrorEstimateTime(est2 - est1);
            // Reset and rebuild from scratch for the final model; scale the remaining
            // contract back up to the full build budget.
            initialise();
            timer.reset();
            if(trainTimeContract)
                this.setTrainTimeLimit(TimeUnit.NANOSECONDS, (long) ((timer.forestTimeLimit * (1.0 / perForBag))));
        }
        // Main build loop: always build at least one tree, then keep building while
        // both trees and contract time remain.
        for (; classifiersBuilt < numClassifiers && ((classifiersBuilt==0)||(System.nanoTime() - timer.forestStartTime) < (timer.forestTimeLimit - getTime())); classifiersBuilt++) {
            if(debug && classifiersBuilt%100==0)
                printLineDebug("Building RISE tree "+classifiersBuilt+" time taken = "+(System.nanoTime()-startTime)+" contract ="+trainContractTimeNanos+" nanos");
            //Start tree timer.
            timer.treeStartTime = System.nanoTime();
            //Compute maximum interval length given time remaining.
            if(trainTimeContract) {
                timer.buildModel();
                maxIntervalLength = (int) timer.getFeatureSpace((timer.forestTimeLimit) - (System.nanoTime() - (timer.forestStartTime - getTime())));
            }
            //Produce intervalInstances from trainingData using interval attributes.
            Instances intervalInstances;
            intervalInstances = produceIntervalInstances(maxIntervalLength, trainingData);
            //Transform instances.
            if (transformType != null) {
                intervalInstances = transformInstances(intervalInstances, transformType);
            }
            //Add independent variable to model (length of interval).
            timer.makePrediciton(intervalInstances.numAttributes() - 1);
            timer.independentVariables.add(intervalInstances.numAttributes() - 1);
            //Build classifier with intervalInstances.
            if(classifier instanceof RandomTree){
                ((RandomTree)classifier).setKValue(intervalInstances.numAttributes() - 1);
            }
            baseClassifiers.add(AbstractClassifier.makeCopy(classifier));
            baseClassifiers.get(baseClassifiers.size()-1).buildClassifier(intervalInstances);
            //Add dependent variable to model (time taken).
            timer.dependentVariables.add(System.nanoTime() - timer.treeStartTime);
            //Serialise every 100 trees by default (if set to checkpoint).
            if (checkpoint){
                if(checkpointTime>0)    //Timed checkpointing
                {
                    if(System.nanoTime()-lastCheckpointTime>checkpointTime){
                        saveToFile(checkpointPath);
                        //                checkpoint(startTime);
                        lastCheckpointTime=System.nanoTime();
                    }
                }
                else {    //Default checkpoint every 100 trees
                    if(classifiersBuilt %100 == 0 && classifiersBuilt >0)
                        saveToFile(checkpointPath);
                }
            }
        }
        if(classifiersBuilt==0){//Not enough time to build a single classifier
            throw new Exception((" ERROR in RISE, no trees built, this should not happen. Contract time ="+trainContractTimeNanos/1000000000));
        }
        if (checkpoint) {
            saveToFile(checkpointPath);
        }
        if (timer.modelOutPath != null) {
            timer.saveModelToCSV(trainingData.relationName());
        }
        timer.forestElapsedTime = (System.nanoTime() - timer.forestStartTime);
        trainResults.setTimeUnit(TimeUnit.NANOSECONDS);
        trainResults.setParas(getParameters());
        // Build time excludes any time spent estimating our own performance.
        if(getEstimateOwnPerformance()){
            trainResults.setBuildTime(timer.forestElapsedTime - trainResults.getErrorEstimateTime());
        }
        else{
            trainResults.setBuildTime(timer.forestElapsedTime);
        }
        trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime()+trainResults.getErrorEstimateTime());
        printLineDebug("*************** Finished RISE Build with "+classifiersBuilt+" Trees built ***************");
        /*for (int i = 0; i < this.startEndPoints.size(); i++) {
            System.out.println(this.startEndPoints.get(i)[0] + ", " + this.startEndPoints.get(i)[1]);
        }*/
    }

    // Estimates train accuracy either by out-of-bag estimation (OOB) or internal
    // cross-validation (CV / NONE), writing predictions into trainResults.
    private void estimateOwnPerformance(Instances data) throws Exception{
        trainResults.setTimeUnit(TimeUnit.NANOSECONDS);
        trainResults.setEstimatorName(getClassifierName());
        trainResults.setDatasetName(data.relationName());
        if(trainEstimateMethod == TrainEstimateMethod.OOB) {
            trainResults.setFoldID(seed);
            int numTrees = 500;
            int bagProp = 100;
            int treeCount = 0;
            Classifier[] classifiers = new Classifier[numTrees];
            // How many bags each instance was out-of-bag for (used to average votes).
            int[] timesInTest = new int[data.size()];
            double[][][] distributions = new double[numTrees][data.size()][(int) data.numClasses()];
            double[][] finalDistributions = new double[data.size()][(int) data.numClasses()];
            int[][] bags;
            ArrayList[] testIndexs = new ArrayList[numTrees];
            double[] bagAccuracies = new double[numTrees];
            if (trainTimeContract) {
                // Reserve perForBag of the contract for this OOB estimate.
                this.setTrainTimeLimit(timer.forestTimeLimit, TimeUnit.NANOSECONDS);
                this.timer.forestTimeLimit = (long) ((double) timer.forestTimeLimit * perForBag);
            }
            bags = generateBags(numTrees, bagProp, data);
            for (; treeCount < numTrees && (System.nanoTime() - timer.forestStartTime) < (timer.forestTimeLimit - getTime()); treeCount++) {
                //Start tree timer.
                timer.treeStartTime = System.nanoTime();
                //Compute maximum interval length given time remaining.
                timer.buildModel();
                maxIntervalLength = (int) timer.getFeatureSpace((timer.forestTimeLimit) - (System.nanoTime() - (timer.forestStartTime - getTime())));
                Instances intervalInstances = produceIntervalInstances(maxIntervalLength, data);
                intervalInstances = transformInstances(intervalInstances, transformType);
                //Add independent variable to model (length of interval).
                timer.makePrediciton(intervalInstances.numAttributes() - 1);
                timer.independentVariables.add(intervalInstances.numAttributes() - 1);
                // Split this bag into in-bag train (with repetition) and out-of-bag test sets.
                Instances trainHeader = new Instances(intervalInstances, 0);
                Instances testHeader = new Instances(intervalInstances, 0);
                ArrayList<Integer> indexs = new ArrayList<>();
                for (int j = 0; j < bags[treeCount].length; j++) {
                    if (bags[treeCount][j] == 0) {
                        testHeader.add(intervalInstances.get(j));
                        timesInTest[j]++;
                        indexs.add(j);
                    }
                    for (int k = 0; k < bags[treeCount][j]; k++) {
                        trainHeader.add(intervalInstances.get(j));
                    }
                }
                testIndexs[treeCount] = indexs;
                classifiers[treeCount] = new RandomTree();
                ((RandomTree) classifiers[treeCount]).setKValue(trainHeader.numAttributes() - 1);
                try {
                    classifiers[treeCount].buildClassifier(trainHeader);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                // Score this tree on its out-of-bag instances.
                for (int j = 0; j < testHeader.size(); j++) {
                    try {
                        distributions[treeCount][indexs.get(j)] = classifiers[treeCount].distributionForInstance(testHeader.get(j));
                        if (classifiers[treeCount].classifyInstance(testHeader.get(j)) == testHeader.get(j).classValue()) {
                            bagAccuracies[treeCount]++;
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
                bagAccuracies[treeCount] /= testHeader.size();
                trainHeader.clear();
                testHeader.clear();
                timer.dependentVariables.add(System.nanoTime() - timer.treeStartTime);
            }
            // Sum the per-tree OOB distributions for each instance.
            for (int i = 0; i < bags.length; i++) {
                for (int j = 0; j < bags[i].length; j++) {
                    if (bags[i][j] == 0) {
                        for (int k = 0; k < finalDistributions[j].length; k++) {
                            finalDistributions[j][k] +=
distributions[i][j][k]; } } } } for (int i = 0; i < finalDistributions.length; i++) { if (timesInTest[i] > 1) { for (int j = 0; j < finalDistributions[i].length; j++) { finalDistributions[i][j] /= timesInTest[i]; } } } //Add to trainResults. for (int i = 0; i < finalDistributions.length; i++) { double predClass = findIndexOfMax(finalDistributions[i], rand); trainResults.addPrediction(data.get(i).classValue(), finalDistributions[i], predClass, 0, ""); } trainResults.setEstimatorName("RISEOOB"); trainResults.setErrorEstimateMethod("OOB"); } else if(trainEstimateMethod == TrainEstimateMethod.CV || trainEstimateMethod == TrainEstimateMethod.NONE) { /** Defaults to 10 or numInstances, whichever is smaller. * Interface TrainAccuracyEstimate * Could this be handled better? */ int numFolds = setNumberOfFolds(data); CrossValidationEvaluator cv = new CrossValidationEvaluator(); if (seedClassifier) cv.setSeed(seed * 5); cv.setNumFolds(numFolds); RISE rise = new RISE(); //NEED TO SET PARAMETERS // rise.copyParameters(this); if (seedClassifier) rise.setSeed(seed * 100); if (trainTimeContract) {//Set the contract for each fold rise.setTrainTimeLimit((long)(((double) timer.forestTimeLimit * perForBag) / (numFolds - 2))); } rise.setEstimateOwnPerformance(false); trainResults = cv.evaluate(rise, data); long est2 = System.nanoTime(); trainResults.setEstimatorName("RISECV"); trainResults.setErrorEstimateMethod("CV_" + numFolds); } this.timer.forestElapsedTime = System.nanoTime() - this.timer.forestStartTime; } private int[][] generateBags(int numBags, int bagProp, Instances data){ int[][] bags = new int[numBags][data.size()]; Random random = new Random(seed); for (int i = 0; i < numBags; i++) { for (int j = 0; j < data.size() * (bagProp/100.0); j++) { bags[i][random.nextInt(data.size())]++; } } return bags; } private Instances produceIntervalInstances(int maxIntervalLength, Instances trainingData) { Instances intervalInstances; ArrayList<Attribute>attributes = new ArrayList<>(); 
        ArrayList<Integer> intervalAttIndexes = new ArrayList<>();
        // Pick the interval for this tree and append it to the running list.
        startEndPoints = selectStartEndPoints(startEndPoints, intervalMethod);
        int nearestPowerOfTwo = startEndPoints.get(startEndPoints.size() - 1)[1] - startEndPoints.get(startEndPoints.size() - 1)[0];
        for (int i = 0; i < nearestPowerOfTwo; i ++) {
            // Reuse the original attribute while in range; otherwise pad with a new one.
            Attribute att = i + startEndPoints.get(startEndPoints.size() - 1)[0] < trainingData.numAttributes() - 1
                    ? trainingData.attribute(i + startEndPoints.get(startEndPoints.size() - 1)[0])
                    : new Attribute("att" + (i + 1 + startEndPoints.get(startEndPoints.size() - 1)[0]));
            attributes.add(att);
        }
        attributes.add(trainingData.attribute(trainingData.numAttributes()-1));
        intervalInstances = new Instances(trainingData.relationName(), attributes, trainingData.size());
        double[] intervalInstanceValues = new double[nearestPowerOfTwo + 1];
        for (int i = 0; i < trainingData.size(); i++) {
            for (int j = 0; j < nearestPowerOfTwo; j++) {
                double value = j + startEndPoints.get(startEndPoints.size() - 1)[0] < trainingData.numAttributes() - 1 ?
                        trainingData.get(i).value(j + startEndPoints.get(startEndPoints.size() - 1)[0]) : 0.0;
                intervalInstanceValues[j] = value;
            }
            DenseInstance intervalInstance = new DenseInstance(intervalInstanceValues.length);
            intervalInstance.replaceMissingValues(intervalInstanceValues);
            intervalInstance.setValue(intervalInstanceValues.length-1, trainingData.get(i).classValue());
            intervalInstances.add(intervalInstance);
        }
        intervalInstances.setClassIndex(intervalInstances.numAttributes() - 1);
        return intervalInstances;
    }

    // Appends a new [start, end] attribute interval to startEndPoints using strategy x:
    // 0 = whole series first then random; 1 = random start then random length;
    // 2 = random end then random length back; 3 = coin-flip between start- and
    // end-anchored; 4 = evenly spaced anchor points over `partitions` partitions.
    // Returns the (mutated) list.
    private ArrayList<int[]> selectStartEndPoints(ArrayList<int[]> startEndPoints, int x){
        if(x == 0){
            startEndPoints.add(new int[2]);
            if(startEndPoints.size() == 1){
                // First interval covers the full series.
                startEndPoints.get(startEndPoints.size() - 1)[0] = 0;
                startEndPoints.get(startEndPoints.size() - 1)[1] = data.numAttributes() - 2;
            }else{
                startEndPoints.get(startEndPoints.size() - 1)[0]=rand.nextInt((data.numAttributes() - 2)- minIntervalLength);
                //This avoid calling nextInt(0)
                if(startEndPoints.get(startEndPoints.size() - 1)[0] == (data.numAttributes() - 2) - 1 - minIntervalLength)
                    startEndPoints.get(startEndPoints.size() - 1)[1] = data.numAttributes() - 1 - 1;
                else{
                    startEndPoints.get(startEndPoints.size() - 1)[1] = rand.nextInt((data.numAttributes() - 2) - startEndPoints.get(startEndPoints.size() - 1)[0]);
                    if(startEndPoints.get(startEndPoints.size() - 1)[1] < minIntervalLength)
                        startEndPoints.get(startEndPoints.size() - 1)[1] = minIntervalLength;
                    startEndPoints.get(startEndPoints.size() - 1)[1] += startEndPoints.get(startEndPoints.size() - 1)[0];
                }
            }
        }
        if(x == 1){
            startEndPoints.add(new int[2]);
            startEndPoints.get(startEndPoints.size() - 1)[0] = rand.nextInt((data.numAttributes() - 2) - minIntervalLength);
            //Start point
            // Length capped by both maxIntervalLength and the room left after the start point.
            int range = (data.numAttributes() - 1) - startEndPoints.get(startEndPoints.size() - 1)[0] > maxIntervalLength ?
                    maxIntervalLength : (data.numAttributes() - 2) - startEndPoints.get(startEndPoints.size() - 1)[0];
            int length = rand.nextInt(range - minIntervalLength) + minIntervalLength;
            startEndPoints.get(startEndPoints.size() - 1)[1] = startEndPoints.get(startEndPoints.size() - 1)[0] + length;
        }
        if(x == 2){
            // Strategy 2: choose the end point first, then a length back from it.
            startEndPoints.add(new int[2]);
            startEndPoints.get(startEndPoints.size() - 1)[1] = rand.nextInt((data.numAttributes() - 2) - minIntervalLength) + minIntervalLength;
            int range = startEndPoints.get(startEndPoints.size() - 1)[1] > maxIntervalLength ? maxIntervalLength : startEndPoints.get(startEndPoints.size() - 1)[1];
            int length;
            if (range - minIntervalLength == 0)
                length = 3;
            else
                length = rand.nextInt(range - minIntervalLength) + minIntervalLength;
            startEndPoints.get(startEndPoints.size() - 1)[0] = startEndPoints.get(startEndPoints.size() - 1)[1] - length;
        }
        if(x == 3){
            // Strategy 3: coin-flip between the start-anchored and end-anchored forms.
            startEndPoints.add(new int[2]);
            if (rand.nextBoolean()) {
                startEndPoints.get(startEndPoints.size() - 1)[0] = rand.nextInt((data.numAttributes() - 1) - minIntervalLength);
                //Start point
                printLineDebug(" start end points ="+startEndPoints.get(startEndPoints.size() - 1)[0]);
                int range = (data.numAttributes() - 1) - startEndPoints.get(startEndPoints.size() - 1)[0] > maxIntervalLength ?
                        maxIntervalLength : (data.numAttributes() - 1) - startEndPoints.get(startEndPoints.size() - 1)[0];
                printLineDebug("TRUE range = "+range+" min = "+minIntervalLength+" "+" max = "+maxIntervalLength);
                int length = rand.nextInt(range - minIntervalLength) + minIntervalLength;
                startEndPoints.get(startEndPoints.size() - 1)[1] = startEndPoints.get(startEndPoints.size() - 1)[0] + length;
            } else {
                startEndPoints.get(startEndPoints.size() - 1)[1] = rand.nextInt((data.numAttributes() - 1) - minIntervalLength) + minIntervalLength;
                //Start point
                int range = startEndPoints.get(startEndPoints.size() - 1)[1] > maxIntervalLength ?
                        maxIntervalLength : startEndPoints.get(startEndPoints.size() - 1)[1];
                int length;
                printLineDebug("FALSE range = "+range+" min = "+minIntervalLength+" ");
                if (range - minIntervalLength == 0)
                    length = 3;
                else
                    length = rand.nextInt(range - minIntervalLength) + minIntervalLength;
                startEndPoints.get(startEndPoints.size() - 1)[0] = startEndPoints.get(startEndPoints.size() - 1)[1] - length;
            }
        }
        if(x == 4){
            //Need to error check partitions <= numAtts;
            int n = startEndPoints.size();
            int length = rand.nextInt((data.numAttributes() - 2) - minIntervalLength);
            //Which one we're on.
            double temp = (double)n/(double)partitions;
            temp = temp - Math.floor(temp);
            temp = temp + ((1.0/partitions)/2);
            //Anchor point.
            double anchorPoint = 0;
            if(partitions == 1){
                anchorPoint = (data.numAttributes() -1) / 2;
            }else {
                anchorPoint = Math.floor(((data.numAttributes() - 1) / 1.0) * temp);
            }
            //StartEndPoints.
            startEndPoints.add(new int[2]);
            startEndPoints.get(startEndPoints.size() - 1)[0] = (int) Math.floor(anchorPoint - (length * temp));
            startEndPoints.get(startEndPoints.size() - 1)[1] = startEndPoints.get(startEndPoints.size() - 1)[0] + length;
            //System.out.println("%: " + temp + "\tAnchor: " + anchorPoint + "\tStartEnd: " + (int) Math.floor(anchorPoint - (length * temp)) + " - " + (startEndPoints.get(startEndPoints.size() - 1)[0] + length));
        }
        if(printStartEndPoints){
            printStartEndPoints();
        }
        return startEndPoints;
    }

    // Debug helper: prints a 0/1 mask over the attributes showing the latest interval.
    private void printStartEndPoints() {
        for (int i = 0; i < data.numAttributes() - 1; i++) {
            if(i < startEndPoints.get(startEndPoints.size() - 1)[0] || i > startEndPoints.get(startEndPoints.size() - 1)[1]){
                System.out.print("0");
            }else{
                System.out.print("1");
            }
            if(i < data.numAttributes() - 1){
                System.out.print(", ");
            }
        }
        System.out.println();
    }

    private boolean getTuneTransform(){
        return tune;
    }

    // Candidate transforms considered by tuneTransform.
    public void setTransformsToTuneWith(TransformType[] transforms){
        this.transforms = transforms;
    }

    public void setTuneTransform(boolean x){
        tune = x;
    }

    // Evaluates each candidate transform with repeated 50/50 train/test splits and
    // fixes transformType to the best-scoring candidate.
    private void tuneTransform(Instances
            trainingData){
        System.out.println("Tuning");
        ClassifierResults cr = new ClassifierResults();
        double acc = 0.0;
        double cAcc = 0.0;
        int index = 0;
        int numFolds = 5;
        RISE c = new RISE();
        for (int i = 0; i < transforms.length; i++) {
            System.out.print(transforms[i] + "\t\t\t");
            for (int j = 0; j < numFolds; j++) {
                // Fresh 50/50 split per fold, seeded by the fold number.
                SingleSampleEvaluator sse = new SingleSampleEvaluator(j, false, false);
                ((RISE)c).setTransformType(transforms[i]);
                try {
                    sse.setPropInstancesInTrain(0.50);
                    cr = sse.evaluate(c, trainingData);
                    cAcc += cr.getAcc() * (1.0/numFolds);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            System.out.print(cAcc);
            if(cAcc > acc){
                System.out.print("\tTrue");
                acc = cAcc;
                index = i;
            }
            System.out.println();
            cAcc = 0;
        }
        // NOTE(review): indexes TransformType.values() with the winning position in
        // `transforms`; these only agree when `transforms` matches the enum declaration
        // order — confirm against setTransformsToTuneWith callers.
        this.setTransformType(TransformType.values()[index]);
    }

    /**
     * Classify one instance from test set.
     * @param instance the instance to be classified
     * @return double representing predicted class of test instance.
     * @throws Exception
     */
    @Override
    public double classifyInstance(Instance instance) throws Exception {
        double[] distribution = distributionForInstance(instance);
        return findIndexOfMax(distribution, rand);
    }

    /**
     * Distribution or probabilities over classes for one test instance.
     * @param testInstance
     * @return double array of size numClasses containing probabilities of test instance belonging to each class.
     * @throws Exception
     */
    @Override
    public double[] distributionForInstance(Instance testInstance) throws Exception {
        double[]distribution = new double[testInstance.numClasses()];
        //For every base classifier.
        for (int i = 0; i < baseClassifiers.size(); i++) {
            Instance intervalInstance = null;
            //Transform interval instance into PS, ACF, ACF_PS or ACF_PS_AR
            if (transformType != null) {
                try{
                    intervalInstance = transformInstances(produceIntervalInstance(testInstance, i), transformType).firstInstance();
                }catch(Exception e){
                    // NOTE(review): the catch simply retries the identical call — if the
                    // first attempt threw, this will almost certainly throw again. Confirm intent.
                    intervalInstance = transformInstances(produceIntervalInstance(testInstance, i), transformType).firstInstance();
                }
            }
            // One vote per tree for its predicted class.
            distribution[(int)baseClassifiers.get(i).classifyInstance((intervalInstance))]++;
        }
        // Normalise votes to probabilities.
        if(baseClassifiers.size()>0) {
            for (int j = 0; j < testInstance.numClasses(); j++) {
                distribution[j] /= baseClassifiers.size();
            }
        }
        return distribution;
    }

    /**
     * Method returning all classifier parameters as a string.
     * for EnhancedAbstractClassifier. General format:
     * super.getParameters()+classifier parameters+contract information (if contracted)+train estimate information (if generated)
     * @return
     */
    @Override
    public String getParameters() {
        String result=super.getParameters();
        result+=", MaxNumTrees," + numClassifiers + ", NumTrees," + classifiersBuilt + ", MinIntervalLength," + minIntervalLength + ", Filters, " + this.transformType.toString() + ",BaseClassifier, "+classifier.getClass().getSimpleName();
        if(classifier instanceof RandomTree)
            result+=",AttsConsideredPerNode,"+((RandomTree)classifier).getKValue();
        if(trainTimeContract)
            result+= ",trainContractTimeNanos," +trainContractTimeNanos;
        else
            result+=",NoContract";
        if(trainTimeContract) {
            result += ", TimeModelCoefficients(time = a * x^2 + b * x + c)" + ", a, " + timer.a + ", b, " + timer.b + ", c, " + timer.c;
        }
        result+=",EstimateOwnPerformance,"+getEstimateOwnPerformance();
        if(getEstimateOwnPerformance()) {
            result += ",trainEstimateMethod," + trainEstimateMethod;
            if (trainEstimateMethod == TrainEstimateMethod.OOB && trainTimeContract)
                result += ", Percentage contract for OOB, " + perForBag;
        }
        return result;
    }

    /**
     * for interface TrainTimeEstimate
     * @param amount: time in nanoseconds
     */
    @Override
    public void setTrainTimeLimit(long amount) {
        printLineDebug(" RISE setting contract to "+amount);

        if(amount>0) {
            trainTimeContract = true;
            trainContractTimeNanos = amount;
            // Keep the Timer's internal limit in sync with the classifier-level contract.
            timer.setTimeLimit(amount);
        }
        else
            trainTimeContract = false;  // non-positive amount disables contracting
    }

    @Override
    public boolean withinTrainContract(long start) {
        // NOTE(review): compares the raw argument against the contract length, so callers
        // appear to pass an *elapsed* duration despite the parameter name "start"; also
        // returns false when no contract is set (trainContractTimeNanos defaults to 0).
        // Confirm against the build loop's call site.
        return start<trainContractTimeNanos;
    }

    /**
     * for interface TechnicalInformationHandler
     * @return citation details for the c-RISE paper
     */
    @Override
    public TechnicalInformation getTechnicalInformation() {
        TechnicalInformation result;
        result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE);
        result.setValue(TechnicalInformation.Field.AUTHOR, "Flynn M., Large J., Bagnall A.");
        result.setValue(TechnicalInformation.Field.YEAR, "2019");
        result.setValue(TechnicalInformation.Field.TITLE, "The Contract Random Interval Spectral Ensemble (c-RISE): The Effect of Contracting a Classifier on Accuracy.");
        result.setValue(TechnicalInformation.Field.JOURNAL, "LNCS");
        result.setValue(TechnicalInformation.Field.VOLUME, "11734");
        result.setValue(TechnicalInformation.Field.PAGES, "381-392");
        return result;
    }

    @Override //Tuneable
    public ParameterSpace getDefaultParameterSearchSpace(){
        // Search space used by the tuning framework: tree count (K), minimum interval
        // length (I) and the spectral transform to apply (T).
        ParameterSpace ps=new ParameterSpace();
        String[] numTrees={"100","200","300","400","500","600"};
        ps.addParameter("K", numTrees);
        String[] minInterv={"4","8","16","32","64","128"};
        ps.addParameter("I", minInterv);
        String[] transforms={"ACF","PS","ACF PS","ACF AR PS"};
        ps.addParameter("T", transforms);
        return ps;
    }

    /**
     * Parses a given list of options to set the parameters of the classifier.
* We use this for the tuning mechanism, setting parameters through setOptions <!-- options-start --> * Valid options are: <p/> * <pre> -T * Number of trees.</pre> * * <pre> -I * Number of intervals to fit.</pre> * <!-- options-end --> * * @param options the list of options as an array of strings * @throws Exception if an option is not supported */ @Override public void setOptions(String[] options) throws Exception { String numTreesString = Utils.getOption('T', options); if (numTreesString.length() != 0) this.setNumClassifiers(Integer.parseInt(numTreesString)); String classifier = Utils.getOption('C', options); if (numTreesString.length() != 0) this.setBaseClassifier(ClassifierLists.setClassifierClassic(classifier, this.seed)); String downSample = Utils.getOption('D', options); if (downSample.length() != 0) this.setDownSample(Boolean.parseBoolean(downSample)); String minNumTrees = Utils.getOption('M', options); if (minNumTrees.length() != 0) this.setMinNumTrees(Integer.parseInt(minNumTrees)); String perForBag = Utils.getOption('P', options); if (perForBag.length() != 0) this.setPercentageOfContractForBagging(Double.parseDouble(perForBag)); String serialisePath = Utils.getOption('S', options); if (serialisePath.length() != 0) this.setCheckpointPath(serialisePath); String stabilise = Utils.getOption('N', options); if (stabilise.length() != 0) this.setStabilise(Integer.parseInt(stabilise)); String transform = Utils.getOption('R', options); if (transform.length() != 0) { TransformType transformType = null; switch (transform) { case "FFT": transformType = TransformType.FFT; break; case "ACF": transformType = TransformType.ACF; break; case "COMBO": transformType = TransformType.ACF_FFT; break; } this.setTransformType(transformType); } String trainLimit = Utils.getOption('L', options); String trainLimitFormat = Utils.getOption('F', options); if (trainLimit.length() != 0 && trainLimitFormat.length() == 0){ this.setTrainTimeLimit(Long.parseLong(trainLimit)); } 
        if(trainLimit.length() != 0 && trainLimitFormat.length() != 0){
            // NOTE(review): an unrecognised -F value leaves timeUnit null, which will NPE in
            // setTrainTimeLimit(long, TimeUnit); consider a default case or explicit error.
            TimeUnit timeUnit = null;
            switch(trainLimitFormat){
                case "NANO":
                    timeUnit = TimeUnit.NANOSECONDS;
                    break;
                case "SEC":
                    timeUnit = TimeUnit.SECONDS;
                    break;
                case "HOUR":
                    timeUnit = TimeUnit.HOURS;
                    break;
                case "DAY":
                    timeUnit = TimeUnit.DAYS;
                    break;
            }
            this.setTrainTimeLimit(Long.parseLong(trainLimit), timeUnit);
        }
    }

    /**
     * Private inner class containing all logic pertaining to timing.
     * CRISE is contracted via updating a linear regression model (y = a * x^2 + b * x + c) in which
     * the dependent variable (y) is time taken and the independent variable (x) is interval length.
     * The equation is then reordered to solve for positive x, providing the upper bound on the
     * interval space. Dividing this by minNumTrees - treeCount gives the maximum space such that in
     * the worst case the contract is met.
     */
    private class Timer implements Serializable {

        // Contract bookkeeping (nanoseconds). forestTimeLimit defaults to "no limit".
        protected long forestTimeLimit = Long.MAX_VALUE;
        protected long forestStartTime = 0;
        protected long treeStartTime = 0;
        protected long forestElapsedTime = 0;

        // Regression data: interval lengths (x), observed build times (y) and model outputs.
        protected ArrayList<Integer> independentVariables = null;
        protected ArrayList<Long> dependentVariables = null;
        protected ArrayList<Double> predictions = null;
        // History of fitted coefficients, retained for model diagnostics/CSV export.
        private ArrayList<Double> aValues = null;
        private ArrayList<Double> bValues = null;
        private ArrayList<Double> cValues = null;

        // Current coefficients of time = a*x^2 + b*x + c.
        protected double a = 0.0;
        protected double b = 0.0;
        protected double c = 0.0;
        protected String modelOutPath = null;

        /**
         * Called in CRISE.initialise in order to reset timer.
         * Clears all recorded observations and coefficient history.
         */
        protected void reset(){
            independentVariables = new ArrayList<>();
            dependentVariables = new ArrayList<>();
            predictions = new ArrayList<>();
            aValues = new ArrayList<>();
            bValues = new ArrayList<>();
            cValues = new ArrayList<>();
        }

        /**
         * computes coefficients (a, b, c).
         * Ordinary least-squares fit of time = a*x^2 + b*x + c to the recorded
         * (interval length, build time) observations.
         */
        protected void buildModel(){
            a = 0.0;
            b = 0.0;
            c = 0.0;
            double numberOfVals = (double) independentVariables.size();
            // Accumulated power sums of x and cross-sums with y used by the closed-form
            // least-squares solution for a quadratic fit.
            double smFrstScrs = 0.0;            // sum(x)
            double smScndScrs = 0.0;            // sum(y)
            double smSqrFrstScrs = 0.0;         // sum(x^2)
            double smCbFrstScrs = 0.0;          // sum(x^3)
            double smPwrFrFrstScrs = 0.0;       // sum(x^4)
            double smPrdtFrstScndScrs = 0.0;    // sum(x*y)
            double smSqrFrstScrsScndScrs = 0.0; // sum(x^2 * y)

            for (int i = 0; i < independentVariables.size(); i++) {
                smFrstScrs += independentVariables.get(i);
                smScndScrs += dependentVariables.get(i);
                smSqrFrstScrs += Math.pow(independentVariables.get(i), 2);
                smCbFrstScrs += Math.pow(independentVariables.get(i), 3);
                smPwrFrFrstScrs += Math.pow(independentVariables.get(i), 4);
                smPrdtFrstScndScrs += independentVariables.get(i) * dependentVariables.get(i);
                smSqrFrstScrsScndScrs += Math.pow(independentVariables.get(i), 2) * dependentVariables.get(i);
            }

            // Centred second-moment terms of the normal equations.
            double valOne = smSqrFrstScrs - (Math.pow(smFrstScrs, 2) / numberOfVals);
            double valTwo = smPrdtFrstScndScrs - ((smFrstScrs * smScndScrs) / numberOfVals);
            double valThree = smCbFrstScrs - ((smSqrFrstScrs * smFrstScrs) / numberOfVals);
            double valFour = smSqrFrstScrsScndScrs - ((smSqrFrstScrs * smScndScrs) / numberOfVals);
            double valFive = smPwrFrFrstScrs - (Math.pow(smSqrFrstScrs, 2) / numberOfVals);

            // Closed-form solution; shared denominator is (valOne*valFive - valThree^2).
            a = ((valFour * valOne) - (valTwo * valThree)) / ((valOne * valFive) - Math.pow(valThree, 2));
            b = ((valTwo * valFive) - (valFour * valThree)) / ((valOne * valFive) - Math.pow(valThree, 2));
            c = (smScndScrs / numberOfVals) - (b * (smFrstScrs / numberOfVals)) - (a * (smSqrFrstScrs / numberOfVals));

            // Record coefficient history for diagnostics/CSV export.
            aValues.add(a);
            bValues.add(b);
            cValues.add(c);
        }

        /**
         * Adds x(y') to predictions arrayList for model output.
         * Evaluates the fitted model at interval size x and stores the prediction.
         * @param x interval size.
         */
        protected void makePrediciton(int x){
            predictions.add(a * Math.pow(x, 2) + b * x + c);
        }

        /**
         * Given time remaining returns largest interval space possible.
         * Takes into account whether minNumTrees is satisfied.
         * ensures minIntervalLength < x < maxIntervalLength.
         * @param timeRemaining
         * @return interval length
         */
        protected double getFeatureSpace(long timeRemaining){
            double y = timeRemaining;
            // Invert the timing model via the quadratic formula: solve
            // a*x^2 + b*x + (c - y) = 0 for the positive root.
            double x = ((-b) + (Math.sqrt((b * b) - (4 * a * (c - y))))) / (2 * a);

            if (classifiersBuilt < minNumTrees) {
                // Divide the budget across the trees still owed, so the contract holds
                // even in the worst case.
                x = x / (minNumTrees - classifiersBuilt);
            }
            if(classifiersBuilt == minNumTrees){
                // Minimum ensemble satisfied: open up the full interval space.
                maxIntervalLength = data.numAttributes()-1;
            }
            // Clamp to the valid range; NaN arises when the discriminant is negative
            // (no positive root), in which case fall back to the maximum.
            if (x > maxIntervalLength || Double.isNaN(x)) {
                x = maxIntervalLength;
            }
            if(x < minIntervalLength){
                x = minIntervalLength+1;
            }
            return x;
        }

        /**
         * Sets the forest-level time budget.
         * @param timeLimit in nano seconds; non-positive disables the limit
         */
        protected void setTimeLimit(long timeLimit){
            if(timeLimit>0)
                this.forestTimeLimit = timeLimit;
            else
                this.forestTimeLimit = Long.MAX_VALUE;
        }

        // Dumps the observed (x, y) pairs alongside the model predictions to stdout as CSV.
        protected void printModel(){
            for (int i = 0; i < independentVariables.size(); i++) {
                System.out.println(Double.toString(independentVariables.get(i))
                        + "," + Double.toString(dependentVariables.get(i))
                        + "," + Double.toString(predictions.get(i)));
            }
        }

        // Writes the timing model (observations, predictions, coefficient history) to CSV.
        protected void saveModelToCSV(String problemName){
            try{
                FullAccessOutFile outFile = new FullAccessOutFile((modelOutPath.isEmpty() ?
"timingModel" + (int) seed + ".csv" : modelOutPath + "/" + problemName + "/" + "/timingModel" + (int) seed + ".csv")); for (int i = 0; i < independentVariables.size(); i++) { outFile.writeLine(Double.toString(independentVariables.get(i)) + "," + Double.toString(dependentVariables.get(i)) + "," + Double.toString(predictions.get(i)) + "," + Double.toString(timer.aValues.get(i)) + "," + Double.toString(timer.bValues.get(i)) + "," + Double.toString(timer.cValues.get(i))); } outFile.closeFile(); }catch(Exception e){ System.out.println("Mismatch between relation name and name of results folder: " + e); } } } public static void main(String[] args) { Instances dataTrain = loadDataNullable("Z:/ArchiveData/Univariate_arff" + "/" + DatasetLists.tscProblems112[11] + "/" + DatasetLists.tscProblems112[11] + "_TRAIN"); Instances dataTest = loadDataNullable("Z:/ArchiveData/Univariate_arff" + "/" + DatasetLists.tscProblems112[11] + "/" + DatasetLists.tscProblems112[11] + "_TEST"); Instances data = dataTrain; data.addAll(dataTest); ClassifierResults cr = null; SingleSampleEvaluator sse = new SingleSampleEvaluator(); //sse.setPropInstancesInTrain(0.5); sse.setSeed(1); RISE RISE = null; System.out.println("Dataset name: " + data.relationName()); System.out.println("Numer of cases: " + data.size()); System.out.println("Number of attributes: " + (data.numAttributes() - 1)); System.out.println("Number of classes: " + data.classAttribute().numValues()); System.out.println("\n"); try { RISE = new RISE(); RISE.setTransformType(TransformType.ACF_FFT); RISE.setIntervalMethod(4); cr = sse.evaluate(RISE, data); System.out.println(RISE.getTransformType().toString()); System.out.println("Accuracy: " + cr.getAcc()); System.out.println("Build time (ns): " + cr.getBuildTimeInNanos()); } catch (Exception e) { e.printStackTrace(); } /*try { RISE = new RISE(); RISE.setTuneTransform(true); RISE.setTransformsToTuneWith(new TransformType[]{TransformType.FFT, TransformType.ACF}); cr = sse.evaluate(RISE, 
 data);
            System.out.println(RISE.getTransformType().toString());
            System.out.println("Accuracy: " + cr.getAcc());
            System.out.println("Build time (ns): " + cr.getBuildTimeInNanos());
        } catch (Exception e) {
            e.printStackTrace();
        }*/

        // Retained alternative experiment: evaluate every available transform in turn.
        /*RISE = new RISE();
        for (int i = 0; i < TransformType.values().length; i++) {
            RISE.setTransformType(TransformType.values()[i]);
            try {
                cr = sse.evaluate(RISE, data);
            } catch (Exception e) {
                e.printStackTrace();
            }
            System.out.println(RISE.getTransformType().toString() + "\t" + "Acc: " + cr.getAcc() + "\t" + "Build time: " + cr.getBuildTimeInNanos());
        }*/
    }
}

/*
Historic benchmark notes, Dataset = ADIAC:

With reload (@ 200 trees)
Accuracy: 0.7868020304568528
Build time (ns): 60958242098

With reload (@ 500 trees (Completed build))
Accuracy: 0.7868020304568528
Build time (ns): 8844999832

With no reload but serialising at 100 intervals.
Accuracy: 0.7868020304568528
Build time (ns): 96078716938

No serialising
Accuracy: 0.7868020304568528
Build time (ns): 88964973765
*/
65,293
41.871963
279
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/interval_based/STSF.java
/* * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package tsml.classifiers.interval_based; import evaluation.evaluators.CrossValidationEvaluator; import experiments.data.DatasetLoading; import machine_learning.classifiers.ContinuousIntervalTree; import tsml.classifiers.*; import tsml.data_containers.TSCapabilities; import tsml.data_containers.TSCapabilitiesHandler; import tsml.data_containers.TimeSeriesInstance; import tsml.data_containers.TimeSeriesInstances; import tsml.data_containers.utilities.Converter; import tsml.transformers.ColumnNormalizer; import tsml.transformers.Differences; import tsml.transformers.PowerSpectrum; import tsml.transformers.Resizer; import utilities.ClassifierTools; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.core.*; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.concurrent.TimeUnit; import static utilities.ArrayUtilities.sum; import static utilities.StatisticalUtilities.median; import static utilities.Utilities.extractTimeSeries; /** * Implementation of the Supervised Time Series Forest * This classifier is Contractable, Checkpointable and can estimate performance from the train data internally. 
 *
 * @author Matthew Middlehurst
 */
public class STSF extends EnhancedAbstractClassifier implements TechnicalInformationHandler,
        TrainTimeContractable, TSCapabilitiesHandler {

    //Static defaults
    private final static int DEFAULT_NUM_CLASSIFIERS=500;

    /** Primary parameters potentially tunable*/
    private int numClassifiers=DEFAULT_NUM_CLASSIFIERS;

    /** Ensemble members of base classifier, default to random forest RandomTree */
    private ArrayList<Classifier> trees;
    private Classifier classifier = new ContinuousIntervalTree();

    /** for each classifier i representation r attribute a interval j  starts at intervals[i][r][a][j][0] and
     ends  at  intervals[i][r][a][j][1] */
    private ArrayList<ArrayList<int[]>[][]> intervals;

    /**Holding variable for test classification in order to retain the header info*/
    private ArrayList<Instances> testHolders;

    /** voteEnsemble determines whether to aggregate classifications or
     * probabilities when predicting */
    private boolean voteEnsemble=true;

    // numInstances: size of the raw training set; newNumInstances additionally counts the
    // duplicates added to rebalance minority classes during the build.
    private int numInstances;
    private int newNumInstances;

    /**
     * Resizer to transform data if unequal length.
     * paddingType selects the pad value (zero/mean/mean+noise); paddingLength selects the
     * target length (max/median/weighted median).
     */
    private enum paddingType {ZERO, MEAN, MEAN_NOISE}
    private enum paddingLength {MAX, MEDIAN, WEIGHTED_MEDIAN}
    private paddingType chosenPType;
    private paddingLength chosenPLen;
    private Resizer resizer;
    private int seriesLength;

    // Contract state; transient so serialized checkpoints do not freeze the budget.
    private boolean trainTimeContract = false;
    transient private long trainContractTimeNanos = 0;
    transient private long finalBuildtrainContractTimeNanos = 0;

    // The two extra representations used alongside the raw series: power spectrum and
    // first-order differences.
    private PowerSpectrum ps = new PowerSpectrum();
    private Differences di = new Differences();

    protected static final long serialVersionUID = 32554L;

    public STSF(){
        //STSF Has the capability to form train estimates
        super(CAN_ESTIMATE_OWN_PERFORMANCE);
        chosenPLen = paddingLength.MAX;
        chosenPType = paddingType.ZERO;
    }

    /** Seeded constructor; same defaults as the no-arg constructor. */
    public STSF(int s){
        super(CAN_ESTIMATE_OWN_PERFORMANCE);
        setSeed(s);
        chosenPLen = paddingLength.MAX;
        chosenPType = paddingType.ZERO;
    }

    /**
     * Sets the base classifier cloned for each ensemble member.
     * @param c a base classifier constructed elsewhere and cloned into ensemble
     */
    public void setBaseClassifier(Classifier c){
        classifier =c;
    }

    /**
     * ok, two methods are a bit pointless, experimenting with ensemble method
     * @param b boolean to set vote ensemble
     */
    public void setVoteEnsemble(boolean b){
        voteEnsemble=b;
    }
    // Inverse convenience of setVoteEnsemble: true selects probability aggregation.
    public void setProbabilityEnsemble(boolean b){
        voteEnsemble=!b;
    }

    /**
     * Methods to set the padding options for handling uneven length series.
     * NOTE(review): paddingType is a private enum, so external callers cannot actually
     * construct an argument for this public setter — consider widening the enum's access.
     * @param pt paddingType enum
     */
    public void setPaddingType(paddingType pt){
        chosenPType = pt;
    }

    /**
     * Methods to set the padding options for handling uneven length series
     * @param pl paddingLength enum
     */
    public void setPaddingLen(paddingLength pl){
        chosenPLen = pl;
    }

    /**
     * Perhaps make this coherent with setOptions(String[] ar)?
     * @return String written to results files
     */
    @Override
    public String getParameters() {
        // trees is null until buildClassifier runs; report 0 built trees in that case.
        int numTrees = trees == null ? 0 : trees.size();
        String result=super.getParameters()+",numTrees,"+numTrees+",voting,"+voteEnsemble+",BaseClassifier,"+
                classifier.getClass().getSimpleName();
        if(trainTimeContract)
            result+= ",trainContractTimeNanos," +trainContractTimeNanos;
        else
            result+=",NoContract";
        //Any other contract information here
        result+=",EstimateOwnPerformance,"+getEstimateOwnPerformance();
        if(getEstimateOwnPerformance())
            result+=",EstimateMethod,"+ trainEstimateMethod;
        return result;
    }

    public void setNumTrees(int t){
        numClassifiers=t;
    }

    /**
     * paper defining STSF
     * @return TechnicalInformation
     */
    @Override
    public TechnicalInformation getTechnicalInformation() {
        TechnicalInformation result;
        result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE);
        result.setValue(TechnicalInformation.Field.AUTHOR, "C. Nestor, N. Elham, Q. Jianzhong and K.
Lars"); result.setValue(TechnicalInformation.Field.YEAR, "2020"); result.setValue(TechnicalInformation.Field.TITLE, "Fast and Accurate Time Series Classification Through Supervised Interval Search"); result.setValue(TechnicalInformation.Field.JOURNAL, "IEEE International Conference on Data Mining"); return result; } /** * buildClassifier wrapper for TimeSeriesInstances. * * @param data TimeSeriesInstances training data. * @author l-walker */ @Override public void buildClassifier(TimeSeriesInstances data) throws Exception { System.out.println("Building STSF with TimeSeriesInstances"); // Test if classifier can handle data. boolean canHandle = getTSCapabilities().test(data); if (!canHandle) throw new Exception("STSF cannot handle this type of data!"); long startTime=System.nanoTime(); if (!data.isEqualLength()) { if (chosenPLen == paddingLength.MAX) { if (chosenPType == paddingType.ZERO){ resizer = new Resizer(new Resizer.MaxResizeMetric(), new Resizer.FlatPadMetric(0)); } else if(chosenPType == paddingType.MEAN){ resizer = new Resizer(new Resizer.MaxResizeMetric(), new Resizer.MeanPadMetric()); } else if(chosenPType == paddingType.MEAN_NOISE){ resizer = new Resizer(new Resizer.MaxResizeMetric(), new Resizer.MeanNoisePadMetric()); } } else if(chosenPLen == paddingLength.MEDIAN){ if (chosenPType == paddingType.ZERO){ resizer = new Resizer(new Resizer.MedianResizeMetric(), new Resizer.FlatPadMetric(0)); } else if(chosenPType == paddingType.MEAN){ resizer = new Resizer(new Resizer.MedianResizeMetric(), new Resizer.MeanPadMetric()); } else if(chosenPType == paddingType.MEAN_NOISE){ resizer = new Resizer(new Resizer.MedianResizeMetric(), new Resizer.MeanNoisePadMetric()); } } else if(chosenPLen == paddingLength.WEIGHTED_MEDIAN){ if (chosenPType == paddingType.ZERO){ resizer = new Resizer(new Resizer.WeightedMedianResizeMetric(), new Resizer.FlatPadMetric(0)); } else if(chosenPType == paddingType.MEAN){ resizer = new Resizer(new Resizer.WeightedMedianResizeMetric(), new 
Resizer.MeanPadMetric()); } else if(chosenPType == paddingType.MEAN_NOISE){ resizer = new Resizer(new Resizer.WeightedMedianResizeMetric(), new Resizer.MeanNoisePadMetric()); } } TimeSeriesInstances padded = resizer.fitTransform(data); data = padded; } setTSTrainData(data); // Set attributes numInstances = data.numInstances(); trees = new ArrayList(numClassifiers); intervals = new ArrayList(); testHolders = new ArrayList(); seriesLength = data.getMaxLength(); finalBuildtrainContractTimeNanos=trainContractTimeNanos; //If contracted and estimating own performance, distribute the contract evenly between estimation and the final build if(trainTimeContract && getEstimateOwnPerformance()){ finalBuildtrainContractTimeNanos/=2; printLineDebug(" Setting final contract time to "+finalBuildtrainContractTimeNanos+" nanos"); } TimeSeriesInstances[] representations = new TimeSeriesInstances[3]; representations[0] = data; ArrayList<Integer>[] idxByClass = new ArrayList[data.getClassLabels().length]; for (int i = 0; i < idxByClass.length; i++){ idxByClass[i] = new ArrayList<>(); } for (int i = 0; i < data.numInstances(); i++){ idxByClass[(int)data.get(i).getTargetValue()].add(i); } double average = (double)data.numInstances()/data.getClassLabels().length; int[] instToAdd = new int[numInstances]; for (int i = 0; i < idxByClass.length; i++) { if (idxByClass[i].size() < average) { int n = idxByClass[i].size(); while (n < average) { instToAdd[idxByClass[i].get(rand.nextInt(idxByClass[i].size()))]++; n++; } } } newNumInstances = numInstances + sum(instToAdd); ps = new PowerSpectrum(); representations[1] = ps.transform(representations[0]); di = new Differences(); di.setSubtractFormerValue(true); representations[2] = di.transform(representations[0]); int classifiersBuilt = trees.size(); /** MAIN BUILD LOOP * For each base classifier * generate random intervals * do the transforms * build the classifier * */ while(withinTrainContract(startTime) && (classifiersBuilt < numClassifiers)) { if 
(classifiersBuilt % 100 == 0) printLineDebug("\t\t\t\t\tBuilding STSF tree " + classifiersBuilt + " time taken = " + (System.nanoTime() - startTime) + " contract =" + finalBuildtrainContractTimeNanos + " nanos"); //If bagging find instances with replacement int[] instInclusions = new int[numInstances]; int[] baggingClassCounts = new int[representations[0].numClasses()]; for (int n = 0; n < numInstances; n++) { instInclusions[rand.nextInt(numInstances)]++; instInclusions[n] += instToAdd[n]; } for (int n = 0; n < numInstances; n++) { if (instInclusions[n] > 0) { baggingClassCounts[(int)representations[0].get(n).getTargetValue()] += instInclusions[n]; } } //1. Select intervals for tree i intervals.add(new ArrayList[3][]); int totalAtts = 0; for (int r = 0; r < representations.length; r++) { intervals.get(classifiersBuilt)[r] = findCandidateDiscriminatoryIntervals(representations[r], instInclusions, baggingClassCounts); for (int a = 0; a < intervals.get(classifiersBuilt)[r].length; a++) { totalAtts += intervals.get(classifiersBuilt)[r][a].size(); } } // Convert representations and data to weka instances. Instances[] newRepresentations = new Instances[3]; newRepresentations[0] = Converter.toArff(representations[0]); newRepresentations[1] = Converter.toArff(representations[1]); newRepresentations[2] = Converter.toArff(representations[2]); Instances newData = new Instances(Converter.toArff(data)); //2. 
Generate and store attributes ArrayList<Attribute> atts = new ArrayList<>(); for (int j = 0; j < totalAtts; j++) { atts.add(new Attribute("att" + j)); } atts.add(newData.classAttribute()); //create blank instances with the correct class value Instances result = new Instances("Tree", atts, newNumInstances); result.setClassIndex(result.numAttributes() - 1); Instances testHolder = new Instances(result, 0); testHolder.add(new DenseInstance(result.numAttributes())); testHolders.add(testHolder); //For bagging int instIdx = 0; int lastIdx = -1; for (int n = 0; n < newNumInstances; n++) { boolean sameInst = false; while (true) { if (instInclusions[instIdx] == 0) { instIdx++; } else { instInclusions[instIdx]--; if (instIdx == lastIdx) { result.add(n, new DenseInstance(result.instance(n - 1))); sameInst = true; } else { lastIdx = instIdx; } break; } } if (sameInst) continue; DenseInstance in = new DenseInstance(result.numAttributes()); in.setValue(result.numAttributes() - 1, newRepresentations[0].instance(instIdx).classValue()); int p = 0; for (int r = 0; r < representations.length; r++) { double[] series = extractTimeSeries(newRepresentations[r].get(instIdx)); for (int a = 0; a < FeatureSet.numFeatures; a++) { for (int j = 0; j < intervals.get(classifiersBuilt)[r][a].size(); j++) { int[] interval = intervals.get(classifiersBuilt)[r][a].get(j); double val = FeatureSet.calcFeatureByIndex(a, interval[0], interval[1], series); in.setValue(p, val); p++; } } } result.add(in); } //3. Create and build tree using all the features. Classifier tree = AbstractClassifier.makeCopy(classifier); if (seedClassifier && tree instanceof Randomizable) ((Randomizable) tree).setSeed(seed * (classifiersBuilt + 1)); tree.buildClassifier(result); trees.add(tree); classifiersBuilt++; } if(classifiersBuilt==0){//Not enough time to build a single classifier throw new Exception((" ERROR in STSF, no trees built, contract time probably too low. 
Contract time ="+trainContractTimeNanos)); } long endTime=System.nanoTime(); trainResults.setTimeUnit(TimeUnit.NANOSECONDS); trainResults.setBuildTime(endTime-startTime-trainResults.getErrorEstimateTime()); trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime()); /** Estimate accuracy from Train data * distributions and predictions stored in trainResults */ if(getEstimateOwnPerformance()){ long est1=System.nanoTime(); estimateOwnPerformance(Converter.toArff(data)); long est2=System.nanoTime(); trainResults.setErrorEstimateTime(est2-est1); trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime()+trainResults.getErrorEstimateTime()); } trainResults.setParas(getParameters()); printLineDebug("*************** Finished STSF Build with "+classifiersBuilt+" Trees built in "+(System.nanoTime()-startTime)/1000000000+" Seconds ***************"); } /** * main buildClassifier * @param data * @throws Exception */ @Override public void buildClassifier(Instances data) throws Exception { super.buildClassifier(data); // can classifier handle the data? getCapabilities().testWithFail(data); long startTime=System.nanoTime(); // Set attributes. 
numInstances = data.numInstances(); trees = new ArrayList(numClassifiers); intervals = new ArrayList(); testHolders = new ArrayList(); finalBuildtrainContractTimeNanos=trainContractTimeNanos; //If contracted and estimating own performance, distribute the contract evenly between estimation and the final build if(trainTimeContract && getEstimateOwnPerformance()){ finalBuildtrainContractTimeNanos/=2; printLineDebug(" Setting final contract time to "+finalBuildtrainContractTimeNanos+" nanos"); } Instances[] representations = new Instances[3]; representations[0] = new Instances(data); ArrayList<Integer>[] idxByClass = new ArrayList[data.numClasses()]; for (int i = 0; i < idxByClass.length; i++){ idxByClass[i] = new ArrayList<>(); } for (int i = 0; i < data.numInstances(); i++){ idxByClass[(int)data.get(i).classValue()].add(i); } double average = (double)data.numInstances()/data.numClasses(); int[] instToAdd = new int[numInstances]; for (int i = 0; i < idxByClass.length; i++) { if (idxByClass[i].size() < average) { int n = idxByClass[i].size(); while (n < average) { instToAdd[idxByClass[i].get(rand.nextInt(idxByClass[i].size()))]++; n++; } } } newNumInstances = numInstances + sum(instToAdd); ps = new PowerSpectrum(); representations[1] = ps.transform(representations[0]); di = new Differences(); di.setSubtractFormerValue(true); representations[2] = di.transform(representations[0]); int classifiersBuilt = trees.size(); /** MAIN BUILD LOOP * For each base classifier * generate random intervals * do the transforms * build the classifier * */ while(withinTrainContract(startTime) && (classifiersBuilt < numClassifiers)) { if (classifiersBuilt % 100 == 0) printLineDebug("\t\t\t\t\tBuilding STSF tree " + classifiersBuilt + " time taken = " + (System.nanoTime() - startTime) + " contract =" + finalBuildtrainContractTimeNanos + " nanos"); //If bagging find instances with replacement int[] instInclusions = new int[numInstances]; int[] baggingClassCounts = new int[numClasses];; for 
(int n = 0; n < numInstances; n++) { instInclusions[rand.nextInt(numInstances)]++; instInclusions[n] += instToAdd[n]; } for (int n = 0; n < numInstances; n++) { if (instInclusions[n] > 0) { baggingClassCounts[(int)representations[0].get(n).classValue()] += instInclusions[n]; } } //1. Select intervals for tree i intervals.add(new ArrayList[3][]); int totalAtts = 0; for (int r = 0; r < representations.length; r++) { intervals.get(classifiersBuilt)[r] = findCandidateDiscriminatoryIntervals(representations[r], instInclusions, baggingClassCounts); for (int a = 0; a < intervals.get(classifiersBuilt)[r].length; a++) { totalAtts += intervals.get(classifiersBuilt)[r][a].size(); } } //2. Generate and store attributes ArrayList<Attribute> atts = new ArrayList<>(); for (int j = 0; j < totalAtts; j++) { atts.add(new Attribute("att" + j)); } atts.add(data.classAttribute()); //create blank instances with the correct class value Instances result = new Instances("Tree", atts, newNumInstances); result.setClassIndex(result.numAttributes() - 1); Instances testHolder = new Instances(result, 0); testHolder.add(new DenseInstance(result.numAttributes())); testHolders.add(testHolder); //For bagging int instIdx = 0; int lastIdx = -1; for (int n = 0; n < newNumInstances; n++) { boolean sameInst = false; while (true) { if (instInclusions[instIdx] == 0) { instIdx++; } else { instInclusions[instIdx]--; if (instIdx == lastIdx) { result.add(n, new DenseInstance(result.instance(n - 1))); sameInst = true; } else { lastIdx = instIdx; } break; } } if (sameInst) continue; DenseInstance in = new DenseInstance(result.numAttributes()); in.setValue(result.numAttributes() - 1, representations[0].instance(instIdx).classValue()); int p = 0; for (int r = 0; r < representations.length; r++) { double[] series = extractTimeSeries(representations[r].get(instIdx)); for (int a = 0; a < FeatureSet.numFeatures; a++) { for (int j = 0; j < intervals.get(classifiersBuilt)[r][a].size(); j++) { int[] interval = 
intervals.get(classifiersBuilt)[r][a].get(j); double val = FeatureSet.calcFeatureByIndex(a, interval[0], interval[1], series); in.setValue(p, val); p++; } } } result.add(in); } //3. Create and build tree using all the features. Classifier tree = AbstractClassifier.makeCopy(classifier); if (seedClassifier && tree instanceof Randomizable) ((Randomizable) tree).setSeed(seed * (classifiersBuilt + 1)); tree.buildClassifier(result); trees.add(tree); classifiersBuilt++; } if(classifiersBuilt==0){//Not enough time to build a single classifier throw new Exception((" ERROR in STSF, no trees built, contract time probably too low. Contract time ="+trainContractTimeNanos)); } long endTime=System.nanoTime(); trainResults.setTimeUnit(TimeUnit.NANOSECONDS); trainResults.setBuildTime(endTime-startTime-trainResults.getErrorEstimateTime()); trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime()); /** Estimate accuracy from Train data * distributions and predictions stored in trainResults */ if(getEstimateOwnPerformance()){ long est1=System.nanoTime(); estimateOwnPerformance(data); long est2=System.nanoTime(); trainResults.setErrorEstimateTime(est2-est1); trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime()+trainResults.getErrorEstimateTime()); } trainResults.setParas(getParameters()); printLineDebug("*************** Finished STSF Build with "+classifiersBuilt+" Trees built in "+(System.nanoTime()-startTime)/1000000000+" Seconds ***************"); } private ArrayList<int[]>[] findCandidateDiscriminatoryIntervals(Instances rep, int[] instInclusions, int[] classCounts){ int seriesLength = rep.numAttributes()-1; int splitPoint; if (seriesLength <= 8) splitPoint = seriesLength/2; else splitPoint = rand.nextInt(seriesLength-8)+4; //min 4, max serieslength-4 ColumnNormalizer rn = new ColumnNormalizer(); rn.fit(rep); rn.setNormMethod(ColumnNormalizer.NormType.STD_NORMAL); Instances data = rn.transform(rep); ArrayList<int[]>[] newIntervals = new 
ArrayList[FeatureSet.numFeatures]; for (int i = 0; i < FeatureSet.numFeatures; i++){ newIntervals[i] = new ArrayList<>(); supervisedIntervalSearch(data, instInclusions, i, newIntervals[i], classCounts, 0, splitPoint); supervisedIntervalSearch(data, instInclusions, i, newIntervals[i], classCounts, splitPoint+1, rep.numAttributes()-2); } return newIntervals; } /** * Method to carry out supervised learning with TimeSeriesInstances * @param rep TimeSeriesInstances * @param instInclusions TimeSeriesInstances to include * @param classCounts Bagging Class counts * @return newIntervals ArrayList<int[]>[] */ private ArrayList<int[]>[] findCandidateDiscriminatoryIntervals(TimeSeriesInstances rep, int[] instInclusions, int[] classCounts){ int seriesLength = rep.getMaxLength(); int splitPoint; if (seriesLength <= 8) splitPoint = seriesLength/2; else splitPoint = rand.nextInt(seriesLength-8)+4; //min 4, max serieslength-4 Instances data = Converter.toArff(rep); ColumnNormalizer rn = new ColumnNormalizer(); rn.fit(data); rn.setNormMethod(ColumnNormalizer.NormType.STD_NORMAL); data = rn.transform(data); ArrayList<int[]>[] newIntervals = new ArrayList[FeatureSet.numFeatures]; for (int i = 0; i < FeatureSet.numFeatures; i++){ newIntervals[i] = new ArrayList<>(); supervisedIntervalSearch(data, instInclusions, i, newIntervals[i], classCounts, 0, splitPoint); supervisedIntervalSearch(data, instInclusions, i, newIntervals[i], classCounts, splitPoint+1, rep.getMaxLength()-1); } return newIntervals; } private void supervisedIntervalSearch(Instances data, int[] instInclusions, int featureIdx, ArrayList<int[]> intervals, int[] classCount, int start, int end){ int seriesLength = end-start; if (seriesLength < 4) return; int halfSeriesLength = seriesLength/2; double[] x1 = new double[newNumInstances]; double[] x2 = new double[newNumInstances]; double[] y = new double[newNumInstances]; int e1 = start + halfSeriesLength; int e2 = start + halfSeriesLength + 1; int instIdx = 0; int lastIdx = -1; 
int[] instInclusionsCopy = Arrays.copyOf(instInclusions, instInclusions.length); for (int i = 0; i < newNumInstances; i++){ boolean sameInst = false; while (true) { if (instInclusionsCopy[instIdx] == 0) { instIdx++; } else { instInclusionsCopy[instIdx]--; if (instIdx == lastIdx) { x1[i] = x1[i-1]; x2[i] = x2[i-1]; y[i] = y[i-1]; sameInst = true; } else { lastIdx = instIdx; } break; } } if (sameInst) continue; double[] series = data.instance(instIdx).toDoubleArray(); x1[i] = FeatureSet.calcFeatureByIndex(featureIdx, start, e1, series); x2[i] = FeatureSet.calcFeatureByIndex(featureIdx, e2, end, series); y[i] = series[series.length-1]; } double s1 = fisherScore(x1, y, classCount); double s2 = fisherScore(x2, y, classCount); if (s2 < s1){ intervals.add(new int[]{start, e1}); supervisedIntervalSearch(data, instInclusions, featureIdx, intervals, classCount, start, e1); } else{ intervals.add(new int[]{e2, end}); supervisedIntervalSearch(data, instInclusions, featureIdx, intervals, classCount, e2, end); } } /** * Method to search for the intervals. 
*/ private void supervisedIntervalSearch(TimeSeriesInstances data, int[] instInclusions, int featureIdx, ArrayList<int[]> intervals, int[] classCount, int start, int end){ int seriesLength = end-start; if (seriesLength < 4) return; int halfSeriesLength = seriesLength/2; double[] x1 = new double[newNumInstances]; double[] x2 = new double[newNumInstances]; double[] y = new double[newNumInstances]; int e1 = start + halfSeriesLength; int e2 = start + halfSeriesLength + 1; int instIdx = 0; int lastIdx = -1; int[] instInclusionsCopy = Arrays.copyOf(instInclusions, instInclusions.length); for (int i = 0; i < newNumInstances; i++){ boolean sameInst = false; while (true) { if (instInclusionsCopy[instIdx] == 0) { instIdx++; } else { instInclusionsCopy[instIdx]--; if (instIdx == lastIdx) { x1[i] = x1[i-1]; x2[i] = x2[i-1]; y[i] = y[i-1]; sameInst = true; } else { lastIdx = instIdx; } break; } } if (sameInst) continue; double[] series = data.get(instIdx).toValueArray()[0]; x1[i] = FeatureSet.calcFeatureByIndex(featureIdx, start, e1, series); x2[i] = FeatureSet.calcFeatureByIndex(featureIdx, e2, end, series); y[i] = series[series.length-1]; } double s1 = fisherScore(x1, y, classCount); double s2 = fisherScore(x2, y, classCount); if (s2 < s1){ intervals.add(new int[]{start, e1}); supervisedIntervalSearch(data, instInclusions, featureIdx, intervals, classCount, start, e1); } else{ intervals.add(new int[]{e2, end}); supervisedIntervalSearch(data, instInclusions, featureIdx, intervals, classCount, e2, end); } } private double fisherScore(double[] x, double[] y, int[] classCounts){ double a = 0, b = 0; double xMean = 0; for (int n = 0; n < x.length; n++){ xMean += x[n]; } xMean /= x.length; for (int i = 0; i < classCounts.length; i++){ double xyMean = 0; for (int n = 0; n < x.length; n++){ if (i == y[n]) { xyMean += x[n]; } } xyMean /= classCounts[i]; double squareSum = 0; for (int n = 0; n < x.length; n++){ if (i == y[n]) { double temp = x[n] - xyMean; squareSum += temp * temp; } } 
double xyStdev = classCounts[i]-1 == 0 ? 0 : Math.sqrt(squareSum/(classCounts[i]-1)); a += classCounts[i]*Math.pow(xyMean-xMean, 2); b += classCounts[i]*Math.pow(xyStdev, 2); } return b == 0 ? 0 : a/b; } /** * estimating own performance * Three scenarios * 1. If we bagged the full build (bagging ==true), we estimate using the full build OOB. Assumes the final * model has already been built * If we built on all data (bagging ==false) we estimate either * 2. with a 10xCV if estimator==EstimatorMethod.CV * 3. Build a bagged model simply to get the estimate estimator==EstimatorMethod.OOB * Note that all this needs to come out of any contract time we specify. * @param data * @throws Exception from distributionForInstance */ private void estimateOwnPerformance(Instances data) throws Exception { // Defaults to 10 or numInstances, whichever is smaller. int numFolds=setNumberOfFolds(data); CrossValidationEvaluator cv = new CrossValidationEvaluator(); if (seedClassifier) cv.setSeed(seed*5); cv.setNumFolds(numFolds); STSF tsf=new STSF(); tsf.copyParameters(this); tsf.setDebug(this.debug); if (seedClassifier) tsf.setSeed(seed*100); tsf.setEstimateOwnPerformance(false); if(trainTimeContract)//Need to split the contract time, will give time/(numFolds+2) to each fio tsf.setTrainTimeLimit(finalBuildtrainContractTimeNanos/numFolds); printLineDebug(" Doing CV evaluation estimate performance with "+tsf.getTrainContractTimeNanos()/1000000000+" secs per fold."); long buildTime = trainResults.getBuildTime(); trainResults=cv.evaluate(tsf,data); trainResults.setBuildTime(buildTime); trainResults.setEstimatorName("TSFCV"); trainResults.setErrorEstimateMethod("CV_"+numFolds); } private void copyParameters(STSF other){ this.numClassifiers=other.numClassifiers; } @Override public long getTrainContractTimeNanos(){ return trainContractTimeNanos; } /** * @param ins to classifier * @return array of doubles: probability of each class * @throws Exception */ @Override public double[] 
distributionForInstance(Instance ins) throws Exception { double[] d=new double[ins.numClasses()]; //Build transformed instance double[][] representations = new double[3][]; representations[0] = ins.toDoubleArray(); representations[1] = ps.transform(ins).toDoubleArray(); representations[2] = di.transform(ins).toDoubleArray(); for(int i=0;i<trees.size();i++){ Instances testHolder = testHolders.get(i); int p = 0; for (int r = 0; r < representations.length; r++){ for (int a = 0; a < FeatureSet.numFeatures; a++){ for (int j = 0; j < intervals.get(i)[r][a].size(); j++){ int[] interval = intervals.get(i)[r][a].get(j); double val = FeatureSet.calcFeatureByIndex(a, interval[0], interval[1], representations[r]); testHolder.instance(0).setValue(p, val); p++; } } } if(voteEnsemble){ int c=(int)trees.get(i).classifyInstance(testHolder.instance(0)); d[c]++; }else{ double[] temp=trees.get(i).distributionForInstance(testHolder.instance(0)); for(int j=0;j<temp.length;j++) d[j]+=temp[j]; } } double sum=0; for(double x:d) sum+=x; if(sum>0) for(int i=0;i<d.length;i++) d[i]=d[i]/sum; return d; } /** * @param ins TimeSeriesInstance * @return array of doubles: probability of each class * @throws Exception */ @Override public double[] distributionForInstance(TimeSeriesInstance ins) throws Exception { // check if unequal length if (seriesLength != ins.getMaxLength()) { // reformat TimeSeriesInstance padded = resizer.transform(ins); ins = padded; } double[] d = new double[getTSTrainData().getClassLabels().length];// length of class variables double[][] representations = new double[3][]; representations[0] = ins.toValueArray()[0]; representations[1] = ps.transform(ins).toValueArray()[0]; representations[2] = di.transform(ins).toValueArray()[0]; for(int i=0;i<trees.size();i++){ Instances testHolder = testHolders.get(i); int p = 0; for (int r = 0; r < representations.length; r++){ for (int a = 0; a < FeatureSet.numFeatures; a++){ for (int j = 0; j < intervals.get(i)[r][a].size(); j++){ int[] 
interval = intervals.get(i)[r][a].get(j); double val = FeatureSet.calcFeatureByIndex(a, interval[0], interval[1], representations[r]); testHolder.instance(0).setValue(p, val); p++; } } } if(voteEnsemble){ int c=(int)trees.get(i).classifyInstance(testHolder.instance(0)); d[c]++; }else{ double[] temp=trees.get(i).distributionForInstance(testHolder.instance(0)); for(int j=0;j<temp.length;j++) d[j]+=temp[j]; } } double sum=0; for(double x:d) sum+=x; if(sum>0) for(int i=0;i<d.length;i++) d[i]=d[i]/sum; return d; } /** * @param ins Weka Instance * @return double * @throws Exception */ @Override public double classifyInstance(Instance ins) throws Exception { double[] d=distributionForInstance(ins); return findIndexOfMax(d, rand); } /** * @param ins TimeSeriesInstance * @return double * @throws Exception */ @Override public double classifyInstance(TimeSeriesInstance ins) throws Exception { double[] d = distributionForInstance(ins); return findIndexOfMax(d, rand); } @Override//TrainTimeContractable public void setTrainTimeLimit(long amount) { printLineDebug("STSF setting contract to "+amount); if(amount>0) { trainContractTimeNanos = amount; trainTimeContract = true; } else trainTimeContract = false; } @Override//TrainTimeContractable public boolean withinTrainContract(long start){ if(trainContractTimeNanos<=0) return true; //Not contracted return System.nanoTime()-start < finalBuildtrainContractTimeNanos; } // TSCapabilities for TSInstances. 
@Override public TSCapabilities getTSCapabilities() { TSCapabilities tsCapabilities = new TSCapabilities(); tsCapabilities.enable(TSCapabilities.EQUAL_OR_UNEQUAL_LENGTH) .enable(TSCapabilities.UNIVARIATE) .enable(TSCapabilities.NO_MISSING_VALUES) .enable(TSCapabilities.MIN_LENGTH(2)); return tsCapabilities; } //Nested class to store seven simple summary features used to construct train data private static class FeatureSet{ static int numFeatures = 7; public static double calcFeatureByIndex(int idx, int start, int end, double[] data) { switch (idx){ case 0: return calcMean(start, end, data); case 1: return calcMedian(start, end, data); case 2: return calcStandardDeviation(start, end, data); case 3: return calcSlope(start, end, data); case 4: return calcInterquartileRange(start, end, data); case 5: return calcMin(start, end, data); case 6: return calcMax(start, end, data); default: return Double.NaN; } } public static double calcMean(int start, int end, double[] data){ double sumY = 0; for(int i=start;i<=end;i++) { sumY += data[i]; } int length = end-start+1; return sumY/length; } public static double calcMedian(int start, int end, double[] data){ ArrayList<Double> sortedData = new ArrayList<>(end-start+1); for(int i=start;i<=end;i++){ sortedData.add(data[i]); } return median(sortedData, false); //sorted in function } public static double calcStandardDeviation(int start, int end, double[] data){ double sumY = 0; double sumYY = 0; for(int i=start;i<=end;i++) { sumY += data[i]; sumYY += data[i] * data[i]; } int length = (end-start)+1; return (sumYY-(sumY*sumY)/length)/(length-1); } public static double calcSlope(int start, int end, double[] data){ double sumY = 0; double sumX = 0, sumXX = 0, sumXY = 0; for(int i=start;i<=end;i++) { sumY += data[i]; sumX+=(i-start); sumXX+=(i-start)*(i-start); sumXY+=data[i]*(i-start); } int length = end-start+1; double slope=(sumXY-(sumX*sumY)/length); double denom=sumXX-(sumX*sumX)/length; slope = denom == 0 ? 
0 : slope/denom; return slope; } public static double calcInterquartileRange(int start, int end, double[] data){ ArrayList<Double> sortedData = new ArrayList<>(end-start+1); for(int i=start;i<=end;i++){ sortedData.add(data[i]); } Collections.sort(sortedData); int length = end-start+1; ArrayList<Double> left = new ArrayList<>(length / 2 + 1); ArrayList<Double> right = new ArrayList<>(length / 2 + 1); if (length % 2 == 1) { for (int i = 0; i <= length / 2; i++){ left.add(sortedData.get(i)); } } else { for (int i = 0; i < length / 2; i++){ left.add(sortedData.get(i)); } } for (int i = length / 2; i < sortedData.size(); i++){ right.add(sortedData.get(i)); } return median(right, false) - median(left, false); } public static double calcMin(int start, int end, double[] data){ double min = Double.MAX_VALUE; for(int i=start;i<=end;i++){ if (data[i] < min) min = data[i]; } return min; } public static double calcMax(int start, int end, double[] data){ double max = -999999999; for(int i=start;i<=end;i++){ if (data[i] > max) max = data[i]; } return max; } } public static void main(String[] arg) throws Exception{ // Basic correctness tests, including setting paras through String dataLocation="Z:\\ArchiveData\\Univariate_arff\\"; String problem="ItalyPowerDemand"; //Instances train=DatasetLoading.loadDataNullable(dataLocation+problem+"\\"+problem+"_TRAIN"); //Instances test=DatasetLoading.loadDataNullable(dataLocation+problem+"\\"+problem+"_TEST"); int seed = 0; Instances[] trainTest = DatasetLoading.sampleItalyPowerDemand(seed); //Instances train = trainTest[0]; //Instances test = trainTest[1]; TimeSeriesInstances train = Converter.fromArff(trainTest[0]); TimeSeriesInstances test = Converter.fromArff(trainTest[1]); STSF tsf = new STSF(); tsf.setSeed(0); tsf.setPaddingLen(paddingLength.MAX); tsf.setPaddingType(paddingType.ZERO); double a; tsf.buildClassifier(train); System.out.println(tsf.trainResults.getBuildTime()); a=ClassifierTools.accuracy(test, tsf); 
System.out.println("Test Accuracy ="+a); System.out.println(); } }
45,196
38.233507
198
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/interval_based/TSBF.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.interval_based; import experiments.data.DatasetLoading; import fileIO.OutFile; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.classifiers.ParameterSplittable; import tsml.classifiers.interval_based.TSF.FeatureSet; import utilities.ClassifierTools; import utilities.InstanceTools; import weka.classifiers.Classifier; import machine_learning.classifiers.tuned.TunedRandomForest; import weka.classifiers.trees.RandomForest; import weka.core.Attribute; import weka.core.DenseInstance; import weka.core.FastVector; import weka.core.Instance; import weka.core.Instances; import weka.core.TechnicalInformation; import weka.core.TechnicalInformationHandler; /** * * Time Series Bag of Features (TSBF): Baydogan Time series classification with a bag-of-features (TSBF) algorithm. series length =m, num series =n PARAMETERS minimum interval length: wmin=5; bin size for codebook generation in phase 2: binsize=10 VARIABLES number of intervals per subseries numIntervals=(int)((zLevel*seriesLength)/wmin); number of subseries: numSub= (seriesLength/wmin)-numIntervals; 1. Subsequences are sampled and partitioned into intervals for feature extraction. 
number of subseries numSub= floor(m/wmin)-d each subseries is of random length ls each subseries is split into d segments mean, variance and slope is extracted for each segment For i=1 to number of subsequences select start and end point s1 and s2 for each time series t in T generate intervals on t_s1 and t_s2 generate features (mean, std dev and slope) from intervals add to new features for t nos of features per sub series=3*d+4 nos features per series = numSub*(3*d+4) This forms a new data set that is identical to TSF except for the global features. 2. "Each subsequence feature set is labeled with the class of the time series and each time series forms the bag." I think it works by building a random forest on the labelled transformed subseries and use the class probability estimates from the forest. 3. A classifier generates class probability estimates. 4. Histograms of the class probability estimates are generated (and concatenated) to summarize the subsequence information. 5. Global features are added. 6. A final classifier is then trained on the new representation to assign each time series. 
* * @author ajb * * PARAMETERS: * zLevel: minimum subsequence length factors * wmin: * ARGUMENTS * */ public class TSBF extends EnhancedAbstractClassifier implements ParameterSplittable,TechnicalInformationHandler{ //Paras //<editor-fold defaultstate="collapsed" desc="results reported in PAMI paper (errors)"> static double[] reportedErrorResults ={0.245, 0.287, 0.009, 0.336, 0.262, 0.004, 0.278, 0.259, 0.263, 0.126, 0.183, 0.234, 0.051, 0.090, 0.209, 0.080, 0.011, 0.488, 0.603, 0.096, 0.257, 0.262, 0.037, 0.269, 0.135, 0.138, 0.130, 0.090, 0.329, 0.175, 0.196, 0.022, 0.075, 0.034, 0.008, 0.020, 0.046, 0.001, 0.164, 0.249, 0.217, 0.004, 0.302, 0.149}; //</editor-fold> //<editor-fold defaultstate="collapsed" desc="problems used in PAMI paper"> static String[] problems={"Adiac", "Beef", "CBF", "ChlorineConcentration", "CinCECGtorso", "Coffee", "CricketX", "CricketY", "CricketZ", "DiatomSizeReduction", "ECGFiveDays", "FaceAll", "FaceFour", "FacesUCR", "FiftyWords", "Fish", "GunPoint", "Haptics", "InlineSkate", "ItalyPowerDemand", "Lightning2", "Lightning7", "Mallat", "MedicalImages", "MoteStrain", "NonInvasiveFatalECGThorax1", "NonInvasiveFatalECGThorax2", "OliveOil", "OSULeaf", "SonyAIBORobotSurface1", "SonyAIBORobotSurface2", "StarLightCurves", "SwedishLeaf", "Symbols", "SyntheticControl", "Trace", "TwoLeadECG", "TwoPatterns", "UWaveGestureLibraryX", "UWaveGestureLibraryY", "UWaveGestureLibraryZ", "Wafer", "WordSynonyms", "Yoga"}; //</editor-fold> public static void recreatePublishedResults(String datasetPath, String resultsPath) throws Exception{ OutFile of=new OutFile(resultsPath+"RecreateTSBF.csv"); System.out.println("problem,published,recreated"); double meanDiff=0; int publishedBetter=0; for(int i=0;i<problems.length;i++){ Instances train = DatasetLoading.loadDataNullable(datasetPath+problems[i]+"/"+problems[i]+"_TRAIN"); Instances test = DatasetLoading.loadDataNullable(datasetPath+problems[i]+"/"+problems[i]+"_TEST"); TSBF tsbf=new TSBF(); 
tsbf.searchParameters(true); double a=ClassifierTools.singleTrainTestSplitAccuracy(tsbf, train, test); System.out.println(problems[i]+","+ reportedErrorResults[i]+","+(1-a)); of.writeLine(problems[i]+","+ reportedErrorResults[i]+","+(1-a)); meanDiff+= reportedErrorResults[i]-(1-a); if(reportedErrorResults[i]<(1-a)) publishedBetter++; } System.out.println("Mean diff ="+meanDiff/problems.length+" Published better ="+publishedBetter); of.writeLine(",,,,Mean diff ="+meanDiff/problems.length+" Published better ="+publishedBetter); } public TSBF() { super(CANNOT_ESTIMATE_OWN_PERFORMANCE); } @Override public TechnicalInformation getTechnicalInformation() { TechnicalInformation result; result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE); result.setValue(TechnicalInformation.Field.AUTHOR, "M. Baydogan, G. Runger and E. Tuv"); result.setValue(TechnicalInformation.Field.YEAR, "2013"); result.setValue(TechnicalInformation.Field.TITLE, "A Bag-Of-Features Framework to Classify Time Series"); result.setValue(TechnicalInformation.Field.JOURNAL, "IEEE Trans. 
PAMI"); result.setValue(TechnicalInformation.Field.VOLUME, "35"); result.setValue(TechnicalInformation.Field.NUMBER, "11"); result.setValue(TechnicalInformation.Field.PAGES, "2796-2802"); return result; } int minIntervalLength=5; int numBins=10; //bin size for codebook generation int numReps=1; double oobError; static double[] zLevels={0.1,0.25,0.5,0.75}; //minimum subsequence length factors (z) to be evaluated double z=zLevels[0]; int folds=10; //Variables, dont need to be global, can be local to buildClassifier int seriesLength; //data specific int numIntervals; //nos intervals per sub series=(int)((zLevel*seriesLength)/minIntervalLength); int numSubSeries; //nos subseries = (int)(seriesLength/minIntervalLength)-numIntervals; int minSubLength; // min series length = zlevel*seriesLength int numOfTreeStep=50; //step size for tree building process boolean paramSearch=true; double trainAcc; boolean stepWise=false;// This will add trees incrementally rather than build all at once. int[][] subSeries; int[][][] intervals; RandomForest subseriesRandomForest; RandomForest finalRandForest; Instances first; static double TOLERANCE =0.05; public void searchParameters(boolean b){ paramSearch=b; } public void setZLevel(double zLevel){ z=zLevel;} public void setParametersFromIndex(int x){z=zLevels[x-1];} public String getParas(){ return z+"";} public double getAcc(){ return trainAcc;} @Override public void setParamSearch(boolean b){paramSearch =b;} Instances formatIntervalInstances(Instances data){ //3 stats for whole subseries, start and end point, 3 stats per interval int numFeatures=(3+2+3*numIntervals); //Set up instances size and format. 
FastVector atts=new FastVector(); String name; for(int j=0;j<numFeatures;j++){ name = "F"+j; atts.addElement(new Attribute(name)); } //Get the class values as a fast vector Attribute target =data.attribute(data.classIndex()); FastVector vals=new FastVector(target.numValues()); for(int j=0;j<target.numValues();j++) vals.addElement(target.value(j)); atts.addElement(new Attribute(data.attribute(data.classIndex()).name(),vals)); //create blank instances with the correct class value Instances result = new Instances("SubsequenceIntervals",atts,data.numInstances()); result.setClassIndex(result.numAttributes()-1); for(int i=0;i<data.numInstances();i++){ double cval=data.instance(i).classValue(); for(int j=0;j<numSubSeries;j++){ DenseInstance in=new DenseInstance(result.numAttributes()); in.setValue(result.numAttributes()-1,cval); result.add(in); } } return result; } Instances formatProbabilityInstances(double[][] probs,Instances data){ int numClasses=data.numClasses(); int numFeatures=(numClasses-1)*numSubSeries; //Set up instances size and format. 
FastVector atts=new FastVector(); String name; for(int j=0;j<numFeatures;j++){ name = "ProbFeature"+j; atts.addElement(new Attribute(name)); } //Get the class values as a fast vector Attribute target =data.attribute(data.classIndex()); FastVector vals=new FastVector(target.numValues()); for(int j=0;j<target.numValues();j++) vals.addElement(target.value(j)); atts.addElement(new Attribute(data.attribute(data.classIndex()).name(),vals)); //create blank instances with the correct class value Instances result = new Instances("SubsequenceIntervals",atts,data.numInstances()); result.setClassIndex(result.numAttributes()-1); for(int i=0;i<data.numInstances();i++){ double cval=data.instance(i).classValue(); DenseInstance in=new DenseInstance(result.numAttributes()); in.setValue(result.numAttributes()-1,cval); int pos=0; for(int j=0;j<numSubSeries;j++){ for(int k=0;k<numClasses-1;k++) in.setValue(pos++, probs[j+numSubSeries*i][k]); } result.add(in); } return result; } //count indexes i: instance, j = class count, k= bin Instances formatFrequencyBinInstances(int[][][] counts,double[][] classProbs,Instances data){ int numClasses=data.numClasses(); int numFeatures=numBins*(numClasses-1)+numClasses; //Set up instances size and format. 
FastVector atts=new FastVector(); String name; for(int j=0;j<numFeatures;j++){ name = "FreqBinFeature"+j; atts.addElement(new Attribute(name)); } //Get the class values as a fast vector Attribute target =data.attribute(data.classIndex()); FastVector vals=new FastVector(target.numValues()); for(int j=0;j<target.numValues();j++) vals.addElement(target.value(j)); atts.addElement(new Attribute(data.attribute(data.classIndex()).name(),vals)); //create blank instances with the correct class value Instances result = new Instances("HistogramCounts",atts,data.numInstances()); result.setClassIndex(result.numAttributes()-1); for(int i=0;i<data.numInstances();i++){ double cval=data.instance(i).classValue(); DenseInstance in=new DenseInstance(result.numAttributes()); in.setValue(result.numAttributes()-1,cval); int pos=0; //Set values here for(int j=0;j<numClasses-1;j++){ for(int k=0;k<numBins;k++) in.setValue(pos++,counts[i][j][k]); } // for(int j=0;j<numClasses;j++) in.setValue(pos++,classProbs[i][j]); result.add(in); } return result; } Classifier findIncrementalClassifier(Instances data) throws Exception{ /* iter=1 while(iter<20&&cur_OOBerror<(1-tolerance)*prev_OOBerror){ prev_OOBerror=cur_OOBerror RFsubmid <- foreach(ntree=rep(noftree_step/nofthreads, nofthreads), .combine=combine, .packages='randomForest') %dopar% randomForest(x$trainsub,x$classtr,ntree=ntree) RFsub <- combine(RFsub, RFsubmid) cur_OOBerror=1-sum(predict(RFsub,type='response')==x$classtr)/nrow(x$trainsub) iter=iter+1 } */ int iteration=1; int nofTreeStep=50; double curOOBerror=0; double prevOOBerror=1; //Build first model TunedRandomForest rf= new TunedRandomForest(); rf.setNumTrees(nofTreeStep); rf.buildClassifier(data); curOOBerror=rf.measureOutOfBagError(); while(iteration<20 && curOOBerror< (1-TOLERANCE)*prevOOBerror){ //Add in nofTreeStep models rf.addTrees(nofTreeStep,data); //Find new OOB error. This is probably not updated? 
double a=rf.measureOutOfBagError(); prevOOBerror=curOOBerror; curOOBerror=a; } return rf; } private void cloneToThis(TSBF other){ numBins=other.numBins; //bin size for codebook generation oobError=other.oobError; z=other.z; //Variables, dont need to be global, can be local to buildClassifier folds=other.folds; seriesLength=other.seriesLength; //data specific numIntervals=other.numIntervals; //nos intervals per sub series=(int)((zLevel*seriesLength)/minIntervalLength); numSubSeries=other.numSubSeries; //nos subseries = (int)(seriesLength/minIntervalLength)-numIntervals; minSubLength=other.minSubLength; // min series length = zlevel*seriesLength numOfTreeStep=other.numOfTreeStep; //step size for tree building process paramSearch=other.paramSearch; trainAcc=other.trainAcc; stepWise=other.stepWise; subSeries=other.subSeries; intervals=other.intervals; subseriesRandomForest=other.subseriesRandomForest; finalRandForest=other.finalRandForest; first=other.first; } @Override public void buildClassifier(Instances data) throws Exception { // can classifier handle the data? getCapabilities().testWithFail(data); long t1=System.nanoTime(); if(numReps>1){ double bestOOB=1; TSBF bestRun=this; int r=0; for(int i=0;i<numReps;i++){ TSBF reps=new TSBF(); if(seedClassifier) reps.setSeed(seed*33); reps.numReps=1; reps.paramSearch=true; reps.buildClassifier(data); System.out.println("REP "+i+" ACC = "+reps.trainAcc); if(bestOOB>(1-reps.trainAcc)){ bestOOB=(1-reps.trainAcc); bestRun=reps; r=i; } reps=null; System.gc(); //Try reduce the memory footprint! 
} cloneToThis(bestRun); System.out.println("BEST TRAIN ACC="+trainAcc+" REP ="+r); } else{ first=new Instances(data,0); double bestZ=0; double maxAcc=0; RandomForest bestFinalModel=null; RandomForest bestSubseriesModel=null; int[][] bestSubSeries=null; int[][][] bestIntervals=null; seriesLength=data.numAttributes()-1; double [] paras; if(paramSearch) paras=zLevels; else{ paras=new double[1]; paras[0]=z; } for(double zLevel:paras){ // System.out.println(" ZLEVEL ="+zLevel+" paramSearch ="+paramSearch); numIntervals=(int)((zLevel*seriesLength)/minIntervalLength); if(numIntervals==0) //Skip this z setting? numIntervals=1; minSubLength=minIntervalLength*numIntervals;// Same as (int)((zLevel*seriesLength)) but clearer numSubSeries= (int)(seriesLength/minIntervalLength)-numIntervals; //r -d in the paper, very large! if(minSubLength<minIntervalLength) //if minimum subsequence length is smaller than wmin skip this z value continue; //1. Select subsequences and intervals selectSubsequencesAndIntervals(); //2. Build first transform Instances features=formatIntervalInstances(data); buildFirstClassificationProblem(data,features); //3. 
Generate class probability estimate for each new instance with a random forest through cross validation // CHANGE THIS TO MATCH PAPER ALGORITHM // subseriesRandomForest=findIncrementalClassifier(features); /* int iteration=1; double currentOOBError; double prevOOBError=1; double TOLERANCE=0.05; while(iter<20&&cur_OOBerror<(1-tolerance)*prev_OOBerror){ */ double[][] probs; if(stepWise){ subseriesRandomForest = new TunedRandomForest(); subseriesRandomForest.buildClassifier(features); double currentOOBError=subseriesRandomForest.measureOutOfBagError(); double prevOOBError=1; int iter=1; while(iter<20&&currentOOBError<(1-TOLERANCE)*prevOOBError){ //This implementation is faithful to the original prevOOBError=currentOOBError; ((TunedRandomForest)subseriesRandomForest).addTrees(numOfTreeStep, features); currentOOBError=subseriesRandomForest.measureOutOfBagError(); } probs=((TunedRandomForest)subseriesRandomForest).getOBProbabilities(); } else{ subseriesRandomForest=new RandomForest(); subseriesRandomForest.setNumTrees(500); probs=ClassifierTools.crossValidate(subseriesRandomForest,features,folds); subseriesRandomForest.buildClassifier(features); } //4. Discretise probabilities into equal width bins, form counts for each instance //then concatinate class probabilies to form new set of instances int[][][] counts = new int[data.numInstances()][data.numClasses()-1][numBins]; double[][] classProbs = new double[data.numInstances()][data.numClasses()]; countsFormat(counts,classProbs,probs,data.numClasses(),data.numInstances()); Instances second= formatFrequencyBinInstances(counts,classProbs,data); //5. Train a final classifier (random forest). 
Paper results generated with rand forest double acc=0; if(stepWise){ finalRandForest = new TunedRandomForest(); finalRandForest.buildClassifier(second); double currentOOBError=finalRandForest.measureOutOfBagError(); double prevOOBError=1; int iter=1; while(iter<20&&currentOOBError<(1-TOLERANCE)*prevOOBError){ //The way he has coded it will add in too many trees! prevOOBError=currentOOBError; ((TunedRandomForest)finalRandForest).addTrees(numOfTreeStep, second); currentOOBError=finalRandForest.measureOutOfBagError(); } acc=1-currentOOBError; } else{ finalRandForest=new RandomForest(); finalRandForest.setNumTrees(500); //6. Form a CV estimate of accuracy to choose z value int folds=10; if(data.numInstances()<folds) folds=data.numInstances(); acc=ClassifierTools.stratifiedCrossValidation(data, finalRandForest,folds,rand.nextInt()); } if(acc>maxAcc){ if(!stepWise) finalRandForest.buildClassifier(second); bestSubseriesModel=subseriesRandomForest; bestFinalModel= finalRandForest; maxAcc=acc; bestZ=zLevel; bestIntervals=intervals; bestSubSeries=subSeries; } } //Reset to the best model // System.out.println("Best acc="+maxAcc+" for level "+bestZ+" has "+finalRandForest.getNumTrees()+" trees"); numIntervals=(int)((bestZ*seriesLength)/minIntervalLength); if(numIntervals==0) numIntervals=1; minSubLength=minIntervalLength*numIntervals;// Same as (int)((zLevel*seriesLength)) but clearer numSubSeries= (int)(seriesLength/minIntervalLength)-numIntervals; //r -d in the paper, very large! 
intervals=bestIntervals; subSeries=bestSubSeries; subseriesRandomForest=bestSubseriesModel; finalRandForest=bestFinalModel; trainAcc=maxAcc; } } public void countsFormat(int[][][] counts,double[][] classProbs,double[][] probs,int numClasses, int numInstances){ for(int i=0;i<numInstances;i++){ for(int j=0;j<numSubSeries;j++){ for(int k=0;k<numClasses-1;k++){ //Will need to check for special case 1.0 prob int bin; if(probs[i*numSubSeries+j][k]==1) bin=numBins-1; else bin=(int)(numBins*probs[i*numSubSeries+j][k]); counts[i][k][bin]++; } } } //The relative frequencies of the predicted classes over each series are also concatenated in the codebook for(int i=0;i<numInstances;i++){ for(int j=0;j<numSubSeries;j++){ int predicted=0; for(int k=1;k<numClasses;k++){ if(probs[i*numSubSeries+j][predicted]<probs[i*numSubSeries+j][k]) predicted=k; } classProbs[i][predicted]++; } for(int k=0;k<numClasses;k++) classProbs[i][k]/=numSubSeries; } } @Override public double[] distributionForInstance(Instance ins) throws Exception{ //Buid first transform first.add(ins); Instances features=formatIntervalInstances(first); buildFirstClassificationProblem(first,features); //Classify subsequences double[][] probs=new double[features.numInstances()][]; for (int i = 0; i < probs.length; i++) { probs[i]=subseriesRandomForest.distributionForInstance(features.instance(i)); } //Get histograms int[][][] counts = new int[1][ins.numClasses()-1][numBins]; double[][] classProbs = new double[1][ins.numClasses()]; countsFormat(counts,classProbs,probs,ins.numClasses(),1); Instances second= formatFrequencyBinInstances(counts,classProbs,first); //Construct new instance first.remove(0); //Classify that instance return finalRandForest.distributionForInstance(second.get(0)); } private void selectSubsequencesAndIntervals(){ // System.out.println("total series length ="+seriesLength+" min subseries length ="+minIntervalLength+" z value = "+zLevel+" num subs ="+numSubSeries+" num intervals ="+numIntervals); 
subSeries =new int[numSubSeries][2]; intervals =new int[numSubSeries][numIntervals][2]; //Find series and intervals ran for(int i=0;i<numSubSeries;i++){ //Generate subsequences of min length wmin. These are the same for all series subSeries[i][0]=rand.nextInt(seriesLength-minSubLength); subSeries[i][1]=rand.nextInt(seriesLength-subSeries[i][0]-minSubLength)+subSeries[i][0]+minSubLength; int subSeriesLength=subSeries[i][1]-subSeries[i][0]+1; // System.out.println(" SUBSERIES :"+i+"=["+subSeries[i][0]+","+subSeries[i][1]+"]"+" length ="+subSeriesLength); //Generate the interval length for the current series. // st_sub=floor(runif(0,*lenseries-*minsublength)); // max_intlen=((*lenseries)-st_sub)/(*nofint); // cur_intlen=floor(runif(min_intlen,max_intlen)); int maxIntLength=subSeriesLength/numIntervals; if(maxIntLength<minIntervalLength){ System.out.println("MAX INT LENGTH < minIntervalLength subseries length ="+subSeriesLength+" num intervals ="+numIntervals+" max int length="+maxIntLength); System.exit(0); } // int minIntLength=minIntervalLength; // System.out.println("Max int length="+maxIntLength+" Min int length="+minIntLength); int currentIntLength=minIntervalLength; if(maxIntLength>minIntervalLength) currentIntLength=rand.nextInt(maxIntLength-minIntervalLength+1)+minIntervalLength; // System.out.println(" current length ="+currentIntLength); //Generate intervals. The length of the intervals is randomised, but then that should effect the number?? // Seems to ignore the end bit not divisible by length // What happens if this exceed the length of the subseries? 
for(int j=0;j<numIntervals;j++){ intervals[i][j][0]=subSeries[i][0]+j*currentIntLength; intervals[i][j][1]=subSeries[i][0]+(j+1)*currentIntLength-1; if(intervals[i][j][1]>subSeries[i][1]){ System.out.println("\t INTERVAL "+j+"["+intervals[i][j][0]+","+intervals[i][j][1]+"] EXCEEDS SUBSERIES "+subSeries[i][0]+","+subSeries[i][1]+"]"); System.out.println("\t\t Max interval length ="+maxIntLength+" min interval length ="+minIntervalLength); } } } } private void buildFirstClassificationProblem(Instances data, Instances features){ int instPos=0; // System.out.println(" Number of subsequences ="+numSubSeries+"number of intervals per subsequence ="+numIntervals+" number of cases ="+data.numInstances()+" new number of cases ="+features.numInstances()); for(int k=0;k<data.numInstances();k++){// Instance ins:data){ double[] series=data.instance(k).toDoubleArray(); // if(k==0) // System.out.println("INSTANCE 0="+data.instance(0)); // System.out.println(" Series length ="+(series.length-1)); for(int i=0;i<numSubSeries;i++){ int pos=0; // if(k==0) // System.out.println(" Setting subseries "+i+" ["+subSeries[i][0]+","+subSeries[i][1]+"]"); //Get whole subseries instance subseries features Instance newIns=features.get(instPos++); FeatureSet f=new FeatureSet(); f.setFeatures(series,subSeries[i][0], subSeries[i][1]); // if(k==0) // System.out.println("New num features ="+newIns.numAttributes()+" Whole subsequence features ="+f); newIns.setValue(pos++,f.mean); newIns.setValue(pos++,f.stDev); newIns.setValue(pos++,f.slope); //Add start and end point newIns.setValue(pos++,subSeries[i][0]); newIns.setValue(pos++,subSeries[i][1]); //Get interval features for(int j=0;j<numIntervals;j++){ // if(k==0) // System.out.println(" Setting interval "+j+" ["+intervals[i][j][0]+","+intervals[i][j][1]+"]"); f.setFeatures(series, intervals[i][j][0],intervals[i][j][1]); newIns.setValue(pos++,f.mean); newIns.setValue(pos++,f.stDev); newIns.setValue(pos++,f.slope); } } } 
if(InstanceTools.hasMissing(features)){ System.out.println(" MISSING A VALUE"); for(int i=0;i<features.numInstances();i++){ if(features.instance(i).hasMissingValue()){ System.out.println("Instance ="+features.instance(i)+" from original instance "+i/numSubSeries+" ::"+data.instance(i/numSubSeries)); System.out.println("\tSubsequence = ["+subSeries[i%numSubSeries][0]+","+subSeries[i%numSubSeries][1]+"]"); for(int j=0;j<numIntervals;j++){ System.out.println("\t\t interval "+j+" ["+intervals[i%numSubSeries][j][0]+","+intervals[i%numSubSeries][j][1]+"]"); } } } // System.out.println(" new data ="+features); System.exit(0); } } public static void testBinMaker(){ double[][] probs={{0.05,0.83,0.12},{0.25,0.73,0.02},{0.25,0.13,0.62},{0.1,0.1,0.8},{1,0,0},{0.5,0.2,0.3}}; //4. Discretise probabilities into equal width bins, form counts for each instance int numClasses=3; int numBins=10; int numInstances=2; int numSubSeries=3; int[][][] counts = new int[numInstances][numClasses-1][numBins]; for(int i=0;i<numInstances;i++){ for(int j=0;j<numSubSeries;j++){ for(int k=0;k<numClasses-1;k++){ //Will need to check for special case 1.0 prob int bin; if(probs[i*numSubSeries+j][k]==1) bin=numBins-1; else bin=(int)(numBins*probs[i*numSubSeries+j][k]); counts[i][k][bin]++; } } } //The relative frequencies of the predicted classes over each series are also concatenated in the codebook double[][] classProbs = new double[numInstances][numClasses]; for(int i=0;i<numInstances;i++){ for(int j=0;j<numSubSeries;j++){ //Find predicted class int predicted=0; for(int k=1;k<numClasses;k++){ if(probs[i*numSubSeries+j][predicted]<probs[i*numSubSeries+j][k]) predicted=k; } System.out.println(" instance "+i+" subseries "+j+" predicted ="+predicted); classProbs[i][predicted]++; } for(int k=0;k<numClasses;k++) classProbs[i][k]/=numSubSeries; } for(int i=0;i<numInstances;i++){ System.out.println("COUNTS INSTANCE "+i); for(int k=0;k<numClasses-1;k++){ System.out.print(" CLASS = "+k+" :::: "); for(int 
j=0;j<numBins;j++){ System.out.print(counts[i][k][j]+","); } } System.out.print(" CLASS PROBS ::"); for(int j=0;j<numClasses;j++) System.out.print(classProbs[i][j]+","); System.out.print("\n"); } // System.out.print(probs); // Instances second= formatFrequencyBinInstances(counts,classProbs); } public static void main(String[] args) throws Exception { String dataDir = "C:/users/ajb/dropbox/Code2019/tsml/src/main/java/experiments/data/tsc/"; String datasetName = "Chinatown"; Instances train = DatasetLoading.loadDataNullable(dataDir+datasetName+"/"+datasetName+"_TRAIN"); Instances test = DatasetLoading.loadDataNullable(dataDir+datasetName+"/"+datasetName+"_TEST"); TSBF tsbf = new TSBF(); System.out.println("Example usage of TSBF: this is the code used in the paper"); System.out.println(tsbf.getTechnicalInformation().toString()); System.out.println("Evaluated on "+datasetName+" Building ...."); tsbf.buildClassifier(train); System.out.println("Classifier built: Parameter info ="+tsbf.getParameters()); double a=ClassifierTools.accuracy(test, tsbf); System.out.println("Test acc for "+datasetName+" = "+a); } }
33,392
41.10971
214
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/interval_based/TSF.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.interval_based; import evaluation.evaluators.CrossValidationEvaluator; import evaluation.storage.ClassifierResults; import evaluation.tuning.ParameterSpace; import experiments.data.DatasetLoading; import fileIO.OutFile; import machine_learning.classifiers.ContinuousIntervalTree; import tsml.classifiers.*; import tsml.data_containers.*; import tsml.data_containers.utilities.Converter; import tsml.data_containers.utilities.TimeSeriesSummaryStatistics; import tsml.transformers.Resizer; import utilities.ClassifierTools; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.core.*; import java.io.File; import java.io.BufferedReader; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; import java.util.concurrent.TimeUnit; import java.util.function.Function; /** * <!-- globalinfo-start --> * Implementation of Time Series Forest * This classifier is Tunable, Contractable, Checkpointable and can estimate performance from the train data internally. 
* * Time Series Forest (TimeSeriesForest) Deng 2013: * buildClassifier * Overview: Input n series length m * for each tree * sample sqrt(m) intervals * find three features on each interval: mean, standard deviation and slope * concatenate to new feature set * build tree on new feature set * classifyInstance * ensemble the trees with majority vote * * This implementation may deviate from the original, as it is using the same * structure as the weka random forest. In the paper the splitting criteria has a * tiny refinement. Ties in entropy gain are split with a further stat called margin * that measures the distance of the split point to the closest data. * So if the split value for feature * f=f_1,...f_n is v the margin is defined as * margin= min{ |f_i-v| } * for simplicity of implementation, and for the fact when we did try it and it made * no difference, we have not used this. Note also, the original R implementation * may do some sampling of cases * * <!-- globalinfo-end --> * <!-- technical-bibtex-start --> * Bibtex * <pre> * article{deng13forest, * author = {H. Deng and G. Runger and E. Tuv and M. Vladimir}, * title = {A time series forest for classification and feature extraction}, * journal = {Information Sciences}, * volume = {239}, * year = {2013} * } * </pre> * <!-- technical-bibtex-end --> * <!-- options-start --> * Valid options are: * * <pre> -T * set number of trees in the ensemble.</pre> * * <pre> -I * set number of intervals to calculate.</pre> * <!-- options-end --> * * version1.0 author Tony Bagnall * date 7/10/15 Tony Bagnall * update 14/2/19 Tony Bagnall * A few changes made to enable testing refinements. * 1. general baseClassifier rather than a hard coded RandomTree. We tested a few * alternatives, they did not improve things * 2. Added setOptions to allow parameter tuning. 
Tuning on parameters: #trees, #features * update2 13/9/19: Adjust to allow three methods for estimating test accuracy Tony Bagnall * version2.0 13/03/20 Matthew Middlehurst. contractable, checkpointable and tuneable, * This classifier is tested and deemed stable on 10/3/2020. It is unlikely to change again * results for this classifier on 112 UCR data sets can be found at * www.timeseriesclassification.com/results/ResultsByClassifier/TSF.csv. The first column of results are on the default * train/test split. The others are found through stratified resampling of the combined train/test * individual results on each fold are * timeseriesclassification.com/results/ResultsByClassifier/TSF/Predictions * update 1/7/2020: Tony Bagnall. Sort out correct recording of timing, and tidy up comments. The storage option for * either CV or OOB */ public class TSF extends EnhancedAbstractClassifier implements TechnicalInformationHandler, TrainTimeContractable, Checkpointable, Tuneable, Visualisable, TSCapabilitiesHandler { protected static final long serialVersionUID = 32554L; //Static defaults private final static int DEFAULT_NUM_CLASSIFIERS = 500; //<editor-fold defaultstate="collapsed" desc="results reported in Info Sciences paper (errors)"> static double[] reportedErrorResults = { 0.2659, 0.2302, 0.2333, 0.0256, 0.2537, 0.0391, 0.0357, 0.2897, 0.2, 0.2436, 0.049, 0.08, 0.0557, 0.2325, 0.0227, 0.101, 0.1543, 0.0467, 0.552, 0.6818, 0.0301, 0.1803, 0.2603, 0.0448, 0.2237, 0.119, 0.0987, 0.0865, 0.0667, 0.4339, 0.233, 0.1868, 0.0357, 0.1056, 0.1116, 0.0267, 0.02, 0.1177, 0.0543, 0.2102, 0.2876, 0.2624, 0.0054, 0.3793, 0.1513 }; //<editor-fold defaultstate="collapsed" desc="problems used in Info Sciences paper"> static String[] problems = { "FiftyWords", "Adiac", "Beef", "CBF", "ChlorineConcentration", "CinCECGtorso", "Coffee", "CricketX", "CricketY", "CricketZ", "DiatomSizeReduction", "ECG", "ECGFiveDays", "FaceAll", "FaceFour", "FacesUCR", "Fish", "GunPoint", "Haptics", 
"InlineSkate", "ItalyPowerDemand", "Lightning2", "Lightning7", "Mallat", "MedicalImages", "MoteStrain", "NonInvasiveFetalECGThorax1", "NonInvasiveFetalECGThorax2", "OliveOil", "OSULeaf", "SonyAIBORobotSurface1", "SonyAIBORobot Surface2", "StarLightCurves", "SwedishLeaf", "Symbols", "Synthetic Control", "Trace", "TwoLeadECG", "TwoPatterns", "UWaveGestureLibraryX", "UWaveGestureLibraryY", "UWaveGestureLibraryZ", "Wafer", "WordsSynonyms", "Yoga" }; /** * Primary parameters potentially tunable */ private int numClassifiers = DEFAULT_NUM_CLASSIFIERS; /** * numIntervalsFinder sets numIntervals in buildClassifier. */ private int numIntervals = 0; private transient Function<Integer, Integer> numIntervalsFinder; /** * Secondary parameter, mainly there to avoid single item intervals, * which have no slope or std dev */ private int minIntervalLength = 3; /** * Ensemble members of base classifier, default to random forest RandomTree */ private ArrayList<Classifier> trees; private Classifier classifier = new ContinuousIntervalTree(); /** * for each classifier [i] interval j starts at intervals[i][j][0] and * ends at intervals[i][j][1] */ private ArrayList<int[][]> intervals; /** * Holding variable for test classification in order to retain the header info */ private Instances testHolder; /** * voteEnsemble determines whether to aggregate classifications or * probabilities when predicting */ private boolean voteEnsemble = true; /** * Resizer to transform data if unequal length */ private Resizer resizer; /** * Flags and data required if Bagging **/ private boolean bagging = false; //Use if we want an OOB estimate private ArrayList<boolean[]> inBag; private int[] oobCounts; private double[][] trainDistributions; /**** Checkpointing variables *****/ private boolean checkpoint = false; private String checkpointPath = null; private long checkpointTime = 0; //Time between checkpoints in nanosecs private long lastCheckpointTime = 0; //Time since last checkpoint in nanos. 
private long checkpointTimeElapsed = 0; private boolean trainTimeContract = false; transient private long trainContractTimeNanos = 0; transient private long finalBuildtrainContractTimeNanos = 0; private int seriesLength; private String visSavePath; public TSF() { //TSF Has the capability to form train estimates super(CAN_ESTIMATE_OWN_PERFORMANCE); } public TSF(int s) { super(CAN_ESTIMATE_OWN_PERFORMANCE); setSeed(s); } /** * @param c a base classifier constructed elsewhere and cloned into ensemble */ public void setBaseClassifier(Classifier c) { classifier = c; } public void setBagging(boolean b) { bagging = b; } /** * ok, two methods are a bit pointless, experimenting with ensemble method * * @param b boolean to set vote ensemble */ public void setVoteEnsemble(boolean b) { voteEnsemble = b; } public void setProbabilityEnsemble(boolean b) { voteEnsemble = !b; } //</editor-fold> /** * Perhaps make this coherent with setOptions(String[] ar)? * * @return String written to results files */ @Override public String getParameters() { String result = super.getParameters() + ",numTrees," + trees.size() + ",numIntervals," + numIntervals + ",voting," + voteEnsemble + ",BaseClassifier," + classifier.getClass().getSimpleName() + ",Bagging," + bagging; if (trainTimeContract) result += ",trainContractTimeNanos," + trainContractTimeNanos; else result += ",NoContract"; //Any other contract information here result += ",EstimateOwnPerformance," + getEstimateOwnPerformance(); if (getEstimateOwnPerformance()) result += ",EstimateMethod," + trainEstimateMethod; return result; } //</editor-fold> public void setNumTrees(int t) { numClassifiers = t; } /** * paper defining TSF * * @return TechnicalInformation */ @Override public TechnicalInformation getTechnicalInformation() { TechnicalInformation result; result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE); result.setValue(TechnicalInformation.Field.AUTHOR, "H. Deng, G. Runger, E. Tuv and M. 
Vladimir"); result.setValue(TechnicalInformation.Field.YEAR, "2013"); result.setValue(TechnicalInformation.Field.TITLE, "A time series forest for classification and feature extraction"); result.setValue(TechnicalInformation.Field.JOURNAL, "Information Sciences"); result.setValue(TechnicalInformation.Field.VOLUME, "239"); result.setValue(TechnicalInformation.Field.PAGES, "142-153"); return result; } /** * buildClassifier wrapper for TimeSeriesInstances. * * @param data TimeSeriesInstances train data. * @author c-eg */ @Override public void buildClassifier(TimeSeriesInstances data) throws Exception { // can classifier handle the data? boolean canHandle = getTSCapabilities().test(data); if (!canHandle) throw new Exception("TSF cannot handle this type of data"); if (!data.isEqualLength()) { // pad with 0s resizer = new Resizer(new Resizer.MaxResizeMetric(), new Resizer.FlatPadMetric(0)); TimeSeriesInstances padded = resizer.fitTransform(data); data = padded; } // set the classifier data setTSTrainData(data); long startTime = System.nanoTime(); File file = new File(checkpointPath + "TSF" + seed + ".ser"); // Set up checkpointing (saving to file) / if checkpoint and serialised file exist, load file if (checkpoint && file.exists()) { // path checkpoint files will be saved to printLineDebug("Loading from checkpoint file"); loadFromFile(checkpointPath + "TSF" + seed + ".ser"); } // otherwise initialise variables else { numClasses = data.getClassLabels().length; seriesLength = data.getMaxLength(); if (numIntervalsFinder == null) { numIntervals = (int) Math.sqrt(seriesLength); } else { numIntervals = numIntervalsFinder.apply(seriesLength); } printDebug(String.format("Building TSF: number of intervals = %d number of trees = %d\n", numIntervals, numClassifiers)); trees = new ArrayList<>(numClassifiers); // Set up for train estimates if (getEstimateOwnPerformance()) { trainDistributions = new double[data.numInstances()][data.numClasses()]; } // Set up for bagging if (bagging) { 
inBag = new ArrayList<>(); oobCounts = new int[data.numInstances()]; printLineDebug("TSF is using Bagging"); } intervals = new ArrayList<>(); lastCheckpointTime = startTime; } finalBuildtrainContractTimeNanos = trainContractTimeNanos; // if Contracted and estimating own performance, // distribute the contract evenly between estimation and final build if (trainTimeContract && !bagging && getEstimateOwnPerformance()) { finalBuildtrainContractTimeNanos /= 2; printLineDebug(String.format(" Setting final contract time to %s nanos", finalBuildtrainContractTimeNanos)); } // create 2d double array to store mean, standard deviation and slope of each interval double[][] transformedData = new double[data.numInstances()][numIntervals * 3]; int classifiersBuilt = trees.size(); /** MAIN BUILD LOOP * For each base classifier * generate random intervals * do the transforms * build the classifier * */ while (withinTrainContract(startTime) && (classifiersBuilt < numClassifiers)) { if (classifiersBuilt % 100 == 0) printLineDebug("\t\t\t\t\tBuilding TSF tree " + classifiersBuilt + " time taken = " + (System.nanoTime() - startTime) + " contract =" + finalBuildtrainContractTimeNanos + " nanos"); /* * 1. Select random intervals for current tree */ int[][] interval = new int[numIntervals][2]; // Start and end if (data.numInstances() < minIntervalLength) minIntervalLength = data.numInstances(); for (int i = 0; i < numIntervals; i++) { if (data.getMaxLength() - minIntervalLength > 0) interval[i][0] = rand.nextInt(data.getMaxLength() - minIntervalLength); // Start point int length = rand.nextInt(data.getMaxLength() - interval[i][0]); // Min length 3 if (length < minIntervalLength) length = minIntervalLength; interval[i][1] = interval[i][0] + length; } /* * 2. 
Generate and store attributes */ for (int i = 0; i < numIntervals; i++) { // create slice of all series from intervals double[][][] slice = data.getVSliceArray(interval[i][0], interval[i][1]); for (int j = 0; j < data.numInstances(); j++) { // get sliced series double[] slicedSeries = slice[j][0]; // get stats about data double mean = TimeSeriesSummaryStatistics.mean(slicedSeries); double variance = TimeSeriesSummaryStatistics.variance(slicedSeries, mean); double std = Math.sqrt(variance); double sum = TimeSeriesSummaryStatistics.sum(slicedSeries); double sumSq = TimeSeriesSummaryStatistics.sumSq(slicedSeries); double slope = TimeSeriesSummaryStatistics.slope(slicedSeries, sum, sumSq, std); // set mean, standard deviation and slope transformedData[j][i * 3] = mean; transformedData[j][i * 3 + 1] = std; transformedData[j][i * 3 + 2] = slope; } } /* * 3. Create and build tree using all the features */ Classifier tree = AbstractClassifier.makeCopy(classifier); if (seedClassifier && tree instanceof Randomizable) ((Randomizable) tree).setSeed(seed * (classifiersBuilt + 1)); if (bagging) { // convert data to Instances double[][][] tempSeries = new double[data.numInstances()][1][numIntervals * 3]; int i = 0; for (double[] t : transformedData) { tempSeries[i++][0] = t; } TimeSeriesInstances temp = new TimeSeriesInstances(tempSeries, data.getClassIndexes(), data.getClassLabels()); Instances converted = Converter.toArff(temp); long t1 = System.nanoTime(); boolean[] bag = new boolean[converted.numInstances()]; Instances bagData = converted.resampleWithWeights(rand, bag); tree.buildClassifier(bagData); inBag.add(bag); if (getEstimateOwnPerformance()) { for (int j = 0; j < converted.numInstances(); j++) { if (bag[j]) continue; double[] newProbs = tree.distributionForInstance(converted.instance(j)); oobCounts[j]++; for (int k = 0; k < newProbs.length; k++) trainDistributions[j][k] += newProbs[k]; } } long t2 = System.nanoTime(); if (getEstimateOwnPerformance()) 
trainResults.setErrorEstimateTime(t2 - t1 + trainResults.getErrorEstimateTime()); } else { double[][][] tempSeries = new double[data.numInstances()][1][numIntervals * 3]; int i = 0; for (double[] t : transformedData) { tempSeries[i++][0] = t; } TimeSeriesInstances temp = new TimeSeriesInstances(tempSeries, data.getClassIndexes(), data.getClassLabels()); tree.buildClassifier(Converter.toArff(temp)); } intervals.add(interval); trees.add(tree); classifiersBuilt++; if (checkpoint) { // Timed checkpointing if (checkpointTime > 0) { if (System.nanoTime() - lastCheckpointTime > checkpointTime) { saveToFile(checkpointPath); lastCheckpointTime = System.nanoTime(); } } // Default checkpoint every 100 trees else if (classifiersBuilt % 100 == 0 && classifiersBuilt > 0) saveToFile(checkpointPath); } } // Not enough time to build a single classifier if (classifiersBuilt == 0) throw new Exception((" ERROR in TSF, no trees built, contract time probably too low. Contract time =" + trainContractTimeNanos)); if (checkpoint) saveToFile(checkpointPath); long endTime = System.nanoTime(); trainResults.setTimeUnit(TimeUnit.NANOSECONDS); trainResults.setBuildTime(endTime - startTime - trainResults.getErrorEstimateTime()); trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime()); /* * Estimate accuracy from Train data, distributions and predictions stored in trainResults */ if (getEstimateOwnPerformance()) { long est1 = System.nanoTime(); estimateOwnPerformance(Converter.toArff(data)); long est2 = System.nanoTime(); if (bagging) trainResults.setErrorEstimateTime(est2 - est1 + trainResults.getErrorEstimateTime()); else trainResults.setErrorEstimateTime(est2 - est1); trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime() + trainResults.getErrorEstimateTime()); } trainResults.setParas(getParameters()); printLineDebug("*************** Finished TSF Build with " + classifiersBuilt + " Trees built in " + (System.nanoTime() - startTime) / 1000000000 + " Seconds 
***************"); } /** * main buildClassifier * * @param data * @throws Exception */ @Override public void buildClassifier(Instances data) throws Exception { // can classifier handle the data? getCapabilities().testWithFail(data); long startTime = System.nanoTime(); File file = new File(checkpointPath + "TSF" + seed + ".ser"); //Set up Checkpointing (saving to file)/ if checkpointing and serialised files exist load said file if (checkpoint && file.exists()) { //path checkpoint files will be saved to printLineDebug("Loading from checkpoint file"); loadFromFile(checkpointPath + "TSF" + seed + ".ser"); } else {//else initialise variables numClasses = data.numClasses(); seriesLength = data.numAttributes() - 1; if (numIntervalsFinder == null) { numIntervals = (int) Math.sqrt(seriesLength); } else { numIntervals = numIntervalsFinder.apply(data.numAttributes() - 1); } printDebug("Building TSF: number of intervals = " + numIntervals + " number of trees =" + numClassifiers + "\n"); trees = new ArrayList(numClassifiers); // Set up for train estimates if (getEstimateOwnPerformance()) { trainDistributions = new double[data.numInstances()][data.numClasses()]; } //Set up for bagging if (bagging) { inBag = new ArrayList(); oobCounts = new int[data.numInstances()]; printLineDebug("TSF is using Bagging"); } intervals = new ArrayList(); lastCheckpointTime = startTime; } finalBuildtrainContractTimeNanos = trainContractTimeNanos; //If contracted and estimating own performance, distribute the contract evenly between estimation and the final build if (trainTimeContract && !bagging && getEstimateOwnPerformance()) { finalBuildtrainContractTimeNanos /= 2; printLineDebug(" Setting final contract time to " + finalBuildtrainContractTimeNanos + " nanos"); } ArrayList<Attribute> atts = new ArrayList<>(); String name; for (int j = 0; j < numIntervals * 3; j++) { name = "F" + j; atts.add(new Attribute(name)); } //Get the class values as an array list Attribute target = 
data.attribute(data.classIndex()); ArrayList<String> vals = new ArrayList<>(target.numValues()); for (int j = 0; j < target.numValues(); j++) vals.add(target.value(j)); atts.add(new Attribute(data.attribute(data.classIndex()).name(), vals)); //create blank instances with the correct class value Instances transformedData = new Instances("Tree", atts, data.numInstances()); transformedData.setClassIndex(transformedData.numAttributes() - 1); for (int i = 0; i < data.numInstances(); i++) { DenseInstance in = new DenseInstance(transformedData.numAttributes()); in.setValue(transformedData.numAttributes() - 1, data.instance(i).classValue()); transformedData.add(in); } testHolder = new Instances(transformedData, 0); DenseInstance in = new DenseInstance(transformedData.numAttributes()); testHolder.add(in); int classifiersBuilt = trees.size(); /** MAIN BUILD LOOP * For each base classifier * generate random intervals * do the transforms * build the classifier * */ while (withinTrainContract(startTime) && (classifiersBuilt < numClassifiers)) { if (classifiersBuilt % 100 == 0) printLineDebug("\t\t\t\t\tBuilding TSF tree " + classifiersBuilt + " time taken = " + (System.nanoTime() - startTime) + " contract =" + finalBuildtrainContractTimeNanos + " nanos"); //1. Select random intervals for tree i int[][] interval = new int[numIntervals][2]; //Start and end if (data.numAttributes() - 1 < minIntervalLength) minIntervalLength = data.numAttributes() - 1; for (int j = 0; j < numIntervals; j++) { if (data.numAttributes() - 1 - minIntervalLength > 0) interval[j][0] = rand.nextInt(data.numAttributes() - 1 - minIntervalLength); //Start point int length = rand.nextInt(data.numAttributes() - 1 - interval[j][0]);//Min length 3 if (length < minIntervalLength) length = minIntervalLength; interval[j][1] = interval[j][0] + length; } //2. 
Generate and store attributes for (int j = 0; j < numIntervals; j++) { for (int k = 0; k < data.numInstances(); k++) { //extract the interval, work out the features double[] series = data.instance(k).toDoubleArray(); FeatureSet f = new FeatureSet(); f.setFeatures(series, interval[j][0], interval[j][1]); transformedData.instance(k).setValue(j * 3, f.mean); transformedData.instance(k).setValue(j * 3 + 1, f.stDev); transformedData.instance(k).setValue(j * 3 + 2, f.slope); } } //3. Create and build tree using all the features. Classifier tree = AbstractClassifier.makeCopy(classifier); if (seedClassifier && tree instanceof Randomizable) ((Randomizable) tree).setSeed(seed * (classifiersBuilt + 1)); if (bagging) { long t1 = System.nanoTime(); boolean[] bag = new boolean[transformedData.numInstances()]; Instances bagData = transformedData.resampleWithWeights(rand, bag); tree.buildClassifier(bagData); inBag.add(bag); if (getEstimateOwnPerformance()) { for (int j = 0; j < transformedData.numInstances(); j++) { if (bag[j]) continue; double[] newProbs = tree.distributionForInstance(transformedData.instance(j)); oobCounts[j]++; for (int k = 0; k < newProbs.length; k++) trainDistributions[j][k] += newProbs[k]; } } long t2 = System.nanoTime(); if (getEstimateOwnPerformance()) trainResults.setErrorEstimateTime(t2 - t1 + trainResults.getErrorEstimateTime()); } else tree.buildClassifier(transformedData); intervals.add(interval); trees.add(tree); classifiersBuilt++; if (checkpoint) { if (checkpointTime > 0) //Timed checkpointing { if (System.nanoTime() - lastCheckpointTime > checkpointTime) { saveToFile(checkpointPath); lastCheckpointTime = System.nanoTime(); } } else { //Default checkpoint every 100 trees if (classifiersBuilt % 100 == 0 && classifiersBuilt > 0) saveToFile(checkpointPath); } } } if (classifiersBuilt == 0) {//Not enough time to build a single classifier throw new Exception((" ERROR in TSF, no trees built, contract time probably too low. 
Contract time =" + trainContractTimeNanos)); } if (checkpoint) { saveToFile(checkpointPath); } long endTime = System.nanoTime(); trainResults.setTimeUnit(TimeUnit.NANOSECONDS); trainResults.setBuildTime(endTime - startTime - trainResults.getErrorEstimateTime()); trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime()); /** Estimate accuracy from Train data * distributions and predictions stored in trainResults */ if (getEstimateOwnPerformance()) { long est1 = System.nanoTime(); estimateOwnPerformance(data); long est2 = System.nanoTime(); if (bagging) trainResults.setErrorEstimateTime(est2 - est1 + trainResults.getErrorEstimateTime()); else trainResults.setErrorEstimateTime(est2 - est1); trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime() + trainResults.getErrorEstimateTime()); } trainResults.setParas(getParameters()); printLineDebug("*************** Finished TSF Build with " + classifiersBuilt + " Trees built in " + (System.nanoTime() - startTime) / 1000000000 + " Seconds ***************"); } /** * estimating own performance * Three scenarios * 1. If we bagged the full build (bagging ==true), we estimate using the full build OOB. Assumes the final * model has already been built * If we built on all data (bagging ==false) we estimate either * 2. with a 10xCV if estimator==EstimatorMethod.CV * 3. Build a bagged model simply to get the estimate estimator==EstimatorMethod.OOB * Note that all this needs to come out of any contract time we specify. 
* * @param data * @throws Exception from distributionForInstance */ private void estimateOwnPerformance(Instances data) throws Exception { if (bagging) { // Use bag data, counts normalised to probabilities printLineDebug("Finding the OOB estimates"); double[] preds = new double[data.numInstances()]; double[] actuals = new double[data.numInstances()]; long[] predTimes = new long[data.numInstances()];//Dummy variable, need something for (int j = 0; j < data.numInstances(); j++) { long predTime = System.nanoTime(); for (int k = 0; k < trainDistributions[j].length; k++) if (oobCounts[j] > 0) trainDistributions[j][k] /= oobCounts[j]; preds[j] = findIndexOfMax(trainDistributions[j], rand); actuals[j] = data.instance(j).classValue(); predTimes[j] = System.nanoTime() - predTime; } trainResults.addAllPredictions(actuals, preds, trainDistributions, predTimes, null); trainResults.setEstimatorName("TSFBagging"); trainResults.setDatasetName(data.relationName()); trainResults.setSplit("train"); trainResults.setFoldID(seed); trainResults.finaliseResults(actuals); trainResults.setErrorEstimateMethod("OOB"); } //Either do a CV, or bag and get the estimates else if (trainEstimateMethod == TrainEstimateMethod.CV || trainEstimateMethod == TrainEstimateMethod.NONE) { // Defaults to 10 or numInstances, whichever is smaller. 
int numFolds = setNumberOfFolds(data); CrossValidationEvaluator cv = new CrossValidationEvaluator(); if (seedClassifier) cv.setSeed(seed * 5); cv.setNumFolds(numFolds); TSF tsf = new TSF(); tsf.copyParameters(this); tsf.setDebug(this.debug); if (seedClassifier) tsf.setSeed(seed * 100); tsf.setEstimateOwnPerformance(false); if (trainTimeContract)//Need to split the contract time, will give time/(numFolds+2) to each fio tsf.setTrainTimeLimit(finalBuildtrainContractTimeNanos / numFolds); printLineDebug(" Doing CV evaluation estimate performance with " + tsf.getTrainContractTimeNanos() / 1000000000 + " secs per fold."); long buildTime = trainResults.getBuildTime(); trainResults = cv.evaluate(tsf, data); trainResults.setBuildTime(buildTime); trainResults.setEstimatorName("TSFCV"); trainResults.setErrorEstimateMethod("CV_" + numFolds); } else if (trainEstimateMethod == TrainEstimateMethod.OOB) { // Build a single new TSF using Bagging, and extract the estimate from this TSF tsf = new TSF(); tsf.copyParameters(this); tsf.setDebug(this.debug); tsf.setSeed(seed); tsf.setEstimateOwnPerformance(true); tsf.bagging = true; tsf.setTrainTimeLimit(finalBuildtrainContractTimeNanos); printLineDebug(" Doing Bagging estimate performance with " + tsf.getTrainContractTimeNanos() / 1000000000 + " secs per fold "); tsf.buildClassifier(data); long buildTime = trainResults.getBuildTime(); trainResults = tsf.trainResults; trainResults.setBuildTime(buildTime); trainResults.setEstimatorName("TSFOOB"); trainResults.setErrorEstimateMethod("OOB"); } } private void copyParameters(TSF other) { this.numClassifiers = other.numClassifiers; this.numIntervalsFinder = other.numIntervalsFinder; } @Override public long getTrainContractTimeNanos() { return trainContractTimeNanos; } /** * @param ins TimeSeriesInstance to classifier * @return array of doubles: probability of each class * @author c-eg */ @Override public double[] distributionForInstance(TimeSeriesInstance ins) throws Exception { // check if 
unequal length if (seriesLength > ins.getMaxLength()) { // pad with 0s TimeSeriesInstance padded = resizer.transform(ins); ins = padded; } double[] classProbability = new double[getTSTrainData().getClassLabels().length]; // length of class variables double[] statsData = new double[numIntervals * 3]; for (int i = 0; i < trees.size(); i++) { for (int j = 0; j < numIntervals; j++) { // get sliced series double[] tsAtInterval = ins.get(0).getVSliceArray(intervals.get(i)[j][0], intervals.get(i)[j][1]); // get stats about data double mean = TimeSeriesSummaryStatistics.mean(tsAtInterval); double variance = TimeSeriesSummaryStatistics.variance(tsAtInterval, mean); double std = Math.sqrt(variance); double sum = TimeSeriesSummaryStatistics.sum(tsAtInterval); double sumSq = TimeSeriesSummaryStatistics.sumSq(tsAtInterval); double slope = TimeSeriesSummaryStatistics.slope(tsAtInterval, sum, sumSq, std); // set mean, standard deviation and slope statsData[j * 3] = mean; statsData[j * 3 + 1] = std; statsData[j * 3 + 2] = slope; } // convert statistics data from series into a TimeSeriesInstance double[][] statsSeries = new double[][]{statsData}; TimeSeriesInstance ts = new TimeSeriesInstance(statsSeries, ins.getLabelIndex()); Instance tsConverted = Converter.toArff(ts, getTSTrainData().getClassLabels()); // convert to Instance for Weka if (voteEnsemble) { int c = (int) trees.get(i).classifyInstance(tsConverted); classProbability[c]++; } else { double[] temp = trees.get(i).distributionForInstance(tsConverted); for (int j = 0; j < temp.length; j++) classProbability[j] += temp[j]; } } double sum = 0; for (double x : classProbability) sum += x; if (sum > 0) for (int i = 0; i < classProbability.length; i++) classProbability[i] = classProbability[i] / sum; return classProbability; } /** * @param ins Weka Instance to classifier * @return array of doubles: probability of each class * @throws Exception */ @Override public double[] distributionForInstance(Instance ins) throws Exception { 
double[] d = new double[numClasses]; //Build transformed instance double[] series = ins.toDoubleArray(); for (int i = 0; i < trees.size(); i++) { for (int j = 0; j < numIntervals; j++) { //extract all intervals FeatureSet f = new FeatureSet(); f.setFeatures(series, intervals.get(i)[j][0], intervals.get(i)[j][1]); testHolder.instance(0).setValue(j * 3, f.mean); testHolder.instance(0).setValue(j * 3 + 1, f.stDev); testHolder.instance(0).setValue(j * 3 + 2, f.slope); } if (voteEnsemble) { int c = (int) trees.get(i).classifyInstance(testHolder.instance(0)); d[c]++; } else { double[] temp = trees.get(i).distributionForInstance(testHolder.instance(0)); for (int j = 0; j < temp.length; j++) d[j] += temp[j]; } } double sum = 0; for (double x : d) sum += x; if (sum > 0) for (int i = 0; i < d.length; i++) d[i] = d[i] / sum; return d; } /** * @param ins TimeSeriesInstance * @return double * @author c-eg */ @Override public double classifyInstance(TimeSeriesInstance ins) throws Exception { double[] d = distributionForInstance(ins); return findIndexOfMax(d, rand); } /** * @param ins Weka Instance * @return * @throws Exception */ @Override public double classifyInstance(Instance ins) throws Exception { double[] d = distributionForInstance(ins); return findIndexOfMax(d, rand); } /** * Parses a given list of options to set the parameters of the classifier. 
* We use this for the tuning mechanism, setting parameters through setOptions * <!-- options-start --> * Valid options are: <p/> * <pre> -T * Number of trees.</pre> * * <pre> -I * Number of intervals to fit.</pre> * * <!-- options-end --> * * @param options the list of options as an array of strings * @throws Exception if an option is not supported */ @Override public void setOptions(String[] options) throws Exception { /* System.out.print("TSF para sets "); for (String str:options) System.out.print(","+str); System.out.print("\n"); */ String numTreesString = Utils.getOption('T', options); if (numTreesString.length() != 0) { numClassifiers = Integer.parseInt(numTreesString); } String numFeaturesString = Utils.getOption('I', options); //Options here are a double between 0 and 1 (proportion of features), a text //string sqrt or log, or an integer number if (numFeaturesString.length() != 0) { try { if (numFeaturesString.equals("sqrt")) numIntervalsFinder = (numAtts) -> (int) (Math.sqrt(numAtts)); else if (numFeaturesString.equals("log")) numIntervalsFinder = (numAtts) -> (int) Utils.log2(numAtts) + 1; else { double d = Double.parseDouble(numFeaturesString); if (d <= 0) throw new Exception("proportion of features of of range 0 to 1"); if (d <= 1) numIntervalsFinder = (numAtts) -> (int) (d * numAtts); else numIntervalsFinder = (numAtts) -> (int) (d); } } catch (Exception e) { System.err.print(" Error: invalid parameter passed to TSF setOptions for number of parameters. 
Setting to default"); System.err.print("Value" + numIntervalsFinder + " Permissable values: sqrt, log, or a double range 0...1"); numIntervalsFinder = (numAtts) -> (int) (Math.sqrt(numAtts)); } } else numIntervalsFinder = (numAtts) -> (int) (Math.sqrt(numAtts)); } @Override //Checkpointable public boolean setCheckpointPath(String path) { boolean validPath = Checkpointable.super.createDirectories(path); printLineDebug(" Writing checkpoint to " + path); if (validPath) { checkpointPath = path; checkpoint = true; } return validPath; } @Override //Checkpointable public boolean setCheckpointTimeHours(int t) { checkpointTime = TimeUnit.NANOSECONDS.convert(t, TimeUnit.HOURS); checkpoint = true; return true; } @Override //Checkpointable public void copyFromSerObject(Object obj) throws Exception { if (!(obj instanceof TSF)) throw new Exception("The SER file is not an instance of TSF"); TSF saved = ((TSF) obj); try { printLineDebug("Loading TSF" + seed + ".ser"); numClassifiers = saved.numClassifiers; numIntervals = saved.numIntervals; //numIntervalsFinder = saved.numIntervalsFinder; minIntervalLength = saved.minIntervalLength; trees = saved.trees; classifier = saved.classifier; intervals = saved.intervals; //testHolder = saved.testHolder; voteEnsemble = saved.voteEnsemble; bagging = saved.bagging; inBag = saved.inBag; oobCounts = saved.oobCounts; trainDistributions = saved.trainDistributions; trainEstimateMethod = saved.trainEstimateMethod; checkpoint = saved.checkpoint; checkpointPath = saved.checkpointPath; checkpointTime = saved.checkpointTime; checkpointTimeElapsed = saved.checkpointTime; //intentional, time spent building previously unchanged // trainTimeContract = saved.trainTimeContract; // trainContractTimeNanos = saved.trainContractTimeNanos; seriesLength = saved.seriesLength; numClasses = saved.numClasses; rand = saved.rand; seedClassifier = saved.seedClassifier; seed = saved.seed; trainResults = saved.trainResults; estimateOwnPerformance = 
saved.estimateOwnPerformance;
        } catch (Exception ex) {
            // NOTE(review): restore failures are only reported, not rethrown -- a
            // partially restored checkpoint will be used as-is. Confirm intentional.
            System.out.println("Unable to assign variables when loading serialised file");
        }
    }

    /**
     * Sets the train time contract. A non-positive amount disables contracting.
     *
     * @param amount contract time in nanoseconds
     */
    @Override //TrainTimeContractable
    public void setTrainTimeLimit(long amount) {
        printLineDebug(" TSF setting contract to " + amount);
        if (amount > 0) {
            trainContractTimeNanos = amount;
            trainTimeContract = true;
        } else
            trainTimeContract = false;
    }

    /**
     * Checks whether the build is still within the train time contract.
     * Note the guard tests trainContractTimeNanos, but the elapsed-time comparison
     * uses finalBuildtrainContractTimeNanos (the contract remaining after any split
     * made for the train estimate).
     *
     * @param start build start time, taken from System.nanoTime()
     * @return true if uncontracted, or still within the allotted time
     */
    @Override //TrainTimeContractable
    public boolean withinTrainContract(long start) {
        if (trainContractTimeNanos <= 0)
            return true; //Not contracted
        return System.nanoTime() - start < finalBuildtrainContractTimeNanos;
    }

    /**
     * Serialises the classifier to a temp file, then renames it over the previous
     * checkpoint so an interrupted save cannot corrupt the existing checkpoint.
     * The filename argument is ignored; the path is rebuilt from checkpointPath
     * and seed.
     * NOTE(review): the delete()/renameTo() return values are ignored, so a failed
     * rename silently loses the checkpoint.
     *
     * @param filename unused; kept for the Checkpointable interface
     * @throws Exception from serialisation
     */
    @Override // Checkpointable
    public void saveToFile(String filename) throws Exception {
        Checkpointable.super.saveToFile(checkpointPath + "TSF" + seed + "temp.ser");
        File file = new File(checkpointPath + "TSF" + seed + "temp.ser");
        File file2 = new File(checkpointPath + "TSF" + seed + ".ser");
        file2.delete();
        file.renameTo(file2);
    }

    // Capabilities of classifier when using TSInstances
    @Override
    public TSCapabilities getTSCapabilities() {
        TSCapabilities tsCapabilities = new TSCapabilities();
        tsCapabilities.enable(TSCapabilities.EQUAL_OR_UNEQUAL_LENGTH)
                .enable(TSCapabilities.UNIVARIATE)
                .enable(TSCapabilities.NO_MISSING_VALUES)
                .enable(TSCapabilities.MIN_LENGTH(2));
        return tsCapabilities;
    }

    /**
     * TUNED TSF Classifiers.
Method for interface Tuneable * Valid options are: <p/> * <pre> -T Number of trees.</pre> * <pre> -I Number of intervals to fit.</pre> * * @return ParameterSpace object */ @Override public ParameterSpace getDefaultParameterSearchSpace() { ParameterSpace ps = new ParameterSpace(); String[] numTrees = {"100", "200", "300", "400", "500", "600", "700", "800", "900", "1000"}; ps.addParameter("T", numTrees); String[] numInterv = {"sqrt", "log", "0.1", "0.2", "0.3", "0.4", "0.5", "0.6", "0.7", "0.8", "0.9"}; ps.addParameter("I", numInterv); return ps; } @Override public boolean setVisualisationSavePath(String path) { boolean validPath = Visualisable.super.createVisualisationDirectories(path); if (validPath) { visSavePath = path; } return validPath; } @Override public boolean createVisualisation() throws Exception { if (!(classifier instanceof ContinuousIntervalTree)) { System.err.println("TSF temporal importance curve only available for time series tree."); return false; } if (visSavePath == null){ System.err.println("TSF visualisation save path not set."); return false; } double[][] curves = new double[3][seriesLength]; for (int i = 0; i < trees.size(); i++){ ContinuousIntervalTree tree = (ContinuousIntervalTree)trees.get(i); ArrayList<Double>[] sg = tree.getTreeSplitsGain(); for (int n = 0; n < sg[0].size(); n++){ double split = sg[0].get(n); double gain = sg[1].get(n); int interval = (int)(split/3); int att = (int)(split%3); for (int j = intervals.get(i)[interval][0]; j <= intervals.get(i)[interval][1]; j++){ curves[att][j] += gain; } } } OutFile of = new OutFile(visSavePath + "/vis" + seed + ".txt"); String[] atts = new String[]{"mean","stdev","slope"}; for (int i = 0 ; i < 3; i++){ of.writeLine(atts[i]); of.writeLine("0"); of.writeLine(Arrays.toString(curves[i])); } of.closeFile(); Process p = Runtime.getRuntime().exec("py src/main/python/visualisation/visCIF.py \"" + visSavePath.replace("\\", "/")+ "\" " + seed + " 3 1 3"); if (debug) { System.out.println("TSF vis 
python output:"); BufferedReader out = new BufferedReader(new InputStreamReader(p.getInputStream())); BufferedReader err = new BufferedReader(new InputStreamReader(p.getErrorStream())); System.out.println("output : "); String outLine = out.readLine(); while (outLine != null) { System.out.println(outLine); outLine = out.readLine(); } System.out.println("error : "); String errLine = err.readLine(); while (errLine != null) { System.out.println(errLine); errLine = err.readLine(); } } return true; } //Nested class to store three simple summary features used to construct train data public static class FeatureSet { public static boolean findSkew = false; public static boolean findKurtosis = false; double mean; double stDev; double slope; double skew; double kurtosis; public void setFeatures(double[] data, int start, int end) { double sumX = 0, sumYY = 0; double sumY3 = 0, sumY4 = 0; double sumY = 0, sumXY = 0, sumXX = 0; int length = end - start + 1; for (int i = start; i <= end; i++) { sumY += data[i]; sumYY += data[i] * data[i]; sumX += (i - start); sumXX += (i - start) * (i - start); sumXY += data[i] * (i - start); } mean = sumY / length; stDev = sumYY - (sumY * sumY) / length; slope = (sumXY - (sumX * sumY) / length); double denom = sumXX - (sumX * sumX) / length; if (denom != 0) slope /= denom; else slope = 0; stDev /= length; if (stDev == 0) //Flat line slope = 0; // else //Why not doing this? Because not needed? 
// stDev=Math.sqrt(stDev); if (slope == 0) stDev = 0; if (findSkew) { if (stDev == 0) skew = 1; else { for (int i = start; i <= end; i++) sumY3 += data[i] * data[i] * data[i]; skew = sumY3 - 3 * sumY * sumYY + 2 * sumY * sumY; skew /= length * stDev * stDev * stDev; } } if (findKurtosis) { if (stDev == 0) kurtosis = 1; else { for (int i = start; i <= end; i++) sumY4 += data[i] * data[i] * data[i] * data[i]; kurtosis = sumY4 - 4 * sumY * sumY3 + 6 * sumY * sumY * sumYY - 3 * sumY * sumY * sumY * sumY; skew /= length * stDev * stDev * stDev * stDev; } } } public void setFeatures(double[] data) { setFeatures(data, 0, data.length - 1); } @Override public String toString() { return "mean=" + mean + " stdev = " + stDev + " slope =" + slope; } } public static void main(String[] arg) throws Exception { // System.out.println(ClassifierTools.testUtils_getIPDAcc(new TSF(0))); // System.out.println(ClassifierTools.testUtils_confirmIPDReproduction(new TSF(0), 0.967930029154519, "2019/09/25")); // Basic correctness tests, including setting paras through String dataLocation="Z:\\ArchiveData\\Univariate_arff\\"; String resultsLocation="D:\\temp\\"; String problem="ItalyPowerDemand"; File f= new File(resultsLocation+problem); if(!f.isDirectory()) f.mkdirs(); Instances train=DatasetLoading.loadDataNullable(dataLocation+problem+"\\"+problem+"_TRAIN"); Instances test=DatasetLoading.loadDataNullable(dataLocation+problem+"\\"+problem+"_TEST"); TSF tsf = new TSF(); tsf.setSeed(0); tsf.setTrainTimeLimit((long) 1.5e+10); //tsf.setSavePath("D:\\temp\\"); tsf.setEstimateOwnPerformance(true); double a; tsf.buildClassifier(train); ClassifierResults trainres = tsf.getTrainResults(); trainres.writeFullResultsToFile(resultsLocation + problem + "trainFold0.csv"); System.out.println("build ok: original atts=" + (train.numAttributes() - 1) + " new atts =" + tsf.testHolder.numAttributes() + " num trees = " + tsf.numClassifiers + " num intervals = " + tsf.numIntervals); 
System.out.println(tsf.trainResults.getBuildTime()); a = ClassifierTools.accuracy(test, tsf); System.out.println("Test Accuracy =" + a); System.out.println(); tsf = new TSF(); tsf.setSeed(1); tsf.setTrainTimeLimit((long) 1.5e+10); //tsf.setSavePath("D:\\temp\\"); tsf.setEstimateOwnPerformance(true); tsf.setTrainEstimateMethod("OOB"); String[] options = new String[4]; options[0] = "-T"; options[1] = "10"; options[2] = "-I"; options[3] = "1"; tsf.setOptions(options); tsf.buildClassifier(train); System.out.println("build ok: original atts=" + (train.numAttributes() - 1) + " new atts =" + tsf.testHolder.numAttributes() + " num trees = " + tsf.numClassifiers + " num intervals = " + tsf.numIntervals); System.out.println(tsf.trainResults.getBuildTime()); a = ClassifierTools.accuracy(test, tsf); System.out.println("Test Accuracy =" + a); } }
54,461
39.282544
223
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/kernel_based/Arsenal.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.kernel_based; import evaluation.evaluators.CrossValidationEvaluator; import evaluation.storage.ClassifierResults; import experiments.data.DatasetLoading; import machine_learning.classifiers.RidgeClassifierCV; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.classifiers.MultiThreadable; import tsml.classifiers.TrainTimeContractable; import tsml.transformers.ROCKET; import utilities.ClassifierTools; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.core.Capabilities; import weka.core.Instance; import weka.core.Instances; import weka.core.Randomizable; import java.util.ArrayList; import java.util.Arrays; import java.util.concurrent.TimeUnit; /** * Contractable classifier making use of the ROCKET transformer. 
* <p> * Transform based on sktime python implementation by the author: * https://github.com/alan-turing-institute/sktime/blob/master/sktime/transformers/series_as_features/rocket.py * * @author Matthew Middlehurst */ public class Arsenal extends EnhancedAbstractClassifier implements TrainTimeContractable, MultiThreadable { private int numKernels = 2000; private int ensembleSize = 25; private boolean normalise = true; private Classifier cls = new RidgeClassifierCV(); private boolean bagging = false; private double[] oobCounts; private double[][] trainDistributions; private long trainContractTimeNanos = 0; private boolean trainTimeContract = false; private int maxEnsembleSize = 200; private boolean multithreading = false; private int threads; private Classifier[] classifiers; private ROCKET[] rockets; private double weightSum; private Instances header; public Arsenal() { super(CAN_ESTIMATE_OWN_PERFORMANCE); } @Override public String getParameters() { int cl = classifiers == null ? 0 : classifiers.length; return super.getParameters() + ",numKernels," + numKernels + ",normalise," + normalise + ",ensembleSize," + cl + ",trainContract," + trainTimeContract + ",contractTime," + trainContractTimeNanos; } @Override public Capabilities getCapabilities() { Capabilities result = super.getCapabilities(); result.disableAll(); result.setMinimumNumberInstances(2); // attributes result.enable(Capabilities.Capability.RELATIONAL_ATTRIBUTES); result.enable(Capabilities.Capability.NUMERIC_ATTRIBUTES); // class result.enable(Capabilities.Capability.NOMINAL_CLASS); return result; } public void setNumKernels(int numKernels) { this.numKernels = numKernels; } public void setNormalise(boolean normalise) { this.normalise = normalise; } public void setClassifier(Classifier cls) { this.cls = cls; } public void setEnsembleSize(int ensembleSize) { this.ensembleSize = ensembleSize; } public void setBagging(boolean bagging) { this.bagging = bagging; } @Override public void setTrainTimeLimit(long 
time) {
        trainContractTimeNanos = time;
        trainTimeContract = true;
    }

    /**
     * Checks whether the build is still within the train time contract.
     * When estimating performance via a separate OOB build (and not bagging),
     * half the contract is reserved for that estimate (divisor d == 2).
     *
     * @param start build start time from System.nanoTime()
     * @return true if uncontracted, or still within the allotted time
     */
    @Override
    public boolean withinTrainContract(long start) {
        if (trainContractTimeNanos <= 0)
            return true; //Not contracted
        int d = getEstimateOwnPerformance() && trainEstimateMethod == TrainEstimateMethod.OOB && !bagging ? 2 : 1;
        return System.nanoTime() - start < trainContractTimeNanos / d;
    }

    /**
     * Enables multithreading for the ROCKET transforms (and, where supported,
     * the base classifier) using the given number of threads.
     *
     * @param numThreads number of threads to use
     */
    @Override
    public void enableMultiThreading(int numThreads) {
        multithreading = true;
        threads = numThreads;
    }

    /**
     * Builds the Arsenal ensemble: repeatedly fits a ROCKET transform plus a copy
     * of the base classifier until the ensemble size or the train time contract is
     * reached, optionally bagging the data and collecting train estimates.
     *
     * @param data training Instances
     * @throws Exception from the transform or base classifier build
     */
    @Override
    public void buildClassifier(Instances data) throws Exception {
        super.buildClassifier(data);
        // buildTime temporarily stores the start timestamp; it is converted to an
        // elapsed duration at the end of the build.
        trainResults.setBuildTime(System.nanoTime());
        getCapabilities().testWithFail(data);

        int numInstances = data.numInstances();

        if (multithreading && cls instanceof MultiThreadable)
            ((MultiThreadable) cls).enableMultiThreading(threads);

        // Under a contract, build towards the hard cap and let time bound the size.
        if (trainTimeContract)
            ensembleSize = maxEnsembleSize;

        int numFolds = -1;
        if (getEstimateOwnPerformance()) {
            trainDistributions = new double[numInstances][numClasses];
            if (bagging) {
                oobCounts = new double[numInstances];
            } else {
                numFolds = Math.min(data.numInstances(), 10);
            }
        }

        ArrayList<Classifier> tempCls = new ArrayList<>();
        ArrayList<ROCKET> tempROCKET = new ArrayList<>();
        weightSum = 0;
        int i = 0;
        while (i < ensembleSize && withinTrainContract(trainResults.getBuildTime())) {
            ROCKET r = new ROCKET();
            r.setNumKernels(numKernels);
            r.setNormalise(normalise);
            if (seedClassifier)
                r.setSeed(seed + (i + 1) * 47);
            if (multithreading) {
                r.enableMultiThreading(threads);
            }

            //If bagging find instances with replacement
            boolean[] inBag = null;
            Instances newData;
            if (bagging) {
                newData = new Instances(data, numInstances);
                inBag = new boolean[numInstances];
                for (int n = 0; n < numInstances; n++) {
                    int idx = rand.nextInt(numInstances);
                    newData.add(data.get(idx));
                    inBag[idx] = true;
                }
            } else {
                newData = data;
            }

            Instances transformedData = r.fitTransform(newData);
            if (header == null)
                header = new Instances(transformedData, 0);

            Classifier c = AbstractClassifier.makeCopy(cls);
            if
(seedClassifier && c instanceof Randomizable) {
    // Distinct seed per ensemble member (i+1 scaled by an arbitrary prime-ish
    // constant) so base classifiers are diverse but reproducible.
    ((Randomizable) c).setSeed(seed + (i + 1) * 47);
}
c.buildClassifier(transformedData);
tempCls.add(c);
tempROCKET.add(r);
// Member weight: bestScore^4 for ridge classifiers, uniform weight 1 otherwise.
// NOTE(review): assumes getBestScore() is in [0,1] so ^4 sharpens good members — confirm.
double w = cls instanceof RidgeClassifierCV ? Math.pow(((RidgeClassifierCV) c).getBestScore(), 4) : 1;
weightSum += w;
if (getEstimateOwnPerformance()) {
    long t1 = System.nanoTime();
    if (bagging) {
        // Accumulate this member's weighted probabilities for its out-of-bag instances.
        for (int n = 0; n < numInstances; n++) {
            if (inBag[n]) continue;
            Instance inst = r.transform(data.get(n));
            inst.setDataset(transformedData);
            double[] newProbs = c.distributionForInstance(inst);
            oobCounts[n] += w;  // later used to normalise the summed distributions
            for (int j = 0; j < newProbs.length; j++)
                trainDistributions[n][j] += newProbs[j] * w;
        }
    } else if (trainEstimateMethod != TrainEstimateMethod.OOB) {
        // Cross-validate a fresh copy of the base classifier on this member's
        // transformed data, accumulating weighted fold predictions per instance.
        CrossValidationEvaluator cv = new CrossValidationEvaluator();
        if (seedClassifier) cv.setSeed(seed + (i + 1) * 67);
        cv.setNumFolds(numFolds);
        Classifier cvCls = AbstractClassifier.makeCopy(cls);
        if (seedClassifier && cls instanceof Randomizable)
            ((Randomizable) cvCls).setSeed(seed + (i + 1) * 67);
        ClassifierResults results = cv.evaluate(cvCls, transformedData);
        for (int n = 0; n < numInstances; n++) {
            double[] dist = results.getProbabilityDistribution(n);
            for (int j = 0; j < trainDistributions[n].length; j++)
                trainDistributions[n][j] += dist[j] * w;
        }
    }
    // Estimate time excludes the member's own build time above.
    trainResults.setErrorEstimateTime(trainResults.getErrorEstimateTime() + (System.nanoTime() - t1));
}
i++;
}
// Freeze the (possibly contract-truncated) ensemble into arrays.
classifiers = new Classifier[tempCls.size()];
classifiers = tempCls.toArray(classifiers);
rockets = new ROCKET[tempROCKET.size()];
rockets = tempROCKET.toArray(rockets);
trainResults.setTimeUnit(TimeUnit.NANOSECONDS);
// buildTime held the start timestamp until now; convert it to elapsed time.
trainResults.setBuildTime(System.nanoTime() - trainResults.getBuildTime());
if (getEstimateOwnPerformance()) {
    long est1 = System.nanoTime();
    findEnsembleTrainEstimate(data);
    long est2 = System.nanoTime();
    trainResults.setErrorEstimateTime(est2 - est1 + trainResults.getErrorEstimateTime());
}
trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime() + trainResults.getErrorEstimateTime());
trainResults.setParas(getParameters());
printLineDebug("*************** Finished Arsenal Build with "+rockets.length+" rockets in " + (trainResults.getBuildTime()/1000000000/60/60.0) + " hours, Train+Estimate time = "+(trainResults.getBuildPlusEstimateTime()/1000000000/60/60.0)+" hours ***************");
}

/**
 * Populates trainResults with a train-set performance estimate built from the
 * per-instance distributions accumulated during buildClassifier.
 * Three routes: normalise accumulated OOB distributions (bagging), normalise
 * accumulated CV distributions by total weight, or (OOB method without bagging)
 * rebuild a bagged copy of the whole ensemble and take its results.
 *
 * @param data the original (untransformed) training data
 * @throws Exception propagated from building the inner bagged Arsenal
 */
private void findEnsembleTrainEstimate(Instances data) throws Exception {
    if (bagging) {
        double[] preds = new double[data.numInstances()];
        double[] actuals = new double[data.numInstances()];
        long[] predTimes = new long[data.numInstances()]; //Dummy variable, need something
        for (int j = 0; j < data.numInstances(); j++) {
            long predTime = System.nanoTime();
            if (oobCounts[j] == 0)
                // Never out-of-bag for any member: fall back to a uniform distribution.
                Arrays.fill(trainDistributions[j], 1.0/trainDistributions[j].length);
            else
                for (int k = 0; k < trainDistributions[j].length; k++)
                    trainDistributions[j][k] /= oobCounts[j];
            preds[j] = findIndexOfMax(trainDistributions[j], rand);
            actuals[j] = data.get(j).classValue();
            predTimes[j] = System.nanoTime() - predTime;
        }
        trainResults.addAllPredictions(actuals, preds, trainDistributions, predTimes, null);
        trainResults.setDatasetName(data.relationName());
        trainResults.setSplit("train");
        trainResults.setFoldID(seed);
        trainResults.setEstimatorName("ArsenalOOB");
        trainResults.setErrorEstimateMethod("OOB");
    } else if (trainEstimateMethod == TrainEstimateMethod.CV || trainEstimateMethod == TrainEstimateMethod.NONE || trainEstimateMethod == TrainEstimateMethod.TRAIN) {
        double[] preds = new double[data.numInstances()];
        double[] actuals = new double[data.numInstances()];
        long[] predTimes = new long[data.numInstances()]; //Dummy variable, need something
        for (int j = 0; j < data.numInstances(); j++) {
            long predTime = System.nanoTime();
            // Distributions were accumulated weighted; normalise by total member weight.
            for (int k = 0; k < trainDistributions[j].length; k++)
                trainDistributions[j][k] /= weightSum;
            preds[j] = findIndexOfMax(trainDistributions[j], rand);
            actuals[j] = data.get(j).classValue();
            predTimes[j] = System.nanoTime() - predTime;
        }
        trainResults.addAllPredictions(actuals, preds, trainDistributions, predTimes, null);
        trainResults.setDatasetName(data.relationName());
        trainResults.setSplit("train");
        trainResults.setFoldID(seed);
        trainResults.setEstimatorName("ArsenalCV");
        trainResults.setErrorEstimateMethod("CV_10");
    } else if (trainEstimateMethod == TrainEstimateMethod.OOB) {
        // Non-bagged build but OOB estimate requested: train a second, bagged
        // Arsenal (differently seeded) and adopt its train results.
        Arsenal ar = new Arsenal();
        ar.copyParameters(this);
        ar.setSeed(seed * 5);
        ar.setEstimateOwnPerformance(true);
        if (trainTimeContract)
            // Give the estimator 45% of the original contract.
            ar.setTrainTimeLimit((long)(trainContractTimeNanos / 10 * 4.5));
        ar.bagging = true;
        ar.buildClassifier(data);
        long tt = trainResults.getBuildTime();
        trainResults = ar.trainResults;
        trainResults.setBuildTime(tt);  // keep this object's own build time
        trainResults.setEstimatorName("ArsenalOOB");
        trainResults.setErrorEstimateMethod("OOB");
    }
}

/**
 * Predicts the class with the highest ensemble vote, ties broken randomly.
 */
@Override
public double classifyInstance(Instance instance) throws Exception {
    double[] probs = distributionForInstance(instance);
    return findIndexOfMax(probs, rand);
}

/**
 * Weighted hard-vote distribution: each member transforms the instance with its
 * own ROCKET kernels, predicts a class, and adds its weight to that class;
 * the vote counts are then normalised by the total weight.
 */
public double[] distributionForInstance(Instance instance) throws Exception {
    double[] probs = new double[header.numClasses()];
    for (int i = 0; i < classifiers.length; i++) {
        Instance transformedInst = rockets[i].transform(instance);
        transformedInst.setDataset(header);
        double pred = classifiers[i].classifyInstance(transformedInst);
        // Same weighting scheme as in buildClassifier (ridge bestScore^4, else 1).
        double w = cls instanceof RidgeClassifierCV ? Math.pow(((RidgeClassifierCV) classifiers[i]).getBestScore(), 4) : 1;
        probs[(int) pred] += w;
    }
    for (int i = 0; i < probs.length; i++)
        probs[i] /= weightSum;
    return probs;
}

/**
 * Copies configuration (not learned state) from another Arsenal; used when
 * spawning the inner bagged estimator in findEnsembleTrainEstimate.
 */
private void copyParameters(Arsenal other) {
    this.numKernels = other.numKernels;
    this.ensembleSize = other.ensembleSize;
    this.normalise = other.normalise;
    this.cls = other.cls;
    this.bagging = other.bagging;
    this.trainContractTimeNanos = other.trainContractTimeNanos;
    this.trainTimeContract = other.trainTimeContract;
}

/**
 * Minimum working example: trains on ItalyPowerDemand (with an OOB train
 * estimate) and on ERing, printing accuracies and timings.
 */
public static void main(String[] args) throws Exception {
    int fold = 0;
    Instances[] data = DatasetLoading.sampleItalyPowerDemand(fold);
    Instances train = data[0];
    Instances test = data[1];
    Instances[] data2 = DatasetLoading.sampleERing(fold);
    Instances train2 = data2[0];
    Instances test2 = data2[1];
    Arsenal c;
    double accuracy;
    c = new Arsenal();
    c.seed = fold;
    c.setTrainEstimateMethod(TrainEstimateMethod.OOB);
    c.setEstimateOwnPerformance(true);
    c.buildClassifier(train);
    accuracy = ClassifierTools.accuracy(test, c);
    System.out.println("Arsenal accuracy on ItalyPowerDemand fold " + fold + " = " + accuracy);
    System.out.println("Train accuracy on ItalyPowerDemand fold " + fold + " = " + c.trainResults.getAcc());
    System.out.println("Build time on ItalyPowerDemand fold " + fold + " = " + TimeUnit.SECONDS.convert(c.trainResults.getBuildTime(), TimeUnit.NANOSECONDS) + " seconds");
    System.out.println("Estimate time on ItalyPowerDemand fold " + fold + " = " + TimeUnit.SECONDS.convert(c.trainResults.getErrorEstimateTime(), TimeUnit.NANOSECONDS) + " seconds");
    c = new Arsenal();
    c.seed = fold;
    c.buildClassifier(train2);
    accuracy = ClassifierTools.accuracy(test2, c);
    System.out.println("Arsenal accuracy on ERing fold " + fold + " = " + accuracy);
    System.out.println("Build time on ERing fold " + fold + " = " + TimeUnit.SECONDS.convert(c.trainResults.getBuildTime(), TimeUnit.NANOSECONDS) + " seconds");
}
}
15,885
39.217722
187
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/kernel_based/ROCKETClassifier.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.kernel_based; import evaluation.evaluators.CrossValidationEvaluator; import experiments.data.DatasetLoading; import machine_learning.classifiers.RidgeClassifierCV; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.classifiers.MultiThreadable; import tsml.classifiers.TrainTimeContractable; import tsml.transformers.ROCKET; import utilities.ClassifierTools; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.core.*; import java.util.ArrayList; import java.util.concurrent.TimeUnit; /** * Contractable classifier making use of the ROCKET transformer. 
 * <p>
 * Transform based on sktime python implementation by the author:
 * https://github.com/alan-turing-institute/sktime/blob/master/sktime/transformers/series_as_features/rocket.py
 *
 * @author Matthew Middlehurst
 */
public class ROCKETClassifier extends EnhancedAbstractClassifier implements TrainTimeContractable, MultiThreadable {

    // Number of random convolutional kernels when NOT contracted.
    private int numKernels = 10000;
    // Whether the ROCKET transform normalises each series.
    private boolean normalise = true;
    // Classifier built on the transformed features; ridge regression CV by default.
    private Classifier cls = new RidgeClassifierCV();

    // Contract budget in nanoseconds; 0 means no contract set.
    private long trainContractTimeNanos = 0;
    private boolean trainTimeContract = false;
    // Kernels are added in batches of this size while within contract.
    private int numKernelsStep = 50;
    // Hard cap on kernels accumulated under contract.
    public int maxKernels = 100000;

    private boolean multithreading = false;
    private int threads;

    // Fitted transform and an empty header matching the transformed data,
    // used to attach a dataset to transformed test instances.
    private ROCKET rocket;
    private Instances header;

    public ROCKETClassifier() {
        super(CAN_ESTIMATE_OWN_PERFORMANCE);
    }

    /**
     * Reports numKernels (actual fitted count if available), normalise and
     * contract settings, appended to the superclass parameter string.
     */
    @Override
    public String getParameters() {
        int nc = rocket == null ? numKernels : rocket.getNumKernels();
        return super.getParameters() + ",numKernels," + nc + ",normalise," + normalise + ",trainContract," + trainTimeContract + ",contractTime," + trainContractTimeNanos + ",numKernelStep," + numKernelsStep;
    }

    /**
     * Numeric (and relational, for multivariate) attributes with a nominal
     * class; at least two training instances required.
     */
    @Override
    public Capabilities getCapabilities() {
        Capabilities result = super.getCapabilities();
        result.disableAll();
        result.setMinimumNumberInstances(2);
        // attributes
        result.enable(Capabilities.Capability.RELATIONAL_ATTRIBUTES);
        result.enable(Capabilities.Capability.NUMERIC_ATTRIBUTES);
        // class
        result.enable(Capabilities.Capability.NOMINAL_CLASS);
        return result;
    }

    public void setNumKernels(int numKernels) {
        this.numKernels = numKernels;
    }

    public void setNormalise(boolean normalise) {
        this.normalise = normalise;
    }

    public void setClassifier(Classifier cls) {
        this.cls = cls;
    }

    public void setNumKernelsStep(int numKernelsStep) {
        this.numKernelsStep = numKernelsStep;
    }

    @Override
    public void setTrainTimeLimit(long time) {
        trainContractTimeNanos = time;
        trainTimeContract = true;
    }

    /**
     * True while elapsed time since {@code start} is under 4/5 of the contract
     * (the remaining fifth is reserved for building the final classifier).
     *
     * @param start nanoTime timestamp taken at the start of the build
     */
    @Override
    public boolean withinTrainContract(long start) {
        if (trainContractTimeNanos <= 0) return true; //Not contracted
        return System.nanoTime() - start < trainContractTimeNanos/5*4;
    }

    @Override
    public void enableMultiThreading(int numThreads) {
        multithreading = true;
        threads = numThreads;
    }

    /**
     * Fits the ROCKET transform then builds {@code cls} on the transformed data.
     * Under a contract, kernels are added in batches of numKernelsStep until the
     * time budget (4/5 of the contract) or maxKernels is reached, and the
     * per-batch transformed fragments are stitched back into a single dataset.
     *
     * @throws Exception if the data is unsuitable or the inner build fails
     */
    @Override
    public void buildClassifier(Instances data) throws Exception {
        super.buildClassifier(data);
        // buildTime temporarily stores the start timestamp; converted to elapsed below.
        trainResults.setBuildTime(System.nanoTime());
        getCapabilities().testWithFail(data);

        if (multithreading && cls instanceof MultiThreadable)
            ((MultiThreadable) cls).enableMultiThreading(threads);

        Instances trainEstData = null;
        if (trainTimeContract) {
            ArrayList<Instances> fragmentedTransformedData = new ArrayList<>();
            // Accumulator transform starts empty; batches are merged into it.
            rocket = new ROCKET();
            rocket.setNumKernels(0);
            rocket.setNormalise(normalise);
            if (seedClassifier) rocket.setSeed(seed);
            int l = 0;
            while (withinTrainContract(trainResults.getBuildTime()) && rocket.getNumKernels() < maxKernels) {
                ROCKET tempRocket = new ROCKET();
                tempRocket.setNumKernels(numKernelsStep);
                tempRocket.setNormalise(normalise);
                // Batch-dependent seed so each batch draws different kernels.
                tempRocket.setSeed(seed + l * numKernelsStep);
                if (multithreading) {
                    tempRocket.enableMultiThreading(threads);
                }
                fragmentedTransformedData.add(tempRocket.fitTransform(data));
                rocket.addKernels(tempRocket);
                l++;
            }
            Instances transformedData = rocket.determineOutputFormat(data);
            header = new Instances(transformedData, 0);
            // Stitch per-batch fragments into full-width instances
            // (2 features per kernel: max and proportion-of-positive-values).
            for (int i = 0; i < data.numInstances(); i++) {
                double[] arr = new double[transformedData.numAttributes()];
                int a1 = 0;
                for (Instances insts : fragmentedTransformedData) {
                    Instance inst = insts.get(i);
                    for (int j = 0; j < numKernelsStep * 2; j++) {
                        arr[a1 + j] = inst.value(j);
                    }
                    a1 += numKernelsStep * 2;
                }
                arr[arr.length - 1] = data.get(i).classValue();
                transformedData.add(new DenseInstance(1, arr));
            }
            if (cls instanceof Randomizable) {
                ((Randomizable) cls).setSeed(seed);
            }
            cls.buildClassifier(transformedData);
            if (getEstimateOwnPerformance()) {
                trainEstData = transformedData;
            }
        } else {
            // Uncontracted: single fit with the configured kernel count.
            rocket = new ROCKET();
            rocket.setNumKernels(numKernels);
            rocket.setNormalise(normalise);
            if (seedClassifier) rocket.setSeed(seed);
            if (multithreading) {
                rocket.enableMultiThreading(threads);
            }
            Instances transformedData = rocket.fitTransform(data);
            header = new Instances(transformedData, 0);
            if (cls instanceof Randomizable) {
                ((Randomizable) cls).setSeed(seed);
            }
            cls.buildClassifier(transformedData);
            if (getEstimateOwnPerformance()) {
                trainEstData = transformedData;
            }
        }

        trainResults.setTimeUnit(TimeUnit.NANOSECONDS);
        trainResults.setBuildTime(System.nanoTime() - trainResults.getBuildTime());
        if (getEstimateOwnPerformance()) {
            long est1 = System.nanoTime();
            estimateOwnPerformance(trainEstData);
            long est2 = System.nanoTime();
            trainResults.setErrorEstimateTime(est2 - est1 + trainResults.getErrorEstimateTime());
        }
        trainResults.setBuildPlusEstimateTime(trainResults.getBuildTime() + trainResults.getErrorEstimateTime());
        trainResults.setParas(getParameters());
    }

    /**
     * Cross-validates a fresh copy of {@code cls} on the transformed training
     * data (up to 10 folds) and adopts the CV results as the train estimate,
     * preserving the already-measured build time.
     *
     * @param data the ROCKET-transformed training data
     */
    private void estimateOwnPerformance(Instances data) throws Exception {
        int numFolds = Math.min(data.numInstances(), 10);
        CrossValidationEvaluator cv = new CrossValidationEvaluator();
        if (seedClassifier)
            cv.setSeed(seed * 5);
        cv.setNumFolds(numFolds);
        Classifier newCls = AbstractClassifier.makeCopy(cls);
        if (seedClassifier && cls instanceof Randomizable)
            ((Randomizable) newCls).setSeed(seed * 100);
        long tt = trainResults.getBuildTime();
        trainResults = cv.evaluate(newCls, data);
        trainResults.setBuildTime(tt);
        trainResults.setEstimatorName("ROCKETCV");
        trainResults.setErrorEstimateMethod("CV_" + numFolds);
    }

    /**
     * Predicts the class with the highest probability, ties broken randomly.
     */
    @Override
    public double classifyInstance(Instance instance) throws Exception {
        double[] probs = distributionForInstance(instance);
        return findIndexOfMax(probs, rand);
    }

    /**
     * Transforms the instance with the fitted kernels and delegates to the
     * inner classifier's distribution.
     */
    public double[] distributionForInstance(Instance instance) throws Exception {
        Instance transformedInst = rocket.transform(instance);
        transformedInst.setDataset(header);
        return cls.distributionForInstance(transformedInst);
    }

    /**
     * Minimum working example on ItalyPowerDemand (univariate, with train
     * estimate) and ERing (multivariate), printing accuracies and timings.
     */
    public static void main(String[] args) throws Exception {
        int fold = 0;

        //Minimum working example
        String dataset = "ItalyPowerDemand";
        Instances[] data = DatasetLoading.sampleItalyPowerDemand(fold);
        Instances train = data[0];
        Instances test = data[1];

        String dataset2 = "ERing";
        Instances[] data2 = DatasetLoading.sampleERing(fold);
        Instances train2 = data2[0];
        Instances test2 = data2[1];

        ROCKETClassifier c;
        double accuracy;

        c = new ROCKETClassifier();
        c.seed = fold;
        c.setEstimateOwnPerformance(true);
        c.buildClassifier(train);
        accuracy = ClassifierTools.accuracy(test, c);
        System.out.println("ROCKETClassifier accuracy on " + dataset + " fold " + fold + " = " + accuracy);
        System.out.println("Train accuracy on " + dataset + " fold " + fold + " = " + c.trainResults.getAcc());
        System.out.println("Build time on " + dataset + " fold " + fold + " = " + TimeUnit.SECONDS.convert(c.trainResults.getBuildTime(), TimeUnit.NANOSECONDS) + " seconds");

        c = new ROCKETClassifier();
        c.seed = fold;
        c.buildClassifier(train2);
        accuracy = ClassifierTools.accuracy(test2, c);
        System.out.println("ROCKETClassifier accuracy on " + dataset2 + " fold " + fold + " = " + accuracy);
        System.out.println("Build time on " + dataset2 + " fold " + fold + " = " + TimeUnit.SECONDS.convert(c.trainResults.getBuildTime(), TimeUnit.NANOSECONDS) + " seconds");
    }
}
10,356
35.340351
116
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/RISE.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy; import evaluation.evaluators.SingleSampleEvaluator; import evaluation.storage.ClassifierResults; import evaluation.tuning.ParameterSpace; import experiments.data.DatasetLists; import java.util.ArrayList; import java.util.Random; import java.util.concurrent.TimeUnit; import tsml.transformers.Fast_FFT; import tsml.classifiers.EnhancedAbstractClassifier; import weka.classifiers.AbstractClassifier; import weka.classifiers.Classifier; import weka.classifiers.trees.RandomTree; import weka.core.Attribute; import weka.core.DenseInstance; import weka.core.Instance; import weka.core.Instances; import weka.core.TechnicalInformation; import tsml.transformers.ACF; import tsml.transformers.PowerSpectrum; import tsml.transformers.ACF_PACF; import tsml.transformers.ARMA; import tsml.transformers.PACF; import tsml.transformers.Transformer; import weka.core.Randomizable; import weka.core.TechnicalInformationHandler; import weka.core.Utils; import tsml.classifiers.Tuneable; import static experiments.data.DatasetLoading.loadDataNullable; /** * This version is now here only for legacy reasons. The new version in * tsml.classifiers.frequency_based is as accurate and *much* faster version. * * Development code for RISE 1. 
set number of trees to max(500,m) 2. Set the * first tree to the full interval 3. Randomly select the interval length and * start point for each other tree * 4. Find the PS, ACF, PACF and AR features * 5. Build each base classifier (default RandomTree). * * 19/3/19: DONE A1. Restructure A2. Test whether we need all four components, * particularly AR and PACF! A3. Implement speed up to avoid recalculating ACF * each time A4. Compare to python version * * @author Tony Bagnall. <!-- globalinfo-start --> Random Interval Spectral * Ensemble * * This implementation is the base RISE Overview: Input n series length * m for each tree sample interval of random size (minimum set to 16) * transform interval into ACF, PS, AR and PACF features build tree on * concatenated features ensemble the trees with majority vote <!-- * globalinfo-end --> <!-- technical-bibtex-start --> Bibtex * * <pre> * &#64;article{lines2018time, * title={Time series classification with HIVE-COTE: The hierarchical vote collective of transformation-based ensembles}, * author={Lines, Jason and Taylor, Sarah and Bagnall, Anthony}, * journal={ACM Transactions on Knowledge Discovery from Data (TKDD)}, * volume={12}, * number={5}, * pages={52}, * year={2018}, * publisher={ACM} * } * </pre> * * <!-- technical-bibtex-end --> <!-- options-start --> Valid options * are: * <p/> * * <pre> * -T * set number of trees. * </pre> * * <pre> * -F * set number of features. * </pre> * * <!-- options-end --> * @author Tony Bagnall * @date Some time in 2017 **/ public class RISE extends EnhancedAbstractClassifier implements SubSampleTrainer, Randomizable, TechnicalInformationHandler, Tuneable { /** Default to a random tree */ private Classifier baseClassifierTemplate = new RandomTree(); /** Ensemble base classifiers */ private Classifier[] baseClassifiers; /** Ensemble size */ private static int DEFAULT_NUM_CLASSIFIERS = 500; private int numBaseClassifiers = DEFAULT_NUM_CLASSIFIERS; /** Random Intervals for the transform. 
INTERVAL BOUNDS ARE INCLUSIVE */ private int[] startPoints; private int[] endPoints; /** Minimum size of all intervals */ private static int DEFAULT_MIN_INTERVAL = 16; private int minInterval = DEFAULT_MIN_INTERVAL; /** Can seed for reproducibility */ Transformer[] transformers; /** Power Spectrum transformer, probably dont need to store this here */ // private PowerSpectrum ps=new PowerSpectrum(); /** If we are estimating the CV, it is possible to sample fewer elements. **/ // Really should try bagging this! private boolean subSample = false; private double sampleProp = 1; public RISE() { super(CAN_ESTIMATE_OWN_PERFORMANCE); transformers = new Transformer[3]; ACF acf = new ACF(); acf.setNormalized(false); transformers[0] = acf; PACF pacf = new PACF(); transformers[1] = pacf; transformers[2] = new PowerSpectrum(); rand = new Random(); } public RISE(int s) { this(); setSeed(s); } /* * New default for speed. */ public void setFastRISE() { this.setTransforms("FFT", "ACF"); } /** * This interface is not formalised and needs to be considered in the next * review * * @param prop * @param s */ @Override public void subSampleTrain(double prop, int s) { subSample = true; sampleProp = prop; seed = s; } public void setTransforms(String... 
trans) { transformers = new Transformer[trans.length]; int count = 0; for (String s : trans) { switch (s) { case "ACF": case "Autocorrelation": transformers[count] = new ACF(); break; case "PACF": case "PartialAutocorrelation": transformers[count] = new PACF(); break; case "AR": case "AutoRegressive": transformers[count] = new ARMA(); break; case "PS": case "PowerSpectrum": transformers[count] = new PowerSpectrum(); ((PowerSpectrum) transformers[count]).useFFT(); break; case "FFT": transformers[count] = new Fast_FFT(); break; case "ACF_PACF": case "PACF_ACF": transformers[count] = new ACF_PACF(); break; default: System.out.println("Unknown tranform " + s); continue; } count++; } if (count < transformers.length) { Transformer[] temp = new Transformer[count]; for (int i = 0; i < count; i++) temp[i] = transformers[i]; transformers = temp; } } /** * Changes the base classifier, * * @param c new base classifier */ public void setBaseClassifier(Classifier c) { baseClassifierTemplate = c; } /** * * @param k Ensemble size */ public void setNumClassifiers(int k) { numBaseClassifiers = k; } /** * * @return number of classifiers in the ensemble */ public int getNumClassifiers() { return numBaseClassifiers; } /** * Holders for the headers of each transform. */ Instances[] testHolders; @Override public TechnicalInformation getTechnicalInformation() { TechnicalInformation result; result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE); result.setValue(TechnicalInformation.Field.AUTHOR, "J. Lines, S. Taylor and A. 
Bagnall"); result.setValue(TechnicalInformation.Field.YEAR, "2018"); result.setValue(TechnicalInformation.Field.TITLE, "Time series classification with HIVE-COTE: The hierarchical vote collective of transformation-based ensembles"); result.setValue(TechnicalInformation.Field.JOURNAL, "ACM Transactions on Knowledge Discovery from Data "); result.setValue(TechnicalInformation.Field.VOLUME, "12"); result.setValue(TechnicalInformation.Field.NUMBER, "5"); result.setValue(TechnicalInformation.Field.PAGES, "NA"); return result; } /** * Parses a given list of options to set the parameters of the classifier. We * use this for the tuning mechanism, setting parameters through setOptions <!-- * options-start --> Valid options are: * <p/> * * <pre> * -K * Number of base classifiers. * </pre> * * <pre> * -I * min Interval, integer, should be in range 3 to m-MINa check in build classifier is made to see if if. * </pre> * * <pre> * -T transforms, a space separated list. * </pre> * * <!-- options-end --> * * @param options the list of options as an array of strings * @throws Exception if an option is not supported */ @Override public void setOptions(String[] options) throws Exception { String numCls = Utils.getOption('K', options); if (numCls.length() != 0) numBaseClassifiers = Integer.parseInt(numCls); else numBaseClassifiers = DEFAULT_NUM_CLASSIFIERS; /** Minimum size of all intervals */ String minInt = Utils.getOption('I', options); if (minInt.length() != 0) minInterval = Integer.parseInt(minInt); /** Transforms to use */ String trans = Utils.getOption('T', options); if (trans.length() != 0) { String[] t = trans.split(" "); // NEED TO CHECK THIS WORKS setTransforms(t); } } @Override public String getParameters() { String str = super.getParameters() + ",numClassifiers," + numBaseClassifiers + "," + "MinInterval," + minInterval; for (int i = 0; i < transformers.length; i++) str += ",Filter" + i + "," + transformers[i].getClass().getSimpleName(); return str; } @Override public void 
buildClassifier(Instances data) throws Exception { // can classifier handle the data? getCapabilities().testWithFail(data); int m = data.numAttributes() - 1; if (minInterval > m) minInterval = m / 2; startPoints = new int[numBaseClassifiers]; endPoints = new int[numBaseClassifiers]; // TO DO trainResults.setTimeUnit(TimeUnit.NANOSECONDS); long start = System.nanoTime(); // Option to sub sample for training if (subSample) { data = subSample(data, sampleProp, seed); System.out.println(" TRAIN SET SIZE NOW " + data.numInstances()); } // Initialise the memory baseClassifiers = new Classifier[numBaseClassifiers]; testHolders = new Instances[numBaseClassifiers]; // Select random intervals for each tree for (int i = 0; i < numBaseClassifiers; i++) { // Do whole series for first classifier if (i == 0) { startPoints[i] = 0; endPoints[i] = m - 1; } else { // Random interval at least minInterval in size startPoints[i] = rand.nextInt(m - minInterval); // This avoid calling nextInt(0) if (startPoints[i] == m - 1 - minInterval) endPoints[i] = m - 1; else { endPoints[i] = rand.nextInt(m - startPoints[i]); if (endPoints[i] < minInterval) endPoints[i] = minInterval; endPoints[i] += startPoints[i]; } } // Set up train instances prior to trainsform. 
int numFeatures = endPoints[i] - startPoints[i] + 1; String name; ArrayList<Attribute> atts = new ArrayList(); for (int j = 0; j < numFeatures; j++) { name = "F" + j; atts.add(new Attribute(name)); } // Get the class values as a fast vector Attribute target = data.attribute(data.classIndex()); ArrayList<String> vals = new ArrayList<>(target.numValues()); for (int j = 0; j < target.numValues(); j++) vals.add(target.value(j)); atts.add(new Attribute(data.attribute(data.classIndex()).name(), vals)); // create blank instances with the correct class value Instances result = new Instances("Tree", atts, data.numInstances()); result.setClassIndex(result.numAttributes() - 1); for (int j = 0; j < data.numInstances(); j++) { DenseInstance in = new DenseInstance(result.numAttributes()); double[] v = data.instance(j).toDoubleArray(); for (int k = 0; k < numFeatures; k++) in.setValue(k, v[startPoints[i] + k]); // Set interval features in.setValue(result.numAttributes() - 1, data.instance(j).classValue()); result.add(in); } testHolders[i] = new Instances(result, 0); DenseInstance in = new DenseInstance(result.numAttributes()); testHolders[i].add(in); // Perform the transform Instances newTrain = filterData(result); // Build Classifier: Defaults to a RandomTree, but WHY ALL THE ATTS? 
// NOTE(review): this region begins part-way through buildClassifier(...); the method's
// opening lines are outside this chunk. Code tokens below are unchanged — only
// formatting and comments were added.
        // Base classifiers: a fresh RandomTree with K set to the interval width, or a
        // copy of whatever template classifier was configured.
        if (baseClassifierTemplate instanceof RandomTree) {
            baseClassifiers[i] = new RandomTree();
            ((RandomTree) baseClassifiers[i]).setKValue(numFeatures);
        } else
            baseClassifiers[i] = AbstractClassifier.makeCopy(baseClassifierTemplate);
        // if(baseClassifiers[i] instanceof Randomisable)
        // Seed each member deterministically from the ensemble seed so runs are reproducible.
        if (baseClassifiers[i] instanceof Randomizable && seedClassifier)
            ((Randomizable) baseClassifiers[i]).setSeed(i * seed);
        baseClassifiers[i].buildClassifier(newTrain);
    }
    if (getEstimateOwnPerformance())
        findTrainAcc(data);
    trainResults.setBuildTime(System.nanoTime() - start);
    trainResults.setParas(getParameters());
}

/**
 * Estimates train accuracy with a bagging-style out-of-bag procedure: builds
 * numTrees RandomTrees, each on a bootstrap bag of the interval-extracted and
 * filtered train data, collects a class distribution for every case from each
 * bag it was left out of, averages those distributions and records the
 * resulting predictions (and accuracy) into trainResults.
 *
 * Side effect: overwrites trainResults.
 * NOTE(review): the local RISE instance is only referenced from commented-out
 * code, and bagAccuracies/testIndexs are filled but never read afterwards —
 * presumably leftovers from an earlier design; verify before removing.
 *
 * @param data the full training set
 */
private void findTrainAcc(Instances data) {
    trainResults.setTimeUnit(TimeUnit.NANOSECONDS);
    trainResults.setEstimatorName(getClassifierName());
    trainResults.setDatasetName(data.relationName());
    trainResults.setFoldID(seed);
    trainResults.setParas(getParameters());
    int numTrees = 500;
    int bagProp = 100;
    Classifier[] classifiers = new Classifier[numTrees];
    RISE RISE = new RISE(seed);
    // How many bags each case was out-of-bag for (used to average distributions).
    int[] timesInTest = new int[data.size()];
    double[][][] distributions = new double[numTrees][data.size()][(int) data.numClasses()];
    double[][] finalDistributions = new double[data.size()][(int) data.numClasses()];
    int[][] bags;
    ArrayList[] testIndexs = new ArrayList[numTrees];
    double[] bagAccuracies = new double[numTrees];
    bags = generateBags(numTrees, bagProp, data);
    for (int i = 0; i < bags.length; i++) {
        // Extract this member's interval, then apply the configured transforms.
        Instances intervalInstances = produceIntervalInstances(data, i);
        try {
            intervalInstances = filterData(intervalInstances);
        } catch (Exception e) {
            System.out.println("Could not filter data in findTrainAcc: " + e.toString());
        }
        Instances trainHeader = new Instances(intervalInstances, 0);
        Instances testHeader = new Instances(intervalInstances, 0);
        ArrayList<Integer> indexs = new ArrayList<>();
        // bags[i][j] == 0 means case j is out-of-bag for member i; otherwise it is
        // added to the member's train set once per sampled count.
        for (int j = 0; j < bags[i].length; j++) {
            if (bags[i][j] == 0) {
                testHeader.add(intervalInstances.get(j));
                timesInTest[j]++;
                indexs.add(j);
            }
            for (int k = 0; k < bags[i][j]; k++) {
                trainHeader.add(intervalInstances.get(j));
            }
        }
        testIndexs[i] = indexs;
        classifiers[i] = new RandomTree();
        ((RandomTree) classifiers[i]).setKValue(trainHeader.numAttributes() - 1);
        try {
            // RISE.buildClassifier(trainHeader);
            classifiers[i].buildClassifier(trainHeader);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Score the out-of-bag cases with this member.
        for (int j = 0; j < testHeader.size(); j++) {
            try {
                // distributions[i][indexs.get(j)] =
                // RISE.distributionForInstance(testHeader.get(j));
                distributions[i][indexs.get(j)] = classifiers[i].distributionForInstance(testHeader.get(j));
                // if (RISE.classifyInstance(testHeader.get(j)) ==
                // testHeader.get(j).classValue()){
                if (classifiers[i].classifyInstance(testHeader.get(j)) == testHeader.get(j).classValue()) {
                    bagAccuracies[i]++;
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        bagAccuracies[i] /= testHeader.size();
        trainHeader.clear();
        testHeader.clear();
    }
    // Sum out-of-bag distributions per case over all members...
    for (int i = 0; i < bags.length; i++) {
        for (int j = 0; j < bags[i].length; j++) {
            if (bags[i][j] == 0) {
                for (int k = 0; k < finalDistributions[j].length; k++) {
                    finalDistributions[j][k] += distributions[i][j][k];
                }
            }
        }
    }
    // ...then normalise by how often each case was out-of-bag.
    for (int i = 0; i < finalDistributions.length; i++) {
        if (timesInTest[i] > 1) {
            for (int j = 0; j < finalDistributions[i].length; j++) {
                finalDistributions[i][j] /= timesInTest[i];
            }
        }
    }
    // Add to trainResults.
    double acc = 0.0;
    for (int i = 0; i < finalDistributions.length; i++) {
        double predClass = 0;
        double predProb = 0.0;
        // argmax over the averaged distribution.
        for (int j = 0; j < finalDistributions[i].length; j++) {
            if (finalDistributions[i][j] > predProb) {
                predProb = finalDistributions[i][j];
                predClass = j;
            }
        }
        trainResults.addPrediction(data.get(i).classValue(), finalDistributions[i], predClass, 0, "");
        if (predClass == data.get(i).classValue()) {
            acc++;
        }
        // NOTE(review): setAcc is called on every iteration; only the final call
        // matters. Moving it after the loop would be equivalent and clearer.
        trainResults.setAcc(acc / data.size());
    }
}

/**
 * Generates numBags bootstrap count vectors: bags[i][j] is the number of times
 * case j was sampled (with replacement) into bag i. Each bag draws
 * data.size() * bagProp/100 samples, using a Random seeded from the classifier
 * seed so bags are reproducible. A count of 0 marks an out-of-bag case.
 */
private int[][] generateBags(int numBags, int bagProp, Instances data) {
    int[][] bags = new int[numBags][data.size()];
    Random random = new Random(seed);
    for (int i = 0; i < numBags; i++) {
        for (int j = 0; j < data.size() * (bagProp / 100.0); j++) {
            bags[i][random.nextInt(data.size())]++;
        }
    }
    return bags;
}

/**
 * Builds a dataset containing only the attributes in
 * [startPoints[x], endPoints[x]) of each series, with the class attribute
 * appended as the final attribute.
 *
 * @param data the full train set
 * @param x    index of the interval (and ensemble member) to extract
 * @return the interval-restricted dataset, class index set to the last attribute
 */
private Instances produceIntervalInstances(Instances data, int x) {
    Instances intervalInstances;
    ArrayList<Attribute> attributes = new ArrayList<>();
    ArrayList<Integer> intervalAttIndexes = new ArrayList<>();
    for (int i = startPoints[x]; i < endPoints[x]; i++) {
        attributes.add(data.attribute(i));
        intervalAttIndexes.add(i);
    }
    // intervalsAttIndexes.add(intervalAttIndexes);
    attributes.add(data.attribute(data.numAttributes() - 1));
    intervalInstances = new Instances(data.relationName(), attributes, data.size());
    // +1 leaves the final slot for the class value.
    double[] intervalInstanceValues = new double[(endPoints[x] - startPoints[x]) + 1];
    for (int i = 0; i < data.size(); i++) {
        for (int j = 0; j < (endPoints[x] - startPoints[x]); j++) {
            intervalInstanceValues[j] = data.get(i).value(intervalAttIndexes.get(j));
        }
        DenseInstance intervalInstance = new DenseInstance(intervalInstanceValues.length);
        intervalInstance.replaceMissingValues(intervalInstanceValues);
        intervalInstance.setValue(intervalInstanceValues.length - 1, data.get(i).classValue());
        intervalInstances.add(intervalInstance);
    }
    intervalInstances.setClassIndex(intervalInstances.numAttributes() - 1);
    return intervalInstances;
}

/**
 * Applies every configured transformer to the data (max lag capped at a
 * quarter of the series length or ACF.DEFAULT_MAXLAG, whichever is smaller)
 * and merges the transformed sets side by side into one dataset whose final
 * attribute is the class.
 *
 * NOTE(review): the inner guard "if (j < transformers.length)" is always true
 * within its loop, so the class column is stripped before every merge —
 * verify whether a different condition (e.g. j < transformers.length - 1) was
 * intended.
 *
 * @throws Exception propagated from the transformers
 */
private Instances filterData(Instances result) throws Exception {
    int maxLag = (result.numAttributes() - 1) / 4;
    if (maxLag > ACF.DEFAULT_MAXLAG)
        maxLag = ACF.DEFAULT_MAXLAG;
    Instances[] t = new Instances[transformers.length];
    for (int j = 0; j < transformers.length; j++) {
        // Im not sure this a sensible or robust way of doing this
        // What if L meant something else to the SimpleFilter?
        // Can you use a whole string, e.g. MAXLAG?
        transformers[j].setOptions(new String[] { "L", maxLag + "" });
        t[j] = transformers[j].transform(result);
    }
    // 4. Merge them all together
    Instances combo = new Instances(t[0]);
    for (int j = 1; j < transformers.length; j++) {
        if (j < transformers.length) {
            combo.setClassIndex(-1);
            combo.deleteAttributeAt(combo.numAttributes() - 1);
        }
        combo = Instances.mergeInstances(combo, t[j]);
    }
    combo.setClassIndex(combo.numAttributes() - 1);
    return combo;
}

/**
 * Majority-vote class distribution: each base classifier classifies its own
 * transformed interval of the query series and casts one vote; votes are
 * normalised by the ensemble size.
 *
 * NOTE(review): the local 'series' array is computed but never read.
 */
@Override
public double[] distributionForInstance(Instance ins) throws Exception {
    double[] votes = new double[ins.numClasses()];
    //// Build instance
    double[] series = ins.toDoubleArray();
    for (int i = 0; i < baseClassifiers.length; i++) {
        int numFeatures = endPoints[i] - startPoints[i] + 1;
        // extract the interval
        for (int j = 0; j < numFeatures; j++) {
            testHolders[i].instance(0).setValue(j, ins.value(j + startPoints[i]));
        }
        // Do the transform
        Instances temp = filterData(testHolders[i]);
        int c = (int) baseClassifiers[i].classifyInstance(temp.instance(0));
        votes[c]++;
    }
    for (int i = 0; i < votes.length; i++)
        votes[i] /= baseClassifiers.length;
    return votes;
}

/**
 * Ad hoc experiment entry point: loads one of the newProblems27 datasets from
 * a hard-coded network path, merges train and test, and evaluates a
 * PS-transform RISE with a 50/50 single-sample split, printing dataset stats
 * and the resulting accuracy/build time.
 */
public static void main(String[] arg) throws Exception {
    Instances dataTrain = loadDataNullable("Z:/ArchiveData/Univariate_arff" + "/" + DatasetLists.newProblems27[2] + "/" + DatasetLists.newProblems27[2] + "_TRAIN");
    Instances dataTest = loadDataNullable("Z:/ArchiveData/Univariate_arff" + "/" + DatasetLists.newProblems27[2] + "/" + DatasetLists.newProblems27[2] + "_TEST");
    Instances data = dataTrain;
    data.addAll(dataTest);
    ClassifierResults cr = null;
    SingleSampleEvaluator sse = new SingleSampleEvaluator();
    sse.setPropInstancesInTrain(0.5);
    sse.setSeed(0);
    RISE RISE = null;
    System.out.println("Dataset name: " + data.relationName());
    System.out.println("Numer of cases: " + data.size());
    System.out.println("Number of attributes: " + (data.numAttributes() - 1));
    System.out.println("Number of classes: " + data.classAttribute().numValues());
    System.out.println("\n");
    try {
        RISE = new RISE();
        RISE.setTransforms("PS");
        cr = sse.evaluate(RISE, data);
        System.out.println("PS");
        System.out.println("Accuracy: " + cr.getAcc());
        System.out.println("Build time (ns): " + cr.getBuildTimeInNanos());
        /*
         * RISE = new RISE(); cr = sse.evaluate(RISE, data);
         * System.out.println("ACF_FFT"); RISE.setTransforms("ACF", "FFT");
         * System.out.println("Accuracy: " + cr.getAcc());
         * System.out.println("Build time (ns): " + cr.getBuildTimeInNanos());
         */
    } catch (Exception e) {
        e.printStackTrace();
    }
    /*
     * Instances train=DatasetLoading.
     * loadDataNullable("C:\\Users\\ajb\\Dropbox\\TSC Problems\\ItalyPowerDemand\\ItalyPowerDemand_TRAIN"
     * ); Instances test=DatasetLoading.
     * loadDataNullable("C:\\Users\\ajb\\Dropbox\\TSC Problems\\ItalyPowerDemand\\ItalyPowerDemand_TEST"
     * ); RISE rif = new RISE(); rif.setTransforms("ACF","AR","AFC"); for(Filter f:
     * rif.filters) System.out.println(f); String[]
     * temp={"PS","Autocorellation","BOB","PACF"}; rif.setTransforms(temp);
     * for(Filter f: rif.filters) System.out.println(f); System.exit(0);
     *
     * rif.buildClassifier(train); System.out.println("build ok:"); double
     * a=ClassifierTools.accuracy(test, rif); System.out.println(" Accuracy ="+a);
     */
    /*
     * //Get the class values as a fast vector Attribute target
     * =data.attribute(data.classIndex());
     *
     * FastVector vals=new FastVector(target.numValues()); for(int
     * j=0;j<target.numValues();j++) vals.addElement(target.value(j));
     * atts.addElement(new
     * Attribute(data.attribute(data.classIndex()).name(),vals)); //Does this create
     * the actual instances? Instances result = new
     * Instances("Tree",atts,data.numInstances()); for(int
     * i=0;i<data.numInstances();i++){ DenseInstance in=new
     * DenseInstance(result.numAttributes()); result.add(in); }
     * result.setClassIndex(result.numAttributes()-1); Instances testHolder =new
     * Instances(result,10); //For each tree
     * System.out.println("Train size "+result.numInstances());
     * System.out.println("Test size "+testHolder.numInstances());
     */
}

/**
 * Tuning space for this classifier: K (number of trees), I (minimum interval
 * length) and T (transform combination).
 *
 * @return the parameter space searched when tuning
 */
@Override
public ParameterSpace getDefaultParameterSearchSpace() {
    // TUNED TSC Classifiers
    // TESTY
    /*
     * Valid options are: <p/> <pre> -T Number of base classifiers. <pre> -I min
     * Interval, integer, should be in range 3 to m-MINa check in build classifier
     * is made to see if if. </pre>
     */
    ParameterSpace ps = new ParameterSpace();
    String[] numTrees = { "100", "200", "300", "400", "500", "600" };
    ps.addParameter("K", numTrees);
    String[] minInterv = { "4", "8", "16", "32", "64", "128" };
    ps.addParameter("I", minInterv);
    String[] transforms = { "ACF", "PS", "ACF PS", "ACF AR PS" };
    ps.addParameter("T", transforms);
    return ps;
}
}
27,233
38.242075
129
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/ShapeletTransformClassifierLegacy.java
/*
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */

package tsml.classifiers.legacy;

import evaluation.evaluators.CrossValidationEvaluator;
import evaluation.tuning.ParameterSpace;
import experiments.data.DatasetLoading;
import fileIO.FullAccessOutFile;
import fileIO.OutFile;
import machine_learning.classifiers.ensembles.ContractRotationForest;
import tsml.classifiers.EnhancedAbstractClassifier;
import tsml.classifiers.TrainTimeContractable;
import tsml.classifiers.Tuneable;
import tsml.transformers.ShapeletTransform;
import tsml.transformers.shapelet_tools.ShapeletTransformFactory;
import tsml.transformers.shapelet_tools.ShapeletTransformFactoryOptions.ShapeletTransformOptions;
import tsml.transformers.shapelet_tools.ShapeletTransformTimingUtilities;
import tsml.transformers.shapelet_tools.distance_functions.ShapeletDistance;
import tsml.transformers.shapelet_tools.quality_measures.ShapeletQuality;
import tsml.transformers.shapelet_tools.search_functions.ShapeletSearch.SearchType;
import tsml.transformers.shapelet_tools.search_functions.ShapeletSearchOptions;
import utilities.InstanceTools;
import weka.classifiers.Classifier;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.TechnicalInformation;
import weka.core.Utils;

import java.io.File;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.concurrent.TimeUnit;

/**
 * ShapeletTransformClassifier
 * Builds a time series classifier by first extracting the best numShapeletsInTransform
 *
 * By default, performs a shapelet transform through full enumeration (max 1000 shapelets selected)
 * then classifies with rotation forest.
 * If can be contracted to a maximum run time for shapelets, and can be configured for a different base classifier
 *
 * //Subsequence distances in SubsSeqDistance : defines how to do sDist, outcomes should be the same, difference efficiency
 *
 */
public class ShapeletTransformClassifierLegacy extends EnhancedAbstractClassifier implements TrainTimeContractable, Tuneable {
    //Basic pipeline is transform, then build classifier on transformed space
    private ShapeletTransform transform; //Configurable ST
    //Default to one hour
    private Instances shapeletData; //Transformed shapelets header info stored here
    private Classifier classifier; //Final classifier built on transformed shapelet data

    /*************** TRANSFORM STRUCTURE SETTINGS *************************************/
    /** Shapelet transform parameters that can be configured through the STC, stored here **/
    private ShapeletTransformOptions transformOptions=new ShapeletTransformOptions();
    private int numShapeletsInTransform = ShapeletTransform.MAXTRANSFORMSIZE;
    private SearchType searchType = SearchType.RANDOM;//FULL == enumeration, RANDOM =random sampled to train time cotnract
    /** Redundant features in the shapelet space are removed prior to building the classifier **/
    int[] redundantFeatures;
    /** PCA Option: not currently implemented, as it has not been debugged
     private boolean performPCA=false;
     private PCA pca;
     private int numPCAFeatures=100;
     */

    /****************** CONTRACTING *************************************/
    /*The contracting is controlled by the number of shapelets to evaluate. This can either be explicitly set by the user
     * through setNumberOfShapeletsToEvaluate, or, if a contract time is set, it is estimated from the contract.
     * If this is zero and no contract time is set, a full evaluation is done.
     */
    private boolean trainTimeContract =false;
    private long trainContractTimeNanos = 0; //Time limit for transform + classifier, fixed by user. If <=0, no contract
    private long transformContractTime = TimeUnit.NANOSECONDS.convert(1, TimeUnit.HOURS);//Time limit assigned to transform, based on contractTime, but fixed in buildClassifier in an adhoc way
    private long classifierContractTime = 0;//Time limit assigned to classifier, based on contractTime, but fixed in buildClassifier in an adhoc way
    private long numShapeletsInProblem = 0; //Number of shapelets in problem if we do a full enumeration
    private double singleShapeletTime=0; //Estimate of the time to evaluate a single shapelet
    private double proportionToEvaluate=1;// Proportion of total num shapelets to evaluate based on time contract
    private long numShapeletsToEvaluate = 0; //Total num shapelets to evaluate over all cases (NOT per case)
    private long transformBuildTime=0;

    /** Sets the transform's contract time directly, in nanoseconds. */
    public void setTransformTime(long t){
        transformContractTime=t;
    }
    /** Sets the transform's contract time in hours (converted to nanoseconds). */
    public void setTransformTimeHours(long t){
        transformContractTime=TimeUnit.NANOSECONDS.convert(t, TimeUnit.HOURS);
    }

    /************* CHECKPOINTING and SAVING ************ Could all move to transformOptions */
    //Check pointing is not fully debugged
    private String checkpointFullPath=""; //location to check point
    private boolean checkpoint=false;
    //If these are set, the shapelet meta information is saved to <path>/Workspace/ and the transforms saved to <path>/Transforms
    private String shapeletOutputPath;
    private boolean saveShapelets=false;
    private boolean pruneMatchingShapelets=false;

    /**
     * @param pruneMatchingShapelets the pruneMatchingShapelets to set
     */
    public void setPruneMatchingShapelets(boolean pruneMatchingShapelets) {
        this.pruneMatchingShapelets = pruneMatchingShapelets;
    }

    //This is set up to allow the user to easily employ a default configuration. Needs a bit of work
    enum ShapeletConfig{BAKEOFF,BALANCED,DEFAULT}
    ShapeletConfig sConfig= ShapeletConfig.DEFAULT;//The default or user set up, to avoid overwriting user defined config

    /** If trainAccuracy is required, there are two mechanisms to obtain it:
     * 2. estimator=CV: do a 10x CV on the train set with a clone
     * of this classifier
     * 3. estimator=OOB: build an OOB model just to get the OOB
     * accuracy estimate
     */
    enum EstimatorMethod{CV,OOB}
    private EstimatorMethod estimator= EstimatorMethod.CV;

    /**
     * Sets the train estimate mechanism from a string ("CV" or "OOB",
     * case-insensitive).
     * @throws UnsupportedOperationException for any other value
     */
    public void setTrainEstimateMethod(String str){
        String s=str.toUpperCase();
        if(s.equals("CV"))
            estimator= EstimatorMethod.CV;
        else if(s.equals("OOB"))
            estimator= EstimatorMethod.OOB;
        else
            throw new UnsupportedOperationException("Unknown estimator method in TSF = "+str);
    }

    /**
     * Default constructor: data-independent transform defaults plus a
     * ContractRotationForest (max 200 trees) as the base classifier.
     */
    public ShapeletTransformClassifierLegacy(){
        super(CAN_ESTIMATE_OWN_PERFORMANCE);
        configureDefaultShapeletTransform();
        ContractRotationForest rotf=new ContractRotationForest();
        rotf.setMaxNumTrees(200);
        classifier=rotf;
        /*  CAWPE base= new CAWPE();//Change to RotF
        base.setupOriginalHESCASettings();
        base.setEstimateOwnPerformance(false);//Defaults to false anyway
        classifier=base;
        */
    }

    /* Not debugged, doesnt currently work
    public void usePCA(){
        setPCA(true);
    }
    public void setPCA(boolean b) {
        setPCA(b,numPCAFeatures);
    }
    public void setPCA(boolean b, int numberEigenvectorsToRetain) {
        performPCA = b;
        pca=new PCA();
        numPCAFeatures=numberEigenvectorsToRetain;
        pca.setNumAttributesToKeep(numPCAFeatures);
    }
    */

    /**
     * Builds the pipeline: splits any train contract 2/3 transform, 1/3
     * classifier; applies the selected configuration (BAKEOFF/BALANCED/DEFAULT);
     * fits the shapelet transform; removes redundant attributes; optionally
     * estimates train accuracy via an external CV; then builds the base
     * classifier on the transformed data.
     */
    @Override
    public void buildClassifier(Instances data) throws Exception {
        // can classifier handle the data?
        getCapabilities().testWithFail(data);
        //Add the requirement to test if there are at least one of each class
        long startTime=System.nanoTime();
        //Give 2/3 time for transform, 1/3 for classifier. Need to only do this if its set to have one.
        //All in nanos
        System.out.println("Are we contracting? "+trainTimeContract+" transform contract time ="+trainContractTimeNanos);
        if(trainTimeContract) {
            transformContractTime = trainContractTimeNanos * 2 / 3;
            classifierContractTime = trainContractTimeNanos - transformContractTime;
        }
        else{
            classifierContractTime=0;
        }
        // Full set up of configs to match published. Note these will reset other parameters to their default
        // to the default, so user set parameters prior to this point will be overwritten,
        switch(sConfig){
            case BAKEOFF:
                //Full enumeration, early abandon, CAWPE basic config, 10n shapelets in transform, capped at n*m
                configureBakeoffShapeletTransform(data);
                break;
            case BALANCED:
                //As with bakeoff, but with binary shapelets and class balancing when #classes > 2, 10n shapelets in transform, uncapped!
                configureDawakShapeletTransform(data);
                break;
            default:
                configureDataDependentShapeletTransform(data);
        }
        //Contracting with the shapelet transform is handled by setting the number of shapelets per series to evaluate.
        //This is done by estimating the time to evaluate a single shapelet then extrapolating (not in aarons way)
        if(transformContractTime >0) {
            printLineDebug(" Contract time limit = "+ transformContractTime);
            configureTrainTimeContract(data, transformContractTime);
        }
        //This is hacked to build a cShapeletTransform
        transform= constructShapeletTransform(data);
        transform.setSuppressOutput(debug);
        //The cConfig CONTRACT option is currently hacked into buildTransfom. here for now
        // if(transform instanceof cShapeletFilter)
        //     ((cShapeletFilter)transform).setContractTime(transformTimeLimit);
        if(transformContractTime >0) {
            // long numberOfShapeletsPerSeries=numShapeletsInProblem/data.numInstances();
            // NOTE(review): integer division before conversion to double — presumably
            // intentional truncation, but verify; also assumes numShapeletsToEvaluate > 0.
            double timePerShapelet= transformContractTime /numShapeletsToEvaluate;
            printLineDebug("Total shapelets per series "+numShapeletsInProblem/data.numInstances()+" num to eval = "+numShapeletsToEvaluate/data.numInstances());
            transform.setContractTime(transformContractTime);
            transform.setAdaptiveTiming(true);
            transform.setTimePerShapelet(timePerShapelet);
            printLineDebug(" Time per shapelet = "+timePerShapelet);
            // transform.setProportionToEvaluate(proportionToEvaluate);
        }
        //Put this in the options rather than here
        transform.setPruneMatchingShapelets(pruneMatchingShapelets);
        shapeletData = transform.fitTransform(data);
        transformBuildTime=System.nanoTime()-startTime; //Need to store this
        printLineDebug("SECONDS:Transform contract =" +(transformContractTime /1000000000L)+" Actual transform time taken = " + (transformBuildTime / 1000000000L+" Proportion of contract used ="+((double)transformBuildTime/ transformContractTime)));
        printLineDebug(" Transform getParas ="+transform.getParameters());
        redundantFeatures=InstanceTools.removeRedundantTrainAttributes(shapeletData);
        if(saveShapelets)
            saveShapeletData(data);
        printLineDebug("Starting STC build classifier after "+(System.nanoTime()-startTime)/1000000000+" ......");
        if(getEstimateOwnPerformance()){
            // if the classifier can estimate its own performance, do that. This is not yet in the time contract!
            boolean doExternalCV=false;
            doExternalCV=!((classifier instanceof EnhancedAbstractClassifier)&&((EnhancedAbstractClassifier)classifier).ableToEstimateOwnPerformance());
            if(doExternalCV) {
                printLineDebug("Doing a CV with base to estimate accuracy");
                int numFolds = setNumberOfFolds(data);
                CrossValidationEvaluator cv = new CrossValidationEvaluator();
                cv.setSeed(seed * 12);
                cv.setNumFolds(numFolds);
                trainResults = cv.crossValidateWithStats(classifier, shapeletData);
            }
            else{//The classifier can handler it internally
                throw new RuntimeException(("ERROR: internal estimates not sorted out yet"));
            }
        }
        if(classifierContractTime>0 && classifier instanceof TrainTimeContractable) {
            ((TrainTimeContractable) classifier).setTrainTimeLimit(classifierContractTime);
        }
        //Optionally do a PCA to reduce dimensionality. Not an option currently, it is broken
        /*        if(performPCA){
            pca.setNumAttributesToKeep(shapeletData.numAttributes()-1);
            pca.fit(shapeletData);
            shapeletData=pca.transform(shapeletData);
            System.out.println(shapeletData.toString());
        }
        */
        //Here get the train estimate directly from classifier using cv for now
        classifier.buildClassifier(shapeletData);
        // Keep only the header so classify/distribution can reuse it per query.
        shapeletData=new Instances(data,0);
        trainResults.setBuildTime(System.nanoTime()-startTime);
        trainResults.setParas(getParameters());
        //HERE: If the base classifier can estimate its own performance, then lets do it here
    }

    /**
     * Classifies one series: appends it to the stored header, applies the
     * fitted transform, strips the redundant attributes found at train time,
     * then delegates to the base classifier.
     * NOTE(review): mutates then restores shapeletData, so this is not
     * thread-safe — confirm single-threaded use by callers.
     */
    @Override
    public double classifyInstance(Instance ins) throws Exception{
        shapeletData.add(ins);
        Instances temp  = transform.transform(shapeletData);
        //Delete redundant
        for(int del:redundantFeatures)
            temp.deleteAttributeAt(del);
        /*        if(performPCA){
            temp=pca.transform(temp);
        }
        */
        Instance test  = temp.get(0);
        shapeletData.remove(0);
        return classifier.classifyInstance(test);
    }

    /**
     * As classifyInstance, but returns the base classifier's class
     * distribution for the transformed query. Same non-thread-safe caveat.
     */
    @Override
    public double[] distributionForInstance(Instance ins) throws Exception{
        shapeletData.add(ins);
        Instances temp  = transform.transform(shapeletData);
        //Delete redundant
        for(int del:redundantFeatures)
            temp.deleteAttributeAt(del);
        /*        if(performPCA){
            temp=pca.transform(temp);
        }
        */
        Instance test  = temp.get(0);
        shapeletData.remove(0);
        return classifier.distributionForInstance(test);
    }

    /** Enables saving of shapelet output under the given directory root. */
    public void setShapeletOutputFilePath(String path){
        shapeletOutputPath = path;
        saveShapelets=true;
    }

    /**
     * Writes the transformed train set as an arff plus a CSV of transform
     * metadata (parameters, shapelet count, op count, lengths) under
     * shapeletOutputPath.
     */
    private void saveShapeletData(Instances data){
        System.out.println("Saving the transform as an arff file and the transform data in different files. The shapelets will also be saved by the transform in the same location.");
        //Write shapelet transform to arff file
        File f= new File(shapeletOutputPath+"ShapeletTransforms/"+data.relationName());
        if(!f.exists())
            f.mkdirs();
        shapeletData.setRelationName(data.relationName());
        DatasetLoading.saveDataset(shapeletData,shapeletOutputPath+"ShapeletTransforms/"+data.relationName()+"/"+data.relationName()+seed+"_TRAIN");
        f= new File(shapeletOutputPath+"Workspace/"+data.relationName());
        if(!f.exists())
            f.mkdirs();
        FullAccessOutFile of=new FullAccessOutFile(shapeletOutputPath+"Workspace/"+data.relationName()+"/shapleletInformation"+seed+".csv");
        String str= getTransformParameters();
        Date date = new Date();
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        of.writeLine("Generated by ShapeletTransformClassifier.java on " + formatter.format(date));
        of.writeLine(str);
        of.writeLine("NumShapelets,"+transform.getNumberOfShapelets());
        of.writeLine("Operations count(not sure!),"+transform.getCount());
        of.writeString("ShapeletLengths");
        ArrayList<Integer> lengths=transform.getShapeletLengths();
        for(Integer i:lengths)
            of.writeString(","+i);
        /*  ArrayList<Shapelet>  shapelets= transform.getShapelets();
        of.writeLine("SHAPELETS:");
        for(Shapelet s:shapelets){
            double[] d=s.getUnivariateShapeletContent();
            for(double x:d)
                of.writeString(x+",");
            of.writeString("\n");
        */
        of.closeFile();
    }

    public ShapeletTransform constructShapeletTransform(Instances data){
        //**** Builds the transform using transformOptions and a search builder ****/
        ShapeletSearchOptions.Builder searchBuilder = new ShapeletSearchOptions.Builder();
        if(seedClassifier)
            searchBuilder.setSeed(2*seed);
        //For some reason stored twice in the transform options and the search builder.
        searchBuilder.setMin(transformOptions.getMinLength());
        searchBuilder.setMax(transformOptions.getMaxLength());
        searchBuilder.setSearchType(searchType);
        if(numShapeletsInProblem==0)
            numShapeletsInProblem=ShapeletTransformTimingUtilities.calculateNumberOfShapelets(data.numInstances(), data.numAttributes()-1, transformOptions.getMinLength(), transformOptions.getMaxLength());
        transformOptions.setKShapelets(numShapeletsInTransform);
        searchBuilder.setNumShapeletsToEvaluate(numShapeletsToEvaluate/data.numInstances());//This is ignored if full search is performed
        transformOptions.setSearchOptions(searchBuilder.build());
        //Finally, get the transform from a Factory with the options set by the builder
        ShapeletTransform st = new ShapeletTransformFactory(transformOptions.build()).getTransform();
        if(saveShapelets && shapeletOutputPath != null)
            st.setLogOutputFile(shapeletOutputPath+"Workspace/"+data.relationName()+"/shapelets"+seed+".csv");
        return st;
    }

    /*********** METHODS TO CONFIGURE TRANSFORM
     * Note there are two types of parameters: data independent and data dependent. The
     * former are set in the constructor, the latter in buildClassifier. We could tidy this up with lambdas
     * Sets up this default parameters that are not data dependent. This is called in the constructor
     * and the user can reconfigure these prior to classifier build. These could also be tuned.
     */
    public void configureDefaultShapeletTransform(){
        searchType= SearchType.FULL;
        transformOptions.setDistanceType(ShapeletDistance.DistanceType.IMPROVED_ONLINE);
        transformOptions.setQualityMeasure(ShapeletQuality.ShapeletQualityChoice.INFORMATION_GAIN);
        transformOptions.setRescalerType(ShapeletDistance.RescalerType.NORMALISATION);
        transformOptions.setRoundRobin(true);
        transformOptions.setCandidatePruning(true);
        transformOptions.setBinaryClassValue(false);
        transformOptions.setClassBalancing(false);
        transformOptions.setKShapelets(ShapeletTransform.MAXTRANSFORMSIZE);
    }

    /**
     * Sets up the parameters that require the data characteristics (series length, number of classes and number of cases
     */
    public void configureDataDependentShapeletTransform(Instances train){
        // NOTE(review): local n is unused; train.numInstances() is called directly below.
        int n = train.numInstances();
        int m = train.numAttributes()-1;
        transformOptions.setMinLength(3);
        transformOptions.setMaxLength(m);
        //DEtermine balanced or not,
        if(train.numClasses() > 2) {
            transformOptions.setBinaryClassValue(true);
            transformOptions.setClassBalancing(true);
        }else{
            transformOptions.setBinaryClassValue(false);
            transformOptions.setClassBalancing(false);
        }
        if(numShapeletsInTransform==ShapeletTransform.MAXTRANSFORMSIZE)//It has not then been set by the user
            numShapeletsInTransform= 10*train.numInstances() < ShapeletTransform.MAXTRANSFORMSIZE ? 10*train.numInstances(): ShapeletTransform.MAXTRANSFORMSIZE;        //Got to cap this surely!
    }

    /**
     * Specific set up for the DAWAK version (rename) described in
     * @param train
     */
    public void configureDawakShapeletTransform(Instances train) {
        configureDefaultShapeletTransform();
        if(train.numClasses() > 2) {
            transformOptions.setBinaryClassValue(true);
            transformOptions.setClassBalancing(true);
        }else{
            transformOptions.setBinaryClassValue(false);
            transformOptions.setClassBalancing(false);
        }
        if(numShapeletsInTransform==ShapeletTransform.MAXTRANSFORMSIZE)//It has not then been set by the user
            numShapeletsInTransform= 10*train.numInstances() < ShapeletTransform.MAXTRANSFORMSIZE ? 10*train.numInstances(): ShapeletTransform.MAXTRANSFORMSIZE;        //Got to cap this surely!
        transformOptions.setKShapelets(numShapeletsInTransform);
    }

    /**
     * configuring a ShapeletTransform to the original ST format used in the bakeoff
     *
     * @param train data set Work in progress
     */
    public void configureBakeoffShapeletTransform(Instances train){
        transformOptions.setDistanceType(ShapeletDistance.DistanceType.NORMAL);
        if(train.numClasses() <10)
            transformOptions.setCandidatePruning(true);
        else
            transformOptions.setCandidatePruning(false);
        transformOptions.setBinaryClassValue(false);
        transformOptions.setClassBalancing(false);
        if(numShapeletsInTransform==ShapeletTransform.MAXTRANSFORMSIZE)//It has not then been set by the user
            numShapeletsInTransform= 10*train.numInstances() < ShapeletTransform.MAXTRANSFORMSIZE ? 10*train.numInstances(): ShapeletTransform.MAXTRANSFORMSIZE;        //Got to cap this surely!
        transformOptions.setKShapelets(numShapeletsInTransform);
    }

    /**
     * This method estimates how many shapelets per series (numShapeletsToEvaluate) can be evaluated given a specific time contract.
     * It should just return this value
     * It also calculates numShapeletsInTransform and proportionToEvaluate, both stored by the classifier. It can set searchType to FULL, if the proportion
     * is estimated to be full.
     * Note the user can set numShapeletsToEvaluate explicitly. The user can also set the contract time explicitly, thus invoking
     * this method in buildClassifier. If both numShapeletsToEvaluate and time have been set, we have a contradiction from the user.
     * We assume time take precedence, and overwrite numShapeletsToEvaluate
     *
     *NEED TO RECONFIGURE FOR USER SET numShapeletToEvaluate
     * @param train train data
     * @param time contract time in nanoseconds
     */
    public void configureTrainTimeContract(Instances train, long time){
        //Configure the search options if a contract has been ser
        //  else
        int n = train.numInstances();
        int m = train.numAttributes() - 1;
        if(time>0){
            searchType = SearchType.RANDOM;
            if(debug)
                System.out.println("Number in transform ="+numShapeletsInTransform+" number to evaluate = "+numShapeletsToEvaluate+" contract time (secs) = "+ time/1000000000);
            numShapeletsInProblem = ShapeletTransformTimingUtilities.calculateNumberOfShapelets(n, m, 3, m);
            //This is aarons way of doing it based on hard coded estimate of the time for a single operation
            proportionToEvaluate= estimatePropOfFullSearchAaron(n,m,time);
            if(proportionToEvaluate==1.0) {
                searchType = SearchType.FULL;
                numShapeletsToEvaluate=numShapeletsInProblem;
            }
            else
                numShapeletsToEvaluate = (long)(numShapeletsInProblem*proportionToEvaluate);
            if(debug) {
                System.out.println(" Total number of shapelets = " + numShapeletsInProblem);
                System.out.println(" Proportion to evaluate = " + proportionToEvaluate);
                System.out.println(" Number to evaluate = " + numShapeletsToEvaluate);
            }
            if(numShapeletsToEvaluate<n)//Got to do 1 per series. Really should reduce if we do this.
                numShapeletsToEvaluate=n;
            numShapeletsInTransform =  numShapeletsToEvaluate > numShapeletsInTransform ? numShapeletsInTransform : (int) numShapeletsToEvaluate;
        }
    }

    //Tony's way of doing it based on a timing model for predicting for a single shapelet
    //Point estimate to set prop, could use a hard coded
    //This is a bit unintuitive, should move full towards a time per shapelet model
    // NOTE(review): when over budget this returns timeRequired/time, which is > 1;
    // a proportion would normally be time/timeRequired — verify intent before use.
    private double estimatePropOfFullSearchTony(int n, int m, int totalNumShapelets, long time){
        double nPower=1.2;
        double mPower=1.3;
        double scaleFactor=Math.pow(2,26);
        singleShapeletTime=Math.pow(n,nPower)*Math.pow(m,mPower)/scaleFactor;
        long timeRequired=(long)(singleShapeletTime*totalNumShapelets);
        double p=1;
        if(timeRequired>time)
            p=timeRequired/(double)time;
        return p;
    }

    //Aarons way of doing it based on time for a single operation
    private double estimatePropOfFullSearchAaron(int n, int m, long time){
        //nanoToOp is currently a hard coded to 10 nanosecs in ShapeletTransformTimingUtilities. This is a bit crap
        //HERE we can estimate it for this run
        long nanoTimeForOp=ShapeletTransformTimingUtilities.nanoToOp;
        // Operations contract
        BigInteger allowedNumberOfOperations = new BigInteger(Long.toString(time / nanoTimeForOp));
        // Operations required
        BigInteger requiredNumberOfOperations = ShapeletTransformTimingUtilities.calculateOps(n, m, 1, 1);
        //Need more operations than we are allowed
        double p=1;
        if (requiredNumberOfOperations.compareTo(allowedNumberOfOperations) > 0) {
            BigDecimal oct = new BigDecimal(allowedNumberOfOperations);
            BigDecimal oc = new BigDecimal(requiredNumberOfOperations);
            BigDecimal prop = oct.divide(oc, MathContext.DECIMAL64);
            p= prop.doubleValue();
        }
        return p;
    }

    /**
     * @return String, comma separated relevant variables, used in Experiment.java to write line 2 of results
     */
    @Override
    public String getParameters(){
        String paras=transform.getShapeletCounts();
        //Build time info
        String result=super.getParameters();
        //Shapelet numbers and contract info
        result+=",numberOfShapeletsInProblem,"+numShapeletsInProblem+",proportionToEvaluate,"+proportionToEvaluate;
        //transform config
        result+=",SearchType,"+searchType;
        result+=","+transformOptions.toString();
        result+=",ConfigSetup,"+sConfig;
        result+=","+paras;
        result+=",Classifier,"+classifier.getClass().getSimpleName();
        String classifierParas="No Classifier Para Info";
        if(classifier instanceof EnhancedAbstractClassifier)
            classifierParas=((EnhancedAbstractClassifier)classifier).getParameters();
        result+=","+classifierParas;
        if(trainTimeContract)
            result+= ",TimeContract(ns), " +trainContractTimeNanos;
        else
            result+=",NoContract";
        result+= ",TransformActualBuildTime,"+transformBuildTime+",trainContractTimeNanos,"+ trainContractTimeNanos +",transformContractTime,"+ transformContractTime;
        result+=",EstimateOwnPerformance,"+getEstimateOwnPerformance();
        if(getEstimateOwnPerformance()) {
            result += ",trainEstimateMethod," + estimator;
        }
        return result;
    }

    /**
     *
     * @return a string containing just the transform parameters
     */
    public String getTransformParameters(){
        String paras=transform.getShapeletCounts();
        String str= "TransformActualBuildTime,"+transformBuildTime+",totalTimeContract,"+ trainContractTimeNanos +",transformTimeContract,"+ transformContractTime;
        //Shapelet numbers and contract info
        str+=",numberOfShapeletsInProblem,"+numShapeletsInProblem+",proportionToEvaluate,"+proportionToEvaluate;
        //transform config
        str+=",SearchType,"+searchType;
        str+=","+transformOptions.toString();
        str+=",ConfigSetup,"+sConfig;
        str+=","+paras;
        return str;
    }

    /** @return the transform's internal operation counter. */
    public long getTransformOpCount(){
        return transform.getCount();
    }

    /** Enables contracting and stores the overall train time limit (nanoseconds). */
    public void setTrainTimeLimit(long amount) {
        trainTimeContract=true;
        trainContractTimeNanos = amount;
    }

    @Override
    public boolean withinTrainContract(long start) {
        return start<trainContractTimeNanos;
    }

    public void setNumberOfShapeletsToEvaluate(long numS){
        numShapeletsToEvaluate = numS;
    }
    public void setNumberOfShapeletsInTransform(int numS){
        numShapeletsInTransform = numS;
    }
    public void setConfiguration(ShapeletConfig s){
        sConfig=s;
    }

    /**
     * String version of setConfiguration; several aliases map to each config,
     * anything unrecognised falls back to DEFAULT.
     */
    public final void setConfiguration(String s){
        String temp=s.toUpperCase();
        switch(temp){
            case "BAKEOFF": case "BAKE OFF": case "BAKE-OFF": case "FULL":
                sConfig= ShapeletConfig.BAKEOFF;
                break;
            case "DAWAK": case "BINARY": case "AARON": case "BALANCED":
                sConfig= ShapeletConfig.BALANCED;
                break;
            default:
                sConfig= ShapeletConfig.DEFAULT;
        }
    }

    /**
     * Checkpoint methods
     */
    public void setSavePath(String path){
        checkpointFullPath=path;
    }

    public void copyFromSerObject(Object obj) throws Exception{
        if(!(obj instanceof ShapeletTransformClassifierLegacy))
            throw new Exception("Not a ShapeletTransformClassifier object");
        //Copy meta data
        ShapeletTransformClassifierLegacy st=(ShapeletTransformClassifierLegacy)obj;
        //We assume the classifiers have not been built, so are basically copying over the set up
        classifier=st.classifier;
        shapeletOutputPath=st.shapeletOutputPath;
        transform=st.transform;
        shapeletData=st.shapeletData;
        // NOTE(review): this declares a LOCAL that shadows the field, so the
        // field redundantFeatures is NOT copied — looks like a bug; verify.
        int[] redundantFeatures=st.redundantFeatures;
        transformBuildTime=st.transformBuildTime;
        trainResults =st.trainResults;
        numShapeletsInTransform =st.numShapeletsInTransform;
        searchType =st.searchType;
        numShapeletsToEvaluate =st.numShapeletsToEvaluate;
        seed =st.seed;
        seedClassifier=st.seedClassifier;
        trainContractTimeNanos =st.trainContractTimeNanos;
    }

    /*********** SETTERS AND GETTERS : Methods for manual configuration  **********/
    /**
     * Set how shapelets are assessed
     * @param qual Quality measure type, options are INFORMATION_GAIN,F_STAT,KRUSKALL_WALLIS,MOODS_MEDIAN
     */
    public void setQualityMeasure(ShapeletQuality.ShapeletQualityChoice qual){
        transformOptions.setQualityMeasure(qual);
    }
    public void setRescalerType(ShapeletDistance.RescalerType r){
        transformOptions.setRescalerType(r);
    }

    /**
     * Set how shapelets are searched for in a given series.
     * @param type: Search type with valid values
     * SearchType {FULL, FS, GENETIC, RANDOM, LOCAL, MAGNIFY, TIMED_RANDOM, SKIPPING, TABU,
     * REFINED_RANDOM, IMP_RANDOM, SUBSAMPLE_RANDOM, SKEWED, BO_SEARCH};
     */
    public void setSearchType(SearchType type) {
        searchType = type;
    }
    public void setClassifier(Classifier c){
        classifier=c;
    }

    // NOTE(review): the YEAR/TITLE values below look transposed (YEAR holds the
    // paper title, TITLE holds "stuff") — placeholder citation, verify before release.
    public TechnicalInformation getTechnicalInformation() {
        TechnicalInformation result;
        result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE);
        result.setValue(TechnicalInformation.Field.AUTHOR, "authors");
        result.setValue(TechnicalInformation.Field.YEAR, "A shapelet transform for time series classification");
        result.setValue(TechnicalInformation.Field.TITLE, "stuff");
        result.setValue(TechnicalInformation.Field.JOURNAL, "places");
        result.setValue(TechnicalInformation.Field.VOLUME, "vol");
        result.setValue(TechnicalInformation.Field.PAGES, "pages");
        return result;
    }

    /**
     * From the interface Tuneable
     * @return the range of parameters to tune over
     */
    @Override
    public ParameterSpace getDefaultParameterSearchSpace(){
        ParameterSpace ps=new ParameterSpace();
        String[] maxNumShapelets={"100","200","300","400","500","600","700","800","900","1000"};
        ps.addParameter("T", maxNumShapelets);
        return ps;
    }
    /**
     * Parses a given list of options to set the parameters of the classifier.
* We use this for the tuning mechanism, setting parameters through setOptions <!-- options-start --> * Valid options are: <p/> * <pre> -S * Number of shapelets kept in the transform.</pre> * More to follow <!-- options-end --> * * @param options the list of options as an array of strings * @throws Exception if an option is not supported */ @Override public void setOptions(String[] options) throws Exception{ String numShapeletsString= Utils.getOption('S', options); if (numShapeletsString.length() != 0) numShapeletsInTransform = Integer.parseInt(numShapeletsString); else throw new Exception("in setOptions Unable to read number of intervals, -T flag is not set"); } public static void main(String[] args) throws Exception { // String dataLocation = "C:\\Temp\\TSC\\"; String dataLocation = "E:\\Data\\TSCProblems2018\\"; String saveLocation = "C:\\Temp\\TSC\\"; String datasetName = "FordA"; int fold = 0; Instances train= DatasetLoading.loadDataNullable(dataLocation+datasetName+File.separator+datasetName+"_TRAIN"); Instances test= DatasetLoading.loadDataNullable(dataLocation+datasetName+File.separator+datasetName+"_TEST"); String trainS= saveLocation+datasetName+File.separator+"TrainCV.csv"; String testS=saveLocation+datasetName+File.separator+"TestPreds.csv"; String preds=saveLocation+datasetName; System.out.println("Data Loaded"); ShapeletTransformClassifierLegacy st= new ShapeletTransformClassifierLegacy(); //st.saveResults(trainS, testS); st.setShapeletOutputFilePath(saveLocation+datasetName+"Shapelets.csv"); st.setMinuteLimit(2); System.out.println("Start transform"); long t1= System.currentTimeMillis(); st.configureDefaultShapeletTransform(); st.configureTrainTimeContract(train,st.trainContractTimeNanos); Instances stTrain=st.transform.fitTransform(train); long t2= System.currentTimeMillis(); System.out.println("BUILD TIME "+((t2-t1)/1000)+" Secs"); OutFile out=new OutFile(saveLocation+"ST_"+datasetName+".arff"); out.writeString(stTrain.toString()); } }
35,206
44.545925
249
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/SubSampleTrainer.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy; import utilities.InstanceTools; import weka.core.Instances; /** * Indicates that the class can subsample the train set if the option is set * * @author ajb */ public interface SubSampleTrainer { public void subSampleTrain(double prop, int seed); default Instances subSample(Instances full, double proportion, int seed){ return InstanceTools.subSampleFixedProportion(full, proportion, seed); } }
1,218
33.828571
78
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/COTE/FlatCote.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.COTE; import experiments.data.DatasetLoading; import tsml.classifiers.distance_based.ElasticEnsemble; import java.util.ArrayList; import java.util.Random; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.classifiers.shapelet_based.ShapeletTransformClassifier; import tsml.filters.shapelet_filters.ShapeletFilter; import tsml.transformers.shapelet_tools.ShapeletTransformTimingUtilities; import utilities.ClassifierTools; import machine_learning.classifiers.ensembles.CAWPE; import weka.core.Instance; import weka.core.Instances; import weka.core.TechnicalInformation; import tsml.transformers.ACF; import tsml.transformers.PowerSpectrum; import weka.core.Randomizable; import weka.core.TechnicalInformationHandler; /** * NOTE: consider this code legacy. There is no reason to use FlatCote over HiveCote. * Also note that file writing/reading from file is not currently supported (will be added soon) @article{bagnall15cote, title={Time-Series Classification with {COTE}: The Collective of Transformation-Based Ensembles}, author={A. Bagnall and J. Lines and J. Hills and A. 
Bostrom}, journal={{IEEE} Transactions on Knowledge and Data Engineering}, volume={27}, issue={9}, pages={2522--2535}, year={2015} } * @author Jason Lines (j.lines@uea.ac.uk) */ public class FlatCote extends EnhancedAbstractClassifier implements TechnicalInformationHandler{ public FlatCote() { super(CANNOT_ESTIMATE_OWN_PERFORMANCE); } @Override public TechnicalInformation getTechnicalInformation() { TechnicalInformation result; result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE); result.setValue(TechnicalInformation.Field.AUTHOR, "A. Bagnall and J. Lines and J. Hills and A. Bostrom"); result.setValue(TechnicalInformation.Field.TITLE, "Time-Series Classification with COTE: The Collective of Transformation-Based Ensembles"); result.setValue(TechnicalInformation.Field.JOURNAL, "IEEE Transactions on Knowledge and Data Engineering"); result.setValue(TechnicalInformation.Field.VOLUME, "27"); result.setValue(TechnicalInformation.Field.NUMBER, "9"); result.setValue(TechnicalInformation.Field.PAGES, "2522-2535"); result.setValue(TechnicalInformation.Field.YEAR, "2015"); return result; } // Flat-COTE includes 35 constituent classifiers: // - 11 from the Elastic Ensemble // - 8 from the Shapelet Transform Ensemble // - 8 from CAWPE (ACF transformed) // - 8 from CAWPE (PS transformed) private Instances train; private ElasticEnsemble ee; private CAWPE st; private CAWPE acf; private CAWPE ps; private int numClassifiers=0; // private ShapeletTransform shapeletTransform; private double[][] cvAccs; private double cvSum; private double[] weightByClass; @Override public void buildClassifier(Instances train) throws Exception{ long t1=System.nanoTime(); this.train = train; ee = new ElasticEnsemble(); ShapeletTransformClassifier stc = new ShapeletTransformClassifier(); stc.setHourLimit(24); stc.setClassifier(new CAWPE()); //Redo for STC //ShapeletTransform shapeletTransform = ShapeletTransformFactory.createTransform(train); ShapeletFilter shapeletFilter = 
ShapeletTransformTimingUtilities.createTransformWithTimeLimit(train, 24); // now defaults to max of 24 hours shapeletFilter.supressOutput(); st = new CAWPE(); //st.setTransform(shapeletFilter); //TODO: Update Shapelets so i can update CAWPE st.setupOriginalHESCASettings(); acf = new CAWPE(); acf.setupOriginalHESCASettings(); acf.setTransform(new ACF()); ps = new CAWPE(); ps.setupOriginalHESCASettings(); ps.setTransform(new PowerSpectrum()); if(seedClassifier){ if(ee instanceof Randomizable) ((Randomizable)ee).setSeed(seed); if(st instanceof Randomizable) ((Randomizable)st).setSeed(seed); if(acf instanceof Randomizable) ((Randomizable)st).setSeed(seed); if(acf instanceof Randomizable) ((Randomizable)st).setSeed(seed); } // st.setDebugPrinting(true); ee.buildClassifier(train); acf.buildClassifier(train); ps.buildClassifier(train); st.buildClassifier(train); cvAccs = new double[4][]; cvAccs[0] = ee.getCVAccs(); cvAccs[1] = st.getIndividualAccEstimates(); cvAccs[2] = acf.getIndividualAccEstimates(); cvAccs[3] = ps.getIndividualAccEstimates(); cvSum = 0; for(int e = 0; e < cvAccs.length;e++){ for(int c = 0; c < cvAccs[e].length; c++){ cvSum+=cvAccs[e][c]; } } long t2=System.nanoTime(); trainResults.setBuildTime(t2-t1); for(int i=0;i<cvAccs.length;i++) numClassifiers+=cvAccs[i].length; } @Override public double[] distributionForInstance(Instance test) throws Exception{ weightByClass = null; classifyInstance(test); double[] dists = new double[weightByClass.length]; for(int c = 0; c < weightByClass.length; c++){ dists[c] = weightByClass[c]/this.cvSum; } return dists; } @Override public double classifyInstance(Instance test) throws Exception{ double[][] preds = new double[4][]; preds[0] = this.ee.classifyInstanceByConstituents(test); preds[1] = this.st.classifyInstanceByConstituents(test); preds[2] = this.acf.classifyInstanceByConstituents(test); preds[3] = this.ps.classifyInstanceByConstituents(test); weightByClass = new double[train.numClasses()]; ArrayList<Double> 
bsfClassVals = new ArrayList<>(); double bsfWeight = -1; for(int e = 0; e < preds.length; e++){ for(int c = 0; c < preds[e].length; c++){ weightByClass[(int)preds[e][c]]+=cvAccs[e][c]; // System.out.print(preds[e][c]+","); if(weightByClass[(int)preds[e][c]] > bsfWeight){ bsfWeight = weightByClass[(int)preds[e][c]]; bsfClassVals = new ArrayList<>(); bsfClassVals.add(preds[e][c]); }else if(weightByClass[(int)preds[e][c]] > bsfWeight){ bsfClassVals.add(preds[e][c]); } } } if(bsfClassVals.size()>1){ return bsfClassVals.get(new Random().nextInt(bsfClassVals.size())); } return bsfClassVals.get(0); } @Override public String getParameters() { String str=super.getParameters(); str+=",NumClassifiers,"+numClassifiers+",EE,"+cvAccs[0].length+",ACF_"+acf.getEnsembleName()+","+cvAccs[1].length+",PS_"+ps.getEnsembleName()+","+cvAccs[2].length+",ST_"+st.getEnsembleName()+","+cvAccs[3].length+",CVAccs,"; for(int i=0;i<cvAccs.length;i++) for(int j=0;j<cvAccs[i].length;j++) str+=cvAccs[i][j]+","; return str; } public static void main(String[] args) throws Exception{ // System.out.println(ClassifierTools.testUtils_getIPDAcc(new FlatCote())); FlatCote fc = new FlatCote(); String datasetName = "Chinatown"; Instances train = DatasetLoading.loadDataNullable("Z:/ArchiveData/Univariate_arff/"+datasetName+"/"+datasetName+"_TRAIN"); Instances test = DatasetLoading.loadDataNullable("Z:/ArchiveData/Univariate_arff/"+datasetName+"/"+datasetName+"_TEST"); System.out.println("Example usage of HiveCote: this is the code used in the paper"); System.out.println(fc.getTechnicalInformation().toString()); System.out.println("Evaluated on "+datasetName); fc.buildClassifier(train); System.out.println("Build is complete"); System.out.println("Flat Cote parameters :"+fc.getParameters()); double a=ClassifierTools.accuracy(test, fc); System.out.println("Test acc for "+datasetName+" = "+a); } }
9,212
38.038136
231
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/COTE/HiveCote.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.COTE; import evaluation.storage.ClassifierResults; import experiments.data.DatasetLoading; import tsml.classifiers.interval_based.TSF; import tsml.classifiers.legacy.RISE; import tsml.classifiers.dictionary_based.BOSS; import tsml.classifiers.distance_based.ElasticEnsemble; import java.io.File; import java.io.FileWriter; import java.util.ArrayList; import java.util.Collections; import java.util.Random; import java.util.Scanner; import java.util.concurrent.TimeUnit; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.filters.shapelet_filters.ShapeletFilter; import utilities.ClassifierTools; import weka.classifiers.Classifier; import weka.core.*; import tsml.classifiers.TrainTimeContractable; import tsml.classifiers.shapelet_based.ShapeletTransformClassifier; /** * Note this is here for legacy reasons only, it has been replaced by tsml.classifiers.hybrids.HIVE_COTE * @author Jason Lines (j.lines@uea.ac.uk) * * Basic use case * HiveCote hc=new HiveCote(); * hc.buildClassifier(data); DEVELOPMENT NOTES for any users added by ajb on 23/7/18: * 1. Hive COTE defaults to shouty mode (prints out stuff). To turn off, call * hc.setShouty(false) * 2. 
Hive COTE can be used with existing results as a post processor, or built all in one. * For larger problems we advise building the components individually as it makes it easier to control * The mechanism for doing this is to use HiveCotePostProcessed in the cote package (NOTE could be tidied). * 3. Full Hive COTE is very slow, but a huge amount of the computation is unnecessary. * The slowest module is the shapelet transform, when * set to do a full enumeration of the shapelet space. However, this is never necessary. * You can contract ST to only search for a fixed time. We are making all the components contract classifiers * By default, we set the sequential build time to 7 days. This is APPROXIMATE and OPTIMISTIC. So we advise set lower to start then increase * change hours amount with, e.g. * hc.setDayLimit(int), hc.setHourLimit(int), hc.setMinuteLimit(int) * or by default, to set hours, * hc.setTrainTimeLimit(long) //breaking aarons interface, soz * to remove any limits, call * hc.setContract(false) * * * To review: whole file writing thing. */ public class HiveCote extends EnhancedAbstractClassifier implements TrainTimeContractable,TechnicalInformationHandler{ private ArrayList<Classifier> classifiers; private ArrayList<String> names; private ConstituentHiveEnsemble[] modules; private boolean verbose = true; private int maxCvFolds = 10;// note: this only affects manual CVs from this class using the crossvalidate method. 
This will not affect internal classifier cv's if they are set within those classes // private boolean writeEnsembleTrainingPredictions = false; // private String ensembleTrainingPredictionsPathAndName = null; private boolean fileWriting = false; private String fileOutputDir; private String fileOutputDataset; private String fileOutputResampleId; private static int MAXCONTRACTHOURS=7*24; private int contractHours=MAXCONTRACTHOURS; //Default to maximum 7 days run time private long trainContractTimeNanos = 0; private boolean trainTimeContract =false; public HiveCote(){ super(CANNOT_ESTIMATE_OWN_PERFORMANCE); this.setDefaultEnsembles(); } public HiveCote(ArrayList<Classifier> classifiers, ArrayList<String> classifierNames){ super(CANNOT_ESTIMATE_OWN_PERFORMANCE); setClassifiers(classifiers,classifierNames); this.classifiers = classifiers; this.names = classifierNames; if(trainTimeContract){ setTrainTimeLimit(TimeUnit.HOURS,contractHours); } } public final void setClassifiers(ArrayList<Classifier> classifiers, ArrayList<String> classifierNames){ for(Classifier c: classifiers) { if (c instanceof EnhancedAbstractClassifier) if (((EnhancedAbstractClassifier) c).ableToEstimateOwnPerformance()) ((EnhancedAbstractClassifier) c).setEstimateOwnPerformance(true); } } @Override public TechnicalInformation getTechnicalInformation() { TechnicalInformation result; result = new TechnicalInformation(TechnicalInformation.Type.ARTICLE); result.setValue(TechnicalInformation.Field.AUTHOR, "J. Lines, S. Taylor and A. 
Bagnall"); result.setValue(TechnicalInformation.Field.TITLE, "Time Series Classification with HIVE-COTE: The Hierarchical Vote Collective of Transformation-Based Ensembles"); result.setValue(TechnicalInformation.Field.JOURNAL, "ACM Transactions on Knowledge Discovery from Data"); result.setValue(TechnicalInformation.Field.VOLUME, "12"); result.setValue(TechnicalInformation.Field.NUMBER, "5"); result.setValue(TechnicalInformation.Field.PAGES, "52"); result.setValue(TechnicalInformation.Field.YEAR, "2018"); return result; } public void setContract(boolean b){ trainTimeContract =b; contractHours=MAXCONTRACTHOURS; setHourLimit(contractHours); } public void setContract(int hours){ trainTimeContract =true; contractHours=hours; setHourLimit(contractHours); } @Override public void setSeed(int seed) { seedClassifier=true; this.seed = seed; rand=new Random(seed); int count =2; for(Classifier c:classifiers){ if(c instanceof Randomizable) ((Randomizable)c).setSeed(seed+count++); } } private void setDefaultEnsembles(){ classifiers = new ArrayList<>(); names = new ArrayList<>(); ElasticEnsemble ee = new ElasticEnsemble(); ee.setEstimateOwnPerformance(true); classifiers.add(ee); ShapeletTransformClassifier stc = new ShapeletTransformClassifier(); // CAWPE h = new CAWPE(); // DefaultShapeletTransformPlaceholder st= new DefaultShapeletTransformPlaceholder(); if(trainTimeContract){ stc.setHourLimit(contractHours); } classifiers.add(stc); // to get around the issue of needing training data RISE rise = new RISE(); classifiers.add(rise); BOSS boss = new BOSS(); boss.setEstimateOwnPerformance(true); classifiers.add(boss); TSF tsf=new TSF(); tsf.setTrainEstimateMethod("CV"); tsf.setEstimateOwnPerformance(true); classifiers.add(tsf); names.add("EE"); names.add("ST"); names.add("RISE"); names.add("BOSS"); names.add("TSF"); } public void turnOnFileWriting(String outputDir, String datasetName){ turnOnFileWriting(outputDir, datasetName, "0"); } public void turnOnFileWriting(String outputDir, 
String datasetName, String resampleFoldIdentifier){ this.fileWriting = true; this.fileOutputDir = outputDir; this.fileOutputDataset = datasetName; this.fileOutputResampleId = resampleFoldIdentifier; } @Override public void buildClassifier(Instances train) throws Exception{ getCapabilities().testWithFail(train); long t1=System.nanoTime(); optionalOutputLine("Start of training"); modules = new ConstituentHiveEnsemble[classifiers.size()]; System.out.println("modules include:"); for(int i = 0; i < classifiers.size();i++){ System.out.println(names.get(i)); } double ensembleAcc; String outputFilePathAndName; for(int i = 0; i < classifiers.size(); i++){ // if classifier is an implementation of TrainAccuracyEstimator, no need to cv for ensemble accuracy as it can self-report // e.g. of the default modules, EE, CAWPE, and BOSS should all have this functionality (group a); RISE and TSF do not currently (group b) so must manualy cv if(EnhancedAbstractClassifier.classifierIsEstimatingOwnPerformance(classifiers.get(i))){ optionalOutputLine("training (group a): "+this.names.get(i)); classifiers.get(i).buildClassifier(train); ClassifierResults res= ((EnhancedAbstractClassifier)classifiers.get(i)).getTrainResults(); modules[i] = new ConstituentHiveEnsemble(this.names.get(i), this.classifiers.get(i), res.getAcc()); if(this.fileWriting){ outputFilePathAndName = fileOutputDir+names.get(i)+"/Predictions/"+this.fileOutputDataset+"/trainFold"+this.fileOutputResampleId+".csv"; genericCvResultsFileWriter(outputFilePathAndName, train, res.getPredClassValsAsArray(), this.fileOutputDataset, modules[i].classifierName, ((EnhancedAbstractClassifier)(modules[i].classifier)).getParameters(), modules[i].ensembleCvAcc); } // else we must do a manual cross validation to get the module's encapsulated cv acc // note this isn't optimal; would be better to change constituent ensembles to self-record cv acc during training, rather than cv-ing and then building // however, this is effectively a wrapper so 
we can add any classifier to the collective without worrying about implementation support }else{ optionalOutputLine("crossval (group b): "+this.names.get(i)); ensembleAcc = crossValidateWithFileWriting(classifiers.get(i), train, maxCvFolds,this.names.get(i)); optionalOutputLine("training (group b): "+this.names.get(i)); classifiers.get(i).buildClassifier(train); modules[i] = new ConstituentHiveEnsemble(this.names.get(i), this.classifiers.get(i), ensembleAcc); } optionalOutputLine("done "+modules[i].classifierName); } if(verbose){ printModuleCvAccs(); } long t2=System.nanoTime(); trainResults.setBuildTime(t2-t1); } private static void genericCvResultsFileWriter(String outFilePathAndName, Instances instances, String classifierName, double[] preds, double cvAcc) throws Exception{ genericCvResultsFileWriter(outFilePathAndName, instances, preds, instances.relationName(), classifierName, "noParamInfo", cvAcc); } private static void genericCvResultsFileWriter(String outFilePathAndName, Instances instances, double[] preds, String datasetName, String classifierName, String paramInfo, double cvAcc) throws Exception{ if(instances.numInstances()!=preds.length){ throw new Exception("Error: num instances doesn't match num preds."); } File outPath = new File(outFilePathAndName); outPath.getParentFile().mkdirs(); FileWriter out = new FileWriter(outFilePathAndName); out.append(datasetName+","+classifierName+",train\n"); out.append(paramInfo+"\n"); out.append(cvAcc+"\n"); for(int i =0; i < instances.numInstances(); i++){ out.append(instances.instance(i).classValue()+","+preds[i]+"\n"); } out.close(); } @Override public double[] distributionForInstance(Instance instance) throws Exception{ return distributionForInstance(instance, null); } private double[] distributionForInstance(Instance instance, StringBuilder[] outputFileBuilders) throws Exception{ if(outputFileBuilders!=null && outputFileBuilders.length!=(modules.length+1)){ throw new Exception("Error: to write test files, there 
must be m+1 output StringBuilders (where m is the number of modules)"); } double[] hiveDists = new double[instance.numClasses()]; double[] moduleDists; double moduleWeight; double bsfClassVal,bsfClassWeight; StringBuilder moduleString; double cvAccSum = 0; for(int m = 0; m < modules.length; m++){ moduleDists = modules[m].classifier.distributionForInstance(instance); moduleString = new StringBuilder(); moduleWeight = modules[m].ensembleCvAcc; bsfClassVal = -1; bsfClassWeight = -1; for(int c = 0; c < hiveDists.length; c++){ hiveDists[c] += moduleDists[c]*moduleWeight; if(outputFileBuilders!=null){ if(moduleDists[c] > bsfClassWeight){ bsfClassWeight = moduleDists[c]; bsfClassVal = c; } moduleString.append(",").append(moduleDists[c]); } } if(outputFileBuilders!=null){ outputFileBuilders[m].append(instance.classValue()).append(",").append(bsfClassVal).append(",").append(moduleString.toString()+"\n"); } cvAccSum+=modules[m].ensembleCvAcc; } for(int h = 0; h < hiveDists.length; h++){ hiveDists[h]/=cvAccSum; } if(outputFileBuilders!=null){ bsfClassVal = -1; bsfClassWeight = -1; moduleString = new StringBuilder(); for(int c = 0; c < hiveDists.length; c++){ if(hiveDists[c] > bsfClassWeight){ bsfClassWeight = hiveDists[c]; bsfClassVal = c; } moduleString.append(",").append(hiveDists[c]); } outputFileBuilders[outputFileBuilders.length-1].append(instance.classValue()).append(",").append(bsfClassVal).append(",").append(moduleString.toString()+"\n"); } return hiveDists; } public double[] classifyInstanceByEnsemble(Instance instance) throws Exception{ double[] output = new double[modules.length]; for(int m = 0; m < modules.length; m++){ output[m] = modules[m].classifier.classifyInstance(instance); } return output; } public void printModuleCvAccs() throws Exception{ if(this.modules==null){ throw new Exception("Error: modules don't exist. 
Train classifier first."); } System.out.println("CV accs by module:"); System.out.println("------------------"); StringBuilder line1 = new StringBuilder(); StringBuilder line2 = new StringBuilder(); for (ConstituentHiveEnsemble module : modules) { line1.append(module.classifierName).append(","); line2.append(module.ensembleCvAcc).append(","); } System.out.println(line1); System.out.println(line2); System.out.println(); } public void makeShouty(){ this.verbose = true; } public void setShouty(boolean b){ this.verbose = b; } private void optionalOutputLine(String message){ if(this.verbose){ System.out.println(message); } } public void setMaxCvFolds(int maxFolds){ this.maxCvFolds = maxFolds; } public void writeTestPredictionsToFile(Instances test, String outputDir, String datasetName) throws Exception{ writeTestPredictionsToFile(test, outputDir, datasetName, "0"); } public void writeTestPredictionsToFile(Instances test, String outputDir, String datasetName, String datasetResampleIdentifier) throws Exception{ this.fileOutputDir = outputDir; this.fileOutputDataset = datasetName; this.fileOutputResampleId = datasetResampleIdentifier; StringBuilder[] outputs = new StringBuilder[this.modules.length+1]; for(int m = 0; m < outputs.length; m++){ outputs[m] = new StringBuilder(); } for(int i = 0; i < test.numInstances(); i++){ this.distributionForInstance(test.instance(i), outputs); } FileWriter out; File dir; Scanner scan; int correct; String lineParts[]; for(int m = 0; m < modules.length; m++){ dir = new File(this.fileOutputDir+modules[m].classifierName+"/Predictions/"+this.fileOutputDataset+"/"); if(dir.exists()==false){ dir.mkdirs(); } correct = 0; scan = new Scanner(outputs[m].toString()); scan.useDelimiter("\n"); while(scan.hasNext()){ lineParts = scan.next().split(","); if(lineParts[0].trim().equalsIgnoreCase(lineParts[1].trim())){ correct++; } } scan.close(); out = new 
FileWriter(this.fileOutputDir+modules[m].classifierName+"/Predictions/"+this.fileOutputDataset+"/testFold"+this.fileOutputResampleId+".csv"); out.append(this.fileOutputDataset+","+this.modules[m].classifierName+",test\n"); out.append("builtInHive\n"); out.append(((double)correct/test.numInstances())+"\n"); out.append(outputs[m]); out.close(); } correct = 0; scan = new Scanner(outputs[outputs.length-1].toString()); scan.useDelimiter("\n"); while(scan.hasNext()){ lineParts = scan.next().split(","); if(lineParts[0].trim().equalsIgnoreCase(lineParts[1].trim())){ correct++; } } scan.close(); dir = new File(this.fileOutputDir+"HIVE-COTE/Predictions/"+this.fileOutputDataset+"/"); if(!dir.exists()){ dir.mkdirs(); } out = new FileWriter(this.fileOutputDir+"HIVE-COTE/Predictions/"+this.fileOutputDataset+"/testFold"+this.fileOutputResampleId+".csv"); out.append(this.fileOutputDataset+",HIVE-COTE,test\nconstituentCvAccs,"); for(int m = 0; m < modules.length; m++){ out.append(modules[m].classifierName+","+modules[m].ensembleCvAcc+","); } out.append("\n"+((double)correct/test.numInstances())+"\n"); out.append("\n"+outputs[outputs.length-1]); out.close(); } public double crossValidate(Classifier classifier, Instances train, int maxFolds) throws Exception{ return crossValidateWithFileWriting(classifier, train, maxFolds, null); } public double crossValidateWithFileWriting(Classifier classifier, Instances train, int maxFolds, String classifierName) throws Exception{ int numFolds = maxFolds; if(numFolds <= 1 || numFolds > train.numInstances()){ numFolds = train.numInstances(); } ArrayList<Instances> folds = new ArrayList<>(); ArrayList<ArrayList<Integer>> foldIndexing = new ArrayList<>(); for(int i = 0; i < numFolds; i++){ folds.add(new Instances(train,0)); foldIndexing.add(new ArrayList<>()); } ArrayList<Integer> instanceIds = new ArrayList<>(); for(int i = 0; i < train.numInstances(); i++){ instanceIds.add(i); } Collections.shuffle(instanceIds, rand); ArrayList<Instances> byClass = 
new ArrayList<>(); ArrayList<ArrayList<Integer>> byClassIndices = new ArrayList<>(); for(int i = 0; i < train.numClasses(); i++){ byClass.add(new Instances(train,0)); byClassIndices.add(new ArrayList<>()); } int thisInstanceId; double thisClassVal; for(int i = 0; i < train.numInstances(); i++){ thisInstanceId = instanceIds.get(i); thisClassVal = train.instance(thisInstanceId).classValue(); byClass.get((int)thisClassVal).add(train.instance(thisInstanceId)); byClassIndices.get((int)thisClassVal).add(thisInstanceId); } // now stratify Instances strat = new Instances(train,0); ArrayList<Integer> stratIndices = new ArrayList<>(); int stratCount = 0; int[] classCounters = new int[train.numClasses()]; while(stratCount < train.numInstances()){ for(int c = 0; c < train.numClasses(); c++){ if(classCounters[c] < byClass.get(c).size()){ strat.add(byClass.get(c).instance(classCounters[c])); stratIndices.add(byClassIndices.get(c).get(classCounters[c])); classCounters[c]++; stratCount++; } } } train = strat; instanceIds = stratIndices; double foldSize = (double)train.numInstances()/numFolds; double thisSum = 0; double lastSum = 0; int floor; int foldSum = 0; int currentStart = 0; for(int f = 0; f < numFolds; f++){ thisSum = lastSum+foldSize+0.000000000001; // to try and avoid double imprecision errors (shouldn't ever be big enough to effect folds when double imprecision isn't an issue) floor = (int)thisSum; if(f==numFolds-1){ floor = train.numInstances(); // to make sure all instances are allocated in case of double imprecision causing one to go missing } for(int i = currentStart; i < floor; i++){ folds.get(f).add(train.instance(i)); foldIndexing.get(f).add(instanceIds.get(i)); } foldSum+=(floor-currentStart); currentStart = floor; lastSum = thisSum; } if(foldSum!=train.numInstances()){ throw new Exception("Error! Some instances got lost file creating folds (maybe a double precision bug). 
Training instances contains "+train.numInstances()+", but the sum of the training folds is "+foldSum); } Instances trainLoocv; Instances testLoocv; double pred, actual; double[] predictions = new double[train.numInstances()]; int correct = 0; Instances temp; // had to add in redundant instance storage so we don't keep killing the base set of Instances by mistake for(int testFold = 0; testFold < numFolds; testFold++){ trainLoocv = null; testLoocv = new Instances(folds.get(testFold)); for(int f = 0; f < numFolds; f++){ if(f==testFold){ continue; } temp = new Instances(folds.get(f)); if(trainLoocv==null){ trainLoocv = temp; }else{ trainLoocv.addAll(temp); } } classifier.buildClassifier(trainLoocv); for(int i = 0; i < testLoocv.numInstances(); i++){ pred = classifier.classifyInstance(testLoocv.instance(i)); actual = testLoocv.instance(i).classValue(); predictions[foldIndexing.get(testFold).get(i)] = pred; if(pred==actual){ correct++; } } } double cvAcc = (double)correct/train.numInstances(); if(this.fileWriting){ String outputFilePathAndName = fileOutputDir+classifierName+"/Predictions/"+this.fileOutputDataset+"/trainFold"+this.fileOutputResampleId+".csv"; genericCvResultsFileWriter(outputFilePathAndName, train, predictions, this.fileOutputDataset, classifierName, "genericInternalCv,numFolds,"+numFolds, cvAcc); } return cvAcc; // return predictions; } @Override public String getParameters() { String str=super.getParameters(); str+=",NumModules,"+classifiers.size(); for(String s:names) str+=","+s; str+=",trainAccEstimate"; for(ConstituentHiveEnsemble m:modules) str+=","+m.ensembleCvAcc; return str; } @Override public void setTrainTimeLimit(long amount) { //Split the time up equally if contracted, if not we have no control trainTimeContract =true; trainContractTimeNanos =amount; long used=0; for(Classifier c:classifiers){ if(c instanceof TrainTimeContractable) ((TrainTimeContractable) c).setTrainTimeLimit(amount/classifiers.size()); used+=amount/classifiers.size(); } 
long remaining = amount-used; //Give any extra to first contracted, //for no real reason othe than simplicity and to avoid hidden randomization. if(remaining>0){ for(Classifier c:classifiers){ if(c instanceof TrainTimeContractable){ ((TrainTimeContractable) c).setTrainTimeLimit(amount/classifiers.size()+remaining); break; } } } } @Override public boolean withinTrainContract(long start) { return start<trainContractTimeNanos; } /** * Parses a given list of options to set the parameters of the classifier. * We use this for the tuning mechanism, setting parameters through setOptions <!-- options-start --> * Valid options are: <p/> * <pre> -T * Number of trees.</pre> * * <pre> -I * Number of intervals to fit.</pre> * <!-- options-end --> * * @param options the list of options as an array of strings * @throws Exception if an option is not supported */ private class ConstituentHiveEnsemble{ public final Classifier classifier; public final double ensembleCvAcc; public final String classifierName; public ConstituentHiveEnsemble(String classifierName, Classifier classifier, double ensembleCvAcc){ this.classifierName = classifierName; this.classifier = classifier; this.ensembleCvAcc = ensembleCvAcc; } } public static class DefaultShapeletFilterPlaceholder extends ShapeletFilter {} public static void main(String[] args) throws Exception{ // System.out.println(ClassifierTools.testUtils_getIPDAcc(new HiveCote())); String dataDir = "C:/users/ajb/dropbox/Code2019/tsml/src/main/java/experiments/data/tsc/"; String datasetName = "Chinatown"; Instances train = DatasetLoading.loadDataNullable(dataDir+datasetName+"/"+datasetName+"_TRAIN"); Instances test = DatasetLoading.loadDataNullable(dataDir+datasetName+"/"+datasetName+"_TEST"); HiveCote hive = new HiveCote(); System.out.println("Example usage of HiveCote: this is the code used in the paper"); System.out.println(hive.getTechnicalInformation().toString()); System.out.println("Evaluated on "+datasetName); hive.makeShouty(); 
hive.buildClassifier(train); System.out.println("Classifier built: Parameter info ="+hive.getParameters()); double a=ClassifierTools.accuracy(test, hive); System.out.println("Test acc for "+datasetName+" = "+a); System.out.println("This is exiting now. After here in main is legacy code. Ask Jason Lines!"); System.exit(0); hive.writeTestPredictionsToFile(test, "prototypeSheets/", datasetName, "0"); int correct = 0; double[] predByEnsemble; int[] correctByEnsemble = new int[hive.modules.length]; for(int i = 0; i < test.numInstances(); i++){ if(hive.classifyInstance(test.instance(i))==test.instance(i).classValue()){ correct++; } predByEnsemble = hive.classifyInstanceByEnsemble(test.instance(i)); // not efficient, just informative. can add this in to the classifyInstance in a hacky way later if need be for(int m = 0; m < predByEnsemble.length; m++){ if(predByEnsemble[m]==test.instance(i).classValue()){ correctByEnsemble[m]++; } } } System.out.println("Overall Acc: "+(double)correct/test.numInstances()); System.out.println("Acc by Module:"); StringBuilder line1 = new StringBuilder(); StringBuilder line2 = new StringBuilder(); for(int m = 0; m < hive.modules.length; m++){ line1.append(hive.modules[m].classifierName).append(","); line2.append((double)correctByEnsemble[m]/test.numInstances()).append(","); } System.out.println(line1); System.out.println(line2); } }
29,793
38.831551
256
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/COTE/cote/AbstractPostProcessedCote.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.COTE.cote; import java.io.File; import java.io.FileWriter; import java.util.ArrayList; import java.util.Scanner; /** * * @author Jason Lines (j.lines@uea.ac.uk) */ public abstract class AbstractPostProcessedCote { protected String resultsDir; protected String datasetName; protected int resampleId = 0;; protected ArrayList<String> classifierNames; protected double[] cvAccs; // [classifier] protected double[] testAccs; // [classifier] protected double[][] testPreds; // [classifier][instance] protected double[][][] testDists; // [classifier][instance][classVal] private double[] testActualClassVals; public static String CLASSIFIER_NAME = "AbstractCOTE"; public void loadResults() throws Exception{ cvAccs = new double[classifierNames.size()]; testPreds = new double[classifierNames.size()][]; testDists = new double[classifierNames.size()][][]; int testSetSize = -1; // we can work this out when we process the first of the test results int numClassVals = -1; File trainResult, testResult; Scanner scan; String[] lineParts; int counter; for(int c = 0; c < this.classifierNames.size(); c++){ trainResult = new File(resultsDir+classifierNames.get(c)+"/Predictions/"+datasetName+"/trainFold"+resampleId+".csv"); 
if(!trainResult.exists() || trainResult.length() == 0){ throw new Exception("Error: training results do not exist ("+trainResult.getAbsolutePath()+")"); } scan = new Scanner(trainResult); scan.useDelimiter("\n"); scan.next(); scan.next(); cvAccs[c] = Double.parseDouble(scan.next().trim()); scan.close(); // we don't need the cv predictions for anything in the current COTE configs, can address this later if we need to store them though testResult = new File(resultsDir+classifierNames.get(c)+"/Predictions/"+datasetName+"/testFold"+resampleId+".csv"); if(testSetSize<=0){ scan = new Scanner(testResult); scan.useDelimiter("\n"); scan.next(); // header scan.next(); // param info scan.next(); // test acc testSetSize = 0; while(scan.hasNext()){ if(numClassVals<=0){ lineParts = scan.next().split(","); numClassVals = lineParts.length-3; // subtract actual, pred, and empty cell for padding }else{ scan.next(); } testSetSize++; } scan.close(); } if(c==0){ testPreds = new double[classifierNames.size()][testSetSize]; testDists = new double[classifierNames.size()][testSetSize][numClassVals]; testActualClassVals = new double[testSetSize]; testAccs = new double[classifierNames.size()]; } scan = new Scanner(testResult); scan.useDelimiter("\n"); scan.next(); scan.next(); testAccs[c] = Double.parseDouble(scan.next().trim()); counter = 0; while(scan.hasNext()){ lineParts = scan.next().split(","); if(lineParts.length==1){ //Tony's RISE files have rogue lines at the end (sometimes!) continue; } testPreds[c][counter] = Double.parseDouble(lineParts[1].trim()); for(int d = 0; d < numClassVals; d++){ testDists[c][counter][d] = Double.parseDouble(lineParts[d+3].trim()); } if(c==0){ testActualClassVals[counter] = Double.parseDouble(lineParts[0].trim()); }else{ if(testActualClassVals[counter]!=Double.parseDouble(lineParts[0].trim())){ throw new Exception("Error: class value mismatch. 
Test file for "+classifierNames.get(c)+ " states that instance "+counter+" has the class value of "+lineParts[0]+", but in "+classifierNames.get(0)+" it was "+testActualClassVals[counter]+"."); } } counter++; } scan.close(); } } protected double classifyInstanceFromDistribution(double[] dist){ double bsfClassVal = -1; double bsfClassWeight = -1; for(int d = 0; d < dist.length; d++){ if(dist[d] > bsfClassWeight){ bsfClassWeight = dist[d]; bsfClassVal = d; } } return bsfClassVal; } public abstract double[] distributionForInstance(int testInstanceId) throws Exception; public void writeTestSheet() throws Exception{ writeTestSheet(this.resultsDir); } public void writeTestSheet(String outputDir) throws Exception{ File outputPath = new File(outputDir+CLASSIFIER_NAME+"/Predictions/"+datasetName+"/"); if(!outputPath.exists()){ outputPath.mkdirs(); if(!outputPath.exists()){ throw new Exception("Error: invalid results path ("+outputPath+")."); } } if(cvAccs==null){ loadResults(); } StringBuilder st = new StringBuilder(); int correct = 0; double act, pred; double[] dist; for(int i = 0; i < testPreds[0].length; i++){ dist = this.distributionForInstance(i); act = this.testActualClassVals[i]; pred = this.classifyInstanceFromDistribution(dist); if(act==pred){ correct++; } st.append(act+","+pred+","); for(int d = 0; d < dist.length; d++){ st.append(","+dist[d]); } st.append("\n"); } FileWriter out = new FileWriter(outputDir+CLASSIFIER_NAME+"/Predictions/"+datasetName+"/testFold"+resampleId+".csv"); out.append(CLASSIFIER_NAME+","+datasetName+",test\n"); out.append("constituentCvAccs"); for(int c = 0; c < classifierNames.size(); c++){ out.append(","+classifierNames.get(c)); out.append(","+cvAccs[c]); } out.append("\n"+((double)correct/testPreds[0].length+"\n")); out.append(st); out.close(); } public double[] getHiveTestPredictions() throws Exception{ return null; } }
7,620
37.882653
267
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/COTE/cote/HiveCotePostProcessed.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.COTE.cote; import experiments.data.DatasetLists; import java.util.ArrayList; /** * * @author Jason Lines (j.lines@uea.ac.uk) */ public class HiveCotePostProcessed extends AbstractPostProcessedCote{ private double alpha = 1; private boolean useVoting = false; { HiveCotePostProcessed.CLASSIFIER_NAME = "HIVE-COTE"; } public HiveCotePostProcessed(String resultsDir, String datasetName, int resampleId, ArrayList<String> classifierNames) { this.resultsDir = resultsDir; this.datasetName = datasetName; this.resampleId = resampleId; this.classifierNames = classifierNames; } public HiveCotePostProcessed(String resultsDir, String datasetName, ArrayList<String> classifierNames) { this.resultsDir = resultsDir; this.datasetName = datasetName; this.resampleId = 0; this.classifierNames = classifierNames; } public HiveCotePostProcessed(String resultsDir, String datasetName, int resampleId) { this.resultsDir = resultsDir; this.datasetName = datasetName; this.resampleId = resampleId; this.classifierNames = getDefaultClassifierNames(); } public HiveCotePostProcessed(String resultsDir, String datasetName) { this.resultsDir = resultsDir; this.datasetName = datasetName; this.resampleId = 0; this.classifierNames = 
getDefaultClassifierNames(); } public void setAlpha(double alpha){ this.alpha = alpha; } private void useVotes(){ this.useVoting = true; } private void useProbs(){ this.useVoting = false; } private ArrayList<String> getDefaultClassifierNames(){ ArrayList<String> names = new ArrayList<>(); names.add("EE"); names.add("ST"); names.add("RISE"); names.add("BOSS"); names.add("TSF"); return names; } @Override public double[] distributionForInstance(int testInstanceId) throws Exception{ if(useVoting){ return this.distributionForInstanceWithVoting(testInstanceId); }else{ return this.distributionForInstanceWithProbs(testInstanceId); } } public double[] distributionForInstanceWithProbs(int testInstanceId) throws Exception{ if(this.testDists==null){ throw new Exception("Error: classifier not initialised correctly. Load results before classifiying."); } int numClasses = this.testDists[0][0].length; double[] outDist = new double[numClasses]; double cvAccSum = 0; for(int classifier = 0; classifier < testDists.length; classifier++){ for(int classVal = 0; classVal < numClasses; classVal++){ outDist[classVal]+= testDists[classifier][testInstanceId][classVal]*(Math.pow(this.cvAccs[classifier],alpha)); } cvAccSum+=(Math.pow(this.cvAccs[classifier],alpha)); } for(int classVal = 0; classVal < numClasses; classVal++){ outDist[classVal]/= cvAccSum; } return outDist; } public double[] distributionForInstanceWithVoting(int testInstanceId) throws Exception{ if(this.testDists==null){ throw new Exception("Error: classifier not initialised correctly. 
Load results before classifiying."); } int numClasses = this.testDists[0][0].length; double[] outDist = new double[numClasses]; double cvAccSum = 0; int maxId; double bsfWeight; for(int classifier = 0; classifier < testDists.length; classifier++){ // find max class value maxId = -1; bsfWeight = -1; for(int classVal = 0; classVal < numClasses; classVal++){ if(testDists[classifier][testInstanceId][classVal] > bsfWeight){ maxId = classVal; bsfWeight = testDists[classifier][testInstanceId][classVal]; } } outDist[maxId]+=(Math.pow(this.cvAccs[classifier],alpha)); cvAccSum+=(Math.pow(this.cvAccs[classifier],alpha)); } for(int classVal = 0; classVal < numClasses; classVal++){ outDist[classVal]/= cvAccSum; } return outDist; } public static void main(String[] args) throws Exception{ // String datasetName = "ItalyPowerDemand"; // Instances train = loadData("C:/users/sjx07ngu/dropbox/tsc problems/"+datasetName+"/"+datasetName+"_TRAIN"); // Instances test = loadData("C:/users/sjx07ngu/dropbox/tsc problems/"+datasetName+"/"+datasetName+"_TEST"); /* Step 1: build Hive and write to file` */ // // HiveCote hc = new HiveCote(); // hc.makeShouty(); // hc.turnOnFileWriting("hiveWritingProto/", datasetName); // hc.buildClassifier(train); // hc.writeTestPredictionsToFile(test, "hiveWritingProto/", datasetName); /* Step 2: read from file and (hhopefully) recreate the same results */ // HiveCotePostProcessed hcpp = new HiveCotePostProcessed("hiveWritingProto/", datasetName); // hcpp.writeTestSheet("hiveWritingProtoRewrite/"); // with alpha = 1 and =4 HiveCotePostProcessed hcpp; double[] alphas = {1.0,4.0}; // double[] alphas = {1.0}; ArrayList<String> classifiersToUse = new ArrayList<>(); classifiersToUse.add("EE_proto"); classifiersToUse.add("ST_HiveProto"); classifiersToUse.add("RISE"); classifiersToUse.add("BOSS"); classifiersToUse.add("TSF"); System.out.println("votes"); for(double alpha:alphas){ for(String datasetName: DatasetLists.tscProblems85){ System.out.println(datasetName+" 
"+alpha); for(int resample = 0; resample < 100; resample++){ try{ // hcpp = new HiveCotePostProcessed("//cmptscsvr.cmp.uea.ac.uk/ueatsc/Results/JayMovingInProgress/", datasetName, resample, classifiersToUse); // hcpp = new HiveCotePostProcessed("C:/3xsshare/Jay/LocalWork/coteConstituentResultsgress/", datasetName, resample, classifiersToUse); hcpp = new HiveCotePostProcessed("C:/3xsshare/Jay/LocalWork/coteConstituentResults/", datasetName, resample, classifiersToUse); hcpp.setAlpha(alpha); hcpp.useVotes(); // hcpp.writeTestSheet("hiveWritingProtoRewrite_alpha"+alpha+"/"); hcpp.writeTestSheet("hiveWritingProtoRewrite_alpha"+alpha+"_votes/"); // hcpp.writeTestSheet("hiveWritingProtoRewrite_alpha"+alpha+"_probs/"); }catch(Exception e){ System.err.println(datasetName+"_"+resample+"_"+alpha); // e.printStackTrace(); } } } } } }
7,982
37.941463
165
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/DTW1NN.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble; import experiments.data.DatasetLoading; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.CandidateNN; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN.LazyAssessNN_DTW; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds.LbKeogh; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache; import tsml.classifiers.legacy.elastic_ensemble.distance_functions.DTW; import machine_learning.classifiers.kNN; import weka.core.Capabilities; import weka.core.Instance; import weka.core.Instances; import java.util.ArrayList; import java.util.Collections; /** * @author Jason Lines (j.lines@uea.ac.uk) */ public class DTW1NN extends Efficient1NN { private double r = 1; private int window; /** * Constructor with specified window size (between 0 and 1). When a window * size is specified, cross-validation methods will become inactive for this * object. * <p> * Note: if window = 1, classifierIdentifier will be DTW_R1_1NN; other * window sizes will results in cId of DTW_Rn_1NN instead. 
This information * is used for any file writing * * @param r */ public DTW1NN(double r) { this.allowLoocv = false; this.r = r; if (r != 1) { this.classifierIdentifier = "DTW_Rn_1NN"; } else { this.classifierIdentifier = "DTW_R1_1NN"; } } /** * A default constructor. Sets the window to 1 (100%), but allows for the * option of cross-validation if the relevant method is called. * <p> * classifierIdentifier is initially set to DTW_R1_1NN, but will * update automatically to DTW_Rn_1NN if loocv is called */ public DTW1NN() { this.r = 1; this.classifierIdentifier = "DTW_R1_1NN"; } public void setWindow(double w) { r = w; } public void turnOffCV() { this.allowLoocv = false; } public void turnOnCV() { this.allowLoocv = true; } @Override public double[] loocv(Instances train) throws Exception { if (this.allowLoocv == true && this.classifierIdentifier.contains("R1")) { this.classifierIdentifier = this.classifierIdentifier.replace("R1", "Rn"); } return super.loocv(train); } @Override public double[] loocv(Instances[] train) throws Exception { if (this.allowLoocv == true && this.classifierIdentifier.contains("R1")) { this.classifierIdentifier = this.classifierIdentifier.replace("R1", "Rn"); } return super.loocv(train); } final public int getWindowSize(int n) { window = (int) (r * n); //Rounded down. //No Warp, windowSize=1 if (window < 1) window = 1; //Full Warp : windowSize=n, otherwise scale between else if (window < n) window++; return window; } // public double classifyInstance(Instance instance) throws Exception{ // if(isDerivative){ // Instances temp = new Instances(instance.dataset(),1); // temp.add(instance); // temp = new Derivative().process(temp); // return classifyInstance(temp.instance(0)); // } // return super.classifyInstance(instance); // } public final double distance(Instance first, Instance second, double cutoff) { // base case - we're assuming class val is last. 
If this is true, this method is fine, // if not, we'll default to the DTW class if (first.classIndex() != first.numAttributes() - 1 || second.classIndex() != second.numAttributes() - 1) { DTW temp = new DTW(); temp.setR(r); return temp.distance(first, second, cutoff); } double minDist; boolean tooBig; int n = first.numAttributes() - 1; int m = second.numAttributes() - 1; /* Parameter 0<=r<=1. 0 == no warp, 1 == full warp generalised for variable window size * */ int windowSize = getWindowSize(n); //Extra memory than required, could limit to windowsize, // but avoids having to recreate during CV //for varying window sizes double[][] matrixD = new double[n][m]; /* //Set boundary elements to max. */ int start, end; for (int i = 0; i < n; i++) { start = windowSize < i ? i - windowSize : 0; end = i + windowSize + 1 < m ? i + windowSize + 1 : m; for (int j = start; j < end; j++) { matrixD[i][j] = Double.MAX_VALUE; } } matrixD[0][0] = (first.value(0) - second.value(0)) * (first.value(0) - second.value(0)); //a is the longer series. //Base cases for warping 0 to all with max interval r //Warp first.value(0] onto all second.value(1]...second.value(r+1] for (int j = 1; j < windowSize && j < m; j++) { matrixD[0][j] = matrixD[0][j - 1] + (first.value(0) - second.value(j)) * (first.value(0) - second.value(j)); } // Warp second.value(0] onto all first.value(1]...first.value(r+1] for (int i = 1; i < windowSize && i < n; i++) { matrixD[i][0] = matrixD[i - 1][0] + (first.value(i) - second.value(0)) * (first.value(i) - second.value(0)); } //Warp the rest, for (int i = 1; i < n; i++) { tooBig = true; start = windowSize < i ? i - windowSize + 1 : 1; end = i + windowSize < m ? 
i + windowSize : m; for (int j = start; j < end; j++) { minDist = matrixD[i][j - 1]; if (matrixD[i - 1][j] < minDist) { minDist = matrixD[i - 1][j]; } if (matrixD[i - 1][j - 1] < minDist) { minDist = matrixD[i - 1][j - 1]; } matrixD[i][j] = minDist + (first.value(i) - second.value(j)) * (first.value(i) - second.value(j)); if (tooBig && matrixD[i][j] < cutoff) { tooBig = false; } } //Early abandon if (tooBig) { return Double.MAX_VALUE; } } //Find the minimum distance at the end points, within the warping window. return matrixD[n - 1][m - 1]; } @Override public double[] distributionForInstance(Instance instance) throws Exception { double[] res = new double[instance.numClasses()]; int r = (int) classifyInstance(instance); res[r] = 1; return res; } @Override public Capabilities getCapabilities() { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. } public static void runComparison() throws Exception { String tscProbDir = "C:/users/sjx07ngu/Dropbox/TSC Problems/"; // String datasetName = "ItalyPowerDemand"; String datasetName = "GunPoint"; // String datasetName = "Beef"; // String datasetName = "Coffee"; // String datasetName = "SonyAiboRobotSurface1"; double r = 0.1; Instances train = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TRAIN"); Instances test = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TEST"); // old version kNN knn = new kNN(); //efaults to k = 1 without any normalisation DTW oldDtw = new DTW(); oldDtw.setR(r); knn.setDistanceFunction(oldDtw); knn.buildClassifier(train); // new version DTW1NN dtwNew = new DTW1NN(r); dtwNew.buildClassifier(train); int correctOld = 0; int correctNew = 0; long start, end, oldTime, newTime; double pred; // classification with old MSM class and kNN start = System.nanoTime(); correctOld = 0; for (int i = 0; i < test.numInstances(); i++) { pred = 
knn.classifyInstance(test.instance(i)); if (pred == test.instance(i).classValue()) { correctOld++; } } end = System.nanoTime(); oldTime = end - start; // classification with new MSM and own 1NN start = System.nanoTime(); correctNew = 0; for (int i = 0; i < test.numInstances(); i++) { pred = dtwNew.classifyInstance(test.instance(i)); if (pred == test.instance(i).classValue()) { correctNew++; } } end = System.nanoTime(); newTime = end - start; System.out.println("Comparison of MSM: " + datasetName); System.out.println("=========================================="); System.out.println("Old acc: " + ((double) correctOld / test.numInstances())); System.out.println("New acc: " + ((double) correctNew / test.numInstances())); System.out.println("Old timing: " + oldTime); System.out.println("New timing: " + newTime); System.out.println("Relative Performance: " + ((double) newTime / oldTime)); } public static void main(String[] args) throws Exception { for (int i = 0; i < 10; i++) { runComparison(); } } @Override public void setParamsFromParamId(Instances train, int paramId) { if (this.allowLoocv) { if (this.classifierIdentifier.contains("R1")) { this.classifierIdentifier = this.classifierIdentifier.replace("R1", "Rn"); } this.r = (double) paramId / 100; } else { throw new RuntimeException("Warning: trying to set parameters of a fixed window DTW"); } } @Override public String getParamInformationString() { return this.r + ""; } /************************************************************************************************ Support for FastEE @author Chang Wei Tan, Monash University (chang.tan@monash.edu) ************************************************************************************************/ @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex) { return LbKeogh.distance(candidate, trainCache.getUE(queryIndex, window), trainCache.getLE(queryIndex, window)); } @Override public double lowerBound(Instance query, 
Instance candidate, int queryIndex, int candidateIndex, double cutOffValue) { return LbKeogh.distance(candidate, trainCache.getUE(queryIndex, window), trainCache.getLE(queryIndex, window), cutOffValue); } @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, SequenceStatsCache cache) { return LbKeogh.distance(candidate, cache.getUE(queryIndex, window), cache.getLE(queryIndex, window)); } @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, double cutOffValue, SequenceStatsCache cache) { return LbKeogh.distance(candidate, cache.getUE(queryIndex, window), cache.getLE(queryIndex, window), cutOffValue); } @Override public void initNNSTable(Instances train, SequenceStatsCache cache) { if (train.size() < 2) { System.err.println("[INIT-NNS-TABLE] Set is too small: " + train.size() + " sequence. At least 2 sequences needed."); } candidateNNS = new CandidateNN[nParams][train.size()]; for (int paramId = 0; paramId < nParams; ++paramId) { for (int len = 0; len < train.size(); ++len) { candidateNNS[paramId][len] = new CandidateNN(); } } classCounts = new int[nParams][train.size()][train.numClasses()]; final LazyAssessNN_DTW[] lazyAssessNNS = new LazyAssessNN_DTW[train.size()]; for (int i = 0; i < train.size(); ++i) { lazyAssessNNS[i] = new LazyAssessNN_DTW(cache); } final ArrayList<LazyAssessNN_DTW> challengers = new ArrayList<>(train.size()); for (int current = 1; current < train.size(); ++current) { final Instance sCurrent = train.get(current); challengers.clear(); for (int previous = 0; previous < current; ++previous) { final LazyAssessNN_DTW d = lazyAssessNNS[previous]; d.set(train.get(previous), previous, sCurrent, current); challengers.add(d); } for (int paramId = nParams - 1; paramId > -1; --paramId) { setParamsFromParamId(train, paramId); final int win = getWindowSize(maxWindow); final CandidateNN currPNN = candidateNNS[paramId][current]; if (currPNN.isNN()) { // 
--- --- WITH NN CASE --- --- // We already have the NN for sure, but we still have to check if current is the new NN for previous for (int previous = 0; previous < current; ++previous) { final CandidateNN prevNN = candidateNNS[paramId][previous]; // --- Try to beat the previous best NN final double toBeat = prevNN.distance; final LazyAssessNN_DTW challenger = lazyAssessNNS[previous]; final LazyAssessNN_DTW.RefineReturnType rrt = challenger.tryToBeat(toBeat, win); // --- Check the result if (rrt == LazyAssessNN_DTW.RefineReturnType.New_best) { final int r = challenger.getMinWindowValidityForFullDistance(); final double d = challenger.getDistance(win); prevNN.set(current, r, d, CandidateNN.Status.NN); if (d < toBeat) { classCounts[paramId][previous] = new int[train.numClasses()]; classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } else if (d == toBeat) { classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } } } } else { // --- --- WITHOUT NN CASE --- --- // We don't have the NN yet. // Sort the challengers so we have the better chance to organize the good pruning. 
Collections.sort(challengers); for (LazyAssessNN_DTW challenger : challengers) { final int previous = challenger.indexQuery; final CandidateNN prevNN = candidateNNS[paramId][previous]; // --- First we want to beat the current best candidate: double toBeat = currPNN.distance; LazyAssessNN_DTW.RefineReturnType rrt = challenger.tryToBeat(toBeat, win); // --- Check the result if (rrt == LazyAssessNN_DTW.RefineReturnType.New_best) { final int r = challenger.getMinWindowValidityForFullDistance(); final double d = challenger.getDistance(win); currPNN.set(previous, r, d, CandidateNN.Status.BC); if (d < toBeat) { classCounts[paramId][current] = new int[train.numClasses()]; classCounts[paramId][current][(int) challenger.getQuery().classValue()]++; } else if (d == toBeat) { classCounts[paramId][current][(int) challenger.getQuery().classValue()]++; } } // --- Now check for previous NN // --- Try to beat the previous best NN toBeat = prevNN.distance; challenger = lazyAssessNNS[previous]; rrt = challenger.tryToBeat(toBeat, win); // --- Check the result if (rrt == LazyAssessNN_DTW.RefineReturnType.New_best) { final int r = challenger.getMinWindowValidityForFullDistance(); final double d = challenger.getDistance(win); prevNN.set(current, r, d, CandidateNN.Status.NN); if (d < toBeat) { classCounts[paramId][previous] = new int[train.numClasses()]; classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } else if (d == toBeat) { classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } } } // --- When we looked at every past sequences, // the current best candidate is really the best one, so the NN. 
// So assign the current NN to all the windows that are valid final int r = currPNN.r; final double d = currPNN.distance; final int index = currPNN.index; final int winEnd = getParamIdFromWindow(r, train.numAttributes() - 1); for (int tmp = paramId; tmp >= winEnd; --tmp) { candidateNNS[tmp][current].set(index, r, d, CandidateNN.Status.NN); classCounts[tmp][current] = classCounts[paramId][current].clone(); } } } } } @Override public void initApproxNNSTable(Instances train, SequenceStatsCache cache, int nSamples) { if (nSamples < 2) { System.err.println("[INIT-APPROX-NNS-TABLE] Set is too small: " + nSamples + " sequence. At least 2 sequences needed."); } candidateNNS = new CandidateNN[nParams][nSamples]; for (int paramId = 0; paramId < nParams; ++paramId) { for (int len = 0; len < nSamples; ++len) { candidateNNS[paramId][len] = new CandidateNN(); } } final LazyAssessNN_DTW[] lazyAssessNNS = new LazyAssessNN_DTW[train.size()]; for (int i = 0; i < train.size(); ++i) { lazyAssessNNS[i] = new LazyAssessNN_DTW(cache); } final ArrayList<LazyAssessNN_DTW> challengers = new ArrayList<>(nSamples); for (int current = 0; current < nSamples; ++current) { final Instance sCurrent = train.get(current); challengers.clear(); for (int previous = 0; previous < train.size(); ++previous) { if (previous == current) continue; final LazyAssessNN_DTW d = lazyAssessNNS[previous]; d.set(train.get(previous), previous, sCurrent, current); challengers.add(d); } for (int paramId = nParams - 1; paramId > -1; --paramId) { setParamsFromParamId(train, paramId); final int win = getWindowSize(maxWindow); final CandidateNN currPNN = candidateNNS[paramId][current]; Collections.sort(challengers); boolean newNN = false; for (LazyAssessNN_DTW challenger : challengers) { // --- Get the data int previous = challenger.indexQuery; if (previous == current) previous = challenger.indexReference; if (previous == currPNN.index) continue; // --- First we want to beat the current best candidate: double toBeat = 
currPNN.distance; LazyAssessNN_DTW.RefineReturnType rrt = challenger.tryToBeat(toBeat, win); // --- Check the result if (rrt == LazyAssessNN_DTW.RefineReturnType.New_best) { int r = challenger.getMinWindowValidityForFullDistance(); double d = challenger.getDistance(win); currPNN.set(previous, r, d, CandidateNN.Status.BC); newNN = true; } if (previous < nSamples) { CandidateNN prevNN = candidateNNS[paramId][previous]; // --- Now check for previous NN // --- Try to beat the previous best NN toBeat = prevNN.distance; challenger = lazyAssessNNS[previous]; rrt = challenger.tryToBeat(toBeat, win); // --- Check the result if (rrt == LazyAssessNN_DTW.RefineReturnType.New_best) { int r = challenger.getMinWindowValidityForFullDistance(); double d = challenger.getDistance(win); prevNN.set(current, r, d, CandidateNN.Status.NN); } } } if (newNN) { int r = currPNN.r; double d = currPNN.distance; int index = currPNN.index; int winEnd = getParamIdFromWindow(r, train.numAttributes() - 1); for (int tmp = paramId; tmp >= winEnd; --tmp) { candidateNNS[tmp][current].set(index, r, d, CandidateNN.Status.NN); } } } } } protected int getParamIdFromWindow(final int w, final int n) { double r = 1.0 * w / n; return (int) Math.ceil(r * 100); } }
22,926
41.378928
148
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/DTWKNN.java
/*
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */

package tsml.classifiers.legacy.elastic_ensemble;

import java.util.Comparator;
import java.util.PriorityQueue;

import static utilities.GenericTools.indexOfMax;

import utilities.generic_storage.Pair;
import weka.core.Instance;

/**
 * Extension of DTW1NN to allow different values of k, originally written
 * to include in HESCA (for timeseries data) so that DTW returns reasonable probability distributions,
 * instead of a zero-one vector.
 *
 * Historical note: the original implementation could return distributions containing NaN
 * (observed on ElectricDevices). Two causes are guarded against below:
 * (1) early-abandoned distances (Double.MAX_VALUE) being inserted into the neighbour queue
 *     before it was full, and
 * (2) division by a zero weight sum (always for k == 1, and whenever all k neighbours are
 *     at distance 0).
 *
 * @author James Large (james.large@uea.ac.uk)
 */
public class DTWKNN extends DTW1NN {

    /** Number of neighbours whose (distance-weighted) votes form the distribution. */
    public int k;

    /**
     * Max-heap ordering on distance: the head of the queue is the WORST
     * (largest-distance) of the k best neighbours found so far.
     */
    private static Comparator<Pair<Double, Integer>> comparator = new Comparator<Pair<Double, Integer>>() {
        @Override
        public int compare(Pair<Double, Integer> o1, Pair<Double, Integer> o2) {
            return o1.var1.compareTo(o2.var1) * (-1); //reverse ordering
        }
    };

    public DTWKNN() {
        super();
        k = 5;
    }

    public DTWKNN(int k) {
        super();
        this.k = k;
    }

    public DTWKNN(double r) {
        super(r);
        k = 5;
    }

    public DTWKNN(double r, int k) {
        super(r);
        this.k = k;
    }

    @Override
    public double classifyInstance(Instance instance) throws Exception {
        return indexOfMax(distributionForInstance(instance));
    }

    /**
     * Distance-weighted k-NN vote over the training set.
     *
     * @param testInst instance to classify
     * @return a probability distribution over the training set's classes; entries sum to 1
     * @throws Exception propagated from the underlying distance computation
     */
    @Override
    public double[] distributionForInstance(Instance testInst) throws Exception {
        // the pair is <distance, classvalue>
        PriorityQueue<Pair<Double, Integer>> topK = new PriorityQueue<>(k, comparator);

        double thisDist = distance(testInst, train.instance(0), Double.MAX_VALUE);
        topK.add(new Pair<>(thisDist, (int) train.instance(0).classValue()));

        for (int i = 1; i < train.numInstances(); ++i) {
            Instance trainInst = train.instance(i);

            // BUGFIX: only early-abandon once the queue already holds k candidates.
            // Before it is full the instance is inserted regardless of its distance,
            // so an abandoned result (Double.MAX_VALUE) would enter topK and poison
            // the vote weighting below.
            double cutoff = topK.size() < k ? Double.MAX_VALUE : topK.peek().var1;
            thisDist = distance(testInst, trainInst, cutoff);

            if (topK.size() < k) { //not yet full
                topK.add(new Pair<>(thisDist, (int) trainInst.classValue()));
            } else if (thisDist < topK.peek().var1) {
                topK.poll();
                topK.add(new Pair<>(thisDist, (int) trainInst.classValue()));
            }
        }

        double distanceSum = .0;
        for (Pair<Double, Integer> pair : topK) {
            distanceSum += pair.var1;
        }

        double[] distribution = new double[train.numClasses()];

        // BUGFIX: if every retained neighbour is at distance 0 (exact duplicates of
        // the query) the inverse-distance weights below would all be 0/0 == NaN.
        // Fall back to a uniform vote over the neighbours.
        if (distanceSum == 0) {
            double vote = 1.0 / topK.size();
            for (Pair<Double, Integer> pair : topK) {
                distribution[pair.var2] += vote;
            }
            return distribution;
        }

        // Weight each neighbour by how small its distance is relative to the total.
        double weightSum = .0;
        for (Pair<Double, Integer> pair : topK) {
            pair.var1 = 1 - (pair.var1 / distanceSum);
            weightSum += pair.var1;
        }

        // BUGFIX: for k == 1 the single weight is 1 - d/d == 0 exactly, so the old
        // normalisation produced 0/0 == NaN. Uniform vote covers this degenerate case.
        if (weightSum == 0) {
            double vote = 1.0 / topK.size();
            for (Pair<Double, Integer> pair : topK) {
                distribution[pair.var2] += vote;
            }
            return distribution;
        }

        for (Pair<Double, Integer> pair : topK) {
            distribution[pair.var2] += pair.var1 / weightSum;
        }
        return distribution;
    }
}
4,211
34.1
115
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/ED1NN.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble; import experiments.data.DatasetLoading; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds.LbKim; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache; import machine_learning.classifiers.kNN; import weka.core.Capabilities; import weka.core.EuclideanDistance; import weka.core.Instance; import weka.core.Instances; //import efficient_standalone_classifiers.Eff /** * adjusted April '16 * note: not using DTW class in here (redoing the method) as even though the DTW class is already about as efficient, it still * involves some array copying. Here we can opperate straight on the Instance values instead * * @author sjx07ngu */ public class ED1NN extends Efficient1NN { public ED1NN() { this.classifierIdentifier = "Euclidean_1NN"; this.allowLoocv = false; this.singleParamCv = true; } public final double distance(Instance first, Instance second, double cutoff) { // base case - we're assuming class val is last. 
If this is true, this method is fine, // if not, we'll default to the DTW class if (first.classIndex() != first.numAttributes() - 1 || second.classIndex() != second.numAttributes() - 1) { EuclideanDistance temp = new EuclideanDistance(); temp.setDontNormalize(true); return temp.distance(first, second, cutoff); } double sum = 0; for (int a = 0; a < first.numAttributes() - 1; a++) { sum += (first.value(a) - second.value(a)) * (first.value(a) - second.value(a)); } // return Math.sqrt(sum); return sum; } @Override public double[] distributionForInstance(Instance instance) throws Exception { double[] dist = new double[instance.numClasses()]; dist[(int) classifyInstance(instance)] = 1; return dist; } @Override public Capabilities getCapabilities() { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. } public static void runComparison() throws Exception { String tscProbDir = "C:/users/sjx07ngu/Dropbox/TSC Problems/"; // String datasetName = "ItalyPowerDemand"; // String datasetName = "GunPoint"; // String datasetName = "Beef"; // String datasetName = "Coffee"; String datasetName = "SonyAiboRobotSurface1"; double r = 0.1; Instances train = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TRAIN"); Instances test = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TEST"); // old version kNN knn = new kNN(); //efaults to k = 1 without any normalisation EuclideanDistance oldED = new EuclideanDistance(); oldED.setDontNormalize(true); knn.setDistanceFunction(oldED); knn.buildClassifier(train); // new version ED1NN edNew = new ED1NN(); edNew.buildClassifier(train); int correctOld = 0; int correctNew = 0; long start, end, oldTime, newTime; double pred; // classification with old MSM class and kNN start = System.nanoTime(); correctOld = 0; for (int i = 0; i < test.numInstances(); i++) { pred = knn.classifyInstance(test.instance(i)); if (pred == 
test.instance(i).classValue()) { correctOld++; } } end = System.nanoTime(); oldTime = end - start; // classification with new MSM and own 1NN start = System.nanoTime(); correctNew = 0; for (int i = 0; i < test.numInstances(); i++) { pred = edNew.classifyInstance(test.instance(i)); if (pred == test.instance(i).classValue()) { correctNew++; } } end = System.nanoTime(); newTime = end - start; System.out.println("Comparison of MSM: " + datasetName); System.out.println("=========================================="); System.out.println("Old acc: " + ((double) correctOld / test.numInstances())); System.out.println("New acc: " + ((double) correctNew / test.numInstances())); System.out.println("Old timing: " + oldTime); System.out.println("New timing: " + newTime); System.out.println("Relative Performance: " + ((double) newTime / oldTime)); } public static void main(String[] args) throws Exception { for (int i = 0; i < 10; i++) { runComparison(); } } @Override public void setParamsFromParamId(Instances train, int paramId) { // could throw an Exception but it shouldn't make a difference since this measure has no params, // so just warn the user that they're probably not doing what they think they are! 
// System.err.println("warning: ED has not parameters to set; call to setParamFromParamId made no changes"); } @Override public String getParamInformationString() { return "NoParams"; } /************************************************************************************************ Support for FastEE @author Chang Wei Tan, Monash University (chang.tan@monash.edu) ************************************************************************************************/ @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex) { return LbKim.distance(query, candidate); } @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, double cutOffValue) { return LbKim.distance(query, candidate); } @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, SequenceStatsCache cache) { return LbKim.distance(query, candidate); } @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, double cutOffValue, SequenceStatsCache cache) { return LbKim.distance(query, candidate); } @Override public void initNNSTable(Instances trainData, SequenceStatsCache cache) { // do nothing } @Override public void initApproxNNSTable(Instances trainData, SequenceStatsCache cache, int nSamples) { // do nothing } }
7,339
36.44898
148
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/ERP1NN.java
/*
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */

package tsml.classifiers.legacy.elastic_ensemble;

import experiments.data.DatasetLoading;
import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.CandidateNN;
import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN.LazyAssessNN_ERP;
import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds.LbErp;
import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache;
import tsml.classifiers.legacy.elastic_ensemble.distance_functions.ERPDistance;
import machine_learning.classifiers.kNN;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;

import java.util.ArrayList;
import java.util.Collections;

//import efficient_standalone_classifiers.Eff
/**
 * adjusted April '16
 * note: not using DTW class in here (redoing the method) as even though the DTW class is already about as efficient, it still
 * involves some array copying. Here we can opperate straight on the Instance values instead
 *
 * @author sjx07ngu
 */
public class ERP1NN extends Efficient1NN {

    // ERP's two parameters: the gap penalty value g and the band size
    // (fraction of the series length allowed off the diagonal)
    private double g;
    private double bandSize;

    // candidate parameter values for CV; lazily populated from the training data
    // (see setParamsFromParamId), hence the refreshed flag below
    private double[] gValues;
    private double[] windowSizes;
    private boolean gAndWindowsRefreshed = false;

    public ERP1NN(double g, double bandSize) {
        this.g = g;
        this.bandSize = bandSize;
        this.gAndWindowsRefreshed = false;
        this.classifierIdentifier = "ERP_1NN";
        this.allowLoocv = false;
    }

    public ERP1NN() {
        // note: default params probably won't suit the majority of problems. Should set through cv or prior knowledge
        this.g = 0.5;
        this.bandSize = 5;
        this.gAndWindowsRefreshed = false;
        this.classifierIdentifier = "ERP_1NN";
    }

    @Override
    public void buildClassifier(Instances train) throws Exception {
        super.buildClassifier(train);
        // force the CV parameter grids to be regenerated from this training set
        this.gAndWindowsRefreshed = false;
    }

    /**
     * ERP (Edit distance with Real Penalty) between two instances, computed with a
     * two-column dynamic programming matrix and a Sakoe-Chiba-style band.
     * Assumes the class attribute is last; otherwise falls back to ERPDistance.
     * NOTE(review): the cutoff parameter is only forwarded on the fallback path;
     * the main loop performs no early abandon.
     *
     * @param first  first series
     * @param second second series
     * @param cutoff best-so-far distance (used by the fallback implementation only)
     * @return the ERP distance (sqrt of the accumulated squared costs)
     */
    public final double distance(Instance first, Instance second, double cutoff) {
        // base case - we're assuming class val is last. If this is true, this method is fine,
        // if not, we'll default to the DTW class
        if (first.classIndex() != first.numAttributes() - 1 || second.classIndex() != second.numAttributes() - 1) {
            return new ERPDistance(this.g, this.bandSize).distance(first, second, cutoff);
        }

        int m = first.numAttributes() - 1;
        int n = second.numAttributes() - 1;

        // Current and previous columns of the matrix
        double[] curr = new double[m];
        double[] prev = new double[m];

        // size of edit distance band
        // bandsize is the maximum allowed distance to the diagonal
//        int band = (int) Math.ceil(v2.getDimensionality() * bandSize);
        int band = (int) Math.ceil(m * bandSize);

        // g parameter for local usage
        double gValue = g;

        for (int i = 0; i < m; i++) {
            // Swap current and prev arrays. We'll just overwrite the new curr.
            {
                double[] temp = prev;
                prev = curr;
                curr = temp;
            }
            int l = i - (band + 1);
            if (l < 0) {
                l = 0;
            }
            int r = i + (band + 1);
            if (r > (m - 1)) {
                r = (m - 1);
            }

            for (int j = l; j <= r; j++) {
                if (Math.abs(i - j) <= band) {
                    // compute squared distance of feature vectors
                    // d1: cost of matching first[i] against the gap value g
                    double val1 = first.value(i);
                    double val2 = gValue;
                    double diff = (val1 - val2);
                    final double d1 = Math.sqrt(diff * diff);

                    // d2: cost of matching second[j] against the gap value g
                    val1 = gValue;
                    val2 = second.value(j);
                    diff = (val1 - val2);
                    final double d2 = Math.sqrt(diff * diff);

                    // d12: cost of matching first[i] against second[j]
                    val1 = first.value(i);
                    val2 = second.value(j);
                    diff = (val1 - val2);
                    final double d12 = Math.sqrt(diff * diff);

                    final double dist1 = d1 * d1;
                    final double dist2 = d2 * d2;
                    final double dist12 = d12 * d12;

                    final double cost;

                    if ((i + j) != 0) {
                        if ((i == 0) || ((j != 0) && (((prev[j - 1] + dist12) > (curr[j - 1] + dist2)) && ((curr[j - 1] + dist2) < (prev[j] + dist1))))) {
                            // del
                            cost = curr[j - 1] + dist2;
                        } else if ((j == 0) || ((i != 0) && (((prev[j - 1] + dist12) > (prev[j] + dist1)) && ((prev[j] + dist1) < (curr[j - 1] + dist2))))) {
                            // ins
                            cost = prev[j] + dist1;
                        } else {
                            // match
                            cost = prev[j - 1] + dist12;
                        }
                    } else {
                        cost = 0;
                    }

                    curr[j] = cost;
                    // steps[i][j] = step;
                } else {
                    curr[j] = Double.POSITIVE_INFINITY; // outside band
                }
            }
        }
        return Math.sqrt(curr[m - 1]);
    }

    @Override
    public Capabilities getCapabilities() {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    /**
     * Ad-hoc sanity/timing comparison of this implementation against Weka's
     * kNN + ERPDistance on a hard-coded local dataset. Developer utility only.
     */
    public static void runComparison() throws Exception {
        String tscProbDir = "C:/users/sjx07ngu/Dropbox/TSC Problems/";

//        String datasetName = "ItalyPowerDemand";
//        String datasetName = "GunPoint";
//        String datasetName = "Beef";
//        String datasetName = "Coffee";
        String datasetName = "SonyAiboRobotSurface1";

        double r = 0.1;
        Instances train = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TRAIN");
        Instances test = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TEST");

        // old version
        kNN knn = new kNN(); //efaults to k = 1 without any normalisation
        ERPDistance oldDtw = new ERPDistance(0.1, 0.1);
//        oldDtw.setR(r);
        knn.setDistanceFunction(oldDtw);
        knn.buildClassifier(train);

        // new version
        ERP1NN dtwNew = new ERP1NN(0.1, 0.1);
        dtwNew.buildClassifier(train);

        int correctOld = 0;
        int correctNew = 0;

        long start, end, oldTime, newTime;
        double pred;

        // classification with old MSM class and kNN
        start = System.nanoTime();
        correctOld = 0;
        for (int i = 0; i < test.numInstances(); i++) {
            pred = knn.classifyInstance(test.instance(i));
            if (pred == test.instance(i).classValue()) {
                correctOld++;
            }
        }
        end = System.nanoTime();
        oldTime = end - start;

        // classification with new MSM and own 1NN
        start = System.nanoTime();
        correctNew = 0;
        for (int i = 0; i < test.numInstances(); i++) {
            pred = dtwNew.classifyInstance(test.instance(i));
            if (pred == test.instance(i).classValue()) {
                correctNew++;
            }
        }
        end = System.nanoTime();
        newTime = end - start;

        // NOTE(review): headers say "MSM" but this utility compares ERP - copy-paste artefact
        System.out.println("Comparison of MSM: " + datasetName);
        System.out.println("==========================================");
        System.out.println("Old acc:    " + ((double) correctOld / test.numInstances()));
        System.out.println("New acc:    " + ((double) correctNew / test.numInstances()));
        System.out.println("Old timing: " + oldTime);
        System.out.println("New timing: " + newTime);
        System.out.println("Relative Performance: " + ((double) newTime / oldTime));
    }

    public static void main(String[] args) throws Exception {
        for (int i = 0; i < 10; i++) {
            runComparison();
        }
    }

    /**
     * Maps a paramId in [0, 99] onto the (g, bandSize) grid: paramId / 10 indexes
     * the g values and paramId % 10 the window sizes. The grids are built once per
     * training set (g values scale with the data's standard deviation).
     */
    @Override
    public void setParamsFromParamId(Instances train, int paramId) {
        if (!this.gAndWindowsRefreshed) {
            double stdv = ERPDistance.stdv_p(train);
            windowSizes = ERPDistance.getInclusive10(0, 0.25);
            gValues = ERPDistance.getInclusive10(0.2 * stdv, stdv);
            this.gAndWindowsRefreshed = true;
        }

        this.g = gValues[paramId / 10];
        this.bandSize = windowSizes[paramId % 10];
    }

    @Override
    public String getParamInformationString() {
        return this.g + "," + this.bandSize;
    }

    /************************************************************************************************
     Support for FastEE
     @author Chang Wei Tan, Monash University (chang.tan@monash.edu)
     ************************************************************************************************/
    @Override
    public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex) {
        // LB_ERP against the cached upper/lower envelopes of the query for the current (g, bandSize)
        return LbErp.distance(candidate, trainCache.getUE(queryIndex, g, bandSize), trainCache.getLE(queryIndex, g, bandSize));
    }

    @Override
    public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, double cutOffValue) {
        return LbErp.distance(candidate, trainCache.getUE(queryIndex, g, bandSize), trainCache.getLE(queryIndex, g, bandSize), cutOffValue);
    }

    @Override
    public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, SequenceStatsCache cache) {
        return LbErp.distance(candidate, cache.getUE(queryIndex, g, bandSize), cache.getLE(queryIndex, g, bandSize));
    }

    @Override
    public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, double cutOffValue, SequenceStatsCache cache) {
        return LbErp.distance(candidate, cache.getUE(queryIndex, g, bandSize), cache.getLE(queryIndex, g, bandSize), cutOffValue);
    }

    /**
     * Builds the FastEE nearest-neighbour table for every parameter setting, walking
     * the parameter grid from the largest paramId downwards and reusing/pruning
     * distance computations via LazyAssessNN. Also maintains per-parameter class
     * counts used for tie handling.
     */
    @Override
    public void initNNSTable(Instances train, SequenceStatsCache cache) {
        if (train.size() < 2) {
            System.err.println("[INIT-NNS-TABLE] Set is to small: " + train.size() + " sequence. At least 2 sequences needed.");
        }
        candidateNNS = new CandidateNN[nParams][train.size()];
        for (int paramId = 0; paramId < nParams; ++paramId) {
            for (int len = 0; len < train.size(); ++len) {
                candidateNNS[paramId][len] = new CandidateNN();
            }
        }
        classCounts = new int[nParams][train.size()][train.numClasses()];

        final LazyAssessNN_ERP[] lazyAssessNNS = new LazyAssessNN_ERP[train.size()];
        for (int i = 0; i < train.size(); ++i) {
            lazyAssessNNS[i] = new LazyAssessNN_ERP(cache);
        }
        final ArrayList<LazyAssessNN_ERP> challengers = new ArrayList<>(train.size());

        for (int current = 1; current < train.size(); ++current) {
            final Instance sCurrent = train.get(current);

            // the NN of 'current' is searched among all earlier sequences only
            challengers.clear();
            for (int previous = 0; previous < current; ++previous) {
                final LazyAssessNN_ERP d = lazyAssessNNS[previous];
                d.set(train.get(previous), previous, sCurrent, current);
                challengers.add(d);
            }

            for (int paramId = nParams - 1; paramId > -1; --paramId) {
                setParamsFromParamId(train, paramId);

                final CandidateNN currPNN = candidateNNS[paramId][current];

                if (currPNN.isNN()) {
                    // --- --- WITH NN CASE --- ---
                    // We already have the NN for sure, but we still have to check if current is the new NN for previous
                    for (int previous = 0; previous < current; ++previous) {
                        // --- Get the data
                        final CandidateNN prevNN = candidateNNS[paramId][previous];

                        // --- Try to beat the previous best NN
                        final double toBeat = prevNN.distance;
                        final LazyAssessNN_ERP challenger = lazyAssessNNS[previous];
                        final LazyAssessNN_ERP.RefineReturnType rrt = challenger.tryToBeat(toBeat, g, bandSize);

                        // --- Check the result
                        if (rrt == LazyAssessNN_ERP.RefineReturnType.New_best) {
                            final int r = challenger.getMinWindowValidityForFullDistance();
                            final double d = challenger.getDistance(ERPDistance.getBandSize(bandSize, train.numAttributes() - 1));
                            prevNN.set(current, r, d, CandidateNN.Status.NN);
                            if (d < toBeat) {
                                // strictly better NN: restart the class counts for 'previous'
                                classCounts[paramId][previous] = new int[train.numClasses()];
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            } else if (d == toBeat) {
                                // tie with the current NN: accumulate the vote
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            }
                        }
                    }
                } else {
                    // --- --- WITHOUT NN CASE --- ---
                    // We don't have the NN yet.
                    // Sort the challengers so we have the better chance to organize the good pruning.
                    Collections.sort(challengers);

                    for (LazyAssessNN_ERP challenger : challengers) {
                        final int previous = challenger.indexQuery;
                        final CandidateNN prevNN = candidateNNS[paramId][previous];

                        // --- First we want to beat the current best candidate:
                        double toBeat = currPNN.distance;
                        LazyAssessNN_ERP.RefineReturnType rrt = challenger.tryToBeat(toBeat, g, bandSize);

                        // --- Check the result
                        if (rrt == LazyAssessNN_ERP.RefineReturnType.New_best) {
                            final int r = challenger.getMinWindowValidityForFullDistance();
                            final double d = challenger.getDistance(ERPDistance.getBandSize(bandSize, train.numAttributes() - 1));
                            currPNN.set(previous, r, d, CandidateNN.Status.BC);
                            if (d < toBeat) {
                                classCounts[paramId][current] = new int[train.numClasses()];
                                classCounts[paramId][current][(int) challenger.getQuery().classValue()]++;
                            } else if (d == toBeat) {
                                classCounts[paramId][current][(int) challenger.getQuery().classValue()]++;
                            }
                        }

                        // --- Now check for previous NN
                        // --- Try to beat the previous best NN
                        // (note: the loop variable is deliberately re-pointed at the
                        // lazy assessor for 'previous' here)
                        toBeat = prevNN.distance;
                        challenger = lazyAssessNNS[previous];
                        rrt = challenger.tryToBeat(toBeat, g, bandSize);

                        // --- Check the result
                        if (rrt == LazyAssessNN_ERP.RefineReturnType.New_best) {
                            final int r = challenger.getMinWindowValidityForFullDistance();
                            final double d = challenger.getDistance(ERPDistance.getBandSize(bandSize, train.numAttributes() - 1));
                            prevNN.set(current, r, d, CandidateNN.Status.NN);
                            if (d < toBeat) {
                                classCounts[paramId][previous] = new int[train.numClasses()];
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            } else if (d == toBeat) {
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            }
                        }
                    }

                    // --- When we looked at every past sequences,
                    // the current best candidate is really the best one, so the NN.
                    // So assign the current NN to all the windows that are valid
                    final int r = currPNN.r;
                    final double d = currPNN.distance;
                    final int index = currPNN.index;
                    final double prevG = g;
                    int w = ERPDistance.getBandSize(bandSize, train.numAttributes() - 1);
                    int tmp = paramId;
                    // propagate down the grid while g is unchanged and the window still
                    // covers the minimum validity window r of the full distance
                    while (tmp > 0 && paramId % 10 > 0 && prevG == g && w >= r) {
                        candidateNNS[tmp][current].set(index, r, d, CandidateNN.Status.NN);
                        classCounts[tmp][current] = classCounts[paramId][current].clone();
                        tmp--;
                        this.setParamsFromParamId(train, tmp);
                        w = ERPDistance.getBandSize(bandSize, train.numAttributes() - 1);
                    }
                }
            }
        }
    }

    /**
     * Approximate variant of initNNSTable: only the first nSamples sequences get an
     * NN entry, but every training sequence may serve as a candidate neighbour.
     * No class counts are maintained here.
     */
    @Override
    public void initApproxNNSTable(Instances train, SequenceStatsCache cache, int nSamples) {
        if (nSamples < 2) {
            System.err.println("[INIT-NNS-TABLE] Set is to small: " + nSamples + " sequence. At least 2 sequences needed.");
        }
        candidateNNS = new CandidateNN[nParams][nSamples];
        for (int paramId = 0; paramId < nParams; ++paramId) {
            for (int len = 0; len < nSamples; ++len) {
                candidateNNS[paramId][len] = new CandidateNN();
            }
        }

        final LazyAssessNN_ERP[] lazyAssessNNS = new LazyAssessNN_ERP[train.size()];
        for (int i = 0; i < train.size(); ++i) {
            lazyAssessNNS[i] = new LazyAssessNN_ERP(cache);
        }
        final ArrayList<LazyAssessNN_ERP> challengers = new ArrayList<>(nSamples);

        for (int current = 0; current < nSamples; ++current) {
            final Instance sCurrent = train.get(current);

            challengers.clear();
            for (int previous = 0; previous < train.size(); ++previous) {
                if (previous == current) continue;
                final LazyAssessNN_ERP d = lazyAssessNNS[previous];
                d.set(train.get(previous), previous, sCurrent, current);
                challengers.add(d);
            }

            for (int paramId = nParams - 1; paramId > -1; --paramId) {
                setParamsFromParamId(train, paramId);
                final CandidateNN currPNN = candidateNNS[paramId][current];

                Collections.sort(challengers);
                boolean newNN = false;
                for (LazyAssessNN_ERP challenger : challengers) {
                    // --- Get the data
                    int previous = challenger.indexQuery;
                    if (previous == current) previous = challenger.indexReference;
                    if (previous == currPNN.index) continue;

                    // --- First we want to beat the current best candidate:
                    double toBeat = currPNN.distance;
                    LazyAssessNN_ERP.RefineReturnType rrt = challenger.tryToBeat(toBeat, g, bandSize);

                    // --- Check the result
                    if (rrt == LazyAssessNN_ERP.RefineReturnType.New_best) {
                        int r = challenger.getMinWindowValidityForFullDistance();
                        double d = challenger.getDistance(ERPDistance.getBandSize(bandSize, train.numAttributes() - 1));
                        currPNN.set(previous, r, d, CandidateNN.Status.BC);
                        newNN = true;
                    }

                    // only sequences inside the sampled prefix have their own NN entry
                    if (previous < nSamples) {
                        CandidateNN prevNN = candidateNNS[paramId][previous];

                        // --- Now check for previous NN
                        // --- Try to beat the previous best NN
                        toBeat = prevNN.distance;
                        challenger = lazyAssessNNS[previous];
                        rrt = challenger.tryToBeat(toBeat, g, bandSize);

                        // --- Check the result
                        if (rrt == LazyAssessNN_ERP.RefineReturnType.New_best) {
                            int r = challenger.getMinWindowValidityForFullDistance();
                            double d = challenger.getDistance(ERPDistance.getBandSize(bandSize, train.numAttributes() - 1));
                            prevNN.set(current, r, d, CandidateNN.Status.NN);
                        }
                    }
                }

                if (newNN) {
                    int r = currPNN.r;
                    double d = currPNN.distance;
                    int index = currPNN.index;
                    int w = ERPDistance.getBandSize(bandSize, train.numAttributes() - 1);
                    int tmp = paramId;
                    double prevG = g;
                    // propagate the NN down the grid while g is unchanged and the
                    // window still covers the minimum validity window r
                    while (tmp >= 0 && prevG == g && w >= r) {
                        prevG = g;
                        candidateNNS[tmp][current].set(index, r, d, CandidateNN.Status.NN);
                        tmp--;
                        if (tmp >= 0) {
                            this.setParamsFromParamId(train, tmp);
                            w = ERPDistance.getBandSize(bandSize, train.numAttributes() - 1);
                        }
                    }
                }
            }
        }
    }
}
22,370
43.037402
157
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/Efficient1NN.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble; import java.io.File; import java.io.FileWriter; import java.text.DecimalFormat; import java.util.Scanner; import experiments.data.DatasetLoading; import tsml.classifiers.EnhancedAbstractClassifier; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.CandidateNN; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache; import utilities.InstanceTools; import tsml.classifiers.SaveParameterInfo; import weka.core.DenseInstance; import weka.core.Instance; import weka.core.Instances; /** * An abstract class to allow for distance-specific optimisations of DTW for use * in the ElasticEnsemble. This class allows for univariate and multivariate * time series to be used; the multivariate version calculates distances as the * sum of individual distance calculations between common dimensions of two * instances (using the same parameter setting on all channels). * <p> * E.G. a DTW implementation with window = 0.5 (50%) for two instances with 10 * channels would calculate the DTW distance separately for each channel, and * sum the 10 distances together. 
* * @author Jason Lines (j.lines@uea.ac.uk) */ public abstract class Efficient1NN extends EnhancedAbstractClassifier implements SaveParameterInfo{ protected Instances train; protected Instances[] trainGroup; protected String classifierIdentifier; protected boolean allowLoocv = true; protected boolean singleParamCv = false; private boolean fileWriting = false; private boolean individualCvParamFileWriting = false; private String outputDir; private String datasetName; private int resampleId; public Efficient1NN() { super(CANNOT_ESTIMATE_OWN_PERFORMANCE); } /** * Abstract method to calculates the distance between two Instance objects * * @param first * @param second * @param cutOffValue a best-so-far value to allow early abandons * @return the distance between first and second. If early abandon occurs, it will return Double.MAX_VALUE. */ public abstract double distance(Instance first, Instance second, double cutOffValue); /** * Multi-dimensional equivalent of the univariate distance method. Iterates * through channels calculating distances independently using the same param * options, summing together at the end to return a single distance. 
* * @param first * @param second * @param cutOffValue * @return */ public double distance(Instance[] first, Instance[] second, double cutOffValue) { double sum = 0; double decliningCutoff = cutOffValue; double thisDist; for (int d = 0; d < first.length; d++) { thisDist = this.distance(first[d], second[d], decliningCutoff); sum += thisDist; if (sum > cutOffValue) { return Double.MAX_VALUE; } decliningCutoff -= thisDist; } return sum; } public abstract double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex); public abstract double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final double cutOffValue); public abstract double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final SequenceStatsCache cache); public abstract double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final double cutOffValue, final SequenceStatsCache cache); /** * Utility method for easy cross-validation experiments. Each inheriting * class has 100 param options to select from (some dependent on information * for the training data). 
Passing in the training data and a param * * @param train * @param paramId */ public abstract void setParamsFromParamId(Instances train, int paramId); public void buildClassifier(Instances train) throws Exception { this.train = train; this.trainGroup = null; } public void buildClassifier(Instances[] trainGroup) throws Exception { this.train = null; this.trainGroup = trainGroup; } @Override public double classifyInstance(Instance instance) throws Exception { double bsfDistance = Double.MAX_VALUE; // for tie splitting int[] classCounts = new int[this.train.numClasses()]; double thisDist; for (Instance i : this.train) { thisDist = distance(instance, i, bsfDistance); if (thisDist < bsfDistance) { bsfDistance = thisDist; classCounts = new int[train.numClasses()]; classCounts[(int) i.classValue()]++; } else if (thisDist == bsfDistance) { classCounts[(int) i.classValue()]++; } } double bsfClass = -1; double bsfCount = -1; for (int c = 0; c < classCounts.length; c++) { if (classCounts[c] > bsfCount) { bsfCount = classCounts[c]; bsfClass = c; } } return bsfClass; } public double classifyInstanceMultivariate(Instance[] instance) throws Exception { if (this.trainGroup == null) { throw new Exception("Error: this configuration is for multivariate data"); } double bsfDistance = Double.MAX_VALUE; // for tie splitting int[] classCounts = new int[this.trainGroup[0].numClasses()]; double thisDist; Instance[] trainInstancesByDimension; for (int i = 0; i < this.trainGroup[0].numInstances(); i++) { trainInstancesByDimension = new Instance[this.trainGroup.length]; for (int j = 0; j < trainInstancesByDimension.length; j++) { trainInstancesByDimension[j] = this.trainGroup[j].instance(i); } thisDist = distance(instance, trainInstancesByDimension, bsfDistance); if (thisDist < bsfDistance) { bsfDistance = thisDist; classCounts = new int[trainGroup[0].numClasses()]; classCounts[(int) trainGroup[0].instance(i).classValue()]++; } else if (thisDist == bsfDistance) { classCounts[(int) 
trainGroup[0].instance(i).classValue()]++; } } double bsfClass = -1; double bsfCount = -1; for (int c = 0; c < classCounts.length; c++) { if (classCounts[c] > bsfCount) { bsfCount = classCounts[c]; bsfClass = c; } } return bsfClass; } @Override public double[] distributionForInstance(Instance instance) throws Exception { double bsfDistance = Double.MAX_VALUE; // for tie splitting int[] classCounts = new int[this.train.numClasses()]; double thisDist; int sumOfBest = 0; for (Instance i : this.train) { thisDist = distance(instance, i, bsfDistance); if (thisDist < bsfDistance) { bsfDistance = thisDist; classCounts = new int[train.numClasses()]; classCounts[(int) i.classValue()]++; sumOfBest = 1; } else if (thisDist == bsfDistance) { classCounts[(int) i.classValue()]++; sumOfBest++; } } double[] classDistributions = new double[this.train.numClasses()]; for (int c = 0; c < classCounts.length; c++) { classDistributions[c] = (double) classCounts[c] / sumOfBest; } return classDistributions; } public void setClassifierIdentifier(String classifierIdentifier) { this.classifierIdentifier = classifierIdentifier; } public String getClassifierIdentifier() { return classifierIdentifier; } // could parallelise here // public void writeLOOCVOutput(String tscProblemDir, String datasetName, int resampleId, String outputResultsDir, boolean tidyUp) throws Exception{ // for(int paramId = 0; paramId < 100; paramId++){ // writeLOOCVOutput(tscProblemDir, datasetName, resampleId, outputResultsDir, paramId); // } // parseLOOCVResults(tscProblemDir, datasetName, resampleId, outputResultsDir, tidyUp); // } // // public double writeLOOCVOutput(String tscProblemDir, String datasetName, int resampleId, String outputResultsDir, int paramId) throws Exception{ // new File(outputResultsDir+classifierIdentifier+"/Predictions/"+datasetName+"/loocvForParamOptions/").mkdirs(); // // Instances train = ClassifierTools.loadDataThrowable(tscProblemDir+datasetName+"/"+datasetName+"_TRAIN"); // Instances test = 
ClassifierTools.loadDataThrowable(tscProblemDir+datasetName+"/"+datasetName+"_TEST"); // // if(resampleId!=0){ // Instances[] temp = InstanceTools.resampleTrainAndTestInstances(train, test, resampleId); // train = temp[0]; // test = temp[1]; // } // // this.setParamsFromParamId(paramId); // // Instances trainLoocv; // Instance testLoocv; // // int correct = 0; // double pred, actual; // for(int i = 0; i < train.numInstances(); i++){ // trainLoocv = new Instances(train); // testLoocv = trainLoocv.remove(i); // actual = testLoocv.classValue(); // this.buildClassifier(train); // pred = this.classifyInstance(testLoocv); // if(pred==actual){ // correct++; // } // } // // return (double)correct/train.numInstances(); // } public void setFileWritingOn(String outputDir, String datasetName, int resampleId) { this.fileWriting = true; this.outputDir = outputDir; this.datasetName = datasetName; this.resampleId = resampleId; } public void setIndividualCvFileWritingOn(String outputDir, String datasetName, int resampleId) { this.individualCvParamFileWriting = true; this.outputDir = outputDir; this.datasetName = datasetName; this.resampleId = resampleId; } public double[] loocv(Instances train) throws Exception { double[] accAndPreds = null; String parsedFileName = this.outputDir + this.classifierIdentifier + "/Predictions/" + datasetName + "/trainFold" + resampleId + ".csv"; // System.out.println(parsedFileName); if (fileWriting) { File existing = new File(parsedFileName); if (existing.exists()) { // throw new Exception("Parsed results already exist for this measure: "+ parsedFileName); Scanner scan = new Scanner(existing); scan.useDelimiter("\n"); scan.next(); // skip header line int paramId = Integer.parseInt(scan.next().trim().split(",")[0]); if (this.allowLoocv) { this.setParamsFromParamId(train, paramId); } this.buildClassifier(train); accAndPreds = new double[train.numInstances() + 1]; accAndPreds[0] = Double.parseDouble(scan.next().trim().split(",")[0]); int correct = 0; 
String[] temp; for (int i = 0; i < train.numInstances(); i++) { temp = scan.next().split(","); accAndPreds[i + 1] = Double.parseDouble(temp[1]); if (accAndPreds[i + 1] == Double.parseDouble(temp[0])) { correct++; } } // commented out for now as this breaks the new EE loocv thing we're doing for the competition. Basically, if we try and load for train-1 ins for test in loocv, the number of train instances doesn't match so the acc is slightly off. should be an edge case, but can leave this check out so long as we trust the code // if(((double)correct/train.numInstances())!=accAndPreds[0]){ // System.err.println(existing.getAbsolutePath()); // System.err.println(((double)correct/train.numInstances())+" "+accAndPreds[0]); // throw new Exception("Attempted file loading, but accuracy doesn't match itself?!"); // } return accAndPreds; } else { new File(this.outputDir + this.classifierIdentifier + "/Predictions/" + datasetName + "/").mkdirs(); } } // write output // maybe a different version which looks for missing files and runs them? 
double bsfAcc = -1; int bsfParamId = -1; double[] bsfaccAndPreds = null; for (int paramId = 0; paramId < 100; paramId++) { // System.out.print(paramId+" "); accAndPreds = loocvAccAndPreds(train, paramId); // System.out.println(this.allowLoocv); // System.out.println(accAndPreds[0]); if (accAndPreds[0] > bsfAcc) { bsfAcc = accAndPreds[0]; bsfParamId = paramId; bsfaccAndPreds = accAndPreds; } if (!this.allowLoocv) { paramId = 100; } } // System.out.println(this.classifierIdentifier+", bsfParamId "+bsfParamId); this.buildClassifier(train); if (this.allowLoocv) { this.setParamsFromParamId(train, bsfParamId); } if (fileWriting) { FileWriter out = new FileWriter(parsedFileName); out.append(this.classifierIdentifier + "," + datasetName + ",parsedTrain\n"); out.append(bsfParamId + "\n"); out.append(bsfAcc + "\n"); for (int i = 1; i < bsfaccAndPreds.length; i++) { out.append(train.instance(i - 1).classValue() + "," + bsfaccAndPreds[i] + "\n"); } out.close(); } return bsfaccAndPreds; } DecimalFormat df = new DecimalFormat("##.###"); public double[] loocv(Instances[] trainGroup) throws Exception { double[] accAndPreds = null; String parsedFileName = this.outputDir + this.classifierIdentifier + "/Predictions/" + datasetName + "/trainFold" + resampleId + ".csv"; Instances concatenated = concatenate(trainGroup); if (fileWriting) { File existing = new File(parsedFileName); if (existing.exists()) { // throw new Exception("Parsed results already exist for this measure: "+ parsedFileName); Scanner scan = new Scanner(existing); scan.useDelimiter("\n"); scan.next(); // skip header line int paramId = Integer.parseInt(scan.next().trim().split(",")[0]); if (this.allowLoocv) { this.setParamsFromParamId(concatenated, paramId); } this.buildClassifier(trainGroup); accAndPreds = new double[trainGroup[0].numInstances() + 1]; accAndPreds[0] = Double.parseDouble(scan.next().trim().split(",")[0]); int correct = 0; String[] temp; for (int i = 0; i < trainGroup[0].numInstances(); i++) { temp = 
scan.next().split(","); accAndPreds[i + 1] = Double.parseDouble(temp[1]); if (accAndPreds[i + 1] == Double.parseDouble(temp[0])) { correct++; } } // commented out for now as this breaks the new EE loocv thing we're doing for the competition. Basically, if we try and load for train-1 ins for test in loocv, the number of train instances doesn't match so the acc is slightly off. should be an edge case, but can leave this check out so long as we trust the code // if(((double)correct/train.numInstances())!=accAndPreds[0]){ // System.err.println(existing.getAbsolutePath()); // System.err.println(((double)correct/train.numInstances())+" "+accAndPreds[0]); // throw new Exception("Attempted file loading, but accuracy doesn't match itself?!"); // } return accAndPreds; } else { new File(this.outputDir + this.classifierIdentifier + "/Predictions/" + datasetName + "/").mkdirs(); } } // write output // maybe a different version which looks for missing files and runs them? double bsfAcc = -1; int bsfParamId = -1; double[] bsfaccAndPreds = null; for (int paramId = 0; paramId < 100; paramId++) { // System.out.print(paramId+" "); accAndPreds = loocvAccAndPreds(trainGroup, concatenated, paramId); // System.out.println(this.allowLoocv); // System.out.println(accAndPreds[0]); if (accAndPreds[0] > bsfAcc) { bsfAcc = accAndPreds[0]; bsfParamId = paramId; bsfaccAndPreds = accAndPreds; } System.out.println("\t" + paramId + ": " + df.format(accAndPreds[0] * 100) + " (" + df.format(bsfAcc * 100) + ")"); if (!this.allowLoocv) { paramId = 100; } } // System.out.println(this.classifierIdentifier+", bsfParamId "+bsfParamId); this.buildClassifier(trainGroup); if (this.allowLoocv) { this.setParamsFromParamId(concatenated, bsfParamId); } if (fileWriting) { FileWriter out = new FileWriter(parsedFileName); out.append(this.classifierIdentifier + "," + datasetName + ",parsedTrain\n"); out.append(bsfParamId + "\n"); out.append(bsfAcc + "\n"); for (int i = 1; i < bsfaccAndPreds.length; i++) { 
out.append(trainGroup[0].instance(i - 1).classValue() + "," + bsfaccAndPreds[i] + "\n"); } out.close(); } return bsfaccAndPreds; } public double[] loocvAccAndPreds(Instances train, int paramId) throws Exception { if (this.allowLoocv) { this.setParamsFromParamId(train, paramId); } FileWriter out = null; String parsedFileName = this.outputDir + this.classifierIdentifier + "/Predictions/" + datasetName + "/trainFold" + resampleId + ".csv"; String singleFileName = this.outputDir + this.classifierIdentifier + "/cv/" + datasetName + "/trainFold" + resampleId + "/pid" + paramId + ".csv"; if (this.individualCvParamFileWriting) { if (new File(parsedFileName).exists()) {//|| new File(singleFileName).exists()){ throw new Exception("Error: Full parsed training results already exist - " + parsedFileName); } else if (new File(singleFileName).exists()) { throw new Exception("Error: CV training results already exist for this pid - " + singleFileName); } } // else we already know what the params are, so don't need to set Instances trainLoocv; Instance testLoocv; int correct = 0; double pred, actual; double[] accAndPreds = new double[train.numInstances() + 1]; for (int i = 0; i < train.numInstances(); i++) { trainLoocv = new Instances(train); testLoocv = trainLoocv.remove(i); actual = testLoocv.classValue(); this.buildClassifier(trainLoocv); pred = this.classifyInstance(testLoocv); if (pred == actual) { correct++; } accAndPreds[i + 1] = pred; } accAndPreds[0] = (double) correct / train.numInstances(); // System.out.println(accAndPreds[0]); if (individualCvParamFileWriting) { new File(this.outputDir + this.classifierIdentifier + "/cv/" + datasetName + "/trainFold" + resampleId + "/").mkdirs(); out = new FileWriter(singleFileName); out.append(this.classifierIdentifier + "," + datasetName + ",cv\n"); out.append(paramId + "\n"); out.append(accAndPreds[0] + "\n"); for (int i = 1; i < accAndPreds.length; i++) { out.append(train.instance(i - 1).classValue() + "," + accAndPreds[i] + "\n"); 
} out.close(); } return accAndPreds; } public double[] loocvAccAndPreds(Instances[] trainGroup, Instances concatenated, int paramId) throws Exception { FileWriter out = null; String parsedFileName = this.outputDir + this.classifierIdentifier + "/Predictions/" + datasetName + "/trainFold" + resampleId + ".csv"; String singleFileName = this.outputDir + this.classifierIdentifier + "/cv/" + datasetName + "/trainFold" + resampleId + "/pid" + paramId + ".csv"; // if(this.fileWriting){ // if(new File(parsedFileName).exists()){//|| new File(singleFileName).exists()){ // throw new Exception("Error: Full parsed training results already exist - "+parsedFileName); // }else if(new File(singleFileName).exists()){ // throw new Exception("Error: CV training results already exist for this pid - "+singleFileName); // } // } if (this.allowLoocv) { // System.out.println("allowed"); this.setParamsFromParamId(concatenated, paramId); // System.out.println(this.toString()); } // else we already know what the params are, so don't need to set Instances[] trainLoocv; Instance[] testLoocv; int correct = 0; double pred, actual; double[] accAndPreds = new double[trainGroup[0].numInstances() + 1]; for (int i = 0; i < trainGroup[0].numInstances(); i++) { trainLoocv = new Instances[trainGroup.length]; testLoocv = new Instance[trainGroup.length]; for (int d = 0; d < trainGroup.length; d++) { trainLoocv[d] = new Instances(trainGroup[d]); testLoocv[d] = trainLoocv[d].remove(i); } // trainLoocv = new Instances(train); // testLoocv = trainLoocv.remove(i); actual = testLoocv[0].classValue(); this.buildClassifier(trainLoocv); pred = this.classifyInstanceMultivariate(testLoocv); if (pred == actual) { correct++; } accAndPreds[i + 1] = pred; } accAndPreds[0] = (double) correct / trainGroup[0].numInstances(); // System.out.println(accAndPreds[0]); // if(fileWriting){ // out = new FileWriter(singleFileName); // out.append(this.classifierIdentifier+","+datasetName+",cv\n"); // out.append(paramId+"\n"); // 
out.append(accAndPreds[0]+"\n"); // for(int i = 1; i < accAndPreds.length;i++){ // out.append(train.instance(i-1).classValue()+","+accAndPreds[i]+"\n"); // } // out.close(); // } return accAndPreds; } public void writeTrainTestOutput(String tscProblemDir, String datasetName, int resampleId, String outputResultsDir) throws Exception { // load in param id from training results File cvResults = new File(outputResultsDir + classifierIdentifier + "/Predictions/" + datasetName + "/trainFold" + resampleId + ".csv"); if (!cvResults.exists()) { throw new Exception("Error loading file " + cvResults.getAbsolutePath()); } Scanner scan = new Scanner(cvResults); scan.useDelimiter(System.lineSeparator()); scan.next(); int paramId = Integer.parseInt(scan.next().trim()); this.setParamsFromParamId(train, paramId); // Now classifier is set up, make the associated files and do the test classification new File(outputResultsDir + classifierIdentifier + "/Predictions/" + datasetName + "/").mkdirs(); StringBuilder headerInfo = new StringBuilder(); headerInfo.append(classifierIdentifier).append(System.lineSeparator()); headerInfo.append(this.getParamInformationString()).append(System.lineSeparator()); Instances train = DatasetLoading.loadDataNullable(tscProblemDir + datasetName + "/" + datasetName + "_TRAIN"); Instances test = DatasetLoading.loadDataNullable(tscProblemDir + datasetName + "/" + datasetName + "_TEST"); if (resampleId != 0) { Instances[] temp = InstanceTools.resampleTrainAndTestInstances(train, test, resampleId); train = temp[0]; test = temp[1]; } this.buildClassifier(train); StringBuilder classificationInfo = new StringBuilder(); int correct = 0; double pred, actual; for (int i = 0; i < test.numInstances(); i++) { actual = test.instance(i).classValue(); pred = this.classifyInstance(test.instance(i)); classificationInfo.append(actual).append(",").append(pred).append(System.lineSeparator()); if (actual == pred) { correct++; } } FileWriter outWriter = new 
FileWriter(outputResultsDir + this.classifierIdentifier + "/Predictions/" + datasetName + "/testFold" + resampleId + ".csv"); outWriter.append(headerInfo); outWriter.append(((double) correct / test.numInstances()) + System.lineSeparator()); outWriter.append(classificationInfo); outWriter.close(); } // public static void parseLOOCVResults(String tscProblemDir, String datasetName, int resampleId, String outputResultsDir, boolean tidyUp){ // // } public abstract String getParamInformationString(); public Instances getTrainingData() { return this.train; } public static Instances concatenate(Instances[] train) { // make a super arff for finding params that need stdev etc Instances temp = new Instances(train[0], 0); for (int i = 1; i < train.length; i++) { for (int j = 0; j < train[i].numAttributes() - 1; j++) { temp.insertAttributeAt(train[i].attribute(j), temp.numAttributes() - 1); } } int dataset, attFromData; for (int insId = 0; insId < train[0].numInstances(); insId++) { DenseInstance dense = new DenseInstance(temp.numAttributes()); for (int attId = 0; attId < temp.numAttributes() - 1; attId++) { dataset = attId / (train[0].numAttributes() - 1); attFromData = attId % (train[0].numAttributes() - 1); dense.setValue(attId, train[dataset].instance(insId).value(attFromData)); } dense.setValue(temp.numAttributes() - 1, train[0].instance(insId).classValue()); temp.add(dense); } return temp; } /************************************************************************************************ Support for FastEE @author Chang Wei Tan, Monash University (chang.tan@monash.edu) ************************************************************************************************/ CandidateNN[][] candidateNNS; // a table to store nearest neighbours for each instance in the training set final int nParams = 100; // number of parameters to learn int[][][] classCounts; int maxWindow; SequenceStatsCache trainCache; // cache to store some pre-computed data on the training set private int 
bsfParamId; // classify instance with lower bounds public int classifyInstance(final Instances trainData, final Instance query, final int queryIndex, final SequenceStatsCache cache) { int[] classCounts = new int[train.numClasses()]; double dist; Instance candidate = trainData.get(0); double bsfDistance = distance(query, candidate, Double.POSITIVE_INFINITY); classCounts[(int) candidate.classValue()]++; for (int candidateIndex = 1; candidateIndex < trainData.size(); candidateIndex++) { candidate = trainData.get(candidateIndex); dist = lowerBound(query, candidate, queryIndex, candidateIndex, bsfDistance, cache); if (dist <= bsfDistance) { dist = distance(query, candidate, bsfDistance); if (dist < bsfDistance) { bsfDistance = dist; classCounts = new int[trainData.numClasses()]; classCounts[(int) candidate.classValue()]++; } else if (dist == bsfDistance) { classCounts[(int) candidate.classValue()]++; } } } int bsfClass = -1; double bsfCount = -1; for (int i = 0; i < classCounts.length; i++) { if (classCounts[i] > bsfCount) { bsfCount = classCounts[i]; bsfClass = i; } } return bsfClass; } public abstract void initNNSTable(final Instances trainData, final SequenceStatsCache cache); public abstract void initApproxNNSTable(final Instances trainData, final SequenceStatsCache cache, final int nSamples); public double[] fastParameterSearch(final Instances train) throws Exception { double[] accAndPreds; this.maxWindow = train.numAttributes() - 1; this.trainCache = new SequenceStatsCache(train, train.numAttributes() - 1); initNNSTable(train, trainCache); double bsfAcc = -1; int bsfParamId = -1; double[] bsfaccAndPreds = null; if(debug) System.out.print("[1-NN] Fast Parameter Search for " + this.classifierIdentifier + ", training "); for (int paramId = 0; paramId < nParams; paramId++) { if(debug) System.out.print("."); accAndPreds = fastParameterAccAndPred(train, paramId, train.size()); if (accAndPreds[0] > bsfAcc) { bsfAcc = accAndPreds[0]; bsfParamId = paramId; bsfaccAndPreds = 
accAndPreds; } } if(debug) System.out.println(); this.buildClassifier(train); this.setParamsFromParamId(train, bsfParamId); if (fileWriting) { String parsedFileName = this.outputDir + this.classifierIdentifier + "/FastEE_Predictions/" + datasetName + "/trainFold" + resampleId + ".csv"; FileWriter out = new FileWriter(parsedFileName); out.append(this.classifierIdentifier + "," + datasetName + ",parsedTrain\n"); out.append(bsfParamId + "\n"); out.append(bsfAcc + "\n"); for (int i = 1; i < bsfaccAndPreds.length; i++) { out.append(train.instance(i - 1).classValue() + "," + bsfaccAndPreds[i] + "\n"); } out.close(); } return bsfaccAndPreds; } private double[] fastParameterAccAndPred(final Instances train, final int paramId, final int n) { this.setParamsFromParamId(train, paramId); int correct = 0; double pred, actual; final double[] accAndPreds = new double[n + 1]; for (int i = 0; i < n; i++) { actual = train.get(i).classValue(); pred = -1; double bsfCount = -1; for (int c = 0; c < classCounts[paramId][i].length; c++) { if (classCounts[paramId][i][c] > bsfCount) { bsfCount = classCounts[paramId][i][c]; pred = c; } } if (pred == actual) { correct++; } accAndPreds[i + 1] = pred; } accAndPreds[0] = 1.0 * correct / n; return accAndPreds; } public double[] approxParameterSearch(final Instances train, final int nSamples) throws Exception { final long start = System.nanoTime(); double[] accAndPreds; this.maxWindow = train.numAttributes() - 1; this.trainCache = new SequenceStatsCache(train, train.numAttributes() - 1); initApproxNNSTable(train, trainCache, nSamples); bsfParamId = -1; double bsfAcc = -1; double[] bsfaccAndPreds = null; if(debug) System.out.print("[1-NN] Approximate Parameter Search for " + this.classifierIdentifier + ", training "); for (int paramId = 0; paramId < nParams; paramId++) { if(debug) System.out.print("."); accAndPreds = approxParameterAccAndPred(train, paramId, nSamples); if (accAndPreds[0] > bsfAcc) { bsfAcc = accAndPreds[0]; bsfParamId = paramId; 
bsfaccAndPreds = accAndPreds; } } if(debug) System.out.println(); this.buildClassifier(train); this.setParamsFromParamId(train, bsfParamId); if (fileWriting) { String parsedFileName = this.outputDir + this.classifierIdentifier + "/ApproxEE_Predictions/" + datasetName + "/trainFold" + resampleId + ".csv"; FileWriter out = new FileWriter(parsedFileName); out.append(this.classifierIdentifier + "," + datasetName + ",parsedTrain\n"); out.append(bsfParamId + "\n"); out.append(bsfAcc + "\n"); for (int i = 1; i < bsfaccAndPreds.length; i++) { out.append(train.instance(i - 1).classValue() + "," + bsfaccAndPreds[i] + "\n"); } out.close(); } return bsfaccAndPreds; } private double[] approxParameterAccAndPred(final Instances train, final int paramId, final int nSamples) { this.setParamsFromParamId(train, paramId); int correct = 0; double pred, actual; final double[] accAndPreds = new double[nSamples + 1]; for (int i = 0; i < nSamples; i++) { actual = train.get(i).classValue(); pred = train.get(candidateNNS[paramId][i].index).classValue(); if (pred == actual) { correct++; } accAndPreds[i + 1] = pred; } accAndPreds[0] = 1.0 * correct / nSamples; return accAndPreds; } }
35,518
40.015012
330
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/ElasticEnsembleClusterDistributer.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble; import experiments.data.DatasetLoading; import tsml.classifiers.distance_based.ElasticEnsemble; import java.io.File; import java.io.FileWriter; import java.util.Scanner; import utilities.InstanceTools; import weka.core.Instances; /** * * @author Jason Lines (j.lines@uea.ac.uk) * * A class to assist with distributing cross-validation experiments for the * Elastic Ensemble class on an LSF HPC. Methods inclue script making, cv * running, and cv parsing. * * Also includes a clusterMaster method for managing the workflow when passed * args remotely. * * Note: class is experimental and not robustly tested (e.g. cluster master */ public class ElasticEnsembleClusterDistributer { /** * A method to create bsub submission scripts for running CV experiments in a distributed environment. * Scripts create array jobs with 100 subjobs, creating a single script to do all 100 param options of * a single classifier on a single resample of a dataset. * * @param datasetName the name of the dataset * @param resample the resample id to run * @param classifier the relevant enum corresponding to the classifier * @param instructionBuilder a StringBuilder to concatenate sh instructions. 
If null, method will just ignore this step * @throws Exception */ public static void scriptMaker_runCv(String datasetName, int resample, ElasticEnsemble.ConstituentClassifiers classifier, StringBuilder instructionBuilder) throws Exception{ String theWholeMess = "#!/bin/csh\n" + "\n" + "#BSUB -q long-eth\n" + "#BSUB -J runCv_"+datasetName+"_"+resample+"_"+classifier+"[1-100]\n" + "#BSUB -oo output/runCv_"+datasetName+"_"+resample+"_"+classifier+"_%I.out\n" + "#BSUB -eo error/runCv_"+datasetName+"_"+resample+"_"+classifier+"_%I.err\n" + "#BSUB -R \"rusage[mem=4000]\"\n" + "#BSUB -M 4000\n" + "\n" + "module add java/jdk1.8.0_51\n" + "\n" + "java -jar -Xmx4000m TimeSeriesClassification.jar runCv "+datasetName+" "+resample+" "+classifier+" $LSB_JOBINDEX"; File outputDir = new File("scripts_eeCv/"); outputDir.mkdirs(); FileWriter out = new FileWriter("scripts_eeCv/"+datasetName+"_"+resample+"_"+classifier+".bsub"); out.append(theWholeMess); out.close(); if(instructionBuilder!=null){ instructionBuilder.append("bsub < scripts_eeCv/").append(datasetName).append("_").append(resample).append("_").append(classifier).append(".bsub\n"); } } /** * A method to run the CV experiment for a single param id of a measure on a dataset. * NOTE: method does not resample data; this should be done independently of the method * (access to test data is necessary for repartitioning the data). 
resampleId param is * purely for file writing purposes * * @param train * @param dataName * @param resampleIdentifier * @param classifier * @param paramId * @throws Exception */ private static void runCv(Instances train, String dataName, int resampleIdentifier, ElasticEnsemble.ConstituentClassifiers classifier, int paramId) throws Exception{ String resultsDir = "eeResults/"; if(classifier==ElasticEnsemble.ConstituentClassifiers.DDTW_R1_1NN || classifier == ElasticEnsemble.ConstituentClassifiers.DTW_R1_1NN || classifier == ElasticEnsemble.ConstituentClassifiers.Euclidean_1NN){ if(paramId > 0){ return; } } Efficient1NN oneNN = ElasticEnsemble.getClassifier(classifier); oneNN.setIndividualCvFileWritingOn(resultsDir, dataName, resampleIdentifier); oneNN.loocvAccAndPreds(train, paramId); } /** * A method to parse the 100 output files for a dataset/resample/measure * combination. Results in a single file with the CV results of the best * paramId for this classifier. Also includes the option to delete old cv * files after parsing to help storage management * * @param resultsDir * @param dataName * @param resampleId * @param measureType * @param tidyUp boolean to delete the now-redundant 100 param cv files once the best param id file has been written * @throws Exception */ private static void runCv_parseIndividualCvsForBest(String resultsDir, String dataName, int resampleId, ElasticEnsemble.ConstituentClassifiers measureType, boolean tidyUp) throws Exception{ String cvPath = resultsDir+measureType+"/cv/"+dataName+"/trainFold"+resampleId+"/"; String parsedPath = resultsDir+measureType+"/Predictions/"+dataName+"/"; String parsedName = parsedPath+"trainFold"+resampleId+".csv"; File existingParsed = new File(parsedName); if(existingParsed.exists() && existingParsed.length() > 0){ if(tidyUp){ deleteDir(new File(resultsDir+measureType+"/cv/")); } return; } int expectedParams; 
if(measureType.equals(ElasticEnsemble.ConstituentClassifiers.Euclidean_1NN)||measureType.equals(ElasticEnsemble.ConstituentClassifiers.DTW_R1_1NN)||measureType.equals(ElasticEnsemble.ConstituentClassifiers.DDTW_R1_1NN)){ expectedParams = 1; }else{ expectedParams =100; } double acc; double bsfAcc = -1; Scanner scan; File individualCv; File bsfParsed = null; for(int p = 0; p < expectedParams; p++){ // check accuracy of each parameter individualCv = new File(cvPath+"pid"+p+".csv"); if(individualCv.exists()==false){ throw new Exception("error: cv file does not exist - "+individualCv.getAbsolutePath()); } scan = new Scanner(individualCv); scan.useDelimiter("\n"); scan.next(); scan.next(); acc = Double.parseDouble(scan.next().trim()); scan.close(); if(acc > bsfAcc){ bsfAcc = acc; bsfParsed = new File(cvPath+"pid"+p+".csv"); } } new File(parsedPath).mkdirs(); scan = new Scanner(bsfParsed); scan.useDelimiter("\n"); FileWriter out = new FileWriter(parsedName); while(scan.hasNext()){ out.append(scan.next()+"\n"); } out.close(); if(tidyUp){ deleteDir(new File(resultsDir+measureType+"/cv/")); } } /** * Alternative main method written for remote computation that is triggered * when args.length > 0. 
* @param args * @throws Exception */ public static void clusterMaster(String[] args)throws Exception{ String arffDir = "Problems/"; if(args[0].equalsIgnoreCase("makeScripts")){ // do locally for now }else if(args[0].equalsIgnoreCase("runCv")){ String datasetName = args[1].trim(); int resampleId = Integer.parseInt(args[2].trim()); String classifier = args[3].trim(); int paramId = Integer.parseInt(args[4].trim())-1; Instances train = DatasetLoading.loadDataNullable(arffDir+datasetName+"_TRAIN"); runCv(train, datasetName, resampleId, ElasticEnsemble.ConstituentClassifiers.valueOf(classifier), paramId); }else if(args[0].equalsIgnoreCase("parseCv")){ String datasetName = args[1].trim(); String resultsDirName = args[2].trim(); int resampleId = Integer.parseInt(args[3].trim()); for(ElasticEnsemble.ConstituentClassifiers c: ElasticEnsemble.ConstituentClassifiers.values()){ runCv_parseIndividualCvsForBest(resultsDirName, datasetName, resampleId, c, true); } }else if(args[0].equalsIgnoreCase("buildEEandRunTest")){ String datasetName = args[1].trim(); String resultsDirName = args[2].trim(); String arffPath = args[3].trim(); int resampleId = 0; Instances train = DatasetLoading.loadDataNullable(arffPath+datasetName+"/"+datasetName+"_TRAIN"); Instances test = DatasetLoading.loadDataNullable(arffPath+datasetName+"/"+datasetName+"_TEST"); if(args.length > 4){ resampleId = Integer.parseInt(args[4].trim()); Instances temp[] = InstanceTools.resampleTrainAndTestInstances(train, test, resampleId); train = temp[0]; test = temp[1]; } ElasticEnsemble ee = new ElasticEnsemble(resultsDirName, datasetName, resampleId); ee.buildClassifier(train); ee.writeTestResultsToFile(test, datasetName, "EE", ee.getParameters(), resultsDirName+"EE/Predictions/"+datasetName+"/testFold"+resampleId+".csv"); }else{ throw new Exception("Error: Unexpected operation - " + args[0]); } } /** * Utility method to recursively remove a directory and a contents * @param dir File object of the directory to be deleted */ 
private static void deleteDir(File dir){ if(dir.exists()==false){ return; } if(dir.isDirectory()){ File[] files = dir.listFiles(); for (File file: files) { deleteDir(file); } } dir.delete(); } /** * Main method. When args.length > 0, clusterMaster method is triggered with * args instead of the local main method. * * @param args * @throws Exception */ public static void main(String[] args) throws Exception{ if(args.length>0){ clusterMaster(args); return; } // else, local: // String problemName = "alphabet_raw_26_sampled_10"; String problemName = "vowel_raw_sampled_10"; StringBuilder instructionBuilder = new StringBuilder(); for(ElasticEnsemble.ConstituentClassifiers c:ElasticEnsemble.ConstituentClassifiers.values()){ scriptMaker_runCv(problemName, 0, c, instructionBuilder); } FileWriter out = new FileWriter("instructions_"+problemName+".txt"); out.append(instructionBuilder); out.close(); } }
11,450
39.462898
228
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/LCSS1NN.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble; import experiments.data.DatasetLoading; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.CandidateNN; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN.LazyAssessNN_LCSS; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds.LbLcss; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache; import tsml.classifiers.legacy.elastic_ensemble.distance_functions.LCSSDistance; import machine_learning.classifiers.kNN; import weka.core.Capabilities; import weka.core.Instance; import weka.core.Instances; import java.util.ArrayList; import java.util.Collections; /** * checked April l16 * * @author sjx07ngu */ public class LCSS1NN extends Efficient1NN { private int delta; private double epsilon; boolean epsilonsAndDeltasRefreshed; double[] epsilons; int[] deltas; public LCSS1NN(int delta, double epsilon) { this.delta = delta; this.epsilon = epsilon; epsilonsAndDeltasRefreshed = false; this.classifierIdentifier = "LCSS_1NN"; this.allowLoocv = false; } public LCSS1NN() { // note: these default params may be garbage for most datasets, should set them through CV this.delta = 3; 
this.epsilon = 1; epsilonsAndDeltasRefreshed = false; this.classifierIdentifier = "LCSS_1NN"; } @Override public void buildClassifier(Instances train) throws Exception { super.buildClassifier(train); // used for setting params with the paramId method epsilonsAndDeltasRefreshed = false; } public double distance(Instance first, Instance second) { // need to remove class index/ignore // simple check - if its last, ignore it. If it's not last, copy the instances, remove that attribue, and then call again // edit: can't do a simple copy with Instance objs by the looks of things. Fail-safe: fall back to the original measure int m, n; if (first.classIndex() == first.numAttributes() - 1 && second.classIndex() == second.numAttributes() - 1) { m = first.numAttributes() - 1; n = second.numAttributes() - 1; } else { // default case, use the original LCSS class (horrible efficiency, but just in as a fail safe for edge-cases) System.err.println("Warning: class designed to use problems with class index as last attribute. Defaulting to original LCSS distance"); return new LCSSDistance(this.delta, this.epsilon).distance(first, second); } int[][] lcss = new int[m + 1][n + 1]; for (int i = 0; i < m; i++) { for (int j = i - delta; j <= i + delta; j++) { if (j < 0) { j = -1; } else if (j >= n) { j = i + delta; } else if (second.value(j) + this.epsilon >= first.value(i) && second.value(j) - epsilon <= first.value(i)) { lcss[i + 1][j + 1] = lcss[i][j] + 1; } else if (lcss[i][j + 1] > lcss[i + 1][j]) { lcss[i + 1][j + 1] = lcss[i][j + 1]; } else { lcss[i + 1][j + 1] = lcss[i + 1][j]; } // could maybe do an early abandon here? 
Not sure, investigate further } } int max = -1; for (int i = 1; i < lcss[lcss.length - 1].length; i++) { if (lcss[lcss.length - 1][i] > max) { max = lcss[lcss.length - 1][i]; } } return 1 - ((double) max / m); } @Override public Capabilities getCapabilities() { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. } public static void main(String[] args) throws Exception { for (int i = 0; i < 10; i++) { runComparison(); } } public static void runComparison() throws Exception { String tscProbDir = "C:/users/sjx07ngu/Dropbox/TSC Problems/"; // String datasetName = "ItalyPowerDemand"; String datasetName = "GunPoint"; // String datasetName = "Beef"; // String datasetName = "Coffee"; // String datasetName = "SonyAiboRobotSurface1"; Instances train = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TRAIN"); Instances test = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TEST"); int delta = 10; double epsilon = 0.5; // old version kNN knn = new kNN(); //efaults to k = 1 without any normalisation LCSSDistance lcssOld = new LCSSDistance(delta, epsilon); knn.setDistanceFunction(lcssOld); knn.buildClassifier(train); // new version LCSS1NN lcssNew = new LCSS1NN(delta, epsilon); lcssNew.buildClassifier(train); int correctOld = 0; int correctNew = 0; long start, end, oldTime, newTime; double pred; // classification with old MSM class and kNN start = System.nanoTime(); correctOld = 0; for (int i = 0; i < test.numInstances(); i++) { pred = knn.classifyInstance(test.instance(i)); if (pred == test.instance(i).classValue()) { correctOld++; } } end = System.nanoTime(); oldTime = end - start; // classification with new MSM and in-build 1NN start = System.nanoTime(); correctNew = 0; for (int i = 0; i < test.numInstances(); i++) { pred = lcssNew.classifyInstance(test.instance(i)); if (pred == test.instance(i).classValue()) { correctNew++; } } end = 
System.nanoTime(); newTime = end - start; System.out.println("Comparison of MSM: " + datasetName); System.out.println("=========================================="); System.out.println("Old acc: " + ((double) correctOld / test.numInstances())); System.out.println("New acc: " + ((double) correctNew / test.numInstances())); System.out.println("Old timing: " + oldTime); System.out.println("New timing: " + newTime); System.out.println("Relative Performance: " + ((double) newTime / oldTime)); } @Override public double distance(Instance first, Instance second, double cutOffValue) { // throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. return this.distance(first, second); } @Override public void setParamsFromParamId(Instances train, int paramId) { // more efficient to only calculate these when the training data has been changed, so could call in build classifier // however, these values are only needed in this method, so calculate here. // If the training data hasn't changed (i.e. 
no calls to buildClassifier, then they don't need recalculated if (!epsilonsAndDeltasRefreshed) { double stdTrain = LCSSDistance.stdv_p(train); double stdFloor = stdTrain * 0.2; epsilons = LCSSDistance.getInclusive10(stdFloor, stdTrain); deltas = LCSSDistance.getInclusive10(0, (train.numAttributes() - 1) / 4); epsilonsAndDeltasRefreshed = true; } this.delta = deltas[paramId / 10]; this.epsilon = epsilons[paramId % 10]; } @Override public String getParamInformationString() { return this.delta + "," + this.epsilon; } /************************************************************************************************ Support for FastEE @author Chang Wei Tan, Monash University (chang.tan@monash.edu) ************************************************************************************************/ @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex) { return LbLcss.distance(candidate, trainCache.getUE(queryIndex, delta, epsilon), trainCache.getLE(queryIndex, delta, epsilon)); } @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, double cutOffValue) { return LbLcss.distance(candidate, trainCache.getUE(queryIndex, delta, epsilon), trainCache.getLE(queryIndex, delta, epsilon), cutOffValue); } @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, SequenceStatsCache cache) { return LbLcss.distance(candidate, cache.getUE(queryIndex, delta, epsilon), cache.getLE(queryIndex, delta, epsilon)); } @Override public double lowerBound(Instance query, Instance candidate, int queryIndex, int candidateIndex, double cutOffValue, SequenceStatsCache cache) { return LbLcss.distance(candidate, cache.getUE(queryIndex, delta, epsilon), cache.getLE(queryIndex, delta, epsilon), cutOffValue); } @Override public void initNNSTable(Instances train, SequenceStatsCache cache) { if (train.size() < 2) { System.err.println("[INIT-NNS-TABLE] Set is 
to small: " + train.size() + " sequence. At least 2 sequences needed."); } candidateNNS = new CandidateNN[nParams][train.size()]; for (int paramId = 0; paramId < nParams; ++paramId) { for (int len = 0; len < train.size(); ++len) { candidateNNS[paramId][len] = new CandidateNN(); } } classCounts = new int[nParams][train.size()][train.numClasses()]; final LazyAssessNN_LCSS[] lazyAssessNNS = new LazyAssessNN_LCSS[train.size()]; for (int i = 0; i < train.size(); ++i) { lazyAssessNNS[i] = new LazyAssessNN_LCSS(cache); } final ArrayList<LazyAssessNN_LCSS> challengers = new ArrayList<>(train.size()); for (int current = 1; current < train.size(); ++current) { final Instance sCurrent = train.get(current); // Clear off the previous challengers and add all the previous sequences challengers.clear(); for (int previous = 0; previous < current; ++previous) { final LazyAssessNN_LCSS d = lazyAssessNNS[previous]; d.set(train.get(previous), previous, sCurrent, current); challengers.add(d); } for (int paramId = nParams - 1; paramId > -1; --paramId) { setParamsFromParamId(train, paramId); final CandidateNN currPNN = candidateNNS[paramId][current]; if (currPNN.isNN()) { // --- --- WITH NN CASE --- --- // We already have NN for sure, but we still have to check if current is new NN for previous for (int previous = 0; previous < current; ++previous) { final CandidateNN prevNN = candidateNNS[paramId][previous]; // --- Try to beat the previous best NN double toBeat = prevNN.distance; final LazyAssessNN_LCSS challenger = lazyAssessNNS[previous]; final LazyAssessNN_LCSS.RefineReturnType rrt = challenger.tryToBeat(toBeat, delta, epsilon); // --- Check the result if (rrt == LazyAssessNN_LCSS.RefineReturnType.New_best) { final int r = challenger.getMinWindowValidityForFullDistance(); final double d = challenger.getDistance(delta); prevNN.set(current, r, d, CandidateNN.Status.NN); if (d < toBeat) { classCounts[paramId][previous] = new int[train.numClasses()]; classCounts[paramId][previous][(int) 
challenger.getReference().classValue()]++; } else if (d == toBeat) { classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } } } } else { // --- --- WITHOUT NN CASE --- --- // We don't have NN yet. // Sort the challengers so we have better chance to organize good pruning. Collections.sort(challengers); for (LazyAssessNN_LCSS challenger : challengers) { final int previous = challenger.indexQuery; final CandidateNN prevNN = candidateNNS[paramId][previous]; // --- First we want to beat the current best candidate of reference: double toBeat = currPNN.distance; LazyAssessNN_LCSS.RefineReturnType rrt = challenger.tryToBeat(toBeat, delta, epsilon); // --- Check the result if (rrt == LazyAssessNN_LCSS.RefineReturnType.New_best) { final int r = challenger.getMinWindowValidityForFullDistance(); final double d = challenger.getDistance(delta); currPNN.set(previous, r, d, CandidateNN.Status.BC); if (d < toBeat) { classCounts[paramId][current] = new int[train.numClasses()]; classCounts[paramId][current][(int) challenger.getQuery().classValue()]++; } else if (d == toBeat) { classCounts[paramId][current][(int) challenger.getQuery().classValue()]++; } } // --- Now check for previous NN // --- Try to beat the previous best NN toBeat = prevNN.distance; challenger = lazyAssessNNS[previous]; rrt = challenger.tryToBeat(toBeat, delta, epsilon); // --- Check the result if (rrt == LazyAssessNN_LCSS.RefineReturnType.New_best) { final int r = challenger.getMinWindowValidityForFullDistance(); final double d = challenger.getDistance(delta); prevNN.set(current, r, d, CandidateNN.Status.NN); if (d < toBeat) { classCounts[paramId][previous] = new int[train.numClasses()]; classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } else if (d == toBeat) { classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } } } // --- When we looked at every past sequences, // the current best candidate is really the best one, so the NN. 
// So assign the current NN to all the windows that are valid final int r = currPNN.r; final double d = currPNN.distance; final int index = currPNN.index; final double prevEpsilon = epsilon; int tmp = paramId; while (tmp > 0 && paramId % 10 > 0 && prevEpsilon == epsilon && delta >= r) { candidateNNS[tmp][current].set(index, r, d, CandidateNN.Status.NN); classCounts[tmp][current] = classCounts[paramId][current].clone(); tmp--; this.setParamsFromParamId(train, tmp); } } } } } @Override public void initApproxNNSTable(Instances train, SequenceStatsCache cache, int nSamples) { if (nSamples < 2) { System.err.println("[INIT-NNS-TABLE] Set is to small: " + nSamples + " sequence. At least 2 sequences needed."); } candidateNNS = new CandidateNN[nParams][nSamples]; for (int paramId = 0; paramId < nParams; ++paramId) { for (int len = 0; len < nSamples; ++len) { candidateNNS[paramId][len] = new CandidateNN(); } } classCounts = new int[nParams][nSamples][train.numClasses()]; final LazyAssessNN_LCSS[] lazyAssessNNS = new LazyAssessNN_LCSS[train.size()]; for (int i = 0; i < train.size(); ++i) { lazyAssessNNS[i] = new LazyAssessNN_LCSS(cache); } final ArrayList<LazyAssessNN_LCSS> challengers = new ArrayList<>(nSamples); for (int current = 0; current < nSamples; ++current) { final Instance sCurrent = train.get(current); // Clear off the previous challengers and add all the previous sequences challengers.clear(); for (int previous = 0; previous < train.size(); ++previous) { if (previous == current) continue; final LazyAssessNN_LCSS d = lazyAssessNNS[previous]; d.set(train.get(previous), previous, sCurrent, current); challengers.add(d); } for (int paramId = nParams - 1; paramId > -1; --paramId) { setParamsFromParamId(train, paramId); final CandidateNN currPNN = candidateNNS[paramId][current]; Collections.sort(challengers); boolean newNN = false; for (LazyAssessNN_LCSS challenger : challengers) { // --- Get the data int previous = challenger.indexQuery; if (previous == current) 
previous = challenger.indexReference; if (previous == currPNN.index) continue; // --- First we want to beat the current best candidate of reference: double toBeat = currPNN.distance; LazyAssessNN_LCSS.RefineReturnType rrt = challenger.tryToBeat(toBeat, delta, epsilon); // --- Check the result if (rrt == LazyAssessNN_LCSS.RefineReturnType.New_best) { int r = challenger.getMinWindowValidityForFullDistance(); double d = challenger.getDistance(delta); currPNN.set(previous, r, d, CandidateNN.Status.BC); if (d < toBeat) { classCounts[paramId][current] = new int[train.numClasses()]; classCounts[paramId][current][(int) challenger.getQuery().classValue()]++; } else if (d == toBeat) { classCounts[paramId][current][(int) challenger.getQuery().classValue()]++; } newNN = true; } if (previous < nSamples) { CandidateNN prevNN = candidateNNS[paramId][previous]; // --- Now check for previous NN // --- Try to beat the previous best NN toBeat = prevNN.distance; challenger = lazyAssessNNS[previous]; rrt = challenger.tryToBeat(toBeat, delta, epsilon); // --- Check the result if (rrt == LazyAssessNN_LCSS.RefineReturnType.New_best) { int r = challenger.getMinWindowValidityForFullDistance(); double d = challenger.getDistance(delta); prevNN.set(current, r, d, CandidateNN.Status.NN); if (d < toBeat) { classCounts[paramId][previous] = new int[train.numClasses()]; classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } else if (d == toBeat) { classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } } } } if (newNN) { int r = currPNN.r; double d = currPNN.distance; int index = currPNN.index; int tmp = paramId; candidateNNS[tmp][current].set(index, r, d, CandidateNN.Status.NN); double prevEpsilon = epsilon; while (tmp > 0 && paramId % 10 > 0 && prevEpsilon == epsilon && delta >= r) { candidateNNS[tmp][current].set(index, r, d, CandidateNN.Status.NN); classCounts[tmp][current] = classCounts[paramId][current].clone(); tmp--; 
this.setParamsFromParamId(train, tmp); } } } } } }
22,129
44.534979
148
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/MSM1NN.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble; import experiments.data.DatasetLoading; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.CandidateNN; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN.LazyAssessNN_MSM; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds.LbMsm; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache; import tsml.classifiers.legacy.elastic_ensemble.distance_functions.MSMDistance; import machine_learning.classifiers.kNN; import weka.core.Capabilities; import weka.core.Instance; import weka.core.Instances; import java.util.ArrayList; import java.util.Collections; /** * checked April '16 * * @author sjx07ngu */ //public class MSM1NN implements Classifier{ public class MSM1NN extends Efficient1NN { private Instances train = null; private double c = 0; protected static double[] msmParams = { // <editor-fold defaultstate="collapsed" desc="hidden for space"> 0.01, 0.01375, 0.0175, 0.02125, 0.025, 0.02875, 0.0325, 0.03625, 0.04, 0.04375, 0.0475, 0.05125, 0.055, 0.05875, 0.0625, 0.06625, 0.07, 0.07375, 0.0775, 0.08125, 0.085, 0.08875, 0.0925, 0.09625, 0.1, 0.136, 0.172, 0.208, 0.244, 0.28, 
0.316, 0.352, 0.388, 0.424, 0.46, 0.496, 0.532, 0.568, 0.604, 0.64, 0.676, 0.712, 0.748, 0.784, 0.82, 0.856, 0.892, 0.928, 0.964, 1, 1.36, 1.72, 2.08, 2.44, 2.8, 3.16, 3.52, 3.88, 4.24, 4.6, 4.96, 5.32, 5.68, 6.04, 6.4, 6.76, 7.12, 7.48, 7.84, 8.2, 8.56, 8.92, 9.28, 9.64, 10, 13.6, 17.2, 20.8, 24.4, 28, 31.6, 35.2, 38.8, 42.4, 46, 49.6, 53.2, 56.8, 60.4, 64, 67.6, 71.2, 74.8, 78.4, 82, 85.6, 89.2, 92.8, 96.4, 100// </editor-fold> }; public MSM1NN() { this.c = 0.1; this.classifierIdentifier = "MSM_1NN"; } public MSM1NN(double c) { this.c = c; this.classifierIdentifier = "MSM_1NN"; this.allowLoocv = false; } public double distance(Instance first, Instance second, double cutOffValue) { // need to remove class index/ignore // simple check - if its last, ignore it. If it's not last, copy the instances, remove that attribue, and then call again // Not particularly efficient in the latter case, but a reasonable assumption to make here since all of the UCR/UEA problems // match that format. int m, n; if (first.classIndex() == first.numAttributes() - 1 && second.classIndex() == second.numAttributes() - 1) { m = first.numAttributes() - 1; n = second.numAttributes() - 1; } else { // default case, use the original MSM class (horrible efficiency, but just in as a fail safe for edge-cases) System.err.println("Warning: class designed to use problems with class index as last attribute. 
Defaulting to original MSM distance"); MSMDistance msm = new MSMDistance(this.c); return new MSMDistance(this.c).distance(first, second); } double[][] cost = new double[m][n]; // Initialization cost[0][0] = Math.abs(first.value(0) - second.value(0)); for (int i = 1; i < m; i++) { cost[i][0] = cost[i - 1][0] + calcualteCost(first.value(i), first.value(i - 1), second.value(0)); } for (int i = 1; i < n; i++) { cost[0][i] = cost[0][i - 1] + calcualteCost(second.value(i), first.value(0), second.value(i - 1)); } // Main Loop double min; for (int i = 1; i < m; i++) { min = cutOffValue; for (int j = 1; j < n; j++) { double d1, d2, d3; d1 = cost[i - 1][j - 1] + Math.abs(first.value(i) - second.value(j)); d2 = cost[i - 1][j] + calcualteCost(first.value(i), first.value(i - 1), second.value(j)); d3 = cost[i][j - 1] + calcualteCost(second.value(j), first.value(i), second.value(j - 1)); cost[i][j] = Math.min(d1, Math.min(d2, d3)); if (cost[i][j] >= cutOffValue) { cost[i][j] = Double.MAX_VALUE; } if (cost[i][j] < min) { min = cost[i][j]; } } if (min >= cutOffValue) { return Double.MAX_VALUE; } } // Output return cost[m - 1][n - 1]; } public double calcualteCost(double new_point, double x, double y) { double dist = 0; if (((x <= new_point) && (new_point <= y)) || ((y <= new_point) && (new_point <= x))) { dist = c; } else { dist = c + Math.min(Math.abs(new_point - x), Math.abs(new_point - y)); } return dist; } @Override public Capabilities getCapabilities() { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. 
} public static void runComparison() throws Exception { String tscProbDir = "C:/users/sjx07ngu/Dropbox/TSC Problems/"; // String datasetName = "ItalyPowerDemand"; // String datasetName = "GunPoint"; // String datasetName = "Beef"; // String datasetName = "Coffee"; String datasetName = "SonyAiboRobotSurface1"; double c = 0.1; Instances train = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TRAIN"); Instances test = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TEST"); // old version kNN knn = new kNN(); //efaults to k = 1 without any normalisation MSMDistance msmOld = new MSMDistance(c); knn.setDistanceFunction(msmOld); knn.buildClassifier(train); // new version MSM1NN msmNew = new MSM1NN(c); msmNew.buildClassifier(train); int correctOld = 0; int correctNew = 0; long start, end, oldTime, newTime; double pred; // classification with old MSM class and kNN start = System.nanoTime(); correctOld = 0; for (int i = 0; i < test.numInstances(); i++) { pred = knn.classifyInstance(test.instance(i)); if (pred == test.instance(i).classValue()) { correctOld++; } } end = System.nanoTime(); oldTime = end - start; // classification with new MSM and own 1NN start = System.nanoTime(); correctNew = 0; for (int i = 0; i < test.numInstances(); i++) { pred = msmNew.classifyInstance(test.instance(i)); if (pred == test.instance(i).classValue()) { correctNew++; } } end = System.nanoTime(); newTime = end - start; System.out.println("Comparison of MSM: " + datasetName); System.out.println("=========================================="); System.out.println("Old acc: " + ((double) correctOld / test.numInstances())); System.out.println("New acc: " + ((double) correctNew / test.numInstances())); System.out.println("Old timing: " + oldTime); System.out.println("New timing: " + newTime); System.out.println("Relative Performance: " + ((double) newTime / oldTime)); } public static void main(String[] args) throws Exception { // for (int 
i = 0; i < 10; i++) { runComparison(); } } @Override public void setParamsFromParamId(Instances train, int paramId) { this.c = msmParams[paramId]; } @Override public String getParamInformationString() { return this.c + ""; } /************************************************************************************************ Support for FastEE @author Chang Wei Tan, Monash University (chang.tan@monash.edu) ************************************************************************************************/ @Override public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex){ return LbMsm.distance(query, candidate, c, trainCache.getMax(queryIndex), trainCache.getMin(queryIndex)); } @Override public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final double cutOffValue){ return LbMsm.distance(query, candidate, c, trainCache.getMax(queryIndex), trainCache.getMin(queryIndex), cutOffValue); } @Override public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final SequenceStatsCache cache){ return LbMsm.distance(query, candidate, c, cache.getMax(queryIndex), cache.getMin(queryIndex)); } @Override public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final double cutOffValue, final SequenceStatsCache cache){ return LbMsm.distance(query, candidate, c, cache.getMax(queryIndex), cache.getMin(queryIndex), cutOffValue); } @Override public void initNNSTable(Instances train, SequenceStatsCache cache) { if (train.size() < 2) { System.err.println("[INIT-NNS-TABLE] Set is to small: " + train.size() + " sequence. 
At least 2 sequences needed."); } candidateNNS = new CandidateNN[nParams][train.size()]; for (int paramId = 0; paramId < nParams; ++paramId) { for (int len = 0; len < train.size(); ++len) { candidateNNS[paramId][len] = new CandidateNN(); } } classCounts = new int[nParams][train.size()][train.numClasses()]; final LazyAssessNN_MSM[] lazyAssessNNS = new LazyAssessNN_MSM[train.size()]; for (int i = 0; i < train.size(); ++i) { lazyAssessNNS[i] = new LazyAssessNN_MSM(cache); } final ArrayList<LazyAssessNN_MSM> challengers = new ArrayList<>(train.size()); for (int current = 1; current < train.size(); ++current) { final Instance sCurrent = train.get(current); // Clear off the previous challengers and add all the previous sequences challengers.clear(); for (int previous = 0; previous < current; ++previous) { final LazyAssessNN_MSM d = lazyAssessNNS[previous]; d.set(train.get(previous), previous, sCurrent, current); challengers.add(d); } for (int paramId = 0; paramId < nParams; ++paramId) { setParamsFromParamId(train, paramId); final CandidateNN currPNN = candidateNNS[paramId][current]; if (currPNN.isNN()) { // --- --- WITH NN CASE --- --- // We already have NN for sure, but we still have to check if current is new NN for previous for (int previous = 0; previous < current; ++previous) { final CandidateNN prevNN = candidateNNS[paramId][previous]; // --- Try to beat the previous best NN final double toBeat = prevNN.distance; final LazyAssessNN_MSM challenger = lazyAssessNNS[previous]; final LazyAssessNN_MSM.RefineReturnType rrt = challenger.tryToBeat(toBeat, c); // --- Check the result if (rrt == LazyAssessNN_MSM.RefineReturnType.New_best) { double d = challenger.getDistance(); prevNN.set(current, d, CandidateNN.Status.NN); if (d < toBeat) { classCounts[paramId][previous] = new int[train.numClasses()]; classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } else if (d == toBeat) { classCounts[paramId][previous][(int) 
challenger.getReference().classValue()]++; } } } } else { // --- --- WITHOUT NN CASE --- --- // We don't have NN yet. // Sort the challengers so we have better chance to organize good pruning. Collections.sort(challengers); for (LazyAssessNN_MSM challenger : challengers) { final int previous = challenger.indexQuery; final CandidateNN prevNN = candidateNNS[paramId][previous]; // --- First we want to beat the current best candidate: double toBeat = currPNN.distance; LazyAssessNN_MSM.RefineReturnType rrt = challenger.tryToBeat(toBeat, c); // --- Check the result if (rrt == LazyAssessNN_MSM.RefineReturnType.New_best) { final double d = challenger.getDistance(); currPNN.set(previous, d, CandidateNN.Status.BC); if (d < toBeat) { classCounts[paramId][current] = new int[train.numClasses()]; classCounts[paramId][current][(int) challenger.getQuery().classValue()]++; } else if (d == toBeat) { classCounts[paramId][current][(int) challenger.getQuery().classValue()]++; } } // --- Now check for previous NN // --- Try to beat the previous best NN toBeat = prevNN.distance; challenger = lazyAssessNNS[previous]; rrt = challenger.tryToBeat(toBeat, c); // --- Check the result if (rrt == LazyAssessNN_MSM.RefineReturnType.New_best) { final double d = challenger.getDistance(); prevNN.set(current, d, CandidateNN.Status.NN); if (d < toBeat) { classCounts[paramId][previous] = new int[train.numClasses()]; classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } else if (d == toBeat) { classCounts[paramId][previous][(int) challenger.getReference().classValue()]++; } } } // --- When we looked at every past sequences, // the current best candidate is really the best one, so the NN. 
// So assign the current NN to all the windows that are valid if (paramId > 0) { final double currD = currPNN.distance; final double prevD = candidateNNS[paramId - 1][current].distance; final int index = currPNN.index; if (currD == prevD) { for (int j = paramId; j < nParams; j++) { candidateNNS[j][current].set(index, currD, CandidateNN.Status.NN); classCounts[j][current] = classCounts[paramId][current].clone(); } } } } } } } @Override public void initApproxNNSTable(Instances train, SequenceStatsCache cache, int nSamples) { if (nSamples < 2) { System.err.println("[INIT-NNS-TABLE] Set is to small: " + nSamples + " sequence. At least 2 sequences needed."); } candidateNNS = new CandidateNN[nParams][nSamples]; for (int paramId = 0; paramId < nParams; ++paramId) { for (int len = 0; len < nSamples; ++len) { candidateNNS[paramId][len] = new CandidateNN(); } } final LazyAssessNN_MSM[] lazyAssessNNS = new LazyAssessNN_MSM[train.size()]; for (int i = 0; i < train.size(); ++i) { lazyAssessNNS[i] = new LazyAssessNN_MSM(cache); } final ArrayList<LazyAssessNN_MSM> challengers = new ArrayList<>(nSamples); for (int current = 0; current < nSamples; ++current) { final Instance sCurrent = train.get(current); // Clear off the previous challengers and add all the previous sequences challengers.clear(); for (int previous = 0; previous < train.size(); ++previous) { if (previous==current) continue; final LazyAssessNN_MSM d = lazyAssessNNS[previous]; d.set(train.get(previous), previous, sCurrent, current); challengers.add(d); } for (int paramId = 0; paramId < nParams; ++paramId) { setParamsFromParamId(train, paramId); final CandidateNN currPNN = candidateNNS[paramId][current]; Collections.sort(challengers); boolean newNN = false; for (LazyAssessNN_MSM challenger : challengers) { // --- Get the data int previous = challenger.indexQuery; if (previous == current) previous = challenger.indexReference; if (previous == currPNN.index) continue; // --- First we want to beat the current best 
candidate: double toBeat = currPNN.distance; LazyAssessNN_MSM.RefineReturnType rrt = challenger.tryToBeat(toBeat, c); // --- Check the result if (rrt == LazyAssessNN_MSM.RefineReturnType.New_best) { double d = challenger.getDistance(); currPNN.set(previous, d, CandidateNN.Status.BC); newNN = true; } if (previous < nSamples) { CandidateNN prevNN = candidateNNS[paramId][previous]; // --- Now check for previous NN // --- Try to beat the previous best NN toBeat = prevNN.distance; challenger = lazyAssessNNS[previous]; rrt = challenger.tryToBeat(toBeat, c); // --- Check the result if (rrt == LazyAssessNN_MSM.RefineReturnType.New_best) { double d = challenger.getDistance(); prevNN.set(current, d, CandidateNN.Status.NN); } } } if (newNN && paramId > 0) { double currD = currPNN.distance; double prevD = candidateNNS[paramId - 1][current].distance; int index = currPNN.index; if (currD == prevD) { for (int j = paramId; j < nParams; j++) { candidateNNS[j][current].set(index, currD, CandidateNN.Status.NN); } } } } } } }
21,507
37.893309
184
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/TWE1NN.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble; import experiments.data.DatasetLoading; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.CandidateNN; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN.LazyAssessNN_TWED; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds.LbTwed; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache; import machine_learning.classifiers.kNN; import weka.core.Capabilities; import weka.core.Instance; import weka.core.Instances; import tsml.classifiers.legacy.elastic_ensemble.distance_functions.TWEDistance; import java.util.ArrayList; import java.util.Collections; //import efficient_standalone_classifiers.Eff /** * written April '16 - looks good * * @author sjx07ngu */ public class TWE1NN extends Efficient1NN { private static final double DEGREE = 2; // not bothering to set the degree in this code, it's fixed to 2 in the other anyway double nu = 1; double lambda = 1; protected static double[] twe_nuParams = { // <editor-fold defaultstate="collapsed" desc="hidden for space"> 0.00001, 0.0001, 0.0005, 0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1,// </editor-fold> }; protected static double[] 
twe_lamdaParams = { // <editor-fold defaultstate="collapsed" desc="hidden for space"> 0, 0.011111111, 0.022222222, 0.033333333, 0.044444444, 0.055555556, 0.066666667, 0.077777778, 0.088888889, 0.1,// </editor-fold> }; public TWE1NN(double nu, double lambda) { this.nu = nu; this.lambda = lambda; this.classifierIdentifier = "TWE_1NN"; this.allowLoocv = false; } public TWE1NN() { // note: these defaults may be garbage for most measures. Should set them through CV or prior knowledge this.nu = 0.005; this.lambda = 0.5; this.classifierIdentifier = "TWE_1NN"; } public final double distance(Instance first, Instance second, double cutoff) { // note: I can't see a simple way to use the cutoff, so unfortunately there isn't one! // base case - we're assuming class val is last. If this is true, this method is fine, // if not, we'll default to the DTW class if (first.classIndex() != first.numAttributes() - 1 || second.classIndex() != second.numAttributes() - 1) { return new TWEDistance(nu, lambda).distance(first, second, cutoff); } int m = first.numAttributes() - 1; int n = second.numAttributes() - 1; int dim = 1; double dist, disti1, distj1; double[][] ta = new double[m][dim]; double[][] tb = new double[m][dim]; double[] tsa = new double[m]; double[] tsb = new double[n]; // look like time staps for (int i = 0; i < tsa.length; i++) { tsa[i] = (i + 1); } for (int i = 0; i < tsb.length; i++) { tsb[i] = (i + 1); } int r = ta.length; // this is just m?! 
int c = tb.length; // so is this, but surely it should actually be n anyway int i, j, k; //Copy over values for (i = 0; i < m; i++) { ta[i][0] = first.value(i); } for (i = 0; i < n; i++) { tb[i][0] = second.value(i); } /* allocations in c double **D = (double **)calloc(r+1, sizeof(double*)); double *Di1 = (double *)calloc(r+1, sizeof(double)); double *Dj1 = (double *)calloc(c+1, sizeof(double)); for(i=0; i<=r; i++) { D[i]=(double *)calloc(c+1, sizeof(double)); } */ double[][] D = new double[r + 1][c + 1]; double[] Di1 = new double[r + 1]; double[] Dj1 = new double[c + 1]; // local costs initializations for (j = 1; j <= c; j++) { distj1 = 0; for (k = 0; k < dim; k++) { if (j > 1) { //CHANGE AJB 8/1/16: Only use power of 2 for speed distj1 += (tb[j - 2][k] - tb[j - 1][k]) * (tb[j - 2][k] - tb[j - 1][k]); // OLD VERSION distj1+=Math.pow(Math.abs(tb[j-2][k]-tb[j-1][k]),degree); // in c: distj1+=pow(fabs(tb[j-2][k]-tb[j-1][k]),degree); } else { distj1 += tb[j - 1][k] * tb[j - 1][k]; } } //OLD distj1+=Math.pow(Math.abs(tb[j-1][k]),degree); Dj1[j] = (distj1); } for (i = 1; i <= r; i++) { disti1 = 0; for (k = 0; k < dim; k++) { if (i > 1) { disti1 += (ta[i - 2][k] - ta[i - 1][k]) * (ta[i - 2][k] - ta[i - 1][k]); } // OLD disti1+=Math.pow(Math.abs(ta[i-2][k]-ta[i-1][k]),degree); else { disti1 += (ta[i - 1][k]) * (ta[i - 1][k]); } } //OLD disti1+=Math.pow(Math.abs(ta[i-1][k]),degree); Di1[i] = (disti1); for (j = 1; j <= c; j++) { dist = 0; for (k = 0; k < dim; k++) { dist += (ta[i - 1][k] - tb[j - 1][k]) * (ta[i - 1][k] - tb[j - 1][k]); // dist+=Math.pow(Math.abs(ta[i-1][k]-tb[j-1][k]),degree); if (i > 1 && j > 1) { dist += (ta[i - 2][k] - tb[j - 2][k]) * (ta[i - 2][k] - tb[j - 2][k]); } // dist+=Math.pow(Math.abs(ta[i-2][k]-tb[j-2][k]),degree); } D[i][j] = (dist); } }// for i // border of the cost matrix initialization D[0][0] = 0; for (i = 1; i <= r; i++) { D[i][0] = D[i - 1][0] + Di1[i]; } for (j = 1; j <= c; j++) { D[0][j] = D[0][j - 1] + Dj1[j]; } double dmin, htrans, 
dist0; int iback; for (i = 1; i <= r; i++) { for (j = 1; j <= c; j++) { htrans = Math.abs((tsa[i - 1] - tsb[j - 1])); if (j > 1 && i > 1) { htrans += Math.abs((tsa[i - 2] - tsb[j - 2])); } dist0 = D[i - 1][j - 1] + nu * htrans + D[i][j]; dmin = dist0; if (i > 1) { htrans = ((tsa[i - 1] - tsa[i - 2])); } else { htrans = tsa[i - 1]; } dist = Di1[i] + D[i - 1][j] + lambda + nu * htrans; if (dmin > dist) { dmin = dist; } if (j > 1) { htrans = (tsb[j - 1] - tsb[j - 2]); } else { htrans = tsb[j - 1]; } dist = Dj1[j] + D[i][j - 1] + lambda + nu * htrans; if (dmin > dist) { dmin = dist; } D[i][j] = dmin; } } dist = D[r][c]; return dist; } @Override public Capabilities getCapabilities() { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. } public static void runComparison() throws Exception { String tscProbDir = "C:/users/sjx07ngu/Dropbox/TSC Problems/"; // String datasetName = "ItalyPowerDemand"; // String datasetName = "GunPoint"; // String datasetName = "Beef"; // String datasetName = "Coffee"; String datasetName = "SonyAiboRobotSurface1"; Instances train = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TRAIN"); Instances test = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TEST"); // old version kNN knn = new kNN(); //efaults to k = 1 without any normalisation TWEDistance oldDtw = new TWEDistance(0.001, 0.5); knn.setDistanceFunction(oldDtw); knn.buildClassifier(train); // new version TWE1NN dtwNew = new TWE1NN(0.001, 0.5); dtwNew.buildClassifier(train); int correctOld = 0; int correctNew = 0; long start, end, oldTime, newTime; double pred; // classification with old MSM class and kNN start = System.nanoTime(); correctOld = 0; for (int i = 0; i < test.numInstances(); i++) { pred = knn.classifyInstance(test.instance(i)); if (pred == test.instance(i).classValue()) { correctOld++; } } end = System.nanoTime(); oldTime = end - 
start;

        // classification with new MSM and own 1NN
        start = System.nanoTime();
        correctNew = 0;
        for (int i = 0; i < test.numInstances(); i++) {
            pred = dtwNew.classifyInstance(test.instance(i));
            if (pred == test.instance(i).classValue()) {
                correctNew++;
            }
        }
        end = System.nanoTime();
        newTime = end - start;

        System.out.println("Comparison of MSM: " + datasetName);
        System.out.println("==========================================");
        System.out.println("Old acc: " + ((double) correctOld / test.numInstances()));
        System.out.println("New acc: " + ((double) correctNew / test.numInstances()));
        System.out.println("Old timing: " + oldTime);
        System.out.println("New timing: " + newTime);
        System.out.println("Relative Performance: " + ((double) newTime / oldTime));
    }

    /**
     * Developer entry point: repeats the old-vs-new implementation comparison ten times.
     */
    public static void main(String[] args) throws Exception {
        for (int i = 0; i < 10; i++) {
            runComparison();
        }
    }

    /**
     * Maps a flat paramId (0..99) onto the 10x10 (nu, lambda) grid:
     * paramId / 10 indexes twe_nuParams, paramId % 10 indexes twe_lamdaParams.
     */
    @Override
    public void setParamsFromParamId(Instances train, int paramId) {
        this.nu = twe_nuParams[paramId / 10];
        this.lambda = twe_lamdaParams[paramId % 10];
    }

    // "nu,lambda" string used for results bookkeeping.
    @Override
    public String getParamInformationString() {
        return this.nu + "," + this.lambda;
    }

    /************************************************************************************************
     Support for FastEE
     @author Chang Wei Tan, Monash University (chang.tan@monash.edu)
     ************************************************************************************************/

    // TWED lower bound using cached per-series max/min from the training cache.
    @Override
    public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex) {
        return LbTwed.distance(query, candidate, trainCache.getMax(queryIndex), trainCache.getMin(queryIndex), nu, lambda);
    }

    // Early-abandoning variant of the lower bound (stops once cutOffValue is exceeded).
    @Override
    public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final double cutOffValue) {
        return LbTwed.distance(query, candidate, trainCache.getMax(queryIndex), trainCache.getMin(queryIndex), nu, lambda, cutOffValue);
    }

    // Same bound, but against a caller-supplied cache rather than the training cache.
    @Override
    public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final SequenceStatsCache cache) {
        return LbTwed.distance(query, candidate, cache.getMax(queryIndex), cache.getMin(queryIndex), nu, lambda);
    }

    @Override
    public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final double cutOffValue, final SequenceStatsCache cache) {
        return LbTwed.distance(query, candidate, cache.getMax(queryIndex), cache.getMin(queryIndex), nu, lambda, cutOffValue);
    }

    /**
     * Builds the FastEE nearest-neighbour table for every parameter setting:
     * candidateNNS[paramId][i] ends up holding series i's nearest neighbour under
     * parameter paramId, and classCounts tallies the class votes of tied neighbours.
     * Uses LazyAssessNN wrappers so partial distance computations can be reused and
     * pruned via lower bounds.
     */
    @Override
    public void initNNSTable(Instances train, SequenceStatsCache cache) {
        if (train.size() < 2) {
            System.err.println("[INIT-NNS-TABLE] Set is to small: " + train.size() + " sequence. At least 2 sequences needed.");
        }

        candidateNNS = new CandidateNN[nParams][train.size()];
        for (int paramId = 0; paramId < nParams; ++paramId) {
            for (int len = 0; len < train.size(); ++len) {
                candidateNNS[paramId][len] = new CandidateNN();
            }
        }
        classCounts = new int[nParams][train.size()][train.numClasses()];

        // NOTE(review): "lazu" is a long-standing typo for "lazy"; kept to avoid code churn.
        final LazyAssessNN_TWED[] lazuAssessNN = new LazyAssessNN_TWED[train.size()];
        for (int i = 0; i < train.size(); ++i) {
            lazuAssessNN[i] = new LazyAssessNN_TWED(cache);
        }
        final ArrayList<LazyAssessNN_TWED> challengers = new ArrayList<>(train.size());

        for (int current = 1; current < train.size(); ++current) {
            // --- --- Get the data --- ---
            final Instance sCurrent = train.get(current);

            // Clear off the previous challengers and add all the previous sequences
            challengers.clear();
            for (int previous = 0; previous < current; ++previous) {
                final LazyAssessNN_TWED d = lazuAssessNN[previous];
                d.set(train.get(previous), previous, sCurrent, current);
                challengers.add(d);
            }

            for (int paramId = 0; paramId < nParams; ++paramId) {
                setParamsFromParamId(train, paramId);
                final CandidateNN currPNN = candidateNNS[paramId][current];

                if (currPNN.isNN()) {
                    // --- --- WITH NN CASE --- ---
                    // We already have NN for sure, but we still have to check if current is new NN for previous
                    for (int previous = 0; previous < current; ++previous) {
                        // --- Get the data
                        final CandidateNN prevNN = candidateNNS[paramId][previous];

                        // --- Try to beat the previous best NN
                        final double toBeat = prevNN.distance;
                        final LazyAssessNN_TWED challenger = lazuAssessNN[previous];
                        final LazyAssessNN_TWED.RefineReturnType rrt = challenger.tryToBeat(toBeat, nu, lambda);

                        // --- Check the result
                        if (rrt == LazyAssessNN_TWED.RefineReturnType.New_best) {
                            final double d = challenger.getDistance();
                            prevNN.set(current, d, CandidateNN.Status.NN);
                            if (d < toBeat) {
                                // strictly better: reset the vote tally before counting
                                classCounts[paramId][previous] = new int[train.numClasses()];
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            } else if (d == toBeat) {
                                // tie: add a vote alongside the existing ones
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            }
                        }
                    }
                } else {
                    // --- --- WITHOUT NN CASE --- ---
                    // We don't have NN yet.
                    // Sort the challengers so we have better chance to organize good pruning.
                    Collections.sort(challengers);

                    for (LazyAssessNN_TWED challenger : challengers) {
                        final int previous = challenger.indexQuery;
                        final CandidateNN prevNN = candidateNNS[paramId][previous];

                        // --- First we want to beat the current best candidate:
                        double toBeat = currPNN.distance;
                        LazyAssessNN_TWED.RefineReturnType rrt = challenger.tryToBeat(toBeat, nu, lambda);

                        // --- Check the result
                        if (rrt == LazyAssessNN_TWED.RefineReturnType.New_best) {
                            final double d = challenger.getDistance();
                            currPNN.set(previous, d, CandidateNN.Status.BC);
                            if (d < toBeat) {
                                classCounts[paramId][current] = new int[train.numClasses()];
                                classCounts[paramId][current][(int) challenger.getQuery().classValue()]++;
                            } else if (d == toBeat) {
                                classCounts[paramId][current][(int) challenger.getQuery().classValue()]++;
                            }
                        }

                        // --- Now check for previous NN
                        // --- Try to beat the previous best NN
                        // (the loop variable is deliberately reassigned here)
                        toBeat = prevNN.distance;
                        challenger = lazuAssessNN[previous];
                        rrt = challenger.tryToBeat(toBeat, nu, lambda);

                        // --- Check the result
                        if (rrt == LazyAssessNN_TWED.RefineReturnType.New_best) {
                            final double d = challenger.getDistance();
                            prevNN.set(current, d, CandidateNN.Status.NN);
                            if (d < toBeat) {
                                classCounts[paramId][previous] = new int[train.numClasses()];
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            } else if (d == toBeat) {
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            }
                        }
                    }

                    // --- When we looked at every past sequences,
                    // the current best candidate is really the best one, so the NN.
                    // So assign the current NN to all the windows that are valid
                    final double d = currPNN.distance;
                    final int index = currPNN.index;
                    candidateNNS[paramId][current].set(index, d, CandidateNN.Status.NN);
                }
            }
        }
    }

    /**
     * Approximate variant of initNNSTable: only the first nSamples series get their
     * nearest neighbour resolved, while every training series may act as a challenger.
     */
    @Override
    public void initApproxNNSTable(Instances train, SequenceStatsCache cache, int nSamples) {
        if (nSamples < 2) {
            System.err.println("[INIT-NNS-TABLE] Set is to small: " + nSamples + " sequence. At least 2 sequences needed.");
        }

        candidateNNS = new CandidateNN[nParams][nSamples];
        for (int paramId = 0; paramId < nParams; ++paramId) {
            for (int len = 0; len < nSamples; ++len) {
                candidateNNS[paramId][len] = new CandidateNN();
            }
        }

        final LazyAssessNN_TWED[] lazyAssessNN = new LazyAssessNN_TWED[train.size()];
        for (int i = 0; i < train.size(); ++i) {
            lazyAssessNN[i] = new LazyAssessNN_TWED(cache);
        }
        final ArrayList<LazyAssessNN_TWED> challengers = new ArrayList<>(nSamples);

        for (int current = 0; current < nSamples; ++current) {
            // --- --- Get the data --- ---
            final Instance sCurrent = train.get(current);

            // Clear off the previous challengers and add all the previous sequences
            challengers.clear();
            for (int previous = 0; previous < train.size(); ++previous) {
                if (previous == current) continue;
                final LazyAssessNN_TWED d = lazyAssessNN[previous];
                d.set(train.get(previous), previous, sCurrent, current);
                challengers.add(d);
            }

            for (int paramId = 0; paramId < nParams; ++paramId) {
                setParamsFromParamId(train, paramId);
                final CandidateNN currPNN = candidateNNS[paramId][current];

                Collections.sort(challengers);
                boolean newNN = false;
                for (LazyAssessNN_TWED challenger : challengers) {
                    // --- Get the data
                    int previous = challenger.indexQuery;
                    if (previous == current) previous = challenger.indexReference;
                    if (previous == currPNN.index) continue;

                    // --- First we want to beat the current best candidate:
                    double toBeat = currPNN.distance;
                    LazyAssessNN_TWED.RefineReturnType rrt = challenger.tryToBeat(toBeat, nu, lambda);

                    // --- Check the result
                    if (rrt == LazyAssessNN_TWED.RefineReturnType.New_best) {
                        double d = challenger.getDistance();
                        currPNN.set(previous, d, CandidateNN.Status.BC);
                        newNN = true;
                    }

                    // Only the sampled prefix has NN slots to update.
                    if (previous < nSamples) {
                        CandidateNN prevNN = candidateNNS[paramId][previous];

                        // --- Now check for previous NN
                        // --- Try to beat the previous best NN
                        toBeat = prevNN.distance;
                        challenger = lazyAssessNN[previous];
                        rrt = challenger.tryToBeat(toBeat, nu, lambda);

                        // --- Check the result
                        if (rrt == LazyAssessNN_TWED.RefineReturnType.New_best) {
                            double d = challenger.getDistance();
                            prevNN.set(current, d, CandidateNN.Status.NN);
                        }
                    }
                }

                if (newNN) {
                    // Promote the surviving best candidate to confirmed NN status.
                    double d = currPNN.distance;
                    int index = currPNN.index;
                    candidateNNS[paramId][current].set(index, d, CandidateNN.Status.NN);
                }
            }
        }
    }
}
22,430
40.385609
184
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/WDTW1NN.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble; import experiments.data.DatasetLoading; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.CandidateNN; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN.LazyAssessNN_WDTW; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds.LbWdtw; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache; import machine_learning.classifiers.kNN; import weka.core.Capabilities; import weka.core.Instance; import weka.core.Instances; import tsml.classifiers.legacy.elastic_ensemble.distance_functions.WeightedDTW; import java.util.ArrayList; import java.util.Collections; //import efficient_standalone_classifiers.Eff /** * written April '16 - looks good * * @author sjx07ngu */ public class WDTW1NN extends Efficient1NN { private double g = 0; private double[] weightVector; private static final double WEIGHT_MAX = 1; private boolean refreshWeights = true; public WDTW1NN(double g) { this.g = g; this.classifierIdentifier = "WDTW_1NN"; this.allowLoocv = false; } public WDTW1NN() { this.g = 0; this.classifierIdentifier = "WDTW_1NN"; } private void initWeights(int seriesLength) { this.weightVector = 
new double[seriesLength]; double halfLength = (double) seriesLength / 2; for (int i = 0; i < seriesLength; i++) { weightVector[i] = WEIGHT_MAX / (1 + Math.exp(-g * (i - halfLength))); } refreshWeights = false; } public final double distance(Instance first, Instance second, double cutoff) { // base case - we're assuming class val is last. If this is true, this method is fine, // if not, we'll default to the DTW class if (first.classIndex() != first.numAttributes() - 1 || second.classIndex() != second.numAttributes() - 1) { return new WeightedDTW(g).distance(first, second, cutoff); } int m = first.numAttributes() - 1; int n = second.numAttributes() - 1; if (this.refreshWeights) { this.initWeights(m); } //create empty array double[][] distances = new double[m][n]; //first value distances[0][0] = this.weightVector[0] * (first.value(0) - second.value(0)) * (first.value(0) - second.value(0)); //early abandon if first values is larger than cut off if (distances[0][0] > cutoff) { return Double.MAX_VALUE; } //top row for (int i = 1; i < n; i++) { distances[0][i] = distances[0][i - 1] + this.weightVector[i] * (first.value(0) - second.value(i)) * (first.value(0) - second.value(i)); //edited by Jay } //first column for (int i = 1; i < m; i++) { distances[i][0] = distances[i - 1][0] + this.weightVector[i] * (first.value(i) - second.value(0)) * (first.value(i) - second.value(0)); //edited by Jay } //warp rest double minDistance; for (int i = 1; i < m; i++) { boolean overflow = true; for (int j = 1; j < n; j++) { //calculate distances minDistance = Math.min(distances[i][j - 1], Math.min(distances[i - 1][j], distances[i - 1][j - 1])); distances[i][j] = minDistance + this.weightVector[Math.abs(i - j)] * (first.value(i) - second.value(j)) * (first.value(i) - second.value(j)); if (overflow && distances[i][j] < cutoff) { overflow = false; // because there's evidence that the path can continue } } //early abandon if (overflow) { return Double.MAX_VALUE; } } return distances[m - 1][n - 
1]; } @Override public Capabilities getCapabilities() { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. } public static void runComparison() throws Exception { String tscProbDir = "C:/users/sjx07ngu/Dropbox/TSC Problems/"; // String datasetName = "ItalyPowerDemand"; String datasetName = "GunPoint"; // String datasetName = "Beef"; // String datasetName = "Coffee"; // String datasetName = "SonyAiboRobotSurface1"; double r = 0.1; Instances train = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TRAIN"); Instances test = DatasetLoading.loadDataNullable(tscProbDir + datasetName + "/" + datasetName + "_TEST"); // old version kNN knn = new kNN(); //efaults to k = 1 without any normalisation WeightedDTW oldDtw = new WeightedDTW(r); knn.setDistanceFunction(oldDtw); knn.buildClassifier(train); // new version WDTW1NN dtwNew = new WDTW1NN(r); dtwNew.buildClassifier(train); int correctOld = 0; int correctNew = 0; long start, end, oldTime, newTime; double pred; // classification with old MSM class and kNN start = System.nanoTime(); correctOld = 0; for (int i = 0; i < test.numInstances(); i++) { pred = knn.classifyInstance(test.instance(i)); if (pred == test.instance(i).classValue()) { correctOld++; } } end = System.nanoTime(); oldTime = end - start; // classification with new MSM and own 1NN start = System.nanoTime(); correctNew = 0; for (int i = 0; i < test.numInstances(); i++) { pred = dtwNew.classifyInstance(test.instance(i)); if (pred == test.instance(i).classValue()) { correctNew++; } } end = System.nanoTime(); newTime = end - start; System.out.println("Comparison of MSM: " + datasetName); System.out.println("=========================================="); System.out.println("Old acc: " + ((double) correctOld / test.numInstances())); System.out.println("New acc: " + ((double) correctNew / test.numInstances())); System.out.println("Old timing: " + oldTime); 
System.out.println("New timing: " + newTime);
        System.out.println("Relative Performance: " + ((double) newTime / oldTime));
    }

    /**
     * Developer entry point: prints WeightedDTW vs WDTW1NN distances across the
     * 100-value g grid for the first/last instances of a hard-coded dataset path.
     */
    public static void main(String[] args) throws Exception {
        // for(int i = 0; i < 10; i++){
        // runComparison();
        // }
        Instances train = DatasetLoading.loadDataNullable("C:/users/sjx07ngu/dropbox/tsc problems/SonyAiboRobotSurface1/SonyAiboRobotSurface1_TRAIN");
        Instance one, two;
        one = train.firstInstance();
        two = train.lastInstance();

        WeightedDTW wdtw;
        WDTW1NN wnn = new WDTW1NN();
        double g;
        for (int paramId = 0; paramId < 100; paramId++) {
            g = (double) paramId / 100;
            wdtw = new WeightedDTW(g);
            wnn.setParamsFromParamId(train, paramId);
            System.out.print(wdtw.distance(one, two) + "\t");
            System.out.println(wnn.distance(one, two, Double.MAX_VALUE));
        }
    }

    /**
     * Maps paramId (0..99) directly to g = paramId / 100 and marks the cached
     * weight vector stale so it is rebuilt on the next distance call.
     */
    @Override
    public void setParamsFromParamId(Instances train, int paramId) {
        this.g = (double) paramId / 100;
        refreshWeights = true;
    }

    // "g," string used for results bookkeeping.
    @Override
    public String getParamInformationString() {
        return this.g + ",";
    }

    /************************************************************************************************
     Support for FastEE
     @author Chang Wei Tan, Monash University (chang.tan@monash.edu)
     ************************************************************************************************/
    protected static final int weightMax = 1;

    // WDTW lower bound from the candidate and the query's cached max/min.
    // NOTE(review): reads weightVector[0]; presumably setParamsFromParamId + a distance
    // call (or initNNSTable) has populated the vector first — TODO confirm.
    @Override
    public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex) {
        return LbWdtw.distance(candidate, weightVector[0], trainCache.getMax(queryIndex), trainCache.getMin(queryIndex));
    }

    // Early-abandoning variant of the lower bound.
    @Override
    public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final double cutOffValue) {
        return LbWdtw.distance(candidate, weightVector[0], trainCache.getMax(queryIndex), trainCache.getMin(queryIndex), cutOffValue);
    }

    // Same bound, but against a caller-supplied cache rather than the training cache.
    @Override
    public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final SequenceStatsCache cache) {
        return LbWdtw.distance(candidate, weightVector[0], cache.getMax(queryIndex), cache.getMin(queryIndex));
    }

    @Override
    public double lowerBound(final Instance query, final Instance candidate, final int queryIndex, final int candidateIndex, final double cutOffValue, final SequenceStatsCache cache) {
        return LbWdtw.distance(candidate, weightVector[0], cache.getMax(queryIndex), cache.getMin(queryIndex), cutOffValue);
    }

    /**
     * Builds the FastEE nearest-neighbour table for every parameter setting:
     * candidateNNS[paramId][i] ends up holding series i's nearest neighbour under
     * parameter paramId, and classCounts tallies the class votes of tied neighbours.
     * Weight vectors are built lazily per paramId (descending order) and cached.
     */
    @Override
    public void initNNSTable(Instances train, SequenceStatsCache cache) {
        if (train.size() < 2) {
            System.err.println("[INIT-NNS-TABLE] Set is to small: " + train.size() + " sequence. At least 2 sequences needed.");
        }

        candidateNNS = new CandidateNN[nParams][train.size()];
        for (int paramId = 0; paramId < nParams; ++paramId) {
            for (int len = 0; len < train.size(); ++len) {
                candidateNNS[paramId][len] = new CandidateNN();
            }
        }
        classCounts = new int[nParams][train.size()][train.numClasses()];

        final boolean[] vectorCreated = new boolean[nParams];
        final double[][] weightVectors = new double[nParams][maxWindow];
        final LazyAssessNN_WDTW[] lazyAssessNNS = new LazyAssessNN_WDTW[train.size()];
        for (int i = 0; i < train.size(); ++i) {
            lazyAssessNNS[i] = new LazyAssessNN_WDTW(cache);
        }
        final ArrayList<LazyAssessNN_WDTW> challengers = new ArrayList<>(train.size());

        for (int current = 1; current < train.size(); ++current) {
            final Instance sCurrent = train.get(current);

            // Clear off the previous challengers and add all the previous sequences
            challengers.clear();
            for (int previous = 0; previous < current; ++previous) {
                final LazyAssessNN_WDTW d = lazyAssessNNS[previous];
                d.set(train.get(previous), previous, sCurrent, current);
                challengers.add(d);
            }

            for (int paramId = nParams - 1; paramId > -1; --paramId) {
                setParamsFromParamId(train, paramId);
                if (!vectorCreated[paramId]) {
                    // NOTE(review): 3-arg initWeights overload returning double[]; assumed
                    // to be declared later in this class — not visible in this chunk.
                    weightVector = initWeights(sCurrent.numAttributes() - 1, g, weightMax);
                    weightVectors[paramId] = weightVector;
                    vectorCreated[paramId] = true;
                }
                final CandidateNN currPNN = candidateNNS[paramId][current];

                if (currPNN.isNN()) {
                    // --- --- WITH NN CASE --- ---
                    // We already have the NN for sure, but we still have to check if current is the new NN for previous
                    for (int previous = 0; previous < current; ++previous) {
                        // --- Get the data
                        final CandidateNN prevNN = candidateNNS[paramId][previous];

                        // --- Try to beat the previous best NN
                        final double toBeat = prevNN.distance;
                        final LazyAssessNN_WDTW challenger = lazyAssessNNS[previous];
                        final LazyAssessNN_WDTW.RefineReturnType rrt = challenger.tryToBeat(toBeat, weightVectors[paramId]);

                        // --- Check the result
                        if (rrt == LazyAssessNN_WDTW.RefineReturnType.New_best) {
                            final double d = challenger.getDistance();
                            prevNN.set(current, d, CandidateNN.Status.NN);
                            if (d < toBeat) {
                                // strictly better: reset the vote tally before counting
                                classCounts[paramId][previous] = new int[train.numClasses()];
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            } else if (d == toBeat) {
                                // tie: add a vote alongside the existing ones
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            }
                        }
                    }
                } else {
                    // --- --- WITHOUT NN CASE --- ---
                    // We don't have the NN yet.
                    // Sort the challengers so we have the better chance to organize the good pruning.
                    Collections.sort(challengers);

                    for (LazyAssessNN_WDTW challenger : challengers) {
                        final int previous = challenger.indexQuery;
                        final CandidateNN prevNN = candidateNNS[paramId][previous];

                        // --- First we want to beat the current best candidate:
                        double toBeat = currPNN.distance;
                        LazyAssessNN_WDTW.RefineReturnType rrt = challenger.tryToBeat(toBeat, weightVectors[paramId]);

                        // --- Check the result
                        if (rrt == LazyAssessNN_WDTW.RefineReturnType.New_best) {
                            final double d = challenger.getDistance();
                            currPNN.set(previous, d, CandidateNN.Status.BC);
                            if (d < toBeat) {
                                classCounts[paramId][current] = new int[train.numClasses()];
                                classCounts[paramId][current][(int) challenger.getQuery().classValue()]++;
                            } else if (d == toBeat) {
                                classCounts[paramId][current][(int) challenger.getQuery().classValue()]++;
                            }
                        }

                        // --- Now check for previous NN
                        // --- Try to beat the previous best NN
                        // (the loop variable is deliberately reassigned here)
                        toBeat = prevNN.distance;
                        challenger = lazyAssessNNS[previous];
                        rrt = challenger.tryToBeat(toBeat, weightVectors[paramId]);

                        // --- Check the result
                        if (rrt == LazyAssessNN_WDTW.RefineReturnType.New_best) {
                            final double d = challenger.getDistance();
                            prevNN.set(current, d, CandidateNN.Status.NN);
                            if (d < toBeat) {
                                classCounts[paramId][previous] = new int[train.numClasses()];
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            } else if (d == toBeat) {
                                classCounts[paramId][previous][(int) challenger.getReference().classValue()]++;
                            }
                        }
                    }

                    // --- When we looked at every past sequences,
                    // the current best candidate is really the best one, so the NN.
                    // So assign the current NN to all the windows that are valid
                    candidateNNS[paramId][current].set(currPNN.index, currPNN.distance, CandidateNN.Status.NN);
                }
            }
        }
    }

    /**
     * Approximate variant of initNNSTable: only the first nSamples series get their
     * nearest neighbour resolved, while every training series may act as a challenger.
     * (Method continues beyond this chunk.)
     */
    @Override
    public void initApproxNNSTable(Instances train, SequenceStatsCache cache, int nSamples) {
        if (nSamples < 2) {
            System.err.println("[INIT-NNS-TABLE] Set is to small: " + nSamples + " sequence. At least 2 sequences needed.");
        }

        candidateNNS = new CandidateNN[nParams][nSamples];
        for (int paramId = 0; paramId < nParams; ++paramId) {
            for (int len = 0; len < nSamples; ++len) {
                candidateNNS[paramId][len] = new CandidateNN();
            }
        }

        final boolean[] vectorCreated = new boolean[nParams];
        final double[][] weightVectors = new double[nParams][maxWindow];
        final LazyAssessNN_WDTW[] lazyAssessNNS = new LazyAssessNN_WDTW[train.size()];
        for (int i = 0; i < train.size(); ++i) {
            lazyAssessNNS[i] = new LazyAssessNN_WDTW(cache);
        }
        final ArrayList<LazyAssessNN_WDTW> challengers = new ArrayList<>(nSamples);

        for (int current = 0; current < nSamples; ++current) {
            final Instance sCurrent = train.get(current);

            // Clear off the previous challengers and add all the previous sequences
            challengers.clear();
            for (int previous = 0; previous < train.size(); ++previous) {
                if (previous == current) continue;
                final LazyAssessNN_WDTW d = lazyAssessNNS[previous];
                d.set(train.get(previous), previous, sCurrent, current);
                challengers.add(d);
            }

            for (int paramId = nParams - 1; paramId > -1; --paramId) {
                setParamsFromParamId(train, paramId);
                if (!vectorCreated[paramId]) {
                    weightVector = initWeights(sCurrent.numAttributes() - 1, g, weightMax);
                    weightVectors[paramId] = weightVector;
                    vectorCreated[paramId] = true;
                }
                final CandidateNN currPNN = candidateNNS[paramId][current];

                Collections.sort(challengers);
                for (LazyAssessNN_WDTW challenger : challengers) {
                    // --- Get the data
                    int previous = challenger.indexQuery;
                    if (previous == current) previous = challenger.indexReference;
                    if (previous == currPNN.index) continue;

                    // --- First we want to beat the current best candidate:
                    double toBeat = currPNN.distance;
                    LazyAssessNN_WDTW.RefineReturnType rrt = challenger.tryToBeat(toBeat, weightVectors[paramId]);

                    // --- Check the result
                    if (rrt == LazyAssessNN_WDTW.RefineReturnType.New_best) {
                        double d = challenger.getDistance();
                        currPNN.set(previous, d, CandidateNN.Status.BC);
                    }

                    if (previous < nSamples) { CandidateNN prevNN
= candidateNNS[paramId][previous]; // --- Now check for previous NN // --- Try to beat the previous best NN toBeat = prevNN.distance; challenger = lazyAssessNNS[previous]; rrt = challenger.tryToBeat(toBeat, weightVectors[paramId]); // --- Check the result if (rrt == LazyAssessNN_WDTW.RefineReturnType.New_best) { double d = challenger.getDistance(); prevNN.set(current, d, CandidateNN.Status.NN); } } } } } } private double[] initWeights(final int seriesLength, final double g, final double maxWeight) { final double[] weightVector = new double[seriesLength]; double halfLength = (double) seriesLength / 2; for (int i = 0; i < seriesLength; i++) { weightVector[i] = maxWeight / (1 + Math.exp(-g * (i - halfLength))); } return weightVector; } public String toString() { return "this weight: " + this.g; } }
20,574
41.775468
184
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/BasicDTW.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ /* * A simple DTW algorithm that computes the warped path with no constraints * */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; import experiments.data.DatasetLoading; import weka.core.EuclideanDistance; import weka.core.Instance; import weka.core.Instances; import weka.core.neighboursearch.PerformanceStats; /** * * @author Chris Rimmer */ public class BasicDTW extends EuclideanDistance{ protected double[][] distances; // private int distanceCount = 0; /** * BasicDTW Constructor * * Early Abandon Disabled */ public BasicDTW(){ super(); this.m_DontNormalize = true; } /** * BasicDTW Constructor that allows enabling of early abandon * * //@param earlyAbandon boolean value setting if early abandon is enabled */ public BasicDTW(Instances d) { super(d); this.m_DontNormalize = true; } /** * Distance method * * @param first instance 1 * @param second instance 2 * @param cutOffValue used for early abandon * @param stats * @return distance between instances */ @Override public double distance(Instance first, Instance second, double cutOffValue, PerformanceStats stats){ //Get the double arrays return distance(first,second,cutOffValue); } @Override public double distance(Instance first, Instance second) { return 
distance(first, second, Double.POSITIVE_INFINITY); } /** * Calculates the distance between two instances. * * @param first the first instance * @param second the second instance * @return the distance between the two given instances */ @Override public double distance(Instance first, Instance second, double cutOffValue){ //remove class index from first instance if there iscutOffValue one int firtClassIndex = first.classIndex(); double[] arr1; if(firtClassIndex > 0){ arr1 = new double[first.numAttributes()-1]; for(int i = 0,j = 0; i < first.numAttributes(); i++){ if(i != firtClassIndex){ arr1[j]= first.value(i); j++; } } }else{ arr1 = first.toDoubleArray(); } //remove class index from second instance if there is one int secondClassIndex = second.classIndex(); double[] arr2; if(secondClassIndex > 0){ arr2 = new double[second.numAttributes()-1]; for(int i = 0,j = 0; i < second.numAttributes(); i++){ if(i != secondClassIndex){ arr2[j]= second.value(i); j++; } } }else{ arr2 = second.toDoubleArray(); } return distance(arr1,arr2,cutOffValue); } /** * calculates the distance between two instances (been converted to arrays) * * @param first instance 1 as array * @param second instance 2 as array * @param cutOffValue used for early abandon * @return distance between instances */ public double distance(double[] first, double[] second, double cutOffValue){ //create empty array this.distances = new double[first.length][second.length]; //first value this.distances[0][0] = (first[0]-second[0])*(first[0]-second[0]); //top row for(int i=1;i<second.length;i++){ this.distances[0][i] = this.distances[0][i-1]+((first[0]-second[i])*(first[0]-second[i])); } //first column for(int i=1;i<first.length;i++){ this.distances[i][0] = this.distances[i-1][0]+((first[i]-second[0])*(first[i]-second[0])); } boolean overFlow = true; //warp rest double minDistance; for(int i = 1; i<first.length; i++){ overFlow = true; for(int j = 1; j<second.length; j++){ //calculate distances minDistance = 
Math.min(this.distances[i][j-1], Math.min(this.distances[i-1][j], this.distances[i-1][j-1])); this.distances[i][j] = minDistance+((first[i]-second[j])*(first[i]-second[j])); if(overFlow && this.distances[i][j] < cutOffValue){ overFlow = false; } } if(overFlow){ // i.e. none of the valid directions for the warping path are less than the passed cut-off return Double.MAX_VALUE; } } // return Math.sqrt(this.distances[first.length-1][second.length-1]); return (this.distances[first.length-1][second.length-1]); } /** * Generates a string of the minimum cost warp path * * Distances array must be populated through use of distance method * * @return Path */ public String printMinCostWarpPath(){ return findPath(this.distances.length-1, this.distances[0].length-1); } /** * Recursive method that finds and prints the minimum warped path * * @param i position in distances, should be max of series * @param j position in distances, should be max of series * * @return current position */ protected String findPath(int i, int j){ double prevDistance = this.distances[i][j]; int oldI = i; int oldJ = j; //final condition if(i != 0 || j != 0){ //decrementing i and j if(i > 0 && j > 0){ double min = Math.min(this.distances[i-1][j], Math.min(this.distances[i-1][j-1], this.distances[i][j-1])); if(this.distances[i-1][j-1] == min){ i--; j--; }else if(this.distances[i-1][j] == min){ i--; }else if(this.distances[i][j-1] == min){ j--; } }else if(j > 0){ j--; }else if(i > 0){ i--; } //recursive step return "("+oldI+","+oldJ+") = "+prevDistance+"\n" + findPath(i,j); }else{ return "("+oldI+","+oldJ+") = "+prevDistance+"\n"; } } /** * returns the Euclidean distances array * * @return double[][] distances */ public double[][] getDistanceArray(){ return this.distances; } /** * This will print the diagonal route with no warping */ public void printDiagonalRoute(){ System.out.println("------------------ Diagonal Route ------------------"); for(int i = this.distances.length-1; i >= 0; i--){ 
System.out.print(this.distances[i][i]+" "); } System.out.println("\n------------------ End ------------------"); } /** * Prints the distances array as a table */ public void printDistances(){ System.out.println("------------------ Distances Table ------------------"); for(int i = 0; i<this.distances.length; i++){ System.out.print("Row ="+i+" = "); for(int j = 0; j<this.distances[0].length; j++){ System.out.print(" "+ distances[i][j]); } System.out.print("\n"); } System.out.println("------------------ End ------------------"); } @Override public String toString() { return "BasicDTW"; } public static void main(String[] args){ //Test BasicDTW Instances test = DatasetLoading.loadDataNullable("C:\\Users\\ajb\\Dropbox\\test\\Beef"); BasicDTW dtw=new BasicDTW(test); EuclideanDistance ed=new EuclideanDistance(test); ed.setDontNormalize(true); System.out.println(" DATA \n"+test.toString()); System.out.println(" ED ="+ed.distance(test.instance(0),test.instance(1))); System.out.println(" ED ="+ed.distance(test.instance(0),test.instance(1),2)); System.out.println(" DTW ="+dtw.distance(test.instance(0),test.instance(1))); System.out.println(" DTW ="+dtw.distance(test.instance(0),test.instance(1),1)); //Test Early abandon } }
9,187
31.013937
123
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/DTW.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ /* DTW with early abandon */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.WarpingPathResults; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.GenericTools; import weka.core.Instance; import weka.core.Instances; /** * @author ajb */ public final class DTW extends DTW_DistanceBasic { /** * @param a * @param b * @param cutoff * @return */ @Override public final double distance(double[] a, double[] b, double cutoff) { double minDist; boolean tooBig; // Set the longest series to a. is this necessary? double[] temp; if (a.length < b.length) { temp = a; a = b; b = temp; } int n = a.length; int m = b.length; /* Parameter 0<=r<=1. 0 == no warp, 1 == full warp generalised for variable window size * */ windowSize = getWindowSize(n); //Extra memory than required, could limit to windowsize, // but avoids having to recreate during CV //for varying window sizes if (matrixD == null) matrixD = new double[n][m]; /* //Set boundary elements to max. */ int start, end; for (int i = 0; i < n; i++) { start = windowSize < i ? i - windowSize : 0; end = i + windowSize + 1 < m ? 
i + windowSize + 1 : m; for (int j = start; j < end; j++) matrixD[i][j] = Double.MAX_VALUE; } matrixD[0][0] = (a[0] - b[0]) * (a[0] - b[0]); //a is the longer series. //Base cases for warping 0 to all with max interval r //Warp a[0] onto all b[1]...b[r+1] for (int j = 1; j < windowSize && j < m; j++) matrixD[0][j] = matrixD[0][j - 1] + (a[0] - b[j]) * (a[0] - b[j]); // Warp b[0] onto all a[1]...a[r+1] for (int i = 1; i < windowSize && i < n; i++) matrixD[i][0] = matrixD[i - 1][0] + (a[i] - b[0]) * (a[i] - b[0]); //Warp the rest, for (int i = 1; i < n; i++) { tooBig = true; start = windowSize < i ? i - windowSize + 1 : 1; end = i + windowSize < m ? i + windowSize : m; for (int j = start; j < end; j++) { minDist = matrixD[i][j - 1]; if (matrixD[i - 1][j] < minDist) minDist = matrixD[i - 1][j]; if (matrixD[i - 1][j - 1] < minDist) minDist = matrixD[i - 1][j - 1]; matrixD[i][j] = minDist + (a[i] - b[j]) * (a[i] - b[j]); if (tooBig && matrixD[i][j] < cutoff) tooBig = false; } //Early abandon if (tooBig) { return Double.MAX_VALUE; } } //Find the minimum distance at the end points, within the warping window. 
return matrixD[n - 1][m - 1]; } /************************************************************************************************ Support for FastEE ************************************************************************************************/ private final static int MAX_SEQ_LENGTH = 4000; private final static int DIAGONAL = 0; // value for diagonal private final static int LEFT = 1; // value for left private final static int UP = 2; private final static double[][] distMatrix = new double[MAX_SEQ_LENGTH][MAX_SEQ_LENGTH]; private final static int[][] minDistanceToDiagonal = new int[MAX_SEQ_LENGTH][MAX_SEQ_LENGTH]; public static WarpingPathResults distanceExt(final Instance first, final Instance second, final int windowSize) { if(first.attribute(0).isRelationValued()) return distanceExtMultivariate( first, second, windowSize); double minDist = 0.0; final int n = first.numAttributes() - 1; final int m = second.numAttributes() - 1; double diff; int i, j, indiceRes, absIJ; int jStart, jEnd, indexInfyLeft; diff = first.value(0) - second.value(0); distMatrix[0][0] = diff * diff; minDistanceToDiagonal[0][0] = 0; for (i = 1; i < Math.min(n, 1 + windowSize); i++) { diff = first.value(i) - second.value(0); distMatrix[i][0] = distMatrix[i - 1][0] + diff * diff; minDistanceToDiagonal[i][0] = i; } for (j = 1; j < Math.min(m, 1 + windowSize); j++) { diff = first.value(0) - second.value(j); distMatrix[0][j] = distMatrix[0][j - 1] + diff * diff; minDistanceToDiagonal[0][j] = j; } if (j < m) distMatrix[0][j] = Double.POSITIVE_INFINITY; for (i = 1; i < n; i++) { jStart = Math.max(1, i - windowSize); jEnd = Math.min(m, i + windowSize + 1); indexInfyLeft = i - windowSize - 1; if (indexInfyLeft >= 0) distMatrix[i][indexInfyLeft] = Double.POSITIVE_INFINITY; for (j = jStart; j < jEnd; j++) { absIJ = Math.abs(i - j); indiceRes = GenericTools.argMin3(distMatrix[i - 1][j - 1], distMatrix[i][j - 1], distMatrix[i - 1][j]); switch (indiceRes) { case DIAGONAL: minDist = distMatrix[i - 1][j - 
1]; minDistanceToDiagonal[i][j] = Math.max(absIJ, minDistanceToDiagonal[i - 1][j - 1]); break; case LEFT: minDist = distMatrix[i][j - 1]; minDistanceToDiagonal[i][j] = Math.max(absIJ, minDistanceToDiagonal[i][j - 1]); break; case UP: minDist = distMatrix[i - 1][j]; minDistanceToDiagonal[i][j] = Math.max(absIJ, minDistanceToDiagonal[i - 1][j]); break; } diff = first.value(i) - second.value(j); distMatrix[i][j] = minDist + diff * diff; } if (j < m) distMatrix[i][j] = Double.POSITIVE_INFINITY; } WarpingPathResults resExt = new WarpingPathResults(); resExt.distance = distMatrix[n - 1][m - 1]; resExt.distanceFromDiagonal = minDistanceToDiagonal[n - 1][m - 1]; return resExt; } private static double multivariatePointDistance(Instances data1, Instances data2, int posA, int posB){ double diff=0; for(int i=0;i<data1.numInstances();i++) diff += (data1.instance(i).value(posA) - data2.instance(i).value(posB))*(data1.instance(i).value(posA) - data2.instance(i).value(posB)); return diff; } public static WarpingPathResults distanceExtMultivariate(final Instance first, final Instance second, final int windowSize) { Instances data1=first.relationalValue(0); Instances data2=second.relationalValue(0); double minDist = 0.0; final int n = first.numAttributes() - 1; final int m = second.numAttributes() - 1; double diff=0; int i, j, indiceRes, absIJ; int jStart, jEnd, indexInfyLeft; diff=multivariatePointDistance(data1,data2,0,0); distMatrix[0][0] = diff; minDistanceToDiagonal[0][0] = 0; for (i = 1; i < Math.min(n, 1 + windowSize); i++) { diff = multivariatePointDistance(data1,data2,i,0); distMatrix[i][0] = distMatrix[i - 1][0] + diff; minDistanceToDiagonal[i][0] = i; } for (j = 1; j < Math.min(m, 1 + windowSize); j++) { diff =multivariatePointDistance(data1,data2,0,j); distMatrix[0][j] = distMatrix[0][j - 1] + diff; minDistanceToDiagonal[0][j] = j; } if (j < m) distMatrix[0][j] = Double.POSITIVE_INFINITY; for (i = 1; i < n; i++) { jStart = Math.max(1, i - windowSize); jEnd = Math.min(m, 
i + windowSize + 1); indexInfyLeft = i - windowSize - 1; if (indexInfyLeft >= 0) distMatrix[i][indexInfyLeft] = Double.POSITIVE_INFINITY; for (j = jStart; j < jEnd; j++) { absIJ = Math.abs(i - j); indiceRes = GenericTools.argMin3(distMatrix[i - 1][j - 1], distMatrix[i][j - 1], distMatrix[i - 1][j]); switch (indiceRes) { case DIAGONAL: minDist = distMatrix[i - 1][j - 1]; minDistanceToDiagonal[i][j] = Math.max(absIJ, minDistanceToDiagonal[i - 1][j - 1]); break; case LEFT: minDist = distMatrix[i][j - 1]; minDistanceToDiagonal[i][j] = Math.max(absIJ, minDistanceToDiagonal[i][j - 1]); break; case UP: minDist = distMatrix[i - 1][j]; minDistanceToDiagonal[i][j] = Math.max(absIJ, minDistanceToDiagonal[i - 1][j]); break; } diff = multivariatePointDistance(data1,data2,i,j); distMatrix[i][j] = minDist + diff; } if (j < m) distMatrix[i][j] = Double.POSITIVE_INFINITY; } WarpingPathResults resExt = new WarpingPathResults(); resExt.distance = distMatrix[n - 1][m - 1]; resExt.distanceFromDiagonal = minDistanceToDiagonal[n - 1][m - 1]; return resExt; } public static int getWindowSize(final int n, final double r) { return (int) (r * n); } }
10,395
39.768627
148
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/DTW_D.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; import static utilities.multivariate_tools.MultivariateInstanceTools.splitMultivariateInstance; import weka.core.Attribute; import weka.core.DenseInstance; import weka.core.Instance; import weka.core.Instances; import weka.core.neighboursearch.PerformanceStats; import weka.core.converters.ConverterUtils.DataSource; import java.util.ArrayList; /** * * @author ABostrom */ public class DTW_D extends DTW_DistanceBasic{ public DTW_D(){} public DTW_D(Instances train){ super(train); m_Data = null; m_Validated = true; } //DIRTY HACK TO MAKE IT WORK WITH kNN. because of relational attribute stuff. @Override protected void validate() {} @Override public void update(Instance ins) {} @Override public double distance(Instance first, Instance second, double cutOffValue, PerformanceStats stats){ //Get the double arrays return distance(first,second,cutOffValue); } @Override public double distance(Instance first, Instance second) { return distance(first, second, Double.POSITIVE_INFINITY); } @Override public double distance(Instance multiSeries1, Instance multiseries2, double cutoff){ //split the instance. 
Instance[] multi1 = splitMultivariateInstance(multiSeries1); Instance[] multi2 = splitMultivariateInstance(multiseries2); //TODO: might need to normalise here. double[][] data1 = utilities.multivariate_tools.MultivariateInstanceTools.convertMultiInstanceToTransposedArrays(multi1); double[][] data2 = utilities.multivariate_tools.MultivariateInstanceTools.convertMultiInstanceToTransposedArrays(multi2); return Math.sqrt(distance(data1, data2, cutoff)); } //because a and b are transposed, we can grab a column with a[0]. //a.length is the number of attributes //and a[0].length is the number of channels. public double distance(double[][] a, double[][] b, double cutoff){ double minDist; boolean tooBig=true; // Set the longest series to a double[][] temp; if(a.length<b.length){ temp=a; a=b; b=temp; } int n=a.length; int m=b.length; /* Parameter 0<=r<=1. 0 == no warp, 1 == full warp generalised for variable window size * */ matrixD = new double[n][n]; windowSize = getWindowSize(n); /* //Set all to max. 
This is necessary for the window but I dont need to do it all */ for(int i=0;i<n;i++) for(int j=0;j<m;j++) matrixD[i][j]=Double.MAX_VALUE; matrixD[0][0]= sqMultiDist(a[0], b[0]); //Base cases for warping 0 to all with max interval r //Warp a[0] onto all b[1]...b[r+1] for(int j=1;j<windowSize && j<n;j++) matrixD[0][j]=matrixD[0][j-1]+ sqMultiDist(a[0],b[j]); // Warp b[0] onto all a[1]...a[r+1] for(int i=1;i<windowSize && i<n;i++) matrixD[i][0]=matrixD[i-1][0]+ sqMultiDist(a[i], b[0]); //Warp the rest, for (int i=1;i<n;i++){ tooBig=true; for (int j = 1;j<m;j++){ //Find the min of matrixD[i][j-1],matrixD[i-1][j] and matrixD[i-1][j-1] if (i < j + windowSize && j < i + windowSize) { minDist=matrixD[i][j-1]; if(matrixD[i-1][j]<minDist) minDist=matrixD[i-1][j]; if(matrixD[i-1][j-1]<minDist) minDist=matrixD[i-1][j-1]; matrixD[i][j]=minDist + sqMultiDist(a[i], b[j]); if(tooBig&&matrixD[i][j]<cutoff) tooBig=false; } } //Early abandon if(tooBig){ return Double.MAX_VALUE; } } //Find the minimum distance at the end points, within the warping window. return matrixD[n-1][m-1]; } double sqDist(double a, double b){ return (a-b)*(a-b); } //given each aligned value in the channel. double sqMultiDist(double[] a, double[] b){ double sum = 0; for(int i=0; i<a.length; i++){ sum += sqDist(a[i], b[i]); } return sum; } public static void main(String[] args){ } }
5,298
31.115152
129
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/DTW_DistanceBasic.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; /** Basic DTW implementation for Weka. /Each instance is assumed to be a time series. Basically we pull all the data out and proceed as usual! **/ import weka.core.EuclideanDistance; import weka.core.Instance; import weka.core.Instances; import weka.core.neighboursearch.PerformanceStats; public class DTW_DistanceBasic extends EuclideanDistance{ private static final long serialVersionUID = 1L; protected int windowSize; protected double r=1; //Warping window size percentage, between 0 and 1 protected double[][] matrixD; protected int endX=0; protected int endY=0; public DTW_DistanceBasic(){ super(); m_DontNormalize=true; } public DTW_DistanceBasic(Instances data) { super(data); m_DontNormalize=true; } //Needs overriding to avoid cutoff check public double distance(Instance first, Instance second){ return distance(first, second, Double.POSITIVE_INFINITY, null, false); } @Override public double distance(Instance first, Instance second, PerformanceStats stats) { //debug method pls remove after use return distance(first, second, Double.POSITIVE_INFINITY, stats, false); } @Override public double distance(Instance first, Instance second, double cutOffValue, PerformanceStats 
stats){ return distance(first,second,cutOffValue,stats,false); } public double distance(Instance first, Instance second, double cutOffValue, PerformanceStats stats, boolean print) { return distance(first,second,cutOffValue); } private double[] extractSeries(Instance a){ int fClass=a.classIndex(); double[] s; if(fClass>0) { s=new double[a.numAttributes()-1]; int count=0; for(int i=0;i<s.length+1;i++){ if(i!=fClass){ s[count]=a.value(i); count++; } } } else s=a.toDoubleArray(); return s; } public double multivariateDistance(Instance first, Instance second, double cutOffValue) { double minDist; boolean tooBig=true; Instances data1=first.relationalValue(0); Instances data2=second.relationalValue(0); int n=data1.numAttributes(); int m=data2.numAttributes(); if(n!=m) throw new RuntimeException("Cannot handle unequal length series"); // Parameter 0<=r<=1. 0 == no warp, 1 == full warp matrixD = new double[n][n]; windowSize = getWindowSize(n); //Set all to max. This is necessary for the window but I dont need to do for(int i=0;i<n;i++) for(int j=0;j<n;j++) matrixD[i][j]=Double.MAX_VALUE; matrixD[0][0]=multivariatePointDistance(data1,data2,0,0); //Base cases for warping 0 to all with max interval r //Warp a[0] onto all b[1]...b[r+1] for(int j=1;j<windowSize && j<n;j++) matrixD[0][j]=matrixD[0][j-1]+multivariatePointDistance(data1,data2,0,j); // Warp b[0] onto all a[1]...a[r+1] for(int i=1;i<windowSize && i<n;i++) matrixD[i][0]=matrixD[i-1][0]+multivariatePointDistance(data1,data2,i,0); //Warp the rest, for (int i=1;i<n;i++){ tooBig=true; for (int j = 1;j<m;j++){ //Find the min of matrixD[i][j-1],matrixD[i-1][j] and matrixD[i-1][j-1] if (i < j + windowSize && j < i + windowSize) { minDist=matrixD[i][j-1]; if(matrixD[i-1][j]<minDist) minDist=matrixD[i-1][j]; if(matrixD[i-1][j-1]<minDist) minDist=matrixD[i-1][j-1]; matrixD[i][j]=minDist+multivariatePointDistance(data1,data2,i,j); if(tooBig&&matrixD[i][j]<cutOffValue) tooBig=false; } } //Early abandon if(tooBig){ return 
Double.MAX_VALUE; } } //Find the minimum distance at the end points, within the warping window. return matrixD[n-1][m-1]; } private static double multivariatePointDistance(Instances data1, Instances data2, int posA, int posB){ double diff=0; for(int i=0;i<data1.numInstances();i++) diff += (data1.instance(i).value(posA) - data2.instance(i).value(posB))*(data1.instance(i).value(posA) - data2.instance(i).value(posB)); return diff; } @Override public double distance(Instance first, Instance second, double cutOffValue) { //Hack to handle multivariate // System.out.println(" Multivariate, cut off value ="+cutOffValue); if(first.attribute(0).isRelationValued()) return multivariateDistance(first,second,cutOffValue); double[] f=extractSeries(first); double[] s=extractSeries(second); return distance(f,s,cutOffValue); } /* DTW Distance with early abandon: * */ public double distance(double[] a,double[] b, double cutoff){ double minDist; boolean tooBig=true; // Set the longest series to a double[] temp; if(a.length<b.length){ temp=a; a=b; b=temp; } int n=a.length; int m=b.length; /* Parameter 0<=r<=1. 0 == no warp, 1 == full warp generalised for variable window size * */ matrixD = new double[n][n]; windowSize = getWindowSize(n); /* //Set all to max. 
This is necessary for the window but I dont need to do it all */ for(int i=0;i<n;i++) for(int j=0;j<m;j++) matrixD[i][j]=Double.MAX_VALUE; matrixD[0][0]=(a[0]-b[0])*(a[0]-b[0]); //Base cases for warping 0 to all with max interval r //Warp a[0] onto all b[1]...b[r+1] for(int j=1;j<windowSize && j<n;j++) matrixD[0][j]=matrixD[0][j-1]+(a[0]-b[j])*(a[0]-b[j]); // Warp b[0] onto all a[1]...a[r+1] for(int i=1;i<windowSize && i<n;i++) matrixD[i][0]=matrixD[i-1][0]+(a[i]-b[0])*(a[i]-b[0]); //Warp the rest, for (int i=1;i<n;i++){ tooBig=true; for (int j = 1;j<m;j++){ //Find the min of matrixD[i][j-1],matrixD[i-1][j] and matrixD[i-1][j-1] if (i < j + windowSize && j < i + windowSize) { minDist=matrixD[i][j-1]; if(matrixD[i-1][j]<minDist) minDist=matrixD[i-1][j]; if(matrixD[i-1][j-1]<minDist) minDist=matrixD[i-1][j-1]; matrixD[i][j]=minDist+(a[i]-b[j])*(a[i]-b[j]); if(tooBig&&matrixD[i][j]<cutoff) tooBig=false; } } //Early abandon if(tooBig){ return Double.MAX_VALUE; } } //Find the minimum distance at the end points, within the warping window. return matrixD[n-1][m-1]; } static public int findWindowSize(double rr,int n){ int w=(int)(rr*n); //Rounded down. //No Warp, windowSize=1 if(w<1) w=1; //Full Warp : windowSize=n, otherwise scale between else if(w<n) w++; return w; } final public int getWindowSize(int n){ int w=(int)(r*n); //Rounded down. 
//No Warp, windowSize=1 if(w<1) w=1; //Full Warp : windowSize=n, otherwise scale between else if(w<n) w++; return w; } final public int findMaxWindow(){ //Find Path backwards in pairs int n=matrixD.length; int m=matrixD[0].length; int x=n-1,y=m-1; int maxDiff=0; while(x>0 && y>0) { //Look along double min=matrixD[x-1][y-1]; if(min<=matrixD[x-1][y] && min<=matrixD[x][y-1]){ x--; y--; } else if(matrixD[x-1][y] < matrixD[x][y-1]) x--; else y--; int diff=(x>y)?x-y:y-x; if(diff>maxDiff) maxDiff=diff; } return maxDiff; } void printPath(){ //Find Path backwards in pairs int n=matrixD.length; int m=matrixD[0].length; int x=n-1,y=m-1; int count=0; System.out.println(count+"END Point = "+x+","+y+" value ="+matrixD[x][y]); while(x>0 && y>0) { //Look along double min=matrixD[x-1][y-1]; if(min<=matrixD[x-1][y] && min<=matrixD[x][y-1]){ x--; y--; } else if(matrixD[x-1][y] < matrixD[x][y-1]) x--; else y--; count++; System.out.println(count+" Point = "+x+","+y+" value ="+matrixD[x][y]); } while(x>0){ x--; System.out.println(count+" Point = "+x+","+y+" value ="+matrixD[x][y]); } while(y>0){ y--; System.out.println(count+" Point = "+x+","+y+" value ="+matrixD[x][y]); } } public String toString() { return "DTW BASIC. 
r="+r;} public String globalInfo() {return " DTW Basic Distance";} public String getRevision() {return "Version 1.0"; } public void setR(double x){ r=x;} public double getR(){ return r;} public int getWindowSize(){ return windowSize;} public static void main(String[] args) { System.out.println(" Very basic test for DTW distance"); double[] a ={1,2,3,4,5,6,7,8}; double[] b ={2,3,4,5,6,7,8,9}; for(int i=0;i<a.length;i++) System.out.print(a[i]+","); System.out.println("\n************"); for(int i=0;i<b.length;i++) System.out.print(b[i]+","); System.out.println("\n Euclidean distance is 8, DTW should be 2"); DTW_DistanceBasic dtw= new DTW_DistanceBasic(); // dtw.printPath(); /* for(double[] d:dtw.matrixD){ for(double x:d) System.out.print(x+","); System.out.print("\n"); } */ } }
11,357
33.522796
148
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/DTW_DistanceEfficient.java
/*
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.distance_functions;

/**
 * Memory-efficient DTW implementation for Weka.
 * Each instance is assumed to be a univariate time series.
 */
import weka.core.Instances;

/**
 * DTW distance that keeps only two rows of the warping matrix instead of the
 * full n-by-m matrix, and supports early abandon: if every cell of the current
 * row already exceeds the supplied cutoff, the computation is abandoned and
 * {@code Double.MAX_VALUE} is returned.
 *
 * NOTE: this class stores the two working rows in instance fields ({@code row1},
 * {@code row2}), so a single instance is not safe for concurrent use from
 * multiple threads.
 */
public class DTW_DistanceEfficient extends DTW_DistanceBasic{
    // The two alternating rows of the DTW cost matrix (previous and current).
    double[] row1;
    double[] row2;

    public DTW_DistanceEfficient(){
        super();
        m_DontNormalize=true;  // distances are computed on raw values, no attribute normalisation
    }
    public DTW_DistanceEfficient(Instances data) {
        super(data);
        m_DontNormalize=true;
    }

/* DTW Distance:
 *
 * This implementation is more memory efficient in that it only stores
 * two rows. It also implements the early abandon: if all of row 2 are above
 * the cutoff then we can abandon the distance computation.
 */
    /**
     * Warping-window-constrained DTW between two series with early abandon.
     *
     * @param a       first series
     * @param b       second series
     * @param cutoff  early-abandon threshold; if every cell of a row exceeds
     *                this value the method returns {@code Double.MAX_VALUE}
     * @return the (squared-error) DTW distance, or {@code Double.MAX_VALUE}
     *         if abandoned early
     */
    public double distance(double[] a,double[] b, double cutoff){
        double minDist;
        boolean tooBig=true;  // true while no cell of the current row is below cutoff
//        System.out.println("\t\t\tIn Efficient with cutoff ="+cutoff);
// Set the longest series to a
        double[] temp;
        if(a.length<b.length){
            temp=a;
            a=b;
            b=temp;
        }
        int n=a.length;
        int m=b.length;
/*  No Warp: windowSize=1, full warp: windowSize=m */
        int windowSize = getWindowSize(m);
        row1=new double[m];
        row2=new double[m];
//Set all to max
        // First cell: align a[0] with b[0].
        row1[0]=(a[0]-b[0])*(a[0]-b[0]);
        if(row1[0]<cutoff)
            tooBig=false;
        // Initialise the rest of the reachable first row to "infinity" before filling.
        // NOTE(review): the bound uses j<n (length of the longer series) rather than
        // j<m (row length); when n>m and windowSize>=m this looks like it could index
        // past row1 were it not for the j<=windowSize guard — confirm intended.
        for(int j=1;j<n&&j<=windowSize;j++){
            row1[j]=Double.MAX_VALUE;
        }
//Warp a[0] onto all b[1]...b[WindowSize]
        for(int j=1;j<windowSize && j<m;j++){
            row1[j]=row1[j-1]+(a[0]-b[j])*(a[0]-b[j]);
            if(row1[j]<cutoff)
                tooBig=false;
        }
        if(tooBig){  // whole first row already above cutoff: abandon
            return Double.MAX_VALUE;
        }
        int start,end;
//For each remaining row, warp row i
        for (int i=1;i<n;i++){
            tooBig=true;
            row2=new double[m];
//Find point to start from (left edge of the warping window for row i)
            if(i-windowSize<1)
                start=0;
            else
                start=i-windowSize+1;
            if(start==0){
                // Window touches column 0: only predecessor is the cell above.
                row2[0]=row1[0]+(a[i]-b[0])*(a[i]-b[0]);
                start=1;
            }
            else
                row2[start-1]=Double.MAX_VALUE;  // sentinel so row2[j-1] reads are safe
//Find end point (right edge of the warping window, exclusive)
            if(start+windowSize>=m)
                end=m;
            else
                end=start+windowSize;
//Warp a[i] onto b[j=start..end]
            for (int j = start;j<end;j++){
//Find the min of row2[j-1],row1[j] and row1[j-1]
                minDist=row2[j-1];
                if(row1[j]<minDist)
                    minDist=row1[j];
                if(row1[j-1]<minDist)
                    minDist=row1[j-1];
                row2[j]=minDist+(a[i]-b[j])*(a[i]-b[j]);
                if(tooBig&&row2[j]<cutoff)
                    tooBig=false;
            }
            if(end<m)
                row2[end]=Double.MAX_VALUE;  // sentinel just past the window's right edge
//Swap row 2 into row 1.
            row1=row2;
//Early abandon
            if(tooBig){
                return Double.MAX_VALUE;
            }
        }
        return row1[m-1];
    }
    public String toString() {
        return "DTW EFFICIENT";
    }
/* Test Harness to check the outputs are the same with DTW Basic and
   the space-efficient version above. */
    public static void main(String[] args){
        DTW_DistanceBasic b=new DTW_DistanceBasic();
        DTW_DistanceEfficient c=new DTW_DistanceEfficient();
        double[] a1={1,1,1,6};
        double[] a2={1,6,6,6};
        b.setR(0);
        c.setR(0);
        System.out.println("***************** TEST 1: Two small arrays *******************");
        //Zero warp distance should be 50,
        System.out.println("\nZero warp full matrix ="+b.distance(a1,a2,Double.MAX_VALUE));
        System.out.println("Zero warp limited matrix ="+c.distance(a1,a2,Double.MAX_VALUE));
// Full warp should be 0
        b.setR(1);
        c.setR(1);
        System.out.println("\nFull warp full matrix ="+b.distance(a1,a2,Double.MAX_VALUE));
        System.out.println("Full warp limited matrix ="+c.distance(a1,a2,Double.MAX_VALUE));
//        System.out.println("Full warp full matrix JML version="+b.measure(a1,a2));
// 1/4 Warp should be 25
        b.setR(0.25);
        c.setR(0.25);
        System.out.println("\nQuarter warp full matrix ="+b.distance(a1,a2,Double.MAX_VALUE));
        System.out.println("Quarter warp limited matrix ="+c.distance(a1,a2,Double.MAX_VALUE));
        System.out.println("***************** TEST2: Longer arrays *******************");
//Longer arrays
        double[] a3={1,10,11,15,1,2,4,56,6,7,8};
        double[] a4={10,11,10,1,1,2,4,56,6,7,8};
        double d=0;
        for(int i=0;i<a3.length;i++)
            d+=(a3[i]-a4[i])*(a3[i]-a4[i]);
        System.out.println("\nEuclidean distance ="+d);
//Zero warp distance should be
        b.setR(0);
        c.setR(0);
        System.out.println("Zero warp full matrix ="+b.distance(a3,a4,Double.MAX_VALUE));
        System.out.println("Zero warp limited matrix ="+c.distance(a3,a4,100));
//        b.printPath();
// Full warp should be
        b.setR(1);
        c.setR(1);
        System.out.println("\nFull warp full matrix ="+b.distance(a3,a4,100));
//        b.printPath();
//        System.out.println("Full warp full matrix JML version="+b.measure(a3,a4));
        System.out.println("Full warp limited matrix ="+c.distance(a3,a4,100));
// 1/4 Warp should be
        b.setR(0.25);
        c.setR(0.25);
        System.out.println("\nQuarter warp full matrix ="+b.distance(a3,a4,Double.MAX_VALUE));
        System.out.println("Quarter warp limited matrix ="+c.distance(a3,a4,Double.MAX_VALUE));
// 1/2 Warp should be
        b.setR(0.5);
        c.setR(0.5);
        System.out.println("Half warp full matrix ="+b.distance(a3,a4,Double.MAX_VALUE));
        System.out.println("Half warp limited matrix ="+c.distance(a3,a4,Double.MAX_VALUE));
//        b.printPath();
        System.out.println("***************** TEST3: Variable length arrays *******************");
        System.out.println("NOT IMPLEMENTED FOR VARIABLE LENGTH");
/*
        double[] a5={1,10,11};
        double[] a6={1,10,11,15,1};
        //Zero warp distance should be 50,
        System.out.println("Zero warp full matrix ="+b.distance(a5,a6,0));
//        System.out.println("Zero warp limited matrix ="+c.distance(a1,a2,0));
// Full warp should be 0
        b.setR(1);
        c.setR(1);
        System.out.println("Full warp full matrix ="+b.distance(a5,a6,0));
//        System.out.println("Full warp full matrix JML version="+b.measure(a1,a2));
//        System.out.println("Full warp limited matrix ="+c.distance(a1,a2,0));
// 1/4 Warp should be 25
        b.setR(0.25);
        c.setR(0.25);
        System.out.println("Quarter warp full matrix ="+b.distance(a5,a6,0));
//        System.out.println("Quarter warp limited matrix ="+c.distance(a1,a2,0));
*/
//Variable length arrays
    }
}
8,126
35.28125
98
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/DTW_I.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; import static utilities.multivariate_tools.MultivariateInstanceTools.splitMultivariateInstance; import weka.core.Instance; import weka.core.Instances; import weka.core.neighboursearch.PerformanceStats; /** * * @author ABostrom */ public class DTW_I extends DTW_DistanceBasic{ public DTW_I(){} public DTW_I(Instances train){ super(train); m_Data = null; m_Validated = true; } //DIRTY HACK TO MAKE IT WORK WITH kNN. because of relational attribute stuff. @Override protected void validate() {} @Override public void update(Instance ins) {} @Override public double distance(Instance first, Instance second, double cutOffValue, PerformanceStats stats){ return distance(first,second,cutOffValue); } @Override public double distance(Instance first, Instance second) { return distance(first, second, Double.POSITIVE_INFINITY); } @Override public double distance(Instance multiSeries1, Instance multiseries2, double cutoff){ //split the instance. Instance[] multi1 = splitMultivariateInstance(multiSeries1); Instance[] multi2 = splitMultivariateInstance(multiseries2); //TODO: might need to normalise here. //pairwise compare and sum dtw measures. 
double cumulative_distance = 0; for(int i=0; i< multi1.length; i++){ cumulative_distance += Math.sqrt(super.distance(multi1[i], multi2[i], cutoff)); } return cumulative_distance; } }
2,432
29.797468
104
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/ERPDistance.java
package tsml.classifiers.legacy.elastic_ensemble.distance_functions;
/*
 This file is part of ELKI:
 Environment for Developing KDD-Applications Supported by Index-Structures

 Copyright (C) 2011
 Ludwig-Maximilians-Universität München
 Lehr- und Forschungseinheit für Datenbanksysteme
 ELKI Development Team

 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU Affero General Public License as published by
 the Free Software Foundation, either version 3 of the License, or
 (at your option) any later version.

 This program is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 GNU Affero General Public License for more details.

 You should have received a copy of the GNU Affero General Public License
 along with this program.  If not, see <http://www.gnu.org/licenses/>.

 Modified by Jason Lines (j.lines@uea.ac.uk)
 */

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.WarpingPathResults;
import weka.core.Instance;
import weka.core.EuclideanDistance;
import weka.core.Instances;
import weka.core.neighboursearch.PerformanceStats;

/**
 * Edit distance with Real Penalty (ERP), banded by a Sakoe-Chiba-style window.
 * Gaps are penalised against a fixed reference value {@code g}; the band width
 * is {@code ceil(seriesLength * bandSize)}.
 *
 * NOTE: the FastEE section at the bottom of this class uses shared static
 * buffers, so those static methods are not thread-safe.
 */
public class ERPDistance extends EuclideanDistance {

    // Gap penalty reference value (series values are compared against g when a gap is taken).
    private double g;
    // Warping band width as a proportion of the series length.
    private double bandSize;

    public ERPDistance(double g, double bandSize) {
        this.g = g;
        this.bandSize = bandSize;
    }

    /**
     * Distance method
     *
     * @param first       instance 1
     * @param second      instance 2
     * @param cutOffValue used for early abandon (currently passed through unused)
     * @param stats       ignored
     * @return distance between instances
     */
    @Override
    public double distance(Instance first, Instance second, double cutOffValue, PerformanceStats stats) {
        //Get the double arrays
        return distance(first, second, cutOffValue);
    }

    /**
     * distance method that converts instances to arrays of doubles,
     * stripping the class attribute (if any) from each instance first.
     *
     * @param first       instance 1
     * @param second      instance 2
     * @param cutOffValue used for early abandon
     * @return distance between instances
     */
    @Override
    public double distance(Instance first, Instance second, double cutOffValue) {
        //remove class index from first instance if there is one
        int firtClassIndex = first.classIndex();
        double[] arr1;
        if (firtClassIndex > 0) {
            arr1 = new double[first.numAttributes() - 1];
            for (int i = 0, j = 0; i < first.numAttributes(); i++) {
                if (i != firtClassIndex) {
                    arr1[j] = first.value(i);
                    j++;
                }
            }
        } else {
            arr1 = first.toDoubleArray();
        }

        //remove class index from second instance if there is one
        int secondClassIndex = second.classIndex();
        double[] arr2;
        if (secondClassIndex > 0) {
            arr2 = new double[second.numAttributes() - 1];
            for (int i = 0, j = 0; i < second.numAttributes(); i++) {
                if (i != secondClassIndex) {
                    arr2[j] = second.value(i);
                    j++;
                }
            }
        } else {
            arr2 = second.toDoubleArray();
        }

        return distance(arr1, arr2, cutOffValue);
    }

    // NOTE(review): cutOffValue is accepted but not used — there is no early
    // abandon in the ERP computation below.
    public double distance(double[] first, double[] second, double cutOffValue) {
//        return ERPDistance(first, second);
        return ERPDistance(new NumberVector(first), new NumberVector(second));
    }

    /**
     * Minimal stand-in for ELKI's NumberVector: wraps a double[] and exposes
     * 1-based access, matching the original ELKI implementation.
     */
    private static class NumberVector {
        private double[] values;

        public NumberVector(double[] values) {
            this.values = values;
        }

        public int getDimensionality() {
            return values.length;
        }

        /**
         * 1-based accessor (dimension 1 maps to values[0]).
         *
         * @throws IllegalArgumentException if the dimension is out of range
         */
        public double doubleValue(int dimension) {
            try {
                return values[dimension - 1];
            } catch (IndexOutOfBoundsException e) {
                throw new IllegalArgumentException("Dimension " + dimension + " out of range.");
            }
        }
    }

    //public double doubleDistance(NumberVector<?, ?> v1, NumberVector<?, ?> v2) {
    /**
     * Core ERP dynamic program over two vectors, keeping only two rows of the
     * cost matrix. Cells outside the band are set to +infinity.
     *
     * @return sqrt of the accumulated ERP cost
     */
    public double ERPDistance(NumberVector v1, NumberVector v2) {
        // Current and previous columns of the matrix
        double[] curr = new double[v2.getDimensionality()];
        double[] prev = new double[v2.getDimensionality()];

        // size of edit distance band
        // bandsize is the maximum allowed distance to the diagonal
//        int band = (int) Math.ceil(v2.getDimensionality() * bandSize);
        int band = (int) Math.ceil(v2.getDimensionality() * bandSize);

        // g parameter for local usage
        double gValue = g;

        for (int i = 0; i < v1.getDimensionality(); i++) {
            // Swap current and prev arrays. We'll just overwrite the new curr.
            {
                double[] temp = prev;
                prev = curr;
                curr = temp;
            }
            // Band edges for this row (clamped to valid columns).
            int l = i - (band + 1);
            if (l < 0) {
                l = 0;
            }
            int r = i + (band + 1);
            if (r > (v2.getDimensionality() - 1)) {
                r = (v2.getDimensionality() - 1);
            }

            for (int j = l; j <= r; j++) {
                if (Math.abs(i - j) <= band) {
                    // compute squared distance of feature vectors
                    // d1: cost of deleting from v1 (align v1[i] with gap value g)
                    double val1 = v1.doubleValue(i + 1);
                    double val2 = gValue;
                    double diff = (val1 - val2);
                    final double d1 = Math.sqrt(diff * diff);

                    // d2: cost of inserting from v2 (align gap value g with v2[j])
                    val1 = gValue;
                    val2 = v2.doubleValue(j + 1);
                    diff = (val1 - val2);
                    final double d2 = Math.sqrt(diff * diff);

                    // d12: cost of matching v1[i] with v2[j]
                    val1 = v1.doubleValue(i + 1);
                    val2 = v2.doubleValue(j + 1);
                    diff = (val1 - val2);
                    final double d12 = Math.sqrt(diff * diff);

                    final double dist1 = d1 * d1;
                    final double dist2 = d2 * d2;
                    final double dist12 = d12 * d12;

                    final double cost;

                    if ((i + j) != 0) {
                        if ((i == 0) || ((j != 0) && (((prev[j - 1] + dist12) > (curr[j - 1] + dist2)) && ((curr[j - 1] + dist2) < (prev[j] + dist1))))) {
                            // del
                            cost = curr[j - 1] + dist2;
                        } else if ((j == 0) || ((i != 0) && (((prev[j - 1] + dist12) > (prev[j] + dist1)) && ((prev[j] + dist1) < (curr[j - 1] + dist2))))) {
                            // ins
                            cost = prev[j] + dist1;
                        } else {
                            // match
                            cost = prev[j - 1] + dist12;
                        }
                    } else {
                        cost = 0;
                    }

                    curr[j] = cost;
                    // steps[i][j] = step;
                } else {
                    curr[j] = Double.POSITIVE_INFINITY; // outside band
                }
            }
        }

        return Math.sqrt(curr[v2.getDimensionality() - 1]);
    }

    // utility functions, useful for cv experiments
    /** Population standard deviation over all non-class attribute values of a dataset. */
    public static double stdv_p(Instances input) {
        double sumx = 0;
        double sumx2 = 0;
        double[] ins2array;
        for (int i = 0; i < input.numInstances(); i++) {
            ins2array = input.instance(i).toDoubleArray();
            for (int j = 0; j < ins2array.length - 1; j++) {//-1 to avoid classVal
                sumx += ins2array[j];
                sumx2 += ins2array[j] * ins2array[j];
            }
        }
        int n = input.numInstances() * (input.numAttributes() - 1);
        double mean = sumx / n;
        return Math.sqrt(sumx2 / (n) - mean * mean);
    }

    /** 10 evenly spaced ints from min to max inclusive (rounded). */
    public static int[] getInclusive10(int min, int max) {
        int[] output = new int[10];
        double diff = (double) (max - min) / 9;
        double[] doubleOut = new double[10];
        doubleOut[0] = min;
        output[0] = min;
        for (int i = 1; i < 9; i++) {
            doubleOut[i] = doubleOut[i - 1] + diff;
            output[i] = (int) Math.round(doubleOut[i]);
        }
        output[9] = max; // to make sure max isn't omitted due to double imprecision
        return output;
    }

    /** 10 evenly spaced doubles from min to max inclusive. */
    public static double[] getInclusive10(double min, double max) {
        double[] output = new double[10];
        double diff = (double) (max - min) / 9;
        output[0] = min;
        for (int i = 1; i < 9; i++) {
            output[i] = output[i - 1] + diff;
        }
        output[9] = max;
        return output;
    }

    public static void main(String[] args) {
        ERPDistance erp = new ERPDistance(0.5, 0.5);
        double[] one = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
        double[] two = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
//        double[] two = {2,3,4,5,6,7,8,9,10,11};
        System.out.println(erp.distance(one, two, 0));
    }

    /************************************************************************************************
     Support for FastEE
     ************************************************************************************************/
    // Shared scratch buffers sized for the longest supported series.
    // WARNING: static and mutated by distanceExt — not thread-safe.
    private final static int MAX_SEQ_LENGTH = 4000;
    private static double[] prev = new double[MAX_SEQ_LENGTH];
    private static double[] curr = new double[MAX_SEQ_LENGTH];
    private final static int[][] minDistanceToDiagonal = new int[MAX_SEQ_LENGTH][MAX_SEQ_LENGTH];

    /** Absolute band width for a series of length n given a proportional bandSize. */
    public static int getBandSize(double bandSize, int n) {
        return (int) Math.ceil(bandSize * n);
    }

    /**
     * ERP distance plus the maximum deviation of the warping path from the
     * diagonal (used by FastEE to reuse results across window sizes).
     *
     * NOTE(review): m and n are computed before first/second are potentially
     * swapped and are not recomputed afterwards, and the result reads
     * minDistanceToDiagonal[n - 1][m - 1] while the loops fill [i][j] with
     * i,j < m — verify for unequal-length series. Not thread-safe (static buffers).
     */
    public static WarpingPathResults distanceExt(Instance first, Instance second, final double g, final double bandSize) {
        final int m = first.numAttributes() - 1;
        final int n = second.numAttributes() - 1;
        final int band = getBandSize(bandSize, m);
        double diff, d1, d2, d12, cost;
        int i, j, left, right, absIJ;
        Instance tmp;
        if (n < m) {
            tmp = first;
            first = second;
            second = tmp;
        }
        minDistanceToDiagonal[0][0] = 0;

        for (i = 0; i < m; i++) {
            // Swap current and prev arrays. We'll just overwrite the new curr.
            double[] temp = prev;
            prev = curr;
            curr = temp;
            left = i - (band + 1);
            if (left < 0) {
                left = 0;
            }
            right = i + (band + 1);
            if (right > (m - 1)) {
                right = (m - 1);
            }
            for (j = left; j <= right; j++) {
                absIJ = Math.abs(i - j);
                if (absIJ <= band) {
                    // d1: delete (first[i] vs gap g), d2: insert (g vs second[j]), d12: match
                    diff = first.value(i) - g;
                    d1 = (diff * diff);

                    diff = g - second.value(j);
                    d2 = (diff * diff);

                    diff = first.value(i) - second.value(j);
                    d12 = (diff * diff);

                    if ((i + j) != 0) {
                        if ((i == 0) || ((j != 0) && (((prev[j - 1] + d12) >= (curr[j - 1] + d2)) && ((curr[j - 1] + d2) <= (prev[j] + d1))))) {
                            // del
                            cost = curr[j - 1] + d2;
                            minDistanceToDiagonal[i][j] = Math.max(absIJ, minDistanceToDiagonal[i][j - 1]);
                        } else if (j == 0 || prev[j - 1] + d12 >= prev[j] + d1 && prev[j] + d1 <= curr[j - 1] + d2) {
                            // ins
                            cost = prev[j] + d1;
                            minDistanceToDiagonal[i][j] = Math.max(absIJ, minDistanceToDiagonal[i - 1][j]);
                        } else {
                            // match
                            cost = prev[j - 1] + d12;
                            minDistanceToDiagonal[i][j] = Math.max(absIJ, minDistanceToDiagonal[i - 1][j - 1]);
                        }
                    } else {
                        cost = 0;
                        minDistanceToDiagonal[i][j] = 0;
                    }

                    curr[j] = cost;
                    // steps[i][j] = step;
                } else {
                    curr[j] = Double.POSITIVE_INFINITY; // outside band
                }
            }
        }
        WarpingPathResults resExt = new WarpingPathResults();
        resExt.distance = curr[m - 1];
        resExt.distanceFromDiagonal = minDistanceToDiagonal[n - 1][m - 1];
        return resExt;
    }
}
12,672
33.625683
157
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/EuclideanDistance_D.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; import static utilities.multivariate_tools.MultivariateInstanceTools.splitMultivariateInstance; import weka.core.EuclideanDistance; import weka.core.Instance; import weka.core.Instances; /** * * @author Aaron */ public class EuclideanDistance_D extends EuclideanDistance{ public EuclideanDistance_D(){} public EuclideanDistance_D(Instances train){ super(train); m_Data = null; m_Validated = true; } @Override public double distance(Instance multiSeries1, Instance multiseries2, double cutoff){ //split the instance. Instance[] multi1 = splitMultivariateInstance(multiSeries1); Instance[] multi2 = splitMultivariateInstance(multiseries2); //TODO: might need to normalise here. double[][] data1 = utilities.multivariate_tools.MultivariateInstanceTools.convertMultiInstanceToTransposedArrays(multi1); double[][] data2 = utilities.multivariate_tools.MultivariateInstanceTools.convertMultiInstanceToTransposedArrays(multi2); return Math.sqrt(distance(data1, data2, cutoff)); } public double distance(double[][] a, double[][] b, double cutoff){ //assume a and b are the same length. 
double sum =0; for(int i=0; i<a.length; i++){ sum += sqMultiDist(a[i],b[i]); } return sum; } double sqDist(double a, double b){ return (a-b)*(a-b); } //given each aligned value in the channel. double sqMultiDist(double[] a, double[] b){ double sum = 0; for(int i=0; i<a.length; i++){ sum += sqDist(a[i], b[i]); } return sum; } }
2,540
30.7625
129
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/EuclideanDistance_I.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; import static utilities.multivariate_tools.MultivariateInstanceTools.splitMultivariateInstance; import weka.core.EuclideanDistance; import weka.core.Instance; import weka.core.Instances; /** * * @author Aaron */ public class EuclideanDistance_I extends EuclideanDistance{ public EuclideanDistance_I(){} public EuclideanDistance_I(Instances train){ super(train); m_Data = null; m_Validated = true; } @Override public double distance(Instance multiSeries1, Instance multiseries2, double cutoff){ //split the instance. Instance[] multi1 = splitMultivariateInstance(multiSeries1); Instance[] multi2 = splitMultivariateInstance(multiseries2); //TODO: might need to normalise here. double[][] data1 = utilities.multivariate_tools.MultivariateInstanceTools.convertMultiInstanceToTransposedArrays(multi1); double[][] data2 = utilities.multivariate_tools.MultivariateInstanceTools.convertMultiInstanceToTransposedArrays(multi2); return distance(data1, data2, cutoff); } public double distance(double[][] a, double[][] b, double cutoff){ //assume a and b are the same length. 
double sum =0; for(int i=0; i<a.length; i++){ sum += Math.sqrt(sqMultiDist(a[i],b[i])); } return sum; } double sqDist(double a, double b){ return (a-b)*(a-b); } //given each aligned value in the channel. double sqMultiDist(double[] a, double[] b){ double sum = 0; for(int i=0; i<a.length; i++){ sum += sqDist(a[i], b[i]); } return sum; } }
2,538
31.139241
129
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/LCSSDistance.java
/*
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.distance_functions;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.WarpingPathResults;
import weka.core.EuclideanDistance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.neighboursearch.PerformanceStats;

/**
 * Longest Common Subsequence (LCSS) dissimilarity for time series.
 * Two values are considered a "match" when they are within {@code epsilon} of
 * each other, and matching is restricted to a warping window of width
 * {@code delta} around the diagonal. The returned value is
 * {@code 1 - LCSS/m} (0 = identical under the tolerance, 1 = no common
 * subsequence).
 *
 * @author sjx07ngu
 */
public class LCSSDistance extends EuclideanDistance {
    // Value-matching tolerance: |a[i]-b[j]| <= epsilon counts as a match.
    private double epsilon;
    // Warping window: only pairs with |i-j| <= delta are considered.
    private int delta;

    public LCSSDistance(int delta, double epsilon) {
        this.m_DontNormalize = true;  // operate on raw values, no attribute normalisation
        this.delta = delta;
        this.epsilon = epsilon;
    }

    // NOTE(review): cutOffValue and stats are dropped here; this routes through
    // the inherited two-arg distance, which in turn calls the three-arg
    // override below with an infinite cutoff.
    public double distance(Instance first, Instance second, double cutOffValue, PerformanceStats stats) {
        return distance(first, second);
    }

    /**
     * distance method that converts instances to arrays of doubles,
     * stripping the class attribute (if any) from each instance first.
     *
     * @param first       instance 1
     * @param second      instance 2
     * @param cutOffValue used for early abandon (not used by LCSS below)
     * @return distance between instances
     */
    @Override
    public double distance(Instance first, Instance second, double cutOffValue) {
//        if(this.distanceCount % 10000000 == 0){
//            System.out.println("New Instance: "+this.distanceCount);
//        }
//        this.distanceCount++;
        //remove class index from first instance if there is one
        int firtClassIndex = first.classIndex();
        double[] arr1;
        if (firtClassIndex > 0) {
            arr1 = new double[first.numAttributes() - 1];
            for (int i = 0, j = 0; i < first.numAttributes(); i++) {
                if (i != firtClassIndex) {
                    arr1[j] = first.value(i);
                    j++;
                }
            }
        } else {
            arr1 = first.toDoubleArray();
        }

        //remove class index from second instance if there is one
        int secondClassIndex = second.classIndex();
        double[] arr2;
        if (secondClassIndex > 0) {
            arr2 = new double[second.numAttributes() - 1];
            for (int i = 0, j = 0; i < second.numAttributes(); i++) {
                if (i != secondClassIndex) {
                    arr2[j] = second.value(i);
                    j++;
                }
            }
        } else {
            arr2 = second.toDoubleArray();
        }

        return distance(arr1, arr2, cutOffValue);
    }

    /**
     * calculates the distance between two instances (been converted to arrays)
     *
     * @param first       instance 1 as array
     * @param second      instance 2 as array
     * @param cutOffValue used for early abandon (ignored)
     * @return distance between instances
     */
    public double distance(double[] first, double[] second, double cutOffValue) {
        return distance(first, second);
    }

    /**
     * Windowed LCSS dynamic program.
     *
     * @return 1 - (longest common subsequence length / first.length)
     */
    public double distance(double[] first, double[] second) {
        double[] a = first;
        double[] b = second;

        int m = first.length;
        int n = second.length;

        // lcss[i+1][j+1] = LCSS length of a[0..i] vs b[0..j];
        // lastX/lastY record the predecessor cell (path traceback, currently unused).
        int[][] lcss = new int[m + 1][n + 1];
        int[][] lastX = new int[m + 1][n + 1];
        int[][] lastY = new int[m + 1][n + 1];

        for (int i = 0; i < m; i++) {
            // Only columns within the delta window around the diagonal.
            for (int j = i - delta; j <= i + delta; j++) {
//                System.out.println("here");
                if (j < 0 || j >= n) {
                    //do nothing — outside the valid column range
                } else if (b[j] + this.epsilon >= a[i] && b[j] - epsilon <= a[i]) {
                    // Values match within epsilon: extend the common subsequence.
                    lcss[i + 1][j + 1] = lcss[i][j] + 1;
                    lastX[i + 1][j + 1] = i;
                    lastY[i + 1][j + 1] = j;
                } else if (lcss[i][j + 1] > lcss[i + 1][j]) {
                    // Skip a[i].
                    lcss[i + 1][j + 1] = lcss[i][j + 1];
                    lastX[i + 1][j + 1] = i;
                    lastY[i + 1][j + 1] = j + 1;
                } else {
                    // Skip b[j].
                    lcss[i + 1][j + 1] = lcss[i + 1][j];
                    lastX[i + 1][j + 1] = i + 1;
                    lastY[i + 1][j + 1] = j;
                }
            }
        }

        // Best LCSS over the last row (any end column within the window).
        int max = -1;
        for (int i = 1; i < lcss[lcss.length - 1].length; i++) {
            if (lcss[lcss.length - 1][i] > max) {
                max = lcss[lcss.length - 1][i];
            }
        }
        return 1 - ((double) max / m);
    }

    /** Sample standard deviation of an array. */
    public static double stdv_s(double[] input) {
        double sumx = 0;
        double sumx2 = 0;
        for (int j = 0; j < input.length; j++) {
            sumx += input[j];
            sumx2 += input[j] * input[j];
        }
        int n = input.length;
        double mean = sumx / n;
        double standardDev = Math.sqrt(sumx2 / (n - 1) - mean * mean);
        return standardDev;
    }

    /** Population standard deviation over all non-class attribute values of a dataset. */
    public static double stdv_p(Instances input) {
        double sumx = 0;
        double sumx2 = 0;
        double[] ins2array;
        for (int i = 0; i < input.numInstances(); i++) {
            ins2array = input.instance(i).toDoubleArray();
            for (int j = 0; j < ins2array.length - 1; j++) {//-1 to avoid classVal
                sumx += ins2array[j];
                sumx2 += ins2array[j] * ins2array[j];
            }
        }
        int n = input.numInstances() * (input.numAttributes() - 1);
        double mean = sumx / n;
        return Math.sqrt(sumx2 / (n) - mean * mean);
    }

    /** 10 evenly spaced ints from min to max inclusive (rounded). */
    public static int[] getInclusive10(int min, int max) {
        int[] output = new int[10];
        double diff = (double) (max - min) / 9;
        double[] doubleOut = new double[10];
        doubleOut[0] = min;
        output[0] = min;
        for (int i = 1; i < 9; i++) {
            doubleOut[i] = doubleOut[i - 1] + diff;
            output[i] = (int) Math.round(doubleOut[i]);
        }
        output[9] = max; // to make sure max isn't omitted due to double imprecision
        return output;
    }

    /** 10 evenly spaced doubles from min to max inclusive. */
    public static double[] getInclusive10(double min, double max) {
        double[] output = new double[10];
        double diff = (double) (max - min) / 9;
        output[0] = min;
        for (int i = 1; i < 9; i++) {
            output[i] = output[i - 1] + diff;
        }
        output[9] = max;
        return output;
    }

    /************************************************************************************************
     Support for FastEE
     ************************************************************************************************/

    /**
     * LCSS dissimilarity plus the maximum window deviation actually used by
     * the matching (for FastEE's result reuse across window sizes).
     *
     * NOTE(review): the inner loop mutates its own index (j = -1 to jump to
     * column 0; j = i + delta to terminate the row) — treat with care if
     * refactoring.
     */
    public static WarpingPathResults distanceExt(final Instance first, final Instance second, final double epsilon, final int delta) {
        final int m = first.numAttributes() - 1;
        final int n = second.numAttributes() - 1;

        int i, j, absIJ;
        final int[][] matrixD = new int[m + 1][n + 1];
        final int[][] minDelta = new int[m + 1][n + 1];

        for (i = 0; i < m; i++) {
            for (j = i - delta; j <= i + delta; j++) {
                if (j < 0) {
                    j = -1;           // skip ahead: next iteration starts at column 0
                } else if (j >= n) {
                    j = i + delta;    // past the last column: force loop exit
                } else if (second.value(j) + epsilon >= first.value(i) && second.value(j) - epsilon <= first.value(i)) {
                    // Match within epsilon: extend, record the band width used.
                    absIJ = Math.abs(i - j);
                    matrixD[i + 1][j + 1] = matrixD[i][j] + 1;
                    minDelta[i + 1][j + 1] = Math.max(absIJ, minDelta[i][j]);
                } else if (delta == 0) {
                    // Degenerate window: no skipping possible.
                    matrixD[i + 1][j + 1] = matrixD[i][j];
                    minDelta[i + 1][j + 1] = 0;
                } else if (matrixD[i][j + 1] > matrixD[i + 1][j]) {
                    // Skip first[i].
                    matrixD[i + 1][j + 1] = matrixD[i][j + 1];
                    minDelta[i + 1][j + 1] = minDelta[i][j + 1];
                } else {
                    // Skip second[j].
                    matrixD[i + 1][j + 1] = matrixD[i + 1][j];
                    minDelta[i + 1][j + 1] = minDelta[i + 1][j];
                }
            }
        }

        // Best LCSS over the last row, tracking the band width that achieved it.
        int max = -1, maxR = -1;
        for (i = 1; i < m + 1; i++) {
            if (matrixD[m][i] > max) {
                max = matrixD[m][i];
                maxR = minDelta[m][i];
            }
        }
        WarpingPathResults resExt = new WarpingPathResults();
        resExt.distance = 1.0 - 1.0 * matrixD[m][n] / m;
        resExt.distanceFromDiagonal = maxR;
        return resExt;
    }
}
33.166038
134
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/MSMDistance.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; /* * Move Split Merge distance measure from @ARTICLE{stefan13move-split-merge, AUTHOR = "A. Stefan andf V. Athitsos and G. Das ", TITLE = "The Move-Split-Merge Metric for Time Series", JOURNAL = "{IEEE} TRANSACTIONS ON KNOWLEDGE AND DATA ENGINEERING", YEAR = "2013", VOLUME = "25 ", NUMBER = "6 ", PAGES="1425--1438" } * */ import weka.core.EuclideanDistance; import weka.core.Instance; import weka.core.neighboursearch.PerformanceStats; /** * * @author Chris Rimmer */ public class MSMDistance extends EuclideanDistance{ // c - cost of Split/Merge operation. Change this value to what is more // appropriate for your data. 
double c = 0.1; public MSMDistance(){ super(); this.m_DontNormalize = true; } public MSMDistance(double c){ super(); this.m_DontNormalize = true; this.c = c; } public void setC(double v){c=v;} /** * Distance method * * @param first instance 1 * @param second instance 2 * @param cutOffValue used for early abandon * @param stats * @return distance between instances */ @Override public double distance(Instance first, Instance second, double cutOffValue, PerformanceStats stats){ //Get the double arrays return distance(first,second,cutOffValue); } /** * distance method that converts instances to arrays of doubles * * @param first instance 1 * @param second instance 2 * @param cutOffValue used for early abandon * @return distance between instances */ @Override public double distance(Instance first, Instance second, double cutOffValue){ //remove class index from first instance if there is one int firtClassIndex = first.classIndex(); double[] arr1; if(firtClassIndex > 0){ arr1 = new double[first.numAttributes()-1]; for(int i = 0,j = 0; i < first.numAttributes(); i++){ if(i != firtClassIndex){ arr1[j]= first.value(i); j++; } } }else{ arr1 = first.toDoubleArray(); } //remove class index from second instance if there is one int secondClassIndex = second.classIndex(); double[] arr2; if(secondClassIndex > 0){ arr2 = new double[second.numAttributes()-1]; for(int i = 0,j = 0; i < second.numAttributes(); i++){ if(i != secondClassIndex){ arr2[j]= second.value(i); j++; } } }else{ arr2 = second.toDoubleArray(); } return distance(arr1,arr2,cutOffValue); } /** * calculates the distance between two instances (been converted to arrays) * Exact code from the authors downloaded from * http://omega.uta.edu/~athitsos/msm/ * * @param first instance 1 as array * @param second instance 2 as array * @param cutOffValue used for early abandon * @return distance between instances */ public double MSM_Distance(double[] a, double[] b){ int m, n, i, j; m = a.length; n = b.length; double[][] cost = new 
double[m][n]; // Initialization cost[0][0] = Math.abs(a[0] - b[0]); for (i = 1; i< m; i++) { cost[i][0] = cost[i-1][0] + editCost(a[i], a[i-1], b[0]); } for (j = 1; j < n; j++) { cost[0][j] = cost[0][j-1] + editCost(b[j], a[0], b[j-1]); } // Main Loop for( i = 1; i < m; i++){ for ( j = 1; j < n; j++){ double d1,d2, d3; d1 = cost[i-1][j-1] + Math.abs(a[i] - b[j] ); d2 = cost[i-1][j] + editCost(a[i], a[i-1], b[j]); d3 = cost[i][j-1] + editCost(b[j], a[i], b[j-1]); cost[i][j] = Math.min( d1, Math.min(d2,d3) ); } } // Output return cost[m-1][n-1]; } public double editCost( double new_point, double x, double y){ double dist = 0; if ( ( (x <= new_point) && (new_point <= y) ) || ( (y <= new_point) && (new_point <= x) ) ) { dist = c; } else{ dist = c + Math.min( Math.abs(new_point - x) , Math.abs(new_point - y) ); } return dist; } public double distance(double[] first, double[] second, double cutOffValue){ return MSM_Distance(first,second); } }
5,445
28.759563
104
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/PiecewiseDTW.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ /* * Piecewise DTW distance metric */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; /** * * @author Chris Rimmer */ public class PiecewiseDTW extends BasicDTW { private int frameSize; private double[] reducedDimensionSeries1; private double[] reducedDimensionSeries2; /** * Creates new Piecewise DTW distance metric * * @param frameSize size of frame to split the data * @throws IllegalArgumentException frameSize must be a factor of number of attributes in data */ public PiecewiseDTW(int frameSize) throws IllegalArgumentException{ super(); setup(frameSize); } /** * Setup distance metric * * @param frameSize size of frame to split the data * @throws IllegalArgumentException frameSize must be a factor of number of attributes in data */ private void setup(int frameSize) throws IllegalArgumentException{ if(frameSize < 1){ throw new IllegalArgumentException("Frame Size must be 1 or greater"); } this.frameSize = frameSize; } /** * reduces the data dimensionally in equal sized frames and passes to superclass to calculate distance * * @param first array 1 * @param second array 2 * @param cutOffValue used for early abandon * @return distance between instances */ @Override public double distance(double[] first,double[] second, 
double cutOffValue){ //check can divide into equal parts if(first.length % this.frameSize != 0){ throw new IllegalArgumentException("Frame size must be a factor of the number of attributes"); } //setup arrays int seriesLength = first.length/this.frameSize; this.reducedDimensionSeries1 = new double[seriesLength]; this.reducedDimensionSeries2 = new double[seriesLength]; double series1Frame = 0; double series2Frame = 0; //reduces the dimensionality of the data for(int i = 0, reducedPos = 0; i < first.length; i+=this.frameSize, reducedPos++){ series1Frame = 0; series2Frame = 0; for(int j = i; j < i+this.frameSize; j++){ series1Frame += first[j]; series2Frame += second[j]; } this.reducedDimensionSeries1[reducedPos] = series1Frame/this.frameSize; this.reducedDimensionSeries2[reducedPos] = series2Frame/this.frameSize; } return super.distance(reducedDimensionSeries1, reducedDimensionSeries2, cutOffValue); } /** * Sets the frame size * * @param frameSize size of frame to split the data * @throws IllegalArgumentException frameSize must be a factor of number of attributes in data */ public void setFrameSize(int frameSize) throws IllegalArgumentException{ setup(frameSize); } /** * Gets the current frame size * * @return current frame size */ public int getFrameSize() { return frameSize; } /** * Gets dimensionally reduced series 1 * * @return reduced series 1 */ public double[] getReducedDimensionSeries1() { return reducedDimensionSeries1; } /** * Gets dimensionally reduced series 2 * * @return reduced series 2 */ public double[] getReducedDimensionSeries2() { return reducedDimensionSeries2; } /** * Prints the reduced dimensionality series arrays */ public void printReducedSeries(){ System.out.println("------------------ Reduced Series 1 ------------------"); for(int i = 0; i<this.reducedDimensionSeries1.length; i++){ System.out.print(" "+ reducedDimensionSeries1[i]+"\n"); } System.out.println("------------------ End ------------------"); System.out.println("------------------ 
Reduced Series 2 ------------------"); for(int i = 0; i<this.reducedDimensionSeries2.length; i++){ System.out.print(" "+ reducedDimensionSeries2[i]+"\n"); } System.out.println("------------------ End ------------------"); } @Override public String toString() { return "PiecewiseDTW{ " + "frameSize=" + this.frameSize + ", }"; } }
5,175
31.759494
106
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/SakoeChibaDTW.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ /* * Sakoe Chiba DTW distance metric */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; /** * * @author Chris Rimmer */ public class SakoeChibaDTW extends BasicDTW { // private int bandSize; private double bandPercent; /** * Creates new Sakoe Chiba Distance metric * * @param bandPercent warping window width as a percentage * @throws IllegalArgumentException bandSize must be > 0 */ public SakoeChibaDTW(double bandPercent) throws IllegalArgumentException { super(); setup(bandPercent); } /** * sets up the distance metric * * @param bandSize * @throws IllegalArgumentException */ // private void setup(int bandSize) throws IllegalArgumentException { // if (bandSize < 1) { // throw new IllegalArgumentException("Band Size must be 1 or greater"); // } // // this.bandSize = bandSize; // } private void setup(double bandPercent) throws IllegalArgumentException { if (bandPercent <0 || bandPercent > 1) { throw new IllegalArgumentException("Band Size must be between 0 and 1"); } this.bandPercent = bandPercent; } public int calculateBandSize(int instanceLength){ if(this.bandPercent==0){ return 1; }else{ double width = instanceLength*this.bandPercent; return (int)Math.ceil(width); } } /** * calculates the distance between two 
instances (been converted to arrays) * * @param first instance 1 as array * @param second instance 2 as array * @param cutOffValue used for early abandon * @return distance between instances */ @Override public double distance(double[] first, double[] second, double cutOffValue) { int bandSize = this.calculateBandSize(first.length); //create empty array this.distances = new double[first.length][second.length]; //first value this.distances[0][0] = (first[0] - second[0]) * (first[0] - second[0]); //top row for (int i = 1; i < second.length; i++) { if (i < bandSize) { this.distances[0][i] = this.distances[0][i - 1] + ((first[0] - second[i]) * (first[0] - second[i])); } else { this.distances[0][i] = Double.MAX_VALUE; } } //first column for (int i = 1; i < first.length; i++) { if (i < bandSize) { this.distances[i][0] = this.distances[i - 1][0] + ((first[i] - second[0]) * (first[i] - second[0])); } else { this.distances[i][0] = Double.MAX_VALUE; } } //warp rest double minDistance; // edited by Jay (07/07/15) - cutoff wasn't being used, so added overFlow etc to use early abandon boolean overFlow; for (int i = 1; i < first.length; i++) { overFlow = true; for (int j = 1; j < second.length; j++) { //Checks if i and j are within the band window if (i < j + bandSize && j < i + bandSize) { minDistance = Math.min(this.distances[i][j - 1], Math.min(this.distances[i - 1][j], this.distances[i - 1][j - 1])); //Assign distance this.distances[i][j] = minDistance + ((first[i] - second[j]) * (first[i] - second[j])); } else { this.distances[i][j] = Double.MAX_VALUE; } if(overFlow && this.distances[i][j] < cutOffValue){ overFlow=false; } } if(overFlow){ return Double.MAX_VALUE; } } return this.distances[first.length - 1][second.length - 1]; } /** * Sets the size of the warping window * * @param bandSize band width * @throws IllegalArgumentException */ public void setBandSize(int bandSize) throws IllegalArgumentException { setup(bandSize); } /** * Gets the current warping window width * * 
@return warping window width */ public double getBandPercentage() { return this.bandPercent; } @Override public String toString() { return "SakoeChibaDTW{ " + "bandSize=" + this.bandPercent + "}"; } }
5,228
29.401163
135
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/TAA.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedList; import java.util.Map; import java.util.Stack; import tsml.classifiers.SaveParameterInfo; import static utilities.Utilities.extractTimeSeries; import weka.core.Instance; import weka.core.NormalizableDistance; import weka.core.TechnicalInformation; import weka.core.TechnicalInformationHandler; public class TAA extends NormalizableDistance implements SaveParameterInfo, TechnicalInformationHandler { // WARNING: NOT DEBUGGED. ADD MORE COMMENTS. 
odo summary for each measure / relate to paper // auth // d Itakura Parallelogram private static final TAA_banded TAA = new TAA_banded(); private int k; public TAA(int k, double gPenalty, double tPenalty) { this.k = k; this.gPenalty = gPenalty; this.tPenalty = tPenalty; } private double gPenalty; public int getK() { return k; } public void setK(int k) { this.k = k; } public double getGPenalty() { return gPenalty; } public void setGPenalty(double gPenalty) { this.gPenalty = gPenalty; } public double getTPenalty() { return tPenalty; } public void setTPenalty(double tPenalty) { this.tPenalty = tPenalty; } private double tPenalty; private int[] naturalNumbers(int size) { int[] numbers = new int[size]; for(int i = 0; i < numbers.length; i++) { numbers[i] = i; } return numbers; } protected double measureDistance(double[] timeSeriesA, double[] timeSeriesB, double cutOff) { return TAA.score(timeSeriesA, naturalNumbers(timeSeriesA.length), timeSeriesB, naturalNumbers(timeSeriesB.length), 1, 1, 1); } @Override public String getRevision() { return null; } @Override public TechnicalInformation getTechnicalInformation() { return null; } public static void main(String[] args) { double[] a = new double[] {1,1,2,2,3,3,2,2,1,1}; double[] b = new double[] {1,2,3,2,1,1,1,1,1,2}; int[] aIntervals = new int[] {1,2,3,4,5,6,7,8,9,10}; int[] bIntervals = new int[] {1,2,3,4,5,6,7,8,9,10}; System.out.println(new TAA_banded().score(a,aIntervals,b,bIntervals,2,2,2)); TAA taa = new TAA(2,2,2); System.out.println(taa.distance(a,b)); } @Override public String getParameters() { return "k=" + k + ",tPenalty=" + tPenalty + ",gPenalty=" + gPenalty + ","; } @Override public String toString() { return "TAA"; } /** * measures distance between time series, swapping the two time series so A is always the longest * @param timeSeriesA time series * @param timeSeriesB time series * @param cutOff cut off value to abandon distance measurement early * @return distance between two time series */ public final 
double distance(double[] timeSeriesA, double[] timeSeriesB, double cutOff) { if(timeSeriesA.length < timeSeriesB.length) { double[] temp = timeSeriesA; timeSeriesA = timeSeriesB; timeSeriesB = temp; } return measureDistance(timeSeriesA, timeSeriesB, cutOff); } /** * measures distance between time series, swapping the two time series so A is always the longest * @param timeSeriesA time series * @param timeSeriesB time series * @return distance between two time series */ public final double distance(double[] timeSeriesA, double[] timeSeriesB) { return distance(timeSeriesA, timeSeriesB, Double.POSITIVE_INFINITY); } /** * find distance between two instances * @param instanceA first instance * @param instanceB second instance * @return distance between the two instances */ public final double distance(Instance instanceA, Instance instanceB) { return distance(instanceA, instanceB, Double.POSITIVE_INFINITY); } /** * find distance between two instances * @param instanceA first instance * @param instanceB second instance * @param cutOff cut off value to abandon distance measurement early * @return distance between the two instances */ public final double distance(Instance instanceA, Instance instanceB, double cutOff) { return measureDistance(extractTimeSeries(instanceA), extractTimeSeries(instanceB), cutOff); } @Override public String globalInfo() { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. } @Override protected double updateDistance(double currDist, double diff) { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. 
} private static class TAA_banded { public static final char VERTICAL = 'v'; public static final char DIAGONAL = 'd'; public static final char HORIZONTAL = 'h'; public static final double t_event = 116; public double score(double[] seqA, int[] sA, double[] seqB, int[] sB, double gPenalty, double tPenalty, int K) { // the event 't' in seqA and in seqB is represented as the value 116 (which is the ascii for t int m = seqA.length; int n = seqB.length; double[][] V = new double[m + 1][n + 1]; // Score matrix double[][] G = new double[m + 1][n + 1]; // Match matrix double[][] E = new double[m + 1][n + 1]; // Horizontal gap Matrix double[][] F = new double[m + 1][n + 1]; // Vertical gap Matrix LinkedList<Integer> EPointers = new LinkedList<Integer>(); EPointers.addFirst(0); ArrayList<LinkedList<Integer>> FPointers = new ArrayList<LinkedList<Integer>>(); LinkedList<Integer> EBlockList = new LinkedList<Integer>(); EBlockList.addFirst(n); ArrayList<LinkedList<Integer>> FBlockList = new ArrayList<LinkedList<Integer>>(); for (int i = 0; i <= n; i++) { LinkedList<Integer> list = new LinkedList<Integer>(); list.addFirst(m); FBlockList.add(list); LinkedList<Integer> pointerList = new LinkedList<Integer>(); pointerList.addFirst(0); FPointers.add(pointerList); } // init V for (int i = 0; i <= m; i++) { // start end seqLabels, seqNoTimeMap, delimiter V[i][0] = Integer.MAX_VALUE; E[i][0] = Integer.MAX_VALUE; } // init V int I = 1; for (int j = 0; j <= n; j++) { V[0][j] = Integer.MAX_VALUE; F[0][j] = Integer.MAX_VALUE; if (j != 0 && j < K) { I = 1; } else { I = I + 1; } E[I][j] = Integer.MAX_VALUE; F[I][j] = Integer.MAX_VALUE; G[I][j] = Integer.MAX_VALUE; V[I][j] = Integer.MAX_VALUE; } for (int i = 0; i <= m; i++) { E[i][Math.max(1, i - K - 1)] = Integer.MAX_VALUE; F[i][Math.max(1, i - K - 1)] = Integer.MAX_VALUE; G[i][Math.max(1, i - K - 1)] = Integer.MAX_VALUE; V[i][Math.max(1, i - K - 1)] = Integer.MAX_VALUE; } V[0][0] = 0; for (int i = 1; i <= m; i++) { // reset E Pointers 
EPointers.clear(); EPointers.addFirst(Math.max(1, i - K - 1)); // initially all will point to 0 index //reset E Intervals EBlockList.clear(); EBlockList.addFirst(n); // initially there is only one interval all the way to the end for (int j = Math.max(1, i - K); j <= Math.min(i + K, n); j++) { int k = EPointers.getFirst(); int fK = FPointers.get(j).getFirst(); E[i][j] = CandR(k, j, V, i, sB, gPenalty, tPenalty); F[i][j] = CandC(fK, i, V, j, sA, gPenalty, tPenalty); if (seqA[i - 1] == t_event && seqB[j - 1] == t_event) { G[i][j] = V[i - 1][j - 1]; } else if (seqA[i - 1] == t_event || seqB[j - 1] == t_event) { G[i][j] = Integer.MAX_VALUE; } else { // the events are a match G[i][j] = V[i - 1][j - 1] + Math.abs(seqA[i - 1] - seqB[j - 1]); //match case: penalty is the norm; for timed-event sequences, you can use a scoring matrix } V[i][j] = Math.min(E[i][j], Math.min(F[i][j], G[i][j])); int jPrime = EBlockList.getFirst(); int jPointer = EPointers.getFirst(); if (j != n && CandR(jPointer, j + 1, V, i, sB, gPenalty, tPenalty) > CandR(j, j + 1, V, i, sB, gPenalty, tPenalty)) { // if the candidate from j wins // j's candidate wins while (!EBlockList.isEmpty() && CandR(jPointer, jPrime, V, i, sB, gPenalty, tPenalty) > CandR(j, jPrime, V, i, sB, gPenalty, tPenalty)) { // if j keeps winning EBlockList.removeFirst(); EPointers.removeFirst(); if (!EBlockList.isEmpty()) { jPrime = EBlockList.getFirst(); } } if (EBlockList.isEmpty()) { // if the candidate from j is the best to the end EBlockList.addFirst(n); // you have one contiguous block from j through to n } else { // figure out where the candidate from j stops being the best int BsRow = EPointers.getFirst(); //Analytically figure out when the candidate from j will stop winning // for logarithmic functions, you can compute when one of them will overtake the other double eC = Math.exp(V[i][j] - V[i][BsRow]) / tPenalty; int d = (int) Math.ceil((BsRow - j) / (1 - eC)) - 1; //d is the offset of cells at which j's candidate is no 
longer better if (seqB[j - 1] == t_event) { d = d * 2; // you have to account for the static events in between } else { d = d * 2 - 1; // you have to account for the static events in between } // Time series format: A t1 A t A t. if (d > 0) { int p = j + d; // p is the cell at which the candidate from j is no longer better if (p <= n) { EBlockList.addFirst(p); } } } EPointers.addFirst(j); } int iPrime = FBlockList.get(j).getFirst(); int iPointer = FPointers.get(j).getFirst(); if (i != m && CandC(iPointer, i + 1, V, j, sA, gPenalty, tPenalty) > CandC(i, i + 1, V, j, sA, gPenalty, tPenalty)) { while (!FBlockList.get(j).isEmpty() && CandC(iPointer, iPrime, V, j, sA, gPenalty, tPenalty) > CandC(i, iPrime, V, j, sA, gPenalty, tPenalty)) { int removedItem = FBlockList.get(j).removeFirst(); int removedPointer = FPointers.get(j).removeFirst(); if (!FBlockList.get(j).isEmpty()) { iPrime = FBlockList.get(j).getFirst(); } } if (FBlockList.get(j).isEmpty()) { FBlockList.get(j).addFirst(m); } else { int BsCol = FPointers.get(j).getFirst(); //Find the point at which j's candidate overtakes the candidate from B_s // for logarithmic functions, you can compute when one of them will overtake the other double eC = Math.exp(V[i][j] - V[BsCol][j]) / tPenalty; int d = (int) Math.ceil((BsCol - i) / (1 - eC)) - 1; //d is the offset of cells after which j's candidate is no longer better if (seqB[j - 1] == t_event) { d = d * 2; // you have to account for the static events in between } else { d = d * 2 - 1; // you have to account for the static events in between } // d is the offset of number of cells after which j's candidate stops winning if (d > 0) { int p = j + d; // cell p is the cell where j's candidate stops winning if (p <= n) { FBlockList.get(j).addFirst(p); // } } } FPointers.get(j).addFirst(i); } } } return V[m][n]; } /** * * @param k cell that sends candidates * @param j this is destination cell * @param V Reference for the V matrix * @param row this is row number. 
* @return */ private double CandR(int k, int j, double[][] V, int row, int[] s, double gPenalty, double tPenalty) { return V[row][k] + W(k, j, Math.abs(s[k] - s[j]), gPenalty, tPenalty); } /** * * @param k row that sends candidates * @param i row that receives candidates * @param V reference to V matrix * @param col column number * @return */ private double CandC(int k, int i, double[][] V, int col, int[] s, double gPenalty, double tPenalty) { return V[k][col] + W(k, i, Math.abs(s[k] - s[i]), gPenalty, tPenalty); } // W is the penalty function and requires the nmber of public double W(int k, int i, int Nstatic, double gPenalty, double tPenalty) { //s is the number of static events. compute penalty double penalty = tPenalty * Math.log(Math.abs(i - k) - Nstatic + 1) + Nstatic * gPenalty; if (i == k) { // if the alignment is from the current cell to the current cell, then there is no penalty penalty = 0; } return penalty; } private String getString(Stack<String> stack) { StringBuilder seqResult = new StringBuilder(); while (!stack.isEmpty()) { seqResult.append(stack.pop() + " "); } return seqResult.toString().trim(); } private String generateGap(int length) { char[] fill = new char[length]; Arrays.fill(fill, '-'); return new String(fill); } private static String[] generateTime(int length) { String[] f = new String[length]; Arrays.fill(f, "t"); return f; } private int[] getIndexSequence(String[] seq, Map<String, Integer> map) { int[] indexSeq = new int[seq.length]; for (int i = 0; i < seq.length; i++) { indexSeq[i] = map.get(seq[i]); } return indexSeq; } private static void printScoreMatrix(double[][] matrix) { for (int i = 0; i < matrix.length; i++) { for (int j = 0; j < matrix[0].length; j++) { //System.out.print("" + matrix[i][j] + " "); System.out.print(""); if (matrix[i][j] >= 0) { System.out.print("+"); } System.out.printf("%.8f", matrix[i][j]); System.out.print(" "); } System.out.println(); } } private static void addSeqEventsToScoreMatrix(String[] seq, 
Map<String, Integer> map) { // for a single sequence, this adds the events to the labels of the scoring matrix for (int i = 0; i < seq.length; i++) { if (!map.containsKey(seq[i])) { map.put(seq[i], map.size()); } } } private static String[] seqRemoveTiming(String seq, Map<String, Integer> map) { //remove timing values from the string if there are timing values and add the events to the labels for the score matrix String[] chunks = seq.split(" "); String[] sbSeq = new String[chunks.length]; for (int i = 0; i < chunks.length; i++) { String chunk = chunks[i]; String[] planAndTime = chunk.split("\\."); sbSeq[i] = planAndTime[0]; } return sbSeq; } private static String[] seqAddTimings(String seq, int[] staticEventCount, int seqL) { //remove timing values from the string if there are timing values and add the events to the labels for the score matrix String[] chunks = seq.split(" "); String event; int time; String[] newSeq = new String[seqL]; // because we don't know final size int j; int staticCount = 0; // raw index in the array int counter = 0; staticEventCount[0] = 0; // the first row and column of matrix should get a static event count of 0 for (int i = 0; i < chunks.length; i++) { String[] planAndTime = chunks[i].split("\\."); event = planAndTime[0]; time = Integer.parseInt(planAndTime[1]); // amount of time newSeq[counter] = event; // add the event counter = counter + 1; staticCount = staticCount + 1; //Number of static events staticEventCount[counter] = staticCount; for (j = 0; j < time; j++) { //j is the time number; newSeq[counter] = "t"; counter = counter + 1; staticEventCount[counter] = staticCount; } } return newSeq; } private static ArrayList<String> seqIncludeTimingEvents(String seq, Map<Integer, int[]> tMap, Map<Integer, Integer> eMap) { //remove timing values from the string if there are timing values and add the events to the labels for the score matrix String[] chunks = seq.split(" "); String[] sbSeq = new String[chunks.length]; String event; int time; 
ArrayList<String> newSeq = new ArrayList<String>(); // because we don't know final size int tCounts = 1; int j; int rawIndex = 0; // raw index in the array for (int i = 0; i < chunks.length; i++) { String chunk = chunks[i]; String[] planAndTime = chunk.split("\\."); event = planAndTime[0]; newSeq.add(event); // add the event rawIndex = rawIndex + 1; //Raw Array Index eMap.put(rawIndex, tCounts); //for each static event, store the index of the upcoming timing event so it can be used for calculating timing penalties... time = Integer.parseInt(planAndTime[1]); // amount of time for (j = tCounts; j < tCounts + time; j++) { //j is the time number; rawIndex = rawIndex + 1; //count each timing event //map each timing event to a key in the array sequence tMap.put(j, new int[]{i + j, i + 1}); // index of timing event in sequence, second argument is number of static events so far. newSeq.add(Integer.toString(j)); } tCounts = tCounts + time; } System.out.println("rawIndex = " + Integer.toString(rawIndex)); return newSeq; } } }
21,648
40.632692
183
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/TWEDistance.java
/* * Java distance measure derived from * Filename: code-twed.c source code for the Time Warp Edit Distance in ansi C. Author: Pierre-Francois Marteau Version: V1.1 du 10/3/2010 Licence: GPL ****************************************************************** This software and description is free delivered "AS IS" with no guaranties for work at all. Its up to you testing it modify it as you like, but no help could be expected from me due to lag of time at the moment. I will answer short relevant questions and help as my time allow it. I have tested it played with it and found no problems in stability or malfunctions so far. Have fun. ***************************************************************** Please cite as: @article{Marteau:2009:TWED, author = {Marteau, Pierre-Francois}, title = {Time Warp Edit Distance with Stiffness Adjustment for Time Series Matching}, journal = {IEEE Trans. Pattern Anal. Mach. Intell.}, issue_date = {February 2009}, volume = {31}, number = {2}, month = feb, year = {2009}, issn = {0162-8828}, pages = {306--318},, } * Original code was structured to work on a set of time series and * had an extra parameter for the power of the pointwise distance measure * We implement it pairwise and assume Euclidean distance, for simplicity * and equivalence to the other measures. */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; import weka.core.EuclideanDistance; import weka.core.Instance; import weka.core.neighboursearch.PerformanceStats; /* * TWE has two parameters: double nu and lambda * nu controls the "stiffness". It is a multiplicative penalty * on the distance between matched points. 0 gives no weighting (full DTW), * infinity gives Euclidean distance. * lambda is a constant penalty for the * amount of shrinkage caused be the delete operation. * Ranges for parameters are 0... infinity. 
* * from the paper: "stiffness value nu is selected from * 10^-5; 10^-4; 10^-3; 10^-2; 10^-1; 1 and  lambda is selected from 0; .25; .5; .75; 1.0. */ public class TWEDistance extends EuclideanDistance{ double nu=1; double lambda=1; double degree=2; public void setNu(double n){nu=n;} public void setLambda(double n){lambda=n;} public TWEDistance(){ super(); this.m_DontNormalize = true; } public TWEDistance(double nu, double lambda){ super(); this.m_DontNormalize = true; this.nu = nu; this.lambda = lambda; } /** * Distance method * * @param first instance 1 * @param second instance 2 * @param cutOffValue used for early abandon * @param stats * @return distance between instances */ @Override public double distance(Instance first, Instance second, double cutOffValue, PerformanceStats stats){ //Get the double arrays return distance(first,second,cutOffValue); } /** * distance method that converts instances to arrays of doubles * * @param first instance 1 * @param second instance 2 * @param cutOffValue used for early abandon * @return distance between instances */ @Override public double distance(Instance first, Instance second, double cutOffValue) { //remove class index from first instance if there is one int firtClassIndex = first.classIndex(); double[] arr1; if(firtClassIndex > 0){ arr1 = new double[first.numAttributes()-1]; for(int i = 0,j = 0; i < first.numAttributes(); i++){ if(i != firtClassIndex){ arr1[j]= first.value(i); j++; } } }else{ arr1 = first.toDoubleArray(); } //remove class index from second instance if there is one int secondClassIndex = second.classIndex(); double[] arr2; if(secondClassIndex > 0){ arr2 = new double[second.numAttributes()-1]; for(int i = 0,j = 0; i < second.numAttributes(); i++){ if(i != secondClassIndex){ arr2[j]= second.value(i); j++; } } }else{ arr2 = second.toDoubleArray(); } return distance(arr1,arr2,cutOffValue); } /** * calculates the distance between two instances (been converted to arrays) * Altered c code from the authors 
downloaded from * * http://www-irisa.univ-ubs.fr/Pierre-Francois.Marteau/TWED/code-twed.c * @param first instance 1 as array * @param second instance 2 as array * * @return distance between instances */ public double TWE_Distance(double[] a, double[] b){ /*This code is faithful to the c version, so uses a redundant * Multidimensional representation. The c code does not describe what the arguments * tsB and tsA are. We assume they are the time stamps (i.e. index sets), * and initialise them accordingly. */ int dim=1; double dist, disti1, distj1; double[][] ta=new double[a.length][dim]; double[][] tb=new double[a.length][dim]; double[] tsa=new double[a.length]; double[] tsb=new double[b.length]; for(int i=0;i<tsa.length;i++) tsa[i]=(i+1); for(int i=0;i<tsb.length;i++) tsb[i]=(i+1); int r = ta.length; int c = tb.length; int i,j,k; //Copy over values for(i=0;i<a.length;i++) ta[i][0]=a[i]; for(i=0;i<b.length;i++) tb[i][0]=b[i]; /* allocations in c double **D = (double **)calloc(r+1, sizeof(double*)); double *Di1 = (double *)calloc(r+1, sizeof(double)); double *Dj1 = (double *)calloc(c+1, sizeof(double)); for(i=0; i<=r; i++) { D[i]=(double *)calloc(c+1, sizeof(double)); } */ double [][]D = new double[r+1][c+1]; double[] Di1 = new double[r+1]; double[] Dj1 = new double[c+1]; // local costs initializations for(j=1; j<=c; j++) { distj1=0; for(k=0; k<dim; k++) if(j>1){ //CHANGE AJB 8/1/16: Only use power of 2 for speed up, distj1+=(tb[j-2][k]-tb[j-1][k])*(tb[j-2][k]-tb[j-1][k]); // OLD VERSION distj1+=Math.pow(Math.abs(tb[j-2][k]-tb[j-1][k]),degree); // in c: distj1+=pow(fabs(tb[j-2][k]-tb[j-1][k]),degree); } else distj1+=tb[j-1][k]*tb[j-1][k]; //OLD distj1+=Math.pow(Math.abs(tb[j-1][k]),degree); Dj1[j]=(distj1); } for(i=1; i<=r; i++) { disti1=0; for(k=0; k<dim; k++) if(i>1) disti1+=(ta[i-2][k]-ta[i-1][k])*(ta[i-2][k]-ta[i-1][k]); // OLD disti1+=Math.pow(Math.abs(ta[i-2][k]-ta[i-1][k]),degree); else disti1+=(ta[i-1][k])*(ta[i-1][k]); //OLD 
disti1+=Math.pow(Math.abs(ta[i-1][k]),degree); Di1[i]=(disti1); for(j=1; j<=c; j++) { dist=0; for(k=0; k<dim; k++){ dist+=(ta[i-1][k]-tb[j-1][k])*(ta[i-1][k]-tb[j-1][k]); // dist+=Math.pow(Math.abs(ta[i-1][k]-tb[j-1][k]),degree); if(i>1&&j>1) dist+=(ta[i-2][k]-tb[j-2][k])*(ta[i-2][k]-tb[j-2][k]); // dist+=Math.pow(Math.abs(ta[i-2][k]-tb[j-2][k]),degree); } D[i][j]=(dist); } }// for i // border of the cost matrix initialization D[0][0]=0; for(i=1; i<=r; i++) D[i][0]=D[i-1][0]+Di1[i]; for(j=1; j<=c; j++) D[0][j]=D[0][j-1]+Dj1[j]; double dmin, htrans, dist0; int iback; for (i=1; i<=r; i++){ for (j=1; j<=c; j++){ htrans=Math.abs((tsa[i-1]-tsb[j-1])); if(j>1&&i>1) htrans+=Math.abs((tsa[i-2]-tsb[j-2])); dist0=D[i-1][j-1]+nu*htrans+D[i][j]; dmin=dist0; if(i>1) htrans=((tsa[i-1]-tsa[i-2])); else htrans=tsa[i-1]; dist=Di1[i]+D[i-1][j]+lambda+nu*htrans; if(dmin>dist){ dmin=dist; } if(j>1) htrans=(tsb[j-1]-tsb[j-2]); else htrans=tsb[j-1]; dist=Dj1[j]+D[i][j-1]+lambda+nu*htrans; if(dmin>dist){ dmin=dist; } D[i][j] = dmin; } } dist = D[r][c]; return dist; } public double distance(double[] first, double[] second, double cutOffValue){ return TWE_Distance(first,second); } }
8,880
30.052448
96
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/distance_functions/WeightedDTW.java
/* * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ /* * A simple DTW algorithm that computes the warped path with no constraints * */ package tsml.classifiers.legacy.elastic_ensemble.distance_functions; import weka.core.Instance; import weka.core.neighboursearch.PerformanceStats; /** * * @author Jason Lines (adapted from BasicDTW) */ public class WeightedDTW extends BasicDTW{ protected double[][] distances; protected boolean isEarlyAbandon; private static final double WEIGHT_MAX = 1; private double g; // "empirical constant that controls the curvature (slope) of the function private double[] weightVector; // initilised on first distance call // private int distanceCount = 0; /** * BasicDTW Constructor * * Early Abandon Disabled */ public WeightedDTW(){ super(); this.g = 0; this.weightVector = null; this.m_DontNormalize = true; this.isEarlyAbandon = false; } public WeightedDTW(double g){ super(); this.g = g; this.weightVector = null; this.m_DontNormalize = true; this.isEarlyAbandon = false; } public WeightedDTW(double g, double[] weightVector){ super(); this.g = g; this.weightVector = weightVector; this.m_DontNormalize = true; this.isEarlyAbandon = false; } /** * BasicDTW Constructor that allows enabling of early abandon * * @param earlyAbandon boolean value setting if early abandon is 
enabled */ public WeightedDTW(boolean earlyAbandon) { super(); this.g = 0; this.weightVector = null; this.isEarlyAbandon = earlyAbandon; this.m_DontNormalize = true; } public WeightedDTW(double g, boolean earlyAbandon) { super(); this.g = g; this.weightVector = null; this.isEarlyAbandon = earlyAbandon; this.m_DontNormalize = true; } /** * Distance method * * @param first instance 1 * @param second instance 2 * @param cutOffValue used for early abandon * @param stats * @return distance between instances */ @Override public double distance(Instance first, Instance second, double cutOffValue, PerformanceStats stats){ //Get the double arrays return distance(first,second,cutOffValue); } /** * distance method that converts instances to arrays of doubles * * @param first instance 1 * @param second instance 2 * @param cutOffValue used for early abandon * @return distance between instances */ @Override public double distance(Instance first, Instance second, double cutOffValue){ // if(this.distanceCount % 10000000 == 0){ // System.out.println("New Instance: "+this.distanceCount); // } // this.distanceCount++; //remove class index from first instance if there is one int firtClassIndex = first.classIndex(); double[] arr1; if(firtClassIndex > 0){ arr1 = new double[first.numAttributes()-1]; for(int i = 0,j = 0; i < first.numAttributes(); i++){ if(i != firtClassIndex){ arr1[j]= first.value(i); j++; } } }else{ arr1 = first.toDoubleArray(); } //remove class index from second instance if there is one int secondClassIndex = second.classIndex(); double[] arr2; if(secondClassIndex > 0){ arr2 = new double[second.numAttributes()-1]; for(int i = 0,j = 0; i < second.numAttributes(); i++){ if(i != secondClassIndex){ arr2[j]= second.value(i); j++; } } }else{ arr2 = second.toDoubleArray(); } return distance(arr1,arr2,cutOffValue); } /** * calculates the distance between two instances (been converted to arrays) * * @param first instance 1 as array * @param second instance 2 as array * @param 
cutOffValue used for early abandon * @return distance between instances */ public double distance(double[] first, double[] second, double cutOffValue){ if(this.weightVector==null){ this.initWeights(first.length); } //create empty array this.distances = new double[first.length][second.length]; //first value this.distances[0][0] = this.weightVector[0]*(first[0]-second[0])*(first[0]-second[0]); //early abandon if first values is larger than cut off if(this.distances[0][0] > cutOffValue && this.isEarlyAbandon){ return Double.MAX_VALUE; } //top row for(int i=1;i<second.length;i++){ this.distances[0][i] = this.distances[0][i-1]+this.weightVector[i]*(first[0]-second[i])*(first[0]-second[i]); //edited by Jay } //first column for(int i=1;i<first.length;i++){ this.distances[i][0] = this.distances[i-1][0]+this.weightVector[i]*(first[i]-second[0])*(first[i]-second[0]); //edited by Jay } //warp rest double minDistance; for(int i = 1; i<first.length; i++){ boolean overflow = true; for(int j = 1; j<second.length; j++){ //calculate distances minDistance = Math.min(this.distances[i][j-1], Math.min(this.distances[i-1][j], this.distances[i-1][j-1])); this.distances[i][j] = minDistance+this.weightVector[Math.abs(i-j)] *(first[i]-second[j])*(first[i]-second[j]); //edited by Jay if(overflow && this.distances[i][j] < cutOffValue){ overflow = false; // because there's evidence that the path can continue } // // if(minDistance > cutOffValue && this.isEarlyAbandon){ // this.distances[i][j] = Double.MAX_VALUE; // }else{ // this.distances[i][j] = minDistance+this.weightVector[Math.abs(i-j)] *(first[i]-second[j])*(first[i]-second[j]); //edited by Jay // overflow = false; // } } //early abandon if(overflow && this.isEarlyAbandon){ return Double.MAX_VALUE; } } return this.distances[first.length-1][second.length-1]; } // // Added by Jay for weighted DTW // private double getWeight(int seriesLength, int i, int j){ // return WEIGHT_MAX/(1+Math.exp(-g*(Math.abs(i-j)-(double)seriesLength/2))); // } 
private void initWeights(int seriesLength){ this.weightVector = new double[seriesLength]; double halfLength = (double)seriesLength/2; for(int i = 0; i < seriesLength; i++){ weightVector[i] = WEIGHT_MAX/(1+Math.exp(-g*(i-halfLength))); } } public static double[] calculateWeightVector(int seriesLength, double g){ double[] weights = new double[seriesLength]; double halfLength = (double)seriesLength/2; for(int i = 0; i < seriesLength; i++){ weights[i] = WEIGHT_MAX/(1+Math.exp(-g*(i-halfLength))); } return weights; } /** * Generates a string of the minimum cost warp path * * Distances array must be populated through use of distance method * * @return Path */ public String printMinCostWarpPath(){ return findPath(this.distances.length-1, this.distances[0].length-1); } /** * Recursive method that finds and prints the minimum warped path * * @param i position in distances, should be max of series * @param j position in distances, should be max of series * * @return current position */ protected String findPath(int i, int j){ double prevDistance = this.distances[i][j]; int oldI = i; int oldJ = j; //final condition if(i != 0 || j != 0){ //decrementing i and j if(i > 0 && j > 0){ double min = Math.min(this.distances[i-1][j], Math.min(this.distances[i-1][j-1], this.distances[i][j-1])); if(this.distances[i-1][j-1] == min){ i--; j--; }else if(this.distances[i-1][j] == min){ i--; }else if(this.distances[i][j-1] == min){ j--; } }else if(j > 0){ j--; }else if(i > 0){ i--; } //recursive step return "("+oldI+","+oldJ+") = "+prevDistance+"\n" + findPath(i,j); }else{ return "("+oldI+","+oldJ+") = "+prevDistance+"\n"; } } /** * returns the Euclidean distances array * * @return double[][] distances */ public double[][] getDistanceArray(){ return this.distances; } /** * This will print the diagonal route with no warping */ public void printDiagonalRoute(){ System.out.println("------------------ Diagonal Route ------------------"); for(int i = this.distances.length-1; i >= 0; i--){ 
System.out.print(this.distances[i][i]+" "); } System.out.println("\n------------------ End ------------------"); } /** * Prints the distances array as a table */ public void printDistances(){ System.out.println("------------------ Distances Table ------------------"); for(int i = 0; i<this.distances.length; i++){ System.out.print("Row ="+i+" = "); for(int j = 0; j<this.distances[0].length; j++){ System.out.print(" "+ distances[i][j]); } System.out.print("\n"); } System.out.println("------------------ End ------------------"); } /** * Check if early abandon enabled * * @return early abandon enabled */ public boolean isEarlyAbandon() { return isEarlyAbandon; } /** * Set early abandon * * @param isEarlyAbandon value for early abandon */ public void setIsEarlyAbandon(boolean isEarlyAbandon) { this.isEarlyAbandon = isEarlyAbandon; } @Override public String toString() { return "BasicDTW{ " + "earlyAbandon=" + this.isEarlyAbandon + " }"; } /************************************************************************************************ Support for FastEE ************************************************************************************************/ private final static int MAX_SEQ_LENGTH = 4000; private final static double[][] matrixD = new double[MAX_SEQ_LENGTH][MAX_SEQ_LENGTH]; public static double distance(final Instance first, final Instance second, final double[] weightVector) { final int m = first.numAttributes()-1; final int n = second.numAttributes()-1; double diff; double minDistance; int i, j; //first value diff = first.value(0) - second.value(0); matrixD[0][0] = weightVector[0] * diff * diff; //first column for (i = 1; i < m; i++) { diff = first.value(i) - second.value(0); matrixD[i][0] = matrixD[i - 1][0] + weightVector[i] * diff * diff; } //top row for (j = 1; j < n; j++) { diff = first.value(0) - second.value(j); matrixD[0][j] = matrixD[0][j - 1] + weightVector[j] * diff * diff; } //warp rest for (i = 1; i < m; i++) { for (j = 1; j < n; j++) { //calculate 
distances minDistance = Math.min(matrixD[i][j - 1], Math.min(matrixD[i - 1][j], matrixD[i - 1][j - 1])); diff = first.value(i) - second.value(j); matrixD[i][j] = minDistance + weightVector[Math.abs(i - j)] * diff * diff; } } return matrixD[m - 1][n - 1]; } }
13,073
31.766917
149
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/CandidateNN.java
/*
 * Copyright (C) 2019 Chang Wei Tan
 *
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble;

/**
 * This is a class for a candidate nearest neighbour that is used in FastEE.
 * It stores some of the meta data of the nearest neighbour that will make the NN search more efficient.
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class CandidateNN implements Comparable<CandidateNN> {
    public enum Status {
        NN, // This is the Nearest Neighbour
        BC, // Best Candidate so far
    }

    public int index;       // Index of the sequence in train[]
    public int r;           // Window validity
    public double distance; // Computed lower bound
    private Status status;

    /**
     * Creates an empty candidate: invalid index, maximal window
     * ("haven't found yet"), infinite distance ("not computed yet"),
     * and BC status (no NN found yet).
     */
    public CandidateNN() {
        this.index = Integer.MIN_VALUE;           // Will be an invalid, negative, index.
        this.r = Integer.MAX_VALUE;               // Max: stands for "haven't found yet"
        this.distance = Double.POSITIVE_INFINITY; // Infinity: stands for "not computed yet".
        this.status = Status.BC;                  // By default, we don't have any found NN.
    }

    /**
     * Sets all the candidate's fields.
     *
     * @param index index of the sequence in train[]
     * @param r window validity
     * @param distance computed distance / lower bound
     * @param status NN or BC
     */
    public void set(final int index, final int r, final double distance, final Status status) {
        this.index = index;
        this.r = r;
        this.distance = distance;
        this.status = status;
    }

    /**
     * Sets the candidate's fields with an unknown window validity (r = -1).
     *
     * @param index index of the sequence in train[]
     * @param distance computed distance / lower bound
     * @param status NN or BC
     */
    public void set(final int index, final double distance, final Status status) {
        this.index = index;
        this.r = -1;
        this.distance = distance;
        this.status = status;
    }

    /** @return true when this candidate is the confirmed nearest neighbour */
    public boolean isNN() {
        return this.status == Status.NN;
    }

    @Override
    public String toString() {
        return "" + this.index;
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        CandidateNN that = (CandidateNN) o;
        return index == that.index;
    }

    // FIX: equals was overridden without hashCode, breaking the
    // equals/hashCode contract (e.g. in HashSet/HashMap). Equality is
    // based on index alone, so hash on index alone.
    @Override
    public int hashCode() {
        return index;
    }

    /**
     * Orders candidates by distance (ascending).
     */
    @Override
    public int compareTo(CandidateNN potentialNN) {
        return Double.compare(this.distance, potentialNN.distance);
    }
}
2,886
33.369048
104
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/WarpingPathResults.java
/*
 * Copyright (C) 2019 Chang Wei Tan
 *
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble;

/**
 * Simple result holder for a DTW computation: the distance itself together
 * with the smallest warping window that would still produce that distance.
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class WarpingPathResults {
    public double distance;           // the computed DTW distance
    public int distanceFromDiagonal;  // the smallest window that would give the same distance

    /** Creates an empty result; both fields keep their default zero values. */
    public WarpingPathResults() {
    }

    /**
     * @param d the computed DTW distance
     * @param distanceFromDiagonal the smallest window that yields the same distance
     */
    public WarpingPathResults(double d, int distanceFromDiagonal) {
        this.distanceFromDiagonal = distanceFromDiagonal;
        this.distance = d;
    }
}
1,348
31.902439
92
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/assessingNN/LazyAssessNN.java
/* * Copyright (C) 2019 Chang Wei Tan * * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN; import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache; import weka.core.Instance; /** * A super class to access if a query is the nearest neighbour of a reference time series, given a cache. 
* * @author Chang Wei Tan (chang.tan@monash.edu) */ public abstract class LazyAssessNN implements Comparable<LazyAssessNN> { public int indexQuery; public int indexReference; // Index for query and reference SequenceStatsCache cache; // Cache to store the information for the sequences Instance query, reference; // Query and reference sequences int indexStoppedLB, oldIndexStoppedLB; // Index where we stop LB double minDist; // distance double bestMinDist; // best so far distance LBStatus status; // Status of Lower Bound public LazyAssessNN(final Instance query, final int index, final Instance reference, final int indexReference, final SequenceStatsCache cache) { if (index < indexReference) { this.query = query; this.indexQuery = index; this.reference = reference; this.indexReference = indexReference; } else { this.query = reference; this.indexQuery = indexReference; this.reference = query; this.indexReference = index; } this.minDist = 0.0; this.cache = cache; this.bestMinDist = minDist; } public LazyAssessNN(final SequenceStatsCache cache) { this.cache = cache; } public abstract void set(final Instance query, final int index, final Instance reference, final int indexReference); public void setBestMinDist(final double bestMinDist) { this.bestMinDist = bestMinDist; } public double getBestMinDist() { return this.bestMinDist; } @Override public String toString() { return "" + indexQuery + " - " + indexReference + " - " + bestMinDist; } public int getOtherIndex(final int index) { if (index == indexQuery) { return indexReference; } else { return indexQuery; } } public Instance getSequenceForOtherIndex(final int index) { if (index == indexQuery) { return reference; } else { return query; } } public double getMinDist() { return minDist; } public void setMinDist(final double minDist) { this.minDist = minDist; } @Override public int compareTo(final LazyAssessNN o) { return this.compare(o); } private int compare(final LazyAssessNN o) { double num1 = 
this.getDoubleValueForRanking(); double num2 = o.getDoubleValueForRanking(); return Double.compare(num1, num2); } public abstract double getDoubleValueForRanking(); @Override public boolean equals(final Object o) { LazyAssessNN d = (LazyAssessNN) o; return (this.indexQuery == d.indexQuery && this.indexReference == d.indexReference); } public LBStatus getStatus() { return status; } public abstract void setFullDistStatus(); public double getBestLB() { return bestMinDist; } public Instance getQuery() { return query; } public Instance getReference() { return reference; } // --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- // Internal types // --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- public enum RefineReturnType { Pruned_with_LB, Pruned_with_Dist, New_best } public enum LBStatus { None, LB_Kim, Partial_LB_KeoghQR, Full_LB_KeoghQR, Partial_LB_KeoghRQ, Full_LB_KeoghRQ, // DTW Previous_LB_DTW, Previous_DTW, Full_DTW, Partial_DTW, // DTW Partial_LB_WDTWQR, Partial_LB_WDTWRQ, Full_LB_WDTWQR, Full_LB_WDTWRQ, // WDTW Previous_LB_WDTW, Previous_WDTW, Full_WDTW, // WDTW Partial_LB_MSM, Full_LB_MSM, Previous_LB_MSM, Previous_MSM, Full_MSM, // MSM Partial_LB_ERPQR, Partial_LB_ERPRQ, Full_LB_ERPQR, Full_LB_ERPRQ, // ERP Previous_G_LB_ERP, Previous_Band_LB_ERP, Previous_Band_ERP, Full_ERP, // ERP Partial_LB_TWE, Full_LB_TWE, Previous_LB_TWE, Previous_TWE, Full_TWE, // TWE Partial_LB_LCSS, Full_LB_LCSS, Previous_LB_LCSS, Previous_LCSS, Full_LCSS // LCSS } }
5,633
33.777778
120
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/assessingNN/LazyAssessNN_DTW.java
/*
 * Copyright (C) 2019 Chang Wei Tan
 *
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.WarpingPathResults;
import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache;
import tsml.classifiers.legacy.elastic_ensemble.distance_functions.DTW;
import weka.core.Instance;

/**
 * DTW specialisation of LazyAssessNN: lazily tightens a cascade of lower
 * bounds (LB Kim, then LB Keogh in both directions) before paying for the
 * full DTW, and remembers partial results so a later call with a different
 * warping window can resume where it stopped.
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LazyAssessNN_DTW extends LazyAssessNN {
    private int currentW;                           // Current warping window for DTW
    private int minWindowValidity;                  // Minimum window validity for DTW, ERP, LCSS
    private int nOperationsLBKim;                   // Number of operations for LB Kim
    private double EuclideanDist;                   // euclidean distance
                                                    // NOTE(review): never assigned in this class, so
                                                    // getEuclideanDistance() returns 0.0 — confirm intended.

    /** Creates an empty assessment bound to a cache; call set(...) before use. */
    public LazyAssessNN_DTW(final SequenceStatsCache cache) {
        super(cache);
    }

    /**
     * Creates an assessment for a (query, reference) pair and immediately
     * computes the cheap LB Kim bound as the starting lower bound.
     */
    public LazyAssessNN_DTW(final Instance query, final int index,
                            final Instance reference, final int indexReference,
                            final SequenceStatsCache cache) {
        super(query, index, reference, indexReference, cache);
        tryLBKim();
        this.bestMinDist = minDist;
        this.status = LBStatus.LB_Kim;
    }

    /**
     * Re-initialises this object for a new pair (smaller index becomes the
     * query, mirroring the superclass constructor) and recomputes LB Kim.
     */
    public void set(final Instance query, final int index, final Instance reference, final int indexReference) {
        // --- OTHER RESET
        indexStoppedLB = oldIndexStoppedLB = 0;
        currentW = 0;
        minWindowValidity = 0;
        nOperationsLBKim = 0;
        // --- From constructor
        if (index < indexReference) {
            this.query = query;
            this.indexQuery = index;
            this.reference = reference;
            this.indexReference = indexReference;
        } else {
            this.query = reference;
            this.indexQuery = indexReference;
            this.reference = query;
            this.indexReference = index;
        }
        this.minDist = 0.0;
        tryLBKim();
        this.bestMinDist = minDist;
        this.status = LBStatus.LB_Kim;
    }

    /**
     * LB Kim: lower-bounds DTW using the first/last points of both series,
     * plus the global min/max when they are not already at the endpoints.
     * Uses value(numAttributes() - 2) as the last point — assumes the class
     * attribute occupies the final attribute slot.
     */
    private void tryLBKim() {
        final double diffFirsts = query.value(0) - reference.value(0);
        final double diffLasts = query.value(query.numAttributes() - 2) - reference.value(reference.numAttributes() - 2);
        minDist = diffFirsts * diffFirsts + diffLasts * diffLasts;
        nOperationsLBKim = 2;
        // only add the min term when the minimum is interior to both series
        if (!cache.isMinFirst(indexQuery) && !cache.isMinFirst(indexReference) && !cache.isMinLast(indexQuery) && !cache.isMinLast(indexReference)) {
            final double diffMin = cache.getMin(indexQuery) - cache.getMin(indexReference);
            minDist += diffMin * diffMin;
            nOperationsLBKim++;
        }
        // likewise for the max term
        if (!cache.isMaxFirst(indexQuery) && !cache.isMaxFirst(indexReference) && !cache.isMaxLast(indexQuery) && !cache.isMaxLast(indexReference)) {
            final double diffMax = cache.getMax(indexQuery) - cache.getMax(indexReference);
            minDist += diffMax * diffMax;
            nOperationsLBKim++;
        }
        status = LBStatus.LB_Kim;
    }

    /**
     * Incrementally continues LB Keogh (query envelope vs reference points),
     * visiting points in decreasing-value order and stopping as soon as the
     * accumulated bound exceeds scoreToBeat. Resumes from indexStoppedLB.
     */
    private void tryContinueLBKeoghQR(final double scoreToBeat) {
        final int length = query.numAttributes() - 1;
        final double[] LEQ = cache.getLE(indexQuery, currentW);
        final double[] UEQ = cache.getUE(indexQuery, currentW);
        while (indexStoppedLB < length && minDist <= scoreToBeat) {
            final int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            final double c = reference.value(index);
            if (c < LEQ[index]) {
                final double diff = LEQ[index] - c;
                minDist += diff * diff;
            } else if (UEQ[index] < c) {
                final double diff = UEQ[index] - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
    }

    /**
     * Symmetric counterpart of tryContinueLBKeoghQR: reference envelope vs
     * query points.
     */
    private void tryContinueLBKeoghRQ(final double scoreToBeat) {
        final int length = reference.numAttributes() - 1;
        final double[] LER = cache.getLE(indexReference, currentW);
        final double[] UER = cache.getUE(indexReference, currentW);
        while (indexStoppedLB < length && minDist <= scoreToBeat) {
            final int index = cache.getIndexNthHighestVal(indexQuery, indexStoppedLB);
            final double c = query.value(index);
            if (c < LER[index]) {
                final double diff = LER[index] - c;
                minDist += diff * diff;
            } else if (UER[index] < c) {
                final double diff = UER[index] - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
    }

    /**
     * Computes the complete LB Keogh (query envelope vs reference points)
     * from scratch, with no early stop.
     */
    private void tryFullLBKeoghQR() {
        final int length = query.numAttributes() - 1;
        final double[] LEQ = cache.getLE(indexQuery, currentW);
        final double[] UEQ = cache.getUE(indexQuery, currentW);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length) {
            final int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            final double c = reference.value(index);
            if (c < LEQ[index]) {
                final double diff = LEQ[index] - c;
                minDist += diff * diff;
            } else if (UEQ[index] < c) {
                final double diff = UEQ[index] - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
    }

    /**
     * Computes the complete LB Keogh (reference envelope vs query points)
     * from scratch, with no early stop.
     */
    private void tryFullLBKeoghRQ() {
        final int length = reference.numAttributes() - 1;
        final double[] LER = cache.getLE(indexReference, currentW);
        final double[] UER = cache.getUE(indexReference, currentW);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length) {
            final int index = cache.getIndexNthHighestVal(indexQuery, indexStoppedLB);
            final double c = query.value(index);
            if (c < LER[index]) {
                final double diff = LER[index] - c;
                minDist += diff * diff;
            } else if (UER[index] < c) {
                final double diff = UER[index] - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
    }

    /**
     * Switches to a new warping window, downgrading the status so previously
     * computed bounds are reused when still valid: a full DTW stays valid
     * while the new window is >= minWindowValidity; otherwise results become
     * "previous" and will be refined on the next tryToBeat.
     */
    private void setCurrentW(final int currentW) {
        if (this.currentW != currentW) {
            this.currentW = currentW;
            if (this.status == LBStatus.Full_DTW) {
                if (this.currentW < minWindowValidity) {
                    this.status = LBStatus.Previous_DTW;
                }
            } else {
                this.status = LBStatus.Previous_LB_DTW;
                this.oldIndexStoppedLB = indexStoppedLB;
            }
        }
    }

    /**
     * Tries to prove this pair cannot beat scoreToBeat, escalating through
     * the bound cascade: LB Keogh QR -> LB Keogh RQ -> full DTW.
     *
     * NOTE: the case fall-throughs are deliberate — each case picks up the
     * cascade from where the stored status left off and falls into the next
     * stage when the current bound is not strong enough to prune.
     *
     * @param scoreToBeat current best (NN) distance to beat
     * @param w warping window to assess under
     * @return Pruned_with_LB / Pruned_with_Dist when the pair is proved
     *         worse, New_best when the full DTW beats scoreToBeat
     */
    public RefineReturnType tryToBeat(final double scoreToBeat, final int w) {
        setCurrentW(w);
        switch (status) {
            case Previous_LB_DTW:
            case Previous_DTW:
            case LB_Kim:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                indexStoppedLB = 0;
                minDist = 0;
            case Partial_LB_KeoghQR:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                tryContinueLBKeoghQR(scoreToBeat);
                if (minDist > bestMinDist) bestMinDist = minDist;
                if (bestMinDist >= scoreToBeat) {
                    // record how far the bound got so it can be resumed later
                    if (indexStoppedLB < query.numAttributes() - 1) status = LBStatus.Partial_LB_KeoghQR;
                    else status = LBStatus.Full_LB_KeoghQR;
                    return RefineReturnType.Pruned_with_LB;
                } else status = LBStatus.Full_LB_KeoghQR;
            case Full_LB_KeoghQR:
                indexStoppedLB = 0;
                minDist = 0;
            case Partial_LB_KeoghRQ:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                tryContinueLBKeoghRQ(scoreToBeat);
                if (minDist > bestMinDist) bestMinDist = minDist;
                if (bestMinDist >= scoreToBeat) {
                    if (indexStoppedLB < reference.numAttributes() - 1) status = LBStatus.Partial_LB_KeoghRQ;
                    else status = LBStatus.Full_LB_KeoghRQ;
                    return RefineReturnType.Pruned_with_LB;
                } else status = LBStatus.Full_LB_KeoghRQ;
            case Full_LB_KeoghRQ:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                // all lower bounds exhausted — pay for the full DTW
                final WarpingPathResults res = DTW.distanceExt(query, reference, currentW);
                minDist = res.distance;
                if (minDist > bestMinDist) bestMinDist = minDist;
                status = LBStatus.Full_DTW;
                minWindowValidity = res.distanceFromDiagonal;
            case Full_DTW:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_Dist;
                else return RefineReturnType.New_best;
            default:
                throw new RuntimeException("Case not managed");
        }
    }

    /**
     * @param window the warping window the caller wants the distance for
     * @return the fully computed DTW distance, valid only when a full DTW
     *         exists and is still valid at the requested window
     */
    public double getDistance(final int window) {
        if ((status == LBStatus.Full_DTW) && minWindowValidity <= window) {
            return minDist;
        }
        throw new RuntimeException("Shouldn't call getDistance if not sure there is argMin3 valid already-computed Distance");
    }

    /**
     * @return the smallest window for which the stored full DTW distance
     *         remains valid; only callable after a full DTW
     */
    public int getMinWindowValidityForFullDistance() {
        if (status == LBStatus.Full_DTW) {
            return minWindowValidity;
        }
        throw new RuntimeException("Shouldn't call getDistance if not sure there is argMin3 valid already-computed Distance");
    }

    public int getMinwindow() {
        return minWindowValidity;
    }

    public void setMinwindow(final int w) {
        minWindowValidity = w;
    }

    public double getEuclideanDistance() {
        return EuclideanDist;
    }

    @Override
    public void setFullDistStatus() {
        this.status = LBStatus.Full_DTW;
    }

    /**
     * Ranking value: the current best bound normalised by how much work
     * produced it (so cheap, loose bounds don't dominate tight ones).
     * Lower values rank as more promising.
     */
    @Override
    public double getDoubleValueForRanking() {
        double thisD = this.bestMinDist;
        switch (status) {
            // DTW
            case Full_DTW:
            case Full_LB_KeoghQR:
            case Full_LB_KeoghRQ:
                return thisD / (query.numAttributes() - 1);
            case LB_Kim:
                return thisD / nOperationsLBKim;
            case Partial_LB_KeoghQR:
            case Partial_LB_KeoghRQ:
                return thisD / indexStoppedLB;
            case Previous_DTW:
                return 0.8 * thisD / (query.numAttributes() - 1);    // DTW(w+1) should be tighter
            case Previous_LB_DTW:
                if (indexStoppedLB == 0) {
                    // lb kim
                    return thisD / nOperationsLBKim;
                } else {
                    // lbkeogh
                    return thisD / oldIndexStoppedLB;
                }
            case None:
                return Double.POSITIVE_INFINITY;
            default:
                throw new RuntimeException("shouldn't come here");
        }
    }
}
11,876
39.955172
149
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/assessingNN/LazyAssessNN_ERP.java
/*
 * Copyright (C) 2019 Chang Wei Tan
 *
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.WarpingPathResults;
import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache;
import tsml.classifiers.legacy.elastic_ensemble.distance_functions.ERPDistance;
import weka.core.Instance;

/**
 * Lazily assesses one query/reference pair for nearest-neighbour search under the
 * ERP distance. Cheap lower bounds (LB) are computed incrementally and only upgraded
 * to a full ERP computation when pruning fails; the current progress is tracked in
 * {@code status} (inherited from {@link LazyAssessNN}) so later calls with the same
 * g / band-size parameters can resume where the previous call stopped.
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LazyAssessNN_ERP extends LazyAssessNN {
    // ERP parameters of the most recent assessment; used to decide whether cached
    // LB / distance state is still valid when tryToBeat is called again.
    private double currentG;
    private double currentBandSize;
    // Smallest warping window for which the last full ERP distance remains exact
    // (taken from WarpingPathResults.distanceFromDiagonal).
    private int minWindowValidity;

    public LazyAssessNN_ERP(final SequenceStatsCache cache) {
        super(cache);
    }

    public LazyAssessNN_ERP(final Instance query, final int index, final Instance reference, final int indexReference, final SequenceStatsCache cache) {
        super(query, index, reference, indexReference, cache);
        this.bestMinDist = minDist;
        this.status = LBStatus.None;
    }

    /**
     * Re-initialises this object for a new pair, resetting all cached LB state.
     * The series with the smaller index is always stored as the query so that a
     * pair is assessed the same way regardless of argument order.
     */
    public void set(final Instance query, final int index, final Instance reference, final int indexReference) {
        // --- OTHER RESET
        indexStoppedLB = oldIndexStoppedLB = 0;
        currentG = 0;
        currentBandSize = 0;
        minWindowValidity = 0;
        // --- From constructor
        if (index < indexReference) {
            this.query = query;
            this.indexQuery = index;
            this.reference = reference;
            this.indexReference = indexReference;
        } else {
            this.query = reference;
            this.indexQuery = indexReference;
            this.reference = query;
            this.indexReference = index;
        }
        this.minDist = 0.0;
        this.bestMinDist = minDist;
        this.status = LBStatus.None;
    }

    /**
     * Resumable LB_Keogh-style bound using the query's ERP envelopes (QR direction):
     * accumulates squared deviations of reference points falling outside [LE, UE].
     * Reference points are visited in the cache's "Nth highest value" order and the
     * scan early-abandons once the bound exceeds {@code scoreToBeat}.
     */
    void tryContinueLBERPQR(final double scoreToBeat) {
        final int length = query.numAttributes() - 1;
        final double[] LEQ = cache.getLE(indexQuery, currentG, currentBandSize);
        final double[] UEQ = cache.getUE(indexQuery, currentG, currentBandSize);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length && minDist <= scoreToBeat) {
            final int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            final double c = reference.value(index);
            if (c < LEQ[index]) {
                final double diff = LEQ[index] - c;
                minDist += diff * diff;
            } else if (UEQ[index] < c) {
                final double diff = UEQ[index] - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
    }

    /** Mirror of {@link #tryContinueLBERPQR} using the reference's envelopes (RQ direction). */
    private void tryContinueLBERPRQ(final double scoreToBeat) {
        final int length = reference.numAttributes() - 1;
        final double[] LER = cache.getLE(indexReference, currentG, currentBandSize);
        final double[] UER = cache.getUE(indexReference, currentG, currentBandSize);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length && minDist <= scoreToBeat) {
            final int index = cache.getIndexNthHighestVal(indexQuery, indexStoppedLB);
            final double c = query.value(index);
            if (c < LER[index]) {
                final double diff = LER[index] - c;
                minDist += diff * diff;
            } else if (UER[index] < c) {
                final double diff = UER[index] - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
    }

    /**
     * Full (non-abandoning) QR bound over every reference point.
     * NOTE(review): not called from {@link #tryToBeat}, which uses the resumable
     * variant instead — appears to be kept for completeness; confirm before removal.
     */
    private void tryFullLBERPQR() {
        final int length = query.numAttributes() - 1;
        final double[] LEQ = cache.getLE(indexQuery, currentG, currentBandSize);
        final double[] UEQ = cache.getUE(indexQuery, currentG, currentBandSize);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length) {
            final int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            final double c = reference.value(index);
            if (c < LEQ[index]) {
                final double diff = LEQ[index] - c;
                minDist += diff * diff;
            } else if (UEQ[index] < c) {
                final double diff = UEQ[index] - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
    }

    /** Full (non-abandoning) RQ bound; see note on {@link #tryFullLBERPQR}. */
    private void tryFullLBERPRQ() {
        final int length = reference.numAttributes() - 1;
        final double[] LER = cache.getLE(indexReference, currentG, currentBandSize);
        final double[] UER = cache.getUE(indexReference, currentG, currentBandSize);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length) {
            final int index = cache.getIndexNthHighestVal(indexQuery, indexStoppedLB);
            final double c = query.value(index);
            if (c < LER[index]) {
                final double diff = LER[index] - c;
                minDist += diff * diff;
            } else if (UER[index] < c) {
                final double diff = UER[index] - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
    }

    /**
     * Records the ERP parameters for the next assessment and demotes the cached
     * LB status accordingly: a new g invalidates everything; a new band size keeps
     * the full distance only while it stays within the window-validity range.
     */
    private void setCurrentGandBandSize(final double g, final double bandSize) {
        if (this.currentG != g) {
            // g changed: all envelopes and distances are stale.
            this.currentBandSize = bandSize;
            this.currentG = g;
            this.minDist = 0.0;
            this.bestMinDist = minDist;
            indexStoppedLB = oldIndexStoppedLB = 0;
            this.status = LBStatus.Previous_G_LB_ERP;
        } else if (this.currentBandSize != bandSize) {
            this.currentBandSize = bandSize;
            if (status == LBStatus.Full_ERP) {
                // The full distance stays exact unless the band shrinks below the
                // validity window of the last computed warping path.
                if (this.currentBandSize < minWindowValidity) {
                    this.status = LBStatus.Previous_Band_ERP;
                }
            } else {
                this.status = LBStatus.Previous_Band_LB_ERP;
                this.oldIndexStoppedLB = indexStoppedLB;
            }
        }
    }

    /**
     * Attempts to show this pair's ERP distance is at least {@code scoreToBeat},
     * escalating through progressively tighter bounds: QR LB, RQ LB, then the
     * full ERP distance. The switch intentionally falls through from case to case
     * so each call resumes from the tightest bound already computed.
     *
     * @return pruned-by-LB, pruned-by-distance, or new best candidate
     */
    public RefineReturnType tryToBeat(final double scoreToBeat, final double g, final double bandSize) {
        setCurrentGandBandSize(g, bandSize);
        switch (status) {
            case None:
            case Previous_G_LB_ERP:
            case Previous_Band_LB_ERP:
            case Previous_Band_ERP:
            case LB_Kim:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                indexStoppedLB = 0;
                minDist = 0;
                // intentional fall-through: start/resume the QR lower bound
            case Partial_LB_ERPQR:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                tryContinueLBERPQR(scoreToBeat);
                if (minDist > bestMinDist) bestMinDist = minDist;
                if (bestMinDist >= scoreToBeat) {
                    if (indexStoppedLB < query.numAttributes() - 1) status = LBStatus.Partial_LB_ERPQR;
                    else status = LBStatus.Full_LB_ERPQR;
                    return RefineReturnType.Pruned_with_LB;
                } else status = LBStatus.Full_LB_ERPQR;
                // intentional fall-through: QR bound exhausted, try RQ direction
            case Full_LB_ERPQR:
                indexStoppedLB = 0;
                minDist = 0;
            case Partial_LB_ERPRQ:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                tryContinueLBERPRQ(scoreToBeat);
                if (minDist > bestMinDist) bestMinDist = minDist;
                if (bestMinDist >= scoreToBeat) {
                    if (indexStoppedLB < reference.numAttributes() - 1) status = LBStatus.Partial_LB_ERPRQ;
                    else status = LBStatus.Full_LB_ERPRQ;
                    return RefineReturnType.Pruned_with_LB;
                } else status = LBStatus.Full_LB_ERPRQ;
                // intentional fall-through: all LBs failed, pay for the full distance
            case Full_LB_ERPRQ:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                final WarpingPathResults res = ERPDistance.distanceExt(query, reference, currentG, currentBandSize);
                minDist = res.distance;
                if (minDist > bestMinDist) bestMinDist = minDist;
                status = LBStatus.Full_ERP;
                minWindowValidity = res.distanceFromDiagonal;
            case Full_ERP:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_Dist;
                else return RefineReturnType.New_best;
            default:
                throw new RuntimeException("Case not managed");
        }
    }

    /**
     * Returns the fully-computed ERP distance, valid only when a full computation
     * has happened and the requested window is within the validity range.
     *
     * @throws RuntimeException if no valid full distance is cached
     */
    public double getDistance(final int window) {
        if ((status == LBStatus.Full_ERP) && minWindowValidity <= window) {
            return minDist;
        }
        throw new RuntimeException("Shouldn't call getDistance if not sure there is argMin3 valid already-computed Distance");
    }

    /** @return smallest window for which the cached full distance is exact */
    public int getMinWindowValidityForFullDistance() {
        if (status == LBStatus.Full_ERP) {
            return minWindowValidity;
        }
        throw new RuntimeException("Shouldn't call getDistance if not sure there is argMin3 valid already-computed Distance");
    }

    public int getMinwindow() {
        return minWindowValidity;
    }

    public void setMinwindow(final int w) {
        minWindowValidity = w;
    }

    @Override
    public void setFullDistStatus() {
        this.status = LBStatus.Full_ERP;
    }

    /**
     * Ranking key for ordering candidate pairs: the best bound so far, normalised
     * by how many points contributed to it (looser/partial bounds rank lower).
     * The 0.8 factor discounts a full distance computed under stale parameters.
     */
    @Override
    public double getDoubleValueForRanking() {
        double thisD = this.bestMinDist;
        switch (status) {
            // ERP
            case Full_ERP:
            case Full_LB_ERPQR:
            case Full_LB_ERPRQ:
                return thisD / (query.numAttributes() - 1);
            case Partial_LB_ERPQR:
            case Partial_LB_ERPRQ:
                return thisD / indexStoppedLB;
            case Previous_Band_ERP:
                return 0.8 * thisD / (query.numAttributes() - 1);
            case Previous_G_LB_ERP:
            case Previous_Band_LB_ERP:
                return thisD / oldIndexStoppedLB;
            case None:
                return Double.POSITIVE_INFINITY;
            default:
                throw new RuntimeException("shouldn't come here");
        }
    }
}
10,941
39.227941
126
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/assessingNN/LazyAssessNN_LCSS.java
/*
 * Copyright (C) 2019 Chang Wei Tan
 *
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.WarpingPathResults;
import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache;
import tsml.classifiers.legacy.elastic_ensemble.distance_functions.LCSSDistance;
import weka.core.Instance;

/**
 * Lazily assesses one query/reference pair for nearest-neighbour search under the
 * LCSS distance (1 - LCS/length, so lower is more similar). A cheap envelope-based
 * lower bound is tried before paying for the full LCSS computation; cached progress
 * is tracked via {@code status} (inherited from {@link LazyAssessNN}).
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LazyAssessNN_LCSS extends LazyAssessNN {
    // LCSS parameters of the most recent assessment; used to detect stale LB state.
    private double currentEpsilon;
    private int currentDelta;
    // Smallest warping window for which the last full LCSS distance remains exact.
    private int minWindowValidity;

    public LazyAssessNN_LCSS(final SequenceStatsCache cache) {
        super(cache);
    }

    public LazyAssessNN_LCSS(final Instance query, final int index, final Instance reference, final int indexReference, final SequenceStatsCache cache) {
        super(query, index, reference, indexReference, cache);
        this.bestMinDist = minDist;
        this.status = LBStatus.None;
    }

    /**
     * Re-initialises this object for a new pair. The series with the smaller index
     * is always stored as the query so argument order does not matter.
     * NOTE(review): unlike the ERP/TWED siblings, currentDelta/currentEpsilon are
     * NOT reset here — confirm whether stale parameters can leak across pairs.
     */
    public void set(final Instance query, final int index, final Instance reference, final int indexReference) {
        // --- OTHER RESET
        indexStoppedLB = oldIndexStoppedLB = 0;
        minWindowValidity = 0;
        // --- From constructor
        if (index < indexReference) {
            this.query = query;
            this.indexQuery = index;
            this.reference = reference;
            this.indexReference = indexReference;
        } else {
            this.query = reference;
            this.indexQuery = indexReference;
            this.reference = query;
            this.indexReference = index;
        }
        this.minDist = 0.0;
        this.bestMinDist = minDist;
        this.status = LBStatus.None;
    }

    /**
     * Early-abandoning LB: counts reference points inside the query's LCSS envelope
     * (each in-envelope point can extend the common subsequence by one) and converts
     * the count to a distance bound 1 - lcs/length.
     * NOTE(review): the guard {@code lcs > ub} is false on entry (lcs starts at 0 and
     * ub >= 0), so the loop body never executes and minDist is always 1. This method
     * is currently not called from {@link #tryToBeat} (which uses tryFullLBLCSS), so
     * behaviour is unaffected, but the condition looks inverted — confirm against the
     * upstream FastEE source before reusing this method.
     */
    private void tryContinueLBLCSS(final double scoreToBeat) {
        final int length = query.numAttributes() - 1;
        final double ub = Math.abs(1.0 - scoreToBeat) * length;
        final double[] LEQ = cache.getLE(indexQuery, currentDelta, currentEpsilon);
        final double[] UEQ = cache.getUE(indexQuery, currentDelta, currentEpsilon);
        double lcs = 0;
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length && lcs > ub) {
            final int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            if (reference.value(index) <= UEQ[index] && reference.value(index) >= LEQ[index]) {
                lcs++;
            }
            indexStoppedLB++;
        }
        this.minDist = 1 - lcs / length;
    }

    /**
     * Full LB: counts every reference point inside the query's LCSS envelope and
     * sets minDist = 1 - lcs/length.
     */
    private void tryFullLBLCSS() {
        final int length = query.numAttributes() - 1;
        final double[] LEQ = cache.getLE(indexQuery, currentDelta, currentEpsilon);
        final double[] UEQ = cache.getUE(indexQuery, currentDelta, currentEpsilon);
        double lcs = 0;
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length) {
            final int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            if (reference.value(index) <= UEQ[index] && reference.value(index) >= LEQ[index]) {
                lcs++;
            }
            indexStoppedLB++;
        }
        this.minDist = 1 - lcs / length;
    }

    /**
     * Records the LCSS parameters for the next assessment, demoting cached LB state:
     * a new epsilon invalidates everything; a new delta keeps the full distance only
     * while it stays within the window-validity range.
     */
    private void setCurrentDeltaAndEpsilon(final int delta, final double epsilon) {
        if (this.currentEpsilon != epsilon) {
            // epsilon changed: envelopes and distances are stale.
            this.currentEpsilon = epsilon;
            this.currentDelta = delta;
            this.minDist = 0.0;
            this.bestMinDist = minDist;
            indexStoppedLB = oldIndexStoppedLB = 0;
            this.status = LBStatus.Previous_LB_LCSS;
        } else if (this.currentDelta != delta) {
            this.currentDelta = delta;
            if (status == LBStatus.Full_LCSS) {
                if (this.currentDelta < minWindowValidity) {
                    this.status = LBStatus.Previous_LCSS;
                }
            } else {
                this.status = LBStatus.Previous_LB_LCSS;
                this.oldIndexStoppedLB = indexStoppedLB;
            }
        }
    }

    /**
     * Attempts to show this pair's LCSS distance is at least {@code scoreToBeat},
     * escalating from the envelope LB to the full LCSS distance. The switch
     * intentionally falls through so each call resumes from the tightest bound
     * already computed.
     */
    public RefineReturnType tryToBeat(final double scoreToBeat, final int delta, final double epsilon) {
        setCurrentDeltaAndEpsilon(delta, epsilon);
        switch (status) {
            case None:
            case Previous_LB_LCSS:
            case Previous_LCSS:
                if (bestMinDist > scoreToBeat) return RefineReturnType.Pruned_with_LB;
                indexStoppedLB = 0;
                minDist = 0;
                // intentional fall-through: compute the envelope lower bound
            case Partial_LB_LCSS:
                tryFullLBLCSS();
                if (minDist > bestMinDist) bestMinDist = minDist;
                if (bestMinDist > scoreToBeat) {
                    if (indexStoppedLB < query.numAttributes() - 1) status = LBStatus.Partial_LB_LCSS;
                    else status = LBStatus.Full_LB_LCSS;
                    return RefineReturnType.Pruned_with_LB;
                } else status = LBStatus.Full_LB_LCSS;
                // intentional fall-through: LB failed to prune, pay for full LCSS
            case Full_LB_LCSS:
                if (bestMinDist > scoreToBeat) return RefineReturnType.Pruned_with_LB;
                final WarpingPathResults res = LCSSDistance.distanceExt(query, reference, epsilon, delta);
                minDist = res.distance;
                if (minDist > bestMinDist) bestMinDist = minDist;
                status = LBStatus.Full_LCSS;
                minWindowValidity = res.distanceFromDiagonal;
            case Full_LCSS:
                if (bestMinDist > scoreToBeat) return RefineReturnType.Pruned_with_Dist;
                else return RefineReturnType.New_best;
            default:
                throw new RuntimeException("Case not managed");
        }
    }

    /**
     * Returns the fully-computed LCSS distance, valid only when a full computation
     * has happened and the requested window is within the validity range.
     *
     * @throws RuntimeException if no valid full distance is cached
     */
    public double getDistance(final int window) {
        if ((status == LBStatus.Full_LCSS) && minWindowValidity <= window) {
            return minDist;
        }
        throw new RuntimeException("Shouldn't call getDistance if not sure there is argMin3 valid already-computed Distance");
    }

    /** @return smallest window for which the cached full distance is exact */
    public int getMinWindowValidityForFullDistance() {
        if (status == LBStatus.Full_LCSS) {
            return minWindowValidity;
        }
        throw new RuntimeException("Shouldn't call getDistance if not sure there is argMin3 valid already-computed Distance");
    }

    public int getMinwindow() {
        return minWindowValidity;
    }

    public void setMinwindow(final int w) {
        minWindowValidity = w;
    }

    @Override
    public void setFullDistStatus() {
        this.status = LBStatus.Full_LCSS;
    }

    /**
     * Ranking key for ordering candidate pairs: the best bound so far, normalised
     * by the number of contributing points; 0.8 discounts a stale full distance.
     */
    @Override
    public double getDoubleValueForRanking() {
        double thisD = this.bestMinDist;
        switch (status) {
            // LCSS
            case Full_LCSS:
            case Full_LB_LCSS:
                return thisD / (query.numAttributes() - 1);
            case Partial_LB_LCSS:
                return thisD / indexStoppedLB;
            case Previous_LCSS:
                return 0.8 * thisD / (query.numAttributes() - 1);
            case Previous_LB_LCSS:
                return thisD / oldIndexStoppedLB;
            case None:
                return Double.POSITIVE_INFINITY;
            default:
                throw new RuntimeException("shouldn't come here");
        }
    }
}
8,045
38.24878
126
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/assessingNN/LazyAssessNN_MSM.java
/*
 * Copyright (C) 2019 Chang Wei Tan
 *
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache;
import tsml.classifiers.legacy.elastic_ensemble.distance_functions.MSMDistance;
import weka.core.Instance;

/**
 * Lazily assesses one query/reference pair for nearest-neighbour search under the
 * MSM (Move-Split-Merge) distance. A cheap lower bound built from the query's
 * global min/max is tried before paying for the full MSM computation; progress is
 * tracked via {@code status} (inherited from {@link LazyAssessNN}).
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LazyAssessNN_MSM extends LazyAssessNN {
    // Cost parameter c of the most recent assessment; changing it demotes cached state.
    private double currentC;
    private MSMDistance distanceComputer = new MSMDistance();

    public LazyAssessNN_MSM(final SequenceStatsCache cache) {
        super(cache);
    }

    public LazyAssessNN_MSM(final Instance query, final int index, final Instance reference, final int indexReference, final SequenceStatsCache cache) {
        super(query, index, reference, indexReference, cache);
        this.bestMinDist = minDist;
        this.status = LBStatus.None;
    }

    /**
     * Re-initialises this object for a new pair, resetting all cached LB state.
     * The series with the smaller index is always stored as the query.
     */
    public void set(final Instance query, final int index, final Instance reference, final int indexReference) {
        // --- OTHER RESET
        indexStoppedLB = oldIndexStoppedLB = 0;
        currentC = 0;
        // --- From constructor
        if (index < indexReference) {
            this.query = query;
            this.indexQuery = index;
            this.reference = reference;
            this.indexReference = indexReference;
        } else {
            this.query = reference;
            this.indexQuery = indexReference;
            this.reference = query;
            this.indexReference = index;
        }
        this.minDist = 0.0;
        this.bestMinDist = minDist;
        this.status = LBStatus.None;
    }

    /**
     * Early-abandoning MSM lower bound: starts from |q0 - r0| and, for interior
     * reference points that drift monotonically outside the query's [QMIN, QMAX]
     * range, adds the cheaper of the out-of-range gap and the move cost c.
     * The scan stops once the bound reaches {@code scoreToBeat}.
     */
    private void tryContinueLBMSM(final double scoreToBeat) {
        final int length = query.numAttributes() - 1;
        final double QMAX = cache.getMax(indexQuery);
        final double QMIN = cache.getMin(indexQuery);
        this.minDist = Math.abs(query.value(0) - reference.value(0));
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length && minDist < scoreToBeat) {
            int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            if (index > 0 && index < length - 1) {
                final double curr = reference.value(index);
                final double prev = reference.value(index - 1);
                if (prev <= curr && curr < QMIN) {
                    minDist += Math.min(Math.abs(reference.value(index) - QMIN), this.currentC);
                } else if (prev >= curr && curr >= QMAX) {
                    minDist += Math.min(Math.abs(reference.value(index) - QMAX), this.currentC);
                }
            }
            indexStoppedLB++;
        }
    }

    /**
     * Full (non-abandoning) variant of {@link #tryContinueLBMSM} over all points.
     * NOTE(review): currently not called from {@link #tryToBeat}.
     */
    private void tryFullLBMSM() {
        final int length = query.numAttributes() - 1;
        final double QMAX = cache.getMax(indexQuery);
        final double QMIN = cache.getMin(indexQuery);
        this.minDist = Math.abs(query.value(0) - reference.value(0));
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length) {
            int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            if (index > 0 && index < length - 1) {
                final double curr = reference.value(index);
                final double prev = reference.value(index - 1);
                if (prev <= curr && curr < QMIN) {
                    minDist += Math.min(Math.abs(reference.value(index) - QMIN), this.currentC);
                } else if (prev >= curr && curr >= QMAX) {
                    minDist += Math.min(Math.abs(reference.value(index) - QMAX), this.currentC);
                }
            }
            indexStoppedLB++;
        }
    }

    /** Records the cost parameter, demoting cached LB / distance state if it changed. */
    private void setCurrentC(final double c) {
        if (this.currentC != c) {
            this.currentC = c;
            if (status == LBStatus.Full_MSM) {
                this.status = LBStatus.Previous_MSM;
            } else {
                this.status = LBStatus.Previous_LB_MSM;
                this.oldIndexStoppedLB = indexStoppedLB;
            }
        }
    }

    /**
     * Attempts to show this pair's MSM distance is at least {@code scoreToBeat},
     * escalating from the cheap LB to the full MSM distance. The switch
     * intentionally falls through so each call resumes from the tightest bound
     * already computed.
     */
    public RefineReturnType tryToBeat(final double scoreToBeat, final double c) {
        setCurrentC(c);
        switch (status) {
            case None:
            case Previous_LB_MSM:
            case Previous_MSM:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                indexStoppedLB = 0;
                minDist = 0;
                // intentional fall-through: start/resume the lower bound
            case Partial_LB_MSM:
                tryContinueLBMSM(scoreToBeat);
                if (minDist > bestMinDist) bestMinDist = minDist;
                if (bestMinDist >= scoreToBeat) {
                    if (indexStoppedLB < query.numAttributes() - 1) status = LBStatus.Partial_LB_MSM;
                    else status = LBStatus.Full_LB_MSM;
                    return RefineReturnType.Pruned_with_LB;
                } else status = LBStatus.Full_LB_MSM;
                // intentional fall-through: LB failed to prune, pay for full MSM
            case Full_LB_MSM:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                distanceComputer.setC(currentC);
                minDist = distanceComputer.distance(query, reference);
                if (minDist > bestMinDist) bestMinDist = minDist;
                status = LBStatus.Full_MSM;
            case Full_MSM:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_Dist;
                else return RefineReturnType.New_best;
            default:
                throw new RuntimeException("Case not managed");
        }
    }

    /** @return the last computed bound or full distance (no validity check here) */
    public double getDistance() {
        return minDist;
    }

    @Override
    public void setFullDistStatus() {
        this.status = LBStatus.Full_MSM;
    }

    /**
     * Ranking key for ordering candidate pairs: the best bound so far, normalised
     * by the number of contributing points; 0.8 discounts a stale full distance.
     */
    @Override
    public double getDoubleValueForRanking() {
        double thisD = this.bestMinDist;
        switch (status) {
            // MSM
            case Full_MSM:
            case Full_LB_MSM:
                return thisD / (query.numAttributes() - 1);
            case Partial_LB_MSM:
                return thisD / indexStoppedLB;
            case Previous_MSM:
                return 0.8 * thisD / (query.numAttributes() - 1);
            case Previous_LB_MSM:
                return thisD / oldIndexStoppedLB;
            case None:
                return Double.POSITIVE_INFINITY;
            default:
                throw new RuntimeException("shouldn't come here");
        }
    }
}
7,198
38.554945
112
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/assessingNN/LazyAssessNN_TWED.java
/*
 * Copyright (C) 2019 Chang Wei Tan
 *
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache;
import tsml.classifiers.legacy.elastic_ensemble.distance_functions.TWEDistance;
import weka.core.Instance;

/**
 * Lazily assesses one query/reference pair for nearest-neighbour search under the
 * TWED (Time Warp Edit Distance). A cheap lower bound is tried before paying for
 * the full TWED computation; progress is tracked via {@code status} (inherited
 * from {@link LazyAssessNN}).
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LazyAssessNN_TWED extends LazyAssessNN {
    // TWED parameters (stiffness nu, edit penalty lambda) of the last assessment.
    private double currentNu, currentLambda;
    private TWEDistance distanceComputer = new TWEDistance();

    public LazyAssessNN_TWED(final SequenceStatsCache cache) {
        super(cache);
    }

    public LazyAssessNN_TWED(final Instance query, final int index, final Instance reference, final int indexReference, final SequenceStatsCache cache) {
        super(query, index, reference, indexReference, cache);
        this.bestMinDist = minDist;
        this.status = LBStatus.None;
    }

    /**
     * Re-initialises this object for a new pair, resetting all cached LB state.
     * The series with the smaller index is always stored as the query.
     */
    public void set(final Instance query, final int index, final Instance reference, final int indexReference) {
        // --- OTHER RESET
        indexStoppedLB = oldIndexStoppedLB = 0;
        this.currentNu = 0;
        this.currentLambda = 0;
        // --- From constructor
        if (index < indexReference) {
            this.query = query;
            this.indexQuery = index;
            this.reference = reference;
            this.indexReference = indexReference;
        } else {
            this.query = reference;
            this.indexQuery = indexReference;
            this.reference = query;
            this.indexReference = index;
        }
        this.minDist = 0.0;
        this.bestMinDist = minDist;
        this.status = LBStatus.None;
    }

    /**
     * Early-abandoning TWED lower bound. Seeds the bound with the cheapest way of
     * aligning the first points (match, or delete either side at cost value^2 +
     * nu + lambda), then adds min(nu, gap^2) for each interior reference point
     * falling outside the range spanned by the query's global min/max widened by
     * the previous reference point. Stops once the bound exceeds scoreToBeat.
     */
    private void tryContinueLBTWED(final double scoreToBeat) {
        final int length = query.numAttributes() - 1;
        final double q0 = query.value(0);
        final double c0 = reference.value(0);
        double diff = q0 - c0;
        this.minDist = Math.min(diff * diff, Math.min(q0 * q0 + currentNu + currentLambda, c0 * c0 + currentNu + currentLambda));
        this.indexStoppedLB = 1;
        while (indexStoppedLB < length && minDist <= scoreToBeat) {
            final int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            if (index > 0) {
                final double curr = reference.value(index);
                final double prev = reference.value(index - 1);
                final double max = Math.max(cache.getMax(indexQuery), prev);
                final double min = Math.min(cache.getMin(indexQuery), prev);
                if (curr < min) {
                    diff = min - curr;
                    this.minDist += Math.min(currentNu, diff * diff);
                } else if (max < curr) {
                    diff = max - curr;
                    this.minDist += Math.min(currentNu, diff * diff);
                }
            }
            indexStoppedLB++;
        }
    }

    /**
     * Full (non-abandoning) variant of {@link #tryContinueLBTWED} over all points.
     * NOTE(review): currently not called from {@link #tryToBeat}.
     */
    private void tryFullLBTWED() {
        final int length = query.numAttributes() - 1;
        final double q0 = query.value(0);
        final double c0 = reference.value(0);
        double diff = q0 - c0;
        this.minDist = Math.min(diff * diff, Math.min(q0 * q0 + currentNu + currentLambda, c0 * c0 + currentNu + currentLambda));
        this.indexStoppedLB = 1;
        while (indexStoppedLB < length) {
            final int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            if (index > 0) {
                final double curr = reference.value(index);
                final double prev = reference.value(index - 1);
                final double max = Math.max(cache.getMax(indexQuery), prev);
                final double min = Math.min(cache.getMin(indexQuery), prev);
                if (curr < min) {
                    diff = min - curr;
                    this.minDist += Math.min(currentNu, diff * diff);
                } else if (max < curr) {
                    diff = max - curr;
                    this.minDist += Math.min(currentNu, diff * diff);
                }
            }
            indexStoppedLB++;
        }
    }

    /**
     * Records the TWED parameters for the next assessment, demoting cached state:
     * a new nu invalidates everything; a new lambda only demotes the status.
     */
    private void setCurrentNuAndLambda(final double nu, final double lambda) {
        if (this.currentNu != nu) {
            // nu changed: bounds and distances are stale.
            this.currentLambda = lambda;
            this.currentNu = nu;
            this.minDist = 0.0;
            this.bestMinDist = minDist;
            indexStoppedLB = oldIndexStoppedLB = 0;
            this.status = LBStatus.Previous_LB_TWE;
        } else if (this.currentLambda != lambda) {
            this.currentLambda = lambda;
            if (status == LBStatus.Full_TWE) {
                this.status = LBStatus.Previous_TWE;
            } else {
                this.status = LBStatus.Previous_LB_TWE;
                this.oldIndexStoppedLB = indexStoppedLB;
            }
        }
    }

    /**
     * Attempts to show this pair's TWED is at least {@code scoreToBeat}, escalating
     * from the cheap LB to the full TWED. The switch intentionally falls through
     * so each call resumes from the tightest bound already computed.
     */
    public RefineReturnType tryToBeat(final double scoreToBeat, final double nu, final double lambda) {
        setCurrentNuAndLambda(nu, lambda);
        switch (status) {
            case None:
            case Previous_LB_TWE:
            case Previous_TWE:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                indexStoppedLB = 0;
                minDist = 0;
                // intentional fall-through: start/resume the lower bound
            case Partial_LB_TWE:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                tryContinueLBTWED(scoreToBeat);
                if (minDist > bestMinDist) bestMinDist = minDist;
                if (bestMinDist >= scoreToBeat) {
                    if (indexStoppedLB < query.numAttributes() - 1) status = LBStatus.Partial_LB_TWE;
                    else status = LBStatus.Full_LB_TWE;
                    return RefineReturnType.Pruned_with_LB;
                } else status = LBStatus.Full_LB_TWE;
                // intentional fall-through: LB failed to prune, pay for full TWED
            case Full_LB_TWE:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                distanceComputer.setNu(nu);
                distanceComputer.setLambda(lambda);
                minDist = distanceComputer.distance(query, reference);
                if (minDist > bestMinDist) bestMinDist = minDist;
                status = LBStatus.Full_TWE;
            case Full_TWE:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_Dist;
                else return RefineReturnType.New_best;
            default:
                throw new RuntimeException("Case not managed");
        }
    }

    /** @return the last computed bound or full distance (no validity check here) */
    public double getDistance() {
        return minDist;
    }

    @Override
    public void setFullDistStatus() {
        this.status = LBStatus.Full_TWE;
    }

    /**
     * Ranking key for ordering candidate pairs: the best bound so far, normalised
     * by the number of contributing points; 0.8 discounts a stale full distance.
     */
    @Override
    public double getDoubleValueForRanking() {
        double thisD = this.bestMinDist;
        switch (status) {
            // MSM
            case Full_TWE:
            case Full_LB_TWE:
                return thisD / (query.numAttributes() - 1);
            case Partial_LB_TWE:
                return thisD / indexStoppedLB;
            case Previous_TWE:
                return 0.8 * thisD / (query.numAttributes() - 1);
            case Previous_LB_TWE:
                return thisD / oldIndexStoppedLB;
            case None:
                return Double.POSITIVE_INFINITY;
            default:
                throw new RuntimeException("shouldn't come here");
        }
    }
}
8,258
39.092233
112
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/assessingNN/LazyAssessNN_WDTW.java
/*
 * Copyright (C) 2019 Chang Wei Tan
 *
 * This file is part of the UEA Time Series Machine Learning (TSML) toolbox.
 *
 * The UEA TSML toolbox is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * The UEA TSML toolbox is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.assessingNN;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache;
import tsml.classifiers.legacy.elastic_ensemble.distance_functions.WeightedDTW;
import weka.core.Instance;

/**
 * Lazily assesses one query/reference pair for nearest-neighbour search under the
 * Weighted DTW distance. LB_Kim-style bounds (min/max envelope, scaled by the
 * smallest weight {@code weightVector[0]}) are tried in both directions before
 * paying for the full WDTW computation; progress is tracked via {@code status}
 * (inherited from {@link LazyAssessNN}).
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LazyAssessNN_WDTW extends LazyAssessNN {
    // Weight vector of the most recent assessment; weightVector[0] (smallest
    // weight) is used to keep the lower bounds valid.
    private double[] currentWeightVector;

    public LazyAssessNN_WDTW(final SequenceStatsCache cache) {
        super(cache);
    }

    public LazyAssessNN_WDTW(final Instance query, final int index, final Instance reference, final int indexReference, final SequenceStatsCache cache) {
        super(query, index, reference, indexReference, cache);
        this.bestMinDist = minDist;
        this.status = LBStatus.None;
    }

    /**
     * Re-initialises this object for a new pair, resetting cached LB progress.
     * The series with the smaller index is always stored as the query.
     */
    public void set(final Instance query, final int index, final Instance reference, final int indexReference) {
        // --- OTHER RESET
        indexStoppedLB = oldIndexStoppedLB = 0;
        // --- From constructor
        if (index < indexReference) {
            this.query = query;
            this.indexQuery = index;
            this.reference = reference;
            this.indexReference = indexReference;
        } else {
            this.query = reference;
            this.indexQuery = indexReference;
            this.reference = query;
            this.indexReference = index;
        }
        this.minDist = 0.0;
        this.bestMinDist = minDist;
        this.status = LBStatus.None;
    }

    /**
     * Early-abandoning QR bound: accumulates weighted squared deviations of
     * reference points outside the query's global [QMIN, QMAX] range, each term
     * scaled by the smallest weight so the bound stays below the true WDTW.
     * Stops once the bound exceeds {@code scoreToBeat}.
     */
    private void tryContinueLBWDTWQR(final double scoreToBeat) {
        final int length = query.numAttributes() - 1;
        final double QMAX = cache.getMax(indexQuery);
        final double QMIN = cache.getMin(indexQuery);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length && minDist <= scoreToBeat) {
            final int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            final double c = reference.value(index);
            if (c < QMIN) {
                final double diff = QMIN - c;
                minDist += diff * diff * currentWeightVector[0];
            } else if (QMAX < c) {
                final double diff = QMAX - c;
                minDist += diff * diff * currentWeightVector[0];
            }
            indexStoppedLB++;
        }
    }

    /** Mirror of {@link #tryContinueLBWDTWQR} using the reference's min/max (RQ direction). */
    private void tryContinueLBWDTWRQ(final double scoreToBeat) {
        final int length = reference.numAttributes() - 1;
        final double QMAX = cache.getMax(indexReference);
        final double QMIN = cache.getMin(indexReference);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length && minDist <= scoreToBeat) {
            final int index = cache.getIndexNthHighestVal(indexQuery, indexStoppedLB);
            final double c = query.value(index);
            if (c < QMIN) {
                final double diff = QMIN - c;
                minDist += diff * diff * currentWeightVector[0];
            } else if (QMAX < c) {
                final double diff = QMAX - c;
                minDist += diff * diff * currentWeightVector[0];
            }
            indexStoppedLB++;
        }
    }

    /**
     * Full (non-abandoning) QR bound; the weight is applied once at the end
     * rather than per-term (algebraically equivalent).
     * NOTE(review): currently not called from {@link #tryToBeat}.
     */
    private void tryFullLBWDTWQR() {
        final int length = query.numAttributes() - 1;
        final double QMAX = cache.getMax(indexQuery);
        final double QMIN = cache.getMin(indexQuery);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length) {
            final int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            final double c = reference.value(index);
            if (c < QMIN) {
                final double diff = QMIN - c;
                minDist += diff * diff;
            } else if (QMAX < c) {
                final double diff = QMAX - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
        this.minDist *= currentWeightVector[0];
    }

    /** Full (non-abandoning) RQ bound; see note on {@link #tryFullLBWDTWQR}. */
    private void tryFullLBWDTWRQ() {
        final int length = reference.numAttributes() - 1;
        final double QMAX = cache.getMax(indexReference);
        final double QMIN = cache.getMin(indexReference);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length) {
            final int index = cache.getIndexNthHighestVal(indexQuery, indexStoppedLB);
            final double c = query.value(index);
            if (c < QMIN) {
                final double diff = QMIN - c;
                minDist += diff * diff;
            } else if (QMAX < c) {
                final double diff = QMAX - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
        this.minDist *= currentWeightVector[0];
    }

    /**
     * Records the weight vector for the next assessment and demotes cached state.
     * NOTE(review): unlike the parameter setters in the ERP/LCSS/TWED siblings,
     * this does not compare the new vector with the current one, so the status is
     * demoted even when the weights are unchanged — confirm this is intended.
     */
    private void setCurrentWeightVector(final double[] weightVector) {
        this.currentWeightVector = weightVector;
        if (status == LBStatus.Full_WDTW) {
            this.status = LBStatus.Previous_WDTW;
        } else {
            this.status = LBStatus.Previous_LB_WDTW;
            this.oldIndexStoppedLB = indexStoppedLB;
        }
    }

    /**
     * Attempts to show this pair's WDTW is at least {@code scoreToBeat}, escalating
     * QR LB -> RQ LB -> full WDTW. The switch intentionally falls through so each
     * call resumes from the tightest bound already computed.
     */
    public RefineReturnType tryToBeat(final double scoreToBeat, final double[] weightVector) {
        setCurrentWeightVector(weightVector);
        switch (status) {
            case None:
            case Previous_LB_WDTW:
            case Previous_WDTW:
                if (bestMinDist * weightVector[0] >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                indexStoppedLB = 0;
                minDist = 0;
                // intentional fall-through: start/resume the QR lower bound
            case Partial_LB_WDTWQR:
                tryContinueLBWDTWQR(scoreToBeat);
                if (minDist > bestMinDist) bestMinDist = minDist;
                if (bestMinDist >= scoreToBeat) {
                    if (indexStoppedLB < query.numAttributes() - 1) status = LBStatus.Partial_LB_WDTWQR;
                    else status = LBStatus.Full_LB_WDTWQR;
                    return RefineReturnType.Pruned_with_LB;
                } else status = LBStatus.Full_LB_WDTWQR;
                // intentional fall-through: QR bound exhausted, try RQ direction
            case Full_LB_WDTWQR:
                indexStoppedLB = 0;
                minDist = 0;
            case Partial_LB_WDTWRQ:
                tryContinueLBWDTWRQ(scoreToBeat);
                if (minDist > bestMinDist) bestMinDist = minDist;
                if (bestMinDist >= scoreToBeat) {
                    // NOTE(review): checks query length although the RQ scan runs over
                    // reference.numAttributes() - 1 (compare LazyAssessNN_ERP, which uses
                    // reference here). Equivalent for equal-length series; confirm intent.
                    if (indexStoppedLB < query.numAttributes() - 1) status = LBStatus.Partial_LB_WDTWRQ;
                    else status = LBStatus.Full_LB_WDTWRQ;
                    return RefineReturnType.Pruned_with_LB;
                } else status = LBStatus.Full_LB_WDTWRQ;
                // intentional fall-through: all LBs failed, pay for the full distance
            case Full_LB_WDTWRQ:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_LB;
                minDist = WeightedDTW.distance(query, reference, weightVector);
                if (minDist > bestMinDist) bestMinDist = minDist;
                status = LBStatus.Full_WDTW;
            case Full_WDTW:
                if (bestMinDist >= scoreToBeat) return RefineReturnType.Pruned_with_Dist;
                else return RefineReturnType.New_best;
            default:
                throw new RuntimeException("Case not managed");
        }
    }

    /** @return the last computed bound or full distance (no validity check here) */
    public double getDistance() {
        return minDist;
    }

    @Override
    public void setFullDistStatus() {
        this.status = LBStatus.Full_WDTW;
    }

    /**
     * Ranking key for ordering candidate pairs: the best bound so far, normalised
     * by the number of contributing points; 0.8 discounts a stale full distance.
     */
    @Override
    public double getDoubleValueForRanking() {
        double thisD = this.bestMinDist;
        switch (status) {
            // WDTW
            case Full_WDTW:
            case Full_LB_WDTWQR:
            case Full_LB_WDTWRQ:
                return thisD / (query.numAttributes() - 1);
            case Partial_LB_WDTWQR:
            case Partial_LB_WDTWRQ:
                return thisD / indexStoppedLB;
            case Previous_WDTW:
                return 0.8 * thisD / (query.numAttributes() - 1);
            case Previous_LB_WDTW:
                return thisD / oldIndexStoppedLB;
            case None:
                return Double.POSITIVE_INFINITY;
            default:
                throw new RuntimeException("shouldn't come here");
        }
    }
}
9,064
38.413043
112
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/lowerBounds/LbEnhanced.java
/* Copyright (C) 2019 Chang Wei Tan
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds;

import weka.core.Instance;

/**
 * A class to compute the Enhanced lower bound for DTW distance.
 * Combines exact band distances for the first/last {@code v} alignments with an
 * LB_Keogh-style envelope bound over the untouched middle section.
 * @inproceedings{tan2019elastic,
 *   title={Elastic bands across the path: A new framework and method to lower bound DTW},
 *   author={Tan, Chang Wei and Petitjean, Fran{\c{c}}ois and Webb, Geoffrey I},
 *   booktitle={Proceedings of the 2019 SIAM International Conference on Data Mining},
 *   pages={522--530},
 *   year={2019},
 *   organization={SIAM}
 * }
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LbEnhanced {
    /**
     * LB_Enhanced with early abandoning: returns POSITIVE_INFINITY as soon as the
     * partially accumulated bound reaches {@code cutOffValue}.
     *
     * @param a           first series (class label in the last attribute, hence -1)
     * @param b           second series
     * @param U           upper envelope of {@code b} within window {@code w}
     * @param L           lower envelope of {@code b} within window {@code w}
     * @param w           warping window size
     * @param v           number of left/right bands computed exactly
     * @param cutOffValue abandon threshold (e.g. current best-so-far distance)
     * @return lower bound on DTW(a, b), or POSITIVE_INFINITY if abandoned
     */
    public static double distance(final Instance a, final Instance b,
                                  final double[] U, final double[] L,
                                  final int w, final int v, final double cutOffValue) {
        final int n = a.numAttributes() - 1;
        final int m = b.numAttributes() - 1;
        final int l = n - 1;
        // Cap the number of bands at half the series so left and right sets never overlap.
        final int nBands = Math.min(l / 2, v);
        final int lastIndex = l - nBands;
        // The warping path always aligns the two end-point pairs; add them exactly.
        final double d00 = a.value(0) - b.value(0);
        final double dnm = a.value(n - 1) - b.value(m - 1);
        int i, j, rightEnd, rightStart;
        double minL, minR, tmp, aVal;
        double res = d00 * d00 + dnm * dnm;
        // Exact band contributions: for band i, the cheapest alignment of point i
        // (left side) and its mirror l - i (right side) within window w.
        for (i = 1; i < nBands; i++) {
            rightEnd = l - i;
            minL = a.value(i) - b.value(i);
            minL *= minL;
            minR = a.value(rightEnd) - b.value(rightEnd);
            minR *= minR;
            for (j = Math.max(0, i - w); j < i; j++) {
                rightStart = l - j;
                // Off-diagonal pairings (i,j) and (j,i) on the left...
                tmp = a.value(i) - b.value(j);
                minL = Math.min(minL, tmp * tmp);
                tmp = a.value(j) - b.value(i);
                minL = Math.min(minL, tmp * tmp);
                // ...and their mirrored counterparts on the right.
                tmp = a.value(rightEnd) - b.value(rightStart);
                minR = Math.min(minR, tmp * tmp);
                tmp = a.value(rightStart) - b.value(rightEnd);
                minR = Math.min(minR, tmp * tmp);
            }
            res += minL + minR;
        }
        // Early abandon before the (cheaper per-point) middle envelope pass.
        if (res >= cutOffValue) return Double.POSITIVE_INFINITY;
        // LB_Keogh over the middle section not covered by the exact bands.
        for (i = nBands; i <= lastIndex; i++) {
            aVal = a.value(i);
            if (aVal > U[i]) {
                tmp = aVal - U[i];
                res += tmp * tmp;
            } else if (aVal < L[i]) {
                tmp = L[i] - aVal;
                res += tmp * tmp;
            }
        }
        return res;
    }

    /**
     * LB_Enhanced without early abandoning; identical accumulation to
     * {@link #distance(Instance, Instance, double[], double[], int, int, double)}
     * minus the cut-off check.
     *
     * @return lower bound on DTW(a, b)
     */
    public static double distance(final Instance a, final Instance b,
                                  final double[] U, final double[] L,
                                  final int w, final int v) {
        final int n = a.numAttributes() - 1;
        final int m = b.numAttributes() - 1;
        final int l = n - 1;
        final int nBands = Math.min(l / 2, v);
        final int lastIndex = l - nBands;
        final double d00 = a.value(0) - b.value(0);
        final double dnm = a.value(n - 1) - b.value(m - 1);
        int i, j, rightEnd, rightStart;
        double minL, minR, tmp, aVal;
        double res = d00 * d00 + dnm * dnm;
        // Exact band contributions (see the cut-off overload for details).
        for (i = 1; i < nBands; i++) {
            rightEnd = l - i;
            minL = a.value(i) - b.value(i);
            minL *= minL;
            minR = a.value(rightEnd) - b.value(rightEnd);
            minR *= minR;
            for (j = Math.max(0, i - w); j < i; j++) {
                rightStart = l - j;
                tmp = a.value(i) - b.value(j);
                minL = Math.min(minL, tmp * tmp);
                tmp = a.value(j) - b.value(i);
                minL = Math.min(minL, tmp * tmp);
                tmp = a.value(rightEnd) - b.value(rightStart);
                minR = Math.min(minR, tmp * tmp);
                tmp = a.value(rightStart) - b.value(rightEnd);
                minR = Math.min(minR, tmp * tmp);
            }
            res += minL + minR;
        }
        // LB_Keogh over the middle section.
        for (i = nBands; i <= lastIndex; i++) {
            aVal = a.value(i);
            if (aVal > U[i]) {
                tmp = aVal - U[i];
                res += tmp * tmp;
            } else if (aVal < L[i]) {
                tmp = L[i] - aVal;
                res += tmp * tmp;
            }
        }
        return res;
    }
}
4,974
34.791367
90
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/lowerBounds/LbErp.java
/* Copyright (C) 2019 Chang Wei Tan
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds;

import weka.core.Instance;

/**
 * A class to compute the lower bounds for the ERP distance
 * Lower bound function for ERP distance
 *
 * Note: the previous implementation shared results through a static mutable
 * field, which was not thread-safe; the two-series sum is now returned
 * directly instead.
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LbErp {
    /**
     * Proposed lower bound function for ERP
     * |sum(Q)-sum(C)|
     * Modified slightly to have g value
     * http://www.vldb.org/conf/2004/RS21P2.PDF
     * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.90.6387&rep=rep1&type=pdf
     *
     * @param a first time series
     * @param b second time series
     * @param g g parameter
     * @return LB ERP distance distance
     */
    public static double distance(final Instance a, final Instance b, final double g) {
        final int m = a.numAttributes() - 1;   // series length excluding the class attribute
        final int n = b.numAttributes() - 1;

        if (m == n) {
            // Equal lengths: accumulate both sums in a single pass.
            final double[] s = sum2(a, b, g);
            return Math.abs(s[0] - s[1]);
        } else {
            return Math.abs(sum(a, g) - sum(b, g));
        }
    }

    /**
     * Sum of |value - g| over all points of one sequence.
     *
     * @param a time series
     * @param g g parameter value
     * @return sum
     */
    private static double sum(final Instance a, final double g) {
        double s = 0;
        for (int i = 0; i < a.numAttributes() - 1; i++) {
            s += Math.abs(a.value(i) - g);
        }
        return s;
    }

    /**
     * Sums |value - g| for two equal-length sequences in one pass.
     * Only called when both series have the same length.
     *
     * @param a first time series
     * @param b second time series
     * @param g g parameter value
     * @return {sum over a, sum over b}
     */
    private static double[] sum2(final Instance a, final Instance b, final double g) {
        final double[] s = new double[2];
        for (int i = 0; i < a.numAttributes() - 1; i++) {
            s[0] += Math.abs(a.value(i) - g);
            s[1] += Math.abs(b.value(i) - g);
        }
        return s;
    }

    /**
     * Build the upper and lower envelope for LB Keogh with modification for ERP.
     * The envelope always includes g, so a point equal to g costs nothing.
     *
     * @param a        time series
     * @param g        g parameter value
     * @param bandSize size of the warping window (as a fraction of the series length)
     * @param U        upper envelope (output)
     * @param L        lower envelope (output)
     */
    public static void fillUL(final Instance a, final double g, final double bandSize,
                              final double[] U, final double[] L) {
        final int length = a.numAttributes() - 1;
        final int r = (int) Math.ceil(length * bandSize);
        double min, max;

        for (int i = 0; i < length; i++) {
            // Seed with g so the envelope always contains the gap value.
            min = g;
            max = g;
            final int startR = Math.max(0, i - r);
            final int stopR = Math.min(length - 1, i + r);
            for (int j = startR; j <= stopR; j++) {
                final double value = a.value(j);
                min = Math.min(min, value);
                max = Math.max(max, value);
            }
            U[i] = max;
            L[i] = min;
        }
    }

    /**
     * LB Keogh version for ERP
     *
     * @param a time series
     * @param U upper envelope
     * @param L lower envelope
     * @return LB Keogh distance for ERP
     */
    public static double distance(final Instance a, final double[] U, final double[] L) {
        return LbKeogh.distance(a, U, L);
    }

    /**
     * LB Keogh version for ERP with early abandon
     *
     * @param a           time series
     * @param U           upper envelope
     * @param L           lower envelope
     * @param cutOffValue cutoff value for early abandon
     * @return LB Keogh distance for ERP
     */
    public static double distance(final Instance a, final double[] U, final double[] L,
                                  final double cutOffValue) {
        return LbKeogh.distance(a, U, L, cutOffValue);
    }
}
4,430
31.343066
124
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/lowerBounds/LbImproved.java
/* Copyright (C) 2019 Chang Wei Tan
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds;

import weka.core.Instance;

/**
 * A class to compute Lb Improved for DTW distance
 * @article{lemire2009faster,
 *   title={Faster retrieval with a two-pass dynamic-time-warping lower bound},
 *   author={Lemire, Daniel},
 *   journal={Pattern recognition},
 *   volume={42},
 *   number={9},
 *   pages={2169--2180},
 *   year={2009},
 *   publisher={Elsevier}
 * }
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LbImproved {
    /**
     * Two-pass LB Improved. First pass is LB Keogh of a against b's envelope,
     * recording the projection y of a onto that envelope; second pass is LB
     * Keogh of b against the envelope of y.
     *
     * @param a  query sequence
     * @param b  reference sequence
     * @param Ub upper envelope, presumably built from b with window r -- verify at call site
     * @param Lb lower envelope of b
     * @param r  warping window size in points
     * @return lower bound on the DTW distance
     */
    public static double distance(final Instance a, final Instance b,
                                  final double[] Ub, final double[] Lb, final int r) {
        final int length = Math.min(Ub.length, a.numAttributes() - 1);
        final double[] y = new double[length];   // projection of a onto [Lb, Ub]
        final double[] Ux = new double[length];
        final double[] Lx = new double[length];
        int i;
        double res = 0;
        double diff, c;

        // Pass 1: LB Keogh(a, envelope of b), building the clamped series y.
        for (i = 0; i < length; i++) {
            c = a.value(i);
            if (c < Lb[i]) {
                diff = Lb[i] - c;
                res += diff * diff;
                y[i] = Lb[i];
            } else if (Ub[i] < c) {
                diff = c - Ub[i];
                res += diff * diff;
                y[i] = Ub[i];
            } else {
                y[i] = c;
            }
        }

        // Pass 2: LB Keogh(b, envelope of y) adds the complementary cost.
        LbKeogh.fillULStreaming(y, r, Ux, Lx);
        for (i = 0; i < length; i++) {
            c = b.value(i);
            if (c < Lx[i]) {
                diff = Lx[i] - c;
                res += diff * diff;
            } else if (Ux[i] < c) {
                diff = c - Ux[i];
                res += diff * diff;
            }
        }

        return res;
    }

    /**
     * LB Improved with early abandoning: the (more expensive) second pass is
     * only run when the first pass alone does not already reach the cutoff.
     *
     * NOTE(review): this overload was an instance method while every other
     * lower bound in this package is static; made static for consistency.
     * This is source-compatible for callers invoking it through an instance
     * reference.
     *
     * @param a           query sequence
     * @param b           reference sequence
     * @param Ub          upper envelope of b
     * @param Lb          lower envelope of b
     * @param r           warping window size in points
     * @param cutOffValue best-so-far distance
     * @return lower bound on the DTW distance (possibly only the first-pass bound
     *         when it already reaches the cutoff)
     */
    public static double distance(final Instance a, final Instance b,
                                  final double[] Ub, final double[] Lb, final int r,
                                  final double cutOffValue) {
        final int length = Math.min(Ub.length, a.numAttributes() - 1);
        final double[] y = new double[length];
        int i;
        double res = 0;
        double diff, c;

        // Pass 1 (always executed).
        for (i = 0; i < length; i++) {
            c = a.value(i);
            if (c < Lb[i]) {
                diff = Lb[i] - c;
                res += diff * diff;
                y[i] = Lb[i];
            } else if (Ub[i] < c) {
                diff = c - Ub[i];
                res += diff * diff;
                y[i] = Ub[i];
            } else {
                y[i] = c;
            }
        }

        // Pass 2 only pays off when the candidate is still viable.
        if (res < cutOffValue) {
            final double[] Ux = new double[length];
            final double[] Lx = new double[length];
            LbKeogh.fillULStreaming(y, r, Ux, Lx);
            for (i = 0; i < length; i++) {
                c = b.value(i);
                if (c < Lx[i]) {
                    diff = Lx[i] - c;
                    res += diff * diff;
                } else if (Ux[i] < c) {
                    diff = c - Ux[i];
                    res += diff * diff;
                }
            }
        }

        return res;
    }
}
3,690
30.818966
141
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/lowerBounds/LbKeogh.java
/* Copyright (C) 2019 Chang Wei Tan
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds;

import tsml.classifiers.legacy.elastic_ensemble.distance_functions.DTW;
import tsml.transformers.Derivative;
import weka.core.Instance;

import java.util.ArrayDeque;
import java.util.Deque;

/**
 * A class to compute the LB Keogh for DTW distance
 * @article{keogh2005exact,
 *   title={Exact indexing of dynamic time warping},
 *   author={Keogh, Eamonn and Ratanamahatana, Chotirat Ann},
 *   journal={Knowledge and information systems},
 *   volume={7},
 *   number={3},
 *   pages={358--386},
 *   year={2005},
 *   publisher={Springer}
 * }
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LbKeogh {

    /**
     * Builds the warping-window envelope of a raw double series.
     *
     * @param sequence series values
     * @param window   warping window size in points
     * @param U        upper envelope (output)
     * @param L        lower envelope (output)
     */
    public static void fillUL(final double[] sequence, final int window, final double[] U, final double[] L) {
        final int length = sequence.length;
        double min, max;

        for (int i = 0; i < length; i++) {
            min = Double.POSITIVE_INFINITY;
            max = Double.NEGATIVE_INFINITY;
            final int startR = Math.max(0, i - window);
            final int stopR = Math.min(length - 1, i + window);
            for (int j = startR; j <= stopR; j++) {
                final double value = sequence[j];
                min = Math.min(min, value);
                max = Math.max(max, value);
            }
            L[i] = min;
            U[i] = max;
        }
    }

    /**
     * Builds the envelope of a weka Instance; the final attribute is assumed to
     * be the class value and is excluded.
     *
     * @param sequence series (weka format)
     * @param window   warping window size in points
     * @param U        upper envelope (output)
     * @param L        lower envelope (output)
     */
    public static void fillUL(final Instance sequence, final int window, final double[] U, final double[] L) {
        final int length = sequence.numAttributes() - 1;
        double min, max;

        for (int i = 0; i < length; i++) {
            min = Double.POSITIVE_INFINITY;
            max = Double.NEGATIVE_INFINITY;
            final int startR = Math.max(0, i - window);
            final int stopR = Math.min(length - 1, i + window);
            for (int j = startR; j <= stopR; j++) {
                final double value = sequence.value(j);
                min = Math.min(min, value);
                max = Math.max(max, value);
            }
            L[i] = min;
            U[i] = max;
        }
    }

    /**
     * Builds the envelope with the window given as a fraction of the series length.
     *
     * Fixed: previously used {@code numAttributes()} here, which included the
     * trailing class attribute (off by one versus the other overloads, and able
     * to index past the end of U/L when they are sized for the series only).
     *
     * @param sequence series (weka format)
     * @param r        warping window as a fraction of the series length
     * @param U        upper envelope (output)
     * @param L        lower envelope (output)
     */
    public static void fillUL(final Instance sequence, final double r, final double[] U, final double[] L) {
        final int length = sequence.numAttributes() - 1;
        double min, max;
        final int window = DTW.getWindowSize(length, r);

        for (int i = 0; i < length; i++) {
            min = Double.POSITIVE_INFINITY;
            max = Double.NEGATIVE_INFINITY;
            final int startR = Math.max(0, i - window);
            final int stopR = Math.min(length - 1, i + window);
            for (int j = startR; j <= stopR; j++) {
                final double value = sequence.value(j);
                min = Math.min(min, value);
                max = Math.max(max, value);
            }
            L[i] = min;
            U[i] = max;
        }
    }

    /**
     * Streaming (Lemire-style) envelope computation in O(n) using two monotonic
     * deques of candidate indices; u tracks maxima, l tracks minima.
     * The deque/index bookkeeping is order-sensitive and kept exactly as is.
     *
     * @param y series values
     * @param r warping window size in points
     * @param U upper envelope (output)
     * @param L lower envelope (output)
     */
    public static void fillULStreaming(final double[] y, final int r, final double[] U, final double[] L) {
        Deque<Integer> u = new ArrayDeque<>();
        Deque<Integer> l = new ArrayDeque<>();
        u.addLast(0);
        l.addLast(0);
        final int width = 1 + 2 * r;   // full window covers r points on each side
        int i;
        for (i = 1; i < y.length; ++i) {
            // Once index i-r-1 can no longer be affected, emit its envelope values.
            if (i >= r + 1) {
                U[i - r - 1] = y[u.getFirst()];
                L[i - r - 1] = y[l.getFirst()];
            }
            // Maintain monotonicity: pop dominated candidates from the back.
            if (y[i] > y[i - 1]) {
                u.removeLast();
                while (u.size() > 0) {
                    if (y[i] <= y[u.getLast()]) break;
                    u.removeLast();
                }
            } else {
                l.removeLast();
                while (l.size() > 0) {
                    if (y[i] >= y[l.getLast()]) break;
                    l.removeLast();
                }
            }
            u.addLast(i);
            l.addLast(i);
            // Drop front candidates that have slid out of the window.
            if (i == width + u.getFirst()) {
                u.removeFirst();
            } else if (i == width + l.getFirst()) {
                l.removeFirst();
            }
        }
        // Flush the tail of the series.
        for (i = y.length; i <= y.length + r; ++i) {
            final int index = Math.max(i - r - 1, 0);
            U[index] = y[u.getFirst()];
            L[index] = y[l.getFirst()];
            if (i - u.getFirst() >= width) {
                u.removeFirst();
            }
            if (i - l.getFirst() >= width) {
                l.removeFirst();
            }
        }
    }

    /**
     * LB Keogh: sum of squared excursions of a outside the envelope [L, U].
     *
     * @param a series (weka format)
     * @param U upper envelope
     * @param L lower envelope
     * @return lower bound on the DTW distance
     */
    public static double distance(final Instance a, final double[] U, final double[] L) {
        final int length = Math.min(U.length, a.numAttributes() - 1);

        double res = 0;
        for (int i = 0; i < length; i++) {
            final double c = a.value(i);
            if (c < L[i]) {
                final double diff = L[i] - c;
                res += diff * diff;
            } else if (U[i] < c) {
                final double diff = U[i] - c;
                res += diff * diff;
            }
        }

        return res;
    }

    /**
     * LB Keogh with early abandoning.
     *
     * @param a           series (weka format)
     * @param U           upper envelope
     * @param L           lower envelope
     * @param cutOffValue best-so-far distance; returns Double.MAX_VALUE once reached
     * @return lower bound on the DTW distance, or Double.MAX_VALUE if abandoned
     */
    public static double distance(final Instance a, final double[] U, final double[] L, final double cutOffValue) {
        final int length = Math.min(U.length, a.numAttributes() - 1);

        double res = 0;
        for (int i = 0; i < length; i++) {
            final double c = a.value(i);
            if (c < L[i]) {
                final double diff = L[i] - c;
                res += diff * diff;
                if (res >= cutOffValue)
                    return Double.MAX_VALUE;
            } else if (U[i] < c) {
                final double diff = U[i] - c;
                res += diff * diff;
                if (res >= cutOffValue)
                    return Double.MAX_VALUE;
            }
        }

        return res;
    }
}
6,474
33.441489
115
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/lowerBounds/LbKim.java
/* Copyright (C) 2019 Chang Wei Tan
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache;
import weka.core.Instance;

/**
 * A class to compute Lb Kim for DTW distance. Slightly modified from the original paper.
 * @inproceedings{kim2001index,
 *   title={An index-based approach for similarity search supporting time warping in large sequence databases},
 *   author={Kim, Sang-Wook and Park, Sanghyun and Chu, Wesley W},
 *   booktitle={Proceedings 17th International Conference on Data Engineering},
 *   pages={607--614},
 *   year={2001},
 *   organization={IEEE}
 * }
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LbKim {
    /**
     * LB Kim computed from scratch: squared distance of the aligned first and
     * last points, plus the squared distances of the global minima/maxima when
     * those extrema do not coincide with the endpoints.
     * Assumes both series have the same length -- the single scan uses the
     * query's length for both; TODO confirm at the call site.
     *
     * @param query     query sequence (weka format; last attribute is the class value)
     * @param reference reference sequence
     * @return lower bound on the DTW distance
     */
    public static double distance(final Instance query, final Instance reference) {
        double maxQ = query.value(0), maxR = reference.value(0);
        double minQ = query.value(0), minR = reference.value(0);
        final double diffFirsts = maxQ - maxR;
        final double diffLasts = query.value(query.numAttributes() - 2) -
                reference.value(reference.numAttributes() - 2);
        double minDist = diffFirsts * diffFirsts + diffLasts * diffLasts;
        boolean minFirstLastQ = true, minFirstLastR = true;
        boolean maxFirstLastQ = true, maxFirstLastR = true;

        // Single scan tracking extrema and whether they occur at the endpoints.
        for (int i = 1; i < query.numAttributes() - 1; i++) {
            if (query.value(i) > maxQ) {
                maxQ = query.value(i);
                maxFirstLastQ = false;
            } else if (query.value(i) < minQ) {
                minQ = query.value(i);
                minFirstLastQ = false;
            }
            if (reference.value(i) > maxR) {
                maxR = reference.value(i);
                maxFirstLastR = false;
            } else if (reference.value(i) < minR) {
                minR = reference.value(i);
                minFirstLastR = false;
            }
        }

        // Only add extrema terms when they are not already counted as endpoints.
        if (!(minFirstLastQ && minFirstLastR)) {
            final double diffMin = minQ - minR;
            minDist += diffMin * diffMin;
        }
        if (!(maxFirstLastQ && maxFirstLastR)) {
            final double diffMax = maxQ - maxR;
            minDist += diffMax * diffMax;
        }

        return minDist;
    }

    /**
     * LB Kim using two precomputed stats caches (no scan needed).
     *
     * @param query          query sequence
     * @param reference      reference sequence
     * @param queryCache     stats cache for the query dataset
     * @param referenceCache stats cache for the reference dataset
     * @param indexQuery     index of the query in its cache
     * @param indexReference index of the reference in its cache
     * @return lower bound on the DTW distance
     */
    public static double distance(final Instance query, final Instance reference,
                                  final SequenceStatsCache queryCache,
                                  final SequenceStatsCache referenceCache,
                                  final int indexQuery, final int indexReference) {
        final double diffFirsts = query.value(0) - reference.value(0);
        final double diffLasts = query.value(query.numAttributes() - 2) -
                reference.value(reference.numAttributes() - 2);
        double minDist = diffFirsts * diffFirsts + diffLasts * diffLasts;

        if (!queryCache.isMinFirst(indexQuery) && !referenceCache.isMinFirst(indexReference) &&
                !queryCache.isMinLast(indexQuery) && !referenceCache.isMinLast(indexReference)) {
            final double diffMin = queryCache.getMin(indexQuery) - referenceCache.getMin(indexReference);
            minDist += diffMin * diffMin;
        }
        if (!queryCache.isMaxFirst(indexQuery) && !referenceCache.isMaxFirst(indexReference) &&
                !queryCache.isMaxLast(indexQuery) && !referenceCache.isMaxLast(indexReference)) {
            final double diffMax = queryCache.getMax(indexQuery) - referenceCache.getMax(indexReference);
            minDist += diffMax * diffMax;
        }

        return minDist;
    }

    /**
     * LB Kim with a single shared stats cache.
     *
     * Fixed: this overload initialised the bound with the raw signed sum
     * {@code diffFirsts + diffLasts} (which can be negative and disagrees with
     * the two overloads above); it now squares the endpoint differences like
     * the other overloads.
     *
     * @param query          query sequence
     * @param reference      reference sequence
     * @param cache          stats cache holding both sequences
     * @param indexQuery     index of the query in the cache
     * @param indexReference index of the reference in the cache
     * @return lower bound on the DTW distance
     */
    public static double distance(final Instance query, final Instance reference,
                                  final SequenceStatsCache cache,
                                  final int indexQuery, final int indexReference) {
        final double diffFirsts = query.value(0) - reference.value(0);
        final double diffLasts = query.value(query.numAttributes() - 2) -
                reference.value(reference.numAttributes() - 2);
        double minDist = diffFirsts * diffFirsts + diffLasts * diffLasts;

        if (!cache.isMinFirst(indexQuery) && !cache.isMinFirst(indexReference) &&
                !cache.isMinLast(indexQuery) && !cache.isMinLast(indexReference)) {
            final double diffMin = cache.getMin(indexQuery) - cache.getMin(indexReference);
            minDist += diffMin * diffMin;
        }
        if (!cache.isMaxFirst(indexQuery) && !cache.isMaxFirst(indexReference) &&
                !cache.isMaxLast(indexQuery) && !cache.isMaxLast(indexReference)) {
            final double diffMax = cache.getMax(indexQuery) - cache.getMax(indexReference);
            minDist += diffMax * diffMax;
        }

        return minDist;
    }
}
5,322
45.286957
121
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/lowerBounds/LbLcss.java
/* Copyright (C) 2019 Chang Wei Tan
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds;

import weka.core.Instance;

/**
 * A class to compute the lower bound function for LCSS distance
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LbLcss {
    /**
     * Build the upper and lower envelope for Lb Keogh with modification for LCSS
     *
     * @param candidate candidate sequence (weka format; last attribute is the class value)
     * @param epsilon   epsilon value (envelope is widened by epsilon on each side)
     * @param delta     delta value (window size in points)
     * @param U         upper envelope (output)
     * @param L         lower envelope (output)
     */
    public static void fillUL(final Instance candidate, final double epsilon, final int delta,
                              final double[] U, final double[] L) {
        final int length = candidate.numAttributes() - 1;
        double min, max;

        for (int i = 0; i < length; i++) {
            min = Double.POSITIVE_INFINITY;
            max = Double.NEGATIVE_INFINITY;
            // Window is [i - delta, i + delta] clipped to the series bounds.
            final int startR = (i - delta < 0) ? 0 : i - delta;
            final int stopR = (i + delta + 1 > length) ? length : i + delta + 1;
            for (int j = startR; j < stopR; j++) {
                final double value = candidate.value(j);
                min = Math.min(min, value);
                max = Math.max(max, value);
            }
            // Widen by epsilon so any point within epsilon of the window counts as a match.
            L[i] = min - epsilon;
            U[i] = max + epsilon;
        }
    }

    /**
     * Lower bound for LCSS distance: one minus the fraction of points of c that
     * fall inside the envelope [L, U].
     *
     * @param c candidate sequence
     * @param U upper envelope
     * @param L lower envelope
     * @return lower bound distance (in [0, 1])
     */
    public static double distance(final Instance c, final double[] U, final double[] L) {
        final int length = Math.min(U.length, c.numAttributes() - 1);

        double lcs = 0;
        for (int i = 0; i < length; i++) {
            if (c.value(i) <= U[i] && c.value(i) >= L[i]) {
                lcs++;
            }
        }

        return 1 - lcs / length;
    }

    /**
     * Lower bound for LCSS distance with early abandoning
     *
     * NOTE(review): the abandon test below ({@code lcs <= ub}) fires while lcs
     * can still grow -- with ub = (1 - cutOffValue) * length it is typically true
     * at the very first in-envelope point, returning 1 (prune) prematurely. A
     * conventional abandon test would bound the best still-achievable lcs, e.g.
     * {@code lcs + (length - 1 - i) <= ub}. Left unchanged pending confirmation
     * against the original FastEE implementation.
     *
     * @param c           candidate sequence
     * @param U           upper envelope
     * @param L           lower envelope
     * @param cutOffValue cutoff value for early abandoning
     * @return lower bound distance, or 1 if abandoned
     */
    public static double distance(final Instance c, final double[] U, final double[] L,
                                  final double cutOffValue) {
        final int length = Math.min(U.length, c.numAttributes() - 1);
        // ub is the lcs count corresponding to the cutoff distance.
        final double ub = (1.0 - cutOffValue) * length;

        double lcs = 0;
        for (int i = 0; i < length; i++) {
            if (c.value(i) <= U[i] && c.value(i) >= L[i]) {
                lcs++;
                if (lcs <= ub) return 1;
            }
        }

        return 1 - lcs / length;
    }
}
3,411
33.12
132
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/lowerBounds/LbMsm.java
/* Copyright (C) 2019 Chang Wei Tan
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache;
import weka.core.Instance;

/**
 * A class to compute the lower bounds for MSM distance
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LbMsm {
    /**
     * Lower bound distance for MSM.
     * Charges each point of c that monotonically moves outside the query's
     * [qMin, qMax] range the cheaper of the out-of-range gap and the split/merge
     * cost cc.
     *
     * @param q    first time series (weka format; last attribute is the class value)
     * @param c    second time series
     * @param cc   c param (split/merge cost)
     * @param qMax max of the first time series
     * @param qMin min of the first time series
     * @return lower bound distance
     */
    public static double distance(final Instance q, final Instance c, final double cc,
                                  final double qMax, final double qMin) {
        final int seriesLen = q.numAttributes() - 1;
        double total = Math.abs(q.value(0) - c.value(0));

        for (int idx = 1; idx < seriesLen; idx++) {
            final double cur = c.value(idx);
            final double before = c.value(idx - 1);
            if (before >= cur && cur > qMax) {
                // Descending above the query range: pay min(gap, cc).
                total += Math.min(Math.abs(cur - qMax), cc);
            } else if (before <= cur && cur < qMin) {
                // Ascending below the query range: pay min(gap, cc).
                total += Math.min(Math.abs(cur - qMin), cc);
            }
        }

        return total;
    }

    /**
     * Lower bound distance for MSM with early abandon.
     *
     * @param q           first time series
     * @param c           second time series
     * @param cc          c param (split/merge cost)
     * @param qMax        max of the first time series
     * @param qMin        min of the first time series
     * @param cutOffValue cutoff value for early abandon
     * @return lower bound distance, or Double.MAX_VALUE once the cutoff is reached
     */
    public static double distance(final Instance q, final Instance c, final double cc,
                                  final double qMax, final double qMin, final double cutOffValue) {
        final int seriesLen = q.numAttributes() - 1;
        double total = Math.abs(q.value(0) - c.value(0));

        for (int idx = 1; idx < seriesLen; idx++) {
            final double cur = c.value(idx);
            final double before = c.value(idx - 1);
            if (before >= cur && cur > qMax) {
                total += Math.min(Math.abs(cur - qMax), cc);
                if (total >= cutOffValue) return Double.MAX_VALUE;
            } else if (before <= cur && cur < qMin) {
                total += Math.min(Math.abs(cur - qMin), cc);
                if (total >= cutOffValue) return Double.MAX_VALUE;
            }
        }

        return total;
    }

    /**
     * Lb Kim style bound for MSM using precomputed per-sequence stats:
     * absolute differences of the aligned endpoints, plus extrema terms when
     * the extrema do not coincide with an endpoint of either series.
     *
     * @param q              query sequence
     * @param c              reference sequence
     * @param queryCache     cache for query
     * @param candidateCache cache for reference
     * @param indexQuery     query index
     * @param indexCandidate reference index
     * @return lower bound distance
     */
    public static double distance(final Instance q, final Instance c,
                                  final SequenceStatsCache queryCache,
                                  final SequenceStatsCache candidateCache,
                                  final int indexQuery, final int indexCandidate) {
        final double firstGap = Math.abs(q.value(0) - c.value(0));
        final double lastGap = Math.abs(q.value(q.numAttributes() - 2) - c.value(c.numAttributes() - 2));
        double bound = firstGap + lastGap;

        final boolean minTermUsable =
                !queryCache.isMinFirst(indexQuery) && !candidateCache.isMinFirst(indexCandidate) &&
                !queryCache.isMinLast(indexQuery) && !candidateCache.isMinLast(indexCandidate);
        if (minTermUsable) {
            bound += Math.abs(queryCache.getMin(indexQuery) - candidateCache.getMin(indexCandidate));
        }

        final boolean maxTermUsable =
                !queryCache.isMaxFirst(indexQuery) && !candidateCache.isMaxFirst(indexCandidate) &&
                !queryCache.isMaxLast(indexQuery) && !candidateCache.isMaxLast(indexCandidate);
        if (maxTermUsable) {
            bound += Math.abs(queryCache.getMax(indexQuery) - candidateCache.getMax(indexCandidate));
        }

        return bound;
    }
}
4,672
38.940171
152
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/lowerBounds/LbTwed.java
/* Copyright (C) 2019 Chang Wei Tan
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds;

import weka.core.Instance;

/**
 * A class to compute the lower bound for TWE distance.
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LbTwed {
    /**
     * Lower bound for TWED distance.
     * The first point pays the cheapest of match / delete-in-q / delete-in-c;
     * every later point of c pays min(nu, squared gap) only when it leaves the
     * range spanned by [qMin, qMax] and its own predecessor.
     *
     * @param q      first time series (weka format; last attribute is the class value)
     * @param c      second time series
     * @param qMax   max of the first time series
     * @param qMin   min of the first time series
     * @param nu     stiffness parameter
     * @param lambda constant penalty
     * @return lower bound distance
     */
    public static double distance(final Instance q, final Instance c,
                                  final double qMax, final double qMin,
                                  final double nu, final double lambda) {
        final int seriesLen = q.numAttributes() - 1;
        final double qFirst = q.value(0);
        final double cFirst = c.value(0);

        // Cheapest way to account for the first pair of points.
        final double firstGap = qFirst - cFirst;
        final double matchCost = firstGap * firstGap;
        final double deleteQCost = qFirst * qFirst + nu + lambda;
        final double deleteCCost = cFirst * cFirst + nu + lambda;
        double total = Math.min(matchCost, Math.min(deleteQCost, deleteCCost));

        for (int idx = 1; idx < seriesLen; idx++) {
            final double cur = c.value(idx);
            final double before = c.value(idx - 1);
            final double upper = Math.max(qMax, before);
            final double lower = Math.min(qMin, before);
            if (cur < lower) {
                final double gap = lower - cur;
                total += Math.min(nu, gap * gap);
            } else if (upper < cur) {
                final double gap = cur - upper;
                total += Math.min(nu, gap * gap);
            }
        }

        return total;
    }

    /**
     * Lower bound for TWED distance with early abandon: returns as soon as the
     * running bound reaches the cutoff.
     *
     * @param q           first time series
     * @param c           second time series
     * @param qMax        max of the first time series
     * @param qMin        min of the first time series
     * @param nu          stiffness parameter
     * @param lambda      constant penalty
     * @param cutOffValue cutoff value for early abandon
     * @return lower bound distance (possibly a partial sum that already reaches the cutoff)
     */
    public static double distance(final Instance q, final Instance c,
                                  final double qMax, final double qMin,
                                  final double nu, final double lambda,
                                  final double cutOffValue) {
        final int seriesLen = q.numAttributes() - 1;
        final double qFirst = q.value(0);
        final double cFirst = c.value(0);

        final double firstGap = qFirst - cFirst;
        final double matchCost = firstGap * firstGap;
        final double deleteQCost = qFirst * qFirst + nu + lambda;
        final double deleteCCost = cFirst * cFirst + nu + lambda;
        double total = Math.min(matchCost, Math.min(deleteQCost, deleteCCost));
        if (total >= cutOffValue) return total;

        for (int idx = 1; idx < seriesLen; idx++) {
            final double cur = c.value(idx);
            final double before = c.value(idx - 1);
            final double upper = Math.max(qMax, before);
            final double lower = Math.min(qMin, before);
            if (cur < lower) {
                final double gap = lower - cur;
                total += Math.min(nu, gap * gap);
                if (total >= cutOffValue) return total;
            } else if (upper < cur) {
                final double gap = cur - upper;
                total += Math.min(nu, gap * gap);
                if (total >= cutOffValue) return total;
            }
        }

        return total;
    }
}
4,001
36.055556
107
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/lowerBounds/LbWdtw.java
/* Copyright (C) 2019 Chang Wei Tan
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds;

import weka.core.Instance;

/**
 * A class to compute the lower bounds for WDTW distance
 *
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LbWdtw {
    /**
     * WDTW lower bound: sum of squared excursions of the candidate outside the
     * query's [queryMin, queryMax] range, scaled by the minimum weight.
     *
     * @param candidate time series (weka format; last attribute is the class value)
     * @param weight    minimum weight
     * @param queryMax  maximum of the other time series
     * @param queryMin  minimum of the other time series
     * @return lower bound distance
     */
    public static double distance(final Instance candidate, final double weight,
                                  final double queryMax, final double queryMin) {
        final int seriesLen = candidate.numAttributes() - 1;
        double acc = 0;

        for (int idx = 0; idx < seriesLen; idx++) {
            final double v = candidate.value(idx);
            if (v < queryMin) {
                final double gap = queryMin - v;
                acc += gap * gap;
            } else if (queryMax < v) {
                final double gap = queryMax - v;
                acc += gap * gap;
            }
        }

        // Weight is applied once at the end (it is constant across the series).
        return weight * acc;
    }

    /**
     * WDTW lower bound with early abandon. The cutoff is divided by the weight
     * up front so the loop can compare against the unweighted running sum.
     *
     * @param candidate   time series
     * @param weight      minimum weight
     * @param queryMax    maximum of the other time series
     * @param queryMin    minimum of the other time series
     * @param cutOffValue cutoff value for early abandon
     * @return lower bound distance, or Double.MAX_VALUE once the cutoff is reached
     */
    public static double distance(final Instance candidate, final double weight,
                                  final double queryMax, final double queryMin,
                                  final double cutOffValue) {
        final int seriesLen = candidate.numAttributes() - 1;
        final double threshold = cutOffValue / weight;   // unweighted cutoff
        double acc = 0;

        for (int idx = 0; idx < seriesLen; idx++) {
            final double v = candidate.value(idx);
            if (v < queryMin) {
                final double gap = queryMin - v;
                acc += gap * gap;
                if (acc >= threshold) return Double.MAX_VALUE;
            } else if (queryMax < v) {
                final double gap = queryMax - v;
                acc += gap * gap;
                if (acc >= threshold) return Double.MAX_VALUE;
            }
        }

        return weight * acc;
    }
}
2,977
34.879518
154
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/lowerBounds/LbYi.java
/* Copyright (C) 2019 Chang Wei Tan
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils.SequenceStatsCache;
import weka.core.Instance;

/**
 * A class to compute LB Yi for DTW distance
 * @inproceedings{yi2000fast,
 *   title={Fast time sequence indexing for arbitrary Lp norms},
 *   author={Yi, Byoung-Kee and Faloutsos, Christos},
 *   booktitle={VLDB},
 *   volume={385},
 *   number={394},
 *   pages={99},
 *   year={2000}
 * }
 * @author Chang Wei Tan (chang.tan@monash.edu)
 */
public class LbYi {
    /**
     * LB Yi: sums the query's cached min (resp. max) for every candidate point
     * that falls below (resp. above) the query's value range.
     *
     * @param candidate  candidate sequence (weka format; last attribute is the class value)
     * @param queryCache stats cache holding the query's min/max
     * @param indexQuery index of the query in the cache
     * @return lower bound distance
     */
    public static double distance(final Instance candidate, final SequenceStatsCache queryCache,
                                  final int indexQuery) {
        double total = 0;
        final int seriesLen = candidate.numAttributes() - 1;

        for (int idx = 0; idx < seriesLen; idx++) {
            final double v = candidate.value(idx);
            final double lo = queryCache.getMin(indexQuery);
            final double hi = queryCache.getMax(indexQuery);
            if (v < lo) {
                total += lo;
            } else if (hi < v) {
                total += hi;
            }
        }

        return total;
    }
}
1,780
36.104167
120
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/utils/GenericTools.java
/* * Copyright (C) 2019 Chang Wei Tan * * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils; /** * This class contains some generic tools * * @author Chang Wei Tan (chang.tan@monash.edu) */ public class GenericTools { public static double distanceTo(final double a, final double b) { double diff = a - b; return diff * diff; } public static int argMin3(final double a, final double b, final double c) { return (a <= b) ? ((a <= c) ? 0 : 2) : (b <= c) ? 1 : 2; } public static int argMax3(final double a, final double b, final double c) { return (a >= b) ? ((a >= c) ? 0 : 2) : (b >= c) ? 1 : 2; } public static double min3(final double a, final double b, final double c) { return (a <= b) ? ((a <= c) ? a : c) : (b <= c) ? b : c; } public static double max3(final double a, final double b, final double c) { return (a >= b) ? ((a >= c) ? a : c) : (b >= c) ? 
b : c; } public static double[] minmax(final double[] a) { double max = -99999999; double min = Double.MAX_VALUE; for (double v : a) { if (v > max) max = v; if (v < min) min = v; } return new double[]{min, max}; } public static int[] getInclusive10(final int min, final int max) { int[] output = new int[10]; double diff = 1.0 * (max - min) / 9; double[] doubleOut = new double[10]; doubleOut[0] = min; output[0] = min; for (int i = 1; i < 9; i++) { doubleOut[i] = doubleOut[i - 1] + diff; output[i] = (int) Math.round(doubleOut[i]); } output[9] = max; // to make sure max isn't omitted due to double imprecision return output; } public static double[] getInclusive10(final double min, final double max) { double[] output = new double[10]; double diff = 1.0 * (max - min) / 9; output[0] = min; for (int i = 1; i < 9; i++) { output[i] = output[i - 1] + diff; } output[9] = max; return output; } }
2,896
32.298851
84
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/utils/IndexedDouble.java
/* * Copyright (C) 2019 Chang Wei Tan * * This file is part of the UEA Time Series Machine Learning (TSML) toolbox. * * The UEA TSML toolbox is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The UEA TSML toolbox is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with the UEA TSML toolbox. If not, see <https://www.gnu.org/licenses/>. */ package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils; /** * This class stores a double value with an index. It is used in FastEE. * * @author Chang Wei Tan (chang.tan@monash.edu) */ public class IndexedDouble { public double value; public int index; public IndexedDouble(int index, double value) { this.value = value; this.index = index; } }
1,206
32.527778
77
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_elastic_ensemble/utils/SequenceStatsCache.java
/*******************************************************************************
 * Copyright (C) 2017 Chang Wei Tan, Francois Petitjean, Matthieu Herrmann, Germain Forestier, Geoff Webb
 *
 * This file is part of FastWWSearch.
 *
 * FastWWSearch is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, version 3 of the License.
 *
 * FastWWSearch is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with FastWWSearch.  If not, see <http://www.gnu.org/licenses/>.
 ******************************************************************************/
package tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.utils;

import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds.LbErp;
import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds.LbKeogh;
import tsml.classifiers.legacy.elastic_ensemble.fast_elastic_ensemble.lowerBounds.LbLcss;
import weka.core.Instances;

import java.util.ArrayList;
import java.util.Arrays;

/**
 * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18
 * <p>
 * Cache for storing per-series statistics of the training set: min/max values
 * and their positions, lower/upper envelopes for DTW/ERP/LCSS lower bounds,
 * indices sorted by absolute value, and an optional lower-bound distance table.
 * <p>
 * NOTE(review): the DTW, ERP and LCSS envelope families all share the same
 * {@code LEs}/{@code UEs} buffers, each tracked by its own
 * {@code last*WindowComputed} marker. Interleaving calls across families for
 * the same series can therefore return a stale envelope — confirm callers use
 * one family at a time. No synchronization — single-threaded use is assumed.
 *
 * @author Chang Wei Tan, Francois Petitjean, Matthieu Herrmann, Germain Forestier, Geoff Webb
 */
public class SequenceStatsCache {
    protected ArrayList<double[]> LEs, UEs;          // lower/upper envelopes, one buffer per series
    protected double[] mins, maxs;                    // per-series min/max value
    protected int[] indexMaxs, indexMins;             // positions of the min/max within each series
    protected boolean[] isMinFirst, isMinLast, isMaxFirst, isMaxLast;
    protected double[] lastWindowComputed;            // DTW window each envelope was last built for (-1 = never)
    protected double[] lastERPWindowComputed;         // ERP band size each envelope was last built for
    protected double[] lastLCSSWindowComputed;        // LCSS delta each envelope was last built for
    protected int currentWindow;
    protected Instances train;
    protected IndexedDouble[][] indicesSortedByAbsoluteValue; // per series: indices sorted by |value|, descending
    protected double[][] lbDistances;                 // optional query x candidate lower-bound table

    /**
     * Builds the cache for a training set, precomputing min/max statistics and
     * the |value|-sorted index order for every series.
     *
     * @param train          the training set (last attribute is the class label)
     * @param startingWindow initial warping window to remember
     */
    public SequenceStatsCache(final Instances train, final int startingWindow) {
        this.train = train;
        int nSequences = train.size();
        int length = train.numAttributes() - 1;
        this.LEs = new ArrayList<>(nSequences);
        this.UEs = new ArrayList<>(nSequences);
        this.lastWindowComputed = new double[nSequences];
        this.lastERPWindowComputed = new double[nSequences];
        this.lastLCSSWindowComputed = new double[nSequences];
        Arrays.fill(this.lastWindowComputed, -1);
        Arrays.fill(this.lastERPWindowComputed, -1);
        Arrays.fill(this.lastLCSSWindowComputed, -1);
        this.currentWindow = startingWindow;
        this.mins = new double[nSequences];
        this.maxs = new double[nSequences];
        this.indexMins = new int[nSequences];
        this.indexMaxs = new int[nSequences];
        this.isMinFirst = new boolean[nSequences];
        this.isMinLast = new boolean[nSequences];
        this.isMaxFirst = new boolean[nSequences];
        this.isMaxLast = new boolean[nSequences];
        this.indicesSortedByAbsoluteValue = new IndexedDouble[nSequences][length];
        for (int i = 0; i < train.size(); i++) {
            double min = Double.POSITIVE_INFINITY;
            double max = Double.NEGATIVE_INFINITY;
            int indexMin = -1, indexMax = -1;
            for (int j = 0; j < train.numAttributes() - 1; j++) {
                double val = train.get(i).value(j);
                if (val > max) {
                    max = val;
                    indexMax = j;
                }
                if (val < min) {
                    min = val;
                    indexMin = j;
                }
                indicesSortedByAbsoluteValue[i][j] = new IndexedDouble(j, Math.abs(val));
            }
            indexMaxs[i] = indexMax;
            indexMins[i] = indexMin;
            mins[i] = min;
            maxs[i] = max;
            isMinFirst[i] = (indexMin == 0);
            isMinLast[i] = (indexMin == (train.numAttributes() - 2));
            isMaxFirst[i] = (indexMax == 0);
            isMaxLast[i] = (indexMax == (train.numAttributes() - 2));
            // Descending order of absolute value.
            Arrays.sort(indicesSortedByAbsoluteValue[i], (v1, v2) -> -Double.compare(v1.value, v2.value));
            this.LEs.add(new double[length]);
            this.UEs.add(new double[length]);
        }
    }

    /**
     * Lower envelope of series i for DTW window w, recomputing lazily when the
     * window changed.
     * <p>
     * Fix: the previous version appended fresh arrays to {@code LEs}/{@code UEs}
     * on every recomputation; those arrays were never read (the envelopes are
     * filled in-place at index i), so the lists grew without bound. The
     * spurious adds are removed here and in the other five getters below.
     */
    public double[] getLE(final int i, final int w) {
        if (lastWindowComputed[i] != w) {
            computeLEandUE(i, w);
        }
        return LEs.get(i);
    }

    /** Upper envelope of series i for DTW window w, recomputing lazily. */
    public double[] getUE(final int i, final int w) {
        if (lastWindowComputed[i] != w) {
            computeLEandUE(i, w);
        }
        return UEs.get(i);
    }

    /** Fills the DTW (LB Keogh) envelopes of series i in-place for window r. */
    public void computeLEandUE(final int i, final int r) {
        LbKeogh.fillUL(train.get(i), r, UEs.get(i), LEs.get(i));
        this.lastWindowComputed[i] = r;
    }

    /** Lower envelope of series i for ERP with gap value g and band size, recomputing lazily. */
    public double[] getLE(final int i, final double g, final double bandSize) {
        if (lastERPWindowComputed[i] != bandSize) {
            computeLEandUE(i, g, bandSize);
        }
        return LEs.get(i);
    }

    /** Upper envelope of series i for ERP with gap value g and band size, recomputing lazily. */
    public double[] getUE(final int i, final double g, final double bandSize) {
        if (lastERPWindowComputed[i] != bandSize) {
            computeLEandUE(i, g, bandSize);
        }
        return UEs.get(i);
    }

    /** Fills the ERP envelopes of series i in-place for gap value g and band size. */
    public void computeLEandUE(final int i, final double g, final double bandSize) {
        LbErp.fillUL(train.get(i), g, bandSize, UEs.get(i), LEs.get(i));
        this.lastERPWindowComputed[i] = bandSize;
    }

    /** Lower envelope of series i for LCSS with window delta and threshold epsilon, recomputing lazily. */
    public double[] getLE(final int i, final int delta, final double epsilon) {
        if (lastLCSSWindowComputed[i] != delta) {
            computeLEandUE(i, delta, epsilon);
        }
        return LEs.get(i);
    }

    /** Upper envelope of series i for LCSS with window delta and threshold epsilon, recomputing lazily. */
    public double[] getUE(final int i, final int delta, final double epsilon) {
        if (lastLCSSWindowComputed[i] != delta) {
            computeLEandUE(i, delta, epsilon);
        }
        return UEs.get(i);
    }

    /** Fills the LCSS envelopes of series i in-place for window delta and threshold epsilon. */
    public void computeLEandUE(final int i, final int delta, final double epsilon) {
        LbLcss.fillUL(train.get(i), epsilon, delta, UEs.get(i), LEs.get(i));
        this.lastLCSSWindowComputed[i] = delta;
    }

    /** Whether the minimum of series i is its first point. */
    public boolean isMinFirst(int i) {
        return isMinFirst[i];
    }

    /** Whether the maximum of series i is its first point. */
    public boolean isMaxFirst(int i) {
        return isMaxFirst[i];
    }

    /** Whether the minimum of series i is its last point. */
    public boolean isMinLast(int i) {
        return isMinLast[i];
    }

    /** Whether the maximum of series i is its last point. */
    public boolean isMaxLast(int i) {
        return isMaxLast[i];
    }

    /** Minimum value of series i. */
    public double getMin(int i) {
        return mins[i];
    }

    /** Maximum value of series i. */
    public double getMax(int i) {
        return maxs[i];
    }

    /** Position of the maximum within series i. */
    public int getIMax(int i) {
        return indexMaxs[i];
    }

    /** Position of the minimum within series i. */
    public int getIMin(int i) {
        return indexMins[i];
    }

    /** Index of the n-th largest absolute value within series i (n = 0 is the largest). */
    public int getIndexNthHighestVal(int i, int n) {
        return indicesSortedByAbsoluteValue[i][n].index;
    }

    /** Allocates the lower-bound distance table (all entries start at 0 = "not set"). */
    public void initLbDistances() {
        lbDistances = new double[train.size()][train.size()];
    }

    /** Stores a lower-bound distance for the (query, candidate) pair. */
    public void setLbDistances(double lb, int qIndex, int cIndex) {
        lbDistances[qIndex][cIndex] = lb;
    }

    /** Reads a stored lower-bound distance for the (query, candidate) pair. */
    public double getLbDistances(int qIndex, int cIndex) {
        return lbDistances[qIndex][cIndex];
    }

    /**
     * Whether a lower bound has been stored for the pair.
     * NOTE(review): uses 0 as the "not set" sentinel, so a genuinely-zero
     * lower bound reads as absent — confirm callers tolerate recomputation.
     */
    public boolean lbDistanceExist(int qIndex, int cIndex) {
        return lbDistances[qIndex][cIndex] != 0;
    }
}
8,275
36.447964
111
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/experiments/ScalabilityExperiment.java
/*******************************************************************************
 * Copyright (C) 2017 Chang Wei Tan
 *
 * This file is part of FastWWSearch.
 *
 * FastWWSearch is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, version 3 of the License.
 *
 * FastWWSearch is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with FastWWSearch.  If not, see <http://www.gnu.org/licenses/>.
 ******************************************************************************/
package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.experiments;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items.ExperimentsLauncher;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.Sampling;
import weka.core.Instances;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.FastWWS;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.Trillion;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.LbKeoghPrunedDTW;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.WindowSearcher;

/**
 * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18
 *
 * Experiment to plot Figure 1 of our SDM18 paper comparing the scalability of the different methods
 * We estimate the search time for dataset with sample size larger than estimate
 *
 * @author Chang Wei Tan
 *
 */
public class ScalabilityExperiment {
    // Sample-size threshold: above this the search time is extrapolated from a
    // partial run (buildClassifierEstimate) instead of measured in full.
    private static int estimate = 10000;
    private static String osName, datasetName, username, projectPath, datasetPath, resDir, method;
    // Training-set sizes to measure. NOTE(review): 100 appears twice —
    // presumably a deliberate warm-up run for the first measurement; confirm.
    private static int[] sampleTrains = new int[]{100, 100, 250, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 500000, 1000000};

    /**
     * Entry point. Optional args: [0] project path, [1] dataset path,
     * [2] method name ("LBKeogh" | "UCRSuite" | "LBKeogh-PrunedDTW" | "FastWWSearch").
     */
    public static void main(String[] args) throws Exception {
        datasetName = "SITS1M_fold1";           // Name of dataset to be tested
        method = "FastWWSearch";                // Method type in finding the best window "naive, kdd12, sdm16, fastwws"

        // Get project and dataset path (defaults depend on the OS; overridable via args)
        osName = System.getProperty("os.name");
        username = System.getProperty("user.name");
        if (osName.contains("Window")) {
            projectPath = "C:/Users/" + username + "/workspace/SDM18/";
            datasetPath = "C:/Users/" + username + "/workspace/Dataset/SITS_2006_NDVI_C/";
        } else {
            projectPath = "/home/" + username + "/workspace/SDM18/";
            datasetPath = "/home/" + username + "/workspace/Dataset/SITS_2006_NDVI_C/";
        }

        // Get initial heap size (informational only)
        long heapMaxSize = Runtime.getRuntime().maxMemory();
        long heapFreeSize = Runtime.getRuntime().freeMemory();
        System.out.println("Heap Size -- Free " + 1.0*heapFreeSize/1e6 + ", " + 1.0*heapMaxSize/1e6);

        // Get arguments
        if (args.length >= 1) projectPath = args[0];
        if (args.length >= 2) datasetPath = args[1];
        if (args.length >= 3) method = args[2];
        System.out.println("Scalability experiment with " + method);

        // Load all data
        Instances allData = loadAllData();

        // Run the experiment depending on the given type
        // (an unrecognised method name falls through and runs nothing)
        switch (method) {
            case "LBKeogh":
                keogh(allData);
                break;
            case "UCRSuite":
                ucrSuite(allData);
                break;
            case "LBKeogh-PrunedDTW":
                keoghPrunedDTW(allData);
                break;
            case "FastWWSearch":
                fastWWS(allData);
                break;
        }
    }// End main

    /**
     * Load all data into 1
     * Also creates the output directory (resDir) as a side effect.
     * @return the full dataset read from datasetPath/datasetName
     */
    private static Instances loadAllData() {
        resDir = projectPath + "outputs/Scaling/";
        File dir = new File(resDir);
        if (!dir.exists()) dir.mkdirs();
        System.out.println("Processing: " + datasetName);
        Instances data = ExperimentsLauncher.readAllInOne(datasetPath, datasetName);
        return data;
    }

    /**
     * Run FastWWSearch (SDM18)
     * Iterates over every size in sampleTrains.
     * @param data the full dataset to sample from
     */
    public static void fastWWS(Instances data) {
        System.out.println(method);
        double[] timeTaken = new double[sampleTrains.length];
        int i = 0;
        for (int sampleSize : sampleTrains) {
            double searchTime = fastWWS(data, sampleSize);
            timeTaken[i++] = searchTime;
        }
    }

    /**
     * Run FastWWSearch (SDM18) for a given size
     * @param data the full dataset to sample from
     * @param sampleSize number of instances to sample for this run
     * @return search time in seconds (0 if the run threw)
     */
    public static double fastWWS(Instances data, int sampleSize) {
        double searchTime = 0;
        long start, stop;
        try{
            Instances newTrain = Sampling.sample(data, sampleSize);
            System.out.println("Size: " + sampleSize + ", Launching FastWWS");
            FastWWS classifier = new FastWWS(datasetName);
            classifier.setResDir(resDir);
            classifier.setType(method);
            start = System.nanoTime();
            classifier.buildClassifier(newTrain);
            stop = System.nanoTime();
            searchTime = 1.0 * ((stop - start)/1e9);
            saveSearchTime(sampleSize, searchTime);
            System.out.println("Size: " + sampleSize + ", " + searchTime + " s");
        } catch (Exception e) {
            // Best-effort experiment runner: log and return 0 so later sizes still run.
            e.printStackTrace();
        }
        return searchTime;
    }

    /**
     * Run DTW with LB Keogh
     * Iterates over every size in sampleTrains.
     * @param data the full dataset to sample from
     */
    public static void keogh(Instances data) {
        System.out.println(method);
        double[] timeTaken = new double[sampleTrains.length];
        int i = 0;
        for (int sampleSize : sampleTrains) {
            double searchTime = keogh(data, sampleSize);
            timeTaken[i++] = searchTime;
        }
    }

    /**
     * Run DTW with LB Keogh for a given size
     * For sizes above `estimate`, only a partial build is timed and the result
     * is scaled up by the fraction of instances actually processed.
     * @param data the full dataset to sample from
     * @param sampleSize number of instances to sample for this run
     * @return (possibly extrapolated) search time in seconds
     */
    public static double keogh(Instances data, int sampleSize) {
        double share = 1, searchTime = 0;
        long start, stop;
        WindowSearcher classifier = new WindowSearcher(datasetName);
        classifier.setResDir(resDir);
        classifier.setType(method);
        try{
            Instances newTrain = Sampling.sample(data, sampleSize);
            System.out.println("Size: " + sampleSize + ", Launching Keogh");
            if (sampleSize < estimate+1) {
                start = System.nanoTime();
                classifier.buildClassifier(newTrain);
                stop = System.nanoTime();
            } else {
                start = System.nanoTime();
                classifier.buildClassifierEstimate(newTrain, estimate);
                stop = System.nanoTime();
                // Fraction of the training set actually processed; used to extrapolate.
                share = 1.0 * (estimate+1) / newTrain.numInstances();
            }
            searchTime = 1.0 * ((stop - start)/1e9);
            searchTime = searchTime/share;
            saveSearchTime(sampleSize, searchTime);
            System.out.println("Size: " + sampleSize + ", " + searchTime + " s");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return searchTime;
    }

    /**
     * Run UCRSuite method
     * Iterates over every size in sampleTrains.
     * @param data the full dataset to sample from
     */
    public static void ucrSuite(Instances data) {
        System.out.println(method);
        double[] timeTaken = new double[sampleTrains.length];
        int i = 0;
        for (int sampleSize : sampleTrains) {
            double searchTime = ucrSuite(data, sampleSize);
            timeTaken[i++] = searchTime;
        }
    }

    /**
     * Run UCRSuite method for a given size
     * For sizes above `estimate`, only a partial build is timed and the result
     * is scaled up by the fraction of instances actually processed.
     * @param data the full dataset to sample from
     * @param sampleSize number of instances to sample for this run
     * @return (possibly extrapolated) search time in seconds
     */
    public static double ucrSuite(Instances data, int sampleSize) {
        double share = 1, searchTime = 0;
        long start, stop;
        try{
            Instances newTrain = Sampling.sample(data, sampleSize);
            System.out.println("Size: " + sampleSize + ", Launching KDD12");
            if (sampleSize < estimate+1) {
                Trillion classifier = new Trillion(datasetName);
                classifier.setResDir(resDir);
                classifier.setType(method);
                start = System.nanoTime();
                classifier.buildClassifier(newTrain);
                stop = System.nanoTime();
            } else {
                Trillion classifier = new Trillion(datasetName);
                classifier.setResDir(resDir);
                classifier.setType(method);
                start = System.nanoTime();
                classifier.buildClassifierEstimate(newTrain, estimate);
                stop = System.nanoTime();
                share = 1.0 * (estimate+1) / newTrain.numInstances();
                System.out.println("Share: " + share);
            }
            searchTime = 1.0 * ((stop - start)/1e9);
            searchTime = searchTime/share;
            saveSearchTime(sampleSize, searchTime);
            System.out.println("Size: " + sampleSize + ", " + searchTime + " s");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return searchTime;
    }

    /**
     * Run LBKeogh-PrunedDTW method
     * Iterates over every size in sampleTrains.
     * @param data the full dataset to sample from
     */
    public static void keoghPrunedDTW(Instances data) {
        System.out.println(method);
        double[] timeTaken = new double[sampleTrains.length];
        int i = 0;
        for (int sampleSize : sampleTrains) {
            double searchTime = keoghPrunedDTW(data, sampleSize);
            timeTaken[i++] = searchTime;
        }
    }

    /**
     * Run LBKeogh-PrunedDTW method for a given size
     * For sizes above `estimate`, only a partial build is timed and the result
     * is scaled up by the fraction of instances actually processed.
     * @param data the full dataset to sample from
     * @param sampleSize number of instances to sample for this run
     * @return (possibly extrapolated) search time in seconds
     */
    public static double keoghPrunedDTW(Instances data, int sampleSize) {
        double share = 1, searchTime = 0;
        long start, stop;
        LbKeoghPrunedDTW classifier = new LbKeoghPrunedDTW(datasetName);
        classifier.setResDir(resDir);
        classifier.setType(method);
        try{
            Instances newTrain = Sampling.sample(data, sampleSize);
            System.out.println("Size: " + sampleSize + ", Launching SDM16");
            if (sampleSize < estimate+1) {
                start = System.nanoTime();
                classifier.buildClassifier(newTrain);
                stop = System.nanoTime();
            } else {
                start = System.nanoTime();
                classifier.buildClassifierEstimate(newTrain, estimate);
                stop = System.nanoTime();
                share = 1.0 * (estimate+1) /newTrain.numInstances();
            }
            searchTime = 1.0 * ((stop - start)/1e9);
            searchTime = searchTime/share;
            saveSearchTime(sampleSize, searchTime);
            System.out.println("Size: " + sampleSize + ", " + searchTime + " s");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return searchTime;
    }

    /**
     * Save results (search time) to csv
     * Appends a row to resDir/scaling_result_<method>.csv, writing the header
     * only when the file does not exist yet.
     * @param sampleSize number of instances used for this measurement
     * @param searchTime measured (or extrapolated) search time in seconds
     */
    private static void saveSearchTime(int sampleSize, double searchTime) {
        String fileName = resDir + "scaling_result_" + method + ".csv";
        FileWriter out;
        boolean append = false;
        File file = new File(fileName);
        if (file.exists())
            append = true;
        try {
            out = new FileWriter(fileName, append);
            if (!append)
                out.append("SampleSize,SearchTime(s)\n");
            out.append(sampleSize + "," + searchTime + "\n");
            out.flush();
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
10,316
29.889222
131
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/experiments/UCR_FastWWS.java
/*******************************************************************************
 * Copyright (C) 2017 Chang Wei Tan
 *
 * This file is part of FastWWSearch.
 *
 * FastWWSearch is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, version 3 of the License.
 *
 * FastWWSearch is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with FastWWSearch.  If not, see <http://www.gnu.org/licenses/>.
 ******************************************************************************/
package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.experiments;

import java.io.File;
import java.io.FileFilter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;

import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items.ExperimentsLauncher;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.Sampling;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.UCRArchive;
import weka.classifiers.Evaluation;
import weka.core.Instances;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.FastWWS;

/**
 * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18
 *
 * Experiment to search for the best warping window using our proposed method for SDM18
 *
 * @author Chang Wei Tan
 *
 */
public class UCR_FastWWS {
    private static String osName, datasetName, username, projectPath, datasetPath, resDir, sampleType, method;
    private static int bestWarpingWindow;   // best window found by the most recent run
    private static double bestScore;        // training score of that window
    private static int nbRuns = 1;          // number of repeated (re-shuffled) runs per dataset

    /**
     * Entry point. Optional args: [0] project path, [1] dataset path,
     * [2] sample type ("Single" | "Sorted" | "Small" | "New" | "All"),
     * then for "Single": [3] dataset name, [4] number of runs;
     * otherwise: [3] number of runs.
     */
    public static void main(String[] args) throws Exception {
        // Initialise
        sampleType = "Single";                  // Doing just 1 dataset, can be Sorted, Small, New or All
        datasetName = "ItalyPowerDemand";       // Name of dataset to be tested
        method = "FastWWSearch";                // Method type in finding the best window

        // Get project and dataset path (defaults depend on OS and sample type)
        osName = System.getProperty("os.name");
        username = System.getProperty("user.name");
        if (osName.contains("Window")) {
            projectPath = "C:/Users/" + username + "/workspace/SDM18/";
            if (sampleType.equals("New"))
                datasetPath = "C:/Users/" + username + "/workspace/Dataset/TSC_Problems/";
            else
                datasetPath = "C:/Users/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/";
        } else {
            projectPath = "/home/" + username + "/workspace/SDM18/";
            if (sampleType.equals("New"))
                datasetPath = "/home/" + username + "/workspace/Dataset/TSC_Problems/";
            else
                datasetPath = "/home/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/";
        }

        // Get arguments
        if (args.length >= 1) projectPath = args[0];
        if (args.length >= 2) datasetPath = args[1];
        if (args.length >= 3) sampleType = args[2];
        if (sampleType.equals("Single") && args.length >= 4) {
            datasetName = args[3];
            if (args.length >= 5) nbRuns = Integer.parseInt(args[4]);
        } else if (args.length >= 4) {
            nbRuns = Integer.parseInt(args[3]);
        }
        if (sampleType.equals("Single"))
            System.out.println("Find best warping window with " + method + " on " + datasetName + " dataset -- " + nbRuns + " runs");
        else
            System.out.println("Find best warping window with " + method + " on " + sampleType + " dataset -- " + nbRuns + " runs");

        // Run the experiment depending on the given type
        // (an unrecognised sample type falls through and runs nothing)
        switch(sampleType) {
            case "Sorted":
                // Curated UCR datasets in a fixed order
                for (int j = 0; j < UCRArchive.sortedDataset.length; j++) {
                    datasetName = UCRArchive.sortedDataset[j];
                    singleProblem(datasetName);
                }
                break;
            case "Small":
                // Small UCR datasets only
                for (int j = 0; j < UCRArchive.smallDataset.length; j++) {
                    datasetName = UCRArchive.smallDataset[j];
                    singleProblem(datasetName);
                }
                break;
            case "New":
                // Newer TSC problems
                for (int j = 0; j < UCRArchive.newTSCProblems.length; j++) {
                    datasetName = UCRArchive.newTSCProblems[j];
                    singleProblem(datasetName);
                }
                break;
            case "All":
                // Every sub-directory of datasetPath, in sorted order
                File rep = new File(datasetPath);
                File[] listData = rep.listFiles(new FileFilter() {
                    @Override
                    public boolean accept(File pathname) {
                        return pathname.isDirectory();
                    }
                });
                Arrays.sort(listData);
                for (File dataRep : listData) {
                    datasetName = dataRep.getName();
                    singleProblem(datasetName);
                }
                break;
            case "Single":
                singleProblem(datasetName);
                break;
        }
    }// End main

    /**
     * Running the experiment for a single dataset
     * Trains FastWWS for the best warping window, evaluates on the test split,
     * and appends one CSV row per run via saveSearchTime.
     * @param datasetName name of the dataset directory under datasetPath
     * @throws Exception propagated from reading, training or evaluation
     */
    private static void singleProblem (String datasetName) throws Exception {
        // Setting output directory
        resDir = projectPath + "outputs/Benchmark/" + datasetName + "/";

        // Check if it exist, else create the directory
        File dir = new File(resDir);
        if (!dir.exists())
            dir.mkdirs();

        // Reading the dataset
        System.out.println("Processing: " + datasetName);
        Instances[] data = ExperimentsLauncher.readTrainAndTest(datasetPath, datasetName);
        Instances train = data[0];
        Instances test = data[1];

        // Go through different runs and randomize the dataset
        for (int i = 0; i < nbRuns; i++) {
            // Sampling the dataset (reshuffles the training order each run)
            train = Sampling.random(train);

            // Initialising the classifier
            System.out.println("Run " + i + ", Launching " + method);
            FastWWS classifier = new FastWWS(datasetName);
            classifier.setResDir(resDir);
            classifier.setType(method);

            // Training the classifier for best window
            long start = System.nanoTime();
            classifier.buildClassifier(train);
            long stop = System.nanoTime();
            double searchTime = (stop - start)/1e9;
            System.out.println(searchTime + " s");
            bestWarpingWindow = classifier.getBestWin();
            bestScore = classifier.getBestScore();

            // Evaluate the trained classfier with test set
            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(classifier, test);
            System.out.println(eval.errorRate());

            // Save result
            saveSearchTime(searchTime, eval.errorRate());
        }
    }

    /**
     * Save results (search time) to csv
     * Appends a row to resDir/<dataset>_result_<method>.csv (header written on
     * first creation), using the bestWarpingWindow/bestScore fields set by the
     * most recent run.
     * @param searchTime search time in seconds for this run
     * @param error test-set error rate for this run
     */
    private static void saveSearchTime(double searchTime, double error) {
        String fileName = resDir + datasetName + "_result_" + method + ".csv";
        FileWriter out;
        boolean append = false;
        File file = new File(fileName);
        if (file.exists())
            append = true;
        try {
            out = new FileWriter(fileName, append);
            if (!append)
                out.append("SearchTime(s),BestWin,BestScore,TestError\n");
            out.append(searchTime + "," + bestWarpingWindow + "," + bestScore + "," + error + "\n");
            out.flush();
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
35.128079
124
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/experiments/UCR_FastWWSPrunedDTW.java
package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.experiments; import java.io.File; import java.io.FileFilter; import java.io.FileWriter; import java.io.IOException; import java.util.Arrays; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items.ExperimentsLauncher; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.Sampling; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.UCRArchive; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.FastWWS; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.FastWWSPrunedDTW; import weka.classifiers.Evaluation; import weka.core.Instances; public class UCR_FastWWSPrunedDTW { private static String osName, datasetName, username, projectPath, datasetPath, resDir, sampleType, method; private static int bestWarpingWindow; private static double bestScore; private static int nbRuns = 1; private static boolean firstRun = false; public static void main(String[] args) throws Exception { // Initialise sampleType = "Single"; // Doing just 1 dataset, can be Sorted, Small, New or All datasetName = "ElectricDevices"; // Name of dataset to be tested method = "FastWWSearch-PrunedDTW"; // Method type in finding the best window // Get project and dataset path osName = System.getProperty("os.name"); username = System.getProperty("user.name"); if (osName.contains("Window")) { projectPath = "C:/Users/" + username + "/workspace/SDM18/"; if (sampleType.equals("New")) datasetPath = "C:/Users/" + username + "/workspace/Dataset/TSC_Problems/"; else datasetPath = "C:/Users/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/"; } else { projectPath = "/home/" + username + "/workspace/SDM18/"; if (sampleType.equals("New")) datasetPath = "/home/" + username + "/workspace/Dataset/TSC_Problems/"; else datasetPath = "/home/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/"; } // Get arguments if 
(args.length >= 1) projectPath = args[0]; if (args.length >= 2) datasetPath = args[1]; if (args.length >= 3) sampleType = args[2]; if (sampleType.equals("Single") && args.length >= 4) { datasetName = args[3]; if (args.length >= 5) nbRuns = Integer.parseInt(args[4]); } else if (args.length >= 4) { nbRuns = Integer.parseInt(args[3]); } if (sampleType.equals("Single")) System.out.println("Find best warping window with " + method + " on " + datasetName + " dataset -- " + nbRuns + " runs"); else System.out.println("Find best warping window with " + method + " on " + sampleType + " dataset -- " + nbRuns + " runs"); switch(sampleType) { case "Sorted": System.out.println("Find best warping window with Sorted UCR datasets"); for (int j = 0; j < UCRArchive.sortedDataset.length; j++) { datasetName = UCRArchive.sortedDataset[j]; singleProblem(datasetName); } break; case "Small": System.out.println("Find best warping window with Small UCR datasets"); for (int j = 0; j < UCRArchive.smallDataset.length; j++) { datasetName = UCRArchive.smallDataset[j]; singleProblem(datasetName); } break; case "New": System.out.println("Find best warping window with New TSC datasets"); for (int j = 0; j < UCRArchive.newTSCProblems.length; j++) { datasetName = UCRArchive.newTSCProblems[j]; singleProblem(datasetName); } break; case "All": System.out.println("Find best warping window with " + datasetName + " dataset"); File rep = new File(datasetPath); File[] listData = rep.listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.isDirectory(); } }); Arrays.sort(listData); for (File dataRep : listData) { datasetName = dataRep.getName(); singleProblem(datasetName); } break; case "Single": System.out.println("Find best warping window with " + datasetName + " dataset"); singleProblem(datasetName); break; } }// End main private static void singleProblem (String datasetName) throws Exception { resDir = projectPath + "outputs/Incorporate_PrunedDTW/" + datasetName + "/"; 
File dir = new File(resDir); if (!dir.exists()) dir.mkdirs(); double[][] searchTimes = new double[2][nbRuns]; double speedUp = 0; double avgFastWWSTime = 0, avgFastWWSSDM16Time = 0; System.out.println("Processing: " + datasetName); Instances[] data = ExperimentsLauncher.readTrainAndTest(datasetPath, datasetName); Instances train = data[0]; Instances test = data[1]; // somehow need to have a dummy run otherwise, the first run will have significantly longer time for smaller datasets if (!firstRun) { FastWWS tempClassifier = new FastWWS(datasetName); tempClassifier.setResDir(resDir); tempClassifier.setType(method); tempClassifier.buildClassifier(train); firstRun = true; } for (int i = 0; i < nbRuns; i++) { train = Sampling.random(train); method = "FastWWSearch"; System.out.println("Run " + i + ", Launching " + method); FastWWS fastwwsClassifier = new FastWWS(datasetName); fastwwsClassifier.setResDir(resDir); fastwwsClassifier.setType(method); long start = System.nanoTime(); fastwwsClassifier.buildClassifier(train); long stop = System.nanoTime(); double searchTime = (double) ((stop - start)/1e9); System.out.println(searchTime + " s"); searchTimes[0][i] = searchTime; avgFastWWSTime += searchTime; bestWarpingWindow = fastwwsClassifier.getBestWin(); bestScore = fastwwsClassifier.getBestScore(); Evaluation eval = new Evaluation(train); eval.evaluateModel(fastwwsClassifier, test); System.out.println(eval.errorRate()); saveSearchTime(searchTime, eval.errorRate()); method = "FastWWSearch-PrunedDTW"; System.out.println("Run " + i + ", Launching FastWWS with SDM16"); FastWWSPrunedDTW fastwwsSDM16Classifier = new FastWWSPrunedDTW(datasetName); fastwwsSDM16Classifier.setResDir(resDir); fastwwsSDM16Classifier.setType(method); start = System.nanoTime(); fastwwsSDM16Classifier.buildClassifier(train); stop = System.nanoTime(); searchTime = (double) ((stop - start)/1e9); System.out.println(searchTime + " s"); searchTimes[1][i] = searchTime; avgFastWWSSDM16Time += searchTime; 
bestWarpingWindow = fastwwsSDM16Classifier.getBestWin(); bestScore = fastwwsSDM16Classifier.getBestScore(); eval = new Evaluation(train); eval.evaluateModel(fastwwsSDM16Classifier, test); System.out.println(eval.errorRate()); saveSearchTime(searchTime, eval.errorRate()); speedUp += searchTimes[0][i]/searchTimes[1][i]; } System.out.println("Average FastWWS Time: " + avgFastWWSTime/nbRuns + "s"); System.out.println("Average FastWWS with SDM16 Time: " + avgFastWWSSDM16Time/nbRuns + "s"); System.out.println("Average Speedup: " + speedUp/nbRuns); } private static void saveSearchTime(double searchTime, double error) { String fileName = resDir + datasetName + "_result_" + method + ".csv"; FileWriter out; boolean append = false; File file = new File(fileName); if (file.exists()) append = true; try { out = new FileWriter(fileName, append); if (!append) out.append("SearchTime(s),BestWin,BestScore,TestError\n"); out.append(searchTime + "," + bestWarpingWindow + "," + bestScore + "," + error + "\n"); out.flush(); out.close(); } catch (IOException e) { e.printStackTrace(); } } }
8,214
37.75
125
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/experiments/UCR_LbKeogh.java
/******************************************************************************* * Copyright (C) 2017 Chang Wei Tan * * This file is part of FastWWSearch. * * FastWWSearch is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, version 3 of the License. * * FastWWSearch is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with FastWWSearch. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.experiments; import java.io.File; import java.io.FileFilter; import java.io.FileWriter; import java.io.IOException; import java.util.Arrays; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items.ExperimentsLauncher; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.Sampling; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.UCRArchive; import weka.classifiers.Evaluation; import weka.core.Instances; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.WindowSearcher; /** * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18 * * Experiment to search for the best warping window using the naive DTW method with LB Keogh * * Original paper: * Keogh, E., & Ratanamahatana, C. A. (2005). * Exact indexing of dynamic time warping. * Knowledge and information systems, 7(3), 358-386. 
* * @author Chang Wei Tan * */ public class UCR_LbKeogh { private static String osName, datasetName, username, projectPath, datasetPath, resDir, sampleType, method; private static int bestWarpingWindow; private static double bestScore; private static int nbRuns = 1; public static void main(String[] args) throws Exception { // Initialise sampleType = "Single"; // Doing just 1 dataset, can be Sorted, Small, New or All datasetName = "ArrowHead"; // Name of dataset to be tested method = "LBKeogh"; // Method type in finding the best window // Get project and dataset path osName = System.getProperty("os.name"); username = System.getProperty("user.name"); if (osName.contains("Window")) { projectPath = "C:/Users/" + username + "/workspace/SDM18/"; if (sampleType.equals("New")) datasetPath = "C:/Users/" + username + "/workspace/Dataset/TSC_Problems/"; else datasetPath = "C:/Users/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/"; } else { projectPath = "/home/" + username + "/workspace/SDM18/"; if (sampleType.equals("New")) datasetPath = "/home/" + username + "/workspace/Dataset/TSC_Problems/"; else datasetPath = "/home/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/"; } // Get arguments if (args.length >= 1) projectPath = args[0]; if (args.length >= 2) datasetPath = args[1]; if (args.length >= 3) sampleType = args[2]; if (sampleType.equals("Single") && args.length >= 4) { datasetName = args[3]; if (args.length >= 5) nbRuns = Integer.parseInt(args[4]); } else if (args.length >= 4) { nbRuns = Integer.parseInt(args[3]); } if (sampleType.equals("Single")) System.out.println("Find best warping window with " + method + " on " + datasetName + " dataset -- " + nbRuns + " runs"); else System.out.println("Find best warping window with " + method + " on " + sampleType + " dataset -- " + nbRuns + " runs"); // Run the experiment depending on the given type switch(sampleType) { case "Sorted": for (int j = 0; j < UCRArchive.sortedDataset.length; j++) { datasetName 
= UCRArchive.sortedDataset[j]; singleProblem(datasetName); } break; case "Small": for (int j = 0; j < UCRArchive.smallDataset.length; j++) { datasetName = UCRArchive.smallDataset[j]; singleProblem(datasetName); } break; case "New": for (int j = 0; j < UCRArchive.newTSCProblems.length; j++) { datasetName = UCRArchive.newTSCProblems[j]; singleProblem(datasetName); } break; case "All": File rep = new File(datasetPath); File[] listData = rep.listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.isDirectory(); } }); Arrays.sort(listData); for (File dataRep : listData) { datasetName = dataRep.getName(); singleProblem(datasetName); } break; case "Single": singleProblem(datasetName); break; } }// End main /** * Running the experiment for a single dataset * @param datasetName * @throws Exception */ private static void singleProblem (String datasetName) throws Exception { // Setting output directory resDir = projectPath + "outputs/Benchmark/" + datasetName + "/"; // Check if it exist, else create the directory File dir = new File(resDir); if (!dir.exists()) dir.mkdirs(); // Reading the dataset System.out.println("Processing: " + datasetName); Instances[] data = ExperimentsLauncher.readTrainAndTest(datasetPath, datasetName); Instances train = data[0]; Instances test = data[1]; // Go through different runs and randomize the dataset for (int i = 0; i < nbRuns; i++) { // Sampling the dataset train = Sampling.random(train); // Initialising the classifier System.out.println("Run " + i + ", Launching " + method); WindowSearcher classifier = new WindowSearcher(datasetName); classifier.setResDir(resDir); classifier.setType(method); // Training the classifier for best window long start = System.nanoTime(); classifier.buildClassifier(train); long stop = System.nanoTime(); double searchTime = (stop - start)/1e9; System.out.println(searchTime + " s"); bestWarpingWindow = classifier.getBestWin(); bestScore = classifier.getBestScore(); // Evaluate 
the trained classfier with test set Evaluation eval = new Evaluation(train); eval.evaluateModel(classifier, test); System.out.println(eval.errorRate()); // Save result saveSearchTime(searchTime, eval.errorRate()); } } /** * Save results (search time) to csv * @param searchTime * @param error */ private static void saveSearchTime(double searchTime, double error) { String fileName = resDir + datasetName + "_result_" + method + ".csv"; FileWriter out; boolean append = false; File file = new File(fileName); if (file.exists()) append = true; try { out = new FileWriter(fileName, append); if (!append) out.append("SearchTime(s),BestWin,BestScore,TestError\n"); out.append(searchTime + "," + bestWarpingWindow + "," + bestScore + "," + error + "\n"); out.flush(); out.close(); } catch (IOException e) { e.printStackTrace(); } } }
7,510
34.937799
124
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/experiments/UCR_LbKeoghPrunedDTW.java
/******************************************************************************* * Copyright (C) 2017 Chang Wei Tan * * This file is part of FastWWSearch. * * FastWWSearch is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, version 3 of the License. * * FastWWSearch is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with FastWWSearch. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.experiments; import java.io.File; import java.io.FileFilter; import java.io.FileWriter; import java.io.IOException; import java.util.Arrays; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items.ExperimentsLauncher; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.Sampling; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.UCRArchive; import weka.classifiers.Evaluation; import weka.core.Instances; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.LbKeoghPrunedDTW; /** * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18 * * Experiment to search for the best warping window using SDM16 method * * SDM16 paper: * Silva, D. F., & Batista, G. E. (2016, June). * Speeding up all-pairwise dynamic time warping matrix calculation. * In Proceedings of the 2016 SIAM International Conference on Data Mining (pp. 837-845). * Society for Industrial and Applied Mathematics. 
* * @author Chang Wei Tan * */ public class UCR_LbKeoghPrunedDTW { private static String osName, datasetName, username, projectPath, datasetPath, resDir, sampleType, method; private static int bestWarpingWindow; private static double bestScore; private static int nbRuns = 1; public static void main(String[] args) throws Exception { // Initialise sampleType = "Single"; // Doing just 1 dataset, can be Sorted, Small, New or All datasetName = "ArrowHead"; // Name of dataset to be tested method = "LBKeogh-PrunedDTW"; // Method type in finding the best window // Get project and dataset path osName = System.getProperty("os.name"); username = System.getProperty("user.name"); if (osName.contains("Window")) { projectPath = "C:/Users/" + username + "/workspace/SDM18/"; if (sampleType.equals("New")) datasetPath = "C:/Users/" + username + "/workspace/Dataset/TSC_Problems/"; else datasetPath = "C:/Users/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/"; } else { projectPath = "/home/" + username + "/workspace/SDM18/"; if (sampleType.equals("New")) datasetPath = "/home/" + username + "/workspace/Dataset/TSC_Problems/"; else datasetPath = "/home/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/"; } // Get arguments if (args.length >= 1) projectPath = args[0]; if (args.length >= 2) datasetPath = args[1]; if (args.length >= 3) sampleType = args[2]; if (sampleType.equals("Single") && args.length >= 4) { datasetName = args[3]; if (args.length >= 5) nbRuns = Integer.parseInt(args[4]); } else if (args.length >= 4) { nbRuns = Integer.parseInt(args[3]); } if (sampleType.equals("Single")) System.out.println("Find best warping window with " + method + " on " + datasetName + " dataset -- " + nbRuns + " runs"); else System.out.println("Find best warping window with " + method + " on " + sampleType + " dataset -- " + nbRuns + " runs"); // Get arguments if (args.length >= 1) projectPath = args[0]; if (args.length >= 2) datasetPath = args[1]; if (args.length >= 3) sampleType 
= args[2]; if (sampleType.equals("Single") && args.length >= 4) { datasetName = args[3]; if (args.length >= 5) nbRuns = Integer.parseInt(args[4]); } else if (args.length >= 4) { nbRuns = Integer.parseInt(args[3]); } // Run the experiment depending on the given type switch(sampleType) { case "Sorted": for (int j = 0; j < UCRArchive.sortedDataset.length; j++) { datasetName = UCRArchive.sortedDataset[j]; singleProblem(datasetName); } break; case "Small": for (int j = 0; j < UCRArchive.smallDataset.length; j++) { datasetName = UCRArchive.smallDataset[j]; singleProblem(datasetName); } break; case "New": for (int j = 0; j < UCRArchive.newTSCProblems.length; j++) { datasetName = UCRArchive.newTSCProblems[j]; singleProblem(datasetName); } break; case "All": File rep = new File(datasetPath); File[] listData = rep.listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.isDirectory(); } }); Arrays.sort(listData); for (File dataRep : listData) { datasetName = dataRep.getName(); singleProblem(datasetName); } break; case "Single": singleProblem(datasetName); break; } }// End main /** * Running the experiment for a single dataset * @param datasetName * @throws Exception */ private static void singleProblem (String datasetName) throws Exception { // Setting output directory resDir = projectPath + "outputs/Benchmark/" + datasetName + "/"; // Check if it exist, else create the directory File dir = new File(resDir); if (!dir.exists()) dir.mkdirs(); // Reading the dataset System.out.println("Processing: " + datasetName); Instances[] data = ExperimentsLauncher.readTrainAndTest(datasetPath, datasetName); Instances train = data[0]; Instances test = data[1]; // Go through different runs and randomize the dataset for (int i = 0; i < nbRuns; i++) { // Sampling the dataset train = Sampling.random(train); // Initialising the classifier System.out.println("Run " + i + ", Launching " + method); LbKeoghPrunedDTW classifier = new 
LbKeoghPrunedDTW(datasetName); classifier.setResDir(resDir); classifier.setType(method); // Training the classifier for best window long start = System.nanoTime(); classifier.buildClassifier(train); long stop = System.nanoTime(); double searchTime = (stop - start)/1e9; System.out.println(searchTime + " s"); bestWarpingWindow = classifier.getBestWin(); bestScore = classifier.getBestScore(); // Evaluate the trained classfier with test set Evaluation eval = new Evaluation(train); eval.evaluateModel(classifier, test); System.out.println(eval.errorRate()); // Save result saveSearchTime(searchTime, eval.errorRate()); } } /** * Save results (search time) to csv * @param searchTime * @param error */ private static void saveSearchTime(double searchTime, double error) { String fileName = resDir + datasetName + "_result_" + method + ".csv"; FileWriter out; boolean append = false; File file = new File(fileName); if (file.exists()) append = true; try { out = new FileWriter(fileName, append); if (!append) out.append("SearchTime(s),BestWin,BestScore,TestError\n"); out.append(searchTime + "," + bestWarpingWindow + "," + bestScore + "," + error + "\n"); out.flush(); out.close(); } catch (IOException e) { e.printStackTrace(); } } }
7,842
34.65
124
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/experiments/UCR_NaiveDTW.java
/******************************************************************************* * Copyright (C) 2017 Chang Wei Tan * * This file is part of FastWWSearch. * * FastWWSearch is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, version 3 of the License. * * FastWWSearch is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with FastWWSearch. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.experiments; import java.io.File; import java.io.FileFilter; import java.io.FileWriter; import java.io.IOException; import java.util.Arrays; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items.ExperimentsLauncher; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.Sampling; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.UCRArchive; import weka.classifiers.Evaluation; import weka.core.Instances; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.NaiveDTW; /** * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18 * * Experiment to search for the best warping window using Naive DTW method * * @author Chang Wei Tan * */ public class UCR_NaiveDTW { private static String osName, datasetName, username, projectPath, datasetPath, resDir, sampleType, method; private static int bestWarpingWindow; private static double bestScore; private static int nbRuns = 1; public static void main(String[] args) throws Exception { // Initialise sampleType = 
"Single"; // Doing just 1 dataset, can be Sorted, Small, New or All datasetName = "ArrowHead"; // Name of dataset to be tested method = "NaiveDTW"; // Method type in finding the best window // Get project and dataset path osName = System.getProperty("os.name"); username = System.getProperty("user.name"); if (osName.contains("Window")) { projectPath = "C:/Users/" + username + "/workspace/SDM18/"; if (sampleType.equals("New")) datasetPath = "C:/Users/" + username + "/workspace/Dataset/TSC_Problems/"; else datasetPath = "C:/Users/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/"; } else { projectPath = "/home/" + username + "/workspace/SDM18/"; if (sampleType.equals("New")) datasetPath = "/home/" + username + "/workspace/Dataset/TSC_Problems/"; else datasetPath = "/home/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/"; } // Get arguments if (args.length >= 1) projectPath = args[0]; if (args.length >= 2) datasetPath = args[1]; if (args.length >= 3) sampleType = args[2]; if (sampleType.equals("Single") && args.length >= 4) { datasetName = args[3]; if (args.length >= 5) nbRuns = Integer.parseInt(args[4]); } else if (args.length >= 4) { nbRuns = Integer.parseInt(args[3]); } if (sampleType.equals("Single")) System.out.println("Find best warping window with " + method + " on " + datasetName + " dataset -- " + nbRuns + " runs"); else System.out.println("Find best warping window with " + method + " on " + sampleType + " dataset -- " + nbRuns + " runs"); // Run the experiment depending on the given type switch(sampleType) { case "Sorted": for (int j = 0; j < UCRArchive.sortedDataset.length; j++) { datasetName = UCRArchive.sortedDataset[j]; singleProblem(datasetName); } break; case "Small": for (int j = 0; j < UCRArchive.smallDataset.length; j++) { datasetName = UCRArchive.smallDataset[j]; singleProblem(datasetName); } break; case "New": for (int j = 0; j < UCRArchive.newTSCProblems.length; j++) { datasetName = UCRArchive.newTSCProblems[j]; 
singleProblem(datasetName); } break; case "All": File rep = new File(datasetPath); File[] listData = rep.listFiles(new FileFilter() { @Override public boolean accept(File pathname) { return pathname.isDirectory(); } }); Arrays.sort(listData); for (File dataRep : listData) { datasetName = dataRep.getName(); singleProblem(datasetName); } break; case "Single": singleProblem(datasetName); break; } }// End main /** * Running the experiment for a single dataset * @param datasetName * @throws Exception */ private static void singleProblem (String datasetName) throws Exception { // Setting output directory resDir = projectPath + "outputs/Benchmark/" + datasetName + "/"; // Check if it exist, else create the directory File dir = new File(resDir); if (!dir.exists()) dir.mkdirs(); // Reading the dataset System.out.println("Processing: " + datasetName); Instances[] data = ExperimentsLauncher.readTrainAndTest(datasetPath, datasetName); Instances train = data[0]; Instances test = data[1]; // Go through different runs and randomize the dataset for (int i = 0; i < nbRuns; i++) { // Sampling the dataset train = Sampling.random(train); // Initialising the classifier System.out.println("Run " + i + ", Launching " + method); NaiveDTW classifier = new NaiveDTW(datasetName); classifier.setResDir(resDir); classifier.setType(method); // Training the classifier for best window long start = System.nanoTime(); classifier.buildClassifier(train); long stop = System.nanoTime(); double searchTime = (stop - start)/1e9; System.out.println(searchTime + " s"); bestWarpingWindow = classifier.getBestWin(); bestScore = classifier.getBestScore(); // Evaluate the trained classfier with test set Evaluation eval = new Evaluation(train); eval.evaluateModel(classifier, test); System.out.println(eval.errorRate()); // Save result saveSearchTime(searchTime, eval.errorRate()); } } /** * Save results (search time) to csv * @param searchTime * @param error */ private static void saveSearchTime(double searchTime, 
double error) { String fileName = resDir + datasetName + "_result_" + method + ".csv"; FileWriter out; boolean append = false; File file = new File(fileName); if (file.exists()) append = true; try { out = new FileWriter(fileName, append); if (!append) out.append("SearchTime(s),BestWin,BestScore,TestError\n"); out.append(searchTime + "," + bestWarpingWindow + "," + bestScore + "," + error + "\n"); out.flush(); out.close(); } catch (IOException e) { e.printStackTrace(); } } }
7,240
34.669951
124
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/experiments/UCR_Trillion.java
/******************************************************************************* * Copyright (C) 2017 Chang Wei Tan * * This file is part of FastWWSearch. * * FastWWSearch is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, version 3 of the License. * * FastWWSearch is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with FastWWSearch. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.experiments; import java.io.File; import java.io.FileFilter; import java.io.FileWriter; import java.io.IOException; import java.util.Arrays; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items.ExperimentsLauncher; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.Sampling; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.UCRArchive; import weka.classifiers.Evaluation; import weka.core.Instances; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.Trillion; /** * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18 * * Experiment to search for the best warping window using KDD12 (UCR Suite) method * * KDD12 paper: * Rakthanmanon, T., Campana, B., Mueen, A., Batista, G., Westover, B., Zhu, Q., ... & Keogh, E. (2012, August). * Searching and mining trillions of time series subsequences under dynamic time warping. * In Proceedings of the 18th ACM SIGKDD international conference * on Knowledge discovery and data mining (pp. 262-270). ACM. 
Chicago
 *
 * @author Chang Wei Tan
 *
 */
public class UCR_Trillion {
    private static String osName, datasetName, username, projectPath, datasetPath, resDir, sampleType, method;
    private static int bestWarpingWindow;   // best warping window found during training
    private static double bestScore;        // training score obtained at the best window
    private static int nbRuns = 1;          // number of randomised repetitions per dataset

    /**
     * Entry point.
     * Optional arguments (positional): projectPath, datasetPath, sampleType,
     * [datasetName when sampleType is "Single"], nbRuns.
     *
     * @param args command-line overrides for the defaults initialised below
     * @throws Exception propagated from training/evaluation
     */
    public static void main(String[] args) throws Exception {
        // Initialise
        sampleType = "Single";      // Doing just 1 dataset, can be Sorted, Small, New or All
        datasetName = "ArrowHead";  // Name of dataset to be tested
        method = "Trillion";        // Method type in finding the best window

        // Get project and dataset path (OS-dependent defaults)
        osName = System.getProperty("os.name");
        username = System.getProperty("user.name");
        if (osName.contains("Window")) {
            projectPath = "C:/Users/" + username + "/workspace/SDM18/";
            if (sampleType.equals("New"))
                datasetPath = "C:/Users/" + username + "/workspace/Dataset/TSC_Problems/";
            else
                datasetPath = "C:/Users/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/";
        } else {
            projectPath = "/home/" + username + "/workspace/SDM18/";
            if (sampleType.equals("New"))
                datasetPath = "/home/" + username + "/workspace/Dataset/TSC_Problems/";
            else
                datasetPath = "/home/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/";
        }

        // Get arguments
        if (args.length >= 1) projectPath = args[0];
        if (args.length >= 2) datasetPath = args[1];
        if (args.length >= 3) sampleType = args[2];
        if (sampleType.equals("Single") && args.length >= 4) {
            datasetName = args[3];
            if (args.length >= 5) nbRuns = Integer.parseInt(args[4]);
        } else if (args.length >= 4) {
            nbRuns = Integer.parseInt(args[3]);
        }

        if (sampleType.equals("Single"))
            System.out.println("Find best warping window with " + method + " on " + datasetName + " dataset -- " + nbRuns + " runs");
        else
            System.out.println("Find best warping window with " + method + " on " + sampleType + " dataset -- " + nbRuns + " runs");

        // Run the experiment depending on the given type
        switch (sampleType) {
            case "Sorted":
                for (int j = 0; j < UCRArchive.sortedDataset.length; j++) {
                    datasetName = UCRArchive.sortedDataset[j];
                    singleProblem(datasetName);
                }
                break;
            case "Small":
                for (int j = 0; j < UCRArchive.smallDataset.length; j++) {
                    datasetName = UCRArchive.smallDataset[j];
                    singleProblem(datasetName);
                }
                break;
            case "New":
                for (int j = 0; j < UCRArchive.newTSCProblems.length; j++) {
                    datasetName = UCRArchive.newTSCProblems[j];
                    singleProblem(datasetName);
                }
                break;
            case "All":
                File rep = new File(datasetPath);
                File[] listData = rep.listFiles(new FileFilter() {
                    @Override
                    public boolean accept(File pathname) {
                        return pathname.isDirectory();
                    }
                });
                // listFiles returns null when datasetPath does not exist or is not a
                // directory; the original code NPE'd at Arrays.sort in that case.
                if (listData == null)
                    throw new IllegalArgumentException("Invalid dataset path: " + datasetPath);
                Arrays.sort(listData);
                for (File dataRep : listData) {
                    datasetName = dataRep.getName();
                    singleProblem(datasetName);
                }
                break;
            case "Single":
                singleProblem(datasetName);
                break;
        }
    }// End main

    /**
     * Running the experiment for a single dataset.
     *
     * @param datasetName name of the UCR dataset to process
     * @throws Exception propagated from training/evaluation
     */
    private static void singleProblem(String datasetName) throws Exception {
        // Setting output directory; create it if it does not exist yet
        resDir = projectPath + "outputs/Benchmark/" + datasetName + "/";
        File dir = new File(resDir);
        if (!dir.exists())
            dir.mkdirs();

        // Reading the dataset
        System.out.println("Processing: " + datasetName);
        Instances[] data = ExperimentsLauncher.readTrainAndTest(datasetPath, datasetName);
        Instances train = data[0];
        Instances test = data[1];

        // Go through different runs and randomize the dataset
        for (int i = 0; i < nbRuns; i++) {
            // Sampling (shuffling) the training set
            train = Sampling.random(train);

            // Initialising the classifier
            System.out.println("Run " + i + ", Launching " + method);
            Trillion classifier = new Trillion(datasetName);
            classifier.setResDir(resDir);
            classifier.setType(method);

            // Training the classifier for best window
            long start = System.nanoTime();
            classifier.buildClassifier(train);
            long stop = System.nanoTime();
            double searchTime = (stop - start) / 1e9;
            System.out.println(searchTime + " s");

            bestWarpingWindow = classifier.getBestWin();
            bestScore = classifier.getBestScore();

            // Evaluate the trained classifier with the test set
            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(classifier, test);
            System.out.println(eval.errorRate());

            // Save result
            saveSearchTime(searchTime, eval.errorRate());
        }
    }

    /**
     * Save results (search time) to csv, appending if the file already exists.
     *
     * @param searchTime wall-clock search time in seconds
     * @param error      test error rate
     */
    private static void saveSearchTime(double searchTime, double error) {
        String fileName = resDir + datasetName + "_result_" + method + ".csv";
        boolean append = new File(fileName).exists();
        // try-with-resources: the original leaked the FileWriter whenever
        // append()/flush() threw before close() was reached.
        try (FileWriter out = new FileWriter(fileName, append)) {
            if (!append)
                out.append("SearchTime(s),BestWin,BestScore,TestError\n");
            out.append(searchTime + "," + bestWarpingWindow + "," + bestScore + "," + error + "\n");
            out.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
7,610
35.416268
124
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/experiments/UCR_UCRSuite.java
/*******************************************************************************
 * Copyright (C) 2017 Chang Wei Tan
 *
 * This file is part of FastWWSearch.
 *
 * FastWWSearch is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, version 3 of the License.
 *
 * FastWWSearch is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with FastWWSearch. If not, see <http://www.gnu.org/licenses/>.
 ******************************************************************************/
package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.experiments;

import java.io.File;
import java.io.FileFilter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;

import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items.ExperimentsLauncher;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.Sampling;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.UCRArchive;
import weka.classifiers.Evaluation;
import weka.core.Instances;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.UCRSuite;

/**
 * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18
 *
 * Experiment to search for the best warping window using KDD12 (UCR Suite) method
 * but using Lazy UCR Suite instead of the original algorithm
 *
 * KDD12 paper:
 * Rakthanmanon, T., Campana, B., Mueen, A., Batista, G., Westover, B., Zhu, Q., ... &amp; Keogh, E. (2012, August).
 * Searching and mining trillions of time series subsequences under dynamic time warping.
 * In Proceedings of the 18th ACM SIGKDD international conference
 * on Knowledge discovery and data mining (pp. 262-270). ACM. Chicago
 *
 * @author Chang Wei Tan
 */
public class UCR_UCRSuite {
    private static String osName, datasetName, username, projectPath, datasetPath, resDir, sampleType, method;
    private static int bestWarpingWindow;   // best warping window found during training
    private static double bestScore;        // training score obtained at the best window
    private static int nbRuns = 1;          // number of randomised repetitions per dataset

    /**
     * Entry point.
     * Optional arguments (positional): projectPath, datasetPath, sampleType,
     * [datasetName when sampleType is "Single"], nbRuns.
     *
     * @param args command-line overrides for the defaults initialised below
     * @throws Exception propagated from training/evaluation
     */
    public static void main(String[] args) throws Exception {
        // Initialise
        sampleType = "Single";      // Doing just 1 dataset, can be Sorted, Small, New or All
        datasetName = "ArrowHead";  // Name of dataset to be tested
        method = "UCRSuite";        // Method type in finding the best window

        // Get project and dataset path (OS-dependent defaults)
        osName = System.getProperty("os.name");
        username = System.getProperty("user.name");
        if (osName.contains("Window")) {
            projectPath = "C:/Users/" + username + "/workspace/SDM18/";
            if (sampleType.equals("New"))
                datasetPath = "C:/Users/" + username + "/workspace/Dataset/TSC_Problems/";
            else
                datasetPath = "C:/Users/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/";
        } else {
            projectPath = "/home/" + username + "/workspace/SDM18/";
            if (sampleType.equals("New"))
                datasetPath = "/home/" + username + "/workspace/Dataset/TSC_Problems/";
            else
                datasetPath = "/home/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/";
        }

        // Get arguments
        if (args.length >= 1) projectPath = args[0];
        if (args.length >= 2) datasetPath = args[1];
        if (args.length >= 3) sampleType = args[2];
        if (sampleType.equals("Single") && args.length >= 4) {
            datasetName = args[3];
            if (args.length >= 5) nbRuns = Integer.parseInt(args[4]);
        } else if (args.length >= 4) {
            nbRuns = Integer.parseInt(args[3]);
        }

        if (sampleType.equals("Single"))
            System.out.println("Find best warping window with " + method + " on " + datasetName + " dataset -- " + nbRuns + " runs");
        else
            System.out.println("Find best warping window with " + method + " on " + sampleType + " dataset -- " + nbRuns + " runs");

        // Run the experiment depending on the given type
        switch (sampleType) {
            case "Sorted":
                for (int j = 0; j < UCRArchive.sortedDataset.length; j++) {
                    datasetName = UCRArchive.sortedDataset[j];
                    singleProblem(datasetName);
                }
                break;
            case "Small":
                for (int j = 0; j < UCRArchive.smallDataset.length; j++) {
                    datasetName = UCRArchive.smallDataset[j];
                    singleProblem(datasetName);
                }
                break;
            case "New":
                for (int j = 0; j < UCRArchive.newTSCProblems.length; j++) {
                    datasetName = UCRArchive.newTSCProblems[j];
                    singleProblem(datasetName);
                }
                break;
            case "All":
                File rep = new File(datasetPath);
                File[] listData = rep.listFiles(new FileFilter() {
                    @Override
                    public boolean accept(File pathname) {
                        return pathname.isDirectory();
                    }
                });
                // listFiles returns null when datasetPath does not exist or is not a
                // directory; the original code NPE'd at Arrays.sort in that case.
                if (listData == null)
                    throw new IllegalArgumentException("Invalid dataset path: " + datasetPath);
                Arrays.sort(listData);
                for (File dataRep : listData) {
                    datasetName = dataRep.getName();
                    singleProblem(datasetName);
                }
                break;
            case "Single":
                singleProblem(datasetName);
                break;
        }
    }// End main

    /**
     * Running the experiment for a single dataset.
     *
     * @param datasetName name of the UCR dataset to process
     * @throws Exception propagated from training/evaluation
     */
    private static void singleProblem(String datasetName) throws Exception {
        // Setting output directory; create it if it does not exist yet
        resDir = projectPath + "outputs/Benchmark/" + datasetName + "/";
        File dir = new File(resDir);
        if (!dir.exists())
            dir.mkdirs();

        // Reading the dataset
        System.out.println("Processing: " + datasetName);
        Instances[] data = ExperimentsLauncher.readTrainAndTest(datasetPath, datasetName);
        Instances train = data[0];
        Instances test = data[1];

        // Go through different runs and randomize the dataset
        for (int i = 0; i < nbRuns; i++) {
            // Sampling (shuffling) the training set
            train = Sampling.random(train);

            // Initialising the classifier
            System.out.println("Run " + i + ", Launching " + method);
            UCRSuite classifier = new UCRSuite(datasetName);
            classifier.setResDir(resDir);
            classifier.setType(method);

            // Training the classifier for best window
            long start = System.nanoTime();
            classifier.buildClassifier(train);
            long stop = System.nanoTime();
            double searchTime = (stop - start) / 1e9;
            System.out.println(searchTime + " s");

            bestWarpingWindow = classifier.getBestWin();
            bestScore = classifier.getBestScore();

            // Evaluate the trained classifier with the test set
            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(classifier, test);
            System.out.println(eval.errorRate());

            // Save result
            saveSearchTime(searchTime, eval.errorRate());
        }
    }

    /**
     * Save results (search time) to csv, appending if the file already exists.
     *
     * @param searchTime wall-clock search time in seconds
     * @param error      test error rate
     */
    private static void saveSearchTime(double searchTime, double error) {
        String fileName = resDir + datasetName + "_result_" + method + ".csv";
        boolean append = new File(fileName).exists();
        // try-with-resources: the original leaked the FileWriter whenever
        // append()/flush() threw before close() was reached.
        try (FileWriter out = new FileWriter(fileName, append)) {
            if (!append)
                out.append("SearchTime(s),BestWin,BestScore,TestError\n");
            out.append(searchTime + "," + bestWarpingWindow + "," + bestScore + "," + error + "\n");
            out.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
7,707
35.530806
124
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/experiments/UCR_UCRSuitePrunedDTW.java
/*******************************************************************************
 * Copyright (C) 2017 Chang Wei Tan
 *
 * This file is part of FastWWSearch.
 *
 * FastWWSearch is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, version 3 of the License.
 *
 * FastWWSearch is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with FastWWSearch. If not, see <http://www.gnu.org/licenses/>.
 ******************************************************************************/
package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.experiments;

import java.io.File;
import java.io.FileFilter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;

import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items.ExperimentsLauncher;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.Sampling;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.UCRArchive;
import weka.classifiers.Evaluation;
import weka.core.Instances;
import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.windowSearcher.UCRSuitePrunedDTW;

/**
 * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18
 *
 * Experiment to search for the best warping window using KDD12 + SDM16 method
 *
 * KDD12 paper:
 * Rakthanmanon, T., Campana, B., Mueen, A., Batista, G., Westover, B., Zhu, Q., ... &amp; Keogh, E. (2012, August).
 * Searching and mining trillions of time series subsequences under dynamic time warping.
 * In Proceedings of the 18th ACM SIGKDD international conference
 * on Knowledge discovery and data mining (pp. 262-270). ACM. Chicago
 *
 * SDM16 paper:
 * Silva, D. F., &amp; Batista, G. E. (2016, June).
 * Speeding up all-pairwise dynamic time warping matrix calculation.
 * In Proceedings of the 2016 SIAM International Conference on Data Mining (pp. 837-845).
 * Society for Industrial and Applied Mathematics.
 *
 * @author Chang Wei Tan
 */
public class UCR_UCRSuitePrunedDTW {
    private static String osName, datasetName, username, projectPath, datasetPath, resDir, sampleType, method;
    private static int bestWarpingWindow;   // best warping window found during training
    private static double bestScore;        // training score obtained at the best window
    private static int nbRuns = 1;          // number of randomised repetitions per dataset

    /**
     * Entry point.
     * Optional arguments (positional): projectPath, datasetPath, sampleType,
     * [datasetName when sampleType is "Single"], nbRuns.
     *
     * @param args command-line overrides for the defaults initialised below
     * @throws Exception propagated from training/evaluation
     */
    public static void main(String[] args) throws Exception {
        // Initialise
        sampleType = "Single";            // Doing just 1 dataset, can be Sorted, Small, New or All
        datasetName = "ArrowHead";        // Name of dataset to be tested
        method = "UCRSuite-PrunedDTW";    // Method type in finding the best window

        // Get project and dataset path (OS-dependent defaults)
        osName = System.getProperty("os.name");
        username = System.getProperty("user.name");
        if (osName.contains("Window")) {
            projectPath = "C:/Users/" + username + "/workspace/SDM18/";
            if (sampleType.equals("New"))
                datasetPath = "C:/Users/" + username + "/workspace/Dataset/TSC_Problems/";
            else
                datasetPath = "C:/Users/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/";
        } else {
            projectPath = "/home/" + username + "/workspace/SDM18/";
            if (sampleType.equals("New"))
                datasetPath = "/home/" + username + "/workspace/Dataset/TSC_Problems/";
            else
                datasetPath = "/home/" + username + "/workspace/Dataset/UCR_Time_Series_Archive/";
        }

        // Get arguments
        if (args.length >= 1) projectPath = args[0];
        if (args.length >= 2) datasetPath = args[1];
        if (args.length >= 3) sampleType = args[2];
        if (sampleType.equals("Single") && args.length >= 4) {
            datasetName = args[3];
            if (args.length >= 5) nbRuns = Integer.parseInt(args[4]);
        } else if (args.length >= 4) {
            nbRuns = Integer.parseInt(args[3]);
        }

        if (sampleType.equals("Single"))
            System.out.println("Find best warping window with " + method + " on " + datasetName + " dataset -- " + nbRuns + " runs");
        else
            System.out.println("Find best warping window with " + method + " on " + sampleType + " dataset -- " + nbRuns + " runs");

        // Run the experiment depending on the given type
        switch (sampleType) {
            case "Sorted":
                for (int j = 0; j < UCRArchive.sortedDataset.length; j++) {
                    datasetName = UCRArchive.sortedDataset[j];
                    singleProblem(datasetName);
                }
                break;
            case "Small":
                for (int j = 0; j < UCRArchive.smallDataset.length; j++) {
                    datasetName = UCRArchive.smallDataset[j];
                    singleProblem(datasetName);
                }
                break;
            case "New":
                for (int j = 0; j < UCRArchive.newTSCProblems.length; j++) {
                    datasetName = UCRArchive.newTSCProblems[j];
                    singleProblem(datasetName);
                }
                break;
            case "All":
                File rep = new File(datasetPath);
                File[] listData = rep.listFiles(new FileFilter() {
                    @Override
                    public boolean accept(File pathname) {
                        return pathname.isDirectory();
                    }
                });
                // listFiles returns null when datasetPath does not exist or is not a
                // directory; the original code NPE'd at Arrays.sort in that case.
                if (listData == null)
                    throw new IllegalArgumentException("Invalid dataset path: " + datasetPath);
                Arrays.sort(listData);
                for (File dataRep : listData) {
                    datasetName = dataRep.getName();
                    singleProblem(datasetName);
                }
                break;
            case "Single":
                singleProblem(datasetName);
                break;
        }
    }// End main

    /**
     * Running the experiment for a single dataset.
     *
     * @param datasetName name of the UCR dataset to process
     * @throws Exception propagated from training/evaluation
     */
    private static void singleProblem(String datasetName) throws Exception {
        // Setting output directory; create it if it does not exist yet
        resDir = projectPath + "outputs/Benchmark/" + datasetName + "/";
        File dir = new File(resDir);
        if (!dir.exists())
            dir.mkdirs();

        // Reading the dataset
        System.out.println("Processing: " + datasetName);
        Instances[] data = ExperimentsLauncher.readTrainAndTest(datasetPath, datasetName);
        Instances train = data[0];
        Instances test = data[1];

        // Go through different runs and randomize the dataset
        for (int i = 0; i < nbRuns; i++) {
            // Sampling (shuffling) the training set
            train = Sampling.random(train);

            // Initialising the classifier
            System.out.println("Run " + i + ", Launching " + method);
            UCRSuitePrunedDTW classifier = new UCRSuitePrunedDTW(datasetName);
            classifier.setResDir(resDir);
            classifier.setType(method);

            // Training the classifier for best window
            long start = System.nanoTime();
            classifier.buildClassifier(train);
            long stop = System.nanoTime();
            double searchTime = (stop - start) / 1e9;
            System.out.println(searchTime + " s");

            bestWarpingWindow = classifier.getBestWin();
            bestScore = classifier.getBestScore();

            // Evaluate the trained classifier with the test set
            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(classifier, test);
            System.out.println(eval.errorRate());

            // Save result
            saveSearchTime(searchTime, eval.errorRate());
        }
    }

    /**
     * Save results (search time) to csv, appending if the file already exists.
     *
     * @param searchTime wall-clock search time in seconds
     * @param error      test error rate
     */
    private static void saveSearchTime(double searchTime, double error) {
        String fileName = resDir + datasetName + "_result_" + method + ".csv";
        boolean append = new File(fileName).exists();
        // try-with-resources: the original leaked the FileWriter whenever
        // append()/flush() threw before close() was reached.
        try (FileWriter out = new FileWriter(fileName, append)) {
            if (!append)
                out.append("SearchTime(s),BestWin,BestScore,TestError\n");
            out.append(searchTime + "," + bestWarpingWindow + "," + bestScore + "," + error + "\n");
            out.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
8,010
36.087963
124
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/items/DTWResult.java
/******************************************************************************* * Copyright (C) 2017 Chang Wei Tan, Francois Petitjean, Matthieu Herrmann, Germain Forestier, Geoff Webb * * This file is part of FastWWSearch. * * FastWWSearch is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, version 3 of the License. * * FastWWSearch is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with FastWWSearch. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items; /** * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18 * * DTW results that stores the distance and window validity * * @author Chang Wei Tan, Francois Petitjean, Matthieu Herrmann, Germain Forestier, Geoff Webb * */ public class DTWResult { /** * The DTW distance */ public double distance; /** * The smallest window that would give the distance */ public int r; }
1,436
36.815789
111
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/items/ExperimentsLauncher.java
/*******************************************************************************
 * Copyright (C) 2017 Chang Wei Tan, Francois Petitjean, Matthieu Herrmann, Germain Forestier, Geoff Webb
 *
 * This file is part of FastWWSearch.
 *
 * FastWWSearch is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, version 3 of the License.
 *
 * FastWWSearch is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with FastWWSearch. If not, see <http://www.gnu.org/licenses/>.
 ******************************************************************************/
package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items;

import java.io.File;

import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.tools.UCR2CSV;
import weka.core.Instances;
import weka.core.converters.CSVLoader;

/**
 * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18
 *
 * Loads UCR datasets from disk, converting the raw UCR format to CSV on first
 * use and caching the CSV next to the original file.
 *
 * @author Chang Wei Tan, Francois Petitjean, Matthieu Herrmann, Germain Forestier, Geoff Webb
 */
public class ExperimentsLauncher {
    // --- --- --- Fields --- --- ---
    public static String username = System.getProperty("user.name");
    long startTime;
    long endTime;
    long duration;

    // --- --- --- Constructor --- --- ---
    public ExperimentsLauncher() {

    }

    // --- --- --- Methods --- --- ---

    /**
     * Convert the raw UCR split file to CSV if not already done, and return the CSV file.
     *
     * @param path   root folder of the archive
     * @param name   dataset name
     * @param suffix "_TRAIN" or "_TEST"
     * @return the (possibly freshly created) CSV file
     */
    private static File prepareCsv(String path, String name, String suffix) {
        File rawFile = new File(path + name + "/" + name + suffix);
        File csvFile = new File(rawFile.getAbsolutePath() + ".csv");
        if (!csvFile.exists()) {
            UCR2CSV.run(rawFile, csvFile);
        }
        return csvFile;
    }

    /**
     * Load one CSV file as a Weka dataset with the class attribute first.
     *
     * @param loader  the CSV loader to (re)use
     * @param csvFile the file to load
     * @return the loaded dataset with class index 0
     * @throws Exception on any loading failure
     */
    private static Instances loadCsv(CSVLoader loader, File csvFile) throws Exception {
        loader.setFile(csvFile);
        loader.setNominalAttributes("first");
        Instances dataset = loader.getDataSet();
        dataset.setClassIndex(0);
        return dataset;
    }

    /**
     * Read train and test set from the given path.
     * On a loading failure the corresponding array entry is {@code null}
     * (best-effort behaviour preserved from the original implementation).
     *
     * @param path root folder of the archive
     * @param name dataset name
     * @return {train, test}
     */
    public static Instances[] readTrainAndTest(String path, String name) {
        File trainFile = prepareCsv(path, name, "_TRAIN");
        File testFile = prepareCsv(path, name, "_TEST");

        CSVLoader loader = new CSVLoader();
        Instances trainDataset = null;
        Instances testDataset = null;
        try {
            trainDataset = loadCsv(loader, trainFile);
            testDataset = loadCsv(loader, testFile);
        } catch (Exception e) {
            e.printStackTrace();
        }

        return new Instances[] { trainDataset, testDataset };
    }

    /**
     * Read both train and test set from a given path and merge them both.
     * Delegates to {@link #readTrainAndTest(String, String)} — the original
     * duplicated that method's entire body — then appends the test instances
     * to the training set.
     *
     * @param path root folder of the archive
     * @param name dataset name
     * @return the merged dataset, or {@code null} if the train set failed to load
     */
    public static Instances readAllInOne(String path, String name) {
        Instances[] data = readTrainAndTest(path, name);
        Instances trainDataset = data[0];
        Instances testDataset = data[1];
        // Guard against a failed load; the original skipped the merge (inside its
        // try block) in that case as well.
        if (trainDataset != null && testDataset != null) {
            for (int i = 0; i < testDataset.numInstances(); i++) {
                trainDataset.add(testDataset.instance(i));
            }
        }
        return trainDataset;
    }
}
4,886
35.744361
118
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/items/Itemset.java
/******************************************************************************* * Copyright (C) 2017 Chang Wei Tan, Francois Petitjean, Matthieu Herrmann, Germain Forestier, Geoff Webb * * This file is part of FastWWSearch. * * FastWWSearch is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, version 3 of the License. * * FastWWSearch is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with FastWWSearch. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items; /** * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18 * * @author Chang Wei Tan, Francois Petitjean, Matthieu Herrmann, Germain Forestier, Geoff Webb * */ public abstract class Itemset { @Override public abstract Itemset clone(); public abstract double distance(Itemset o); public abstract Itemset mean(Itemset[]tab); @Override public abstract String toString(); public double squaredDistance(Itemset o){ double dist = this.distance(o); return dist*dist; } }
1,528
39.236842
111
java
tsml-java
tsml-java-master/src/main/java/tsml/classifiers/legacy/elastic_ensemble/fast_window_search/items/LazyAssessNN.java
/******************************************************************************* * Copyright (C) 2017 Chang Wei Tan, Francois Petitjean, Matthieu Herrmann, Germain Forestier, Geoff Webb * * This file is part of FastWWSearch. * * FastWWSearch is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, version 3 of the License. * * FastWWSearch is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with FastWWSearch. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package tsml.classifiers.legacy.elastic_ensemble.fast_window_search.items; import tsml.classifiers.legacy.elastic_ensemble.fast_window_search.sequences.SymbolicSequence; /** * Code for the paper "Efficient search of the best warping window for Dynamic Time Warping" published in SDM18 * * Class for LazyAssessNN distance introduced in our SDM18 paper. 
* It implements a "Lazy" UCR Suites so we do not need to compute DTW/LB distance for every warping window * We use this in FastWWSearch to fill in our desired table quickly * This distance does not use early abandoning for both LB Keogh and DTW * as it was found that early abandoning has longer runtime * * @author Chang Wei Tan, Francois Petitjean, Matthieu Herrmann, Germain Forestier, Geoff Webb * */ public class LazyAssessNN implements Comparable<LazyAssessNN> { // --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- // Internal types // --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- public enum RefineReturnType { Pruned_with_LB, Pruned_with_DTW, New_best } public enum LBStatus { LB_Kim, Partial_LB_KeoghQR, Full_LB_KeoghQR, Partial_LB_KeoghRQ, Full_LB_KeoghRQ, Previous_Window_LB, Previous_Window_DTW, Full_DTW } // --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- // Fields // --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- protected final static int RIEN = -1; protected final static int DIAGONALE = 0; protected final static int GAUCHE = 1; protected final static int HAUT = 2; SequenceStatsCache cache; // Cache to store the information for the sequences SymbolicSequence query, reference; // Query and reference sequences public int indexQuery, indexReference; // Index for query and reference int indexStoppedLB, oldIndexStoppedLB; // Index where we stop LB int currentW; // Current warping window int minWindowValidityFullDTW; // Minimum window validity for DTW int nOperationsLBKim; // Number of operations for LB Kim double minDist,LBKeogh1,LBKeogh2,bestMinDist,EuclideanDist; // Distances LBStatus status; // Status of Lower Bound public static double[] ubPartials; // Partial Upper Bound for PrunedDTW // --- --- --- --- --- 
--- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- // Constructor // --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- public LazyAssessNN(SymbolicSequence query, int index, SymbolicSequence reference, int indexReference, SequenceStatsCache cache) { if (index < indexReference) { this.query = query; this.indexQuery = index; this.reference = reference; this.indexReference = indexReference; } else { this.query = reference; this.indexQuery = indexReference; this.reference = query; this.indexReference = index; } this.minDist = 0.0; this.cache = cache; tryLBKim(); this.bestMinDist= minDist; this.status = LBStatus.LB_Kim; } public LazyAssessNN(SequenceStatsCache cache){ this.cache = cache; } // --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- // Method // --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- /** * Initialise the distance between query and reference * Reset all parameters * Compute LB Kim * @param query * @param index * @param reference * @param indexReference */ public void set (SymbolicSequence query, int index, SymbolicSequence reference, int indexReference) { // --- OTHER RESET indexStoppedLB = oldIndexStoppedLB = 0; currentW = 0; minWindowValidityFullDTW = 0; nOperationsLBKim = 0; LBKeogh1 = LBKeogh2 = 0; // --- From constructor if (index < indexReference) { this.query = query; this.indexQuery = index; this.reference = reference; this.indexReference = indexReference; } else { this.query = reference; this.indexQuery = indexReference; this.reference = query; this.indexReference = index; } this.minDist = 0.0; tryLBKim(); this.bestMinDist = minDist; this.status = LBStatus.LB_Kim; } /** * Initialise Upper Bound array for PrunedDTW */ public void initUBPartial() { ubPartials = new double[query.getNbTuples()+1]; } /** * Set the best 
minimum distance * @param bestMinDist */ public void setBestMinDist(double bestMinDist) { this.bestMinDist = bestMinDist; } /** * Set current warping window * @param currentW */ public void setCurrentW(int currentW) { if (this.currentW != currentW) { this.currentW = currentW; if (status == LBStatus.Full_DTW){ if(this.currentW >= minWindowValidityFullDTW) { this.status = LBStatus.Full_DTW; }else{ this.status = LBStatus.Previous_Window_DTW; } } else { this.status = LBStatus.Previous_Window_LB; this.oldIndexStoppedLB = indexStoppedLB; } } } /** * Compute Euclidean Distance as Upper Bound for PrunedDTW * @param scoreToBeat * @return */ public RefineReturnType tryEuclidean(double scoreToBeat) { if(bestMinDist>=scoreToBeat){ return RefineReturnType.Pruned_with_LB; } if(EuclideanDist >= scoreToBeat) { return RefineReturnType.Pruned_with_DTW; } ubPartials[query.getNbTuples()] = 0; for (int i = query.getNbTuples()-1; i >= 0; i--) { ubPartials[i] = ubPartials[i+1] + query.getItem(i).squaredDistance(reference.getItem(i)); } EuclideanDist = ubPartials[0]; return RefineReturnType.New_best; } /** * Run LB Kim using data from cache */ protected void tryLBKim() { double diffFirsts = query.sequence[0].squaredDistance(reference.sequence[0]); double diffLasts = query.sequence[query.getNbTuples() - 1].squaredDistance(reference.sequence[reference.getNbTuples() - 1]); minDist = diffFirsts + diffLasts; nOperationsLBKim = 2; if(!cache.isMinFirst(indexQuery)&&!cache.isMinFirst(indexReference) && !cache.isMinLast(indexQuery) && !cache.isMinLast(indexReference)){ double diffMin = cache.getMin(indexQuery)-cache.getMin(indexReference); minDist += diffMin*diffMin; nOperationsLBKim++; } if(!cache.isMaxFirst(indexQuery)&&!cache.isMaxFirst(indexReference)&& !cache.isMaxLast(indexQuery) && !cache.isMaxLast(indexReference)){ double diffMax = cache.getMax(indexQuery)-cache.getMax(indexReference); minDist += diffMax*diffMax; nOperationsLBKim++; } status = LBStatus.LB_Kim; } /** * Run Full LB 
Keogh(Q,R) using data from cache */
    protected void tryFullLBKeoghQR() {
        // Envelope of the QUERY (lower LEQ / upper UEQ) for the current window,
        // fetched from the shared cache; points of the REFERENCE are visited in the
        // cache's "nth highest value" order rather than left-to-right.
        int length = query.sequence.length;
        double[] LEQ = cache.getLE(indexQuery, currentW);
        double[] UEQ = cache.getUE(indexQuery, currentW);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length) {
            int index = cache.getIndexNthHighestVal(indexReference, indexStoppedLB);
            double c = ((MonoDoubleItemSet) reference.sequence[index]).value;
            if (c < LEQ[index]) {
                // Below the lower envelope: accumulate squared excursion.
                double diff = LEQ[index] - c;
                minDist += diff * diff;
            } else if (UEQ[index] < c) {
                // Above the upper envelope: accumulate squared excursion.
                double diff = UEQ[index] - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
    }

    /**
     * Run Full LB Keogh(R,Q) using data from cache.
     * Mirror of {@link #tryFullLBKeoghQR()} with the roles of query and
     * reference swapped: the envelope belongs to the reference, the points
     * come from the query. Leaves the result in {@code minDist} and the
     * number of points processed in {@code indexStoppedLB}.
     */
    protected void tryFullLBKeoghRQ() {
        int length = reference.sequence.length;
        double[] LER = cache.getLE(indexReference, currentW);
        double[] UER = cache.getUE(indexReference, currentW);
        this.minDist = 0.0;
        this.indexStoppedLB = 0;
        while (indexStoppedLB < length) {
            int index = cache.getIndexNthHighestVal(indexQuery, indexStoppedLB);
            double c = ((MonoDoubleItemSet) query.sequence[index]).value;
            if (c < LER[index]) {
                double diff = LER[index] - c;
                minDist += diff * diff;
            } else if (UER[index] < c) {
                double diff = UER[index] - c;
                minDist += diff * diff;
            }
            indexStoppedLB++;
        }
    }

    /**
     * The main function for LazyUCR.
     * Start with LBKim, LBKeogh(Q,R), LBKeogh(R,Q), DTW.
     *
     * IMPORTANT: the switch below deliberately FALLS THROUGH between cases —
     * each case resumes the bounding cascade from wherever {@code status} left
     * off and, if it cannot prune, continues into the next (tighter, more
     * expensive) bound. Do not add break statements.
     *
     * @param scoreToBeat distance threshold; we stop as soon as a lower bound
     *                    (or the full DTW) proves the pair cannot beat it
     * @param w           warping-window size for this attempt
     * @return how the attempt ended: pruned by a lower bound, pruned by the
     *         full DTW, or a new best distance
     */
    public RefineReturnType tryToBeat(double scoreToBeat, int w) {
        setCurrentW(w);
        switch (status) {
            case Previous_Window_LB:
            case Previous_Window_DTW:
            case LB_Kim:
                if (bestMinDist >= scoreToBeat) {
                    return RefineReturnType.Pruned_with_LB;
                }
                // if LB_Kim_FL done, then start LB_Keogh(Q,R)
                indexStoppedLB = 0;
                minDist = 0;
            case Partial_LB_KeoghQR:
                // if had started LB_Keogh, then just starting from previous index
                if (bestMinDist >= scoreToBeat) {
                    return RefineReturnType.Pruned_with_LB;
                }
                tryFullLBKeoghQR();
                // Keep the tightest bound seen so far.
                if (minDist > bestMinDist) {
                    bestMinDist = minDist;
                }
                if (bestMinDist >= scoreToBeat) {
                    // Stopped in the middle so must be pruning
                    if (indexStoppedLB < query.getNbTuples()) {
                        status = LBStatus.Partial_LB_KeoghQR;
                    } else {
                        LBKeogh1 = minDist;
                        status = LBStatus.Full_LB_KeoghQR;
                    }
                    return RefineReturnType.Pruned_with_LB;
                } else {
                    status = LBStatus.Full_LB_KeoghQR;
                }
            case Full_LB_KeoghQR:
                // if LB_Keogh(Q,R) has been done, then we do the second one
                indexStoppedLB = 0;
                minDist = 0;
            case Partial_LB_KeoghRQ:
                // if had started LB_Keogh, then just starting from previous index
                if (bestMinDist >= scoreToBeat) {
                    return RefineReturnType.Pruned_with_LB;
                }
                tryFullLBKeoghRQ();
                if (minDist > bestMinDist) {
                    bestMinDist = minDist;
                }
                if (bestMinDist >= scoreToBeat) {
                    if (indexStoppedLB < reference.getNbTuples()) {
                        status = LBStatus.Partial_LB_KeoghRQ;
                    } else {
                        LBKeogh2 = minDist;
                        status = LBStatus.Full_LB_KeoghRQ;
                    }
                    return RefineReturnType.Pruned_with_LB;
                } else {
                    status = LBStatus.Full_LB_KeoghRQ;
                }
            case Full_LB_KeoghRQ:
                // if had finished LB_Keogh(R,Q), then DTW
                if (bestMinDist >= scoreToBeat) {
                    return RefineReturnType.Pruned_with_LB;
                }
                DTWResult res = query.DTWExtResults(reference, currentW);
                // Squared distance, consistent with the squared LB accumulations above.
                minDist = res.distance * res.distance;
                if (minDist > bestMinDist) {
                    bestMinDist = minDist;
                }
                status = LBStatus.Full_DTW;
                // Smallest window for which this full-DTW result remains valid.
                minWindowValidityFullDTW = res.r;
            case Full_DTW:
                if (bestMinDist >= scoreToBeat) {
                    return RefineReturnType.Pruned_with_DTW;
                } else {
                    return RefineReturnType.New_best;
                }
            default:
                throw new RuntimeException("Case not managed");
        }
    }

    /**
     * The main function for LazyUCR with PrunedDTW.
     * Start with LBKim, LBKeogh(Q,R), LBKeogh(R,Q), PrunedDTW.
     *
     * Identical cascade to {@link #tryToBeat(double, int)} (same deliberate
     * switch fall-through — keep the two methods in sync) except that the
     * final exact computation uses {@code PrunedDTWExtResults} instead of
     * {@code DTWExtResults}.
     *
     * @param scoreToBeat distance threshold to prune against
     * @param w           warping-window size for this attempt
     * @return how the attempt ended (pruned by LB, pruned by DTW, or new best)
     */
    public RefineReturnType tryToBeatPrunedDTW(double scoreToBeat, int w) {
        setCurrentW(w);
        switch (status) {
            case Previous_Window_LB:
            case Previous_Window_DTW:
            case LB_Kim:
                if (bestMinDist >= scoreToBeat) {
                    return RefineReturnType.Pruned_with_LB;
                }
                // if LB_Kim_FL done, then start LB_Keogh(Q,R)
                indexStoppedLB = 0;
                minDist = 0;
            case Partial_LB_KeoghQR:
                // If had started LB_Keogh, then just starting from previous index
                if (bestMinDist >= scoreToBeat) {
                    return RefineReturnType.Pruned_with_LB;
                }
                tryFullLBKeoghQR();
                if (minDist > bestMinDist) {
                    bestMinDist = minDist;
                }
                if (bestMinDist >= scoreToBeat) {
                    // Stopped in the middle so must be pruning
                    if (indexStoppedLB < query.getNbTuples()) {
                        status = LBStatus.Partial_LB_KeoghQR;
                    } else {
                        LBKeogh1 = minDist;
                        status = LBStatus.Full_LB_KeoghQR;
                    }
                    return RefineReturnType.Pruned_with_LB;
                } else {
                    status = LBStatus.Full_LB_KeoghQR;
                }
            case Full_LB_KeoghQR:
                // if LB_Keogh(Q,R) has been done, then we do the second one
                indexStoppedLB = 0;
                minDist = 0;
            case Partial_LB_KeoghRQ:
                // if had started LB_Keogh, then just starting from previous index
                if (bestMinDist >= scoreToBeat) {
                    return RefineReturnType.Pruned_with_LB;
                }
                tryFullLBKeoghRQ();
                if (minDist > bestMinDist) {
                    bestMinDist = minDist;
                }
                if (bestMinDist >= scoreToBeat) {
                    if (indexStoppedLB < reference.getNbTuples()) {
                        status = LBStatus.Partial_LB_KeoghRQ;
                    } else {
                        LBKeogh2 = minDist;
                        status = LBStatus.Full_LB_KeoghRQ;
                    }
                    return RefineReturnType.Pruned_with_LB;
                } else {
                    status = LBStatus.Full_LB_KeoghRQ;
                }
            case Full_LB_KeoghRQ:
                // if had finished LB_Keogh(R,Q), then PrunedDTW
                if (bestMinDist >= scoreToBeat) {
                    return RefineReturnType.Pruned_with_LB;
                }
                DTWResult res = query.PrunedDTWExtResults(reference, currentW);
                minDist = res.distance * res.distance;
                if (minDist > bestMinDist) {
                    bestMinDist = minDist;
                }
                status = LBStatus.Full_DTW;
                minWindowValidityFullDTW = res.r;
            case Full_DTW:
                if (bestMinDist >= scoreToBeat) {
                    return RefineReturnType.Pruned_with_DTW;
                } else {
                    return RefineReturnType.New_best;
                }
            default:
                throw new RuntimeException("Case not managed");
        }
    }

    @Override
    public String toString() {
        return "" + indexQuery + " - " + indexReference + " - " + bestMinDist;
    }

    /**
     * Given one endpoint index of this pair, returns the other endpoint.
     * If {@code index} is not the query index, the query index is returned
     * (no validation that it actually equals the reference index).
     */
    public int getOtherIndex(int index) {
        if (index == indexQuery) {
            return indexReference;
        } else {
            return indexQuery;
        }
    }

    /**
     * Given one endpoint index of this pair, returns the sequence of the
     * OTHER endpoint (reference if {@code index} is the query, else query).
     */
    public SymbolicSequence getSequenceForOtherIndex(int index) {
        if (index == indexQuery) {
            return reference;
        } else {
            return query;
        }
    }

    /**
     * Returns the already-computed (squared) full-DTW distance, valid only
     * when a full DTW has been done and remains valid for {@code window}.
     * NOTE(review): returns {@code minDist}, not {@code bestMinDist} — this
     * relies on no lower-bound pass having overwritten {@code minDist} since
     * the full DTW was computed; confirm against callers.
     *
     * @throws RuntimeException if no valid full-DTW result is available
     */
    public double getDistance(int window) {
        if (status == LBStatus.Full_DTW && minWindowValidityFullDTW <= window) {
            return minDist;
        }
        throw new RuntimeException("Shouldn't call getDistance if not sure there is a valid already-computed DTW distance");
    }

    /**
     * Smallest window for which the stored full-DTW distance is valid.
     *
     * @throws RuntimeException if no full DTW has been computed yet
     */
    public int getMinWindowValidityForFullDistance() {
        if (status == LBStatus.Full_DTW) {
            return minWindowValidityFullDTW;
        }
        throw new RuntimeException("Shouldn't call getDistance if not sure there is a valid already-computed DTW distance");
    }

    // Direct accessors for cached partial upper bounds / Euclidean distance.
    public double[] getUBPartial() {
        return ubPartials;
    }

    public double getEuclideanDistance() {
        return EuclideanDist;
    }

    @Override
    public int compareTo(LazyAssessNN o) {
        int res = this.compare(o);
        return res;
    }

    /** Orders pairs by their per-operation ranking value (ascending). */
    protected int compare(LazyAssessNN o) {
        double num1 = this.getDoubleValueForRanking();
        double num2 = o.getDoubleValueForRanking();
        return Double.compare(num1, num2);
    }

    /**
     * Ranking key: the current best bound normalised by the amount of work
     * that produced it (full length, LB-Kim operation count, or the index at
     * which a partial LB stopped), so that cheaply-obtained loose bounds and
     * expensively-obtained tight bounds are comparable.
     */
    protected double getDoubleValueForRanking() {
        double thisD = this.bestMinDist;
        switch (status) {
            case Full_DTW:
            case Full_LB_KeoghQR:
            case Full_LB_KeoghRQ:
                return thisD / query.getNbTuples();
            case LB_Kim:
                return thisD / nOperationsLBKim;
            case Partial_LB_KeoghQR:
            case Partial_LB_KeoghRQ:
                return thisD / indexStoppedLB;
            case Previous_Window_DTW:
                return 0.8 * thisD / query.getNbTuples(); // DTW(w+1) should be tighter
            case Previous_Window_LB:
                if (indexStoppedLB == 0) {
                    // lb kim
                    return thisD / nOperationsLBKim;
                } else {
                    // lbkeogh
                    return thisD / oldIndexStoppedLB;
                }
            default:
                throw new RuntimeException("shouldn't come here");
        }
    }

    /**
     * Equality on the (query, reference) index pair only.
     * NOTE(review): unchecked cast — throws ClassCastException for arguments
     * of another type and NullPointerException for null, and hashCode() is
     * not overridden to match; confirm instances are never used as hash keys
     * or compared against foreign types before relying on this.
     */
    @Override
    public boolean equals(Object o) {
        LazyAssessNN d = (LazyAssessNN) o;
        return (this.indexQuery == d.indexQuery && this.indexReference == d.indexReference);
    }

    public LBStatus getStatus() {
        return status;
    }

    /** Marks this pair as having a fully computed DTW distance. */
    public void setFullDistStatus() {
        this.status = LBStatus.Full_DTW;
    }

    /** Current best (tightest) lower bound, or the full distance if computed. */
    public double getBestLB() {
        return bestMinDist;
    }
}
16,334
29.25
139
java