code
stringlengths
73
34.1k
label
stringclasses
1 value
private DBIDs getNearestNeighbors(Relation<V> relation, SimilarityQuery<V> simQ, DBIDRef queryObject) { Heap<DoubleDBIDPair> nearestNeighbors = new TiedTopBoundedHeap<>(knn); for(DBIDIter iter = relation.iterDBIDs(); iter.valid(); iter.advance()) { if(DBIDUtil.equal(iter, queryObject)) { continue; } double sim = simQ.similarity(queryObject, iter); if(sim > 0.) { nearestNeighbors.add(DBIDUtil.newPair(sim, iter)); } } // Collect DBIDs ArrayModifiableDBIDs dbids = DBIDUtil.newArray(nearestNeighbors.size()); while(nearestNeighbors.size() > 0) { dbids.add(nearestNeighbors.poll()); } return dbids; }
java
/**
 * Compute the per-dimension variances of the neighborhood around a center.
 *
 * @param relation data relation
 * @param center center point (defines the dimensionality)
 * @param neighborhood ids of the neighborhood members
 * @return variance per dimension
 */
private static double[] computePerDimensionVariances(Relation<? extends NumberVector> relation, double[] center, DBIDs neighborhood) {
  final int dim = center.length;
  double[] sqsums = new double[dim];
  // Accumulate squared deviations from the center.
  for(DBIDIter it = neighborhood.iter(); it.valid(); it.advance()) {
    final NumberVector vec = relation.get(it);
    for(int d = 0; d < dim; d++) {
      final double delta = vec.doubleValue(d) - center[d];
      sqsums[d] += delta * delta;
    }
  }
  // Normalize by the neighborhood size (in place).
  return VMath.timesEquals(sqsums, 1. / neighborhood.size());
}
java
/**
 * Compute the subspace outlier degree: subspace Euclidean distance to the
 * center, normalized by the number of selected dimensions.
 *
 * @param queryObject query object
 * @param center subspace center
 * @param weightVector bitmask of the selected dimensions
 * @return normalized distance, or 0 if no dimension is selected
 */
private double subspaceOutlierDegree(V queryObject, double[] center, long[] weightVector) {
  final int card = BitsUtil.cardinality(weightVector);
  if(card == 0) {
    return 0; // Empty subspace: no deviation measurable.
  }
  final SubspaceEuclideanDistanceFunction dist = new SubspaceEuclideanDistanceFunction(weightVector);
  final double d = dist.distance(queryObject, DoubleVector.wrap(center));
  return d / card;
}
java
/**
 * Parse a long integer in base 10 from a character sequence region,
 * throwing pre-allocated exceptions on malformed input.
 *
 * @param str input sequence
 * @param start first index to parse (inclusive)
 * @param end last index (exclusive)
 * @return parsed value (negated if a leading '-' was present)
 */
public static long parseLongBase10(final CharSequence str, final int start, final int end) {
  // Current position and character.
  int pos = start;
  char cur = str.charAt(pos);
  // Match sign
  boolean isNegative = (cur == '-');
  // Carefully consume the - character, update c and i:
  if((isNegative || (cur == '+')) && (++pos < end)) {
    cur = str.charAt(pos);
  }
  // Begin parsing real numbers!
  if((cur < '0') || (cur > '9')) {
    throw NOT_A_NUMBER;
  }
  // Parse digits into a long, remember offset of decimal point.
  long decimal = 0;
  while(true) {
    final int digit = cur - '0';
    if((digit >= 0) && (digit <= 9)) {
      // decimal = decimal * 10 + digit, via shifts (8x + 2x).
      final long tmp = (decimal << 3) + (decimal << 1) + digit;
      // NOTE(review): this overflow test is incomplete — a wrapped value can
      // still compare greater than the previous one; Long.MIN_VALUE also
      // cannot be parsed. Confirm whether callers need exact detection.
      if(tmp < decimal) {
        throw PRECISION_OVERFLOW;
      }
      decimal = tmp;
    }
    else {
      // No more digits, or a second dot.
      break;
    }
    if(++pos < end) {
      cur = str.charAt(pos);
    }
    else {
      break;
    }
  }
  // Anything left over is not a valid number.
  if(pos != end) {
    throw TRAILING_CHARACTERS;
  }
  return isNegative ? -decimal : decimal;
}
java
/**
 * Match "inf", "infinity" (case insensitive) or the Unicode infinity
 * symbol in a byte array region.
 *
 * @param str input bytes
 * @param firstchar first byte of the region (already fetched by the caller)
 * @param start first index (inclusive)
 * @param end last index (exclusive)
 * @return true when the region denotes infinity
 */
private static boolean matchInf(byte[] str, byte firstchar, int start, int end) {
  final int len = end - start;
  // The wonders of unicode. The infinity symbol \u221E is three bytes:
  // UTF-8 encodes U+221E as 0xE2 0x88 0x9E = -0x1E -0x78 -0x62 as signed bytes.
  if(len == 3 && firstchar == -0x1E && str[start + 1] == -0x78 && str[start + 2] == -0x62) {
    return true;
  }
  // Only the short form "inf" (length 3) or the full word may match, and it
  // must begin with I/i.
  if((len != 3 && len != INFINITY_LENGTH) //
      || (firstchar != 'I' && firstchar != 'i')) {
    return false;
  }
  // Compare byte-wise against the pattern; presumably INFINITY_PATTERN holds
  // both case variants back to back (offset by INFINITY_LENGTH) — TODO confirm.
  for(int i = 1, j = INFINITY_LENGTH + 1; i < INFINITY_LENGTH; i++, j++) {
    final byte c = str[start + i];
    if(c != INFINITY_PATTERN[i] && c != INFINITY_PATTERN[j]) {
      return false;
    }
    // Short form "inf" is fully matched after three characters.
    if(i == 2 && len == 3) {
      return true;
    }
  }
  return true;
}
java
private static boolean matchNaN(byte[] str, byte firstchar, int start, int end) { final int len = end - start; if(len < 2 || len > 3 || (firstchar != 'N' && firstchar != 'n')) { return false; } final byte c1 = str[start + 1]; if(c1 != 'a' && c1 != 'A') { return false; } // Accept just "NA", too: if(len == 2) { return true; } final byte c2 = str[start + 2]; return c2 == 'N' || c2 == 'n'; }
java
public static void setLookAndFeel() { try { if(PREFER_GTK) { LookAndFeelInfo[] lfs = UIManager.getInstalledLookAndFeels(); for(LookAndFeelInfo lf : lfs) { if(lf.getClassName().contains("GTK")) { UIManager.setLookAndFeel(lf.getClassName()); return; } } } // Fallback: UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch(Exception e) { // ignore } }
java
/**
 * Route uncaught exceptions of all threads into the given logger.
 *
 * @param logger logger to receive uncaught exceptions
 */
public static void logUncaughtExceptions(Logging logger) {
  try {
    final Thread.UncaughtExceptionHandler handler = (t, e) -> logger.exception(e);
    Thread.setDefaultUncaughtExceptionHandler(handler);
  }
  catch(SecurityException e) {
    logger.warning("Could not set the Default Uncaught Exception Handler", e);
  }
}
java
/**
 * Build the frequent 1-itemsets by counting per-item occurrences.
 *
 * @param relation data relation of sparse feature vectors
 * @param dim number of possible items (dimensionality)
 * @param needed minimum support count
 * @return frequent 1-itemsets (ascending item order)
 */
protected List<OneItemset> buildFrequentOneItemsets(final Relation<? extends SparseFeatureVector<?>> relation, final int dim, final int needed) {
  // TODO: use TIntList and prefill appropriately to avoid knowing "dim"
  // beforehand?
  int[] counts = new int[dim];
  // Count how often each item occurs across all transactions.
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    SparseFeatureVector<?> bv = relation.get(iditer);
    for(int it = bv.iter(); bv.iterValid(it); it = bv.iterAdvance(it)) {
      counts[bv.iterDim(it)]++;
    }
  }
  if(LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(STAT + "1-items.candidates", dim));
  }
  // Generate initial candidates of length 1.
  List<OneItemset> frequent = new ArrayList<>(dim);
  for(int i = 0; i < dim; i++) {
    if(counts[i] >= needed) {
      frequent.add(new OneItemset(i, counts[i]));
    }
  }
  return frequent;
}
java
/**
 * Build the frequent 2-itemsets from the frequent 1-itemsets, counting
 * co-occurring pairs in a hash map keyed by the packed (i, j) item pair.
 *
 * @param oneitems frequent 1-itemsets
 * @param relation data relation
 * @param dim dimensionality
 * @param needed minimum support count
 * @param ids ids to process
 * @param survivors output: ids that may still support longer itemsets
 * @return sorted frequent 2-itemsets
 */
protected List<SparseItemset> buildFrequentTwoItemsets(List<OneItemset> oneitems, final Relation<BitVector> relation, final int dim, final int needed, DBIDs ids, ArrayModifiableDBIDs survivors) {
  int f1 = 0;
  long[] mask = BitsUtil.zero(dim);
  // Mask of all frequent items; f1 = number of frequent 1-items.
  for(OneItemset supported : oneitems) {
    BitsUtil.setI(mask, supported.item);
    f1++;
  }
  if(LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(STAT + "2-items.candidates", f1 * (long) (f1 - 1)));
  }
  // We quite aggressively size the map, assuming that almost each combination
  // is present somewhere. If this won't fit into memory, we're likely running
  // OOM somewhere later anyway!
  Long2IntOpenHashMap map = new Long2IntOpenHashMap((f1 * (f1 - 1)) >>> 1);
  final long[] scratch = BitsUtil.zero(dim);
  for(DBIDIter iditer = ids.iter(); iditer.valid(); iditer.advance()) {
    // Restrict the transaction to frequent items only.
    BitsUtil.setI(scratch, mask);
    relation.get(iditer).andOnto(scratch);
    int lives = 0;
    // Count each pair (i, j) with i < j of remaining items.
    for(int i = BitsUtil.nextSetBit(scratch, 0); i >= 0; i = BitsUtil.nextSetBit(scratch, i + 1)) {
      for(int j = BitsUtil.nextSetBit(scratch, i + 1); j >= 0; j = BitsUtil.nextSetBit(scratch, j + 1)) {
        long key = (((long) i) << 32) | j;
        // Missing keys yield the map's default of 0.
        map.put(key, 1 + map.get(key));
        ++lives;
      }
    }
    if(lives > 2) {
      survivors.add(iditer);
    }
  }
  // Generate candidates of length 2.
  List<SparseItemset> frequent = new ArrayList<>(f1 * (int) FastMath.sqrt(f1));
  for(ObjectIterator<Long2IntMap.Entry> iter = map.long2IntEntrySet().fastIterator(); iter.hasNext();) {
    Long2IntMap.Entry entry = iter.next();
    if(entry.getIntValue() >= needed) {
      // Unpack the pair; the & -1L mask is a no-op (the int cast truncates).
      int ii = (int) (entry.getLongKey() >>> 32);
      int ij = (int) (entry.getLongKey() & -1L);
      frequent.add(new SparseItemset(new int[] { ii, ij }, entry.getIntValue()));
    }
  }
  // The hashmap may produce them out of order.
  Collections.sort(frequent);
  if(LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(STAT + "2-items.frequent", frequent.size()));
  }
  return frequent;
}
java
/**
 * Count candidate supports over the given ids and retain the candidates
 * reaching the minimum support; ids that can still support longer itemsets
 * are collected into survivors.
 *
 * @param candidates candidate itemsets (support counters are mutated!)
 * @param relation data relation
 * @param needed minimum support count
 * @param ids transactions to scan
 * @param survivors output: surviving transaction ids
 * @param length current itemset length
 * @return frequent itemsets
 */
protected List<? extends Itemset> frequentItemsets(List<? extends Itemset> candidates, Relation<BitVector> relation, int needed, DBIDs ids, ArrayModifiableDBIDs survivors, int length) {
  if(candidates.isEmpty()) {
    return Collections.emptyList();
  }
  Itemset first = candidates.get(0);
  // We have an optimized codepath for large and sparse itemsets.
  // It probably pays off when #cands >> (avlen choose length) but we do not
  // currently have the average number of items. These thresholds yield
  // 2700, 6400, 12500, ... and thus will almost always be met until the
  // number of frequent itemsets is about to break down to 0.
  if(candidates.size() > length * length * length * 100 && first instanceof SparseItemset) {
    // Assume that all itemsets are sparse itemsets!
    @SuppressWarnings("unchecked")
    List<SparseItemset> sparsecand = (List<SparseItemset>) candidates;
    return frequentItemsetsSparse(sparsecand, relation, needed, ids, survivors, length);
  }
  // Generic path: test every candidate against every transaction.
  for(DBIDIter iditer = ids.iter(); iditer.valid(); iditer.advance()) {
    BitVector bv = relation.get(iditer);
    // TODO: exploit that the candidate set it sorted?
    int lives = 0;
    for(Itemset candidate : candidates) {
      if(candidate.containedIn(bv)) {
        candidate.increaseSupport();
        ++lives;
      }
    }
    // A transaction can only support longer itemsets if it matched more
    // than `length` candidates.
    if(lives > length) {
      survivors.add(iditer);
    }
  }
  // Retain only those with minimum support:
  List<Itemset> frequent = new ArrayList<>(candidates.size());
  for(Iterator<? extends Itemset> iter = candidates.iterator(); iter.hasNext();) {
    final Itemset candidate = iter.next();
    if(candidate.getSupport() >= needed) {
      frequent.add(candidate);
    }
  }
  return frequent;
}
java
protected List<SparseItemset> frequentItemsetsSparse(List<SparseItemset> candidates, Relation<BitVector> relation, int needed, DBIDs ids, ArrayModifiableDBIDs survivors, int length) { // Current search interval: int begin = 0, end = candidates.size(); int[] scratchi = new int[length], iters = new int[length]; SparseItemset scratch = new SparseItemset(scratchi); for(DBIDIter iditer = ids.iter(); iditer.valid(); iditer.advance()) { BitVector bv = relation.get(iditer); if(!initializeSearchItemset(bv, scratchi, iters)) { continue; } int lives = 0; while(begin < end) { begin = binarySearch(candidates, scratch, begin, end); if(begin > 0) { candidates.get(begin).increaseSupport(); ++lives; } else { begin = (-begin) - 1; } if(begin >= end || !nextSearchItemset(bv, scratchi, iters)) { break; } } for(Itemset candidate : candidates) { if(candidate.containedIn(bv)) { candidate.increaseSupport(); ++lives; } } if(lives > length) { survivors.add(iditer); } } // Retain only those with minimum support: List<SparseItemset> frequent = new ArrayList<>(candidates.size()); for(Iterator<SparseItemset> iter = candidates.iterator(); iter.hasNext();) { final SparseItemset candidate = iter.next(); if(candidate.getSupport() >= needed) { frequent.add(candidate); } } return frequent; }
java
/**
 * Initialize the scratch itemset with the first scratchi.length set bits of
 * the bit vector, in ascending dimension order.
 *
 * @param bv transaction bit vector
 * @param scratchi output: item dimensions of the current combination
 * @param iters output: per-position iterator states
 * @return false if the vector has fewer set bits than required
 */
private boolean initializeSearchItemset(BitVector bv, int[] scratchi, int[] iters) {
  for(int i = 0; i < scratchi.length; i++) {
    // Each position starts right after the previous position's iterator.
    iters[i] = (i == 0) ? bv.iter() : bv.iterAdvance(iters[i - 1]);
    if(iters[i] < 0) {
      return false; // Not enough set bits.
    }
    scratchi[i] = bv.iterDim(iters[i]);
  }
  return true;
}
java
/**
 * Advance the scratch itemset to a next combination of set bits.
 *
 * NOTE(review): this advances the right-most position that can still move,
 * but does not re-initialize the positions after it; confirm against the
 * caller's search strategy that the resulting (partial) enumeration is
 * intended.
 *
 * @param bv transaction bit vector
 * @param scratchi in/out: item dimensions of the current combination
 * @param iters in/out: per-position iterator states
 * @return true if a next combination was produced
 */
private boolean nextSearchItemset(BitVector bv, int[] scratchi, int[] iters) {
  final int last = scratchi.length - 1;
  for(int j = last; j >= 0; j--) {
    int n = bv.iterAdvance(iters[j]);
    // Accept the advance unless it collides with the next position.
    if(n >= 0 && (j == last || n != iters[j + 1])) {
      iters[j] = n;
      scratchi[j] = bv.iterDim(n);
      return true; // Success
    }
  }
  return false; // Exhausted.
}
java
private int binarySearch(List<SparseItemset> candidates, SparseItemset scratch, int begin, int end) { --end; while(begin < end) { final int mid = (begin + end) >>> 1; SparseItemset midVal = candidates.get(mid); int cmp = midVal.compareTo(scratch); if(cmp < 0) { begin = mid + 1; } else if(cmp > 0) { end = mid - 1; } else { return mid; // key found } } return -(begin + 1); // key not found, return next }
java
private <A> ArrayList<int[]> buildPartitions(NumberArrayAdapter<?, A> adapter1, A data1, int len, int depth) { final int[] idx = new int[len]; final double[] tmp = new double[len]; for(int i = 0; i < len; ++i) { idx[i] = i; tmp[i] = adapter1.getDouble(data1, i); } // Sort indexes: IntegerArrayQuickSort.sort(idx, (x, y) -> Double.compare(tmp[x], tmp[y])); Arrays.sort(tmp); // Should yield the same ordering ArrayList<int[]> ret = new ArrayList<>(1 << depth); divide(idx, tmp, ret, 0, tmp.length, depth); return ret; }
java
/**
 * Recursively split the sorted data range [start, end) at the mean value
 * into 2^depth partitions, appending each leaf as a sorted index array.
 *
 * @param idx index permutation, sorted consistently with data
 * @param data sorted data values
 * @param ret output partition list
 * @param start first index (inclusive)
 * @param end last index (exclusive)
 * @param depth remaining split depth
 */
private void divide(int[] idx, double[] data, ArrayList<int[]> ret, int start, int end, int depth) {
  if(depth == 0) {
    // Leaf: emit the (sorted) member indexes of this partition.
    int[] a = Arrays.copyOfRange(idx, start, end);
    Arrays.sort(a);
    ret.add(a);
    return;
  }
  final int count = end - start;
  if(count == 0) {
    // Corner case, that should barely happen. But for ties, we currently
    // Do not yet assure that it doesn't happen!
    // Emit the expected number of empty leaves to keep the grid aligned.
    for(int j = 1 << depth; j > 0; --j) {
      ret.add(new int[0]);
    }
    return;
  }
  // Split at the mean of the current range.
  double m = 0.;
  for(int i = start; i < end; i++) {
    m += data[i];
  }
  m /= count;
  int pos = Arrays.binarySearch(data, start, end, m);
  if(pos >= 0) {
    // Ties: try to choose the most central element.
    final int opt = (start + end) >> 1;
    while(data[pos] == m) {
      if(pos < opt) {
        pos++;
      }
      else if(pos > opt) {
        pos--;
      }
      else {
        break;
      }
    }
  }
  else {
    pos = (-pos - 1); // Not found: use the insertion point.
  }
  divide(idx, data, ret, start, pos, depth - 1);
  divide(idx, data, ret, pos, end, depth - 1);
}
java
/**
 * Fill res[x][y] with the intersection sizes of the x and y partitions.
 *
 * @param res output matrix (gridsize by gridsize)
 * @param partsx partitions in x (sorted id arrays)
 * @param partsy partitions in y (sorted id arrays)
 * @param gridsize grid resolution
 */
private void intersectionMatrix(int[][] res, ArrayList<int[]> partsx, ArrayList<int[]> partsy, int gridsize) {
  for(int x = 0; x < gridsize; x++) {
    final int[] cellx = partsx.get(x);
    final int[] row = res[x];
    for(int y = 0; y < gridsize; y++) {
      row[y] = intersectionSize(cellx, partsy.get(y));
    }
  }
}
java
/**
 * Count common values of two sorted integer arrays via a merge-style scan.
 *
 * @param px first sorted array
 * @param py second sorted array
 * @return number of common elements
 */
private int intersectionSize(int[] px, int[] py) {
  int count = 0;
  for(int i = 0, j = 0; i < px.length && j < py.length;) {
    final int a = px[i], b = py[j];
    if(a == b) {
      ++count;
      ++i;
      ++j;
    }
    else if(a < b) {
      ++i;
    }
    else {
      ++j;
    }
  }
  return count;
}
java
/**
 * Compute the normalized marginal conditional entropy of the grid
 * intersection matrix; both directions are computed and the larger value is
 * used.
 *
 * @param mat intersection counts (x by y)
 * @param partsx partitions in x
 * @param partsy partitions in y
 * @param size total number of objects
 * @param gridsize grid resolution
 * @param loggrid logarithm of the grid size, for normalization
 * @return normalized entropy
 */
private double getMCEntropy(int[][] mat, ArrayList<int[]> partsx, ArrayList<int[]> partsy, int size, int gridsize, double loggrid) {
  // Margin entropies:
  double[] mx = new double[gridsize];
  double[] my = new double[gridsize];
  for(int i = 0; i < gridsize; i++) {
    // Note: indexes are a bit tricky here, because we compute both margin
    // entropies at the same time!
    final double sumx = (double) partsx.get(i).length;
    final double sumy = (double) partsy.get(i).length;
    for(int j = 0; j < gridsize; j++) {
      // Conditional probabilities: row i of mat for x, column i for y.
      double px = mat[i][j] / sumx;
      double py = mat[j][i] / sumy;
      if(px > 0.) {
        mx[i] -= px * FastMath.log(px);
      }
      if(py > 0.) {
        my[i] -= py * FastMath.log(py);
      }
    }
  }
  // Weighted sums of margin entropies.
  double sumx = 0., sumy = 0.;
  for(int i = 0; i < gridsize; i++) {
    sumx += mx[i] * partsx.get(i).length;
    sumy += my[i] * partsy.get(i).length;
  }
  double max = ((sumx > sumy) ? sumx : sumy);
  return max / (size * loggrid);
}
java
public void add(E e) { // resize when needed if(size + 1 > queue.length) { resize(size + 1); } // final int pos = size; this.size += 1; heapifyUp(size - 1, e); heapModified(); }
java
/**
 * Replace the top element with a new one and restore the heap property.
 *
 * @param e new element
 * @return the previous top element
 */
@SuppressWarnings("unchecked")
public E replaceTopElement(E e) {
  E oldroot = (E) queue[0];
  // Place the new element at the root and sift it down.
  heapifyDown(0, e);
  heapModified();
  return oldroot;
}
java
/**
 * Remove the element at the given position, filling the gap with the last
 * heap element.
 *
 * @param pos position to remove
 * @return removed element, or null if the position is out of range
 */
@SuppressWarnings("unchecked")
protected E removeAt(int pos) {
  if(pos < 0 || pos >= size) {
    return null;
  }
  final E ret = (E) queue[pos];
  // Replacement object:
  final Object reinsert = queue[size - 1];
  queue[size - 1] = null; // Allow GC of the vacated slot.
  size--;
  // NOTE(review): the replacement is only sifted down; if it can compare
  // smaller than the parent of pos, a sift-up would also be required —
  // confirm heapifyDown handles that case or callers never hit it.
  heapifyDown(pos, reinsert);
  heapModified();
  return ret;
}
java
protected final void resize(int requiredSize) { // Double until 64, then increase by 50% each time. int newCapacity = ((queue.length < 64) ? ((queue.length + 1) << 1) : ((queue.length >> 1) + queue.length)); // overflow? if(newCapacity < 0) { throw new OutOfMemoryError(); } if(requiredSize > newCapacity) { newCapacity = requiredSize; } queue = Arrays.copyOf(queue, newCapacity); }
java
public void clear() { // clean up references in the array for memory management for(int i = 0; i < size; i++) { queue[i] = null; } this.size = 0; heapModified(); }
java
/**
 * Validate the heap order invariant (debugging helper).
 *
 * @return description of the first violation found, or null if the heap is
 *         valid
 */
protected String checkHeap() {
  for(int i = 1; i < size; i++) {
    final int parent = (i - 1) >>> 1;
    if(comparator.compare(queue[parent], queue[i]) > 0) {
      // FIX: the message used '<', contradicting the detected condition —
      // the parent compares GREATER than its child here.
      return "@" + parent + ": " + queue[parent] + " > @" + i + ": " + queue[i];
    }
  }
  return null;
}
java
/**
 * Execute the full KDD pipeline: input, data mining, evaluation, output.
 */
public void run() {
  // Input step
  Database db = inputStep.getDatabase();
  hier = db.getHierarchy();
  // Algorithms - Data Mining Step
  algorithmStep.runAlgorithms(db);
  // TODO: this could be nicer
  hier.add(db, new SettingsResult(settings));
  // Evaluation
  evaluationStep.runEvaluators(hier, db);
  // Output / Visualization
  outputStep.runResultHandlers(hier, db);
}
java
/**
 * Overwrite this bounding box with the bounds of the given object.
 *
 * @param obj object to copy the bounds from (same dimensionality)
 */
public void set(SpatialComparable obj) {
  final int dim = min.length;
  assert (obj.getDimensionality() == dim);
  // Fast path: bulk-copy the arrays when the source exposes them directly.
  if(obj instanceof ModifiableHyperBoundingBox) {
    final ModifiableHyperBoundingBox other = (ModifiableHyperBoundingBox) obj;
    System.arraycopy(other.getMinRef(), 0, min, 0, dim);
    System.arraycopy(other.getMaxRef(), 0, max, 0, dim);
    return;
  }
  // Generic path: copy dimension by dimension.
  for(int d = 0; d < dim; d++) {
    min[d] = obj.getMin(d);
    max[d] = obj.getMax(d);
  }
}
java
/**
 * Extend this bounding box to also cover the given object.
 *
 * @param obj object to cover (same dimensionality)
 * @return true if the box was modified
 */
public boolean extend(SpatialComparable obj) {
  final int dim = min.length;
  assert (obj.getDimensionality() == dim);
  boolean changed = false;
  for(int d = 0; d < dim; d++) {
    final double lo = obj.getMin(d), hi = obj.getMax(d);
    if(lo < min[d]) {
      min[d] = lo;
      changed = true;
    }
    if(hi > max[d]) {
      max[d] = hi;
      changed = true;
    }
  }
  return changed;
}
java
/**
 * Locate the first class-label column in the bundle.
 *
 * @param bundle bundle to inspect
 * @return column index, or -1 if none exists
 */
public static int findClassLabelColumn(MultipleObjectsBundle bundle) {
  final int cols = bundle.metaLength();
  for(int col = 0; col < cols; col++) {
    if(TypeUtil.CLASSLABEL.isAssignableFromType(bundle.meta(col))) {
      return col;
    }
  }
  return -1; // No class label column present.
}
java
/**
 * Compare hierarchical class labels level by level; on a type clash the
 * string representations are compared instead. When all shared levels are
 * equal, the shorter label sorts first.
 *
 * @param o label to compare with (must be a HierarchicalClassLabel)
 * @return comparison result
 */
@SuppressWarnings({ "unchecked", "rawtypes" }) @Override public int compareTo(ClassLabel o) {
  HierarchicalClassLabel h = (HierarchicalClassLabel) o;
  for (int i = 0; i < this.levelwiseNames.length && i < h.levelwiseNames.length; i++) {
    int comp = 0;
    try {
      Comparable first = this.levelwiseNames[i];
      Comparable second = h.levelwiseNames[i];
      comp = first.compareTo(second);
    } catch (RuntimeException e) {
      // Mixed component types (e.g. Integer vs. String): fall back to
      // comparing the string forms.
      String h1 = (String) (this.levelwiseNames[i] instanceof Integer ? this.levelwiseNames[i].toString() : this.levelwiseNames[i]);
      String h2 = (String) (h.levelwiseNames[i] instanceof Integer ? h.levelwiseNames[i].toString() : h.levelwiseNames[i]);
      comp = h1.compareTo(h2);
    }
    if (comp != 0) {
      return comp;
    }
  }
  // All shared levels equal: order by label length.
  return (this.levelwiseNames.length < h.levelwiseNames.length) ? -1 : ((this.levelwiseNames.length == h.levelwiseNames.length) ? 0 : 1);
}
java
/**
 * Get the label component at the given hierarchy level as a string.
 *
 * @param level hierarchy level
 * @return component name
 */
public String getNameAt(int level) {
  final Object name = this.levelwiseNames[level];
  // Integer components are rendered via toString; others are stored Strings.
  if(name instanceof Integer) {
    return name.toString();
  }
  return (String) name;
}
java
private void recursiveLogResult(StringBuilder buf, Hierarchy<Result> hier, Result result, int depth) { if(result == null) { buf.append("null"); LOG.warning("null result!"); return; } if(depth > 50) { LOG.warning("Probably infinitely nested results, aborting!"); return; } for(int i = 0; i < depth; i++) { buf.append(' '); } buf.append(result.getClass().getSimpleName()).append(": ").append(result.getLongName()) // .append(" (").append(result.getShortName()).append(")\n"); if(hier.numChildren(result) > 0) { for(It<Result> iter = hier.iterChildren(result); iter.valid(); iter.advance()) { recursiveLogResult(buf, hier, iter.get(), depth + 1); } } }
java
/**
 * Add a boolean flag option, set to true.
 *
 * @param optionid flag option to set
 * @return this, for method chaining
 */
public ListParameterization addFlag(OptionID optionid) {
  parameters.add(new ParameterPair(optionid, Flag.SET));
  return this;
}
java
public ArrayList<String> serialize() { ArrayList<String> params = new ArrayList<>(); for(ParameterPair pair : parameters) { params.add(SerializedParameterization.OPTION_PREFIX + pair.option.toString()); if(pair.value instanceof String) { params.add((String) pair.value); } else if(pair.value instanceof Class) { params.add(((Class<?>) pair.value).getCanonicalName()); } else { // Fallback: params.add(pair.value.toString()); } } return params; }
java
/**
 * Run the angle-based outlier detection: use the kNN-accelerated variant
 * for the linear kernel when possible, else the kernel-matrix approach.
 *
 * @param db database
 * @param relation data relation
 * @return outlier result
 */
@Override public OutlierResult run(Database db, Relation<V> relation) {
  DBIDs ids = relation.getDBIDs();
  WritableDoubleDataStore abodvalues = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmaxabod = new DoubleMinMax();
  if(kernelFunction.getClass() == LinearKernelFunction.class) {
    if(!kNNABOD(db, relation, ids, abodvalues, minmaxabod)) {
      // Fallback, if we do not have an index.
      fastABOD(db, relation, ids, abodvalues, minmaxabod);
    }
  }
  else {
    // Non-linear kernels always use the kernel-matrix variant.
    fastABOD(db, relation, ids, abodvalues, minmaxabod);
  }
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Angle-Based Outlier Degree", "abod-outlier", abodvalues, relation.getDBIDs());
  // Low scores indicate outliers, hence the inverted score meta.
  OutlierScoreMeta scoreMeta = new InvertedOutlierScoreMeta(minmaxabod.getMin(), minmaxabod.getMax(), 0.0, Double.POSITIVE_INFINITY);
  return new OutlierResult(scoreMeta, scoreResult);
}
java
/**
 * kNN-accelerated ABOD for the linear kernel, requiring an optimized kNN
 * index; returns false when no such index is available.
 *
 * @param db database
 * @param relation data relation
 * @param ids ids to score
 * @param abodvalues output score store
 * @param minmaxabod output score range
 * @return false if no optimized kNN query was available
 */
private boolean kNNABOD(Database db, Relation<V> relation, DBIDs ids, WritableDoubleDataStore abodvalues, DoubleMinMax minmaxabod) {
  DistanceQuery<V> dq = db.getDistanceQuery(relation, SquaredEuclideanDistanceFunction.STATIC);
  KNNQuery<V> knnq = db.getKNNQuery(dq, DatabaseQuery.HINT_OPTIMIZED_ONLY);
  boolean squared = true;
  if(knnq == null) {
    // Retry with plain Euclidean distance before giving up.
    dq = db.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
    knnq = db.getKNNQuery(dq, DatabaseQuery.HINT_OPTIMIZED_ONLY);
    if(knnq == null) {
      return false;
    }
    squared = false;
  }
  SimilarityQuery<V> lk = db.getSimilarityQuery(relation, LinearKernelFunction.STATIC);
  int k1 = k + 1; // We will get the query point back by the knnq.
  MeanVariance s = new MeanVariance();
  for(DBIDIter pA = ids.iter(); pA.valid(); pA.advance()) {
    KNNList nl = knnq.getKNNForDBID(pA, k1);
    double simAA = lk.similarity(pA, pA);
    s.reset();
    DoubleDBIDListIter iB = nl.iter(), iC = nl.iter();
    for(; iB.valid(); iB.advance()) {
      double dAB = iB.doubleValue();
      double simAB = lk.similarity(pA, iB);
      if(!(dAB > 0.)) {
        continue; // Skip duplicates of the query point.
      }
      for(iC.seek(iB.getOffset() + 1); iC.valid(); iC.advance()) {
        double dAC = iC.doubleValue();
        double simAC = lk.similarity(pA, iC);
        if(!(dAC > 0.)) {
          continue;
        }
        // Exploit bilinearity of scalar product:
        // <B-A, C-A> = <B, C-A> - <A,C-A>
        // = <B,C> - <B,A> - <A,C> + <A,A>
        double simBC = lk.similarity(iB, iC);
        double numerator = simBC - simAB - simAC + simAA;
        if(squared) {
          // dAB, dAC are squared distances here.
          double div = 1. / (dAB * dAC);
          s.put(numerator * div, FastMath.sqrt(div));
        }
        else {
          // dAB, dAC are plain distances here.
          double sqrtdiv = 1. / (dAB * dAC);
          s.put(numerator * sqrtdiv * sqrtdiv, sqrtdiv);
        }
      }
    }
    // ABOF is the (weighted) variance of the angle terms.
    final double abof = s.getNaiveVariance();
    minmaxabod.put(abof);
    abodvalues.putDouble(pA, abof);
  }
  return true;
}
java
/**
 * Kernel-matrix based ABOD, with the k nearest neighbors chosen in the
 * kernel-induced feature space.
 *
 * @param db database
 * @param relation data relation
 * @param ids ids to score
 * @param abodvalues output score store
 * @param minmaxabod output score range
 */
private void fastABOD(Database db, Relation<V> relation, DBIDs ids, WritableDoubleDataStore abodvalues, DoubleMinMax minmaxabod) {
  // Build a kernel matrix, to make O(n^3) slightly less bad.
  SimilarityQuery<V> sq = db.getSimilarityQuery(relation, kernelFunction);
  KernelMatrix kernelMatrix = new KernelMatrix(sq, relation, ids);
  MeanVariance s = new MeanVariance();
  KNNHeap nn = DBIDUtil.newHeap(k);
  for(DBIDIter pA = ids.iter(); pA.valid(); pA.advance()) {
    final double simAA = kernelMatrix.getSimilarity(pA, pA);
    // Choose the k-min nearest
    nn.clear();
    for(DBIDIter nB = relation.iterDBIDs(); nB.valid(); nB.advance()) {
      if(DBIDUtil.equal(nB, pA)) {
        continue;
      }
      double simBB = kernelMatrix.getSimilarity(nB, nB);
      double simAB = kernelMatrix.getSimilarity(pA, nB);
      // Squared distance in feature space: <A,A> + <B,B> - 2<A,B>.
      double sqdAB = simAA + simBB - simAB - simAB;
      if(!(sqdAB > 0.)) {
        continue; // Skip duplicates (and NaNs).
      }
      nn.insert(sqdAB, nB);
    }
    KNNList nl = nn.toKNNList();
    s.reset();
    DoubleDBIDListIter iB = nl.iter(), iC = nl.iter();
    for(; iB.valid(); iB.advance()) {
      double sqdAB = iB.doubleValue();
      double simAB = kernelMatrix.getSimilarity(pA, iB);
      if(!(sqdAB > 0.)) {
        continue;
      }
      for(iC.seek(iB.getOffset() + 1); iC.valid(); iC.advance()) {
        double sqdAC = iC.doubleValue();
        double simAC = kernelMatrix.getSimilarity(pA, iC);
        if(!(sqdAC > 0.)) {
          continue;
        }
        // Exploit bilinearity of scalar product:
        // <B-A, C-A> = <B, C-A> - <A,C-A>
        // = <B,C> - <B,A> - <A,C> + <A,A>
        double simBC = kernelMatrix.getSimilarity(iB, iC);
        double numerator = simBC - simAB - simAC + simAA;
        double div = 1. / (sqdAB * sqdAC);
        s.put(numerator * div, FastMath.sqrt(div));
      }
    }
    // ABOF is the (weighted) variance of the angle terms.
    final double abof = s.getNaiveVariance();
    minmaxabod.put(abof);
    abodvalues.putDouble(pA, abof);
  }
}
java
/**
 * Gaussian-style weight via the complementary error function of the
 * standardized distance; degenerate deviations get full weight.
 *
 * @param distance distance value
 * @param max maximum distance (unused here)
 * @param stddev standard deviation for normalization
 * @return weight in (0, 1]
 */
@Override
public double getWeight(double distance, double max, double stddev) {
  if(stddev <= 0) {
    return 1; // No usable deviation: weight fully.
  }
  final double standardized = MathUtil.SQRTHALF * distance / stddev;
  return NormalDistribution.erfc(standardized);
}
java
/**
 * Run the LOF algorithm: materialize kNN, compute LRDs, then LOF scores.
 *
 * @param database database
 * @param relation data relation
 * @return outlier result with LOF scores
 */
public OutlierResult run(Database database, Relation<O> relation) {
  StepProgress stepprog = LOG.isVerbose() ? new StepProgress("LOF", 3) : null;
  DBIDs ids = relation.getDBIDs();
  LOG.beginStep(stepprog, 1, "Materializing nearest-neighbor sets.");
  KNNQuery<O> knnq = DatabaseUtil.precomputedKNNQuery(database, relation, getDistanceFunction(), k);
  // Compute LRDs
  LOG.beginStep(stepprog, 2, "Computing Local Reachability Densities (LRD).");
  WritableDoubleDataStore lrds = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  computeLRDs(knnq, ids, lrds);
  // compute LOF_SCORE of each db object
  LOG.beginStep(stepprog, 3, "Computing Local Outlier Factors (LOF).");
  WritableDoubleDataStore lofs = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  // track the maximum value for normalization.
  DoubleMinMax lofminmax = new DoubleMinMax();
  computeLOFScores(knnq, ids, lrds, lofs, lofminmax);
  LOG.setCompleted(stepprog);
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Local Outlier Factor", "lof-outlier", lofs, ids);
  OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta(lofminmax.getMin(), lofminmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 1.0);
  return new OutlierResult(scoreMeta, scoreResult);
}
java
/**
 * Compute the local reachability density for every id.
 *
 * @param knnq kNN query
 * @param ids ids to process
 * @param lrds output store for the LRD values
 */
private void computeLRDs(KNNQuery<O> knnq, DBIDs ids, WritableDoubleDataStore lrds) {
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Local Reachability Densities (LRD)", ids.size(), LOG) : null;
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    lrds.putDouble(it, computeLRD(knnq, it));
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
java
protected double computeLRD(KNNQuery<O> knnq, DBIDIter curr) { final KNNList neighbors = knnq.getKNNForDBID(curr, k); double sum = 0.0; int count = 0; for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) { if(DBIDUtil.equal(curr, neighbor)) { continue; } KNNList neighborsNeighbors = knnq.getKNNForDBID(neighbor, k); sum += MathUtil.max(neighbor.doubleValue(), neighborsNeighbors.getKNNDistance()); count++; } // Avoid division by 0 return (sum > 0) ? (count / sum) : Double.POSITIVE_INFINITY; }
java
private void computeLOFScores(KNNQuery<O> knnq, DBIDs ids, DoubleDataStore lrds, WritableDoubleDataStore lofs, DoubleMinMax lofminmax) { FiniteProgress progressLOFs = LOG.isVerbose() ? new FiniteProgress("Local Outlier Factor (LOF) scores", ids.size(), LOG) : null; double lof; for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { lof = computeLOFScore(knnq, iter, lrds); lofs.putDouble(iter, lof); // update minimum and maximum lofminmax.put(lof); LOG.incrementProcessed(progressLOFs); } LOG.ensureCompleted(progressLOFs); }
java
/**
 * Compute the LOF score of a single object: the mean ratio of the
 * neighbors' LRDs to the object's own LRD.
 *
 * NOTE(review): if every neighbor equals the query point, count remains 0
 * and the result is NaN (0/0) — confirm upstream guarantees k large enough.
 *
 * @param knnq kNN query
 * @param cur current object
 * @param lrds precomputed LRD values
 * @return LOF score
 */
protected double computeLOFScore(KNNQuery<O> knnq, DBIDRef cur, DoubleDataStore lrds) {
  final double lrdp = lrds.doubleValue(cur);
  // Infinite own density (all reach-distances were 0): score as 1.
  if(Double.isInfinite(lrdp)) {
    return 1.0;
  }
  double sum = 0.;
  int count = 0;
  final KNNList neighbors = knnq.getKNNForDBID(cur, k);
  for(DBIDIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) {
    // skip the point itself
    if(DBIDUtil.equal(cur, neighbor)) {
      continue;
    }
    sum += lrds.doubleValue(neighbor);
    ++count;
  }
  return sum / (lrdp * count);
}
java
/**
 * Determine the maximum kNN distance over all entries of this node.
 *
 * @return largest kNN distance
 */
protected double kNNDistance() {
  double result = getEntry(0).getKnnDistance();
  for(int i = 1; i < getNumEntries(); i++) {
    result = Math.max(result, getEntry(i).getKnnDistance());
  }
  return result;
}
java
/**
 * Read one line into the given buffer, dropping '\r' and stopping at '\n'
 * (which is consumed but not appended).
 *
 * @param buf sink for the line's characters
 * @return true if at least one character was consumed (i.e. not yet at EOF)
 * @throws IOException on read errors
 */
public boolean readLine(Appendable buf) throws IOException {
  boolean success = false;
  while(true) {
    // Process buffer:
    while(pos < end) {
      success = true;
      final char c = buffer[pos++];
      if(c == '\n') {
        return success; // End of line (newline consumed, not appended).
      }
      if(c == '\r') {
        continue; // Drop carriage returns (DOS/Windows line endings).
      }
      buf.append(c);
    }
    // Refill buffer:
    assert (pos >= end) : "Buffer wasn't empty when refilling!";
    end = in.read(buffer, 0, buffer.length);
    pos = 0;
    if(end < 0) {
      // End of stream.
      return success;
    }
  }
}
java
/**
 * Choose the pivot candidate closest to the requested rank from five
 * ordered candidate positions m1 <= m2 <= m3 <= m4 <= m5.
 *
 * @param rank requested rank
 * @param m1 outermost lower candidate
 * @param m2 inner lower candidate
 * @param m3 middle candidate
 * @param m4 inner upper candidate
 * @param m5 outermost upper candidate
 * @return chosen pivot position
 */
private static final int bestPivot(int rank, int m1, int m2, int m3, int m4, int m5) {
  // Clamp outward first, then narrow towards the middle candidate.
  return rank < m1 ? m1 //
      : rank > m5 ? m5 //
          : rank < m2 ? m2 //
              : rank > m4 ? m4 //
                  : m3;
}
java
/**
 * Serialize this entry: superclass data, then the kNN distance array
 * (length-prefixed).
 *
 * @param out output stream
 * @throws IOException on write errors
 */
@Override
public void writeExternal(ObjectOutput out) throws IOException {
  super.writeExternal(out);
  final int k_max = knnDistances.length;
  out.writeInt(k_max);
  for(double dist : knnDistances) {
    out.writeDouble(dist);
  }
}
java
/**
 * Deserialize this entry: superclass data, then the kNN distance array
 * (mirrors writeExternal).
 *
 * @param in input stream
 * @throws IOException on read errors
 * @throws ClassNotFoundException from the superclass
 */
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  super.readExternal(in);
  final int k_max = in.readInt();
  final double[] dists = new double[k_max];
  for(int i = 0; i < k_max; i++) {
    dists[i] = in.readDouble();
  }
  knnDistances = dists;
}
java
/**
 * Evaluate the fitted logarithmic model at position k; below the fitted
 * range the value is positive infinity.
 *
 * @param k position to evaluate at
 * @return model value m * log(k) + t, or positive infinity for k < k_0
 */
public double getValueAt(int k) {
  return k < k_0 ? Double.POSITIVE_INFINITY : m * FastMath.log(k) + t;
}
java
/**
 * Perform one Levenberg-Marquardt iteration: solve the damped normal
 * equations, evaluate the trial parameters, and adjust the damping factor
 * lambda depending on whether chi-squared improved.
 */
public void iterate() {
  // build covmat out of fitting matrix by multiplying diagonal elements with
  // 1+lambda
  for(int i = 0; i < numfit; i++) {
    System.arraycopy(alpha[i], 0, covmat[i], 0, numfit);
    covmat[i][i] *= (1.0 + lambda);
  }
  // Solve the equation system (Gauss-Jordan)
  LinearEquationSystem ls = new LinearEquationSystem(covmat, beta);
  ls.solveByTotalPivotSearch();
  // update covmat with the inverse
  covmat = ls.getCoefficents();
  // and deltaparams with the solution vector
  deltaparams = ls.getRHS();
  // deltaparams = beta;
  // Build the trial parameter vector (only the fitted parameters move).
  for(int i = 0, i2 = 0; i < numparams; i++) {
    if(dofit[i]) {
      paramstry[i] = params[i] + deltaparams[i2++];
    }
  }
  double newchisq = simulateParameters(paramstry);
  // have the results improved?
  if(newchisq < chisq) {
    // Improvement: decrease the damping, accept the trial parameters.
    // TODO: Do we need a larger limit than MIN_NORMAL?
    if(lambda * 0.1 > Double.MIN_NORMAL) {
      lambda *= 0.1;
    }
    chisq = newchisq;
    // keep modified covmat as new alpha matrix
    // and da as new beta
    for(int i = 0; i < numfit; i++) {
      System.arraycopy(covmat[i], 0, alpha[i], 0, numfit);
      beta[i] = deltaparams[i];
    }
    System.arraycopy(paramstry, 0, params, 0, numparams);
  }
  else {
    // No improvement: increase the damping and keep the old parameters.
    // TODO: Do we need a larger limit than MAX_VALUE?
    // Does it ever make sense to go as far up?
    // Anyway, this should prevent overflows.
    if(lambda * 10 < Double.MAX_VALUE) {
      lambda *= 10;
    }
  }
}
java
public void run() { int maxruns = this.maxruns, maxsmall = this.maxsmall; double oldchi = getChiSq(); while(maxruns-- > 0) { iterate(); double newchi = getChiSq(), deltachi = newchi - oldchi; oldchi = newchi; // stop condition: only a small improvement in Chi. if(deltachi < 0 && deltachi > -small && --maxsmall < 0) { break; } } }
java
/**
 * Retrieve a copy of the stored vector for the given object.
 *
 * @param object lookup key
 * @return cloned vector, or null if not present
 */
public double[] get(T object) {
  final double[] stored = map.get(object);
  // Defensive copy, so callers cannot mutate the stored data.
  return stored == null ? null : stored.clone();
}
java
/**
 * Compute the fraction of the total layout area covered by occupied cells.
 *
 * @return used area divided by total area
 */
public double relativeFill() {
  final int cols = widths.size(), rows = heights.size();
  double used = 0.0;
  // Sum the area of every occupied grid cell.
  for(int y = 0; y < rows; y++) {
    for(int x = 0; x < cols; x++) {
      if(usage.get(y).get(x) != null) {
        used += widths.get(x) * heights.get(y);
      }
    }
  }
  return used / (twidth * theight);
}
java
/**
 * Test whether the vector lies inside this unit: each selected dimension
 * must fall into the half-open interval [lower, upper).
 *
 * @param vector vector to test
 * @return true if contained
 */
public boolean contains(NumberVector vector) {
  for(int i = 0; i < dims.length; i++) {
    final double v = vector.doubleValue(dims[i]);
    // Lower bound inclusive, upper bound exclusive.
    if(v < bounds[i << 1] || v >= bounds[(i << 1) + 1]) {
      return false;
    }
  }
  return true;
}
java
/**
 * Register the id if the vector lies inside this unit.
 *
 * @param id id of the vector
 * @param vector the vector itself
 * @return true if the id was added
 */
public boolean addFeatureVector(DBIDRef id, NumberVector vector) {
  if(!contains(vector)) {
    return false;
  }
  ids.add(id);
  return true;
}
java
/**
 * Test whether the given unit is the adjacent neighbor of this unit in the
 * last dimension: all leading dimensions (and intervals) must agree, and
 * this unit's lower bound in the last dimension must equal the other
 * unit's upper bound.
 *
 * NOTE(review): the parameter d is unused here — confirm whether it should
 * select the dimension to compare instead of always the last one.
 *
 * @param unit unit to test
 * @param d dimension parameter (currently unused)
 * @return true if the units are adjacent in the last dimension
 */
protected boolean containsRightNeighbor(CLIQUEUnit unit, int d) {
  final int e = dims.length - 1;
  return checkDimensions(unit, e) && bounds[e << 1] == unit.bounds[(e << 1) + 1];
}
java
/**
 * Join two units of the same dimensionality that agree on all but the last
 * dimension, intersecting their id sets.
 *
 * @param other unit to join with (its last dimension must be larger)
 * @param all total number of objects, for the selectivity ratio
 * @param tau minimum selectivity threshold
 * @return the joined unit, or null if not joinable or below tau
 */
protected CLIQUEUnit join(CLIQUEUnit other, double all, double tau) {
  if(other.dimensionality() != this.dimensionality()) {
    return null;
  }
  // n-1 dimensions must be the same:
  int e = dims.length - 1;
  if(!checkDimensions(other, e)) {
    return null;
  }
  // Enforce a canonical join order to avoid generating duplicates.
  if(dims[e] >= other.dims[e]) {
    return null;
  }
  HashSetModifiableDBIDs resultIDs = DBIDUtil.newHashSet(this.ids);
  resultIDs.retainAll(other.ids);
  // Prune joins with too low selectivity.
  if(resultIDs.size() / all < tau) {
    return null;
  }
  return new CLIQUEUnit(this, other.dims[e], other.bounds[e << 1], other.bounds[(e << 1) + 1], resultIDs);
}
java
/**
 * Check whether the first e dimensions of this unit (dimension numbers and
 * interval bounds) agree with those of the other unit.
 *
 * @param other unit to compare with
 * @param e number of leading dimensions to compare
 * @return true if all compared dimensions and intervals match
 */
private boolean checkDimensions(CLIQUEUnit other, int e) {
  for(int i = 0, j = 0; i < e; i++, j += 2) {
    // FIX: the upper bound was compared against itself
    // (bounds[j + 1] != bounds[j + 1], always false), so units with
    // differing upper bounds were incorrectly treated as matching.
    if(dims[i] != other.dims[i] || bounds[j] != other.bounds[j] || bounds[j + 1] != other.bounds[j + 1]) {
      return false;
    }
  }
  return true;
}
java
/**
 * Thread-safely fold another min/max range into this one; put() extends
 * the stored minimum and maximum as needed.
 *
 * @param minmax range to merge in
 */
protected synchronized void merge(DoubleMinMax minmax) {
  this.minmax.put(minmax.getMin());
  this.minmax.put(minmax.getMax());
}
java
/**
 * Enable drag initiation: listen for mouse-down events on the element.
 */
public void enableStart() {
  ((EventTarget) element).addEventListener(SVGConstants.SVG_EVENT_MOUSEDOWN, this, false);
}
java
/**
 * Disable drag initiation: stop listening for mouse-down events on the
 * element.
 */
public void disableStart() {
  ((EventTarget) element).removeEventListener(SVGConstants.SVG_EVENT_MOUSEDOWN, this, false);
}
java
protected void enableStop() { EventTarget targ = svgp.getDocument().getRootElement(); targ.addEventListener(SVGConstants.SVG_EVENT_MOUSEMOVE, this, false); targ.addEventListener(SVGConstants.SVG_EVENT_MOUSEUP, this, false); // FIXME: listen on the background object! targ.addEventListener(SVGConstants.SVG_EVENT_MOUSEOUT, this, false); }
java
protected void disableStop() { EventTarget targ = svgp.getDocument().getRootElement(); targ.removeEventListener(SVGConstants.SVG_EVENT_MOUSEMOVE, this, false); targ.removeEventListener(SVGConstants.SVG_EVENT_MOUSEUP, this, false); // FIXME: listen on the background object! targ.removeEventListener(SVGConstants.SVG_EVENT_MOUSEOUT, this, false); }
java
/**
 * Convert an event's screen position into the coordinate system of the
 * reference element.
 *
 * @param evt event to extract the position from
 * @return position in SVG element coordinates
 */
protected SVGPoint getCoordinates(Event evt) {
  return SVGUtil.elementCoordinatesFromEvent(svgp.getDocument(), coordref, evt);
}
java
/**
 * Notify the registered listener that a drag gesture begins.
 *
 * @param startPoint drag start position
 * @param evt triggering event
 * @return the listener's verdict, or true when no listener is registered
 */
protected boolean startDrag(SVGPoint startPoint, Event evt) {
  return listener == null || listener.startDrag(startPoint, evt);
}
java
/**
 * Notify the registered listener of drag progress.
 *
 * @param startPoint drag start position
 * @param dragPoint current drag position
 * @param evt triggering event
 * @param inside whether the pointer is still inside the drag area
 * @return the listener's verdict, or true when no listener is registered
 */
protected boolean duringDrag(SVGPoint startPoint, SVGPoint dragPoint, Event evt, boolean inside) {
  return listener == null || listener.duringDrag(startPoint, dragPoint, evt, inside);
}
java
/**
 * Style the element as an invisible but still clickable overlay:
 * fully transparent fill with a pointer cursor.
 */
public void makeInvisible() {
  CSSClass css = new CSSClass(this, "unused");
  css.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, "0");
  css.setStatement(SVGConstants.CSS_CURSOR_PROPERTY, SVGConstants.CSS_POINTER_VALUE);
  SVGUtil.setAtt(element, SVGConstants.SVG_STYLE_ATTRIBUTE, css.inlineCSS());
}
java
/**
 * Style the element as a visible overlay: translucent green fill with a
 * pointer cursor.
 */
public void makeVisible() {
  CSSClass css = new CSSClass(this, "unused");
  css.setStatement(SVGConstants.CSS_FILL_PROPERTY, SVGConstants.CSS_GREEN_VALUE);
  css.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, "0.2");
  css.setStatement(SVGConstants.CSS_CURSOR_PROPERTY, SVGConstants.CSS_POINTER_VALUE);
  SVGUtil.setAtt(element, SVGConstants.SVG_STYLE_ATTRIBUTE, css.inlineCSS());
}
java
/**
 * Set the value and the value range to visualize.
 *
 * @param val value to display
 * @param min minimum of the value range
 * @param max maximum of the value range
 */
public void setFill(double val, double min, double max) {
  this.min = min;
  this.max = max;
  this.val = val;
}
java
/**
 * Render the bar chart as an SVG group element.
 *
 * Draws a gray background bar spanning the full width; when val lies in a
 * non-degenerate [min, max] range, draws an inner fill bar proportional to
 * (val - min) / (max - min) (optionally from the right when reversed);
 * then optionally a numeric value label inside and a text label to the
 * right of the bar.
 *
 * @param svgp plot to create SVG elements with
 * @param x left coordinate
 * @param y top coordinate
 * @param width total bar width
 * @param height total bar height
 * @return SVG group element containing the chart
 */
public Element build(SVGPlot svgp, double x, double y, double width, double height) {
  Element barchart = svgp.svgElement(SVGConstants.SVG_G_TAG);
  // TODO: use style library for colors!
  // Background bar, spanning the full width:
  Element bar = svgp.svgRect(x, y, width, height);
  bar.setAttribute(SVGConstants.SVG_FILL_ATTRIBUTE, "#a0a0a0");
  bar.setAttribute(SVGConstants.SVG_STROKE_ATTRIBUTE, "#a0a0a0");
  bar.setAttribute(SVGConstants.SVG_STROKE_WIDTH_ATTRIBUTE, String.valueOf(height * 0.01));
  barchart.appendChild(bar);
  // Inner fill bar, only for values inside a valid range:
  if(val >= min && val <= max && min < max) {
    final double frame = 0.02 * height; // margin around the inner bar
    double fpos = (val - min) / (max - min) * (width - 2 * frame);
    Element chart;
    if(reversed) {
      // Fill from the value position to the right edge.
      chart = svgp.svgRect(x + frame + fpos, y + frame, width - fpos - 2 * frame, height - 2 * frame);
    }
    else {
      // Fill from the left edge up to the value position.
      chart = svgp.svgRect(x + frame, y + frame, fpos, height - 2 * frame);
    }
    chart.setAttribute(SVGConstants.SVG_FILL_ATTRIBUTE, "#d4e4f1");
    chart.setAttribute(SVGConstants.SVG_STROKE_ATTRIBUTE, "#a0a0a0");
    chart.setAttribute(SVGConstants.SVG_STROKE_WIDTH_ATTRIBUTE, String.valueOf(height * 0.01));
    barchart.appendChild(chart);
  }
  // Draw the values:
  if(format != null) {
    String num = Double.isNaN(val) ? "NaN" : format.format(val);
    Element lbl = svgp.svgText(x + 0.05 * width, y + 0.75 * height, num);
    lbl.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, "font-size: " + 0.75 * height + "; font-weight: bold");
    barchart.appendChild(lbl);
  }
  // Draw the label
  if(label != null) {
    Element lbl = svgp.svgText(x + 1.05 * width, y + 0.75 * height, label);
    lbl.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, "font-size: " + 0.75 * height + "; font-weight: normal");
    barchart.appendChild(lbl);
  }
  return barchart;
}
java
/**
 * Memory-map the input file read-only, unless it is already mapped.
 *
 * @throws AbortException when mapping fails with an I/O error
 */
void openBuffer() {
  if(buffer != null) {
    return; // Already mapped.
  }
  try {
    buffer = input.map(MapMode.READ_ONLY, 0, input.size());
  }
  catch(IOException e) {
    throw new AbortException("Cannot map input bundle.", e);
  }
}
java
/**
 * Read the bundle header: verify the magic number, then deserialize the
 * type information of each column and set up the per-column serializers.
 *
 * Side effects: initializes {@code meta}, {@code sers} and {@code data};
 * sets {@code hasids} when the first column stores DBIDs.
 *
 * @throws AbortException when the magic number mismatches or
 *         deserialization fails
 */
void readMeta() {
  final int check = buffer.getInt();
  if(check != MAGIC) {
    throw new AbortException("File does not start with expected magic.");
  }
  final int nummeta = buffer.getInt();
  assert (nummeta > 0) : "Empty bundle?";
  meta = new BundleMeta(nummeta);
  sers = new ByteBufferSerializer<?>[nummeta];
  data = new Object[nummeta];
  for(int i = 0; i < nummeta; i++) {
    try {
      @SuppressWarnings("unchecked")
      SimpleTypeInformation<? extends Object> type = (SimpleTypeInformation<? extends Object>) TypeInformationSerializer.STATIC.fromByteBuffer(buffer);
      sers[i] = type.getSerializer();
      // A leading DBID column is only flagged, not added to the metadata:
      if(i == 0 && DBID.class.isAssignableFrom(type.getRestrictionClass())) {
        hasids = true;
      }
      else {
        meta.add(type);
      }
    }
    catch(UnsupportedOperationException e) {
      throw new AbortException("Deserialization failed: " + e.getMessage(), e);
    }
    catch(IOException e) {
      throw new AbortException("IO error", e);
    }
  }
}
java
/**
 * Read the next object (one value per column) from the buffer into the
 * data array, using the per-column serializers.
 *
 * @throws AbortException on deserialization or I/O errors
 */
void readObject() {
  for(int col = 0; col < sers.length; ++col) {
    try {
      data[col] = sers[col].fromByteBuffer(buffer);
    }
    catch(UnsupportedOperationException e) {
      throw new AbortException("Deserialization failed.", e);
    }
    catch(IOException e) {
      throw new AbortException("IO error", e);
    }
  }
}
java
/**
 * Run Leader clustering: take the next unassigned point as a cluster
 * prototype, assign every unassigned point within the distance threshold
 * to it, and repeat until all points are assigned.
 *
 * @param relation data relation to cluster
 * @return clustering with one prototype-model cluster per leader
 */
public Clustering<PrototypeModel<O>> run(Relation<O> relation) {
  RangeQuery<O> rq = relation.getRangeQuery(getDistanceFunction(), threshold);
  // Points already assigned to some cluster:
  ModifiableDBIDs seen = DBIDUtil.newHashSet(relation.size());
  Clustering<PrototypeModel<O>> clustering = new Clustering<>("Prototype clustering", "prototype-clustering");
  int queries = 0;
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Leader clustering", relation.size(), LOG) : null;
  // Stop early once every point has been assigned.
  for(DBIDIter it = relation.iterDBIDs(); it.valid() && seen.size() < relation.size(); it.advance()) {
    if(seen.contains(it)) {
      continue; // Already a member of an earlier cluster.
    }
    // The current point becomes the leader; gather its neighborhood.
    DoubleDBIDList res = rq.getRangeForDBID(it, threshold);
    ++queries;
    ModifiableDBIDs ids = DBIDUtil.newArray(res.size());
    for(DBIDIter cand = res.iter(); cand.valid(); cand.advance()) {
      // Only unassigned neighbors join this cluster (seen.add is the test).
      if(seen.add(cand)) {
        LOG.incrementProcessed(prog);
        ids.add(cand);
      }
    }
    // The leader itself must always end up in its own cluster.
    assert (ids.size() > 0 && ids.contains(it));
    PrototypeModel<O> mod = new SimplePrototypeModel<>(relation.get(it));
    clustering.addToplevelCluster(new Cluster<>(ids, mod));
  }
  LOG.statistics(new LongStatistic(this.getClass().getName() + ".queries", queries));
  LOG.ensureCompleted(prog);
  return clustering;
}
java
@Override public void writeToText(TextWriterStream out, String label) { String name = getNameAutomatic(); if(name != null) { out.commentPrintLn("Cluster name: " + name); } out.commentPrintLn("Cluster noise flag: " + isNoise()); out.commentPrintLn("Cluster size: " + ids.size()); // also print model, if any and printable if(getModel() != null && (getModel() instanceof TextWriteable)) { ((TextWriteable) getModel()).writeToText(out, label); } }
java
/**
 * Swap the displayed plot: detach the old plot and attach the new one.
 *
 * When the SVG update manager is running, the swap is scheduled on the
 * update thread; only the most recently scheduled swap is executed.
 * Otherwise the swap happens synchronously.
 *
 * @param oldplot plot to detach (may be null)
 * @param newplot plot to attach (may be null)
 */
private void scheduleSetPlot(final SVGPlot oldplot, final SVGPlot newplot) {
  UpdateManager um = this.getUpdateManager();
  if(um != null) {
    synchronized(um) {
      if(um.isRunning()) {
        // LoggingUtil.warning("Scheduling detach: " + this + " " + oldplot);
        final Runnable detach = new Runnable() {
          @Override
          public void run() {
            // "this" is the Runnable here: only perform the swap when we
            // are still the latest scheduled update; a newer scheduled
            // swap supersedes this one and we do nothing.
            if(latest.compareAndSet(this, null)) {
              detachPlot(oldplot);
              attachPlot(newplot);
            }
          }
        };
        latest.set(detach);
        um.getUpdateRunnableQueue().preemptLater(detach);
        return;
      }
    }
  }
  else {
    if(oldplot != null) {
      LoggingUtil.warning("No update manager, but a previous plot exists. Incorrectly initialized?");
    }
  }
  // No running update manager: swap synchronously.
  detachPlot(oldplot);
  attachPlot(newplot);
}
java
/**
 * Attach a plot to this canvas: store it, hook up the synchronizer and
 * display its document. A null plot clears the canvas.
 *
 * @param newplot plot to attach, or null to clear
 */
private void attachPlot(SVGPlot newplot) {
  this.plot = newplot;
  if(newplot != null) {
    newplot.synchronizeWith(synchronizer);
    super.setSVGDocument(newplot.getDocument());
    super.setDisableInteractions(newplot.getDisableInteractions());
  }
  else {
    super.setSVGDocument(null);
  }
}
java
/**
 * Detach a plot from this canvas: clear the reference and unhook the
 * synchronizer. A null plot is a no-op.
 *
 * @param oldplot plot to detach, or null
 */
private void detachPlot(SVGPlot oldplot) {
  if(oldplot != null) {
    this.plot = null;
    oldplot.unsynchronizeWith(synchronizer);
  }
}
java
/**
 * Estimate the intrinsic dimensionality from a neighbor distance list,
 * skipping zero distances and the query point itself.
 *
 * @param ignore query point to exclude from its own neighborhood
 * @param it iterator over (distance, id) pairs; will be rewound
 * @param p scratch buffer for the collected distances
 * @return estimated intrinsic dimensionality
 * @throws ArithmeticException when fewer than two usable distances remain
 */
protected double estimateID(DBIDRef ignore, DoubleDBIDListIter it, double[] p) {
  int count = 0;
  for(it.seek(0); it.valid(); it.advance()) {
    final double dist = it.doubleValue();
    // Zero distances carry no information; skip the query point, too.
    if(dist != 0. && !DBIDUtil.equal(ignore, it)) {
      p[count++] = dist;
    }
  }
  if(count < 2) {
    throw new ArithmeticException("Too little data to estimate ID.");
  }
  return estimator.estimate(p, count);
}
java
/**
 * Log probability density of the exponential distribution.
 *
 * @param val value to evaluate at
 * @param rate rate parameter lambda
 * @return log density; negative infinity for val &lt; 0
 */
public static double logpdf(double val, double rate) {
  if(val < 0.) {
    return Double.NEGATIVE_INFINITY;
  }
  return FastMath.log(rate) - rate * val;
}
java
/**
 * Quantile (inverse CDF) of the exponential distribution.
 *
 * @param val probability in [0, 1]
 * @param rate rate parameter lambda
 * @return quantile value; NaN for probabilities outside [0, 1]
 */
public static double quantile(double val, double rate) {
  if(!(val >= 0 && val <= 1)) {
    return Double.NaN; // Also covers NaN input.
  }
  return -FastMath.log(1 - val) / rate;
}
java
/**
 * Emit the pending line break and the indentation of the next line.
 * BREAK emits a Markdown hard break (backslash + newline), PAR a blank
 * line, NEWLINE a plain newline; NONE emits nothing.
 *
 * @return this, for chaining
 */
private MarkdownDocStream pendingBreak() {
  if(newline == Newline.NONE) {
    return this;
  }
  final String sep = newline == Newline.BREAK ? "\\\n" : newline == Newline.PAR ? "\n\n" : "\n";
  out.append(sep);
  // Write the indentation, in chunks of at most WHITESPACES characters.
  int remaining = indent;
  while(remaining > 0) {
    final int chunk = remaining > WHITESPACES.length() ? WHITESPACES.length() : remaining;
    out.append(WHITESPACES, 0, chunk);
    remaining -= chunk;
  }
  newline = Newline.NONE;
  return this;
}
java
/**
 * Append a single character. Newlines are not written immediately but
 * recorded as pending breaks (one newline = line break, two = paragraph).
 *
 * @param c character to append
 * @return this, for chaining
 */
public MarkdownDocStream append(char c) {
  if(c != '\n') {
    pendingBreak(); // Flush any pending break before regular content.
    out.append(c);
    return this;
  }
  // First newline: pending line break; a second upgrades it to a paragraph.
  newline = (newline == Newline.NONE) ? Newline.NEWLINE : Newline.PAR;
  return this;
}
java
public MarkdownDocStream append(CharSequence p, int start, int end) { for(int pos = start; pos < end; ++pos) { final char c = p.charAt(pos); if(c == '\r') { continue; } append(c); // Uses \n magic. } return this; }
java
/**
 * Change the indentation depth for subsequent lines. When dedenting, the
 * pending break is upgraded (hard break becomes a plain newline, anything
 * else a paragraph) so the indentation change takes effect cleanly.
 *
 * @param newindent new indentation depth
 * @return this, for chaining
 */
public MarkdownDocStream indent(int newindent) {
  if(newindent < indent) {
    newline = (newline == Newline.BREAK) ? Newline.NEWLINE : Newline.PAR;
  }
  indent = newindent;
  return this;
}
java
public final void render(GL2 gl) { gl.glMatrixMode(GL2.GL_PROJECTION); gl.glPushMatrix(); gl.glLoadIdentity(); gl.glMatrixMode(GL2.GL_MODELVIEW); gl.glPushMatrix(); gl.glLoadIdentity(); gl.glOrtho(0, width, 0, height, -1, +1); gl.glColor4f(0f, 0f, 0f, .5f); // Fade background: gl.glBegin(GL2.GL_QUADS); gl.glVertex2f(0f, 0f); gl.glVertex2f(width, 0f); gl.glVertex2f(width, height); gl.glVertex2f(0f, height); gl.glEnd(); renderContents(gl); gl.glMatrixMode(GL2.GL_PROJECTION); gl.glPopMatrix(); gl.glMatrixMode(GL2.GL_MODELVIEW); gl.glPopMatrix(); }
java
/**
 * Assign a page id to the given page if it does not have one yet,
 * preferring a recycled empty page id over a newly allocated one.
 *
 * @param page page to assign an id to
 * @return the page's (possibly newly assigned) id
 */
@Override
public int setPageID(P page) {
  int id = page.getPageID();
  if(id != -1) {
    return id; // Already assigned.
  }
  // Prefer reusing an empty page; otherwise allocate a fresh id.
  id = getNextEmptyPageID();
  if(id == -1) {
    id = nextPageID++;
  }
  page.setPageID(id);
  return id;
}
java
/**
 * Append the polygon points to a buffer in SVG "points" syntax:
 * coordinates comma-separated within a point, points space-separated.
 *
 * @param buf buffer to append to
 * @return the same buffer, for chaining
 */
public StringBuilder appendToBuffer(StringBuilder buf) {
  boolean firstPoint = true;
  for(double[] point : points) {
    if(!firstPoint) {
      buf.append(' '); // Separator between points.
    }
    firstPoint = false;
    for(int i = 0; i < point.length; i++) {
      if(i > 0) {
        buf.append(','); // Separator between coordinates.
      }
      buf.append(point[i]);
    }
  }
  return buf;
}
java
/**
 * Point-in-polygon test via ray casting (even-odd rule): count edge
 * crossings of a horizontal ray from the query point.
 *
 * @param v query point as {x, y}
 * @return true when the point lies inside the polygon
 */
public boolean containsPoint2D(double[] v) {
  assert (v.length == 2);
  final double qx = v[0], qy = v[1];
  boolean inside = false;
  // Walk edges (prev, cur), starting with the closing edge (last, first).
  double[] prev = points.get(points.size() - 1);
  for(double[] cur : points) {
    final double cx = cur[0], cy = cur[1];
    final double px = prev[0], py = prev[1];
    // Edge straddles the ray's y, and the crossing is right of the point:
    if((cy > qy) != (py > qy) && qx < (px - cx) * (qy - cy) / (py - cy) + cx) {
      inside = !inside;
    }
    prev = cur;
  }
  return inside;
}
java
/**
 * Build a transformation that reorders the axes: output axis i takes its
 * value from input axis axes[i] (1-indexed); the remaining output axes are
 * filled with the input axes not listed in {@code axes}, in ascending
 * order.
 *
 * @param dim dimensionality of the vector space
 * @param axes 1-indexed input axes mapped to the first output dimensions
 * @return affine transformation performing the permutation
 */
public static AffineTransformation reorderAxesTransformation(int dim, int... axes) {
  double[][] m = zeroMatrix(dim + 1);
  // insert ones appropriately:
  for(int i = 0; i < axes.length; i++) {
    assert (0 < axes[i] && axes[i] <= dim);
    m[i][axes[i] - 1] = 1.0;
  }
  // Fill the remaining rows with the axes not explicitly requested above,
  // in ascending order (useddim is 1-indexed).
  int useddim = 1;
  for(int i = axes.length; i < dim + 1; i++) {
    // find next "unused" dimension.
    {
      boolean search = true;
      while(search) {
        search = false;
        for(int a : axes) {
          if(a == useddim) {
            // This dimension was consumed by an explicit axis; advance.
            search = true;
            useddim++;
            break;
          }
        }
      }
    }
    m[i][useddim - 1] = 1.0;
    useddim++;
  }
  assert (useddim - 2 == dim);
  return new AffineTransformation(dim, m, null);
}
java
public void addTranslation(double[] v) { assert (v.length == dim); // reset inverse transformation - needs recomputation. inv = null; double[][] homTrans = unitMatrix(dim + 1); for(int i = 0; i < dim; i++) { homTrans[i][dim] = v[i]; } trans = times(homTrans, trans); }
java
public void addMatrix(double[][] m) { assert (m.length == dim); assert (m[0].length == dim); // reset inverse transformation - needs recomputation. inv = null; // extend the matrix with an extra row and column double[][] ht = new double[dim + 1][dim + 1]; for(int i = 0; i < dim; i++) { for(int j = 0; j < dim; j++) { ht[i][j] = m[i][j]; } } // the other cells default to identity matrix ht[dim][dim] = 1.0; // Multiply from left. trans = times(ht, trans); }
java
public void addRotation(int axis1, int axis2, double angle) { // TODO: throw an exception instead of using assert assert (axis1 >= 0); assert (axis1 < dim); assert (axis1 >= 0); assert (axis2 < dim); assert (axis1 != axis2); // reset inverse transformation - needs recomputation. inv = null; double[][] ht = new double[dim + 1][dim + 1]; // identity matrix for(int i = 0; i < dim + 1; i++) { ht[i][i] = 1.0; } // insert rotation values final DoubleWrapper tmp = new DoubleWrapper(); // To return cosine double s = FastMath.sinAndCos(angle, tmp), c = tmp.value; ht[axis1][axis1] = +c; ht[axis1][axis2] = -s; ht[axis2][axis1] = +s; ht[axis2][axis2] = +c; // Multiply from left trans = times(ht, trans); }
java
public void addAxisReflection(int axis) { assert (0 < axis && axis <= dim); // reset inverse transformation - needs recomputation. inv = null; // Formal: // Matrix homTrans = Matrix.unitMatrix(dim + 1); // homTrans[axis - 1][axis - 1] = -1; // trans = homTrans.times(trans); // Faster: for(int i = 0; i <= dim; i++) { trans[axis - 1][i] = -trans[axis - 1][i]; } }
java
/**
 * Embed a vector into homogeneous coordinates as a position
 * (homogeneous coordinate 1).
 *
 * @param v vector of length dim
 * @return copy of v with an appended 1.0
 */
public double[] homogeneVector(double[] v) {
  assert (v.length == dim);
  final double[] h = Arrays.copyOf(v, dim + 1);
  h[dim] = 1.0;
  return h;
}
java
public double[] homogeneRelativeVector(double[] v) { assert (v.length == dim); // TODO: this only works properly when trans[dim][dim] == 1.0, right? double[] dv = Arrays.copyOf(v, dim + 1); dv[dim] = 0.0; return dv; }
java
/**
 * Lower confidence bound of the support probability, using the normal
 * approximation of the binomial proportion at significance alpha.
 *
 * @param support number of supporting samples
 * @param samples total number of samples
 * @return lower confidence bound, clamped to be non-negative
 */
private double computeConfidence(int support, int samples) {
  final double z = NormalDistribution.standardNormalQuantile(alpha);
  final double p = support / (double) samples;
  // Math.max is kept deliberately: it propagates NaN (e.g. samples == 0)
  // instead of silently mapping it to 0.
  return Math.max(0., p - z * FastMath.sqrt((p * (1 - p)) / samples));
}
java
/**
 * Run the configured clustering algorithm on a materialized sample and
 * re-home its results under the given parent in the result hierarchy.
 *
 * @param hierarchy result hierarchy to attach the results to
 * @param parent parent result node
 * @param ids sample object ids
 * @param store vector data for the sample
 * @param dim vector dimensionality
 * @param title title for the materialized relation
 * @return the clustering produced on the sample
 */
protected Clustering<?> runClusteringAlgorithm(ResultHierarchy hierarchy, Result parent, DBIDs ids, DataStore<DoubleVector> store, int dim, String title) {
  SimpleTypeInformation<DoubleVector> type = new VectorFieldTypeInformation<>(DoubleVector.FACTORY, dim);
  Relation<DoubleVector> sample = new MaterializedRelation<>(type, ids, title, store);
  ProxyDatabase proxy = new ProxyDatabase(ids, sample);
  Clustering<?> result = samplesAlgorithm.run(proxy);
  // Detach from the throwaway proxy database, then attach the relation and
  // its clustering beneath the requested parent instead.
  proxy.getHierarchy().remove(sample);
  proxy.getHierarchy().remove(result);
  hierarchy.add(parent, sample);
  hierarchy.add(sample, result);
  return result;
}
java
public static void load(Class<?> parent, ClassLoader cl) { char[] buf = new char[0x4000]; try { String fullName = RESOURCE_PREFIX + parent.getName(); Enumeration<URL> configfiles = cl.getResources(fullName); while(configfiles.hasMoreElements()) { URL nextElement = configfiles.nextElement(); URLConnection conn = nextElement.openConnection(); conn.setUseCaches(false); try ( InputStreamReader is = new InputStreamReader(conn.getInputStream(), "UTF-8");) { int start = 0, cur = 0, valid = is.read(buf, 0, buf.length); char c; while(cur < valid) { // Find newline or end while(cur < valid && (c = buf[cur]) != '\n' && c != '\r') { cur++; } if(cur == valid && is.ready()) { // Move consumed buffer contents: if(start > 0) { System.arraycopy(buf, start, buf, 0, valid - start); valid -= start; cur -= start; start = 0; } else if(valid == buf.length) { throw new IOException("Buffer size exceeded. Maximum line length in service files is: " + buf.length + " in file: " + fullName); } valid = is.read(buf, valid, buf.length - valid); continue; } parseLine(parent, buf, start, cur); while(cur < valid && ((c = buf[cur]) == '\n' || c == '\r')) { cur++; } start = cur; } } catch(IOException x) { throw new AbortException("Error reading configuration file", x); } } } catch(IOException x) { throw new AbortException("Could not load service configuration files.", x); } }
java