code
stringlengths
73
34.1k
label
stringclasses
1 value
/**
 * Log cumulative density function of a two-shape-parameter distribution.
 *
 * @param val Value to evaluate the log CDF at
 * @param shape1 First shape parameter (0 selects the exponential branch)
 * @param shape2 Second shape parameter (0 selects the exponential tail)
 * @return log CDF at val; NaN input is propagated
 */
public static double logcdf(double val, double shape1, double shape2) {
  // Degenerate inputs first: limits of the CDF at +- infinity.
  if(val == Double.NEGATIVE_INFINITY) {
    return Double.NEGATIVE_INFINITY;
  }
  if(val == Double.POSITIVE_INFINITY) {
    return 0.;
  }
  if(val != val) { // NaN check without Double.isNaN
    return Double.NaN;
  }
  if(shape1 == 0.) {
    // shape1 == 0: plain exponential transform of the input.
    val = FastMath.exp(-val);
  }
  else {
    double tmp = shape1 * val;
    if(tmp == Double.NEGATIVE_INFINITY) {
      return shape2 == 0 ? 0. : Double.NEGATIVE_INFINITY;
    }
    if(tmp >= 1.) {
      // Outside the supported domain for this shape parameter.
      return shape2 == 0 ? Double.NEGATIVE_INFINITY : 0.;
    }
    // log1p for numerical accuracy when tmp is close to 0.
    val = FastMath.exp(FastMath.log1p(-tmp) / shape1);
  }
  if(shape2 == 0.) {
    return -val;
  }
  final double tmp = shape2 * val;
  return tmp < 1. ? FastMath.log1p(-tmp) / shape2 : Double.NEGATIVE_INFINITY;
}
java
/**
 * Query the inverted index with a sparse vector: accumulate dot-product
 * contributions into per-document scores and collect candidate ids.
 *
 * @param obj Sparse query vector
 * @param scores Per-document score accumulator (incremented in place)
 * @param cands Candidate set (documents sharing at least one dimension)
 * @return Euclidean length of the query vector, for final normalization
 */
private double naiveQuerySparse(SparseNumberVector obj, WritableDoubleDataStore scores, HashSetModifiableDBIDs cands) {
  double len = 0.; // Length of query object, for final normalization
  // Sparse iterator protocol: an integer cursor advanced via iterAdvance.
  for(int iter = obj.iter(); obj.iterValid(iter); iter = obj.iterAdvance(iter)) {
    final int dim = obj.iterDim(iter);
    final double val = obj.iterDoubleValue(iter);
    if(val == 0. || val != val) {
      continue; // Skip zero and NaN entries.
    }
    len += val * val;
    // No matching documents in index:
    if(dim >= index.size()) {
      continue;
    }
    ModifiableDoubleDBIDList column = index.get(dim);
    for(DoubleDBIDListIter n = column.iter(); n.valid(); n.advance()) {
      scores.increment(n, n.doubleValue() * val);
      cands.add(n);
    }
  }
  return FastMath.sqrt(len);
}
java
private double naiveQueryDense(NumberVector obj, WritableDoubleDataStore scores, HashSetModifiableDBIDs cands) { double len = 0.; // Length of query object, for final normalization for(int dim = 0, max = obj.getDimensionality(); dim < max; dim++) { final double val = obj.doubleValue(dim); if(val == 0. || val != val) { continue; } len += val * val; // No matching documents in index: if(dim >= index.size()) { continue; } ModifiableDoubleDBIDList column = index.get(dim); for(DoubleDBIDListIter n = column.iter(); n.valid(); n.advance()) { scores.increment(n, n.doubleValue() * val); cands.add(n); } } return FastMath.sqrt(len); }
java
/**
 * Dispatch a query to the sparse or dense implementation depending on the
 * runtime type of the query vector.
 *
 * @param obj Query vector
 * @param scores Per-document score accumulator
 * @param cands Candidate set
 * @return Euclidean length of the query vector
 */
private double naiveQuery(V obj, WritableDoubleDataStore scores, HashSetModifiableDBIDs cands) {
  return (obj instanceof SparseNumberVector) //
      ? naiveQuerySparse((SparseNumberVector) obj, scores, cands) //
      : naiveQueryDense(obj, scores, cands);
}
java
/**
 * Run all configured filters on a bundle stream, transparently switching
 * between streaming filters and bundle-materializing filters as needed.
 *
 * Invariant inside the loop: exactly one of stream / bundle is non-null.
 *
 * @param stream Input stream (must not be null)
 * @return filtered stream (a re-streamed bundle if the last filter
 *         materialized)
 */
protected BundleStreamSource invokeStreamFilters(BundleStreamSource stream) {
  assert (stream != null);
  if(filters == null) {
    return stream; // No filters configured.
  }
  // We dynamically switch between streaming and bundle operations.
  MultipleObjectsBundle bundle = null;
  for(ObjectFilter filter : filters) {
    if(filter instanceof StreamFilter) {
      // Streaming filter: re-stream a materialized bundle if necessary.
      stream = ((StreamFilter) filter).init(bundle != null ? bundle.asStream() : stream);
      bundle = null;
    }
    else {
      // Bundle filter: materialize the stream if necessary.
      bundle = filter.filter(stream != null ? stream.asMultipleObjectsBundle() : bundle);
      stream = null;
    }
  }
  return stream != null ? stream : bundle.asStream();
}
java
private void inferCallerELKI() { needToInferCaller = false; StackTraceElement[] stack = (new Throwable()).getStackTrace(); int ix = 0; // skip back to the logger. while(ix < stack.length) { StackTraceElement frame = stack[ix]; final String cls = frame.getClassName(); if(cls.equals(START_TRACE_AT)) { break; } ix++; } // skip further back through helper functions while(ix < stack.length) { StackTraceElement frame = stack[ix]; final String cls = frame.getClassName(); boolean ignore = false; for(int i = 0; i < IGNORE_CLASSES.length; i++) { if(cls.equals(IGNORE_CLASSES[i])) { ignore = true; break; } } if(!ignore) { super.setSourceClassName(frame.getClassName()); super.setSourceMethodName(frame.getMethodName()); break; } ix++; } }
java
/**
 * Get (or lazily create) the sampling result attached to a relation.
 *
 * @param rel Relation to look up
 * @return existing sampling result, or a freshly registered one
 */
public static SamplingResult getSamplingResult(final Relation<?> rel) {
  // Reuse an existing sampling result when one is already attached.
  Collection<SamplingResult> selections = ResultUtil.filterResults(rel.getHierarchy(), rel, SamplingResult.class);
  if(!selections.isEmpty()) {
    return selections.iterator().next();
  }
  // None found: create and register a fresh one.
  SamplingResult newsam = new SamplingResult(rel);
  ResultUtil.addChildResult(rel, newsam);
  return newsam;
}
java
public Element render(SVGPlot svgp) { Element tag = svgp.svgElement(SVGConstants.SVG_G_TAG); Element button = svgp.svgRect(x, y, w, h); if(!Double.isNaN(r)) { SVGUtil.setAtt(button, SVGConstants.SVG_RX_ATTRIBUTE, r); SVGUtil.setAtt(button, SVGConstants.SVG_RY_ATTRIBUTE, r); } SVGUtil.setAtt(button, SVGConstants.SVG_STYLE_ATTRIBUTE, butcss.inlineCSS()); tag.appendChild(button); // Add light effect: if (svgp.getIdElement(SVGEffects.LIGHT_GRADIENT_ID) != null) { Element light = svgp.svgRect(x, y, w, h); if(!Double.isNaN(r)) { SVGUtil.setAtt(light, SVGConstants.SVG_RX_ATTRIBUTE, r); SVGUtil.setAtt(light, SVGConstants.SVG_RY_ATTRIBUTE, r); } SVGUtil.setAtt(light, SVGConstants.SVG_STYLE_ATTRIBUTE, "fill:url(#"+SVGEffects.LIGHT_GRADIENT_ID+");fill-opacity:.5"); tag.appendChild(light); } // Add shadow effect: if(svgp.getIdElement(SVGEffects.SHADOW_ID) != null) { //Element shadow = svgp.svgRect(x + (w * .05), y + (h * .05), w, h); //SVGUtil.setAtt(button, SVGConstants.SVG_STYLE_ATTRIBUTE, SVGConstants.CSS_FILL_PROPERTY + ":" + SVGConstants.CSS_BLACK_VALUE); button.setAttribute(SVGConstants.SVG_FILTER_ATTRIBUTE, "url(#" + SVGEffects.SHADOW_ID + ")"); //tag.appendChild(shadow); } if(title != null) { Element label = svgp.svgText(x + w * .5, y + h * .7, title); label.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, titlecss.inlineCSS()); tag.appendChild(label); } return tag; }
java
/**
 * Set the button title and the title color.
 *
 * Bug fix: previously the textcolor parameter was silently ignored when the
 * title CSS class had already been created by an earlier call; now the fill
 * color is (re)applied on every call.
 *
 * @param title New title text
 * @param textcolor Title text color (CSS value)
 */
public void setTitle(String title, String textcolor) {
  this.title = title;
  if(titlecss == null) {
    titlecss = new CSSClass(this, "text");
    titlecss.setStatement(SVGConstants.CSS_TEXT_ANCHOR_PROPERTY, SVGConstants.CSS_MIDDLE_VALUE);
    titlecss.setStatement(SVGConstants.CSS_FONT_SIZE_PROPERTY, .6 * h);
  }
  // Always (re)apply the color, so repeated calls can change it.
  titlecss.setStatement(SVGConstants.CSS_FILL_PROPERTY, textcolor);
}
java
/**
 * Build the composite map key for an (item, task) combination.
 *
 * @param item Plot item
 * @param task Visualization task
 * @return key pair
 */
private Pair<PlotItem, VisualizationTask> key(PlotItem item, VisualizationTask task) {
  return new Pair<PlotItem, VisualizationTask>(item, task);
}
java
/**
 * Build the composite map value for an (element, visualization) pair.
 *
 * @param elem SVG element
 * @param vis Visualization instance
 * @return value pair
 */
private Pair<Element, Visualization> value(Element elem, Visualization vis) {
  return new Pair<Element, Visualization>(elem, vis);
}
java
/**
 * Store an element and visualization under the combined (item, task) key.
 *
 * @param it Plot item
 * @param task Visualization task
 * @param elem SVG element
 * @param vis Visualization instance
 */
public void put(PlotItem it, VisualizationTask task, Element elem, Visualization vis) {
  final Pair<PlotItem, VisualizationTask> k = key(it, task);
  map.put(k, value(elem, vis));
}
java
/**
 * Remove and return the entry stored under the (item, task) key.
 *
 * @param it Plot item
 * @param task Visualization task
 * @return removed pair, or null if absent
 */
public Pair<Element, Visualization> remove(PlotItem it, VisualizationTask task) {
  final Pair<PlotItem, VisualizationTask> k = key(it, task);
  return map.remove(k);
}
java
/**
 * Store a pre-built (element, visualization) pair under the combined
 * (item, task) key.
 *
 * @param it Plot item
 * @param task Visualization task
 * @param pair Value pair to store
 */
public void put(PlotItem it, VisualizationTask task, Pair<Element, Visualization> pair) {
  final Pair<PlotItem, VisualizationTask> k = key(it, task);
  map.put(k, pair);
}
java
/**
 * Compute the covering radius of this node from its entries: the maximum of
 * (parent distance + child covering radius) over all children.
 *
 * @param routingObjectID Routing object (unused here, kept for the API)
 * @param mTree Tree (unused here, kept for the API)
 * @return covering radius
 */
public double coveringRadiusFromEntries(DBID routingObjectID, AbstractMTree<O, N, E, ?> mTree) {
  double radius = 0.;
  final int n = getNumEntries();
  for(int i = 0; i < n; i++) {
    final E child = getEntry(i);
    final double reach = child.getParentDistance() + child.getCoveringRadius();
    if(reach > radius) {
      radius = reach;
    }
  }
  return radius;
}
java
/**
 * Squared Euclidean distance of two 2d points (no square root taken).
 *
 * @param v1 First point as (x, y)
 * @param v2 Second point as (x, y)
 * @return squared distance
 */
public static double quadraticEuclidean(double[] v1, double[] v2) {
  final double dx = v1[0] - v2[0];
  final double dy = v1[1] - v2[1];
  return dx * dx + dy * dy;
}
java
/**
 * Aggregate a value into a special (out-of-range) bin.
 *
 * @param value Value to aggregate
 * @param bin Index of the special bin
 */
protected void aggregateSpecial(T value, int bin) {
  final T exist = getSpecial(bin);
  // Note: do not inline above accessor, as getSpecial will initialize the
  // special variable used below!
  special[bin] = aggregate(exist, value);
}
java
/**
 * Discard the previous data relation (unless configured to keep it),
 * logging index statistics once before removal when indexes exist.
 *
 * @param relation Relation to remove from the result hierarchy
 */
protected void removePreviousRelation(Relation<?> relation) {
  if(keep) {
    return; // Configured to retain the previous relation.
  }
  boolean logged = false;
  It<Index> it = relation.getHierarchy().iterDescendants(relation).filter(Index.class);
  while(it.valid()) {
    if(!logged) {
      Logging.getLogger(getClass()).statistics("Index statistics when removing initial data relation.");
      logged = true;
    }
    it.get().logStatistics();
    it.advance();
  }
  ResultUtil.removeRecursive(relation.getHierarchy(), relation);
}
java
/**
 * Compute the aggregated (maximum) kNN distances over all entries of this
 * node, for each k.
 *
 * Improvement: getEntry(i) does not depend on the inner loop variable and
 * was previously fetched k times per entry; it is now hoisted out.
 *
 * @return array of length k where position j holds the maximum
 *         (j+1)-nearest-neighbor distance over all entries
 */
protected double[] kNNDistances() {
  final int k = getEntry(0).getKnnDistances().length;
  double[] result = new double[k];
  for(int i = 0; i < getNumEntries(); i++) {
    // Loop-invariant: fetch the entry once per i, not once per (i, j).
    final MkTabEntry entry = getEntry(i);
    for(int j = 0; j < k; j++) {
      // getKnnDistance uses 1-based neighbor counts.
      result[j] = Math.max(result[j], entry.getKnnDistance(j + 1));
    }
  }
  return result;
}
java
/**
 * Run the Variance-of-Volume (VOV) outlier detection algorithm.
 *
 * @param database Database
 * @param relation Data relation to process
 * @return outlier result with VOV scores (larger values = more outlying)
 */
public OutlierResult run(Database database, Relation<O> relation) {
  StepProgress stepprog = LOG.isVerbose() ? new StepProgress("VOV", 3) : null;
  DBIDs ids = relation.getDBIDs();
  int dim = RelationUtil.dimensionality(relation);
  LOG.beginStep(stepprog, 1, "Materializing nearest-neighbor sets.");
  KNNQuery<O> knnq = DatabaseUtil.precomputedKNNQuery(database, relation, getDistanceFunction(), k);
  // Compute Volumes
  LOG.beginStep(stepprog, 2, "Computing Volumes.");
  WritableDoubleDataStore vols = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  computeVolumes(knnq, dim, ids, vols);
  // compute VOV of each object
  LOG.beginStep(stepprog, 3, "Computing Variance of Volumes (VOV).");
  WritableDoubleDataStore vovs = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  // track the maximum value for normalization.
  DoubleMinMax vovminmax = new DoubleMinMax();
  computeVOVs(knnq, ids, vols, vovs, vovminmax);
  LOG.setCompleted(stepprog);
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Variance of Volume", "vov-outlier", vovs, ids);
  OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta(vovminmax.getMin(), vovminmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 0.0);
  return new OutlierResult(scoreMeta, scoreResult);
}
java
/**
 * Compute the Variance of Volumes (VOV) score for each object.
 *
 * @param knnq kNN query
 * @param ids Objects to process
 * @param vols Precomputed kNN neighborhood volumes
 * @param vovs Output storage for VOV scores
 * @param vovminmax Tracks the score minimum / maximum for normalization
 */
private void computeVOVs(KNNQuery<O> knnq, DBIDs ids, DoubleDataStore vols, WritableDoubleDataStore vovs, DoubleMinMax vovminmax) {
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Variance of Volume", ids.size(), LOG) : null;
  boolean warned = false; // Warn at most once about precision overflow.
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    KNNList knns = knnq.getKNNForDBID(iter, k);
    DoubleDBIDListIter it = knns.iter();
    // First pass: mean neighborhood volume.
    double vbar = 0.;
    for(; it.valid(); it.advance()) {
      vbar += vols.doubleValue(it);
    }
    vbar /= knns.size(); // Average
    // Second pass (iterator rewound via seek): sum of squared deviations.
    double vov = 0.;
    for(it.seek(0); it.valid(); it.advance()) {
      double v = vols.doubleValue(it) - vbar;
      vov += v * v;
    }
    if(!(vov < Double.POSITIVE_INFINITY) && !warned) {
      LOG.warning("Variance of Volumes has hit double precision limits, results are not reliable.");
      warned = true;
    }
    // Sample variance; keep infinity when the sum overflowed.
    vov = (vov < Double.POSITIVE_INFINITY) ? vov / (knns.size() - 1) : Double.POSITIVE_INFINITY;
    vovs.putDouble(iter, vov);
    // update minimum and maximum
    vovminmax.put(vov);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
java
/**
 * Bound the size of a set by replacing its contents with a random sample
 * when it exceeds the limit.
 *
 * @param set Set to bound (modified in place)
 * @param items Maximum allowed size
 */
private void boundSize(HashSetModifiableDBIDs set, int items) {
  // Nothing to do when already within the limit.
  if(set.size() <= items) {
    return;
  }
  DBIDs subset = DBIDUtil.randomSample(set, items, rnd);
  set.clear();
  set.addDBIDs(subset);
}
java
/**
 * Try to add a neighbor candidate to the kNN heap of an object.
 *
 * @param cur Object whose neighbor heap is updated
 * @param cand Candidate neighbor
 * @param distance Distance between cur and cand
 * @return true when the candidate was newly inserted within the resulting
 *         k-distance (i.e. the neighbor set effectively changed)
 */
private boolean add(DBIDRef cur, DBIDRef cand, double distance) {
  KNNHeap neighbors = store.get(cur);
  if(neighbors.contains(cand)) {
    return false; // Already a known neighbor.
  }
  // insert returns the k-distance after the insertion.
  double newKDistance = neighbors.insert(distance, cand);
  return (distance <= newKDistance);
}
java
/**
 * Sample "new" neighbors for NN-descent style refinement: intersect the
 * current kNN heap with the neighbors flagged as new, bound the sample
 * size, and remove the sampled ids from the new-neighbor set.
 *
 * @param ids Objects to process
 * @param sampleNewNeighbors Output: sampled new neighbors per object (reused)
 * @param newNeighborHash Per-object set of neighbors flagged as new
 * @param items Maximum sample size per object
 * @return total count of new neighbors found (before bounding)
 */
private int sampleNew(DBIDs ids, WritableDataStore<HashSetModifiableDBIDs> sampleNewNeighbors, WritableDataStore<HashSetModifiableDBIDs> newNeighborHash, int items) {
  int t = 0;
  for(DBIDIter iditer = ids.iter(); iditer.valid(); iditer.advance()) {
    KNNHeap realNeighbors = store.get(iditer);
    HashSetModifiableDBIDs newNeighbors = newNeighborHash.get(iditer);
    HashSetModifiableDBIDs realNewNeighbors = sampleNewNeighbors.get(iditer);
    realNewNeighbors.clear(); // Reuse
    // Keep only flagged neighbors that are still in the kNN heap.
    for(DoubleDBIDListIter heapiter = realNeighbors.unorderedIterator(); heapiter.valid(); heapiter.advance()) {
      if(newNeighbors.contains(heapiter)) {
        realNewNeighbors.add(heapiter);
        t++;
      }
    }
    boundSize(realNewNeighbors, items);
    // Sampled ids are no longer considered "new" afterwards.
    newNeighbors.removeDBIDs(realNewNeighbors);
    newNeighborHash.put(iditer, newNeighbors);
  }
  return t;
}
java
/**
 * Build the reverse kNN sets: register each object with every one of its
 * neighbors, in the "new" or "old" reverse set depending on whether the
 * neighbor was sampled as new.
 *
 * @param sampleNewHash Per-object sets of sampled new neighbors
 * @param newReverseNeighbors Output: reverse sets for new neighbors
 * @param oldReverseNeighbors Output: reverse sets for old neighbors
 */
private void reverse(WritableDataStore<HashSetModifiableDBIDs> sampleNewHash, WritableDataStore<HashSetModifiableDBIDs> newReverseNeighbors, WritableDataStore<HashSetModifiableDBIDs> oldReverseNeighbors) {
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    KNNHeap heap = store.get(iditer);
    HashSetDBIDs sampled = sampleNewHash.get(iditer);
    for(DoubleDBIDListIter heapiter = heap.unorderedIterator(); heapiter.valid(); heapiter.advance()) {
      if(sampled.contains(heapiter)) {
        newReverseNeighbors.get(heapiter).add(iditer);
      }
      else {
        oldReverseNeighbors.get(heapiter).add(iditer);
      }
    }
  }
}
java
public static double similarityNumberVector(NumberVector o1, NumberVector o2) { final int d1 = o1.getDimensionality(), d2 = o2.getDimensionality(); int intersection = 0, union = 0; int d = 0; for(; d < d1 && d < d2; d++) { double v1 = o1.doubleValue(d), v2 = o2.doubleValue(d); if(v1 != v1 || v2 != v2) { // Skip NaNs. continue; } if(v1 != 0. || v2 != 0) { ++union; if(v1 == v2) { ++intersection; } } } for(; d < d1; d++) { if(o1.doubleValue(d) != 0) { ++union; } } for(; d < d2; d++) { if(o2.doubleValue(d) != 0) { ++union; } } return intersection / (double) union; }
java
/**
 * Batch kNN lookup for a set of ids.
 *
 * @param node Node (unused; kept for API compatibility)
 * @param ids Objects to query
 * @param kmax Number of neighbors
 * @return map from id to its kNN list
 * @deprecated queries each id individually; kept only for compatibility
 */
@Deprecated
protected final Map<DBID, KNNList> batchNN(N node, DBIDs ids, int kmax) {
  Map<DBID, KNNList> result = new HashMap<>(ids.size());
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    final DBID id = DBIDUtil.deref(iter);
    result.put(id, knnq.getKNNForDBID(id, kmax));
  }
  return result;
}
java
/**
 * Write one line of (optionally scaled) outlier scores: the label followed
 * by one space-separated value per object.
 *
 * @param out Output stream
 * @param ids Objects, in output order
 * @param result Outlier result providing the scores
 * @param scaling Optional scaling function (may be null)
 * @param label Line label
 */
void writeResult(PrintStream out, DBIDs ids, OutlierResult result, ScalingFunction scaling, String label) {
  // Give an outlier scaling a chance to learn the score distribution first.
  if(scaling instanceof OutlierScaling) {
    ((OutlierScaling) scaling).prepare(result);
  }
  out.append(label);
  DoubleRelation scores = result.getScores();
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    final double raw = scores.doubleValue(iter);
    final double value = (scaling == null) ? raw : scaling.getScaled(raw);
    out.append(' ').append(Double.toString(value));
  }
  out.append(FormatUtil.NEWLINE);
}
java
/**
 * Run an outlier algorithm once for each k in the configured range, timing
 * each run and handing the results to the output consumer.
 *
 * @param prefix Name prefix for logging and result labels
 * @param mink Minimum k (inclusive) to actually run
 * @param maxk Maximum k (inclusive) to actually run
 * @param runner Algorithm invocation for a given k
 * @param out Consumer receiving (label, result) pairs
 */
private void runForEachK(String prefix, int mink, int maxk, IntFunction<OutlierResult> runner, BiConsumer<String, OutlierResult> out) {
  if(isDisabled(prefix)) {
    LOG.verbose("Skipping (disabled): " + prefix);
    return; // Disabled
  }
  LOG.verbose("Running " + prefix);
  // Zero-pad k in the result label, based on the largest k in the range.
  final int digits = (int) FastMath.ceil(FastMath.log10(krange.getMax() + 1));
  final String format = "%s-%0" + digits + "d";
  krange.forEach(k -> {
    if(k >= mink && k <= maxk) {
      Duration time = LOG.newDuration(this.getClass().getCanonicalName() + "." + prefix + ".k" + k + ".runtime").begin();
      OutlierResult result = runner.apply(k);
      LOG.statistics(time.end());
      if(result != null) {
        out.accept(String.format(Locale.ROOT, format, prefix, k), result);
        // Free memory: detach the result from the hierarchy again.
        result.getHierarchy().removeSubtree(result);
      }
    }
  });
}
java
/**
 * Get a copy of the polynomial coefficients.
 *
 * @return fresh copy of the coefficient array (safe for callers to modify)
 */
public double[] getCoefficients() {
  // clone() is the idiomatic way to copy a primitive array, replacing the
  // manual allocate-and-arraycopy sequence.
  return b.clone();
}
java
/**
 * Evaluate the fitted polynomial in log(k).
 *
 * @param k Position (e.g. neighborhood size)
 * @return polynomial value at log(k)
 */
public double getValueAt(int k) {
  final double logk = FastMath.log(k);
  double sum = 0.;
  double power = 1.; // Running power logk^p.
  for(int p = 0; p < b.length; p++) {
    sum += b[p] * power;
    power *= logk;
  }
  return sum;
}
java
/**
 * Choose the array adapter matching a vector factory.
 *
 * The unchecked casts are safe by convention: number vector factories
 * always pair with the number vector adapter, all other feature vectors
 * with the generic adapter.
 *
 * @param factory Vector factory
 * @return adapter for reading vectors produced by this factory
 */
@SuppressWarnings("unchecked")
private static <V extends FeatureVector<F>, F> ArrayAdapter<F, ? super V> getAdapter(Factory<V, F> factory) {
  if(factory instanceof NumberVector.Factory) {
    return (ArrayAdapter<F, ? super V>) NumberVectorAdapter.STATIC;
  }
  return (ArrayAdapter<F, ? super V>) FeatureVectorAdapter.STATIC;
}
java
/**
 * OPTICS-style cluster order expansion from a starting point, driven by a
 * heap of reachability distances.
 *
 * NOTE(review): entries may be re-added to the heap; UpdatableHeap is
 * presumably expected to keep the best entry per object — confirm.
 *
 * @param ipt Starting object
 * @param order Cluster order result to append to
 * @param dq Distance query
 * @param prog Progress logger
 */
protected void expandClusterOrder(DBID ipt, ClusterOrder order, DistanceQuery<V> dq, FiniteProgress prog) {
  UpdatableHeap<OPTICSHeapEntry> heap = new UpdatableHeap<>();
  heap.add(new OPTICSHeapEntry(ipt, null, Double.POSITIVE_INFINITY));
  while(!heap.isEmpty()) {
    final OPTICSHeapEntry current = heap.poll();
    DBID currPt = current.objectID;
    order.add(currPt, current.reachability, current.predecessorID);
    processed.add(currPt);
    double coredist = inverseDensities.doubleValue(currPt);
    for(DBIDIter it = neighs.get(currPt).iter(); it.valid(); it.advance()) {
      if(processed.contains(it)) {
        continue; // Already in the cluster order.
      }
      // Reachability distance: at least the core distance.
      double nrdist = dq.distance(currPt, it);
      if(coredist > nrdist) {
        nrdist = coredist;
      }
      // Keep the smallest reachability seen so far.
      if(reachDist.doubleValue(it) == UNDEFINED_DISTANCE) {
        reachDist.put(it, nrdist);
      }
      else if(nrdist < reachDist.doubleValue(it)) {
        reachDist.put(it, nrdist);
      }
      heap.add(new OPTICSHeapEntry(DBIDUtil.deref(it), currPt, nrdist));
    }
    LOG.incrementProcessed(prog);
  }
}
java
/**
 * Resize the on-disk matrix to hold newsize objects, persisting the new
 * size in the extra header.
 *
 * Improvement: the size-limit violation now throws the more specific
 * IllegalArgumentException (a RuntimeException subclass, so callers
 * catching RuntimeException are unaffected).
 *
 * @param newsize New matrix dimension (number of objects)
 * @throws IOException when the file is read-only or resizing fails
 * @throws IllegalArgumentException when the size would overflow the index
 *         arithmetic
 */
public synchronized void resizeMatrix(int newsize) throws IOException {
  if(newsize >= 0xFFFF) {
    throw new IllegalArgumentException("Matrix size is too big and will overflow the integer datatype.");
  }
  if(!array.isWritable()) {
    throw new IOException("Can't resize a read-only array.");
  }
  array.resizeFile(arraysize(newsize));
  this.matrixsize = newsize;
  // Persist the new matrix size in the user header.
  ByteBuffer header = array.getExtraHeader();
  header.putInt(this.matrixsize);
}
java
/**
 * Compute the linear offset of cell (x, y) in the lower-triangular matrix
 * serialization. Symmetric: the arguments may be given in either order.
 *
 * @param x Row index
 * @param y Column index
 * @return linear offset
 */
private int computeOffset(int x, int y) {
  // Normalize to x >= y by swapping, instead of recursing.
  if(y > x) {
    final int t = x;
    x = y;
    y = t;
  }
  return ((x * (x + 1)) >> 1) + y;
}
java
/**
 * Read and validate the on-disk cache header: magic number, header size,
 * record size, and a record count consistent with the file length.
 *
 * @param validateRecordSize when true, compare the on-disk record size to
 *        the expected one; when false, adopt the value from the file
 * @throws IOException when any header field is inconsistent
 */
private void validateHeader(boolean validateRecordSize) throws IOException {
  int readmagic = file.readInt();
  // Validate magic number
  if (readmagic != this.magic) {
    file.close();
    throw new IOException("Magic in LinearDiskCache does not match: " + readmagic + " instead of " + this.magic);
  }
  // Validate header size
  if (file.readInt() != this.headersize) {
    file.close();
    throw new IOException("Header size in LinearDiskCache does not match.");
  }
  if (validateRecordSize) {
    // Validate record size
    if (file.readInt() != this.recordsize) {
      file.close();
      throw new IOException("Recordsize in LinearDiskCache does not match.");
    }
  } else {
    // or just read it from file
    this.recordsize = file.readInt();
  }
  // read the number of records and validate with file size.
  if (file.getFilePointer() != HEADER_POS_SIZE) {
    throw new IOException("Incorrect file position when reading header.");
  }
  this.numrecs = file.readInt();
  if (numrecs < 0 || file.length() != indexToFileposition(numrecs)) {
    throw new IOException("File size and number of records do not agree.");
  }
  // yet another sanity check. We should have read all of our internal header
  // now.
  if (file.getFilePointer() != INTERNAL_HEADER_SIZE) {
    throw new IOException("Incorrect file position after reading header.");
  }
}
java
/**
 * Resize the file to hold newsize records: update the record count in the
 * on-disk header, grow/shrink the file, and re-map the array.
 *
 * @param newsize New number of records
 * @throws IOException when the file is read-only or resizing fails
 */
public synchronized void resizeFile(int newsize) throws IOException {
  if (!writable) {
    throw new IOException("File is not writeable!");
  }
  // update the number of records
  this.numrecs = newsize;
  file.seek(HEADER_POS_SIZE);
  file.writeInt(numrecs);
  // resize file
  file.setLength(indexToFileposition(numrecs));
  // Re-map the buffer, as the mapped length changed.
  mapArray();
}
java
/**
 * Map the user-definable part of the file header into memory.
 *
 * @return byte buffer view of the extra header region (writable when the
 *         file is writable)
 * @throws IOException on mapping errors
 */
public synchronized ByteBuffer getExtraHeader() throws IOException {
  final int extra = headersize - INTERNAL_HEADER_SIZE;
  if(writable) {
    return file.getChannel().map(MapMode.READ_WRITE, INTERNAL_HEADER_SIZE, extra);
  }
  return file.getChannel().map(MapMode.READ_ONLY, INTERNAL_HEADER_SIZE, extra);
}
java
/**
 * Run MiniMax (prototype-based) agglomerative hierarchical clustering.
 *
 * @param db Database
 * @param relation Data relation to cluster
 * @return pointer hierarchy with cluster prototypes
 */
public PointerPrototypeHierarchyRepresentationResult run(Database db, Relation<O> relation) {
  DistanceQuery<O> dq = DatabaseUtil.precomputedDistanceQuery(db, relation, getDistanceFunction(), LOG);
  final DBIDs ids = relation.getDBIDs();
  final int size = ids.size();
  // Initialize space for result:
  PointerHierarchyRepresentationBuilder builder = new PointerHierarchyRepresentationBuilder(ids, dq.getDistanceFunction().isSquared());
  Int2ObjectOpenHashMap<ModifiableDBIDs> clusters = new Int2ObjectOpenHashMap<>(size);
  // Allocate working space:
  MatrixParadigm mat = new MatrixParadigm(ids);
  ArrayModifiableDBIDs prots = DBIDUtil.newArray(MatrixParadigm.triangleSize(size));
  initializeMatrices(mat, prots, dq);
  DBIDArrayMIter protiter = prots.iter();
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("MiniMax clustering", size - 1, LOG) : null;
  DBIDArrayIter ix = mat.ix;
  // Perform size-1 merges; the active set shrinks as clusters are linked.
  for(int i = 1, end = size; i < size; i++) {
    end = AGNES.shrinkActiveSet(ix, builder, end, //
        findMerge(end, mat, protiter, builder, clusters, dq));
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  return (PointerPrototypeHierarchyRepresentationResult) builder.complete();
}
java
/**
 * Fill the pairwise distance matrix and the initial prototype list. For
 * singleton clusters the prototype of a pair is one of the two points.
 *
 * @param mat Matrix paradigm (provides iterators and the distance array)
 * @param prots Prototype list to fill, one entry per matrix cell
 * @param dq Distance query
 */
protected static <O> void initializeMatrices(MatrixParadigm mat, ArrayModifiableDBIDs prots, DistanceQuery<O> dq) {
  final DBIDArrayIter ix = mat.ix, iy = mat.iy;
  final double[] distances = mat.matrix;
  int pos = 0;
  // Lower triangular order: for each x, all y < x.
  for(ix.seek(0); ix.valid(); ix.advance()) {
    for(iy.seek(0); iy.getOffset() < ix.getOffset(); iy.advance()) {
      distances[pos++] = dq.distance(ix, iy);
      prots.add(iy);
    }
  }
  assert (prots.size() == pos);
}
java
/**
 * Find the closest pair of unmerged clusters in the distance matrix and
 * merge them.
 *
 * NOTE(review): assumes at least one unmerged pair exists; otherwise x and
 * y remain -1 and the assertion / merge would misbehave — confirm callers
 * always have two or more active clusters.
 *
 * @param end Active set size
 * @param mat Matrix paradigm
 * @param prots Prototype iterator
 * @param builder Hierarchy builder (tracks already-merged clusters)
 * @param clusters Cluster members by index
 * @param dq Distance query
 * @return index x of the higher-numbered merged cluster
 */
protected static int findMerge(int end, MatrixParadigm mat, DBIDArrayMIter prots, PointerHierarchyRepresentationBuilder builder, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<?> dq) {
  final DBIDArrayIter ix = mat.ix, iy = mat.iy;
  final double[] distances = mat.matrix;
  double mindist = Double.POSITIVE_INFINITY;
  int x = -1, y = -1;
  for(int dx = 0; dx < end; dx++) {
    // Skip if object is already linked
    if(builder.isLinked(ix.seek(dx))) {
      continue;
    }
    final int xoffset = MatrixParadigm.triangleSize(dx);
    for(int dy = 0; dy < dx; dy++) {
      // Skip if object is already linked
      if(builder.isLinked(iy.seek(dy))) {
        continue;
      }
      double dist = distances[xoffset + dy];
      if(dist < mindist) {
        mindist = dist;
        x = dx;
        y = dy;
      }
    }
  }
  assert (y < x);
  merge(end, mat, prots, builder, clusters, dq, x, y);
  return x;
}
java
/**
 * Merge cluster x into cluster y (y &lt; x; y is kept), updating cluster
 * members, hierarchy links, the prototype and the distance matrix.
 *
 * @param size Active set size
 * @param mat Matrix paradigm
 * @param prots Prototype iterator
 * @param builder Hierarchy builder
 * @param clusters Cluster members by index (singletons are not stored)
 * @param dq Distance query
 * @param x Higher cluster index (merged away)
 * @param y Lower cluster index (kept)
 */
protected static void merge(int size, MatrixParadigm mat, DBIDArrayMIter prots, PointerHierarchyRepresentationBuilder builder, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<?> dq, int x, int y) {
  assert (y < x);
  final DBIDArrayIter ix = mat.ix.seek(x), iy = mat.iy.seek(y);
  final double[] distances = mat.matrix;
  int offset = MatrixParadigm.triangleSize(x) + y;
  if(LOG.isDebuggingFine()) {
    LOG.debugFine("Merging: " + DBIDUtil.toString(ix) + " -> " + DBIDUtil.toString(iy) + " " + distances[offset]);
  }
  ModifiableDBIDs cx = clusters.get(x), cy = clusters.get(y);
  // Keep y
  if(cy == null) {
    // y was a singleton so far; materialize its member set.
    cy = DBIDUtil.newHashSet();
    cy.add(iy);
  }
  if(cx == null) {
    // x was a singleton; just add its point.
    cy.add(ix);
  }
  else {
    cy.addDBIDs(cx);
    clusters.remove(x);
  }
  clusters.put(y, cy);
  // parent of x is set to y
  builder.add(ix, distances[offset], iy, prots.seek(offset));
  updateMatrices(size, mat, prots, builder, clusters, dq, y);
}
java
/**
 * Update all distance matrix entries involving the newly merged cluster c.
 *
 * @param size Active set size
 * @param mat Matrix paradigm
 * @param prots Prototype iterator
 * @param builder Hierarchy builder (used to skip merged clusters)
 * @param clusters Cluster members by index
 * @param dq Distance query
 * @param c Index of the merged cluster
 */
protected static <O> void updateMatrices(int size, MatrixParadigm mat, DBIDArrayMIter prots, PointerHierarchyRepresentationBuilder builder, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<O> dq, int c) {
  final DBIDArrayIter ix = mat.ix, iy = mat.iy;
  // c is the new cluster.
  // Update entries (at (x,y) with x > y) in the matrix where x = c or y = c
  // Update entries at (c,y) with y < c
  ix.seek(c);
  for(iy.seek(0); iy.getOffset() < c; iy.advance()) {
    // Skip entry if already merged
    if(builder.isLinked(iy)) {
      continue;
    }
    updateEntry(mat, prots, clusters, dq, c, iy.getOffset());
  }
  // Update entries at (x,c) with x > c
  iy.seek(c);
  for(ix.seek(c + 1); ix.valid(); ix.advance()) {
    // Skip entry if already merged
    if(builder.isLinked(ix)) {
      continue;
    }
    updateEntry(mat, prots, clusters, dq, ix.getOffset(), c);
  }
}
java
/**
 * Recompute the minimax distance and prototype for the cluster pair (x, y).
 *
 * @param mat Matrix paradigm
 * @param prots Prototype iterator (updated at the pair's matrix offset)
 * @param clusters Cluster members by index (null means singleton)
 * @param dq Distance query
 * @param x Higher cluster index
 * @param y Lower cluster index
 */
protected static void updateEntry(MatrixParadigm mat, DBIDArrayMIter prots, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<?> dq, int x, int y) {
  assert (y < x);
  final DBIDArrayIter ix = mat.ix, iy = mat.iy;
  final double[] distances = mat.matrix;
  ModifiableDBIDs cx = clusters.get(x), cy = clusters.get(y);
  DBIDVar prototype = DBIDUtil.newVar(ix.seek(x)); // Default prototype
  double minMaxDist;
  // Two "real" clusters:
  if(cx != null && cy != null) {
    minMaxDist = findPrototype(dq, cx, cy, prototype, Double.POSITIVE_INFINITY);
    minMaxDist = findPrototype(dq, cy, cx, prototype, minMaxDist);
  }
  else if(cx != null) {
    // cy is singleton.
    minMaxDist = findPrototypeSingleton(dq, cx, iy.seek(y), prototype);
  }
  else if(cy != null) {
    // cx is singleton.
    minMaxDist = findPrototypeSingleton(dq, cy, ix.seek(x), prototype);
  }
  else {
    // Both singletons: plain point distance, either point is the prototype.
    minMaxDist = dq.distance(ix.seek(x), iy.seek(y));
    prototype.set(ix);
  }
  final int offset = MatrixParadigm.triangleSize(x) + y;
  distances[offset] = minMaxDist;
  prots.seek(offset).setDBID(prototype);
}
java
private static double findMax(DistanceQuery<?> dq, DBIDIter i, DBIDs cy, double maxDist, double minMaxDist) { for(DBIDIter j = cy.iter(); j.valid(); j.advance()) { double dist = dq.distance(i, j); if(dist > maxDist) { // Stop early, if we already know a better candidate. if(dist >= minMaxDist) { return dist; } maxDist = dist; } } return maxDist; }
java
/**
 * Serialize this object: integer id, dimensionality, then the raw double
 * values. Must stay in sync with readExternal.
 *
 * @param out Output
 */
@Override
public void writeExternal(ObjectOutput out) throws IOException {
  out.writeInt(DBIDUtil.asInteger(id));
  out.writeInt(values.length);
  for(int d = 0; d < values.length; d++) {
    out.writeDouble(values[d]);
  }
}
java
/**
 * Deserialize this object: integer id, dimensionality, then the double
 * values. Must mirror writeExternal exactly.
 *
 * Bug fix: the id is written with writeInt (4 bytes) but was previously
 * read with in.read(), which consumes only a single byte (0-255) and left
 * the stream misaligned. It is now read with readInt().
 *
 * @param in Input
 */
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  id = DBIDUtil.importInteger(in.readInt());
  values = new double[in.readInt()];
  for(int d = 0; d < values.length; d++) {
    values[d] = in.readDouble();
  }
}
java
/**
 * Append this progress state to a buffer in the form "task: processed".
 *
 * @param buf Buffer to append to
 * @return the same buffer, for chaining
 */
@Override
public StringBuilder appendToBuffer(StringBuilder buf) {
  return buf.append(getTask()).append(": ").append(getProcessed());
}
java
private TypeInformation getInputTypeRestriction() { // Find maximum dimension requested int m = dims[0]; for(int i = 1; i < dims.length; i++) { m = Math.max(dims[i], m); } return VectorFieldTypeInformation.typeRequest(NumberVector.class, m, Integer.MAX_VALUE); }
java
/**
 * Test whether a k-distance is a local maximum within a neighborhood.
 *
 * @param kdist k-distance of the object under test
 * @param neighbors Neighborhood to compare against
 * @param kdists Storage of all k-distances
 * @return true when no neighbor has a smaller k-distance
 */
private boolean isLocalMaximum(double kdist, DBIDs neighbors, WritableDoubleDataStore kdists) {
  for(DBIDIter it = neighbors.iter(); it.valid(); it.advance()) {
    final double nd = kdists.doubleValue(it);
    if(nd < kdist) {
      return false; // Some neighbor is denser.
    }
  }
  return true;
}
java
/**
 * Grow a cluster from a seed neighborhood, non-recursively, assigning
 * cluster ids to all reachable points.
 *
 * Encoding: core points get +clusterid, border (non-core) points that were
 * previously noise get -clusterid.
 *
 * @param clusterid Id of the cluster being grown
 * @param clusterids Cluster assignment storage
 * @param knnq kNN query
 * @param neighbors Seed neighborhood of the initial core point
 * @param maxkdist Core-point threshold on the kNN distance
 * @param progress Progress logger
 * @return number of points in the cluster (including the seed)
 */
protected int expandCluster(final int clusterid, final WritableIntegerDataStore clusterids, final KNNQuery<O> knnq, final DBIDs neighbors, final double maxkdist, final FiniteProgress progress) {
  int clustersize = 1; // initial seed!
  final ArrayModifiableDBIDs activeSet = DBIDUtil.newArray();
  activeSet.addDBIDs(neighbors);
  // run expandCluster as long as this set is non-empty (non-recursive
  // implementation)
  DBIDVar id = DBIDUtil.newVar();
  while(!activeSet.isEmpty()) {
    activeSet.pop(id);
    // Assign object to cluster
    final int oldclus = clusterids.intValue(id);
    if(oldclus == NOISE) {
      clustersize += 1;
      // Non core point cluster member:
      clusterids.putInt(id, -clusterid);
    }
    else if(oldclus == UNPROCESSED) {
      clustersize += 1;
      // expandCluster again:
      // Evaluate Neighborhood predicate
      final KNNList newneighbors = knnq.getKNNForDBID(id, k);
      // Evaluate Core-Point predicate
      if(newneighbors.getKNNDistance() <= maxkdist) {
        activeSet.addDBIDs(newneighbors);
      }
      clusterids.putInt(id, clusterid);
      LOG.incrementProcessed(progress);
    }
  }
  return clustersize;
}
java
/**
 * Compute the density estimate (the k-distance) for every object.
 *
 * @param knnq kNN query
 * @param ids Objects to process
 * @param dens Output storage for the density estimates
 */
private void fillDensities(KNNQuery<O> knnq, DBIDs ids, WritableDoubleDataStore dens) {
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Densities", ids.size(), LOG) : null;
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    // The k-distance serves as the (inverse) density estimate.
    dens.putDouble(iter, knnq.getKNNForDBID(iter, k).getKNNDistance());
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
java
/**
 * Run the CLIQUE subspace clustering algorithm: first find dense subspaces
 * bottom-up (Apriori-style), then extract the clusters of every dense
 * subspace.
 *
 * @param relation Data relation to cluster
 * @return clustering with one cluster per dense subspace region
 */
public Clustering<SubspaceModel> run(Relation<? extends NumberVector> relation) {
  final int dimensionality = RelationUtil.dimensionality(relation);
  StepProgress step = new StepProgress(2);
  // 1. Identification of subspaces that contain clusters
  step.beginStep(1, "Identification of subspaces that contain clusters", LOG);
  ArrayList<List<CLIQUESubspace>> dimensionToDenseSubspaces = new ArrayList<>(dimensionality);
  List<CLIQUESubspace> denseSubspaces = findOneDimensionalDenseSubspaces(relation);
  dimensionToDenseSubspaces.add(denseSubspaces);
  if(LOG.isVerbose()) {
    LOG.verbose("1-dimensional dense subspaces: " + denseSubspaces.size());
  }
  if(LOG.isDebugging()) {
    for(CLIQUESubspace s : denseSubspaces) {
      LOG.debug(s.toString());
    }
  }
  // Grow subspaces dimension by dimension until no candidates remain.
  for(int k = 2; k <= dimensionality && !denseSubspaces.isEmpty(); k++) {
    denseSubspaces = findDenseSubspaces(relation, denseSubspaces);
    assert (dimensionToDenseSubspaces.size() == k - 1);
    dimensionToDenseSubspaces.add(denseSubspaces);
    if(LOG.isVerbose()) {
      LOG.verbose(k + "-dimensional dense subspaces: " + denseSubspaces.size());
    }
    if(LOG.isDebugging()) {
      for(CLIQUESubspace s : denseSubspaces) {
        LOG.debug(s.toString());
      }
    }
  }
  // 2. Identification of clusters
  step.beginStep(2, "Identification of clusters", LOG);
  // build result
  Clustering<SubspaceModel> result = new Clustering<>("CLIQUE clustering", "clique-clustering");
  for(int dim = 0; dim < dimensionToDenseSubspaces.size(); dim++) {
    List<CLIQUESubspace> subspaces = dimensionToDenseSubspaces.get(dim);
    List<Pair<Subspace, ModifiableDBIDs>> modelsAndClusters = determineClusters(subspaces);
    if(LOG.isVerbose()) {
      LOG.verbose((dim + 1) + "-dimensional clusters: " + modelsAndClusters.size());
    }
    for(Pair<Subspace, ModifiableDBIDs> modelAndCluster : modelsAndClusters) {
      Cluster<SubspaceModel> newCluster = new Cluster<>(modelAndCluster.second);
      newCluster.setModel(new SubspaceModel(modelAndCluster.first, Centroid.make(relation, modelAndCluster.second).getArrayRef()));
      result.addToplevelCluster(newCluster);
    }
  }
  return result;
}
java
/**
 * Collect the clusters of all given dense subspaces into one flat list.
 *
 * @param denseSubspaces Dense subspaces to extract clusters from
 * @return all (subspace, cluster-members) pairs
 */
private List<Pair<Subspace, ModifiableDBIDs>> determineClusters(List<CLIQUESubspace> denseSubspaces) {
  List<Pair<Subspace, ModifiableDBIDs>> all = new ArrayList<>();
  for(CLIQUESubspace subspace : denseSubspaces) {
    List<Pair<Subspace, ModifiableDBIDs>> found = subspace.determineClusters();
    if(LOG.isDebugging()) {
      LOG.debugFine("Subspace " + subspace + " clusters " + found.size());
    }
    all.addAll(found);
  }
  return all;
}
java
/**
 * Find the one-dimensional dense subspaces, optionally applying MDL-based
 * pruning to the candidate list.
 *
 * @param database Data relation
 * @return 1-dimensional dense subspaces
 */
private List<CLIQUESubspace> findOneDimensionalDenseSubspaces(Relation<? extends NumberVector> database) {
  List<CLIQUESubspace> candidates = findOneDimensionalDenseSubspaceCandidates(database);
  if(!prune) {
    return candidates;
  }
  return pruneDenseSubspaces(candidates);
}
java
private void updateMinMax(NumberVector featureVector, double[] minima, double[] maxima) { assert (minima.length == featureVector.getDimensionality()); for(int d = 0; d < featureVector.getDimensionality(); d++) { double v = featureVector.doubleValue(d); if(v == v) { // Avoid NaN. maxima[d] = MathUtil.max(v, maxima[d]); minima[d] = MathUtil.min(v, minima[d]); } } }
java
/**
 * Determine the one-dimensional dense subspace candidates: build units,
 * count the points falling into each unit, keep units whose selectivity
 * reaches tau, and group them into per-dimension subspaces sorted by
 * coverage.
 *
 * @param database Data relation
 * @return 1-dimensional dense subspace candidates, sorted by coverage
 */
private List<CLIQUESubspace> findOneDimensionalDenseSubspaceCandidates(Relation<? extends NumberVector> database) {
  Collection<CLIQUEUnit> units = initOneDimensionalUnits(database);
  // identify dense units
  double total = database.size();
  for(DBIDIter it = database.iterDBIDs(); it.valid(); it.advance()) {
    NumberVector featureVector = database.get(it);
    // FIXME: rather than repeatedly testing, use a clever data structure?
    for(CLIQUEUnit unit : units) {
      unit.addFeatureVector(it, featureVector);
    }
  }
  int dimensionality = RelationUtil.dimensionality(database);
  Collection<CLIQUEUnit> denseUnits = new ArrayList<>();
  CLIQUESubspace[] denseSubspaces = new CLIQUESubspace[dimensionality];
  for(CLIQUEUnit unit : units) {
    // unit is a dense unit
    if(unit.selectivity(total) >= tau) {
      denseUnits.add(unit);
      // add the one-dimensional dense unit to its subspace
      int dim = unit.getDimension(0);
      CLIQUESubspace subspace_d = denseSubspaces[dim];
      if(subspace_d == null) {
        denseSubspaces[dim] = subspace_d = new CLIQUESubspace(dim);
      }
      subspace_d.addDenseUnit(unit);
    }
  }
  // Omit null values where no dense unit was found:
  List<CLIQUESubspace> subspaceCandidates = new ArrayList<>(dimensionality);
  for(CLIQUESubspace s : denseSubspaces) {
    if(s != null) {
      subspaceCandidates.add(s);
    }
  }
  Collections.sort(subspaceCandidates, CLIQUESubspace.BY_COVERAGE);
  if(LOG.isDebugging()) {
    LOG.debugFine(new StringBuilder().append(" number of 1-dim dense units: ").append(denseUnits.size()) //
        .append("\n number of 1-dim dense subspace candidates: ").append(subspaceCandidates.size()).toString());
  }
  return subspaceCandidates;
}
java
/**
 * MDL-based pruning of dense subspace candidates (sorted by coverage):
 * keep the prefix up to the split position with minimal total code length.
 *
 * @param denseSubspaces Candidates, sorted descending by coverage
 * @return pruned list (the "interesting" prefix of the candidates)
 */
private List<CLIQUESubspace> pruneDenseSubspaces(List<CLIQUESubspace> denseSubspaces) {
  int[][] means = computeMeans(denseSubspaces);
  double[][] diffs = computeDiffs(denseSubspaces, means[0], means[1]);
  double[] codeLength = new double[denseSubspaces.size()];
  double minCL = Double.MAX_VALUE;
  int min_i = -1;
  for(int i = 0; i < denseSubspaces.size(); i++) {
    int mi = means[0][i], mp = means[1][i];
    // Code length = model cost (log of the means) + deviation cost.
    double cl = codeLength[i] = log2OrZero(mi) + diffs[0][i] + log2OrZero(mp) + diffs[1][i];
    if(cl <= minCL) {
      minCL = cl;
      min_i = i;
    }
  }
  return denseSubspaces.subList(0, min_i + 1);
}
java
/**
 * Compute, for every cut position, the mean coverage of the "interesting"
 * prefix (mi, forward) and of the "pruned" suffix (mp, backward).
 *
 * @param denseSubspaces Subspaces, sorted by coverage
 * @return array { mi, mp } of rounded-up running means
 */
private int[][] computeMeans(List<CLIQUESubspace> denseSubspaces) {
  final int n = denseSubspaces.size() - 1;
  int[] mi = new int[n + 1], mp = new int[n + 1];
  double sumI = 0, sumP = 0;
  for(int i = 0; i <= n; i++) {
    // Prefix sum from the front, suffix sum from the back:
    sumI += denseSubspaces.get(i).getCoverage();
    sumP += denseSubspaces.get(n - i).getCoverage();
    mi[i] = (int) FastMath.ceil(sumI / (i + 1));
    if(i < n) {
      // mp[j] is the mean coverage of subspaces j+1 .. n.
      mp[n - 1 - i] = (int) FastMath.ceil(sumP / (i + 1));
    }
  }
  return new int[][] { mi, mp };
}
java
/**
 * Compute the accumulated log2 deviations of the coverages from the running
 * means, for both the prefix (mi) and suffix (mp) direction.
 *
 * @param denseSubspaces Subspaces, sorted by coverage
 * @param mi Prefix means (from {@code computeMeans})
 * @param mp Suffix means (from {@code computeMeans})
 * @return array { diff_mi, diff_mp } of accumulated deviations
 */
private double[][] computeDiffs(List<CLIQUESubspace> denseSubspaces, int[] mi, int[] mp) {
  final int n = denseSubspaces.size() - 1;
  double[] diff_mi = new double[n + 1], diff_mp = new double[n + 1];
  double accI = 0, accP = 0;
  for(int i = 0; i <= n; i++) {
    // Forward accumulation for the prefix:
    accI += log2OrZero(Math.abs(denseSubspaces.get(i).getCoverage() - mi[i]));
    diff_mi[i] = accI;
    // Backward accumulation for the suffix (nothing to add at i == n):
    if(i < n) {
      accP += log2OrZero(Math.abs(denseSubspaces.get(n - i).getCoverage() - mp[n - 1 - i]));
      diff_mp[n - 1 - i] = accP;
    }
  }
  return new double[][] { diff_mi, diff_mp };
}
java
/**
 * Append a new column to this bundle.
 *
 * @param meta Type information of the new column
 * @param data Data of the new column (expected to match the type information)
 */
public void append(SimpleTypeInformation<?> meta, Object data) {
  // The two lists are kept parallel: contents.get(i) is typed by meta.get(i).
  this.meta.add(meta);
  this.contents.add(data);
}
java
/**
 * Test whether all bits set in the given bitset are also set in this vector.
 *
 * @param bitset Query bitset (word array); may be longer or shorter than
 *        this vector's backing array
 * @return {@code true} if this vector contains all bits of the query
 */
public boolean contains(long[] bitset) {
  for(int i = 0; i < bitset.length; i++) {
    final long b = bitset[i];
    if(i >= bits.length) {
      // The query is longer than our storage: containment only holds if the
      // excess query words are all empty.
      if(b != 0L) {
        return false;
      }
      // BUGFIX: the original fell through and accessed bits[i] here,
      // throwing ArrayIndexOutOfBoundsException for zero excess words.
      continue;
    }
    if((b & bits[i]) != b) {
      return false;
    }
  }
  return true;
}
java
/**
 * Jaccard similarity of two bit vectors: |A &cap; B| / |A &cup; B|.
 *
 * @param v2 Other bit vector
 * @return Jaccard coefficient in [0, 1]
 */
public double jaccardSimilarity(BitVector v2) {
  final double isize = BitsUtil.intersectionSize(bits, v2.bits);
  final double usize = BitsUtil.unionSize(bits, v2.bits);
  return isize / usize;
}
java
/**
 * Write a 16 bit value to the array, big-endian.
 *
 * @param array Target array
 * @param offset Write position
 * @param v Value to write (low 16 bits used)
 * @return number of bytes written (SIZE_SHORT)
 */
public static int writeShort(byte[] array, int offset, int v) {
  // Big-endian: most significant byte first.
  array[offset] = (byte) (v >>> 8);
  array[offset + 1] = (byte) v;
  return SIZE_SHORT;
}
java
/**
 * Write a 32 bit value to the array, big-endian.
 *
 * @param array Target array
 * @param offset Write position
 * @param v Value to write
 * @return number of bytes written (SIZE_INT)
 */
public static int writeInt(byte[] array, int offset, int v) {
  // Big-endian: emit bytes from most to least significant.
  for(int shift = 24, i = 0; shift >= 0; shift -= 8, i++) {
    array[offset + i] = (byte) (v >>> shift);
  }
  return SIZE_INT;
}
java
/**
 * Write a 64 bit value to the array, big-endian.
 *
 * @param array Target array
 * @param offset Write position
 * @param v Value to write
 * @return number of bytes written (SIZE_LONG)
 */
public static int writeLong(byte[] array, int offset, long v) {
  // Big-endian: emit bytes from most to least significant.
  for(int shift = 56, i = 0; shift >= 0; shift -= 8, i++) {
    array[offset + i] = (byte) (v >>> shift);
  }
  return SIZE_LONG;
}
java
/**
 * Write a float to the array as its raw IEEE 754 bit pattern, big-endian.
 *
 * @param array Target array
 * @param offset Write position
 * @param v Value to write
 * @return number of bytes written
 */
public static int writeFloat(byte[] array, int offset, float v) {
  // Serialize via the 32 bit IEEE 754 representation.
  final int rawbits = Float.floatToIntBits(v);
  return writeInt(array, offset, rawbits);
}
java
/**
 * Write a double to the array as its raw IEEE 754 bit pattern, big-endian.
 *
 * @param array Target array
 * @param offset Write position
 * @param v Value to write
 * @return number of bytes written
 */
public static int writeDouble(byte[] array, int offset, double v) {
  // Serialize via the 64 bit IEEE 754 representation.
  final long rawbits = Double.doubleToLongBits(v);
  return writeLong(array, offset, rawbits);
}
java
public static short readShort(byte[] array, int offset) { // First make integers to resolve signed vs. unsigned issues. int b0 = array[offset + 0] & 0xFF; int b1 = array[offset + 1] & 0xFF; return (short) ((b0 << 8) + (b1 << 0)); }
java
public static int readUnsignedShort(byte[] array, int offset) { // First make integers to resolve signed vs. unsigned issues. int b0 = array[offset + 0] & 0xFF; int b1 = array[offset + 1] & 0xFF; return ((b0 << 8) + (b1 << 0)); }
java
public static int readInt(byte[] array, int offset) { // First make integers to resolve signed vs. unsigned issues. int b0 = array[offset + 0] & 0xFF; int b1 = array[offset + 1] & 0xFF; int b2 = array[offset + 2] & 0xFF; int b3 = array[offset + 3] & 0xFF; return ((b0 << 24) + (b1 << 16) + (b2 << 8) + (b3 << 0)); }
java
public static long readLong(byte[] array, int offset) { // First make integers to resolve signed vs. unsigned issues. long b0 = array[offset + 0]; long b1 = array[offset + 1] & 0xFF; long b2 = array[offset + 2] & 0xFF; long b3 = array[offset + 3] & 0xFF; long b4 = array[offset + 4] & 0xFF; int b5 = array[offset + 5] & 0xFF; int b6 = array[offset + 6] & 0xFF; int b7 = array[offset + 7] & 0xFF; return ((b0 << 56) + (b1 << 48) + (b2 << 40) + (b3 << 32) + (b4 << 24) + (b5 << 16) + (b6 << 8) + (b7 << 0)); }
java
public static void writeUnsignedVarint(ByteBuffer buffer, int val) { // Extra bytes have the high bit set while((val & 0x7F) != val) { buffer.put((byte) ((val & 0x7F) | 0x80)); val >>>= 7; } // Last byte doesn't have high bit set buffer.put((byte) (val & 0x7F)); }
java
public static void writeUnsignedVarintLong(ByteBuffer buffer, long val) { // Extra bytes have the high bit set while((val & 0x7F) != val) { buffer.put((byte) ((val & 0x7F) | 0x80)); val >>>= 7; } // Last byte doesn't have high bit set buffer.put((byte) (val & 0x7F)); }
java
public static void writeString(ByteBuffer buffer, String s) throws IOException { if(s == null) { s = ""; // Which will be written as Varint 0 = single byte 0. } ByteArrayUtil.STRING_SERIALIZER.toByteBuffer(buffer, s); }
java
/**
 * Read an unsigned integer stored as variable length quantity (base-128
 * varint): 7 payload bits per byte, low groups first; a set high bit marks
 * a continuation byte. At most six bytes are consumed.
 *
 * @param buffer Buffer to read from
 * @return the decoded value
 * @throws IOException if the quantity does not terminate within an int
 */
public static int readUnsignedVarint(ByteBuffer buffer) throws IOException {
  int result = 0;
  // Shift positions 0, 7, ..., 35 — same six-byte limit as the original.
  for(int shift = 0; shift <= 35; shift += 7) {
    final int b = buffer.get();
    result |= (b & 0x7F) << shift;
    if((b & 0x80) == 0) {
      return result;
    }
  }
  throw new IOException("Variable length quantity is too long for expected integer.");
}
java
/**
 * Attempt to explicitly release a memory mapping, instead of waiting for
 * garbage collection to eventually free it.
 *
 * On Java 9 and later (detected by probing for {@code Runtime.version()}),
 * this is a no-op after flushing, because the reflective cleaner hack below
 * no longer works there. On Java 8 and older, the mapping is released via
 * the (non-public) {@code cleaner()} method of the buffer implementation.
 *
 * @param map Buffer to unmap; {@code null} is silently ignored
 */
public static void unmapByteBuffer(final MappedByteBuffer map) {
  if(map == null) {
    return;
  }
  map.force(); // Flush pending changes first.
  try {
    // Runtime.version() exists since Java 9; if present, do not attempt the hack.
    if(Runtime.class.getDeclaredMethod("version") != null) return; // At later Java, the hack below will not work anymore.
  }
  catch(NoSuchMethodException e) {
    // Java 8 or older — the probe failing is the "old Java" signal,
    // so the actual unmapping work happens inside this catch block.
    // This is an ugly hack, but all that Java <8 offers to help freeing
    // memory allocated using such buffers.
    // See also: http://bugs.sun.com/view_bug.do?bug_id=4724038
    AccessController.doPrivileged(new PrivilegedAction<Object>() {
      @Override
      public Object run() {
        try {
          // Invoke the non-public map.cleaner().clean() chain reflectively:
          Method getCleanerMethod = map.getClass().getMethod("cleaner", new Class[0]);
          if(getCleanerMethod == null) {
            return null;
          }
          getCleanerMethod.setAccessible(true);
          Object cleaner = getCleanerMethod.invoke(map, new Object[0]);
          Method cleanMethod = cleaner.getClass().getMethod("clean");
          if(cleanMethod == null) {
            return null;
          }
          cleanMethod.invoke(cleaner);
        }
        catch(Exception e) {
          LoggingUtil.exception(e);
        }
        return null;
      }
    });
  }
  catch(SecurityException e1) {
    // Ignore: without reflection permission we cannot unmap eagerly.
  }
}
java
/**
 * Order the axes by decreasing squared distance from the camera (the
 * distance is negated so a plain ascending sort yields farthest-first),
 * and rebuild the inverse permutation in {@code dindex}.
 */
private void sortAxes() {
  for(int d = 0; d < shared.dim; d++) {
    final double x = shared.layout.getNode(d).getX();
    final double y = shared.layout.getNode(d).getY();
    // Negate: sorting ascending then means "farthest from camera first".
    axes[d].first = -shared.camera.squaredDistanceFromCamera(x, y);
    axes[d].second = d;
  }
  Arrays.sort(axes);
  // Inverse permutation: dimension -> position in sorted order.
  for(int pos = 0; pos < shared.dim; pos++) {
    dindex[axes[pos].second] = pos;
  }
}
java
/**
 * Sort the layout edges by the smaller sorted-axis position of their two
 * endpoints; ties resolve by edge insertion order.
 *
 * @param dindex Axis permutation (dimension -> sort position)
 * @return edge order as (min endpoint position, edge index) pairs
 */
private IntIntPair[] sortEdges(int[] dindex) {
  final int size = shared.layout.edges.size();
  IntIntPair[] sorted = new IntIntPair[size];
  int pos = 0;
  for(Layout.Edge edge : shared.layout.edges) {
    final int a = dindex[edge.dim1], b = dindex[edge.dim2];
    sorted[pos] = new IntIntPair(a < b ? a : b, pos);
    pos++;
  }
  Arrays.sort(sorted);
  return sorted;
}
java
@Override
public void finalizeFirstPassE() {
  // Turn the accumulated weighted sums into the weighted mean.
  final double inv = 1. / wsum;
  for(int d = 0; d < mean.length; d++) {
    mean[d] *= inv;
  }
}
java
/**
 * Restore a single value by multiplying the stored per-dimension mean back in.
 * NOTE(review): assumes normalization divided by the mean — confirm against
 * the corresponding normalize method.
 *
 * @param d Dimension
 * @param val Normalized value
 * @return restored value
 */
private double restore(int d, double val) {
  // With a single stored mean, all dimensions share index 0.
  final int idx = (mean.length == 1) ? 0 : d;
  return val * mean[idx];
}
java
/**
 * Run the OUTRES algorithm: score every object by its recursive subspace
 * outlier score, starting from the empty subspace each time.
 *
 * @param relation Relation to process
 * @return outlier result (low score = outlier, hence inverted score meta)
 */
public OutlierResult run(Relation<? extends NumberVector> relation) {
  final DBIDs ids = relation.getDBIDs();
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmax = new DoubleMinMax();
  KernelDensityEstimator kernel = new KernelDensityEstimator(relation, eps);
  long[] subspace = BitsUtil.zero(kernel.dim);
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("OUTRES scores", ids.size(), LOG) : null;
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    // Reset the (reused) subspace bitmask for every object:
    BitsUtil.zeroI(subspace);
    final double score = outresScore(0, subspace, iter, kernel, ids);
    scores.putDouble(iter, score);
    minmax.put(score);
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  OutlierScoreMeta meta = new InvertedOutlierScoreMeta(minmax.getMin(), minmax.getMax(), 0., 1., 1.);
  return new OutlierResult(meta, new MaterializedDoubleRelation("OUTRES", "outres-score", scores, ids));
}
java
/**
 * Recursive OUTRES scoring: starting at dimension {@code s}, try adding each
 * remaining dimension to the current subspace, and for every relevant
 * subspace multiply the running score by the (normalized) density deviation,
 * then recurse with the reduced candidate set.
 *
 * @param s First dimension index to try adding
 * @param subspace Current subspace bitmask (modified in place, restored on exit)
 * @param id Object being scored
 * @param kernel Kernel density estimation helper
 * @param cands Candidate neighbors for range queries in this branch
 * @return accumulated score factor (smaller = more outlying)
 */
public double outresScore(final int s, long[] subspace, DBIDRef id, KernelDensityEstimator kernel, DBIDs cands) {
  double score = 1.0; // Initial score is 1.0
  final SubspaceEuclideanDistanceFunction df = new SubspaceEuclideanDistanceFunction(subspace);
  MeanVariance meanv = new MeanVariance();
  // Reusable buffers for neighbor candidates and subset query results:
  ModifiableDoubleDBIDList neighcand = DBIDUtil.newDistanceDBIDList(cands.size());
  ModifiableDoubleDBIDList nn = DBIDUtil.newDistanceDBIDList(cands.size());
  for(int i = s; i < kernel.dim; i++) {
    assert !BitsUtil.get(subspace, i);
    // Temporarily add dimension i to the subspace:
    BitsUtil.setI(subspace, i);
    df.setSelectedDimensions(subspace);
    final double adjustedEps = kernel.adjustedEps(kernel.dim);
    // Query at twice the radius so neighcand can serve the subset queries below:
    DoubleDBIDList neigh = initialRange(id, cands, df, adjustedEps * 2, kernel, neighcand);
    // Relevance test
    if(neigh.size() > 2) {
      if(relevantSubspace(subspace, neigh, kernel)) {
        final double density = kernel.subspaceDensity(subspace, neigh);
        // Compute mean and standard deviation for densities of neighbors.
        meanv.reset();
        for(DoubleDBIDListIter neighbor = neigh.iter(); neighbor.valid(); neighbor.advance()) {
          subsetNeighborhoodQuery(neighcand, neighbor, df, adjustedEps, kernel, nn);
          meanv.put(kernel.subspaceDensity(subspace, nn));
        }
        final double deviation = (meanv.getMean() - density) / (2. * meanv.getSampleStddev());
        // High deviation:
        if(deviation >= 1) {
          score *= density / deviation;
        }
        // Recursion
        score *= outresScore(i + 1, subspace, id, kernel, neighcand);
      }
    }
    // Backtrack: remove dimension i again.
    BitsUtil.clearI(subspace, i);
  }
  return score;
}
java
/**
 * Range query over the candidate set: collects all candidates within twice
 * the query radius into {@code n} (for later subset queries), but returns
 * only the prefix within the radius itself.
 *
 * @param obj Query object
 * @param cands Candidate ids to test
 * @param df Distance function (current subspace)
 * @param eps Query radius (candidates kept up to 2 * eps)
 * @param kernel Kernel helper (provides the relation)
 * @param n Output buffer, cleared and refilled
 * @return sorted neighbors within eps
 */
private DoubleDBIDList initialRange(DBIDRef obj, DBIDs cands, PrimitiveDistanceFunction<? super NumberVector> df, double eps, KernelDensityEstimator kernel, ModifiableDoubleDBIDList n) {
  n.clear();
  final NumberVector center = kernel.relation.get(obj);
  final double twoeps = eps * 2;
  int within = 0;
  for(DBIDIter it = cands.iter(); it.valid(); it.advance()) {
    final double d = df.distance(center, kernel.relation.get(it));
    if(!(d <= twoeps)) {
      continue; // Outside the extended radius (also skips NaN).
    }
    n.add(d, it);
    if(d <= eps) {
      ++within;
    }
  }
  n.sort();
  // After sorting, the first 'within' entries are exactly the eps-neighbors.
  return n.slice(0, within);
}
java
private DoubleDBIDList subsetNeighborhoodQuery(DoubleDBIDList neighc, DBIDRef dbid, PrimitiveDistanceFunction<? super NumberVector> df, double adjustedEps, KernelDensityEstimator kernel, ModifiableDoubleDBIDList n) { n.clear(); NumberVector query = kernel.relation.get(dbid); for(DoubleDBIDListIter neighbor = neighc.iter(); neighbor.valid(); neighbor.advance()) { // TODO: use triangle inequality for pruning double dist = df.distance(query, kernel.relation.get(neighbor)); if(dist <= adjustedEps) { n.add(dist, neighbor); } } return n; }
java
/**
 * Subspace relevance test: for every selected dimension, run a
 * Kolmogorov-Smirnov test of the neighbors' attribute values against the
 * uniform distribution (on their observed min-max range). The subspace is
 * relevant only if the values deviate significantly from uniform in
 * <em>every</em> selected dimension.
 *
 * @param subspace Subspace bitmask to test
 * @param neigh Neighborhood to test (must have more than 2 elements)
 * @param kernel Kernel helper (provides the relation)
 * @return {@code true} if the subspace is relevant
 */
protected boolean relevantSubspace(long[] subspace, DoubleDBIDList neigh, KernelDensityEstimator kernel) {
  // KS critical value at the 0.01 level, scaled by sample size:
  final double crit = K_S_CRITICAL001 / FastMath.sqrt(neigh.size() - 2);
  double[] data = new double[neigh.size()];
  Relation<? extends NumberVector> relation = kernel.relation;
  for(int dim = BitsUtil.nextSetBit(subspace, 0); dim >= 0; dim = BitsUtil.nextSetBit(subspace, dim + 1)) {
    // TODO: can/should we save this copy?
    int count = 0;
    for(DBIDIter neighbor = neigh.iter(); neighbor.valid(); neighbor.advance()) {
      data[count++] = relation.get(neighbor).doubleValue(dim);
    }
    assert (count == neigh.size());
    Arrays.sort(data);
    // Normalize to the observed value range:
    final double min = data[0], norm = data[data.length - 1] - min;
    // Kolmogorow-Smirnow-Test against uniform distribution:
    boolean flag = false;
    for(int j = 1, end = data.length - 1; j < end; j++) {
      // Compare empirical CDF position against the uniform CDF:
      if(Math.abs(j / (data.length - 2.) - (data[j] - min) / norm) > crit) {
        flag = true;
        break;
      }
    }
    // No significant deviation in this dimension -> subspace not relevant.
    if(!flag) {
      return false;
    }
  }
  return true;
}
java
/**
 * Arithmetic mean of the given values (NaN for an empty input).
 *
 * @param data Values to average
 * @return mean of the values
 */
public static double of(double... data) {
  double total = 0.;
  for(int i = 0; i < data.length; i++) {
    total += data[i];
  }
  return total / data.length;
}
java
@Reference(authors = "P. M. Neely", // title = "Comparison of Several Algorithms for Computation of Means, Standard Deviations and Correlation Coefficients", // booktitle = "Communications of the ACM 9(7), 1966", // url = "https://doi.org/10.1145/365719.365958", // bibkey = "doi:10.1145/365719.365958") public static double highPrecision(double... data) { double sum = 0.; for(double v : data) { sum += v; } sum /= data.length; // Perform a second pass to increase precision // In ideal math, this would sum to 0. double err = 0; for(double v : data) { err += v - sum; } return sum + err / data.length; }
java
/**
 * Bulk-insert entries, lazily initializing the index from the first entry
 * if necessary. An empty list is a no-op.
 *
 * @param entries Entries to insert
 */
public void insertAll(List<E> entries) {
  if(entries.isEmpty()) {
    return; // Nothing to do, and nothing to initialize from.
  }
  if(!initialized) {
    initialize(entries.get(0));
  }
  for(E entry : entries) {
    insert(entry, false);
  }
}
java
/**
 * Sort the entries of a node by their minimum possible distance to the
 * query object (distance to routing object minus covering radius, clamped
 * to zero inside the covering ball).
 *
 * @param node Node whose entries to sort
 * @param q Query object id
 * @return (minimum distance, entry index) pairs in ascending order
 */
protected final List<DoubleIntPair> getSortedEntries(N node, DBID q) {
  final int count = node.getNumEntries();
  List<DoubleIntPair> result = new ArrayList<>(count);
  for(int i = 0; i < count; i++) {
    final E entry = node.getEntry(i);
    final double dist = distance(entry.getRoutingObjectID(), q);
    final double radius = entry.getCoveringRadius();
    // Lower bound on the distance to anything inside the covering ball:
    final double minDist = dist < radius ? 0.0 : dist - radius;
    result.add(new DoubleIntPair(minDist, i));
  }
  Collections.sort(result);
  return result;
}
java
/**
 * Distance between two index entries, computed on their routing object ids
 * by delegating to the id-based distance method.
 *
 * @param e1 First entry
 * @param e2 Second entry
 * @return distance between the routing objects of the two entries
 */
public final double distance(E e1, E e2) {
  return distance(e1.getRoutingObjectID(), e2.getRoutingObjectID());
}
java
/**
 * Compute the alpha probability weighted moments of sorted data.
 *
 * @param data Sorted input data
 * @param adapter Array adapter for the data
 * @param nmom Number of moments to compute
 * @return the alpha PWMs
 */
public static <A> double[] alphaPWM(A data, NumberArrayAdapter<?, A> adapter, final int nmom) {
  final int n = adapter.size(data);
  final double[] xmom = new double[nmom];
  double weight = 1. / n;
  for(int i = 0; i < n; i++) {
    final double val = adapter.getDouble(data, i);
    xmom[0] += weight * val;
    for(int j = 1; j < nmom; j++) {
      // BUGFIX: cast to double — without it this is integer division,
      // which truncates the weight update (usually to zero).
      weight *= (n - i - j + 1) / (double) (n - j + 1);
      xmom[j] += weight * val;
    }
  }
  return xmom;
}
java
/**
 * Compute both the alpha and beta probability weighted moments of sorted
 * data, interleaved: xmom[2j] holds the alpha moment, xmom[2j+1] the beta
 * moment of order j.
 *
 * @param data Sorted input data
 * @param adapter Array adapter for the data
 * @param nmom Number of moment orders to compute (output has 2 * nmom entries)
 * @return interleaved alpha/beta PWMs
 */
public static <A> double[] alphaBetaPWM(A data, NumberArrayAdapter<?, A> adapter, final int nmom) {
  final int n = adapter.size(data);
  final double[] xmom = new double[nmom << 1];
  double aweight = 1. / n, bweight = aweight;
  for(int i = 0; i < n; i++) {
    final double val = adapter.getDouble(data, i);
    xmom[0] += aweight * val;
    xmom[1] += bweight * val;
    for(int j = 1, k = 2; j < nmom; j++, k += 2) {
      // BUGFIX: cast to double — otherwise integer division truncates
      // both weight updates (usually to zero).
      aweight *= (n - i - j + 1) / (double) (n - j + 1);
      bweight *= (i - j + 1) / (double) (n - j + 1);
      // BUGFIX: the alpha term belongs in the even slot k; the original
      // added both terms to xmom[k + 1], leaving xmom[k] always zero.
      xmom[k] += aweight * val;
      xmom[k + 1] += bweight * val;
    }
  }
  return xmom;
}
java
/**
 * Estimate the sample L-moments (and L-moment ratios) of sorted data:
 * unbiased probability weighted moments are accumulated, normalized by the
 * binomial coefficients, converted to L-moments in place via
 * {@code normalizeLMR}, and finally lambda3..lambdaN are divided by lambda2
 * to yield the ratios tau3..tauN.
 *
 * NOTE(review): sum[1] is read unconditionally below — assumes the
 * requested nmom is at least 2; confirm with callers.
 *
 * @param sorted Input data, must be sorted ascending
 * @param adapter Array adapter for the data
 * @param nmom Number of moments requested (capped at the sample size)
 * @return lambda1, lambda2, tau3, ..., tauN
 */
public static <A> double[] samLMR(A sorted, NumberArrayAdapter<?, A> adapter, int nmom) {
  final int n = adapter.size(sorted);
  final double[] sum = new double[nmom];
  // Cannot estimate more moments than we have data points:
  nmom = n < nmom ? n : nmom;
  // Estimate probability weighted moments (unbiased)
  for(int i = 0; i < n; i++) {
    double term = adapter.getDouble(sorted, i);
    // Robustness: skip bad values
    if(Double.isInfinite(term) || Double.isNaN(term)) {
      continue;
    }
    sum[0] += term;
    // term accumulates val * i * (i-1) * ... for the higher moments:
    for(int j = 1, z = i; j < nmom; j++, z--) {
      term *= z;
      sum[j] += term;
    }
  }
  // Normalize by "n choose (j + 1)"
  sum[0] /= n;
  double z = n;
  for(int j = 1; j < nmom; j++) {
    z *= n - j;
    sum[j] /= z;
  }
  // In-place conversion of PWMs to L-moments:
  normalizeLMR(sum, nmom);
  // Handle case when lambda2 == 0, by setting tau3...tauN = 0:
  if(sum[1] == 0) {
    for(int i = 2; i < nmom; i++) {
      sum[i] = 0.; // tau3...tauN = 0.
    }
    return sum;
  }
  // Map lambda3...lambdaN to tau3...tauN
  for(int i = 2; i < nmom; i++) {
    sum[i] /= sum[1];
  }
  return sum;
}
java
/**
 * Convert probability weighted moments to L-moments, in place. Moments are
 * rewritten from the highest order down, so the lower-order entries remain
 * valid inputs for each step.
 *
 * @param sum Probability weighted moments, replaced by L-moments
 * @param nmom Number of moments
 */
private static void normalizeLMR(double[] sum, int nmom) {
  for(int k = nmom - 1; k >= 1; --k) {
    // Alternating-sign coefficient recurrence for the k-th L-moment:
    double coeff = ((k & 1) == 0) ? +1 : -1;
    double acc = coeff * sum[0];
    for(int i = 0; i < k; i++) {
      final double ai = i + 1.;
      coeff *= -(k + ai) * (k - i) / (ai * ai);
      acc += coeff * sum[i + 1];
    }
    sum[k] = acc;
  }
}
java
private int[] countItemSupport(final Relation<BitVector> relation, final int dim) { final int[] counts = new int[dim]; FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Finding frequent 1-items", relation.size(), LOG) : null; for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { SparseFeatureVector<?> bv = relation.get(iditer); // TODO: only count those which satisfy minlength? for(int it = bv.iter(); bv.iterValid(it); it = bv.iterAdvance(it)) { counts[bv.iterDim(it)]++; } LOG.incrementProcessed(prog); } LOG.ensureCompleted(prog); return counts; }
java
private FPTree buildFPTree(final Relation<BitVector> relation, int[] iidx, final int items) { FPTree tree = new FPTree(items); FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Building FP-tree", relation.size(), LOG) : null; int[] buf = new int[items]; for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { // Convert item to index representation: int l = 0; SparseFeatureVector<?> bv = relation.get(iditer); for(int it = bv.iter(); bv.iterValid(it); it = bv.iterAdvance(it)) { int i = iidx[bv.iterDim(it)]; if(i < 0) { continue; // Skip non-frequent items } buf[l++] = i; } // Skip too short entries if(l >= minlength) { Arrays.sort(buf, 0, l); // Sort ascending tree.insert(buf, 0, l, 1); } LOG.incrementProcessed(prog); } LOG.ensureCompleted(prog); return tree; }
java
/**
 * Append this association rule to a string builder, in the form
 * "antecedent --&gt; consequent: support : measure".
 *
 * @param buf Buffer to append to
 * @param meta Type information for decoding item labels
 * @return the buffer, for chaining
 */
public StringBuilder appendTo(StringBuilder buf, VectorFieldTypeInformation<BitVector> meta) {
  this.antecedent.appendTo(buf, meta);
  this.consequent.appendItemsTo(buf.append(" --> "), meta);
  return buf.append(": ").append(union.getSupport()).append(" : ").append(this.measure);
}
java
/**
 * Build the pair-counting contingency table of two clusterings.
 *
 * Layout of the table (size (size1+2) x (size2+2)):
 * rows 0..size1-1 / cols 0..size2-1 hold the intersection sizes;
 * contingency[i1][size2] and contingency[size1][i2] hold the row/column
 * sums of the intersections; contingency[size1][size2] the grand total;
 * contingency[i1][size2+1] and contingency[size1+1][i2] hold the raw
 * cluster sizes, with their totals at [size1][size2+1] and [size1+1][size2].
 * Noise clusters are flagged in the noise1/noise2 bitmasks.
 *
 * @param result1 First clustering
 * @param result2 Second clustering
 */
public void process(Clustering<?> result1, Clustering<?> result2) {
  // Get the clusters
  final List<? extends Cluster<?>> cs1 = result1.getAllClusters();
  final List<? extends Cluster<?>> cs2 = result2.getAllClusters();
  // Initialize
  size1 = cs1.size();
  size2 = cs2.size();
  contingency = new int[size1 + 2][size2 + 2];
  noise1 = BitsUtil.zero(size1);
  noise2 = BitsUtil.zero(size2);
  // Fill main part of matrix
  {
    // Second clustering: record cluster sizes and noise flags.
    final Iterator<? extends Cluster<?>> it2 = cs2.iterator();
    for(int i2 = 0; it2.hasNext(); i2++) {
      final Cluster<?> c2 = it2.next();
      if(c2.isNoise()) {
        BitsUtil.setI(noise2, i2);
      }
      contingency[size1 + 1][i2] = c2.size();
      contingency[size1 + 1][size2] += c2.size();
    }
  }
  final Iterator<? extends Cluster<?>> it1 = cs1.iterator();
  for(int i1 = 0; it1.hasNext(); i1++) {
    final Cluster<?> c1 = it1.next();
    if(c1.isNoise()) {
      BitsUtil.setI(noise1, i1);
    }
    // Set representation for fast intersection counting below:
    final DBIDs ids = DBIDUtil.ensureSet(c1.getIDs());
    contingency[i1][size2 + 1] = c1.size();
    contingency[size1][size2 + 1] += c1.size();
    final Iterator<? extends Cluster<?>> it2 = cs2.iterator();
    for(int i2 = 0; it2.hasNext(); i2++) {
      final Cluster<?> c2 = it2.next();
      // Core cell plus the three margin accumulators:
      int count = DBIDUtil.intersectionSize(ids, c2.getIDs());
      contingency[i1][i2] = count;
      contingency[i1][size2] += count;
      contingency[size1][i2] += count;
      contingency[size1][size2] += count;
    }
  }
}
java
private long[] randomSubspace(final int alldim, final int mindim, final int maxdim, final Random rand) { long[] dimset = BitsUtil.zero(alldim); // Fill with all dimensions int[] dims = new int[alldim]; for(int d = 0; d < alldim; d++) { dims[d] = d; } // Target dimensionality: int subdim = mindim + rand.nextInt(maxdim - mindim); // Shrink the subspace to the destination size for(int d = 0; d < alldim - subdim; d++) { int s = rand.nextInt(alldim - d); BitsUtil.setI(dimset, dims[s]); dims[s] = dims[alldim - d - 1]; } return dimset; }
java