code
stringlengths
73
34.1k
label
stringclasses
1 value
/**
 * Register every concrete, instantiable implementation of the given restriction
 * class found in the master cache with the ELKIServiceRegistry. Interfaces,
 * abstract and private classes are skipped; a class counts as instantiable when
 * it has a public no-argument constructor or a parameterizer.
 *
 * NOTE(review): {@code getConstructor()} never returns null (it throws instead),
 * so the {@code != null} test only converts the lookup into a boolean; the broad
 * {@code Exception | Error} catches deliberately treat any lookup failure as
 * "not instantiable".
 *
 * @param restrictionClass base class or interface to register implementations of
 */
public static void load(Class<?> restrictionClass) { if(MASTER_CACHE == null) { initialize(); } if(MASTER_CACHE.isEmpty()) { return; } Iterator<Class<?>> iter = MASTER_CACHE.iterator(); while(iter.hasNext()) { Class<?> clazz = iter.next(); // Skip other classes. if(!restrictionClass.isAssignableFrom(clazz)) { continue; } // skip abstract / private classes. if(Modifier.isInterface(clazz.getModifiers()) || Modifier.isAbstract(clazz.getModifiers()) || Modifier.isPrivate(clazz.getModifiers())) { continue; } boolean instantiable = false; try { instantiable = clazz.getConstructor() != null; } catch(Exception | Error e) { // ignore } try { instantiable = instantiable || ClassGenericsUtil.getParameterizer(clazz) != null; } catch(Exception | Error e) { // ignore } if(!instantiable) { continue; } ELKIServiceRegistry.register(restrictionClass, clazz); } }
java
/**
 * Compare two classes first by package, then by canonical name within the same
 * package; classes without a package sort first. Relies on {@link Package}
 * instances being canonical per class loader, so identity comparison is used.
 */
private static int comparePackageClass(Class<?> o1, Class<?> o2) { return o1.getPackage() == o2.getPackage() ? // o1.getCanonicalName().compareTo(o2.getCanonicalName()) // : o1.getPackage() == null ? -1 : o2.getPackage() == null ? +1 // : o1.getPackage().getName().compareTo(o2.getPackage().getName()); }
java
/**
 * Determine the priority of a class via its {@code Priority} annotation,
 * falling back to the annotation on the declaring (outer) class, and finally
 * to {@code Priority.DEFAULT} when neither carries one.
 */
private static int classPriority(Class<?> o1) {
  Priority p = o1.getAnnotation(Priority.class);
  if(p == null) {
    final Class<?> outer = o1.getDeclaringClass();
    if(outer != null) {
      p = outer.getAnnotation(Priority.class);
    }
  }
  return (p == null) ? Priority.DEFAULT : p.value();
}
java
/**
 * Allocate a new slot in the union-find structure: the new element starts as
 * its own root with the given weight.
 *
 * @param weight initial weight of the new element
 * @return index of the newly allocated element
 */
public int nextIndex(int weight) {
  if(used == parent.length) {
    // Grow by roughly 1.5x, but always by at least one slot:
    // used + (used >> 1) == used for used <= 1, which would not grow at all
    // and the writes below would then run out of bounds.
    int nsize = Math.max(used + 1, used + (used >> 1));
    this.weight = Arrays.copyOf(this.weight, nsize);
    this.parent = Arrays.copyOf(this.parent, nsize);
  }
  this.weight[used] = weight;
  this.parent[used] = used; // New element is its own root.
  return used++;
}
java
public int find(int cur) { assert (cur >= 0 && cur < parent.length); int p = parent[cur], tmp; while(cur != p) { tmp = p; p = parent[cur] = parent[p]; // Perform simple path compression. cur = tmp; } return cur; }
java
/**
 * Merge the components of two elements using union-by-weight: the lighter
 * tree is attached below the heavier one.
 *
 * @param first first element
 * @param second second element
 * @return representative of the merged component
 */
public int union(int first, int second) {
  final int rootA = find(first), rootB = find(second);
  if(rootA == rootB) {
    return rootA; // Already in the same component.
  }
  final int wA = weight[rootA], wB = weight[rootB];
  if(wA > wB) {
    parent[rootB] = rootA;
    weight[rootA] += wB;
    return rootA;
  }
  parent[rootA] = rootB;
  weight[rootB] += wA;
  return rootB;
}
java
/**
 * Collect all component representatives, i.e. elements that are their own
 * parent (roots of merged components, or singleton components).
 *
 * @return list of root indexes
 */
public IntList getRoots() { IntList roots = new IntArrayList(); for(int i = 0; i < used; i++) { // roots or one element in component if(parent[i] == i) { roots.add(i); } } return roots; }
java
/**
 * Grow an overflowing supernode by enlarging its entry array to
 * {@code 2 * capacity - 1} slots; throws when the node is not yet full.
 *
 * NOTE(review): the return value is the capacity as reported after the
 * resize — presumably derived from the new entry array length; confirm
 * against getCapacity().
 *
 * @return the (new) capacity of this supernode
 */
public int growSuperNode() { if(getNumEntries() < getCapacity()) { throw new IllegalStateException("This node is not yet overflowing (only " + getNumEntries() + " of " + getCapacity() + " entries)"); } Entry[] old_nodes = super.entries.clone(); assert old_nodes[old_nodes.length - 1] != null; super.entries = (Entry[]) java.util.Arrays.copyOfRange(old_nodes, 0, getCapacity() * 2 - 1, entries.getClass()); assert super.entries.length == old_nodes.length * 2 - 1; return getCapacity(); }
java
public <T extends AbstractXTree<N>> void readSuperNode(ObjectInput in, T tree) throws IOException, ClassNotFoundException { readExternal(in); if(capacity_to_be_filled <= 0 || !isSuperNode()) { throw new IllegalStateException("This node does not appear to be a supernode"); } if(isLeaf) { throw new IllegalStateException("A supernode is cannot be a leaf"); } // TODO: verify entries = new Entry[capacity_to_be_filled]; // old way: // entries = (E[]) new XDirectoryEntry[capacity_to_be_filled]; capacity_to_be_filled = 0; for(int i = 0; i < numEntries; i++) { SpatialEntry s = new SpatialDirectoryEntry(); s.readExternal(in); entries[i] = s; } N n = tree.getSupernodes().put((long) getPageID(), (N) this); if(n != null) { Logging.getLogger(this.getClass()).fine("Warning: this supernode should only be read once. Now a node of size " + entries.length + " has replaced a node of size " + n.entries.length + " for id " + getPageID()); } }
java
/**
 * Compare two DBID references; delegates to the configured DBID factory.
 */
public static int compare(DBIDRef id1, DBIDRef id2) { return DBIDFactory.FACTORY.compare(id1, id2); }
java
/**
 * Test two DBID references for equality; delegates to the configured DBID factory.
 */
public static boolean equal(DBIDRef id1, DBIDRef id2) { return DBIDFactory.FACTORY.equal(id1, id2); }
java
/**
 * Dereference a DBID reference into a materialized DBID, avoiding a new
 * object when the reference already is one.
 */
public static DBID deref(DBIDRef ref) { return ref instanceof DBID ? (DBID) ref : importInteger(ref.internalGetIndex()); }
java
/**
 * Compute the set union of two DBID collections.
 *
 * @param ids1 first set
 * @param ids2 second set
 * @return new modifiable set containing all ids of both inputs
 */
public static ModifiableDBIDs union(DBIDs ids1, DBIDs ids2) {
  // Size for the worst case (disjoint inputs) to avoid rehashing; the previous
  // max(size1, size2) hint forced a resize whenever both inputs overlapped little.
  ModifiableDBIDs result = DBIDUtil.newHashSet(ids1.size() + ids2.size());
  result.addDBIDs(ids1);
  result.addDBIDs(ids2);
  return result;
}
java
/**
 * Compute the set difference ids1 \ ids2 as a new modifiable set.
 */
public static ModifiableDBIDs difference(DBIDs ids1, DBIDs ids2) { ModifiableDBIDs result = DBIDUtil.newHashSet(ids1); result.removeDBIDs(ids2); return result; }
java
/**
 * Return the ids as an array-backed collection, copying only when necessary.
 */
public static ArrayDBIDs ensureArray(DBIDs ids) { return ids instanceof ArrayDBIDs ? (ArrayDBIDs) ids : newArray(ids); }
java
/**
 * Return the ids as a set-backed collection, copying only when necessary.
 */
public static SetDBIDs ensureSet(DBIDs ids) { return ids instanceof SetDBIDs ? (SetDBIDs) ids : newHashSet(ids); }
java
/**
 * Return the ids as a modifiable collection, copying only when necessary;
 * a hash-set input is copied to a hash set, anything else to an array.
 */
public static ModifiableDBIDs ensureModifiable(DBIDs ids) { return ids instanceof ModifiableDBIDs ? (ModifiableDBIDs) ids : // ids instanceof HashSetDBIDs ? newHashSet(ids) : newArray(ids); }
java
/**
 * Create a pair of DBIDs; delegates to the configured DBID factory.
 */
public static DBIDPair newPair(DBIDRef id1, DBIDRef id2) { return DBIDFactory.FACTORY.newPair(id1, id2); }
java
/**
 * Create a (double, DBID) pair; delegates to the configured DBID factory.
 */
public static DoubleDBIDPair newPair(double val, DBIDRef id) { return DBIDFactory.FACTORY.newPair(val, id); }
java
/**
 * Sort an array of integer DBIDs in-place over its full range, using the
 * given DBID comparator.
 */
public static void sort(int[] data, Comparator<? super DBIDRef> comp) { sort(data, 0, data.length, comp); }
java
/**
 * Compare two integer ids via reusable DBID variables: the variables are
 * (re)pointed at p1 and p2 and then compared, avoiding object allocation.
 * Note that both variables are mutated as a side effect.
 */
private static int compare(IntegerDBIDVar i1, int p1, IntegerDBIDVar i2, int p2, Comparator<? super DBIDRef> comp) { i1.internalSetIndex(p1); i2.internalSetIndex(p2); return comp.compare(i1, i2); }
java
@Override protected int computeHeight() { N node = getRoot(); int tHeight = 1; // compute height while(!node.isLeaf() && node.getNumEntries() != 0) { SpatialEntry entry = node.getEntry(0); node = getNode(entry); tHeight++; } return tHeight; }
java
/**
 * Flush the tree to its persistent page file: writes the header (with the
 * supernode offset and element count), then serializes each supernode into a
 * padded block of whole pages appended after the regular pages.
 *
 * NOTE(review): supernodes are written sequentially in map iteration order;
 * each is padded to ceil(capacity / dirCapacity) pages.
 *
 * @return number of bytes written for the supernodes
 * @throws IOException on write errors
 * @throws IllegalStateException when the page file is not persistent, or a
 *         supernode does not fit its reserved pages
 */
public long commit() throws IOException { final PageFile<N> file = super.getFile(); if(!(file instanceof PersistentPageFile)) { throw new IllegalStateException("Trying to commit a non-persistent XTree"); } long npid = file.getNextPageID(); XTreeHeader ph = (XTreeHeader) ((PersistentPageFile<?>) file).getHeader(); long offset = (ph.getReservedPages() + npid) * ph.getPageSize(); ph.setSupernode_offset(npid * ph.getPageSize()); ph.setNumberOfElements(num_elements); RandomAccessFile ra_file = ((PersistentPageFile<?>) file).getFile(); ph.writeHeader(ra_file); ra_file.seek(offset); long nBytes = 0; for(Iterator<N> iterator = supernodes.values().iterator(); iterator.hasNext();) { N supernode = iterator.next(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(baos); supernode.writeSuperNode(oos); oos.close(); baos.close(); byte[] array = baos.toByteArray(); byte[] sn_array = new byte[getPageSize() * (int) Math.ceil((double) supernode.getCapacity() / dirCapacity)]; if(array.length > sn_array.length) { throw new IllegalStateException("Supernode is too large for fitting in " + ((int) Math.ceil((double) supernode.getCapacity() / dirCapacity)) + " pages of total size " + sn_array.length); } System.arraycopy(array, 0, sn_array, 0, array.length); // file.countWrite(); ra_file.write(sn_array); nBytes += sn_array.length; } return nBytes; }
java
/**
 * Record that entry1 has been expanded towards entry2, creating the
 * per-page set of expansion targets on first use.
 */
public void setExpanded(SpatialEntry entry1, SpatialEntry entry2) { IntSet exp1 = expanded.get(getPageID(entry1)); if(exp1 == null) { exp1 = new IntOpenHashSet(); expanded.put(getPageID(entry1), exp1); } exp1.add(getPageID(entry2)); }
java
/**
 * Get the set of page ids the given entry was expanded towards, or an
 * empty set when it was never expanded.
 */
public IntSet getExpanded(SpatialEntry entry) { IntSet exp = expanded.get(getPageID(entry)); return (exp != null) ? exp : IntSets.EMPTY_SET; }
java
/**
 * Add a value to the histogram bin containing the given coordinate, growing
 * or shifting the backing array so the bin becomes addressable:
 * a negative bin index shifts existing data right (reallocating when it would
 * not fit) and the new value lands at index 0; a bin past the end grows the
 * array. Bins materialized in between implicitly hold zero.
 *
 * NOTE(review): on growth to the right, {@code max} is reset to
 * Double.MAX_VALUE — presumably a cached upper bound invalidation; confirm
 * against the field's other uses.
 */
public void increment(double coord, double val) { int bin = getBinNr(coord); if (bin < 0) { if (size - bin > data.length) { // Reallocate. TODO: use an arraylist-like grow strategy! double[] tmpdata = new double[growSize(data.length, size - bin)]; System.arraycopy(data, 0, tmpdata, -bin, size); data = tmpdata; } else { // Shift in place and clear head System.arraycopy(data, 0, data, -bin, size); Arrays.fill(data, 0, -bin, (double) 0); } data[0] = val; // Note that bin is negative, -bin is the shift offset! assert (data.length >= size - bin); offset -= bin; size -= bin; // TODO: modCounter++; and have iterators fast-fail } else if (bin >= data.length) { double[] tmpdata = new double[growSize(data.length, bin + 1)]; System.arraycopy(data, 0, tmpdata, 0, size); tmpdata[bin] = val; data = tmpdata; size = bin + 1; // TODO: modCounter++; and have iterators fast-fail // Unset max value when resizing max = Double.MAX_VALUE; } else { if (bin >= size) { // TODO: reset bins to 0 first? size = bin + 1; } data[bin] += val; } }
java
/**
 * Get the histogram value at the given coordinate; bins outside of the
 * materialized range implicitly hold zero.
 */
public double get(double coord) {
  final int bin = getBinNr(coord);
  if(bin < 0 || bin >= size) {
    return 0;
  }
  return data[bin];
}
java
/**
 * Deduplicate the border assignments (by core, after sorting) and add the
 * given border unless its core is already present. When deduplication leaves
 * exactly one known entry, that single Border is returned and this object is
 * invalidated (cs set to null); otherwise this (possibly shrunk or extended)
 * assignment is returned.
 */
public Assignment update(Border border) { Arrays.sort(cs); int j = 1; boolean found = (cs[0].core == border.core); for(int i = 1; i < cs.length; i++) { if(cs[i].core != cs[i - 1].core) { cs[j++] = cs[i]; } found |= (cs[i].core == border.core); } if(found) { if(j == 1) { Border r = cs[0]; cs = null; // Prevent further use return r; } if(j < cs.length) { cs = Arrays.copyOf(cs, j); } return this; } if(j + 1 != cs.length) { cs = Arrays.copyOf(cs, j + 1); } cs[j] = border; return this; }
java
public Core getCore() { Core a = cs[0].core; for(int i = 1; i < cs.length; i++) { Core v = cs[i].core; a = a.num > v.num ? a : v; // max, of negative values } return a; }
java
/**
 * Locate the cluster currently containing the given object.
 *
 * @param clusters candidate clusters (first k are checked)
 * @param id object to look up
 * @return index of the containing cluster, or -1 when not found
 */
protected int currentCluster(List<? extends ModifiableDBIDs> clusters, DBIDRef id) {
  int index = -1;
  for(int i = 0; i < k && index < 0; i++) {
    if(clusters.get(i).contains(id)) {
      index = i;
    }
  }
  return index;
}
java
/**
 * Compute the INFLO outlier scores: for each object, the mean inverse k-distance
 * over its kNN union reverse-kNN neighborhood, scaled by the object's own
 * k-distance. Pruned objects and objects with zero k-distance get score 1.
 * A neighbor with non-positive k-distance makes the sum infinite (score
 * becomes infinite as well). Scores and their min/max are written to the
 * output parameters.
 */
protected void computeINFLO(Relation<O> relation, ModifiableDBIDs pruned, KNNQuery<O> knnq, WritableDataStore<ModifiableDBIDs> rNNminuskNNs, WritableDoubleDataStore inflos, DoubleMinMax inflominmax) { FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing INFLOs", relation.size(), LOG) : null; HashSetModifiableDBIDs set = DBIDUtil.newHashSet(); for(DBIDIter iter = relation.iterDBIDs(); iter.valid(); iter.advance()) { if(pruned.contains(iter)) { inflos.putDouble(iter, 1.); inflominmax.put(1.); LOG.incrementProcessed(prog); continue; } final KNNList knn = knnq.getKNNForDBID(iter, kplus1); if(knn.getKNNDistance() == 0.) { inflos.putDouble(iter, 1.); inflominmax.put(1.); LOG.incrementProcessed(prog); continue; } set.clear(); set.addDBIDs(knn); set.addDBIDs(rNNminuskNNs.get(iter)); // Compute mean density of NN \cup RNN double sum = 0.; int c = 0; for(DBIDIter niter = set.iter(); niter.valid(); niter.advance()) { if(DBIDUtil.equal(iter, niter)) { continue; } final double kdist = knnq.getKNNForDBID(niter, kplus1).getKNNDistance(); if(kdist <= 0) { sum = Double.POSITIVE_INFINITY; c++; break; } sum += 1. / kdist; c++; } sum *= knn.getKNNDistance(); final double inflo = sum == 0 ? 1. : sum / c; inflos.putDouble(iter, inflo); inflominmax.put(inflo); LOG.incrementProcessed(prog); } LOG.ensureCompleted(prog); }
java
/**
 * Run the ODIN outlier algorithm: score each object by its kNN in-degree
 * (how often it appears in other objects' k-nearest-neighbor lists; the
 * object itself is excluded). Low in-degree indicates an outlier, hence the
 * inverted score meta.
 *
 * @param database database to query
 * @param relation data relation to process
 * @return outlier result with in-degree scores
 */
public OutlierResult run(Database database, Relation<O> relation) { // Get the query functions: DistanceQuery<O> dq = database.getDistanceQuery(relation, getDistanceFunction()); KNNQuery<O> knnq = database.getKNNQuery(dq, k); // Get the objects to process, and a data storage for counting and output: DBIDs ids = relation.getDBIDs(); WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_DB, 0.); // Process all objects for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { // Find the nearest neighbors (using an index, if available!) KNNList neighbors = knnq.getKNNForDBID(iter, k); // For each neighbor, except ourselves, increase the in-degree: for(DBIDIter nei = neighbors.iter(); nei.valid(); nei.advance()) { if(DBIDUtil.equal(iter, nei)) { continue; } scores.put(nei, scores.doubleValue(nei) + 1); } } // Compute maximum double min = Double.POSITIVE_INFINITY, max = 0.0; for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { min = Math.min(min, scores.doubleValue(iter)); max = Math.max(max, scores.doubleValue(iter)); } // Wrap the result and add metadata. // By actually specifying theoretical min, max and baseline, we get a better // visualization (try it out - or see the screenshots in the tutorial)! OutlierScoreMeta meta = new InvertedOutlierScoreMeta(min, max, 0., ids.size() - 1, k); DoubleRelation rel = new MaterializedDoubleRelation("ODIN In-Degree", "odin", scores, ids); return new OutlierResult(meta, rel); }
java
/**
 * Load a stock icon by name, caching it via SoftReferences so icons can be
 * reclaimed under memory pressure. Returns null (and logs a warning) when
 * the icon resource cannot be found.
 */
public static Icon getStockIcon(String name) { SoftReference<Icon> ref = iconcache.get(name); if(ref != null) { Icon icon = ref.get(); if(icon != null) { return icon; } } java.net.URL imgURL = StockIcon.class.getResource(name + ".png"); if(imgURL != null) { Icon icon = new ImageIcon(imgURL); iconcache.put(name, new SoftReference<>(icon)); return icon; } LoggingUtil.warning("Could not find stock icon: " + name); return null; }
java
/**
 * Initialize the flat R*-tree from an existing page file, rebuilding the
 * root directory node with one entry per stored page (pages 1..nextPageID-1).
 *
 * NOTE(review): the debug message reports nextPageID as the leaf count,
 * although only nextPageID - 1 nodes are attached — verify intent.
 */
@Override public void initializeFromFile(TreeIndexHeader header, PageFile<FlatRStarTreeNode> file) { super.initializeFromFile(header, file); // reconstruct root int nextPageID = file.getNextPageID(); dirCapacity = nextPageID; root = createNewDirectoryNode(); for(int i = 1; i < nextPageID; i++) { FlatRStarTreeNode node = getNode(i); root.addDirectoryEntry(createNewDirectoryEntry(node)); } if(LOG.isDebugging()) { LOG.debugFine("root: " + root + " with " + nextPageID + " leafNodes."); } }
java
/**
 * Recursively bulk-load a cover tree node for routing object {@code cur} and
 * the remaining elements: builds a leaf when the points coincide, the scale
 * bottom is reached, or few elements remain; otherwise partitions the
 * elements into the cover of {@code cur} and child covers at the next
 * smaller scale, collecting singletons separately.
 *
 * NOTE(review): {@code elems} is consumed and recycled during recursion;
 * candidates must keep their first element stable (asserted below).
 *
 * @param cur routing object
 * @param maxScale maximum scale for this subtree
 * @param parentDist distance to the parent routing object
 * @param elems remaining elements with their distances to cur
 * @return constructed subtree root
 */
protected Node bulkConstruct(DBIDRef cur, int maxScale, double parentDist, ModifiableDoubleDBIDList elems) { assert (!elems.contains(cur)); final double max = maxDistance(elems); final int scale = Math.min(distToScale(max) - 1, maxScale); final int nextScale = scale - 1; // Leaf node, because points coincide, we are too deep, or have too few // elements remaining: if(max <= 0 || scale <= scaleBottom || elems.size() < truncate) { return new Node(cur, max, parentDist, elems); } // Find neighbors in the cover of the current object: ModifiableDoubleDBIDList candidates = DBIDUtil.newDistanceDBIDList(); excludeNotCovered(elems, scaleToDist(scale), candidates); // If no elements were not in the cover, build a compact tree: if(candidates.size() == 0) { LOG.warning("Scale not chosen appropriately? " + max + " " + scaleToDist(scale)); return bulkConstruct(cur, nextScale, parentDist, elems); } // We will have at least one other child, so build the parent: Node node = new Node(cur, max, parentDist); // Routing element now is a singleton: final boolean curSingleton = elems.size() == 0; if(!curSingleton) { // Add node for the routing object: node.children.add(bulkConstruct(cur, nextScale, 0, elems)); } final double fmax = scaleToDist(nextScale); // Build additional cover nodes: for(DoubleDBIDListIter it = candidates.iter(); it.valid();) { assert (it.getOffset() == 0); DBID t = DBIDUtil.deref(it); elems.clear(); // Recycle. collectByCover(it, candidates, fmax, elems); assert (DBIDUtil.equal(t, it)) : "First element in candidates must not change!"; if(elems.size() == 0) { // Singleton node.singletons.add(it.doubleValue(), it); } else { // Build a full child node: node.children.add(bulkConstruct(it, nextScale, it.doubleValue(), elems)); } candidates.removeSwap(0); } assert (candidates.size() == 0); // Routing object is not yet handled: if(curSingleton) { if(node.isLeaf()) { node.children = null; // First in leaf is enough. 
} else { node.singletons.add(parentDist, cur); // Add as regular singleton. } } // TODO: improve recycling of lists? return node; }
java
/**
 * Quadratic weighting: weights decay from 1.0 at distance zero down to 0.1
 * at the maximum distance, following 1 - 0.9 * (distance / max)^2.
 * A non-positive maximum yields a constant weight of 1.0; the standard
 * deviation parameter is ignored by this weight function.
 */
@Override public double getWeight(double distance, double max, double stddev) {
  if(max <= 0) {
    return 1.0;
  }
  final double reldist = distance / max;
  return 1.0 - 0.9 * reldist * reldist;
}
java
/**
 * Compute the maximum Euclidean distance between two minimum bounding
 * rectangles. Dimensions present in only one MBR are measured against the
 * implicit origin (coordinate 0) of the other.
 *
 * @param mbr1 first bounding box
 * @param mbr2 second bounding box
 * @return maximum distance between the boxes
 */
public double maxDist(SpatialComparable mbr1, SpatialComparable mbr2) {
  final int dim1 = mbr1.getDimensionality(), dim2 = mbr2.getDimensionality();
  final int mindim = Math.min(dim1, dim2);
  double agg = 0.;
  // Shared dimensions: largest corner-to-corner extent.
  for(int d = 0; d < mindim; d++) {
    final double delta = Math.max(mbr1.getMax(d) - mbr2.getMin(d), mbr2.getMax(d) - mbr1.getMin(d));
    agg += delta * delta;
  }
  // Dimensions only present in mbr1:
  for(int d = mindim; d < dim1; d++) {
    final double delta = Math.max(Math.abs(mbr1.getMin(d)), Math.abs(mbr1.getMax(d)));
    agg += delta * delta;
  }
  // Dimensions only present in mbr2:
  for(int d = mindim; d < dim2; d++) {
    final double delta = Math.max(Math.abs(mbr2.getMin(d)), Math.abs(mbr2.getMax(d)));
    agg += delta * delta;
  }
  return FastMath.sqrt(agg);
}
java
/**
 * Rebuild the parameter table from a parameter tracker: for each tracked
 * parameter, derive its display value (marking defaults), status bits
 * (optional / default / incomplete / invalid) and nesting depth, and append
 * a corresponding table node.
 *
 * NOTE(review): depth traversal is capped at 10 levels to guard against
 * cyclic parent chains.
 */
public synchronized void updateFromTrackParameters(TrackParameters track) { parameters.clear(); for(TrackedParameter p : track.getAllParameters()) { Parameter<?> option = p.getParameter(); String value = null; if(option.isDefined()) { if(option.tookDefaultValue()) { value = DynamicParameters.STRING_USE_DEFAULT + option.getDefaultValueAsString(); } else { value = option.getValueAsString(); } } if(value == null) { value = (option instanceof Flag) ? Flag.NOT_SET : ""; } int bits = 0; if(option.isOptional()) { bits |= BIT_OPTIONAL; } if(option.hasDefaultValue() && option.tookDefaultValue()) { bits |= BIT_DEFAULT_VALUE; } if(value.length() <= 0) { if((bits & BIT_DEFAULT_VALUE) == 0 && (bits & BIT_OPTIONAL) == 0) { bits |= BIT_INCOMPLETE; } } else { try { if(!option.tookDefaultValue() && !option.isValid(value)) { bits |= BIT_INVALID; } } catch(ParameterException e) { bits |= BIT_INVALID; } } int depth = 0; { Object pos = track.getParent(option); while(pos != null) { pos = track.getParent(pos); depth += 1; if(depth > 10) { break; } } } parameters.add(new Node(option, value, bits, depth)); } }
java
/**
 * Append a single parameter node with the given display value, status bits
 * and nesting depth to the parameter table.
 */
public synchronized void addParameter(Parameter<?> option, String value, int bits, int depth) { parameters.add(new Node(option, value, bits, depth)); }
java
/**
 * Collect all clustering results contained in a result: the result itself
 * when it is a clustering, otherwise all clusterings found in its hierarchy;
 * an empty list when there are none.
 */
public static List<Clustering<? extends Model>> getClusteringResults(Result r) { if(r instanceof Clustering<?>) { List<Clustering<?>> crs = new ArrayList<>(1); crs.add((Clustering<?>) r); return crs; } if(r instanceof HierarchicalResult) { return ResultUtil.filterResults(((HierarchicalResult) r).getHierarchy(), r, Clustering.class); } return Collections.emptyList(); }
java
private static double[] randomLatitudeLongitude(Random r) { // Make marginally more realistic looking data by non-uniformly sampling // latitude, since Earth is a sphere, and there is not much at the poles double lat = Math.pow(1. - r.nextDouble() * 2., 2) / 2. * 180; double lng = (.5 - r.nextDouble()) * 360.; return new double[] { lat, lng }; }
java
/**
 * Absolute Pearson correlation distance: 1 - |r(v1, v2)|, so both strongly
 * positively and strongly negatively correlated vectors are close (distance
 * near 0), uncorrelated vectors are far (distance near 1).
 */
@Override public double distance(NumberVector v1, NumberVector v2) { return 1 - Math.abs(PearsonCorrelation.coefficient(v1, v2)); }
java
/**
 * Invert the radical-inverse mapping: expand {@code current} into at most
 * {@code maxi} base-{@code base} digits (stopping early when the residual is
 * negligible), then read the digits in reverse to recover the integer index.
 */
private long inverse(double current) { // Represent to base b. short[] digits = new short[maxi]; for(int j = 0; j < maxi; j++) { current *= base; digits[j] = (short) current; current -= digits[j]; if(current <= 1e-10) { break; } } long inv = 0; for(int j = maxi - 1; j >= 0; j--) { inv = inv * base + digits[j]; } return inv; }
java
/**
 * Compute the radical inverse of an integer in the configured base: mirror
 * the base-b digits of i around the radix point, yielding a value in [0, 1).
 */
private double radicalInverse(long i) {
  final double rb = 1.0 / (double) base;
  double scale = rb;
  double result = 0.0;
  for(; i > 0; i /= base) {
    result += scale * (double) (i % base);
    scale *= rb;
  }
  return result;
}
java
/**
 * Advance to the next radical inverse value incrementally: most steps add
 * {@code invbase} (with a carry correction when the fast path would reach
 * 1.0); every MAXFAST steps the value is recomputed exactly from the index
 * to limit accumulated floating-point drift.
 */
private double nextRadicalInverse() { counter++; // Do at most MAXFAST approximate steps if(counter >= MAXFAST) { counter = 0; inverse += MAXFAST; current = radicalInverse(inverse); return current; } // Fast approximation: double nextInverse = current + invbase; if(nextInverse < ALMOST_ONE) { current = nextInverse; return current; } else { double digit1 = invbase, digit2 = invbase * invbase; while(current + digit2 >= ALMOST_ONE) { digit1 = digit2; digit2 *= invbase; } current += (digit1 - 1.0) + digit2; return current; } }
java
/**
 * Format the set of subspace dimensions as a bracketed, separator-joined
 * list of 1-based dimension numbers, e.g. "[1, 3, 4]".
 * (Method name misspelling "dimensons" kept for API compatibility.)
 */
public String dimensonsToString(String sep) { StringBuilder result = new StringBuilder(100).append('['); for(int dim = BitsUtil.nextSetBit(dimensions, 0); dim >= 0; dim = BitsUtil.nextSetBit(dimensions, dim + 1)) { result.append(dim + 1).append(sep); } if(result.length() > sep.length()) { // Un-append last separator result.setLength(result.length() - sep.length()); } return result.append(']').toString(); }
java
/**
 * Test whether this subspace is contained in the given subspace: every
 * dimension of this subspace must also be set in the other (checked via the
 * intersection size equalling this subspace's dimensionality).
 */
public boolean isSubspace(Subspace subspace) { return this.dimensionality <= subspace.dimensionality && // BitsUtil.intersectionSize(dimensions, subspace.dimensions) == dimensionality; }
java
/**
 * Compute the overlap volume of the two bounding boxes induced by a split
 * assignment: entries with their bit set in {@code assign} go into the first
 * box, the rest into the second. Throws when either side is empty.
 */
protected <E extends SpatialComparable, A> double computeOverlap(A entries, ArrayAdapter<E, A> getter, long[] assign) { ModifiableHyperBoundingBox mbr1 = null, mbr2 = null; for(int i = 0; i < getter.size(entries); i++) { E e = getter.get(entries, i); if(BitsUtil.get(assign, i)) { if(mbr1 == null) { mbr1 = new ModifiableHyperBoundingBox(e); } else { mbr1.extend(e); } } else { if(mbr2 == null) { mbr2 = new ModifiableHyperBoundingBox(e); } else { mbr2.extend(e); } } } if(mbr1 == null || mbr2 == null) { throw new AbortException("Invalid state in split: one of the sets is empty."); } return SpatialUtil.overlap(mbr1, mbr2); }
java
/**
 * Recursively kd-tree-style partition the sublist [start, end): select the
 * median on the current split dimension (via quickselect), then recurse on
 * both halves with the next dimension (cycling modulo numdim). When
 * {@code dims} is given, it maps the cycle position to an actual dimension.
 */
private void binarySplitSort(List<? extends SpatialComparable> objs, final int start, final int end, int depth, final int numdim, int[] dims, Sorter comp) { final int mid = start + ((end - start) >>> 1); // Make invariant comp.setDimension(dims != null ? dims[depth] : depth); QuickSelect.quickSelect(objs, comp, start, end, mid); // Recurse final int nextdim = (depth + 1) % numdim; if(start < mid - 1) { binarySplitSort(objs, start, mid, nextdim, numdim, dims, comp); } if(mid + 2 < end) { binarySplitSort(objs, mid + 1, end, nextdim, numdim, dims, comp); } }
java
/**
 * Create a new element in the SVG namespace for the given document.
 */
public static Element svgElement(Document document, String name) { return document.createElementNS(SVGConstants.SVG_NAMESPACE_URI, name); }
java
/**
 * Set the inline CSS style attribute of an element.
 */
public static void setStyle(Element el, String d) { el.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, d); }
java
/**
 * Add a CSS class to an element's class attribute, unless the element
 * already carries that class.
 */
public static void addCSSClass(Element e, String cssclass) {
  final String oldval = e.getAttribute(SVGConstants.SVG_CLASS_ATTRIBUTE);
  if(oldval == null || oldval.length() == 0) {
    setAtt(e, SVGConstants.SVG_CLASS_ATTRIBUTE, cssclass);
    return;
  }
  for(String existing : oldval.split(" ")) {
    if(existing.equals(cssclass)) {
      return; // Already present.
    }
  }
  setAtt(e, SVGConstants.SVG_CLASS_ATTRIBUTE, oldval + " " + cssclass);
}
java
/**
 * Remove a CSS class from an element's class attribute. When no class
 * remains, the attribute is removed entirely.
 *
 * @param e element to modify
 * @param cssclass class to remove (every occurrence)
 */
public static void removeCSSClass(Element e, String cssclass) {
  String oldval = e.getAttribute(SVGConstants.SVG_CLASS_ATTRIBUTE);
  if(oldval == null) {
    return;
  }
  // Rebuild the class list without the removed class.
  StringBuilder joined = new StringBuilder(oldval.length());
  for(String c : oldval.split(" ")) {
    if(!c.equals(cssclass)) {
      if(joined.length() > 0) {
        joined.append(' ');
      }
      joined.append(c);
    }
  }
  if(joined.length() == 0) {
    // Drop the attribute instead of leaving class="" behind. The previous
    // implementation only removed it for one or two classes; with three or
    // more duplicates of the removed class it left an empty attribute.
    e.removeAttribute(SVGConstants.SVG_CLASS_ATTRIBUTE);
  }
  else {
    e.setAttribute(SVGConstants.SVG_CLASS_ATTRIBUTE, joined.toString());
  }
}
java
/**
 * Create an SVG {@code <style>} element with CSS mime type.
 */
public static Element makeStyleElement(Document document) { Element style = SVGUtil.svgElement(document, SVGConstants.SVG_STYLE_TAG); style.setAttribute(SVGConstants.SVG_TYPE_ATTRIBUTE, SVGConstants.CSS_MIME_TYPE); return style; }
java
/**
 * Create an SVG {@code <rect>} element with the given position and size.
 */
public static Element svgRect(Document document, double x, double y, double w, double h) { Element rect = SVGUtil.svgElement(document, SVGConstants.SVG_RECT_TAG); SVGUtil.setAtt(rect, SVGConstants.SVG_X_ATTRIBUTE, x); SVGUtil.setAtt(rect, SVGConstants.SVG_Y_ATTRIBUTE, y); SVGUtil.setAtt(rect, SVGConstants.SVG_WIDTH_ATTRIBUTE, w); SVGUtil.setAtt(rect, SVGConstants.SVG_HEIGHT_ATTRIBUTE, h); return rect; }
java
/**
 * Create an SVG {@code <circle>} element with the given center and radius.
 */
public static Element svgCircle(Document document, double cx, double cy, double r) { Element circ = SVGUtil.svgElement(document, SVGConstants.SVG_CIRCLE_TAG); SVGUtil.setAtt(circ, SVGConstants.SVG_CX_ATTRIBUTE, cx); SVGUtil.setAtt(circ, SVGConstants.SVG_CY_ATTRIBUTE, cy); SVGUtil.setAtt(circ, SVGConstants.SVG_R_ATTRIBUTE, r); return circ; }
java
/**
 * Create an SVG {@code <line>} element between the two given points.
 */
public static Element svgLine(Document document, double x1, double y1, double x2, double y2) { Element line = SVGUtil.svgElement(document, SVGConstants.SVG_LINE_TAG); SVGUtil.setAtt(line, SVGConstants.SVG_X1_ATTRIBUTE, x1); SVGUtil.setAtt(line, SVGConstants.SVG_Y1_ATTRIBUTE, y1); SVGUtil.setAtt(line, SVGConstants.SVG_X2_ATTRIBUTE, x2); SVGUtil.setAtt(line, SVGConstants.SVG_Y2_ATTRIBUTE, y2); return line; }
java
/**
 * Parse a color string: first try the table of named SVG colors
 * (case-insensitive), then fall back to stylesheet-based color parsing.
 */
public static Color stringToColor(String str) { int icol = SVG_COLOR_NAMES.getInt(str.toLowerCase()); if(icol != NO_VALUE) { return new Color(icol, false); } return colorLookupStylesheet.stringToColor(str); }
java
/**
 * Format an RGB color integer as a CSS hex string {@code #rrggbb}
 * (lowercase hex digits; alpha/higher bits are ignored).
 */
public static String colorToString(int col) {
  final char[] hex = new char[7];
  hex[0] = '#';
  // Fill from the least significant nibble backwards.
  for(int i = 6; i >= 1; i--, col >>>= 4) {
    final int v = col & 0xF;
    hex[i] = (char) (v < 10 ? '0' + v : 'a' + (v - 10));
  }
  return new String(hex);
}
java
/**
 * Convert the client coordinates of a mouse event into the local coordinate
 * system of the given SVG element, by inverting the element's screen
 * transformation matrix. Returns null (with a warning) on any failure.
 */
public static SVGPoint elementCoordinatesFromEvent(Document doc, Element tag, Event evt) { try { DOMMouseEvent gnme = (DOMMouseEvent) evt; SVGMatrix mat = ((SVGLocatable) tag).getScreenCTM(); SVGMatrix imat = mat.inverse(); SVGPoint cPt = ((SVGDocument) doc).getRootElement().createSVGPoint(); cPt.setX(gnme.getClientX()); cPt.setY(gnme.getClientY()); return cPt.matrixTransform(imat); } catch(Exception e) { LoggingUtil.warning("Error getting coordinates from SVG event.", e); return null; } }
java
/**
 * Remove the last child node of an element; does nothing when the element
 * has no children.
 */
public static void removeLastChild(Element tag) {
  final Node lastChild = tag.getLastChild();
  if(lastChild == null) {
    return;
  }
  tag.removeChild(lastChild);
}
java
/**
 * Detach an element from its parent node; a null element or an element
 * without a parent is silently ignored.
 */
public static void removeFromParent(Element elem) {
  if(elem == null) {
    return;
  }
  final Node parent = elem.getParentNode();
  if(parent != null) {
    parent.removeChild(elem);
  }
}
java
/**
 * Build an annular circle segment (a "donut slice") as an SVG path: two
 * radial edges connected by outer and inner elliptical arcs, spanning
 * angleDelta radians starting at angleStart around (centerx, centery).
 * Angles are measured clockwise from the top (y points up via the minus
 * sign on cosine). An inner radius of 0 degenerates to a pie slice.
 */
public static Element svgCircleSegment(SVGPlot svgp, double centerx, double centery, double angleStart, double angleDelta, double innerRadius, double outerRadius) { final DoubleWrapper tmp = new DoubleWrapper(); // To return cosine double sin1st = FastMath.sinAndCos(angleStart, tmp); double cos1st = tmp.value; double sin2nd = FastMath.sinAndCos(angleStart + angleDelta, tmp); double cos2nd = tmp.value; // Note: tmp is modified! double inner1stx = centerx + (innerRadius * sin1st); double inner1sty = centery - (innerRadius * cos1st); double outer1stx = centerx + (outerRadius * sin1st); double outer1sty = centery - (outerRadius * cos1st); double inner2ndx = centerx + (innerRadius * sin2nd); double inner2ndy = centery - (innerRadius * cos2nd); double outer2ndx = centerx + (outerRadius * sin2nd); double outer2ndy = centery - (outerRadius * cos2nd); double largeArc = angleDelta >= Math.PI ? 1 : 0; SVGPath path = new SVGPath(inner1stx, inner1sty).lineTo(outer1stx, outer1sty) // .ellipticalArc(outerRadius, outerRadius, 0, largeArc, 1, outer2ndx, outer2ndy) // .lineTo(inner2ndx, inner2ndy); if(innerRadius > 0) { path.ellipticalArc(innerRadius, innerRadius, 0, largeArc, 0, inner1stx, inner1sty); } return path.makeElement(svgp); }
java
/**
 * Compute the core distance (distance to the minPts-th nearest neighbor)
 * for every object, storing the results in a double data store.
 */
protected WritableDoubleDataStore computeCoreDists(DBIDs ids, KNNQuery<O> knnQ, int minPts) { final Logging LOG = getLogger(); final WritableDoubleDataStore coredists = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB); FiniteProgress cprog = LOG.isVerbose() ? new FiniteProgress("Computing core sizes", ids.size(), LOG) : null; for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { coredists.put(iter, knnQ.getKNNForDBID(iter, minPts).getKNNDistance()); LOG.incrementProcessed(cprog); } LOG.ensureCompleted(cprog); return coredists; }
java
/**
 * Convert a minimum spanning tree (as a heap of weighted edges, each edge
 * packing two offsets into one long: high bits i, low 31 bits j) into the
 * pointer representation of a dendrogram: each object points to the cluster
 * lead it merged into (pi) with the merge distance (lambda). The invariant
 * maintained is that the largest DBID in each cluster is the cluster lead;
 * a final pass repairs chains broken by tied distances.
 */
protected void convertToPointerRepresentation(ArrayDBIDs ids, DoubleLongHeap heap, WritableDBIDDataStore pi, WritableDoubleDataStore lambda) { final Logging LOG = getLogger(); // Initialize parent array: for(DBIDArrayIter iter = ids.iter(); iter.valid(); iter.advance()) { pi.put(iter, iter); // Initialize } DBIDVar p = DBIDUtil.newVar(), q = DBIDUtil.newVar(), n = DBIDUtil.newVar(); FiniteProgress pprog = LOG.isVerbose() ? new FiniteProgress("Converting MST to pointer representation", heap.size(), LOG) : null; while(!heap.isEmpty()) { final double dist = heap.peekKey(); final long pair = heap.peekValue(); final int i = (int) (pair >>> 31), j = (int) (pair & 0x7FFFFFFFL); ids.assignVar(i, p); // Follow p to its parent. while(!DBIDUtil.equal(p, pi.assignVar(p, n))) { p.set(n); } // Follow q to its parent. ids.assignVar(j, q); while(!DBIDUtil.equal(q, pi.assignVar(q, n))) { q.set(n); } // By definition of the pointer representation, the largest element in // each cluster is the cluster lead. // The extraction methods currently rely on this! int c = DBIDUtil.compare(p, q); if(c < 0) { // p joins q: pi.put(p, q); lambda.put(p, dist); } else { assert (c != 0) : "This should never happen!"; // q joins p: pi.put(q, p); lambda.put(q, dist); } heap.poll(); LOG.incrementProcessed(pprog); } LOG.ensureCompleted(pprog); // Hack to ensure a valid pointer representation: // If distances are tied, the heap may return edges such that the n-way join // does not fulfill the property that the last element has the largest id. for(DBIDArrayIter iter = ids.iter(); iter.valid(); iter.advance()) { double d = lambda.doubleValue(iter); // Parent: pi.assignVar(iter, p); q.set(p); // Follow parent while tied. while(d >= lambda.doubleValue(q) && !DBIDUtil.equal(q, pi.assignVar(q, n))) { q.set(n); } if(!DBIDUtil.equal(p, q)) { if(LOG.isDebuggingFinest()) { LOG.finest("Correcting parent: " + p + " -> " + q); } pi.put(iter, q); } } }
java
/**
 * Replace the heap top (current k-th element) with a closer candidate and
 * update the cached k-distance: when the k-distance strictly improves, the
 * tie list is reset; otherwise the displaced id is recorded as a tie.
 */
private void updateHeap(final double distance, final int iid) { final double prevdist = kdist; final int previd = heap.peekValue(); heap.replaceTopElement(distance, iid); kdist = heap.peekKey(); // If the kdist improved, zap ties. if(kdist < prevdist) { numties = 0; } else { addToTies(previd); } }
java
private void addToTies(int id) { if(ties.length == numties) { ties = Arrays.copyOf(ties, (ties.length << 1) + 1); // grow. } ties[numties] = id; ++numties; }
java
/**
 * Count the free parameters of a mean-model clustering for information
 * criteria such as BIC: (m - 1) mixture weights, m * dim mean coordinates,
 * plus m further per-cluster parameters, where m is the number of clusters
 * and dim the data dimensionality.
 */
public static int numberOfFreeParameters(Relation<? extends NumberVector> relation, Clustering<? extends MeanModel> clustering) { // number of clusters int m = clustering.getAllClusters().size(); // num_ctrs // dimensionality of data points int dim = RelationUtil.dimensionality(relation); // num_dims // number of free parameters return (m - 1) + m * dim + m; }
java
/**
 * Write the clustering as one line of integer cluster numbers, one per object
 * in DBID order, followed by a label.
 *
 * Requires the object ids to form a continuous {@code DBIDRange}, so each
 * object maps to a unique output column; otherwise a warning is logged and
 * nothing is written.
 *
 * @param writer Output stream to write to
 * @param hierarchy Result hierarchy, used to locate the relation / database
 * @param c Clustering to dump
 */
protected void dumpClusteringOutput(PrintStream writer, ResultHierarchy hierarchy, Clustering<?> c) {
  DBIDRange ids = null;
  // Preferred: the DBID range of a parent relation of the clustering.
  for(It<Relation<?>> iter = hierarchy.iterParents(c).filter(Relation.class); iter.valid(); iter.advance()) {
    DBIDs pids = iter.get().getDBIDs();
    if(pids instanceof DBIDRange) {
      ids = (DBIDRange) pids;
      break;
    }
    LOG.warning("Parent result " + iter.get().getLongName() + " has DBID type " + pids.getClass());
  }
  // Fallback: try to locate a database.
  if(ids == null) {
    for(It<Database> iter = hierarchy.iterAll().filter(Database.class); iter.valid(); iter.advance()) {
      DBIDs pids = iter.get().getRelation(TypeUtil.ANY).getDBIDs();
      if(pids instanceof DBIDRange) {
        ids = (DBIDRange) pids;
        break;
      }
      LOG.warning("Parent result " + iter.get().getLongName() + " has DBID type " + pids.getClass());
    }
  }
  if(ids == null) {
    LOG.warning("Cannot dump cluster assignment, as I do not have a well-defined DBIDRange to use for a unique column assignment. DBIDs must be a continuous range.");
    return;
  }
  // Build the object -> cluster number map.
  WritableIntegerDataStore map = DataStoreUtil.makeIntegerStorage(ids, DataStoreFactory.HINT_TEMP);
  int cnum = 0;
  for(Cluster<?> clu : c.getAllClusters()) {
    for(DBIDIter iter = clu.getIDs().iter(); iter.valid(); iter.advance()) {
      map.putInt(iter, cnum);
    }
    ++cnum;
  }
  // One space-separated cluster number per object, in range order.
  for(DBIDArrayIter iter = ids.iter(); iter.valid(); iter.advance()) {
    if(iter.getOffset() > 0) {
      writer.append(' ');
    }
    writer.append(Integer.toString(map.intValue(iter)));
  }
  // Trailing label: a forced label (empty string = omit), else the clustering name.
  if(forceLabel != null) {
    if(forceLabel.length() > 0) {
      writer.append(' ').append(forceLabel);
    }
  }
  else {
    writer.append(' ').append(c.getLongName());
  }
  writer.append('\n');
}
java
/**
 * Materialize the aggregated mean as a vector, using the factory of the
 * given relation's vector type.
 *
 * @param relation Relation to obtain the vector factory from
 * @param <F> Vector type
 * @return the mean as a newly created number vector
 */
public <F extends NumberVector> F getMeanVector(Relation<? extends F> relation) {
  return RelationUtil.getNumberVectorFactory(relation).newNumberVector(mean);
}
java
/**
 * Reset the aggregator to an empty state, reusing already allocated buffers
 * where possible.
 */
public void reset() {
  Arrays.fill(mean, 0.);
  Arrays.fill(nmea, 0.);
  if(elements == null) {
    // Lazily allocate the scatter matrix on first reset.
    elements = new double[mean.length][mean.length];
  }
  else {
    for(double[] row : elements) {
      Arrays.fill(row, 0.);
    }
  }
  wsum = 0.;
}
java
/**
 * Compute mean and scatter of a relation in two passes: first the mean, then
 * the centered products. The two-pass form is numerically more stable than a
 * single-pass sum-of-products approach.
 *
 * Only the upper triangle is accumulated; symmetry is restored at the end.
 *
 * @param relation Relation of number vectors to process
 * @return covariance matrix aggregator (empty aggregator for an empty relation)
 */
public static CovarianceMatrix make(Relation<? extends NumberVector> relation) {
  int dim = RelationUtil.dimensionality(relation);
  CovarianceMatrix c = new CovarianceMatrix(dim);
  double[] mean = c.mean;
  int count = 0;
  // Compute mean first:
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    NumberVector vec = relation.get(iditer);
    for(int i = 0; i < dim; i++) {
      mean[i] += vec.doubleValue(i);
    }
    count++;
  }
  if(count == 0) {
    return c;
  }
  // Normalize mean
  for(int i = 0; i < dim; i++) {
    mean[i] /= count;
  }
  // Compute covariances second
  // Two-pass approach is numerically okay and fast, when possible.
  double[] tmp = c.nmea; // Scratch space
  double[][] elems = c.elements;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    NumberVector vec = relation.get(iditer);
    // Center the vector, then accumulate the outer product (upper triangle only):
    for(int i = 0; i < dim; i++) {
      tmp[i] = vec.doubleValue(i) - mean[i];
    }
    for(int i = 0; i < dim; i++) {
      for(int j = i; j < dim; j++) {
        elems[i][j] += tmp[i] * tmp[j];
      }
    }
  }
  // Restore symmetry.
  for(int i = 0; i < dim; i++) {
    for(int j = i + 1; j < dim; j++) {
      elems[j][i] = elems[i][j];
    }
  }
  c.wsum = count;
  return c;
}
java
/**
 * Render the progress as "task:  processed [ pp%] (time remaining)" into the
 * given buffer.
 *
 * Fix: the original called {@code getProcessed()} several times (for the
 * padding width, the printed value, the percentage, and the ETA). Progress
 * objects may be updated concurrently, so the rendered fields could disagree
 * with each other. We now snapshot the counter exactly once so all derived
 * values are mutually consistent.
 *
 * @param buf Buffer to append to
 * @return the same buffer, for chaining
 */
@Override
public StringBuilder appendToBuffer(StringBuilder buf) {
  // Snapshot once; see note above on concurrent updates.
  final int processed = getProcessed();
  String processedString = Integer.toString(processed);
  int percentage = (int) (processed * 100.0 / total);
  buf.append(getTask());
  buf.append(": ");
  // Right-align the counter to the width of the total.
  for(int i = 0; i < totalLength - processedString.length(); i++) {
    buf.append(' ');
  }
  buf.append(processedString);
  buf.append(" [");
  // Pad the percentage to a fixed width of 3.
  if(percentage < 100) {
    buf.append(' ');
  }
  if(percentage < 10) {
    buf.append(' ');
  }
  buf.append(percentage);
  buf.append("%]");
  // Estimated time remaining, based on the observed processing rate.
  if(ratems > 0. && processed < total) {
    buf.append(' ');
    int secs = (int) Math.round((total - processed) / ratems / 1000. + .2);
    if(secs > 300) {
      buf.append(secs / 60);
      buf.append(" min remaining");
    }
    else {
      buf.append(secs);
      buf.append(" sec remaining");
    }
  }
  return buf;
}
java
/**
 * Verify that the progress completed; if not, log a warning (with stack
 * trace), force completion, and publish the final state.
 *
 * @param logger Logger to report to
 */
public void ensureCompleted(Logging logger) {
  if(isComplete()) {
    return;
  }
  logger.warning("Progress had not completed automatically as expected: " + getProcessed() + "/" + total, new Throwable());
  setProcessed(getTotal());
  logger.progress(this);
}
java
/**
 * Density-based clustering step: grow clusters by transitively absorbing the
 * radius neighborhoods of their members, merging clusters that come into
 * contact.
 *
 * @param ids Objects to cluster
 * @param rnnQuery Range query used for neighborhood lookups
 * @param radii Per-object query radius
 * @param labels Output: cluster membership per object (null = unassigned)
 */
private void clusterData(DBIDs ids, RangeQuery<O> rnnQuery, WritableDoubleDataStore radii, WritableDataStore<ModifiableDBIDs> labels) {
  FiniteProgress clustProg = LOG.isVerbose() ? new FiniteProgress("Density-Based Clustering", ids.size(), LOG) : null;
  // Iterate over all objects
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    if(labels.get(iter) != null) {
      continue; // Already assigned to a cluster.
    }
    // Start a new cluster seeded with this object.
    ModifiableDBIDs newCluster = DBIDUtil.newArray();
    newCluster.add(iter);
    labels.put(iter, newCluster);
    LOG.incrementProcessed(clustProg);
    // container of the points to be added and their radii neighbors to the
    // cluster
    ModifiableDBIDs nChain = DBIDUtil.newArray();
    nChain.add(iter);
    // iterate over nChain; note that it grows while we iterate (expansion).
    for(DBIDIter toGetNeighbors = nChain.iter(); toGetNeighbors.valid(); toGetNeighbors.advance()) {
      double range = radii.doubleValue(toGetNeighbors);
      DoubleDBIDList nNeighbors = rnnQuery.getRangeForDBID(toGetNeighbors, range);
      for(DoubleDBIDListIter iter2 = nNeighbors.iter(); iter2.valid(); iter2.advance()) {
        if(DBIDUtil.equal(toGetNeighbors, iter2)) {
          continue; // Skip the query point itself.
        }
        if(labels.get(iter2) == null) {
          // Unassigned neighbor: absorb it and queue it for expansion.
          newCluster.add(iter2);
          labels.put(iter2, newCluster);
          nChain.add(iter2);
          LOG.incrementProcessed(clustProg);
        }
        else if(labels.get(iter2) != newCluster) {
          // Neighbor already belongs to another cluster: merge that cluster
          // into the current one and relabel its members.
          ModifiableDBIDs toBeDeleted = labels.get(iter2);
          newCluster.addDBIDs(toBeDeleted);
          for(DBIDIter iter3 = toBeDeleted.iter(); iter3.valid(); iter3.advance()) {
            labels.put(iter3, newCluster);
          }
          toBeDeleted.clear();
        }
      }
    }
  }
  LOG.ensureCompleted(clustProg);
}
java
private int updateSizes(DBIDs ids, WritableDataStore<ModifiableDBIDs> labels, WritableIntegerDataStore newSizes) { // to count the unclustered all over int countUnmerged = 0; for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { // checking the point's new cluster size after the clustering step int newClusterSize = labels.get(iter).size(); newSizes.putInt(iter, newClusterSize); // the point is alone in the cluster --> not merged with other points if(newClusterSize == 1) { countUnmerged++; } } return countUnmerged; }
java
/**
 * Run the SLINK algorithm (single-link hierarchical clustering in pointer
 * representation). Also serves as the driver for CLINK, which overrides the
 * {@code process()} hook and the logger.
 *
 * @param database Database (used for the fallback distance query)
 * @param relation Data relation to cluster
 * @return pointer hierarchy: parent pointers (pi) and merge distances (lambda)
 */
public PointerHierarchyRepresentationResult run(Database database, Relation<O> relation) {
  DBIDs ids = relation.getDBIDs();
  WritableDBIDDataStore pi = DataStoreUtil.makeDBIDStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC);
  WritableDoubleDataStore lambda = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC, Double.POSITIVE_INFINITY);
  // Temporary storage for m.
  WritableDoubleDataStore m = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  final Logging log = getLogger(); // To allow CLINK logger override
  FiniteProgress progress = log.isVerbose() ? new FiniteProgress("Running SLINK", ids.size(), log) : null;
  ArrayDBIDs aids = DBIDUtil.ensureArray(ids);
  // First element is trivial/special:
  DBIDArrayIter id = aids.iter(), it = aids.iter();
  // Step 1: initialize
  for(; id.valid(); id.advance()) {
    // P(n+1) = n+1:
    pi.put(id, id);
    // L(n+1) = infinity already.
  }
  // First element is finished already (start at seek(1) below!)
  log.incrementProcessed(progress);
  // Optimized branch: skip the distance-query indirection when the distance
  // function can be evaluated directly on the objects.
  if(getDistanceFunction() instanceof PrimitiveDistanceFunction) {
    PrimitiveDistanceFunction<? super O> distf = (PrimitiveDistanceFunction<? super O>) getDistanceFunction();
    for(id.seek(1); id.valid(); id.advance()) {
      step2primitive(id, it, id.getOffset(), relation, distf, m);
      process(id, aids, it, id.getOffset(), pi, lambda, m); // SLINK or CLINK
      log.incrementProcessed(progress);
    }
  }
  else {
    // Fallback branch
    DistanceQuery<O> distQ = database.getDistanceQuery(relation, getDistanceFunction());
    for(id.seek(1); id.valid(); id.advance()) {
      step2(id, it, id.getOffset(), distQ, m);
      process(id, aids, it, id.getOffset(), pi, lambda, m); // SLINK or CLINK
      log.incrementProcessed(progress);
    }
  }
  log.ensureCompleted(progress);
  // We don't need m anymore.
  m.destroy();
  m = null;
  return new PointerHierarchyRepresentationResult(ids, pi, lambda, getDistanceFunction().isSquared());
}
java
/**
 * Perform the SLINK main-loop steps 3 and 4 for the current object.
 * Overridden by CLINK to implement complete-linkage instead.
 *
 * @param id Current object
 * @param ids All objects, in array representation
 * @param it Reusable array iterator over {@code ids}
 * @param n Number of already-processed objects (offset of {@code id})
 * @param pi Parent pointer storage
 * @param lambda Merge distance storage
 * @param m Temporary distance storage, filled in step 2
 */
protected void process(DBIDRef id, ArrayDBIDs ids, DBIDArrayIter it, int n, WritableDBIDDataStore pi, WritableDoubleDataStore lambda, WritableDoubleDataStore m) {
  slinkstep3(id, it, n, pi, lambda, m);
  slinkstep4(id, it, n, pi, lambda);
}
java
/**
 * Append an object to the cluster order.
 *
 * @param id Object to append
 * @param reach Reachability distance of the object
 * @param pre Predecessor; {@code null} or an unset variable for starting objects
 */
public void add(DBIDRef id, double reach, DBIDRef pre) {
  ids.add(id);
  reachability.putDouble(id, reach);
  // Starting objects carry no predecessor information:
  final boolean noPredecessor = pre == null || (pre instanceof DBIDVar && !((DBIDVar) pre).isSet());
  if(!noPredecessor) {
    predecessor.putDBID(id, pre);
  }
}
java
/**
 * Project the stored ordering onto a subset: retain the stored order, but
 * keep only objects contained in the given set.
 *
 * @param ids Subset of objects to retain
 * @return ids in cluster order
 */
@Override
public ArrayModifiableDBIDs order(DBIDs ids) {
  final ArrayModifiableDBIDs result = DBIDUtil.newArray(ids.size());
  for(DBIDIter iter = this.ids.iter(); iter.valid(); iter.advance()) {
    if(!ids.contains(iter)) {
      continue;
    }
    result.add(iter);
  }
  return result;
}
java
/**
 * Look up the predecessor of an object in the cluster order.
 *
 * @param id Object to look up
 * @param out Output variable; left unset when no predecessor data exists
 */
public void getPredecessor(DBIDRef id, DBIDVar out) {
  if(predecessor != null) {
    predecessor.assignVar(id, out);
    return;
  }
  // No predecessor storage at all (e.g. ordering without predecessors).
  out.unset();
}
java
/**
 * Run the Connectivity-based Outlier Factor (COF) algorithm.
 *
 * Three steps: materialize kNN neighborhoods, compute the average chaining
 * distance of every object, then score each object by the ratio of its
 * chaining distance to that of its neighbors.
 *
 * @param database Database to query
 * @param relation Relation of objects to process
 * @return outlier result with COF scores (baseline 1, higher = more outlying)
 */
public OutlierResult run(Database database, Relation<O> relation) {
  StepProgress stepprog = LOG.isVerbose() ? new StepProgress("COF", 3) : null;
  DistanceQuery<O> dq = database.getDistanceQuery(relation, getDistanceFunction());
  LOG.beginStep(stepprog, 1, "Materializing COF neighborhoods.");
  KNNQuery<O> knnq = DatabaseUtil.precomputedKNNQuery(database, relation, dq, k);
  DBIDs ids = relation.getDBIDs();
  LOG.beginStep(stepprog, 2, "Computing Average Chaining Distances.");
  WritableDoubleDataStore acds = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  computeAverageChainingDistances(knnq, dq, ids, acds);
  // compute COF_SCORE of each db object
  LOG.beginStep(stepprog, 3, "Computing Connectivity-based Outlier Factors.");
  WritableDoubleDataStore cofs = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  // track the maximum value for normalization.
  DoubleMinMax cofminmax = new DoubleMinMax();
  computeCOFScores(knnq, ids, acds, cofs, cofminmax);
  LOG.setCompleted(stepprog);
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Connectivity-Based Outlier Factor", "cof-outlier", cofs, ids);
  OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta(cofminmax.getMin(), cofminmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 1.0);
  return new OutlierResult(scoreMeta, scoreResult);
}
java
private void computeCOFScores(KNNQuery<O> knnq, DBIDs ids, DoubleDataStore acds, WritableDoubleDataStore cofs, DoubleMinMax cofminmax) { FiniteProgress progressCOFs = LOG.isVerbose() ? new FiniteProgress("COF for objects", ids.size(), LOG) : null; for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { final KNNList neighbors = knnq.getKNNForDBID(iter, k); // Aggregate the average chaining distances of all neighbors: double sum = 0.; for(DBIDIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) { // skip the point itself if(DBIDUtil.equal(neighbor, iter)) { continue; } sum += acds.doubleValue(neighbor); } final double cof = (sum > 0.) ? (acds.doubleValue(iter) * k / sum) : (acds.doubleValue(iter) > 0. ? Double.POSITIVE_INFINITY : 1.); cofs.putDouble(iter, cof); // update minimum and maximum cofminmax.put(cof); LOG.incrementProcessed(progressCOFs); } LOG.ensureCompleted(progressCOFs); }
java
/**
 * Schedule a runnable for execution. Without an attached synchronizer the
 * queue is flushed immediately on the calling thread; otherwise the
 * synchronizer is activated to process the queue later.
 *
 * NOTE(review): the queue.add happens outside the synchronized block, so a
 * synchronizer attached concurrently may race with the direct runQueue()
 * path — presumably benign since both paths drain the same queue; confirm.
 *
 * @param r Runnable to execute
 */
public void invokeLater(Runnable r) {
  queue.add(r);
  synchronized(this) {
    if(synchronizer == null) {
      runQueue();
    }
    else {
      synchronizer.activate();
    }
  }
}
java
/**
 * Drain the update queue, executing every pending runnable. Exceptions from
 * individual runnables are logged and do not abort processing of the rest of
 * the queue.
 */
public void runQueue() {
  synchronized(sync) {
    while(!queue.isEmpty()) {
      Runnable r = queue.poll();
      if(r != null) {
        try {
          r.run();
        }
        catch(Exception e) {
          // Alternatively, we could allow the specification of exception
          // handlers for each runnable in the API. For now we'll just log.
          // TODO: handle exceptions here better!
          LoggingUtil.exception(e);
        }
      }
      else {
        // poll() returned null despite isEmpty() being false — possible if
        // another thread drained the queue concurrently.
        LoggingUtil.warning("Tried to run a 'null' Object.");
      }
    }
  }
}
java
public synchronized void synchronizeWith(UpdateSynchronizer newsync) { // LoggingUtil.warning("Synchronizing: " + sync + " " + newsync, new // Throwable()); if(synchronizer == newsync) { LoggingUtil.warning("Double-synced to the same plot!", new Throwable()); return; } if(synchronizer != null) { LoggingUtil.warning("Attempting to synchronize to more than one synchronizer."); return; } synchronizer = newsync; newsync.addUpdateRunner(this); }
java
public synchronized void unsynchronizeWith(UpdateSynchronizer oldsync) { if(synchronizer == null) { LoggingUtil.warning("Warning: was not synchronized."); return; } if(synchronizer != oldsync) { LoggingUtil.warning("Warning: was synchronized differently!"); return; } // LoggingUtil.warning("Unsynchronizing: " + sync + " " + oldsync); synchronizer = null; runQueue(); }
java
/**
 * Heuristic initial value for the beta (precision) parameter, derived from
 * the sum of squared distances; used as the starting point of the bisection
 * search for the desired perplexity.
 *
 * @param dist_i Distances to the other points (infinite entries are ignored)
 * @param perplexity Target perplexity
 * @return initial beta estimate; 1 if the distance sum is degenerate
 */
protected static double estimateInitialBeta(double[] dist_i, double perplexity) {
  double ssq = 0.;
  for(double d : dist_i) {
    final double sq = d * d;
    // Skip infinite (and NaN) squared distances:
    if(sq < Double.POSITIVE_INFINITY) {
      ssq += sq;
    }
  }
  // Degenerate (empty, all-infinite, or overflowed) sums fall back to 1:
  return (ssq > 0 && ssq < Double.POSITIVE_INFINITY) ? .5 / ssq * perplexity * (dist_i.length - 1.) : 1.;
}
java
/**
 * Collect all relations reachable from a result.
 *
 * @param r Result to inspect
 * @return list of relations; possibly empty, never {@code null}
 */
public static List<Relation<?>> getRelations(Result r) {
  if(r instanceof Relation<?>) {
    List<Relation<?>> found = new ArrayList<>(1);
    found.add((Relation<?>) r);
    return found;
  }
  if(!(r instanceof HierarchicalResult)) {
    return Collections.emptyList();
  }
  return filterResults(((HierarchicalResult) r).getHierarchy(), r, Relation.class);
}
java
/**
 * Collect all ordering results reachable from a result.
 *
 * @param r Result to inspect
 * @return list of ordering results; possibly empty, never {@code null}
 */
public static List<OrderingResult> getOrderingResults(Result r) {
  if(r instanceof OrderingResult) {
    List<OrderingResult> found = new ArrayList<>(1);
    found.add((OrderingResult) r);
    return found;
  }
  if(!(r instanceof HierarchicalResult)) {
    return Collections.emptyList();
  }
  return filterResults(((HierarchicalResult) r).getHierarchy(), r, OrderingResult.class);
}
java
/**
 * Collect all collection results reachable from a result.
 *
 * @param r Result to inspect
 * @return list of collection results; possibly empty, never {@code null}
 */
public static List<CollectionResult<?>> getCollectionResults(Result r) {
  if(r instanceof CollectionResult<?>) {
    List<CollectionResult<?>> found = new ArrayList<>(1);
    found.add((CollectionResult<?>) r);
    return found;
  }
  if(!(r instanceof HierarchicalResult)) {
    return Collections.emptyList();
  }
  return filterResults(((HierarchicalResult) r).getHierarchy(), r, CollectionResult.class);
}
java
/**
 * Collect all iterable results reachable from a result.
 *
 * @param r Result to inspect
 * @return list of iterable results; possibly empty, never {@code null}
 */
public static List<IterableResult<?>> getIterableResults(Result r) {
  if(r instanceof IterableResult<?>) {
    List<IterableResult<?>> found = new ArrayList<>(1);
    found.add((IterableResult<?>) r);
    return found;
  }
  if(!(r instanceof HierarchicalResult)) {
    return Collections.emptyList();
  }
  return filterResults(((HierarchicalResult) r).getHierarchy(), r, IterableResult.class);
}
java
/**
 * Collect all results of a given restriction class in the subtree below (and
 * including) the given result.
 *
 * @param hier Result hierarchy to search
 * @param r Subtree root
 * @param restrictionClass Class filter to apply
 * @param <C> Result type
 * @return all matching results
 */
public static <C extends Result> ArrayList<C> filterResults(ResultHierarchy hier, Result r, Class<? super C> restrictionClass) {
  ArrayList<C> matches = new ArrayList<>();
  for(It<C> it = hier.iterDescendantsSelf(r).filter(restrictionClass); it.valid(); it.advance()) {
    matches.add(it.get());
  }
  return matches;
}
java
/**
 * Attach a result as a child of a hierarchical result, in the parent's own
 * hierarchy.
 *
 * @param parent Parent result
 * @param child Child result to attach
 */
public static void addChildResult(HierarchicalResult parent, Result child) {
  parent.getHierarchy().add(parent, child);
}
java
/**
 * Find the first database reachable from a base result.
 *
 * @param hier Result hierarchy to search
 * @param baseResult Search starting point
 * @return the first database found, or {@code null} if none exists
 */
public static Database findDatabase(ResultHierarchy hier, Result baseResult) {
  List<Database> found = filterResults(hier, baseResult, Database.class);
  if(found.isEmpty()) {
    return null;
  }
  return found.get(0);
}
java
/**
 * Remove a result and its entire subtree from the result hierarchy.
 *
 * @param hierarchy Result hierarchy to modify
 * @param child Result to remove, together with all of its descendants
 */
public static void removeRecursive(ResultHierarchy hierarchy, Result child) {
  // Detach the result from every parent first:
  for(It<Result> it = hierarchy.iterParents(child); it.valid(); it.advance()) {
    hierarchy.remove(it.get(), child);
  }
  // Then descend into (and remove) all children:
  for(It<Result> it = hierarchy.iterChildren(child); it.valid(); it.advance()) {
    removeRecursive(hierarchy, it.get());
  }
}
java
/**
 * Extract the top eigenvectors and eigenvalues by repeated power iteration
 * with matrix deflation.
 *
 * @param imat Input matrix; modified in place by the deflation steps!
 * @param evs Output storage for the eigenvectors (one row per component)
 * @param lambda Output storage for the eigenvalues
 */
protected void findEigenVectors(double[][] imat, double[][] evs, double[] lambda) {
  final int size = imat.length;
  Random rnd = random.getSingleThreadedRandom();
  double[] tmp = new double[size]; // Scratch vector for matrix-vector products.
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Learning projections", tdim, LOG) : null;
  for(int d = 0; d < tdim;) {
    final double[] cur = evs[d];
    randomInitialization(cur, rnd);
    double l = multiply(imat, cur, tmp);
    // Power iteration, with early termination on convergence:
    for(int iter = 0; iter < 100; iter++) {
      // This will scale "tmp" to unit length, and copy it to cur:
      double delta = updateEigenvector(tmp, cur, l);
      if(delta < 1e-10) {
        break;
      }
      l = multiply(imat, cur, tmp);
    }
    lambda[d++] = l = estimateEigenvalue(imat, cur);
    LOG.incrementProcessed(prog);
    if(d == tdim) {
      break; // Last component found: no deflation needed.
    }
    // Update matrix: deflate, i.e. remove the found component.
    updateMatrix(imat, cur, l);
  }
  LOG.ensureCompleted(prog);
}
java
protected void randomInitialization(double[] out, Random rnd) { double l2 = 0.; while(!(l2 > 0)) { for(int d = 0; d < out.length; d++) { final double val = rnd.nextDouble(); out[d] = val; l2 += val * val; } } // Standardize: final double s = 1. / FastMath.sqrt(l2); for(int d = 0; d < out.length; d++) { out[d] *= s; } }
java
protected double updateEigenvector(double[] in, double[] out, double l) { double s = 1. / (l > 0. ? l : l < 0. ? -l : 1.); s = (in[0] > 0.) ? s : -s; // Reduce flipping vectors double diff = 0.; for(int d = 0; d < in.length; d++) { in[d] *= s; // Scale to unit length // Compute change from previous iteration: double delta = in[d] - out[d]; diff += delta * delta; out[d] = in[d]; // Update output storage } return diff; }
java
/**
 * Deflation step: subtract the rank-one component
 * {@code eval * evec * evec^T} from the matrix, in place.
 *
 * @param mat Matrix to modify (square, size taken from its row count)
 * @param evec Eigenvector of the component to remove
 * @param eval Eigenvalue of the component to remove
 */
protected void updateMatrix(double[][] mat, final double[] evec, double eval) {
  final int n = mat.length;
  for(int i = 0; i < n; i++) {
    final double[] row = mat[i];
    final double fac = eval * evec[i]; // Loop-invariant row factor.
    for(int j = 0; j < n; j++) {
      row[j] -= fac * evec[j];
    }
  }
}
java