code
stringlengths
73
34.1k
label
stringclasses
1 value
/**
 * Length of a line, not counting any trailing {@code \n} or {@code \r}
 * characters.
 *
 * @param line line to measure
 * @return length without trailing linefeed characters
 */
public static int lengthWithoutLinefeed(CharSequence line) {
  int end = line.length();
  while(end > 0) {
    final char c = line.charAt(end - 1);
    if(c != '\n' && c != '\r') {
      return end;
    }
    --end;
  }
  return end;
}
java
/**
 * Log every collected parameterization error as a warning (attaching the
 * exception as cause when debug logging is enabled), then clear the error
 * list. Synchronized, as errors may be reported from multiple threads.
 */
public synchronized void logAndClearReportedErrors() { for(ParameterException e : getErrors()) { if(LOG.isDebugging()) { LOG.warning(e.getMessage(), e); } else { LOG.warning(e.getMessage()); } } clearErrors(); }
java
public void addParameter(Object owner, Parameter<?> param, TrackParameters track) { this.setBorder(new SoftBevelBorder(SoftBevelBorder.LOWERED)); ParameterConfigurator cfg = null; { // Find Object cur = owner; while(cur != null) { cfg = childconfig.get(cur); if(cfg != null) { break; } cur = track.getParent(cur); } } if(cfg != null) { cfg.addParameter(owner, param, track); return; } else { cfg = makeConfigurator(param); cfg.addChangeListener(this); children.add(cfg); } }
java
/**
 * Compute the full pairwise distance matrix of all entries in an M-tree
 * node. Only the upper triangle is computed and mirrored to the lower
 * triangle; the diagonal remains 0.
 *
 * @param tree tree providing the distance function
 * @param node node whose entries are compared
 * @return symmetric n-by-n distance matrix
 */
protected static <E extends MTreeEntry, N extends AbstractMTreeNode<?, N, E>> double[][] computeDistanceMatrix(AbstractMTree<?, N, E, ?> tree, N node) { final int n = node.getNumEntries(); double[][] distancematrix = new double[n][n]; // Build distance matrix for(int i = 0; i < n; i++) { E ei = node.getEntry(i); double[] row_i = distancematrix[i]; for(int j = i + 1; j < n; j++) { row_i[j] = distancematrix[j][i] = tree.distance(ei, node.getEntry(j)); } } return distancematrix; }
java
/**
 * Sparsity coefficient: standardized deviation of the observed region size
 * from its expectation dbsize * (1/phi)^k — a binomial z-score. Negative
 * values indicate unexpectedly sparse regions.
 *
 * @param setsize observed number of points in the region
 * @param dbsize total data set size
 * @param k number of restricted dimensions
 * @param phi number of partitions per dimension (assumed from the formula — confirm with caller)
 * @return sparsity coefficient c
 */
protected static double sparsity(final int setsize, final int dbsize, final int k, final double phi) { // calculate sparsity c final double fK = MathUtil.powi(1. / phi, k); return (setsize - (dbsize * fK)) / FastMath.sqrt(dbsize * fK * (1 - fK)); }
java
protected DBIDs computeSubspace(int[] subspace, ArrayList<ArrayList<DBIDs>> ranges) { HashSetModifiableDBIDs ids = DBIDUtil.newHashSet(ranges.get(subspace[0]).get(subspace[1])); // intersect all selected dimensions for(int i = 2, e = subspace.length - 1; i < e; i += 2) { DBIDs current = ranges.get(subspace[i]).get(subspace[i + 1] - GENE_OFFSET); ids.retainAll(current); if(ids.isEmpty()) { break; } } return ids; }
java
protected DBIDs computeSubspaceForGene(short[] gene, ArrayList<ArrayList<DBIDs>> ranges) { HashSetModifiableDBIDs m = null; // intersect all present restrictions for(int i = 0; i < gene.length; i++) { if(gene[i] != DONT_CARE) { DBIDs current = ranges.get(i).get(gene[i] - GENE_OFFSET); if(m == null) { m = DBIDUtil.newHashSet(current); } else { m.retainAll(current); } } } assert (m != null) : "All genes set to '*', should not happen!"; return m; }
java
/**
 * Update the set of steep down areas (OPTICS-Xi extraction): remove areas
 * whose scaled maximum no longer exceeds mib, and raise the mib value of
 * all surviving areas to at least mib. Uses Iterator.remove for safe
 * in-place removal.
 *
 * @param mib maximum in-between reachability seen so far
 * @param sdaset steep down areas, modified in place
 * @param ixi scaling factor (presumably 1 - xi — confirm with caller)
 */
private static void updateFilterSDASet(double mib, List<SteepDownArea> sdaset, double ixi) { Iterator<SteepDownArea> iter = sdaset.iterator(); while(iter.hasNext()) { SteepDownArea sda = iter.next(); if(sda.getMaximum() * ixi <= mib) { iter.remove(); } else { // Update if(mib > sda.getMib()) { sda.setMib(mib); } } } }
java
private static void sort5(double[] keys, int[] vals, final int m1, final int m2, final int m3, final int m4, final int m5) { if(keys[m1] > keys[m2]) { swap(keys, vals, m1, m2); } if(keys[m3] > keys[m4]) { swap(keys, vals, m3, m4); } // Merge 1+2 and 3+4 if(keys[m2] > keys[m4]) { swap(keys, vals, m2, m4); } if(keys[m1] > keys[m3]) { swap(keys, vals, m1, m3); } if(keys[m2] > keys[m3]) { swap(keys, vals, m2, m3); } // Insertion sort m5: if(keys[m4] > keys[m5]) { swap(keys, vals, m4, m5); if(keys[m3] > keys[m4]) { swap(keys, vals, m3, m4); if(keys[m2] > keys[m3]) { swap(keys, vals, m2, m3); if(keys[m1] > keys[m1]) { swap(keys, vals, m1, m2); } } } } }
java
/**
 * Swap the entries at two positions in both parallel arrays.
 *
 * @param keys key array
 * @param vals value array, kept parallel to the keys
 * @param j first position
 * @param i second position
 */
private static void swap(double[] keys, int[] vals, int j, int i) {
  final double dtmp = keys[i];
  keys[i] = keys[j];
  keys[j] = dtmp;
  final int itmp = vals[i];
  vals[i] = vals[j];
  vals[j] = itmp;
}
java
/**
 * Create a new measurement group, register it, and return it.
 *
 * @param string group label
 * @return the newly added group
 */
public EvaluationResult.MeasurementGroup newGroup(String string) { EvaluationResult.MeasurementGroup g = new MeasurementGroup(string); groups.add(g); return g; }
java
/**
 * Find an existing measurement group by label, or create and register a
 * new one when none matches.
 *
 * @param label group label to look up
 * @return existing or newly created group
 */
public EvaluationResult.MeasurementGroup findOrCreateGroup(String label) { for(EvaluationResult.MeasurementGroup g : groups) { if(label.equals(g.getName())) { return g; } } return newGroup(label); }
java
/**
 * Number of output lines: all header lines plus, per group, one title line
 * and one line per measurement.
 *
 * @return total line count
 */
public int numLines() { int r = header.size(); for(MeasurementGroup m : groups) { r += 1 + m.measurements.size(); } return r; }
java
public static EvaluationResult findOrCreate(ResultHierarchy hierarchy, Result parent, String name, String shortname) { ArrayList<EvaluationResult> ers = ResultUtil.filterResults(hierarchy, parent, EvaluationResult.class); EvaluationResult ev = null; for(EvaluationResult e : ers) { if(shortname.equals(e.getShortName())) { ev = e; break; } } if(ev == null) { ev = new EvaluationResult(name, shortname); hierarchy.add(parent, ev); } return ev; }
java
public Document cloneDocument(DOMImplementation domImpl, Document document) { Element root = document.getDocumentElement(); // New document Document result = domImpl.createDocument(root.getNamespaceURI(), root.getNodeName(), null); Element rroot = result.getDocumentElement(); // Cloning the document element is a bit tricky. // This is adopted from DomUtilities#deepCloneDocument boolean before = true; for(Node n = document.getFirstChild(); n != null; n = n.getNextSibling()) { if(n == root) { before = false; copyAttributes(result, root, rroot); for(Node c = root.getFirstChild(); c != null; c = c.getNextSibling()) { final Node cl = cloneNode(result, c); if(cl != null) { rroot.appendChild(cl); } } } else { if(n.getNodeType() != Node.DOCUMENT_TYPE_NODE) { final Node cl = cloneNode(result, n); if(cl != null) { if(before) { result.insertBefore(cl, rroot); } else { result.appendChild(cl); } } } } } return result; }
java
/**
 * Deep-clone a DOM node into the target document via
 * {@code Document.importNode}.
 *
 * @param doc target document
 * @param eold node to clone
 * @return imported deep copy of the node
 */
public Node cloneNode(Document doc, Node eold) { return doc.importNode(eold, true); }
java
/**
 * Copy all attributes of one element onto another, importing each
 * attribute node into the target document first.
 *
 * @param doc target document
 * @param eold source element
 * @param enew destination element
 */
public void copyAttributes(Document doc, Element eold, Element enew) { if(eold.hasAttributes()) { NamedNodeMap attr = eold.getAttributes(); int len = attr.getLength(); for(int i = 0; i < len; i++) { enew.setAttributeNode((Attr) doc.importNode(attr.item(i), true)); } } }
java
/**
 * Derive a unique output filename for a result, appending "-1", "-2", ...
 * on collision. If this result object already claimed a name, that name is
 * reused. The chosen name is recorded in the filenames map.
 *
 * @param result result object the filename is for
 * @param filenamepre preferred prefix; falls back to "result" when null or empty
 * @return unique filename for this result
 */
protected String getFilename(Object result, String filenamepre) { if(filenamepre == null || filenamepre.length() == 0) { filenamepre = "result"; } for(int i = 0;; i++) { String filename = i > 0 ? filenamepre + "-" + i : filenamepre; Object existing = filenames.get(filename); if(existing == null || existing == result) { filenames.put(filename, result); return filename; } } }
java
@SuppressWarnings("unchecked") public void output(Database db, Result r, StreamFactory streamOpener, Pattern filter) throws IOException { List<Relation<?>> ra = new LinkedList<>(); List<OrderingResult> ro = new LinkedList<>(); List<Clustering<?>> rc = new LinkedList<>(); List<IterableResult<?>> ri = new LinkedList<>(); List<SettingsResult> rs = new LinkedList<>(); List<Result> otherres = new LinkedList<>(); // Split result objects in different known types: { List<Result> results = ResultUtil.filterResults(db.getHierarchy(), r, Result.class); for(Result res : results) { if(filter != null) { final String nam = res.getShortName(); if(nam == null || !filter.matcher(nam).find()) { continue; } } if(res instanceof Database) { continue; } if(res instanceof Relation) { ra.add((Relation<?>) res); continue; } if(res instanceof OrderingResult) { ro.add((OrderingResult) res); continue; } if(res instanceof Clustering) { rc.add((Clustering<?>) res); continue; } if(res instanceof IterableResult) { ri.add((IterableResult<?>) res); continue; } if(res instanceof SettingsResult) { rs.add((SettingsResult) res); continue; } otherres.add(res); } } writeSettingsResult(streamOpener, rs); for(IterableResult<?> rii : ri) { writeIterableResult(streamOpener, rii); } for(Clustering<?> c : rc) { NamingScheme naming = new SimpleEnumeratingScheme(c); for(Cluster<?> clus : c.getAllClusters()) { writeClusterResult(db, streamOpener, (Clustering<Model>) c, (Cluster<Model>) clus, ra, naming); } } for(OrderingResult ror : ro) { writeOrderingResult(db, streamOpener, ror, ra); } for(Result otherr : otherres) { writeOtherResult(streamOpener, otherr); } }
java
/**
 * Serialize this entry: superclass state first, then the polynomial
 * approximation object.
 *
 * @param out output to write to
 * @throws IOException on serialization errors
 */
@Override public void writeExternal(ObjectOutput out) throws IOException { super.writeExternal(out); out.writeObject(approximation); }
java
/**
 * Deserialize this entry: superclass state first, then the polynomial
 * approximation object (must mirror {@code writeExternal}).
 *
 * @param in input to read from
 * @throws IOException on read errors
 * @throws ClassNotFoundException when the approximation class cannot be resolved
 */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { super.readExternal(in); approximation = (PolynomialApproximation) in.readObject(); }
java
/**
 * Aggregate subtree sizes: every object starts with size 1, and each
 * non-root object adds its current size onto its parent.
 * NOTE(review): correctness depends on {@code order} visiting children
 * before their parents' sizes are consumed — confirm with caller.
 *
 * @param order processing order of the objects
 * @return store mapping each object to the size of its subtree
 */
private WritableIntegerDataStore computeSubtreeSizes(DBIDs order) { WritableIntegerDataStore siz = DataStoreUtil.makeIntegerStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, 1); DBIDVar v1 = DBIDUtil.newVar(); for(DBIDIter it = order.iter(); it.valid(); it.advance()) { if(DBIDUtil.equal(it, parent.assignVar(it, v1))) { continue; } siz.increment(v1, siz.intValue(it)); } return siz; }
java
/**
 * Compute, for every object, the maximum parent distance incident to it:
 * each object's own parent edge is considered both for the object itself
 * and for its parent.
 *
 * @return store with the maximum height per object
 */
private WritableDoubleDataStore computeMaxHeight() { WritableDoubleDataStore maxheight = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, 0.); DBIDVar v1 = DBIDUtil.newVar(); for(DBIDIter it = ids.iter(); it.valid(); it.advance()) { double d = parentDistance.doubleValue(it); if(d > maxheight.doubleValue(it)) { maxheight.putDouble(it, d); } if(d > maxheight.doubleValue(parent.assignVar(it, v1))) { maxheight.putDouble(v1, d); } } return maxheight; }
java
public ArrayDBIDs topologicalSort() { ArrayModifiableDBIDs ids = DBIDUtil.newArray(this.ids); if(mergeOrder != null) { ids.sort(new DataStoreUtil.AscendingByIntegerDataStore(mergeOrder)); WritableDoubleDataStore maxheight = computeMaxHeight(); ids.sort(new Sorter(maxheight)); maxheight.destroy(); } else { ids.sort(new DataStoreUtil.DescendingByDoubleDataStoreAndId(parentDistance)); } // We used to simply sort by merging distance // But for e.g. Median Linkage, this would lead to problems, as links are // not necessarily performed in ascending order anymore! final int size = ids.size(); ModifiableDBIDs seen = DBIDUtil.newHashSet(size); ArrayModifiableDBIDs order = DBIDUtil.newArray(size); DBIDVar v1 = DBIDUtil.newVar(), prev = DBIDUtil.newVar(); // Process merges in descending order for(DBIDIter it = ids.iter(); it.valid(); it.advance()) { if(!seen.add(it)) { continue; } final int begin = order.size(); order.add(it); prev.set(it); // Copy // Follow parents of prev -> v1 - these need to come before prev. while(!DBIDUtil.equal(prev, parent.assignVar(prev, v1))) { if(!seen.add(v1)) { break; } order.add(v1); prev.set(v1); // Copy } // Reverse the inserted path: for(int i = begin, j = order.size() - 1; i < j; i++, j--) { order.swap(i, j); } } // Reverse everything for(int i = 0, j = size - 1; i < j; i++, j--) { order.swap(i, j); } return order; }
java
/**
 * Build the integer range [start, end) as an array.
 *
 * @param start first value, inclusive
 * @param end last value, exclusive
 * @return array containing start, start+1, ..., end-1
 */
public static int[] range(int start, int end) {
  final int len = end - start;
  int[] result = new int[len];
  for(int i = 0; i < len; i++) {
    result[i] = start + i;
  }
  return result;
}
java
private void doReverseKNNQuery(int k, DBIDRef q, ModifiableDoubleDBIDList result, ModifiableDBIDs candidates) { final ComparableMinHeap<MTreeSearchCandidate> pq = new ComparableMinHeap<>(); // push root pq.add(new MTreeSearchCandidate(0., getRootID(), null, Double.NaN)); // search in tree while(!pq.isEmpty()) { MTreeSearchCandidate pqNode = pq.poll(); // FIXME: cache the distance to the routing object in the queue node! MkCoPTreeNode<O> node = getNode(pqNode.nodeID); // directory node if(!node.isLeaf()) { for(int i = 0; i < node.getNumEntries(); i++) { MkCoPEntry entry = node.getEntry(i); double distance = distance(entry.getRoutingObjectID(), q); double minDist = entry.getCoveringRadius() > distance ? 0. : distance - entry.getCoveringRadius(); double approximatedKnnDist_cons = entry.approximateConservativeKnnDistance(k); if(minDist <= approximatedKnnDist_cons) { pq.add(new MTreeSearchCandidate(minDist, getPageID(entry), entry.getRoutingObjectID(), Double.NaN)); } } } // data node else { for(int i = 0; i < node.getNumEntries(); i++) { MkCoPLeafEntry entry = (MkCoPLeafEntry) node.getEntry(i); double distance = distance(entry.getRoutingObjectID(), q); double approximatedKnnDist_prog = entry.approximateProgressiveKnnDistance(k); if(distance <= approximatedKnnDist_prog) { result.add(distance, entry.getRoutingObjectID()); } else { double approximatedKnnDist_cons = entry.approximateConservativeKnnDistance(k); double diff = distance - approximatedKnnDist_cons; if(diff <= 1E-10) { candidates.add(entry.getRoutingObjectID()); } } } } } }
java
/**
 * Expire a page from the cache: write it back to the backing page file,
 * but only when it was actually modified (dirty).
 *
 * @param page page being expired
 */
protected void expirePage(P page) { if(LOG.isDebuggingFine()) { LOG.debugFine("Write to backing:" + page.getPageID()); } if (page.isDirty()) { file.writePage(page); } }
java
/**
 * Set the cache size; when shrinking, evict the excess pages and write
 * them back to the page file.
 *
 * @param cacheSize new maximum number of cached pages
 */
public void setCacheSize(int cacheSize) {
  this.cacheSize = cacheSize;
  long toDelete = map.size() - this.cacheSize;
  if(toDelete <= 0) {
    return;
  }
  List<Integer> keys = new ArrayList<>(map.keySet());
  Collections.reverse(keys);
  for(Integer id : keys) {
    // BUG FIX: stop once the excess is evicted; the previous version
    // ignored toDelete and flushed the entire cache.
    if(toDelete-- <= 0) {
      break;
    }
    P page = map.remove(id);
    file.writePage(page);
  }
}
java
/**
 * Extract the filename extension in lower case.
 * <p>
 * Uses {@code Locale.ROOT} for case folding so the result is stable
 * regardless of the default locale (e.g. the Turkish dotless-i problem).
 *
 * @param name file name, may be null
 * @return lowercase extension without the dot, or null when absent
 */
public static String getFilenameExtension(String name) {
  if(name == null) {
    return null;
  }
  final int index = name.lastIndexOf('.');
  return index < 0 ? null : name.substring(index + 1).toLowerCase(java.util.Locale.ROOT);
}
java
public static InputStream tryGzipInput(InputStream in) throws IOException { // try autodetecting gzip compression. if(!in.markSupported()) { PushbackInputStream pb = new PushbackInputStream(in, 16); // read a magic from the file header, and push it back byte[] magic = { 0, 0 }; int r = pb.read(magic); pb.unread(magic, 0, r); return (magic[0] == 31 && magic[1] == -117) ? new GZIPInputStream(pb) : pb; } // Mark is supported. in.mark(16); boolean isgzip = ((in.read() << 8) | in.read()) == GZIPInputStream.GZIP_MAGIC; in.reset(); // Rewind return isgzip ? new GZIPInputStream(in) : in; }
java
public static File locateFile(String name, String basedir) { // Try exact match first. File f = new File(name); if(f.exists()) { return f; } // Try with base directory if(basedir != null) { if((f = new File(basedir, name)).exists()) { return f; } } // try stripping whitespace String name2; if(!name.equals(name2 = name.trim())) { if((f = locateFile(name2, basedir)) != null) { return f; } } // try substituting path separators if(!name.equals(name2 = name.replace('/', File.separatorChar))) { if((f = locateFile(name2, basedir)) != null) { return f; } } if(!name.equals(name2 = name.replace('\\', File.separatorChar))) { if((f = locateFile(name2, basedir)) != null) { return f; } } // try stripping extra characters, such as quotes. if(name.length() > 2 && name.charAt(0) == '"' && name.charAt(name.length() - 1) == '"') { if((f = locateFile(name.substring(1, name.length() - 1), basedir)) != null) { return f; } } return null; }
java
/**
 * Append a (distance, id) pair to the list, growing the backing arrays
 * when they are full.
 *
 * @param dist distance value
 * @param id internal object id
 */
protected void addInternal(double dist, int id) { if(size == dists.length) { grow(); } dists[size] = dist; ids[size] = id; ++size; }
java
protected void grow() { if(dists == EMPTY_DISTS) { dists = new double[INITIAL_SIZE]; ids = new int[INITIAL_SIZE]; return; } final int len = dists.length; final int newlength = len + (len >> 1) + 1; double[] odists = dists; dists = new double[newlength]; System.arraycopy(odists, 0, dists, 0, odists.length); int[] oids = ids; ids = new int[newlength]; System.arraycopy(oids, 0, ids, 0, oids.length); }
java
/**
 * Reverse the list in place, swapping symmetric positions in both the
 * distance and the id arrays.
 */
protected void reverse() { for(int i = 0, j = size - 1; i < j; i++, j--) { double tmpd = dists[j]; dists[j] = dists[i]; dists[i] = tmpd; int tmpi = ids[j]; ids[j] = ids[i]; ids[i] = tmpi; } }
java
public void truncate(int newsize) { if(newsize < size) { double[] odists = dists; dists = new double[newsize]; System.arraycopy(odists, 0, dists, 0, newsize); int[] oids = ids; ids = new int[newsize]; System.arraycopy(oids, 0, ids, 0, newsize); size = newsize; } }
java
private RectangleArranger<PlotItem> arrangeVisualizations(double width, double height) { if(!(width > 0. && height > 0.)) { LOG.warning("No size information during arrange()", new Throwable()); return new RectangleArranger<>(1., 1.); } RectangleArranger<PlotItem> plotmap = new RectangleArranger<>(width, height); Hierarchy<Object> vistree = context.getVisHierarchy(); for(It<Projector> iter2 = vistree.iterAll().filter(Projector.class); iter2.valid(); iter2.advance()) { Collection<PlotItem> projs = iter2.get().arrange(context); for(PlotItem it : projs) { if(it.w <= 0.0 || it.h <= 0.0) { LOG.warning("Plot item with improper size information: " + it); continue; } plotmap.put(it.w, it.h, it); } } nextTask: for(It<VisualizationTask> iter2 = vistree.iterAll().filter(VisualizationTask.class); iter2.valid(); iter2.advance()) { VisualizationTask task = iter2.get(); if(!task.isVisible()) { continue; } if(vistree.iterParents(task).filter(Projector.class).valid()) { continue nextTask; } if(task.getRequestedWidth() <= 0.0 || task.getRequestedHeight() <= 0.0) { LOG.warning("Task with improper size information: " + task); continue; } PlotItem it = new PlotItem(task.getRequestedWidth(), task.getRequestedHeight(), null); it.tasks.add(task); plotmap.put(it.w, it.h, it); } return plotmap; }
java
public void initialize(double ratio) { if(!(ratio > 0 && ratio < Double.POSITIVE_INFINITY)) { LOG.warning("Invalid ratio: " + ratio, new Throwable()); ratio = 1.4; } this.ratio = ratio; if(plot != null) { LOG.warning("Already initialized."); lazyRefresh(); return; } reinitialize(); // register context listener context.addResultListener(this); context.addVisualizationListener(this); }
java
private void initializePlot() { plot = new VisualizationPlot(); { // Add a background element: CSSClass cls = new CSSClass(this, "background"); final String bgcol = context.getStyleLibrary().getBackgroundColor(StyleLibrary.PAGE); cls.setStatement(SVGConstants.CSS_FILL_PROPERTY, bgcol); plot.addCSSClassOrLogError(cls); Element background = plot.svgElement(SVGConstants.SVG_RECT_TAG); background.setAttribute(SVGConstants.SVG_X_ATTRIBUTE, "0"); background.setAttribute(SVGConstants.SVG_Y_ATTRIBUTE, "0"); background.setAttribute(SVGConstants.SVG_WIDTH_ATTRIBUTE, "100%"); background.setAttribute(SVGConstants.SVG_HEIGHT_ATTRIBUTE, "100%"); SVGUtil.setCSSClass(background, cls.getName()); // Don't export a white background: if("white".equals(bgcol)) { background.setAttribute(SVGPlot.NO_EXPORT_ATTRIBUTE, SVGPlot.NO_EXPORT_ATTRIBUTE); } plot.getRoot().appendChild(background); } { // setup the hover CSS classes. selcss = new CSSClass(this, "s"); if(DEBUG_LAYOUT) { selcss.setStatement(SVGConstants.CSS_STROKE_PROPERTY, SVGConstants.CSS_RED_VALUE); selcss.setStatement(SVGConstants.CSS_STROKE_WIDTH_PROPERTY, .00001 * StyleLibrary.SCALE); selcss.setStatement(SVGConstants.CSS_STROKE_OPACITY_PROPERTY, "0.5"); } selcss.setStatement(SVGConstants.CSS_FILL_PROPERTY, SVGConstants.CSS_RED_VALUE); selcss.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, "0"); selcss.setStatement(SVGConstants.CSS_CURSOR_PROPERTY, SVGConstants.CSS_POINTER_VALUE); plot.addCSSClassOrLogError(selcss); CSSClass hovcss = new CSSClass(this, "h"); hovcss.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, "0.25"); plot.addCSSClassOrLogError(hovcss); // Hover listener. hoverer = new CSSHoverClass(hovcss.getName(), null, true); } // Disable Batik default interactions (zoom, rotate, etc.) if(single) { plot.setDisableInteractions(true); } SVGEffects.addShadowFilter(plot); SVGEffects.addLightGradient(plot); }
java
private Visualization embedOrThumbnail(final int thumbsize, PlotItem it, VisualizationTask task, Element parent) { final Visualization vis; if(!single) { vis = task.getFactory().makeVisualizationOrThumbnail(context, task, plot, it.w, it.h, it.proj, thumbsize); } else { vis = task.getFactory().makeVisualization(context, task, plot, it.w, it.h, it.proj); } if(vis == null || vis.getLayer() == null) { LOG.warning("Visualization returned empty layer: " + vis); return vis; } if(task.has(RenderFlag.NO_EXPORT)) { vis.getLayer().setAttribute(SVGPlot.NO_EXPORT_ATTRIBUTE, SVGPlot.NO_EXPORT_ATTRIBUTE); } parent.appendChild(vis.getLayer()); return vis; }
java
/**
 * Decide whether a task appears in the overview plot: it must be visible
 * and not flagged NO_EMBED (in single mode) or NO_THUMBNAIL (otherwise).
 *
 * @param task visualization task to check
 * @return true when the task should be shown
 */
protected boolean visibleInOverview(VisualizationTask task) { return task.isVisible() && !task.has(single ? RenderFlag.NO_EMBED : RenderFlag.NO_THUMBNAIL); }
java
private void recalcViewbox() { final Element root = plot.getRoot(); // Reset plot attributes SVGUtil.setAtt(root, SVGConstants.SVG_WIDTH_ATTRIBUTE, "20cm"); SVGUtil.setAtt(root, SVGConstants.SVG_HEIGHT_ATTRIBUTE, SVGUtil.fmt(20 * plotmap.getHeight() / plotmap.getWidth()) + "cm"); String vb = "0 0 " + SVGUtil.fmt(plotmap.getWidth()) + " " + SVGUtil.fmt(plotmap.getHeight()); SVGUtil.setAtt(root, SVGConstants.SVG_VIEW_BOX_ATTRIBUTE, vb); }
java
protected void triggerSubplotSelectEvent(PlotItem it) { // forward event to all listeners. for(ActionListener actionListener : actionListeners) { actionListener.actionPerformed(new DetailViewSelectedEvent(this, ActionEvent.ACTION_PERFORMED, null, 0, it)); } }
java
public static double cdf(double val, double mu, double sigma, double xi) { val = (val - mu) / sigma; // Check support: if(val < 0) { return 0.; } if(xi < 0 && val > -1. / xi) { return 1.; } return 1 - FastMath.pow(1 + xi * val, -1. / xi); }
java
/**
 * Quantile function (inverse CDF) of the generalized Pareto distribution.
 *
 * @param val probability in [0, 1]
 * @param mu location parameter
 * @param sigma scale parameter
 * @param xi shape parameter; xi == 0 uses the exponential limit form
 * @return quantile value, or NaN when val is outside [0, 1]
 */
public static double quantile(double val, double mu, double sigma, double xi) { if(val < 0.0 || val > 1.0) { return Double.NaN; } if(xi == 0.) { return mu - sigma * FastMath.log(1 - val); } return mu - sigma / xi * (1 - FastMath.pow(1 - val, -xi)); }
java
public Result run(Database database, Relation<O> relation, Relation<NumberVector> radrel) { if(queries != null) { throw new AbortException("This 'run' method will not use the given query set!"); } // Get a distance and kNN query instance. DistanceQuery<O> distQuery = database.getDistanceQuery(relation, getDistanceFunction()); RangeQuery<O> rangeQuery = database.getRangeQuery(distQuery); final DBIDs sample = DBIDUtil.randomSample(relation.getDBIDs(), sampling, random); FiniteProgress prog = LOG.isVeryVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null; int hash = 0; MeanVariance mv = new MeanVariance(); for(DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) { double r = radrel.get(iditer).doubleValue(0); DoubleDBIDList rres = rangeQuery.getRangeForDBID(iditer, r); int ichecksum = 0; for(DBIDIter it = rres.iter(); it.valid(); it.advance()) { ichecksum += DBIDUtil.asInteger(it); } hash = Util.mixHashCodes(hash, ichecksum); mv.put(rres.size()); LOG.incrementProcessed(prog); } LOG.ensureCompleted(prog); if(LOG.isStatistics()) { LOG.statistics("Result hashcode: " + hash); LOG.statistics("Mean number of results: " + mv.getMean() + " +- " + mv.getNaiveStddev()); } return null; }
java
/**
 * Draw a random bitset with exactly {@code card} of {@code capacity} bits
 * set. For sparse targets, random bits are set; for dense targets, the set
 * starts all-ones and random bits are cleared. Collisions are corrected by
 * recounting the cardinality on the final step.
 *
 * @param card number of bits to set (0 <= card <= capacity)
 * @param capacity total number of bits
 * @param random random generator
 * @return random bitset with the requested cardinality
 * @throws IllegalArgumentException when card is out of range
 */
public static long[] random(int card, int capacity, Random random) { if(card < 0 || card > capacity) { throw new IllegalArgumentException("Cannot set " + card + " out of " + capacity + " bits."); } // FIXME: Avoid recomputing the cardinality. if(card < capacity >>> 1) { long[] bitset = BitsUtil.zero(capacity); for(int todo = card; todo > 0; // todo = (todo == 1) ? (card - cardinality(bitset)) : (todo - 1)) { setI(bitset, random.nextInt(capacity)); } return bitset; } else { long[] bitset = BitsUtil.ones(capacity); for(int todo = capacity - card; todo > 0; // todo = (todo == 1) ? (cardinality(bitset) - card) : (todo - 1)) { clearI(bitset, random.nextInt(capacity)); } return bitset; } }
java
/**
 * Copy a bitset, sized for at least {@code mincap} bits: the result is
 * truncated or zero-padded to exactly the word count needed.
 *
 * @param v bitset to copy
 * @param mincap minimum capacity in bits
 * @return copy with exactly ceil(mincap / 64) words
 */
public static long[] copy(long[] v, int mincap) { int words = ((mincap - 1) >>> LONG_LOG2_SIZE) + 1; if(v.length == words) { return Arrays.copyOf(v, v.length); } long[] ret = new long[words]; System.arraycopy(v, 0, ret, 0, Math.min(v.length, words)); return ret; }
java
/**
 * Test whether a bitset (array of long words) has no bit set.
 *
 * @param v bitset words
 * @return true when every word is zero
 */
public static boolean isZero(long[] v) {
  for(long word : v) {
    if(word != 0L) {
      return false;
    }
  }
  return true;
}
java
/**
 * Copy the contents of one bitset over another, in place. Words of v
 * beyond the length of o are left untouched.
 *
 * @param v destination bitset, modified in place
 * @param o source bitset (must not be longer than v)
 * @return v, for chaining
 */
public static long[] setI(long[] v, long[] o) {
  assert (o.length <= v.length) : "Bit set sizes do not agree.";
  System.arraycopy(o, 0, v, 0, Math.min(v.length, o.length));
  return v;
}
java
/**
 * Set the low {@code bits} bits of the storage to one and clear all
 * remaining words. Assumes v has enough words for the requested bits.
 *
 * @param v bit storage, modified in place
 * @param bits number of low bits to set
 */
public static void onesI(long[] v, int bits) {
  final int fillWords = bits >>> LONG_LOG2_SIZE;
  final int fillBits = bits & LONG_LOG2_MASK;
  Arrays.fill(v, 0, fillWords, LONG_ALL_BITS);
  if(fillBits > 0) {
    v[fillWords] = (1L << fillBits) - 1;
    if(fillWords + 1 < v.length) {
      Arrays.fill(v, fillWords + 1, v.length, 0L);
    }
  }
  else if(fillWords < v.length) {
    // BUG FIX: when bits is a multiple of Long.SIZE, the word at fillWords
    // (and any following words) was left uncleared, keeping stale bits.
    Arrays.fill(v, fillWords, v.length, 0L);
  }
}
java
/**
 * XOR another bitset onto this one, in place.
 *
 * @param v destination bitset, modified in place
 * @param o other bitset (must not be longer than v)
 * @return v, for chaining
 */
public static long[] xorI(long[] v, long[] o) {
  assert (o.length <= v.length) : "Bit set sizes do not agree.";
  for(int i = o.length - 1; i >= 0; i--) {
    v[i] ^= o[i];
  }
  return v;
}
java
/**
 * OR another bitset onto this one, in place.
 *
 * @param v destination bitset, modified in place
 * @param o other bitset (must not be longer than v)
 * @return v, for chaining
 */
public static long[] orI(long[] v, long[] o) {
  assert (o.length <= v.length) : "Bit set sizes do not agree.";
  final int end = Math.min(v.length, o.length);
  int i = 0;
  while(i < end) {
    v[i] |= o[i];
    i++;
  }
  return v;
}
java
public static long[] andI(long[] v, long[] o) { int i = 0; for(; i < o.length; i++) { v[i] &= o[i]; } // Zero higher words Arrays.fill(v, i, v.length, 0); return v; }
java
/**
 * Clear in v every bit that is set in o (AND-NOT), in place.
 *
 * @param v destination bitset, modified in place
 * @param o bits to clear
 * @return v, for chaining
 */
public static long[] nandI(long[] v, long[] o) {
  for(int i = o.length - 1; i >= 0; i--) {
    v[i] &= ~o[i];
  }
  return v;
}
java
/**
 * Invert every bit of the bitset, in place.
 *
 * @param v bitset, modified in place
 * @return v, for chaining
 */
public static long[] invertI(long[] v) {
  for(int i = v.length - 1; i >= 0; i--) {
    v[i] = ~v[i];
  }
  return v;
}
java
public static long cycleLeftC(long v, int shift, int len) { return shift == 0 ? v : shift < 0 ? cycleRightC(v, -shift, len) : // (((v) << (shift)) | ((v) >>> ((len) - (shift)))) & ((1 << len) - 1); }
java
/**
 * Cycle (rotate) a bitset left by {@code shift} bits within a width of
 * {@code len} bits. NOTE(review): relies on a three-argument copy()
 * overload (not shown here), presumably copying with headroom for the
 * shift — confirm against the full BitsUtil source.
 *
 * @param v bitset to rotate
 * @param shift rotation amount
 * @param len bit width of the rotation
 * @return rotated bitset
 */
public static long[] cycleLeftI(long[] v, int shift, int len) { long[] t = copy(v, len, shift); return orI(shiftRightI(v, len - shift), truncateI(t, len)); }
java
/**
 * Number of trailing zero bits of a bitset, counted from the least
 * significant word upward.
 *
 * @param v bitset words
 * @return index of the lowest set bit, or -1 when the set is empty
 */
public static int numberOfTrailingZerosSigned(long[] v) {
  int p = 0;
  while(p < v.length) {
    final long word = v[p];
    if(word != 0L) {
      return p * Long.SIZE + Long.numberOfTrailingZeros(word);
    }
    p++;
  }
  return -1;
}
java
/**
 * Number of leading zero bits of a bitset, counted from the most
 * significant word downward.
 *
 * @param v bitset words
 * @return number of leading zeros, or -1 when the set is empty
 */
public static int numberOfLeadingZerosSigned(long[] v) {
  for(int off = 0, w = v.length - 1; w >= 0; off++, w--) {
    final long word = v[w];
    if(word != 0L) {
      return off * Long.SIZE + Long.numberOfLeadingZeros(word);
    }
  }
  return -1;
}
java
/**
 * Find the highest clear (zero) bit at position {@code start} or below in
 * a single word. The negative shift count exploits Java's mod-64 shift
 * semantics: {@code >>> -(start + 1)} keeps exactly the low start+1 bits.
 *
 * @param v bit storage word
 * @param start highest position to consider (clamped to 63)
 * @return position of the previous clear bit, or -1 when none exists
 */
public static int previousClearBit(long v, int start) { if(start < 0) { return -1; } start = start < Long.SIZE ? start : Long.SIZE - 1; long cur = ~v & (LONG_ALL_BITS >>> -(start + 1)); return cur == 0 ? -1 : 63 - Long.numberOfLeadingZeros(cur); }
java
/**
 * Find the lowest set bit at position {@code start} or above in a single
 * word. (The explicit LONG_ALL_BITS special case is redundant — trailing
 * zeros of an all-ones word is 0 anyway — but harmless.)
 *
 * @param v bit storage word
 * @param start lowest position to consider (negative values are clamped to 0)
 * @return position of the next set bit, or -1 when none exists
 */
public static int nextSetBit(long v, int start) { if(start >= Long.SIZE) { return -1; } start = start < 0 ? 0 : start; long cur = v & (LONG_ALL_BITS << start); return cur == 0 ? -1 : cur == LONG_ALL_BITS ? 0 : Long.numberOfTrailingZeros(cur); }
java
/**
 * Test whether two bitsets share at least one set bit.
 *
 * @param x first bitset
 * @param y second bitset
 * @return true when the intersection is non-empty
 */
public static boolean intersect(long[] x, long[] y) {
  final int shared = Math.min(x.length, y.length);
  for(int i = 0; i < shared; i++) {
    if((x[i] & y[i]) != 0L) {
      return true;
    }
  }
  return false;
}
java
/**
 * Cardinality of the intersection of two bitsets.
 *
 * @param x first bitset
 * @param y second bitset
 * @return number of bits set in both
 */
public static int intersectionSize(long[] x, long[] y) {
  final int shared = Math.min(x.length, y.length);
  int total = 0;
  for(int i = 0; i < shared; i++) {
    total += Long.bitCount(x[i] & y[i]);
  }
  return total;
}
java
/**
 * Cardinality of the union of two bitsets; the tail of the longer set is
 * counted as-is.
 *
 * @param x first bitset
 * @param y second bitset
 * @return number of bits set in either
 */
public static int unionSize(long[] x, long[] y) {
  final int lenx = x.length, leny = y.length;
  final int shared = Math.min(lenx, leny);
  int total = 0;
  int i = 0;
  while(i < shared) {
    total += Long.bitCount(x[i] | y[i]);
    i++;
  }
  while(i < lenx) {
    total += Long.bitCount(x[i++]);
  }
  while(i < leny) {
    total += Long.bitCount(y[i++]);
  }
  return total;
}
java
/**
 * Compare two bitsets for equal content; differing lengths are allowed
 * when the longer set carries only zero words beyond the shared prefix.
 * Null equals only null.
 *
 * @param x first bitset, may be null
 * @param y second bitset, may be null
 * @return true when both represent the same bit pattern
 */
public static boolean equal(long[] x, long[] y) {
  if(x == null || y == null) {
    return x == y;
  }
  int top = Math.min(x.length, y.length) - 1;
  // Extra high words must all be zero:
  for(int i = x.length - 1; i > top; i--) {
    if(x[i] != 0L) {
      return false;
    }
  }
  for(int i = y.length - 1; i > top; i--) {
    if(y[i] != 0L) {
      return false;
    }
  }
  // Shared prefix must match exactly:
  while(top >= 0) {
    if(x[top] != y[top]) {
      return false;
    }
    top--;
  }
  return true;
}
java
/**
 * Compare two bitsets as unsigned integers, the lowest word being the
 * least significant. Extra high words are treated as zero; {@code null}
 * sorts before any non-null value.
 *
 * @param x first bitset, may be null
 * @param y second bitset, may be null
 * @return -1, 0, or +1 as x is less than, equal to, or greater than y
 */
public static int compare(long[] x, long[] y) {
  if(x == null) {
    return (y == null) ? 0 : -1;
  }
  if(y == null) {
    return +1;
  }
  int p = Math.min(x.length, y.length) - 1;
  for(int i = x.length - 1; i > p; i--) {
    if(x[i] != 0) {
      return +1;
    }
  }
  for(int i = y.length - 1; i > p; i--) {
    if(y[i] != 0) {
      return -1;
    }
  }
  for(; p >= 0; p--) {
    final long xp = x[p], yp = y[p];
    if(xp != yp) {
      // BUG FIX: the previous hand-rolled sign juggling returned the
      // inverted result when both words were negative. Compare the most
      // significant differing word as unsigned.
      return Long.compareUnsigned(xp, yp) < 0 ? -1 : +1;
    }
  }
  return 0;
}
java
/**
 * Order entries by distance only. The commented-out entry-id tiebreak was
 * deliberately disabled and is kept for reference; equal distances thus
 * compare as 0.
 *
 * @param o other entry
 * @return result of comparing the two distances
 */
@Override public int compareTo(DistanceEntry<E> o) { int comp = Double.compare(distance, o.distance); return comp; // return comp != 0 ? comp : // entry.getEntryID().compareTo(o.entry.getEntryID()); }
java
/**
 * Compute the kNN distances of an object as a fixed-size array.
 * NOTE(review): the query requests kmax - 1 neighbors but allocates kmax
 * slots; any unfilled trailing entries remain 0.0 — confirm this is the
 * intended off-by-one (e.g. query point excluded).
 *
 * @param object query object
 * @return array of up to kmax nearest-neighbor distances
 */
private double[] knnDistances(O object) { KNNList knns = knnq.getKNNForObject(object, getKmax() - 1); double[] distances = new double[getKmax()]; int i = 0; for(DoubleDBIDListIter iter = knns.iter(); iter.valid() && i < getKmax(); iter.advance(), i++) { distances[i] = iter.doubleValue(); } return distances; }
java
/**
 * Sum the values of data[start .. end-1].
 * NOTE(review): despite the name, no averaging or resampling happens here
 * and the {@code size} parameter is unused — confirm whether the sum (and
 * not the mean over {@code size}) is intended.
 *
 * @param data input values
 * @param start first index, inclusive
 * @param end last index, exclusive
 * @param size unused
 * @return sum of the selected values
 */
protected double downsample(double[] data, int start, int end, int size) { double sum = 0; for (int i = start; i < end; i++) { sum += data[i]; } return sum; }
java
public void setRotationZ(double rotationZ) { this.rotationZ = rotationZ; this.cosZ = FastMath.cos(rotationZ); this.sinZ = FastMath.sin(rotationZ); fireCameraChangedEvent(); }
java
public void apply(GL2 gl) { // 3D projection gl.glMatrixMode(GL2.GL_PROJECTION); gl.glLoadIdentity(); // Perspective. glu.gluPerspective(35, ratio, 1, 1000); glu.gluLookAt(distance * sinZ, distance * -cosZ, height, // pos 0, 0, .5, // center 0, 0, 1 // up ); // Change back to model view matrix. gl.glMatrixMode(GL2.GL_MODELVIEW); gl.glLoadIdentity(); // Store the matrixes for reference. gl.glGetIntegerv(GL.GL_VIEWPORT, viewp, 0); gl.glGetDoublev(GLMatrixFunc.GL_MODELVIEW_MATRIX, modelview, 0); gl.glGetDoublev(GLMatrixFunc.GL_PROJECTION_MATRIX, projection, 0); }
java
public void project(double x, double y, double z, double[] out) { glu.gluProject(x, y, z, modelview, 0, projection, 0, viewp, 0, out, 0); }
java
public void addCameraListener(CameraListener lis) { if (listeners == null) { listeners = new ArrayList<>(5); } listeners.add(lis); }
java
public static AffineTransformation axisProjection(int dim, int ax1, int ax2) { // setup a projection to get the data into the interval -1:+1 in each // dimension with the intended-to-see dimensions first. AffineTransformation proj = AffineTransformation.reorderAxesTransformation(dim, ax1, ax2); // Assuming that the data was normalized on [0:1], center it: double[] trans = new double[dim]; for(int i = 0; i < dim; i++) { trans[i] = -.5; } proj.addTranslation(trans); // mirror on the y axis, since the SVG coordinate system is screen // coordinates (y = down) and not mathematical coordinates (y = up) proj.addAxisReflection(2); // scale it up proj.addScaling(SCALE); return proj; }
java
protected ApproximationLine conservativeKnnDistanceApproximation(int k_max) { // determine k_0, y_1, y_kmax int k_0 = k_max; double y_1 = Double.NEGATIVE_INFINITY; double y_kmax = Double.NEGATIVE_INFINITY; for(int i = 0; i < getNumEntries(); i++) { MkCoPEntry entry = getEntry(i); ApproximationLine approx = entry.getConservativeKnnDistanceApproximation(); k_0 = Math.min(approx.getK_0(), k_0); } for(int i = 0; i < getNumEntries(); i++) { MkCoPEntry entry = getEntry(i); ApproximationLine approx = entry.getConservativeKnnDistanceApproximation(); double entry_y_1 = approx.getValueAt(k_0); double entry_y_kmax = approx.getValueAt(k_max); if(!Double.isInfinite(entry_y_1)) { y_1 = Math.max(entry_y_1, y_1); } if(!Double.isInfinite(entry_y_kmax)) { y_kmax = Math.max(entry_y_kmax, y_kmax); } } // determine m and t double m = (y_kmax - y_1) / (FastMath.log(k_max) - FastMath.log(k_0)); double t = y_1 - m * FastMath.log(k_0); return new ApproximationLine(k_0, m, t); }
java
protected ApproximationLine progressiveKnnDistanceApproximation(int k_max) { if(!isLeaf()) { throw new UnsupportedOperationException("Progressive KNN-distance approximation " + "is only vailable in leaf nodes!"); } // determine k_0, y_1, y_kmax int k_0 = 0; double y_1 = Double.POSITIVE_INFINITY; double y_kmax = Double.POSITIVE_INFINITY; for(int i = 0; i < getNumEntries(); i++) { MkCoPLeafEntry entry = (MkCoPLeafEntry) getEntry(i); ApproximationLine approx = entry.getProgressiveKnnDistanceApproximation(); k_0 = Math.max(approx.getK_0(), k_0); } for(int i = 0; i < getNumEntries(); i++) { MkCoPLeafEntry entry = (MkCoPLeafEntry) getEntry(i); ApproximationLine approx = entry.getProgressiveKnnDistanceApproximation(); y_1 = Math.min(approx.getValueAt(k_0), y_1); y_kmax = Math.min(approx.getValueAt(k_max), y_kmax); } // determine m and t double m = (y_kmax - y_1) / (FastMath.log(k_max) - FastMath.log(k_0)); double t = y_1 - m * FastMath.log(k_0); return new ApproximationLine(k_0, m, t); }
java
public CSSClass addClass(CSSClass clss) throws CSSNamingConflict {
  // Register (or replace) a CSS class; conflicts arise only when a class of
  // the same name is already registered by a *different* owner.
  CSSClass prev = store.get(clss.getName());
  if(prev != null) {
    Object prevOwner = prev.getOwner();
    if(prevOwner != null && prevOwner != clss.getOwner()) {
      throw new CSSNamingConflict("CSS class naming conflict between " + clss.getOwner().toString() + " and " + prevOwner.toString());
    }
  }
  // Returns the previously registered class, if any.
  return store.put(clss.getName(), clss);
}
java
public synchronized void removeClass(CSSClass clss) {
  // Only unregister when the stored object is this very instance; a class
  // of the same name registered by someone else is left untouched.
  if(store.get(clss.getName()) == clss) {
    store.remove(clss.getName());
  }
}
java
public CSSClass getClass(String name, Object owner) throws CSSNamingConflict { CSSClass existing = store.get(name); // Not found. if (existing == null) { return null; } // Different owner if (owner != null && existing.getOwner() != owner) { throw new CSSNamingConflict("CSS class naming conflict between "+owner.toString()+" and "+existing.getOwner().toString()); } return existing; }
java
public void serialize(StringBuilder buf) {
  // Append the CSS definition of every registered class to the buffer,
  // in the storage's iteration order.
  for(CSSClass c : store.values()) {
    c.appendCSSDefinition(buf);
  }
}
java
public boolean mergeCSSFrom(CSSClassManager other) throws CSSNamingConflict {
  // Import every class of the other manager; addClass() raises a
  // CSSNamingConflict when a class with an incompatible owner exists.
  for(CSSClass c : other.getClasses()) {
    addClass(c);
  }
  return true;
}
java
public void updateStyleElement(Document document, Element style) {
  // Serialize all registered classes into one text node.
  StringBuilder css = new StringBuilder();
  serialize(css);
  Text content = document.createTextNode(css.toString());
  // Replace whatever the style element currently contains.
  while(style.hasChildNodes()) {
    style.removeChild(style.getFirstChild());
  }
  style.appendChild(content);
}
java
private static double calculate_MDEF_norm(Node sn, Node cg) {
  // Compute the normalized MDEF (multi-granularity deviation factor) of the
  // counting node cg with respect to the sampling node sn, based on the
  // square and cubic sums of the box counts at the level difference.
  // get the square sum of the counting neighborhoods box counts
  long sq = sn.getSquareSum(cg.getLevel() - sn.getLevel());
  /*
   * if the square sum is equal to box count of the sampling Neighborhood then
   * n_hat is equal one, and as cg needs to have at least one Element mdef
   * would get zero or lower than zero. This is the case when all of the
   * counting Neighborhoods contain one or zero Objects. Additionally, the
   * cubic sum, square sum and sampling Neighborhood box count are all equal,
   * which leads to sig_n_hat being zero and thus mdef_norm is either negative
   * infinite or undefined. As the distribution of the Objects seem quite
   * uniform, a mdef_norm value of zero ( = no outlier) is appropriate and
   * circumvents the problem of undefined values.
   */
  if(sq == sn.getCount()) {
    return 0.0;
  }
  // calculation of mdef according to the paper and standardization as done in
  // LOCI
  long cb = sn.getCubicSum(cg.getLevel() - sn.getLevel());
  // n_hat is the average box count, sig_n_hat its standard deviation.
  // NOTE(review): cb * sn.getCount() and sq * sq are long products — could
  // overflow for very large counts; TODO confirm counts stay small enough.
  double n_hat = (double) sq / sn.getCount();
  double sig_n_hat = FastMath.sqrt(cb * sn.getCount() - (sq * sq)) / sn.getCount();
  // Avoid NaN - correct result 0.0?
  if(sig_n_hat < Double.MIN_NORMAL) {
    return 0.0;
  }
  // Deviation of the counting neighborhood from the expectation, normalized.
  double mdef = n_hat - cg.getCount();
  return mdef / sig_n_hat;
}
java
public void writeBundleStream(BundleStreamSource source, WritableByteChannel output) throws IOException {
  // Serialize a bundle stream to the output channel: a header (written
  // lazily before the first object) followed by one serialized record per
  // NEXT_OBJECT event. The buffer is flushed and grown on demand.
  ByteBuffer buffer = ByteBuffer.allocateDirect(INITIAL_BUFFER);
  DBIDVar var = DBIDUtil.newVar();
  // serializers[0] is the DBID serializer (null when the source has no
  // DBIDs); serializers[1..] match the source's meta columns.
  ByteBufferSerializer<?>[] serializers = null;
  loop: while(true) {
    BundleStreamSource.Event ev = source.nextEvent();
    switch(ev){
    case NEXT_OBJECT:
      // Write the header on the first object, once the meta is final.
      if(serializers == null) {
        serializers = writeHeader(source, buffer, output);
      }
      if(serializers[0] != null) {
        if(!source.assignDBID(var)) {
          throw new AbortException("An object did not have an DBID assigned.");
        }
        DBID id = DBIDUtil.deref(var);
        @SuppressWarnings("unchecked")
        ByteBufferSerializer<DBID> ser = (ByteBufferSerializer<DBID>) serializers[0];
        // Grow/flush the buffer so the serialized DBID fits.
        int size = ser.getByteSize(id);
        buffer = ensureBuffer(size, buffer, output);
        ser.toByteBuffer(buffer, id);
      }
      // Write each data column; j indexes source columns, i the serializers
      // (offset by one because slot 0 is reserved for the DBID).
      for(int i = 1, j = 0; i < serializers.length; ++i, ++j) {
        @SuppressWarnings("unchecked")
        ByteBufferSerializer<Object> ser = (ByteBufferSerializer<Object>) serializers[i];
        int size = ser.getByteSize(source.data(j));
        buffer = ensureBuffer(size, buffer, output);
        ser.toByteBuffer(buffer, source.data(j));
      }
      break; // switch
    case META_CHANGED:
      // The header fixes the column layout; late meta changes cannot be
      // represented in this format.
      if(serializers != null) {
        throw new AbortException("Meta changes are not supported, once the block header has been written.");
      }
      break; // switch
    case END_OF_STREAM:
      break loop;
    default:
      LOG.warning("Unknown bundle stream event. API inconsistent? " + ev);
      break; // switch
    }
  }
  // Flush any remaining buffered bytes.
  if(buffer.position() > 0) {
    flushBuffer(buffer, output);
  }
}
java
/**
 * Write the buffered data to the output channel and reset the buffer for
 * further writing.
 *
 * @param buffer buffer holding pending data (in write mode)
 * @param output channel to write to
 * @throws IOException on write errors
 */
private void flushBuffer(ByteBuffer buffer, WritableByteChannel output) throws IOException {
  buffer.flip();
  output.write(buffer);
  // FIX(idiom): clear() yields exactly the state of the former
  // flip()+limit(capacity()) sequence: position = 0, limit = capacity.
  // NOTE(review): as in the original, a partial write on a non-blocking
  // channel would discard the unwritten remainder — confirm callers only
  // use blocking channels.
  buffer.clear();
}
java
private ByteBuffer ensureBuffer(int size, ByteBuffer buffer, WritableByteChannel output) throws IOException {
  // Ensure at least `size` bytes of space in the buffer, flushing pending
  // data and — if still too small — allocating a larger replacement buffer.
  // Callers must use the returned buffer from here on.
  if(buffer.remaining() >= size) {
    return buffer;
  }
  // Flush the current contents to make room.
  flushBuffer(buffer, output);
  if(buffer.remaining() >= size) {
    return buffer;
  }
  // Aggressively grow the buffer: at least double, and large enough for the
  // requested size. The old buffer is empty after the flush, so nothing is
  // copied over.
  return ByteBuffer.allocateDirect(Math.max(buffer.capacity() << 1, buffer.capacity() + size));
}
java
private ByteBufferSerializer<?>[] writeHeader(BundleStreamSource source, ByteBuffer buffer, WritableByteChannel output) throws IOException {
  // Write the file header: magic number, column count, and one serialized
  // type information per column. Returns the serializer array used for the
  // data records; slot 0 holds the DBID serializer or null.
  final BundleMeta meta = source.getMeta();
  final int nummeta = meta.size();
  @SuppressWarnings("rawtypes")
  final ByteBufferSerializer[] serializers = new ByteBufferSerializer[1 + nummeta];
  // Write our magic ID first.
  assert (buffer.position() == 0) : "Buffer is supposed to be at 0.";
  buffer.putInt(MAGIC);
  // Write the number of metas next.
  // For compatibility with earlier versions, treat DBIDs as extra type
  if(source.hasDBIDs()) {
    buffer.putInt(1 + nummeta);
    ByteBufferSerializer<DBID> ser = DBIDFactory.FACTORY.getDBIDSerializer();
    TypeInformationSerializer.STATIC.toByteBuffer(buffer, new SimpleTypeInformation<>(DBID.class, ser));
    serializers[0] = ser;
  }
  else {
    buffer.putInt(nummeta);
  }
  // Serialize each column's type information; columns without a serializer
  // cannot be written at all.
  for(int i = 0; i < nummeta; i++) {
    SimpleTypeInformation<?> type = meta.get(i);
    ByteBufferSerializer<?> ser = type.getSerializer();
    if(ser == null) {
      throw new AbortException("Cannot serialize - no serializer found for type: " + type.toString());
    }
    TypeInformationSerializer.STATIC.toByteBuffer(buffer, type);
    serializers[i + 1] = ser;
  }
  return serializers;
}
java
protected void runDBSCAN(Relation<O> relation, RangeQuery<O> rangeQuery) {
  // Main DBSCAN loop: try to expand a cluster from every object not yet
  // assigned; results accumulate in the processedIDs / resultList / noise
  // fields.
  final int size = relation.size();
  FiniteProgress objprog = LOG.isVerbose() ? new FiniteProgress("Processing objects", size, LOG) : null;
  IndefiniteProgress clusprog = LOG.isVerbose() ? new IndefiniteProgress("Number of clusters", LOG) : null;
  processedIDs = DBIDUtil.newHashSet(size);
  // Seed list is reused across expandCluster calls to avoid reallocation.
  ArrayModifiableDBIDs seeds = DBIDUtil.newArray();
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    if(!processedIDs.contains(iditer)) {
      expandCluster(relation, rangeQuery, iditer, seeds, objprog, clusprog);
    }
    if(objprog != null && clusprog != null) {
      objprog.setProcessed(processedIDs.size(), LOG);
      clusprog.setProcessed(resultList.size(), LOG);
    }
    // Early exit once every object has been assigned.
    if(processedIDs.size() == size) {
      break;
    }
  }
  // Finish progress logging
  LOG.ensureCompleted(objprog);
  LOG.setCompleted(clusprog);
}
java
protected void expandCluster(Relation<O> relation, RangeQuery<O> rangeQuery, DBIDRef startObjectID, ArrayModifiableDBIDs seeds, FiniteProgress objprog, IndefiniteProgress clusprog) {
  // Grow a cluster from startObjectID using epsilon range queries; objects
  // reachable from core points are added via the seeds work list.
  DoubleDBIDList neighbors = rangeQuery.getRangeForDBID(startObjectID, epsilon);
  ncounter += neighbors.size(); // statistics: distance computations
  // startObject is no core-object: mark it as noise (it may later be
  // re-claimed by another cluster in processNeighbors).
  if(neighbors.size() < minpts) {
    noise.add(startObjectID);
    processedIDs.add(startObjectID);
    if(objprog != null) {
      objprog.incrementProcessed(LOG);
    }
    return;
  }
  ModifiableDBIDs currentCluster = DBIDUtil.newArray();
  currentCluster.add(startObjectID);
  processedIDs.add(startObjectID);
  // try to expand the cluster
  assert (seeds.size() == 0); // the shared seed list must arrive empty
  seeds.clear();
  processNeighbors(neighbors.iter(), currentCluster, seeds);
  DBIDVar o = DBIDUtil.newVar();
  // Process the seed list until exhausted; only core points (>= minpts
  // neighbors) contribute further seeds.
  while(!seeds.isEmpty()) {
    neighbors = rangeQuery.getRangeForDBID(seeds.pop(o), epsilon);
    ncounter += neighbors.size();
    if(neighbors.size() >= minpts) {
      processNeighbors(neighbors.iter(), currentCluster, seeds);
    }
    if(objprog != null) {
      objprog.incrementProcessed(LOG);
    }
  }
  resultList.add(currentCluster);
  if(clusprog != null) {
    clusprog.setProcessed(resultList.size(), LOG);
  }
}
java
private void processNeighbors(DoubleDBIDListIter neighbor, ModifiableDBIDs currentCluster, ArrayModifiableDBIDs seeds) {
  // Add the neighbors of a core point to the current cluster, queueing
  // newly seen objects as seeds for further expansion.
  final boolean ismetric = getDistanceFunction().isMetric();
  for(; neighbor.valid(); neighbor.advance()) {
    if(processedIDs.add(neighbor)) {
      // Newly processed object: enqueue as seed — except that for metric
      // distances, a zero-distance neighbor is an exact duplicate of the
      // core point and cannot have a different neighborhood.
      if(!ismetric || neighbor.doubleValue() > 0.) {
        seeds.add(neighbor);
      }
    }
    else if(!noise.remove(neighbor)) {
      // Already processed and not noise: belongs to another cluster, skip.
      continue;
    }
    // Reached for new objects and for reclaimed noise objects.
    currentCluster.add(neighbor);
  }
}
java
private double loglikelihoodAnomalous(DBIDs anomalousObjs) {
  // Log-likelihood of the anomalous set under a uniform model: each of the
  // m objects has probability 1/m, yielding m * log(1/m) = m * -log(m).
  final int m = anomalousObjs.size();
  if(m == 0) {
    return 0;
  }
  return m * -FastMath.log(m);
}
java
private double loglikelihoodNormal(DBIDs objids, SetDBIDs anomalous, CovarianceMatrix builder, Relation<V> relation) {
  // Log-likelihood of the non-anomalous objects under the multivariate
  // Gaussian described by the covariance builder (mean + sample covariance).
  double[] mean = builder.getMeanVector();
  final LUDecomposition lu = new LUDecomposition(builder.makeSampleMatrix());
  double[][] covInv = lu.inverse();
  // for each object compute probability and sum
  // Constant term: -(n - |anomalous|) * log(sqrt((2*pi)^d * det(cov))).
  double prob = (objids.size() - anomalous.size()) * -FastMath.log(FastMath.sqrt(MathUtil.powi(MathUtil.TWOPI, RelationUtil.dimensionality(relation)) * lu.det()));
  for(DBIDIter iter = objids.iter(); iter.valid(); iter.advance()) {
    if(!anomalous.contains(iter)) {
      // Mahalanobis term: -(x - mean)^T cov^-1 (x - mean) / 2.
      double[] xcent = minusEquals(relation.get(iter).toArray(), mean);
      prob -= .5 * transposeTimesTimes(xcent, covInv, xcent);
    }
  }
  return prob;
}
java
private DoubleMinMax exactMinMax(Relation<O> relation, DistanceQuery<O> distFunc) { final FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Exact fitting distance computations", relation.size(), LOG) : null; DoubleMinMax minmax = new DoubleMinMax(); // find exact minimum and maximum first. for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { for(DBIDIter iditer2 = relation.iterDBIDs(); iditer2.valid(); iditer2.advance()) { // skip the point itself. if(DBIDUtil.equal(iditer, iditer2)) { continue; } double d = distFunc.distance(iditer, iditer2); minmax.put(d); } LOG.incrementProcessed(progress); } LOG.ensureCompleted(progress); return minmax; }
java
@Override
protected void preInsert(RdKNNEntry entry) {
  // Before the actual insertion, adjust the kNN distances along the
  // insertion path using a fresh k_max heap for the new entry.
  KNNHeap knns_o = DBIDUtil.newHeap(settings.k_max);
  preInsert(entry, getRootEntry(), knns_o);
}
java
@Override
protected void postDelete(RdKNNEntry entry) {
  // After deleting an object, the kNN distances of its reverse-kNN must be
  // recomputed, since the deleted object may have been among their kNN.
  // reverse knn of o
  ModifiableDoubleDBIDList rnns = DBIDUtil.newDistanceDBIDList();
  doReverseKNN(getRoot(), ((RdKNNLeafEntry) entry).getDBID(), rnns);
  // knn of rnn
  ArrayModifiableDBIDs ids = DBIDUtil.newArray(rnns);
  ids.sort(); // bulk kNN queries expect sorted ids
  List<? extends KNNList> knnLists = knnQuery.getKNNForBulkDBIDs(ids, settings.k_max);
  // adjust knn distances
  adjustKNNDistance(getRootEntry(), ids, knnLists);
}
java
private void doReverseKNN(RdKNNNode node, DBID oid, ModifiableDoubleDBIDList result) { if(node.isLeaf()) { for(int i = 0; i < node.getNumEntries(); i++) { RdKNNLeafEntry entry = (RdKNNLeafEntry) node.getEntry(i); double distance = distanceQuery.distance(entry.getDBID(), oid); if(distance <= entry.getKnnDistance()) { result.add(distance, entry.getDBID()); } } } // node is a inner node else { for(int i = 0; i < node.getNumEntries(); i++) { RdKNNDirectoryEntry entry = (RdKNNDirectoryEntry) node.getEntry(i); double minDist = distanceQuery.minDist(entry, oid); if(minDist <= entry.getKnnDistance()) { doReverseKNN(getNode(entry), oid, result); } } } }
java
private void doBulkReverseKNN(RdKNNNode node, DBIDs ids, Map<DBID, ModifiableDoubleDBIDList> result) { if(node.isLeaf()) { for(int i = 0; i < node.getNumEntries(); i++) { RdKNNLeafEntry entry = (RdKNNLeafEntry) node.getEntry(i); for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { DBID id = DBIDUtil.deref(iter); double distance = distanceQuery.distance(entry.getDBID(), id); if(distance <= entry.getKnnDistance()) { result.get(id).add(distance, entry.getDBID()); } } } } // node is a inner node else { for(int i = 0; i < node.getNumEntries(); i++) { RdKNNDirectoryEntry entry = (RdKNNDirectoryEntry) node.getEntry(i); ModifiableDBIDs candidates = DBIDUtil.newArray(); for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { DBID id = DBIDUtil.deref(iter); double minDist = distanceQuery.minDist(entry, id); if(minDist <= entry.getKnnDistance()) { candidates.add(id); } if(!candidates.isEmpty()) { doBulkReverseKNN(getNode(entry), candidates, result); } } } } }
java
/**
 * Verify that the given distance function matches the one this index was
 * built with.
 *
 * @param distanceFunction distance function to check
 * @throws IllegalArgumentException if the distance function differs from
 *         the one configured in the settings
 */
private void checkDistanceFunction(SpatialPrimitiveDistanceFunction<? super O> distanceFunction) {
  if(!settings.distanceFunction.equals(distanceFunction)) {
    // FIX: report the class of the expected distance function; previously
    // the message showed this.distanceQuery.getClass(), which is the query
    // class, not the expected distance function.
    throw new IllegalArgumentException("Parameter distanceFunction must be an instance of " + settings.distanceFunction.getClass() + ", but is " + distanceFunction.getClass());
  }
}
java
@Override
public final void insertAll(DBIDs ids) {
  // Insert a set of objects, bulk-loading when supported.
  // NOTE(review): sets of size 0 *and size 1* are skipped entirely — a
  // single object is never inserted here. Looks suspicious; confirm this
  // early exit is intentional.
  if(ids.isEmpty() || (ids.size() == 1)) {
    return;
  }
  // Make an example leaf
  if(canBulkLoad()) {
    // Bulk load: build all leaf entries first, then load in one pass.
    List<RdKNNEntry> leafs = new ArrayList<>(ids.size());
    for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
      leafs.add(createNewLeafEntry(DBIDUtil.deref(iter)));
    }
    bulkLoad(leafs);
  }
  else {
    // Fallback: insert objects one by one.
    for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
      insert(iter);
    }
  }
  doExtraIntegrityChecks();
}
java
public int correlationDistance(PCAFilteredResult pca1, PCAFilteredResult pca2, int dimensionality) {
  // Correlation distance between two local PCA results: the correlation
  // dimension after mutually inserting each result's strong eigenvectors
  // that span a new direction (distance to the other subspace > delta).
  // TODO: Can we delay copying the matrixes?
  // pca of rv1
  double[][] v1t = copy(pca1.getEigenvectors());
  double[][] v1t_strong = pca1.getStrongEigenvectors();
  int lambda1 = pca1.getCorrelationDimension();
  // pca of rv2
  double[][] v2t = copy(pca2.getEigenvectors());
  double[][] v2t_strong = pca2.getStrongEigenvectors();
  int lambda2 = pca2.getCorrelationDimension();
  // for all strong eigenvectors of rv2
  double[][] m1_czech = pca1.dissimilarityMatrix();
  for(int i = 0; i < v2t_strong.length; i++) {
    double[] v2_i = v2t_strong[i];
    // check, if distance of v2_i to the space of rv1 > delta
    // (i.e., if v2_i spans up a new dimension)
    double distsq = squareSum(v2_i) - transposeTimesTimes(v2_i, m1_czech, v2_i);
    // if so, insert v2_i into v1 and adjust v1
    // and compute m1_czech new, increase lambda1
    if(lambda1 < dimensionality && distsq > deltasq) {
      adjust(v1t, v2_i, lambda1++);
      // TODO: make this incremental?
      // Rebuild the dissimilarity matrix with the first lambda1 dimensions
      // marked as strong (selection diagonal of ones).
      double[] e1_czech_d = new double[v1t.length];
      Arrays.fill(e1_czech_d, 0, lambda1, 1);
      m1_czech = transposeDiagonalTimes(v1t, e1_czech_d, v1t);
    }
  }
  // for all strong eigenvectors of rv1
  double[][] m2_czech = pca2.dissimilarityMatrix();
  for(int i = 0; i < v1t_strong.length; i++) {
    double[] v1_i = v1t_strong[i];
    // check, if distance of v1_i to the space of rv2 > delta
    // (i.e., if v1_i spans up a new dimension)
    double distsq = squareSum(v1_i) - transposeTimesTimes(v1_i, m2_czech, v1_i);
    // if so, insert v1_i into v2 and adjust v2
    // and compute m2_czech new , increase lambda2
    if(lambda2 < dimensionality && distsq > deltasq) {
      adjust(v2t, v1_i, lambda2++);
      // TODO: make this incremental?
      // NOTE(review): the diagonal is sized by v1t.length here (not
      // v2t.length) — presumably both have the same dimensionality; confirm.
      double[] e2_czech_d = new double[v1t.length];
      Arrays.fill(e2_czech_d, 0, lambda2, 1);
      m2_czech = transposeDiagonalTimes(v2t, e2_czech_d, v2t);
    }
  }
  // The larger of the two adjusted correlation dimensions is the distance.
  return Math.max(lambda1, lambda2);
}
java