code
stringlengths
73
34.1k
label
stringclasses
1 value
public static double pdf(double x, double mu, double sigma, double k) {
  // Probability density of the generalized extreme value (GEV) distribution
  // with location mu, scale sigma and shape k; density at +-infinity is 0.
  if(x == Double.POSITIVE_INFINITY || x == Double.NEGATIVE_INFINITY) {
    return 0.;
  }
  x = (x - mu) / sigma; // Standardize.
  if(k > 0 || k < 0) { // Shape k nonzero (a NaN k falls through to Gumbel).
    if(k * x > 1) {
      return 0.; // Outside the distribution's support.
    }
    double t = FastMath.log(1 - k * x);
    // Boundary values of t = log(1 - k*x) are handled explicitly; the
    // general case is the standard GEV density in terms of t.
    return t == Double.NEGATIVE_INFINITY ? 1. / sigma // support boundary, 1 - k*x == 0
        : t == Double.POSITIVE_INFINITY ? 0. //
            : FastMath.exp((1 - k) * t / k - FastMath.exp(t / k)) / sigma;
  }
  else { // Gumbel case (k == 0):
    return FastMath.exp(-x - FastMath.exp(-x)) / sigma;
  }
}
java
public static double cdf(double val, double mu, double sigma, double k) { final double x = (val - mu) / sigma; if(k > 0 || k < 0) { if(k * x > 1) { return k > 0 ? 1 : 0; } return FastMath.exp(-FastMath.exp(FastMath.log(1 - k * x) / k)); } else { // Gumbel case: return FastMath.exp(-FastMath.exp(-x)); } }
java
public static double quantile(double val, double mu, double sigma, double k) {
  // Quantile function (inverse CDF) of the generalized extreme value
  // distribution; val must be a probability in [0, 1], otherwise NaN.
  if(val < 0.0 || val > 1.0) {
    return Double.NaN;
  }
  if(k < 0) {
    // max clamps the result to the finite endpoint of the support,
    // mu + sigma / k.
    return mu + sigma * Math.max((1. - FastMath.pow(-FastMath.log(val), k)) / k, 1. / k);
  }
  else if(k > 0) {
    // min clamps to the support endpoint on the other side for k > 0.
    return mu + sigma * Math.min((1. - FastMath.pow(-FastMath.log(val), k)) / k, 1. / k);
  }
  else { // Gumbel
    return mu + sigma * FastMath.log(1. / FastMath.log(1. / val));
  }
}
java
public static double cdf(double x, double sigma) {
  // Rayleigh CDF with scale sigma: 1 - exp(-x^2 / (2 sigma^2)), 0 for x <= 0.
  if(x <= 0.) {
    return 0.;
  }
  final double q = x / sigma;
  return 1. - FastMath.exp(-.5 * q * q);
}
java
public static double quantile(double val, double sigma) {
  // Rayleigh quantile function; NaN for values outside [0, 1] (the negated
  // comparisons also reject a NaN input).
  if(!(val >= 0.) || !(val <= 1.)) {
    return Double.NaN;
  }
  return val == 0. ? 0. //
      : val == 1. ? Double.POSITIVE_INFINITY //
          : sigma * FastMath.sqrt(-2. * FastMath.log(1. - val));
}
java
public OutlierResult run(Database db, Relation<V> relation) {
  // Exact angle-based outlier detection (ABOD): score each object by the
  // variance of angles to all pairs of other objects; a small variance
  // indicates an outlier, hence the inverted score meta below.
  ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs());
  // Build a kernel matrix, to make O(n^3) slightly less bad.
  SimilarityQuery<V> sq = db.getSimilarityQuery(relation, kernelFunction);
  KernelMatrix kernelMatrix = new KernelMatrix(sq, relation, ids);
  WritableDoubleDataStore abodvalues = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmaxabod = new DoubleMinMax();
  MeanVariance s = new MeanVariance(); // Scratch accumulator, reused per point.
  DBIDArrayIter pA = ids.iter(), pB = ids.iter(), pC = ids.iter();
  for(; pA.valid(); pA.advance()) {
    final double abof = computeABOF(kernelMatrix, pA, pB, pC, s);
    minmaxabod.put(abof);
    abodvalues.putDouble(pA, abof);
  }
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Angle-Based Outlier Degree", "abod-outlier", abodvalues, relation.getDBIDs());
  OutlierScoreMeta scoreMeta = new InvertedOutlierScoreMeta(minmaxabod.getMin(), minmaxabod.getMax(), 0.0, Double.POSITIVE_INFINITY);
  return new OutlierResult(scoreMeta, scoreResult);
}
java
protected double computeABOF(KernelMatrix kernelMatrix, DBIDRef pA, DBIDArrayIter pB, DBIDArrayIter pC, MeanVariance s) {
  // Angle-based outlier factor of pA: weighted variance over all pairs
  // (B, C) of an angle-like term at A, computed purely from kernel
  // similarities (no explicit feature vectors needed).
  s.reset(); // Reused
  double simAA = kernelMatrix.getSimilarity(pA, pA);
  for(pB.seek(0); pB.valid(); pB.advance()) {
    if(DBIDUtil.equal(pB, pA)) {
      continue;
    }
    double simBB = kernelMatrix.getSimilarity(pB, pB);
    double simAB = kernelMatrix.getSimilarity(pA, pB);
    // Squared distance ||B - A||^2 in kernel feature space.
    double sqdAB = simAA + simBB - simAB - simAB;
    if(!(sqdAB > 0.)) {
      continue; // Skip zero/NaN distances; they would divide by zero below.
    }
    // Start at offset pB + 1, so each unordered pair {B, C} is used once.
    for(pC.seek(pB.getOffset() + 1); pC.valid(); pC.advance()) {
      if(DBIDUtil.equal(pC, pA)) {
        continue;
      }
      double simCC = kernelMatrix.getSimilarity(pC, pC);
      double simAC = kernelMatrix.getSimilarity(pA, pC);
      double sqdAC = simAA + simCC - simAC - simAC;
      if(!(sqdAC > 0.)) {
        continue;
      }
      // Exploit bilinearity of scalar product:
      // <B-A, C-A> = <B,C-A> - <A,C-A>
      // = <B,C> - <B,A> - <A,C> + <A,A>
      double simBC = kernelMatrix.getSimilarity(pB, pC);
      double numerator = simBC - simAB - simAC + simAA;
      double div = 1. / (sqdAB * sqdAC);
      // Weighted sample: value numerator*div, weight sqrt(div) = 1/(dAB*dAC).
      s.put(numerator * div, FastMath.sqrt(div));
    }
  }
  // Sample variance probably would be better here, but the ABOD publication
  // uses the naive variance.
  return s.getNaiveVariance();
}
java
public OutlierResult run(Database database, Relation<O> relation) {
  // Parallel kNN-weight outlier detection: wire up a processor pipeline
  // (kNN search -> kNN-weight score -> score storage + min/max statistics)
  // and execute it over all objects via the ParallelExecutor.
  DBIDs ids = relation.getDBIDs();
  WritableDoubleDataStore store = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_DB);
  DistanceQuery<O> distq = database.getDistanceQuery(relation, getDistanceFunction());
  // k + 1: the query point is contained in its own kNN result.
  KNNQuery<O> knnq = database.getKNNQuery(distq, k + 1);
  // Find kNN
  KNNProcessor<O> knnm = new KNNProcessor<>(k + 1, knnq);
  SharedObject<KNNList> knnv = new SharedObject<>();
  knnm.connectKNNOutput(knnv);
  // Extract outlier score
  KNNWeightProcessor kdistm = new KNNWeightProcessor(k + 1);
  SharedDouble kdistv = new SharedDouble();
  kdistm.connectKNNInput(knnv);
  kdistm.connectOutput(kdistv);
  // Store in output result
  WriteDoubleDataStoreProcessor storem = new WriteDoubleDataStoreProcessor(store);
  storem.connectInput(kdistv);
  // And gather statistics for metadata
  DoubleMinMaxProcessor mmm = new DoubleMinMaxProcessor();
  mmm.connectInput(kdistv);
  ParallelExecutor.run(ids, knnm, kdistm, storem, mmm);
  DoubleMinMax minmax = mmm.getMinMax();
  DoubleRelation scoreres = new MaterializedDoubleRelation("kNN weight Outlier Score", "knnw-outlier", store, ids);
  OutlierScoreMeta meta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax(), 0., Double.POSITIVE_INFINITY, 0.);
  return new OutlierResult(meta, scoreres);
}
java
public Clustering<?> run(final Database database, final Relation<DiscreteUncertainObject> relation) {
  // UK-Means: k-means over uncertain objects, Lloyd-style iteration until
  // the assignment is stable or maxiter iterations were performed.
  if(relation.size() <= 0) {
    return new Clustering<>("Uk-Means Clustering", "ukmeans-clustering");
  }
  // Choose initial means randomly
  DBIDs sampleids = DBIDUtil.randomSample(relation.getDBIDs(), k, rnd);
  List<double[]> means = new ArrayList<>(k);
  for(DBIDIter iter = sampleids.iter(); iter.valid(); iter.advance()) {
    // Seed with the center of mass of each sampled uncertain object.
    means.add(ArrayLikeUtil.toPrimitiveDoubleArray(relation.get(iter).getCenterOfMass()));
  }
  // Setup cluster assignment store
  List<ModifiableDBIDs> clusters = new ArrayList<>();
  for(int i = 0; i < k; i++) {
    // Pre-size generously (2n/k per cluster) to limit rehashing.
    clusters.add(DBIDUtil.newHashSet((int) (relation.size() * 2. / k)));
  }
  // -1 marks "not assigned yet".
  WritableIntegerDataStore assignment = DataStoreUtil.makeIntegerStorage(relation.getDBIDs(), DataStoreFactory.HINT_TEMP | DataStoreFactory.HINT_HOT, -1);
  double[] varsum = new double[k]; // Per-cluster variance contribution.
  IndefiniteProgress prog = LOG.isVerbose() ? new IndefiniteProgress("UK-Means iteration", LOG) : null;
  DoubleStatistic varstat = LOG.isStatistics() ? new DoubleStatistic(this.getClass().getName() + ".variance-sum") : null;
  int iteration = 0;
  for(; maxiter <= 0 || iteration < maxiter; iteration++) { // maxiter <= 0 means no limit.
    LOG.incrementProcessed(prog);
    boolean changed = assignToNearestCluster(relation, means, clusters, assignment, varsum);
    logVarstat(varstat, varsum);
    // Stop if no cluster assignment changed.
    if(!changed) {
      break;
    }
    // Recompute means.
    means = means(clusters, means, relation);
  }
  LOG.setCompleted(prog);
  if(LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(KEY + ".iterations", iteration));
  }
  // Wrap result
  Clustering<KMeansModel> result = new Clustering<>("Uk-Means Clustering", "ukmeans-clustering");
  for(int i = 0; i < clusters.size(); i++) {
    DBIDs ids = clusters.get(i);
    if(ids.isEmpty()) {
      continue; // Empty clusters are omitted from the output.
    }
    result.addToplevelCluster(new Cluster<>(ids, new KMeansModel(means.get(i), varsum[i])));
  }
  return result;
}
java
protected boolean updateAssignment(DBIDIter iditer, List<? extends ModifiableDBIDs> clusters, WritableIntegerDataStore assignment, int newA) {
  // Move the current object into cluster newA; returns true iff the
  // assignment actually changed.
  final int prev = assignment.intValue(iditer);
  if(prev == newA) {
    return false; // Already assigned to this cluster.
  }
  // Register with the new cluster, then update the assignment map.
  clusters.get(newA).add(iditer);
  assignment.putInt(iditer, newA);
  // Remove from the old cluster, unless the object was unassigned (-1).
  if(prev >= 0) {
    clusters.get(prev).remove(iditer);
  }
  return true;
}
java
protected double getExpectedRepDistance(NumberVector rep, DiscreteUncertainObject uo) {
  // Expected (average) squared Euclidean distance of the representative
  // to the samples of the discrete uncertain object.
  // The original kept a separate "counter" that merely duplicated the loop
  // count; use the sample count directly instead.
  final SquaredEuclideanDistanceFunction euclidean = SquaredEuclideanDistanceFunction.STATIC;
  final int numSamples = uo.getNumberSamples();
  double sum = 0.0;
  for(int i = 0; i < numSamples; i++) {
    sum += euclidean.distance(rep, uo.getSample(i));
  }
  // As before, an object without samples yields NaN (0/0).
  return sum / numSamples;
}
java
protected void logVarstat(DoubleStatistic varstat, double[] varsum) {
  // Log the summed per-cluster variances; varstat is null when statistics
  // logging is disabled, in which case this is a no-op.
  if(varstat == null) {
    return;
  }
  getLogger().statistics(varstat.setDouble(sum(varsum)));
}
java
public void save() throws FileNotFoundException {
  // Persist all stored settings entries; each entry starts with its title,
  // followed by its parameter lines, terminated by a blank line.
  // try-with-resources: the original leaked the stream if writing threw.
  try (PrintStream p = new PrintStream(file)) {
    p.println(COMMENT_PREFIX + "Saved ELKI settings. First line is title, remaining lines are parameters.");
    for (Pair<String, ArrayList<String>> settings : store) {
      p.println(settings.first);
      for (String str : settings.second) {
        p.println(str);
      }
      p.println(); // Blank line separates entries.
    }
  }
}
java
public void load() throws FileNotFoundException, IOException { BufferedReader is = new BufferedReader(new InputStreamReader(new FileInputStream(file))); ArrayList<String> buf = new ArrayList<>(); while (is.ready()) { String line = is.readLine(); // skip comments if (line.startsWith(COMMENT_PREFIX)) { continue; } if (line.length() == 0 && !buf.isEmpty()) { String title = buf.remove(0); store.add(new Pair<>(title, buf)); buf = new ArrayList<>(); } else { buf.add(line); } } if (!buf.isEmpty()) { String title = buf.remove(0); store.add(new Pair<>(title, buf)); buf = new ArrayList<>(); } is.close(); }
java
public void remove(String key) {
  // Remove the first stored entry whose title equals the given key.
  for (Iterator<Pair<String, ArrayList<String>>> it = store.iterator(); it.hasNext();) {
    if (key.equals(it.next().first)) {
      it.remove();
      return; // Only the first match is removed.
    }
  }
}
java
public ArrayList<String> get(String key) {
  // Return the parameter lines of the first entry titled key, or null when
  // no entry matches.
  for (Pair<String, ArrayList<String>> pair : store) {
    if (key.equals(pair.first)) {
      return pair.second;
    }
  }
  return null;
}
java
public Clustering<Model> run(Database database, Relation<V> relation) {
  // ORCLUS: arbitrarily-oriented projected clustering. Start with k_c > k
  // seeds in full dimensionality, then alternate assignment and merging
  // while geometrically shrinking both the cluster count (factor alpha) and
  // the subspace dimensionality (factor beta) until k clusters of
  // dimensionality l remain.
  // current dimensionality associated with each seed
  int dim_c = RelationUtil.dimensionality(relation);
  if(dim_c < l) {
    throw new IllegalStateException("Dimensionality of data < parameter l! " + "(" + dim_c + " < " + l + ")");
  }
  // current number of seeds
  int k_c = Math.min(relation.size(), k_i * k);
  // pick k0 > k points from the database
  List<ORCLUSCluster> clusters = initialSeeds(relation, k_c);
  // beta is chosen so the dimensionality reaches l exactly when the number
  // of clusters reaches k.
  double beta = FastMath.exp(-FastMath.log(dim_c / (double) l) * FastMath.log(1 / alpha) / FastMath.log(k_c / (double) k));
  IndefiniteProgress cprogress = LOG.isVerbose() ? new IndefiniteProgress("Current number of clusters:", LOG) : null;
  while(k_c > k) {
    // find partitioning induced by the seeds of the clusters
    assign(relation, clusters);
    // determine current subspace associated with each cluster
    for(ORCLUSCluster cluster : clusters) {
      if(cluster.objectIDs.size() > 0) {
        cluster.basis = findBasis(relation, cluster, dim_c);
      }
    }
    // reduce number of seeds and dimensionality associated with
    // each seed
    k_c = (int) Math.max(k, k_c * alpha);
    dim_c = (int) Math.max(l, dim_c * beta);
    merge(relation, clusters, k_c, dim_c, cprogress);
    if(cprogress != null) {
      cprogress.setProcessed(clusters.size(), LOG);
    }
  }
  // Final assignment using the merged clusters.
  assign(relation, clusters);
  LOG.setCompleted(cprogress);
  // get the result
  Clustering<Model> r = new Clustering<>("ORCLUS clustering", "orclus-clustering");
  for(ORCLUSCluster c : clusters) {
    r.addToplevelCluster(new Cluster<Model>(c.objectIDs, ClusterModel.CLUSTER));
  }
  return r;
}
java
private List<ORCLUSCluster> initialSeeds(Relation<V> database, int k) {
  // Draw k random database objects and wrap each one as a singleton seed
  // cluster.
  DBIDs sample = DBIDUtil.randomSample(database.getDBIDs(), k, rnd);
  List<ORCLUSCluster> seeds = new ArrayList<>(k);
  for(DBIDIter it = sample.iter(); it.valid(); it.advance()) {
    seeds.add(new ORCLUSCluster(database.get(it).toArray(), it));
  }
  return seeds;
}
java
private void assign(Relation<V> database, List<ORCLUSCluster> clusters) {
  // Assign each object to the cluster with the nearest projected centroid,
  // measuring distance within each cluster's own subspace, then recompute
  // the cluster centroids.
  NumberVectorDistanceFunction<? super V> distFunc = SquaredEuclideanDistanceFunction.STATIC;
  // clear the current clusters
  for(ORCLUSCluster cluster : clusters) {
    cluster.objectIDs.clear();
  }
  // projected centroids of the clusters
  List<NumberVector> projectedCentroids = new ArrayList<>(clusters.size());
  for(ORCLUSCluster c : clusters) {
    projectedCentroids.add(DoubleVector.wrap(project(c, c.centroid)));
  }
  // for each data point o do
  for(DBIDIter it = database.iterDBIDs(); it.valid(); it.advance()) {
    double[] o = database.get(it).toArray();
    double minDist = Double.POSITIVE_INFINITY;
    ORCLUSCluster minCluster = null;
    // determine projected distance between o and cluster
    for(int i = 0; i < clusters.size(); i++) {
      ORCLUSCluster c = clusters.get(i);
      // Project the object into cluster i's subspace before comparing.
      NumberVector o_proj = DoubleVector.wrap(project(c, o));
      double dist = distFunc.distance(o_proj, projectedCentroids.get(i));
      if(dist < minDist) {
        minDist = dist;
        minCluster = c;
      }
    }
    // add p to the cluster with the least value of projected distance
    minCluster.objectIDs.add(it);
  }
  // recompute the seed in each clusters
  for(ORCLUSCluster cluster : clusters) {
    if(cluster.objectIDs.size() > 0) {
      cluster.centroid = Centroid.make(database, cluster.objectIDs).toArray();
    }
  }
}
java
private void merge(Relation<V> relation, List<ORCLUSCluster> clusters, int k_new, int d_new, IndefiniteProgress cprogress) {
  // Greedily merge the pair of clusters with the smallest projected energy
  // (in the merged subspace of dimensionality d_new) until only k_new
  // clusters remain.
  ArrayList<ProjectedEnergy> projectedEnergies = new ArrayList<>((clusters.size() * (clusters.size() - 1)) >>> 1);
  for(int i = 0; i < clusters.size(); i++) {
    for(int j = i + 1; j < clusters.size(); j++) {
      // projected energy of c_ij in subspace e_ij
      ORCLUSCluster c_i = clusters.get(i);
      ORCLUSCluster c_j = clusters.get(j);
      projectedEnergies.add(projectedEnergy(relation, c_i, c_j, i, j, d_new));
    }
  }
  while(clusters.size() > k_new) {
    if(cprogress != null) {
      cprogress.setProcessed(clusters.size(), LOG);
    }
    // find the smallest value of r_ij
    ProjectedEnergy minPE = Collections.min(projectedEnergies);
    // renumber the clusters by replacing cluster c_i with cluster c_ij
    // and discarding cluster c_j
    for(int c = 0; c < clusters.size(); c++) {
      if(c == minPE.i) {
        // Replace position i with the merged cluster.
        clusters.remove(c);
        clusters.add(c, minPE.cluster);
      }
      if(c == minPE.j) {
        clusters.remove(c);
      }
    }
    // remove obsolete projected energies and renumber the others ...
    int i = minPE.i, j = minPE.j;
    for(Iterator<ProjectedEnergy> it = projectedEnergies.iterator(); it.hasNext();) {
      ProjectedEnergy pe = it.next();
      if(pe.i == i || pe.i == j || pe.j == i || pe.j == j) {
        // Involves a merged cluster - recomputed below.
        it.remove();
      }
      else {
        // Indices above j shift down by one after removing cluster j.
        if(pe.i > j) {
          pe.i -= 1;
        }
        if(pe.j > j) {
          pe.j -= 1;
        }
      }
    }
    // ... and recompute them
    ORCLUSCluster c_ij = minPE.cluster;
    for(int c = 0; c < clusters.size(); c++) {
      // Keep the pair indices ordered (smaller index first).
      if(c < i) {
        projectedEnergies.add(projectedEnergy(relation, clusters.get(c), c_ij, c, i, d_new));
      }
      else if(c > i) {
        projectedEnergies.add(projectedEnergy(relation, clusters.get(c), c_ij, i, c, d_new));
      }
    }
  }
}
java
private ProjectedEnergy projectedEnergy(Relation<V> relation, ORCLUSCluster c_i, ORCLUSCluster c_j, int i, int j, int dim) { NumberVectorDistanceFunction<? super V> distFunc = SquaredEuclideanDistanceFunction.STATIC; // union of cluster c_i and c_j ORCLUSCluster c_ij = union(relation, c_i, c_j, dim); double sum = 0.; NumberVector c_proj = DoubleVector.wrap(project(c_ij, c_ij.centroid)); for(DBIDIter iter = c_ij.objectIDs.iter(); iter.valid(); iter.advance()) { NumberVector o_proj = DoubleVector.wrap(project(c_ij, relation.get(iter).toArray())); sum += distFunc.distance(o_proj, c_proj); } sum /= c_ij.objectIDs.size(); return new ProjectedEnergy(i, j, c_ij, sum); }
java
private ORCLUSCluster union(Relation<V> relation, ORCLUSCluster c1, ORCLUSCluster c2, int dim) {
  // Build the merged cluster of c1 and c2, including centroid and subspace
  // basis of the requested dimensionality.
  ORCLUSCluster c = new ORCLUSCluster();
  // Merge the member sets via a hash set (deduplicates), store as an array.
  ModifiableDBIDs members = DBIDUtil.newHashSet(c1.objectIDs);
  members.addDBIDs(c2.objectIDs);
  c.objectIDs = DBIDUtil.newArray(members);
  if(c.objectIDs.size() > 0) {
    c.centroid = Centroid.make(relation, c.objectIDs).getArrayRef();
    c.basis = findBasis(relation, c, dim);
  }
  else {
    // Both clusters empty: average the two centroids and fall back to an
    // axis-parallel basis.
    c.centroid = timesEquals(plusEquals(c1.centroid, c2.centroid), .5);
    c.basis = identity(dim, c.centroid.length);
  }
  return c;
}
java
private static void initializeNNCache(double[] scratch, double[] bestd, int[] besti) {
  // Prime the nearest-neighbor cache from the packed lower-triangular
  // distance matrix in scratch: afterwards bestd[i] holds the smallest
  // distance seen from i, and besti[i] the corresponding partner (-1 if
  // none, e.g. for a single point).
  final int size = bestd.length;
  Arrays.fill(bestd, Double.POSITIVE_INFINITY);
  Arrays.fill(besti, -1);
  int p = 0; // Linear read position in the packed triangle.
  for(int x = 0; x < size; x++) {
    assert (p == MatrixParadigm.triangleSize(x));
    double rowbest = Double.POSITIVE_INFINITY;
    int rowidx = -1;
    for(int y = 0; y < x; y++, p++) {
      final double v = scratch[p];
      // Column minimum for y (rows below may still improve it later).
      if(v < bestd[y]) {
        bestd[y] = v;
        besti[y] = x;
      }
      // Row minimum for x.
      if(v < rowbest) {
        rowbest = v;
        rowidx = y;
      }
    }
    bestd[x] = rowbest;
    besti[x] = rowidx;
  }
}
java
protected int findMerge(int size, MatrixParadigm mat, double[] bestd, int[] besti, PointerHierarchyRepresentationBuilder builder) {
  // Locate the globally closest pair of active clusters via the
  // nearest-neighbor cache, perform the merge, and return the index of the
  // cluster that was deactivated (x).
  double mindist = Double.POSITIVE_INFINITY;
  int x = -1, y = -1;
  // Find minimum:
  for(int cx = 0; cx < size; cx++) {
    // Skip if object has already joined a cluster:
    final int cy = besti[cx];
    if(cy < 0) {
      continue;
    }
    final double dist = bestd[cx];
    if(dist <= mindist) { // Prefer later on ==, to truncate more often.
      mindist = dist;
      x = cx;
      y = cy;
    }
  }
  assert (x >= 0 && y >= 0);
  assert (y < x); // We could swap otherwise, but this shouldn't arise.
  merge(size, mat, bestd, besti, builder, mindist, x, y);
  return x;
}
java
protected void merge(int size, MatrixParadigm mat, double[] bestd, int[] besti, PointerHierarchyRepresentationBuilder builder, double mindist, int x, int y) {
  // Merge clusters x and y (with y < x): record the merge in the hierarchy
  // builder, keep y as the surviving cluster, deactivate x, and repair the
  // distance matrix and nearest-neighbor cache.
  // Avoid allocating memory, by reusing existing iterators:
  final DBIDArrayIter ix = mat.ix.seek(x), iy = mat.iy.seek(y);
  if(LOG.isDebuggingFine()) {
    LOG.debugFine("Merging: " + DBIDUtil.toString(ix) + " -> " + DBIDUtil.toString(iy) + " " + mindist);
  }
  // Perform merge in data structure: x -> y
  assert (y < x);
  // Since y < x, prefer keeping y, dropping x.
  builder.add(ix, linkage.restore(mindist, getDistanceFunction().isSquared()), iy);
  // Update cluster size for y:
  final int sizex = builder.getSize(ix), sizey = builder.getSize(iy);
  builder.setSize(iy, sizex + sizey);
  // Deactivate x in cache:
  besti[x] = -1;
  // Note: this changes iy.
  updateMatrix(size, mat.matrix, iy, bestd, besti, builder, mindist, x, y, sizex, sizey);
  // If y's cached nearest neighbor was x, that entry is stale - recompute.
  if(besti[y] == x) {
    findBest(size, mat.matrix, bestd, besti, y);
  }
}
java
private void updateCache(int size, double[] scratch, double[] bestd, int[] besti, int x, int y, int j, double d) { // New best if(d <= bestd[j]) { bestd[j] = d; besti[j] = y; return; } // Needs slow update. if(besti[j] == x || besti[j] == y) { findBest(size, scratch, bestd, besti, j); } }
java
public VisualizerContext newContext(ResultHierarchy hier, Result start) {
  // Attach a random sampling result to each large relation (skipped when
  // sampling is disabled via samplesize == 0, or when a sample already
  // exists), then construct the visualizer context.
  Collection<Relation<?>> rels = ResultUtil.filterResults(hier, Relation.class);
  if(samplesize != 0) { // Hoisted: the check is loop-invariant.
    for(Relation<?> rel : rels) {
      if(!ResultUtil.filterResults(hier, rel, SamplingResult.class).isEmpty()) {
        continue; // Already sampled.
      }
      if(rel.size() > samplesize) {
        SamplingResult sample = new SamplingResult(rel);
        sample.setSample(DBIDUtil.randomSample(sample.getSample(), samplesize, rnd));
        ResultUtil.addChildResult(rel, sample);
      }
    }
  }
  return new VisualizerContext(hier, start, stylelib, factories);
}
java
public static String getTitle(Database db, Result result) {
  // Compose a human-readable title of the form
  // "Algorithm using Distance on Dataset" from the tracked parameter
  // settings attached to the result; each part is optional, and null is
  // returned when no part could be determined.
  // NOTE(review): the db parameter is unused in this method.
  List<TrackedParameter> settings = new ArrayList<>();
  for(SettingsResult sr : SettingsResult.getSettingsResults(result)) {
    settings.addAll(sr.getSettings());
  }
  String algorithm = null;
  String distance = null;
  String dataset = null;
  for(TrackedParameter setting : settings) {
    Parameter<?> param = setting.getParameter();
    OptionID option = param.getOptionID();
    String value = param.isDefined() ? param.getValueAsString() : null;
    if(option.equals(AlgorithmStep.Parameterizer.ALGORITHM_ID)) {
      algorithm = value;
    }
    if(option.equals(DistanceBasedAlgorithm.DISTANCE_FUNCTION_ID)) {
      distance = value;
    }
    if(option.equals(FileBasedDatabaseConnection.Parameterizer.INPUT_ID)) {
      dataset = value;
    }
  }
  StringBuilder buf = new StringBuilder();
  if(algorithm != null) {
    // Only keep the first algorithm of a comma-separated chain.
    buf.append(shortenClassname(algorithm.split(",")[0], '.'));
  }
  if(distance != null) {
    if(buf.length() > 0) {
      buf.append(" using ");
    }
    buf.append(shortenClassname(distance, '.'));
  }
  if(dataset != null) {
    if(buf.length() > 0) {
      buf.append(" on ");
    }
    // Use the plain file name, stripping the directory path.
    buf.append(shortenClassname(dataset, File.separatorChar));
  }
  if(buf.length() > 0) {
    return buf.toString();
  }
  return null;
}
java
protected static String shortenClassname(String nam, char c) {
  // Strip everything up to and including the last occurrence of the
  // separator c; the input is returned unchanged if c does not occur.
  final int cut = nam.lastIndexOf(c);
  return cut >= 0 ? nam.substring(cut + 1) : nam;
}
java
private static Class<?> getRestrictionClass(OptionID oid, final Parameter<?> firstopt, Map<OptionID, List<Pair<Parameter<?>, Class<?>>>> byopt) { Class<?> superclass = getRestrictionClass(firstopt); // Also look for more general restrictions: for(Pair<Parameter<?>, Class<?>> clinst : byopt.get(oid)) { if(clinst.getFirst() instanceof ClassParameter) { ClassParameter<?> cls = (ClassParameter<?>) clinst.getFirst(); if(!cls.getRestrictionClass().equals(superclass) && cls.getRestrictionClass().isAssignableFrom(superclass)) { superclass = cls.getRestrictionClass(); } } if(clinst.getFirst() instanceof ClassListParameter) { ClassListParameter<?> cls = (ClassListParameter<?>) clinst.getFirst(); if(!cls.getRestrictionClass().equals(superclass) && cls.getRestrictionClass().isAssignableFrom(superclass)) { superclass = cls.getRestrictionClass(); } } } return superclass; }
java
private static <T> ArrayList<T> sorted(Collection<T> cls, Comparator<? super T> c) {
  // Copy the collection into a fresh list and sort it by the comparator;
  // the input collection is left untouched.
  final ArrayList<T> copy = new ArrayList<>(cls);
  copy.sort(c);
  return copy;
}
java
protected void handleHoverEvent(Event evt) {
  // The tooltip is stored as the next sibling of the event target element;
  // forward the event type to toggle that tooltip's visibility.
  final Object target = evt.getTarget();
  if(!(target instanceof Element)) {
    LoggingUtil.warning("Got event for non-Element?!?");
    return;
  }
  Node sibling = ((Element) target).getNextSibling();
  if(sibling instanceof Element) {
    toggleTooltip((Element) sibling, evt.getType());
  }
  else {
    LoggingUtil.warning("Tooltip sibling not found.");
  }
}
java
protected void toggleTooltip(Element elem, String type) {
  // Drive the tooltip visibility state machine via the element's CSS class:
  // mouseover shows hidden tooltips, mouseout hides transient ones, click
  // pins a visible/hidden tooltip sticky (or unpins a sticky one).
  final String state = elem.getAttribute(SVGConstants.SVG_CLASS_ATTRIBUTE);
  if(SVGConstants.SVG_MOUSEOVER_EVENT_TYPE.equals(type)) {
    if(TOOLTIP_HIDDEN.equals(state)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_VISIBLE);
    }
    return;
  }
  if(SVGConstants.SVG_MOUSEOUT_EVENT_TYPE.equals(type)) {
    if(TOOLTIP_VISIBLE.equals(state)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_HIDDEN);
    }
    return;
  }
  if(SVGConstants.SVG_CLICK_EVENT_TYPE.equals(type)) {
    // The two branches test the state read *before* any modification, so
    // at most one of them fires (else-if is equivalent to the original
    // pair of independent ifs).
    if(TOOLTIP_STICKY.equals(state)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_HIDDEN);
    }
    else if(TOOLTIP_HIDDEN.equals(state) || TOOLTIP_VISIBLE.equals(state)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_STICKY);
    }
  }
}
java
@Override
public DoubleDBIDList reverseKNNQuery(DBIDRef id, int k) {
  // Approximate reverse-kNN query on the MkApp-Tree: traverse best-first,
  // pruning subtrees whose minimum distance to the query exceeds the
  // polynomially approximated k-distance stored with each entry; leaf
  // entries whose distance is within their approximated k-distance are
  // reported as results.
  ModifiableDoubleDBIDList result = DBIDUtil.newDistanceDBIDList();
  final Heap<MTreeSearchCandidate> pq = new UpdatableHeap<>();
  // push root
  pq.add(new MTreeSearchCandidate(0., getRootID(), null, Double.NaN));
  // search in tree
  while(!pq.isEmpty()) {
    MTreeSearchCandidate pqNode = pq.poll();
    // FIXME: cache the distance to the routing object in the queue node!
    MkAppTreeNode<O> node = getNode(pqNode.nodeID);
    // directory node
    if(!node.isLeaf()) {
      for(int i = 0; i < node.getNumEntries(); i++) {
        MkAppEntry entry = node.getEntry(i);
        double distance = distance(entry.getRoutingObjectID(), id);
        // Lower bound on the distance to any object in this subtree.
        double minDist = (entry.getCoveringRadius() > distance) ? 0. : distance - entry.getCoveringRadius();
        // Approximated k-distance (stored in log space if settings.log).
        double approxValue = settings.log ? FastMath.exp(entry.approximatedValueAt(k)) : entry.approximatedValueAt(k);
        if(approxValue < 0) {
          approxValue = 0; // Clamp: a distance cannot be negative.
        }
        if(minDist <= approxValue) {
          pq.add(new MTreeSearchCandidate(minDist, getPageID(entry), entry.getRoutingObjectID(), Double.NaN));
        }
      }
    }
    // data node
    else {
      for(int i = 0; i < node.getNumEntries(); i++) {
        MkAppLeafEntry entry = (MkAppLeafEntry) node.getEntry(i);
        double distance = distance(entry.getRoutingObjectID(), id);
        double approxValue = settings.log ? FastMath.exp(entry.approximatedValueAt(k)) : entry.approximatedValueAt(k);
        if(approxValue < 0) {
          approxValue = 0;
        }
        if(distance <= approxValue) {
          result.add(distance, entry.getRoutingObjectID());
        }
      }
    }
  }
  return result;
}
java
private void leafEntryIDs(MkAppTreeNode<O> node, ModifiableDBIDs result) {
  // Recursively collect the DBIDs of all leaf entries in this subtree.
  final int n = node.getNumEntries();
  if(node.isLeaf()) {
    for(int i = 0; i < n; i++) {
      result.add(((LeafEntry) node.getEntry(i)).getDBID());
    }
    return;
  }
  // Directory node: descend into every child.
  for(int i = 0; i < n; i++) {
    leafEntryIDs(getNode(node.getEntry(i)), result);
  }
}
java
private PolynomialApproximation approximateKnnDistances(double[] knnDistances) {
  // Fit a polynomial of degree settings.p to the k-distance curve,
  // optionally in log-log space, yielding a compact approximation for the
  // tree entries.
  StringBuilder msg = new StringBuilder();
  // count the zero distances (necessary of log-log space is used)
  int k_0 = 0;
  if(settings.log) {
    for(int i = 0; i < settings.kmax; i++) {
      double dist = knnDistances[i];
      if(dist == 0) {
        k_0++;
      }
      else {
        break; // Distances are only skipped while they are leading zeros.
      }
    }
  }
  double[] x = new double[settings.kmax - k_0];
  double[] y = new double[settings.kmax - k_0];
  for(int k = 0; k < settings.kmax - k_0; k++) {
    if(settings.log) {
      // NOTE(review): for k == 0 with k_0 == 0 this computes log(0) =
      // -infinity; confirm whether an offset (k + k_0 + 1) was intended.
      x[k] = FastMath.log(k + k_0);
      y[k] = FastMath.log(knnDistances[k + k_0]);
    }
    else {
      x[k] = k + k_0;
      y[k] = knnDistances[k + k_0];
    }
  }
  // Least-squares polynomial fit of y over x.
  PolynomialRegression regression = new PolynomialRegression(y, x, settings.p);
  PolynomialApproximation approximation = new PolynomialApproximation(regression.getEstimatedCoefficients());
  if(LOG.isDebugging()) {
    msg.append("approximation ").append(approximation);
    LOG.debugFine(msg.toString());
  }
  return approximation;
}
java
protected final int isLeft(double[] a, double[] b, double[] o) { final double cross = getRX(a, o) * getRY(b, o) - getRY(a, o) * getRX(b, o); if(cross == 0) { // Compare manhattan distances - same angle! final double dista = Math.abs(getRX(a, o)) + Math.abs(getRY(a, o)); final double distb = Math.abs(getRX(b, o)) + Math.abs(getRY(b, o)); return Double.compare(dista, distb); } return Double.compare(cross, 0); }
java
private double mdist(double[] a, double[] b) {
  // Manhattan (L1) distance between two 2D points.
  final double dx = Math.abs(a[0] - b[0]);
  final double dy = Math.abs(a[1] - b[1]);
  return dx + dy;
}
java
private boolean isConvex(double[] a, double[] b, double[] c) { // We're using factor to improve numerical contrast for small polygons. double area = (b[0] - a[0]) * factor * (c[1] - a[1]) - (c[0] - a[0]) * factor * (b[1] - a[1]); return (-1e-13 < area && area < 1e-13) ? (mdist(b, c) > mdist(a, b) + mdist(a, c)) : (area < 0); }
java
private void grahamScan() {
  // Graham scan sweep over the point sequence: maintain a stack of hull
  // candidates and pop while the latest turn is not convex; the surviving
  // stack replaces the point list.
  if(points.size() < 3) {
    return; // Fewer than 3 points are trivially their own hull.
  }
  Iterator<double[]> iter = points.iterator();
  Stack<double[]> stack = new Stack<>();
  // Start with the first two points on the stack
  final double[] first = iter.next();
  stack.add(first);
  // Skip over duplicates of the first point to find a distinct second one.
  while(iter.hasNext()) {
    double[] n = iter.next();
    if(mdist(first, n) > 0) {
      stack.add(n);
      break;
    }
  }
  while(iter.hasNext()) {
    double[] next = iter.next();
    double[] curr = stack.pop();
    double[] prev = stack.peek();
    // Backtrack while next duplicates curr or the turn is not convex.
    while((stack.size() > 1) && (mdist(curr, next) == 0 || !isConvex(prev, curr, next))) {
      curr = stack.pop();
      prev = stack.peek();
    }
    stack.add(curr);
    stack.add(next);
  }
  points = stack; // The remaining stack is the hull.
}
java
public Polygon getHull() {
  // Lazily (re)compute the convex hull when the cached state is stale,
  // then expose it as a polygon together with the bounding box.
  if(!ok) {
    computeConvexHull();
  }
  return new Polygon(points, minmaxX.getMin(), minmaxX.getMax(), minmaxY.getMin(), minmaxY.getMax());
}
java
private static double coverRadius(double[][] matrix, int[] idx, int i) {
  // Maximum distance from point i to any other point that belongs to the
  // same partition (idx holds each point's partition id).
  final int part = idx[i];
  final double[] dists = matrix[i];
  double radius = 0;
  for(int j = 0; j < dists.length; j++) {
    if(j != i && idx[j] == part && dists[j] > radius) {
      radius = dists[j];
    }
  }
  return radius;
}
java
private static int[] mstPartition(double[][] matrix) {
  // Partition the data by cutting one edge of the minimum spanning tree:
  // for every candidate edge at least as long as the median edge length,
  // compute the resulting partition and keep the cut that maximizes the
  // size of the smallest component (ties broken by the longer cut edge).
  final int n = matrix.length;
  int[] edges = PrimsMinimumSpanningTree.processDense(matrix);
  // Note: Prims does *not* yield edges sorted by edge length!
  double meanlength = thresholdLength(matrix, edges);
  int[] idx = new int[n], best = new int[n], sizes = new int[n];
  int bestsize = -1;
  double bestlen = 0;
  for(int omit = n - 2; omit > 0; --omit) {
    final double len = edgelength(matrix, edges, omit);
    if(len < meanlength) {
      continue; // Only cut comparatively long edges.
    }
    omitEdge(edges, idx, sizes, omit);
    // Finalize array: flatten the union-find structure and determine the
    // size of the smallest component.
    int minsize = n;
    for(int i = 0; i < n; i++) {
      int j = idx[i] = follow(i, idx);
      if(j == i && sizes[i] < minsize) { // j == i: i is a component root.
        minsize = sizes[i];
      }
    }
    if(minsize > bestsize || (minsize == bestsize && len > bestlen)) {
      bestsize = minsize;
      bestlen = len;
      System.arraycopy(idx, 0, best, 0, n);
    }
  }
  return best;
}
java
private static double thresholdLength(double[][] matrix, int[] edges) { double[] lengths = new double[edges.length >> 1]; for(int i = 0, e = edges.length - 1; i < e; i += 2) { lengths[i >> 1] = matrix[edges[i]][edges[i + 1]]; } Arrays.sort(lengths); final int pos = (lengths.length >> 1); // 50% return lengths[pos]; }
java
private static double edgelength(double[][] matrix, int[] edges, int i) {
  // Length of the i-th spanning tree edge; edges stores flat (from, to)
  // index pairs, so the i-th edge occupies positions 2i and 2i + 1.
  final int p = i << 1;
  return matrix[edges[p]][edges[p + 1]];
}
java
private static void omitEdge(int[] edges, int[] idx, int[] sizes, int omit) {
  // Rebuild a union-find partition from all MST edges except the one at
  // position omit, tracking component sizes while unioning.
  for(int i = 0; i < idx.length; i++) {
    idx[i] = i; // Initially, every node is its own component.
  }
  Arrays.fill(sizes, 1);
  for(int i = 0, j = 0, e = edges.length - 1; j < e; i++, j += 2) {
    if(i == omit) {
      continue; // This is the edge being cut.
    }
    int ea = edges[j + 1], eb = edges[j];
    if(eb < ea) { // Swap
      int tmp = eb;
      eb = ea;
      ea = tmp;
    }
    final int pa = follow(ea, idx), pb = follow(eb, idx);
    assert (pa != pb) : "Must be disjoint - MST inconsistent.";
    // Union: attach pb's component below pa's root, accumulating the size.
    sizes[idx[pa]] += sizes[idx[pb]];
    idx[pb] = idx[pa];
  }
}
java
private static int follow(int i, int[] partitions) {
  // Find the representative (root) of i in the union-find forest; a root
  // points to itself. While walking up, each visited node is re-pointed to
  // its grandparent (path compression), keeping trees shallow.
  int next = partitions[i], tmp;
  while(i != next) {
    tmp = next;
    next = partitions[i] = partitions[next];
    i = tmp;
  }
  return i;
}
java
private static void computeCentroid(double[] centroid, Relation<? extends NumberVector> relation, DBIDs ids) {
  // Overwrite centroid with the mean vector of the given objects.
  final int dim = centroid.length;
  Arrays.fill(centroid, 0);
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    final NumberVector v = relation.get(it);
    for(int d = 0; d < dim; d++) {
      centroid[d] += v.doubleValue(d);
    }
  }
  timesEquals(centroid, 1. / ids.size());
}
java
public static <O> DistanceQuery<O> getDistanceQuery(Database database, DistanceFunction<? super O> distanceFunction, Object... hints) {
  // Resolve a relation matching the distance function's input type
  // restriction, then obtain the distance query for it.
  final Relation<O> relation = database.getRelation(distanceFunction.getInputTypeRestriction(), hints);
  return database.getDistanceQuery(relation, distanceFunction, hints);
}
java
public static <O> SimilarityQuery<O> getSimilarityQuery(Database database, SimilarityFunction<? super O> similarityFunction, Object... hints) {
  // Resolve a relation matching the similarity function's input type
  // restriction, then obtain the similarity query for it.
  final Relation<O> relation = database.getRelation(similarityFunction.getInputTypeRestriction(), hints);
  return database.getSimilarityQuery(relation, similarityFunction, hints);
}
java
public static <O> RKNNQuery<O> getRKNNQuery(Relation<O> relation, DistanceFunction<? super O> distanceFunction, Object... hints) {
  // Build the distance query first, then request the reverse-kNN query
  // based on it, forwarding any optimization hints.
  final DistanceQuery<O> dq = relation.getDistanceQuery(distanceFunction, hints);
  return relation.getRKNNQuery(dq, hints);
}
java
public static <O> RangeQuery<O> getLinearScanSimilarityRangeQuery(SimilarityQuery<O> simQuery) { // Slight optimizations of linear scans if(simQuery instanceof PrimitiveSimilarityQuery) { final PrimitiveSimilarityQuery<O> pdq = (PrimitiveSimilarityQuery<O>) simQuery; return new LinearScanPrimitiveSimilarityRangeQuery<>(pdq); } return new LinearScanSimilarityRangeQuery<>(simQuery); }
java
protected static void register(Class<?> parent, String cname) {
  // Register a class name under the given parent interface, creating the
  // registry entry on first use. computeIfAbsent replaces the manual
  // get / null-check / put sequence of the original.
  data.computeIfAbsent(parent, key -> new Entry()).addName(cname);
}
java
protected static void register(Class<?> parent, Class<?> clazz) {
  // Register an already-loaded class under the parent interface, including
  // any names declared via the @Alias annotation.
  // computeIfAbsent replaces the manual get / null-check / put sequence;
  // a single getAnnotation call with a null check replaces the redundant
  // isAnnotationPresent + getAnnotation double lookup.
  Entry e = data.computeIfAbsent(parent, key -> new Entry());
  final String cname = clazz.getCanonicalName();
  e.addHit(cname, clazz);
  Alias aliases = clazz.getAnnotation(Alias.class);
  if(aliases != null) {
    for(String alias : aliases.value()) {
      e.addAlias(alias, cname);
    }
  }
}
java
protected static void registerAlias(Class<?> parent, String alias, String cname) {
  // Add an alias name for cname; the parent interface must already have a
  // registry entry (enforced by the assertion only).
  final Entry e = data.get(parent);
  assert (e != null);
  e.addAlias(alias, cname);
}
java
private static Class<?> tryLoadClass(String value) {
  // Resolve a class name via the service class loader, returning null
  // instead of throwing when the class is not available.
  try {
    return CLASSLOADER.loadClass(value);
  }
  catch(ClassNotFoundException ignored) {
    // Deliberately swallowed: callers treat null as "not loadable".
    return null;
  }
}
java
/**
 * Find all registered implementations of a restriction class (interface),
 * triggering the service loader / class scanner on first access.
 *
 * Classes that fail to load are marked with the FAILED_LOAD sentinel so the
 * (expensive) load attempt is not repeated on later calls.
 *
 * @param restrictionClass Restriction class; may be null
 * @return De-duplicated list of loadable implementation classes (possibly empty)
 */
public static List<Class<?>> findAllImplementations(Class<?> restrictionClass) {
  if(restrictionClass == null) {
    return Collections.emptyList();
  }
  // Lazily populate the registry for this restriction class:
  if(!contains(restrictionClass)) {
    ELKIServiceLoader.load(restrictionClass);
    ELKIServiceScanner.load(restrictionClass);
  }
  Entry e = data.get(restrictionClass);
  if(e == null) {
    return Collections.emptyList();
  }
  // Start loading classes:
  ArrayList<Class<?>> ret = new ArrayList<>(e.len);
  for(int pos = 0; pos < e.len; pos++) {
    Class<?> c = e.clazzes[pos];
    if(c == null) {
      // Not loaded yet: try now, and cache the outcome (class or sentinel).
      c = tryLoadClass(e.names[pos]);
      if(c == null) {
        LOG.warning("Failed to load class " + e.names[pos] + " for interface " + restrictionClass.getName());
        c = FAILED_LOAD;
      }
      e.clazzes[pos] = c;
    }
    if(c == FAILED_LOAD) {
      continue; // Known-broken entry, skip silently on repeat calls.
    }
    // Linear scan, but cheap enough.
    if(!ret.contains(c)) {
      ret.add(c);
    }
  }
  return ret;
}
java
/**
 * Find implementations of a restriction class, optionally including
 * non-indexed classes and optionally filtering to instantiable ones.
 *
 * @param c Restriction class; may be null
 * @param everything Also include abstract / interface / private classes
 * @param parameterizable Require a no-args constructor or a Parameterizer
 * @return List of matching classes
 */
public static List<Class<?>> findAllImplementations(Class<?> c, boolean everything, boolean parameterizable) {
  if(c == null) {
    return Collections.emptyList();
  }
  // Default is served from the registry
  if(!everything && parameterizable) {
    return findAllImplementations(c);
  }
  // This codepath is used by utility classes to also find buggy
  // implementations (e.g. non-instantiable, abstract) of the interfaces.
  List<Class<?>> known = findAllImplementations(c);
  // For quickly skipping seen entries:
  HashSet<Class<?>> dupes = new HashSet<>(known);
  for(Iterator<Class<?>> iter = ELKIServiceScanner.nonindexedClasses(); iter.hasNext();) {
    Class<?> cls = iter.next();
    if(dupes.contains(cls)) {
      continue;
    }
    // skip abstract / private classes, unless "everything" was requested.
    if(!everything && (Modifier.isInterface(cls.getModifiers()) || Modifier.isAbstract(cls.getModifiers()) || Modifier.isPrivate(cls.getModifiers()))) {
      continue;
    }
    if(!c.isAssignableFrom(cls)) {
      continue; // Not an implementation of the restriction class.
    }
    if(parameterizable) {
      // Accept either a public no-args constructor or an ELKI Parameterizer.
      boolean instantiable = false;
      try {
        instantiable = cls.getConstructor() != null;
      }
      catch(Exception | Error e) {
        // ignore: no accessible default constructor.
      }
      try {
        instantiable = instantiable || ClassGenericsUtil.getParameterizer(cls) != null;
      }
      catch(Exception | Error e) {
        // ignore: no usable parameterizer either.
      }
      if(!instantiable) {
        continue;
      }
    }
    known.add(cls);
    dupes.add(cls);
  }
  return known;
}
java
/**
 * Try to resolve a user-given class name by probing alternate spellings:
 * with the factory postfix, with the restriction class's package prepended,
 * and finally via registered aliases.
 *
 * The StringBuilder is reused across attempts; note the careful
 * setLength() calls that rewind it between probes.
 *
 * @param restrictionClass Restriction class, used for the package prefix
 * @param value User-supplied (possibly short) class name
 * @param e Registry entry holding aliases; may be null
 * @return Resolved class, or {@code null} if nothing matched
 */
private static <C> Class<?> tryAlternateNames(Class<? super C> restrictionClass, String value, Entry e) {
  StringBuilder buf = new StringBuilder(value.length() + 100);
  // Try with FACTORY_POSTFIX first:
  Class<?> clazz = tryLoadClass(buf.append(value).append(FACTORY_POSTFIX).toString());
  if(clazz != null) {
    return clazz;
  }
  clazz = tryLoadClass(value); // Without FACTORY_POSTFIX.
  if(clazz != null) {
    return clazz;
  }
  buf.setLength(0); // Rewind the builder for the package-prefixed attempts.
  // Try prepending the package name, with factory postfix:
  clazz = tryLoadClass(buf.append(restrictionClass.getPackage().getName()).append('.')//
      .append(value).append(FACTORY_POSTFIX).toString());
  if(clazz != null) {
    return clazz;
  }
  // Remove FACTORY_POSTFIX again, keeping the package prefix.
  buf.setLength(buf.length() - FACTORY_POSTFIX.length());
  String value2 = buf.toString(); // Will also be used below.
  clazz = tryLoadClass(value2);
  if(clazz != null) {
    return clazz;
  }
  // Last, try aliases (stored as flat key/value pairs in e.aliases):
  if(e != null && e.aliaslen > 0) {
    for(int i = 0; i < e.aliaslen; i += 2) {
      if(e.aliases[i].equalsIgnoreCase(value) || e.aliases[i].equalsIgnoreCase(value2)) {
        // Recurse on the alias target (++i moves to the value slot).
        return findImplementation(restrictionClass, e.aliases[++i]);
      }
    }
  }
  return null;
}
java
/**
 * Set up the SVG canvas layer, using the margin from the style library.
 *
 * @return The new layer element
 */
protected Element setupCanvas() {
  // Delegate to the static helper with the configured margin:
  final double m = context.getStyleLibrary().getSize(StyleLibrary.MARGIN);
  this.layer = setupCanvas(svgp, this.proj, m, getWidth(), getHeight());
  return this.layer;
}
java
/**
 * Type information of the converted output: a vector field of the target
 * dimensionality {@code tdim}, regardless of the input type.
 *
 * @param in Input type information (unused here)
 * @param factory Vector factory for the output type
 * @return Output vector field type
 */
protected SimpleTypeInformation<?> convertedType(SimpleTypeInformation<?> in, NumberVector.Factory<V> factory) {
  final SimpleTypeInformation<?> out = new VectorFieldTypeInformation<>(factory, tdim);
  return out;
}
java
/**
 * Partition row indexes by their class label.
 *
 * @param classcolumn Column of class labels, one per row
 * @return Map from label to the list of row indexes carrying that label
 */
protected <O> Map<O, IntList> partition(List<? extends O> classcolumn) {
  final Map<O, IntList> classes = new HashMap<>();
  int row = 0;
  for(O label : classcolumn) {
    IntList members = classes.get(label);
    if(members == null) {
      // First occurrence of this label: start a new index list.
      classes.put(label, members = new IntArrayList());
    }
    members.add(row);
    ++row;
  }
  return classes;
}
java
/**
 * Create a new curve, register it in the curve list, and return it.
 *
 * @return The freshly created curve
 */
public Curve makeCurve() {
  final Curve curve = new Curve(curves.size());
  curves.add(curve);
  return curve;
}
java
/**
 * Publish a plain message at the given log level.
 *
 * @param message Message text
 * @param level Logging level
 * @throws RuntimeException wrapping a BadLocationException from the document
 */
public void publish(String message, Level level) {
  try {
    // Route through the LogRecord-based publish method:
    publish(new LogRecord(level, message));
  }
  catch(BadLocationException e) {
    throw new RuntimeException("Error writing a log-like message.", e);
  }
}
java
/**
 * Publish a log record into the styled document, choosing formatter and
 * style by severity. Progress records overwrite the current (unterminated)
 * line; all other records start on a fresh line.
 *
 * Maintains {@code lastNewlinePos}: the document position right after the
 * last newline, i.e. the start of the current in-progress line.
 *
 * @param record Log record to render
 * @throws BadLocationException on document manipulation errors
 */
protected synchronized void publish(LogRecord record) throws BadLocationException {
  // choose an appropriate formatter
  final Formatter fmt;
  final Style style;
  if(record.getLevel().intValue() >= Level.WARNING.intValue()) {
    // format errors using the error formatter
    fmt = errformat;
    style = errStyle;
  } else if(record.getLevel().intValue() <= Level.FINE.intValue()) {
    // format debug statements using the debug formatter.
    fmt = debugformat;
    style = dbgStyle;
  } else {
    // default to the message formatter.
    fmt = msgformat;
    style = msgStyle;
  }
  // format
  final String m;
  m = fmt.format(record);
  StyledDocument doc = getStyledDocument();
  if(record instanceof ProgressLogRecord) {
    // Progress: wipe the current (unterminated) line, it will be rewritten.
    if(lastNewlinePos < doc.getLength()) {
      doc.remove(lastNewlinePos, doc.getLength() - lastNewlinePos);
    }
  } else {
    // insert a newline, if we didn't see one yet.
    if(lastNewlinePos < doc.getLength()) {
      doc.insertString(doc.getLength(), "\n", style);
      lastNewlinePos = doc.getLength();
    }
  }
  // Split the message into a newline-terminated head and a trailing part
  // without a newline; only the trailing part keeps the line "open".
  int tail = tailingNonNewline(m, 0, m.length());
  int headlen = m.length() - tail;
  if(headlen > 0) {
    String pre = m.substring(0, headlen);
    doc.insertString(doc.getLength(), pre, style);
  }
  lastNewlinePos = doc.getLength();
  if(tail > 0) {
    String post = m.substring(m.length() - tail);
    doc.insertString(lastNewlinePos, post, style);
  }
}
java
/**
 * Run the iterative gradient-descent optimization of the SNE projection.
 *
 * @param pij Input-space affinity matrix
 * @param sol Projected coordinates, modified in place
 * @throws AbortException if the flat meta array would exceed Java's array size limit
 */
protected void optimizeSNE(AffinityMatrix pij, double[][] sol) {
  final int size = pij.size();
  // Guard: size * 3 * dim must fit into a single Java array.
  if(size * 3L * dim > 0x7FFF_FFFAL) {
    throw new AbortException("Memory exceeds Java array size limit.");
  }
  // Meta information on each point; joined for memory locality.
  // Layout per point: [gradient (dim), momentum (dim), learning rate (dim)].
  // For performance, we use a flat memory layout!
  double[] meta = new double[size * 3 * dim];
  final int dim3 = dim * 3;
  for(int off = 2 * dim; off < meta.length; off += dim3) {
    Arrays.fill(meta, off, off + dim, 1.); // Initial learning rate
  }
  // Affinity matrix in projected space
  double[][] qij = new double[size][size];
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Iterative Optimization", iterations, LOG) : null;
  Duration timer = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".runtime.optimization").begin() : null;
  // Optimize: recompute projected affinities, gradient, then step.
  for(int it = 0; it < iterations; it++) {
    double qij_sum = computeQij(qij, sol);
    computeGradient(pij, qij, 1. / qij_sum, sol, meta);
    updateSolution(sol, meta, it);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  if(timer != null) {
    LOG.statistics(timer.end());
  }
}
java
protected double computeQij(double[][] qij, double[][] solution) { double qij_sum = 0; for(int i = 1; i < qij.length; i++) { final double[] qij_i = qij[i], vi = solution[i]; for(int j = 0; j < i; j++) { qij_sum += qij_i[j] = qij[j][i] = MathUtil.exp(-sqDist(vi, solution[j])); } } return qij_sum * 2; // Symmetry }
java
/**
 * Compute the SNE gradient for every point, storing it into the gradient
 * slots of the flat meta array.
 *
 * @param pij Input-space affinities
 * @param qij Projected-space affinities (unnormalized)
 * @param qij_isum Inverse of the qij sum, used to normalize qij
 * @param sol Current projected coordinates
 * @param meta Flat per-point meta array; gradient occupies the first dim
 *        entries of each dim3-sized stride and is overwritten here
 */
protected void computeGradient(AffinityMatrix pij, double[][] qij, double qij_isum, double[][] sol, double[] meta) {
  final int dim3 = dim * 3;
  int size = pij.size();
  for(int i = 0, off = 0; i < size; i++, off += dim3) {
    final double[] sol_i = sol[i], qij_i = qij[i];
    Arrays.fill(meta, off, off + dim, 0.); // Clear gradient only
    for(int j = 0; j < size; j++) {
      if(i == j) {
        continue; // No self-affinity.
      }
      final double[] sol_j = sol[j];
      final double qij_ij = qij_i[j]; // Qij after scaling!
      // Clamp the normalized affinity away from zero for stability:
      final double q = MathUtil.max(qij_ij * qij_isum, MIN_QIJ);
      double a = 4 * (pij.get(i, j) - q); // SNE gradient
      for(int k = 0; k < dim; k++) {
        meta[off + k] += a * (sol_i[k] - sol_j[k]);
      }
    }
  }
}
java
/**
 * Run k-means-based outlier detection: cluster the data, then score each
 * point by its distance to its cluster's prototype.
 *
 * @param database Database to process
 * @param relation Data relation
 * @return Outlier result with distance-to-prototype scores
 */
public OutlierResult run(Database database, Relation<O> relation) {
  DistanceFunction<? super O> df = clusterer.getDistanceFunction();
  DistanceQuery<O> dq = database.getDistanceQuery(relation, df);
  // TODO: improve ELKI api to ensure we're using the same DBIDs!
  Clustering<?> c = clusterer.run(database, relation);
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_DB);
  DoubleMinMax mm = new DoubleMinMax();
  @SuppressWarnings("unchecked")
  NumberVector.Factory<O> factory = (NumberVector.Factory<O>) RelationUtil.assumeVectorField(relation).getFactory();
  List<? extends Cluster<?>> clusters = c.getAllClusters();
  for(Cluster<?> cluster : clusters) {
    // FIXME: use a primitive distance function on number vectors instead.
    // Materialize the cluster prototype as a vector of the input type:
    O mean = factory.newNumberVector(ModelUtil.getPrototype(cluster.getModel(), relation));
    for(DBIDIter iter = cluster.getIDs().iter(); iter.valid(); iter.advance()) {
      // Score = distance of the point to its cluster prototype.
      double dist = dq.distance(mean, iter);
      scores.put(iter, dist);
      mm.put(dist);
    }
  }
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("KMeans outlier scores", "kmeans-outlier", scores, relation.getDBIDs());
  OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta(mm.getMin(), mm.getMax(), 0., Double.POSITIVE_INFINITY, 0.);
  return new OutlierResult(scoreMeta, scoreResult);
}
java
/**
 * Evaluate a mixture of Gaussians at x and compute per-parameter gradients.
 *
 * Parameters come in triples (mean, stddev, scaling); gradients are
 * returned in the same layout.
 *
 * NOTE(review): the mean and stddev gradients appear to omit a 1/stddev
 * factor relative to the analytic derivatives of the Gaussian density —
 * verify against the fitting code that consumes these gradients.
 *
 * @param x Evaluation position
 * @param params Flat parameter array, length must be a multiple of 3
 * @return Function value and gradient vector
 */
@Override
public FittingFunctionResult eval(double x, double[] params) {
  final int len = params.length;
  // We always need triples: (mean, stddev, scaling)
  assert (len % 3) == 0;
  double y = 0.0;
  double[] gradients = new double[len];
  // Loosely based on the book:
  // Numerical Recipes in C: The Art of Scientific Computing
  // Due to their license, we cannot use their code, but we have to implement
  // the mathematics ourselves. We hope the loss in precision is not too big.
  for(int i = 2; i < params.length; i += 3) {
    // Standardized Gaussian parameter (centered, scaled by stddev)
    double stdpar = (x - params[i - 2]) / params[i - 1];
    double e = FastMath.exp(-.5 * stdpar * stdpar);
    // Contribution of this component to the mixture value:
    double localy = params[i] / (params[i - 1] * MathUtil.SQRTTWOPI) * e;
    y += localy;
    // mean gradient
    gradients[i - 2] = localy * stdpar;
    // stddev gradient
    gradients[i - 1] = (stdpar * stdpar - 1.0) * localy;
    // amplitude gradient
    gradients[i] = e / (params[i - 1] * MathUtil.SQRTTWOPI);
  }
  return new FittingFunctionResult(y, gradients);
}
java
/**
 * Render a similarity matrix visualization into a fresh plot and display it
 * in a simple SVG viewer window.
 *
 * @param context Visualizer context
 * @param factory Visualizer factory
 * @param task Visualization task to render
 */
private void showVisualization(VisualizerContext context, SimilarityMatrixVisualizer factory, VisualizationTask task) {
  final VisualizationPlot plot = new VisualizationPlot();
  final Visualization vis = factory.makeVisualization(context, task, plot, 1.0, 1.0, null);
  plot.getRoot().appendChild(vis.getLayer());
  // Fixed 20cm output size with a unit-square view box:
  plot.getRoot().setAttribute(SVGConstants.SVG_WIDTH_ATTRIBUTE, "20cm");
  plot.getRoot().setAttribute(SVGConstants.SVG_HEIGHT_ATTRIBUTE, "20cm");
  plot.getRoot().setAttribute(SVGConstants.SVG_VIEW_BOX_ATTRIBUTE, "0 0 1 1");
  plot.updateStyleElement();
  final SimpleSVGViewer viewer = new SimpleSVGViewer();
  viewer.setPlot(plot);
}
java
/**
 * Add all values of the array, element by element.
 *
 * @param data Values to add
 */
public void put(int[] data) {
  // Delegate each element to the single-value put().
  for(int value : data) {
    put(value);
  }
}
java
/**
 * Run the KDEOS outlier algorithm: estimate kernel densities for each k in
 * [kmin, kmax], then derive z-score-based outlier scores.
 *
 * @param database Database to process
 * @param rel Data relation
 * @return Outlier result with probabilistic scores
 */
public OutlierResult run(Database database, Relation<O> rel) {
  final DBIDs ids = rel.getDBIDs();
  LOG.verbose("Running kNN preprocessor.");
  // kmax + 1 because the query point itself is included in its kNN.
  KNNQuery<O> knnq = DatabaseUtil.precomputedKNNQuery(database, rel, getDistanceFunction(), kmax + 1);
  // Initialize store for densities
  WritableDataStore<double[]> densities = DataStoreUtil.makeStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, double[].class);
  estimateDensities(rel, knnq, ids, densities);
  // Compute scores:
  WritableDoubleDataStore kofs = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_DB);
  DoubleMinMax minmax = new DoubleMinMax();
  computeOutlierScores(knnq, ids, densities, kofs, minmax);
  DoubleRelation scoreres = new MaterializedDoubleRelation("Kernel Density Estimation Outlier Scores", "kdeos-outlier", kofs, ids);
  OutlierScoreMeta meta = new ProbabilisticOutlierScore(minmax.getMin(), minmax.getMax());
  return new OutlierResult(meta, scoreres);
}
java
/**
 * Estimate kernel densities for each object and each k in [kmin, kmax],
 * distributing each point's kernel contribution onto its neighbors.
 *
 * Densities are stored as per-object arrays indexed by k - kmin.
 *
 * @param rel Data relation (used for dimensionality)
 * @param knnq Precomputed kNN query
 * @param ids Objects to process
 * @param densities Output store, filled with density arrays of length kmax+1-kmin
 */
protected void estimateDensities(Relation<O> rel, KNNQuery<O> knnq, final DBIDs ids, WritableDataStore<double[]> densities) {
  final int dim = dimensionality(rel);
  final int knum = kmax + 1 - kmin;
  // Initialize storage:
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    densities.put(iter, new double[knum]);
  }
  // Distribute densities:
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing densities", ids.size(), LOG) : null;
  // Inverse bandwidth cap derived from the minimum bandwidth setting:
  double iminbw = (minBandwidth > 0.) ? 1. / (minBandwidth * scale) : Double.POSITIVE_INFINITY;
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    KNNList neighbors = knnq.getKNNForDBID(iter, kmax + 1);
    int k = 1, idx = 0;
    double sum = 0.;
    for(DoubleDBIDListIter kneighbor = neighbors.iter(); k <= kmax && kneighbor.valid(); kneighbor.advance(), k++) {
      sum += kneighbor.doubleValue(); // Running sum of neighbor distances.
      if(k < kmin) {
        continue; // Bandwidths only used from kmin onward.
      }
      // Inverse bandwidth from the mean kNN distance, capped by iminbw:
      final double ibw = Math.min(k / (sum * scale), iminbw);
      // Normalization factor of the kernel in dim dimensions:
      final double sca = MathUtil.powi(ibw, dim);
      for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) {
        final double dens;
        if(sca < Double.POSITIVE_INFINITY) { // NaNs with duplicate points!
          dens = sca * kernel.density(neighbor.doubleValue() * ibw);
        } else {
          // Degenerate bandwidth (duplicates): count exact matches only.
          dens = neighbor.doubleValue() == 0. ? 1. : 0.;
        }
        densities.get(neighbor)[idx] += dens;
        if(dens < CUTOFF) {
          break; // Contributions are monotone in distance; rest is negligible.
        }
      }
      ++idx; // Only if k >= kmin
    }
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
java
private int dimensionality(Relation<O> rel) { // Explicit: if(idim >= 0) { return idim; } // Cast to vector field relation. @SuppressWarnings("unchecked") final Relation<NumberVector> frel = (Relation<NumberVector>) rel; int dim = RelationUtil.dimensionality(frel); if(dim < 1) { throw new AbortException("When using KDEOS with non-vectorspace data, the intrinsic dimensionality parameter must be set!"); } return dim; }
java
/**
 * Compute KDEOS outlier scores: for each object, compare its density to the
 * density distribution of its neighbors via a z-score, averaged over all k,
 * then map through the standard normal CDF.
 *
 * @param knnq Precomputed kNN query
 * @param ids Objects to score
 * @param densities Per-object density arrays from estimateDensities
 * @param kdeos Output score store
 * @param minmax Output score range tracker
 */
protected void computeOutlierScores(KNNQuery<O> knnq, final DBIDs ids, WritableDataStore<double[]> densities, WritableDoubleDataStore kdeos, DoubleMinMax minmax) {
  final int knum = kmax + 1 - kmin;
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing KDEOS scores", ids.size(), LOG) : null;
  // Scratch matrix: rows = k values, columns = neighbors (with headroom).
  double[][] scratch = new double[knum][kmax + 5];
  MeanVariance mv = new MeanVariance();
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    double[] dens = densities.get(iter);
    KNNList neighbors = knnq.getKNNForDBID(iter, kmax + 1);
    if(scratch[0].length < neighbors.size()) {
      // Resize scratch. Add some extra margin again.
      scratch = new double[knum][neighbors.size() + 5];
    }
    { // Store density matrix of neighbors
      int i = 0;
      for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance(), i++) {
        double[] ndens = densities.get(neighbor);
        for(int k = 0; k < knum; k++) {
          scratch[k][i] = ndens[k];
        }
      }
      assert (i == neighbors.size());
    }
    // Compute means and stddevs for each k, accumulate the z-scores:
    double score = 0.;
    for(int i = 0; i < knum; i++) {
      mv.reset();
      for(int j = 0; j < neighbors.size(); j++) {
        mv.put(scratch[i][j]);
      }
      final double mean = mv.getMean(), stddev = mv.getSampleStddev();
      if(stddev > 0.) {
        // How many stddevs below the neighborhood mean is our density?
        score += (mean - dens[i]) / stddev;
      }
    }
    score /= knum; // average
    // Map to [0, 1] via the standard normal CDF:
    score = NormalDistribution.standardNormalCDF(score);
    minmax.put(score);
    kdeos.put(iter, score);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
java
/**
 * Run the CASH clustering algorithm on the given relation.
 *
 * Preprocesses the vectors into parameterization functions, then runs the
 * recursive interval search; logs a per-cluster summary when verbose.
 *
 * @param rel Input vector relation
 * @return Clustering result
 */
public Clustering<Model> run(Relation<V> rel) {
  fulldatabase = preprocess(rel);
  processedIDs = DBIDUtil.newHashSet(fulldatabase.size());
  noiseDim = dimensionality(fulldatabase);
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("CASH Clustering", fulldatabase.size(), LOG) : null;
  Clustering<Model> result = doRun(fulldatabase, progress);
  LOG.ensureCompleted(progress);
  if(LOG.isVerbose()) {
    // Summarize the found clusters (with subspace dimension where available):
    StringBuilder msg = new StringBuilder(1000);
    for(Cluster<Model> c : result.getAllClusters()) {
      if(c.getModel() instanceof LinearEquationModel) {
        LinearEquationModel s = (LinearEquationModel) c.getModel();
        msg.append("\n Cluster: Dim: " + s.getLes().subspacedim() + " size: " + c.size());
      } else {
        msg.append("\n Cluster: " + c.getModel().getClass().getName() + " size: " + c.size());
      }
    }
    LOG.verbose(msg.toString());
  }
  return result;
}
java
private Relation<ParameterizationFunction> preprocess(Relation<V> vrel) { DBIDs ids = vrel.getDBIDs(); SimpleTypeInformation<ParameterizationFunction> type = new SimpleTypeInformation<>(ParameterizationFunction.class); WritableDataStore<ParameterizationFunction> prep = DataStoreUtil.makeStorage(ids, DataStoreFactory.HINT_HOT, ParameterizationFunction.class); // Project for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { prep.put(iter, new ParameterizationFunction(vrel.get(iter))); } return new MaterializedRelation<>(type, ids, null, prep); }
java
private void initHeap(ObjectHeap<CASHInterval> heap, Relation<ParameterizationFunction> relation, int dim, DBIDs ids) { CASHIntervalSplit split = new CASHIntervalSplit(relation, minPts); // determine minimum and maximum function value of all functions double[] minMax = determineMinMaxDistance(relation, dim); double d_min = minMax[0], d_max = minMax[1]; double dIntervalLength = d_max - d_min; int numDIntervals = (int) FastMath.ceil(dIntervalLength / jitter); double dIntervalSize = dIntervalLength / numDIntervals; double[] d_mins = new double[numDIntervals], d_maxs = new double[numDIntervals]; if(LOG.isVerbose()) { LOG.verbose(new StringBuilder().append("d_min ").append(d_min)// .append("\nd_max ").append(d_max)// .append("\nnumDIntervals ").append(numDIntervals)// .append("\ndIntervalSize ").append(dIntervalSize).toString()); } // alpha intervals double[] alphaMin = new double[dim - 1], alphaMax = new double[dim - 1]; Arrays.fill(alphaMax, Math.PI); for(int i = 0; i < numDIntervals; i++) { d_mins[i] = (i == 0) ? d_min : d_maxs[i - 1]; d_maxs[i] = (i < numDIntervals - 1) ? d_mins[i] + dIntervalSize : d_max - d_mins[i]; HyperBoundingBox alphaInterval = new HyperBoundingBox(alphaMin, alphaMax); ModifiableDBIDs intervalIDs = split.determineIDs(ids, alphaInterval, d_mins[i], d_maxs[i]); if(intervalIDs != null && intervalIDs.size() >= minPts) { heap.add(new CASHInterval(alphaMin, alphaMax, split, intervalIDs, -1, 0, d_mins[i], d_maxs[i])); } } if(LOG.isDebuggingFiner()) { LOG.debugFiner(new StringBuilder().append("heap.size: ").append(heap.size()).toString()); } }
java
private MaterializedRelation<ParameterizationFunction> buildDB(int dim, double[][] basis, DBIDs ids, Relation<ParameterizationFunction> relation) { ProxyDatabase proxy = new ProxyDatabase(ids); SimpleTypeInformation<ParameterizationFunction> type = new SimpleTypeInformation<>(ParameterizationFunction.class); WritableDataStore<ParameterizationFunction> prep = DataStoreUtil.makeStorage(ids, DataStoreFactory.HINT_HOT, ParameterizationFunction.class); // Project for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { prep.put(iter, project(basis, relation.get(iter))); } if(LOG.isDebugging()) { LOG.debugFine("db fuer dim " + (dim - 1) + ": " + ids.size()); } MaterializedRelation<ParameterizationFunction> prel = new MaterializedRelation<>(type, ids, null, prep); proxy.addRelation(prel); return prel; }
java
private ParameterizationFunction project(double[][] basis, ParameterizationFunction f) { // Matrix m = new Matrix(new // double[][]{f.getPointCoordinates()}).times(basis); double[] m = transposeTimes(basis, f.getColumnVector()); return new ParameterizationFunction(DoubleVector.wrap(m)); }
java
/**
 * Determine an orthonormal basis of the hyperplane orthogonal to the
 * direction given by the angles alpha, via Gram-Schmidt on unit vectors.
 *
 * @param alpha Angles defining the primary direction (length dim)
 * @return Basis of dim vectors of length dim + 1, in transposed form
 */
private double[][] determineBasis(double[] alpha) {
  final int dim = alpha.length;
  // Primary vector: spherical coordinates of alpha.
  double[] nn = new double[dim + 1];
  for(int i = 0; i < nn.length; i++) {
    // Last coordinate uses an implicit angle of 0 (cos = 1).
    double alpha_i = i == alpha.length ? 0 : alpha[i];
    nn[i] = ParameterizationFunction.sinusProduct(0, i, alpha) * FastMath.cos(alpha_i);
  }
  timesEquals(nn, 1. / euclideanLength(nn)); // Normalize
  // Find orthogonal system, in transposed form:
  double[][] basis = new double[dim][];
  int found = 0;
  for(int i = 0; i < nn.length && found < dim; i++) {
    // ith unit vector.
    final double[] e_i = new double[nn.length];
    e_i[i] = 1.0;
    // Remove the component along the primary vector:
    minusTimesEquals(e_i, nn, scalarProduct(e_i, nn));
    double len = euclideanLength(e_i);
    // Make orthogonal to earlier (normal) basis vectors:
    for(int j = 0; j < found; j++) {
      if(len < 1e-9) { // Disappeared, probably linear dependent
        break;
      }
      minusTimesEquals(e_i, basis[j], scalarProduct(e_i, basis[j]));
      len = euclideanLength(e_i);
    }
    if(len < 1e-9) {
      continue; // Vector vanished: linearly dependent, skip it.
    }
    timesEquals(e_i, 1. / len); // Normalize
    basis[found++] = e_i;
  }
  if(found < dim) {
    // Likely some numerical instability, should not happen.
    for(int i = found; i < dim; i++) {
      basis[i] = new double[nn.length]; // Append zero vectors
    }
  }
  return transpose(basis);
}
java
private CASHInterval determineNextIntervalAtMaxLevel(ObjectHeap<CASHInterval> heap) { CASHInterval next = doDetermineNextIntervalAtMaxLevel(heap); // noise path was chosen while(next == null) { if(heap.isEmpty()) { return null; } next = doDetermineNextIntervalAtMaxLevel(heap); } return next; }
java
/**
 * Follow the most promising child of the top heap interval down to the
 * maximum split level, pushing the sibling of each chosen child back onto
 * the heap.
 *
 * @param heap Heap of candidate intervals (top element is consumed)
 * @return Interval at maximum level, or null on a noise path / heap overflow
 */
private CASHInterval doDetermineNextIntervalAtMaxLevel(ObjectHeap<CASHInterval> heap) {
  CASHInterval interval = heap.poll();
  int dim = interval.getDimensionality();
  while(true) {
    // max level is reached: this interval is fully refined.
    if(interval.getLevel() >= maxLevel && interval.getMaxSplitDimension() == (dim - 1)) {
      return interval;
    }
    if(heap.size() % 10000 == 0 && LOG.isVerbose()) {
      LOG.verbose("heap size " + heap.size());
    }
    // Emergency brake against unbounded heap growth:
    if(heap.size() >= 40000) {
      LOG.warning("Heap size > 40.000! Stopping.");
      heap.clear();
      return null;
    }
    if(LOG.isDebuggingFiner()) {
      LOG.debugFiner("split " + interval.toString() + " " + interval.getLevel() + "-" + interval.getMaxSplitDimension());
    }
    interval.split();
    // noise: neither child contains enough points.
    if(!interval.hasChildren()) {
      return null;
    }
    CASHInterval bestInterval;
    if(interval.getLeftChild() != null && interval.getRightChild() != null) {
      // Descend into the better child, keep the other on the heap.
      int comp = interval.getLeftChild().compareTo(interval.getRightChild());
      if(comp < 0) {
        bestInterval = interval.getRightChild();
        heap.add(interval.getLeftChild());
      } else {
        bestInterval = interval.getLeftChild();
        heap.add(interval.getRightChild());
      }
    } else if(interval.getLeftChild() == null) {
      bestInterval = interval.getRightChild();
    } else {
      bestInterval = interval.getLeftChild();
    }
    interval = bestInterval;
  }
}
java
/**
 * Determine the global minimum and maximum function value over all
 * parameterization functions, within the full angular box [0, pi]^(d-1).
 *
 * @param relation Relation of parameterization functions
 * @param dimensionality Dimensionality of the data
 * @return Array { d_min, d_max }
 */
private double[] determineMinMaxDistance(Relation<ParameterizationFunction> relation, int dimensionality) {
  // Angular search box: all angles in [0, pi].
  final double[] min = new double[dimensionality - 1];
  final double[] max = new double[dimensionality - 1];
  Arrays.fill(max, Math.PI);
  final HyperBoundingBox box = new HyperBoundingBox(min, max);
  double d_min = Double.POSITIVE_INFINITY, d_max = Double.NEGATIVE_INFINITY;
  for(DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
    final ParameterizationFunction f = relation.get(it);
    final HyperBoundingBox minMax = f.determineAlphaMinMax(box);
    // Evaluate at the extremal angle positions and track the range:
    d_min = Math.min(d_min, f.function(SpatialUtil.getMin(minMax)));
    d_max = Math.max(d_max, f.function(SpatialUtil.getMax(minMax)));
  }
  return new double[] { d_min, d_max };
}
java
/**
 * Evaluate ranking quality: for every object, rank the whole data set by
 * distance and measure the ROC AUC of its own class; results are collected
 * into a histogram over [0, 1].
 *
 * @param database Database to process
 * @param relation Data relation
 * @return Histogram of per-object ROC AUC values
 */
public HistogramResult run(Database database, Relation<O> relation) {
  final DistanceQuery<O> distanceQuery = database.getDistanceQuery(relation, getDistanceFunction());
  // Full-size kNN query: we need the complete ranking of the data set.
  final KNNQuery<O> knnQuery = database.getKNNQuery(distanceQuery, relation.size());
  if(LOG.isVerbose()) {
    LOG.verbose("Preprocessing clusters...");
  }
  // Cluster by labels
  Collection<Cluster<Model>> split = (new ByLabelOrAllInOneClustering()).run(database).getAllClusters();
  DoubleHistogram hist = new DoubleHistogram(numbins, 0.0, 1.0);
  if(LOG.isVerbose()) {
    LOG.verbose("Processing points...");
  }
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Computing ROC AUC values", relation.size(), LOG) : null;
  ROCEvaluation roc = new ROCEvaluation();
  MeanVariance mv = new MeanVariance();
  // sort neighbors
  for(Cluster<?> clus : split) {
    for(DBIDIter iter = clus.getIDs().iter(); iter.valid(); iter.advance()) {
      KNNList knn = knnQuery.getKNNForDBID(iter, relation.size());
      // AUC of ranking the object's own cluster to the top:
      double result = EvaluateClustering.evaluateRanking(roc, clus, knn);
      mv.put(result);
      hist.increment(result, 1. / relation.size());
      LOG.incrementProcessed(progress);
    }
  }
  LOG.ensureCompleted(progress);
  // Transform Histogram into a Double Vector array.
  Collection<double[]> res = new ArrayList<>(relation.size());
  for(DoubleHistogram.Iter iter = hist.iter(); iter.valid(); iter.advance()) {
    res.add(new double[] { iter.getCenter(), iter.getValue() });
  }
  HistogramResult result = new HistogramResult("Ranking Quality Histogram", "ranking-histogram", res);
  result.addHeader("Mean: " + mv.getMean() + " Variance: " + mv.getSampleVariance());
  return result;
}
java
/**
 * Run EM clustering: initialize models, iterate E/M steps until the
 * log-likelihood converges (or stagnates), then derive a hard clustering.
 *
 * @param database Database to process
 * @param relation Data relation (must be non-empty)
 * @return Clustering with one cluster per model
 * @throws IllegalArgumentException if the relation is empty
 */
public Clustering<M> run(Database database, Relation<V> relation) {
  if(relation.size() == 0) {
    throw new IllegalArgumentException("database empty: must contain elements");
  }
  // initial models
  List<? extends EMClusterModel<M>> models = mfactory.buildInitialModels(database, relation, k, SquaredEuclideanDistanceFunction.STATIC);
  WritableDataStore<double[]> probClusterIGivenX = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_SORTED, double[].class);
  double loglikelihood = assignProbabilitiesToInstances(relation, models, probClusterIGivenX);
  DoubleStatistic likestat = LOG.isStatistics() ? new DoubleStatistic(this.getClass().getName() + ".loglikelihood") : null;
  if(LOG.isStatistics()) {
    LOG.statistics(likestat.setDouble(loglikelihood));
  }
  // iteration unless no change
  int it = 0, lastimprovement = 0;
  double bestloglikelihood = loglikelihood; // For detecting instabilities.
  for(++it; it < maxiter || maxiter < 0; it++) {
    final double oldloglikelihood = loglikelihood;
    // M step: refit models; E step: reassign probabilities.
    recomputeCovarianceMatrices(relation, probClusterIGivenX, models, prior);
    // reassign probabilities
    loglikelihood = assignProbabilitiesToInstances(relation, models, probClusterIGivenX);
    if(LOG.isStatistics()) {
      LOG.statistics(likestat.setDouble(loglikelihood));
    }
    if(loglikelihood - bestloglikelihood > delta) {
      lastimprovement = it;
      bestloglikelihood = loglikelihood;
    }
    // Stop when converged, or when half of the iterations brought no gain.
    if(Math.abs(loglikelihood - oldloglikelihood) <= delta || lastimprovement < it >> 1) {
      break;
    }
  }
  if(LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(KEY + ".iterations", it));
  }
  // fill result with clusters and models
  List<ModifiableDBIDs> hardClusters = new ArrayList<>(k);
  for(int i = 0; i < k; i++) {
    hardClusters.add(DBIDUtil.newArray());
  }
  // provide a hard clustering: assign each point to its most likely cluster.
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    hardClusters.get(argmax(probClusterIGivenX.get(iditer))).add(iditer);
  }
  Clustering<M> result = new Clustering<>("EM Clustering", "em-clustering");
  // provide models within the result
  for(int i = 0; i < k; i++) {
    result.addToplevelCluster(new Cluster<>(hardClusters.get(i), models.get(i).finalizeCluster()));
  }
  if(isSoft()) {
    // Keep the soft assignment as an additional result.
    result.addChildResult(new MaterializedRelation<>("cluster assignments", "em-soft-score", SOFT_TYPE, probClusterIGivenX, relation.getDBIDs()));
  } else {
    probClusterIGivenX.destroy();
  }
  return result;
}
java
/**
 * M step: refit all cluster models from the current soft assignments,
 * supporting models that need an extra first pass (e.g. to compute means
 * before covariances), and MAP weights when a prior is given.
 *
 * @param relation Data relation
 * @param probClusterIGivenX Soft assignment probabilities per object
 * @param models Cluster models, updated in place
 * @param prior MAP prior; non-positive means plain MLE weights
 */
public static void recomputeCovarianceMatrices(Relation<? extends NumberVector> relation, WritableDataStore<double[]> probClusterIGivenX, List<? extends EMClusterModel<?>> models, double prior) {
  final int k = models.size();
  boolean needsTwoPass = false;
  for(EMClusterModel<?> m : models) {
    m.beginEStep();
    needsTwoPass |= m.needsTwoPass();
  }
  // First pass, only for two-pass models.
  if(needsTwoPass) {
    for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
      double[] clusterProbabilities = probClusterIGivenX.get(iditer);
      NumberVector instance = relation.get(iditer);
      for(int i = 0; i < clusterProbabilities.length; i++) {
        final double prob = clusterProbabilities[i];
        if(prob > 1e-10) { // Skip negligible contributions.
          models.get(i).firstPassE(instance, prob);
        }
      }
    }
    for(EMClusterModel<?> m : models) {
      m.finalizeFirstPassE();
    }
  }
  // Main pass: accumulate weighted statistics and the per-cluster weight sum.
  double[] wsum = new double[k];
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    double[] clusterProbabilities = probClusterIGivenX.get(iditer);
    NumberVector instance = relation.get(iditer);
    for(int i = 0; i < clusterProbabilities.length; i++) {
      final double prob = clusterProbabilities[i];
      if(prob > 1e-10) { // Skip negligible contributions.
        models.get(i).updateE(instance, prob);
      }
      wsum[i] += prob;
    }
  }
  for(int i = 0; i < models.size(); i++) {
    // MLE weight, or MAP weight when a Dirichlet-style prior is given:
    final double weight = prior <= 0. ? wsum[i] / relation.size() : (wsum[i] + prior - 1) / (relation.size() + prior * k - k);
    models.get(i).finalizeEStep(weight, prior);
  }
}
java
/**
 * E step: compute the posterior cluster membership probabilities for each
 * object via log-sum-exp over the models' log densities.
 *
 * @param relation Data relation
 * @param models Current cluster models
 * @param probClusterIGivenX Output store for posterior probabilities
 * @return Average log-likelihood of the data under the current models
 */
public static double assignProbabilitiesToInstances(Relation<? extends NumberVector> relation, List<? extends EMClusterModel<?>> models, WritableDataStore<double[]> probClusterIGivenX) {
  final int k = models.size();
  double emSum = 0.;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    NumberVector vec = relation.get(iditer);
    double[] probs = new double[k];
    for(int i = 0; i < k; i++) {
      // Clamp log densities from below for numerical stability:
      double v = models.get(i).estimateLogDensity(vec);
      probs[i] = v > MIN_LOGLIKELIHOOD ? v : MIN_LOGLIKELIHOOD;
    }
    // Normalize in log space, then exponentiate to get posteriors:
    final double logP = logSumExp(probs);
    for(int i = 0; i < k; i++) {
      probs[i] = FastMath.exp(probs[i] - logP);
    }
    probClusterIGivenX.put(iditer, probs);
    emSum += logP;
  }
  return emSum / relation.size();
}
java
/**
 * Rebuild the visualizer menu bar from the current result hierarchy,
 * taking the projection of the currently shown detail view into account.
 */
protected synchronized void updateVisualizerMenus() {
  // Projection of the currently displayed detail view (if any):
  Projection proj = null;
  if(svgCanvas.getPlot() instanceof DetailView) {
    PlotItem item = ((DetailView) svgCanvas.getPlot()).getPlotItem();
    proj = item.proj;
  }
  // Rebuild the menu from scratch, keeping only the file menu.
  menubar.removeAll();
  menubar.add(filemenu);
  ResultHierarchy hier = context.getHierarchy();
  Hierarchy<Object> vistree = context.getVisHierarchy();
  Result start = context.getBaseResult();
  ArrayList<JMenuItem> items = new ArrayList<>();
  if(start == null) {
    // No base result: start from all hierarchy roots.
    for(It<Result> iter = hier.iterAll(); iter.valid(); iter.advance()) {
      if(hier.numParents(iter.get()) == 0) {
        recursiveBuildMenu(items, iter.get(), hier, vistree, proj);
      }
    }
  } else {
    for(It<Result> iter = hier.iterChildren(start); iter.valid(); iter.advance()) {
      recursiveBuildMenu(items, iter.get(), hier, vistree, proj);
    }
  }
  // Add all items.
  for(JMenuItem item : items) {
    menubar.add(item);
  }
  menubar.revalidate();
  menubar.repaint();
}
java
/**
 * Run HiCS: find high-contrast subspaces, run the base outlier algorithm in
 * each, and combine the per-subspace scores by summation.
 *
 * @param relation Data relation
 * @return Combined outlier result
 */
public OutlierResult run(Relation<V> relation) {
  final DBIDs ids = relation.getDBIDs();
  // Per-dimension sorted indexes, used for the contrast computation:
  ArrayList<ArrayDBIDs> subspaceIndex = buildOneDimIndexes(relation);
  Set<HiCSSubspace> subspaces = calculateSubspaces(relation, subspaceIndex, rnd.getSingleThreadedRandom());
  if(LOG.isVerbose()) {
    LOG.verbose("Number of high-contrast subspaces: " + subspaces.size());
  }
  List<DoubleRelation> results = new ArrayList<>();
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Calculating Outlier scores for high Contrast subspaces", subspaces.size(), LOG) : null;
  // run outlier detection and collect the result
  // TODO extend so that any outlierAlgorithm can be used (use materialized
  // relation instead of SubspaceEuclideanDistanceFunction?)
  for(HiCSSubspace dimset : subspaces) {
    if(LOG.isVerbose()) {
      LOG.verbose("Performing outlier detection in subspace " + dimset);
    }
    // Restrict the data to this subspace via a projected proxy database:
    ProxyDatabase pdb = new ProxyDatabase(ids);
    pdb.addRelation(new ProjectedView<>(relation, new NumericalFeatureSelection<V>(dimset)));
    // run LOF and collect the result
    OutlierResult result = outlierAlgorithm.run(pdb);
    results.add(result.getScores());
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  // Combine: sum the per-subspace scores for every object.
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmax = new DoubleMinMax();
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    double sum = 0.0;
    for(DoubleRelation r : results) {
      final double s = r.doubleValue(iditer);
      if(!Double.isNaN(s)) { // Ignore subspaces that produced no score.
        sum += s;
      }
    }
    scores.putDouble(iditer, sum);
    minmax.put(sum);
  }
  OutlierScoreMeta meta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax());
  DoubleRelation scoreres = new MaterializedDoubleRelation("HiCS", "HiCS-outlier", scores, relation.getDBIDs());
  return new OutlierResult(meta, scoreres);
}
java
/**
 * Build one sorted DBID index per dimension of the relation.
 *
 * For each dimension, the object IDs are sorted by their value in that
 * dimension; the resulting list is indexed by dimension number.
 *
 * @param relation relation to index
 * @return list with one sorted DBID array per dimension
 */
private ArrayList<ArrayDBIDs> buildOneDimIndexes(Relation<? extends NumberVector> relation) {
  final int dimensionality = RelationUtil.dimensionality(relation);
  // NOTE(review): capacity dim+1 mirrors the original sizing; only dim
  // entries are ever added.
  final ArrayList<ArrayDBIDs> perDimension = new ArrayList<>(dimensionality + 1);
  final SortDBIDsBySingleDimension sorter = new VectorUtil.SortDBIDsBySingleDimension(relation);
  for(int d = 0; d < dimensionality; d++) {
    final ArrayModifiableDBIDs sorted = DBIDUtil.newArray(relation.getDBIDs());
    sorter.setDimension(d);
    sorted.sort(sorter);
    perDimension.add(sorted);
  }
  return perDimension;
}
java
/**
 * Compute the element-wise maximum of two equal-length arrays.
 *
 * @param distances1 first array
 * @param distances2 second array, must have the same length
 * @return new array with {@code result[i] = max(distances1[i], distances2[i])}
 * @throws IllegalArgumentException if the arrays differ in length
 */
private double[] max(double[] distances1, double[] distances2) {
  // IllegalArgumentException (a RuntimeException) is the idiomatic type for
  // precondition violations; callers catching RuntimeException still work.
  if(distances1.length != distances2.length) {
    throw new IllegalArgumentException("different lengths!");
  }
  double[] result = new double[distances1.length];
  for(int i = 0; i < distances1.length; i++) {
    result[i] = Math.max(distances1[i], distances2[i]);
  }
  return result;
}
java
public static int compileShader(Class<?> context, GL2 gl, int type, String name) throws ShaderCompilationException { int prog = -1; try (InputStream in = context.getResourceAsStream(name)) { int[] error = new int[1]; String shaderdata = FileUtil.slurp(in); prog = gl.glCreateShader(type); gl.glShaderSource(prog, 1, new String[] { shaderdata }, null, 0); gl.glCompileShader(prog); // This worked best for me to capture error messages: gl.glGetObjectParameterivARB(prog, GL2.GL_OBJECT_INFO_LOG_LENGTH_ARB, error, 0); if(error[0] > 1) { byte[] info = new byte[error[0]]; gl.glGetInfoLogARB(prog, info.length, error, 0, info, 0); String out = new String(info); gl.glDeleteShader(prog); throw new ShaderCompilationException("Shader compilation error in '" + name + "': " + out); } // Different way of catching errors. gl.glGetShaderiv(prog, GL2.GL_COMPILE_STATUS, error, 0); if(error[0] > 1) { throw new ShaderCompilationException("Shader compilation of '" + name + "' failed."); } } catch(IOException e) { throw new ShaderCompilationException("IO error loading shader: " + name, e); } return prog; }
java
protected int effectiveBandSize(final int dim1, final int dim2) { if(bandSize == Double.POSITIVE_INFINITY) { return (dim1 > dim2) ? dim1 : dim2; } if(bandSize >= 1.) { return (int) bandSize; } // Max * bandSize: return (int) Math.ceil((dim1 >= dim2 ? dim1 : dim2) * bandSize); }
java
@Override public final int addLeafEntry(E entry) { // entry is not a leaf entry if(!(entry instanceof LeafEntry)) { throw new UnsupportedOperationException("Entry is not a leaf entry!"); } // this is a not a leaf node if(!isLeaf()) { throw new UnsupportedOperationException("Node is not a leaf node!"); } // leaf node return addEntry(entry); }
java
@Override public final int addDirectoryEntry(E entry) { // entry is not a directory entry if(entry instanceof LeafEntry) { throw new UnsupportedOperationException("Entry is not a directory entry!"); } // this is a not a directory node if(isLeaf()) { throw new UnsupportedOperationException("Node is not a directory node!"); } return addEntry(entry); }
java
/**
 * Delete the entry at the given index, compacting the entry array.
 *
 * @param index position of the entry to remove
 * @return always {@code true}
 */
public boolean deleteEntry(int index) {
  // Close the gap by shifting the tail one slot to the left.
  final int tail = numEntries - index - 1;
  System.arraycopy(entries, index + 1, entries, index, tail);
  numEntries--;
  entries[numEntries] = null; // drop the stale reference for GC
  return true;
}
java
/**
 * Get a copy of the non-null entries of this node as a list.
 *
 * @return new list containing the entries, in array order
 * @deprecated see original API note; prefer direct entry access
 */
@SuppressWarnings("unchecked")
@Deprecated
public final List<E> getEntries() {
  final List<E> copy = new ArrayList<>(numEntries);
  for(int i = 0; i < entries.length; i++) {
    final Entry e = entries[i];
    if(e != null) { // trailing slots may be unused
      copy.add((E) e);
    }
  }
  return copy;
}
java
/**
 * Remove all entries whose positions are set in the given bit mask,
 * compacting the remaining entries in place.
 *
 * Runs a single left-to-right pass: {@code dest} is the write cursor
 * (starting at the first masked position, since everything before it is
 * untouched) and {@code src} the read cursor; unmasked entries are shifted
 * down, then the vacated tail is nulled out.
 *
 * @param mask bit mask of entry indexes to remove
 */
public void removeMask(long[] mask) {
  // First bit set in the mask = first slot that needs rewriting.
  int dest = BitsUtil.nextSetBit(mask, 0);
  if(dest < 0) {
    return; // empty mask: nothing to remove
  }
  // NOTE(review): assumes nextSetBit is inclusive, so src starts at dest
  // itself (a masked position, skipped by the loop below) — confirm against
  // BitsUtil's contract.
  int src = BitsUtil.nextSetBit(mask, dest);
  while(src < numEntries) {
    if(!BitsUtil.get(mask, src)) {
      // Keep this entry: shift it down to the write cursor.
      entries[dest] = entries[src];
      dest++;
    }
    src++;
  }
  // Number of removed entries = gap between read and write cursors.
  int rm = src - dest;
  // Null the now-unused tail so stale references can be collected.
  while(dest < numEntries) {
    entries[dest] = null;
    dest++;
  }
  numEntries -= rm;
}
java