code
stringlengths
73
34.1k
label
stringclasses
1 value
public static String format(float[] f) { return (f == null) ? "null" : (f.length == 0) ? "" : // formatTo(new StringBuilder(), f, ", ").toString(); }
java
public static String format(int[] a, String sep) { return (a == null) ? "null" : (a.length == 0) ? "" : // formatTo(new StringBuilder(), a, sep).toString(); }
java
public static String format(boolean[] b, final String sep) { return (b == null) ? "null" : (b.length == 0) ? "" : // formatTo(new StringBuilder(), b, ", ").toString(); }
java
/**
 * Format a matrix of doubles as a bracketed multi-line string, using the
 * default number format {@code NF2}.
 *
 * @param d Matrix to format (may be null)
 * @return "null" for a null matrix, "[]" for an empty matrix, otherwise the
 *         rendering produced by {@code formatTo}
 */
public static String format(double[][] d) {
  return d == null ? "null" : (d.length == 0) ? "[]" : //
      formatTo(new StringBuilder().append("[\n"), d, " [", "]\n", ", ", NF2).append(']').toString();
}
java
public static String format(double[][] m, int w, int d, String pre, String pos, String csep) { DecimalFormat format = new DecimalFormat(); format.setDecimalFormatSymbols(new DecimalFormatSymbols(Locale.US)); format.setMinimumIntegerDigits(1); format.setMaximumFractionDigits(d); format.setMinimumFractionDigits(d); format.setGroupingUsed(false); StringBuilder msg = new StringBuilder(); for(int i = 0; i < m.length; i++) { double[] row = m[i]; msg.append(pre); for(int j = 0; j < row.length; j++) { if(j > 0) { msg.append(csep); } String s = format.format(row[j]); // format the number whitespace(msg, w - s.length()).append(s); } msg.append(pos); } return msg.toString(); }
java
/**
 * Format a matrix of doubles as a bracketed multi-line string with a custom
 * number format.
 *
 * @param m Matrix to format
 * @param nf Number format for the individual values
 * @return Formatted matrix string
 */
public static String format(double[][] m, NumberFormat nf) {
  return formatTo(new StringBuilder().append("[\n"), m, " [", "]\n", ", ", nf).append("]").toString();
}
java
public static String format(Collection<String> d, String sep) { if(d == null) { return "null"; } if(d.isEmpty()) { return ""; } if(d.size() == 1) { return d.iterator().next(); } int len = sep.length() * (d.size() - 1); for(String s : d) { len += s.length(); } Iterator<String> it = d.iterator(); StringBuilder buffer = new StringBuilder(len) // .append(it.next()); while(it.hasNext()) { buffer.append(sep).append(it.next()); } return buffer.toString(); }
java
public static String format(String[] d, String sep) { if(d == null) { return "null"; } if(d.length == 0) { return ""; } if(d.length == 1) { return d[0]; } int len = sep.length() * (d.length - 1); for(String s : d) { len += s.length(); } StringBuilder buffer = new StringBuilder(len)// .append(d[0]); for(int i = 1; i < d.length; i++) { buffer.append(sep).append(d[i]); } return buffer.toString(); }
java
/**
 * Find the best position to split a string for line wrapping.
 *
 * Preference order: the next newline (or end of string) if it comes before
 * {@code width}; otherwise the last blank at or before {@code width};
 * otherwise the first blank or newline after {@code width}; otherwise the
 * end of the string.
 *
 * @param s Text to split
 * @param width Preferred maximum line width
 * @return Index of the chosen split position
 */
public static int findSplitpoint(String s, int width) {
  // the newline (or EOS) is the fallback split position.
  int in = s.indexOf(NEWLINE);
  in = in < 0 ? s.length() : in;
  // Good enough?
  if(in < width) {
    return in;
  }
  // otherwise, search for whitespace
  int iw = s.lastIndexOf(' ', width);
  // good whitespace found?
  if(iw >= 0 && iw < width) {
    return iw;
  }
  // sub-optimal splitpoint - retry AFTER the given position
  int bp = nextPosition(s.indexOf(' ', width), s.indexOf(NEWLINE, width));
  if(bp >= 0) {
    return bp;
  }
  // even worse - can't split!
  return s.length();
}
java
/**
 * Split a string into chunks of (approximately) at most {@code width}
 * characters, breaking at blanks or newlines where possible; separators
 * are consumed and not included in the chunks.
 *
 * @param s Text to wrap
 * @param width Preferred maximum line width
 * @return List of line chunks
 */
public static List<String> splitAtLastBlank(String s, int width) {
  List<String> chunks = new ArrayList<>();
  String tmp = s;
  while(tmp.length() > 0) {
    int index = findSplitpoint(tmp, width);
    // store first part
    chunks.add(tmp.substring(0, index));
    // skip whitespace at beginning of line
    while(index < tmp.length() && tmp.charAt(index) == ' ') {
      index += 1;
    }
    // remove a newline
    if(index < tmp.length() && tmp.regionMatches(index, NEWLINE, 0, NEWLINE.length())) {
      index += NEWLINE.length();
    }
    if(index >= tmp.length()) {
      break;
    }
    tmp = tmp.substring(index);
  }
  return chunks;
}
java
/**
 * Pad a string on the right with spaces up to the given length; strings
 * already at least that long are returned unchanged.
 *
 * @param o String to pad
 * @param len Target length
 * @return Left-aligned, space-padded string
 */
public static String pad(String o, int len) {
  if(o.length() >= len) {
    return o;
  }
  return o + whitespace(len - o.length());
}
java
/**
 * Pad a string on the left with spaces up to the given length; strings
 * already at least that long are returned unchanged.
 *
 * @param o String to pad
 * @param len Target length
 * @return Right-aligned, space-padded string
 */
public static String padRightAligned(String o, int len) {
  if(o.length() >= len) {
    return o;
  }
  return whitespace(len - o.length()) + o;
}
java
/**
 * Format a time delta as a compact string of time-unit fields, largest unit
 * first, using the {@code TIME_UNIT_*} tables (presumably the input is in
 * the smallest unit, i.e. milliseconds — TODO confirm against the tables).
 *
 * @param time Time delta to format
 * @param sep Separator between the unit fields
 * @return Formatted time string
 */
public static String formatTimeDelta(long time, CharSequence sep) {
  final StringBuilder sb = new StringBuilder();
  final Formatter fmt = new Formatter(sb);
  for(int i = TIME_UNIT_SIZES.length - 1; i >= 0; --i) {
    // We do not include ms if we are in the order of minutes.
    if(i == 0 && sb.length() > 4) {
      continue;
    }
    // Separator
    if(sb.length() > 0) {
      sb.append(sep);
    }
    final long acValue = time / TIME_UNIT_SIZES[i];
    time = time % TIME_UNIT_SIZES[i];
    // Suppress leading zero-valued units (buffer still empty).
    if(!(acValue == 0 && sb.length() == 0)) {
      fmt.format("%0" + TIME_UNIT_DIGITS[i] + "d%s", Long.valueOf(acValue), TIME_UNIT_NAMES[i]);
    }
  }
  fmt.close();
  return sb.toString();
}
java
/**
 * Append the given number of '0' characters to a buffer, copying from the
 * preallocated {@code ZEROPADDING} array in chunks.
 *
 * @param buf Buffer to append to
 * @param zeros Number of zeros to append (non-positive appends nothing)
 * @return the same buffer, for chaining
 */
public static StringBuilder appendZeros(StringBuilder buf, int zeros) {
  int remaining = zeros;
  while(remaining > 0) {
    final int chunk = remaining < ZEROPADDING.length ? remaining : ZEROPADDING.length;
    buf.append(ZEROPADDING, 0, chunk);
    remaining -= chunk;
  }
  return buf;
}
java
/**
 * Append the given number of space characters to a buffer, copying from the
 * preallocated {@code SPACEPADDING} array in chunks.
 *
 * @param buf Buffer to append to
 * @param spaces Number of spaces to append (non-positive appends nothing)
 * @return the same buffer, for chaining
 */
public static StringBuilder appendSpace(StringBuilder buf, int spaces) {
  int remaining = spaces;
  while(remaining > 0) {
    final int chunk = remaining < SPACEPADDING.length ? remaining : SPACEPADDING.length;
    buf.append(SPACEPADDING, 0, chunk);
    remaining -= chunk;
  }
  return buf;
}
java
/**
 * Create the SVG group element for the OPTICS plot layer, scaled to the
 * style library's canvas size and translated to honor the margins.
 */
protected void makeLayerElement() {
  plotwidth = StyleLibrary.SCALE;
  // Height follows the aspect ratio of the OPTICS plot.
  plotheight = StyleLibrary.SCALE / optics.getOPTICSPlot(context).getRatio();
  final double margin = context.getStyleLibrary().getSize(StyleLibrary.MARGIN);
  layer = SVGUtil.svgElement(svgp.getDocument(), SVGConstants.SVG_G_TAG);
  // Margins are asymmetric (1.5x on the third side) — presumably leaving
  // room for axis labels; TODO confirm intent.
  final String transform = SVGUtil.makeMarginTransform(getWidth(), getHeight(), plotwidth, plotheight, margin * .5, margin * .5, margin * 1.5, margin * .5);
  SVGUtil.setAtt(layer, SVGConstants.SVG_TRANSFORM_ATTRIBUTE, transform);
}
java
/**
 * Compute one centroid per class over the given vector column.
 *
 * @param dim Dimensionality of the vectors
 * @param vectorcolumn Vector data, indexed by the integers in {@code classes}
 * @param keys Class labels, in output order
 * @param classes Map from class label to member row indexes
 * @return One centroid per class label, in the order of {@code keys}
 */
protected List<Centroid> computeCentroids(int dim, List<V> vectorcolumn, List<ClassLabel> keys, Map<ClassLabel, IntList> classes) {
  final int numc = keys.size();
  List<Centroid> centroids = new ArrayList<>(numc);
  for(int i = 0; i < numc; i++) {
    Centroid c = new Centroid(dim);
    // Feed every member of this class into the centroid aggregator.
    for(IntIterator it = classes.get(keys.get(i)).iterator(); it.hasNext();) {
      c.put(vectorcolumn.get(it.nextInt()));
    }
    centroids.add(c);
  }
  return centroids;
}
java
/**
 * Compute the maximum kNN distance over all entries of this node.
 *
 * @return the largest kNN distance (0 for an empty node)
 */
protected double kNNDistance() {
  double worst = 0.;
  for(int i = 0; i < getNumEntries(); i++) {
    // Math.max keeps NaN-propagation semantics identical to the original.
    worst = Math.max(worst, getEntry(i).getKnnDistance());
  }
  return worst;
}
java
/**
 * Adjust the routing-object entry: delegate to the superclass, then update
 * the entry's kNN distance to the maximum over this node's entries.
 *
 * @return always true (see TODO below)
 */
@Override
public boolean adjustEntry(MkMaxEntry entry, DBID routingObjectID, double parentDistance, AbstractMTree<O, MkMaxTreeNode<O>, MkMaxEntry, ?> mTree) {
  super.adjustEntry(entry, routingObjectID, parentDistance, mTree);
  // adjust knn distance
  entry.setKnnDistance(kNNDistance());
  return true; // TODO: improve
}
java
/**
 * Integrity check: verify that the parent entry's stored kNN distance
 * exactly matches the maximum kNN distance over this node's entries.
 *
 * @throws RuntimeException if the stored distance deviates at all
 */
@Override
protected void integrityCheckParameters(MkMaxEntry parentEntry, MkMaxTreeNode<O> parent, int index, AbstractMTree<O, MkMaxTreeNode<O>, MkMaxEntry, ?> mTree) {
  super.integrityCheckParameters(parentEntry, parent, index, mTree);
  // test if knn distance is correctly set
  MkMaxEntry entry = parent.getEntry(index);
  double knnDistance = kNNDistance();
  // Exact comparison is intended here: any deviation (> 0) is an error.
  if(Math.abs(entry.getKnnDistance() - knnDistance) > 0) {
    throw new RuntimeException("Wrong knnDistance in node " + parent.getPageID() + " at index " + index + " (child " + entry + ")" + "\nsoll: " + knnDistance + ",\n ist: " + entry.getKnnDistance());
  }
}
java
/**
 * Load the data from the database connection (at most once), assign DBIDs,
 * materialize every bundle column as a relation, build any applicable
 * indexes, and fire an insertion event.
 */
@Override
public void initialize() {
  if(databaseConnection == null) {
    return; // Supposedly we initialized already.
  }
  if(LOG.isDebugging()) {
    LOG.debugFine("Loading data from database connection.");
  }
  MultipleObjectsBundle bundle = databaseConnection.loadData();
  // Run at most once.
  databaseConnection = null;
  // Find DBIDs for bundle
  {
    DBIDs bids = bundle.getDBIDs();
    if(bids instanceof ArrayStaticDBIDs) {
      this.ids = (ArrayStaticDBIDs) bids;
    }
    else if(bids == null) {
      // No ids provided: generate a fresh static range.
      this.ids = DBIDUtil.generateStaticDBIDRange(bundle.dataLength());
    }
    else {
      this.ids = (ArrayStaticDBIDs) DBIDUtil.makeUnmodifiable(DBIDUtil.ensureArray(bids));
    }
  }
  // Replace id representation (it would be nicer if we would not need
  // DBIDView at all)
  this.idrep = new DBIDView(this.ids);
  relations.add(this.idrep);
  getHierarchy().add(this, idrep);
  DBIDArrayIter it = this.ids.iter();
  int numrel = bundle.metaLength();
  for(int i = 0; i < numrel; i++) {
    SimpleTypeInformation<?> meta = bundle.meta(i);
    @SuppressWarnings("unchecked")
    SimpleTypeInformation<Object> ometa = (SimpleTypeInformation<Object>) meta;
    WritableDataStore<Object> store = DataStoreUtil.makeStorage(ids, DataStoreFactory.HINT_DB, ometa.getRestrictionClass());
    // Copy this column into the store, aligned by iterator offset.
    for(it.seek(0); it.valid(); it.advance()) {
      store.put(it, bundle.data(it.getOffset(), i));
    }
    Relation<?> relation = new MaterializedRelation<>(ometa, ids, null, store);
    relations.add(relation);
    getHierarchy().add(this, relation);
    // Try to add indexes where appropriate
    for(IndexFactory<?> factory : indexFactories) {
      if(factory.getInputTypeRestriction().isAssignableFromType(ometa)) {
        @SuppressWarnings("unchecked")
        final IndexFactory<Object> ofact = (IndexFactory<Object>) factory;
        @SuppressWarnings("unchecked")
        final Relation<Object> orep = (Relation<Object>) relation;
        final Index index = ofact.instantiate(orep);
        // Time the index construction only when statistics logging is on.
        Duration duration = LOG.isStatistics() ? LOG.newDuration(index.getClass().getName() + ".construction").begin() : null;
        index.initialize();
        if(duration != null) {
          LOG.statistics(duration.end());
        }
        getHierarchy().add(relation, index);
      }
    }
  }
  // fire insertion event
  eventManager.fireObjectsInserted(ids);
}
java
/**
 * Recursive in-place median/Z-style sort of the objects in the interval
 * [start, end), splitting along one dimension per recursion level.
 *
 * @param objs List to reorder in-place
 * @param start Interval start (inclusive)
 * @param end Interval end (presumably exclusive — confirm against
 *        {@code pivotizeList1D})
 * @param mms Interleaved per-dimension bounds [min0, max0, min1, max1, ...];
 *        temporarily narrowed for recursion and restored before returning
 * @param dims Optional subset/permutation of dimensions to cycle through
 *        (may be null to use all dimensions)
 * @param depth Current recursion depth; selects the split dimension
 */
protected void zSort(List<? extends SpatialComparable> objs, int start, int end, double[] mms, int[] dims, int depth) {
  final int numdim = (dims != null) ? dims.length : (mms.length >> 1);
  final int edim = (dims != null) ? dims[depth] : depth;
  // Find the splitting points.
  final double min = mms[2 * edim], max = mms[2 * edim + 1];
  double spos = (min + max) / 2.;
  // Safeguard against duplicate points:
  if(max - spos < STOPVAL || spos - min < STOPVAL) {
    boolean ok = false;
    for(int d = 0; d < numdim; d++) {
      int d2 = ((dims != null) ? dims[d] : d) << 1;
      if(mms[d2 + 1] - mms[d2] >= STOPVAL) {
        ok = true;
        break;
      }
    }
    if(!ok) {
      // Every dimension is degenerate — nothing left to split.
      return;
    }
  }
  int split = pivotizeList1D(objs, start, end, edim, spos, false);
  assert (start <= split && split <= end);
  int nextdim = (depth + 1) % numdim;
  if(start < split - 1) {
    // Recurse into the lower half with a narrowed upper bound.
    mms[2 * edim] = min;
    mms[2 * edim + 1] = spos;
    zSort(objs, start, split, mms, dims, nextdim);
  }
  if(split < end - 1) {
    // Recurse into the upper half with a narrowed lower bound.
    mms[2 * edim] = spos;
    mms[2 * edim + 1] = max;
    zSort(objs, split, end, mms, dims, nextdim);
  }
  // Restore ranges
  mms[2 * edim] = min;
  mms[2 * edim + 1] = max;
}
java
/**
 * Append "id column score" (space-separated) to the given buffer.
 *
 * @param buf Buffer to append to
 * @return the same buffer, for chaining
 */
public StringBuilder appendTo(StringBuilder buf) {
  return buf.append(DBIDUtil.toString((DBIDRef) id)).append(" ").append(column).append(" ").append(score);
}
java
/**
 * Perform one DOC cluster search: repeatedly pick a random seed point and
 * random discriminating sets, derive the relevant subspace dimensions, and
 * keep the best-quality cluster candidate found.
 *
 * NOTE(review): the {@code database} parameter appears unused in this
 * method body.
 *
 * @param database Database (see note above)
 * @param relation Data relation
 * @param S Candidate points to cluster
 * @param d Full dimensionality
 * @param n Number of outer iterations (seed points)
 * @param m Number of inner iterations (discriminating sets per seed)
 * @param r Size of each random discriminating set
 * @param minClusterSize Minimum number of points for a valid cluster
 * @return Best cluster found, or null if none met the size threshold
 */
protected Cluster<SubspaceModel> runDOC(Database database, Relation<V> relation, ArrayModifiableDBIDs S, final int d, int n, int m, int r, int minClusterSize) {
  // Best cluster for the current run.
  DBIDs C = null;
  // Relevant attributes for the best cluster.
  long[] D = null;
  // Quality of the best cluster.
  double quality = Double.NEGATIVE_INFINITY;
  // Bounds for our cluster.
  // ModifiableHyperBoundingBox bounds = new ModifiableHyperBoundingBox(new
  // double[d], new double[d]);
  // Inform the user about the progress in the current iteration.
  FiniteProgress iprogress = LOG.isVerbose() ? new FiniteProgress("Iteration progress for current cluster", m * n, LOG) : null;
  Random random = rnd.getSingleThreadedRandom();
  DBIDArrayIter iter = S.iter();
  for(int i = 0; i < n; ++i) {
    // Pick a random seed point.
    iter.seek(random.nextInt(S.size()));
    for(int j = 0; j < m; ++j) {
      // Choose a set of random points.
      DBIDs randomSet = DBIDUtil.randomSample(S, r, random);
      // Initialize cluster info.
      long[] nD = BitsUtil.zero(d);
      // Test each dimension and build bounding box.
      for(int k = 0; k < d; ++k) {
        if(dimensionIsRelevant(k, relation, randomSet)) {
          BitsUtil.setI(nD, k);
        }
      }
      if(BitsUtil.cardinality(nD) > 0) {
        DBIDs nC = findNeighbors(iter, nD, S, relation);
        if(LOG.isDebuggingFiner()) {
          LOG.finer("Testing a cluster candidate, |C| = " + nC.size() + ", |D| = " + BitsUtil.cardinality(nD));
        }
        // Is the cluster large enough?
        if(nC.size() < minClusterSize) {
          // Too small.
          if(LOG.isDebuggingFiner()) {
            LOG.finer("... but it's too small.");
          }
          continue;
        }
        // Better cluster than before?
        double nQuality = computeClusterQuality(nC.size(), BitsUtil.cardinality(nD));
        if(nQuality > quality) {
          if(LOG.isDebuggingFiner()) {
            LOG.finer("... and it's the best so far: " + nQuality + " vs. " + quality);
          }
          C = nC;
          D = nD;
          quality = nQuality;
        }
        else {
          if(LOG.isDebuggingFiner()) {
            LOG.finer("... but we already have a better one.");
          }
        }
      }
      LOG.incrementProcessed(iprogress);
    }
  }
  LOG.ensureCompleted(iprogress);
  return (C != null) ? makeCluster(relation, C, D) : null;
}
java
/**
 * Find all points of S within threshold {@code w} of the query point, using
 * the maximum distance restricted to the given subspace dimensions.
 *
 * @param q Query point
 * @param nD Bitmask of relevant dimensions
 * @param S Candidate points
 * @param relation Data relation
 * @return Points of S with subspace distance at most w from q
 */
protected DBIDs findNeighbors(DBIDRef q, long[] nD, ArrayModifiableDBIDs S, Relation<V> relation) {
  // Weights for distance (= rectangle query)
  DistanceQuery<V> dq = relation.getDistanceQuery(new SubspaceMaximumDistanceFunction(nD));
  // TODO: add filtering capabilities into query API!
  // Until then, using the range query API will be unnecessarily slow.
  // RangeQuery<V> rq = relation.getRangeQuery(df, DatabaseQuery.HINT_SINGLE);
  ArrayModifiableDBIDs nC = DBIDUtil.newArray();
  for(DBIDIter it = S.iter(); it.valid(); it.advance()) {
    if(dq.distance(q, it) <= w) {
      nC.add(it);
    }
  }
  return nC;
}
java
protected Cluster<SubspaceModel> makeCluster(Relation<V> relation, DBIDs C, long[] D) { DBIDs ids = DBIDUtil.newHashSet(C); // copy, also to lose distance values! Cluster<SubspaceModel> cluster = new Cluster<>(ids); cluster.setModel(new SubspaceModel(new Subspace(D), Centroid.make(relation, ids).getArrayRef())); return cluster; }
java
/**
 * Count inner nodes ("depth") and leaves of a clustering hierarchy.
 *
 * @param c Clustering to analyze
 * @return two-element array: [inner node count, leaf count]
 */
protected static <M extends Model> int[] findDepth(Clustering<M> c) {
  final int[] counts = { 0, 0 };
  final Hierarchy<Cluster<M>> h = c.getClusterHierarchy();
  for(It<Cluster<M>> it = c.iterToplevelClusters(); it.valid(); it.advance()) {
    findDepth(h, it.get(), counts);
  }
  return counts;
}
java
private static <M extends Model> void findDepth(Hierarchy<Cluster<M>> hier, Cluster<M> cluster, int[] size) { if(hier.numChildren(cluster) > 0) { for(It<Cluster<M>> iter = hier.iterChildren(cluster); iter.valid(); iter.advance()) { findDepth(hier, iter.get(), size); } size[0] += 1; // Depth } else { size[1] += 1; // Leaves } }
java
/**
 * Heuristically pick a number of columns for laying out {@code numc}
 * labeled entries in a width-by-height area.
 *
 * @param width Available width
 * @param height Available height
 * @param numc Number of entries to place
 * @param maxwidth Estimated maximum label width (relative to row height)
 * @return Suggested number of columns
 */
protected static int getPreferredColumns(double width, double height, int numc, double maxwidth) {
  // Maximum width (compared to height) of labels - guess.
  // FIXME: do we really need to do this three-step computation?
  // Number of rows we'd use in a squared layout:
  final double rows = Math.ceil(FastMath.pow(numc * maxwidth, height / (width + height)));
  // Given this number of rows (plus one for header), use this many columns:
  return (int) Math.ceil(numc / (rows + 1));
}
java
/**
 * Add a parameter to the builder (fluent API).
 *
 * @param opt Option id
 * @param value Parameter value
 * @return this builder, for chaining
 */
public ELKIBuilder<T> with(String opt, Object value) {
  p.addParameter(opt, value);
  return this;
}
java
/**
 * Instantiate the target class with the collected parameters. May only be
 * called once: the parameter list is nulled afterwards to enforce this.
 * Unused parameters are logged as a warning.
 *
 * @return the built instance
 * @throws AbortException if called a second time
 */
@SuppressWarnings("unchecked")
public <C extends T> C build() {
  if(p == null) {
    throw new AbortException("build() may be called only once.");
  }
  final T obj = ClassGenericsUtil.parameterizeOrAbort(clazz, p);
  if(p.hasUnusedParameters()) {
    LOG.warning("Unused parameters: " + p.getRemainingParameters());
  }
  p = null; // Prevent build() from being called again.
  return (C) obj;
}
java
/**
 * Generate a random initial solution: a size-by-dim matrix of Gaussian
 * noise scaled by {@code INITIAL_SOLUTION_SCALE}.
 *
 * @param size Number of points (rows)
 * @param dim Dimensionality (columns)
 * @param random Random generator
 * @return freshly allocated random solution matrix
 */
protected static double[][] randomInitialSolution(final int size, final int dim, Random random) {
  final double[][] solution = new double[size][dim];
  for(double[] row : solution) {
    for(int j = 0; j < dim; j++) {
      row[j] = random.nextGaussian() * INITIAL_SOLUTION_SCALE;
    }
  }
  return solution;
}
java
/**
 * Squared Euclidean distance between two vectors of equal length; also
 * increments the {@code projectedDistances} statistics counter.
 *
 * @param v1 First vector
 * @param v2 Second vector (same length, asserted)
 * @return sum of squared component differences
 */
protected double sqDist(double[] v1, double[] v2) {
  assert (v1.length == v2.length) : "Lengths do not agree: " + v1.length + " " + v2.length;
  double agg = 0;
  for(int d = 0; d < v1.length; d++) {
    final double delta = v1[d] - v2[d];
    agg += delta * delta;
  }
  ++projectedDistances; // statistics bookkeeping
  return agg;
}
java
/**
 * Momentum-based gradient update with per-component adaptive gains.
 *
 * The {@code meta} array stores, per point, a block of {@code 3 * dim}
 * values: gradient, momentum ("movement"), and gain, in that order.
 *
 * @param sol Current solution, updated in place
 * @param meta Gradient / momentum / gain scratch array (see layout above)
 * @param it Iteration number; controls the momentum switch
 */
protected void updateSolution(double[][] sol, double[] meta, int it) {
  // Use the lower initial momentum only before the switch point.
  final double mom = (it < momentumSwitch && initialMomentum < finalMomentum) ? initialMomentum : finalMomentum;
  final int dim3 = dim * 3;
  for(int i = 0, off = 0; i < sol.length; i++, off += dim3) {
    final double[] sol_i = sol[i];
    for(int k = 0; k < dim; k++) {
      // Indexes in meta array
      final int gradk = off + k, movk = gradk + dim, gaink = movk + dim;
      // Adjust learning rate: grow the gain when gradient and momentum
      // disagree in sign, shrink it otherwise; bounded below by MIN_GAIN.
      meta[gaink] = MathUtil.max(((meta[gradk] > 0) != (meta[movk] > 0)) ? (meta[gaink] + 0.2) : (meta[gaink] * 0.8), MIN_GAIN);
      meta[movk] *= mom; // Dampening the previous momentum
      meta[movk] -= learningRate * meta[gradk] * meta[gaink]; // Learn
      sol_i[k] += meta[movk];
    }
  }
}
java
/**
 * Compute the linear offset of the (x, y) cell in a triangular matrix
 * layout; symmetric in its arguments.
 *
 * @param x First index
 * @param y Second index
 * @return linear offset into the triangle array
 */
private int getOffset(int x, int y) {
  final int lo = Math.min(x, y), hi = Math.max(x, y);
  return triangleSize(hi) + lo;
}
java
@Override public double getWeight(double distance, double max, double stddev) { if(stddev <= 0) { return 1; } double scaleddistance = distance / (scaling * stddev); // After this, the result would be negative. if(scaleddistance >= 1.0) { return 0.0; } return 1.0 - scaleddistance * scaleddistance; }
java
/**
 * Get the OPTICS plot for the cluster order, creating it lazily on first
 * access.
 *
 * NOTE(review): the lazy initialization is unsynchronized — presumably only
 * used from a single (UI) thread; confirm before concurrent use.
 *
 * @param context Visualizer context
 * @return the (cached) OPTICS plot
 */
public OPTICSPlot getOPTICSPlot(VisualizerContext context) {
  if(plot == null) {
    plot = OPTICSPlot.plotForClusterOrder(clusterOrder, context);
  }
  return plot;
}
java
/**
 * Set the flag value, bridging the primitive boolean into the checked
 * parameter API.
 *
 * @param val New flag value
 */
public void setValue(boolean val) {
  try {
    super.setValue(Boolean.valueOf(val));
  }
  catch(ParameterException e) {
    // We're pretty sure that any Boolean is okay, so this should never be
    // reached.
    throw new AbortException("Flag did not accept boolean value!", e);
  }
}
java
/**
 * Run the LoOP outlier algorithm: compute probabilistic distances, PLOF
 * values, and map them to outlier probabilities via the Gaussian error
 * function.
 *
 * @param database Database to query
 * @param relation Data relation to process
 * @return Outlier result with probabilistic scores
 * @throws AbortException if no kNN queries are available
 */
public OutlierResult run(Database database, Relation<O> relation) {
  StepProgress stepprog = LOG.isVerbose() ? new StepProgress(5) : null;
  Pair<KNNQuery<O>, KNNQuery<O>> pair = getKNNQueries(database, relation, stepprog);
  KNNQuery<O> knnComp = pair.getFirst();
  KNNQuery<O> knnReach = pair.getSecond();
  // Assert we got something
  if(knnComp == null) {
    throw new AbortException("No kNN queries supported by database for comparison distance function.");
  }
  if(knnReach == null) {
    throw new AbortException("No kNN queries supported by database for density estimation distance function.");
  }
  // FIXME: tie handling!
  // Probabilistic distances
  WritableDoubleDataStore pdists = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  LOG.beginStep(stepprog, 3, "Computing pdists");
  computePDists(relation, knnReach, pdists);
  // Compute PLOF values.
  WritableDoubleDataStore plofs = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  LOG.beginStep(stepprog, 4, "Computing PLOF");
  double nplof = computePLOFs(relation, knnComp, pdists, plofs);
  // Normalize the outlier scores.
  DoubleMinMax mm = new DoubleMinMax();
  { // compute LOOP_SCORE of each db object
    LOG.beginStep(stepprog, 5, "Computing LoOP scores");
    FiniteProgress progressLOOPs = LOG.isVerbose() ? new FiniteProgress("LoOP for objects", relation.size(), LOG) : null;
    final double norm = 1. / (nplof * MathUtil.SQRT2);
    for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
      // LoOP score: erf((plof - 1) / (nplof * sqrt(2))).
      double loop = NormalDistribution.erf((plofs.doubleValue(iditer) - 1.) * norm);
      plofs.putDouble(iditer, loop);
      mm.put(loop);
      LOG.incrementProcessed(progressLOOPs);
    }
    LOG.ensureCompleted(progressLOOPs);
  }
  LOG.setCompleted(stepprog);
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Local Outlier Probabilities", "loop-outlier", plofs, relation.getDBIDs());
  OutlierScoreMeta scoreMeta = new ProbabilisticOutlierScore(mm.getMin(), mm.getMax(), 0.);
  return new OutlierResult(scoreMeta, scoreResult);
}
java
/**
 * Compute each object's probabilistic distance (pdist): the quadratic mean
 * of the distances to its {@code kreach} nearest neighbors, excluding the
 * query point itself.
 *
 * @param relation Data relation
 * @param knn kNN query used for density estimation
 * @param pdists Output storage for the pdist values
 */
protected void computePDists(Relation<O> relation, KNNQuery<O> knn, WritableDoubleDataStore pdists) {
  // computing PRDs
  FiniteProgress prdsProgress = LOG.isVerbose() ? new FiniteProgress("pdists", relation.size(), LOG) : null;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    // + 1: the query point is returned as its own neighbor.
    final KNNList neighbors = knn.getKNNForDBID(iditer, kreach + 1);
    // use first kref neighbors as reference set
    int ks = 0;
    double ssum = 0.;
    for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid() && ks < kreach; neighbor.advance()) {
      if(DBIDUtil.equal(neighbor, iditer)) {
        continue; // Skip the query point itself.
      }
      final double d = neighbor.doubleValue();
      ssum += d * d;
      ks++;
    }
    // Quadratic mean; 0 if no usable neighbors were found.
    double pdist = ks > 0 ? FastMath.sqrt(ssum / ks) : 0.;
    pdists.putDouble(iditer, pdist);
    LOG.incrementProcessed(prdsProgress);
  }
  LOG.ensureCompleted(prdsProgress);
}
java
/**
 * Compute each object's PLOF: its pdist relative to the mean pdist of its
 * comparison neighborhood, floored at 1; simultaneously accumulate the
 * nPLOF normalization factor.
 *
 * @param relation Data relation
 * @param knn kNN query for the comparison neighborhood
 * @param pdists Previously computed probabilistic distances
 * @param plofs Output storage for the PLOF values
 * @return nPLOF normalization factor (1 if it would be non-positive)
 */
protected double computePLOFs(Relation<O> relation, KNNQuery<O> knn, WritableDoubleDataStore pdists, WritableDoubleDataStore plofs) {
  FiniteProgress progressPLOFs = LOG.isVerbose() ? new FiniteProgress("PLOFs for objects", relation.size(), LOG) : null;
  double nplof = 0.;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    // + 1: the query point is returned as its own neighbor.
    final KNNList neighbors = knn.getKNNForDBID(iditer, kcomp + 1);
    // use first kref neighbors as comparison set.
    int ks = 0;
    double sum = 0.;
    for(DBIDIter neighbor = neighbors.iter(); neighbor.valid() && ks < kcomp; neighbor.advance()) {
      if(DBIDUtil.equal(neighbor, iditer)) {
        continue; // Skip the query point itself.
      }
      sum += pdists.doubleValue(neighbor);
      ks++;
    }
    double plof = MathUtil.max(pdists.doubleValue(iditer) * ks / sum, 1.0);
    // Degenerate neighborhoods (sum == 0 or ks == 0) yield NaN/Inf;
    // treat those points as inliers.
    if(Double.isNaN(plof) || Double.isInfinite(plof)) {
      plof = 1.0;
    }
    plofs.putDouble(iditer, plof);
    nplof += (plof - 1.0) * (plof - 1.0);
    LOG.incrementProcessed(progressPLOFs);
  }
  LOG.ensureCompleted(progressPLOFs);
  nplof = lambda * FastMath.sqrt(nplof / relation.size());
  if(LOG.isDebuggingFine()) {
    LOG.debugFine("nplof normalization factor is " + nplof);
  }
  return nplof > 0. ? nplof : 1.;
}
java
/**
 * Unchecked adapter: cast the object to the handled type and delegate to
 * the typed {@code write} method.
 *
 * @param out Output stream
 * @param label Label to print before the object
 * @param object Object to write; must actually be of type O
 * @throws IOException on write errors
 */
@SuppressWarnings("unchecked")
public final void writeObject(TextWriterStream out, String label, Object object) throws IOException {
  write(out, label, (O) object);
}
java
/**
 * Split eigenvalues into "strong" and "weak": accept leading eigenvalues
 * while they exceed the expected variance, returning as soon as their
 * cumulative share reaches the progressively relaxed alpha level.
 *
 * @param eigenValues Eigenvalues (presumably sorted in descending order —
 *        TODO confirm with callers)
 * @return Number of strong eigenvectors; never 0 (falls back to all)
 */
@Override
public int filter(double[] eigenValues) {
  // determine sum of eigenvalues
  double totalSum = 0;
  for(int i = 0; i < eigenValues.length; i++) {
    totalSum += eigenValues[i];
  }
  double expectedVariance = totalSum / eigenValues.length * walpha;
  // determine strong and weak eigenpairs
  double currSum = 0;
  // Loop deliberately stops before the last eigenvalue: either we return
  // early, or we fall through to "all strong" below.
  for(int i = 0; i < eigenValues.length - 1; i++) {
    // weak Eigenvector?
    if(eigenValues[i] < expectedVariance) {
      break;
    }
    currSum += eigenValues[i];
    // calculate progressive alpha level
    double alpha = 1.0 - (1.0 - palpha) * (1.0 - (i + 1) / (double) eigenValues.length);
    if(currSum / totalSum >= alpha) {
      return i + 1;
    }
  }
  // the code using this method doesn't expect an empty strong set,
  // if we didn't find any strong ones, we make all vectors strong
  return eigenValues.length;
}
java
/**
 * Collect all outlier results reachable from a result object: the object
 * itself if it is one, otherwise every outlier result in its hierarchy.
 *
 * @param r Result to inspect
 * @return List of outlier results (possibly empty, never null)
 */
public static List<OutlierResult> getOutlierResults(Result r) {
  if(r instanceof OutlierResult) {
    List<OutlierResult> ors = new ArrayList<>(1);
    ors.add((OutlierResult) r);
    return ors;
  }
  if(r instanceof HierarchicalResult) {
    return ResultUtil.filterResults(((HierarchicalResult) r).getHierarchy(), r, OutlierResult.class);
  }
  return Collections.emptyList();
}
java
/**
 * Evaluate the stored scores with the given evaluation measure, using the
 * score DBIDs as the positive set.
 *
 * @param eval Evaluation measure
 * @return evaluation score
 */
double evaluateBy(ScoreEvaluation eval) {
  return eval.evaluate(new DBIDsTest(DBIDUtil.ensureSet(scores.getDBIDs())), new OutlierScoreAdapter(this));
}
java
/**
 * Distance between two vectors of possibly different lengths: the shared
 * prefix is handled by {@code preDistance}, the surplus components of the
 * longer vector by {@code preNorm}.
 *
 * @param v1 First vector
 * @param v2 Second vector
 * @return aggregated (pre-)distance value
 */
public double distance(double[] v1, double[] v2) {
  final int d1 = v1.length, d2 = v2.length;
  final int shared = Math.min(d1, d2);
  double agg = preDistance(v1, v2, 0, shared);
  if(d1 > shared) {
    agg += preNorm(v1, shared, d1);
  }
  else if(d2 > shared) {
    agg += preNorm(v2, shared, d2);
  }
  return agg;
}
java
/**
 * Recursively compute subtree weights bottom-up: a node's weight is the
 * sum of its children's weights, but at least 1 (so leaves count as 1).
 *
 * @param node Subtree root; its weight field is updated in place
 */
private void computeWeights(Node node) {
  int wsum = 0;
  for(Node child : node.children) {
    computeWeights(child);
    wsum += child.weight;
  }
  node.weight = Math.max(1, wsum);
}
java
/**
 * When new results arrive, build cluster-comparison segments — but only if
 * there are at least two clusterings to compare.
 */
@Override
public void processNewResult(ResultHierarchy hier, Result result) {
  // Get all new clusterings
  // TODO: handle clusterings added later, too. Can we update the result?
  List<Clustering<?>> clusterings = Clustering.getClusteringResults(result);
  // Abort if not enough clusterings to compare
  if(clusterings.size() < 2) {
    return;
  }
  // create segments
  Segments segments = new Segments(clusterings);
  hier.add(result, segments);
}
java
/**
 * Per-dimension lower distance bound between the query approximation cell
 * and the given cell: 0 if they coincide, otherwise the lookup value at the
 * boundary facing the query.
 *
 * @param dimension Dimension index
 * @param vp Cell index of the candidate in this dimension
 * @return partial minimum distance contribution
 */
public double getPartialMinDist(int dimension, int vp) {
  final int qp = queryApprox.getApproximation(dimension);
  if(vp == qp) {
    return 0.0; // Same cell — no lower-bound contribution.
  }
  return vp < qp ? lookup[dimension][vp + 1] : lookup[dimension][vp];
}
java
/**
 * Lower distance bound for an approximated vector: sum of the per-dimension
 * partial minimum distances, raised to 1/p.
 *
 * @param vec Vector approximation to bound
 * @return minimum distance estimate
 */
public double getMinDist(VectorApproximation vec) {
  double agg = 0;
  for(int d = 0; d < lookup.length; d++) {
    agg += getPartialMinDist(d, vec.getApproximation(d));
  }
  return FastMath.pow(agg, onebyp);
}
java
/**
 * Per-dimension upper distance bound between the query approximation cell
 * and the given cell: the lookup value at the boundary away from the query,
 * or the larger of both boundaries when the cells coincide.
 *
 * @param dimension Dimension index
 * @param vp Cell index of the candidate in this dimension
 * @return partial maximum distance contribution
 */
public double getPartialMaxDist(int dimension, int vp) {
  final int qp = queryApprox.getApproximation(dimension);
  if(vp == qp) {
    // Same cell: worst case is the farther of the two cell boundaries.
    return Math.max(lookup[dimension][vp], lookup[dimension][vp + 1]);
  }
  return vp < qp ? lookup[dimension][vp] : lookup[dimension][vp + 1];
}
java
/**
 * Precompute the lookup table of p-th-power distances from the query value
 * to every split position, per dimension.
 *
 * @param splitPositions Split positions, [dimension][border]
 * @param query Query vector
 * @param p Distance exponent
 */
private void initializeLookupTable(double[][] splitPositions, NumberVector query, double p) {
  final int dims = splitPositions.length;
  final int borders = splitPositions[0].length;
  lookup = new double[dims][borders];
  for(int d = 0; d < dims; d++) {
    final double qv = query.doubleValue(d);
    final double[] row = lookup[d], splits = splitPositions[d];
    for(int b = 0; b < borders; b++) {
      row[b] = FastMath.pow(splits[b] - qv, p);
    }
  }
}
java
/**
 * Create the style result: use the first clustering found in the database,
 * or generate a default clustering if none exists.
 *
 * @param stylelib Style library to use
 */
protected void makeStyleResult(StyleLibrary stylelib) {
  final Database db = ResultUtil.findDatabase(hier);
  stylelibrary = stylelib;
  List<Clustering<? extends Model>> clusterings = Clustering.getClusteringResults(db);
  if(!clusterings.isEmpty()) {
    stylepolicy = new ClusterStylingPolicy(clusterings.get(0), stylelib);
  }
  else {
    // No clustering available — fall back to a generated default.
    Clustering<Model> c = generateDefaultClustering();
    stylepolicy = new ClusterStylingPolicy(c, stylelib);
  }
}
java
/**
 * Forward a data store event to all registered listeners.
 *
 * NOTE(review): index-based iteration presumably tolerates listeners being
 * appended during dispatch — confirm before replacing with an iterator.
 */
@Override
public void contentChanged(DataStoreEvent e) {
  for(int i = 0; i < listenerList.size(); i++) {
    listenerList.get(i).contentChanged(e);
  }
}
java
/**
 * Notify all visualization factories of a new result item; a failing
 * factory is logged and skipped so the remaining factories still run.
 *
 * @param item New result item
 */
private void notifyFactories(Object item) {
  for(VisualizationProcessor f : factories) {
    try {
      f.processNewResult(this, item);
    }
    catch(Throwable e) {
      // Deliberately broad catch: one broken visualizer must not break all.
      LOG.warning("VisFactory " + f.getClass().getCanonicalName() + " failed:", e);
    }
  }
}
java
/**
 * Choose the number of entries for the first partition of a bulk split:
 * everything if it fits in one node, otherwise leave at least
 * {@code minEntries} for the remainder, capped at {@code maxEntries}.
 *
 * @param numEntries Total number of entries
 * @param minEntries Minimum entries per node
 * @param maxEntries Maximum entries per node
 * @return split point (entries in the first partition)
 * @throws IllegalArgumentException if numEntries is below minEntries
 */
protected int chooseBulkSplitPoint(int numEntries, int minEntries, int maxEntries) {
  if(numEntries < minEntries) {
    throw new IllegalArgumentException("numEntries < minEntries!");
  }
  if(numEntries <= maxEntries) {
    return numEntries;
  }
  // Keep the remainder above minEntries when it would end up too small.
  return numEntries < maxEntries + minEntries ? numEntries - minEntries : maxEntries;
}
java
/**
 * Partition a list into consecutive slices of near-equal size, each within
 * [minEntries, maxEntries] (checked by assertion only).
 *
 * @param objects Objects to partition; sublist views are returned, not copies
 * @param minEntries Minimum partition size (asserted)
 * @param maxEntries Maximum partition size
 * @return List of partitions, backed by the input list
 */
protected <T> List<List<T>> trivialPartition(List<T> objects, int minEntries, int maxEntries) {
  // build partitions
  final int size = objects.size();
  final int numberPartitions = (int) Math.ceil(((double) size) / maxEntries);
  List<List<T>> partitions = new ArrayList<>(numberPartitions);
  int start = 0;
  for(int pnum = 0; pnum < numberPartitions; pnum++) {
    // Spread elements evenly over the partitions.
    int end = (int) ((pnum + 1.) * size / numberPartitions);
    if(pnum == numberPartitions - 1) {
      // Guard against floating-point rounding on the last partition.
      end = size;
    }
    assert ((end - start) >= minEntries && (end - start) <= maxEntries);
    partitions.add(objects.subList(start, end));
    start = end;
  }
  return partitions;
}
java
/**
 * Align the columns of an incoming bundle with the existing relations by
 * type compatibility, creating new relations for unmatched columns. Each
 * relation is matched at most once (tracked in the {@code used} bitmask).
 *
 * @param pack Bundle of new objects
 * @return Target relation for each bundle column
 */
protected Relation<?>[] alignColumns(ObjectBundle pack) {
  // align representations.
  Relation<?>[] targets = new Relation<?>[pack.metaLength()];
  long[] used = BitsUtil.zero(relations.size());
  for(int i = 0; i < targets.length; i++) {
    SimpleTypeInformation<?> meta = pack.meta(i);
    // TODO: aggressively try to match exact metas first?
    // Try to match unused representations only
    for(int j = BitsUtil.nextClearBit(used, 0); j >= 0 && j < relations.size(); j = BitsUtil.nextClearBit(used, j + 1)) {
      Relation<?> relation = relations.get(j);
      if(relation.getDataTypeInformation().isAssignableFromType(meta)) {
        targets[i] = relation;
        BitsUtil.setI(used, j);
        break;
      }
    }
    if(targets[i] == null) {
      // No compatible relation found: create a fresh one and mark it used.
      targets[i] = addNewRelation(meta);
      BitsUtil.setI(used, relations.size() - 1);
    }
  }
  return targets;
}
java
/**
 * Create and register a new materialized relation for the given column
 * type, including any indexes whose input type restriction matches.
 *
 * @param meta Column type information
 * @return the newly added relation
 */
private Relation<?> addNewRelation(SimpleTypeInformation<?> meta) {
  @SuppressWarnings("unchecked")
  SimpleTypeInformation<Object> ometa = (SimpleTypeInformation<Object>) meta;
  Relation<?> relation = new MaterializedRelation<>(ometa, ids);
  relations.add(relation);
  getHierarchy().add(this, relation);
  // Try to add indexes where appropriate
  for(IndexFactory<?> factory : indexFactories) {
    if(factory.getInputTypeRestriction().isAssignableFromType(meta)) {
      @SuppressWarnings("unchecked")
      final IndexFactory<Object> ofact = (IndexFactory<Object>) factory;
      @SuppressWarnings("unchecked")
      final Relation<Object> orep = (Relation<Object>) relation;
      Index index = ofact.instantiate(orep);
      index.initialize();
      getHierarchy().add(relation, index);
    }
  }
  return relation;
}
java
/**
 * Delete a single object: remove the id from the id set, delete it from
 * every modifiable relation (skipping the id view itself), and release the
 * DBID back to the factory.
 *
 * @param id Object id to delete
 * @throws AbortException if a non-modifiable relation is present
 */
private void doDelete(DBIDRef id) {
  // Remove id
  ids.remove(id);
  // Remove from all representations.
  for(Relation<?> relation : relations) {
    // ID has already been removed, and this would loop...
    if(relation == idrep) {
      continue;
    }
    if(!(relation instanceof ModifiableRelation)) {
      throw new AbortException("Non-modifiable relations have been added to the database.");
    }
    ((ModifiableRelation<?>) relation).delete(id);
  }
  DBIDFactory.FACTORY.deallocateSingleDBID(id);
}
java
/**
 * Greedy farthest-point medoid selection: start from a random sample point,
 * then repeatedly add the point farthest from its closest chosen medoid.
 * Chosen points are popped off the end of the working array.
 *
 * @param distFunc Distance query
 * @param sampleSet Sample to choose medoids from
 * @param m Number of medoids to select
 * @param random Random generator
 * @return m greedily chosen medoids
 */
private ArrayDBIDs greedy(DistanceQuery<V> distFunc, DBIDs sampleSet, int m, Random random) {
  ArrayModifiableDBIDs medoids = DBIDUtil.newArray(m);
  ArrayModifiableDBIDs s = DBIDUtil.newArray(sampleSet);
  DBIDArrayIter iter = s.iter();
  DBIDVar m_i = DBIDUtil.newVar();
  int size = s.size();
  // Move a random element to the end, then pop()
  s.swap(random.nextInt(size), --size);
  medoids.add(s.pop(m_i));
  if(LOG.isDebugging()) {
    LOG.debugFiner("medoids " + medoids.toString());
  }
  // To track the current worst element:
  int worst = -1;
  double worstd = Double.NEGATIVE_INFINITY;
  // compute distances between each point in S and m_i
  WritableDoubleDataStore distances = DataStoreUtil.makeDoubleStorage(s, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  for(iter.seek(0); iter.getOffset() < size; iter.advance()) {
    final double dist = distFunc.distance(iter, m_i);
    distances.putDouble(iter, dist);
    if(dist > worstd) {
      worstd = dist;
      worst = iter.getOffset();
    }
  }
  for(int i = 1; i < m; i++) {
    // choose medoid m_i to be far from previous medoids
    s.swap(worst, --size);
    medoids.add(s.pop(m_i));
    // compute distances of each point to closest medoid; track worst.
    worst = -1;
    worstd = Double.NEGATIVE_INFINITY;
    for(iter.seek(0); iter.getOffset() < size; iter.advance()) {
      double dist_new = distFunc.distance(iter, m_i);
      double dist_old = distances.doubleValue(iter);
      // Maintain the distance to the *closest* medoid seen so far.
      double dist = (dist_new < dist_old) ? dist_new : dist_old;
      distances.putDouble(iter, dist);
      if(dist > worstd) {
        worstd = dist;
        worst = iter.getOffset();
      }
    }
    if(LOG.isDebugging()) {
      LOG.debugFiner("medoids " + medoids.toString());
    }
  }
  return medoids;
}
java
/**
 * Draw an initial medoid set: a random sample of k ids, as an array.
 *
 * @param sampleSet Set to sample from
 * @param k Sample size
 * @param random Random generator
 * @return random sample of size k
 */
private ArrayDBIDs initialSet(DBIDs sampleSet, int k, Random random) {
  DBIDs sample = DBIDUtil.randomSample(sampleSet, k, random);
  return DBIDUtil.ensureArray(sample);
}
java
/**
 * Build the current medoid set: keep the good medoids of {@code m_best},
 * and replace each bad one with a random candidate drawn (without
 * replacement) from the medoids not in {@code m_best}.
 *
 * @param m All medoid candidates
 * @param m_best Best medoid set so far
 * @param m_bad Medoids flagged as bad
 * @param random Random generator
 * @return New current medoid set
 */
private ArrayDBIDs computeM_current(DBIDs m, DBIDs m_best, DBIDs m_bad, Random random) {
  // Replacement candidates: all medoids not in the best set.
  ArrayModifiableDBIDs m_list = DBIDUtil.newArray(m);
  m_list.removeDBIDs(m_best);
  DBIDArrayMIter it = m_list.iter();
  ArrayModifiableDBIDs m_current = DBIDUtil.newArray();
  for(DBIDIter iter = m_best.iter(); iter.valid(); iter.advance()) {
    if(m_bad.contains(iter)) {
      int currentSize = m_current.size();
      // Redraw until the add actually grew the set — presumably guarding
      // against duplicate ids; TODO confirm why add can be a no-op here.
      while(m_current.size() == currentSize) {
        m_current.add(it.seek(random.nextInt(m_list.size())));
        it.remove();
      }
    }
    else {
      m_current.add(iter);
    }
  }
  return m_current;
}
java
/**
 * Determine the relevant dimensions for each medoid: compute the average
 * per-dimension distance from the points of each medoid's locality to the
 * medoid, then pick the dimensions with the smallest standardized scores.
 *
 * @param medoids current medoids
 * @param database relation holding the vectors
 * @param distFunc distance query (used to compute localities)
 * @param rangeQuery range query (used to compute localities)
 * @return bitmask of relevant dimensions per medoid (indexed by medoid offset)
 */
private long[][] findDimensions(ArrayDBIDs medoids, Relation<V> database, DistanceQuery<V> distFunc, RangeQuery<V> rangeQuery) {
  // get localities
  DataStore<DBIDs> localities = getLocalities(medoids, distFunc, rangeQuery);
  // compute x_ij = avg distance from points in l_i to medoid m_i
  final int dim = RelationUtil.dimensionality(database);
  final int numc = medoids.size();
  double[][] averageDistances = new double[numc][];
  for(DBIDArrayIter iter = medoids.iter(); iter.valid(); iter.advance()) {
    V medoid_i = database.get(iter);
    DBIDs l_i = localities.get(iter);
    double[] x_i = new double[dim];
    // Accumulate absolute per-dimension deviations over the locality.
    for(DBIDIter qr = l_i.iter(); qr.valid(); qr.advance()) {
      V o = database.get(qr);
      for(int d = 0; d < dim; d++) {
        x_i[d] += Math.abs(medoid_i.doubleValue(d) - o.doubleValue(d));
      }
    }
    // Normalize to the average deviation per dimension.
    for(int d = 0; d < dim; d++) {
      x_i[d] /= l_i.size();
    }
    averageDistances[iter.getOffset()] = x_i;
  }
  List<DoubleIntInt> z_ijs = computeZijs(averageDistances, dim);
  return computeDimensionMap(z_ijs, dim, numc);
}
java
/**
 * Refinement variant of dimension selection: like
 * {@code findDimensions(ArrayDBIDs, ...)}, but uses the cluster centroids
 * and full cluster memberships instead of medoids and localities.
 *
 * @param clusters current clusters
 * @param database relation holding the vectors
 * @return pairs of (cluster centroid, relevant-dimension bitmask); clusters
 *         that received no dimensions are omitted
 */
private List<Pair<double[], long[]>> findDimensions(ArrayList<PROCLUSCluster> clusters, Relation<V> database) {
  // compute x_ij = avg distance from points in c_i to c_i.centroid
  final int dim = RelationUtil.dimensionality(database);
  final int numc = clusters.size();
  double[][] averageDistances = new double[numc][];
  for(int i = 0; i < numc; i++) {
    PROCLUSCluster c_i = clusters.get(i);
    double[] x_i = new double[dim];
    for(DBIDIter iter = c_i.objectIDs.iter(); iter.valid(); iter.advance()) {
      V o = database.get(iter);
      for(int d = 0; d < dim; d++) {
        x_i[d] += Math.abs(c_i.centroid[d] - o.doubleValue(d));
      }
    }
    for(int d = 0; d < dim; d++) {
      x_i[d] /= c_i.objectIDs.size();
    }
    averageDistances[i] = x_i;
  }
  List<DoubleIntInt> z_ijs = computeZijs(averageDistances, dim);
  long[][] dimensionMap = computeDimensionMap(z_ijs, dim, numc);
  // mapping cluster -> dimensions
  List<Pair<double[], long[]>> result = new ArrayList<>(numc);
  for(int i = 0; i < numc; i++) {
    long[] dims_i = dimensionMap[i];
    if(dims_i == null) {
      // Cluster received no relevant dimensions; drop it.
      continue;
    }
    result.add(new Pair<>(clusters.get(i).centroid, dims_i));
  }
  return result;
}
java
private List<DoubleIntInt> computeZijs(double[][] averageDistances, final int dim) { List<DoubleIntInt> z_ijs = new ArrayList<>(averageDistances.length * dim); for(int i = 0; i < averageDistances.length; i++) { double[] x_i = averageDistances[i]; // y_i double y_i = 0; for(int j = 0; j < dim; j++) { y_i += x_i[j]; } y_i /= dim; // sigma_i double sigma_i = 0; for(int j = 0; j < dim; j++) { double diff = x_i[j] - y_i; sigma_i += diff * diff; } sigma_i /= (dim - 1); sigma_i = FastMath.sqrt(sigma_i); for(int j = 0; j < dim; j++) { z_ijs.add(new DoubleIntInt((x_i[j] - y_i) / sigma_i, i, j)); } } Collections.sort(z_ijs); return z_ijs; }
java
/**
 * Assign the dimensions with the globally smallest z-scores to their
 * clusters: take the first max(k*l, 2) entries of the sorted score list
 * (k and l are fields of the enclosing class) and set the corresponding
 * bit in the cluster's dimension bitmask.
 *
 * NOTE(review): the PROCLUS paper additionally requires at least 2
 * dimensions per cluster; this implementation only takes the k*l globally
 * smallest scores without that per-cluster constraint — verify intended.
 *
 * @param z_ijs sorted (ascending) standardized scores
 * @param dim data dimensionality (determines bitmask width)
 * @param numc number of clusters
 * @return per-cluster dimension bitmasks, indexed by cluster
 */
private long[][] computeDimensionMap(List<DoubleIntInt> z_ijs, final int dim, final int numc) {
  // mapping cluster index -> dimensions
  long[][] dimensionMap = new long[numc][((dim - 1) >> 6) + 1];
  int max = Math.max(k * l, 2);
  for(int m = 0; m < max; m++) {
    DoubleIntInt z_ij = z_ijs.get(m);
    // dimi = cluster index, dimj = dimension index.
    long[] dims_i = dimensionMap[z_ij.dimi];
    BitsUtil.setI(dims_i, z_ij.dimj);
    if(LOG.isDebugging()) {
      LOG.debugFiner(new StringBuilder().append("z_ij ").append(z_ij).append('\n') //
          .append("D_i ").append(BitsUtil.toString(dims_i)).toString());
    }
  }
  return dimensionMap;
}
java
/**
 * Assign every database object to the medoid with the smallest Manhattan
 * segmental distance in that medoid's relevant dimensions.
 *
 * @param m_current current medoids
 * @param dimensions relevant-dimension bitmask per medoid
 * @param database relation holding the vectors
 * @return clusters aligned with the medoid order; {@code null} entries for
 *         medoids whose cluster stayed empty
 */
private ArrayList<PROCLUSCluster> assignPoints(ArrayDBIDs m_current, long[][] dimensions, Relation<V> database) {
  ModifiableDBIDs[] clusterIDs = new ModifiableDBIDs[dimensions.length];
  for(int i = 0; i < m_current.size(); i++) {
    clusterIDs[i] = DBIDUtil.newHashSet();
  }
  DBIDArrayIter m_i = m_current.iter();
  for(DBIDIter it = database.iterDBIDs(); it.valid(); it.advance()) {
    V p = database.get(it);
    // minDist starts as NaN: !(NaN <= x) is true, so the first medoid
    // always wins. The negated comparison is deliberate, not a bug.
    double minDist = Double.NaN;
    int best = -1, i = 0;
    for(m_i.seek(0); m_i.valid(); m_i.advance(), i++) {
      V m = database.get(m_i);
      double currentDist = manhattanSegmentalDistance(p, m, dimensions[i]);
      if(!(minDist <= currentDist)) {
        minDist = currentDist;
        best = i;
      }
    }
    // add p to cluster with mindist
    assert best >= 0;
    clusterIDs[best].add(it);
  }
  ArrayList<PROCLUSCluster> clusters = new ArrayList<>(m_current.size());
  for(int i = 0; i < dimensions.length; i++) {
    ModifiableDBIDs objectIDs = clusterIDs[i];
    if(!objectIDs.isEmpty()) {
      long[] clusterDimensions = dimensions[i];
      double[] centroid = Centroid.make(database, objectIDs).getArrayRef();
      clusters.add(new PROCLUSCluster(objectIDs, clusterDimensions, centroid));
    }
    else {
      // Keep a null placeholder so cluster indexes stay aligned with
      // the medoid positions (used by computeBadMedoids).
      clusters.add(null);
    }
  }
  if(LOG.isDebugging()) {
    LOG.debugFine(new StringBuilder().append("clusters ").append(clusters).toString());
  }
  return clusters;
}
java
private List<PROCLUSCluster> finalAssignment(List<Pair<double[], long[]>> dimensions, Relation<V> database) { Map<Integer, ModifiableDBIDs> clusterIDs = new HashMap<>(); for(int i = 0; i < dimensions.size(); i++) { clusterIDs.put(i, DBIDUtil.newHashSet()); } for(DBIDIter it = database.iterDBIDs(); it.valid(); it.advance()) { V p = database.get(it); double minDist = Double.POSITIVE_INFINITY; int best = -1; for(int i = 0; i < dimensions.size(); i++) { Pair<double[], long[]> pair_i = dimensions.get(i); double currentDist = manhattanSegmentalDistance(p, pair_i.first, pair_i.second); if(best < 0 || currentDist < minDist) { minDist = currentDist; best = i; } } // add p to cluster with mindist assert minDist >= 0.; clusterIDs.get(best).add(it); } List<PROCLUSCluster> clusters = new ArrayList<>(); for(int i = 0; i < dimensions.size(); i++) { ModifiableDBIDs objectIDs = clusterIDs.get(i); if(!objectIDs.isEmpty()) { long[] clusterDimensions = dimensions.get(i).second; double[] centroid = Centroid.make(database, objectIDs).getArrayRef(); clusters.add(new PROCLUSCluster(objectIDs, clusterDimensions, centroid)); } } if(LOG.isDebugging()) { LOG.debugFine(new StringBuilder().append("clusters ").append(clusters).toString()); } return clusters; }
java
/**
 * Manhattan segmental distance: the Manhattan distance restricted to the
 * given dimensions, normalized by the number of dimensions used.
 *
 * NOTE(review): if {@code dimensions} has no bit set, card stays 0 and the
 * result is NaN (0/0) — callers appear to always pass non-empty masks, but
 * verify.
 *
 * @param o1 first vector
 * @param o2 second vector, as a plain array (e.g. a centroid)
 * @param dimensions bitmask of dimensions to include
 * @return average absolute per-dimension difference over the set dimensions
 */
private double manhattanSegmentalDistance(NumberVector o1, double[] o2, long[] dimensions) {
  double result = 0;
  int card = 0;
  for(int d = BitsUtil.nextSetBit(dimensions, 0); d >= 0; d = BitsUtil.nextSetBit(dimensions, d + 1)) {
    result += Math.abs(o1.doubleValue(d) - o2[d]);
    ++card;
  }
  return result / card;
}
java
/**
 * Evaluate a clustering: for each cluster, average the per-dimension mean
 * deviations from the centroid over its relevant dimensions, weight by
 * cluster size, and normalize by the database size. Smaller is better.
 *
 * NOTE(review): w_i is divided by {@code dimensions.length} (the number of
 * clusters); the PROCLUS paper divides by |D_i|, the number of relevant
 * dimensions of cluster i — verify whether this is intended.
 *
 * @param clusters clusters aligned with the dimensions array
 * @param dimensions relevant-dimension bitmask per cluster
 * @param database relation holding the vectors
 * @return clustering quality score (lower is better)
 */
private double evaluateClusters(ArrayList<PROCLUSCluster> clusters, long[][] dimensions, Relation<V> database) {
  double result = 0;
  for(int i = 0; i < dimensions.length; i++) {
    PROCLUSCluster c_i = clusters.get(i);
    double[] centroid_i = c_i.centroid;
    long[] dims_i = dimensions[i];
    double w_i = 0;
    for(int d = BitsUtil.nextSetBit(dims_i, 0); d >= 0; d = BitsUtil.nextSetBit(dims_i, d + 1)) {
      w_i += avgDistance(centroid_i, c_i.objectIDs, database, d);
    }
    w_i /= dimensions.length;
    result += c_i.objectIDs.size() * w_i;
  }
  return result / database.size();
}
java
/**
 * Average absolute deviation from the centroid in a single dimension,
 * over all objects of a cluster.
 *
 * @param centroid cluster centroid
 * @param objectIDs members of the cluster
 * @param database relation holding the vectors
 * @param dimension dimension to evaluate
 * @return mean of |centroid[dimension] - o[dimension]| over the members
 */
private double avgDistance(double[] centroid, DBIDs objectIDs, Relation<V> database, int dimension) {
  Mean mean = new Mean();
  for(DBIDIter it = objectIDs.iter(); it.valid(); it.advance()) {
    final double value = database.get(it).doubleValue(dimension);
    mean.put(Math.abs(centroid[dimension] - value));
  }
  return mean.getMean();
}
java
/**
 * Identify "bad" medoids: those whose cluster is missing (null placeholder)
 * or smaller than the given threshold.
 *
 * @param m_current current medoids, aligned with the clusters list
 * @param clusters clusters per medoid ({@code null} for empty ones)
 * @param threshold minimum acceptable cluster size
 * @return set of medoids to be replaced
 */
private DBIDs computeBadMedoids(ArrayDBIDs m_current, ArrayList<PROCLUSCluster> clusters, int threshold) {
  ModifiableDBIDs badMedoids = DBIDUtil.newHashSet(m_current.size());
  int pos = 0;
  for(DBIDIter it = m_current.iter(); it.valid(); it.advance()) {
    final PROCLUSCluster cluster = clusters.get(pos++);
    if(cluster == null || cluster.objectIDs.size() < threshold) {
      badMedoids.add(it);
    }
  }
  return badMedoids;
}
java
/**
 * Parse a string as a Bit: only the decimal values 0 and 1 are accepted.
 *
 * @param bit string to parse
 * @return {@code TRUE} for "1", {@code FALSE} for "0"
 * @throws NumberFormatException if the input is not a valid 0/1 value
 */
public static Bit valueOf(String bit) throws NumberFormatException {
  switch(ParseUtil.parseIntBase10(bit)) {
  case 0:
    return FALSE;
  case 1:
    return TRUE;
  default:
    throw new NumberFormatException("Input \"" + bit + "\" must be 0 or 1.");
  }
}
java
/**
 * Run change-point detection on the given relation.
 *
 * @param relation input data; must be backed by ArrayDBIDs, since a stable
 *        positional order is needed to interpret the data as a series
 * @return detected change points
 */
public ChangePoints run(Relation<DoubleVector> relation) {
  if(relation.getDBIDs() instanceof ArrayDBIDs) {
    return new Instance(rnd.getSingleThreadedRandom()).run(relation);
  }
  throw new AbortException("This implementation may only be used on static databases, with ArrayDBIDs to provide a clear order.");
}
java
public static void cusum(double[] data, double[] out, int begin, int end) { assert (out.length >= data.length); // Use Kahan summation for better precision! // FIXME: this should be unit tested. double m = 0., carry = 0.; for(int i = begin; i < end; i++) { double v = data[i] - carry; // Compensation double n = out[i] = (m + v); // May lose small digits of v. carry = (n - m) - v; // Recover lost bits m = n; } }
java
/**
 * Find the best change-in-mean position within [begin, end) from an array
 * of prefix sums: for each split, score the difference between the left
 * and right mean, weighted by the segment sizes.
 *
 * @param sums inclusive prefix sums of the data (sums[i] = data[0..i])
 * @param begin first index of the window, inclusive
 * @param end last index of the window, exclusive
 * @return pair of (best score, index of the first element after the split)
 */
public static DoubleIntPair bestChangeInMean(double[] sums, int begin, int end) {
  final int len = end - begin, last = end - 1;
  // Sum of everything before the window, to localize the prefix sums:
  final double suml = begin > 0 ? sums[begin - 1] : 0.;
  final double sumr = sums[last];
  int bestpos = begin;
  double bestscore = Double.NEGATIVE_INFINITY;
  // Iterate elements k=2..n-1 in math notation_
  for(int j = begin, km1 = 1; j < last; j++, km1++) {
    assert (km1 < len); // FIXME: remove eventually
    final double sumj = sums[j]; // Sum _inclusive_ j'th element.
    // Derive the left mean and right mean from the precomputed aggregates:
    final double lmean = (sumj - suml) / km1;
    final double rmean = (sumr - sumj) / (len - km1);
    // Equation 2.6.17 from the Basseville book
    // Cast forces the km1*(len-km1) product into double, avoiding int overflow.
    final double dm = lmean - rmean;
    final double score = km1 * (double) (len - km1) * dm * dm;
    if(score > bestscore) {
      bestpos = j + 1;
      bestscore = score;
    }
  }
  return new DoubleIntPair(bestscore, bestpos);
}
java
public static void shuffle(double[] bstrap, int len, Random rnd) { int i = len; while(i > 0) { final int r = rnd.nextInt(i); --i; // Swap double tmp = bstrap[r]; bstrap[r] = bstrap[i]; bstrap[i] = tmp; } }
java
/**
 * Get (or lazily create) the bin for the given coordinate, growing the
 * backing array on demand in either direction. Infinities and NaN are
 * mapped to dedicated special bins.
 *
 * @param coord coordinate to look up
 * @return the bin object for this coordinate, never null
 */
@SuppressWarnings("unchecked")
public T get(double coord) {
  // Special bins for -inf, +inf, NaN:
  if(coord == Double.NEGATIVE_INFINITY) {
    return getSpecial(0);
  }
  if(coord == Double.POSITIVE_INFINITY) {
    return getSpecial(1);
  }
  if(Double.isNaN(coord)) {
    return getSpecial(2);
  }
  int bin = getBinNr(coord);
  if(bin < 0) {
    // Coordinate is left of the current range; shift data right by -bin.
    if(size - bin > data.length) {
      // Reallocate. TODO: use an arraylist-like grow strategy!
      Object[] tmpdata = new Object[growSize(data.length, size - bin)];
      System.arraycopy(data, 0, tmpdata, -bin, size);
      data = tmpdata;
    }
    else {
      // Shift in place
      System.arraycopy(data, 0, data, -bin, size);
    }
    // Fill the newly exposed leading slots with fresh bins.
    for(int i = 0; i < -bin; i++) {
      data[i] = supplier.make();
    }
    // Note that bin is negative, -bin is the shift offset!
    offset -= bin;
    size -= bin;
    // TODO: modCounter++; and have iterators fast-fail
    // Unset max value when resizing
    max = Double.MAX_VALUE;
    return (T) data[0];
  }
  else if(bin >= size) {
    // Coordinate is right of the current range; extend at the end.
    if(bin >= data.length) {
      Object[] tmpdata = new Object[growSize(data.length, bin + 1)];
      System.arraycopy(data, 0, tmpdata, 0, size);
      data = tmpdata;
    }
    for(int i = size; i <= bin; i++) {
      data[i] = supplier.make();
    }
    size = bin + 1;
    // TODO: modCounter++; and have iterators fast-fail
    // Unset max value when resizing
    max = Double.MAX_VALUE;
    return (T) data[bin];
  }
  else {
    // Within the existing range, no resize needed.
    return (T) data[bin];
  }
}
java
/**
 * Load a (sparse) precomputed distance matrix into the in-memory cache.
 * Also tracks the minimum and maximum object id seen, to detect and
 * compensate for non-0-indexed matrices.
 *
 * @param size expected number of objects
 * @param in input stream with the serialized distances
 * @throws IOException on read or parse errors
 */
protected void loadCache(int size, InputStream in) throws IOException {
  // Expect a sparse matrix here
  // Presize generously (assumes ~20 entries per object on average).
  cache = new Long2FloatOpenHashMap(size * 20);
  // Missing pairs are treated as infinitely distant.
  cache.defaultReturnValue(Float.POSITIVE_INFINITY);
  min = Integer.MAX_VALUE;
  max = Integer.MIN_VALUE;
  parser.parse(in, new DistanceCacheWriter() {
    @Override
    public void put(int id1, int id2, double distance) {
      // Track the id range; order the pair so min/max are updated from
      // the smaller/larger id respectively.
      if(id1 < id2) {
        min = id1 < min ? id1 : min;
        max = id2 > max ? id2 : max;
      }
      else {
        min = id2 < min ? id2 : min;
        max = id1 > max ? id1 : max;
      }
      cache.put(makeKey(id1, id2), (float) distance);
    }
  });
  if(min != 0) {
    LOG.verbose("Distance matrix is supposed to be 0-indexed. Choosing offset " + min + " to compensate.");
  }
  if(max + 1 - min != size) {
    LOG.warning("ID range is not consistent with relation size.");
  }
}
java
/**
 * Create a new SVG element in this plot's document, optionally with a
 * CSS class attribute.
 *
 * @param name SVG tag name
 * @param cssclass CSS class to set, or {@code null} for none
 * @return the new element
 */
public Element svgElement(String name, String cssclass) {
  final Element elem = SVGUtil.svgElement(document, name);
  if(cssclass == null) {
    return elem;
  }
  elem.setAttribute(SVGConstants.SVG_CLASS_ATTRIBUTE, cssclass);
  return elem;
}
java
/**
 * Create an SVG rectangle element in this plot's document.
 *
 * @param x left coordinate
 * @param y top coordinate
 * @param w width
 * @param h height
 * @return the new rect element
 */
public Element svgRect(double x, double y, double w, double h) {
  return SVGUtil.svgRect(this.document, x, y, w, h);
}
java
/**
 * Create an SVG circle element in this plot's document.
 *
 * @param cx center x coordinate
 * @param cy center y coordinate
 * @param r radius
 * @return the new circle element
 */
public Element svgCircle(double cx, double cy, double r) {
  return SVGUtil.svgCircle(this.document, cx, cy, r);
}
java
/**
 * Create an SVG line element in this plot's document.
 *
 * @param x1 start x coordinate
 * @param y1 start y coordinate
 * @param x2 end x coordinate
 * @param y2 end y coordinate
 * @return the new line element
 */
public Element svgLine(double x1, double y1, double x2, double y2) {
  return SVGUtil.svgLine(this.document, x1, y1, x2, y2);
}
java
/**
 * Convert an event's screen coordinates into the local coordinate system
 * of the given element, using this plot's document.
 *
 * @param tag element defining the target coordinate system
 * @param evt event carrying the screen coordinates
 * @return coordinates in the element's local space
 */
public SVGPoint elementCoordinatesFromEvent(Element tag, Event evt) {
  return SVGUtil.elementCoordinatesFromEvent(this.document, tag, evt);
}
java
/**
 * Register a CSS class with the style manager; on a naming conflict the
 * exception is logged instead of propagated.
 *
 * @param cls CSS class to register
 */
public void addCSSClassOrLogError(CSSClass cls) {
  try {
    cssman.addClass(cls);
  }
  catch(CSSNamingConflict e) {
    // Deliberately non-fatal: report the conflict and continue.
    LoggingUtil.exception(e);
  }
}
java
public void updateStyleElement() { // TODO: this should be sufficient - why does Batik occasionally not pick up // the changes unless we actually replace the style element itself? // cssman.updateStyleElement(document, style); Element newstyle = cssman.makeStyleElement(document); style.getParentNode().replaceChild(newstyle, style); style = newstyle; }
java
public void saveAsSVG(File file) throws IOException, TransformerFactoryConfigurationError, TransformerException { OutputStream out = new BufferedOutputStream(new FileOutputStream(file)); // TODO embed linked images. javax.xml.transform.Result result = new StreamResult(out); SVGDocument doc = cloneDocument(); // Use a transformer for pretty printing Transformer xformer = TransformerFactory.newInstance().newTransformer(); xformer.setOutputProperty(OutputKeys.INDENT, "yes"); xformer.transform(new DOMSource(doc), result); out.flush(); out.close(); }
java
protected void transcode(File file, Transcoder transcoder) throws IOException, TranscoderException { // Disable validation, performance is more important here (thumbnails!) transcoder.addTranscodingHint(XMLAbstractTranscoder.KEY_XML_PARSER_VALIDATING, Boolean.FALSE); SVGDocument doc = cloneDocument(); TranscoderInput input = new TranscoderInput(doc); OutputStream out = new BufferedOutputStream(new FileOutputStream(file)); TranscoderOutput output = new TranscoderOutput(out); transcoder.transcode(input, output); out.flush(); out.close(); }
java
/**
 * Clone this plot's document for export, inlining linked images and
 * skipping all elements flagged with the no-export attribute.
 *
 * @return deep copy of the document, with no-export elements removed
 */
protected SVGDocument cloneDocument() {
  return (SVGDocument) new CloneInlineImages() {
    @Override
    public Node cloneNode(Document doc, Node eold) {
      // Skip elements with noexport attribute set
      if(eold instanceof Element) {
        Element eeold = (Element) eold;
        String vis = eeold.getAttribute(NO_EXPORT_ATTRIBUTE);
        if(vis != null && vis.length() > 0) {
          // Returning null drops this subtree from the clone.
          return null;
        }
      }
      return super.cloneNode(doc, eold);
    }
  }.cloneDocument(getDomImpl(), document);
}
java
/**
 * Save this plot as a PDF file, using the Apache FOP PDF transcoder.
 * The transcoder is loaded reflectively so that FOP remains an optional
 * dependency; failure to instantiate is reported as ClassNotFoundException
 * with the original cause attached.
 *
 * @param file destination file
 * @throws IOException on I/O errors
 * @throws TranscoderException on transcoding errors
 * @throws ClassNotFoundException if the FOP PDF transcoder is unavailable
 */
public void saveAsPDF(File file) throws IOException, TranscoderException, ClassNotFoundException {
  try {
    Object t = Class.forName("org.apache.fop.svg.PDFTranscoder").newInstance();
    transcode(file, (Transcoder) t);
  }
  catch(InstantiationException | IllegalAccessException e) {
    throw new ClassNotFoundException("Could not instantiate PDF transcoder - is Apache FOP installed?", e);
  }
}
java
/**
 * Save this plot as a PNG image of the given size.
 *
 * Fix: replaced the deprecated {@code new Float(...)} boxing constructor
 * with {@link Float#valueOf(float)} (deprecated since Java 9).
 *
 * @param file destination file
 * @param width image width in pixels
 * @param height image height in pixels
 * @throws IOException on I/O errors
 * @throws TranscoderException on transcoding errors
 */
public void saveAsPNG(File file, int width, int height) throws IOException, TranscoderException {
  PNGTranscoder t = new PNGTranscoder();
  t.addTranscodingHint(PNGTranscoder.KEY_WIDTH, Float.valueOf(width));
  t.addTranscodingHint(PNGTranscoder.KEY_HEIGHT, Float.valueOf(height));
  transcode(file, t);
}
java
/**
 * Save this plot in the format implied by the file's extension:
 * svg, pdf, ps, eps, png, or jpg/jpeg.
 *
 * @param file destination file; the extension selects the format
 * @param width raster width (png/jpeg only)
 * @param height raster height (png/jpeg only)
 * @param quality JPEG quality (jpeg only)
 * @throws IOException if the extension is unknown, or on I/O errors
 * @throws TranscoderException on transcoding errors
 * @throws TransformerFactoryConfigurationError on transformer setup errors
 * @throws TransformerException on SVG serialization errors
 * @throws ClassNotFoundException if an optional transcoder is unavailable
 */
public void saveAsANY(File file, int width, int height, float quality) throws IOException, TranscoderException, TransformerFactoryConfigurationError, TransformerException, ClassNotFoundException {
  final String ext = FileUtil.getFilenameExtension(file);
  if("svg".equals(ext)) {
    saveAsSVG(file);
    return;
  }
  if("pdf".equals(ext)) {
    saveAsPDF(file);
    return;
  }
  if("ps".equals(ext)) {
    saveAsPS(file);
    return;
  }
  if("eps".equals(ext)) {
    saveAsEPS(file);
    return;
  }
  if("png".equals(ext)) {
    saveAsPNG(file, width, height);
    return;
  }
  if("jpg".equals(ext) || "jpeg".equals(ext)) {
    saveAsJPEG(file, width, height, quality);
    return;
  }
  throw new IOException("Unknown file extension: " + ext);
}
java
public BufferedImage makeAWTImage(int width, int height) throws TranscoderException { ThumbnailTranscoder t = new ThumbnailTranscoder(); t.addTranscodingHint(PNGTranscoder.KEY_WIDTH, new Float(width)); t.addTranscodingHint(PNGTranscoder.KEY_HEIGHT, new Float(height)); // Don't clone. Assume this is used safely. TranscoderInput input = new TranscoderInput(document); t.transcode(input, null); return t.getLastImage(); }
java
public void dumpDebugFile() { try { File f = File.createTempFile("elki-debug", ".svg"); f.deleteOnExit(); this.saveAsSVG(f); LoggingUtil.warning("Saved debug file to: " + f.getAbsolutePath()); } catch(Throwable err) { // Ignore. } }
java
/**
 * Register an element under an id for later lookup. Stored via a weak
 * reference, so unused elements can still be garbage collected.
 *
 * @param id element id
 * @param obj element to register
 */
public void putIdElement(String id, Element obj) {
  final WeakReference<Element> ref = new WeakReference<>(obj);
  objWithId.put(id, ref);
}
java
/**
 * Look up an element by id.
 *
 * @param id element id
 * @return the element, or {@code null} if unknown or already collected
 */
public Element getIdElement(String id) {
  final WeakReference<Element> ref = objWithId.get(id);
  if(ref == null) {
    return null;
  }
  return ref.get();
}
java
/**
 * Score an outlier result: inliers should receive low scaled scores,
 * labeled outliers high ones. Scores are first passed through the
 * configured scaling, then linearly mapped into [0, 1] if finite min/max
 * are known.
 *
 * @param ids the inlier (non-outlier) object ids
 * @param outlierIds the labeled outlier object ids
 * @param or outlier scores to judge
 * @return score result with (combined, positive, negative) score
 * @throws IllegalStateException declared for scaling preparation failures
 */
protected ScoreResult computeScore(DBIDs ids, DBIDs outlierIds, OutlierResult or) throws IllegalStateException {
  // Outlier-aware scalings need to see the result first.
  if(scaling instanceof OutlierScaling) {
    OutlierScaling oscaling = (OutlierScaling) scaling;
    oscaling.prepare(or);
  }
  final ScalingFunction innerScaling;
  // If we have useful (finite) min/max, use these for binning.
  double min = scaling.getMin();
  double max = scaling.getMax();
  if(Double.isInfinite(min) || Double.isNaN(min) || Double.isInfinite(max) || Double.isNaN(max)) {
    innerScaling = new IdentityScaling();
    // TODO: does the outlier score give us this guarantee?
    LOG.warning("JudgeOutlierScores expects values between 0.0 and 1.0, but we don't have such a guarantee by the scaling function: min:" + min + " max:" + max);
  }
  else {
    if(min == 0.0 && max == 1.0) {
      // Already normalized; nothing to do.
      innerScaling = new IdentityScaling();
    }
    else {
      // Linear map [min, max] -> [0, 1].
      innerScaling = new LinearScaling(1.0 / (max - min), -min);
    }
  }
  double posscore = 0.0;
  double negscore = 0.0;
  // fill histogram with values of each object
  // Inliers: reward low scores (1 - result).
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    double result = or.getScores().doubleValue(iter);
    result = innerScaling.getScaled(scaling.getScaled(result));
    posscore += (1.0 - result);
  }
  // Outliers: reward high scores.
  for(DBIDIter iter = outlierIds.iter(); iter.valid(); iter.advance()) {
    double result = or.getScores().doubleValue(iter);
    result = innerScaling.getScaled(scaling.getScaled(result));
    negscore += result;
  }
  posscore /= ids.size();
  negscore /= outlierIds.size();
  LOG.verbose("Scores: " + posscore + " " + negscore);
  ArrayList<double[]> s = new ArrayList<>(1);
  s.add(new double[] { (posscore + negscore) * .5, posscore, negscore });
  return new ScoreResult(s);
}
java
/**
 * FastDOC inner loop: search for one cluster. Repeatedly pick a random
 * seed point and a random discriminating set, determine the dimensions in
 * which the set agrees with the seed, and keep the seed/dimension pair of
 * highest cardinality. Stops early once d_zero dimensions are reached.
 *
 * @param database database (unused here but part of the signature)
 * @param relation vector relation
 * @param S ids still to be clustered
 * @param d data dimensionality
 * @param n number of outer iterations (seed points)
 * @param m number of inner iterations (discriminating sets) per seed
 * @param r size of each discriminating set
 * @param minClusterSize minimum accepted cluster size
 * @return the found cluster, or {@code null} if none qualified
 */
@Override
protected Cluster<SubspaceModel> runDOC(Database database, Relation<V> relation, ArrayModifiableDBIDs S, int d, int n, int m, int r, int minClusterSize) {
  // Relevant attributes of highest cardinality.
  long[] D = null;
  // The seed point for the best dimensions.
  DBIDVar dV = DBIDUtil.newVar();
  // Inform the user about the progress in the current iteration.
  FiniteProgress iprogress = LOG.isVerbose() ? new FiniteProgress("Iteration progress for current cluster", m * n, LOG) : null;
  Random random = rnd.getSingleThreadedRandom();
  DBIDArrayIter iter = S.iter();
  outer: for(int i = 0; i < n; ++i) {
    // Pick a random seed point.
    iter.seek(random.nextInt(S.size()));
    for(int j = 0; j < m; ++j) {
      // Choose a set of random points.
      DBIDs randomSet = DBIDUtil.randomSample(S, r, random);
      // Initialize cluster info.
      long[] nD = BitsUtil.zero(d);
      // Test each dimension.
      for(int k = 0; k < d; ++k) {
        if(dimensionIsRelevant(k, relation, randomSet)) {
          BitsUtil.setI(nD, k);
        }
      }
      // Keep the candidate with the most relevant dimensions.
      if(D == null || BitsUtil.cardinality(nD) > BitsUtil.cardinality(D)) {
        D = nD;
        dV.set(iter);
        // Early termination: enough dimensions found.
        if(BitsUtil.cardinality(D) >= d_zero) {
          if(iprogress != null) {
            iprogress.setProcessed(iprogress.getTotal(), LOG);
          }
          break outer;
        }
      }
      LOG.incrementProcessed(iprogress);
    }
  }
  LOG.ensureCompleted(iprogress);
  // If no relevant dimensions were found, skip it.
  if(D == null || BitsUtil.cardinality(D) == 0) {
    return null;
  }
  // Get all points in the box.
  DBIDs C = findNeighbors(dV, D, S, relation);
  // If we have a non-empty cluster, return it.
  return (C.size() >= minClusterSize) ? makeCluster(relation, C, D) : null;
}
java
/**
 * Render a Delaunay triangulation as a single SVG path: each triangle
 * becomes one closed subpath through its three projected corner means.
 *
 * @param proj projection from data space to render space
 * @param delaunay triangles to draw
 * @param means corner coordinates referenced by the triangles
 * @return SVG path containing all triangles
 */
public static SVGPath drawDelaunay(Projection2D proj, List<SweepHullDelaunay2D.Triangle> delaunay, List<double[]> means) {
  final SVGPath path = new SVGPath();
  for(SweepHullDelaunay2D.Triangle tri : delaunay) {
    path.moveTo(proj.fastProjectDataToRenderSpace(means.get(tri.a)));
    path.drawTo(proj.fastProjectDataToRenderSpace(means.get(tri.b)));
    path.drawTo(proj.fastProjectDataToRenderSpace(means.get(tri.c)));
    path.close();
  }
  return path;
}
java