code
stringlengths
73
34.1k
label
stringclasses
1 value
/**
 * Render this checkbox as an SVG group: a checkmark (hidden via CSS when
 * unchecked), a rounded box, an optional text label, and a click listener
 * that toggles the checked state and notifies ChangeListeners.
 *
 * @param svgp plot to create elements in
 * @param x left position
 * @param y top position
 * @param size box edge length (checkmark is scaled by size/12)
 * @return SVG group element containing the checkbox
 */
public Element renderCheckBox(SVGPlot svgp, double x, double y, double size) { // create check final Element checkmark = SVGEffects.makeCheckmark(svgp); checkmark.setAttribute(SVGConstants.SVG_TRANSFORM_ATTRIBUTE, "scale(" + (size / 12) + ") translate(" + x + " " + y + ")"); if(!checked) { checkmark.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, SVGConstants.CSS_DISPLAY_PROPERTY + ":" + SVGConstants.CSS_NONE_VALUE); } // create box Element checkbox_box = SVGUtil.svgRect(svgp.getDocument(), x, y, size, size); checkbox_box.setAttribute(SVGConstants.SVG_FILL_ATTRIBUTE, "#d4e4f1"); checkbox_box.setAttribute(SVGConstants.SVG_STROKE_ATTRIBUTE, "#a0a0a0"); checkbox_box.setAttribute(SVGConstants.SVG_STROKE_WIDTH_ATTRIBUTE, "0.5"); // create checkbox final Element checkbox = svgp.svgElement(SVGConstants.SVG_G_TAG); checkbox.appendChild(checkbox_box); checkbox.appendChild(checkmark); // create Label if(label != null) { Element labele = svgp.svgText(x + 2 * size, y + size, label); // TODO: font size! checkbox.appendChild(labele); } // add click event listener EventTarget targ = (EventTarget) checkbox; targ.addEventListener(SVGConstants.SVG_CLICK_EVENT_TYPE, new EventListener() { @Override public void handleEvent(Event evt) { if(checked ^= true) { checkmark.removeAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE); } else { checkmark.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, SVGConstants.CSS_DISPLAY_PROPERTY + ":" + SVGConstants.CSS_NONE_VALUE); } fireSwitchEvent(new ChangeEvent(SVGCheckbox.this)); } }, false); return checkbox; }
java
/**
 * Notify all registered ChangeListeners of a state change.
 * Iterates the Swing-style listener list (type at i-1, listener at i).
 *
 * @param evt change event to forward
 */
protected void fireSwitchEvent(ChangeEvent evt) { Object[] listeners = listenerList.getListenerList(); for(int i = 1; i < listeners.length; i += 2) { if(listeners[i - 1] == ChangeListener.class) { ((ChangeListener) listeners[i]).stateChanged(evt); } } }
java
/**
 * Estimate, for each DA file, how many partitions the epsilon range
 * [query - epsilon, query + epsilon] spans, and store that count into
 * the pair's first component as a selectivity coefficient.
 *
 * @param daFiles dimension approximation files; first component is overwritten
 * @param query query vector
 * @param epsilon query range radius per dimension
 */
protected static void calculateSelectivityCoeffs(List<DoubleObjPair<DAFile>> daFiles, NumberVector query, double epsilon) { final int dimensions = query.getDimensionality(); double[] lowerVals = new double[dimensions]; double[] upperVals = new double[dimensions]; VectorApproximation queryApprox = calculatePartialApproximation(null, query, daFiles); for(int i = 0; i < dimensions; i++) { final double val = query.doubleValue(i); lowerVals[i] = val - epsilon; upperVals[i] = val + epsilon; } DoubleVector lowerEpsilon = DoubleVector.wrap(lowerVals); VectorApproximation lowerEpsilonPartitions = calculatePartialApproximation(null, lowerEpsilon, daFiles); DoubleVector upperEpsilon = DoubleVector.wrap(upperVals); VectorApproximation upperEpsilonPartitions = calculatePartialApproximation(null, upperEpsilon, daFiles); for(int i = 0; i < daFiles.size(); i++) { int coeff = (queryApprox.getApproximation(i) - lowerEpsilonPartitions.getApproximation(i)) + (upperEpsilonPartitions.getApproximation(i) - queryApprox.getApproximation(i)) + 1; daFiles.get(i).first = coeff; } }
java
/**
 * Quantize each dimension of the vector into the partition given by the
 * corresponding DA file's split positions. Values below the first border
 * map to partition 0; values above the last border map to the last
 * partition.
 *
 * NOTE(review): the inner scan checks approximation[i] != -1, but the
 * array is zero-initialized and never set to -1, so that condition is
 * always true — likely leftover from an earlier sentinel scheme.
 * NOTE(review): assumes daFiles.size() <= dv.getDimensionality(); the
 * approximation array is sized by the vector dimensionality — confirm
 * against callers.
 *
 * @param id object id to attach (may be null for query vectors)
 * @param dv vector to approximate
 * @param daFiles per-dimension approximation files with split positions
 * @return approximation with one partition index per DA file
 */
protected static VectorApproximation calculatePartialApproximation(DBID id, NumberVector dv, List<DoubleObjPair<DAFile>> daFiles) { int[] approximation = new int[dv.getDimensionality()]; for(int i = 0; i < daFiles.size(); i++) { double val = dv.doubleValue(i); double[] borders = daFiles.get(i).second.getSplitPositions(); assert borders != null : "borders are null"; int lastBorderIndex = borders.length - 1; // value is lower outlier if(val < borders[0]) { approximation[i] = 0; } // value is upper outlier else if(val > borders[lastBorderIndex]) { approximation[i] = lastBorderIndex - 1; } // normal case else { for(int s = 0; s < lastBorderIndex; s++) { if(val >= borders[s] && val < borders[s + 1] && approximation[i] != -1) { approximation[i] = s; } } } } return new VectorApproximation(id, approximation); }
java
/**
 * Format the solution of the equation system as x_0 plus a linear
 * combination of the basis vectors u, with aligned columns.
 *
 * @param fractionDigits number of fraction digits to print
 * @return formatted multi-line solution string
 * @throws IllegalStateException if the system is not solvable
 */
public String solutionToString(int fractionDigits) { if(!isSolvable()) { throw new IllegalStateException("System is not solvable!"); } DecimalFormat nf = new DecimalFormat(); nf.setMinimumFractionDigits(fractionDigits); nf.setMaximumFractionDigits(fractionDigits); nf.setDecimalFormatSymbols(new DecimalFormatSymbols(Locale.US)); nf.setNegativePrefix(""); nf.setPositivePrefix(""); int row = coeff[0].length >> 1; int params = u.length; int paramsDigits = integerDigits(params); int x0Digits = maxIntegerDigits(x_0); int[] uDigits = maxIntegerDigits(u); StringBuilder buffer = new StringBuilder(); for(int i = 0; i < x_0.length; i++) { double value = x_0[i]; format(nf, buffer, value, x0Digits); for(int j = 0; j < u[0].length; j++) { if(i == row) { buffer.append(" + a_").append(j).append(" * "); } else { buffer.append(" "); for(int d = 0; d < paramsDigits; d++) { buffer.append(' '); } } format(nf, buffer, u[i][j], uDigits[j]); } buffer.append('\n'); } return buffer.toString(); }
java
/**
 * Transform the coefficient matrix into reduced row echelon form using
 * the chosen pivot search strategy (trivial non-zero search or total
 * pivot search). Rows/columns are permuted via the row/col index arrays
 * rather than moving matrix data; rank is incremented per usable pivot.
 * Terminates when the pivot magnitude falls to DELTA or when rows or
 * columns are exhausted.
 *
 * @param method pivot search strategy constant
 */
private void reducedRowEchelonForm(int method) { final int rows = coeff.length; final int cols = coeff[0].length; int k = -1; // denotes current position on diagonal int pivotRow; // row index of pivot element int pivotCol; // column index of pivot element double pivot; // value of pivot element // main loop, transformation to reduced row echelon form boolean exitLoop = false; while(!exitLoop) { k++; // pivot search for entry in remaining matrix // (depends on chosen method in switch) // store position in pivotRow, pivotCol // TODO: Note that we're using "row, col", whereas "col, row" would be // more common? IntIntPair pivotPos = new IntIntPair(0, 0); IntIntPair currPos = new IntIntPair(k, k); switch(method){ case TRIVAL_PIVOT_SEARCH: pivotPos = nonZeroPivotSearch(k); break; case TOTAL_PIVOT_SEARCH: pivotPos = totalPivotSearch(k); break; } pivotRow = pivotPos.first; pivotCol = pivotPos.second; pivot = coeff[this.row[pivotRow]][col[pivotCol]]; if(LOG.isDebugging()) { StringBuilder msg = new StringBuilder(); msg.append("equations ").append(equationsToString(4)); msg.append(" *** pivot at (").append(pivotRow).append(',').append(pivotCol).append(") = ").append(pivot).append('\n'); LOG.debugFine(msg.toString()); } // permute rows and columns to get this entry onto // the diagonal permutePivot(pivotPos, currPos); // test conditions for exiting loop // after this iteration // reasons are: Math.abs(pivot) == 0 if((Math.abs(pivot) <= DELTA)) { exitLoop = true; } // pivoting only if Math.abs(pivot) > 0 // and k <= m - 1 if((Math.abs(pivot) > DELTA)) { rank++; pivotOperation(k); } // test conditions for exiting loop // after this iteration // reasons are: k == rows-1 : no more rows // k == cols-1 : no more columns if(k == rows - 1 || k == cols - 1) { exitLoop = true; } } // end while reducedRowEchelonForm = true; }
java
private IntIntPair nonZeroPivotSearch(int k) { int i, j; double absValue; for(i = k; i < coeff.length; i++) { for(j = k; j < coeff[0].length; j++) { // compute absolute value of // current entry in absValue absValue = Math.abs(coeff[row[i]][col[j]]); // check if absValue is non-zero if(absValue > 0) { // found a pivot element return new IntIntPair(i, j); } // end if } // end for j } // end for k return new IntIntPair(k, k); }
java
/**
 * Swap entries of the row and column permutation arrays so the element
 * at pos1 is logically moved to pos2 (and vice versa). Only the index
 * arrays are touched; the matrix itself is not modified.
 *
 * @param pos1 position of the pivot element
 * @param pos2 target (diagonal) position
 */
private void permutePivot(IntIntPair pos1, IntIntPair pos2) {
  // Swap the row permutation entries.
  final int tmpRow = row[pos2.first];
  row[pos2.first] = row[pos1.first];
  row[pos1.first] = tmpRow;
  // Swap the column permutation entries.
  final int tmpCol = col[pos2.second];
  col[pos2.second] = col[pos1.second];
  col[pos1.second] = tmpCol;
}
java
/**
 * Perform the Gauss-Jordan pivot step at diagonal position k: normalize
 * the pivot row so the pivot becomes 1, then eliminate column k from all
 * other rows (including those above k, yielding reduced row echelon
 * form). The right-hand side is updated accordingly.
 *
 * @param k diagonal position of the pivot
 */
private void pivotOperation(int k) { double pivot = coeff[row[k]][col[k]]; // pivot row: set pivot to 1 coeff[row[k]][col[k]] = 1; for(int i = k + 1; i < coeff[k].length; i++) { coeff[row[k]][col[i]] /= pivot; } rhs[row[k]] /= pivot; if(LOG.isDebugging()) { StringBuilder msg = new StringBuilder(); msg.append("set pivot element to 1 ").append(equationsToString(4)); LOG.debugFine(msg.toString()); } // for (int i = k + 1; i < coeff.length; i++) { for(int i = 0; i < coeff.length; i++) { if(i == k) { continue; } // compute factor double q = coeff[row[i]][col[k]]; // modify entry a[i,k], i <> k coeff[row[i]][col[k]] = 0; // modify entries a[i,j], i > k fixed, j = k+1...n-1 for(int j = k + 1; j < coeff[0].length; j++) { coeff[row[i]][col[j]] = coeff[row[i]][col[j]] - coeff[row[k]][col[j]] * q; } // end for j // modify right-hand-side rhs[row[i]] = rhs[row[i]] - rhs[row[k]] * q; } // end for k if(LOG.isDebugging()) { StringBuilder msg = new StringBuilder(); msg.append("after pivot operation ").append(equationsToString(4)); LOG.debugFine(msg.toString()); } }
java
/**
 * Solve the linear equation system: bring it into reduced row echelon
 * form if needed, check solvability, compute one special solution x_0
 * from the bound variables, and span the homogeneous solution space u
 * from the free variables. Sets the solved flag on success.
 *
 * @param method pivot search strategy constant
 */
private void solve(int method) throws NullPointerException { // solution exists if(solved) { return; } // bring in reduced row echelon form if(!reducedRowEchelonForm) { reducedRowEchelonForm(method); } if(!isSolvable(method)) { if(LOG.isDebugging()) { LOG.debugFine("Equation system is not solvable!"); } return; } // compute one special solution final int cols = coeff[0].length; int numbound = 0, numfree = 0; int[] boundIndices = new int[cols], freeIndices = new int[cols]; x_0 = new double[cols]; outer: for(int i = 0; i < coeff.length; i++) { for(int j = i; j < coeff[row[i]].length; j++) { if(coeff[row[i]][col[j]] == 1) { x_0[col[i]] = rhs[row[i]]; boundIndices[numbound++] = col[i]; continue outer; } } freeIndices[numfree++] = i; } StringBuilder msg = new StringBuilder(); if(LOG.isDebugging()) { msg.append("\nSpecial solution x_0 = [").append(FormatUtil.format(x_0, ",", FormatUtil.NF4)).append(']') // .append("\nbound Indices ").append(FormatUtil.format(boundIndices, ",")) // .append("\nfree Indices ").append(FormatUtil.format(freeIndices, ",")); } // compute solution space of homogeneous linear equation system Arrays.sort(boundIndices, 0, numbound); int freeIndex = 0; int boundIndex = 0; u = new double[cols][numfree]; for(int j = 0; j < u[0].length; j++) { for(int i = 0; i < u.length; i++) { if(freeIndex < numfree && i == freeIndices[freeIndex]) { u[i][j] = 1; } else if(boundIndex < numbound && i == boundIndices[boundIndex]) { u[i][j] = -coeff[row[boundIndex]][freeIndices[freeIndex]]; boundIndex++; } } freeIndex++; boundIndex = 0; // Restart } if(LOG.isDebugging()) { msg.append("\nU"); for(double[] anU : u) { msg.append('\n').append(FormatUtil.format(anU, ",", FormatUtil.NF4)); } LOG.debugFine(msg.toString()); } solved = true; }
java
/**
 * Test solvability: after reduction, the system is solvable iff every
 * right-hand-side entry below the rank is (numerically) zero, i.e.
 * rank(coeff) == rank(coeff|rhs). Caches the result once solved.
 *
 * @param method pivot search strategy constant
 * @return true if the system has at least one solution
 */
private boolean isSolvable(int method) throws NullPointerException { if(solved) { return solvable; } if(!reducedRowEchelonForm) { reducedRowEchelonForm(method); } // test if rank(coeff) == rank(coeff|rhs) for(int i = rank; i < rhs.length; i++) { if(Math.abs(rhs[row[i]]) > DELTA) { solvable = false; return false; // not solvable } } solvable = true; return true; }
java
/**
 * Compute, for every column of the matrix, the maximum number of integer
 * digits over all rows (used for column-aligned output).
 *
 * @param values matrix to inspect
 * @return per-column maximum integer digit count
 */
private int[] maxIntegerDigits(double[][] values) {
  final int width = values[0].length;
  int[] digits = new int[width];
  for(double[] rowVals : values) {
    for(int j = 0; j < width; j++) {
      final int d = integerDigits(rowVals[j]);
      if(d > digits[j]) {
        digits[j] = d;
      }
    }
  }
  return digits;
}
java
/**
 * Compute the maximum number of integer digits over all array entries.
 *
 * @param values values to inspect
 * @return maximum integer digit count (0 for an empty array)
 */
private int maxIntegerDigits(double[] values) {
  int best = 0;
  for(int i = 0; i < values.length; i++) {
    final int d = integerDigits(values[i]);
    best = d > best ? d : best;
  }
  return best;
}
java
/**
 * Number of digits in the integer part of |d|. The early return for
 * |d| < 10 also guards the log10 call against 0 and sub-1 values
 * (which would yield non-positive logarithms).
 *
 * @param d value to measure
 * @return digit count of the integer part, at least 1
 */
private int integerDigits(double d) { double value = Math.abs(d); if(value < 10) { return 1; } return (int) FastMath.log10(value) + 1; }
java
/**
 * Append a sign-separated, right-aligned representation of value to the
 * buffer: " + " or " - ", padding spaces so integer parts line up, then
 * the absolute value formatted by nf.
 *
 * @param nf number format to use
 * @param buffer output buffer
 * @param value value to format
 * @param maxIntegerDigits column width (integer digits) to align to
 */
private void format(NumberFormat nf, StringBuilder buffer, double value, int maxIntegerDigits) {
  // Render the sign separately so columns stay aligned.
  buffer.append(value >= 0 ? " + " : " - ");
  // Left-pad so the integer parts are right-aligned.
  for(int pad = maxIntegerDigits - integerDigits(value); pad > 0; pad--) {
    buffer.append(' ');
  }
  buffer.append(nf.format(Math.abs(value)));
}
java
/**
 * Choose the initial medoids via the configured initializer, logging the
 * initializer name and the time spent as statistics.
 *
 * @param distQ distance query for the initializer
 * @param ids candidate object ids
 * @return exactly k initial medoids
 * @throws AbortException if the initializer returned a wrong count
 */
protected ArrayModifiableDBIDs initialMedoids(DistanceQuery<V> distQ, DBIDs ids) { if(getLogger().isStatistics()) { getLogger().statistics(new StringStatistic(getClass().getName() + ".initialization", initializer.toString())); } Duration initd = getLogger().newDuration(getClass().getName() + ".initialization-time").begin(); ArrayModifiableDBIDs medoids = DBIDUtil.newArray(initializer.chooseInitialMedoids(k, ids, distQ)); getLogger().statistics(initd.end()); if(medoids.size() != k) { throw new AbortException("Initializer " + initializer.toString() + " did not return " + k + " means, but " + medoids.size()); } return medoids; }
java
/**
 * Sum of all per-entry cluster counts.
 *
 * @return total cluster count
 */
public int getTotalClusterCount() {
  int total = 0;
  for(int count : numclusters) {
    total += count;
  }
  return total;
}
java
/**
 * Largest per-entry cluster count.
 *
 * @return maximum cluster count, 0 for an empty array
 */
public int getHighestClusterCount() {
  int best = 0;
  for(int count : numclusters) {
    if(count > best) {
      best = count;
    }
  }
  return best;
}
java
/**
 * Get the minimum distance of object j to the current medoids, computing
 * and caching it lazily: NaN in the mindist store marks "not yet
 * computed" and triggers a scan over all medoids.
 *
 * @param j object to look up
 * @param distQ distance query
 * @param mi iterator over the current medoids
 * @param mindist cache store (NaN = unknown)
 * @return minimum distance of j to any medoid
 */
protected static double getMinDist(DBIDArrayIter j, DistanceQuery<?> distQ, DBIDArrayIter mi, WritableDoubleDataStore mindist) { double prev = mindist.doubleValue(j); if(Double.isNaN(prev)) { // NaN = unknown prev = Double.POSITIVE_INFINITY; for(mi.seek(0); mi.valid(); mi.advance()) { double d = distQ.distance(j, mi); prev = d < prev ? d : prev; } mindist.putDouble(j, prev); } return prev; }
java
/**
 * Partially shuffle the first ssize entries of the id array, drawing the
 * replacement for position i-1 from positions [i, end).
 *
 * NOTE(review): unlike textbook Fisher-Yates, position i-1 itself is
 * excluded from the candidate range (swap partner is i + rand), so an
 * element never "stays" by being swapped with itself — presumably
 * intentional for sampling fresh candidates; confirm against callers.
 *
 * @param ids array to shuffle in place
 * @param ssize number of leading positions to fill
 * @param end exclusive upper bound of the candidate range
 * @param random random generator
 */
private static void shuffle(ArrayModifiableDBIDs ids, int ssize, int end, Random random) { ssize = ssize < end ? ssize : end; // Guard for choosing from tiny sets for(int i = 1; i < ssize; i++) { ids.swap(i - 1, i + random.nextInt(end - i)); } }
java
/**
 * Compute one linear scale per dimension from the data's min/max ranges,
 * skipping NaN values. For plain NumberVectors only getMin is read (min
 * and max coincide for point data); for general spatial objects both
 * getMin and getMax contribute to the range.
 *
 * @param rel relation of spatial objects
 * @return one LinearScale per dimension
 */
public static LinearScale[] calcScales(Relation<? extends SpatialComparable> rel) { int dim = RelationUtil.dimensionality(rel); DoubleMinMax[] minmax = DoubleMinMax.newArray(dim); LinearScale[] scales = new LinearScale[dim]; // analyze data for(DBIDIter iditer = rel.iterDBIDs(); iditer.valid(); iditer.advance()) { SpatialComparable v = rel.get(iditer); if(v instanceof NumberVector) { for(int d = 0; d < dim; d++) { final double mi = v.getMin(d); if(mi != mi) { // NaN continue; } minmax[d].put(mi); } } else { for(int d = 0; d < dim; d++) { final double mi = v.getMin(d); if(mi == mi) { // No NaN minmax[d].put(mi); } final double ma = v.getMax(d); if(ma == ma) { // No NaN minmax[d].put(ma); } } } } // generate scales for(int d = 0; d < dim; d++) { scales[d] = new LinearScale(minmax[d].getMin(), minmax[d].getMax()); } return scales; }
java
/**
 * Run the Eclat frequent itemset mining algorithm: transpose the
 * relation into per-item transaction id lists, then recursively extract
 * frequent itemsets per starting item, and return the sorted result.
 *
 * @param db database (unused directly; kept for the algorithm API)
 * @param relation bit-vector transaction relation
 * @return frequent itemsets meeting the minimum support
 */
public FrequentItemsetsResult run(Database db, final Relation<BitVector> relation) { // TODO: implement with resizable arrays, to not need dim. final int dim = RelationUtil.dimensionality(relation); final VectorFieldTypeInformation<BitVector> meta = RelationUtil.assumeVectorField(relation); // Compute absolute minsupport final int minsupp = getMinimumSupport(relation.size()); LOG.verbose("Build 1-dimensional transaction lists."); Duration ctime = LOG.newDuration(STAT + "eclat.transposition.time").begin(); DBIDs[] idx = buildIndex(relation, dim, minsupp); LOG.statistics(ctime.end()); FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Building frequent itemsets", idx.length, LOG) : null; Duration etime = LOG.newDuration(STAT + "eclat.extraction.time").begin(); final List<Itemset> solution = new ArrayList<>(); for(int i = 0; i < idx.length; i++) { LOG.incrementProcessed(prog); extractItemsets(idx, i, minsupp, solution); } LOG.ensureCompleted(prog); Collections.sort(solution); LOG.statistics(etime.end()); LOG.statistics(new LongStatistic(STAT + "frequent-itemsets", solution.size())); return new FrequentItemsetsResult("Eclat", "eclat", solution, meta, relation.size()); }
java
/**
 * Build a package tree over the given classes: class names are shortened
 * relative to rootpkg, package nodes are created on demand (cached in
 * the lookup map), and single-child package chains are collapsed by
 * simplifyTree for every child of the root.
 *
 * @param choices classes to arrange
 * @param rootpkg root package prefix to strip, may be null
 * @return root of the package/class tree
 */
public static TreeNode build(List<Class<?>> choices, String rootpkg) { MutableTreeNode root = new PackageNode(rootpkg, rootpkg); HashMap<String, MutableTreeNode> lookup = new HashMap<>(); if(rootpkg != null) { lookup.put(rootpkg, root); } lookup.put("de.lmu.ifi.dbs.elki", root); lookup.put("", root); // Use the shorthand version of class names. String prefix = rootpkg != null ? rootpkg + "." : null; Class<?>[] choic = choices.toArray(new Class<?>[choices.size()]); Arrays.sort(choic, ELKIServiceScanner.SORT_BY_PRIORITY); for(Class<?> impl : choic) { String name = impl.getName(); name = (prefix != null && name.startsWith(prefix)) ? name.substring(prefix.length()) : name; int plen = (impl.getPackage() != null) ? impl.getPackage().getName().length() + 1 : 0; MutableTreeNode c = new ClassNode(impl.getName().substring(plen), name); MutableTreeNode p = null; int l = name.lastIndexOf('.'); while(p == null) { if(l < 0) { p = root; break; } String pname = name.substring(0, l); p = lookup.get(pname); if(p != null) { break; } l = pname.lastIndexOf('.'); MutableTreeNode tmp = new PackageNode(l >= 0 ? pname.substring(l + 1) : pname, pname); tmp.insert(c, 0); c = tmp; lookup.put(pname, tmp); name = pname; } p.insert(c, p.getChildCount()); } // Simplify tree, except for root node for(int i = 0; i < root.getChildCount(); i++) { MutableTreeNode c = (MutableTreeNode) root.getChildAt(i); MutableTreeNode c2 = simplifyTree(c, null); if(c != c2) { root.remove(i); root.insert(c2, i); } } return root; }
java
/**
 * Recursively collapse single-child package chains: a package node with
 * exactly one child is replaced by that child, accumulating the package
 * name into the prefix, which is prepended to the surviving node's
 * user object.
 *
 * @param cur node to simplify
 * @param prefix accumulated package prefix, or null
 * @return replacement node (may be cur itself)
 */
private static MutableTreeNode simplifyTree(MutableTreeNode cur, String prefix) { if(cur instanceof PackageNode) { PackageNode node = (PackageNode) cur; if(node.getChildCount() == 1) { String newprefix = (prefix != null) ? prefix + "." + (String) node.getUserObject() : (String) node.getUserObject(); cur = simplifyTree((MutableTreeNode) node.getChildAt(0), newprefix); } else { if(prefix != null) { node.setUserObject(prefix + "." + (String) node.getUserObject()); } for(int i = 0; i < node.getChildCount(); i++) { MutableTreeNode c = (MutableTreeNode) node.getChildAt(i); MutableTreeNode c2 = simplifyTree(c, null); if(c != c2) { node.remove(i); node.insert(c2, i); } } } } else if(cur instanceof ClassNode) { ClassNode node = (ClassNode) cur; if(prefix != null) { node.setUserObject(prefix + "." + (String) node.getUserObject()); } } return cur; }
java
/**
 * Serialize a list of classes to a LIST_SEP-joined string, stripping the
 * restriction class's package prefix from names inside that package.
 *
 * @param val classes to format
 * @return joined shorthand class names
 */
protected String formatValue(List<Class<? extends C>> val) { StringBuilder buf = new StringBuilder(50 + val.size() * 25); String pkgname = restrictionClass.getPackage().getName(); for(Class<? extends C> c : val) { if(buf.length() > 0) { buf.append(LIST_SEP); } String name = c.getName(); boolean stripPrefix = name.length() > pkgname.length() && name.startsWith(pkgname) && name.charAt(pkgname.length()) == '.'; buf.append(name, stripPrefix ? pkgname.length() + 1 : 0, name.length()); } return buf.toString(); }
java
/**
 * Dispatch a log record: progress records update (and on completion
 * remove) their progress bar; completed progresses and step progresses
 * are additionally echoed as text. All other records go straight to the
 * text pane.
 *
 * @param record log record to publish
 */
protected void publish(final LogRecord record) { if(record instanceof ProgressLogRecord) { ProgressLogRecord preg = (ProgressLogRecord) record; Progress prog = preg.getProgress(); JProgressBar pbar = getOrCreateProgressBar(prog); updateProgressBar(prog, pbar); if(prog.isComplete()) { removeProgressBar(prog, pbar); } if(prog.isComplete() || prog instanceof StepProgress) { publishTextRecord(record); } } else { publishTextRecord(record); } }
java
/**
 * Forward a record to the text log pane, wrapping any failure in a
 * RuntimeException (logging itself must not fail silently here).
 *
 * @param record log record to display
 */
private void publishTextRecord(final LogRecord record) { try { logpane.publish(record); } catch(Exception e) { throw new RuntimeException("Error writing a log-like message.", e); } }
java
private JProgressBar getOrCreateProgressBar(Progress prog) { JProgressBar pbar = pbarmap.get(prog); // Add a new progress bar. if(pbar == null) { synchronized(pbarmap) { if(prog instanceof FiniteProgress) { pbar = new JProgressBar(0, ((FiniteProgress) prog).getTotal()); pbar.setStringPainted(true); } else if(prog instanceof IndefiniteProgress) { pbar = new JProgressBar(); pbar.setIndeterminate(true); pbar.setStringPainted(true); } else if(prog instanceof MutableProgress) { pbar = new JProgressBar(0, ((MutableProgress) prog).getTotal()); pbar.setStringPainted(true); } else { throw new RuntimeException("Unsupported progress record"); } pbarmap.put(prog, pbar); final JProgressBar pbar2 = pbar; // Make final SwingUtilities.invokeLater(() -> addProgressBar(pbar2)); } } return pbar; }
java
/**
 * Update a progress bar from the current progress state.
 *
 * Fix: for MutableProgress the maximum was set to getProcessed()
 * instead of getTotal(), which made the bar always appear full. The
 * total of a MutableProgress can change, so the maximum is refreshed
 * from getTotal() on every update (getTotal() is the same accessor
 * used when the bar is created).
 *
 * @param prog progress to read
 * @param pbar progress bar to update
 * @throws RuntimeException on an unsupported Progress subtype
 */
private void updateProgressBar(Progress prog, JProgressBar pbar) {
  if(prog instanceof FiniteProgress) {
    pbar.setValue(((FiniteProgress) prog).getProcessed());
    pbar.setString(((FiniteProgress) prog).toString());
  }
  else if(prog instanceof IndefiniteProgress) {
    pbar.setValue(((IndefiniteProgress) prog).getProcessed());
    pbar.setString(((IndefiniteProgress) prog).toString());
  }
  else if(prog instanceof MutableProgress) {
    // Refresh the (mutable) maximum before setting the value.
    pbar.setMaximum(((MutableProgress) prog).getTotal());
    pbar.setValue(((MutableProgress) prog).getProcessed());
    pbar.setString(((MutableProgress) prog).toString());
  }
  else {
    throw new RuntimeException("Unsupported progress record");
  }
}
java
/**
 * Unregister a completed progress and remove its bar from the UI on the
 * Swing EDT (map mutation is guarded by the pbarmap lock).
 *
 * @param prog progress to unregister
 * @param pbar its progress bar component
 */
private void removeProgressBar(Progress prog, JProgressBar pbar) { synchronized(pbarmap) { pbarmap.remove(prog); SwingUtilities.invokeLater(() -> removeProgressBar(pbar)); } }
java
/**
 * Clear the text pane and discard all registered progress bars.
 *
 * Fix: the original removed entries from pbarmap while iterating its
 * entry set, which throws ConcurrentModificationException as soon as a
 * progress bar is registered. The components are now removed first and
 * the map cleared afterwards.
 */
public void clear() {
  logpane.clear();
  synchronized(pbarmap) {
    // Detach all bar components, then drop the map entries in one go.
    for(Entry<Progress, JProgressBar> ent : pbarmap.entrySet()) {
      super.remove(ent.getValue());
    }
    pbarmap.clear();
  }
}
java
/**
 * React to resize events of the watched component: recompute the aspect
 * ratio and trigger executeResize only when it moved by more than the
 * configured threshold (debounces tiny/no-op resizes).
 *
 * @param e resize event
 */
@Override public void componentResized(ComponentEvent e) { if (e.getComponent() == component) { double newRatio = getCurrentRatio(); if (Math.abs(newRatio - activeRatio) > threshold) { activeRatio = newRatio; executeResize(newRatio); } } }
java
/**
 * Format a log record as its plain message, appending a newline unless
 * the message already ends in one; progress records are passed through
 * unchanged (they carry their own carriage control).
 *
 * @param record record to format
 * @return message text, newline-terminated unless exempted above
 */
@Override public String format(LogRecord record) { String msg = record.getMessage(); if(msg.length() > 0) { if (record instanceof ProgressLogRecord) { return msg; } if(msg.endsWith(OutputStreamLogger.NEWLINE)) { return msg; } } return msg + OutputStreamLogger.NEWLINE; }
java
protected double[] alignLabels(List<ClassLabel> l1, double[] d1, Collection<ClassLabel> l2) { assert (l1.size() == d1.length); if(l1 == l2) { return d1.clone(); } double[] d2 = new double[l2.size()]; Iterator<ClassLabel> i2 = l2.iterator(); for(int i = 0; i2.hasNext();) { ClassLabel l = i2.next(); int idx = l1.indexOf(l); if(idx < 0 && getLogger().isDebuggingFiner()) { getLogger().debugFiner("Label not found: " + l); } d2[i] = (idx >= 0) ? d1[idx] : 0.; // Default to 0 for unknown labels! } return d2; }
java
/**
 * Initialize the starting means from existing clusters by extracting
 * each cluster model's mean vector.
 *
 * @param initialMeans clusters whose model means seed the algorithm
 */
public void setInitialClusters(List<? extends Cluster<? extends MeanModel>> initialMeans) {
  final int k = initialMeans.size();
  double[][] means = new double[k][];
  for(int i = 0; i < k; i++) {
    means[i] = initialMeans.get(i).getModel().getMean();
  }
  this.initialMeans = means;
}
java
/**
 * Log an exception at SEVERE level; if no message was given, the
 * throwable's own message is used instead.
 *
 * @param message message text, may be null
 * @param e cause, may be null
 */
public static void exception(String message, Throwable e) { if(message == null && e != null) { message = e.getMessage(); } logExpensive(Level.SEVERE, message, e); }
java
/**
 * Log a warning at WARNING level; if no message was given, the
 * throwable's own message is used instead.
 *
 * @param message message text, may be null
 * @param e cause, may be null
 */
public static void warning(String message, Throwable e) { if(message == null && e != null) { message = e.getMessage(); } logExpensive(Level.WARNING, message, e); }
java
/**
 * Log a message at INFO level; if no message was given, the throwable's
 * own message is used instead.
 *
 * @param message message text, may be null
 * @param e cause, may be null
 */
public static void message(String message, Throwable e) { if(message == null && e != null) { message = e.getMessage(); } logExpensive(Level.INFO, message, e); }
java
/**
 * Infer the calling class and method by walking the current stack trace
 * and returning the first frame that is not LoggingUtil itself.
 *
 * @return { className, methodName } of the caller, or null if every
 *         frame belongs to LoggingUtil
 */
private static final String[] inferCaller() { StackTraceElement[] stack = (new Throwable()).getStackTrace(); int ix = 0; while(ix < stack.length) { StackTraceElement frame = stack[ix]; if(!frame.getClassName().equals(LoggingUtil.class.getCanonicalName())) { return new String[] { frame.getClassName(), frame.getMethodName() }; } ix++; } return null; }
java
/**
 * Compute the binomial coefficient "n choose k" exactly using long
 * arithmetic.
 *
 * Fix: the accumulator was a double, which loses precision for larger n
 * even though this method (unlike approximateBinomialCoefficient, which
 * is declared to return a double) promises an exact long result. With a
 * long accumulator every intermediate step {@code temp * i / j} is
 * exact, because after j factors the running product of j consecutive
 * integers is divisible by j!.
 *
 * Note: may overflow long for large n; use the approximate variant then.
 *
 * @param n number of elements
 * @param k number of chosen elements
 * @return n choose k
 */
public static long binomialCoefficient(long n, long k) {
  // Use the smaller of k and n-k factors.
  final long m = Math.max(k, n - k);
  long temp = 1;
  for(long i = n, j = 1; i > m; i--, j++) {
    temp = temp * i / j;
  }
  return temp;
}
java
/**
 * Approximate the binomial coefficient "n choose k" in floating point.
 *
 * Fix: the accumulator was a long, so this "approximate" variant could
 * overflow for large n — exactly the case it exists for, given that the
 * sibling binomialCoefficient handles the exact-long range. A double
 * accumulator matches the declared double return type and degrades
 * gracefully to an approximation instead of overflowing.
 *
 * @param n number of elements
 * @param k number of chosen elements
 * @return approximate value of n choose k
 */
public static double approximateBinomialCoefficient(int n, int k) {
  // Use the smaller of k and n-k factors.
  final int m = Math.max(k, n - k);
  double temp = 1;
  for(int i = n, j = 1; i > m; i--, j++) {
    temp = temp * i / j;
  }
  return temp;
}
java
/**
 * Build the integer sequence start, start+1, ..., end-1.
 *
 * @param start first value (inclusive)
 * @param end last value (exclusive)
 * @return array of consecutive integers; the shared empty array when
 *         start >= end
 */
public static int[] sequence(int start, int end) {
  // Empty range: reuse the canonical empty array.
  if(start >= end) {
    return EMPTY_INTS;
  }
  final int[] out = new int[end - start];
  for(int i = 0; i < out.length; i++) {
    out[i] = start + i;
  }
  return out;
}
java
public KNNDistanceOrderResult run(Database database, Relation<O> relation) { final DistanceQuery<O> distanceQuery = database.getDistanceQuery(relation, getDistanceFunction()); final KNNQuery<O> knnQuery = database.getKNNQuery(distanceQuery, k + 1); final int size = (int) ((sample <= 1.) ? Math.ceil(relation.size() * sample) : sample); DBIDs sample = DBIDUtil.randomSample(relation.getDBIDs(), size, rnd); FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Sampling kNN distances", size, LOG) : null; double[] knnDistances = new double[size]; int i = 0; for(DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance(), i++) { final KNNList neighbors = knnQuery.getKNNForDBID(iditer, k + 1); knnDistances[i] = neighbors.getKNNDistance(); LOG.incrementProcessed(prog); } LOG.ensureCompleted(prog); return new KNNDistanceOrderResult(knnDistances, k); }
java
/**
 * Precompute a local model for every object from its epsilon-range
 * neighborhood, storing the models in a temporary hot data store and
 * logging progress and total preprocessing time.
 *
 * @param modelcls model class for the storage
 * @param relation data relation
 * @param query range query used to fetch neighborhoods
 * @return store mapping each object id to its local model
 */
public DataStore<M> preprocess(Class<? super M> modelcls, Relation<O> relation, RangeQuery<O> query) { WritableDataStore<M> storage = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, modelcls); Duration time = getLogger().newDuration(this.getClass().getName() + ".preprocessing-time").begin(); FiniteProgress progress = getLogger().isVerbose() ? new FiniteProgress(this.getClass().getName(), relation.size(), getLogger()) : null; for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { DoubleDBIDList neighbors = query.getRangeForDBID(iditer, epsilon); storage.put(iditer, computeLocalModel(iditer, neighbors, relation)); getLogger().incrementProcessed(progress); } getLogger().ensureCompleted(progress); getLogger().statistics(time.end()); return storage; }
java
/**
 * Map a vector from scaled [0:1] space back to data space by unscaling
 * each dimension with its linear scale.
 *
 * @param v vector in scaled space
 * @param factory factory to build the result vector
 * @return vector in data space
 */
@Override public <NV extends NumberVector> NV projectScaledToDataSpace(double[] v, NumberVector.Factory<NV> factory) {
  final double[] unscaled = new double[v.length];
  for(int d = 0; d < unscaled.length; d++) {
    unscaled[d] = scales[d].getUnscaled(v[d]);
  }
  return factory.newNumberVector(unscaled);
}
java
@Override public <NV extends NumberVector> NV projectRenderToDataSpace(double[] v, NumberVector.Factory<NV> prototype) { final int dim = v.length; double[] vec = projectRenderToScaled(v); // Not calling {@link #projectScaledToDataSpace} to avoid extra copy of // vector. for(int d = 0; d < dim; d++) { vec[d] = scales[d].getUnscaled(vec[d]); } return prototype.newNumberVector(vec); }
java
/**
 * Map a relative (delta) vector from scaled space to data space using
 * the relative unscaling of each dimension's linear scale.
 *
 * @param v relative vector in scaled space
 * @param prototype factory to build the result vector
 * @return relative vector in data space
 */
@Override public <NV extends NumberVector> NV projectRelativeScaledToDataSpace(double[] v, NumberVector.Factory<NV> prototype) {
  final double[] out = new double[v.length];
  for(int d = 0; d < out.length; d++) {
    out[d] = scales[d].getRelativeUnscaled(v[d]);
  }
  return prototype.newNumberVector(out);
}
java
/**
 * Finalize the hierarchy under construction: release the temporary
 * cluster-size store, warn if the merge count does not match the
 * expected ids.size()-1, and return a result object (a prototype-aware
 * variant when prototypes were tracked).
 *
 * @return completed pointer hierarchy representation
 */
public PointerHierarchyRepresentationResult complete() { if(csize != null) { csize.destroy(); csize = null; } if(mergecount != ids.size() - 1) { LOG.warning(mergecount + " merges were added to the hierarchy, expected " + (ids.size() - 1)); } if(prototypes != null) { return new PointerPrototypeHierarchyRepresentationResult(ids, parent, parentDistance, isSquared, order, prototypes); } return new PointerHierarchyRepresentationResult(ids, parent, parentDistance, isSquared, order); }
java
/**
 * Get the cluster size for an object, lazily allocating the size store
 * with a default of 1 (every object starts as a singleton cluster).
 *
 * @param id object id
 * @return current cluster size
 */
public int getSize(DBIDRef id) { if(csize == null) { csize = DataStoreUtil.makeIntegerStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, 1); } return csize.intValue(id); }
java
/**
 * Set the cluster size for an object, lazily allocating the size store
 * with a default of 1 (mirrors getSize).
 *
 * @param id object id
 * @param size new cluster size
 */
public void setSize(DBIDRef id, int size) { if(csize == null) { csize = DataStoreUtil.makeIntegerStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, 1); } csize.putInt(id, size); }
java
/**
 * Spatial Outlier Factor: compute a LOF-style score where the
 * neighborhood comes from a spatial predicate but densities use the
 * non-spatial attribute distance. First pass computes local reachability
 * densities (NaN from empty neighborhoods mapped to 0), second pass the
 * density quotients, which become the outlier scores.
 *
 * @param database database for predicate instantiation
 * @param spatial relation supplying spatial neighborhoods
 * @param relation relation supplying non-spatial attributes
 * @return outlier result with quotient score meta
 */
public OutlierResult run(Database database, Relation<N> spatial, Relation<O> relation) { final NeighborSetPredicate npred = getNeighborSetPredicateFactory().instantiate(database, spatial); DistanceQuery<O> distFunc = getNonSpatialDistanceFunction().instantiate(relation); WritableDoubleDataStore lrds = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_TEMP | DataStoreFactory.HINT_HOT); WritableDoubleDataStore lofs = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_STATIC); DoubleMinMax lofminmax = new DoubleMinMax(); // Compute densities for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { DBIDs neighbors = npred.getNeighborDBIDs(iditer); double avg = 0; for(DBIDIter iter = neighbors.iter(); iter.valid(); iter.advance()) { avg += distFunc.distance(iditer, iter); } double lrd = 1 / (avg / neighbors.size()); if(Double.isNaN(lrd)) { lrd = 0; } lrds.putDouble(iditer, lrd); } // Compute density quotients for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { DBIDs neighbors = npred.getNeighborDBIDs(iditer); double avg = 0; for(DBIDIter iter = neighbors.iter(); iter.valid(); iter.advance()) { avg += lrds.doubleValue(iter); } final double lrd = (avg / neighbors.size()) / lrds.doubleValue(iditer); if(!Double.isNaN(lrd)) { lofs.putDouble(iditer, lrd); lofminmax.put(lrd); } else { lofs.putDouble(iditer, 0.0); } } // Build result representation. DoubleRelation scoreResult = new MaterializedDoubleRelation("Spatial Outlier Factor", "sof-outlier", lofs, relation.getDBIDs()); OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta(lofminmax.getMin(), lofminmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 1.0); OutlierResult or = new OutlierResult(scoreMeta, scoreResult); or.addChildResult(npred); return or; }
java
/**
 * Register a handler for a restriction class. Later registrations take
 * precedence, since lookup traverses the list from the end.
 *
 * @param restrictionClass class the handler is responsible for
 * @param handler handler to register
 */
public void insertHandler(Class<?> restrictionClass, H handler) { // note that the handlers list is kept in a list that is traversed in // backwards order. handlers.add(new Pair<Class<?>, H>(restrictionClass, handler)); }
java
public H getHandler(Object o) { if(o == null) { return null; } // note that we start at the end of the list. ListIterator<Pair<Class<?>, H>> iter = handlers.listIterator(handlers.size()); while(iter.hasPrevious()) { Pair<Class<?>, H> pair = iter.previous(); try { // if we can cast to the restriction class, use the given handler. pair.getFirst().cast(o); return pair.getSecond(); } catch(ClassCastException e) { // do nothing, but try previous in list } } return null; }
java
/**
 * Get (or lazily create and cache) the Logging wrapper for a logger
 * name; synchronized so concurrent callers share one instance per name.
 *
 * @param name logger name
 * @return cached Logging instance
 */
public synchronized static Logging getLogger(final String name) { Logging logger = loggers.get(name); if(logger == null) { logger = new Logging(Logger.getLogger(name)); loggers.put(name, logger); } return logger; }
java
/**
 * Log a message at the given level, wrapped in an ELKILogRecord.
 *
 * @param level log level
 * @param message message text
 */
public void log(java.util.logging.Level level, CharSequence message) { LogRecord rec = new ELKILogRecord(level, message); logger.log(rec); }
java
/**
 * Log an error (SEVERE) with an attached cause.
 *
 * @param message message text
 * @param e cause
 */
public void error(CharSequence message, Throwable e) { log(Level.SEVERE, message, e); }
java
/**
 * Log a warning with an attached cause.
 *
 * @param message message text
 * @param e cause
 */
public void warning(CharSequence message, Throwable e) { log(Level.WARNING, message, e); }
java
/**
 * Log a statistics message with an attached cause.
 *
 * @param message message text
 * @param e cause
 */
public void statistics(CharSequence message, Throwable e) { log(Level.STATISTICS, message, e); }
java
/**
 * Log a very-verbose message with an attached cause.
 *
 * @param message message text
 * @param e cause
 */
public void veryverbose(CharSequence message, Throwable e) { log(Level.VERYVERBOSE, message, e); }
java
/**
 * Log an exception (SEVERE) with a custom message.
 *
 * @param message message text
 * @param e cause
 */
public void exception(CharSequence message, Throwable e) { log(Level.SEVERE, message, e); }
java
/**
 * Log an exception (SEVERE) using its own message, with a fallback text
 * when the throwable carries no message.
 *
 * @param e cause
 */
public void exception(Throwable e) { final String msg = e.getMessage(); log(Level.SEVERE, msg != null ? msg : "An exception occurred.", e); }
java
/**
 * Log a statistic as "key: value"; null statistics are silently ignored.
 *
 * @param stats statistic to log, may be null
 */
public void statistics(Statistic stats) { if(stats != null) { log(Level.STATISTICS, stats.getKey() + ": " + stats.formatValue()); } }
java
/**
 * Generate the synthetic data set: draw points from each cluster generator,
 * optionally test them against the models (reassigning or discarding
 * rejected points), and collect everything into a single bundle.
 *
 * @return bundle with vectors, class labels, and generator models
 * @throws AbortException when no clusters are configured or their
 *         dimensionalities disagree
 */
public MultipleObjectsBundle generate() {
  // we actually need some clusters.
  if(generators.isEmpty()) {
    throw new AbortException("No clusters specified.");
  }
  // Assert that cluster dimensions agree.
  final int dim = generators.get(0).getDim();
  for(GeneratorInterface c : generators) {
    if(c.getDim() != dim) {
      throw new AbortException("Cluster dimensions do not agree.");
    }
  }
  // Prepare result bundle
  MultipleObjectsBundle bundle = new MultipleObjectsBundle();
  VectorFieldTypeInformation<DoubleVector> type = new VectorFieldTypeInformation<>(DoubleVector.FACTORY, dim);
  bundle.appendColumn(type, new ArrayList<>());
  bundle.appendColumn(TypeUtil.CLASSLABEL, new ArrayList<>());
  bundle.appendColumn(Model.TYPE, new ArrayList<Model>());
  // generate clusters
  ClassLabel[] labels = new ClassLabel[generators.size()];
  Model[] models = new Model[generators.size()];
  initLabelsAndModels(generators, labels, models, relabelClusters);
  // Choose the point assignment strategy based on the configuration flags.
  final AssignPoint assignment;
  if(!testAgainstModel) {
    assignment = new AssignPoint();
  }
  else if(relabelClusters == null) {
    assignment = new TestModel();
  }
  else if(!relabelDistance) {
    assignment = new AssignLabelsByDensity(labels);
  }
  else {
    assignment = new AssignLabelsByDistance(labels);
  }
  for(int i = 0; i < labels.length; i++) {
    final GeneratorInterface curclus = generators.get(i);
    assignment.newCluster(i, curclus);
    // Only dynamic generators allow rejection / model testing:
    GeneratorInterfaceDynamic cursclus = (curclus instanceof GeneratorInterfaceDynamic) ? (GeneratorInterfaceDynamic) curclus : null;
    int kept = 0;
    while(kept < curclus.getSize()) {
      // generate the "missing" number of points
      List<double[]> newp = curclus.generate(curclus.getSize() - kept);
      for(double[] p : newp) {
        int bestc = assignment.getAssignment(i, p);
        if(bestc < 0) {
          // Rejected by the model test: count as discarded and retry.
          cursclus.incrementDiscarded();
          continue;
        }
        bundle.appendSimple(DoubleVector.wrap(p), labels[bestc], models[bestc]);
        ++kept;
      }
    }
  }
  return bundle;
}
java
/**
 * Initialize cluster labels and models; clusters whose name matches the
 * "reassign" pattern get no label/model of their own (their entries stay
 * null), so their points will be reassigned to the remaining clusters.
 *
 * @param generators Cluster generators
 * @param labels Output array of class labels (null entries = reassigned)
 * @param models Output array of models (null entries = reassigned)
 * @param reassign Pattern of cluster names to reassign, or null to disable
 */
private void initLabelsAndModels(ArrayList<GeneratorInterface> generators, ClassLabel[] labels, Model[] models, Pattern reassign) {
  // Count of clusters that keep their own label (i.e. do NOT match the pattern).
  int existingclusters = 0;
  if(reassign != null) {
    for(int i = 0; i < labels.length; i++) {
      final GeneratorInterface curclus = generators.get(i);
      if(!reassign.matcher(curclus.getName()).find()) {
        labels[i] = new SimpleClassLabel(curclus.getName());
        models[i] = curclus.makeModel();
        ++existingclusters;
      }
    }
    if(existingclusters == 0) {
      LOG.warning("All clusters matched the 'reassign' pattern. Ignoring.");
    }
    if(existingclusters == 1) {
      // No need to test - only one possible answer.
      for(int i = 0; i < labels.length; i++) {
        if(labels[i] != null) {
          // Assign the single surviving label/model to all clusters.
          Arrays.fill(labels, labels[i]);
          Arrays.fill(models, models[i]);
          break;
        }
      }
    }
    if(existingclusters == labels.length) {
      LOG.warning("No clusters matched the 'reassign' pattern.");
    }
  }
  // Default case, every cluster has a label and model.
  if(existingclusters == 0) {
    for(int i = 0; i < labels.length; i++) {
      final GeneratorInterface curclus = generators.get(i);
      labels[i] = new SimpleClassLabel(curclus.getName());
      models[i] = curclus.makeModel();
    }
  }
}
java
/**
 * Get the vector field type information of a relation, assuming it actually
 * is a vector field.
 *
 * @param relation Relation to inspect
 * @param <V> Vector type
 * @return vector field type information
 * @throws UnsupportedOperationException when the relation's type is not a
 *         vector field
 */
public static <V extends FeatureVector<?>> VectorFieldTypeInformation<V> assumeVectorField(Relation<V> relation) {
  try {
    return ((VectorFieldTypeInformation<V>) relation.getDataTypeInformation());
  }
  catch(ClassCastException e) {
    // Catch only the failure mode of the cast above, not arbitrary exceptions.
    throw new UnsupportedOperationException("Expected a vector field, got type information: " + relation.getDataTypeInformation().toString(), e);
  }
}
java
/**
 * Get the vector factory of a relation of number vectors.
 *
 * @param relation Relation to inspect (must be a vector field)
 * @param <V> Vector type
 * @return factory producing vectors of the relation's type
 */
public static <V extends NumberVector> NumberVector.Factory<V> getNumberVectorFactory(Relation<V> relation) {
  final VectorFieldTypeInformation<V> type = assumeVectorField(relation);
  // Safe as long as the type information matches the relation's generics.
  @SuppressWarnings("unchecked")
  final NumberVector.Factory<V> factory = (NumberVector.Factory<V>) type.getFactory();
  return factory;
}
java
/**
 * Get the dimensionality of a relation of spatial objects.
 *
 * @param relation Relation to inspect
 * @return dimensionality of the field type, or -1 when the relation is not
 *         a fixed-dimensionality field
 */
public static int dimensionality(Relation<? extends SpatialComparable> relation) {
  final SimpleTypeInformation<? extends SpatialComparable> type = relation.getDataTypeInformation();
  return (type instanceof FieldTypeInformation) //
      ? ((FieldTypeInformation) type).getDimensionality() : -1;
}
java
/**
 * Compute the per-dimension minimum and maximum over all vectors of a
 * relation.
 *
 * @param relation Relation of number vectors
 * @return array of two rows: { minima, maxima }
 */
public static double[][] computeMinMax(Relation<? extends NumberVector> relation) {
  final int dim = RelationUtil.dimensionality(relation);
  double[] mins = new double[dim], maxs = new double[dim];
  for(int d = 0; d < dim; d++) {
    mins[d] = Double.MAX_VALUE;
    maxs[d] = -Double.MAX_VALUE;
  }
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    final NumberVector vec = relation.get(iditer);
    for(int d = 0; d < dim; d++) {
      final double val = vec.doubleValue(d);
      // Explicit comparisons (not Math.min/max), so NaN values are ignored.
      if(val < mins[d]) {
        mins[d] = val;
      }
      if(val > maxs[d]) {
        maxs[d] = val;
      }
    }
  }
  return new double[][] { mins, maxs };
}
java
/**
 * Get a label for the given column of a relation, falling back to a generic
 * "Column i" name when no label is available.
 *
 * @param rel Relation
 * @param col Column index
 * @param <V> Object type
 * @return column label
 */
public static <V extends SpatialComparable> String getColumnLabel(Relation<? extends V> rel, int col) {
  SimpleTypeInformation<? extends V> type = rel.getDataTypeInformation();
  if(type instanceof VectorFieldTypeInformation) {
    // Vector fields may carry per-column labels.
    String lbl = ((VectorFieldTypeInformation<?>) type).getLabel(col);
    if(lbl != null) {
      return lbl;
    }
  }
  return "Column " + col;
}
java
/**
 * Cast a relation of one number vector type to another.
 * <p>
 * This is an unchecked cast and inherently unsafe: callers must ensure the
 * actual vector types are compatible.
 *
 * @param database Relation to cast
 * @param <V> Target vector type
 * @param <T> Source vector type
 * @return the same relation, with adjusted generics
 */
@SuppressWarnings("unchecked")
public static <V extends NumberVector, T extends NumberVector> Relation<V> relationUglyVectorCast(Relation<T> database) {
  return (Relation<V>) database;
}
java
/**
 * Get the precomputed kNN list for an object, lazily running the
 * preprocessor on first access.
 *
 * @param id Object id
 * @return k nearest neighbors of the object
 */
public KNNList get(DBIDRef id) {
  // Lazy initialization: storage is only filled on the first request.
  if(storage == null) {
    if(getLogger().isDebugging()) {
      getLogger().debug("Running kNN preprocessor: " + this.getClass());
    }
    preprocess();
  }
  return storage.get(id);
}
java
/**
 * Run COPAC: perform a generalized DBSCAN with the COPAC neighbor predicate,
 * then re-wrap the resulting flat clusters with dimension models.
 *
 * @param database Database to run on
 * @param relation Relation of vectors to cluster
 * @return clustering with per-cluster correlation dimensionality models
 */
public Clustering<DimensionModel> run(Database database, Relation<V> relation) {
  COPACNeighborPredicate.Instance npred = new COPACNeighborPredicate<V>(settings).instantiate(database, relation);
  CorePredicate.Instance<DBIDs> cpred = new MinPtsCorePredicate(settings.minpts).instantiate(database);
  Clustering<Model> dclusters = new GeneralizedDBSCAN.Instance<>(npred, cpred, false).run();
  // Re-wrap the detected clusters for COPAC:
  Clustering<DimensionModel> result = new Clustering<>("COPAC clustering", "copac-clustering");
  // Generalized DBSCAN clusterings will be flat.
  for(It<Cluster<Model>> iter = dclusters.iterToplevelClusters(); iter.valid(); iter.advance()) {
    Cluster<Model> clus = iter.get();
    if(clus.size() > 0) {
      // Attach the local dimensionality as the cluster model.
      int dim = npred.dimensionality(clus.getIDs().iter());
      DimensionModel model = new DimensionModel(dim);
      result.addToplevelCluster(new Cluster<>(clus.getIDs(), model));
    }
  }
  return result;
}
java
/**
 * Find the first cluster id entry that is still unclustered.
 *
 * @return index of the first UNCLUSTERED entry, or -1 when all are paired
 */
public int getUnpairedClusteringIndex() {
  for(int i = 0; i < clusterIds.length; i++) {
    if(clusterIds[i] == UNCLUSTERED) {
      return i;
    }
  }
  return -1; // All entries are paired.
}
java
/**
 * Test whether a value represents "null", either literally or via one of
 * the configured null placeholder constants.
 *
 * @param val Value to test
 * @return true when the value is to be treated as null
 */
protected static boolean isNull(Object val) {
  if(val == null) {
    return true;
  }
  // Compare against the string, double and integer null placeholders.
  return STRING_NULL.equals(val) || DOUBLE_NULL.equals(val) || INTEGER_NULL.equals(val);
}
java
/**
 * Format the cause of an exception for appending to an error message.
 *
 * @param cause Cause, may be null
 * @return empty string when there is no cause; otherwise a newline followed
 *         by the cause's message (or its string form when it has no message)
 */
private static String formatCause(Throwable cause) {
  if(cause == null) {
    return "";
  }
  final String message = cause.getMessage();
  if(message != null) {
    return "\n" + message;
  }
  return "\n" + cause.toString();
}
java
/**
 * Find a writer for the given object, falling back to the generic
 * toString-based writer for classes that define their own toString().
 *
 * @param o Object to find a writer for
 * @return matching writer, or null when no writer applies
 */
public TextWriterWriterInterface<?> getWriterFor(Object o) {
  if(o == null) {
    return null;
  }
  TextWriterWriterInterface<?> writer = writers.getHandler(o);
  if(writer != null) {
    return writer;
  }
  try {
    // Use the fallback writer only when the class (or a superclass) actually
    // overrides Object.toString() — otherwise the output would be useless.
    final Class<?> decl = o.getClass().getMethod("toString").getDeclaringClass();
    if(decl == Object.class) {
      return null; // TODO: cache this, too
    }
    // Cache the positive decision for the declaring class.
    writers.insertHandler(decl, fallbackwriter);
    return fallbackwriter;
  }
  catch(NoSuchMethodException | SecurityException e) {
    return null;
  }
}
java
/**
 * Create a bicluster object from the given row and column bits.
 *
 * @param rows Bitset of selected rows
 * @param cols Bitset of selected columns
 * @return cluster over the row ids, with a model holding the column ids
 */
protected Cluster<BiclusterModel> defineBicluster(BitSet rows, BitSet cols) {
  ArrayDBIDs rowIDs = rowsBitsetToIDs(rows);
  int[] colIDs = colsBitsetToIDs(cols);
  return new Cluster<>(rowIDs, new BiclusterModel(colIDs));
}
java
/**
 * Compute the sample skewness (bias-corrected third standardized moment).
 *
 * @return sample skewness
 * @throws ArithmeticException when the variance is 0 or the weight is &lt;= 2
 */
public double getSampleSkewness() {
  if(!(m2 > 0) || !(n > 2)) {
    throw new ArithmeticException("Skewness not defined when variance is 0 or weight <= 2.0!");
  }
  // m3 * n / ((n-1)(n-2)) divided by sampleVariance^(3/2).
  return (m3 * n / (n - 1) / (n - 2)) / FastMath.pow(getSampleVariance(), 1.5);
}
java
/**
 * Spherical distance between two points given in degrees; converts to
 * radians and delegates to {@code cosineOrHaversineRad}.
 *
 * @param lat1 Latitude of first point, in degrees
 * @param lon1 Longitude of first point, in degrees
 * @param lat2 Latitude of second point, in degrees
 * @param lon2 Longitude of second point, in degrees
 * @return distance as computed by cosineOrHaversineRad
 */
public static double cosineOrHaversineDeg(double lat1, double lon1, double lat2, double lon2) {
  return cosineOrHaversineRad(deg2rad(lat1), deg2rad(lon1), deg2rad(lat2), deg2rad(lon2));
}
java
/**
 * Cross-track distance: signed distance of a query point Q from the great
 * circle through points 1 and 2. All angles in radians.
 *
 * @param lat1 Latitude of course starting point, radians
 * @param lon1 Longitude of course starting point, radians
 * @param lat2 Latitude of course destination point, radians
 * @param lon2 Longitude of course destination point, radians
 * @param latQ Latitude of query point, radians
 * @param lonQ Longitude of query point, radians
 * @param dist1Q Distance from point 1 to the query point, radians
 * @return cross-track distance, radians
 */
public static double crossTrackDistanceRad(double lat1, double lon1, double lat2, double lon2, double latQ, double lonQ, double dist1Q) {
  final double dlon12 = lon2 - lon1;
  final double dlon1Q = lonQ - lon1;
  // Compute trigonometric functions only once.
  final DoubleWrapper tmp = new DoubleWrapper(); // To return cosine
  final double slat1 = sinAndCos(lat1, tmp), clat1 = tmp.value;
  final double slatQ = sinAndCos(latQ, tmp), clatQ = tmp.value;
  final double slat2 = sinAndCos(lat2, tmp), clat2 = tmp.value;
  // / Compute the course
  // y = sin(dlon) * cos(lat2)
  final double sdlon12 = sinAndCos(dlon12, tmp), cdlon12 = tmp.value;
  final double sdlon1Q = sinAndCos(dlon1Q, tmp), cdlon1Q = tmp.value;
  final double yE = sdlon12 * clat2;
  final double yQ = sdlon1Q * clatQ;
  // x = cos(lat1) * sin(lat2) - sin(lat1) * cos(lat2) * cos(dlon)
  final double xE = clat1 * slat2 - slat1 * clat2 * cdlon12;
  final double xQ = clat1 * slatQ - slat1 * clatQ * cdlon1Q;
  // Initial bearings from point 1 to point 2 and from point 1 to Q.
  final double crs12 = atan2(yE, xE);
  final double crs1Q = atan2(yQ, xQ);
  // / Calculate cross-track distance
  return asin(sin(dist1Q) * sin(crs1Q - crs12));
}
java
/**
 * Along-track distance: how far along the course from point 1 towards
 * point 2 the projection of the query point lies; the sign indicates
 * whether the projection is ahead of (+) or behind (-) point 1.
 * All angles in radians.
 *
 * @param lat1 Latitude of course starting point, radians
 * @param lon1 Longitude of course starting point, radians
 * @param lat2 Latitude of course destination point, radians
 * @param lon2 Longitude of course destination point, radians
 * @param latQ Latitude of query point, radians
 * @param lonQ Longitude of query point, radians
 * @param dist1Q Distance from point 1 to the query point, radians
 * @param ctd Cross-track distance of the query point, radians
 * @return signed along-track distance, radians
 */
public static double alongTrackDistanceRad(double lat1, double lon1, double lat2, double lon2, double latQ, double lonQ, double dist1Q, double ctd) {
  // FIXME: optimize the sign computation!
  // Positive when Q bears less than 90 degrees off the course direction.
  int sign = Math.abs(bearingRad(lat1, lon1, lat2, lon2) - bearingRad(lat1, lon1, latQ, lonQ)) < HALFPI ? +1 : -1;
  return sign * acos(cos(dist1Q) / cos(ctd));
  // TODO: for short distances, use this instead?
  // asin(sqrt( (sin(dist_1Q))^2 - (sin(XTD))^2 )/cos(XTD))
}
java
/**
 * Sort the array ascending, then reverse it — i.e. the result is the array
 * sorted in descending order. Operates in place and returns the same array.
 * <p>
 * NOTE(review): despite the name, this also sorts the input (Arrays.sort
 * before reversing) — confirm callers expect a descending sort rather than
 * a plain reversal.
 *
 * @param a Array to process (modified in place)
 * @return the input array, sorted descending
 */
private static double[] reversed(double[] a) {
  // TODO: there doesn't appear to be a nicer version in Java, unfortunately.
  Arrays.sort(a);
  for(int i = 0, j = a.length - 1; i < j; i++, j--) {
    double tmp = a[i];
    a[i] = a[j];
    a[j] = tmp;
  }
  return a;
}
java
/**
 * Compute the share of variance explained by the strong eigenvalues.
 *
 * @param eigenValues All eigenvalues, strong ones first
 * @param filteredEigenPairs Number of eigenpairs considered strong
 * @return ratio of the strong eigenvalue sum to the total sum
 */
private double computeExplainedVariance(double[] eigenValues, int filteredEigenPairs) {
  double strong = 0., weak = 0.;
  int i = 0;
  // Sum the leading (strong) eigenvalues, then the remaining (weak) ones.
  while(i < filteredEigenPairs) {
    strong += eigenValues[i++];
  }
  while(i < eigenValues.length) {
    weak += eigenValues[i++];
  }
  return strong / (strong + weak);
}
java
/**
 * Check that a result list is sorted by ascending distance; when it is not,
 * try (best effort) to sort it in place, and log a warning when the list
 * type does not support sorting.
 *
 * @param results Result list to verify
 */
private void assertSortedByDistance(DoubleDBIDList results) {
  // TODO: sort results instead?
  double dist = -1.0;
  boolean sorted = true;
  for(DoubleDBIDListIter it = results.iter(); it.valid(); it.advance()) {
    double qr = it.doubleValue();
    if(qr < dist) {
      sorted = false;
    }
    dist = qr;
  }
  if(!sorted) {
    try {
      // Best effort: either the cast or the sort may fail; then only warn.
      ModifiableDoubleDBIDList.class.cast(results).sort();
    }
    catch(ClassCastException | UnsupportedOperationException e) {
      LoggingUtil.warning("WARNING: results not sorted by distance!", e);
    }
  }
}
java
public static String prefixParameterToMessage(Parameter<?> p, String message) { return new StringBuilder(100 + message.length()) // .append(p instanceof Flag ? "Flag '" : "Parameter '") // .append(p.getOptionID().getName()) // .append("' ").append(message).toString(); }
java
public static String prefixParametersToMessage(Parameter<?> p, String mid, Parameter<?> p2, String message) { return new StringBuilder(200 + mid.length() + message.length())// .append(p instanceof Flag ? "Flag '" : "Parameter '") // .append(p.getOptionID().getName()) // .append("' ").append(mid) // .append(p instanceof Flag ? " Flag '" : " Parameter '") // .append(p.getOptionID().getName()) // .append(message.length() > 0 ? "' " : "'.").append(message).toString(); }
java
/**
 * Compute the height of the tree by following the first child entry from
 * the root down to a leaf.
 *
 * @return tree height (1 when the root is already a leaf)
 */
@Override
protected int computeHeight() {
  N node = getRoot();
  int height = 1;
  // compute height: descend along the first entry of each non-empty node.
  while(!node.isLeaf() && node.getNumEntries() != 0) {
    E entry = node.getEntry(0);
    node = getNode(entry);
    height++;
  }
  return height;
}
java
/**
 * Create directory nodes from the given entries during bulk loading: the
 * entries are partitioned by the configured bulk splitter, each partition
 * becomes one directory node, which is written to disk.
 *
 * @param nodes Entries to partition into directory nodes
 * @return entries for the newly written directory nodes
 */
private List<E> createBulkDirectoryNodes(List<E> nodes) {
  int minEntries = dirMinimum;
  // Capacity minus one: one slot is kept free (overflow handling).
  int maxEntries = dirCapacity - 1;
  ArrayList<E> result = new ArrayList<>();
  List<List<E>> partitions = settings.bulkSplitter.partition(nodes, minEntries, maxEntries);
  for(List<E> partition : partitions) {
    // create node
    N dirNode = createNewDirectoryNode();
    // insert nodes
    for(E o : partition) {
      dirNode.addDirectoryEntry(o);
    }
    // write to file
    writeNode(dirNode);
    result.add(createNewDirectoryEntry(dirNode));
    if(getLogger().isDebuggingFiner()) {
      getLogger().debugFiner("Directory page no: "+dirNode.getPageID());
    }
  }
  return result;
}
java
private N createRoot(N root, List<E> objects) { // insert data for(E entry : objects) { if (entry instanceof LeafEntry) { root.addLeafEntry(entry); } else { root.addDirectoryEntry(entry); } } // set root mbr ((SpatialDirectoryEntry) getRootEntry()).setMBR(root.computeMBR()); // write to file writeNode(root); if(getLogger().isDebuggingFiner()) { StringBuilder msg = new StringBuilder(); msg.append("pageNo ").append(root.getPageID()); getLogger().debugFiner(msg.toString()); } return root; }
java
private int tailingNonNewline(char[] cbuf, int off, int len) { for(int cnt = 0; cnt < len; cnt++) { final int pos = off + (len - 1) - cnt; if(cbuf[pos] == UNIX_NEWLINE) { return cnt; } if(cbuf[pos] == CARRIAGE_RETURN) { return cnt; } // TODO: need to compare to NEWLINEC, too? } return len; }
java
/**
 * Write characters, maintaining carriage-return based line overwriting:
 * when a chunk begins with a carriage return and the new line is shorter
 * than the previously written (unterminated) one, the remainder of the old
 * line is blanked out with whitespace first; for unrelated output, a
 * newline is inserted before it.
 *
 * @param cbuf Character buffer
 * @param off Offset of the data
 * @param len Length of the data
 * @throws IOException on errors of the underlying writer
 */
@Override
public void write(char[] cbuf, int off, int len) throws IOException {
  if(len <= 0) {
    return;
  }
  // if we havn't last seen a newline, and don't get a CR, insert a newline.
  if(charsSinceNewline > 0) {
    if(cbuf[off] != CARRIAGE_RETURN) {
      super.write(NEWLINEC, 0, NEWLINEC.length);
      charsSinceNewline = 0;
    }
    else {
      // length of this line:
      int nonnl = countNonNewline(cbuf, off + 1, len - 1);
      // clear the existing chars.
      if(nonnl < charsSinceNewline) {
        // The new line is shorter: overwrite the old tail with spaces.
        super.write(CARRIAGE_RETURN);
        while(charsSinceNewline > 0) {
          final int n = Math.min(charsSinceNewline, WHITESPACE.length());
          super.write(WHITESPACE, 0, n);
          charsSinceNewline -= n;
        }
      }
      else {
        charsSinceNewline = 0;
      }
    }
  }
  // Remember how many characters follow the last newline of this chunk.
  charsSinceNewline = tailingNonNewline(cbuf, off, len);
  super.write(cbuf, off, len);
  flush();
}
java
/**
 * Estimate the intrinsic dimensionality (ID) of every object from its k_c
 * nearest neighbors.
 *
 * @param ids Objects to process
 * @param knnQ kNN query to use
 * @return data store with the estimated intrinsic dimensionality per object
 */
protected DoubleDataStore computeIDs(DBIDs ids, KNNQuery<O> knnQ) {
  WritableDoubleDataStore intDims = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Intrinsic dimensionality", ids.size(), LOG) : null;
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    double id = 0.;
    try {
      // k_c + 1 because the query point itself is included in the kNN.
      id = estimator.estimate(knnQ, iter, k_c + 1);
    }
    catch(ArithmeticException e) {
      id = 0; // Too many duplicates, etc.
    }
    intDims.putDouble(iter, id);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  return intDims;
}
java
/**
 * Compute the intrinsic dimensionality outlier score (IDOS) of every
 * object: its own ID times the average inverse ID of its k_r neighbors.
 *
 * @param ids Objects to score
 * @param knnQ kNN query to use
 * @param intDims Precomputed intrinsic dimensionality per object
 * @param idosminmax Output: observed minimum and maximum score
 * @return data store with the IDOS score per object
 */
protected DoubleDataStore computeIDOS(DBIDs ids, KNNQuery<O> knnQ, DoubleDataStore intDims, DoubleMinMax idosminmax) {
  WritableDoubleDataStore ldms = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("ID Outlier Scores for objects", ids.size(), LOG) : null;
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    final KNNList neighbors = knnQ.getKNNForDBID(iter, k_r);
    double sum = 0.;
    int cnt = 0;
    for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) {
      // Skip the query object itself.
      if(DBIDUtil.equal(iter, neighbor)) {
        continue;
      }
      final double id = intDims.doubleValue(neighbor);
      // Neighbors with ID 0 contribute nothing.
      sum += id > 0 ? 1.0 / id : 0.;
      if(++cnt == k_r) { // Always stop after at most k_r elements.
        break;
      }
    }
    final double id_q = intDims.doubleValue(iter);
    // Objects with ID 0 get a score of 0.
    final double idos = id_q > 0 ? id_q * sum / cnt : 0.;
    ldms.putDouble(iter, idos);
    idosminmax.put(idos);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  return ldms;
}
java
/**
 * Run the evolutionary Aggarwal-Yu outlier detection: search for sparse
 * subspaces with an evolutionary algorithm, and score each point by the
 * lowest sparsity coefficient of any subspace containing it (inverted
 * scores: more negative = more outlying).
 *
 * @param database Database to run on
 * @param relation Relation of vectors
 * @return outlier detection result
 */
public OutlierResult run(Database database, Relation<V> relation) {
  final int dbsize = relation.size();
  ArrayList<ArrayList<DBIDs>> ranges = buildRanges(relation);
  Heap<Individuum>.UnorderedIter individuums = (new EvolutionarySearch(relation, ranges, m, rnd.getSingleThreadedRandom())).run();
  WritableDoubleDataStore outlierScore = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC);
  for(; individuums.valid(); individuums.advance()) {
    DBIDs ids = computeSubspaceForGene(individuums.get().getGene(), ranges);
    double sparsityC = sparsity(ids.size(), dbsize, k, phi);
    for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
      double prev = outlierScore.doubleValue(iter);
      // Keep the lowest (sparsest) coefficient seen for each point.
      if(Double.isNaN(prev) || sparsityC < prev) {
        outlierScore.putDouble(iter, sparsityC);
      }
    }
  }
  DoubleMinMax minmax = new DoubleMinMax();
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    double val = outlierScore.doubleValue(iditer);
    // Points never contained in any evaluated subspace get a score of 0.
    if(Double.isNaN(val)) {
      outlierScore.putDouble(iditer, val = 0.);
    }
    minmax.put(val);
  }
  DoubleRelation scoreResult = new MaterializedDoubleRelation("AggarwalYuEvolutionary", "aggarwal-yu-outlier", outlierScore, relation.getDBIDs());
  OutlierScoreMeta meta = new InvertedOutlierScoreMeta(minmax.getMin(), minmax.getMax(), Double.NEGATIVE_INFINITY, 0.0);
  return new OutlierResult(meta, scoreResult);
}
java
/**
 * Compute the full symmetric matrix of pairwise distances, squaring the
 * distances unless the distance function already reports squared values.
 *
 * @param ids Objects, in array order
 * @param dq Distance query to use
 * @return symmetric distance matrix
 */
protected double[][] buildDistanceMatrix(ArrayDBIDs ids, DistanceQuery<?> dq) {
  final int size = ids.size();
  double[][] dmat = new double[size][size];
  // Square distances only when the function is not already squared.
  final boolean square = !dq.getDistanceFunction().isSquared();
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing distance matrix", (size * (size - 1)) >>> 1, LOG) : null;
  Duration timer = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".runtime.distancematrix").begin() : null;
  DBIDArrayIter ix = ids.iter(), iy = ids.iter();
  for(ix.seek(0); ix.valid(); ix.advance()) {
    double[] dmat_x = dmat[ix.getOffset()];
    // Only compute the upper triangle; mirror into the lower triangle.
    for(iy.seek(ix.getOffset() + 1); iy.valid(); iy.advance()) {
      final double dist = dq.distance(ix, iy);
      dmat[iy.getOffset()][ix.getOffset()] = dmat_x[iy.getOffset()] = square ? (dist * dist) : dist;
    }
    if(prog != null) {
      int row = ix.getOffset() + 1;
      prog.setProcessed(row * size - ((row * (row + 1)) >>> 1), LOG);
    }
  }
  LOG.ensureCompleted(prog);
  if(timer != null) {
    LOG.statistics(timer.end());
  }
  return dmat;
}
java
/**
 * Run X-means: start with k_min clusters from the inner k-means, then
 * repeatedly try to split each cluster in two, keeping splits that improve
 * the information criterion, until the structure stabilizes or k_max is
 * reached.
 *
 * @param database Database to run on
 * @param relation Relation of vectors
 * @return final clustering
 */
@Override
public Clustering<M> run(Database database, Relation<V> relation) {
  MutableProgress prog = LOG.isVerbose() ? new MutableProgress("X-means number of clusters", k_max, LOG) : null;
  // Run initial k-means to find at least k_min clusters
  innerKMeans.setK(k_min);
  LOG.statistics(new StringStatistic(KEY + ".initialization", initializer.toString()));
  splitInitializer.setInitialMeans(initializer.chooseInitialMeans(database, relation, k_min, getDistanceFunction()));
  Clustering<M> clustering = innerKMeans.run(database, relation);
  if(prog != null) {
    prog.setProcessed(k_min, LOG);
  }
  ArrayList<Cluster<M>> clusters = new ArrayList<>(clustering.getAllClusters());
  while(clusters.size() <= k_max) {
    // Improve-Structure:
    ArrayList<Cluster<M>> nextClusters = new ArrayList<>();
    for(Cluster<M> cluster : clusters) {
      // Try to split this cluster:
      List<Cluster<M>> childClusterList = splitCluster(cluster, database, relation);
      nextClusters.addAll(childClusterList);
      if(childClusterList.size() > 1) {
        // Track the current cluster count and keep the progress bar valid.
        k += childClusterList.size() - 1;
        if(prog != null) {
          if(k >= k_max) {
            prog.setTotal(k + 1);
          }
          prog.setProcessed(k, LOG);
        }
      }
    }
    if(clusters.size() == nextClusters.size()) {
      // No cluster was split: structure is stable.
      break;
    }
    // Improve-Params:
    splitInitializer.setInitialClusters(nextClusters);
    innerKMeans.setK(nextClusters.size());
    clustering = innerKMeans.run(database, relation);
    clusters.clear();
    clusters.addAll(clustering.getAllClusters());
  }
  // Ensure that the progress bar finished.
  if(prog != null) {
    prog.setTotal(k);
    prog.setProcessed(k, LOG);
  }
  return new Clustering<>("X-Means Result", "X-Means", clusters);
}
java
/**
 * Try to split a cluster into two using the inner k-means, keeping the
 * split only when the information criterion improves.
 *
 * @param parentCluster Cluster to try to split
 * @param database Full database (used to build a cluster-local proxy)
 * @param relation Data relation
 * @return the two child clusters, or a list with only the parent cluster
 *         when the split is rejected or impossible
 */
protected List<Cluster<M>> splitCluster(Cluster<M> parentCluster, Database database, Relation<V> relation) {
  // Transform parent cluster into a clustering
  ArrayList<Cluster<M>> parentClusterList = new ArrayList<Cluster<M>>(1);
  parentClusterList.add(parentCluster);
  if(parentCluster.size() <= 1) {
    // Split is not possible
    return parentClusterList;
  }
  Clustering<M> parentClustering = new Clustering<>(parentCluster.getName(), parentCluster.getName(), parentClusterList);
  // Restrict the database view to the parent cluster's members.
  ProxyDatabase proxyDB = new ProxyDatabase(parentCluster.getIDs(), database);
  splitInitializer.setInitialMeans(splitCentroid(parentCluster, relation));
  innerKMeans.setK(2);
  Clustering<M> childClustering = innerKMeans.run(proxyDB);
  double parentEvaluation = informationCriterion.quality(parentClustering, getDistanceFunction(), relation);
  double childrenEvaluation = informationCriterion.quality(childClustering, getDistanceFunction(), relation);
  if(LOG.isDebugging()) {
    LOG.debug("parentEvaluation: " + parentEvaluation);
    LOG.debug("childrenEvaluation: " + childrenEvaluation);
  }
  // Check if split is an improvement:
  return informationCriterion.isBetter(parentEvaluation, childrenEvaluation) ? parentClusterList : childClustering.getAllClusters();
}
java
/**
 * Compute two candidate child centroids for splitting a cluster: the parent
 * centroid shifted by a random vector in both directions, with the shift
 * length scaled relative to the cluster radius.
 * <p>
 * NOTE(review): parentCentroid aliases the array returned by the model's
 * getMean() and is modified in place below — confirm the parent model is
 * not used afterwards.
 *
 * @param parentCluster Cluster to split
 * @param relation Data relation
 * @return two centroids: { mean - offset, mean + offset }
 */
protected double[][] splitCentroid(Cluster<? extends MeanModel> parentCluster, Relation<V> relation) {
  double[] parentCentroid = parentCluster.getModel().getMean();
  // Compute size of cluster/region
  double radius = 0.;
  for(DBIDIter it = parentCluster.getIDs().iter(); it.valid(); it.advance()) {
    double d = getDistanceFunction().distance(relation.get(it), DoubleVector.wrap(parentCentroid));
    radius = (d > radius) ? d : radius;
  }
  // Choose random vector
  Random random = rnd.getSingleThreadedRandom();
  final int dim = RelationUtil.dimensionality(relation);
  double[] randomVector = normalize(MathUtil.randomDoubleArray(dim, random));
  // Scale the unit vector to between 40% and 90% of the radius.
  timesEquals(randomVector, (.4 + random.nextDouble() * .5) * radius);
  // Get the new centroids
  for(int d = 0; d < dim; d++) {
    double a = parentCentroid[d], b = randomVector[d];
    parentCentroid[d] = a - b;
    randomVector[d] = a + b;
  }
  return new double[][] { parentCentroid, randomVector };
}
java
/**
 * Scan all features: tighten upper bounds cheaply first, and run the
 * expensive inner scan only for candidates whose bound may still exceed
 * the current outlier threshold omega_star.
 *
 * @param hf Hilbert features of the current curve
 * @param k0 Scan window parameter; at most min(2*k0, N-1) neighbors are used
 */
private void scan(HilbertFeatures hf, int k0) {
  final int mink0 = Math.min(2 * k0, capital_n - 1);
  if(LOG.isDebuggingFine()) {
    LOG.debugFine("Scanning with k0=" + k0 + " (" + mink0 + ")" + " N*=" + capital_n_star);
  }
  for(int i = 0; i < hf.pf.length; i++) {
    // Prune features that can no longer reach the threshold.
    if(hf.pf[i].ubound < omega_star) {
      continue;
    }
    if(hf.pf[i].lbound < hf.pf[i].ubound) {
      // Try the cheap upper bound before the full inner scan.
      double omega = hf.fastUpperBound(i);
      if(omega < omega_star) {
        hf.pf[i].ubound = omega;
      }
      else {
        int maxcount;
        // capital_n-1 instead of capital_n: all, except self
        if(hf.top.contains(hf.pf[i])) {
          maxcount = capital_n - 1;
        }
        else {
          maxcount = mink0;
        }
        innerScan(hf, i, maxcount);
      }
    }
    if(hf.pf[i].ubound > 0) {
      hf.updateOUT(i);
    }
    if(hf.pf[i].lbound > 0) {
      hf.updateWLB(i);
    }
    // Raise the threshold once n candidates with lower bounds are known.
    if(hf.wlb.size() >= n) {
      omega_star = Math.max(omega_star, hf.wlb.peek().lbound);
    }
  }
}
java
/**
 * Count the features in the output heap that are confirmed outliers: upper
 * bound at least omega_star and bounds converged. Stores the count in
 * n_star.
 *
 * @param h Hilbert features container
 */
private void trueOutliers(HilbertFeatures h) {
  n_star = 0;
  for(ObjectHeap.UnsortedIter<HilFeature> iter = h.out.unsortedIter(); iter.valid(); iter.advance()) {
    final HilFeature feat = iter.get();
    // Converged: upper and lower bound within 1E-10 of each other.
    if(feat.ubound >= omega_star && feat.ubound - feat.lbound < 1E-10) {
      n_star++;
    }
  }
}
java