code
stringlengths
73
34.1k
label
stringclasses
1 value
/**
 * Fold a single vector into the running statistics: increments the counter,
 * adds each component to the linear sum, and its square to the sum of squares.
 *
 * @param nv vector to aggregate; must match the dimensionality of {@code ls}
 */
protected void addToStatistics(NumberVector nv) {
  final int dim = nv.getDimensionality();
  assert (dim == ls.length);
  ++this.n;
  // Accumulate per-dimension linear sum and the scalar sum of squares.
  for(int d = 0; d < dim; d++) {
    final double val = nv.doubleValue(d);
    ls[d] += val;
    ss += val * val;
  }
}
java
/**
 * Merge another clustering feature into this one: adds the point count,
 * the linear sum vector, and the sum of squares.
 *
 * @param other clustering feature to absorb
 */
// NOTE(review): assumes other.ls has the same length as this.ls (per the
// element-wise add helper) — TODO confirm at call sites.
protected void addToStatistics(ClusteringFeature other) {
  n += other.n;
  VMath.plusEquals(ls, other.ls);
  ss += other.ss;
}
java
/**
 * Compute the squared Euclidean norm of the linear-sum vector,
 * i.e. the sum over all dimensions of (sum of values)^2.
 *
 * @return sum of squared component sums
 */
public double sumOfSquaresOfSums() {
  double total = 0.;
  for(double component : ls) {
    total += component * component;
  }
  return total;
}
java
/**
 * Compute the sum of squared components of a vector (its squared Euclidean norm).
 *
 * @param v input vector
 * @return sum of x_i^2 over all dimensions
 */
public static double sumOfSquares(NumberVector v) {
  final int dims = v.getDimensionality();
  double agg = 0;
  for(int i = 0; i < dims; i++) {
    final double val = v.doubleValue(i);
    agg += val * val;
  }
  return agg;
}
java
private static void insertionSort(int[] data, final int start, final int end, IntComparator comp) { // Classic insertion sort. for(int i = start + 1; i < end; i++) { final int cur = data[i]; int j = i - 1; while(j >= start) { final int pre = data[j]; if(comp.compare(cur, pre) >= 0) { break; } data[j + 1] = pre; --j; } data[j + 1] = cur; } }
java
/**
 * Compute a weighted population covariance matrix over the given object IDs.
 * Weights are derived from each object's distance to the centroid, scaled by
 * the maximum distance and the standard deviation of all distances.
 *
 * @param ids object IDs to process
 * @param relation data relation providing the vectors
 * @return the (population) covariance matrix
 */
@Override public double[][] processIds(DBIDs ids, Relation<? extends NumberVector> relation) {
  final int dim = RelationUtil.dimensionality(relation);
  final CovarianceMatrix cmat = new CovarianceMatrix(dim);
  final Centroid centroid = Centroid.make(relation, ids);
  // find maximum distance
  double maxdist = 0.0, stddev = 0.0;
  {
    // First pass: aggregate squared distances and track the maximum.
    for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
      double distance = weightDistance.distance(centroid, relation.get(iter));
      stddev += distance * distance;
      if(distance > maxdist) {
        maxdist = distance;
      }
    }
    // Guard against all-identical points: avoid division by zero downstream.
    if(maxdist == 0.0) {
      maxdist = 1.0;
    }
    // compute standard deviation.
    stddev = FastMath.sqrt(stddev / ids.size());
  }
  // Second pass: accumulate the covariance matrix with per-object weights.
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    NumberVector obj = relation.get(iter);
    double distance = weightDistance.distance(centroid, obj);
    double weight = weightfunction.getWeight(distance, maxdist, stddev);
    cmat.put(obj, weight);
  }
  return cmat.destroyToPopulationMatrix();
}
java
@Override public double[][] processQueryResults(DoubleDBIDList results, Relation<? extends NumberVector> database, int k) { final int dim = RelationUtil.dimensionality(database); final CovarianceMatrix cmat = new CovarianceMatrix(dim); // avoid bad parameters k = k <= results.size() ? k : results.size(); // find maximum distance double maxdist = 0.0, stddev = 0.0; { int i = 0; for(DoubleDBIDListIter it = results.iter(); it.valid() && i < k; it.advance(), k++) { final double dist = it.doubleValue(); stddev += dist * dist; if(dist > maxdist) { maxdist = dist; } } if(maxdist == 0.0) { maxdist = 1.0; } stddev = FastMath.sqrt(stddev / k); } // calculate weighted PCA int i = 0; for(DoubleDBIDListIter it = results.iter(); it.valid() && i < k; it.advance(), k++) { final double dist = it.doubleValue(); NumberVector obj = database.get(it); double weight = weightfunction.getWeight(dist, maxdist, stddev); cmat.put(obj, weight); } return cmat.destroyToPopulationMatrix(); }
java
/**
 * Precompute filtered PCA results for every object, using its k nearest
 * Euclidean neighbors, and return an index instance backed by that storage.
 *
 * @param database database for query construction
 * @param relation relation to preprocess
 * @return instance holding one PCAFilteredResult per object
 */
public Instance instantiate(Database database, Relation<V> relation) {
  DistanceQuery<V> dq = database.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
  KNNQuery<V> knnq = database.getKNNQuery(dq, settings.k);
  // Temporary, hot storage: results are kept for the lifetime of the index only.
  WritableDataStore<PCAFilteredResult> storage = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, PCAFilteredResult.class);
  PCARunner pca = settings.pca;
  EigenPairFilter filter = settings.filter;
  Duration time = LOG.newDuration(this.getClass().getName() + ".preprocessing-time").begin();
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress(this.getClass().getName(), relation.size(), LOG) : null;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    // Run PCA on the kNN neighborhood, then filter eigenpairs into strong/weak.
    DoubleDBIDList ref = knnq.getKNNForDBID(iditer, settings.k);
    PCAResult pcares = pca.processQueryResult(ref, relation);
    storage.put(iditer, new PCAFilteredResult(pcares.getEigenPairs(), filter.filter(pcares.getEigenvalues()), 1., 0.));
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  LOG.statistics(time.end());
  return new Instance(relation.getDBIDs(), storage, relation);
}
java
/**
 * Serialize an XHTML DOM document to the given output stream with pretty
 * printing, UTF-8 encoding, and the XHTML transitional doctype.
 *
 * @param htmldoc document to write
 * @param out destination stream (flushed, not closed)
 * @throws IOException on write failure or transformer error (wrapped)
 */
public static void writeXHTML(Document htmldoc, OutputStream out) throws IOException {
  javax.xml.transform.Result result = new StreamResult(out);
  // Use a transformer for pretty printing
  Transformer xformer;
  try {
    xformer = TransformerFactory.newInstance().newTransformer();
    xformer.setOutputProperty(OutputKeys.INDENT, "yes");
    // TODO: ensure the "meta" tag doesn't claim a different encoding!
    xformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
    xformer.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC, HTML_XHTML_TRANSITIONAL_DOCTYPE_PUBLIC);
    xformer.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM, HTML_XHTML_TRANSITIONAL_DOCTYPE_SYSTEM);
    xformer.transform(new DOMSource(htmldoc), result);
  }
  catch(TransformerException e1) {
    // Wrap transformer failures as IOException to keep the signature simple.
    throw new IOException(e1);
  }
  out.flush();
}
java
/**
 * Append multi-line text to a DOM element, inserting a {@code <br>} element
 * between consecutive lines. Null or empty text is a no-op.
 *
 * @param htmldoc owner document used to create nodes
 * @param parent element to append to
 * @param text text to add; lines separated by '\n'
 * @return the parent element, for chaining
 */
public static Element appendMultilineText(Document htmldoc, Element parent, String text) {
  if(text == null) {
    return parent;
  }
  String[] lines = text.split("\n");
  if(lines.length == 0) {
    return parent;
  }
  // First line goes in directly; each further line is preceded by a <br>.
  parent.appendChild(htmldoc.createTextNode(lines[0]));
  for(int i = 1; i < lines.length; i++) {
    parent.appendChild(htmldoc.createElement(HTML_BR_TAG));
    parent.appendChild(htmldoc.createTextNode(lines[i]));
  }
  return parent;
}
java
/**
 * Count the number of components in this tree path, i.e. the length of the
 * parent chain from this node up to (and including) the root.
 *
 * @return number of path components, at least 1
 */
public int getPathCount() {
  int depth = 0;
  IndexTreePath<E> cursor = this;
  while(cursor != null) {
    depth++;
    cursor = cursor.parentPath;
  }
  return depth;
}
java
/**
 * Run the CBLOF outlier detection algorithm: cluster the data, split the
 * clusters into "large" and "small" by size, then score every object by its
 * cluster-based local outlier factor.
 *
 * @param database database (passed to the clustering algorithm)
 * @param relation relation of objects to score
 * @return outlier result with CBLOF scores
 */
public OutlierResult run(Database database, Relation<O> relation) {
  StepProgress stepprog = LOG.isVerbose() ? new StepProgress("CBLOF", 3) : null;
  DBIDs ids = relation.getDBIDs();
  LOG.beginStep(stepprog, 1, "Computing clustering.");
  Clustering<MeanModel> clustering = clusteringAlgorithm.run(database);
  LOG.beginStep(stepprog, 2, "Computing boundary between large and small clusters.");
  List<? extends Cluster<MeanModel>> clusters = clustering.getAllClusters();
  Collections.sort(clusters, new Comparator<Cluster<MeanModel>>() {
    @Override public int compare(Cluster<MeanModel> o1, Cluster<MeanModel> o2) {
      // Sort in descending order by size
      return Integer.compare(o2.size(), o1.size());
    }
  });
  // Index of the last "large" cluster in the size-sorted list.
  int clusterBoundary = getClusterBoundary(relation, clusters);
  List<? extends Cluster<MeanModel>> largeClusters = clusters.subList(0, clusterBoundary + 1);
  List<? extends Cluster<MeanModel>> smallClusters = clusters.subList(clusterBoundary + 1, clusters.size());
  LOG.beginStep(stepprog, 3, "Computing Cluster-Based Local Outlier Factors (CBLOF).");
  WritableDoubleDataStore cblofs = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  DoubleMinMax cblofMinMax = new DoubleMinMax();
  computeCBLOFs(relation, distance, cblofs, cblofMinMax, largeClusters, smallClusters);
  LOG.setCompleted(stepprog);
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Cluster-Based Local Outlier Factor", "cblof-outlier", cblofs, ids);
  OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta(cblofMinMax.getMin(), cblofMinMax.getMax(), 0.0, Double.POSITIVE_INFINITY, 1.0);
  return new OutlierResult(scoreMeta, scoreResult);
}
java
private int getClusterBoundary(Relation<O> relation, List<? extends Cluster<MeanModel>> clusters) { int totalSize = relation.size(); int clusterBoundary = clusters.size() - 1; int cumulativeSize = 0; for(int i = 0; i < clusters.size() - 1; i++) { cumulativeSize += clusters.get(i).size(); // Given majority covered by large cluster if(cumulativeSize >= totalSize * alpha) { clusterBoundary = i; break; } // Relative difference in cluster size between two consecutive clusters if(clusters.get(i).size() / (double) clusters.get(i + 1).size() >= beta) { clusterBoundary = i; break; } } return clusterBoundary; }
java
/**
 * Compute CBLOF scores for all objects: members of large clusters are scored
 * against their own cluster mean; members of small clusters against the means
 * of all large clusters.
 *
 * @param relation data relation
 * @param distance distance function for scoring
 * @param cblofs output storage for scores
 * @param cblofMinMax tracker for the score range
 * @param largeClusters clusters classified as large
 * @param smallClusters clusters classified as small
 */
private void computeCBLOFs(Relation<O> relation, NumberVectorDistanceFunction<? super O> distance, WritableDoubleDataStore cblofs, DoubleMinMax cblofMinMax, List<? extends Cluster<MeanModel>> largeClusters, List<? extends Cluster<MeanModel>> smallClusters) {
  List<NumberVector> largeClusterMeans = new ArrayList<>(largeClusters.size());
  for(Cluster<MeanModel> largeCluster : largeClusters) {
    NumberVector mean = ModelUtil.getPrototypeOrCentroid(largeCluster.getModel(), relation, largeCluster.getIDs());
    // Collect large-cluster means for later use by small-cluster scoring.
    largeClusterMeans.add(mean);
    // Compute CBLOF scores for members of large clusters
    for(DBIDIter iter = largeCluster.getIDs().iter(); iter.valid(); iter.advance()) {
      double cblof = computeLargeClusterCBLOF(relation.get(iter), distance, mean, largeCluster);
      storeCBLOFScore(cblofs, cblofMinMax, cblof, iter);
    }
  }
  // Small-cluster members are scored against the nearest large cluster.
  for(Cluster<MeanModel> smallCluster : smallClusters) {
    for(DBIDIter iter = smallCluster.getIDs().iter(); iter.valid(); iter.advance()) {
      double cblof = computeSmallClusterCBLOF(relation.get(iter), distance, largeClusterMeans, smallCluster);
      storeCBLOFScore(cblofs, cblofMinMax, cblof, iter);
    }
  }
}
java
/**
 * Load and parse the XML generator specification file, with optional XML
 * Schema validation when the schema resource is available, and build the
 * generator object tree.
 *
 * @return configured generator
 * @throws AbortException on any parse, I/O, or configuration failure
 */
private GeneratorMain loadXMLSpecification() {
  try {
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    // Do not fetch external DTDs (avoids network access and XXE-style surprises).
    dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
    URL url = ClassLoader.getSystemResource(GENERATOR_SCHEMA_FILE);
    if(url != null) {
      try {
        Schema schema = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI).newSchema(url);
        dbf.setSchema(schema);
        dbf.setIgnoringElementContentWhitespace(true);
      }
      catch(Exception e) {
        // Validation is best-effort: fall back to non-validating parsing.
        LOG.warning("Could not set up XML Schema validation for specification file.", e);
      }
    }
    else {
      LOG.warning("Could not set up XML Schema validation for specification file.");
    }
    Document doc = dbf.newDocumentBuilder().parse(specfile);
    Node root = doc.getDocumentElement();
    if(TAG_DATASET.equals(root.getNodeName())) {
      GeneratorMain gen = new GeneratorMain();
      processElementDataset(gen, root);
      return gen;
    }
    else {
      throw new AbortException("Experiment specification has incorrect document element: " + root.getNodeName());
    }
  }
  catch(FileNotFoundException e) {
    throw new AbortException("Can't open specification file.", e);
  }
  catch(SAXException e) {
    throw new AbortException("Error parsing specification file.", e);
  }
  catch(IOException e) {
    throw new AbortException("IO Exception loading specification file.", e);
  }
  catch(ParserConfigurationException e) {
    throw new AbortException("Parser Configuration Error", e);
  }
}
java
/**
 * Process the root {@code <dataset>} element: read the seed and test-model
 * attributes, then dispatch to cluster / static-cluster child handlers.
 *
 * @param gen generator to configure
 * @param cur the dataset DOM node
 */
private void processElementDataset(GeneratorMain gen, Node cur) {
  // *** get parameters
  String seedstr = ((Element) cur).getAttribute(ATTR_SEED);
  // NOTE(review): this guard applies the XML seed only when clusterRandom is
  // NOT the default factory — verify the condition is not inverted; one would
  // expect the file's seed to apply when the user did not supply one.
  if(clusterRandom != RandomFactory.DEFAULT && seedstr != null && seedstr.length() > 0) {
    clusterRandom = new RandomFactory((long) (ParseUtil.parseIntBase10(seedstr) * sizescale));
  }
  String testmod = ((Element) cur).getAttribute(ATTR_TEST);
  if(testmod != null && testmod.length() > 0) {
    testAgainstModel = Boolean.valueOf(ParseUtil.parseIntBase10(testmod) != 0);
  }
  // TODO: check for unknown attributes.
  XMLNodeIterator iter = new XMLNodeIterator(cur.getFirstChild());
  while(iter.hasNext()) {
    Node child = iter.next();
    if(TAG_CLUSTER.equals(child.getNodeName())) {
      processElementCluster(gen, child);
    }
    else if(TAG_STATIC.equals(child.getNodeName())) {
      processElementStatic(gen, child);
    }
    else if(child.getNodeType() == Node.ELEMENT_NODE) {
      LOG.warning("Unknown element in XML specification file: " + child.getNodeName());
    }
  }
}
java
/**
 * Process a {@code <cluster>} element: read size, name and density attributes,
 * create the cluster with its own RNG, then dispatch to the per-distribution
 * and transformation child handlers.
 *
 * @param gen generator to add the cluster to
 * @param cur the cluster DOM node
 */
private void processElementCluster(GeneratorMain gen, Node cur) {
  int size = -1;
  double overweight = 1.0;
  String sizestr = ((Element) cur).getAttribute(ATTR_SIZE);
  if(sizestr != null && sizestr.length() > 0) {
    // Scale the nominal size by the global size factor.
    size = (int) (ParseUtil.parseIntBase10(sizestr) * sizescale);
  }
  String name = ((Element) cur).getAttribute(ATTR_NAME);
  String dcostr = ((Element) cur).getAttribute(ATTR_DENSITY);
  if(dcostr != null && dcostr.length() > 0) {
    overweight = ParseUtil.parseDouble(dcostr);
  }
  if(size < 0) {
    throw new AbortException("No valid cluster size given in specification file.");
  }
  if(name == null || name.length() == 0) {
    throw new AbortException("No cluster name given in specification file.");
  }
  // *** add new cluster object
  Random newRand = clusterRandom.getSingleThreadedRandom();
  GeneratorSingleCluster cluster = new GeneratorSingleCluster(name, size, overweight, newRand);
  // TODO: check for unknown attributes.
  XMLNodeIterator iter = new XMLNodeIterator(cur.getFirstChild());
  while(iter.hasNext()) {
    Node child = iter.next();
    if(TAG_UNIFORM.equals(child.getNodeName())) {
      processElementUniform(cluster, child);
    }
    else if(TAG_NORMAL.equals(child.getNodeName())) {
      processElementNormal(cluster, child);
    }
    else if(TAG_GAMMA.equals(child.getNodeName())) {
      processElementGamma(cluster, child);
    }
    else if(TAG_HALTON.equals(child.getNodeName())) {
      processElementHalton(cluster, child);
    }
    else if(TAG_ROTATE.equals(child.getNodeName())) {
      processElementRotate(cluster, child);
    }
    else if(TAG_TRANSLATE.equals(child.getNodeName())) {
      processElementTranslate(cluster, child);
    }
    else if(TAG_CLIP.equals(child.getNodeName())) {
      processElementClipping(cluster, child);
    }
    else if(child.getNodeType() == Node.ELEMENT_NODE) {
      LOG.warning("Unknown element in XML specification file: " + child.getNodeName());
    }
  }
  gen.addCluster(cluster);
}
java
private void processElementUniform(GeneratorSingleCluster cluster, Node cur) { double min = 0.0; double max = 1.0; String minstr = ((Element) cur).getAttribute(ATTR_MIN); if(minstr != null && minstr.length() > 0) { min = ParseUtil.parseDouble(minstr); } String maxstr = ((Element) cur).getAttribute(ATTR_MAX); if(maxstr != null && maxstr.length() > 0) { max = ParseUtil.parseDouble(maxstr); } // *** new uniform generator Random random = cluster.getNewRandomGenerator(); Distribution generator = new UniformDistribution(min, max, random); cluster.addGenerator(generator); // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator(cur.getFirstChild()); while(iter.hasNext()) { Node child = iter.next(); if(child.getNodeType() == Node.ELEMENT_NODE) { LOG.warning("Unknown element in XML specification file: " + child.getNodeName()); } } }
java
/**
 * Process a {@code <normal>} element: read mean/stddev (defaults 0 and 1)
 * and attach a normal distribution generator to the cluster.
 *
 * @param cluster cluster to extend
 * @param cur the normal DOM node
 */
private void processElementNormal(GeneratorSingleCluster cluster, Node cur) {
  double mean = 0.0;
  double stddev = 1.0;
  String meanstr = ((Element) cur).getAttribute(ATTR_MEAN);
  if(meanstr != null && meanstr.length() > 0) {
    mean = ParseUtil.parseDouble(meanstr);
  }
  String stddevstr = ((Element) cur).getAttribute(ATTR_STDDEV);
  if(stddevstr != null && stddevstr.length() > 0) {
    stddev = ParseUtil.parseDouble(stddevstr);
  }
  // *** New normal distribution generator
  Random random = cluster.getNewRandomGenerator();
  Distribution generator = new NormalDistribution(mean, stddev, random);
  cluster.addGenerator(generator);
  // TODO: check for unknown attributes.
  XMLNodeIterator iter = new XMLNodeIterator(cur.getFirstChild());
  while(iter.hasNext()) {
    Node child = iter.next();
    if(child.getNodeType() == Node.ELEMENT_NODE) {
      LOG.warning("Unknown element in XML specification file: " + child.getNodeName());
    }
  }
}
java
/**
 * Process a {@code <gamma>} element: read shape k and scale theta (defaults 1
 * and 1) and attach a gamma distribution generator to the cluster.
 *
 * @param cluster cluster to extend
 * @param cur the gamma DOM node
 */
private void processElementGamma(GeneratorSingleCluster cluster, Node cur) {
  double k = 1.0;
  double theta = 1.0;
  String kstr = ((Element) cur).getAttribute(ATTR_K);
  if(kstr != null && kstr.length() > 0) {
    k = ParseUtil.parseDouble(kstr);
  }
  String thetastr = ((Element) cur).getAttribute(ATTR_THETA);
  if(thetastr != null && thetastr.length() > 0) {
    theta = ParseUtil.parseDouble(thetastr);
  }
  // *** New gamma distribution generator (comment previously said "normal")
  Random random = cluster.getNewRandomGenerator();
  Distribution generator = new GammaDistribution(k, theta, random);
  cluster.addGenerator(generator);
  // TODO: check for unknown attributes.
  XMLNodeIterator iter = new XMLNodeIterator(cur.getFirstChild());
  while(iter.hasNext()) {
    Node child = iter.next();
    if(child.getNodeType() == Node.ELEMENT_NODE) {
      LOG.warning("Unknown element in XML specification file: " + child.getNodeName());
    }
  }
}
java
/**
 * Process a {@code <rotate>} element: read the two (1-based) axis numbers and
 * the rotation angle in degrees, validate them, and add the rotation to the
 * cluster (converted to 0-based axes and radians).
 *
 * @param cluster cluster to extend
 * @param cur the rotate DOM node
 */
private void processElementRotate(GeneratorSingleCluster cluster, Node cur) {
  int axis1 = 0;
  int axis2 = 0;
  double angle = 0.0;
  String a1str = ((Element) cur).getAttribute(ATTR_AXIS1);
  if(a1str != null && a1str.length() > 0) {
    axis1 = ParseUtil.parseIntBase10(a1str);
  }
  String a2str = ((Element) cur).getAttribute(ATTR_AXIS2);
  if(a2str != null && a2str.length() > 0) {
    axis2 = ParseUtil.parseIntBase10(a2str);
  }
  String anstr = ((Element) cur).getAttribute(ATTR_ANGLE);
  if(anstr != null && anstr.length() > 0) {
    angle = ParseUtil.parseDouble(anstr);
  }
  // Axes are specified 1-based in the XML; 0 means "not given".
  if(axis1 <= 0 || axis1 > cluster.getDim()) {
    throw new AbortException("Invalid axis1 number given in specification file.");
  }
  if(axis2 <= 0 || axis2 > cluster.getDim()) {
    throw new AbortException("Invalid axis2 number given in specification file.");
  }
  if(axis1 == axis2) {
    throw new AbortException("Invalid axis numbers given in specification file.");
  }
  // Add rotation to cluster.
  cluster.addRotation(axis1 - 1, axis2 - 1, Math.toRadians(angle));
  // TODO: check for unknown attributes.
  XMLNodeIterator iter = new XMLNodeIterator(cur.getFirstChild());
  while(iter.hasNext()) {
    Node child = iter.next();
    if(child.getNodeType() == Node.ELEMENT_NODE) {
      LOG.warning("Unknown element in XML specification file: " + child.getNodeName());
    }
  }
}
java
private void processElementTranslate(GeneratorSingleCluster cluster, Node cur) { double[] offset = null; String vstr = ((Element) cur).getAttribute(ATTR_VECTOR); if(vstr != null && vstr.length() > 0) { offset = parseVector(vstr); } if(offset == null) { throw new AbortException("No translation vector given."); } // *** add new translation cluster.addTranslation(offset); // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator(cur.getFirstChild()); while(iter.hasNext()) { Node child = iter.next(); if(child.getNodeType() == Node.ELEMENT_NODE) { LOG.warning("Unknown element in XML specification file: " + child.getNodeName()); } } }
java
/**
 * Process a {@code <clip>} element: parse the mandatory min and max clipping
 * vectors and set them on the cluster.
 *
 * @param cluster cluster to extend
 * @param cur the clip DOM node
 */
private void processElementClipping(GeneratorSingleCluster cluster, Node cur) {
  double[] cmin = null, cmax = null;
  String minstr = ((Element) cur).getAttribute(ATTR_MIN);
  if(minstr != null && minstr.length() > 0) {
    cmin = parseVector(minstr);
  }
  String maxstr = ((Element) cur).getAttribute(ATTR_MAX);
  if(maxstr != null && maxstr.length() > 0) {
    cmax = parseVector(maxstr);
  }
  if(cmin == null || cmax == null) {
    throw new AbortException("No or incomplete clipping vectors given.");
  }
  // *** set clipping
  cluster.setClipping(cmin, cmax);
  // TODO: check for unknown attributes.
  XMLNodeIterator iter = new XMLNodeIterator(cur.getFirstChild());
  while(iter.hasNext()) {
    Node child = iter.next();
    if(child.getNodeType() == Node.ELEMENT_NODE) {
      LOG.warning("Unknown element in XML specification file: " + child.getNodeName());
    }
  }
}
java
private void processElementStatic(GeneratorMain gen, Node cur) { String name = ((Element) cur).getAttribute(ATTR_NAME); if(name == null) { throw new AbortException("No cluster name given in specification file."); } ArrayList<double[]> points = new ArrayList<>(); // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator(cur.getFirstChild()); while(iter.hasNext()) { Node child = iter.next(); if(TAG_POINT.equals(child.getNodeName())) { processElementPoint(points, child); } else if(child.getNodeType() == Node.ELEMENT_NODE) { LOG.warning("Unknown element in XML specification file: " + child.getNodeName()); } } // *** add new cluster object GeneratorStatic cluster = new GeneratorStatic(name, points); gen.addCluster(cluster); if(LOG.isVerbose()) { LOG.verbose("Loaded cluster " + cluster.name + " from specification."); } }
java
/**
 * Parse a whitespace-separated vector of doubles.
 *
 * FIX: the NumberFormatException cause was previously dropped; it is now
 * chained into the AbortException so the offending token can be diagnosed.
 *
 * @param s whitespace-separated numbers
 * @return parsed vector
 * @throws AbortException if any entry cannot be parsed
 */
private double[] parseVector(String s) {
  String[] entries = WHITESPACE_PATTERN.split(s);
  double[] d = new double[entries.length];
  for(int i = 0; i < entries.length; i++) {
    try {
      d[i] = ParseUtil.parseDouble(entries[i]);
    }
    catch(NumberFormatException e) {
      // Preserve the cause for diagnosis.
      throw new AbortException("Could not parse vector.", e);
    }
  }
  return d;
}
java
public static NumberVector getPrototype(Model model, Relation<? extends NumberVector> relation) { // Mean model contains a numeric Vector if(model instanceof MeanModel) { return DoubleVector.wrap(((MeanModel) model).getMean()); } // Handle medoid models if(model instanceof MedoidModel) { return relation.get(((MedoidModel) model).getMedoid()); } if(model instanceof PrototypeModel) { Object p = ((PrototypeModel<?>) model).getPrototype(); if(p instanceof NumberVector) { return (NumberVector) p; } return null; // Inconvertible } return null; }
java
/**
 * Determine a subspace preference vector using frequent itemset mining
 * (APRIORI) over the per-dimension neighborhood membership bit vectors:
 * the itemset of maximal cardinality (with support-based tie handling)
 * defines the preferred dimensions.
 *
 * @param relation data relation
 * @param neighborIDs per-dimension epsilon-neighborhood ID sets
 * @param msg debug message buffer, may be null
 * @return preference vector as a bitset over dimensions
 */
private long[] determinePreferenceVectorByApriori(Relation<V> relation, ModifiableDBIDs[] neighborIDs, StringBuilder msg) {
  int dimensionality = neighborIDs.length;
  // database for apriori
  UpdatableDatabase apriori_db = new HashmapDatabase();
  SimpleTypeInformation<?> bitmeta = VectorFieldTypeInformation.typeRequest(BitVector.class, dimensionality, dimensionality);
  for(DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
    // Encode, per object, which dimension neighborhoods contain it.
    long[] bits = BitsUtil.zero(dimensionality);
    boolean allFalse = true;
    for(int d = 0; d < dimensionality; d++) {
      if(neighborIDs[d].contains(it)) {
        BitsUtil.setI(bits, d);
        allFalse = false;
      }
    }
    if(!allFalse) {
      SingleObjectBundle oaa = new SingleObjectBundle();
      oaa.append(bitmeta, new BitVector(bits, dimensionality));
      apriori_db.insert(oaa);
    }
  }
  APRIORI apriori = new APRIORI(minpts);
  FrequentItemsetsResult aprioriResult = apriori.run(apriori_db);
  // result of apriori
  List<Itemset> frequentItemsets = aprioriResult.getItemsets();
  if(msg != null) {
    msg.append("\n Frequent itemsets: ").append(frequentItemsets);
  }
  int maxSupport = 0, maxCardinality = 0;
  long[] preferenceVector = BitsUtil.zero(dimensionality);
  for(Itemset itemset : frequentItemsets) {
    // NOTE(review): on equal cardinality this replaces only when the support
    // ties exactly (==); verify this is intended — one might expect
    // maxSupport < itemset.getSupport() to prefer higher support.
    if((maxCardinality < itemset.length()) || (maxCardinality == itemset.length() && maxSupport == itemset.getSupport())) {
      preferenceVector = Itemset.toBitset(itemset, BitsUtil.zero(dimensionality));
      maxCardinality = itemset.length();
      maxSupport = itemset.getSupport();
    }
  }
  if(msg != null) {
    msg.append("\n preference ") //
        .append(BitsUtil.toStringLow(preferenceVector, dimensionality)) //
        .append('\n');
    LOG.debugFine(msg.toString());
  }
  return preferenceVector;
}
java
/**
 * Determine a subspace preference vector by greedy maximum intersection:
 * start from the dimension with the largest neighborhood, then repeatedly add
 * the dimension whose neighborhood intersects the running set most, stopping
 * when the intersection drops below minpts.
 *
 * @param neighborIDs per-dimension epsilon-neighborhood ID sets
 * @param msg debug message buffer, may be null
 * @return preference vector as a bitset over dimensions
 */
private long[] determinePreferenceVectorByMaxIntersection(ModifiableDBIDs[] neighborIDs, StringBuilder msg) {
  int dimensionality = neighborIDs.length;
  long[] preferenceVector = BitsUtil.zero(dimensionality);
  // Candidate dimensions: those with neighborhoods larger than minpts.
  Map<Integer, ModifiableDBIDs> candidates = new HashMap<>(dimensionality);
  for(int i = 0; i < dimensionality; i++) {
    ModifiableDBIDs s_i = neighborIDs[i];
    if(s_i.size() > minpts) {
      candidates.put(i, s_i);
    }
  }
  if(msg != null) {
    msg.append("\n candidates ").append(candidates.keySet());
  }
  if(!candidates.isEmpty()) {
    // Seed with the largest candidate neighborhood.
    int i = max(candidates);
    ModifiableDBIDs intersection = candidates.remove(i);
    BitsUtil.setI(preferenceVector, i);
    while(!candidates.isEmpty()) {
      // maxIntersection shrinks 'intersection' in place to the best overlap.
      i = maxIntersection(candidates, intersection);
      candidates.remove(i);
      if(intersection.size() < minpts) {
        break;
      }
      BitsUtil.setI(preferenceVector, i);
    }
  }
  if(msg != null) {
    msg.append("\n preference ").append(BitsUtil.toStringLow(preferenceVector, dimensionality));
    LOG.debug(msg.toString());
  }
  return preferenceVector;
}
java
/**
 * Find the candidate dimension whose neighborhood set is largest.
 *
 * @param candidates map from dimension to neighborhood set
 * @return dimension with the largest set, or -1 if the map is empty
 */
private int max(Map<Integer, ModifiableDBIDs> candidates) {
  int bestDim = -1, bestSize = -1;
  // Iterate entries to avoid a lookup per key.
  for(Map.Entry<Integer, ModifiableDBIDs> entry : candidates.entrySet()) {
    final int sz = entry.getValue().size();
    if(sz > bestSize) {
      bestSize = sz;
      bestDim = entry.getKey();
    }
  }
  return bestDim;
}
java
/**
 * Find the candidate dimension whose neighborhood has the largest intersection
 * with the given set, and shrink {@code set} in place to that intersection.
 *
 * @param candidates map from dimension to neighborhood set
 * @param set running intersection (modified in place when a winner is found)
 * @return winning dimension, or -1 if the map is empty
 */
private int maxIntersection(Map<Integer, ModifiableDBIDs> candidates, ModifiableDBIDs set) {
  int bestDim = -1;
  ModifiableDBIDs bestIntersection = null;
  for(Map.Entry<Integer, ModifiableDBIDs> entry : candidates.entrySet()) {
    ModifiableDBIDs inter = DBIDUtil.intersection(set, entry.getValue());
    // First candidate always wins; afterwards keep the largest intersection.
    if(bestDim < 0 || bestIntersection.size() < inter.size()) {
      bestIntersection = inter;
      bestDim = entry.getKey();
    }
  }
  if(bestDim >= 0) {
    set.clear();
    set.addDBIDs(bestIntersection);
  }
  return bestDim;
}
java
/**
 * Build one single-dimension range query per dimension, used for
 * per-axis epsilon neighborhoods.
 *
 * @param relation data relation
 * @param dimensionality number of dimensions
 * @return array of range queries, one per dimension
 */
private RangeQuery<V>[] initRangeQueries(Relation<V> relation, int dimensionality) {
  // Safe: generic array creation of a raw RangeQuery[] only.
  @SuppressWarnings("unchecked")
  RangeQuery<V>[] rangeQueries = (RangeQuery<V>[]) new RangeQuery[dimensionality];
  for(int d = 0; d < dimensionality; d++) {
    rangeQueries[d] = relation.getRangeQuery(new PrimitiveDistanceQuery<>(relation, new OnedimensionalDistanceFunction(d)));
  }
  return rangeQueries;
}
java
/**
 * Compute the rank-correlation statistic (tau-style) from concordant and
 * discordant pair counts with a tie correction for within- and
 * between-cluster ties.
 *
 * @param c concordant pairs
 * @param d discordant pairs
 * @param m total number of pairs
 * @param wd within-cluster tie count basis
 * @param bd between-cluster tie count basis
 * @return correlation statistic
 */
@Reference(authors = "F. J. Rohlf", title = "Methods of comparing classifications", //
    booktitle = "Annual Review of Ecology and Systematics", //
    url = "https://doi.org/10.1146/annurev.es.05.110174.000533", //
    bibkey = "doi:10.1146/annurev.es.05.110174.000533")
public double computeTau(long c, long d, double m, long wd, long bd) {
  // Tie pairs: wd*(wd-1)/2 + bd*(bd-1)/2, computed in long arithmetic.
  // NOTE(review): may overflow long for very large wd/bd — TODO confirm ranges.
  double tie = (wd * (wd - 1) + bd * (bd - 1)) >>> 1;
  return (c - d) / FastMath.sqrt((m - tie) * m);
  // return (4. * c - m) / m;
}
java
/**
 * Enumerate all service property files on the classpath (both inside jars and
 * in plain directories) and run {@link #checkService} on each.
 *
 * FIX: {@code File.list()} returns {@code null} when the path is not a
 * directory or an I/O error occurs; the previous code passed that straight to
 * {@code Arrays.asList}, causing a NullPointerException. A null check guards
 * it now.
 *
 * @param update update mode flag forwarded to checkService
 */
public void checkServices(String update) {
  TreeSet<String> props = new TreeSet<>();
  Enumeration<URL> us;
  try {
    us = getClass().getClassLoader().getResources(ELKIServiceLoader.RESOURCE_PREFIX);
  }
  catch(IOException e) {
    throw new AbortException("Error enumerating service folders.", e);
  }
  while(us.hasMoreElements()) {
    URL u = us.nextElement();
    try {
      if(("jar".equals(u.getProtocol()))) {
        JarURLConnection con = (JarURLConnection) u.openConnection();
        try (JarFile jar = con.getJarFile()) {
          Enumeration<JarEntry> entries = jar.entries();
          while(entries.hasMoreElements()) {
            String prop = entries.nextElement().getName();
            if(prop.startsWith(ELKIServiceLoader.RESOURCE_PREFIX)) {
              props.add(prop.substring(ELKIServiceLoader.RESOURCE_PREFIX.length()));
            }
            else if(prop.startsWith(ELKIServiceLoader.FILENAME_PREFIX)) {
              props.add(prop.substring(ELKIServiceLoader.FILENAME_PREFIX.length()));
            }
          }
        }
        continue;
      }
      if("file".equals(u.getProtocol())) {
        // File.list() may return null (not a directory, or I/O error).
        String[] files = new File(u.toURI()).list();
        if(files != null) {
          props.addAll(Arrays.asList(files));
        }
      }
    }
    catch(IOException | URISyntaxException e) {
      throw new AbortException("Error enumerating service folders.", e);
    }
  }
  for(String prop : props) {
    if(LOG.isVerbose()) {
      LOG.verbose("Checking property: " + prop);
    }
    checkService(prop, update);
  }
}
java
/**
 * Cross-check the aliases declared for a class in the service file
 * ({@code parts[1..]}) against its {@code @Alias} annotation, warning about
 * both missing and extraneous aliases.
 *
 * @param parent service interface the class implements
 * @param classname implementation class name
 * @param parts service file tokens: class name followed by aliases
 */
@SuppressWarnings("unchecked")
private void checkAliases(Class<?> parent, String classname, String[] parts) {
  Class<?> c = ELKIServiceRegistry.findImplementation((Class<Object>) parent, classname);
  if(c == null) {
    return;
  }
  Alias ann = c.getAnnotation(Alias.class);
  if(ann == null) {
    // No annotation: any alias in the service file is extraneous.
    if(parts.length > 1) {
      StringBuilder buf = new StringBuilder(100) //
          .append("Class ").append(classname) //
          .append(" in ").append(parent.getCanonicalName()) //
          .append(" has the following extraneous aliases:");
      for(int i = 1; i < parts.length; i++) {
        buf.append(' ').append(parts[i]);
      }
      LOG.warning(buf);
    }
    return;
  }
  // Aliases listed in the service file.
  HashSet<String> aliases = new HashSet<String>();
  for(int i = 1; i < parts.length; i++) {
    aliases.add(parts[i]);
  }
  StringBuilder buf = null;
  // Annotated aliases not present in the file are "missing".
  for(String a : ann.value()) {
    if(!aliases.remove(a)) {
      if(buf == null) {
        buf = new StringBuilder(100) //
            .append("Class ").append(classname) //
            .append(" in ").append(parent.getCanonicalName()) //
            .append(" is missing the following aliases:");
      }
      buf.append(' ').append(a);
    }
  }
  // Remaining file aliases were not annotated: "extraneous".
  if(!aliases.isEmpty()) {
    buf = (buf == null ? new StringBuilder() : buf.append(FormatUtil.NEWLINE)) //
        .append("Class ").append(classname) //
        .append(" in ").append(parent.getCanonicalName()) //
        .append(" has the following extraneous aliases:");
    for(String a : aliases) {
      buf.append(' ').append(a);
    }
  }
  if(buf != null) {
    LOG.warning(buf);
  }
}
java
/**
 * Return the linear equation system, optionally transformed back into the
 * original (un-normalized) data space and solved.
 *
 * @param normalization normalization to undo, or {@code null} for none
 * @return solved, transformed system, or the raw system when no normalization
 * @throws NonNumericFeaturesException if the transformation fails
 */
public LinearEquationSystem getNormalizedLinearEquationSystem(Normalization<?> normalization) throws NonNumericFeaturesException {
  if(normalization == null) {
    return linearEquationSystem;
  }
  LinearEquationSystem transformed = normalization.transform(linearEquationSystem);
  transformed.solveByTotalPivotSearch();
  return transformed;
}
java
/**
 * Squared Euclidean distance of a point to the affine subspace spanned by the
 * strong eigenvectors through the centroid: the squared norm of the residual
 * after projecting the centered point onto the subspace.
 *
 * @param p query point
 * @return squared distance to the subspace
 */
public double squaredDistance(V p) {
  // V_affin = V + a
  // dist(p, V_affin) = d(p-a, V) = ||p - a - proj_V(p-a) ||
  double[] p_minus_a = minusEquals(p.toArray(), centroid);
  // Residual = centered point minus its projection onto the strong eigenspace.
  return squareSum(minusEquals(p_minus_a, times(strongEigenvectors, transposeTimes(strongEigenvectors, p_minus_a))));
}
java
/**
 * Project the centered point onto the weak eigenvector space: the component
 * of (p - centroid) not explained by the strong eigenvectors.
 *
 * @param p query point
 * @return error vector, or EMPTY_VECTOR if there are no weak eigenvectors
 */
public double[] errorVector(V p) {
  if(weakEigenvectors.length == 0) {
    return EMPTY_VECTOR;
  }
  double[] centered = minusEquals(p.toArray(), centroid);
  return times(weakEigenvectors, transposeTimes(weakEigenvectors, centered));
}
java
/**
 * Project the centered point onto the strong eigenvector space: the component
 * of (p - centroid) explained by the strong eigenvectors.
 *
 * @param p query point
 * @return data vector, or EMPTY_VECTOR if there are no strong eigenvectors
 */
public double[] dataVector(V p) {
  if(strongEigenvectors.length == 0) {
    return EMPTY_VECTOR;
  }
  double[] centered = minusEquals(p.toArray(), centroid);
  return times(strongEigenvectors, transposeTimes(strongEigenvectors, centered));
}
java
/**
 * Write this model to a text output stream: an optional label, the model
 * class, and the (currently un-normalized) linear equation system.
 *
 * @param out text output stream
 * @param label optional label to print first, may be null
 */
@Override public void writeToText(TextWriterStream out, String label) {
  if(label != null) {
    out.commentPrintLn(label);
  }
  out.commentPrintLn("Model class: " + this.getClass().getName());
  try {
    if(getNormalizedLinearEquationSystem(null) != null) {
      // TODO: more elegant way of doing normalization here?
      /*
       * if(out instanceof TextWriterStreamNormalizing) {
       * TextWriterStreamNormalizing<V> nout =
       * (TextWriterStreamNormalizing<V>) out; LinearEquationSystem lq =
       * getNormalizedLinearEquationSystem(nout.getNormalization());
       * out.commentPrint("Linear Equation System: ");
       * out.commentPrintLn(lq.equationsToString(nf)); } else {
       */
      LinearEquationSystem lq = getNormalizedLinearEquationSystem(null);
      out.commentPrint("Linear Equation System: ");
      out.commentPrintLn(lq.equationsToString(nf));
      // }
    }
  }
  catch(NonNumericFeaturesException e) {
    // Best effort output: log, do not propagate.
    LoggingUtil.exception(e);
  }
}
java
/**
 * Map a coordinate to its histogram bin index.
 *
 * @param coord coordinate; must be finite
 * @return bin index; the exact maximum maps to the last bin
 * @throws UnsupportedOperationException on NaN or infinite input
 */
protected int getBinNr(double coord) {
  if (Double.isInfinite(coord) || Double.isNaN(coord)) {
    throw new UnsupportedOperationException("Encountered non-finite value in Histogram: " + coord);
  }
  if (coord == max) {
    // Close the top bin on the right: max belongs to the last bin, not one past it.
    // System.err.println("Triggered special case: "+ (Math.floor((coord -
    // base) / binsize) + offset) + " vs. " + (size - 1));
    return size - 1;
  }
  return (int) Math.floor((coord - base) / binsize) + offset;
}
java
protected static int growSize(int current, int requiredSize) { // Double until 64, then increase by 50% each time. int newCapacity = ((current < 64) ? ((current + 1) << 1) : ((current >> 1) * 3)); // overflow? if (newCapacity < 0) { throw new OutOfMemoryError(); } if (requiredSize > newCapacity) { newCapacity = requiredSize; } return requiredSize; }
java
/**
 * Expand a pair of index nodes: dispatch to leaf or directory expansion,
 * then mark the pair as expanded in the tree.
 *
 * @param index the DeLiClu tree
 * @param distFunction spatial distance function
 * @param nodePair pair of directory entries to expand
 * @param knns precomputed kNN lists (used for leaf expansion only)
 */
private void expandNodes(DeLiCluTree index, SpatialPrimitiveDistanceFunction<V> distFunction, SpatialObjectPair nodePair, DataStore<KNNList> knns) {
  DeLiCluNode node1 = index.getNode(((SpatialDirectoryEntry) nodePair.entry1).getPageID());
  DeLiCluNode node2 = index.getNode(((SpatialDirectoryEntry) nodePair.entry2).getPageID());
  // Both nodes are on the same level, so checking one suffices.
  if(node1.isLeaf()) {
    expandLeafNodes(distFunction, node1, node2, knns);
  }
  else {
    expandDirNodes(distFunction, node1, node2);
  }
  index.setExpanded(nodePair.entry2, nodePair.entry1);
}
java
/**
 * Expand two directory nodes: enqueue all pairs of an unhandled child of
 * node1 with a handled child of node2, keyed by their minimum distance.
 *
 * @param distFunction spatial distance function
 * @param node1 node providing unhandled children
 * @param node2 node providing handled children
 */
private void expandDirNodes(SpatialPrimitiveDistanceFunction<V> distFunction, DeLiCluNode node1, DeLiCluNode node2) {
  if(LOG.isDebuggingFinest()) {
    LOG.debugFinest("ExpandDirNodes: " + node1.getPageID() + " + " + node2.getPageID());
  }
  int numEntries_1 = node1.getNumEntries();
  int numEntries_2 = node2.getNumEntries();
  // insert all combinations of unhandled - handled children of
  // node1-node2 into pq
  for(int i = 0; i < numEntries_1; i++) {
    DeLiCluEntry entry1 = node1.getEntry(i);
    if(!entry1.hasUnhandled()) {
      continue;
    }
    for(int j = 0; j < numEntries_2; j++) {
      DeLiCluEntry entry2 = node2.getEntry(j);
      if(!entry2.hasHandled()) {
        continue;
      }
      double distance = distFunction.minDist(entry1, entry2);
      heap.add(new SpatialObjectPair(distance, entry1, entry2, true));
    }
  }
}
java
private void expandLeafNodes(SpatialPrimitiveDistanceFunction<V> distFunction, DeLiCluNode node1, DeLiCluNode node2, DataStore<KNNList> knns) { if(LOG.isDebuggingFinest()) { LOG.debugFinest("ExpandLeafNodes: " + node1.getPageID() + " + " + node2.getPageID()); } int numEntries_1 = node1.getNumEntries(); int numEntries_2 = node2.getNumEntries(); // insert all combinations of unhandled - handled children of // node1-node2 into pq for(int i = 0; i < numEntries_1; i++) { DeLiCluEntry entry1 = node1.getEntry(i); if(!entry1.hasUnhandled()) { continue; } for(int j = 0; j < numEntries_2; j++) { DeLiCluEntry entry2 = node2.getEntry(j); if(!entry2.hasHandled()) { continue; } double distance = distFunction.minDist(entry1, entry2); double reach = MathUtil.max(distance, knns.get(((LeafEntry) entry2).getDBID()).getKNNDistance()); heap.add(new SpatialObjectPair(reach, entry1, entry2, false)); } } }
java
/**
 * Reinsert the components of an index path, recursing from the root entry.
 *
 * Collects all path components except the root into a list, then delegates
 * to the recursive overload starting at the root entry.
 *
 * @param distFunction spatial distance function
 * @param index the DeLiClu index
 * @param path path from the root to an entry
 * @param knns precomputed kNN lists
 */
private void reinsertExpanded(SpatialPrimitiveDistanceFunction<V> distFunction, DeLiCluTree index, IndexTreePath<DeLiCluEntry> path, DataStore<KNNList> knns) {
  int l = 0; // Count the number of components.
  for(IndexTreePath<DeLiCluEntry> it = path; it != null; it = it.getParentPath()) {
    l++;
  }
  ArrayList<IndexTreePath<DeLiCluEntry>> p = new ArrayList<>(l - 1);
  // All except the last (= root).
  IndexTreePath<DeLiCluEntry> it = path;
  for(; it.getParentPath() != null; it = it.getParentPath()) {
    p.add(it);
  }
  // it now references the root path component.
  assert (p.size() == l - 1);
  DeLiCluEntry rootEntry = it.getEntry();
  // l - 2 is the index of the deepest non-root component in p.
  reinsertExpanded(distFunction, index, p, l - 2, rootEntry, knns);
}
java
/**
 * Check that two spatial objects agree in dimensionality.
 *
 * @param box1 first object
 * @param box2 second object
 * @return the common dimensionality
 * @throws IllegalArgumentException when the dimensionalities differ
 */
public static int assertSameDimensionality(SpatialComparable box1, SpatialComparable box2) {
  final int d1 = box1.getDimensionality(), d2 = box2.getDimensionality();
  if(d1 == d2) {
    return d1;
  }
  throw new IllegalArgumentException("The spatial objects do not have the same dimensionality!");
}
java
/**
 * Extract the minimum corner of a spatial object as an array.
 *
 * @param box spatial object
 * @return array of per-dimension minima
 */
public static double[] getMin(SpatialComparable box) {
  final double[] result = new double[box.getDimensionality()];
  for(int d = 0; d < result.length; d++) {
    result[d] = box.getMin(d);
  }
  return result;
}
java
/**
 * Extract the maximum corner of a spatial object as an array.
 *
 * @param box spatial object
 * @return array of per-dimension maxima
 */
public static double[] getMax(SpatialComparable box) {
  final double[] result = new double[box.getDimensionality()];
  for(int d = 0; d < result.length; d++) {
    result[d] = box.getMax(d);
  }
  return result;
}
java
/**
 * Test whether two boxes intersect (touching counts as intersecting).
 *
 * @param box1 first box
 * @param box2 second box
 * @return true when the boxes overlap in every dimension
 */
public static boolean intersects(SpatialComparable box1, SpatialComparable box2) {
  final int dim = assertSameDimensionality(box1, box2);
  // Disjoint iff separated along at least one axis.
  for(int d = 0; d < dim; d++) {
    if(box2.getMax(d) < box1.getMin(d)) {
      return false; // box2 entirely below box1 in dimension d
    }
    if(box1.getMax(d) < box2.getMin(d)) {
      return false; // box1 entirely below box2 in dimension d
    }
  }
  return true;
}
java
/**
 * Test whether a box contains a point (boundary inclusive).
 *
 * @param box bounding box
 * @param point query point
 * @return true when the point lies inside or on the box boundary
 * @throws IllegalArgumentException on dimensionality mismatch
 */
public static boolean contains(SpatialComparable box, double[] point) {
  final int dim = box.getDimensionality();
  if(dim != point.length) {
    throw new IllegalArgumentException("This HyperBoundingBox and the given point need same dimensionality");
  }
  for(int d = 0; d < dim; d++) {
    final double v = point[d];
    if(box.getMin(d) > v || box.getMax(d) < v) {
      return false;
    }
  }
  return true;
}
java
/**
 * Compute the volume increase needed to enlarge an existing MBR to also
 * cover an additional object.
 *
 * @param exist existing MBR
 * @param addit object to be added
 * @return volume of the union MBR minus the volume of the existing MBR
 *         (non-negative)
 */
public static double enlargement(SpatialComparable exist, SpatialComparable addit) {
  final int dim = assertSameDimensionality(exist, addit);
  double v1 = 1.; // volume of the combined (union) MBR
  double v2 = 1.; // volume of the existing MBR
  for(int i = 0; i < dim; i++) {
    final double emin = exist.getMin(i);
    final double emax = exist.getMax(i);
    final double amin = addit.getMin(i);
    final double amax = addit.getMax(i);
    final double min = Math.min(emin, amin);
    final double max = Math.max(emax, amax);
    v1 *= (max - min);
    v2 *= (emax - emin);
  }
  // FIX: enlargement is union volume minus existing volume. The previous
  // "v2 - v1" was inverted and always yielded values <= 0, since the union
  // volume can never be smaller than the existing volume.
  return v1 - v2;
}
java
/**
 * Compute the "perimeter" of a box: the sum of its side lengths.
 *
 * @param box bounding box
 * @return sum of (max - min) over all dimensions
 */
public static double perimeter(SpatialComparable box) {
  final int dim = box.getDimensionality();
  double total = 0.;
  for(int d = 0; d < dim; d++) {
    total += box.getMax(d) - box.getMin(d);
  }
  return total;
}
java
public static double overlap(SpatialComparable box1, SpatialComparable box2) { final int dim = assertSameDimensionality(box1, box2); // the maximal and minimal value of the overlap box. double omax, omin; // the overlap volume double overlap = 1.; for(int i = 0; i < dim; i++) { // The maximal value of that overlap box in the current // dimension is the minimum of the max values. omax = Math.min(box1.getMax(i), box2.getMax(i)); // The minimal value is the maximum of the min values. omin = Math.max(box1.getMin(i), box2.getMin(i)); // if omax <= omin in any dimension, the overlap box has a volume of zero if(omax <= omin) { return 0.; } overlap *= omax - omin; } return overlap; }
java
public static double relativeOverlap(SpatialComparable box1, SpatialComparable box2) { final int dim = assertSameDimensionality(box1, box2); // the overlap volume double overlap = 1.; double vol1 = 1.; double vol2 = 1.; for(int i = 0; i < dim; i++) { final double box1min = box1.getMin(i); final double box1max = box1.getMax(i); final double box2min = box2.getMin(i); final double box2max = box2.getMax(i); final double omax = Math.min(box1max, box2max); final double omin = Math.max(box1min, box2min); // if omax <= omin in any dimension, the overlap box has a volume of zero if(omax <= omin) { return 0.; } overlap *= omax - omin; vol1 *= box1max - box1min; vol2 *= box2max - box2min; } return overlap / (vol1 + vol2); }
java
public static ModifiableHyperBoundingBox unionTolerant(SpatialComparable mbr1, SpatialComparable mbr2) { if(mbr1 == null && mbr2 == null) { return null; } if(mbr1 == null) { // Clone - intentionally return new ModifiableHyperBoundingBox(mbr2); } if(mbr2 == null) { // Clone - intentionally return new ModifiableHyperBoundingBox(mbr1); } return union(mbr1, mbr2); }
java
/**
 * Compute the center point of a spatial object.
 *
 * @param obj spatial object
 * @return per-dimension midpoints (min + max) / 2
 */
public static double[] centroid(SpatialComparable obj) {
  final int dim = obj.getDimensionality();
  final double[] mid = new double[dim];
  for(int d = 0; d < dim; d++) {
    mid[d] = (obj.getMax(d) + obj.getMin(d)) * .5;
  }
  return mid;
}
java
/**
 * Build a linear scaling that maps [min, max] onto [0, 1].
 *
 * @param min value to map to 0
 * @param max value to map to 1
 * @return linear scaling with factor 1/(max-min) and matching shift
 */
public static LinearScaling fromMinMax(double min, double max) {
  final double scale = 1.0 / (max - min);
  final double shift = -min * scale;
  return new LinearScaling(scale, shift);
}
java
/**
 * Compute the pair-counting F-measure.
 *
 * Uses pairconfuse[0] as true positives, [1] as false positives, and [2] as
 * false negatives.
 *
 * @param beta weighting of recall vs. precision
 * @return F-beta measure
 */
public double fMeasure(double beta) {
  final double b2 = beta * beta;
  final double weightedTP = (1 + b2) * pairconfuse[0];
  return weightedTP / (weightedTP + b2 * pairconfuse[1] + pairconfuse[2]);
}
java
/**
 * Write an object as a comment line, in "label=object" form; either part
 * may be absent.
 *
 * @param out output stream
 * @param label optional label, may be null
 * @param object object to serialize, may be null
 */
@Override
public void write(TextWriterStream out, String label, Object object) {
  final StringBuilder line = new StringBuilder(100);
  if(label != null) {
    line.append(label).append('=');
  }
  if(object != null) {
    line.append(object.toString());
  }
  out.commentPrintLn(line);
}
java
/**
 * Compute a symmetric matrix of -0.5 * squared distances between all pairs
 * of objects (diagonal stays zero) — presumably for MDS double-centering;
 * confirm against callers.
 *
 * @param col objects
 * @param dist distance function; if it already yields squared values
 *        ({@code isSquared()}), the extra squaring is skipped
 * @return symmetric matrix of -0.5 * d^2 values
 */
protected static <I> double[][] computeSquaredDistanceMatrix(final List<I> col, PrimitiveDistanceFunction<? super I> dist) {
  final int size = col.size();
  double[][] imat = new double[size][size];
  boolean squared = dist.isSquared();
  FiniteProgress dprog = LOG.isVerbose() ? new FiniteProgress("Computing distance matrix", (size * (size - 1)) >>> 1, LOG) : null;
  for(int x = 0; x < size; x++) {
    final I ox = col.get(x);
    for(int y = x + 1; y < size; y++) {
      final I oy = col.get(y);
      double distance = dist.distance(ox, oy);
      // Either -0.5 * d (d already squared), or -0.5 * d * d.
      distance *= squared ? -.5 : (-.5 * distance);
      imat[x][y] = imat[y][x] = distance;
    }
    if(dprog != null) {
      // The inner loop handled size - x - 1 pairs.
      dprog.setProcessed(dprog.getProcessed() + size - x - 1, LOG);
    }
  }
  LOG.ensureCompleted(dprog);
  return imat;
}
java
/**
 * Find the first outlier result below the given result.
 *
 * @param hier result hierarchy
 * @param result starting point
 * @return first OutlierResult found
 * @throws IllegalStateException when no outlier result exists
 */
private OutlierResult getOutlierResult(ResultHierarchy hier, Result result) {
  List<OutlierResult> found = ResultUtil.filterResults(hier, result, OutlierResult.class);
  if(found.isEmpty()) {
    throw new IllegalStateException("Comparison algorithm expected at least one outlier result.");
  }
  return found.get(0);
}
java
/**
 * Aggregate the kNN-distance polynomial approximations of all entries into
 * a single polynomial by summing coefficients and normalizing.
 *
 * @return aggregated polynomial approximation
 */
protected PolynomialApproximation knnDistanceApproximation() {
  int p_max = 0;
  double[] b = null;
  for(int i = 0; i < getNumEntries(); i++) {
    MkAppEntry entry = getEntry(i);
    PolynomialApproximation approximation = entry.getKnnDistanceApproximation();
    if(b == null) {
      // Coefficient count taken from the first entry's polynomial order.
      p_max = approximation.getPolynomialOrder();
      b = new double[p_max];
    }
    for(int p = 0; p < p_max; p++) {
      b[p] += approximation.getB(p);
    }
  }
  // NOTE(review): normalization divides by the polynomial order p_max, not
  // by the number of entries summed above — verify this is intended.
  for(int p = 0; p < p_max; p++) {
    b[p] /= p_max;
  }
  if(LoggingConfiguration.DEBUG) {
    StringBuilder msg = new StringBuilder();
    msg.append("b " + FormatUtil.format(b, FormatUtil.NF4));
    Logger.getLogger(this.getClass().getName()).fine(msg.toString());
  }
  return new PolynomialApproximation(b);
}
java
public void setDBIDs(DBIDs ids) { this.idrep.setDBIDs(ids); // Update relations. for (Relation<?> orel : this.relations) { if (orel instanceof ProxyView) { ((ProxyView<?>) orel).setDBIDs(this.idrep.getDBIDs()); } } }
java
/**
 * Get a snapshot of all live progress objects, pruning collected weak
 * references along the way.
 *
 * @return list of currently live Progress objects
 */
public synchronized Collection<Progress> getProgresses() {
  List<Progress> list = new ArrayList<>(progresses.size());
  Iterator<WeakReference<Progress>> iter = progresses.iterator();
  while(iter.hasNext()) {
    // FIX: dereference the weak reference only once. A second get() may
    // return null if the GC clears the referent between check and add,
    // which previously could insert null into the result list.
    final Progress p = iter.next().get();
    if(p == null) {
      iter.remove();
    }
    else {
      list.add(p);
    }
  }
  return list;
}
java
public synchronized void addProgress(Progress p) { // Don't add more than once. Iterator<WeakReference<Progress>> iter = progresses.iterator(); while(iter.hasNext()) { WeakReference<Progress> ref = iter.next(); // since we are at it anyway, remove old links. if(ref.get() == null) { iter.remove(); } else { if(ref.get() == p) { return; } } } progresses.add(new WeakReference<>(p)); }
java
/**
 * Mark the given object as handled, and update the aggregated
 * handled/unhandled flags on all ancestor entries.
 *
 * @param id object id
 * @param obj the object, used to locate its leaf entry
 * @return path from the root to the object's leaf entry
 * @throws AbortException when the object cannot be found in the tree
 */
public synchronized IndexTreePath<DeLiCluEntry> setHandled(DBID id, O obj) {
  if(LOG.isDebugging()) {
    LOG.debugFine("setHandled " + id + ", " + obj + "\n");
  }
  // find the leaf node containing o
  IndexTreePath<DeLiCluEntry> pathToObject = findPathToObject(getRootPath(), obj, id);
  if(pathToObject == null) {
    throw new AbortException("Object not found in setHandled.");
  }
  // set o handled
  DeLiCluEntry entry = pathToObject.getEntry();
  entry.setHasHandled(true);
  entry.setHasUnhandled(false);
  // Recompute aggregated flags bottom-up: a directory entry has
  // handled/unhandled descendants iff any of its children has.
  for(IndexTreePath<DeLiCluEntry> path = pathToObject; path.getParentPath() != null; path = path.getParentPath()) {
    DeLiCluEntry parentEntry = path.getParentPath().getEntry();
    DeLiCluNode node = getNode(parentEntry);
    boolean hasHandled = false;
    boolean hasUnhandled = false;
    for(int i = 0; i < node.getNumEntries(); i++) {
      final DeLiCluEntry nodeEntry = node.getEntry(i);
      hasHandled = hasHandled || nodeEntry.hasHandled();
      hasUnhandled = hasUnhandled || nodeEntry.hasUnhandled();
    }
    parentEntry.setHasUnhandled(hasUnhandled);
    parentEntry.setHasHandled(hasHandled);
  }
  return pathToObject;
}
java
@Override public final boolean delete(DBIDRef id) { // find the leaf node containing o O obj = relation.get(id); IndexTreePath<DeLiCluEntry> deletionPath = findPathToObject(getRootPath(), obj, id); if(deletionPath == null) { return false; } deletePath(deletionPath); return true; }
java
/**
 * Draw a sorted sample of maxk+1 values, each a uniform random value raised
 * to the power 1/dim.
 *
 * @param maxk maximum k; sample has maxk + 1 entries
 * @return sorted sample values
 */
protected double[] makeSample(int maxk) {
  final Random random = this.rnd.getSingleThreadedRandom();
  final double invdim = 1. / dim;
  final double[] sample = new double[maxk + 1];
  for(int i = 0; i < sample.length; i++) {
    sample[i] = FastMath.pow(random.nextDouble(), invdim);
  }
  Arrays.sort(sample);
  return sample;
}
java
/**
 * Set the routing object id of this entry.
 *
 * @param objectID new routing object id
 * @return true when the id actually changed
 */
@Override
public final boolean setRoutingObjectID(DBID objectID) {
  // No-op when it is the same reference or an equal DBID.
  final boolean unchanged = objectID == routingObjectID || DBIDUtil.equal(objectID, routingObjectID);
  if(unchanged) {
    return false;
  }
  routingObjectID = objectID;
  return true;
}
java
/**
 * Serialize this entry. The field order (id, routing object id, parent
 * distance, covering radius) must match the deserialization counterpart.
 *
 * @param out output to write to
 * @throws IOException on serialization errors
 */
@Override
public void writeExternal(ObjectOutput out) throws IOException {
  out.writeInt(id);
  // DBIDs are serialized via their integer representation.
  out.writeInt(DBIDUtil.asInteger(routingObjectID));
  out.writeDouble(parentDistance);
  out.writeDouble(coveringRadius);
}
java
/**
 * Serialize a TextWriteable object by delegating to the object's own
 * writeToText implementation.
 *
 * @param out output stream
 * @param label label to pass through
 * @param obj object to serialize
 */
@Override
public void write(TextWriterStream out, String label, TextWriteable obj) {
  obj.writeToText(out, label);
}
java
/**
 * Evaluate a clustering against a reference clustering and attach the score
 * result to the hierarchy.
 *
 * @param db database
 * @param c clustering to evaluate
 * @param refc reference clustering
 */
protected void evaluteResult(Database db, Clustering<?> c, Clustering<?> refc) {
  // Build the pair-counting contingency table of reference vs. result.
  final ClusterContingencyTable table = new ClusterContingencyTable(selfPairing, noiseSpecialHandling);
  table.process(refc, c);
  final ScoreResult score = new ScoreResult(table);
  score.addHeader(c.getLongName());
  db.getHierarchy().add(c, score);
}
java
private boolean isReferenceResult(Clustering<?> t) { // FIXME: don't hard-code strings return "bylabel-clustering".equals(t.getShortName()) // || "bymodel-clustering".equals(t.getShortName()) // || "allinone-clustering".equals(t.getShortName()) // || "allinnoise-clustering".equals(t.getShortName()); }
java
/**
 * Assign a page id and write the page to storage. Synchronized so that id
 * assignment and the write happen atomically.
 *
 * @param page page to write
 * @return the assigned page id
 */
@Override
public final synchronized int writePage(P page) {
  int pageid = setPageID(page);
  writePage(pageid, page);
  return pageid;
}
java
/**
 * Draw a Manhattan (L1) "sphere": connect the per-dimension extreme points
 * of the center +/- radius into diamonds, for each pair of visible
 * dimensions.
 *
 * @param svgp SVG plot
 * @param proj 2D projection
 * @param mid center vector
 * @param radius L1 radius
 * @return SVG path element
 */
public static Element drawManhattan(SVGPlot svgp, Projection2D proj, NumberVector mid, double radius) {
  final double[] v_mid = mid.toArray(); // a copy
  final long[] dims = proj.getVisibleDimensions2D();
  SVGPath path = new SVGPath();
  for(int dim = BitsUtil.nextSetBit(dims, 0); dim >= 0; dim = BitsUtil.nextSetBit(dims, dim + 1)) {
    // Project the two extreme points along this axis (center +r and -r).
    v_mid[dim] += radius;
    double[] p1 = proj.fastProjectDataToRenderSpace(v_mid);
    v_mid[dim] -= radius * 2;
    double[] p2 = proj.fastProjectDataToRenderSpace(v_mid);
    v_mid[dim] += radius; // restore the center coordinate
    for(int dim2 = BitsUtil.nextSetBit(dims, 0); dim2 >= 0; dim2 = BitsUtil.nextSetBit(dims, dim2 + 1)) {
      if(dim < dim2) {
        // Extreme points along the second axis.
        v_mid[dim2] += radius;
        double[] p3 = proj.fastProjectDataToRenderSpace(v_mid);
        v_mid[dim2] -= radius * 2;
        double[] p4 = proj.fastProjectDataToRenderSpace(v_mid);
        v_mid[dim2] += radius; // restore the center coordinate
        // Connect the four extreme points into a diamond.
        path.moveTo(p1[0], p1[1]).drawTo(p3[0], p3[1]) //
            .moveTo(p1[0], p1[1]).drawTo(p4[0], p4[1]) //
            .moveTo(p2[0], p2[1]).drawTo(p3[0], p3[1]) //
            .moveTo(p2[0], p2[1]).drawTo(p4[0], p4[1]) //
            .close();
      }
    }
  }
  return path.makeElement(svgp);
}
java
/**
 * Draw a cross: one line segment per visible dimension, from center+radius
 * to center-radius.
 *
 * @param svgp SVG plot
 * @param proj 2D projection
 * @param mid center vector
 * @param radius half length of each cross arm
 * @return SVG path element
 */
public static Element drawCross(SVGPlot svgp, Projection2D proj, NumberVector mid, double radius) {
  final double[] center = mid.toArray(); // working copy
  final long[] visible = proj.getVisibleDimensions2D();
  final SVGPath path = new SVGPath();
  for(int d = BitsUtil.nextSetBit(visible, 0); d >= 0; d = BitsUtil.nextSetBit(visible, d + 1)) {
    center[d] += radius;
    final double[] hi = proj.fastProjectDataToRenderSpace(center);
    center[d] -= radius * 2;
    final double[] lo = proj.fastProjectDataToRenderSpace(center);
    center[d] += radius; // restore the center coordinate
    path.moveTo(hi[0], hi[1]).drawTo(lo[0], lo[1]).close();
  }
  return path.makeElement(svgp);
}
java
@SuppressWarnings({ "cast", "unchecked" }) public static <O extends SpatialComparable> RangeQuery<O> getRangeQuery(AbstractRStarTree<?, ?, ?> tree, SpatialDistanceQuery<O> distanceQuery, Object... hints) { // Can we support this distance function - spatial distances only! SpatialPrimitiveDistanceFunction<? super O> df = distanceQuery.getDistanceFunction(); if(EuclideanDistanceFunction.STATIC.equals(df)) { return (RangeQuery<O>) new EuclideanRStarTreeRangeQuery<>(tree, (Relation<NumberVector>) distanceQuery.getRelation()); } return new RStarTreeRangeQuery<>(tree, distanceQuery.getRelation(), df); }
java
@SuppressWarnings({ "cast", "unchecked" }) public static <O extends SpatialComparable> KNNQuery<O> getKNNQuery(AbstractRStarTree<?, ?, ?> tree, SpatialDistanceQuery<O> distanceQuery, Object... hints) { // Can we support this distance function - spatial distances only! SpatialPrimitiveDistanceFunction<? super O> df = distanceQuery.getDistanceFunction(); if(EuclideanDistanceFunction.STATIC.equals(df)) { return (KNNQuery<O>) new EuclideanRStarTreeKNNQuery<>(tree, (Relation<NumberVector>) distanceQuery.getRelation()); } return new RStarTreeKNNQuery<>(tree, distanceQuery.getRelation(), df); }
java
private void privateReconfigureLogging(String pkg, final String name) { LogManager logManager = LogManager.getLogManager(); Logger logger = Logger.getLogger(LoggingConfiguration.class.getName()); // allow null as package name. if(pkg == null) { pkg = ""; } // Load logging configuration from current directory String cfgfile = name; if(new File(name).exists()) { cfgfile = name; } else { // Fall back to full path / resources. cfgfile = pkg.replace('.', File.separatorChar) + File.separatorChar + name; } try { InputStream cfgdata = openSystemFile(cfgfile); logManager.readConfiguration(cfgdata); // also load as properties for us, to get debug flag. InputStream cfgdata2 = openSystemFile(cfgfile); Properties cfgprop = new Properties(); cfgprop.load(cfgdata2); DEBUG = Boolean.parseBoolean(cfgprop.getProperty("debug")); logger.info("Logging configuration read."); } catch(FileNotFoundException e) { logger.log(Level.SEVERE, "Could not find logging configuration file: " + cfgfile, e); } catch(Exception e) { logger.log(Level.SEVERE, "Failed to configure logging from file: " + cfgfile, e); } }
java
/**
 * Open a file, trying the file system first and then the classpath.
 *
 * Fallback order: plain file, classloader resource stream, and finally a
 * direct URL connection (with caching disabled) for classloaders where
 * getResourceAsStream misbehaves.
 *
 * @param filename file name or resource path
 * @return an open input stream
 * @throws FileNotFoundException when neither location yields the file
 */
private static InputStream openSystemFile(String filename) throws FileNotFoundException {
  try {
    return new FileInputStream(filename);
  }
  catch(FileNotFoundException e) {
    // try with classloader
    // Resource paths always use '/', regardless of the OS separator.
    String resname = File.separatorChar != '/' ? filename.replace(File.separatorChar, '/') : filename;
    ClassLoader cl = LoggingConfiguration.class.getClassLoader();
    InputStream result = cl.getResourceAsStream(resname);
    if(result != null) {
      return result;
    }
    // Sometimes, URLClassLoader does not work right. Try harder:
    URL u = cl.getResource(resname);
    if(u == null) {
      throw e;
    }
    try {
      URLConnection conn = u.openConnection();
      conn.setUseCaches(false);
      result = conn.getInputStream();
      if(result != null) {
        return result;
      }
    }
    catch(IOException x) {
      throw e; // Throw original error instead.
    }
    throw e;
  }
}
java
/**
 * Adjust the verbosity of the top-level loggers.
 *
 * Lowering (more verbose): applied when the logger is unset or coarser than
 * the requested level. Raising (less verbose): only applied when the logger
 * currently sits at VERBOSE or VERYVERBOSE, so explicitly configured other
 * levels are not overridden.
 *
 * @param verbose requested verbosity level
 */
public static void setVerbose(java.util.logging.Level verbose) {
  if(verbose.intValue() <= Level.VERBOSE.intValue()) {
    // decrease to VERBOSE if it was higher, otherwise further to
    // VERYVERBOSE
    if(LOGGER_GLOBAL_TOP.getLevel() == null || LOGGER_GLOBAL_TOP.getLevel().intValue() > verbose.intValue()) {
      LOGGER_GLOBAL_TOP.setLevel(verbose);
    }
    if(LOGGER_ELKI_TOP.getLevel() == null || LOGGER_ELKI_TOP.getLevel().intValue() > verbose.intValue()) {
      LOGGER_ELKI_TOP.setLevel(verbose);
    }
  }
  else {
    // re-increase to given level if it was verbose or "very verbose".
    if(LOGGER_GLOBAL_TOP.getLevel() != null && (//
    Level.VERBOSE.equals(LOGGER_GLOBAL_TOP.getLevel()) || //
        Level.VERYVERBOSE.equals(LOGGER_GLOBAL_TOP.getLevel()) //
    )) {
      LOGGER_GLOBAL_TOP.setLevel(verbose);
    }
    if(LOGGER_ELKI_TOP.getLevel() != null && (//
    Level.VERBOSE.equals(LOGGER_ELKI_TOP.getLevel()) || //
        Level.VERYVERBOSE.equals(LOGGER_ELKI_TOP.getLevel()) //
    )) {
      LOGGER_ELKI_TOP.setLevel(verbose);
    }
  }
}
java
public static void setStatistics() { // decrease to INFO if it was higher if(LOGGER_GLOBAL_TOP.getLevel() == null || LOGGER_GLOBAL_TOP.getLevel().intValue() > Level.STATISTICS.intValue()) { LOGGER_GLOBAL_TOP.setLevel(Level.STATISTICS); } if(LOGGER_ELKI_TOP.getLevel() == null || LOGGER_ELKI_TOP.getLevel().intValue() > Level.STATISTICS.intValue()) { LOGGER_ELKI_TOP.setLevel(Level.STATISTICS); } if(LOGGER_TIME_TOP.getLevel() == null || LOGGER_TIME_TOP.getLevel().intValue() > Level.STATISTICS.intValue()) { LOGGER_TIME_TOP.setLevel(Level.STATISTICS); } }
java
/**
 * Replace the default CLI log handlers on the root logger with the given
 * handler.
 *
 * @param handler replacement handler to install
 */
public static void replaceDefaultHandler(Handler handler) {
  final Logger root = LogManager.getLogManager().getLogger("");
  // Strip all existing CLI handlers first.
  for(Handler existing : root.getHandlers()) {
    if(existing instanceof CLISmartHandler) {
      root.removeHandler(existing);
    }
  }
  addHandler(handler);
}
java
/**
 * Set the logging level of the top-level package logger.
 *
 * @param level new default level
 */
public static void setDefaultLevel(java.util.logging.Level level) {
  Logger.getLogger(TOPLEVEL_PACKAGE).setLevel(level);
}
java
/**
 * Convert an event's screen coordinates into the local coordinate system of
 * a reference SVG element.
 *
 * @param evt event; must be a DOMMouseEvent to be usable
 * @param reference target element; must be both SVGLocatable and SVGElement
 * @return {x, y} in the reference element's coordinates, or null when the
 *         arguments are not of the required types
 */
public static double[] getRelativeCoordinates(Event evt, Element reference) {
  if(evt instanceof DOMMouseEvent && reference instanceof SVGLocatable && reference instanceof SVGElement) {
    // Get the screen (pixel!) coordinates
    DOMMouseEvent gnme = (DOMMouseEvent) evt;
    // Invert the screen transformation matrix of the reference element.
    SVGMatrix mat = ((SVGLocatable) reference).getScreenCTM();
    SVGMatrix imat = mat.inverse();
    SVGPoint cPt = ((SVGElement) reference).getOwnerSVGElement().createSVGPoint();
    cPt.setX(gnme.getClientX());
    cPt.setY(gnme.getClientY());
    // Have Batik transform the screen (pixel!) coordinates into SVG element
    // coordinates
    cPt = cPt.matrixTransform(imat);
    return new double[] { cPt.getX(), cPt.getY() };
  }
  return null;
}
java
/**
 * Run BIRCH: build the CF-tree, then turn each leaf clustering feature into
 * a flat cluster by reassigning all points to their closest leaf.
 *
 * @param relation input data
 * @return one cluster per CF-tree leaf, with the leaf centroid as model
 */
public Clustering<MeanModel> run(Relation<NumberVector> relation) {
  final int dim = RelationUtil.dimensionality(relation);
  CFTree tree = cffactory.newTree(relation.getDBIDs(), relation);
  // The CFTree does not store points. We have to reassign them (and the
  // quality is better than if we used the initial assignment, because centers
  // move in particular in the beginning, so we always had many outliers.
  Map<ClusteringFeature, ModifiableDBIDs> idmap = new HashMap<ClusteringFeature, ModifiableDBIDs>(tree.leaves);
  for(DBIDIter iter = relation.iterDBIDs(); iter.valid(); iter.advance()) {
    // Assign each point to its (final) closest leaf.
    ClusteringFeature cf = tree.findLeaf(relation.get(iter));
    ModifiableDBIDs ids = idmap.get(cf);
    if(ids == null) {
      // Presize with the leaf's point count as capacity hint.
      idmap.put(cf, ids = DBIDUtil.newArray(cf.n));
    }
    ids.add(iter);
  }
  Clustering<MeanModel> result = new Clustering<>("BIRCH-leaves", "BIRCH leaves");
  // One cluster per leaf, using the leaf centroid as the cluster model.
  for(Map.Entry<ClusteringFeature, ModifiableDBIDs> ent : idmap.entrySet()) {
    ClusteringFeature leaf = ent.getKey();
    double[] center = new double[dim];
    for(int i = 0; i < dim; i++) {
      center[i] = leaf.centroid(i);
    }
    result.addToplevelCluster(new Cluster<>(ent.getValue(), new MeanModel(center)));
  }
  return result;
}
java
public OutlierResult run(Database database, Relation<O> relation) { // Get a nearest neighbor query on the relation. KNNQuery<O> knnq = QueryUtil.getKNNQuery(relation, getDistanceFunction(), k); // Output data storage WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_DB); // Track minimum and maximum scores DoubleMinMax minmax = new DoubleMinMax(); // Iterate over all objects for(DBIDIter iter = relation.iterDBIDs(); iter.valid(); iter.advance()) { KNNList neighbors = knnq.getKNNForDBID(iter, k); // Aggregate distances MeanVariance mv = new MeanVariance(); for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) { // Skip the object itself. The 0 is not very informative. if(DBIDUtil.equal(iter, neighbor)) { continue; } mv.put(neighbor.doubleValue()); } // Store score scores.putDouble(iter, mv.getSampleStddev()); } // Wrap the result in the standard containers // Actual min-max, theoretical min-max! OutlierScoreMeta meta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax(), 0, Double.POSITIVE_INFINITY); DoubleRelation rel = new MaterializedDoubleRelation(relation.getDBIDs(), "stddev-outlier", scores); return new OutlierResult(meta, rel); }
java
/**
 * Application entry point: when the first argument is not an option flag
 * and names a known AbstractApplication, dispatch to its main method with
 * the remaining arguments; otherwise run the default application.
 *
 * @param args command line arguments
 */
public static void main(String[] args) {
  if(args.length > 0 && args[0].charAt(0) != '-') {
    // First argument is not an option: try to resolve it as an application.
    Class<?> cls = ELKIServiceRegistry.findImplementation(AbstractApplication.class, args[0]);
    if(cls != null) {
      try {
        Method m = cls.getMethod("main", String[].class);
        // Forward the remaining arguments (the application name is consumed).
        Object a = Arrays.copyOfRange(args, 1, args.length);
        m.invoke(null, a);
      }
      catch(InvocationTargetException e) {
        // Unwrap, to report the application's own exception.
        LoggingUtil.exception(e.getCause());
      }
      catch(Exception e) {
        LoggingUtil.exception(e);
      }
      return;
    }
  }
  try {
    Method m = DEFAULT_APPLICATION.getMethod("main", String[].class);
    // Cast to Object, so the array is passed as a single vararg value.
    m.invoke(null, (Object) args);
  }
  catch(Exception e) {
    LoggingUtil.exception(e);
  }
}
java
/**
 * Find and instantiate the inner Parameterizer class of a given class.
 *
 * @param c class to inspect
 * @return a new Parameterizer instance, or null when none exists or it
 *         cannot be instantiated
 */
public static Parameterizer getParameterizer(Class<?> c) {
  for(Class<?> inner : c.getDeclaredClasses()) {
    if(Parameterizer.class.isAssignableFrom(inner)) {
      try {
        // Use Constructor.newInstance: Class.newInstance() is deprecated
        // and propagates checked exceptions from the constructor unwrapped.
        return inner.asSubclass(Parameterizer.class).getDeclaredConstructor().newInstance();
      }
      catch(Exception e) {
        LOG.warning("Non-usable Parameterizer in class: " + c.getName());
      }
    }
  }
  return null;
}
java
/**
 * Register an object and its record, keeping the dense element array free
 * of duplicates (identity comparison) and growing it on demand.
 *
 * @param obj object key
 * @param rec record to store
 */
private void putRec(O obj, Rec<O> rec) {
  graph.put(obj, rec);
  // Already tracked? Then the array is up to date.
  for(int i = 0; i < numelems; ++i) {
    if(elems[i] == obj) {
      return;
    }
  }
  if(numelems == elems.length) {
    // Grow by roughly doubling.
    elems = Arrays.copyOf(elems, (elems.length << 1) + 1);
  }
  elems[numelems++] = obj;
}
java
/**
 * Remove an object's record, and drop it from the dense element array while
 * preserving the order of the remaining elements.
 *
 * @param obj object to remove
 */
private void removeRec(O obj) {
  graph.remove(obj);
  for(int i = 0; i < numelems; ++i) {
    if(elems[i] != obj) {
      continue;
    }
    --numelems;
    // Shift the tail left by one, then clear the now-unused slot.
    System.arraycopy(elems, i + 1, elems, i, numelems - i);
    elems[numelems] = null;
    return;
  }
}
java
/**
 * Build the tree component of the popup, inheriting the popup's colors and
 * font, and wiring up the shared event handler.
 *
 * @return configured JTree
 */
protected JTree createTree() {
  final JTree result = new JTree(model);
  result.setName("TreePopup.tree");
  result.setFont(getFont());
  result.setForeground(getForeground());
  result.setBackground(getBackground());
  result.setBorder(null);
  result.setFocusable(true);
  result.addMouseListener(handler);
  result.addKeyListener(handler);
  result.setCellRenderer(new Renderer());
  return result;
}
java
/**
 * Build the scroll pane around the tree: vertical scrolling only, no
 * horizontal bar, not focusable.
 *
 * @return configured JScrollPane
 */
protected JScrollPane createScroller() {
  final JScrollPane pane = new JScrollPane(tree, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
  pane.setHorizontalScrollBar(null);
  pane.setName("TreePopup.scrollPane");
  pane.setFocusable(false);
  pane.getVerticalScrollBar().setFocusable(false);
  pane.setBorder(null);
  return pane;
}
java
/**
 * Show the popup directly below the given parent component, matching the
 * parent's width.
 *
 * @param parent component to attach the popup to
 */
public void show(Component parent) {
  Dimension parentSize = parent.getSize();
  Insets insets = getInsets();
  // reduce the width of the scrollpane by the insets so that the popup
  // is the same width as the combo box.
  // NOTE(review): the height is set to 10x the parent height — presumably
  // to show about ten rows; confirm against computePopupBounds behavior.
  parentSize.setSize(parentSize.width - (insets.right + insets.left), 10 * parentSize.height);
  Dimension scrollSize = computePopupBounds(parent, 0, getBounds().height, parentSize.width, parentSize.height).getSize();
  scroller.setMaximumSize(scrollSize);
  scroller.setPreferredSize(scrollSize);
  scroller.setMinimumSize(scrollSize);
  super.show(parent, 0, parent.getHeight());
  tree.requestFocusInWindow();
}
java
/**
 * Notify all registered action listeners of an event.
 *
 * @param event event to deliver
 */
protected void fireActionPerformed(ActionEvent event) {
  final Object[] pairs = listenerList.getListenerList();
  // The listener list stores (class, listener) pairs; walk it back to front.
  for(int i = pairs.length - 2; i >= 0; i -= 2) {
    if(pairs[i] == ActionListener.class) {
      ((ActionListener) pairs[i + 1]).actionPerformed(event);
    }
  }
}
java
/**
 * Store a value in the cache under the key "prefix.postfix".
 *
 * @param prefix key prefix
 * @param postfix key postfix
 * @param data value to cache
 */
private <T> void setCached(String prefix, String postfix, T data) {
  final String key = prefix + '.' + postfix;
  cache.put(key, data);
}
java
/**
 * Look up a property value from most to least specific: "prefix.postfix",
 * then each ancestor prefix (trimmed at '.'), and finally the bare
 * "postfix".
 *
 * @param prefix most specific context, segments separated by '.'
 * @param postfix property name
 * @return first value found, or null when none matches
 */
protected String getPropertyValue(String prefix, String postfix) {
  String ret = properties.getProperty(prefix + "." + postfix);
  if (ret != null) {
    // logger.debugFine("Found property: "+prefix + "." + postfix+" for "+prefix);
    return ret;
  }
  // Walk up the prefix hierarchy, dropping one trailing segment at a time.
  int pos = prefix.length();
  while (pos > 0) {
    pos = prefix.lastIndexOf('.', pos - 1);
    if (pos <= 0) {
      break;
    }
    ret = properties.getProperty(prefix.substring(0, pos) + '.' + postfix);
    if (ret != null) {
      // logger.debugFine("Found property: "+prefix.substring(0, pos) + "." + postfix+" for "+prefix);
      return ret;
    }
  }
  // Fall back to the unprefixed property.
  ret = properties.getProperty(postfix);
  if (ret != null) {
    // logger.debugFine("Found property: "+postfix+" for "+prefix);
    return ret;
  }
  return null;
}
java
/**
 * Guess a vector factory for the given type information: first the factory
 * attached to a VectorTypeInformation, then a reflective lookup of a static
 * FACTORY field on the restriction class.
 *
 * @param in type information
 * @return factory, or null when none could be determined
 */
@SuppressWarnings("unchecked")
public static <V extends NumberVector> NumberVector.Factory<V> guessFactory(SimpleTypeInformation<V> in) {
  NumberVector.Factory<V> factory = null;
  if(in instanceof VectorTypeInformation) {
    factory = (NumberVector.Factory<V>) ((VectorTypeInformation<V>) in).getFactory();
  }
  if(factory == null) {
    // FIXME: hack. Add factories to simple type information, too?
    try {
      Field f = in.getRestrictionClass().getField("FACTORY");
      factory = (NumberVector.Factory<V>) f.get(null);
    }
    catch(Exception e) {
      LoggingUtil.warning("Cannot determine factory for type " + in.getRestrictionClass(), e);
    }
  }
  return factory;
}
java
/**
 * Append one row of values, one value per column.
 *
 * @param data one value per column, in column order
 * @throws AbortException when the value count does not match the number of
 *         columns
 */
public void appendSimple(Object... data) {
  if(data.length != meta.size()) {
    throw new AbortException("Invalid number of attributes in 'append'.");
  }
  int i = 0;
  for(Object value : data) {
    @SuppressWarnings("unchecked")
    final List<Object> column = (List<Object>) columns.get(i++);
    column.add(value);
  }
}
java
/**
 * Materialize a streaming bundle source into a MultipleObjectsBundle.
 *
 * Processes META_CHANGED events by rebuilding/extending the column layout,
 * NEXT_OBJECT events by copying one row (and its DBID, when present), and
 * stops at END_OF_STREAM. DBIDs are only kept when every object had one.
 *
 * @param source stream source to drain
 * @return materialized bundle
 */
public static MultipleObjectsBundle fromStream(BundleStreamSource source) {
  MultipleObjectsBundle bundle = new MultipleObjectsBundle();
  boolean stop = false;
  DBIDVar var = null;
  ArrayModifiableDBIDs ids = null;
  int size = 0; // number of objects read
  while(!stop) {
    BundleStreamSource.Event ev = source.nextEvent();
    switch(ev){
    case END_OF_STREAM:
      stop = true;
      break;
    case META_CHANGED:
      BundleMeta smeta = source.getMeta();
      // rebuild bundle meta
      bundle.meta = new BundleMeta();
      for(int i = 0; i < bundle.columns.size(); i++) {
        bundle.meta.add(smeta.get(i));
      }
      // Append storage for any newly announced columns.
      for(int i = bundle.metaLength(); i < smeta.size(); i++) {
        List<Object> data = new ArrayList<>(bundle.dataLength() + 1);
        bundle.appendColumn(smeta.get(i), data);
      }
      // Lazily set up DBID collection on the first meta with DBIDs.
      if(var == null && source.hasDBIDs()) {
        var = DBIDUtil.newVar();
        ids = DBIDUtil.newArray();
      }
      continue;
    case NEXT_OBJECT:
      if(var != null && source.assignDBID(var)) {
        ids.add(var);
      }
      // Copy one value per column into the bundle.
      for(int i = 0; i < bundle.metaLength(); i++) {
        @SuppressWarnings("unchecked")
        final List<Object> col = (List<Object>) bundle.columns.get(i);
        col.add(source.data(i));
      }
      ++size;
      continue;
    default:
      LoggingUtil.warning("Unknown event: " + ev);
      continue;
    }
  }
  if(ids != null) {
    // Only keep DBIDs when every object received one.
    if(size != ids.size()) {
      LOG.warning("Not every object had an DBID - discarding DBIDs: " + size + " != " + ids.size());
    }
    else {
      bundle.setDBIDs(ids);
    }
  }
  return bundle;
}
java