code
stringlengths
73
34.1k
label
stringclasses
1 value
/**
 * Redistribute the entries during a node split: entries
 * sorting[0 .. splitPoint-1] remain in this node, the rest move to newNode.
 * Both nodes must be on the same level (leaf vs. directory).
 */
public final void splitTo(AbstractNode<E> newNode, List<E> sorting, int splitPoint) {
  assert (isLeaf() == newNode.isLeaf());
  deleteAllEntries();
  // Debug trace is only built when debug logging is globally enabled.
  StringBuilder msg = LoggingConfiguration.DEBUG ? new StringBuilder(1000) : null;
  for(int i = 0; i < splitPoint; i++) {
    addEntry(sorting.get(i));
    if(msg != null) {
      msg.append("n_").append(getPageID()).append(' ').append(sorting.get(i)).append('\n');
    }
  }
  for(int i = splitPoint; i < sorting.size(); i++) {
    newNode.addEntry(sorting.get(i));
    if(msg != null) {
      msg.append("n_").append(newNode.getPageID()).append(' ').append(sorting.get(i)).append('\n');
    }
  }
  if(msg != null) {
    Logging.getLogger(this.getClass().getName()).fine(msg.toString());
  }
}
java
/**
 * Ensure a clustering result is attached to the result hierarchy: if none is
 * present, attach a trivial by-label (or all-in-one) clustering.
 */
public static void ensureClusteringResult(final Database db, final Result result) {
  Collection<Clustering<?>> clusterings = ResultUtil.filterResults(db.getHierarchy(), result, Clustering.class);
  if(clusterings.isEmpty()) {
    ResultUtil.addChildResult(db, new ByLabelOrAllInOneClustering().run(db));
  }
}
java
/**
 * Convert an array-like object to a primitive double[] copy via the adapter.
 */
public static <A> double[] toPrimitiveDoubleArray(A data, NumberArrayAdapter<?, A> adapter) {
  // Fast path: the data already is a double[] — clone suffices.
  if(adapter == DoubleArrayAdapter.STATIC) {
    return ((double[]) data).clone();
  }
  final int size = adapter.size(data);
  final double[] copy = new double[size];
  for(int pos = 0; pos < size; pos++) {
    copy[pos] = adapter.getDouble(data, pos);
  }
  return copy;
}
java
/**
 * Flush both output writers; failures are reported to the ErrorManager
 * (never thrown), as required of logging handlers.
 */
@Override
public void flush() {
  for(Writer writer : new Writer[] { out, err }) {
    try {
      writer.flush();
    }
    catch(Exception ex) {
      reportError(null, ex, ErrorManager.FLUSH_FAILURE);
    }
  }
}
java
/**
 * Publish a log record. Progress records are rendered via the progress
 * tracker (completed progresses on own lines, active ones on one line);
 * other records are formatted by level (error / debug / message formatter)
 * and written to err (WARNING and above) or out.
 */
@Override
public void publish(final LogRecord record) {
  // determine destination
  final Writer destination;
  if(record.getLevel().intValue() >= Level.WARNING.intValue()) {
    destination = this.err;
  }
  else {
    destination = this.out;
  }
  // format
  final String m;
  // Progress records are handled specially.
  if(record instanceof ProgressLogRecord) {
    ProgressLogRecord prec = (ProgressLogRecord) record;
    ptrack.addProgress(prec.getProgress());
    Collection<Progress> completed = ptrack.removeCompleted();
    Collection<Progress> progresses = ptrack.getProgresses();
    StringBuilder buf = new StringBuilder();
    if(!completed.isEmpty()) {
      buf.append(OutputStreamLogger.CARRIAGE_RETURN);
      for(Progress prog : completed) {
        // TODO: use formatter, somehow?
        prog.appendToBuffer(buf);
        buf.append(OutputStreamLogger.NEWLINE);
      }
    }
    if(!progresses.isEmpty()) {
      boolean first = true;
      buf.append(OutputStreamLogger.CARRIAGE_RETURN);
      for(Progress prog : progresses) {
        if(first) {
          first = false;
        }
        else {
          buf.append(' ');
        }
        // TODO: use formatter, somehow?
        prog.appendToBuffer(buf);
      }
    }
    m = buf.toString();
  }
  else {
    // choose an appropriate formatter
    final Formatter fmt;
    // always format progress messages using the progress formatter.
    if(record.getLevel().intValue() >= Level.WARNING.intValue()) {
      // format errors using the error formatter
      fmt = errformat;
    }
    else if(record.getLevel().intValue() <= Level.FINE.intValue()) {
      // format debug statements using the debug formatter.
      fmt = debugformat;
    }
    else {
      // default to the message formatter.
      fmt = msgformat;
    }
    try {
      m = fmt.format(record);
    }
    catch(Exception ex) {
      reportError(null, ex, ErrorManager.FORMAT_FAILURE);
      return;
    }
  }
  // write
  try {
    destination.write(m);
    // always flush (although the streams should auto-flush already)
    destination.flush();
  }
  catch(Exception ex) {
    reportError(null, ex, ErrorManager.WRITE_FAILURE);
    return;
  }
}
java
private boolean checkForNaNs(NumberVector vec) { for(int i = 0, d = vec.getDimensionality(); i < d; i++) { double v = vec.doubleValue(i); if(v != v) { // NaN! return true; } } return false; }
java
/**
 * Guess a label-like string representation for the database, trying in
 * order: class labels, label lists, plain strings.
 *
 * @throws NoSupportedDataTypeException if no label-like relation exists
 */
public static Relation<String> guessLabelRepresentation(Database database) throws NoSupportedDataTypeException {
  try {
    Relation<? extends ClassLabel> classrep = database.getRelation(TypeUtil.CLASSLABEL);
    if(classrep != null) {
      return new ConvertToStringView(classrep);
    }
  }
  catch(NoSupportedDataTypeException e) {
    // retry.
  }
  try {
    Relation<? extends LabelList> labelsrep = database.getRelation(TypeUtil.LABELLIST);
    if(labelsrep != null) {
      return new ConvertToStringView(labelsrep);
    }
  }
  catch(NoSupportedDataTypeException e) {
    // retry.
  }
  try {
    Relation<String> stringrep = database.getRelation(TypeUtil.STRING);
    if(stringrep != null) {
      return stringrep;
    }
  }
  catch(NoSupportedDataTypeException e) {
    // retry.
  }
  throw new NoSupportedDataTypeException("No label-like representation was found.");
}
java
/**
 * Collect all object ids whose label matches the given pattern
 * (substring match via find(), not a full match).
 * Returns an empty list when the pattern is null.
 */
public static ArrayModifiableDBIDs getObjectsByLabelMatch(Database database, Pattern name_pattern) {
  Relation<String> relation = guessLabelRepresentation(database);
  if(name_pattern == null) {
    return DBIDUtil.newArray();
  }
  ArrayModifiableDBIDs ret = DBIDUtil.newArray();
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    if(name_pattern.matcher(relation.get(iditer)).find()) {
      ret.add(iditer);
    }
  }
  return ret;
}
java
/**
 * Serialize: superclass data first, then the conservative approximation.
 * Must be kept symmetric with readExternal.
 */
@Override
public void writeExternal(ObjectOutput out) throws IOException {
  super.writeExternal(out);
  out.writeObject(conservativeApproximation);
}
java
/**
 * Deserialize: superclass data first, then the conservative approximation.
 * Must be kept symmetric with writeExternal.
 */
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  super.readExternal(in);
  conservativeApproximation = (ApproximationLine) in.readObject();
}
java
/**
 * Update the reference-based outlier scores from the given reference
 * distances, keeping the minimum density seen per object.
 */
protected void updateDensities(WritableDoubleDataStore rbod_score, DoubleDBIDList referenceDists) {
  DoubleDBIDListIter it = referenceDists.iter();
  for(int l = 0; l < referenceDists.size(); l++) {
    double density = computeDensity(referenceDists, it, l);
    // computeDensity modified the iterator, reset:
    it.seek(l);
    // NaN indicates the first run.
    // Negated comparison also accepts NaN, so the first value is always stored.
    if(!(density > rbod_score.doubleValue(it))) {
      rbod_score.putDouble(it, density);
    }
  }
}
java
static void chooseRemaining(Relation<? extends NumberVector> relation, DBIDs ids, DistanceQuery<NumberVector> distQ, int k, List<NumberVector> means, WritableDoubleDataStore weights, double weightsum, Random random) { while(true) { if(weightsum > Double.MAX_VALUE) { throw new IllegalStateException("Could not choose a reasonable mean - too many data points, too large distance sum?"); } if(weightsum < Double.MIN_NORMAL) { LoggingUtil.warning("Could not choose a reasonable mean - to few data points?"); } double r = random.nextDouble() * weightsum; while(r <= 0 && weightsum > Double.MIN_NORMAL) { r = random.nextDouble() * weightsum; // Try harder to not choose 0. } DBIDIter it = ids.iter(); while(it.valid()) { if((r -= weights.doubleValue(it)) < 0) { break; } it.advance(); } if(!it.valid()) { // Rare case, but happens due to floating math weightsum -= r; // Decrease continue; // Retry } // Add new mean: final NumberVector newmean = relation.get(it); means.add(newmean); if(means.size() >= k) { break; } // Update weights: weights.putDouble(it, 0.); weightsum = updateWeights(weights, ids, newmean, distQ); } }
java
/**
 * Normalization factor for a dimension: the observed extent when positive,
 * otherwise the maximum (if positive), otherwise 1.
 */
private double factor(int dimension) {
  final double max = maxima[dimension], min = minima[dimension];
  if(max > min) {
    return max - min;
  }
  return max > 0 ? max : 1;
}
java
/**
 * Smoothed discrete derivative at position i of the vector, with boundary
 * positions clamped inward; 0 for one-dimensional vectors.
 */
protected double derivative(int i, NumberVector v) {
  final int dim = v.getDimensionality();
  if(dim == 1) {
    return 0.;
  }
  // Adjust for boundary conditions, as per the article:
  i = (i == 0) ? 1 : (i == dim - 1) ? dim - 2 : i;
  // Average of the backward difference and the central difference.
  return (v.doubleValue(i) - v.doubleValue(i - 1) + (v.doubleValue(i + 1) - v.doubleValue(i - 1)) * .5) * .5;
}
java
/**
 * Random split: pick two distinct routing entries uniformly at random,
 * compute every entry's distance to both, and delegate the assignment to
 * the distributor.
 */
@Override
public Assignments<E> split(AbstractMTree<?, N, E, ?> tree, N node) {
  final int n = node.getNumEntries();
  // Draw pos2 from n-1 slots and shift past pos1 to guarantee pos1 != pos2.
  int pos1 = random.nextInt(n), pos2 = random.nextInt(n - 1);
  pos2 = pos2 >= pos1 ? pos2 + 1 : pos2;
  // Build distance arrays:
  double[] dis1 = new double[n], dis2 = new double[n];
  E e1 = node.getEntry(pos1), e2 = node.getEntry(pos2);
  for(int i = 0; i < n; i++) {
    if(i == pos1 || i == pos2) {
      continue;
    }
    final E ej = node.getEntry(i);
    dis1[i] = tree.distance(e1, ej);
    dis2[i] = tree.distance(e2, ej);
  }
  return distributor.distribute(node, pos1, dis1, pos2, dis2);
}
java
/**
 * Validate a collection of CSS statements; true iff every key/value pair
 * passes checkCSSStatement.
 */
public static boolean checkCSSStatements(Collection<Pair<String, String>> statements) {
  for(Iterator<Pair<String, String>> it = statements.iterator(); it.hasNext();) {
    final Pair<String, String> stmt = it.next();
    if(!checkCSSStatement(stmt.getFirst(), stmt.getSecond())) {
      return false;
    }
  }
  return true;
}
java
/**
 * Look up the value of a CSS statement by key; null if absent.
 * Linear scan — statement lists are expected to be short.
 */
public String getStatement(String key) {
  for(Pair<String, String> stmt : statements) {
    if(stmt.getFirst().equals(key)) {
      return stmt.getSecond();
    }
  }
  return null;
}
java
/**
 * Set (value != null), update, or remove (value == null) a CSS statement.
 *
 * @throws InvalidCSS if the key/value pair is not a valid CSS statement
 */
public void setStatement(String key, String value) {
  if(value != null && !checkCSSStatement(key, value)) {
    throw new InvalidCSS("Invalid CSS statement.");
  }
  for(Pair<String, String> pair : statements) {
    if(pair.getFirst().equals(key)) {
      if(value != null) {
        pair.setSecond(value);
      }
      else {
        // Safe despite for-each: we return immediately after removal,
        // so the iterator is never advanced afterwards.
        statements.remove(pair);
      }
      return;
    }
  }
  if(value != null) {
    statements.add(new Pair<>(key, value));
  }
}
java
/**
 * Append this class's CSS definition block (".name{key:value;...}") to buf.
 */
public void appendCSSDefinition(StringBuilder buf) {
  buf.append("\n.").append(name).append('{');
  for(Pair<String, String> stmt : statements) {
    buf.append(stmt.getFirst()).append(':').append(stmt.getSecond()).append(";\n");
  }
  buf.append("}\n");
}
java
/**
 * Render the statements as an inline CSS string ("key:value;key:value;"),
 * e.g. for use in a style attribute.
 */
public String inlineCSS() {
  final StringBuilder result = new StringBuilder();
  for(Pair<String, String> stmt : statements) {
    result.append(stmt.getFirst()).append(':').append(stmt.getSecond()).append(';');
  }
  return result.toString();
}
java
/**
 * Fit every configured distribution estimator to dimension d of the column
 * data and return the distribution with the smallest Kolmogorov-Smirnov
 * deviation; test is reused as scratch space for the CDF values.
 */
protected Distribution findBestFit(final List<V> col, Adapter adapter, int d, double[] test) {
  // Shortcut: with a single estimator there is nothing to compare.
  if(estimators.size() == 1) {
    return estimators.get(0).estimate(col, adapter);
  }
  Distribution best = null;
  double bestq = Double.POSITIVE_INFINITY;
  trials: for(DistributionEstimator<?> est : estimators) {
    try {
      Distribution dist = est.estimate(col, adapter);
      // Transform the data through the fitted CDF; reject fits producing
      // NaN or infinite values.
      for(int i = 0; i < test.length; i++) {
        test[i] = dist.cdf(col.get(i).doubleValue(d));
        if(Double.isNaN(test[i])) {
          LOG.warning("Got NaN after fitting " + est + ": " + dist);
          continue trials;
        }
        if(Double.isInfinite(test[i])) {
          LOG.warning("Got infinite value after fitting " + est + ": " + dist);
          continue trials;
        }
      }
      Arrays.sort(test);
      double q = KolmogorovSmirnovTest.simpleTest(test);
      if(LOG.isVeryVerbose()) {
        LOG.veryverbose("Estimator " + est + " (" + dist + ") has maximum deviation " + q + " for dimension " + d);
      }
      if(best == null || q < bestq) {
        best = dist;
        bestq = q;
      }
    }
    catch(ArithmeticException e) {
      if(LOG.isVeryVerbose()) {
        LOG.veryverbose("Fitting distribution " + est + " failed: " + e.getMessage());
      }
      continue trials;
    }
  }
  if(LOG.isVerbose()) {
    LOG.verbose("Best fit for dimension " + d + ": " + best);
  }
  return best;
}
java
/**
 * Report whether every value in the column is exactly zero.
 */
protected boolean constantZero(List<V> column, Adapter adapter) {
  final int size = adapter.size(column);
  for(int idx = 0; idx < size; idx++) {
    if(adapter.get(column, idx) != 0.) {
      return false;
    }
  }
  return true;
}
java
/**
 * Configure a StreamTokenizer for ARFF parsing: commas act as whitespace,
 * digits/'-'/'.' are NOT parsed as numbers (read as words instead),
 * %-comments, quoting with ' and ", braces as ordinary chars (sparse
 * format), and significant end-of-line.
 */
private StreamTokenizer makeArffTokenizer(BufferedReader br) {
  // Setup tokenizer
  StreamTokenizer tokenizer = new StreamTokenizer(br);
  {
    tokenizer.resetSyntax();
    tokenizer.whitespaceChars(0, ' ');
    tokenizer.ordinaryChars('0', '9'); // Do not parse numbers
    tokenizer.ordinaryChar('-');
    tokenizer.ordinaryChar('.');
    tokenizer.wordChars(' ' + 1, '\u00FF');
    tokenizer.whitespaceChars(',', ',');
    tokenizer.commentChar('%');
    tokenizer.quoteChar('"');
    tokenizer.quoteChar('\'');
    tokenizer.ordinaryChar('{');
    tokenizer.ordinaryChar('}');
    tokenizer.eolIsSignificant(true);
  }
  return tokenizer;
}
java
/**
 * Create one output bundle column per run of equal target markers in targ:
 * consecutive input columns with the same marker are merged into a single
 * vector field / label list / external id / class label column.
 *
 * NOTE(review): the label lookups use names.get(out + i) although the run
 * of input columns starts at index in — presumably in == out holds for the
 * first columns, but after a merge these diverge; verify against callers.
 */
private void setupBundleHeaders(ArrayList<String> names, int[] targ, TypeInformation[] etyp, int[] dimsize, MultipleObjectsBundle bundle, boolean sparse) {
  for(int in = 0, out = 0; in < targ.length; out++) {
    // Find the end (nin) of the current run of equal markers:
    int nin = in + 1;
    for(; nin < targ.length; nin++) {
      if(targ[nin] != targ[in]) {
        break;
      }
    }
    if(TypeUtil.NUMBER_VECTOR_FIELD.equals(etyp[out])) {
      String[] labels = new String[dimsize[out]];
      // Collect labels:
      for(int i = 0; i < dimsize[out]; i++) {
        labels[i] = names.get(out + i);
      }
      if(!sparse) {
        VectorFieldTypeInformation<DoubleVector> type = new VectorFieldTypeInformation<>(DoubleVector.FACTORY, dimsize[out], labels);
        bundle.appendColumn(type, new ArrayList<DoubleVector>());
      }
      else {
        VectorFieldTypeInformation<SparseDoubleVector> type = new VectorFieldTypeInformation<>(SparseDoubleVector.FACTORY, dimsize[out], labels);
        bundle.appendColumn(type, new ArrayList<SparseDoubleVector>());
      }
    }
    else if(TypeUtil.LABELLIST.equals(etyp[out])) {
      // Merge the column names into one space-separated label.
      StringBuilder label = new StringBuilder(names.get(out));
      for(int i = 1; i < dimsize[out]; i++) {
        label.append(' ').append(names.get(out + i));
      }
      bundle.appendColumn(new SimpleTypeInformation<>(LabelList.class, label.toString()), new ArrayList<LabelList>());
    }
    else if(TypeUtil.EXTERNALID.equals(etyp[out])) {
      bundle.appendColumn(new SimpleTypeInformation<>(ExternalID.class, names.get(out)), new ArrayList<ExternalID>());
    }
    else if(TypeUtil.CLASSLABEL.equals(etyp[out])) {
      bundle.appendColumn(new SimpleTypeInformation<>(ClassLabel.class, names.get(out)), new ArrayList<ClassLabel>());
    }
    else {
      throw new AbortException("Unsupported type for column " + in + "->" + out + ": " + ((etyp[out] != null) ? etyp[out].toString() : "null"));
    }
    assert (out == bundle.metaLength() - 1);
    in = nin;
  }
}
java
/**
 * Skip comments and blank lines until the ARFF relation declaration is
 * found.
 *
 * @throws AbortException if the declaration is missing or an unexpected
 *         line is encountered first
 */
private void readHeader(BufferedReader br) throws IOException {
  String line;
  // Locate header line
  while(true) {
    line = br.readLine();
    if(line == null) {
      throw new AbortException(ARFF_HEADER_RELATION + " not found in file.");
    }
    // Skip comments and empty lines
    if(ARFF_COMMENT.reset(line).matches() || EMPTY.reset(line).matches()) {
      continue;
    }
    // Break on relation statement
    if(ARFF_HEADER_RELATION.reset(line).matches()) {
      break;
    }
    throw new AbortException("Expected relation declaration: " + line);
  }
}
java
/**
 * Advance the tokenizer and normalize the token type: quoted strings are
 * treated as plain words, and a bare "?" word becomes the '?' token
 * (ARFF missing value). Note: this rewrites the public ttype field.
 */
private void nextToken(StreamTokenizer tokenizer) throws IOException {
  tokenizer.nextToken();
  if((tokenizer.ttype == '\'') || (tokenizer.ttype == '"')) {
    tokenizer.ttype = StreamTokenizer.TT_WORD;
  }
  else if((tokenizer.ttype == StreamTokenizer.TT_WORD) && (tokenizer.sval.equals("?"))) {
    tokenizer.ttype = '?';
  }
  if(LOG.isDebugging()) {
    if(tokenizer.ttype == StreamTokenizer.TT_NUMBER) {
      LOG.debug("token: " + tokenizer.nval);
    }
    else if(tokenizer.ttype == StreamTokenizer.TT_WORD) {
      LOG.debug("token: " + tokenizer.sval);
    }
    else if(tokenizer.ttype == StreamTokenizer.TT_EOF) {
      LOG.debug("token: EOF");
    }
    else if(tokenizer.ttype == StreamTokenizer.TT_EOL) {
      LOG.debug("token: EOL");
    }
    else {
      LOG.debug("token type: " + tokenizer.ttype);
    }
  }
}
java
/**
 * Cut an OPTICS cluster order at reachability epsilon: consecutive points
 * with reachability <= epsilon form a cluster, all others become noise.
 * The predecessor of a "drop" below epsilon is pulled into the cluster.
 */
public static <E extends ClusterOrder> Clustering<Model> makeOPTICSCut(E co, double epsilon) {
  // Clustering model we are building
  Clustering<Model> clustering = new Clustering<>("OPTICS Cut Clustering", "optics-cut");
  // Collects noise elements
  ModifiableDBIDs noise = DBIDUtil.newHashSet();
  double lastDist = Double.MAX_VALUE;
  double actDist = Double.MAX_VALUE;
  // Current working set
  ModifiableDBIDs current = DBIDUtil.newHashSet();
  // TODO: can we implement this more nicely with a 1-lookahead?
  DBIDVar prev = DBIDUtil.newVar();
  for(DBIDIter it = co.iter(); it.valid(); prev.set(it), it.advance()) {
    lastDist = actDist;
    actDist = co.getReachability(it);
    if(actDist <= epsilon) {
      // the last element before the plot drops belongs to the cluster
      if(lastDist > epsilon && prev.isSet()) {
        // So un-noise it
        noise.remove(prev);
        // Add it to the cluster
        current.add(prev);
      }
      current.add(it);
    }
    else {
      // 'Finish' the previous cluster
      if(!current.isEmpty()) {
        // TODO: do we want a minpts restriction?
        // But we get have only core points guaranteed anyway.
        clustering.addToplevelCluster(new Cluster<Model>(current, ClusterModel.CLUSTER));
        current = DBIDUtil.newHashSet();
      }
      // Add to noise
      noise.add(it);
    }
  }
  // Any unfinished cluster will also be added
  if(!current.isEmpty()) {
    clustering.addToplevelCluster(new Cluster<Model>(current, ClusterModel.CLUSTER));
  }
  // Add noise
  clustering.addToplevelCluster(new Cluster<Model>(noise, true, ClusterModel.CLUSTER));
  return clustering;
}
java
/**
 * Build a LabelList from a collection of strings; the empty case shares
 * the EMPTY_LABELS singleton.
 */
public static LabelList make(Collection<String> labels) {
  if(labels.isEmpty()) {
    return EMPTY_LABELS;
  }
  return new LabelList(labels.toArray(new String[labels.size()]));
}
java
/**
 * Run DiSH on the given relation: compute the DiSH cluster order, then
 * extract the subspace clustering from it.
 *
 * @throws AbortException if mu is not smaller than the data set size
 */
public Clustering<SubspaceModel> run(Database db, Relation<V> relation) {
  // Sanity check: mu must be smaller than the data set size.
  if(mu >= relation.size()) {
    throw new AbortException("Parameter mu is chosen unreasonably large. This won't yield meaningful results.");
  }
  final DiSHClusterOrder order = new Instance(db, relation).run();
  if(LOG.isVerbose()) {
    LOG.verbose("Compute Clusters.");
  }
  return computeClusters(relation, order);
}
java
/**
 * Build the final DiSH subspace clustering from the cluster order:
 * extract clusters per preference vector, dissolve too-small clusters,
 * sort by subspace dimensionality, and build the cluster hierarchy.
 * Only hierarchy roots become top-level clusters of the result.
 */
private Clustering<SubspaceModel> computeClusters(Relation<V> database, DiSHClusterOrder clusterOrder) {
  final int dimensionality = RelationUtil.dimensionality(database);
  // extract clusters
  Object2ObjectOpenCustomHashMap<long[], List<ArrayModifiableDBIDs>> clustersMap = extractClusters(database, clusterOrder);
  logClusterSizes("Step 1: extract clusters", dimensionality, clustersMap);
  // check if there are clusters < minpts
  checkClusters(database, clustersMap);
  logClusterSizes("Step 2: check clusters", dimensionality, clustersMap);
  // sort the clusters
  List<Cluster<SubspaceModel>> clusters = sortClusters(database, clustersMap);
  if(LOG.isVerbose()) {
    StringBuilder msg = new StringBuilder("Step 3: sort clusters");
    for(Cluster<SubspaceModel> c : clusters) {
      msg.append('\n').append(BitsUtil.toStringLow(c.getModel().getSubspace().getDimensions(), dimensionality)).append(" ids ").append(c.size());
    }
    LOG.verbose(msg.toString());
  }
  // build the hierarchy
  Clustering<SubspaceModel> clustering = new Clustering<>("DiSH clustering", "dish-clustering");
  buildHierarchy(database, clustering, clusters, dimensionality);
  if(LOG.isVerbose()) {
    StringBuilder msg = new StringBuilder("Step 4: build hierarchy");
    for(Cluster<SubspaceModel> c : clusters) {
      msg.append('\n').append(BitsUtil.toStringLow(c.getModel().getSubspace().getDimensions(), dimensionality)).append(" ids ").append(c.size());
      for(It<Cluster<SubspaceModel>> iter = clustering.getClusterHierarchy().iterParents(c); iter.valid(); iter.advance()) {
        msg.append("\n parent ").append(iter.get());
      }
      for(It<Cluster<SubspaceModel>> iter = clustering.getClusterHierarchy().iterChildren(c); iter.valid(); iter.advance()) {
        msg.append("\n child ").append(iter.get());
      }
    }
    LOG.verbose(msg.toString());
  }
  // build result
  for(Cluster<SubspaceModel> c : clusters) {
    if(clustering.getClusterHierarchy().numParents(c) == 0) {
      clustering.addToplevelCluster(c);
    }
  }
  return clustering;
}
java
/**
 * Log the sizes of all clusters, grouped by preference vector, prefixed by
 * the message m. No-op unless verbose logging is enabled.
 */
private void logClusterSizes(String m, int dimensionality, Object2ObjectOpenCustomHashMap<long[], List<ArrayModifiableDBIDs>> clustersMap) {
  if(LOG.isVerbose()) {
    final StringBuilder msg = new StringBuilder(1000).append(m).append('\n');
    for(ObjectIterator<Object2ObjectMap.Entry<long[], List<ArrayModifiableDBIDs>>> iter = clustersMap.object2ObjectEntrySet().fastIterator(); iter.hasNext();) {
      Object2ObjectMap.Entry<long[], List<ArrayModifiableDBIDs>> entry = iter.next();
      msg.append(BitsUtil.toStringLow(entry.getKey(), dimensionality)).append(" sizes:");
      for(ArrayModifiableDBIDs c : entry.getValue()) {
        msg.append(' ').append(c.size());
      }
      msg.append('\n');
    }
    LOG.verbose(msg.toString());
  }
}
java
private List<Cluster<SubspaceModel>> sortClusters(Relation<V> relation, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) { final int db_dim = RelationUtil.dimensionality(relation); // int num = 1; List<Cluster<SubspaceModel>> clusters = new ArrayList<>(); for(long[] pv : clustersMap.keySet()) { List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv); for(int i = 0; i < parallelClusters.size(); i++) { ArrayModifiableDBIDs c = parallelClusters.get(i); Cluster<SubspaceModel> cluster = new Cluster<>(c); cluster.setModel(new SubspaceModel(new Subspace(pv), Centroid.make(relation, c).getArrayRef())); String subspace = BitsUtil.toStringLow(cluster.getModel().getSubspace().getDimensions(), db_dim); cluster.setName(parallelClusters.size() > 1 ? ("Cluster_" + subspace + "_" + i) : ("Cluster_" + subspace)); clusters.add(cluster); } } // sort the clusters w.r.t. lambda Comparator<Cluster<SubspaceModel>> comparator = new Comparator<Cluster<SubspaceModel>>() { @Override public int compare(Cluster<SubspaceModel> c1, Cluster<SubspaceModel> c2) { return c2.getModel().getSubspace().dimensionality() - c1.getModel().getSubspace().dimensionality(); } }; Collections.sort(clusters, comparator); return clusters; }
java
/**
 * Dissolve clusters smaller than mu: merge all zero-preference-vector
 * clusters into a noise cluster, and reassign too-small clusters to a
 * suitable parent subspace cluster (or noise if none is found).
 */
private void checkClusters(Relation<V> relation, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) {
  final int dimensionality = RelationUtil.dimensionality(relation);
  // check if there are clusters < minpts
  // and add them to not assigned
  List<Pair<long[], ArrayModifiableDBIDs>> notAssigned = new ArrayList<>();
  Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> newClustersMap = new Object2ObjectOpenCustomHashMap<>(BitsUtil.FASTUTIL_HASH_STRATEGY);
  Pair<long[], ArrayModifiableDBIDs> noise = new Pair<>(BitsUtil.zero(dimensionality), DBIDUtil.newArray());
  for(long[] pv : clustersMap.keySet()) {
    // noise
    if(BitsUtil.cardinality(pv) == 0) {
      List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv);
      for(ArrayModifiableDBIDs c : parallelClusters) {
        noise.second.addDBIDs(c);
      }
    }
    // clusters
    else {
      List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv);
      List<ArrayModifiableDBIDs> newParallelClusters = new ArrayList<>(parallelClusters.size());
      for(ArrayModifiableDBIDs c : parallelClusters) {
        if(!BitsUtil.isZero(pv) && c.size() < mu) {
          notAssigned.add(new Pair<>(pv, c));
        }
        else {
          newParallelClusters.add(c);
        }
      }
      newClustersMap.put(pv, newParallelClusters);
    }
  }
  clustersMap.clear();
  clustersMap.putAll(newClustersMap);
  // Reassign the too-small clusters to a parent cluster, or to noise:
  for(Pair<long[], ArrayModifiableDBIDs> c : notAssigned) {
    if(c.second.isEmpty()) {
      continue;
    }
    Pair<long[], ArrayModifiableDBIDs> parent = findParent(relation, c, clustersMap);
    (parent != null ? parent : noise).second.addDBIDs(c.second);
  }
  List<ArrayModifiableDBIDs> noiseList = new ArrayList<>(1);
  noiseList.add(noise.second);
  clustersMap.put(noise.first, noiseList);
}
java
/**
 * Find a parent cluster for the given (too small) child cluster: a cluster
 * whose preference vector is a strict superset-subspace of the child's
 * (fewer set bits, contained in the child's PV), with maximum cardinality,
 * and whose centroid is within 2*epsilon in the parent subspace.
 * Returns null if no suitable parent exists.
 */
private Pair<long[], ArrayModifiableDBIDs> findParent(Relation<V> relation, Pair<long[], ArrayModifiableDBIDs> child, Object2ObjectMap<long[], List<ArrayModifiableDBIDs>> clustersMap) {
  Centroid child_centroid = ProjectedCentroid.make(child.first, relation, child.second);
  Pair<long[], ArrayModifiableDBIDs> result = null;
  int resultCardinality = -1;
  long[] childPV = child.first;
  int childCardinality = BitsUtil.cardinality(childPV);
  for(long[] parentPV : clustersMap.keySet()) {
    int parentCardinality = BitsUtil.cardinality(parentPV);
    // Skip candidates that are not lower-dimensional, or no better than
    // the best parent found so far.
    if(parentCardinality >= childCardinality || (resultCardinality != -1 && parentCardinality <= resultCardinality)) {
      continue;
    }
    long[] pv = BitsUtil.andCMin(childPV, parentPV);
    if(BitsUtil.equal(pv, parentPV)) {
      List<ArrayModifiableDBIDs> parentList = clustersMap.get(parentPV);
      for(ArrayModifiableDBIDs parent : parentList) {
        NumberVector parent_centroid = ProjectedCentroid.make(parentPV, relation, parent);
        double d = weightedDistance(child_centroid, parent_centroid, parentPV);
        if(d <= 2 * epsilon) {
          result = new Pair<>(parentPV, parent);
          resultCardinality = parentCardinality;
          break;
        }
      }
    }
  }
  return result;
}
java
private int subspaceDimensionality(NumberVector v1, NumberVector v2, long[] pv1, long[] pv2, long[] commonPreferenceVector) { // number of zero values in commonPreferenceVector int subspaceDim = v1.getDimensionality() - BitsUtil.cardinality(commonPreferenceVector); // special case: v1 and v2 are in parallel subspaces if(BitsUtil.equal(commonPreferenceVector, pv1) || BitsUtil.equal(commonPreferenceVector, pv2)) { double d = weightedDistance(v1, v2, commonPreferenceVector); if(d > 2 * epsilon) { subspaceDim++; } } return subspaceDim; }
java
/**
 * Euclidean distance between two vectors, restricted to the dimensions set
 * in the weight (preference) vector.
 */
protected static double weightedDistance(NumberVector v1, NumberVector v2, long[] weightVector) {
  double agg = 0.;
  for(int d = BitsUtil.nextSetBit(weightVector, 0); d >= 0; d = BitsUtil.nextSetBit(weightVector, d + 1)) {
    final double delta = v1.doubleValue(d) - v2.doubleValue(d);
    agg += delta * delta;
  }
  return FastMath.sqrt(agg);
}
java
/**
 * Compute the population covariance matrix over the selected objects.
 */
@Override
public double[][] processIds(DBIDs ids, Relation<? extends NumberVector> database) {
  final CovarianceMatrix cmat = CovarianceMatrix.make(database, ids);
  return cmat.destroyToPopulationMatrix();
}
java
/**
 * Rebuild the bundle metadata: find the first column with variable-length
 * number vectors, remember it in the column field, and project its type to
 * a fixed-dimensionality vector field (of dim, or the minimum dimension).
 * All other columns are passed through unchanged.
 */
private void updateMeta() {
  meta = new BundleMeta();
  BundleMeta origmeta = source.getMeta();
  for(int i = 0; i < origmeta.size(); i++) {
    SimpleTypeInformation<?> type = origmeta.get(i);
    // column < 0 means we have not yet found the column to filter.
    if(column < 0) {
      // Test whether this type matches
      if(TypeUtil.NUMBER_VECTOR_VARIABLE_LENGTH.isAssignableFromType(type)) {
        if(type instanceof VectorFieldTypeInformation) {
          @SuppressWarnings("unchecked")
          final VectorFieldTypeInformation<V> castType = (VectorFieldTypeInformation<V>) type;
          if(dim != -1 && castType.mindim() > dim) {
            throw new AbortException("Would filter all vectors: minimum dimensionality " + castType.mindim() + " > desired dimensionality " + dim);
          }
          if(dim != -1 && castType.maxdim() < dim) {
            throw new AbortException("Would filter all vectors: maximum dimensionality " + castType.maxdim() + " < desired dimensionality " + dim);
          }
          if(dim == -1) {
            dim = castType.mindim();
          }
          // Already a fixed-width field: keep the type as-is.
          if(castType.mindim() == castType.maxdim()) {
            meta.add(castType);
            column = i;
            continue;
          }
        }
        @SuppressWarnings("unchecked")
        final VectorTypeInformation<V> castType = (VectorTypeInformation<V>) type;
        if(dim != -1) {
          meta.add(new VectorFieldTypeInformation<>(FilterUtil.guessFactory(castType), dim, dim, castType.getSerializer()));
        }
        else {
          LOG.warning("No dimensionality yet for column " + i);
          meta.add(castType);
        }
        column = i;
        continue;
      }
    }
    // Not the filtered column: pass the type through unchanged.
    meta.add(type);
  }
}
java
/**
 * Quantile function, taking a log-probability value.
 *
 * NOTE(review): this assumes MathUtil.log1mexp follows the R convention
 * log1mexp(a) = log(1 - exp(-a)), so that log1mexp(-val) = log(1 - p) for
 * val = log(p) <= 0 — verify against MathUtil before relying on this.
 */
public static double logquantile(double val, double loc, double scale) {
  return loc + scale * (val - MathUtil.log1mexp(-val));
}
java
/**
 * Log per-cluster statistics (name, size, noise flag, children) of a
 * clustering result. No-op unless statistics logging is enabled.
 */
public static <C extends Model> void logClusterSizes(Clustering<C> c) {
  if(!LOG.isStatistics()) {
    return;
  }
  final List<Cluster<C>> clusters = c.getAllClusters();
  final int numc = clusters.size();
  LOG.statistics(new StringStatistic(PREFIX + "name", c.getLongName()));
  LOG.statistics(new LongStatistic(PREFIX + "clusters", numc));
  Hierarchy<Cluster<C>> h = c.getClusterHierarchy();
  int cnum = 0;
  for(Cluster<C> clu : clusters) {
    final String p = PREFIX + "cluster-" + cnum + ".";
    if(clu.getName() != null) {
      LOG.statistics(new StringStatistic(p + "name", clu.getName()));
    }
    LOG.statistics(new LongStatistic(p + "size", clu.size()));
    if(clu.isNoise()) {
      LOG.statistics(new StringStatistic(p + "noise", "true"));
    }
    if(h.numChildren(clu) > 0) {
      // TODO: this only works if we have cluster names!
      StringBuilder buf = new StringBuilder();
      for(It<Cluster<C>> it = h.iterChildren(clu); it.valid(); it.advance()) {
        if(buf.length() > 0) {
          buf.append(", ");
        }
        buf.append(it.get().getName());
      }
      LOG.statistics(new StringStatistic(p + "children", buf.toString()));
    }
    // TODO: also log parents?
    ++cnum;
  }
}
java
/**
 * Add a dense unit to this subspace: record its dimensions in the subspace
 * dimension mask, keep the unit, and add its points to the coverage count.
 */
public void addDenseUnit(CLIQUEUnit unit) {
  final int numdim = unit.dimensionality();
  for(int d = 0; d < numdim; d++) {
    BitsUtil.setI(getDimensions(), unit.getDimension(d));
  }
  denseUnits.add(unit);
  coverage += unit.numberOfFeatureVectors();
}
java
/**
 * Determine all clusters in this subspace: each not-yet-assigned dense unit
 * seeds a new cluster, which is grown by depth-first traversal over
 * connected (neighboring) dense units.
 */
public List<Pair<Subspace, ModifiableDBIDs>> determineClusters() {
  List<Pair<Subspace, ModifiableDBIDs>> clusters = new ArrayList<>();
  for(CLIQUEUnit unit : denseUnits) {
    if(!unit.isAssigned()) {
      ModifiableDBIDs cluster = DBIDUtil.newHashSet();
      CLIQUESubspace model = new CLIQUESubspace(getDimensions());
      clusters.add(new Pair<Subspace, ModifiableDBIDs>(model, cluster));
      dfs(unit, cluster, model);
    }
  }
  return clusters;
}
java
/**
 * Depth-first search: add the unit's points to the cluster, mark the unit
 * as assigned, and recurse into unassigned left/right neighbors in every
 * dimension of this subspace.
 */
public void dfs(CLIQUEUnit unit, ModifiableDBIDs cluster, CLIQUESubspace model) {
  cluster.addDBIDs(unit.getIds());
  unit.markAsAssigned();
  model.addDenseUnit(unit);
  final long[] dims = getDimensions();
  for(int dim = BitsUtil.nextSetBit(dims, 0); dim >= 0; dim = BitsUtil.nextSetBit(dims, dim + 1)) {
    CLIQUEUnit left = leftNeighbor(unit, dim);
    if(left != null && !left.isAssigned()) {
      dfs(left, cluster, model);
    }
    CLIQUEUnit right = rightNeighbor(unit, dim);
    if(right != null && !right.isAssigned()) {
      dfs(right, cluster, model);
    }
  }
}
java
/**
 * Find a dense unit that is the left neighbor of the given unit in
 * dimension dim; null if none exists. Linear scan over the dense units.
 */
protected CLIQUEUnit leftNeighbor(CLIQUEUnit unit, int dim) {
  for(CLIQUEUnit candidate : denseUnits) {
    if(candidate.containsLeftNeighbor(unit, dim)) {
      return candidate;
    }
  }
  return null;
}
java
/**
 * Find a dense unit that is the right neighbor of the given unit in
 * dimension dim; null if none exists. Linear scan over the dense units.
 */
protected CLIQUEUnit rightNeighbor(CLIQUEUnit unit, int dim) {
  for(CLIQUEUnit candidate : denseUnits) {
    if(candidate.containsRightNeighbor(unit, dim)) {
      return candidate;
    }
  }
  return null;
}
java
/**
 * Collect the split histories of all directory entries of the node and
 * intersect them to obtain the dimensions all children were split in.
 *
 * @throws RuntimeException if an entry is not an XTreeDirectoryEntry
 */
private IntIterator getCommonSplitDimensions(N node) {
  Collection<SplitHistory> splitHistories = new ArrayList<>(node.getNumEntries());
  for(int i = 0; i < node.getNumEntries(); i++) {
    SpatialEntry entry = node.getEntry(i);
    if(!(entry instanceof XTreeDirectoryEntry)) {
      throw new RuntimeException("Wrong entry type to derive split dimension from: " + entry.getClass().getName());
    }
    splitHistories.add(((XTreeDirectoryEntry) entry).getSplitHistory());
  }
  return SplitHistory.getCommonDimensions(splitHistories);
}
java
/**
 * Minimum bounding rectangle of the node entries at positions
 * entries[from] .. entries[to-1] (to is exclusive).
 */
private HyperBoundingBox mbr(final int[] entries, final int from, final int to) {
  // Seed the box from the first entry, then grow it over the rest.
  final ModifiableHyperBoundingBox box = new ModifiableHyperBoundingBox(this.node.getEntry(entries[from]));
  for(int pos = from + 1; pos < to; pos++) {
    box.extend(this.node.getEntry(entries[pos]));
  }
  return box;
}
java
/**
 * Ensure the backing array can hold at least minsize elements, growing by
 * roughly 1.5x and preserving the first size elements.
 *
 * Fix: the 1.5x growth loop cannot make progress when store.length is 0 or
 * 1 (asize + (asize >>> 1) == asize), which previously looped forever.
 */
private void ensureSize(int minsize) {
  if(minsize <= store.length) {
    return;
  }
  int asize = store.length;
  if(asize < 2) {
    // Growth loop below would not terminate from 0 or 1; allocate directly.
    asize = minsize;
  }
  while(asize < minsize) {
    asize = (asize >>> 1) + asize;
  }
  final int[] prev = store;
  store = new int[asize];
  System.arraycopy(prev, 0, store, 0, size);
}
java
/**
 * Grow the backing array by roughly 50%, preserving the first size elements.
 *
 * Fix: for store.length <= 1, length + (length >>> 1) equals length, so the
 * array previously did not grow at all; we now grow by at least one slot.
 */
private void grow() {
  final int newsize = Math.max(store.length + (store.length >>> 1), store.length + 1);
  final int[] prev = store;
  store = new int[newsize];
  System.arraycopy(prev, 0, store, 0, size);
}
java
/**
 * Sum the probabilities p over all positions of di, skipping the id equal
 * to ignore.
 *
 * NOTE(review): the loop breaks (not continues) at the first non-positive
 * or NaN entry — presumably the remaining tail is known to be zero in the
 * caller's data layout; confirm before reuse.
 */
public static double sumOfProbabilities(DBIDIter ignore, DBIDArrayIter di, double[] p) {
  double s = 0;
  for(di.seek(0); di.valid(); di.advance()) {
    if(DBIDUtil.equal(ignore, di)) {
      continue;
    }
    final double v = p[di.getOffset()];
    if(!(v > 0)) {
      break;
    }
    s += v;
  }
  return s;
}
java
public synchronized static Task queue(Listener callback) { final Task task = new Task(callback); // TODO: synchronization? if(THREAD != null && THREAD.isAlive()) { THREAD.queue.add(task); return task; } THREAD = new ThumbnailThread(); THREAD.queue.add(task); THREAD.start(); return task; }
java
/**
 * Remove a pending task from the worker thread's queue.
 *
 * NOTE(review): THREAD is read without holding a lock before synchronizing
 * on it; a concurrent queue() may replace THREAD in between, in which case
 * the removal targets a stale thread's queue — confirm this is acceptable.
 */
public static void unqueue(Task task) {
  if(THREAD != null) {
    synchronized(THREAD) {
      THREAD.queue.remove(task);
    }
  }
}
java
/**
 * Begin a new step of this step progress and log it.
 * Entering step i means i-1 steps have been completed.
 */
public void beginStep(int step, String stepTitle, Logging logger) {
  setProcessed(step - 1);
  this.stepTitle = stepTitle;
  logger.progress(this);
}
java
protected static HyperBoundingBox computeBounds(NumberVector[] samples) { assert(samples.length > 0) : "Cannot compute bounding box of empty set."; // Compute bounds: final int dimensions = samples[0].getDimensionality(); final double[] min = new double[dimensions]; final double[] max = new double[dimensions]; NumberVector first = samples[0]; for(int d = 0; d < dimensions; d++) { min[d] = max[d] = first.doubleValue(d); } for(int i = 1; i < samples.length; i++) { NumberVector v = samples[i]; for(int d = 0; d < dimensions; d++) { final double c = v.doubleValue(d); min[d] = c < min[d] ? c : min[d]; max[d] = c > max[d] ? c : max[d]; } } return new HyperBoundingBox(min, max); }
java
/**
 * Materialize the k nearest neighbors of every object in the relation.
 *
 * Either uses one bulk kNN query (if enabled and supported), or queries each
 * object individually. In the individual case, for metric distances the kNN
 * list is shared across exact duplicates (distance 0), avoiding recomputation.
 */
@Override
protected void preprocess() {
  final Logging log = getLogger(); // Could be subclass
  createStorage();
  ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs());
  if(log.isStatistics()) {
    log.statistics(new LongStatistic(this.getClass().getName() + ".k", k));
  }
  // Track runtime only when statistics logging is enabled.
  Duration duration = log.isStatistics() ? log.newDuration(this.getClass().getName() + ".precomputation-time").begin() : null;
  FiniteProgress progress = getLogger().isVerbose() ? new FiniteProgress("Materializing k nearest neighbors (k=" + k + ")", ids.size(), getLogger()) : null;
  // Try bulk
  List<? extends KNNList> kNNList = null;
  if(usebulk) {
    // Bulk query: one query for all ids; result list is parallel to ids.
    kNNList = knnQuery.getKNNForBulkDBIDs(ids, k);
    if(kNNList != null) {
      int i = 0;
      for(DBIDIter id = ids.iter(); id.valid(); id.advance(), i++) {
        storage.put(id, kNNList.get(i));
        log.incrementProcessed(progress);
      }
    }
  }
  else {
    // In a metric space, duplicate points (distance 0) share the same kNN.
    final boolean ismetric = getDistanceQuery().getDistanceFunction().isMetric();
    for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
      if(ismetric && storage.get(iter) != null) {
        log.incrementProcessed(progress);
        continue; // Previously computed (duplicate point?)
      }
      KNNList knn = knnQuery.getKNNForDBID(iter, k);
      storage.put(iter, knn);
      if(ismetric) {
        // Share this kNN list with all zero-distance neighbors.
        for(DoubleDBIDListIter it = knn.iter(); it.valid() && it.doubleValue() == 0.; it.advance()) {
          storage.put(it, knn); // Reuse
        }
      }
      log.incrementProcessed(progress);
    }
  }
  log.ensureCompleted(progress);
  if(duration != null) {
    log.statistics(duration.end());
  }
}
java
protected void objectsInserted(DBIDs ids) { final Logging log = getLogger(); // Could be subclass StepProgress stepprog = log.isVerbose() ? new StepProgress(3) : null; ArrayDBIDs aids = DBIDUtil.ensureArray(ids); // materialize the new kNNs log.beginStep(stepprog, 1, "New insertions ocurred, materialize their new kNNs."); // Bulk-query kNNs List<? extends KNNList> kNNList = knnQuery.getKNNForBulkDBIDs(aids, k); // Store in storage DBIDIter iter = aids.iter(); for(int i = 0; i < aids.size(); i++, iter.advance()) { storage.put(iter, kNNList.get(i)); } // update the affected kNNs log.beginStep(stepprog, 2, "New insertions ocurred, update the affected kNNs."); ArrayDBIDs rkNN_ids = updateKNNsAfterInsertion(ids); // inform listener log.beginStep(stepprog, 3, "New insertions ocurred, inform listeners."); fireKNNsInserted(ids, rkNN_ids); log.setCompleted(stepprog); }
java
protected void objectsRemoved(DBIDs ids) { final Logging log = getLogger(); StepProgress stepprog = log.isVerbose() ? new StepProgress(3) : null; // delete the materialized (old) kNNs log.beginStep(stepprog, 1, "New deletions ocurred, remove their materialized kNNs."); for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { storage.delete(iter); } // update the affected kNNs log.beginStep(stepprog, 2, "New deletions ocurred, update the affected kNNs."); ArrayDBIDs rkNN_ids = updateKNNsAfterDeletion(ids); // inform listener log.beginStep(stepprog, 3, "New deletions ocurred, inform listeners."); fireKNNsRemoved(ids, rkNN_ids); log.ensureCompleted(stepprog); }
java
/**
 * Notify all registered KNNListeners about inserted objects.
 *
 * @param insertions ids of newly inserted objects
 * @param updates ids whose kNN lists were updated as a consequence
 */
protected void fireKNNsInserted(DBIDs insertions, DBIDs updates) {
  final KNNChangeEvent event = new KNNChangeEvent(this, KNNChangeEvent.Type.INSERT, insertions, updates);
  // Listener list stores [class, instance] pairs; iterate newest-first.
  final Object[] entries = listenerList.getListenerList();
  for(int pos = entries.length - 2; pos >= 0; pos -= 2) {
    if(entries[pos] == KNNListener.class) {
      ((KNNListener) entries[pos + 1]).kNNsChanged(event);
    }
  }
}
java
/**
 * Notify all registered KNNListeners about removed objects.
 *
 * @param removals ids of removed objects
 * @param updates ids whose kNN lists were updated as a consequence
 */
protected void fireKNNsRemoved(DBIDs removals, DBIDs updates) {
  final KNNChangeEvent event = new KNNChangeEvent(this, KNNChangeEvent.Type.DELETE, removals, updates);
  // Listener list stores [class, instance] pairs; iterate newest-first.
  final Object[] entries = listenerList.getListenerList();
  for(int pos = entries.length - 2; pos >= 0; pos -= 2) {
    if(entries[pos] == KNNListener.class) {
      ((KNNListener) entries[pos + 1]).kNNsChanged(event);
    }
  }
}
java
/**
 * Train the trivial prior classifier: count class label frequencies, store
 * the relative frequency distribution, and remember the majority class as
 * the fixed prediction.
 *
 * @param database database (unused beyond the label relation)
 * @param labelrep relation mapping each object to its class label
 */
@Override
public void buildClassifier(Database database, Relation<? extends ClassLabel> labelrep) {
  // Count occurrences of each class label.
  Object2IntOpenHashMap<ClassLabel> count = new Object2IntOpenHashMap<>();
  for(DBIDIter iter = labelrep.iterDBIDs(); iter.valid(); iter.advance()) {
    count.addTo(labelrep.get(iter), 1);
  }
  int max = Integer.MIN_VALUE;
  double size = labelrep.size();
  distribution = new double[count.size()];
  labels = new ArrayList<>(count.size());
  // fastIterator() may reuse the Entry object; we only read from it per step.
  ObjectIterator<Entry<ClassLabel>> iter = count.object2IntEntrySet().fastIterator();
  for(int i = 0; iter.hasNext(); ++i) {
    Entry<ClassLabel> entry = iter.next();
    // Relative frequency of this class.
    distribution[i] = entry.getIntValue() / size;
    labels.add(entry.getKey());
    // Track the majority class as the prediction.
    if(entry.getIntValue() > max) {
      max = entry.getIntValue();
      prediction = entry.getKey();
    }
  }
}
java
/**
 * Split an M-tree node: try every pair of candidate routing objects and keep
 * the assignment whose larger covering radius is smallest (mM_RAD strategy).
 *
 * @param tree the tree (used for distance computations)
 * @param node the node to split
 * @return best assignment found
 */
@Override
public Assignments<E> split(AbstractMTree<?, N, E, ?> tree, N node) {
  final int numEntries = node.getNumEntries();
  final double[][] dist = computeDistanceMatrix(tree, node);
  final boolean leaf = node.isLeaf();
  Assignments<E> best = null;
  double bestMaxCover = Double.POSITIVE_INFINITY;
  // Evaluate every unordered pair of routing-object candidates.
  for(int i = 0; i < numEntries; i++) {
    for(int j = i + 1; j < numEntries; j++) {
      final Assignments<E> cand = distributor.distribute(node, i, dist[i], j, dist[j]);
      final double maxCover = Math.max(cand.computeFirstCover(leaf), cand.computeSecondCover(leaf));
      if(maxCover < bestMaxCover) {
        bestMaxCover = maxCover;
        best = cand;
      }
    }
  }
  return best;
}
java
/**
 * Convert a list of number vectors into a plain 2d double array.
 *
 * @param means vectors to unbox
 * @return one double[] per input vector, in list order
 */
public static double[][] unboxVectors(List<? extends NumberVector> means) {
  final int count = means.size();
  double[][] out = new double[count][];
  for(int i = 0; i < count; i++) {
    out[i] = means.get(i).toArray();
  }
  return out;
}
java
/**
 * Add a weighted observation (x, y) to the running covariance statistics.
 *
 * Uses a numerically stabilized incremental update of the weighted sums of
 * squares and cross-products (avoids the naive sum-of-squares formula).
 *
 * @param x first value
 * @param y second value
 * @param w weight; observations with weight 0 are ignored
 */
public void put(double x, double y, double w) {
  if(w == 0.) {
    return; // Nothing to add.
  }
  if(sumWe <= 0.) {
    // First effective observation: initialize the weighted sums.
    sumX = x * w;
    sumY = y * w;
    sumWe = w;
    return;
  }
  // Delta to previous mean (scaled by the old total weight)
  final double deltaX = x * sumWe - sumX;
  final double deltaY = y * sumWe - sumY;
  final double oldWe = sumWe;
  // Incremental update
  sumWe += w;
  final double f = w / (sumWe * oldWe);
  // Update
  sumXX += f * deltaX * deltaX;
  sumYY += f * deltaY * deltaY;
  // should equal weight * deltaY * deltaX!
  sumXY += f * deltaX * deltaY;
  // Update means
  sumX += x * w;
  sumY += y * w;
}
java
/**
 * Pearson correlation of the tracked series.
 *
 * @return correlation in [-1, 1]; for degenerate (constant) series, 1 if
 *         both are constant, else 0
 */
public double getCorrelation() {
  if(sumXX > 0. && sumYY > 0.) {
    return sumXY / FastMath.sqrt(sumXX * sumYY);
  }
  // Degenerate case: at least one series was constant (zero variance).
  return (sumXX == sumYY) ? 1. : 0.;
}
java
/**
 * Pearson correlation coefficient of two equal-length double arrays.
 *
 * @param x first series
 * @param y second series
 * @return correlation in [-1, 1]; for constant series: 1 if both constant, else 0
 * @throws IllegalArgumentException if the arrays differ in length or are empty
 */
public static double coefficient(double[] x, double[] y) {
  final int xdim = x.length;
  final int ydim = y.length;
  if(xdim != ydim) {
    throw new IllegalArgumentException("Invalid arguments: arrays differ in length.");
  }
  if(xdim == 0) {
    throw new IllegalArgumentException("Empty vector.");
  }
  // Inlined computation of Pearson correlation, to avoid allocating objects!
  // This is a numerically stabilized version, avoiding sum-of-squares.
  double sumXX = 0., sumYY = 0., sumXY = 0.;
  double sumX = x[0], sumY = y[0];
  int i = 1;
  while(i < xdim) {
    final double xv = x[i], yv = y[i];
    // Delta to previous mean
    final double deltaX = xv * i - sumX;
    final double deltaY = yv * i - sumY;
    // Increment count first
    final double oldi = i; // Convert to double!
    ++i;
    final double f = 1. / (i * oldi);
    // Update
    sumXX += f * deltaX * deltaX;
    sumYY += f * deltaY * deltaY;
    // should equal deltaY * deltaX!
    sumXY += f * deltaX * deltaY;
    // Update sums
    sumX += xv;
    sumY += yv;
  }
  // One or both series were constant:
  if(!(sumXX > 0. && sumYY > 0.)) {
    return (sumXX == sumYY) ? 1. : 0.;
  }
  return sumXY / FastMath.sqrt(sumXX * sumYY);
}
java
/**
 * Weighted Pearson correlation coefficient of two number vectors.
 *
 * @param x first vector
 * @param y second vector
 * @param weights per-dimension weights (same length as the vectors)
 * @return weighted correlation in [-1, 1]; for constant series: 1 if both constant, else 0
 * @throws IllegalArgumentException on dimensionality mismatch or empty input
 */
public static double weightedCoefficient(NumberVector x, NumberVector y, double[] weights) {
  final int xdim = x.getDimensionality();
  final int ydim = y.getDimensionality();
  if(xdim != ydim) {
    throw new IllegalArgumentException("Invalid arguments: number vectors differ in dimensionality.");
  }
  if(xdim != weights.length) {
    throw new IllegalArgumentException("Dimensionality doesn't agree to weights.");
  }
  if(xdim == 0) {
    throw new IllegalArgumentException("Empty vector.");
  }
  // Inlined computation of Pearson correlation, to avoid allocating objects!
  // This is a numerically stabilized version, avoiding sum-of-squares.
  double sumXX = 0., sumYY = 0., sumXY = 0., sumWe = weights[0];
  double sumX = x.doubleValue(0) * sumWe, sumY = y.doubleValue(0) * sumWe;
  for(int i = 1; i < xdim; ++i) {
    final double xv = x.doubleValue(i), yv = y.doubleValue(i), w = weights[i];
    // Delta to previous mean
    final double deltaX = xv * sumWe - sumX;
    final double deltaY = yv * sumWe - sumY;
    // Increment count first
    final double oldWe = sumWe; // Convert to double!
    sumWe += w;
    final double f = w / (sumWe * oldWe);
    // Update
    sumXX += f * deltaX * deltaX;
    sumYY += f * deltaY * deltaY;
    // should equal deltaY * deltaX!
    sumXY += f * deltaX * deltaY;
    // Update sums
    sumX += xv * w;
    sumY += yv * w;
  }
  // One or both series were constant:
  if(!(sumXX > 0. && sumYY > 0.)) {
    return (sumXX == sumYY) ? 1. : 0.;
  }
  return sumXY / FastMath.sqrt(sumXX * sumYY);
}
java
/**
 * Create a view of {@code array} extended by one extra element.
 *
 * @param array base array
 * @param getter adapter for reading elements of the base array
 * @param extra additional element appended at the end
 * @return extended-array view
 */
@SuppressWarnings("unchecked")
public static <T, A> ExtendedArray<T> extend(A array, ArrayAdapter<T, A> getter, T extra) {
  // Erasure makes this cast safe: the adapter is only ever applied to 'array'.
  final ArrayAdapter<T, Object> adapter = (ArrayAdapter<T, Object>) getter;
  return new ExtendedArray<>(array, adapter, extra);
}
java
/**
 * Get the database's selection result, creating and attaching one if none
 * exists yet.
 *
 * @param db database to look up
 * @return existing or newly attached selection result
 */
public static SelectionResult ensureSelectionResult(final Database db) {
  List<SelectionResult> found = ResultUtil.filterResults(db.getHierarchy(), db, SelectionResult.class);
  if(!found.isEmpty()) {
    return found.get(0); // Reuse the existing selection.
  }
  // None attached yet: create one and register it as a child result.
  SelectionResult fresh = new SelectionResult();
  ResultUtil.addChildResult(db, fresh);
  return fresh;
}
java
/**
 * Render debug helper lines for the camera rotation (no-op unless DEBUG is
 * set and a start camera exists). Draws two red line segments based on the
 * rotation delta between the start angle and the start camera's Z rotation.
 *
 * @param gl GL2 context to draw into
 */
@SuppressWarnings("unused")
public void debugRender(GL2 gl) {
  if (!DEBUG || (startcamera == null)) {
    return;
  }
  gl.glLineWidth(3f);
  gl.glColor4f(1.f, 0.f, 0.f, .66f);
  gl.glBegin(GL.GL_LINES);
  // Segment 1: origin out to radius 4 in the rotated direction (z = 0).
  gl.glVertex3f(0.f, 0.f, 0.f);
  double rot = startangle - startcamera.getRotationZ();
  gl.glVertex3f((float) FastMath.cos(rot) * 4.f, (float) -FastMath.sin(rot) * 4.f, 0.f);
  // Segment 2: vertical line at radius 1, from z = 0 to z = 1.
  gl.glVertex3f((float) FastMath.cos(rot) * 1.f, (float) -FastMath.sin(rot) * 1.f, 0.f);
  gl.glVertex3f((float) FastMath.cos(rot) * 1.f, (float) -FastMath.sin(rot) * 1.f, 1.f);
  gl.glEnd();
}
java
/**
 * Collect all settings results reachable from the given result.
 *
 * @param r result to inspect
 * @return mutable singleton list if {@code r} itself is a settings result;
 *         filtered children for hierarchical results; empty list otherwise
 */
public static List<SettingsResult> getSettingsResults(Result r) {
  if(r instanceof SettingsResult) {
    // The result itself is a settings result: wrap it in a mutable list.
    List<SettingsResult> single = new ArrayList<>(1);
    single.add((SettingsResult) r);
    return single;
  }
  if(!(r instanceof HierarchicalResult)) {
    return Collections.emptyList();
  }
  return ResultUtil.filterResults(((HierarchicalResult) r).getHierarchy(), r, SettingsResult.class);
}
java
public static String usage(Collection<TrackedParameter> options) { StringBuilder usage = new StringBuilder(10000); if(!REFERENCE_VERSION.equals(VERSION)) { usage.append("ELKI build: ").append(VERSION).append(NEWLINE).append(NEWLINE); } usage.append(REFERENCE); // Collect options OptionUtil.formatForConsole(usage.append(NEWLINE).append("Parameters:").append(NEWLINE), // FormatUtil.getConsoleWidth(), options); return usage.toString(); }
java
protected static void printErrorMessage(Exception e) { if(e instanceof AbortException) { // ensure we actually show the message: LoggingConfiguration.setVerbose(Level.VERBOSE); LOG.verbose(e.getMessage()); } else if(e instanceof UnspecifiedParameterException) { LOG.error(e.getMessage()); } else if(e instanceof ParameterException) { LOG.error(e.getMessage()); } else { LOG.exception(e); } }
java
/**
 * Print the parameter description of a parameterizable class to the verbose
 * log. No-op when the class is null.
 *
 * @param descriptionClass class whose options should be described
 */
private static void printDescription(Class<?> descriptionClass) {
  if(descriptionClass == null) {
    return;
  }
  try {
    // Force verbose logging so the description is actually shown.
    LoggingConfiguration.setVerbose(Level.VERBOSE);
    LOG.verbose(OptionUtil.describeParameterizable(new StringBuilder(), descriptionClass, FormatUtil.getConsoleWidth(), "").toString());
  }
  catch(Exception e) {
    // NOTE(review): this logs e.getCause(), which can be null when the caught
    // exception is not a reflection wrapper — confirm whether logging e itself
    // was intended.
    LOG.exception("Error instantiating class to describe.", e.getCause());
  }
}
java
/**
 * Compute a minimum spanning tree over a dense distance source, emitting each
 * chosen edge to the collector. The structure matches Prim's algorithm with a
 * dense candidate scan: start at node 0, repeatedly attach the cheapest
 * unconnected node.
 *
 * @param data the data source (e.g. a distance matrix wrapper)
 * @param adapter adapter providing size and pairwise distances
 * @param collector sink receiving the n-1 chosen edges (weight, from, to)
 */
public static <T> void processDense(T data, Adapter<T> adapter, Collector collector) {
  // Number of nodes
  final int n = adapter.size(data);
  // Best distance for each node
  double[] best = new double[n];
  Arrays.fill(best, Double.POSITIVE_INFINITY);
  // Best previous node
  int[] src = new int[n];
  // Nodes already handled
  // byte[] uses more memory, but it will be faster.
  byte[] connected = new byte[n];
  // We always start at "random" node 0
  // Note: we use this below in the "j" loop!
  int current = 0;
  connected[current] = 1;
  best[current] = 0;
  // Search: add one node per iteration; n-1 edges in total.
  for(int i = n - 2; i >= 0; i--) {
    // Update best and src from current:
    int newbesti = -1;
    double newbestd = Double.POSITIVE_INFINITY;
    // Note: we assume we started with 0, and can thus skip it
    for(int j = 0; j < n; ++j) {
      if(connected[j] == 1) {
        continue;
      }
      // Relax the candidate edge (current, j).
      final double dist = adapter.distance(data, current, j);
      if(dist < best[j]) {
        best[j] = dist;
        src[j] = current;
      }
      // Track the overall cheapest unconnected node.
      if(best[j] < newbestd || newbesti == -1) {
        newbestd = best[j];
        newbesti = j;
      }
    }
    assert (newbesti >= 0);
    // Flag
    connected[newbesti] = 1;
    // Store edge
    collector.addEdge(newbestd, src[newbesti], newbesti);
    // Continue
    current = newbesti;
  }
}
java
/**
 * A report row is considered valid when at least one of its fields carries
 * content.
 *
 * @param filename file name field
 * @param line line number field
 * @param id rule id field
 * @param msg message field
 * @return true unless every field is empty
 */
protected boolean isInputValid(String filename, String line, String id, String msg) {
  // De Morgan of the original "any non-empty" disjunction.
  return !(filename.isEmpty() && line.isEmpty() && id.isEmpty() && msg.isEmpty());
}
java
/**
 * Parse one Visual C++ build log line; delegates to the CL compiler specific
 * line parser.
 *
 * @param line raw build log line
 * @param projectPath project path the line belongs to
 * @param compilationFile source file being compiled
 */
public void parseVCppLine(String line, String projectPath, String compilationFile) {
  this.parseVCppCompilerCLLine(line, projectPath, compilationFile);
}
java
/**
 * Parse a Valgrind XML report and return the errors it contains.
 *
 * @param report report file to parse
 * @return set of parsed Valgrind errors
 * @throws XMLStreamException on malformed XML
 */
public Set<ValgrindError> processReport(File report) throws XMLStreamException {
  // Stream-parse the report; the handler accumulates the errors.
  final ValgrindReportStreamHandler handler = new ValgrindReportStreamHandler();
  StaxParser parser = new StaxParser(handler);
  parser.parse(report);
  return handler.valgrindErrors;
}
java
/**
 * Exclude a node when its predecessor sits on the same line and is a
 * copybook/generated node.
 *
 * @param astNode node under inspection
 * @return true if the node should be excluded
 */
private static boolean isGeneratedNodeExcluded(AstNode astNode) {
  final AstNode previous = astNode.getPreviousAstNode();
  if (previous == null) {
    return false;
  }
  return previous.getTokenLine() == astNode.getTokenLine()
    && previous.isCopyBookOrGeneratedNode();
}
java
/**
 * Decide whether a 'break' should be excluded from counting, which is the
 * case (when the excludeCaseBreak option is on) if a 'case' or 'default'
 * label appears on the same source line.
 *
 * @param astNode candidate node (checked for being a BREAK token)
 * @return true when the break sits on the same line as its case/default label
 */
private boolean isBreakStatementExcluded(AstNode astNode) {
  boolean exclude = false;
  if (excludeCaseBreak && astNode.getToken().getType().equals(CxxKeyword.BREAK)) {
    // Walk backwards through preceding sibling statements on the same line,
    // starting from the break's enclosing statement.
    for (AstNode statement = astNode.getFirstAncestor(CxxGrammarImpl.statement);
      statement != null;
      statement = statement.getPreviousSibling()) {
      if (astNode.getTokenLine() != statement.getTokenLine()) {
        break; // Left the break's source line: stop scanning.
      }
      TokenType type = statement.getToken().getType();
      if (type.equals(CxxKeyword.CASE) || type.equals(CxxKeyword.DEFAULT)) {
        exclude = true;
        break;
      }
    }
  }
  return exclude;
}
java
/**
 * Check whether the node is a bare ";" expression statement that should not
 * be counted: either it shares a line with its enclosing selection statement,
 * or (without one) it is a generated/copybook artifact.
 *
 * @param astNode node under inspection
 * @return true if the empty statement should be treated as excluded
 */
private boolean isEmptyExpressionStatement(AstNode astNode) {
  // Only bare ";" expression statements are candidates.
  if (!astNode.is(CxxGrammarImpl.expressionStatement) || !";".equals(astNode.getToken().getValue())) {
    return false;
  }
  final AstNode selection = astNode.getFirstAncestor(CxxGrammarImpl.selectionStatement);
  if (selection != null) {
    return astNode.getTokenLine() == selection.getTokenLine();
  }
  return isGeneratedNodeExcluded(astNode);
}
java
/**
 * One-time (constructor-phase) computation of the predefined per-unit macros:
 * unit defaults plus configured macros plus anything defined by forced
 * includes.
 *
 * The method temporarily installs a fresh unitMacros map as the active macro
 * map (high-priority layer), runs the forced includes through the
 * preprocessor, snapshots the resulting high-priority map, and then tears the
 * temporary state down again in the finally block — the ordering here is
 * essential.
 *
 * @param configuredMacros macros supplied via configuration
 * @return snapshot of the resulting predefined unit macros, or an empty map
 *         when no compilation-unit settings exist
 * @throws IllegalStateException when called outside the constructor phase
 */
private Map<String, Macro> parsePredefinedUnitMacros(Map<String, Macro> configuredMacros) {
  if (!ctorInProgress || (unitMacros != null)) {
    throw new IllegalStateException("Preconditions for initial fill-out of predefinedUnitMacros were violated");
  }
  if (conf.getCompilationUnitSourceFiles().isEmpty() && (conf.getGlobalCompilationUnitSettings() == null)) {
    // configuration doesn't contain any settings for compilation units.
    // CxxPreprocessor will use fixedMacros only
    return Collections.emptyMap();
  }
  // Installing unitMacros switches getMacros() over to the unit map.
  unitMacros = new MapChain<>();
  if (getMacros() != unitMacros) {
    throw new IllegalStateException("expected unitMacros as active macros map");
  }
  try {
    // Collect everything into the high-priority layer, then snapshot it.
    getMacros().setHighPrio(true);
    getMacros().putAll(Macro.UNIT_MACROS);
    getMacros().putAll(configuredMacros);
    parseForcedIncludes();
    final HashMap<String, Macro> result = new HashMap<>(unitMacros.getHighPrioMap());
    return result;
  } finally {
    getMacros().setHighPrio(false); // just for the symmetry
    unitMacros = null; // remove unitMacros, switch getMacros() to fixedMacros
  }
}
java
/**
 * Look up a language-specific metric by key.
 *
 * Fix: localize and suppress the unavoidable unchecked cast (the map stores
 * differently parameterized metrics), instead of leaving an unsuppressed
 * compiler warning at method scope.
 *
 * @param <G> value type of the metric
 * @param metricKey key identifying the metric
 * @return the metric registered under the key
 * @throws IllegalStateException if no metric is registered for the key
 */
public <G extends Serializable> Metric<G> getMetric(CxxMetricsFactory.Key metricKey) {
  // Safe as long as callers request the G the metric was registered with.
  @SuppressWarnings("unchecked")
  Metric<G> metric = (Metric<G>) this.langSpecificMetrics.get(metricKey);
  if (metric == null) {
    throw new IllegalStateException("Requested metric " + metricKey + " couldn't be found");
  }
  return metric;
}
java
/**
 * Split a text file into elements separated by whitespace-only lines,
 * returning each element as one string.
 *
 * NOTE(review): lines are only accumulated once the line counter exceeds
 * TOP_COUNT — presumably to skip a fixed-size header; confirm the intended
 * boundary (strictly-greater means lines 0..TOP_COUNT are dropped).
 *
 * @param file file to read
 * @param charset character encoding of the file
 * @return list of elements; empty (and an error logged) on I/O failure
 */
public static List<String> getElements(File file, String charset) {
  List<String> list = new ArrayList<>();
  try (BufferedReader br = new BufferedReader(
    new InputStreamReader(java.nio.file.Files.newInputStream(file.toPath()), charset))) {
    StringBuilder sb = new StringBuilder(4096);
    String line;
    int cnt = 0;
    final Pattern whitespacesOnly = Pattern.compile("^\\s*$");
    while ((line = br.readLine()) != null) {
      if (cnt > (TOP_COUNT)) {
        if (whitespacesOnly.matcher(line).matches()) {
          // Blank line: close the current element.
          list.add(sb.toString());
          sb.setLength(0);
        } else {
          sb.append(line);
          sb.append('\n');
        }
      }
      ++cnt;
    }
    // Flush a trailing element without a closing blank line.
    if (sb.length() > 0) {
      list.add(sb.toString());
    }
  } catch (IOException e) {
    String msg = new StringBuilder(512).append("Cannot feed the data into sonar, details: '")
      .append(e)
      .append("'").toString();
    LOG.error(msg);
  }
  return list;
}
java
/**
 * Save a report issue at most once; duplicates (per the issue's equality)
 * are silently dropped.
 *
 * @param sensorContext sensor context to report into
 * @param issue issue to save
 */
public void saveUniqueViolation(SensorContext sensorContext, CxxReportIssue issue) {
  // Set.add() returns false for duplicates, so each issue is saved once.
  boolean firstOccurrence = uniqueIssues.add(issue);
  if (firstOccurrence) {
    saveViolation(sensorContext, issue);
  }
}
java
/**
 * Scan a single input file using a default configuration whose encoding is
 * taken from the sensor context's file system.
 *
 * @param file file to scan
 * @param sensorContext context providing the file system encoding
 * @param language the C++ language instance
 * @param visitors additional AST visitors to run
 * @return the resulting SourceFile node
 */
@SafeVarargs
public static SourceFile scanSingleFile(InputFile file, SensorContext sensorContext, CxxLanguage language, SquidAstVisitor<Grammar>... visitors) {
  return scanSingleFileConfig(language, file, new CxxConfiguration(sensorContext.fileSystem().encoding()), visitors);
}
java
/**
 * Scan a single input file with an explicit configuration.
 *
 * @param language the C++ language instance
 * @param file file to scan; must exist
 * @param cxxConfig configuration to use
 * @param visitors additional AST visitors to run
 * @return the single resulting SourceFile node
 * @throws IllegalArgumentException when the file does not exist
 * @throws IllegalStateException when scanning yields other than one SourceFile
 */
public static SourceFile scanSingleFileConfig(CxxLanguage language, InputFile file, CxxConfiguration cxxConfig, SquidAstVisitor<Grammar>... visitors) {
  if (!file.isFile()) {
    throw new IllegalArgumentException("File '" + file + "' not found.");
  }
  // Run the scanner and query the index for the produced SourceFile.
  AstScanner<Grammar> astScanner = create(language, cxxConfig, visitors);
  astScanner.scanFile(file.file());
  Collection<SourceCode> scanned = astScanner.getIndex().search(new QueryByType(SourceFile.class));
  int found = scanned.size();
  if (found != 1) {
    throw new IllegalStateException("Only one SourceFile was expected whereas " + found + " has been returned.");
  }
  return (SourceFile) scanned.iterator().next();
}
java
/**
 * Join two paths, keeping relative results visibly relative by prefixing
 * them with ".".
 *
 * @param path1 left component
 * @param path2 right component; an empty path yields an empty result
 * @return normalized joined path string
 */
public static String join(Path path1, Path path2) {
  // An empty second component yields an empty result by convention.
  if (path2.toString().isEmpty()) {
    return "";
  }
  // Force relative inputs to start with "." before resolving.
  Path left = path1.isAbsolute() ? path1 : Paths.get(".", path1.toString());
  Path right = path2.isAbsolute() ? path2 : Paths.get(".", path2.toString());
  Path joined = left.resolve(right).normalize();
  // Re-apply the "." prefix if normalization stripped it.
  return joined.isAbsolute() ? joined.toString() : Paths.get(".", joined.toString()).toString();
}
java
/**
 * Parse a 'JSON Compilation Database' (compile_commands.json) file and feed
 * per-unit (or global) compiler settings into the configuration.
 *
 * Fix: the path for the special "__global__" pseudo-entry is no longer
 * resolved against the working directory — the original computed (and could
 * fail on) a filesystem path it never used for that entry.
 *
 * @param config configuration receiving the parsed settings
 * @param compileCommandsFile the compile_commands.json file
 * @throws IOException on read or JSON mapping errors
 */
public static void parse(CxxConfiguration config, File compileCommandsFile) throws IOException {
  LOG.debug("Parsing 'JSON Compilation Database' format");
  ObjectMapper mapper = new ObjectMapper();
  mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
  mapper.enable(DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY);
  JsonCompilationDatabaseCommandObject[] commandObjects = mapper.readValue(compileCommandsFile, JsonCompilationDatabaseCommandObject[].class);
  for (JsonCompilationDatabaseCommandObject commandObject : commandObjects) {
    if ("__global__".equals(commandObject.getFile())) {
      // Pseudo-entry carrying settings shared by all compilation units.
      CxxCompilationUnitSettings globalSettings = new CxxCompilationUnitSettings();
      parseCommandObject(globalSettings, commandObject);
      config.setGlobalCompilationUnitSettings(globalSettings);
      continue;
    }
    // Relative file entries resolve against the command's working directory.
    Path cwd = (commandObject.getDirectory() != null) ? Paths.get(commandObject.getDirectory()) : Paths.get(".");
    Path absPath = cwd.resolve(commandObject.getFile());
    CxxCompilationUnitSettings settings = new CxxCompilationUnitSettings();
    parseCommandObject(settings, commandObject);
    config.addCompilationUnitSettings(absPath.toAbsolutePath().normalize().toString(), settings);
  }
}
java
/**
 * Build an identifier string for an operator-function-id node by appending
 * the tokens of its overloadable-operator descendant (if any) to the node's
 * own token value.
 *
 * @param operatorFunctionId operator-function-id AST node
 * @return concatenated operator identifier
 */
private static String getOperatorId(AstNode operatorFunctionId) {
  StringBuilder id = new StringBuilder(operatorFunctionId.getTokenValue());
  AstNode op = operatorFunctionId.getFirstDescendant(CxxGrammarImpl.overloadableOperator);
  if (op != null) {
    // Append each child token of the operator symbol in order.
    for (AstNode child = op.getFirstChild(); child != null; child = child.getNextSibling()) {
      id.append(child.getTokenValue());
    }
  }
  return id.toString();
}
java
/**
 * Collect doxygen inline comments attached to a token that sit on the given
 * source line.
 *
 * @param token token whose trivia is inspected
 * @param line line number the comments must be on
 * @return matching comment tokens (possibly empty)
 */
private static List<Token> getInlineDocumentation(Token token, int line) {
  List<Token> result = new ArrayList<>();
  for (Trivia trivia : token.getTrivia()) {
    if (!trivia.isComment()) {
      continue;
    }
    Token comment = trivia.getToken();
    // Keep only doxygen-style inline comments on the requested line.
    if (comment == null || comment.getLine() != line || !isDoxygenInlineComment(comment.getValue())) {
      continue;
    }
    result.add(comment);
    if (LOG.isTraceEnabled()) {
      LOG.trace("Inline doc: " + comment.getValue());
    }
  }
  return result;
}
java
/**
 * Read a string property from the sensor context configuration, falling back
 * to a default when the property is missing or empty.
 *
 * @param context sensor context holding the configuration
 * @param name property key
 * @param def default value for missing/empty properties
 * @return the property value, or {@code def}
 */
public static String getContextStringProperty(SensorContext context, String name, String def) {
  return context.config().get(name)
    .filter(value -> !value.isEmpty())
    .orElse(def);
}
java
@Nullable public static String resolveFilename(final String baseDir, @Nullable final String filename) { if (filename != null) { // Normalization can return null if path is null, is invalid, // or is a path with back-ticks outside known directory structure String normalizedPath = FilenameUtils.normalize(filename); if ((normalizedPath != null) && (new File(normalizedPath).isAbsolute())) { return normalizedPath; } // Prefix with absolute module base directory, attempt normalization again -- can still get null here normalizedPath = FilenameUtils.normalize(baseDir + File.separator + filename); if (normalizedPath != null) { return normalizedPath; } } return null; }
java
/**
 * Record a multi-location violation on the current source file, lazily
 * creating the per-file message set on first use.
 *
 * @param message issue to record
 */
protected void createMultiLocationViolation(CxxReportIssue message) {
  SourceFile sourceFile = getSourceFile();
  Set<CxxReportIssue> existing = getMultiLocationCheckMessages(sourceFile);
  // First message on this file: start a fresh set.
  Set<CxxReportIssue> messages = (existing != null) ? existing : new HashSet<>();
  messages.add(message);
  setMultiLocationViolation(sourceFile, messages);
}
java
/**
 * Get a page of epics for the given group, filtered and ordered as requested.
 *
 * GET /groups/:id/epics
 *
 * @param groupIdOrPath group id, path, or Group instance
 * @param authorId filter by author id (optional)
 * @param labels comma-separated label filter (optional)
 * @param orderBy ordering field (optional)
 * @param sortOrder sort direction (optional)
 * @param search search string (optional)
 * @param page page number
 * @param perPage items per page
 * @return list of epics in the given page
 * @throws GitLabApiException on API errors
 */
public List<Epic> getEpics(Object groupIdOrPath, Integer authorId, String labels, EpicOrderBy orderBy, SortOrder sortOrder, String search, int page, int perPage) throws GitLabApiException {
  GitLabApiForm form = new GitLabApiForm(page, perPage)
    .withParam("author_id", authorId)
    .withParam("labels", labels)
    .withParam("order_by", orderBy)
    .withParam("sort", sortOrder)
    .withParam("search", search);
  Response response = get(Response.Status.OK, form.asMap(), "groups", getGroupIdOrPath(groupIdOrPath), "epics");
  return response.readEntity(new GenericType<List<Epic>>() {
  });
}
java
/**
 * Get a single epic by its IID within a group.
 *
 * GET /groups/:id/epics/:epic_iid
 *
 * @param groupIdOrPath group id, path, or Group instance
 * @param epicIid IID of the epic
 * @return the epic
 * @throws GitLabApiException on API errors
 */
public Epic getEpic(Object groupIdOrPath, Integer epicIid) throws GitLabApiException {
  Response response = get(Response.Status.OK, null, "groups", getGroupIdOrPath(groupIdOrPath), "epics", epicIid);
  return response.readEntity(Epic.class);
}
java
/**
 * Get a single epic as an Optional; API errors are converted into the
 * library's exception-carrying Optional instead of being thrown.
 *
 * @param groupIdOrPath group id, path, or Group instance
 * @param epicIid IID of the epic
 * @return Optional holding the epic, or empty/failed on errors
 */
public Optional<Epic> getOptionalEpic(Object groupIdOrPath, Integer epicIid) {
  try {
    Epic epic = getEpic(groupIdOrPath, epicIid);
    return Optional.ofNullable(epic);
  } catch (GitLabApiException glae) {
    return GitLabApi.createOptionalFromException(glae);
  }
}
java
/**
 * Create a new epic in a group.
 *
 * POST /groups/:id/epics
 *
 * @param groupIdOrPath group id, path, or Group instance
 * @param title epic title (required)
 * @param labels comma-separated labels (optional)
 * @param description epic description (optional)
 * @param startDate start date (optional)
 * @param endDate end date (optional)
 * @return the created epic
 * @throws GitLabApiException on API errors
 */
public Epic createEpic(Object groupIdOrPath, String title, String labels, String description, Date startDate, Date endDate) throws GitLabApiException {
  GitLabApiForm form = new GitLabApiForm()
    .withParam("title", title, true)
    .withParam("labels", labels)
    .withParam("description", description)
    .withParam("start_date", startDate)
    .withParam("end_date", endDate);
  Response response = post(Response.Status.CREATED, form.asMap(), "groups", getGroupIdOrPath(groupIdOrPath), "epics");
  return response.readEntity(Epic.class);
}
java
/**
 * Update an existing epic in a group.
 *
 * PUT /groups/:id/epics/:epic_iid
 *
 * @param groupIdOrPath group id, path, or Group instance
 * @param epicIid IID of the epic to update
 * @param title epic title (required)
 * @param labels comma-separated labels (optional)
 * @param description epic description (optional)
 * @param startDate start date (optional)
 * @param endDate end date (optional)
 * @return the updated epic
 * @throws GitLabApiException on API errors
 */
public Epic updateEpic(Object groupIdOrPath, Integer epicIid, String title, String labels, String description, Date startDate, Date endDate) throws GitLabApiException {
  GitLabApiForm form = new GitLabApiForm()
    .withParam("title", title, true)
    .withParam("labels", labels)
    .withParam("description", description)
    .withParam("start_date", startDate)
    .withParam("end_date", endDate);
  Response response = put(Response.Status.OK, form.asMap(), "groups", getGroupIdOrPath(groupIdOrPath), "epics", epicIid);
  return response.readEntity(Epic.class);
}
java
/**
 * Delete an epic from a group.
 *
 * DELETE /groups/:id/epics/:epic_iid
 *
 * @param groupIdOrPath group id, path, or Group instance
 * @param epicIid IID of the epic to delete
 * @throws GitLabApiException on API errors
 */
public void deleteEpic(Object groupIdOrPath, Integer epicIid) throws GitLabApiException {
  delete(Response.Status.NO_CONTENT, null, "groups", getGroupIdOrPath(groupIdOrPath), "epics", epicIid);
}
java
/**
 * Get all issues assigned to the given epic, fetched in pages of the default
 * size and flattened into a single list.
 *
 * NOTE(review): the declared element type is Epic although the endpoint
 * concerns epic issues — confirm against the paged overload's signature.
 *
 * @param groupIdOrPath group id, path, or Group instance
 * @param epicIid IID of the epic
 * @return all items assigned to the epic
 * @throws GitLabApiException on API errors
 */
public List<Epic> getEpicIssues(Object groupIdOrPath, Integer epicIid) throws GitLabApiException {
  return (getEpicIssues(groupIdOrPath, epicIid, getDefaultPerPage()).all());
}
java