code stringlengths 73 34.1k | label stringclasses 1 value |
|---|---|
public Object[] getRow(int row) {
Object[] ret = new Object[columns.size()];
for(int c = 0; c < columns.size(); c++) {
ret[c] = data(row, c);
}
return ret;
} | java |
/**
 * Batched k-nearest-neighbor search: refine the kNN heaps of several query
 * objects at once in a single recursive traversal of the R*-tree.
 *
 * @param node Current subtree root to process
 * @param knnLists Map of query id to its kNN heap; heaps are updated in place
 */
protected void batchNN(AbstractRStarTreeNode<?, ?> node, Map<DBID, KNNHeap> knnLists) {
  if(node.isLeaf()) {
    // Leaf node: test every stored object against every query heap.
    for(int i = 0; i < node.getNumEntries(); i++) {
      SpatialEntry p = node.getEntry(i);
      for(Entry<DBID, KNNHeap> ent : knnLists.entrySet()) {
        final DBID q = ent.getKey();
        final KNNHeap knns_q = ent.getValue();
        // Current pruning distance of this query (k-th best so far).
        double knn_q_maxDist = knns_q.getKNNDistance();
        DBID pid = ((LeafEntry) p).getDBID();
        // FIXME: objects are NOT accessible by DBID in a plain R-tree
        // context!
        double dist_pq = distanceFunction.distance(relation.get(pid), relation.get(q));
        tree.statistics.countDistanceCalculation();
        if(dist_pq <= knn_q_maxDist) {
          knns_q.insert(dist_pq, pid);
        }
      }
    }
  }
  else {
    // Directory node: sort children by minimum distance to any query object.
    ModifiableDBIDs ids = DBIDUtil.newArray(knnLists.size());
    for(DBID id : knnLists.keySet()) {
      ids.add(id);
    }
    List<DoubleDistanceEntry> entries = getSortedEntries(node, ids);
    for(DoubleDistanceEntry distEntry : entries) {
      double minDist = distEntry.distance;
      // Descend into a child only if at least one query could still improve,
      // i.e. its pruning distance is not below the child's minimum distance.
      for(Entry<DBID, KNNHeap> ent : knnLists.entrySet()) {
        final KNNHeap knns_q = ent.getValue();
        double knn_q_maxDist = knns_q.getKNNDistance();
        if(minDist <= knn_q_maxDist) {
          SpatialEntry entry = distEntry.entry;
          AbstractRStarTreeNode<?, ?> child = tree.getNode(((DirectoryEntry) entry).getPageID());
          batchNN(child, knnLists);
          break;
        }
      }
    }
  }
}
protected List<DoubleDistanceEntry> getSortedEntries(AbstractRStarTreeNode<?, ?> node, DBIDs ids) {
List<DoubleDistanceEntry> result = new ArrayList<>();
for(int i = 0; i < node.getNumEntries(); i++) {
SpatialEntry entry = node.getEntry(i);
double minMinDist = Double.MAX_VALUE;
for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
double minDist = distanceFunction.minDist(entry, relation.get(iter));
tree.statistics.countDistanceCalculation();
minMinDist = Math.min(minDist, minMinDist);
}
result.add(new DoubleDistanceEntry(entry, minMinDist));
}
Collections.sort(result);
return result;
} | java |
/**
 * Test whether a 2d point remains a convex-hull candidate, and update the
 * pruning region (a diamond spanned by four extreme points, plus the
 * axis-aligned box it implies) accordingly.
 *
 * @param point 2d point as {x, y}
 * @return true if the point lies outside the current inner region and thus
 *         stays a candidate; false if it can be pruned
 */
private boolean checkCandidateUpdate(double[] point) {
  final double x = point[0], y = point[1];
  if(points.isEmpty()) {
    // First point: it alone spans the (degenerate) initial region.
    leftx = rightx = x;
    topy = bottomy = y;
    topleft = topright = bottomleft = bottomright = point;
    return true;
  }
  // A non-regular diamond spanned by left, top, right, and bottom.
  if(x <= leftx || x >= rightx || y <= bottomy || y >= topy) {
    // Diagonal coordinates: x+y orders points along the bottomleft-topright
    // diagonal, x-y along the topleft-bottomright diagonal.
    double xpy = x + y, xmy = x - y;
    // Update bounds:
    boolean changed = false;
    if(xpy < bottomleft[0] + bottomleft[1]) {
      bottomleft = point;
      changed = true;
    }
    else if(xpy > topright[0] + topright[1]) {
      topright = point;
      changed = true;
    }
    if(xmy < topleft[0] - topleft[1]) {
      topleft = point;
      changed = true;
    }
    else if(xmy > bottomright[0] - bottomright[1]) {
      bottomright = point;
      changed = true;
    }
    if(changed) {
      // Recompute the axis-aligned inner box implied by the four extreme
      // diagonal points.
      leftx = Math.max(bottomleft[0], topleft[0]);
      rightx = Math.min(bottomright[0], topright[0]);
      topy = Math.min(topleft[1], topright[1]);
      bottomy = Math.max(bottomleft[1], bottomright[1]);
    }
    return true;
  }
  return false;
}
static DBIDs randomSample(DBIDs ids, int samplesize, Random rnd, DBIDs previous) {
if(previous == null) {
return DBIDUtil.randomSample(ids, samplesize, rnd);
}
ModifiableDBIDs sample = DBIDUtil.newHashSet(samplesize);
sample.addDBIDs(previous);
sample.addDBIDs(DBIDUtil.randomSample(ids, samplesize - previous.size(), rnd));
// If these two were not disjoint, we can be short of the desired size!
if(sample.size() < samplesize) {
// Draw a large enough sample to make sure to be able to fill it now.
// This can be less random though, because the iterator may impose an
// order; but this is a rare code path.
for(DBIDIter it = DBIDUtil.randomSample(ids, samplesize, rnd).iter(); sample.size() < samplesize && it.valid(); it.advance()) {
sample.add(it);
}
}
return sample;
} | java |
@Override
public void actionPerformed(ActionEvent e) {
// Use a new JFileChooser. Inconsistent behaviour otherwise!
final JFileChooser fc = new JFileChooser(new File("."));
if(param.isDefined()) {
fc.setSelectedFile(param.getValue());
}
if(e.getSource() == button) {
int returnVal = fc.showOpenDialog(button);
if(returnVal == JFileChooser.APPROVE_OPTION) {
textfield.setText(fc.getSelectedFile().getPath());
fireValueChanged();
}
// else: do nothing on cancel.
}
else if(e.getSource() == textfield) {
fireValueChanged();
}
else {
LoggingUtil.warning("actionPerformed triggered by unknown source: " + e.getSource());
}
} | java |
/**
 * Replace a thumbnail-registry reference by an inlined PNG image, encoded
 * as a base64 data: URI, so the cloned SVG no longer depends on the
 * thumbnail registry.
 *
 * @param doc Document to clone the node into
 * @param urldata Thumbnail URL to resolve via the registry
 * @param eold Old element to clone
 * @return cloned element with inlined image data, or null on failure
 */
protected Node inlineThumbnail(Document doc, ParsedURL urldata, Node eold) {
  RenderableImage img = ThumbnailRegistryEntry.handleURL(urldata);
  if(img == null) {
    LoggingUtil.warning("Image not found in registry: " + urldata.toString());
    return null;
  }
  // Serialize the rendered image to PNG, base64 encoded, with the
  // data:image/png;base64, prefix already written to the buffer.
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  try {
    os.write(SVGSyntax.DATA_PROTOCOL_PNG_PREFIX.getBytes());
    Base64EncoderStream encoder = new Base64EncoderStream(os);
    ImageIO.write(img.createDefaultRendering(), "png", encoder);
    encoder.close();
  }
  catch(IOException e) {
    LoggingUtil.exception("Exception serializing image to png", e);
    return null;
  }
  Element i = (Element) super.cloneNode(doc, eold);
  // Strip any line breaks the base64 encoder inserted: the xlink:href
  // attribute must be a single-line URI.
  i.setAttributeNS(SVGConstants.XLINK_NAMESPACE_URI, SVGConstants.XLINK_HREF_ATTRIBUTE, os.toString().replaceAll("\\s*[\\r\\n]+\\s*", ""));
  return i;
}
private static PrintStream openStream(File out) throws IOException {
OutputStream os = new FileOutputStream(out);
os = out.getName().endsWith(GZIP_POSTFIX) ? new GZIPOutputStream(os) : os;
return new PrintStream(os);
} | java |
@Override
public void setDimensionality(int dimensionality) throws IllegalArgumentException {
final int maxdim = getMaxDim();
if(maxdim > dimensionality) {
throw new IllegalArgumentException("Given dimensionality " + dimensionality + " is too small w.r.t. the given values (occurring maximum: " + maxdim + ").");
}
this.dimensionality = dimensionality;
} | java |
protected IndexTreePath<E> findPathToObject(IndexTreePath<E> subtree, SpatialComparable mbr, DBIDRef id) {
N node = getNode(subtree.getEntry());
if(node.isLeaf()) {
for(int i = 0; i < node.getNumEntries(); i++) {
if(DBIDUtil.equal(((LeafEntry) node.getEntry(i)).getDBID(), id)) {
return new IndexTreePath<>(subtree, node.getEntry(i), i);
}
}
}
// directory node
else {
for(int i = 0; i < node.getNumEntries(); i++) {
if(SpatialUtil.intersects(node.getEntry(i), mbr)) {
IndexTreePath<E> childSubtree = new IndexTreePath<>(subtree, node.getEntry(i), i);
IndexTreePath<E> path = findPathToObject(childSubtree, mbr, id);
if(path != null) {
return path;
}
}
}
}
return null;
} | java |
/**
 * Delete the leaf entry at the end of the given path and repair the tree:
 * condense underfull nodes, then reinsert their orphaned entries.
 *
 * @param deletionPath Path to the leaf entry to remove
 */
protected void deletePath(IndexTreePath<E> deletionPath) {
  N leaf = getNode(deletionPath.getParentPath().getEntry());
  int index = deletionPath.getIndex();
  // delete o
  E entry = leaf.getEntry(index);
  leaf.deleteEntry(index);
  writeNode(leaf);
  // condense the tree; underflowing nodes are collected on the stack
  Stack<N> stack = new Stack<>();
  condenseTree(deletionPath.getParentPath(), stack);
  // reinsert underflow nodes
  while(!stack.empty()) {
    N node = stack.pop();
    if(node.isLeaf()) {
      for(int i = 0; i < node.getNumEntries(); i++) {
        settings.getOverflowTreatment().reinitialize(); // Intended?
        this.insertLeafEntry(node.getEntry(i));
      }
    }
    else {
      // Directory node: push children so their entries get reinserted
      // when they are popped in turn.
      for(int i = 0; i < node.getNumEntries(); i++) {
        stack.push(getNode(node.getEntry(i)));
      }
    }
    deleteNode(node);
  }
  postDelete(entry);
  doExtraIntegrityChecks();
}
protected List<E> createBulkLeafNodes(List<E> objects) {
int minEntries = leafMinimum;
int maxEntries = leafCapacity;
ArrayList<E> result = new ArrayList<>();
List<List<E>> partitions = settings.bulkSplitter.partition(objects, minEntries, maxEntries);
for(List<E> partition : partitions) {
// create leaf node
N leafNode = createNewLeafNode();
// insert data
for(E o : partition) {
leafNode.addLeafEntry(o);
}
// write to file
writeNode(leafNode);
result.add(createNewDirectoryEntry(leafNode));
if(getLogger().isDebugging()) {
getLogger().debugFine("Created leaf page " + leafNode.getPageID());
}
}
if(getLogger().isDebugging()) {
getLogger().debugFine("numDataPages = " + result.size());
}
return result;
} | java |
/**
 * Choose the subtree at the given target depth for inserting an object
 * with the specified MBR, following containment first and the configured
 * insertion strategy otherwise.
 *
 * @param subtree Current subtree path
 * @param mbr MBR of the object to insert
 * @param depth Target depth (path length) at which to stop descending
 * @param cur Current depth (path length consumed so far)
 * @return path to the chosen subtree
 */
protected IndexTreePath<E> choosePath(IndexTreePath<E> subtree, SpatialComparable mbr, int depth, int cur) {
  if(getLogger().isDebuggingFiner()) {
    getLogger().debugFiner("node " + subtree + ", depth " + depth);
  }
  N node = getNode(subtree.getEntry());
  if(node == null) {
    throw new RuntimeException("Page file did not return node for node id: " + getPageID(subtree.getEntry()));
  }
  if(node.isLeaf()) {
    return subtree;
  }
  // first test on containment: prefer a child that fully contains the MBR
  IndexTreePath<E> newSubtree = containedTest(subtree, node, mbr);
  if(newSubtree != null) {
    return (++cur == depth) ? newSubtree : choosePath(newSubtree, mbr, depth, cur);
  }
  // No containing child: ask the insertion strategy to pick one.
  // (childNode is only used below to detect the leaf level.)
  N childNode = getNode(node.getEntry(0));
  int num = settings.insertionStrategy.choose(node, NodeArrayAdapter.STATIC, mbr, height, cur);
  newSubtree = new IndexTreePath<>(subtree, node.getEntry(num), num);
  ++cur;
  if(cur == depth) {
    return newSubtree;
  }
  // children are leafs
  if(childNode.isLeaf()) {
    assert cur == newSubtree.getPathCount(); // Check for programming errors
    throw new IllegalArgumentException("childNode is leaf, but currentDepth != depth: " + cur + " != " + depth);
  }
  // children are directory nodes
  return choosePath(newSubtree, mbr, depth, cur);
}
private N split(N node) {
// choose the split dimension and the split point
int minimum = node.isLeaf() ? leafMinimum : dirMinimum;
long[] split = settings.nodeSplitter.split(node, NodeArrayAdapter.STATIC, minimum);
// New node
final N newNode = node.isLeaf() ? createNewLeafNode() : createNewDirectoryNode();
// do the split
node.splitByMask(newNode, split);
// write changes to file
writeNode(node);
writeNode(newNode);
return newNode;
} | java |
/**
 * Forced reinsert (R*-tree overflow treatment): remove the entries at the
 * given offsets from the node, adjust ancestor MBRs, then reinsert the
 * removed entries into the tree.
 *
 * @param node Overfull node
 * @param path Path to the node
 * @param offs Offsets of the entries to reinsert
 */
public void reInsert(N node, IndexTreePath<E> path, int[] offs) {
  final int depth = path.getPathCount();
  // Collect the entries to reinsert and build a removal bitmask.
  long[] remove = BitsUtil.zero(node.getCapacity());
  List<E> reInsertEntries = new ArrayList<>(offs.length);
  for(int i = 0; i < offs.length; i++) {
    reInsertEntries.add(node.getEntry(offs[i]));
    BitsUtil.setI(remove, offs[i]);
  }
  // Remove the entries we reinsert
  node.removeMask(remove);
  writeNode(node);
  // and adapt the mbrs, walking up towards the root
  IndexTreePath<E> childPath = path;
  N child = node;
  while(childPath.getParentPath() != null) {
    N parent = getNode(childPath.getParentPath().getEntry());
    int indexOfChild = childPath.getIndex();
    if(child.adjustEntry(parent.getEntry(indexOfChild))) {
      writeNode(parent);
      childPath = childPath.getParentPath();
      child = parent;
    }
    else {
      // MBR unchanged; no ancestor above can change either.
      break;
      // TODO: stop writing when MBR didn't change!
    }
  }
  // reinsert the first entries
  final Logging log = getLogger();
  for(E entry : reInsertEntries) {
    if(node.isLeaf()) {
      if(log.isDebugging()) {
        log.debug("reinsert " + entry);
      }
      insertLeafEntry(entry);
    }
    else {
      // Directory entries must be reinserted at their original depth.
      if(log.isDebugging()) {
        log.debug("reinsert " + entry + " at " + depth);
      }
      insertDirectoryEntry(entry, depth);
    }
  }
}
/**
 * Condense the tree after a deletion: detach underflowing nodes (pushing
 * them on the stack for later reinsertion), adjust parent entries, and
 * shrink the root when it has a single directory child left.
 *
 * @param subtree Path to the node to condense
 * @param stack Collects detached underflow nodes for reinsertion
 */
private void condenseTree(IndexTreePath<E> subtree, Stack<N> stack) {
  N node = getNode(subtree.getEntry());
  // node is not root
  if(!isRoot(node)) {
    N parent = getNode(subtree.getParentPath().getEntry());
    int index = subtree.getIndex();
    if(hasUnderflow(node)) {
      // Detach the underflowing node; its entries are reinserted later.
      if(parent.deleteEntry(index)) {
        stack.push(node);
      }
      else {
        node.adjustEntry(parent.getEntry(index));
      }
    }
    else {
      // No underflow: just tighten the parent's MBR entry.
      node.adjustEntry(parent.getEntry(index));
    }
    writeNode(parent);
    // get subtree to parent
    condenseTree(subtree.getParentPath(), stack);
  }
  // node is root
  else {
    // Shrink the tree: if the root is a directory node with only one
    // child left, pull that child's entries up into a new root.
    if(hasUnderflow(node) && node.getNumEntries() == 1 && !node.isLeaf()) {
      N child = getNode(node.getEntry(0));
      final N newRoot;
      if(child.isLeaf()) {
        newRoot = createNewLeafNode();
        newRoot.setPageID(getRootID());
        for(int i = 0; i < child.getNumEntries(); i++) {
          newRoot.addLeafEntry(child.getEntry(i));
        }
      }
      else {
        newRoot = createNewDirectoryNode();
        newRoot.setPageID(getRootID());
        for(int i = 0; i < child.getNumEntries(); i++) {
          newRoot.addDirectoryEntry(child.getEntry(i));
        }
      }
      writeNode(newRoot);
      height--;
    }
  }
}
private void getLeafNodes(N node, List<E> result, int currentLevel) {
// Level 1 are the leaf nodes, Level 2 is the one atop!
if(currentLevel == 2) {
for(int i = 0; i < node.getNumEntries(); i++) {
result.add(node.getEntry(i));
}
}
else {
for(int i = 0; i < node.getNumEntries(); i++) {
getLeafNodes(getNode(node.getEntry(i)), result, (currentLevel - 1));
}
}
} | java |
public static double angleDense(NumberVector v1, NumberVector v2) {
final int dim1 = v1.getDimensionality(), dim2 = v2.getDimensionality();
final int mindim = (dim1 <= dim2) ? dim1 : dim2;
// Essentially, we want to compute this:
// v1.transposeTimes(v2) / (v1.euclideanLength() * v2.euclideanLength());
// We can just compute all three in parallel.
double cross = 0, l1 = 0, l2 = 0;
for(int k = 0; k < mindim; k++) {
final double r1 = v1.doubleValue(k);
final double r2 = v2.doubleValue(k);
cross += r1 * r2;
l1 += r1 * r1;
l2 += r2 * r2;
}
for(int k = mindim; k < dim1; k++) {
final double r1 = v1.doubleValue(k);
l1 += r1 * r1;
}
for(int k = mindim; k < dim2; k++) {
final double r2 = v2.doubleValue(k);
l2 += r2 * r2;
}
final double a = (cross == 0.) ? 0. : //
(l1 == 0. || l2 == 0.) ? 1. : //
FastMath.sqrt((cross / l1) * (cross / l2));
return (a < 1.) ? a : 1.;
} | java |
/**
 * Cosine similarity of two sparse vectors, clamped to [0, 1], using a
 * sorted-merge over the nonzero dimensions of both vectors.
 *
 * @param v1 First sparse vector
 * @param v2 Second sparse vector
 * @return cosine similarity, at most 1
 */
public static double angleSparse(SparseNumberVector v1, SparseNumberVector v2) {
  // TODO: exploit precomputed length, when available?
  double l1 = 0., l2 = 0., cross = 0.;
  // Merge the two sorted sparse representations.
  int i1 = v1.iter(), i2 = v2.iter();
  while(v1.iterValid(i1) && v2.iterValid(i2)) {
    final int d1 = v1.iterDim(i1), d2 = v2.iterDim(i2);
    if(d1 < d2) {
      // Dimension only in v1: length contribution only.
      final double val = v1.iterDoubleValue(i1);
      l1 += val * val;
      i1 = v1.iterAdvance(i1);
    }
    else if(d2 < d1) {
      // Dimension only in v2: length contribution only.
      final double val = v2.iterDoubleValue(i2);
      l2 += val * val;
      i2 = v2.iterAdvance(i2);
    }
    else { // d1 == d2
      // Shared dimension: contributes to dot product and both lengths.
      final double val1 = v1.iterDoubleValue(i1);
      final double val2 = v2.iterDoubleValue(i2);
      l1 += val1 * val1;
      l2 += val2 * val2;
      cross += val1 * val2;
      i1 = v1.iterAdvance(i1);
      i2 = v2.iterAdvance(i2);
    }
  }
  // Drain the remaining entries of whichever vector is longer.
  while(v1.iterValid(i1)) {
    final double val = v1.iterDoubleValue(i1);
    l1 += val * val;
    i1 = v1.iterAdvance(i1);
  }
  while(v2.iterValid(i2)) {
    final double val = v2.iterDoubleValue(i2);
    l2 += val * val;
    i2 = v2.iterAdvance(i2);
  }
  final double a = (cross == 0.) ? 0. : //
  (l1 == 0. || l2 == 0.) ? 1. : //
  FastMath.sqrt((cross / l1) * (cross / l2));
  return (a < 1.) ? a : 1.;
}
/**
 * Cosine similarity of a sparse and a dense vector, clamped to [0, 1],
 * merging the sparse nonzero dimensions against the dense index range.
 *
 * @param v1 Sparse vector
 * @param v2 Dense vector
 * @return cosine similarity, at most 1
 */
public static double angleSparseDense(SparseNumberVector v1, NumberVector v2) {
  // TODO: exploit precomputed length, when available.
  final int dim2 = v2.getDimensionality();
  double l1 = 0., l2 = 0., cross = 0.;
  int i1 = v1.iter(), d2 = 0;
  while(v1.iterValid(i1)) {
    final int d1 = v1.iterDim(i1);
    // Dense dimensions below the next sparse one: length of v2 only.
    while(d2 < d1 && d2 < dim2) {
      final double val = v2.doubleValue(d2);
      l2 += val * val;
      ++d2;
    }
    if(d2 < dim2) {
      // Shared dimension: dot product and both lengths.
      assert (d1 == d2) : "Dimensions not ordered";
      final double val1 = v1.iterDoubleValue(i1);
      final double val2 = v2.doubleValue(d2);
      l1 += val1 * val1;
      l2 += val2 * val2;
      cross += val1 * val2;
      i1 = v1.iterAdvance(i1);
      ++d2;
    }
    else {
      // Sparse dimensions beyond the dense range: length of v1 only.
      final double val = v1.iterDoubleValue(i1);
      l1 += val * val;
      i1 = v1.iterAdvance(i1);
    }
  }
  // Remaining dense dimensions: length of v2 only.
  while(d2 < dim2) {
    final double val = v2.doubleValue(d2);
    l2 += val * val;
    ++d2;
  }
  final double a = (cross == 0.) ? 0. : //
  (l1 == 0. || l2 == 0.) ? 1. : //
  FastMath.sqrt((cross / l1) * (cross / l2));
  return (a < 1.) ? a : 1.;
}
public static double cosAngle(NumberVector v1, NumberVector v2) {
// Java Hotspot appears to optimize these better than if-then-else:
return v1 instanceof SparseNumberVector ? //
v2 instanceof SparseNumberVector ? //
angleSparse((SparseNumberVector) v1, (SparseNumberVector) v2) : //
angleSparseDense((SparseNumberVector) v1, v2) : //
v2 instanceof SparseNumberVector ? //
angleSparseDense((SparseNumberVector) v2, v1) : //
angleDense(v1, v2);
} | java |
/**
 * Upper bound on the cosine similarity between any two points from the
 * given spatial rectangles, clamped to [0, 1]. Falls back to the exact
 * computation when both arguments are plain vectors.
 *
 * @param v1 First rectangle (or vector)
 * @param v2 Second rectangle (or vector)
 * @return maximum possible cosine similarity, at most 1
 */
public static double minCosAngle(SpatialComparable v1, SpatialComparable v2) {
  if(v1 instanceof NumberVector && v2 instanceof NumberVector) {
    return cosAngle((NumberVector) v1, (NumberVector) v2);
  }
  final int dim1 = v1.getDimensionality(), dim2 = v2.getDimensionality();
  final int mindim = (dim1 <= dim2) ? dim1 : dim2;
  // Essentially, we want to compute this:
  // absmax(v1.transposeTimes(v2))/(min(v1.euclideanLength())*min(v2.euclideanLength()));
  // We can just compute all three in parallel.
  double s1 = 0, s2 = 0, l1 = 0, l2 = 0;
  for(int k = 0; k < mindim; k++) {
    final double min1 = v1.getMin(k), max1 = v1.getMax(k);
    final double min2 = v2.getMin(k), max2 = v2.getMax(k);
    // Extreme per-dimension products of any pair of coordinates.
    final double p1 = min1 * min2, p2 = min1 * max2;
    final double p3 = max1 * min2, p4 = max1 * max2;
    s1 += Math.max(Math.max(p1, p2), Math.max(p3, p4));
    s2 += Math.min(Math.min(p1, p2), Math.min(p3, p4));
    // Smallest possible squared coordinate of each rectangle: 0 if the
    // interval contains 0, otherwise the squared boundary closer to 0.
    if(max1 < 0) {
      l1 += max1 * max1;
    }
    else if(min1 > 0) {
      l1 += min1 * min1;
    } // else: 0
    if(max2 < 0) {
      l2 += max2 * max2;
    }
    else if(min2 > 0) {
      l2 += min2 * min2;
    } // else: 0
  }
  // Extra dimensions contribute to the minimum lengths only.
  for(int k = mindim; k < dim1; k++) {
    final double min1 = v1.getMin(k), max1 = v1.getMax(k);
    if(max1 < 0.) {
      l1 += max1 * max1;
    }
    else if(min1 > 0.) {
      l1 += min1 * min1;
    } // else: 0
  }
  for(int k = mindim; k < dim2; k++) {
    final double min2 = v2.getMin(k), max2 = v2.getMax(k);
    if(max2 < 0.) {
      l2 += max2 * max2;
    }
    else if(min2 > 0.) {
      l2 += min2 * min2;
    } // else: 0
  }
  final double cross = Math.max(Math.abs(s1), Math.abs(s2));
  final double a = (cross == 0.) ? 0. : //
  (l1 == 0. || l2 == 0.) ? 1. : //
  FastMath.sqrt((cross / l1) * (cross / l2));
  return (a < 1.) ? a : 1.;
}
public static double angle(NumberVector v1, NumberVector v2, NumberVector o) {
final int dim1 = v1.getDimensionality(), dim2 = v2.getDimensionality(),
dimo = o.getDimensionality();
final int mindim = (dim1 <= dim2) ? dim1 : dim2;
// Essentially, we want to compute this:
// v1' = v1 - o, v2' = v2 - o
// v1'.transposeTimes(v2') / (v1'.euclideanLength()*v2'.euclideanLength());
// We can just compute all three in parallel.
double cross = 0, l1 = 0, l2 = 0;
for(int k = 0; k < mindim; k++) {
final double ok = k < dimo ? o.doubleValue(k) : 0.;
final double r1 = v1.doubleValue(k) - ok;
final double r2 = v2.doubleValue(k) - ok;
cross += r1 * r2;
l1 += r1 * r1;
l2 += r2 * r2;
}
for(int k = mindim; k < dim1; k++) {
final double ok = k < dimo ? o.doubleValue(k) : 0.;
final double r1 = v1.doubleValue(k) - ok;
l1 += r1 * r1;
}
for(int k = mindim; k < dim2; k++) {
final double ok = k < dimo ? o.doubleValue(k) : 0.;
final double r2 = v2.doubleValue(k) - ok;
l2 += r2 * r2;
}
final double a = (cross == 0.) ? 0. : //
(l1 == 0. || l2 == 0.) ? 1. : //
FastMath.sqrt((cross / l1) * (cross / l2));
return (a < 1.) ? a : 1.;
} | java |
public static double dotDense(NumberVector v1, NumberVector v2) {
final int dim1 = v1.getDimensionality(), dim2 = v2.getDimensionality();
final int mindim = (dim1 <= dim2) ? dim1 : dim2;
double dot = 0;
for(int k = 0; k < mindim; k++) {
dot += v1.doubleValue(k) * v2.doubleValue(k);
}
return dot;
} | java |
public static double dotSparse(SparseNumberVector v1, SparseNumberVector v2) {
double dot = 0.;
int i1 = v1.iter(), i2 = v2.iter();
while(v1.iterValid(i1) && v2.iterValid(i2)) {
final int d1 = v1.iterDim(i1), d2 = v2.iterDim(i2);
if(d1 < d2) {
i1 = v1.iterAdvance(i1);
}
else if(d2 < d1) {
i2 = v2.iterAdvance(i2);
}
else { // d1 == d2
dot += v1.iterDoubleValue(i1) * v2.iterDoubleValue(i2);
i1 = v1.iterAdvance(i1);
i2 = v2.iterAdvance(i2);
}
}
return dot;
} | java |
public static double dotSparseDense(SparseNumberVector v1, NumberVector v2) {
final int dim2 = v2.getDimensionality();
double dot = 0.;
for(int i1 = v1.iter(); v1.iterValid(i1);) {
final int d1 = v1.iterDim(i1);
if(d1 >= dim2) {
break;
}
dot += v1.iterDoubleValue(i1) * v2.doubleValue(d1);
i1 = v1.iterAdvance(i1);
}
return dot;
} | java |
public static double dot(NumberVector v1, NumberVector v2) {
// Java Hotspot appears to optimize these better than if-then-else:
return v1 instanceof SparseNumberVector ? //
v2 instanceof SparseNumberVector ? //
dotSparse((SparseNumberVector) v1, (SparseNumberVector) v2) : //
dotSparseDense((SparseNumberVector) v1, v2) : //
v2 instanceof SparseNumberVector ? //
dotSparseDense((SparseNumberVector) v2, v1) : //
dotDense(v1, v2);
} | java |
public static double minDot(SpatialComparable v1, SpatialComparable v2) {
if(v1 instanceof NumberVector && v2 instanceof NumberVector) {
return dot((NumberVector) v1, (NumberVector) v2);
}
final int dim1 = v1.getDimensionality(), dim2 = v2.getDimensionality();
final int mindim = (dim1 <= dim2) ? dim1 : dim2;
// Essentially, we want to compute this:
// absmax(v1.transposeTimes(v2));
double s1 = 0, s2 = 0;
for(int k = 0; k < mindim; k++) {
final double min1 = v1.getMin(k), max1 = v1.getMax(k);
final double min2 = v2.getMin(k), max2 = v2.getMax(k);
final double p1 = min1 * min2, p2 = min1 * max2;
final double p3 = max1 * min2, p4 = max1 * max2;
s1 += Math.max(Math.max(p1, p2), Math.max(p3, p4));
s2 += Math.min(Math.min(p1, p2), Math.min(p3, p4));
}
return Math.max(Math.abs(s1), Math.abs(s2));
} | java |
/**
 * Project a vector onto the selected attributes, producing a vector of the
 * reduced dimensionality via the given factory. Sparse factories keep only
 * the nonzero selected values.
 *
 * @param v Vector to project
 * @param selectedAttributes Bitmask of attributes to keep
 * @param factory Factory used to create the projected vector
 * @param <V> Vector type
 * @return projected vector with cardinality(selectedAttributes) dimensions
 */
public static <V extends NumberVector> V project(V v, long[] selectedAttributes, NumberVector.Factory<V> factory) {
  int card = BitsUtil.cardinality(selectedAttributes);
  if(factory instanceof SparseNumberVector.Factory) {
    final SparseNumberVector.Factory<?> sfactory = (SparseNumberVector.Factory<?>) factory;
    Int2DoubleOpenHashMap values = new Int2DoubleOpenHashMap(card, .8f);
    // Copy only nonzero values of the selected attributes.
    for(int d = BitsUtil.nextSetBit(selectedAttributes, 0); d >= 0; d = BitsUtil.nextSetBit(selectedAttributes, d + 1)) {
      if(v.doubleValue(d) != 0.0) {
        values.put(d, v.doubleValue(d));
      }
    }
    // We can't avoid this cast, because Java doesn't know that V is a
    // SparseNumberVector:
    @SuppressWarnings("unchecked")
    V projectedVector = (V) sfactory.newNumberVector(values, card);
    return projectedVector;
  }
  else {
    // Dense case: pack the selected values into a contiguous array.
    double[] newAttributes = new double[card];
    int i = 0;
    for(int d = BitsUtil.nextSetBit(selectedAttributes, 0); d >= 0; d = BitsUtil.nextSetBit(selectedAttributes, d + 1)) {
      newAttributes[i] = v.doubleValue(d);
      i++;
    }
    return factory.newNumberVector(newAttributes);
  }
}
public void mergeWith(Core o) {
o.num = this.num = (num < o.num ? num : o.num);
} | java |
public Clustering<Model> run(Relation<?> relation) {
HashMap<String, DBIDs> labelMap = multiple ? multipleAssignment(relation) : singleAssignment(relation);
ModifiableDBIDs noiseids = DBIDUtil.newArray();
Clustering<Model> result = new Clustering<>("By Label Clustering", "bylabel-clustering");
for(Entry<String, DBIDs> entry : labelMap.entrySet()) {
DBIDs ids = entry.getValue();
if(ids.size() <= 1) {
noiseids.addDBIDs(ids);
continue;
}
// Build a cluster
Cluster<Model> c = new Cluster<Model>(entry.getKey(), ids, ClusterModel.CLUSTER);
if(noisepattern != null && noisepattern.matcher(entry.getKey()).find()) {
c.setNoise(true);
}
result.addToplevelCluster(c);
}
// Collected noise IDs.
if(noiseids.size() > 0) {
Cluster<Model> c = new Cluster<Model>("Noise", noiseids, ClusterModel.CLUSTER);
c.setNoise(true);
result.addToplevelCluster(c);
}
return result;
} | java |
private HashMap<String, DBIDs> singleAssignment(Relation<?> data) {
HashMap<String, DBIDs> labelMap = new HashMap<>();
for(DBIDIter iditer = data.iterDBIDs(); iditer.valid(); iditer.advance()) {
final Object val = data.get(iditer);
String label = (val != null) ? val.toString() : null;
assign(labelMap, label, iditer);
}
return labelMap;
} | java |
private HashMap<String, DBIDs> multipleAssignment(Relation<?> data) {
HashMap<String, DBIDs> labelMap = new HashMap<>();
for(DBIDIter iditer = data.iterDBIDs(); iditer.valid(); iditer.advance()) {
String[] labels = data.get(iditer).toString().split(" ");
for(String label : labels) {
assign(labelMap, label, iditer);
}
}
return labelMap;
} | java |
/**
 * Add an object id to the group of the given label. To save memory, a
 * singleton group is stored as a plain DBID and only upgraded to a hash
 * set when a second member arrives.
 *
 * @param labelMap Map from label to ids, updated in place
 * @param label Label to assign to (may be null)
 * @param id Object id to add
 */
private void assign(HashMap<String, DBIDs> labelMap, String label, DBIDRef id) {
  if(labelMap.containsKey(label)) {
    DBIDs exist = labelMap.get(label);
    if(exist instanceof DBID) {
      // Upgrade the singleton to a modifiable set holding both ids.
      ModifiableDBIDs n = DBIDUtil.newHashSet();
      n.add((DBID) exist);
      n.add(id);
      labelMap.put(label, n);
    }
    else {
      assert (exist instanceof HashSetModifiableDBIDs);
      assert (exist.size() > 1);
      ((ModifiableDBIDs) exist).add(id);
    }
  }
  else {
    // First member: store as singleton DBID.
    labelMap.put(label, DBIDUtil.deref(id));
  }
}
public void put(double val) {
min = val < min ? val : min;
max = val > max ? val : max;
} | java |
/**
 * Register a drop-shadow SVG filter (offset + blur + blend) in the plot's
 * defs section, unless one with the shadow id already exists.
 *
 * @param svgp Plot to add the filter to
 */
public static void addShadowFilter(SVGPlot svgp) {
  Element shadow = svgp.getIdElement(SHADOW_ID);
  if(shadow == null) {
    // Filter container, enlarged so the shadow is not clipped.
    shadow = svgp.svgElement(SVGConstants.SVG_FILTER_TAG);
    shadow.setAttribute(SVGConstants.SVG_ID_ATTRIBUTE, SHADOW_ID);
    shadow.setAttribute(SVGConstants.SVG_WIDTH_ATTRIBUTE, "140%");
    shadow.setAttribute(SVGConstants.SVG_HEIGHT_ATTRIBUTE, "140%");
    // Step 1: offset the source alpha to the lower right.
    Element offset = svgp.svgElement(SVGConstants.SVG_FE_OFFSET_TAG);
    offset.setAttribute(SVGConstants.SVG_IN_ATTRIBUTE, SVGConstants.SVG_SOURCE_ALPHA_VALUE);
    offset.setAttribute(SVGConstants.SVG_RESULT_ATTRIBUTE, "off");
    offset.setAttribute(SVGConstants.SVG_DX_ATTRIBUTE, "0.1");
    offset.setAttribute(SVGConstants.SVG_DY_ATTRIBUTE, "0.1");
    shadow.appendChild(offset);
    // Step 2: blur the offset silhouette.
    Element gauss = svgp.svgElement(SVGConstants.SVG_FE_GAUSSIAN_BLUR_TAG);
    gauss.setAttribute(SVGConstants.SVG_IN_ATTRIBUTE, "off");
    gauss.setAttribute(SVGConstants.SVG_RESULT_ATTRIBUTE, "blur");
    gauss.setAttribute(SVGConstants.SVG_STD_DEVIATION_ATTRIBUTE, "0.1");
    shadow.appendChild(gauss);
    // Step 3: draw the original graphic on top of the blurred shadow.
    Element blend = svgp.svgElement(SVGConstants.SVG_FE_BLEND_TAG);
    blend.setAttribute(SVGConstants.SVG_IN_ATTRIBUTE, SVGConstants.SVG_SOURCE_GRAPHIC_VALUE);
    blend.setAttribute(SVGConstants.SVG_IN2_ATTRIBUTE, "blur");
    blend.setAttribute(SVGConstants.SVG_MODE_ATTRIBUTE, SVGConstants.SVG_NORMAL_VALUE);
    shadow.appendChild(blend);
    svgp.getDefs().appendChild(shadow);
    svgp.putIdElement(SHADOW_ID, shadow);
  }
}
/**
 * Register a vertical "light" gradient (white highlight fading to a dark
 * bottom edge) in the plot's defs section, unless it already exists.
 *
 * @param svgp Plot to add the gradient to
 */
public static void addLightGradient(SVGPlot svgp) {
  Element gradient = svgp.getIdElement(LIGHT_GRADIENT_ID);
  if(gradient == null) {
    // Vertical linear gradient (top to bottom).
    gradient = svgp.svgElement(SVGConstants.SVG_LINEAR_GRADIENT_TAG);
    gradient.setAttribute(SVGConstants.SVG_ID_ATTRIBUTE, LIGHT_GRADIENT_ID);
    gradient.setAttribute(SVGConstants.SVG_X1_ATTRIBUTE, "0");
    gradient.setAttribute(SVGConstants.SVG_Y1_ATTRIBUTE, "0");
    gradient.setAttribute(SVGConstants.SVG_X2_ATTRIBUTE, "0");
    gradient.setAttribute(SVGConstants.SVG_Y2_ATTRIBUTE, "1");
    // Opaque white at the top...
    Element stop0 = svgp.svgElement(SVGConstants.SVG_STOP_TAG);
    stop0.setAttribute(SVGConstants.SVG_STOP_COLOR_ATTRIBUTE, "white");
    stop0.setAttribute(SVGConstants.SVG_STOP_OPACITY_ATTRIBUTE, "1");
    stop0.setAttribute(SVGConstants.SVG_OFFSET_ATTRIBUTE, "0");
    gradient.appendChild(stop0);
    // ...fading to fully transparent by 40%...
    Element stop04 = svgp.svgElement(SVGConstants.SVG_STOP_TAG);
    stop04.setAttribute(SVGConstants.SVG_STOP_COLOR_ATTRIBUTE, "white");
    stop04.setAttribute(SVGConstants.SVG_STOP_OPACITY_ATTRIBUTE, "0");
    stop04.setAttribute(SVGConstants.SVG_OFFSET_ATTRIBUTE, ".4");
    gradient.appendChild(stop04);
    // ...transparent black from 60%...
    Element stop06 = svgp.svgElement(SVGConstants.SVG_STOP_TAG);
    stop06.setAttribute(SVGConstants.SVG_STOP_COLOR_ATTRIBUTE, "black");
    stop06.setAttribute(SVGConstants.SVG_STOP_OPACITY_ATTRIBUTE, "0");
    stop06.setAttribute(SVGConstants.SVG_OFFSET_ATTRIBUTE, ".6");
    gradient.appendChild(stop06);
    // ...to half-opaque black at the bottom.
    Element stop1 = svgp.svgElement(SVGConstants.SVG_STOP_TAG);
    stop1.setAttribute(SVGConstants.SVG_STOP_COLOR_ATTRIBUTE, "black");
    stop1.setAttribute(SVGConstants.SVG_STOP_OPACITY_ATTRIBUTE, ".5");
    stop1.setAttribute(SVGConstants.SVG_OFFSET_ATTRIBUTE, "1");
    gradient.appendChild(stop1);
    svgp.getDefs().appendChild(gradient);
    svgp.putIdElement(LIGHT_GRADIENT_ID, gradient);
  }
}
public static Element makeCheckmark(SVGPlot svgp) {
Element checkmark = svgp.svgElement(SVGConstants.SVG_PATH_TAG);
checkmark.setAttribute(SVGConstants.SVG_D_ATTRIBUTE, SVG_CHECKMARK_PATH);
checkmark.setAttribute(SVGConstants.SVG_FILL_ATTRIBUTE, SVGConstants.CSS_BLACK_VALUE);
checkmark.setAttribute(SVGConstants.SVG_STROKE_ATTRIBUTE, SVGConstants.CSS_NONE_VALUE);
return checkmark;
} | java |
public double continueToMargin(double[] origin, double[] delta) {
assert (delta.length == 2 && origin.length == 2);
double factor = Double.POSITIVE_INFINITY;
if(delta[0] > 0) {
factor = Math.min(factor, (maxx - origin[0]) / delta[0]);
}
else if(delta[0] < 0) {
factor = Math.min(factor, (origin[0] - minx) / -delta[0]);
}
if(delta[1] > 0) {
factor = Math.min(factor, (maxy - origin[1]) / delta[1]);
}
else if(delta[1] < 0) {
factor = Math.min(factor, (origin[1] - miny) / -delta[1]);
}
return factor;
} | java |
/**
 * Clear the page file by truncating the backing file to the header size,
 * discarding all stored pages.
 *
 * Wraps the checked IOException into a RuntimeException, as the interface
 * does not declare checked exceptions.
 */
@Override
public void clear() {
  try {
    file.setLength(header.size());
  }
  catch(IOException e) {
    throw new RuntimeException(e);
  }
}
private double deviation(double[] delta, double[][] beta) {
final double a = squareSum(delta);
final double b = squareSum(transposeTimes(beta, delta));
return (a > b) ? FastMath.sqrt(a - b) : 0.;
} | java |
/**
 * Sample hyperplane candidates and keep the one whose distance histogram
 * yields the best-separating threshold.
 *
 * @param relation Data relation
 * @param currentids Objects of the current cluster candidate
 * @param dimension Subspace dimensionality to test
 * @param r Random generator
 * @return best separation found (threshold, goodness, origin, basis)
 */
private Separation findSeparation(Relation<NumberVector> relation, DBIDs currentids, int dimension, Random r) {
  Separation separation = new Separation();
  // determine the number of samples needed, to secure that with a specific
  // probability
  // in at least on sample every sampled point is from the same cluster.
  int samples = (int) Math.min(LOG_NOT_FROM_ONE_CLUSTER_PROBABILITY / (FastMath.log1p(-FastMath.powFast(samplingLevel, -dimension))), (double) currentids.size());
  // System.out.println("Number of samples: " + samples);
  int remaining_retries = 100;
  for(int i = 1; i <= samples; i++) {
    // Sample dimension+1 points: one origin plus a candidate basis.
    DBIDs sample = DBIDUtil.randomSample(currentids, dimension + 1, r);
    final DBIDIter iter = sample.iter();
    // Use first as origin
    double[] originV = relation.get(iter).toArray();
    iter.advance();
    // Build orthogonal basis from remainder
    double[][] basis;
    {
      List<double[]> vectors = new ArrayList<>(sample.size() - 1);
      for(; iter.valid(); iter.advance()) {
        double[] vec = relation.get(iter).toArray();
        vectors.add(minusEquals(vec, originV));
      }
      // generate orthogonal basis
      basis = generateOrthonormalBasis(vectors);
      if(basis == null) {
        // Linearly dependent sample: retry this iteration with a
        // new sample (bounded by remaining_retries).
        i--;
        if(--remaining_retries < 0) {
          throw new TooManyRetriesException("Too many retries in sampling, and always a linear dependant data set.");
        }
        continue;
      }
    }
    // Generate and fill a histogram of distances to the sampled subspace,
    // weighted so that the histogram mass sums to (about) one.
    DoubleDynamicHistogram histogram = new DoubleDynamicHistogram(BINS);
    double w = 1.0 / currentids.size();
    for(DBIDIter iter2 = currentids.iter(); iter2.valid(); iter2.advance()) {
      // Skip sampled points
      if(sample.contains(iter2)) {
        continue;
      }
      double[] vec = minusEquals(relation.get(iter2).toArray(), originV);
      final double distance = deviation(vec, basis);
      histogram.increment(distance, w);
    }
    double[] th = findAndEvaluateThreshold(histogram); // evaluate threshold
    // Keep the candidate with the best separation goodness.
    if(th[1] > separation.goodness) {
      separation.goodness = th[1];
      separation.threshold = th[0];
      separation.originV = originV;
      separation.basis = basis;
    }
  }
  return separation;
}
/**
 * Kernel-induced distance between two objects: the square root of the
 * squared distance derived from the kernel matrix.
 *
 * @param o1 First object
 * @param o2 Second object
 * @return kernel-induced distance
 */
public double getDistance(final DBIDRef o1, final DBIDRef o2) {
  return FastMath.sqrt(getSquaredDistance(o1, o2));
}
/**
 * Squared kernel-induced distance, via the standard identity
 * d^2(x, y) = k(x, x) + k(y, y) - 2 k(x, y) on the kernel matrix.
 *
 * @param id1 First object
 * @param id2 Second object
 * @return squared kernel-induced distance
 */
public double getSquaredDistance(final DBIDRef id1, final DBIDRef id2) {
  final int o1 = idmap.getOffset(id1), o2 = idmap.getOffset(id2);
  return kernel[o1][o1] + kernel[o2][o2] - 2 * kernel[o1][o2];
}
/**
 * Look up the kernel similarity of two objects in the kernel matrix.
 *
 * @param id1 First object
 * @param id2 Second object
 * @return kernel value k(id1, id2)
 */
public double getSimilarity(DBIDRef id1, DBIDRef id2) {
  return kernel[idmap.getOffset(id1)][idmap.getOffset(id2)];
}
protected double[][] initialMeans(Database database, Relation<V> relation) {
Duration inittime = getLogger().newDuration(initializer.getClass() + ".time").begin();
double[][] means = initializer.chooseInitialMeans(database, relation, k, getDistanceFunction());
getLogger().statistics(inittime.end());
return means;
} | java |
public static void plusEquals(double[] sum, NumberVector vec) {
for(int d = 0; d < sum.length; d++) {
sum[d] += vec.doubleValue(d);
}
} | java |
public static void minusEquals(double[] sum, NumberVector vec) {
for(int d = 0; d < sum.length; d++) {
sum[d] -= vec.doubleValue(d);
}
} | java |
public static void plusMinusEquals(double[] add, double[] sub, NumberVector vec) {
for(int d = 0; d < add.length; d++) {
final double v = vec.doubleValue(d);
add[d] += v;
sub[d] -= v;
}
} | java |
/**
 * Incrementally update a mean vector after adding (op = +1) or removing
 * (op = -1) a point, using the numerically stable update
 * mean += (vec - mean) * op / newsize.
 *
 * @param mean Mean vector, modified in place
 * @param vec Point that was added or removed
 * @param newsize New number of points contributing to the mean
 * @param op +1 for addition, -1 for removal
 */
protected static void incrementalUpdateMean(double[] mean, NumberVector vec, int newsize, double op) {
  if(newsize == 0) {
    return; // Keep old mean
  }
  // Note: numerically stabilized version:
  VMath.plusTimesEquals(mean, VMath.minusEquals(vec.toArray(), mean), op / newsize);
}
public static int fastModPrime(long data) {
// Mix high and low 32 bit:
int high = (int) (data >>> 32);
// Use fast multiplication with 5 for high:
int alpha = ((int) data) + (high << 2 + high);
// Note that in Java, PRIME will be negative.
if(alpha < 0 && alpha > -5) {
alpha = alpha + 5;
}
return alpha;
} | java |
/**
 * Recursive M-tree range query: collect all objects within distance
 * {@code r_q} of the query {@code q} from the subtree rooted at
 * {@code node}. Uses the triangle inequality on precomputed parent
 * distances to skip entries without computing their distance to the query.
 *
 * @param o_p routing object of the parent node, or null at the root
 * @param node node to scan
 * @param q query object
 * @param r_q query radius
 * @param result output list; (distance, id) pairs are appended here
 */
private void doRangeQuery(DBID o_p, AbstractMTreeNode<O, ?, ?> node, O q, double r_q, ModifiableDoubleDBIDList result) {
  // Distance from the parent routing object to the query (0 at the root):
  double d1 = 0.;
  if(o_p != null) {
    d1 = distanceQuery.distance(o_p, q);
    index.statistics.countDistanceCalculation();
  }
  if(!node.isLeaf()) {
    for(int i = 0; i < node.getNumEntries(); i++) {
      MTreeEntry entry = node.getEntry(i);
      DBID o_r = entry.getRoutingObjectID();
      double r_or = entry.getCoveringRadius();
      double d2 = o_p != null ? entry.getParentDistance() : 0.;
      // Lower bound on dist(o_r, q) by the triangle inequality:
      double diff = Math.abs(d1 - d2);
      double sum = r_q + r_or;
      if(diff <= sum) {
        // Cannot prune cheaply; compute the actual distance:
        double d3 = distanceQuery.distance(o_r, q);
        index.statistics.countDistanceCalculation();
        if(d3 <= sum) {
          // Covering ball may intersect the query ball; descend.
          AbstractMTreeNode<O, ?, ?> child = index.getNode(((DirectoryEntry) entry).getPageID());
          doRangeQuery(o_r, child, q, r_q, result);
        }
      }
    }
  }
  else {
    for(int i = 0; i < node.getNumEntries(); i++) {
      MTreeEntry entry = node.getEntry(i);
      DBID o_j = entry.getRoutingObjectID();
      double d2 = o_p != null ? entry.getParentDistance() : 0.;
      // Lower bound on dist(o_j, q); skip if it already exceeds the radius:
      double diff = Math.abs(d1 - d2);
      if(diff <= r_q) {
        double d3 = distanceQuery.distance(o_j, q);
        index.statistics.countDistanceCalculation();
        if(d3 <= r_q) {
          result.add(d3, o_j);
        }
      }
    }
  }
}
public static double pdf(double x, double mu, double beta) {
final double z = (x - mu) / beta;
if(x == Double.NEGATIVE_INFINITY) {
return 0.;
}
return FastMath.exp(-z - FastMath.exp(-z)) / beta;
} | java |
public static double logpdf(double x, double mu, double beta) {
if(x == Double.NEGATIVE_INFINITY) {
return Double.NEGATIVE_INFINITY;
}
final double z = (x - mu) / beta;
return -z - FastMath.exp(-z) - FastMath.log(beta);
} | java |
public static double cdf(double val, double mu, double beta) {
return FastMath.exp(-FastMath.exp(-(val - mu) / beta));
} | java |
public static double quantile(double val, double mu, double beta) {
return mu - beta * FastMath.log(-FastMath.log(val));
} | java |
public void setPartitions(Relation<V> relation) throws IllegalArgumentException {
if((FastMath.log(partitions) / FastMath.log(2)) != (int) (FastMath.log(partitions) / FastMath.log(2))) {
throw new IllegalArgumentException("Number of partitions must be a power of 2!");
}
final int dimensions = RelationUtil.dimensionality(relation);
final int size = relation.size();
splitPositions = new double[dimensions][partitions + 1];
for(int d = 0; d < dimensions; d++) {
double[] tempdata = new double[size];
int j = 0;
for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
tempdata[j] = relation.get(iditer).doubleValue(d);
j += 1;
}
Arrays.sort(tempdata);
for(int b = 0; b < partitions; b++) {
int start = (int) (b * size / (double) partitions);
splitPositions[d][b] = tempdata[start];
}
// make sure that last object will be included
splitPositions[d][partitions] = tempdata[size - 1] + 0.000001;
}
} | java |
public long getScannedPages() {
int vacapacity = pageSize / VectorApproximation.byteOnDisk(splitPositions.length, partitions);
long vasize = (long) Math.ceil((vectorApprox.size()) / (1.0 * vacapacity));
return vasize * scans;
} | java |
/**
 * Back transformation step of the hqr2 eigendecomposition: accumulate the
 * transformations stored in H into the eigenvector matrix V, computing
 * V[i][j] = sum_k V[i][k] * H[k][j] for columns j = nn-1 down to low and
 * rows i in [low, high]. The update is done in place: the old V[i][j] may
 * be read by the dot product before being overwritten.
 *
 * @param nn matrix order (exclusive upper column index)
 * @param low lower index bound of the active block
 * @param high upper index bound of the active block
 */
private void hqr2BackTransformation(int nn, int low, int high) {
  for(int j = nn - 1; j >= low; j--) {
    // Only rows low .. min(j, high) contribute to column j:
    final int last = j < high ? j : high;
    for(int i = low; i <= high; i++) {
      final double[] Vi = V[i];
      double sum = 0.;
      for(int k = low; k <= last; k++) {
        sum += Vi[k] * H[k][j];
      }
      Vi[j] = sum;
    }
  }
}
/**
 * Refine an approximate Gamma distribution quantile by Newton iterations
 * on the log CDF, i.e. solve logcdf(x, k, theta) = logpt for x.
 *
 * @param logpt target log probability log(p)
 * @param k Gamma shape parameter (as used by logcdf/logpdf)
 * @param theta Gamma scale-or-rate parameter — follows the convention of
 *        logcdf/logpdf in this class; confirm there
 * @param maxit maximum number of Newton iterations
 * @param x initial estimate of the quantile
 * @return refined quantile estimate (0 if the target mass lies at/below 0)
 */
protected static double gammaQuantileNewtonRefinement(final double logpt, final double k, final double theta, final int maxit, double x) {
  final double EPS_N = 1e-15; // Precision threshold
  // 0 is not possible, try MIN_NORMAL instead
  if(x <= 0) {
    x = Double.MIN_NORMAL;
  }
  // Current estimation
  double logpc = logcdf(x, k, theta);
  if(x == Double.MIN_NORMAL && logpc > logpt * (1. + 1e-7)) {
    return 0.;
  }
  if(logpc == Double.NEGATIVE_INFINITY) {
    return 0.;
  }
  // Refine by newton iterations
  for(int i = 0; i < maxit; i++) {
    // Error of current approximation
    final double logpe = logpc - logpt;
    if(Math.abs(logpe) < Math.abs(EPS_N * logpt)) {
      break;
    }
    // Step size is controlled by PDF:
    final double g = logpdf(x, k, theta);
    if(g == Double.NEGATIVE_INFINITY) {
      break;
    }
    // Newton step on the log scale: dx = (logpc - logpt) * cdf / pdf,
    // with cdf/pdf evaluated as exp(logpc - g) for numerical stability.
    final double newx = x - logpe * FastMath.exp(logpc - g);
    // New estimate:
    logpc = logcdf(newx, k, theta);
    if(Math.abs(logpc - logpt) > Math.abs(logpe) || (i > 0 && Math.abs(logpc - logpt) == Math.abs(logpe))) {
      // no further improvement
      break;
    }
    x = newx;
  }
  return x;
}
@Override
public Element useMarker(SVGPlot plot, Element parent, double x, double y, int stylenr, double size) {
Element marker = plot.svgCircle(x, y, size * .5);
final String col;
if(stylenr == -1) {
col = dotcolor;
}
else if(stylenr == -2) {
col = greycolor;
}
else {
col = colors.getColor(stylenr);
}
SVGUtil.setStyle(marker, SVGConstants.CSS_FILL_PROPERTY + ":" + col);
parent.appendChild(marker);
return marker;
} | java |
public Clustering<DendrogramModel> run(PointerHierarchyRepresentationResult pointerresult) {
Clustering<DendrogramModel> result = new Instance(pointerresult).run();
result.addChildResult(pointerresult);
return result;
} | java |
/**
 * Error function erf(x), evaluated by piecewise polynomial approximations
 * with the coefficient tables ERF_COEFF1 (|x| &lt; 2.2) and ERF_COEFF2
 * (2.2 &le; |x| &lt; 6.9); larger finite arguments saturate to ±1.
 *
 * @param x argument
 * @return erf(x) in [-1, 1]; NaN for NaN input
 */
public static double erf(double x) {
  // Work on |x|; erf is odd, so the sign is restored at the end.
  final double w = x < 0 ? -x : x;
  double y;
  if(w < 2.2) {
    // Table row selected by the integer part of w*w; polynomial in the
    // fractional part t, 13 coefficients per row.
    double t = w * w;
    int k = (int) t;
    t -= k;
    k *= 13;
    y = ((((((((((((ERF_COEFF1[k] * t + ERF_COEFF1[k + 1]) * t + //
    ERF_COEFF1[k + 2]) * t + ERF_COEFF1[k + 3]) * t + ERF_COEFF1[k + 4]) * t + //
    ERF_COEFF1[k + 5]) * t + ERF_COEFF1[k + 6]) * t + ERF_COEFF1[k + 7]) * t + //
    ERF_COEFF1[k + 8]) * t + ERF_COEFF1[k + 9]) * t + ERF_COEFF1[k + 10]) * t + //
    ERF_COEFF1[k + 11]) * t + ERF_COEFF1[k + 12]) * w;
  }
  else if(w < 6.9) {
    // Table row selected by the integer part of w; polynomial in the
    // fractional part t, 13 coefficients per row.
    int k = (int) w;
    double t = w - k;
    k = 13 * (k - 2);
    y = (((((((((((ERF_COEFF2[k] * t + ERF_COEFF2[k + 1]) * t + //
    ERF_COEFF2[k + 2]) * t + ERF_COEFF2[k + 3]) * t + ERF_COEFF2[k + 4]) * t + //
    ERF_COEFF2[k + 5]) * t + ERF_COEFF2[k + 6]) * t + ERF_COEFF2[k + 7]) * t + //
    ERF_COEFF2[k + 8]) * t + ERF_COEFF2[k + 9]) * t + ERF_COEFF2[k + 10]) * t + //
    ERF_COEFF2[k + 11]) * t + ERF_COEFF2[k + 12];
    // Compute y = 1 - p^16 via repeated squaring of the polynomial value p:
    y *= y;
    y *= y;
    y *= y;
    y = 1 - y * y;
  }
  else if(w == w) {
    // Large finite argument: erf saturates at 1.
    y = 1;
  }
  else {
    // w != w means the input was NaN.
    return Double.NaN;
  }
  return x < 0 ? -y : y;
}
public static double standardNormalQuantile(double d) {
return (d == 0) ? Double.NEGATIVE_INFINITY : //
(d == 1) ? Double.POSITIVE_INFINITY : //
(Double.isNaN(d) || d < 0 || d > 1) ? Double.NaN //
: MathUtil.SQRT2 * -erfcinv(2 * d);
} | java |
@Override
public <N extends SpatialComparable> List<List<N>> partition(List<N> spatialObjects, int minEntries, int maxEntries) {
List<List<N>> partitions = new ArrayList<>();
List<N> objects = new ArrayList<>(spatialObjects);
while (!objects.isEmpty()) {
StringBuilder msg = new StringBuilder();
// get the split axis and split point
int splitAxis = chooseMaximalExtendedSplitAxis(objects);
int splitPoint = chooseBulkSplitPoint(objects.size(), minEntries, maxEntries);
if (LOG.isDebugging()) {
msg.append("\nsplitAxis ").append(splitAxis);
msg.append("\nsplitPoint ").append(splitPoint);
}
// sort in the right dimension
Collections.sort(objects, new SpatialSingleMinComparator(splitAxis));
// insert into partition
List<N> partition1 = new ArrayList<>();
for (int i = 0; i < splitPoint; i++) {
N o = objects.remove(0);
partition1.add(o);
}
partitions.add(partition1);
// copy array
if (LOG.isDebugging()) {
msg.append("\ncurrent partition ").append(partition1);
msg.append("\nremaining objects # ").append(objects.size());
LOG.debugFine(msg.toString());
}
}
if (LOG.isDebugging()) {
LOG.debugFine("partitions " + partitions);
}
return partitions;
} | java |
private int chooseMaximalExtendedSplitAxis(List<? extends SpatialComparable> objects) {
// maximum and minimum value for the extension
int dimension = objects.get(0).getDimensionality();
double[] maxExtension = new double[dimension];
double[] minExtension = new double[dimension];
Arrays.fill(minExtension, Double.MAX_VALUE);
// compute min and max value in each dimension
for (SpatialComparable object : objects) {
for (int d = 0; d < dimension; d++) {
double min, max;
min = object.getMin(d);
max = object.getMax(d);
if (maxExtension[d] < max) {
maxExtension[d] = max;
}
if (minExtension[d] > min) {
minExtension[d] = min;
}
}
}
// set split axis to dim with maximal extension
int splitAxis = -1;
double max = 0;
for (int d = 0; d < dimension; d++) {
double currentExtension = maxExtension[d] - minExtension[d];
if (max < currentExtension) {
max = currentExtension;
splitAxis = d;
}
}
return splitAxis;
} | java |
/**
 * Set the total number of steps of this progress.
 *
 * @param total new total, must be at least the number of processed steps
 * @throws IllegalArgumentException if the already processed count exceeds
 *         the new total
 */
public void setTotal(int total) throws IllegalArgumentException {
  if(getProcessed() > total) {
    throw new IllegalArgumentException(getProcessed() + " exceeds total: " + total);
  }
  this.total = total;
}
@SuppressWarnings("unchecked")
protected <T> T get(DBIDRef id, int index) {
Object[] d = data.get(DBIDUtil.deref(id));
if(d == null) {
return null;
}
return (T) d[index];
} | java |
@SuppressWarnings("unchecked")
protected <T> T set(DBIDRef id, int index, T value) {
Object[] d = data.get(DBIDUtil.deref(id));
if(d == null) {
d = new Object[rlen];
data.put(DBIDUtil.deref(id), d);
}
T ret = (T) d[index];
d[index] = value;
return ret;
} | java |
public UniformDistribution estimate(DoubleMinMax mm) {
return new UniformDistribution(Math.max(mm.getMin(), -Double.MAX_VALUE), Math.min(mm.getMax(), Double.MAX_VALUE));
} | java |
public static boolean canVisualize(Relation<?> rel, AbstractMTree<?, ?, ?, ?> tree) {
if(!TypeUtil.NUMBER_VECTOR_FIELD.isAssignableFromType(rel.getDataTypeInformation())) {
return false;
}
return getLPNormP(tree) > 0;
} | java |
void initializeRandomAttributes(SimpleTypeInformation<V> in) {
int d = ((VectorFieldTypeInformation<V>) in).getDimensionality();
selectedAttributes = BitsUtil.random(k, d, rnd.getSingleThreadedRandom());
} | java |
protected void singleEnsemble(final double[] ensemble, final NumberVector vec) {
double[] buf = new double[1];
for(int i = 0; i < ensemble.length; i++) {
buf[0] = vec.doubleValue(i);
ensemble[i] = voting.combine(buf, 1);
if(Double.isNaN(ensemble[i])) {
LOG.warning("NaN after combining: " + FormatUtil.format(buf) + " " + voting.toString());
}
}
applyScaling(ensemble, scaling);
} | java |
public static String getFullDescription(Parameter<?> param) {
StringBuilder description = new StringBuilder(1000) //
.append(param.getShortDescription()).append(FormatUtil.NEWLINE);
param.describeValues(description);
if(!FormatUtil.endsWith(description, FormatUtil.NEWLINE)) {
description.append(FormatUtil.NEWLINE);
}
if(param.hasDefaultValue()) {
description.append("Default: ").append(param.getDefaultValueAsString()).append(FormatUtil.NEWLINE);
}
List<? extends ParameterConstraint<?>> constraints = param.getConstraints();
if(constraints != null && !constraints.isEmpty()) {
description.append((constraints.size() == 1) ? "Constraint: " : "Constraints: ") //
.append(constraints.get(0).getDescription(param.getOptionID().getName()));
for(int i = 1; i < constraints.size(); i++) {
description.append(", ").append(constraints.get(i).getDescription(param.getOptionID().getName()));
}
description.append(FormatUtil.NEWLINE);
}
return description.toString();
} | java |
private static void println(StringBuilder buf, int width, String data) {
for(String line : FormatUtil.splitAtLastBlank(data, width)) {
buf.append(line);
if(!line.endsWith(FormatUtil.NEWLINE)) {
buf.append(FormatUtil.NEWLINE);
}
}
} | java |
/**
 * Compute a centroid (or model prototype) for each cluster, handling noise
 * clusters and singletons according to the noise option.
 *
 * @param rel data relation
 * @param clusters clusters to process, in order
 * @param centroids output array, one slot per cluster; slots of skipped
 *        noise clusters are set to null
 * @param noiseOption how noise clusters are treated
 * @return number of points in ignored noise clusters
 */
public static int centroids(Relation<? extends NumberVector> rel, List<? extends Cluster<?>> clusters, NumberVector[] centroids, NoiseHandling noiseOption) {
  assert (centroids.length == clusters.size());
  int ignorednoise = 0;
  Iterator<? extends Cluster<?>> ci = clusters.iterator();
  for(int i = 0; ci.hasNext(); i++) {
    Cluster<?> cluster = ci.next();
    if(cluster.size() <= 1 || cluster.isNoise()) {
      switch(noiseOption){
      case IGNORE_NOISE:
        ignorednoise += cluster.size();
        // Intentional fall-through: the centroid slot is cleared in both
        // the IGNORE_NOISE and TREAT_NOISE_AS_SINGLETONS cases.
      case TREAT_NOISE_AS_SINGLETONS:
        centroids[i] = null;
        continue;
      case MERGE_NOISE:
        break; // Treat as cluster below
      }
    }
    centroids[i] = ModelUtil.getPrototypeOrCentroid(cluster.getModel(), rel, cluster.getIDs());
  }
  return ignorednoise;
}
public static double cdf(double val, double rate) {
final double v = .5 * FastMath.exp(-rate * Math.abs(val));
return (v == Double.POSITIVE_INFINITY) ? ((val <= 0) ? 0 : 1) : //
(val < 0) ? v : 1 - v;
} | java |
protected double maxDistance(DoubleDBIDList elems) {
double max = 0;
for(DoubleDBIDListIter it = elems.iter(); it.valid(); it.advance()) {
final double v = it.doubleValue();
max = max > v ? max : v;
}
return max;
} | java |
/**
 * Move all candidates with distance greater than fmax into the collect
 * list, removing them from the candidate list.
 *
 * @param candidates candidate list, pruned in place
 * @param fmax maximum covered distance
 * @param collect output list for the removed (not covered) entries
 */
protected void excludeNotCovered(ModifiableDoubleDBIDList candidates, double fmax, ModifiableDoubleDBIDList collect) {
  for(DoubleDBIDListIter it = candidates.iter(); it.valid();) {
    if(it.doubleValue() > fmax) {
      collect.add(it.doubleValue(), it);
      // removeSwap moves the last element into the current slot, so we
      // deliberately do NOT advance here: re-examine the swapped-in entry.
      candidates.removeSwap(it.getOffset());
    }
    else {
      it.advance(); // Keep in candidates
    }
  }
}
/**
 * Collect all candidates within the cover radius fmax of the point cur,
 * moving them from the candidate list into the collect list.
 *
 * @param cur current center point (expected to be the first candidate,
 *        which is skipped)
 * @param candidates candidate list, pruned in place
 * @param fmax cover radius
 * @param collect output list for covered entries, must be empty on entry
 */
protected void collectByCover(DBIDRef cur, ModifiableDoubleDBIDList candidates, double fmax, ModifiableDoubleDBIDList collect) {
  assert (collect.size() == 0) : "Not empty";
  DoubleDBIDListIter it = candidates.iter().advance(); // Except first = cur!
  while(it.valid()) {
    assert (!DBIDUtil.equal(cur, it));
    final double dist = distance(cur, it);
    if(dist <= fmax) { // Collect
      collect.add(dist, it);
      // removeSwap moves the last element into the current slot; do NOT
      // advance, so the swapped-in entry is examined next.
      candidates.removeSwap(it.getOffset());
    }
    else {
      it.advance(); // Keep in candidates, outside cover radius.
    }
  }
}
/**
 * Compute a kernel density estimate (dens) and a per-point weight-based
 * variance (var) for the given series.
 *
 * NOTE(review): the early {@code break} once a kernel contribution drops
 * below epsilon assumes contributions shrink monotonically with index
 * distance — i.e. that {@code data} is sorted; confirm with callers.
 *
 * @param data input values (presumably sorted ascending — see note)
 * @param min lower bound of the value domain
 * @param max upper bound of the value domain
 * @param kernel kernel function
 * @param window number of windows the domain is divided into; the kernel
 *        half-width is ((max - min) / window) / 2
 * @param epsilon cutoff threshold for negligible kernel contributions
 */
private void process(double[] data, double min, double max, KernelDensityFunction kernel, int window, double epsilon) {
  dens = new double[data.length];
  var = new double[data.length];
  // This is the desired bandwidth of the kernel.
  double halfwidth = ((max - min) / window) * .5;
  for (int current = 0; current < data.length; current++) {
    double value = 0.0;
    // Accumulate contributions of points at and left of the current point:
    for (int i = current; i >= 0; i--) {
      double delta = Math.abs(data[i] - data[current]) / halfwidth;
      final double contrib = kernel.density(delta);
      value += contrib;
      if (contrib < epsilon) {
        break;
      }
    }
    // Contributions of points right of the current point:
    for (int i = current + 1; i < data.length; i++) {
      double delta = Math.abs(data[i] - data[current]) / halfwidth;
      final double contrib = kernel.density(delta);
      value += contrib;
      if (contrib < epsilon) {
        break;
      }
    }
    // Clip the kernel window at the domain borders [min, max]:
    double realwidth = (Math.min(data[current] + halfwidth, max) - Math.max(min, data[current] - halfwidth));
    double weight = realwidth / (2 * halfwidth);
    dens[current] = value / (data.length * realwidth * .5);
    // Boundary points get a larger variance (smaller effective weight):
    var[current] = 1 / weight;
  }
}
public static double[] computeSimilarityMatrix(DependenceMeasure sim, Relation<? extends NumberVector> rel) {
final int dim = RelationUtil.dimensionality(rel);
// TODO: we could use less memory (no copy), but this would likely be
// slower. Maybe as a fallback option?
double[][] data = new double[dim][rel.size()];
int r = 0;
for(DBIDIter it = rel.iterDBIDs(); it.valid(); it.advance(), r++) {
NumberVector v = rel.get(it);
for(int d = 0; d < dim; d++) {
data[d][r] = v.doubleValue(d);
}
}
return sim.dependence(DoubleArrayAdapter.STATIC, Arrays.asList(data));
} | java |
protected N buildSpanningTree(int dim, double[] mat, Layout layout) {
assert (layout.edges == null || layout.edges.size() == 0);
int[] iedges = PrimsMinimumSpanningTree.processDense(mat, new LowerTriangularAdapter(dim));
int root = findOptimalRoot(iedges);
// Convert edges:
ArrayList<Edge> edges = new ArrayList<>(iedges.length >> 1);
for(int i = 1; i < iedges.length; i += 2) {
edges.add(new Edge(iedges[i - 1], iedges[i]));
}
layout.edges = edges;
// Prefill nodes array with nulls.
ArrayList<N> nodes = new ArrayList<>(dim);
for(int i = 0; i < dim; i++) {
nodes.add(null);
}
layout.nodes = nodes;
N rootnode = buildTree(iedges, root, -1, nodes);
return rootnode;
} | java |
protected N buildTree(int[] msg, int cur, int parent, ArrayList<N> nodes) {
// Count the number of children:
int c = 0;
for(int i = 1; i < msg.length; i += 2) {
if((msg[i - 1] == cur && msg[i] != parent) || (msg[i] == cur && msg[i - 1] != parent)) {
c++;
}
}
// Build children:
List<N> children = Collections.emptyList();
if(c > 0) {
children = new ArrayList<>(c);
for(int i = 1; i < msg.length; i += 2) {
if(msg[i - 1] == cur && msg[i] != parent) {
children.add(buildTree(msg, msg[i], cur, nodes));
}
else if(msg[i] == cur && msg[i - 1] != parent) {
children.add(buildTree(msg, msg[i - 1], cur, nodes));
}
}
}
N node = makeNode(cur, children);
nodes.set(cur, node);
return node;
} | java |
protected int maxDepth(Layout.Node node) {
int depth = 0;
for(int i = 0; i < node.numChildren(); i++) {
depth = Math.max(depth, maxDepth(node.getChild(i)));
}
return depth + 1;
} | java |
/**
 * Initialize the index: set up the page file with a fresh header, and —
 * when the page file reports an existing file (presumably what the true
 * return of initialize() indicates; confirm with the PageFile contract) —
 * restore the index parameters from that header. Finally create the root
 * entry.
 */
@Override
public void initialize() {
  TreeIndexHeader header = createHeader();
  if(this.file.initialize(header)) {
    initializeFromFile(header, file);
  }
  rootEntry = createRootEntry();
}
public N getNode(int nodeID) {
if(nodeID == getPageID(rootEntry)) {
return getRoot();
}
else {
return file.readPage(nodeID);
}
} | java |
public void initializeFromFile(TreeIndexHeader header, PageFile<N> file) {
this.dirCapacity = header.getDirCapacity();
this.leafCapacity = header.getLeafCapacity();
this.dirMinimum = header.getDirMinimum();
this.leafMinimum = header.getLeafMinimum();
if(getLogger().isDebugging()) {
StringBuilder msg = new StringBuilder();
msg.append(getClass());
msg.append("\n file = ").append(file.getClass());
getLogger().debugFine(msg.toString());
}
this.initialized = true;
} | java |
protected final void initialize(E exampleLeaf) {
initializeCapacities(exampleLeaf);
// create empty root
createEmptyRoot(exampleLeaf);
final Logging log = getLogger();
if(log.isStatistics()) {
String cls = this.getClass().getName();
log.statistics(new LongStatistic(cls + ".directory.capacity", dirCapacity));
log.statistics(new LongStatistic(cls + ".directory.minfill", dirMinimum));
log.statistics(new LongStatistic(cls + ".leaf.capacity", leafCapacity));
log.statistics(new LongStatistic(cls + ".leaf.minfill", leafMinimum));
}
initialized = true;
} | java |
public static MeanVarianceMinMax[] newArray(int dimensionality) {
MeanVarianceMinMax[] arr = new MeanVarianceMinMax[dimensionality];
for(int i = 0; i < dimensionality; i++) {
arr[i] = new MeanVarianceMinMax();
}
return arr;
} | java |
/**
 * Compute a Gaussian-style weight from a distance, scaled by the standard
 * deviation; returns 1 when stddev is not positive.
 *
 * NOTE(review): a Gaussian kernel would normally use the squared scaled
 * distance in the exponent, i.e. exp(-0.5 * z * z); here the exponent is
 * linear in z, and the result is additionally multiplied by stddev.
 * Confirm this matches the intended weight function.
 *
 * @param distance distance value
 * @param max maximum distance (unused here)
 * @param stddev standard deviation used for scaling
 * @return weight value
 */
@Override
public double getWeight(double distance, double max, double stddev) {
  if(stddev <= 0) {
    return 1;
  }
  double scaleddistance = distance / stddev;
  // exp(-z/2) scaled by stddev — see NOTE above regarding the exponent.
  return stddev * FastMath.exp(-.5 * scaleddistance);
}
protected static <A> int[] sortedIndex(final NumberArrayAdapter<?, A> adapter, final A data, int len) {
int[] s1 = MathUtil.sequence(0, len);
IntegerArrayQuickSort.sort(s1, (x, y) -> Double.compare(adapter.getDouble(data, x), adapter.getDouble(data, y)));
return s1;
} | java |
protected static <A> int[] discretize(NumberArrayAdapter<?, A> adapter, A data, final int len, final int bins) {
double min = adapter.getDouble(data, 0), max = min;
for(int i = 1; i < len; i++) {
double v = adapter.getDouble(data, i);
if(v < min) {
min = v;
}
else if(v > max) {
max = v;
}
}
final double scale = (max > min) ? bins / (max - min) : 1;
int[] discData = new int[len];
for(int i = 0; i < len; i++) {
int bin = (int) Math.floor((adapter.getDouble(data, i) - min) * scale);
discData[i] = bin < 0 ? 0 : bin >= bins ? bins - 1 : bin;
}
return discData;
} | java |
protected void finishGridRow() {
GridBagConstraints constraints = new GridBagConstraints();
constraints.gridwidth = GridBagConstraints.REMAINDER;
constraints.weightx = 0;
final JLabel icon;
if(param.isOptional()) {
if(param.isDefined() && param.tookDefaultValue() && !(param instanceof Flag)) {
// TODO: better icon for default value?
icon = new JLabel(StockIcon.getStockIcon(StockIcon.DIALOG_INFORMATION));
icon.setToolTipText("Default value: "+param.getDefaultValueAsString());
}
else {
icon = new JLabel();
icon.setMinimumSize(new Dimension(16, 16));
}
}
else {
if(!param.isDefined()) {
icon = new JLabel(StockIcon.getStockIcon(StockIcon.DIALOG_ERROR));
icon.setToolTipText("Missing value.");
}
else {
icon = new JLabel();
icon.setMinimumSize(new Dimension(16, 16));
}
}
parent.add(icon, constraints);
} | java |
private double normalize(int d, double val) {
d = (mean.length == 1) ? 0 : d;
return (val - mean[d]) / stddev[d];
} | java |
private static EigenPair[] processDecomposition(EigenvalueDecomposition evd) {
double[] eigenvalues = evd.getRealEigenvalues();
double[][] eigenvectors = evd.getV();
EigenPair[] eigenPairs = new EigenPair[eigenvalues.length];
for(int i = 0; i < eigenvalues.length; i++) {
double e = Math.abs(eigenvalues[i]);
double[] v = VMath.getCol(eigenvectors, i);
eigenPairs[i] = new EigenPair(v, e);
}
Arrays.sort(eigenPairs, Comparator.reverseOrder());
return eigenPairs;
} | java |
public void nextIteration(double[][] means) {
this.means = means;
changed = false;
final int k = means.length;
final int dim = means[0].length;
centroids = new double[k][dim];
sizes = new int[k];
Arrays.fill(varsum, 0.);
} | java |
public double[][] getMeans() {
double[][] newmeans = new double[centroids.length][];
for(int i = 0; i < centroids.length; i++) {
if(sizes[i] == 0) {
newmeans[i] = means[i]; // Keep old mean.
continue;
}
newmeans[i] = centroids[i];
}
return newmeans;
} | java |
public static String format(double[] v, int w, int d) {
DecimalFormat format = new DecimalFormat();
format.setDecimalFormatSymbols(new DecimalFormatSymbols(Locale.US));
format.setMinimumIntegerDigits(1);
format.setMaximumFractionDigits(d);
format.setMinimumFractionDigits(d);
format.setGroupingUsed(false);
int width = w + 1;
StringBuilder msg = new StringBuilder() //
.append('\n'); // start on new line.
for(int i = 0; i < v.length; i++) {
String s = format.format(v[i]); // format the number
// At _least_ 1 whitespace is added
whitespace(msg, Math.max(1, width - s.length())).append(s);
}
return msg.toString();
} | java |
public static StringBuilder formatTo(StringBuilder buf, double[] d, String sep) {
if(d == null) {
return buf.append("null");
}
if(d.length == 0) {
return buf;
}
buf.append(d[0]);
for(int i = 1; i < d.length; i++) {
buf.append(sep).append(d[i]);
}
return buf;
} | java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.