code
stringlengths
73
34.1k
label
stringclasses
1 value
/**
 * Validates the internal state of this version descriptor.
 *
 * @throws IllegalArgumentException if the segment list is empty for a
 *         non-snapshot, a segment is negative, or the phase number is
 *         inconsistent with the phase
 */
private void validate() {
    // BUG FIX: the original tested "segments.length < 0", which is always
    // false for a Java array (dead code). The apparent intent is to reject
    // an empty segment list unless this version is a snapshot.
    if ((this.segments.length == 0) && (!this.snapshot)) {
        throw new IllegalArgumentException("segments.length:" + Integer.valueOf(this.segments.length));
    }
    // every segment must be non-negative
    for (int i = 0; i < this.segments.length; i++) {
        if (this.segments[i] < 0) {
            throw new IllegalArgumentException("segments[" + i + "]:" + Integer.valueOf(this.segments[i]));
        }
    }
    if (this.phaseNumber != null) {
        // a phase number must be non-negative, requires a phase, and must be
        // zero for the RELEASE phase
        if (this.phaseNumber.intValue() < 0) {
            throw new IllegalArgumentException("phaseNumber:" + this.phaseNumber);
        }
        if (this.phase == null) {
            throw new IllegalArgumentException("phaseNumber (phase==null):" + this.phaseNumber);
        }
        if ((this.phase == DevelopmentPhase.RELEASE) && (this.phaseNumber.intValue() != 0)) {
            throw new IllegalArgumentException("phaseNumber (phase==RELEASE):" + this.phaseNumber);
        }
    }
}
java
public static Integer computeGrammarSize(GrammarRules rules, Integer paaSize) { // The final grammar's size in BYTES // int res = 0; // The final size is the sum of the sizes of all rules // for (GrammarRuleRecord r : rules) { String ruleStr = r.getRuleString(); String[] tokens = ruleStr.split("\\s+"); int ruleSize = computeRuleSize(paaSize, tokens); res += ruleSize; } return res; }
java
/**
 * Prunes the grammar rules against the time series and normalizes the
 * surviving rule set.
 *
 * @param ts           the time series the grammar was inferred from
 * @param grammarRules the rules to prune
 * @return the pruned, regularized rule set
 */
public static GrammarRules performPruning(double[] ts, GrammarRules grammarRules) {
    RulePruningAlgorithm algorithm = new RulePruningAlgorithm(grammarRules, ts.length);
    algorithm.pruneRules();
    return algorithm.regularizePrunedRules();
}
java
/**
 * Computes the fraction of positions marked as covered.
 *
 * @param cover per-position coverage flags
 * @return covered positions divided by the array length; 0.0 for an empty
 *         array (the original divided by zero and returned NaN)
 */
public static double computeCover(boolean[] cover) {
    // ROBUSTNESS: an empty series has zero coverage by definition.
    if (cover.length == 0) {
        return 0.0;
    }
    int covered = 0;
    for (boolean flag : cover) {
        if (flag) {
            covered++;
        }
    }
    return (double) covered / (double) cover.length;
}
java
/**
 * Tells whether two states can be merged: they must belong to the same
 * component and agree on the transition set of every label.
 */
private boolean compatible(int i, int j, Set<Integer> T[][]) {
    // states in different components are never compatible
    if (s2comp[i] != s2comp[j]) {
        return false;
    }
    // the transition sets must match label by label
    for (int label = 0; label < nbLabels; label++) {
        if (!T[i][label].equals(T[j][label])) {
            return false;
        }
    }
    return true;
}
java
/**
 * Registers an element under the given name, maintaining the bidirectional
 * name/element mapping.
 *
 * @return {@code false} when the name is already taken
 */
public boolean register(E e, String name) {
    // reject duplicate names up front
    if (resolve.containsKey(name)) {
        return false;
    }
    rev.put(e, name);
    resolve.put(name, e);
    return true;
}
java
/**
 * Substitutes one VM identifier by another in this naming service.
 * Only effective when this service resolves VM elements; otherwise a no-op
 * that still reports success.
 *
 * @param curId  the VM currently registered (may be unregistered)
 * @param nextId the replacement VM
 * @return {@code false} only when {@code nextId} is already registered
 */
@Override
public boolean substituteVM(VM curId, VM nextId) {
    if (VM.TYPE.equals(elemId)) {
        if (rev.containsKey(nextId)) {
            //the new id already exists. It is a failure scenario.
            return false;
        }
        // drop the old reverse mapping; fqn is null when curId was unknown
        String fqn = rev.remove(curId);
        if (fqn != null) {
            //new resolution, with the substitution of the old one.
            rev.put((E) nextId, fqn);
            resolve.put(fqn, (E) nextId);
        }
    }
    return true;
}
java
/**
 * Gets the naming-service view that resolves VM names in the given model.
 *
 * @param mo the model to browse
 * @return the view if attached to the model; {@code null} otherwise
 */
@SuppressWarnings("unchecked")
public static NamingService<VM> getVMNames(Model mo) {
    return (NamingService<VM>) mo.getView(ID + VM.TYPE);
}
java
/**
 * Gets the naming-service view that resolves node names in the given model.
 *
 * @param mo the model to browse
 * @return the view if attached to the model; {@code null} otherwise
 */
@SuppressWarnings("unchecked")
public static NamingService<Node> getNodeNames(Model mo) {
    return (NamingService<Node>) mo.getView(ID + Node.TYPE);
}
java
/**
 * Applies every event registered for the given hook on the model.
 *
 * @return {@code false} as soon as one event fails to apply
 */
public boolean applyEvents(Hook k, Model i) {
    for (Event ev : getEvents(k)) {
        if (!ev.apply(i)) {
            // stop at the first failing event
            return false;
        }
    }
    return true;
}
java
/**
 * Attaches an event to the given hook, lazily creating the hook's event set.
 *
 * @return {@code true} when the event was not already attached
 */
public boolean addEvent(Hook k, Event n) {
    Set<Event> attached = events.get(k);
    if (attached == null) {
        // first event for this hook: create its backing set
        attached = new HashSet<>();
        events.put(k, attached);
    }
    return attached.add(n);
}
java
/**
 * Returns the events attached to the given hook.
 * Never returns {@code null}: absent hooks map to an immutable empty set.
 */
public Set<Event> getEvents(Hook k) {
    Set<Event> attached = events.get(k);
    if (attached == null) {
        return Collections.emptySet();
    }
    return attached;
}
java
/**
 * Records a new packing dimension: its capacity, current usages, future
 * usage variables, and node aliases.
 */
@Override
public void addDim(int c, int[] cUse, IntVar[] dUse, int[] alias) {
    // the four parallel lists are kept index-aligned
    aliases.add(alias);
    capacities.add(c);
    cUsages.add(cUse);
    dUsages.add(dUse);
}
java
/**
 * Posts one AliasedCumulatives constraint per registered dimension, using
 * the lower bound of each future-usage variable as its demand.
 *
 * @param r the problem to constrain
 * @return {@code true}, always
 */
@Override
public boolean beforeSolve(ReconfigurationProblem r) {
    super.beforeSolve(r);
    for (int i = 0; i < aliases.size(); i++) {
        int capa = capacities.get(i);
        int[] alias = aliases.get(i);
        int[] cUse = cUsages.get(i);
        IntVar[] dUseVars = dUsages.get(i);
        int[] dUses = new int[dUseVars.length];
        // BUG FIX: the original inner loop wrote "dUses[i++]", re-using and
        // corrupting the outer dimension index; a dedicated index is used.
        for (int j = 0; j < dUseVars.length; j++) {
            dUses[j] = dUseVars[j].getLB();
        }
        r.getModel().post(new AliasedCumulatives(alias, new int[]{capa}, cHosts, new int[][]{cUse}, cEnds, dHosts, new int[][]{dUses}, dStarts, associations));
    }
    return true;
}
java
/**
 * Orders the views so that every view appears after all of its dependencies.
 *
 * @param mo    the model, used only for the exception message
 * @param views the views to order
 * @param base  identifiers considered already available
 * @return the views in dependency order
 * @throws SchedulerException when missing or cyclic dependencies prevent
 *         a complete ordering
 */
public static List<ChocoView> resolveDependencies(Model mo, List<ChocoView> views, Collection<String> base) throws SchedulerException {
    Set<String> done = new HashSet<>(base);
    List<ChocoView> remaining = new ArrayList<>(views);
    List<ChocoView> solved = new ArrayList<>();
    while (!remaining.isEmpty()) {
        ListIterator<ChocoView> ite = remaining.listIterator();
        // detects a full pass with no progress, i.e. a missing or cyclic dependency
        boolean blocked = true;
        while (ite.hasNext()) {
            ChocoView s = ite.next();
            // a view is ready once all of its dependencies have been solved
            if (done.containsAll(s.getDependencies())) {
                ite.remove();
                done.add(s.getIdentifier());
                solved.add(s);
                blocked = false;
            }
        }
        if (blocked) {
            throw new SchedulerModelingException(mo, "Missing dependencies or cyclic dependencies prevent from using: " + remaining);
        }
    }
    return solved;
}
java
public static List<RuleInterval> getZeroIntervals(int[] coverageArray) { ArrayList<RuleInterval> res = new ArrayList<RuleInterval>(); int start = -1; boolean inInterval = false; int intervalsCounter = -1; // slide over the array from left to the right // for (int i = 0; i < coverageArray.length; i++) { if (0 == coverageArray[i] && !inInterval) { start = i; inInterval = true; } if (coverageArray[i] > 0 && inInterval) { res.add(new RuleInterval(intervalsCounter, start, i, 0)); inInterval = false; intervalsCounter--; } } // we need to check for the last interval here // if (inInterval) { res.add(new RuleInterval(intervalsCounter, start, coverageArray.length, 0)); } return res; }
java
/**
 * Computes the fraction of the series covered by at least one rule
 * interval; rule #0 is skipped.
 *
 * @param seriesLength the series length
 * @param rules        the grammar rules whose intervals are considered
 * @return covered positions divided by the series length
 */
public static double getCoverAsFraction(int seriesLength, GrammarRules rules) {
    boolean[] covered = new boolean[seriesLength];
    for (GrammarRuleRecord rule : rules) {
        // rule #0 is skipped
        if (0 == rule.ruleNumber()) {
            continue;
        }
        // mark every position spanned by this rule's intervals
        for (RuleInterval interval : rule.getRuleIntervals()) {
            for (int pos = interval.getStart(); pos < interval.getEnd(); pos++) {
                covered[pos] = true;
            }
        }
    }
    // count the marked positions and normalize
    int count = 0;
    for (boolean flag : covered) {
        if (flag) {
            count++;
        }
    }
    return (double) count / (double) seriesLength;
}
java
public static double getMeanRuleCoverage(int length, GrammarRules rules) { // get the coverage array // int[] coverageArray = new int[length]; for (GrammarRuleRecord rule : rules) { if (0 == rule.ruleNumber()) { continue; } ArrayList<RuleInterval> arrPos = rule.getRuleIntervals(); for (RuleInterval saxPos : arrPos) { int startPos = saxPos.getStart(); int endPos = saxPos.getEnd(); for (int j = startPos; j < endPos; j++) { coverageArray[j] = coverageArray[j] + 1; } } } // int minCoverage = 0; // int maxCoverage = 0; int coverageSum = 0; for (int i : coverageArray) { coverageSum += i; // if (i < minCoverage) { // minCoverage = i; // } // if (i > maxCoverage) { // maxCoverage = i; // } } return (double) coverageSum / (double) length; }
java
/**
 * Evicts the given layer from the layer cache, if a cache is present.
 *
 * @param layer the layer to evict; must belong to this instance
 * @throws IllegalStateException if the layer's id does not match this
 *         instance's id
 */
public void removeLayerFromCache(LayerImpl layer) {
    if (layer.getId() != layerId) {
        throw new IllegalStateException();
    }
    // NOTE(review): layerCacheRef looks like a reference holder (possibly a
    // weak reference) — the cache may legitimately be absent, in which case
    // eviction is silently skipped.
    LayerCacheImpl layerCache = layerCacheRef.get();
    if (layerCache != null) {
        layerCache.remove(layer.getKey(), layer);
    }
}
java
/**
 * Inserts or replaces a mapping in this (unbalanced) binary tree ordered by
 * key hash code. A hash collision with a non-equal key descends into the
 * right subtree, like a strictly greater hash.
 *
 * @param key   the key (must not be null: its hashCode is taken)
 * @param value the value to store
 * @return the previous value mapped to the key, or {@code null} if absent
 */
public final V put(K key, V value) {
    BinTreeNode<K,V> prev = null;
    BinTreeNode<K,V> node = root;
    int key_hash_code = key.hashCode();
    while(node != null) {
        prev = node;
        if(key_hash_code < node.keyHashCode) {
            node = node.left;
        } else {
            // greater hash, or a collision with a different key: go right
            if((key_hash_code > node.keyHashCode) || !node.key.equals(key)) {
                node = node.right;
            } else {
                // same key: swap the value and keep the incremental map
                // hash code consistent (remove old contribution, add new)
                cachedHashCode -= node.hashCode();
                V temp = node.value;
                node.value = value;
                cachedHashCode += node.hashCode();
                return temp;
            }
        }
    }
    size++;
    BinTreeNode<K,V> new_node = new BinTreeNode<K,V>(key, value);
    cachedHashCode += new_node.hashCode(); // fold the new entry into the cached hash code
    if(prev == null) {
        // empty tree: the new node becomes the root
        root = new_node;
        return null;
    }
    // attach the new leaf on the appropriate side of its parent
    if(key_hash_code < prev.keyHashCode) {
        prev.left = new_node;
    } else {
        prev.right = new_node;
    }
    return null;
}
java
/**
 * Replaces a node having at most one child by that child (possibly null),
 * re-linking it under the node's parent.
 *
 * @param node the node being removed
 * @param prev its parent, or {@code null} when node is the root
 * @param son  0 when node is the left child of prev, otherwise right
 * @param m    the replacement subtree
 * @return the removed node's value
 */
private final V remove_semi_leaf(BinTreeNode<K,V> node, BinTreeNode<K,V> prev, int son, BinTreeNode<K,V> m) {
    if (prev == null) {
        // node was the root
        root = m;
    } else if (son == 0) {
        prev.left = m;
    } else {
        prev.right = m;
    }
    return node.value;
}
java
private final V finish_removal(BinTreeNode<K,V> node, BinTreeNode<K,V> prev, int son, BinTreeNode<K,V> m) { if(m != null) { // set up the links for m m.left = node.left; m.right = node.right; } if(prev == null) root = m; else { if(son == 0) prev.left = m; else prev.right = m; } return node.value; }
java
/**
 * Detaches and returns the in-order successor of {@code node}: the leftmost
 * node of its right subtree. The successor's own right subtree takes its
 * place in the tree.
 */
private final BinTreeNode<K,V> extract_next(BinTreeNode<K,V> node) {
    BinTreeNode<K,V> prev = node.right;
    BinTreeNode<K,V> curr = prev.left;
    if(curr == null) {
        // the right child itself is the successor; splice it out
        node.right = node.right.right;
        return prev;
    }
    // walk down the left spine of the right subtree
    while(curr.left != null) {
        prev = curr;
        curr = curr.left;
    }
    // unlink the successor, keeping its right subtree attached to its parent
    prev.left = curr.right;
    return curr;
}
java
/**
 * Returns a live view over the values of this map, backed by
 * {@link #entryIterator()}. Removal through the iterator removes the
 * corresponding mapping.
 */
public final Collection<V> values() {
    return new AbstractCollection<V>() {
        public Iterator<V> iterator() {
            final Iterator<Map.Entry<K,V>> ite = entryIterator();
            // adapt the entry iterator to yield only values
            return new Iterator<V>() {
                public boolean hasNext() { return ite.hasNext(); }
                public void remove() { ite.remove(); }
                public V next() { return ite.next().getValue(); }
            };
        }
        public int size() { return size; }
    };
}
java
/**
 * Returns a live view over the entries of this map, backed by
 * {@link #entryIterator()}.
 */
public final Set<Map.Entry<K,V>> entrySet() {
    return new AbstractSet<Map.Entry<K,V>>() {
        public Iterator<Map.Entry<K,V>> iterator() { return entryIterator(); }
        public int size() { return size; }
    };
}
java
/**
 * Returns a live view over the keys of this map, backed by
 * {@link #entryIterator()}. Removal through the iterator removes the
 * corresponding mapping.
 */
public final Set<K> keySet() {
    return new AbstractSet<K>() {
        public Iterator<K> iterator() {
            final Iterator<Map.Entry<K,V>> ite = entryIterator();
            // adapt the entry iterator to yield only keys
            return new Iterator<K>() {
                public boolean hasNext() { return ite.hasNext(); }
                public void remove() { ite.remove(); }
                public K next() { return ite.next().getKey(); }
            };
        }
        public int size() { return size; }
    };
}
java
/**
 * Recursively walks the AST looking for anonymous {@code define(...)} calls
 * (first argument not a string literal) and inserts the expanded module
 * name as the first argument — both into the AST and, when a source
 * transformer is available, into the source text itself.
 */
public void processChildren(Node node) {
    for (Node cursor = node.getFirstChild(); cursor != null; cursor = cursor.getNext()) {
        if (cursor.getType() == Token.CALL) {
            // The node is a function or method call
            Node name = cursor.getFirstChild();
            if (name != null && name.getType() == Token.NAME && // named function call
                    name.getString().equals("define")) { // name is "define" //$NON-NLS-1$
                Node param = name.getNext();
                // only anonymous define() calls (first arg not a string) are patched
                if (param != null && param.getType() != Token.STRING) {
                    String expname = name.getProp(Node.SOURCENAME_PROP).toString();
                    if (source != null) {
                        // locate the character right after the 'define' identifier
                        PositionLocator locator = source.locate(name.getLineno(), name.getCharno()+6);
                        char tok = locator.findNextJSToken(); // move cursor to the open paren
                        if (tok == '(') {
                            // Try to insert the module name immediately following the open paren for the
                            // define call because the param location will be off if the argument list is parenthesized.
                            source.insert("\"" + expname + "\",", locator.getLineno(), locator.getCharno()+1); //$NON-NLS-1$ //$NON-NLS-2$
                        } else {
                            // First token following 'define' name is not a paren, so fall back to inserting
                            // before the first parameter.
                            source.insert("\"" + expname + "\",", param.getLineno(), param.getCharno()); //$NON-NLS-1$ //$NON-NLS-2$
                        }
                    }
                    // keep the AST consistent with the patched source
                    param.getParent().addChildBefore(Node.newString(expname), param);
                }
            }
        }
        // Recursively call this method to process the child nodes
        if (cursor.hasChildren())
            processChildren(cursor);
    }
}
java
/**
 * Returns the incoming flow variables for the given node.
 * Never returns {@code null}: absent nodes map to an immutable empty list.
 */
public List<IntVar> getIncoming(Node n) {
    List<IntVar> vars = incoming.get(n);
    return vars == null ? Collections.<IntVar>emptyList() : vars;
}
java
/**
 * Returns the outgoing flow variables for the given node.
 * Never returns {@code null}: absent nodes map to an immutable empty list.
 */
public List<IntVar> getOutgoing(Node n) {
    List<IntVar> vars = outgoings.get(n);
    return vars == null ? Collections.<IntVar>emptyList() : vars;
}
java
/**
 * Checks the model against every registered constraint checker, using the
 * start-side or end-side callback depending on {@code start}.
 *
 * @param mo    the model to check
 * @param start {@code true} to use {@code startsWith}, {@code false} for
 *              {@code endsWith}
 * @throws SatConstraintViolationException when a checker with an attached
 *         constraint reports a violation
 */
private void checkModel(Model mo, boolean start) throws SatConstraintViolationException {
    // DEDUP: the two branches of the original were identical except for the
    // checker callback, so the check is factored into a single condition.
    for (SatConstraintChecker<?> c : checkers) {
        boolean violated = start ? !c.startsWith(mo) : !c.endsWith(mo);
        // checkers without an attached constraint cannot raise a violation
        if (violated && c.getConstraint() != null) {
            throw new DiscreteViolationException(c.getConstraint(), mo);
        }
    }
}
java
/**
 * Serializes a mapping to JSON: offline nodes, ready VMs, and per-online-node
 * running/sleeping VM lists keyed by the node identifier.
 */
private static JSONObject toJSON(Mapping c) {
    JSONObject o = new JSONObject();
    o.put("offlineNodes", nodesToJSON(c.getOfflineNodes()));
    o.put("readyVMs", vmsToJSON(c.getReadyVMs()));
    // per-node detail for every online node
    JSONObject ons = new JSONObject();
    for (Node n : c.getOnlineNodes()) {
        JSONObject hosted = new JSONObject();
        hosted.put("runningVMs", vmsToJSON(c.getRunningVMs(n)));
        hosted.put("sleepingVMs", vmsToJSON(c.getSleepingVMs(n)));
        ons.put(Integer.toString(n.id()), hosted);
    }
    o.put("onlineNodes", ons);
    return o;
}
java
/**
 * Fills the model's mapping from its JSON representation.
 *
 * @param mo the model whose mapping is populated
 * @param o  the JSON object holding "offlineNodes", "readyVMs" and
 *           "onlineNodes"
 * @throws JSONConverterException when an element is declared twice or the
 *         JSON is malformed
 */
public void fillMapping(Model mo, JSONObject o) throws JSONConverterException {
    Mapping c = mo.getMapping();
    for (Node u : newNodes(mo, o, "offlineNodes")) {
        c.addOfflineNode(u);
    }
    for (VM u : newVMs(mo, o, "readyVMs")) {
        c.addReadyVM(u);
    }
    // online nodes carry their hosted VMs, keyed by the node identifier
    JSONObject ons = (JSONObject) o.get("onlineNodes");
    for (Map.Entry<String, Object> e : ons.entrySet()) {
        int id = Integer.parseInt(e.getKey());
        Node u = mo.newNode(id);
        if (u == null) {
            // the identifier was already allocated to another node
            throw JSONConverterException.nodeAlreadyDeclared(id);
        }
        JSONObject on = (JSONObject) e.getValue();
        c.addOnlineNode(u);
        for (VM vm : newVMs(mo, on, "runningVMs")) {
            c.addRunningVM(vm, u);
        }
        for (VM vm : newVMs(mo, on, "sleepingVMs")) {
            c.addSleepingVM(vm, u);
        }
    }
}
java
/**
 * Declares in the model every node listed as an integer id under the given
 * key of the JSON object.
 *
 * @return the declared nodes
 * @throws JSONConverterException when the key does not hold an array or an
 *         id is already declared
 */
private static Set<Node> newNodes(Model mo, JSONObject o, String key) throws JSONConverterException {
    checkKeys(o, key);
    Object x = o.get(key);
    if (!(x instanceof JSONArray)) {
        throw new JSONConverterException("array expected at key '" + key + "'");
    }
    JSONArray in = (JSONArray) x;
    Set<Node> declared = new HashSet<>(in.size());
    for (Object raw : in) {
        int id = (Integer) raw;
        Node n = mo.newNode(id);
        if (n == null) {
            // the identifier is already used by another node
            throw JSONConverterException.nodeAlreadyDeclared(id);
        }
        declared.add(n);
    }
    return declared;
}
java
/**
 * Declares in the model every VM listed as an integer id under the given
 * key of the JSON object.
 *
 * @return the declared VMs
 * @throws JSONConverterException when the key does not hold an array or an
 *         id is already declared
 */
private static Set<VM> newVMs(Model mo, JSONObject o, String key) throws JSONConverterException {
    checkKeys(o, key);
    Object x = o.get(key);
    if (!(x instanceof JSONArray)) {
        throw new JSONConverterException("array expected at key '" + key + "'");
    }
    JSONArray in = (JSONArray) x;
    Set<VM> declared = new HashSet<>(in.size());
    for (Object raw : in) {
        int id = (Integer) raw;
        VM vm = mo.newVM(id);
        if (vm == null) {
            // the identifier is already used by another VM
            throw JSONConverterException.vmAlreadyDeclared(id);
        }
        declared.add(vm);
    }
    return declared;
}
java
/**
 * Adds the dependency info under the given key, merging with any existing
 * entry for that key.
 *
 * @param key  the module key
 * @param info the info to add; must not be null
 * @return {@code true} when this map was modified
 * @throws NullPointerException if {@code info} is null
 */
public boolean add(String key, ModuleDepInfo info) {
    if (info == null) {
        throw new NullPointerException();
    }
    boolean modified = false;
    ModuleDepInfo existing = get(key);
    // NOTE(review): "existing != info" is a reference comparison — adding
    // the exact same instance already stored is deliberately a no-op.
    if (!containsKey(key) || existing != info) {
        if (existing != null) {
            // merge into the entry already present
            modified = existing.add(info);
        } else {
            super.put(key, info);
            modified = true;
        }
    }
    return modified;
}
java
/**
 * Merges every entry of the given dependency map into this one. Each info
 * is defensively copied so this map stays isolated from the source.
 *
 * @return {@code true} when this map was modified
 */
public boolean addAll(ModuleDeps other) {
    boolean changed = false;
    for (Map.Entry<String, ModuleDepInfo> entry : other.entrySet()) {
        if (add(entry.getKey(), new ModuleDepInfo(entry.getValue()))) {
            changed = true;
        }
    }
    return changed;
}
java
/**
 * Raises the minimum allocation of a VM: the stored value becomes the
 * maximum of its current value and {@code v}.
 *
 * @return the resulting allocation
 */
public int minVMAllocation(int vmIdx, int v) {
    int current = vmAllocation.get(vmIdx);
    // never lower an already-higher allocation
    int floor = v > current ? v : current;
    vmAllocation.set(vmIdx, floor);
    return floor;
}
java
/**
 * Caps the overbooking ratio of a node: the stored ratio becomes the
 * minimum of its current value and {@code d}. Values below 1 are ignored.
 *
 * @return the resulting ratio
 */
public double capOverbookRatio(int nIdx, double d) {
    // ratios below 1 are rejected: the stored value is left untouched
    if (d < 1) {
        return ratios.get(nIdx);
    }
    double capped = Math.min(ratios.get(nIdx), d);
    ratios.set(nIdx, capped);
    return capped;
}
java
/**
 * Pre-solve hook: defaults unset VM allocations to the VM's current
 * resource consumption, then registers this resource as a packing
 * dimension.
 *
 * NOTE(review): the method mixes the parameter {@code p} with the field
 * {@code rp}; this only works if both refer to the same problem — confirm.
 *
 * @return the result of {@code linkVirtualToPhysicalUsage()}
 */
@Override
public boolean beforeSolve(ReconfigurationProblem p) throws SchedulerException {
    for (VM vm : source.getMapping().getAllVMs()) {
        int vmId = p.getVM(vm);
        int v = getVMAllocation(vmId);
        if (v < 0) {
            // no explicit allocation: fall back to the current consumption
            int prevUsage = rc.getConsumption(vm);
            minVMAllocation(vmId, prevUsage);
        }
    }
    ChocoView v = rp.getView(Packing.VIEW_ID);
    if (v == null) {
        throw SchedulerModelingException.missingView(rp.getSourceModel(), Packing.VIEW_ID);
    }
    // one (host variable, demand) pair per VM that must be running
    IntVar[] host = new IntVar[p.getFutureRunningVMs().size()];
    int[] demand = new int[host.length];
    int i = 0;
    for (VM vm : p.getFutureRunningVMs()) {
        host[i] = rp.getVMAction(vm).getDSlice().getHoster();
        demand[i] = getVMAllocation(p.getVM(vm));
        i++;
    }
    ((Packing) v).addDim(rc.getResourceIdentifier(), virtRcUsage, demand, host);
    return linkVirtualToPhysicalUsage();
}
java
/**
 * Caps the number of VMs a node may host, derived from its (overbooked)
 * capacity divided by the smallest non-zero allocation, plus the VMs with a
 * zero allocation.
 *
 * @param nIdx     the node index
 * @param min      the smallest non-zero per-VM allocation
 * @param nbZeroes the number of VMs with a zero allocation
 * @return {@code false} when restricting the bound raised a contradiction
 */
private boolean capHosting(int nIdx, int min, int nbZeroes) {
    Node n = rp.getNode(nIdx);
    double capa = getSourceResource().getCapacity(n) * getOverbookRatio(nIdx)/*.getLB()*/;
    int card = (int) (capa / min) + nbZeroes + 1;
    if (card < source.getMapping().getRunningVMs(n).size()) {
        // This shortcut is required to prevent a filtering issue in the scheduling phase:
        // At time 0, LocalTaskScheduler will complain and start to backtrack.
        // TODO: revise the notion of continuous constraint for the cardinality issue.
        return true;
    }
    try {
        //Restrict the hosting capacity.
        rp.getNbRunningVMs().get(nIdx).updateUpperBound(card, Cause.Null);
    } catch (ContradictionException ex) {
        rp.getLogger().error("Unable to cap the hosting capacity of '" + n + " ' to " + card, ex);
        return false;
    }
    return true;
}
java
/**
 * Sanity check on the initial model: the VMs currently running on each
 * online node must fit within the node's (overbooked) capacity.
 * A violation throws instead of yielding an unsatisfiable problem, since a
 * node running above its physical capacity is nonsensical.
 */
private void checkInitialSatisfaction() {
    //Seems to me we don't support ratio change
    for (Node n : rp.getSourceModel().getMapping().getOnlineNodes()) {
        int nIdx = rp.getNode(n);
        double ratio = getOverbookRatio(nIdx)/*.getLB()*/;
        double capa = getSourceResource().getCapacity(n) * ratio;
        // accumulate the consumption of the hosted VMs, failing fast
        int usage = 0;
        for (VM vm : rp.getSourceModel().getMapping().getRunningVMs(n)) {
            usage += getSourceResource().getConsumption(vm);
            if (usage > capa) {
                //Here, the problem is not feasible but we consider an exception
                //because such a situation does not physically makes sense (one cannot run at 110%)
                throw new SchedulerModelingException(rp.getSourceModel(), "Usage of virtual resource " + getResourceIdentifier() + " on node " + n + " (" + usage + ") exceeds its capacity (" + capa + ")");
            }
        }
    }
}
java
/**
 * Computes a placement weight per VM: for each resource, the VM's
 * allocation relative to the global capacity, summed over all resources
 * and scaled to an integer (x10000).
 *
 * @param rp  the problem providing node/VM indexes
 * @param rcs the resource mappings to aggregate
 * @return a VM -&gt; weight map
 */
public static TObjectIntMap<VM> getWeights(ReconfigurationProblem rp, List<CShareableResource> rcs) {
    Model mo = rp.getSourceModel();
    int[] capa = new int[rcs.size()];
    int[] cons = new int[rcs.size()];
    TObjectIntMap<VM> cost = new TObjectIntHashMap<>();
    // global capacity per resource over all nodes: UB of the virtual usage
    // scaled by the node's overbooking ratio
    for (Node n : mo.getMapping().getAllNodes()) {
        for (int i = 0; i < rcs.size(); i++) {
            capa[i] += rcs.get(i).virtRcUsage.get(rp.getNode(n)).getUB() * rcs.get(i).ratios.get(rp.getNode(n))/*.getLB()*/;
        }
    }
    // global consumption per resource over all VMs
    for (VM v : mo.getMapping().getAllVMs()) {
        for (int i = 0; i < rcs.size(); i++) {
            cons[i] += rcs.get(i).getVMAllocation(rp.getVM(v));
        }
    }
    for (VM v : mo.getMapping().getAllVMs()) {
        double sum = 0;
        for (int i = 0; i < rcs.size(); i++) {
            double ratio = 0;
            // NOTE(review): the guard tests cons[i] but the division uses
            // capa[i] — a zero capacity with non-zero consumption would still
            // divide by zero. Confirm whether capa[i] > 0 is guaranteed.
            if (cons[i] > 0) {
                ratio = 1.0 * rcs.get(i).getVMAllocation(rp.getVM(v)) / capa[i];
            }
            sum += ratio;
        }
        cost.put(v, (int) (sum * 10000));
    }
    return cost;
}
java
/**
 * Checks that every VM has exactly one destination state, i.e. that the
 * per-state sets partition the VM set. Duplicates found while scanning the
 * later sets are logged for diagnosis.
 *
 * @return {@code true} when the set sizes add up to the number of VMs
 */
private boolean distinctVMStates() {
    boolean ok = vms.size() == running.size() + sleeping.size() + ready.size() + killed.size();
    //It is sure there is no solution as a VM cannot have multiple destination state
    Map<VM, VMState> states = new HashMap<>();
    // RUNNING is filled first, so it cannot itself detect a duplicate;
    // the subsequent passes report any VM already assigned a state
    for (VM v : running) {
        states.put(v, VMState.RUNNING);
    }
    for (VM v : ready) {
        VMState prev = states.put(v, VMState.READY);
        if (prev != null) {
            getLogger().debug("multiple destination state for {}: {} and {}", v, prev, VMState.READY);
        }
    }
    for (VM v : sleeping) {
        VMState prev = states.put(v, VMState.SLEEPING);
        if (prev != null) {
            getLogger().debug("multiple destination state for {}: {} and {}", v, prev, VMState.SLEEPING);
        }
    }
    for (VM v : killed) {
        VMState prev = states.put(v, VMState.KILLED);
        if (prev != null) {
            getLogger().debug("multiple destination state for {}: {} and {}", v, prev, VMState.KILLED);
        }
    }
    return ok;
}
java
/**
 * Materializes a reconfiguration plan from a solver solution: every node
 * transition, then every VM transition, inserts its actions into the plan.
 * Applyability and consistency are verified through assertions only (hence
 * the squid:S3346 suppression) — they cost nothing unless -ea is enabled.
 */
@Override
@SuppressWarnings("squid:S3346")
public ReconfigurationPlan buildReconfigurationPlan(Solution s, Model src) throws SchedulerException {
    ReconfigurationPlan plan = new DefaultReconfigurationPlan(src);
    for (NodeTransition action : nodeActions) {
        action.insertActions(s, plan);
    }
    for (VMTransition action : vmActions) {
        action.insertActions(s, plan);
    }
    assert plan.isApplyable() : "The following plan cannot be applied:\n" + plan;
    assert checkConsistency(s, plan);
    return plan;
}
java
/**
 * Installs the fallback search heuristic: integer variables first
 * (first-fail selection, minimum value), then real variables, then set
 * variables.
 */
private void defaultHeuristic() {
    IntStrategy intStrat = Search.intVarSearch(new FirstFail(csp), new IntDomainMin(), csp.retrieveIntVars(true));
    SetStrategy setStrat = new SetStrategy(csp.retrieveSetVars(), new InputOrder<>(csp), new SetDomainMin(), true);
    RealStrategy realStrat = new RealStrategy(csp.retrieveRealVars(), new Occurrence<>(), new RealDomainMiddle());
    // the sequencer tries each strategy in the given order
    solver.setSearch(new StrategiesSequencer(intStrat, realStrat, setStrat));
}
java
/**
 * Creates one bounded integer variable per node, counting the VMs the node
 * will host. The resulting list is exposed read-only.
 */
private void makeCardinalityVariables() {
    int nbVMs = vms.size();
    List<IntVar> counters = new ArrayList<>(nodes.size());
    for (Node n : nodes) {
        // each counter ranges over [0, total number of VMs]
        counters.add(csp.intVar(makeVarLabel("nbVMsOn('", n, "')"), 0, nbVMs, true));
    }
    vmsCountOnNodes = Collections.unmodifiableList(counters);
}
java
/**
 * Iterates over the actions ordered by the start-first comparator.
 * The actions are copied into a sorted set, so the iteration does not
 * reflect later changes to the underlying collection.
 */
@Override
public Iterator<Action> iterator() {
    TreeSet<Action> ordered = new TreeSet<>(startFirstComparator);
    for (Action a : actions) {
        ordered.add(a);
    }
    return ordered.iterator();
}
java
/**
 * Appends a directional edge record to the edge store and links it into the
 * per-node edge chains.
 *
 * Edge record layout (byte offsets): 0 from-node id, 8 to-node id,
 * 16 weight, 24 previous edge pointer of the from-node, 32 previous edge
 * pointer of the to-node. Node record offsets 8 and 16 appear to hold the
 * node's latest edge position — TODO confirm against the node layout.
 */
private void addDirectionalEdge(Edge e) {
    // grow the backing storage when the new record would not fit
    if (edgeAccess.getCapacity() < getEdgeIndex(edgePos) + EDGE_SIZE)
        edgeAccess.ensureCapacity(edgeAccess.getCapacity() + INITIAL_EDGE_FILE_SIZE);
    // translate external node ids to internal ids
    long fromId = nodIdMapping.get(e.getFromNodeId());
    long toId = nodIdMapping.get(e.getToNodeId());
    long fromIndex = getNodeIndex(fromId);
    long toIndex = getNodeIndex(toId);
    long edgeIndex = getEdgeIndex(edgePos);
    // write the edge record and chain it after the nodes' previous edges
    edgeAccess.setLong ( edgeIndex , fromId );
    edgeAccess.setLong ( edgeIndex + 8 , toId );
    edgeAccess.setDouble ( edgeIndex + 16 , e.getWeight() );
    edgeAccess.setLong ( edgeIndex + 24 , nodeAccess.getLong(fromIndex + 8) );
    edgeAccess.setLong ( edgeIndex + 32 , nodeAccess.getLong(toIndex + 16) );
    // both nodes now point at this edge as their most recent one
    nodeAccess.setLong( fromIndex + 8 , edgePos);
    nodeAccess.setLong( toIndex + 16 , edgePos);
    // slot 0 stores the running edge counter / next free position
    edgeAccess.setLong( 0 , ++edgePos );
}
java
/**
 * Builds a catalog pre-loaded with every constraint builder shipped by
 * default.
 */
public static DefaultConstraintsCatalog newBundle() {
    DefaultConstraintsCatalog c = new DefaultConstraintsCatalog();
    SatConstraintBuilder[] defaults = {
            new AmongBuilder(),
            new BanBuilder(),
            new ResourceCapacityBuilder(),
            new RunningCapacityBuilder(),
            new FenceBuilder(),
            new GatherBuilder(),
            new KilledBuilder(),
            new LonelyBuilder(),
            new OfflineBuilder(),
            new OnlineBuilder(),
            new OverbookBuilder(),
            new PreserveBuilder(),
            new QuarantineBuilder(),
            new ReadyBuilder(),
            new RootBuilder(),
            new RunningBuilder(),
            new SleepingBuilder(),
            new SplitBuilder(),
            new SplitAmongBuilder(),
            new SpreadBuilder(),
            new SeqBuilder(),
            new MaxOnlineBuilder(),
            new NoDelayBuilder(),
            new BeforeBuilder(),
            new SerializeBuilder(),
            new SyncBuilder()
    };
    for (SatConstraintBuilder b : defaults) {
        c.add(b);
    }
    return c;
}
java
/**
 * Registers a constraint builder, keyed by its identifier.
 *
 * @return {@code false} when a builder with the same identifier exists
 */
public boolean add(SatConstraintBuilder c) {
    String id = c.getIdentifier();
    // one builder per identifier; duplicates are rejected
    if (this.builders.containsKey(id)) {
        return false;
    }
    this.builders.put(id, c);
    return true;
}
java
/**
 * Null-safe comparison by string representation: nulls compare through the
 * literal string "null".
 */
public int compare(T o1, T o2) {
    // identity short-circuit (also covers both arguments being null)
    if (o1 == o2) {
        return 0;
    }
    String left = (o1 == null) ? "null" : o1.toString();
    String right = (o2 == null) ? "null" : o2.toString();
    return left.compareTo(right);
}
java
/**
 * Factory shortcut: delegates to the four-argument overload, applying the
 * same case conversion for every position.
 */
public static CaseSyntax of(Character separator, CaseConversion allCharCase) {
    return of(separator, allCharCase, allCharCase, allCharCase);
}
java
/**
 * Registers the datatypes supported out of the box: the basic scalar types,
 * the {@code Datatype} marker itself, and the numeric, java.time and legacy
 * java.util date/calendar families.
 */
protected void registerDefaultDatatypes() {
    registerStandardDatatype(String.class);
    registerStandardDatatype(Boolean.class);
    registerStandardDatatype(Character.class);
    registerStandardDatatype(Currency.class);
    registerCustomDatatype(Datatype.class); // internal trick...
    registerNumberDatatypes();
    registerJavaTimeDatatypes();
    registerJavaUtilDateCalendarDatatypes();
}
java
/**
 * Registers additional datatypes given as fully-qualified class names.
 * Only legal before initialization completes.
 */
public void setExtraDatatypes(List<String> datatypeList) {
    // registration after initialization is rejected by the state object
    getInitializationState().requireNotInitilized();
    for (String datatypeFqn : datatypeList) {
        registerCustomDatatype(datatypeFqn);
    }
}
java
/**
 * Instantiates the Hadoop gzip codec reflectively, so a missing compression
 * module degrades to {@code null} instead of a hard failure.
 *
 * @return the codec, or {@code null} when the class is unavailable
 */
public static CompressionCodec getGzipCodec(Configuration conf) {
    try {
        Class<? extends CompressionCodec> clazz = conf.getClassByName("org.apache.hadoop.io.compress.GzipCodec").asSubclass(CompressionCodec.class);
        return (CompressionCodec) ReflectionUtils.newInstance(clazz, conf);
    } catch (ClassNotFoundException e) {
        logger.warn("GzipCodec could not be instantiated", e);
        return null;
    }
}
java
/**
 * Tells whether a point lies close to more than one territory border,
 * checking the territory itself and, recursively, its parents.
 * A point is "near" a border when it lies inside the boundary extended by a
 * small margin, but outside the same boundary shrunk by that margin.
 */
public static boolean isNearMultipleBorders(@Nonnull final Point point, @Nonnull final Territory territory) {
    checkDefined("point", point);
    if (territory != Territory.AAA) {
        final int territoryNumber = territory.getNumber();
        if (territory.getParentTerritory() != null) {
            // There is a parent! check its borders as well...
            if (isNearMultipleBorders(point, territory.getParentTerritory())) {
                return true;
            }
        }
        int nrFound = 0;
        final int fromTerritoryRecord = DATA_MODEL.getDataFirstRecord(territoryNumber);
        final int uptoTerritoryRecord = DATA_MODEL.getDataLastRecord(territoryNumber);
        for (int territoryRecord = uptoTerritoryRecord; territoryRecord >= fromTerritoryRecord; territoryRecord--) {
            if (!Data.isRestricted(territoryRecord)) {
                final Boundary boundary = Boundary.createBoundaryForTerritoryRecord(territoryRecord);
                // longitude margin scaled by the x-divider of this latitude band
                final int xdiv8 = Common.xDivider(boundary.getLatMicroDegMin(), boundary.getLatMicroDegMax()) / 4;
                if (boundary.extendBoundary(60, xdiv8).containsPoint(point)) {
                    if (!boundary.extendBoundary(-60, -xdiv8).containsPoint(point)) {
                        // inside the outer ring but outside the inner ring:
                        // the point is near this record's border
                        nrFound++;
                        if (nrFound > 1) {
                            return true;
                        }
                    }
                }
            }
        }
    }
    return false;
}
java
/**
 * Inserts a digram record into this frequency-ordered doubly-linked queue.
 * The queue is kept sorted by non-increasing frequency; an index map
 * ({@code elements}) provides O(1) lookup by digram string.
 *
 * @param digramRecord the record to insert
 * @throws IllegalArgumentException if the digram is already queued
 */
public void enqueue(RepairDigramRecord digramRecord) {
    // System.out.println("before == " + this.toString());
    // if the same key element is in the queue - something went wrong with tracking...
    if (elements.containsKey(digramRecord.str)) {
        throw new IllegalArgumentException(
            "Element with payload " + digramRecord.str + " already exists in the queue...");
    } else {
        // create a new node
        RepairQueueNode nn = new RepairQueueNode(digramRecord);
        // System.out.println(nn.payload);
        // place it into the queue if it's empty
        if (this.elements.isEmpty()) {
            this.head = nn;
        }
        // if new node has _higher than_ or _equal to_ the head frequency... this going to be the new head
        else if (nn.getFrequency() >= this.head.getFrequency()) {
            this.head.prev = nn;
            nn.next = this.head;
            this.head = nn;
        }
        // in all other cases find an appropriate place in the existing queue, starting from the head
        else {
            RepairQueueNode currentNode = head;
            while (null != currentNode.next) {
                // the intent is to slide down the list finding a place at new node is greater than a node
                // a tracking pointer points to...
                // ABOVE we just checked that at this loop start that the current node is greater than new
                // node
                //
                if (nn.getFrequency() >= currentNode.getFrequency()) {
                    // currentNode.prev is non-null here: the head case was
                    // already handled, so nn is strictly below the head
                    RepairQueueNode prevN = currentNode.prev;
                    prevN.next = nn;
                    nn.prev = prevN;
                    currentNode.prev = nn;
                    nn.next = currentNode;
                    break; // the element has been placed
                }
                currentNode = currentNode.next;
            }
            // check if loop was broken by the TAIL condition, not by placement
            // (a placement leaves currentNode.next non-null, since nn is
            // inserted BEFORE currentNode)
            if (null == currentNode.next) {
                // so, currentNode points on the tail...
                if (nn.getFrequency() >= currentNode.getFrequency()) {
                    // insert just before...
                    RepairQueueNode prevN = currentNode.prev;
                    prevN.next = nn;
                    nn.prev = prevN;
                    currentNode.prev = nn;
                    nn.next = currentNode;
                } else {
                    // or make a new tail
                    nn.prev = currentNode;
                    currentNode.next = nn;
                }
            }
        }
        // also save the element in the index store
        this.elements.put(nn.payload.str, nn);
    }
    // System.out.println("before == " + this.toString());
}
java
/**
 * Looks up a queued digram record by its string key.
 *
 * @return the record's payload, or {@code null} when absent
 */
public RepairDigramRecord get(String key) {
    RepairQueueNode node = this.elements.get(key);
    return (node == null) ? null : node.payload;
}
java
private void removeNodeFromList(RepairQueueNode el) { // the head case // if (null == el.prev) { if (null != el.next) { this.head = el.next; this.head.prev = null; el=null; } else { // can't happen? yep. if there is only one element exists... this.head = null; } } // the tail case // else if (null == el.next) { if (null != el.prev) { el.prev.next = null; } else { // can't happen? throw new RuntimeException("Unrecognized situation here..."); } } // all others // else { el.prev.next = el.next; el.next.prev = el.prev; } }
java
/**
 * Evaluates the duration of an action over an element.
 *
 * @param mo the model the action applies to
 * @param a  the action type
 * @param e  the element involved in the action
 * @return the estimated duration, strictly positive
 * @throws SchedulerException when no evaluator is registered for the action
 *         or when the evaluation is not strictly positive
 */
public int evaluate(Model mo, Class<? extends Action> a, Element e) throws SchedulerException {
    ActionDurationEvaluator<Element> ev = durations.get(a);
    if (ev == null) {
        throw new SchedulerModelingException(null, "Unable to estimate the duration of action '" + a.getSimpleName() + "' related to '" + e + "'");
    }
    int d = ev.evaluate(mo, e);
    if (d <= 0) {
        // BUG FIX: the guard rejects zero as well as negatives, but the
        // original message claimed "negative value"; it now matches the check.
        throw new SchedulerModelingException(null, "The duration for action " + a.getSimpleName() + " over '" + e + "' has been evaluated to a non-positive value (" + d + "). Unsupported");
    }
    return d;
}
java
/**
 * Parses a single command-line token, dispatching to positional-argument or
 * option handling, and maintaining the active CLI mode.
 *
 * @param parameter         the raw token to parse
 * @param parserState       the mutable parser state
 * @param parameterConsumer gives access to subsequent tokens when needed
 */
protected void parseParameter(String parameter, CliParserState parserState, CliParameterConsumer parameterConsumer) {
    if (parserState.isOptionsComplete()) {
        // no more options (e.g. --foo), only arguments from here
        List<CliArgumentContainer> argumentList = this.cliState.getArguments(parserState.requireCurrentMode(this.cliState));
        int argumentIndex = parserState.getArgumentIndex();
        if (argumentIndex >= argumentList.size()) {
            // more positional arguments supplied than the mode declares
            throw new NlsIllegalArgumentException(parameter);
        } else {
            parseArgument(parserState, parameter, argumentList.get(argumentIndex), parameterConsumer);
        }
    } else {
        CliOptionContainer optionContainer = this.cliState.getOption(parameter);
        if (optionContainer == null) {
            // no option found for argument...
            parseParameterUndefinedOption(parameter, parserState, parameterConsumer);
        } else {
            // mode handling...
            String modeId = optionContainer.getOption().mode();
            CliModeObject newMode = this.cliState.getMode(modeId);
            if (newMode == null) {
                // should never happen!
                newMode = new CliModeContainer(modeId);
            }
            if (parserState.currentMode == null) {
                // first option seen: it fixes the initial mode
                parserState.setCurrentMode(parameter, newMode);
            } else if (!modeId.equals(parserState.currentMode.getId())) {
                // mode already detected, but mode of current option differs...
                if (newMode.isDescendantOf(parserState.currentMode)) {
                    // new mode extends current mode
                    parserState.setCurrentMode(parameter, newMode);
                } else if (!newMode.isAncestorOf(parserState.currentMode)) {
                    // current mode does NOT extend new mode and vice versa
                    // --> incompatible modes
                    throw new CliOptionIncompatibleModesException(parserState.modeOption, parameter);
                }
            }
            parseOption(parserState, parameter, optionContainer, parameterConsumer);
        }
    }
}
java
/**
 * Appends the usage syntax of the given options to {@code parameters} and
 * fills {@code option2HelpMap} with per-option help info.
 *
 * @return the widest help-info length seen, used to align the help columns
 */
private int printHelpOptions(CliOutputSettings settings, Map<CliOption, CliOptionHelpInfo> option2HelpMap, StringBuilder parameters, Collection<CliOptionContainer> modeOptions) {
    int maxOptionColumnWidth = 0;
    for (CliOptionContainer option : modeOptions) {
        CliOption cliOption = option.getOption();
        if (parameters.length() > 0) {
            parameters.append(" ");
        }
        // optional options are wrapped in brackets: [--foo <operand>]
        if (!cliOption.required()) {
            parameters.append("[");
        }
        parameters.append(cliOption.name());
        // boolean flags take no operand
        if (!option.getSetter().getPropertyClass().equals(boolean.class)) {
            parameters.append(" ");
            parameters.append(cliOption.operand());
            if (option.isArrayMapOrCollection()) {
                // container-valued options advertise their repetition style
                CliContainerStyle containerStyle = option.getContainerStyle(this.cliState.getCliStyle());
                switch (containerStyle) {
                    case COMMA_SEPARATED:
                        parameters.append(",...");
                        break;
                    case MULTIPLE_OCCURRENCE:
                        parameters.append("*");
                        break;
                    default :
                        throw new IllegalCaseException(CliContainerStyle.class, containerStyle);
                }
            }
        }
        if (!cliOption.required()) {
            parameters.append("]");
        }
        // lazily build and cache the help info for this option
        CliOptionHelpInfo helpInfo = option2HelpMap.get(cliOption);
        if (helpInfo == null) {
            helpInfo = new CliOptionHelpInfo(option, this.dependencies, settings);
            option2HelpMap.put(cliOption, helpInfo);
        }
        // track the widest entry for column alignment
        if (helpInfo.length > maxOptionColumnWidth) {
            maxOptionColumnWidth = helpInfo.length;
        }
    }
    return maxOptionColumnWidth;
}
java
/**
 * Linear membership test over the backing array: matches by identity first,
 * then by {@code equals}. Supports a {@code null} query.
 */
public boolean contains(Object o) {
    for (T element : elemArray) {
        if (o == element) {
            return true;
        }
        if (o != null && o.equals(element)) {
            return true;
        }
    }
    return false;
}
java
/**
 * Computes the actions that must complete before the given action may
 * start: the freeing actions on its demanding node that end no later than
 * the action starts.
 *
 * @param a the action whose dependencies are requested
 * @return the dependencies; an empty set when the action has no demanding
 *         node
 */
public Set<Action> getDependencies(Action a) {
    // IDIOM: a single get() replaces the original containsKey() + get()
    // double lookup (and avoids proceeding with a null node).
    Node n = demandingNodes.get(a);
    if (n == null) {
        return Collections.emptySet();
    }
    Set<Action> pre = new HashSet<>();
    for (Action action : getFreeings(n)) {
        // an action does not depend on itself, and only on actions that
        // finish before (or exactly when) it starts
        if (!action.equals(a) && a.getStart() >= action.getEnd()) {
            pre.add(action);
        }
    }
    return pre;
}
java
/**
 * Tells whether require-list expansion should be performed for this
 * request. Expansion is skipped when require deps are already included in
 * the response, disabled by options, server-expanded layers are active, or
 * the request attribute is absent/false.
 */
public static boolean isExplodeRequires(HttpServletRequest request) {
    if (isIncludeRequireDeps(request)) {
        // don't expand require deps if we're including them in the response.
        return false;
    }
    boolean result = false;
    IAggregator aggr = (IAggregator)request.getAttribute(IAggregator.AGGREGATOR_REQATTRNAME);
    IOptions options = aggr.getOptions();
    IConfig config = aggr.getConfig();
    Boolean reqattr = TypeUtil.asBoolean(request.getAttribute(IHttpTransport.EXPANDREQUIRELISTS_REQATTRNAME));
    // NOTE(review): 'config' only gates the isServerExpandedLayers check —
    // the check itself does not consult config; confirm this is intended.
    result = (options == null || !options.isDisableRequireListExpansion()) &&
            (config == null || !isServerExpandedLayers(request)) &&
            reqattr != null && reqattr;
    return result;
}
java
/**
 * Determines whether has-filtering is enabled for the given request.
 *
 * @param request the HTTP request being processed.
 * @return {@code true} unless the aggregator options explicitly disable it.
 */
public static boolean isHasFiltering(HttpServletRequest request) {
    IAggregator aggr = (IAggregator)request.getAttribute(IAggregator.AGGREGATOR_REQATTRNAME);
    IOptions options = aggr.getOptions();
    // filtering defaults to on when no options are available
    if (options == null) {
        return true;
    }
    return !options.isDisableHasFiltering();
}
java
/**
 * Creates the geographic boundary (bounding box) for a territory record,
 * reading the min/max latitude and longitude from the static data model.
 *
 * @param territoryRecord index of the territory record in the data model.
 * @return the boundary, in micro-degrees.
 */
@Nonnull
static Boundary createBoundaryForTerritoryRecord(final int territoryRecord) {
    return new Boundary(
        DATA_MODEL.getLatMicroDegMin(territoryRecord),
        DATA_MODEL.getLonMicroDegMin(territoryRecord),
        DATA_MODEL.getLatMicroDegMax(territoryRecord),
        DATA_MODEL.getLonMicroDegMax(territoryRecord)
    );
}
java
/**
 * Tells whether a point lies within this boundary.
 *
 * Latitude is a half-open range check [min, max). Longitude is half-open too,
 * but boundaries may extend slightly outside the [-180, 180) range, so a point
 * falling outside the raw range is re-tested after wrapping it by +/-360 degrees.
 *
 * @param p the point to test; an undefined point is never contained.
 * @return {@code true} if the point is inside the boundary.
 */
boolean containsPoint(@Nonnull final Point p) {
    if (!p.isDefined()) {
        return false;
    }
    final int latMicroDeg = p.getLatMicroDeg();
    // latitude: reject when outside [latMicroDegMin, latMicroDegMax)
    if ((latMicroDegMin > latMicroDeg) || (latMicroDeg >= latMicroDegMax)) {
        return false;
    }
    final int lonMicroDeg = p.getLonMicroDeg();
    // Longitude boundaries can extend (slightly) outside the [-180,180) range.
    if (lonMicroDeg < lonMicroDegMin) {
        // point is left of the range: retry with the point shifted +360 degrees
        return (lonMicroDegMin <= (lonMicroDeg + Point.MICRO_DEG_360)) &&
            ((lonMicroDeg + Point.MICRO_DEG_360) < lonMicroDegMax);
    } else if (lonMicroDeg >= lonMicroDegMax) {
        // point is right of the range: retry with the point shifted -360 degrees
        return (lonMicroDegMin <= (lonMicroDeg - Point.MICRO_DEG_360)) &&
            ((lonMicroDeg - Point.MICRO_DEG_360) < lonMicroDegMax);
    } else {
        return true;
    }
}
java
/**
 * Creates precedence constraints between one VM and each VM of a collection.
 *
 * @param vmBefore the VM that must come first.
 * @param vmsAfter the VMs that must come after it.
 * @return one {@code Precedence} per VM in {@code vmsAfter}.
 */
public static List<Precedence> newPrecedence(VM vmBefore, Collection<VM> vmsAfter) {
    return newPrecedence(Collections.singleton(vmBefore), vmsAfter);
}
java
/**
 * Creates precedence constraints between each VM of a collection and one VM.
 *
 * @param vmsBefore the VMs that must come first.
 * @param vmAfter the VM that must come after them.
 * @return one {@code Precedence} per VM in {@code vmsBefore}.
 */
public static List<Precedence> newPrecedence(Collection<VM> vmsBefore, VM vmAfter) {
    return newPrecedence(vmsBefore, Collections.singleton(vmAfter));
}
java
/**
 * Creates the cross-product of precedence constraints between two VM
 * collections: every VM in {@code vmsBefore} precedes every VM in
 * {@code vmsAfter}.
 *
 * @param vmsBefore the VMs that must come first.
 * @param vmsAfter the VMs that must come after.
 * @return the resulting constraints, one per (before, after) pair.
 */
public static List<Precedence> newPrecedence(Collection<VM> vmsBefore, Collection<VM> vmsAfter) {
    // presize for the full cross-product
    List<Precedence> constraints = new ArrayList<>(vmsBefore.size() * vmsAfter.size());
    for (VM before : vmsBefore) {
        for (VM after : vmsAfter) {
            constraints.add(new Precedence(before, after));
        }
    }
    return constraints;
}
java
/**
 * Checks whether the current (partial) bin assignment is consistent: for
 * every instantiated bin variable, the accumulated item sizes per dimension
 * must not exceed the upper bound of the corresponding load variable.
 *
 * @return {@code ESat.FALSE} on a detected overload, {@code ESat.TRUE} otherwise.
 */
private ESat isConsistent() {
    int[][] l = new int[nbDims][nbBins];
    for (int i = 0; i < bins.length; i++) {
        if (bins[i].isInstantiated()) {
            // hoisted: the assigned bin does not depend on the dimension
            // (the original re-read getValue() inside the inner loop)
            int v = bins[i].getValue();
            for (int d = 0; d < nbDims; d++) {
                l[d][v] += iSizes[d][i];
                if (l[d][v] > loads[d][v].getUB()) {
                    return ESat.FALSE;
                }
            }
        }
    }
    return ESat.TRUE;
}
java
/**
 * Declares which variable events this propagator listens to.
 *
 * Bin assignment variables (indices below {@code bins.length}) react to every
 * event; the remaining (load) variables only react to bound updates and
 * instantiation.
 */
@Override
public int getPropagationConditions(int idx) {
    return idx < bins.length ? IntEventType.all()
        : IntEventType.BOUND.getMask() + IntEventType.INSTANTIATE.getMask();
}
java
/**
 * Totals the item sizes of every dimension into {@code sumISizes}, using a
 * long accumulator to avoid int overflow.
 */
private void computeSumItemSizes() {
    for (int d = 0; d < nbDims; d++) {
        long total = 0;
        // sum all item sizes of dimension d
        for (int size : iSizes[d]) {
            total += size;
        }
        this.sumISizes[d] = total;
    }
}
java
/**
 * Deletes the cached module builds and clears the build map.
 *
 * The map reference is swapped out under the lock so that concurrent callers
 * see either the full map or {@code null}; the (possibly slow) cache entry
 * deletion then happens outside the synchronized block.
 *
 * @param mgr the cache manager used to delete the persisted entries.
 */
@Override
public void clearCached(ICacheManager mgr) {
    Map<String, CacheEntry> moduleBuilds;
    synchronized (this) {
        // detach the map so no new entries can be added while we delete
        moduleBuilds = _moduleBuilds;
        _moduleBuilds = null;
    }
    if (moduleBuilds != null) {
        for (Map.Entry<String, CacheEntry> entry : moduleBuilds.entrySet()) {
            entry.getValue().delete(mgr);
        }
        moduleBuilds.clear();
    }
}
java
/**
 * Creates a splittable set over VMs.
 *
 * @param c the VMs to index.
 * @param idx maps each VM id to its partition key.
 * @return the resulting set.
 */
public static SplittableElementSet<VM> newVMIndex(Collection<VM> c, TIntIntHashMap idx) {
    return new SplittableElementSet<>(c, idx);
}
java
/**
 * Creates a splittable set over nodes.
 *
 * @param c the nodes to index.
 * @param idx maps each node id to its partition key.
 * @return the resulting set.
 */
public static SplittableElementSet<Node> newNodeIndex(Collection<Node> c, TIntIntHashMap idx) {
    return new SplittableElementSet<>(c, idx);
}
java
/**
 * Iterates over the partitions of this set, calling {@code p} once per
 * contiguous run of elements sharing the same index value.
 *
 * Assumes {@code values} is non-empty and ordered so that elements with the
 * same index are contiguous. Iteration stops early when the procedure
 * returns {@code false}.
 *
 * @param p invoked with (set, partition key, from, to) where [from, to) is
 *          the half-open element range of the partition.
 * @return {@code false} if the procedure aborted the iteration.
 */
public boolean forEachPartition(IterateProcedure<E> p) {
    // key of the partition currently being scanned
    int curIdx = index.get(values.get(0).id());
    int from;
    int to;
    for (from = 0, to = 0; to < values.size(); to++) {
        int cIdx = index.get(values.get(to).id());
        if (curIdx != cIdx) {
            // partition boundary: emit [from, to) then start a new partition
            if (!p.extract(this, curIdx, from, to)) {
                return false;
            }
            from = to;
            curIdx = cIdx;
        }
    }
    // emit the trailing partition
    return p.extract(this, curIdx, from, to);
}
java
/**
 * Gets the subset of elements associated with the partition key {@code k}.
 *
 * Performs a linear scan to locate the partition bounds; assumes elements
 * are ordered by ascending index value.
 *
 * @param k the partition key.
 * @return a view over the matching elements, or an empty set when no element
 *         has that key.
 */
public Set<E> getSubSet(int k) {
    int from = -1;
    //TODO: very bad. Bounds should be memorized
    for (int x = 0; x < values.size(); x++) {
        int cIdx = index.get(values.get(x).id());
        if (cIdx == k && from == -1) {
            // first element of the wanted partition
            from = x;
        }
        if (from >= 0 && cIdx > k) {
            // scanned past the end of the partition
            return new ElementSubSet<>(this, k, from, x);
        }
    }
    if (from >= 0) {
        // partition runs to the end of the values list
        return new ElementSubSet<>(this, k, from, values.size());
    }
    return Collections.emptySet();
}
java
/**
 * Splits this set into its partitions.
 *
 * @return one {@link ElementSubSet} per distinct index value, in scan order.
 */
public List<ElementSubSet<E>> getPartitions() {
    final List<ElementSubSet<E>> partitions = new ArrayList<>();
    // collect every partition; always return true so the iteration never aborts
    forEachPartition((idx, key, from, to) -> {
        partitions.add(new ElementSubSet<>(SplittableElementSet.this, key, from, to));
        return true;
    });
    return partitions;
}
java
/**
 * Combines this cache key generator with another one of the same concrete
 * type, returning a generator whose collection covers both.
 *
 * @param otherKeyGen the generator to combine with.
 * @return this, the other, or a newly created generator for the union of
 *         both collections.
 */
public ICacheKeyGenerator combine(ICacheKeyGenerator otherKeyGen) {
    if (this.equals(otherKeyGen)) {
        return this;
    }
    @SuppressWarnings("unchecked")
    AbstractCollectionCacheKeyGenerator<T> other = (AbstractCollectionCacheKeyGenerator<T>)otherKeyGen;
    if (isProvisional() && other.isProvisional()) {
        // should never happen
        throw new IllegalStateException();
    }
    // If one of the generators is provisional, defer to the other one.
    if (isProvisional()) {
        return other;
    } else if (other.isProvisional()) {
        return this;
    }
    // A generator with a null collection wins the combination outright.
    // NOTE(review): presumably null means "covers everything" -- confirm.
    if (getCollection() == null) {
        return this;
    }
    if (other.getCollection() == null) {
        return other;
    }
    // See if one of the keygens encompasses the other. This is the most likely
    // case and is more performant than always creating a new keygen.
    int size = getCollection().size(), otherSize = other.getCollection().size();
    if (size > otherSize && getCollection().containsAll(other.getCollection())) {
        return this;
    }
    if (otherSize > size && other.getCollection().containsAll(getCollection())) {
        return other;
    }
    // Neither keygen encompasses the other, so create a new one that is a combination
    // of the both of them.
    Set<T> combined = new HashSet<T>();
    combined.addAll(getCollection());
    combined.addAll(other.getCollection());
    return newKeyGen(combined, false);
}
java
/**
 * Registers a reader for the given format (matched case-insensitively).
 * The dedicated JSON reader reference is kept in sync with the registry.
 *
 * @param format the format name; lower-cased before use as registry key.
 * @param reader the reader to associate with the format.
 */
public static final void setReader(String format, TreeReader reader) {
    String key = format.toLowerCase();
    readers.put(key, reader);
    // keep the JSON fast-path reference in sync with the map
    boolean isJson = JSON.equals(key);
    if (isJson) {
        cachedJsonReader = reader;
    }
}
java
/**
 * Resolves the {@code xi:include} element the reader is currently positioned on.
 *
 * For {@code parse="xml"} (or no parse attribute) the referenced resource is
 * opened as a nested XML stream reader, optionally narrowed by an xpointer,
 * and this reader delegates to it. For {@code parse="text"} the resource
 * content is read as character data. When the resource is unavailable, an
 * {@code xi:fallback} child is searched; if none exists the include is skipped.
 *
 * @return the event type to report for the resolved inclusion.
 * @throws XMLStreamException if the parse type is unsupported or the
 *         underlying readers fail.
 */
protected int resolveInclude() throws XMLStreamException {
  // we are no more in fallback mode
  this.fallback = false;
  this.depth++;
  int eventType = -1;
  // read attributes...
  String href = getAttributeValue(null, "href");
  LOGGER.trace("Resolving xi:include to href {}", href);
  String xpointer = getAttributeValue(null, "xpointer");
  // get the included resource...
  DataResource includeResource = this.resource.navigate(href);
  // and try to include it...
  boolean success = false;
  if (includeResource.isAvailable()) {
    // determine inclusion format type...
    String parse = getAttributeValue(null, "parse");
    if ((parse == null) || ("xml".equals(parse))) {
      this.includeReader = new XIncludeStreamReader(this.factory, includeResource, this);
      if (xpointer != null) {
        // shorthand form: id
        // scheme-based form: e.g. element(/1/*)
        this.includeReader = new XPointerStreamReader(this.includeReader, xpointer);
      }
      eventType = this.includeReader.nextTag();
      setParent(this.includeReader);
      // we ascend the XML until the initial include is closed.
      closeInitialInclude();
      success = true;
    } else if ("text".equals(parse)) {
      String encoding = getAttributeValue(null, "encoding");
      Charset charset;
      if (encoding == null) {
        // NOTE(review): falls back to the platform default charset when no
        // encoding is declared -- confirm this is intended.
        charset = Charset.defaultCharset();
      } else {
        charset = Charset.forName(encoding);
      }
      InputStream textInputStream = includeResource.openStream();
      Reader reader = new InputStreamReader(textInputStream, charset);
      this.includeText = read(reader);
      // we ascend the XML until the initial include is closed.
      closeInitialInclude();
      return XMLStreamConstants.CHARACTERS;
    } else {
      throw new XMLStreamException("Unsupported XInclude parse type:" + parse);
    }
  }
  if (!success) {
    // search for fallback
    do {
      eventType = super.next();
    } while ((eventType != XMLStreamConstants.START_ELEMENT) && (eventType != XMLStreamConstants.END_ELEMENT));
    if (eventType == XMLStreamConstants.START_ELEMENT) {
      if ((XmlUtil.NAMESPACE_URI_XINCLUDE.equals(getNamespaceURI())) && ("fallback".equals(getLocalName()))) {
        // found fallback
        this.fallback = true;
        return next();
      }
    }
    // no fallback available, ignore include...
    closeInitialInclude();
    return next();
  }
  return eventType;
}
java
/**
 * Advances the main reader until the initial {@code xi:include} element is
 * closed, i.e. until the element-nesting depth opened by the include returns
 * to zero.
 *
 * @throws XMLStreamException if reading from the main reader fails.
 */
protected void closeInitialInclude() throws XMLStreamException {
    LOGGER.trace("Closing xi:include");
    int eventType = -1;
    // we ascend the XML until the initial include is closed.
    while (this.depth > 0) {
        eventType = this.mainReader.next();
        if (eventType == XMLStreamConstants.START_ELEMENT) {
            LOGGER.trace("Closing loop: Start {}", this.mainReader.getLocalName());
            this.depth++;
        } else if (eventType == XMLStreamConstants.END_ELEMENT) {
            LOGGER.trace("Closing loop: End {}", this.mainReader.getLocalName());
            this.depth--;
        }
    }
    LOGGER.trace("Closing xi:include complete");
}
java
/**
 * Non-blocking read step of the framed-transport state machine.
 *
 * First reads the 4-byte frame size; once complete, validates it, sizes the
 * data buffer and reads the frame payload. State transitions:
 * READING_FRAME_SIZE -> READING_FRAME -> READ_FRAME_COMPLETE (or back to the
 * READY_* variants when more data is pending).
 *
 * @return {@code false} on a read failure or an invalid frame size (the
 *         caller should close the connection), {@code true} otherwise.
 */
public boolean read() {
    if (state == State.READING_FRAME_SIZE) {
        // try to read the frame size completely
        if (!internalRead(frameSizeBuffer))
            return false;
        // if the frame size has been read completely, then prepare to read the
        // actual frame.
        if (frameSizeBuffer.remaining() == 0) {
            // pull out the frame size as an integer.
            int frameSize = frameSizeBuffer.getInt(0);
            if (frameSize <= 0) {
                logger.error("Read an invalid frame size of " + frameSize
                    + ". Are you using TFramedTransport on the client side?");
                return false;
            }
            if (frameSize > thriftFactories.maxFrameSizeInBytes) {
                logger.error("Invalid frame size got (" + frameSize + "), maximum expected "
                    + thriftFactories.maxFrameSizeInBytes);
                return false;
            }
            // reallocate to match frame size (if needed)
            reallocateDataBuffer(frameSize);
            frameSizeBuffer.clear();
            // prepare it to the next round of reading (if any)
            state = State.READING_FRAME;
        } else {
            // this skips the check of READING_FRAME state below, since we can't
            // possibly go on to that state if there's data left to be read at
            // this one.
            state = State.READY_TO_READ_FRAME_SIZE;
            return true;
        }
    }
    // it is possible to fall through from the READING_FRAME_SIZE section
    // to READING_FRAME if there's already some frame data available once
    // READING_FRAME_SIZE is complete.
    if (state == State.READING_FRAME) {
        if (!internalRead(dataBuffer))
            return false;
        state = (dataBuffer.remaining() == 0) ? State.READ_FRAME_COMPLETE : State.READY_TO_READ_FRAME;
        // Do not read until we finish processing request.
        if (state == State.READ_FRAME_COMPLETE) {
            switchMode(State.READ_FRAME_COMPLETE);
        }
        return true;
    }
    // if we fall through to this point, then the state must be invalid.
    logger.error("Read was called but state is invalid (" + state + ")");
    return false;
}
java
/**
 * Non-blocking write step: streams the pending response to the transport.
 *
 * If the socket cannot accept the whole buffer, the connection stays in
 * write mode and will continue on the next write signal; once the response
 * is fully streamed (or the write fails) it is closed and the connection
 * switches back to read mode.
 *
 * @return {@code false} on a stream/IO failure, {@code true} otherwise.
 */
public boolean write() {
    assert state == State.WRITING;
    boolean writeFailed = false;
    try {
        if (response.streamTo(transport) < 0) {
            // negative return signals a streaming failure
            writeFailed = true;
            return false;
        } else if (!response.isFullyStreamed()) {
            // if socket couldn't accommodate whole write buffer,
            // continue writing when we get next write signal.
            switchToWrite();
            return true;
        }
    } catch (IOException e) {
        logger.error("Got an IOException during write!", e);
        writeFailed = true;
        return false;
    } finally {
        // release the response as soon as it is finished or has failed
        if (writeFailed || response.isFullyStreamed())
            response.close();
    }
    // we're done writing. Now we need to switch back to reading.
    switchToRead();
    return true;
}
java
/**
 * Transitions the connection from a READY_* state to the corresponding
 * active state, or closes it when a close is pending. Called from the
 * selector thread to adjust interest ops between I/O rounds.
 */
public void changeSelectInterests() {
    switch (state) {
        case READY_TO_WRITE:
            // set the OP_WRITE interest
            state = State.WRITING;
            break;
        case READY_TO_READ_FRAME_SIZE:
            state = State.READING_FRAME_SIZE;
            break;
        case READY_TO_READ_FRAME:
            state = State.READING_FRAME;
            break;
        case AWAITING_CLOSE:
            // tear down the connection and deregister from the selector
            close();
            selectionKey.cancel();
            break;
        default:
            logger.error("changeSelectInterest was called, but state is invalid (" + state + ")");
    }
}
java
/**
 * Processes a fully-read frame: hands the input/output protocols to the
 * Thrift processor and signals the response as ready. On any throwable the
 * connection is moved to AWAITING_CLOSE instead.
 */
public void invoke() {
    assert state == State.READ_FRAME_COMPLETE : "Invoke called in invalid state: " + state;
    TTransport inTrans = getInputTransport();
    TProtocol inProt = thriftFactories.inputProtocolFactory.getProtocol(inTrans);
    TProtocol outProt = thriftFactories.outputProtocolFactory.getProtocol(getOutputTransport());
    try {
        thriftFactories.processorFactory.getProcessor(inTrans).process(inProt, outProt);
        responseReady();
        return;
    } catch (TException te) {
        // TException is an application-level failure; log and fall through to close
        logger.warn("Exception while invoking!", te);
    } catch (Throwable t) {
        logger.error("Unexpected throwable while invoking!", t);
    }
    // This will only be reached when there is a throwable.
    state = State.AWAITING_CLOSE;
    changeSelectInterests();
}
java
/**
 * Reads from the transport into the given buffer.
 *
 * @param buffer the destination buffer.
 * @return {@code false} on end-of-stream or an I/O failure, {@code true} otherwise.
 */
private boolean internalRead(Buffer buffer) {
    try {
        // a negative return from readFrom() signals end-of-stream
        return buffer.readFrom(transport) >= 0;
    } catch (IOException e) {
        logger.warn("Got an IOException in internalRead!", e);
        return false;
    }
}
java
/**
 * Shuts down this connection: releases the frame buffers, closes the
 * transport and closes any in-flight response.
 */
public void close() {
    freeDataBuffer();
    frameSizeBuffer.free();
    transport.close();
    if (response != null)
        response.close();
}
java
/**
 * Groups all motifs extracted from the grammar rules into classes of similar
 * length: two motifs belong to the same class when their length difference is
 * below {@code lengthThreshold} times the seed motif's length.
 *
 * @param lengthThreshold relative length tolerance (fraction of the seed length).
 * @param grammarRules the rules whose intervals provide the motifs.
 * @return the motif classes, each recording its min and max motif length.
 */
public ArrayList<SameLengthMotifs> classifyMotifs(double lengthThreshold, GrammarRules grammarRules) {

  // reset vars
  ArrayList<SameLengthMotifs> allClassifiedMotifs = new ArrayList<SameLengthMotifs>();

  // down to business
  ArrayList<SAXMotif> allMotifs = getAllMotifs(grammarRules);

  // is this one better?
  int currentIndex = 0;
  for (SAXMotif tmpMotif : allMotifs) {

    currentIndex++;

    if (tmpMotif.isClassified()) {
      // this breaks the loop flow, so it goes to //for (SAXMotif
      // tempMotif : allMotifs) {
      continue;
    }

    SameLengthMotifs tmpSameLengthMotifs = new SameLengthMotifs();
    int tmpMotifLen = tmpMotif.getPos().getEnd() - tmpMotif.getPos().getStart() + 1;
    int minLen = tmpMotifLen;
    int maxLen = tmpMotifLen;

    // TODO: assuming that this motif has not been processed, right?
    ArrayList<SAXMotif> newMotifClass = new ArrayList<SAXMotif>();
    newMotifClass.add(tmpMotif);
    tmpMotif.setClassified(true);

    // TODO: this motif assumed to be the first one of it's class,
    // traverse the rest down
    for (int i = currentIndex; i < allMotifs.size(); i++) {
      SAXMotif anotherMotif = allMotifs.get(i);

      // if the two motifs are similar or not.
      int anotherMotifLen = anotherMotif.getPos().getEnd() - anotherMotif.getPos().getStart() + 1;

      // if they have the similar length.
      if (Math.abs(anotherMotifLen - tmpMotifLen) < (tmpMotifLen * lengthThreshold)) {
        newMotifClass.add(anotherMotif);
        anotherMotif.setClassified(true);
        // keep the class length bounds up to date
        if (anotherMotifLen > maxLen) {
          maxLen = anotherMotifLen;
        }
        else if (anotherMotifLen < minLen) {
          minLen = anotherMotifLen;
        }
      }
    }

    tmpSameLengthMotifs.setSameLenMotifs(newMotifClass);
    tmpSameLengthMotifs.setMinMotifLen(minLen);
    tmpSameLengthMotifs.setMaxMotifLen(maxLen);
    allClassifiedMotifs.add(tmpSameLengthMotifs);
  }
  return allClassifiedMotifs;
  // System.out.println();
}
java
protected ArrayList<SAXMotif> getAllMotifs(GrammarRules grammarRules) { // result ArrayList<SAXMotif> allMotifs = new ArrayList<SAXMotif>(); int ruleNumber = grammarRules.size(); // iterate over all rules for (int i = 0; i < ruleNumber; i++) { // iterate over all segments/motifs/sub-sequences which correspond // to the rule ArrayList<RuleInterval> arrPos = grammarRules.getRuleRecord(i).getRuleIntervals(); for (RuleInterval saxPos : arrPos) { SAXMotif motif = new SAXMotif(); motif.setPos(saxPos); motif.setRuleIndex(i); motif.setClassified(false); allMotifs.add(motif); } } // ascending order Collections.sort(allMotifs); return allMotifs; }
java
/**
 * Removes overlapping motifs inside each same-length class: when two motifs
 * overlap by more than {@code thresouldCom} of the first motif's length, the
 * "weaker" one (by point-occurrence weight, or by length as fallback) is
 * deleted and linked to the one it resembles. Afterwards each class's min and
 * max motif length are recomputed from its first and last motifs.
 *
 * NOTE(review): the recomputed min/max assume the remaining motifs stay
 * sorted by length within the class -- confirm.
 *
 * @param allClassifiedMotifs the motif classes to prune (modified in place).
 * @param grammarRules the grammar used for the point-count removal strategy.
 * @param ts the original time series.
 * @param thresouldCom overlap fraction above which two motifs conflict.
 * @return the pruned motif classes (same list instance).
 */
protected ArrayList<SameLengthMotifs> removeOverlappingInSimiliar(
    ArrayList<SameLengthMotifs> allClassifiedMotifs, GrammarRules grammarRules, double[] ts,
    double thresouldCom) {

  ArrayList<SAXMotif> motifsBeDeleted = new ArrayList<SAXMotif>();
  SAXPointsNumber[] pointsNumberRemoveStrategy = countPointNumber(grammarRules, ts);

  for (SameLengthMotifs sameLenMotifs : allClassifiedMotifs) {
    outer: for (int j = 0; j < sameLenMotifs.getSameLenMotifs().size(); j++) {
      SAXMotif tempMotif = sameLenMotifs.getSameLenMotifs().get(j);
      int tempMotifLen = tempMotif.getPos().getEnd() - tempMotif.getPos().getStart() + 1;

      for (int i = j + 1; i < sameLenMotifs.getSameLenMotifs().size(); i++) {
        SAXMotif anotherMotif = sameLenMotifs.getSameLenMotifs().get(i);
        int anotherMotifLen = anotherMotif.getPos().getEnd() - anotherMotif.getPos().getStart()
            + 1;

        double minEndPos = Math.min(tempMotif.getPos().getEnd(), anotherMotif.getPos().getEnd());
        double maxStartPos = Math.max(tempMotif.getPos().getStart(),
            anotherMotif.getPos().getStart());
        // the length in common.
        double commonLen = minEndPos - maxStartPos + 1;

        // if they are overlapped motif, remove the shorter one
        if (commonLen > (tempMotifLen * thresouldCom)) {
          SAXMotif deletedMotif = new SAXMotif();
          SAXMotif similarWith = new SAXMotif();

          boolean isAnotherBetter;

          if (pointsNumberRemoveStrategy != null) {
            isAnotherBetter = decideRemove(anotherMotif, tempMotif, pointsNumberRemoveStrategy);
          }
          else {
            isAnotherBetter = anotherMotifLen > tempMotifLen;
          }
          if (isAnotherBetter) {
            // drop tempMotif; re-scan position j with the next motif
            deletedMotif = tempMotif;
            similarWith = anotherMotif;
            sameLenMotifs.getSameLenMotifs().remove(j);
            deletedMotif.setSimilarWith(similarWith);
            motifsBeDeleted.add(deletedMotif);
            j--;
            continue outer;
          }
          else {
            // drop anotherMotif; compensate the index for the removal
            deletedMotif = anotherMotif;
            similarWith = tempMotif;
            sameLenMotifs.getSameLenMotifs().remove(i);
            deletedMotif.setSimilarWith(similarWith);
            motifsBeDeleted.add(deletedMotif);
            i--;
          }
        }
      }
    }

    // recompute the class length bounds from the surviving motifs
    int minLength = sameLenMotifs.getSameLenMotifs().get(0).getPos().endPos
        - sameLenMotifs.getSameLenMotifs().get(0).getPos().startPos + 1;
    int sameLenMotifsSize = sameLenMotifs.getSameLenMotifs().size();
    int maxLength = sameLenMotifs.getSameLenMotifs().get(sameLenMotifsSize - 1).getPos().endPos
        - sameLenMotifs.getSameLenMotifs().get(sameLenMotifsSize - 1).getPos().startPos + 1;
    sameLenMotifs.setMinMotifLen(minLength);
    sameLenMotifs.setMaxMotifLen(maxLength);
  }
  return allClassifiedMotifs;
}
java
protected boolean decideRemove(SAXMotif motif1, SAXMotif motif2, SAXPointsNumber[] pointsNumberRemoveStrategy) { // motif1 details int motif1Start = motif1.getPos().getStart(); int motif1End = motif1.getPos().getEnd() - 1; int length1 = motif1End - motif1Start; // motif2 details int motif2Start = motif2.getPos().getStart(); int motif2End = motif1.getPos().getEnd() - 1; int length2 = motif2End - motif2Start; int countsMotif1 = 0; int countsMotif2 = 0; // compute the averageWeight double averageWeight = 1; int count = 0; for (int i = 0; i < pointsNumberRemoveStrategy.length; i++) { count += pointsNumberRemoveStrategy[i].getPointOccurenceNumber(); } averageWeight = (double) count / (double) pointsNumberRemoveStrategy.length; // compute counts for motif 1 for (int i = motif1Start; i <= motif1End; i++) { countsMotif1 += pointsNumberRemoveStrategy[i].getPointOccurenceNumber(); } // compute counts for motif 2 for (int i = motif2Start; i <= motif2End; i++) { countsMotif2 += pointsNumberRemoveStrategy[i].getPointOccurenceNumber(); } // get weights double weight1 = countsMotif1 / (averageWeight * length1); double weight2 = countsMotif2 / (averageWeight * length2); if (weight1 > weight2) { return true; } return false; }
java
/**
 * Refines each same-length motif class by hierarchical clustering: builds a
 * pairwise distance matrix over the class's subsequences, performs
 * average-linkage clustering, cuts the dendrogram at
 * {@code fractionTopDist * topDistance} (relaxing the cut until at least one
 * cluster survives) and splits the class accordingly.
 *
 * Classes with fewer than two members, or whose clustering yields no
 * distance, are dropped from the result.
 *
 * @param grammarRules the grammar (currently unused here -- kept for the signature).
 * @param ts the original time series the intervals index into.
 * @param allClassifiedMotifs the motif classes to refine.
 * @param fractionTopDist fraction of the top dendrogram distance used as cut.
 * @return the refined motif classes.
 */
protected ArrayList<SameLengthMotifs> refinePatternsByClustering(GrammarRules grammarRules,
    double[] ts, ArrayList<SameLengthMotifs> allClassifiedMotifs, double fractionTopDist) {

  DistanceComputation dc = new DistanceComputation();
  double[] origTS = ts;

  ArrayList<SameLengthMotifs> newAllClassifiedMotifs = new ArrayList<SameLengthMotifs>();

  for (SameLengthMotifs sameLenMotifs : allClassifiedMotifs) {

    ArrayList<RuleInterval> arrPos = new ArrayList<RuleInterval>();
    ArrayList<SAXMotif> subsequences = sameLenMotifs.getSameLenMotifs();
    for (SAXMotif ss : subsequences) {
      arrPos.add(ss.getPos());
    }

    int patternNum = arrPos.size();
    if (patternNum < 2) {
      // nothing to cluster
      continue;
    }

    double dt[][] = new double[patternNum][patternNum];

    // Build distance matrix.
    for (int i = 0; i < patternNum; i++) {
      RuleInterval saxPos = arrPos.get(i);

      int start1 = saxPos.getStart();
      int end1 = saxPos.getEnd();
      double[] ts1 = Arrays.copyOfRange(origTS, start1, end1);

      for (int j = 0; j < arrPos.size(); j++) {
        RuleInterval saxPos2 = arrPos.get(j);
        // skip cells already filled by symmetry
        if (dt[i][j] > 0) {
          continue;
        }
        double d = 0;
        dt[i][j] = d;
        if (i == j) {
          continue;
        }

        int start2 = saxPos2.getStart();
        int end2 = saxPos2.getEnd();
        double[] ts2 = Arrays.copyOfRange(origTS, start2, end2);

        // always slide the shorter pattern over the longer series
        if (ts1.length > ts2.length)
          d = dc.calcDistTSAndPattern(ts1, ts2);
        else
          d = dc.calcDistTSAndPattern(ts2, ts1);

        // DTW dtw = new DTW(ts1, ts2);
        // d = dtw.warpingDistance;

        dt[i][j] = d;
      }
    }

    String[] patternsName = new String[patternNum];
    for (int i = 0; i < patternNum; i++) {
      patternsName[i] = String.valueOf(i);
    }

    ClusteringAlgorithm alg = new DefaultClusteringAlgorithm();
    Cluster cluster = alg.performClustering(dt, patternsName, new AverageLinkageStrategy());

    // int minPatternPerCls = (int) (0.3 * patternNum);
    // minPatternPerCls = minPatternPerCls > 0 ? minPatternPerCls : 1;
    int minPatternPerCls = 1;

    if (cluster.getDistance() == null) {
      // System.out.print(false);
      continue;
    }
    // TODO: refine hard coded threshold
    // double cutDist = cluster.getDistance() * 0.67;
    double cutDist = cluster.getDistanceValue() * fractionTopDist;

    ArrayList<String[]> clusterTSIdx = findCluster(cluster, cutDist, minPatternPerCls);
    // relax the cut distance until at least one cluster is found
    while (clusterTSIdx.size() <= 0) {
      cutDist += cutDist / 2;
      clusterTSIdx = findCluster(cluster, cutDist, minPatternPerCls);
    }

    newAllClassifiedMotifs.addAll(SeparateMotifsByClustering(clusterTSIdx, sameLenMotifs));
  }

  return newAllClassifiedMotifs;
}
java
/**
 * Recursively cuts the dendrogram at {@code cutDist}: descends while a node's
 * distance exceeds the cut, otherwise collects the leaf names under the node
 * as one cluster (when it holds more than {@code minPatternPerCls} members).
 *
 * NOTE(review): the size test uses strictly-greater-than, so clusters of
 * exactly {@code minPatternPerCls} members are discarded -- confirm intended.
 *
 * @param cluster the dendrogram node to cut.
 * @param cutDist the distance threshold.
 * @param minPatternPerCls minimum cluster size (exclusive).
 * @return the member-index arrays of the surviving clusters.
 */
private ArrayList<String[]> findCluster(Cluster cluster, double cutDist, int minPatternPerCls) {
  ArrayList<String[]> clusterTSIdx = new ArrayList<String[]>();

  if (cluster.getDistance() != null) {
    // if (cluster.getDistance() > cutDist) {
    if (cluster.getDistanceValue() > cutDist) {
      // still above the cut: split into the two children
      if (cluster.getChildren().size() > 0) {
        clusterTSIdx.addAll(findCluster(cluster.getChildren().get(0), cutDist, minPatternPerCls));
        clusterTSIdx.addAll(findCluster(cluster.getChildren().get(1), cutDist, minPatternPerCls));
      }
    }
    else {
      // String[] idxes = cluster.getName().split("&");
      ArrayList<String> itemsInCluster = getNameInCluster(cluster);
      String[] idxes = itemsInCluster.toArray(new String[itemsInCluster.size()]);
      if (idxes.length > minPatternPerCls) {
        clusterTSIdx.add(idxes);
      }
    }
  }
  return clusterTSIdx;
}
java
private ArrayList<String> getNameInCluster(Cluster cluster) { ArrayList<String> itemsInCluster = new ArrayList<String>(); String nodeName; if (cluster.isLeaf()) { nodeName = cluster.getName(); itemsInCluster.add(nodeName); } else { // String[] clusterName = cluster.getName().split("#"); // nodeName = clusterName[1]; } for (Cluster child : cluster.getChildren()) { ArrayList<String> childrenNames = getNameInCluster(child); itemsInCluster.addAll(childrenNames); } return itemsInCluster; }
java
/**
 * Splits a same-length motif class according to the clustering result: each
 * index array becomes its own {@code SameLengthMotifs} with recomputed length
 * bounds. When there is at most one cluster, the class is returned unchanged.
 *
 * NOTE(review): method name violates lowerCamelCase but is kept for
 * compatibility with existing callers.
 *
 * @param clusterTSIdx per-cluster arrays of subsequence indices (as strings).
 * @param sameLenMotifs the class being split.
 * @return the resulting motif classes.
 */
private ArrayList<SameLengthMotifs> SeparateMotifsByClustering(ArrayList<String[]> clusterTSIdx,
    SameLengthMotifs sameLenMotifs) {

  ArrayList<SameLengthMotifs> newResult = new ArrayList<SameLengthMotifs>();

  if (clusterTSIdx.size() > 1) {
    ArrayList<SAXMotif> subsequences = sameLenMotifs.getSameLenMotifs();

    for (String[] idxesInCluster : clusterTSIdx) {
      SameLengthMotifs newIthSLM = new SameLengthMotifs();
      ArrayList<SAXMotif> sameLenSS = new ArrayList<SAXMotif>();

      // start from the parent's bounds and tighten with each member
      int minL = sameLenMotifs.getMinMotifLen();
      int maxL = sameLenMotifs.getMaxMotifLen();
      for (String i : idxesInCluster) {
        SAXMotif ssI = subsequences.get(Integer.parseInt(i));
        int len = ssI.getPos().getEnd() - ssI.getPos().getStart();
        if (len < minL) {
          minL = len;
        }
        else if (len > maxL) {
          maxL = len;
        }
        sameLenSS.add(ssI);
      }

      newIthSLM.setSameLenMotifs(sameLenSS);
      newIthSLM.setMaxMotifLen(maxL);
      newIthSLM.setMinMotifLen(minL);
      newResult.add(newIthSLM);
    }
  }
  else {
    // a single cluster: keep the class as-is
    newResult.add(sameLenMotifs);
  }

  return newResult;
}
java
/**
 * Evaluates a constraint statement of the script AST: looks up the constraint
 * builder by name, evaluates the child parameters, builds the constraints and
 * adds them to the script. A leading {@code >>} token marks a discrete
 * restriction; otherwise the continuous mode is forced when supported.
 *
 * Errors are reported through {@code ignoreError(...)} and the statement
 * evaluates to the ignorable operand.
 *
 * @param parent the parent tree node (unused here).
 * @return always {@code IgnorableOperand.getInstance()}.
 */
@Override
public BtrpOperand go(BtrPlaceTree parent) {
    String cname = getText();
    if (catalog == null) {
        return ignoreError("No constraints available");
    }
    SatConstraintBuilder b = catalog.getConstraint(cname);
    if (b == null) {
        ignoreError("Unknown constraint '" + cname + "'");
    }
    //Get the params
    int i = 0;
    boolean discrete = false;
    // a leading ">>" child requests a discrete restriction
    if (">>".equals(getChild(0).getText())) {
        i = 1;
        discrete = true;
    }
    List<BtrpOperand> params = new ArrayList<>();
    for (; i < getChildCount(); i++) {
        params.add(getChild(i).go(this));
    }
    if (b != null) {
        List<? extends SatConstraint> constraints = b.buildConstraint(this, params);
        for (SatConstraint c : constraints) {
            if (c != null) {
                if (discrete) {
                    if (!c.setContinuous(false)) {
                        return ignoreError("Discrete restriction is not supported by constraint '" + cname + "'");
                    }
                } else {
                    //force the continuous mode, if available
                    c.setContinuous(true);
                }
                script.addConstraint(c);
            }
        }
    }
    return IgnorableOperand.getInstance();
}
java
/**
 * Launches a process for the given command parts with stdin/stdout/stderr
 * inherited from the current JVM.
 *
 * @param cmdParts the command and its arguments.
 * @return the started process.
 * @throws IOException if the process cannot be started.
 */
public static Process runProcess(String... cmdParts) throws IOException {
    ProcessBuilder builder = new ProcessBuilder(buildCommandline(cmdParts));
    builder.inheritIO();
    return builder.start();
}
java