code
stringlengths
73
34.1k
label
stringclasses
1 value
public void startup() {
  underlying.startup();

  // Expose cache metrics through the profiler so they can be inspected at runtime.
  final OProfiler profiler = OProfiler.getInstance();
  profiler.registerHookValue(profilerPrefix + "enabled", new OProfilerHookValue() {
    public Object getValue() {
      return isEnabled();
    }
  });
  profiler.registerHookValue(profilerPrefix + "current", new OProfilerHookValue() {
    public Object getValue() {
      return getSize();
    }
  });
  profiler.registerHookValue(profilerPrefix + "max", new OProfilerHookValue() {
    public Object getValue() {
      return getMaxSize();
    }
  });
}
java
/**
 * Creates a fresh decryption {@link Cipher} initialized with this instance's key.
 *
 * NOTE(review): "Blowfish/ECB/PKCS5Padding" uses ECB mode, which leaks
 * plaintext block patterns; consider a mode with an IV (CBC or GCM) if
 * compatibility with existing ciphertext allows — TODO confirm.
 *
 * @return a cipher ready for decryption calls
 * @throws GeneralSecurityException if the transformation or key is rejected
 */
public Cipher getCipher() throws GeneralSecurityException {
    // A new Cipher per call: Cipher instances are stateful and not shared here.
    Cipher cipher = Cipher.getInstance("Blowfish/ECB/PKCS5Padding");
    cipher.init(Cipher.DECRYPT_MODE, new SecretKeySpec(key, "Blowfish"));
    return cipher;
}
java
/**
 * Creates a fresh encryption {@link Cipher} initialized with this instance's key.
 *
 * NOTE(review): same ECB-mode caveat as the decryption counterpart — ECB leaks
 * plaintext block patterns; migration to an IV-based mode is advisable if
 * stored data compatibility allows — TODO confirm.
 *
 * @return a cipher ready for encryption calls
 * @throws GeneralSecurityException if the transformation or key is rejected
 */
public Cipher getEncryptionCipher() throws GeneralSecurityException {
    Cipher cipher = Cipher.getInstance("Blowfish/ECB/PKCS5Padding");
    cipher.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(key, "Blowfish"));
    return cipher;
}
java
/**
 * Executes a parsed DROP CLASS command: removes the class's own indexes, drops
 * the class from the schema, frees its default cluster, and detaches the
 * class's clusters from any super-class indexes (which are then rebuilt).
 *
 * @param iArgs command arguments (unused here)
 * @return null if the class does not exist, otherwise Boolean true
 * @throws OCommandExecutionException if called before the command was parsed
 */
public Object execute(final Map<Object, Object> iArgs) {
    if (className == null)
        throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet");
    final ODatabaseRecord database = getDatabase();
    final OClass oClass = database.getMetadata().getSchema().getClass(className);
    if (oClass == null)
        return null;
    // Drop the indexes owned by this class before the class itself disappears.
    for (final OIndex<?> oIndex : oClass.getClassIndexes()) {
        database.getMetadata().getIndexManager().dropIndex(oIndex.getName());
    }
    // Capture super class and cluster names now; they are needed after the drop.
    final OClass superClass = oClass.getSuperClass();
    final int[] clustersToIndex = oClass.getPolymorphicClusterIds();
    final String[] clusterNames = new String[clustersToIndex.length];
    for (int i = 0; i < clustersToIndex.length; i++) {
        clusterNames[i] = database.getClusterNameById(clustersToIndex[i]);
    }
    final int clusterId = oClass.getDefaultClusterId();
    // Drop + persist + reload so the in-memory schema matches storage.
    ((OSchemaProxy) database.getMetadata().getSchema()).dropClassInternal(className);
    ((OSchemaProxy) database.getMetadata().getSchema()).saveInternal();
    database.getMetadata().getSchema().reload();
    deleteDefaultCluster(clusterId);
    if (superClass == null)
        return true;
    // Super-class indexes may still reference this class's clusters: detach and rebuild.
    for (final OIndex<?> oIndex : superClass.getIndexes()) {
        for (final String clusterName : clusterNames)
            oIndex.getInternal().removeCluster(clusterName);
        OLogManager.instance().info("Index %s is used in super class of %s and should be rebuilt.", oIndex.getName(), className);
        oIndex.rebuild();
    }
    return true;
}
java
/**
 * Compares this composite key with another, element by element.
 *
 * Sentinel elements in the other key short-circuit the comparison:
 * OAlwaysGreaterKey makes this key compare as smaller, OAlwaysLessKey as
 * larger — these are used to build open-ended range boundaries.
 *
 * Note: if one key is a strict prefix of the other, the loop ends without a
 * difference and 0 is returned, i.e. a shorter key equals any key it prefixes
 * (partial composite-key match semantics).
 */
public int compareTo(final OCompositeKey otherKey) {
    final Iterator<Object> inIter = keys.iterator();
    final Iterator<Object> outIter = otherKey.keys.iterator();
    while (inIter.hasNext() && outIter.hasNext()) {
        final Object inKey = inIter.next();
        final Object outKey = outIter.next();
        if (outKey instanceof OAlwaysGreaterKey)
            return -1;
        if (outKey instanceof OAlwaysLessKey)
            return 1;
        final int result = comparator.compare(inKey, outKey);
        if (result != 0)
            return result;
    }
    return 0;
}
java
/**
 * Returns a flattened snapshot of all variables visible in this context.
 * Inherited variables are copied first so that locally defined ones win on
 * a key collision.
 */
public Map<String, Object> getVariables() {
  final Map<String, Object> merged = new HashMap<String, Object>();
  if (inherited != null)
    merged.putAll(inherited.getVariables());
  if (variables != null)
    merged.putAll(variables);
  return merged;
}
java
/**
 * Returns the value stored for the given property, or null when no properties
 * have been recorded or the property is absent.
 */
@SuppressWarnings("unchecked")
public <T> T getPropertyValue(ElementDescriptor<T> property) {
  // Values are stored type-erased; the descriptor's type parameter makes the cast safe.
  return mProperties == null ? null : (T) mProperties.get(property);
}
java
/**
 * Removes the given property from the recorded set, if any set exists.
 */
public <T> void clear(ElementDescriptor<T> property) {
  if (mSet == null) {
    // Nothing has been recorded yet, so there is nothing to clear.
    return;
  }
  mSet.remove(property);
}
java
/**
 * Counts the records of the named class.
 *
 * @throws IllegalArgumentException when the class is not part of the schema
 */
public long countClass(final String iClassName) {
  final OClass clazz = getMetadata().getSchema().getClass(iClassName);
  if (clazz == null)
    throw new IllegalArgumentException("Class '" + iClassName + "' not found in database");
  return clazz.count();
}
java
/**
 * Resolves the serializer registered under the given id byte.
 *
 * Prefers a pre-built instance; when only a class is registered, a new
 * instance is created reflectively via the no-arg constructor.
 * NOTE(review): the reflectively created instance is not cached back into
 * serializerIdMap, so each miss re-instantiates — confirm if intended.
 *
 * @param identifier serializer id byte
 * @return the serializer, or null when the id is unknown or instantiation
 *         failed (the failure is logged rather than rethrown)
 */
public OBinarySerializer<?> getObjectSerializer(final byte identifier) {
    OBinarySerializer<?> impl = serializerIdMap.get(identifier);
    if (impl == null) {
        final Class<? extends OBinarySerializer<?>> cls = serializerClassesIdMap.get(identifier);
        if (cls != null)
            try {
                impl = cls.newInstance();
            } catch (Exception e) {
                // Swallowed on purpose: the caller receives null and must handle it.
                OLogManager.instance().error(this, "Cannot create an instance of class %s invoking the empty constructor", cls);
            }
    }
    return impl;
}
java
/**
 * Appends a record payload to the data segment under the exclusive lock.
 *
 * @param iRid owning record id (cluster id/position are stored in the header)
 * @param iContent payload bytes
 * @return absolute position of the stored record, or -1 when the content is
 *         empty (creation is deferred to the first real update)
 * @throws IOException on storage failure
 */
public long addRecord(final ORecordId iRid, final byte[] iContent) throws IOException {
    if (iContent.length == 0)
        // AVOID UNUSEFUL CREATION OF EMPTY RECORD: IT WILL BE CREATED AT FIRST UPDATE
        return -1;
    // On-disk footprint is the payload plus the fixed per-record header.
    final int recordSize = iContent.length + RECORD_FIX_SIZE;
    acquireExclusiveLock();
    try {
        final long[] newFilePosition = getFreeSpace(recordSize);
        writeRecord(newFilePosition, iRid.clusterId, iRid.clusterPosition, iContent);
        return getAbsolutePosition(newFilePosition);
    } finally {
        releaseExclusiveLock();
    }
}
java
public byte[] getRecord(final long iPosition) throws IOException { if (iPosition == -1) return null; acquireSharedLock(); try { final long[] pos = getRelativePosition(iPosition); final OFile file = files[(int) pos[0]]; final int recordSize = file.readInt(pos[1]); if (recordSize <= 0) // RECORD DELETED return null; if (pos[1] + RECORD_FIX_SIZE + recordSize > file.getFilledUpTo()) throw new OStorageException( "Error on reading record from file '" + file.getName() + "', position " + iPosition + ", size " + OFileUtils.getSizeAsString(recordSize) + ": the record size is bigger then the file itself (" + OFileUtils.getSizeAsString(getFilledUpTo()) + "). Probably the record is dirty due to a previous crash. It is strongly suggested to restore the database or export and reimport this one."); final byte[] content = new byte[recordSize]; file.read(pos[1] + RECORD_FIX_SIZE, content, recordSize); return content; } finally { releaseSharedLock(); } }
java
/**
 * Returns the stored payload size of the record at the given absolute position.
 */
public int getRecordSize(final long iPosition) throws IOException {
  acquireSharedLock();
  try {
    // Map the absolute position to (file index, offset) and read the size header.
    final long[] filePos = getRelativePosition(iPosition);
    return files[(int) filePos[0]].readInt(filePos[1]);
  } finally {
    releaseSharedLock();
  }
}
java
/**
 * Rewrites a record in place when possible, otherwise relocates it.
 *
 * Three strategies, chosen by comparing the new content length to the stored
 * size: (1) identical size — overwrite in place; (2) shrink with enough slack
 * (more than header + 50 bytes) — overwrite in place and punch a hole for the
 * leftover tail; (3) otherwise — free the whole old record and allocate fresh
 * space. Each path bumps a distinct profiler counter.
 *
 * @return the absolute position of the record after the update (may differ
 *         from iPosition only in the relocation case)
 */
public long setRecord(final long iPosition, final ORecordId iRid, final byte[] iContent) throws IOException {
    acquireExclusiveLock();
    try {
        long[] pos = getRelativePosition(iPosition);
        final OFile file = files[(int) pos[0]];
        final int recordSize = file.readInt(pos[1]);
        final int contentLength = iContent != null ? iContent.length : 0;
        if (contentLength == recordSize) {
            // USE THE OLD SPACE SINCE SIZE ISN'T CHANGED
            file.write(pos[1] + RECORD_FIX_SIZE, iContent);
            OProfiler.getInstance().updateCounter(PROFILER_UPDATE_REUSED_ALL, +1);
            return iPosition;
        } else if (recordSize - contentLength > RECORD_FIX_SIZE + 50) {
            // USE THE OLD SPACE BUT UPDATE THE CURRENT SIZE. IT'S PREFEREABLE TO USE THE SAME INSTEAD FINDING A BEST SUITED FOR IT TO
            // AVOID CHANGES TO REF FILE AS WELL.
            writeRecord(pos, iRid.clusterId, iRid.clusterPosition, iContent);
            // CREATE A HOLE WITH THE DIFFERENCE OF SPACE
            handleHole(iPosition + RECORD_FIX_SIZE + contentLength, recordSize - contentLength - RECORD_FIX_SIZE);
            OProfiler.getInstance().updateCounter(PROFILER_UPDATE_REUSED_PARTIAL, +1);
        } else {
            // CREATE A HOLE FOR THE ENTIRE OLD RECORD
            handleHole(iPosition, recordSize);
            // USE A NEW SPACE
            pos = getFreeSpace(contentLength + RECORD_FIX_SIZE);
            writeRecord(pos, iRid.clusterId, iRid.clusterPosition, iContent);
            OProfiler.getInstance().updateCounter(PROFILER_UPDATE_NOT_REUSED, +1);
        }
        return getAbsolutePosition(pos);
    } finally {
        releaseExclusiveLock();
    }
}
java
/**
 * Snapshots all live holes in the segment. Recycled hole slots (returned as
 * null by the hole segment) are skipped.
 */
public List<ODataHoleInfo> getHolesList() {
  acquireSharedLock();
  try {
    final int total = holeSegment.getHoles();
    final List<ODataHoleInfo> result = new ArrayList<ODataHoleInfo>(total);
    for (int idx = 0; idx < total; ++idx) {
      final ODataHoleInfo info = holeSegment.getHole(idx);
      if (info != null)
        result.add(info);
    }
    return result;
  } finally {
    releaseSharedLock();
  }
}
java
/**
 * Tests whether {@code lon} lies within the longitude span running eastward
 * from {@code topLeftLon} to {@code bottomRightLon}. When the left edge is
 * greater than the right edge the span wraps around the antimeridian, so the
 * test becomes a disjunction instead of a conjunction.
 */
private static boolean betweenLongitudes(double topLeftLon, double bottomRightLon, double lon) {
  final boolean wrapsAround = topLeftLon > bottomRightLon;
  if (wrapsAround)
    return lon >= topLeftLon || lon <= bottomRightLon;
  return lon >= topLeftLon && lon <= bottomRightLon;
}
java
/**
 * Returns the status code recorded for the given property, or STATUS_NONE
 * when no propstat entry exists for it.
 */
public int getPropertyStatus(ElementDescriptor<?> descriptor) {
  final PropStat stat = mPropStatByProperty.get(descriptor);
  return stat != null ? stat.getStatusCode() : STATUS_NONE;
}
java
/**
 * Returns the value recorded for the given property in its propstat entry,
 * or null when no entry exists.
 */
public <T> T getPropertyValue(ElementDescriptor<T> descriptor) {
  final PropStat stat = mPropStatByProperty.get(descriptor);
  return stat == null ? null : stat.getPropertyValue(descriptor);
}
java
/**
 * Lazily discovers index factories through the Orient class loader and caches
 * them as an immutable set. Synchronized so concurrent first calls perform
 * the lookup only once.
 */
private static synchronized Set<OIndexFactory> getFactories() {
  if (FACTORIES == null) {
    final Set<OIndexFactory> discovered = new HashSet<OIndexFactory>();
    final Iterator<OIndexFactory> it = lookupProviderWithOrientClassLoader(OIndexFactory.class, orientClassLoader);
    while (it.hasNext())
      discovered.add(it.next());
    FACTORIES = Collections.unmodifiableSet(discovered);
  }
  return FACTORIES;
}
java
/**
 * Registers a listener for this record. The backing set is created lazily and
 * holds listeners weakly, so registration alone does not keep them alive.
 */
public void addListener(final ORecordListener iListener) {
  if (_listeners == null) {
    _listeners = Collections.newSetFromMap(new WeakHashMap<ORecordListener, Boolean>());
  }
  _listeners.add(iListener);
}
java
/**
 * Maps a component type to the project folder name produced by the model for
 * the given Maven coordinates. Unknown component types yield null.
 */
public static String getComponentProjectName(int componentType, String groupId, String artifactId) {
  final IModel model = ModelFactory.newModel(groupId, artifactId, null, null, MuleVersionEnum.MAIN_MULE_VERSION, null, null);
  switch (ComponentEnum.get(componentType)) {
  case INTEGRATION_COMPONENT:
    return model.getIntegrationComponentProject();
  case INTEGRATION_TESTSTUBS_COMPONENT:
    return model.getTeststubStandaloneProject();
  case SD_SCHEMA_COMPONENT:
    return model.getSchemaProject();
  default:
    // Mirror the original behavior: no folder for unhandled component types.
    return null;
  }
}
java
/**
 * Convenience overload of the four-argument checkConsistency with no method
 * name and no call stringifier (a default one is created by the delegate).
 */
protected boolean checkConsistency(Object o1, Object o2) { return checkConsistency(o1, o2, null, null); }
java
/**
 * Compares the legacy and lightblue representations of the same entity,
 * logging a detailed inconsistency report when they differ.
 *
 * Flow: serialize both to JSON trees, diff with jiff; when a difference is
 * found, serialize to strings and log using either jiff (for very large
 * payloads) or JSONCompare (nicer diffs, heavier). All failures are logged
 * and reported as "inconsistent" (false) rather than thrown.
 *
 * @return true when both entities serialize to equal JSON (or both are null)
 */
public boolean checkConsistency(final Object legacyEntity, final Object lightblueEntity, final String methodName, MethodCallStringifier callToLogInCaseOfInconsistency) {
    // Both absent counts as consistent.
    if (legacyEntity == null && lightblueEntity == null) {
        return true;
    }
    if (callToLogInCaseOfInconsistency == null) {
        callToLogInCaseOfInconsistency = new LazyMethodCallStringifier();
    }
    try {
        Timer p2j = new Timer("ConsistencyChecker's pojo2json conversion");
        final JsonNode legacyJson = objectMapper.valueToTree(legacyEntity);
        final JsonNode lightblueJson = objectMapper.valueToTree(lightblueEntity);
        p2j.complete();
        try {
            Timer t = new Timer("checkConsistency (jiff)");
            List<JsonDelta> deltas = jiff.computeDiff(legacyJson, lightblueJson);
            boolean consistent = deltas.isEmpty();
            long jiffConsistencyCheckTook = t.complete();
            if (inconsistencyLog.isDebugEnabled()) {
                inconsistencyLog.debug("Jiff consistency check took: " + jiffConsistencyCheckTook + " ms");
                // NOTE(review): logs the literal "true" even when the check failed;
                // probably meant to log the `consistent` variable — TODO confirm.
                inconsistencyLog.debug("Jiff consistency check passed: true");
            }
            if (consistent) {
                return true;
            }
            // TODO: this can be memory intensive too, but how else to check the size of responses? 
            String legacyJsonStr = objectMapper.writeValueAsString(legacyEntity);
            String lightblueJsonStr = objectMapper.writeValueAsString(lightblueEntity);
            // JSONCompare fails when comparing booleans, convert them to strings
            if ("true".equals(legacyJsonStr) || "false".equals(legacyJsonStr)) {
                legacyJsonStr = "\"" + legacyJsonStr + "\"";
            }
            if ("true".equals(lightblueJsonStr) || "false".equals(lightblueJsonStr)) {
                lightblueJsonStr = "\"" + lightblueJsonStr + "\"";
            }
            if ("null".equals(legacyJsonStr) || "null".equals(lightblueJsonStr)) {
                logInconsistency(Thread.currentThread().getName(), callToLogInCaseOfInconsistency.toString(), legacyJsonStr, lightblueJsonStr, "One object is null and the other isn't");
            } else {
                if (legacyJsonStr.length() >= maxJsonStrLengthForJsonCompare || lightblueJsonStr.length() >= maxJsonStrLengthForJsonCompare) {
                    inconsistencyLog.debug("Using jiff to produce inconsistency warning");
                    // it is not very detailed (will show only first inconsistency; will tell you which array element is inconsistent, but not how), but it's easy on resources
                    logInconsistencyUsingJiff(Thread.currentThread().getName(), legacyJsonStr, lightblueJsonStr, deltas, callToLogInCaseOfInconsistency, Boolean.valueOf(System.getProperty("lightblue.facade.consistencyChecker.blocking", "false")));
                } else {
                    inconsistencyLog.debug("Using org.skyscreamer.jsonassert.JSONCompare to produce inconsistency warning");
                    // it's slow and can consume lots of memory, but produces nice diffs
                    logInconsistencyUsingJSONCompare(Thread.currentThread().getName(), legacyJsonStr, lightblueJsonStr, callToLogInCaseOfInconsistency, Boolean.valueOf(System.getProperty("lightblue.facade.consistencyChecker.blocking", "false")));
                }
            }
            // inconsistent
            return false;
        } catch (IOException e) {
            inconsistencyLog.error("Consistency check failed in " + implementationName + "." + callToLogInCaseOfInconsistency + "! 
Invalid JSON: legacyJson=" + legacyJson + ", lightblueJson=" + lightblueJson, e);
        }
    } catch (Exception e) {
        inconsistencyLog.error("Consistency check failed in " + implementationName + "." + callToLogInCaseOfInconsistency + "! legacyEntity=" + legacyEntity + ", lightblueEntity=" + lightblueEntity, e);
    }
    return false;
}
java
/**
 * Executes a parsed INSERT command.
 *
 * Two targets: when indexName is set, each parsed record is put into the
 * index as a key/value pair and the last entry is returned wrapped in an
 * ODocument; otherwise one ODocument per record is created (optionally typed
 * by className, optionally saved into clusterName) and the single document or
 * the full list is returned.
 *
 * @throws OCommandExecutionException if the command was not parsed yet or the
 *         target index does not exist
 */
public Object execute(final Map<Object, Object> iArgs) {
    if (newRecords == null)
        throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet");
    final OCommandParameters commandParameters = new OCommandParameters(iArgs);
    if (indexName != null) {
        final OIndex<?> index = getDatabase().getMetadata().getIndexManager().getIndex(indexName);
        if (index == null)
            throw new OCommandExecutionException("Target index '" + indexName + "' not found");
        // BIND VALUES
        Map<String, Object> result = null;
        for (Map<String, Object> candidate : newRecords) {
            index.put(getIndexKeyValue(commandParameters, candidate), getIndexValue(commandParameters, candidate));
            result = candidate;
        }
        // RETURN LAST ENTRY
        return new ODocument(result);
    } else {
        // CREATE NEW DOCUMENTS
        final List<ODocument> docs = new ArrayList<ODocument>();
        for (Map<String, Object> candidate : newRecords) {
            final ODocument doc = className != null ? new ODocument(className) : new ODocument();
            OSQLHelper.bindParameters(doc, candidate, commandParameters);
            if (clusterName != null) {
                doc.save(clusterName);
            } else {
                doc.save();
            }
            docs.add(doc);
        }
        if (docs.size() == 1) {
            return docs.get(0);
        } else {
            return docs;
        }
    }
}
java
/**
 * Creates a directed edge from this vertex to the target and wires it into
 * both vertex documents: the edge document is added to this vertex's OUT set
 * and to the target's IN set, and both documents are marked dirty so the
 * change is persisted on save.
 *
 * @param iTargetVertex destination vertex (required)
 * @param iClassName edge class name
 * @return the newly created edge
 * @throws IllegalArgumentException when the target vertex is null
 */
@SuppressWarnings("unchecked")
public OGraphEdge link(final OGraphVertex iTargetVertex, final String iClassName) {
    if (iTargetVertex == null)
        throw new IllegalArgumentException("Missed the target vertex");
    // CREATE THE EDGE BETWEEN ME AND THE TARGET
    final OGraphEdge edge = new OGraphEdge(database, iClassName, this, iTargetVertex);
    getOutEdges().add(edge);
    // Lazily create the OUT collection on this vertex's backing document.
    Set<ODocument> recordEdges = ((Set<ODocument>) document.field(OGraphDatabase.VERTEX_FIELD_OUT));
    if (recordEdges == null) {
        recordEdges = new HashSet<ODocument>();
        document.field(OGraphDatabase.VERTEX_FIELD_OUT, recordEdges);
    }
    recordEdges.add(edge.getDocument());
    document.setDirty();
    // INSERT INTO THE INGOING EDGES OF TARGET
    iTargetVertex.getInEdges().add(edge);
    recordEdges = ((Set<ODocument>) iTargetVertex.getDocument().field(OGraphDatabase.VERTEX_FIELD_IN));
    if (recordEdges == null) {
        recordEdges = new HashSet<ODocument>();
        iTargetVertex.getDocument().field(OGraphDatabase.VERTEX_FIELD_IN, recordEdges);
    }
    recordEdges.add(edge.getDocument());
    iTargetVertex.getDocument().setDirty();
    return edge;
}
java
/**
 * Removes the edge from this vertex to the target by delegating to the static
 * helper that rewires both backing documents.
 *
 * @throws IllegalArgumentException when the target vertex is null
 */
public OGraphVertex unlink(final OGraphVertex iTargetVertex) {
  if (iTargetVertex == null) {
    throw new IllegalArgumentException("Missed the target vertex");
  }
  unlink(database, document, iTargetVertex.getDocument());
  return this;
}
java
/**
 * Reports whether this vertex's document records any incoming edges.
 */
public boolean hasInEdges() {
  final Set<ODocument> incoming = document.field(OGraphDatabase.VERTEX_FIELD_IN);
  return incoming != null && !incoming.isEmpty();
}
java
/**
 * Reports whether this vertex's document records any outgoing edges.
 */
public boolean hasOutEdges() {
  final Set<ODocument> outgoing = document.field(OGraphDatabase.VERTEX_FIELD_OUT);
  return outgoing != null && !outgoing.isEmpty();
}
java
/**
 * Returns the incoming edges of this vertex, optionally filtered by label.
 *
 * When the soft-referenced cache is empty, the edge documents are materialized
 * from the vertex document; when a cache exists and a label is given, a
 * filtered copy is returned (the cache itself keeps all edges).
 *
 * NOTE(review): in the cache-miss branch temp is only allocated when
 * iEdgeLabel == null; with a non-null label and matching documents, temp.add
 * would dereference null — confirm whether that path is reachable.
 */
public Set<OGraphEdge> getInEdges(final String iEdgeLabel) {
    Set<OGraphEdge> temp = in != null ? in.get() : null;
    if (temp == null) {
        if (iEdgeLabel == null)
            temp = new HashSet<OGraphEdge>();
        in = new SoftReference<Set<OGraphEdge>>(temp);
        final Set<Object> docs = document.field(OGraphDatabase.VERTEX_FIELD_IN);
        if (docs != null) {
            // TRANSFORM ALL THE ARCS
            for (Object o : docs) {
                final ODocument doc = (ODocument) ((OIdentifiable) o).getRecord();
                if (iEdgeLabel != null && !iEdgeLabel.equals(doc.field(OGraphDatabase.LABEL)))
                    continue;
                temp.add((OGraphEdge) database.getUserObjectByRecord(doc, null));
            }
        }
    } else if (iEdgeLabel != null) {
        // FILTER THE EXISTENT COLLECTION
        HashSet<OGraphEdge> filtered = new HashSet<OGraphEdge>();
        for (OGraphEdge e : temp) {
            if (iEdgeLabel.equals(e.getLabel()))
                filtered.add(e);
        }
        temp = filtered;
    }
    return temp;
}
java
/**
 * Collects the vertexes reachable through this vertex's outgoing edges.
 *
 * Uses the soft-referenced edge cache when present; otherwise walks the raw
 * edge documents and resolves each edge's IN field (the edge's destination
 * vertex) through the database's user-object registry.
 */
@SuppressWarnings("unchecked")
public Set<OGraphVertex> browseOutEdgesVertexes() {
    final Set<OGraphVertex> resultset = new HashSet<OGraphVertex>();
    Set<OGraphEdge> temp = out != null ? out.get() : null;
    if (temp == null) {
        final Set<OIdentifiable> docEdges = (Set<OIdentifiable>) document.field(OGraphDatabase.VERTEX_FIELD_OUT);
        // TRANSFORM ALL THE EDGES
        if (docEdges != null)
            for (OIdentifiable d : docEdges) {
                resultset.add((OGraphVertex) database.getUserObjectByRecord(
                    (ODocument) ((ODocument) d.getRecord()).field(OGraphDatabase.EDGE_FIELD_IN), null));
            }
    } else {
        for (OGraphEdge edge : temp) {
            resultset.add(edge.getIn());
        }
    }
    return resultset;
}
java
/**
 * Collects the vertexes from which this vertex can be reached through its
 * incoming edges.
 *
 * Mirror of browseOutEdgesVertexes: uses the soft-referenced edge cache when
 * present, otherwise resolves each edge document's OUT field (the edge's
 * source vertex) through the database's user-object registry.
 */
@SuppressWarnings("unchecked")
public Set<OGraphVertex> browseInEdgesVertexes() {
    final Set<OGraphVertex> resultset = new HashSet<OGraphVertex>();
    Set<OGraphEdge> temp = in != null ? in.get() : null;
    if (temp == null) {
        final Set<ODocument> docEdges = (Set<ODocument>) document.field(OGraphDatabase.VERTEX_FIELD_IN);
        // TRANSFORM ALL THE EDGES
        if (docEdges != null)
            for (ODocument d : docEdges) {
                resultset.add((OGraphVertex) database.getUserObjectByRecord((ODocument) d.field(OGraphDatabase.EDGE_FIELD_OUT), null));
            }
    } else {
        for (OGraphEdge edge : temp) {
            resultset.add(edge.getOut());
        }
    }
    return resultset;
}
java
/**
 * Removes the edge between two vertex documents, updating both the in-memory
 * user objects (when registered) and the persisted OUT/IN document sets, then
 * deletes the edge document itself.
 *
 * NOTE(review): the first two cleanup loops call obj.remove(e) while
 * iterating obj with a for-each — this risks ConcurrentModificationException;
 * the later document loops correctly use a temp list. Confirm and align.
 *
 * NOTE(review): in the target-vertex cleanup below, the match condition
 * compares EDGE_FIELD_IN against iTargetVertex — for the IN set one would
 * expect EDGE_FIELD_OUT vs iSourceVertex; looks like a copy-paste slip —
 * TODO confirm against the edge document layout.
 *
 * @throws IllegalArgumentException when the target vertex is null
 * @throws OGraphException when no outgoing edge to the target exists
 */
public static void unlink(final ODatabaseGraphTx iDatabase, final ODocument iSourceVertex, final ODocument iTargetVertex) {
    if (iTargetVertex == null)
        throw new IllegalArgumentException("Missed the target vertex");
    if (iDatabase.existsUserObjectByRID(iSourceVertex.getIdentity())) {
        // WORK ALSO WITH IN MEMORY OBJECTS
        final OGraphVertex vertex = (OGraphVertex) iDatabase.getUserObjectByRecord(iSourceVertex, null);
        // REMOVE THE EDGE OBJECT
        if (vertex.out != null) {
            final Set<OGraphEdge> obj = vertex.out.get();
            if (obj != null)
                for (OGraphEdge e : obj)
                    if (e.getIn().getDocument().equals(iTargetVertex))
                        obj.remove(e);
        }
    }
    if (iDatabase.existsUserObjectByRID(iTargetVertex.getIdentity())) {
        // WORK ALSO WITH IN MEMORY OBJECTS
        final OGraphVertex vertex = (OGraphVertex) iDatabase.getUserObjectByRecord(iTargetVertex, null);
        // REMOVE THE EDGE OBJECT FROM THE TARGET VERTEX
        if (vertex.in != null) {
            final Set<OGraphEdge> obj = vertex.in.get();
            if (obj != null)
                for (OGraphEdge e : obj)
                    if (e.getOut().getDocument().equals(iSourceVertex))
                        obj.remove(e);
        }
    }
    final List<ODocument> edges2Remove = new ArrayList<ODocument>();
    // REMOVE THE EDGE DOCUMENT
    ODocument edge = null;
    Set<ODocument> docs = iSourceVertex.field(OGraphDatabase.VERTEX_FIELD_OUT);
    if (docs != null) {
        // USE A TEMP ARRAY TO AVOID CONCURRENT MODIFICATION TO THE ITERATOR
        for (OIdentifiable d : docs) {
            final ODocument doc = (ODocument) d.getRecord();
            if (doc.field(OGraphDatabase.EDGE_FIELD_IN).equals(iTargetVertex)) {
                edges2Remove.add(doc);
                edge = doc;
            }
        }
        for (ODocument d : edges2Remove)
            docs.remove(d);
    }
    if (edge == null)
        throw new OGraphException("Edge not found between the ougoing edges");
    iSourceVertex.setDirty();
    iSourceVertex.save();
    docs = iTargetVertex.field(OGraphDatabase.VERTEX_FIELD_IN);
    // REMOVE THE EDGE DOCUMENT FROM THE TARGET VERTEX
    if (docs != null) {
        edges2Remove.clear();
        for (OIdentifiable d : docs) {
            final ODocument doc = (ODocument) d.getRecord();
            if (doc.field(OGraphDatabase.EDGE_FIELD_IN).equals(iTargetVertex))
                edges2Remove.add(doc);
        }
        for (ODocument d : edges2Remove)
            docs.remove(d);
    }
    iTargetVertex.setDirty();
    iTargetVertex.save();
    edge.delete();
}
java
/**
 * Sets the maximum number of search results; a non-positive value removes any
 * previously configured limit. Returns this collection for chaining.
 */
public SyncCollection limitNumberOfResults(int limit) {
  if (limit <= 0) {
    removeLimit(WebDavSearch.NRESULTS);
  } else {
    addLimit(WebDavSearch.NRESULTS, limit);
  }
  return this;
}
java
/**
 * Returns the configured result limit, or 0 when no limit is set.
 */
public int getNumberOfResultsLimit() {
  if (mLimit == null) {
    return 0;
  }
  final Integer stored = (Integer) mLimit.get(WebDavSearch.NRESULTS);
  return stored != null ? stored : 0;
}
java
/**
 * Records a search limit, lazily allocating the backing map (small initial
 * capacity: searches rarely carry more than a few limits).
 */
private <T> void addLimit(ElementDescriptor<T> descriptor, T limit) {
  if (mLimit == null) {
    mLimit = new HashMap<ElementDescriptor<?>, Object>(6);
  }
  mLimit.put(descriptor, limit);
}
java
/**
 * Stores the value and releases any threads blocked in get/await for this key.
 * The value is published to the cache before the latch is opened, so waiters
 * observe it once await returns.
 */
public void put(K k, V v) { cache.put(k, v); acquireLock(k).countDown(); }
java
/**
 * Blocks until a value has been put for the key, then returns it.
 *
 * @throws InterruptedException if interrupted while waiting
 */
public V get(K k) throws InterruptedException { await(k); return cache.get(k); }
java
/**
 * Blocks up to the given timeout for a value to be put for the key, then
 * returns it.
 *
 * @throws TimeoutException if no value arrives within the timeout
 * @throws InterruptedException if interrupted while waiting
 */
public V get(K k, long timeout, TimeUnit unit) throws InterruptedException, TimeoutException { await(k, timeout, unit); return cache.get(k); }
java
/**
 * Waits up to the given timeout for the key's latch to open (i.e. for a value
 * to be put), converting an elapsed timeout into a TimeoutException.
 */
public void await(K k, long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
  final boolean arrived = acquireLock(k).await(timeout, unit);
  if (!arrived) {
    throw new TimeoutException("Wait time for retrieving value for key " + k + " exceeded " + timeout + " " + unit);
  }
}
java
/**
 * Prepares the directories of the named endpoints for a test run.
 *
 * Looks up every named service (failing fast if one is missing), then
 * initializes each endpoint's directory.
 *
 * NOTE(review): the service stop/start calls are commented out, so the
 * collected services list is only used to verify the lookups succeed, and
 * the final loop has an empty body — confirm whether the lifecycle handling
 * should be restored or the dead code removed.
 */
public static void initEndpointDirectories(MuleContext muleContext, String[] serviceNames, String[] endpointNames) throws Exception {
    // Stop all named services (either Flows or services
    List<Lifecycle> services = new ArrayList<Lifecycle>();
    for (String serviceName : serviceNames) {
        try {
            Lifecycle service = muleContext.getRegistry().lookupObject(serviceName);
            // logServiceStatus(service);
            // service.stop();
            // logServiceStatus(service);
            services.add(service);
        } catch (Exception e) {
            logger.error("Error '" + e.getMessage() + "' occured while stopping the service " + serviceName + ". Perhaps the service did not exist in the config?");
            throw e;
        }
    }
    // Now init the directory for each named endpoint, one by one
    for (String endpointName : endpointNames) {
        initEndpointDirectory(muleContext, endpointName);
    }
    // We are done, startup the services again so that the test can begin...
    for (@SuppressWarnings("unused")
    Lifecycle service : services) {
        // logServiceStatus(service);
        // service.start();
        // logServiceStatus(service);
    }
}
java
/**
 * Creates a connected {@link SftpClient} for the endpoint registered under
 * the given name.
 *
 * @param muleContext context used to resolve the endpoint
 * @param endpointName registry name of the endpoint
 * @return a client connected to the endpoint's host
 * @throws RuntimeException wrapping any connection or login failure
 */
static protected SftpClient getSftpClient(MuleContext muleContext, String endpointName) throws IOException {
    ImmutableEndpoint endpoint = getImmutableEndpoint(muleContext, endpointName);
    try {
        // The connection factory encapsulates both password and identity-file logins.
        return SftpConnectionFactory.createClient(endpoint);
    } catch (Exception e) {
        throw new RuntimeException("Login failed", e);
    }
}
java
/**
 * Best-effort recursive delete of a remote directory tree under an endpoint's
 * base path: makes the directory writable, deletes all sub-directories
 * depth-first, then all files, then the directory itself. All failures are
 * logged at debug level and swallowed, so a partially deleted tree is
 * possible by design (test cleanup helper).
 */
static protected void recursiveDelete(MuleContext muleContext, SftpClient sftpClient, String endpointName, String relativePath) throws IOException {
    EndpointURI endpointURI = getImmutableEndpoint(muleContext, endpointName).getEndpointURI();
    String path = endpointURI.getPath() + relativePath;
    try {
        // Ensure that we can delete the current directory and the below
        // directories (if write is not permitted then delete is either)
        sftpClient.chmod(path, 00700);
        sftpClient.changeWorkingDirectory(sftpClient.getAbsolutePath(path));
        // Delete all sub-directories
        String[] directories = sftpClient.listDirectories();
        for (String directory : directories) {
            recursiveDelete(muleContext, sftpClient, endpointName, relativePath + "/" + directory);
        }
        // Needs to change the directory back after the recursiveDelete
        sftpClient.changeWorkingDirectory(sftpClient.getAbsolutePath(path));
        // Delete all files
        String[] files = sftpClient.listFiles();
        for (String file : files) {
            sftpClient.deleteFile(file);
        }
        // Delete the directory
        try {
            sftpClient.deleteDirectory(path);
        } catch (Exception e) {
            if (logger.isDebugEnabled())
                logger.debug("Failed delete directory " + path, e);
        }
    } catch (Exception e) {
        if (logger.isDebugEnabled())
            logger.debug("Failed to recursivly delete directory " + path, e);
    }
}
java
/**
 * Persists this node if its data provider is dirty.
 *
 * Any linked node (left/right/parent) whose identity is still new is saved
 * first so this node's pointers reference real record ids. When this node is
 * itself new, only the neighbor's provider is saved (a shallow "get an id"
 * save); otherwise the neighbor is saved fully. Finally the node is saved and
 * (re-)registered in the tree's in-memory cache.
 */
public OMVRBTreeEntryPersistent<K, V> save() throws OSerializationException {
    if (!dataProvider.isEntryDirty())
        return this;
    final boolean isNew = dataProvider.getIdentity().isNew();
    // FOR EACH NEW LINK, SAVE BEFORE
    if (left != null && left.dataProvider.getIdentity().isNew()) {
        if (isNew) {
            // TEMPORARY INCORRECT SAVE FOR GETTING AN ID. WILL BE SET DIRTY AGAIN JUST AFTER
            left.dataProvider.save();
        } else
            left.save();
    }
    if (right != null && right.dataProvider.getIdentity().isNew()) {
        if (isNew) {
            // TEMPORARY INCORRECT SAVE FOR GETTING AN ID. WILL BE SET DIRTY AGAIN JUST AFTER
            right.dataProvider.save();
        } else
            right.save();
    }
    if (parent != null && parent.dataProvider.getIdentity().isNew()) {
        if (isNew) {
            // TEMPORARY INCORRECT SAVE FOR GETTING AN ID. WILL BE SET DIRTY AGAIN JUST AFTER
            parent.dataProvider.save();
        } else
            parent.save();
    }
    dataProvider.save();
    // if (parent != null)
    // if (!parent.record.getIdentity().equals(parentRid))
    // OLogManager.instance().error(this,
    // "[save]: Tree node %s has parentRid '%s' different by the rid of the assigned parent node: %s", record.getIdentity(),
    // parentRid, parent.record.getIdentity());
    checkEntryStructure();
    if (pTree.searchNodeInCache(dataProvider.getIdentity()) != this) {
        // UPDATE THE CACHE
        pTree.addNodeInMemory(this);
    }
    return this;
}
java
/**
 * Deletes this node and its entire subtree, depth-first: children are loaded
 * eagerly and deleted before this node removes itself from the in-memory
 * cache and the storage, unregisters its identity listener and clears its
 * state. A node with no data provider is a no-op.
 */
public OMVRBTreeEntryPersistent<K, V> delete() throws IOException {
    if (dataProvider != null) {
        pTree.removeNodeFromMemory(this);
        pTree.removeEntry(dataProvider.getIdentity());
        // EARLY LOAD LEFT AND DELETE IT RECURSIVELY
        if (getLeft() != null)
            ((OMVRBTreeEntryPersistent<K, V>) getLeft()).delete();
        // EARLY LOAD RIGHT AND DELETE IT RECURSIVELY
        if (getRight() != null)
            ((OMVRBTreeEntryPersistent<K, V>) getRight()).delete();
        // DELETE MYSELF
        dataProvider.removeIdentityChangedListener(this);
        dataProvider.delete();
        clear();
    }
    return this;
}
java
protected int disconnect(final boolean iForceDirty, final int iLevel) { if (dataProvider == null) // DIRTY NODE, JUST REMOVE IT return 1; int totalDisconnected = 0; final ORID rid = dataProvider.getIdentity(); boolean disconnectedFromParent = false; if (parent != null) { // DISCONNECT RECURSIVELY THE PARENT NODE if (canDisconnectFrom(parent) || iForceDirty) { if (parent.left == this) { parent.left = null; } else if (parent.right == this) { parent.right = null; } else OLogManager.instance().warn(this, "Node " + rid + " has the parent (" + parent + ") unlinked to itself. It links to " + parent); totalDisconnected += parent.disconnect(iForceDirty, iLevel + 1); parent = null; disconnectedFromParent = true; } } else { disconnectedFromParent = true; } boolean disconnectedFromLeft = false; if (left != null) { // DISCONNECT RECURSIVELY THE LEFT NODE if (canDisconnectFrom(left) || iForceDirty) { if (left.parent == this) left.parent = null; else OLogManager.instance().warn(this, "Node " + rid + " has the left (" + left + ") unlinked to itself. It links to " + left.parent); totalDisconnected += left.disconnect(iForceDirty, iLevel + 1); left = null; disconnectedFromLeft = true; } } else { disconnectedFromLeft = true; } boolean disconnectedFromRight = false; if (right != null) { // DISCONNECT RECURSIVELY THE RIGHT NODE if (canDisconnectFrom(right) || iForceDirty) { if (right.parent == this) right.parent = null; else OLogManager.instance().warn(this, "Node " + rid + " has the right (" + right + ") unlinked to itself. 
It links to " + right.parent); totalDisconnected += right.disconnect(iForceDirty, iLevel + 1); right = null; disconnectedFromRight = true; } } else { disconnectedFromLeft = true; } if (disconnectedFromParent && disconnectedFromLeft && disconnectedFromRight) if ((!dataProvider.isEntryDirty() && !dataProvider.getIdentity().isTemporary() || iForceDirty) && !pTree.isNodeEntryPoint(this)) { totalDisconnected++; pTree.removeNodeFromMemory(this); clear(); } return totalDisconnected; }
java
/**
 * Replaces the value at the tree's current page index.
 *
 * @return the previous value at that position
 */
public V setValue(final V iValue) {
    V oldValue = getValue();
    int index = tree.getPageIndex();
    // Only flag the entry dirty if the provider actually accepted the write.
    if (dataProvider.setValueAt(index, iValue))
        markDirty();
    return oldValue;
}
java
/**
 * Reads an identifiable entity from the binary channel.
 *
 * The leading short discriminates the payload: RECORD_NULL yields null,
 * RECORD_RID yields a bare record id, anything else is a full record whose
 * type byte selects the concrete class and whose rid/version/bytes follow.
 */
public static OIdentifiable readIdentifiable(final OChannelBinaryClient network) throws IOException {
    final int classId = network.readShort();
    if (classId == RECORD_NULL)
        return null;
    if (classId == RECORD_RID) {
        return network.readRID();
    } else {
        final ORecordInternal<?> record = Orient.instance().getRecordFactoryManager().newInstance(network.readByte());
        if (record instanceof ORecordSchemaAware<?>)
            ((ORecordSchemaAware<?>) record).fill(network.readRID(), network.readInt(), network.readBytes(), false);
        else
            // DISCARD CLASS ID
            record.fill(network.readRID(), network.readInt(), network.readBytes(), false);
        return record;
    }
}
java
/**
 * Indexes documents by the identity extracted from the given identity fields.
 * A null input list yields an empty map.
 */
public static Map<Identity, JsonNode> getDocumentIdMap(List<JsonNode> list, List<String> identityFields) {
  final Map<Identity, JsonNode> idToDoc = new HashMap<>();
  if (list == null) {
    return idToDoc;
  }
  LOGGER.debug("Getting doc IDs for {} docs, fields={}", list.size(), identityFields);
  for (JsonNode doc : list) {
    final Identity identity = new Identity(doc, identityFields);
    LOGGER.debug("ID={}", identity);
    idToDoc.put(identity, doc);
  }
  return idToDoc;
}
java
/**
 * Cheap structural comparison of two documents.
 *
 * Diffs with array order ignored and leaves only, skipping any field whose
 * last path segment ends with '#'. A delta counts only when its path is not
 * excluded and the leaf values are "really" different (per reallyDifferent,
 * which can ignore sub-second timestamp drift).
 *
 * @return true when a significant difference was found; false when the docs
 *         match OR the comparison itself threw (the error is logged) —
 *         NOTE(review): failure and equality are indistinguishable to callers.
 */
public static boolean fastCompareDocs(JsonNode sourceDocument, JsonNode destinationDocument, List<String> exclusionPaths, boolean ignoreTimestampMSDiffs) {
    try {
        JsonDiff diff = new JsonDiff();
        diff.setOption(JsonDiff.Option.ARRAY_ORDER_INSIGNIFICANT);
        diff.setOption(JsonDiff.Option.RETURN_LEAVES_ONLY);
        diff.setFilter(new AbstractFieldFilter() {
            public boolean includeField(List<String> fieldName) {
                // Fields whose name ends in '#' are bookkeeping fields: skip them.
                return !fieldName.get(fieldName.size() - 1).endsWith("#");
            }
        });
        List<JsonDelta> list = diff.computeDiff(sourceDocument, destinationDocument);
        for (JsonDelta x : list) {
            String field = x.getField();
            if (!isExcluded(exclusionPaths, field)) {
                if (reallyDifferent(x.getNode1(), x.getNode2(), ignoreTimestampMSDiffs)) {
                    return true;
                }
            }
        }
    } catch (Exception e) {
        LOGGER.error("Cannot compare docs:{}", e, e);
    }
    return false;
}
java
/**
 * Registers a new free-space hole at the given data offset.
 *
 * A recycled slot from freeHoles is reused when available; otherwise a new
 * slot is appended (also growing the backing file). The hole is indexed both
 * by size and by position, maxHoleSize is maintained, and the (offset, size)
 * pair is persisted at the slot's file position.
 */
public synchronized void createHole(final long iRecordOffset, final int iRecordSize) throws IOException {
    final long timer = OProfiler.getInstance().startChrono();
    // IN MEMORY
    final int recycledPosition;
    final ODataHoleInfo hole;
    if (!freeHoles.isEmpty()) {
        // RECYCLE THE FIRST FREE HOLE
        recycledPosition = freeHoles.remove(0);
        hole = availableHolesList.get(recycledPosition);
        hole.dataOffset = iRecordOffset;
        hole.size = iRecordSize;
    } else {
        // APPEND A NEW ONE
        recycledPosition = getHoles();
        hole = new ODataHoleInfo(iRecordSize, iRecordOffset, recycledPosition);
        availableHolesList.add(hole);
        file.allocateSpace(RECORD_SIZE);
    }
    availableHolesBySize.put(hole, hole);
    availableHolesByPosition.put(hole, hole);
    if (maxHoleSize < iRecordSize)
        maxHoleSize = iRecordSize;
    // TO FILE
    final long p = recycledPosition * RECORD_SIZE;
    file.writeLong(p, iRecordOffset);
    file.writeInt(p + OBinaryProtocol.SIZE_LONG, iRecordSize);
    OProfiler.getInstance().stopChrono(PROFILER_DATA_HOLE_CREATE, timer);
}
java
/**
 * Returns the hole stored at the given slot, or null when the slot has been
 * recycled (a dataOffset of -1 marks a deleted hole).
 */
public synchronized ODataHoleInfo getHole(final int iPosition) {
  final ODataHoleInfo hole = availableHolesList.get(iPosition);
  return hole.dataOffset == -1 ? null : hole;
}
java
/**
 * Updates an existing hole's data offset and/or size, keeping the in-memory
 * indexes and the on-disk hole record in sync. Only the parts that actually
 * changed are touched.
 *
 * @param iHole          the hole to update (mutated in place)
 * @param iNewDataOffset new data offset
 * @param iNewRecordSize new size in bytes
 * @throws IOException on failure writing to the holes file
 */
public synchronized void updateHole(final ODataHoleInfo iHole, final long iNewDataOffset, final int iNewRecordSize) throws IOException {
  final long timer = OProfiler.getInstance().startChrono();
  final boolean offsetChanged = iNewDataOffset != iHole.dataOffset;
  final boolean sizeChanged = iNewRecordSize != iHole.size;
  if (maxHoleSize < iNewRecordSize)
    maxHoleSize = iNewRecordSize;
  // IN MEMORY
  // Remove the hole from the indexes BEFORE mutating its fields: the maps are
  // presumably ordered on the current offset/size, so mutating first would
  // strand stale entries -- keep this remove/mutate/re-insert order.
  if (offsetChanged)
    availableHolesByPosition.remove(iHole);
  if (sizeChanged)
    availableHolesBySize.remove(iHole);
  if (offsetChanged)
    iHole.dataOffset = iNewDataOffset;
  if (sizeChanged)
    iHole.size = iNewRecordSize;
  if (offsetChanged)
    availableHolesByPosition.put(iHole, iHole);
  if (sizeChanged)
    availableHolesBySize.put(iHole, iHole);
  // TO FILE
  final long holePosition = iHole.holeOffset * RECORD_SIZE;
  if (offsetChanged)
    file.writeLong(holePosition, iNewDataOffset);
  if (sizeChanged)
    file.writeInt(holePosition + OBinaryProtocol.SIZE_LONG, iNewRecordSize);
  OProfiler.getInstance().stopChrono(PROFILER_DATA_HOLE_UPDATE, timer);
}
java
public synchronized void deleteHole(int iHolePosition) throws IOException { // IN MEMORY final ODataHoleInfo hole = availableHolesList.get(iHolePosition); availableHolesBySize.remove(hole); availableHolesByPosition.remove(hole); hole.dataOffset = -1; freeHoles.add(iHolePosition); // TO FILE iHolePosition = iHolePosition * RECORD_SIZE; file.writeLong(iHolePosition, -1); }
java
protected T getObject() { final T object; if (reusedObject != null) { // REUSE THE SAME RECORD AFTER HAVING RESETTED IT object = reusedObject; object.reset(); } else // CREATE A NEW ONE object = (T) database.newInstance(className); return object; }
java
/**
 * Computes the SHA-1 digest of the given string (UTF-8 encoded) and returns it
 * as a 40-character lowercase hexadecimal string.
 *
 * @param input text to hash
 * @return 40-character hex representation of the SHA-1 digest
 * @throws NoSuchAlgorithmException if the JVM has no SHA1 provider
 */
public static String sha1(String input) throws NoSuchAlgorithmException {
  MessageDigest mDigest = MessageDigest.getInstance("SHA1");
  // StandardCharsets.UTF_8 cannot fail, removing the checked
  // UnsupportedEncodingException that previously forced the Lombok
  // @SneakyThrows(IOException.class) workaround.
  byte[] result = mDigest.digest(input.getBytes(java.nio.charset.StandardCharsets.UTF_8));
  // %040x zero-pads on the left so leading zero bytes of the digest are kept.
  return String.format("%040x", new BigInteger(1, result));
}
java
/**
 * Persists the record through the database's save pipeline. If anything goes
 * wrong the record is evicted from the level-1 cache (so no dirty copy
 * survives) and the exception is rethrown: runtime exceptions as-is, checked
 * ones wrapped in OException.
 *
 * @param iClusterName target cluster, or null for the default
 * @param iMode        synchronous/asynchronous operation mode
 * @param iCallback    invoked with the result when the mode is asynchronous -- TODO confirm
 */
public void saveRecord(final ORecordInternal<?> iRecord, final String iClusterName, final OPERATION_MODE iMode, final ORecordCallback<? extends Number> iCallback) {
  try {
    database.executeSaveRecord(iRecord, iClusterName, iRecord.getVersion(), iRecord.getRecordType(), true, iMode, iCallback);
  } catch (Exception e) {
    // REMOVE IT FROM THE CACHE TO AVOID DIRTY RECORDS
    final ORecordId rid = (ORecordId) iRecord.getIdentity();
    if (rid.isValid())
      database.getLevel1Cache().freeRecord(rid);
    if (e instanceof RuntimeException)
      throw (RuntimeException) e;
    throw new OException(e);
  }
}
java
/**
 * Deletes the record through the database's delete pipeline. Records that were
 * never persisted are silently ignored. On failure the record is evicted from
 * the level-1 cache and the exception is rethrown: runtime exceptions as-is,
 * checked ones wrapped in OException (mirrors saveRecord()).
 *
 * @param iMode synchronous/asynchronous operation mode
 */
public void deleteRecord(final ORecordInternal<?> iRecord, final OPERATION_MODE iMode) {
  // Nothing to delete if the record was never stored
  if (!iRecord.getIdentity().isPersistent())
    return;
  try {
    database.executeDeleteRecord(iRecord, iRecord.getVersion(), true, true, iMode);
  } catch (Exception e) {
    // REMOVE IT FROM THE CACHE TO AVOID DIRTY RECORDS
    final ORecordId rid = (ORecordId) iRecord.getIdentity();
    if (rid.isValid())
      database.getLevel1Cache().freeRecord(rid);
    if (e instanceof RuntimeException)
      throw (RuntimeException) e;
    throw new OException(e);
  }
}
java
/**
 * Binds the given positional parameters and runs this command against the
 * storage of the thread-bound database, returning the raw result cast to the
 * caller's expected type.
 */
@SuppressWarnings("unchecked")
public <RET> RET execute(final Object... iArgs) {
  setParameters(iArgs);
  final Object result = ODatabaseRecordThreadLocal.INSTANCE.get().getStorage().command(this);
  return (RET) result;
}
java
/**
 * Returns the index entries with keys between the two bounds, delegating to the
 * three-argument overload with its boolean set to true (presumably "bounds
 * inclusive" -- confirm against the overload's contract).
 */
public Collection<ODocument> getEntriesBetween(final Object iRangeFrom, final Object iRangeTo) {
  return getEntriesBetween(iRangeFrom, iRangeTo, true);
}
java
/**
 * Rebuilds the index from scratch by scanning every cluster it covers: clears
 * the underlying map, iterates all documents, extracts the value(s) defined by
 * the index definition and re-inserts them. Runs under the exclusive lock with
 * a massive-insert intent installed for the duration.
 *
 * @param iProgressListener optional; receives begin/progress/completion callbacks
 * @return number of documents actually indexed (documents with a null index value are skipped)
 * @throws OIndexException if the rebuild fails; the map is cleared again before rethrowing
 */
public long rebuild(final OProgressListener iProgressListener) {
  long documentIndexed = 0;
  final boolean intentInstalled = getDatabase().declareIntent(new OIntentMassiveInsert());
  acquireExclusiveLock();
  try {
    try {
      map.clear();
    } catch (Exception e) {
      // IGNORE EXCEPTION: IF THE REBUILD WAS LAUNCHED IN CASE OF RID INVALID CLEAR ALWAYS GOES IN ERROR
    }
    int documentNum = 0;
    long documentTotal = 0;
    // Pre-count for progress reporting
    for (final String cluster : clustersToIndex)
      documentTotal += getDatabase().countClusterElements(cluster);
    if (iProgressListener != null)
      iProgressListener.onBegin(this, documentTotal);
    for (final String clusterName : clustersToIndex)
      try {
        for (final ORecord<?> record : getDatabase().browseCluster(clusterName)) {
          if (record instanceof ODocument) {
            final ODocument doc = (ODocument) record;
            if (indexDefinition == null)
              throw new OConfigurationException("Index '" + name + "' cannot be rebuilt because has no a valid definition (" + indexDefinition + ")");
            final Object fieldValue = indexDefinition.getDocumentValueToIndex(doc);
            if (fieldValue != null) {
              // A collection value yields one index entry per element
              if (fieldValue instanceof Collection) {
                for (final Object fieldValueItem : (Collection<?>) fieldValue) {
                  put(fieldValueItem, doc);
                }
              } else
                put(fieldValue, doc);
              ++documentIndexed;
            }
          }
          documentNum++;
          if (iProgressListener != null)
            iProgressListener.onProgress(this, documentNum, documentNum * 100f / documentTotal);
        }
      } catch (NoSuchElementException e) {
        // END OF CLUSTER REACHED, IGNORE IT
      }
    lazySave();
    if (iProgressListener != null)
      iProgressListener.onCompletition(this, true);
  } catch (final Exception e) {
    if (iProgressListener != null)
      iProgressListener.onCompletition(this, false);
    // Leave no partially-rebuilt index behind
    try {
      map.clear();
    } catch (Exception e2) {
      // IGNORE EXCEPTION: IF THE REBUILD WAS LAUNCHED IN CASE OF RID INVALID CLEAR ALWAYS GOES IN ERROR
    }
    throw new OIndexException("Error on rebuilding the index for clusters: " + clustersToIndex, e);
  } finally {
    if (intentInstalled)
      getDatabase().declareIntent(null);
    releaseExclusiveLock();
  }
  return documentIndexed;
}
java
/**
 * Reports whether the socket is usable: it exists, reports itself connected,
 * and neither the input nor the output half has been shut down.
 */
public boolean isConnected() {
  // Return the boolean expression directly instead of the if/return-true/return-false idiom.
  return socket != null && socket.isConnected() && !socket.isInputShutdown() && !socket.isOutputShutdown();
}
java
/**
 * Copies every field of the backing ODocument into the matching POJO fields of
 * 'self', detaching lazy multi-value wrappers first, then mirrors the record's
 * identity and version onto the POJO's id/version fields.
 *
 * @param self the POJO to fill from the document
 */
public void detach(Object self) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException {
  for (String fieldName : doc.fieldNames()) {
    // Third argument false: presumably "do not convert/lazy-load" -- confirm getValue's contract
    Object value = getValue(self, fieldName, false, null);
    if (value instanceof OLazyObjectMultivalueElement)
      // Detach lazy collections/maps before assigning them to the plain POJO field
      ((OLazyObjectMultivalueElement) value).detach();
    OObjectEntitySerializer.setFieldValue(getField(fieldName, self.getClass()), self, value);
  }
  OObjectEntitySerializer.setIdField(self.getClass(), self, doc.getIdentity());
  OObjectEntitySerializer.setVersionField(self.getClass(), self, doc.getVersion());
}
java
/**
 * Pushes every declared field of 'self' into the underlying document, walking
 * the class hierarchy upwards. Each field value is passed through setValue()
 * and the (possibly converted) result is written back to the POJO field.
 * Proxy classes are skipped, and the walk stops before Object -- or before
 * ODocument when the POJO extends it.
 *
 * @param self the POJO whose state is attached to the document
 */
public void attach(Object self) throws IllegalArgumentException, IllegalAccessException, NoSuchMethodException, InvocationTargetException {
  for (Class<?> currentClass = self.getClass(); currentClass != Object.class;) {
    // Skip dynamic proxy classes: they declare no user fields; continue from the superclass
    if (Proxy.class.isAssignableFrom(currentClass)) {
      currentClass = currentClass.getSuperclass();
      continue;
    }
    for (Field f : currentClass.getDeclaredFields()) {
      Object value = OObjectEntitySerializer.getFieldValue(f, self);
      value = setValue(self, f.getName(), value);
      OObjectEntitySerializer.setFieldValue(f, self, value);
    }
    currentClass = currentClass.getSuperclass();
    if (currentClass == null || currentClass.equals(ODocument.class))
      // POJO EXTENDS ODOCUMENT: SPECIAL CASE: AVOID TO CONSIDER
      // ODOCUMENT FIELDS
      currentClass = Object.class;
  }
}
java
/**
 * Lazily builds (once) the script element pointing at the bundled respond.js
 * file under the module's static-files path, so it can later be injected into
 * the page. Caching the element prevents creating duplicates on repeated calls.
 */
private void ensureRespondJsScriptElement() {
  if (this.respondJsScript == null) {
    this.respondJsScript = Document.get().createScriptElement();
    this.respondJsScript.setSrc(GWT.getModuleBaseForStaticFiles() + DefaultIE8ThemeController.RESPOND_JS_LOCATION);
    this.respondJsScript.setType("text/javascript");
  }
}
java
/**
 * Builds a Spring-WS MessageDispatcherServlet whose application context is an
 * AnnotationConfigWebApplicationContext configured from the given classes.
 * WSDL location transformation is enabled.
 *
 * @param contextConfigLocation annotated configuration classes
 * @return a configured, not-yet-initialized servlet
 */
public static MessageDispatcherServlet createMessageDispatcherServlet(Class... contextConfigLocation) {
  // Join the fully-qualified class names with commas. The separator is skipped
  // before the first element, which removes the need for the previous
  // removeEnd(..., ",") trailing-separator trim (a commons-lang dependency).
  StringBuilder items = new StringBuilder();
  for (Class aClass : contextConfigLocation) {
    if (items.length() > 0) {
      items.append(",");
    }
    items.append(aClass.getName());
  }
  MessageDispatcherServlet messageDispatcherServlet = new MessageDispatcherServlet();
  messageDispatcherServlet.setContextClass(AnnotationConfigWebApplicationContext.class);
  messageDispatcherServlet.setContextConfigLocation(items.toString());
  messageDispatcherServlet.setTransformWsdlLocations(true);
  return messageDispatcherServlet;
}
java
/**
 * Extracts the received-stations count from a SOTDMA communication state.
 * The field is only defined when the slot timeout is 3, 5 or 7; otherwise null.
 */
@VisibleForTesting
static Integer getReceivedStations(AisExtractor extractor, int slotTimeout, int startIndex) {
  final boolean present = slotTimeout == 3 || slotTimeout == 5 || slotTimeout == 7;
  if (!present)
    return null;
  return extractor.getValue(startIndex + 5, startIndex + 19);
}
java
private static Integer getHourUtc(AisExtractor extractor, int slotTimeout, int startIndex) { if (slotTimeout == 1) { // skip the msb bit int hours = extractor.getValue(startIndex + 5, startIndex + 10); return hours; } else return null; }
java
private static Integer getMinuteUtc(AisExtractor extractor, int slotTimeout, int startIndex) { if (slotTimeout == 1) { // skip the msb bit int minutes = extractor.getValue(startIndex + 10, startIndex + 17); return minutes; } else return null; }
java
/**
 * Extracts the slot offset from a communication state; only transmitted when
 * the slot timeout equals 0, otherwise null.
 */
private static Integer getSlotOffset(AisExtractor extractor, int slotTimeout, int startIndex) {
  if (slotTimeout != 0)
    return null;
  return extractor.getValue(startIndex + 5, startIndex + 19);
}
java
/**
 * Decodes the requested bit range of the six-bit encoded AIS payload as an
 * unsigned value.
 *
 * @param from first bit position
 * @param to   last bit position (inclusive or exclusive per SixBit's contract -- confirm)
 * @throws AisParseException when decoding fails or the range is out of bounds
 */
public synchronized int getValue(int from, int to) {
  try {
    // is synchronized so that values of bitSet and calculated can be
    // lazily
    // calculated and safely published (thread safe).
    SixBit.convertSixBitToBits(message, padBits, bitSet, calculated, from, to);
    return (int) SixBit.getValue(from, to, bitSet);
  } catch (SixBitException | ArrayIndexOutOfBoundsException e) {
    throw new AisParseException(e);
  }
}
java
public synchronized int getSignedValue(int from, int to) { try { // is synchronized so that values of bitSet and calculated can be // lazily // calculated and safely published (thread safe). SixBit.convertSixBitToBits(message, padBits, bitSet, calculated, from, to); return (int) SixBit.getSignedValue(from, to, bitSet); } catch (SixBitException e) { throw new AisParseException(e); } }
java
public static Object getField(Object obj, String name) { try { Class<? extends Object> klass = obj.getClass(); do { try { Field field = klass.getDeclaredField(name); field.setAccessible(true); return field.get(obj); } catch (NoSuchFieldException e) { klass = klass.getSuperclass(); } } while (klass != null); throw new RuntimeException(); // true no such field exception } catch (SecurityException e) { throw new RuntimeException(e); } catch (IllegalArgumentException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } }
java
/**
 * Great-circle distance in kilometres from this position to the given one,
 * using an atan2-based spherical formula that stays numerically stable for
 * both very small and near-antipodal separations.
 */
public final double getDistanceToKm(Position position) {
  final double phi1 = toRadians(lat);
  final double phi2 = toRadians(position.lat);
  final double deltaLambda = toRadians(position.lon) - toRadians(lon);

  final double sinPhi1 = sin(phi1);
  final double sinPhi2 = sin(phi2);
  final double cosPhi1 = cos(phi1);
  final double cosPhi2 = cos(phi2);
  final double cosDelta = cos(deltaLambda);

  // Numerator and denominator of the central-angle atan2 expression
  final double y = sqrt(sqr(cosPhi2 * sin(deltaLambda)) + sqr(cosPhi1 * sinPhi2 - sinPhi1 * cosPhi2 * cosDelta));
  final double x = sinPhi1 * sinPhi2 + cosPhi1 * cosPhi2 * cosDelta;

  return abs(radiusEarthKm * atan2(y, x));
}
java
/**
 * Distance in kilometres from this position to the great-circle path running
 * from p1 to p2 (cross-track distance formula).
 */
public final double getDistanceKmToPath(Position p1, Position p2) {
  final double angularDistance = getDistanceToKm(p1) / radiusEarthKm;
  final double bearingDelta = toRadians(getBearingDegrees(p1) - p1.getBearingDegrees(p2));
  return abs(radiusEarthKm * asin(sin(angularDistance) * sin(bearingDelta)));
}
java
/**
 * Installs a custom model implementation class for the factory. The class must
 * be a subtype of DefaultModelImpl.
 *
 * @param modelClass replacement model class
 * @throws IllegalArgumentException when the class does not extend DefaultModelImpl
 */
@SuppressWarnings("unchecked")
public static void setModelClass(Class<?> modelClass) throws IllegalArgumentException {
  if (DefaultModelImpl.class.isAssignableFrom(modelClass)) {
    ModelFactory.modelClass = (Class<DefaultModelImpl>) modelClass;
    return;
  }
  throw new IllegalArgumentException("Modelclass, " + modelClass.getName() + ", is not a subtype of " + DefaultModelImpl.class.getName());
}
java
/**
 * Restores the factory's model class to the default implementation.
 * NOTE(review): logs with an "[INFO]" prefix to System.err rather than a
 * logging framework -- consider routing through the project's logger.
 */
public static void resetModelClass() {
  ModelFactory.modelClass = DefaultModelImpl.class;
  System.err.println("[INFO] Reset model-class: " + ModelFactory.modelClass.getName());
}
java
/**
 * Convenience factory: builds a model from Maven coordinates, service name,
 * Mule version and the in/out transports, delegating to doCreateNewModel with
 * null for the remaining optional arguments (their semantics are defined by
 * that method -- confirm there what the nulls default to).
 */
public static IModel newModel(String groupId, String artifactId, String version, String service, MuleVersionEnum muleVersion, TransportEnum inboundTransport, TransportEnum outboundTransport, TransformerEnum transformerType) {
  return doCreateNewModel(groupId, artifactId, version, service, muleVersion, null, null, inboundTransport, outboundTransport, transformerType, null, null);
}
java
/**
 * Attempts to pin the whole JVM process memory in RAM via mlockall(2) through
 * JNA so it cannot be swapped out. Failures are logged (with an errno-specific
 * explanation), never thrown; platforms lacking the native call raise
 * UnsatisfiedLinkError, which is caught and logged at config level.
 *
 * @param useSystemJNADisabled when true, disables use of the system-provided JNA library first
 */
public static void lockMemory(boolean useSystemJNADisabled) {
  if (useSystemJNADisabled)
    disableUsingSystemJNA();
  try {
    // Lock all pages currently mapped into the address space
    int errorCode = MemoryLockerLinux.INSTANCE.mlockall(MemoryLockerLinux.LOCK_CURRENT_MEMORY);
    if (errorCode != 0) {
      // Translate errno into a human-readable message
      final String errorMessage;
      int lastError = Native.getLastError();
      switch (lastError) {
      case MemoryLockerLinux.EPERM:
        errorMessage = "The calling process does not have the appropriate privilege to perform the requested operation(EPERM).";
        break;
      case MemoryLockerLinux.EAGAIN:
        errorMessage = "Some or all of the memory identified by the operation could not be locked when the call was made(EAGAIN).";
        break;
      case MemoryLockerLinux.ENOMEM:
        errorMessage = "Unable to lock JVM memory. This can result in part of the JVM being swapped out, especially if mmapping of files enabled. Increase RLIMIT_MEMLOCK or run OrientDB server as root(ENOMEM).";
        break;
      case MemoryLockerLinux.EINVAL:
        errorMessage = "The flags argument is zero, or includes unimplemented flags(EINVAL).";
        break;
      case MemoryLockerLinux.ENOSYS:
        errorMessage = "The implementation does not support this memory locking interface(ENOSYS).";
        break;
      default:
        errorMessage = "Unexpected exception with code " + lastError + ".";
        break;
      }
      OLogManager.instance().error(null, "[MemoryLocker.lockMemory] Error occurred while locking memory: %s", errorMessage);
    } else {
      OLogManager.instance().info(null, "[MemoryLocker.lockMemory] Memory locked successfully!");
    }
  } catch (UnsatisfiedLinkError e) {
    // Native library not available on this OS: degrade gracefully
    OLogManager.instance().config(null, "[MemoryLocker.lockMemory] Cannot lock virtual memory. It seems that you OS (%s) doesn't support this feature", System.getProperty("os.name"));
  }
}
java
/**
 * Returns the first (leftmost) entry reachable in memory from this node by
 * repeatedly following in-memory predecessors until none remain.
 */
public OMVRBTreeEntry<K, V> getFirstInMemory() {
  OMVRBTreeEntry<K, V> current = this;
  for (OMVRBTreeEntry<K, V> predecessor = current.getPreviousInMemory(); predecessor != null; predecessor = current.getPreviousInMemory())
    current = predecessor;
  return current;
}
java
/**
 * Returns this entry's in-memory predecessor, or null when this is the first
 * entry. Standard BST predecessor walk: the rightmost node of the left subtree
 * when one exists, otherwise the first ancestor from which this node hangs on
 * the right side.
 */
public OMVRBTreeEntry<K, V> getPreviousInMemory() {
  OMVRBTreeEntry<K, V> t = this;
  OMVRBTreeEntry<K, V> p = null;
  if (t.getLeftInMemory() != null) {
    // Predecessor is the maximum of the left subtree
    p = t.getLeftInMemory();
    while (p.getRightInMemory() != null)
      p = p.getRightInMemory();
  } else {
    // Climb until we leave a right subtree; that ancestor is the predecessor
    p = t.getParentInMemory();
    while (p != null && t == p.getLeftInMemory()) {
      t = p;
      p = p.getParentInMemory();
    }
  }
  return p;
}
java
/**
 * Scans the page's keys left to right for iKey. Side effects on shared tree
 * state (this is how callers read the outcome): tree.pageItemComparator holds
 * the last comparison result, tree.pageItemFound is set true on an exact match,
 * and tree.pageIndex is left at the match position or at the first key greater
 * than iKey (i.e. the insertion point).
 *
 * @return the value at the matching position, or null when iKey is absent
 */
private V linearSearch(final K iKey) {
  V value = null;
  int i = 0;
  tree.pageItemComparator = -1;
  for (int s = getSize(); i < s; ++i) {
    if (tree.comparator != null)
      tree.pageItemComparator = tree.comparator.compare(getKeyAt(i), iKey);
    else
      tree.pageItemComparator = ((Comparable<? super K>) getKeyAt(i)).compareTo(iKey);
    if (tree.pageItemComparator == 0) {
      // FOUND: SET THE INDEX AND RETURN THE NODE
      tree.pageItemFound = true;
      value = getValueAt(i);
      break;
    } else if (tree.pageItemComparator > 0)
      // Keys are ordered: once the current key exceeds iKey the key cannot appear later
      break;
  }
  tree.pageIndex = i;
  return value;
}
java
/**
 * Binary search for iKey within this page. Mirrors linearSearch's side-effect
 * contract: tree.pageItemComparator holds the last comparison, on an exact
 * match tree.pageItemFound is set and tree.pageIndex points at the match;
 * otherwise tree.pageIndex is left at the final probe position.
 *
 * @return the value at the matching position, or null when iKey is absent
 */
private V binarySearch(final K iKey) {
  int low = 0;
  int high = getSize() - 1;
  int mid = 0;
  while (low <= high) {
    // Unsigned shift avoids the classic (low + high) / 2 int overflow
    mid = (low + high) >>> 1;
    Object midVal = getKeyAt(mid);
    if (tree.comparator != null)
      tree.pageItemComparator = tree.comparator.compare((K) midVal, iKey);
    else
      tree.pageItemComparator = ((Comparable<? super K>) midVal).compareTo(iKey);
    if (tree.pageItemComparator == 0) {
      // FOUND: SET THE INDEX AND RETURN THE NODE
      tree.pageItemFound = true;
      tree.pageIndex = mid;
      return getValueAt(tree.pageIndex);
    }
    // Note: high is set to mid (not mid - 1), so the explicit low == high break
    // below is what terminates the loop in the not-found case
    if (low == high)
      break;
    if (tree.pageItemComparator < 0)
      low = mid + 1;
    else
      high = mid;
  }
  tree.pageIndex = mid;
  return null;
}
java
/**
 * Orders entries by their first key (via the tree comparator when configured,
 * natural ordering otherwise). null and empty entries sort before non-empty
 * ones; an entry is always equal to itself.
 */
public int compareTo(final OMVRBTreeEntry<K, V> o) {
  if (o == null)
    return 1;
  if (o == this)
    return 0;
  if (getSize() == 0)
    return -1;
  if (o.getSize() == 0)
    return 1;
  final K myFirst = getFirstKey();
  final K otherFirst = o.getFirstKey();
  return tree.comparator != null
      ? tree.comparator.compare(myFirst, otherFirst)
      : ((Comparable<K>) myFirst).compareTo(otherFirst);
}
java
/**
 * Parses a human-readable period string (e.g. "2 hours and 30 minutes", per
 * Joda-Time's default locale format) into a duration in milliseconds.
 */
public static long parsePeriod(String periodStr) {
  final Period parsed = PeriodFormat.getDefault().parsePeriod(periodStr);
  return parsed.toStandardDuration().getMillis();
}
java
/**
 * Computes startDate + period and returns it only when "now" is more than one
 * full period beyond that end date; otherwise returns null.
 * NOTE(review): the guard (now - period > endDate) means the end date is
 * reported only once it lies at least a whole period in the past -- presumably
 * to act only on fully elapsed windows; confirm against callers.
 *
 * @param startDate window start
 * @param period    window length in milliseconds
 * @return the window end date, or null when the window is not yet eligible
 */
public Date getEndDate(Date startDate, long period) {
  long now = getNow().getTime();
  long endDate = startDate.getTime() + period;
  if (now - period > endDate) {
    return new Date(endDate);
  } else {
    return null;
  }
}
java
/**
 * Builds the migration job(s) covering the [startDate, endDate] window.
 * Creates a single generated MigrationJob in AVAILABLE state, attaches a
 * consistency checker describing the range, and uses a migrator to produce
 * the range query for the window.
 *
 * @param startDate window start (inclusive -- confirm against createRangeQuery)
 * @param endDate   window end
 * @param ae        active execution context passed to the migrator factory
 * @return a single-element list containing the new job
 * @throws Exception propagated from migrator creation or query construction
 */
protected List<MigrationJob> createJobs(Date startDate, Date endDate, ActiveExecution ae) throws Exception {
  List<MigrationJob> ret = new ArrayList<MigrationJob>();
  LOGGER.debug("Creating the migrator to setup new jobs");
  // We setup a new migration job
  MigrationJob mj = new MigrationJob();
  mj.setConfigurationName(getMigrationConfiguration().getConfigurationName());
  mj.setScheduledDate(getNow());
  mj.setGenerated(true);
  mj.setStatus(MigrationJob.STATE_AVAILABLE);
  // Record the covered range on the consistency checker (dates serialized with the client format)
  mj.setConsistencyChecker(new MigrationJob.ConsistencyChecker());
  mj.getConsistencyChecker().setJobRangeBegin(ClientConstants.getDateFormat().format(startDate));
  mj.getConsistencyChecker().setJobRangeEnd(ClientConstants.getDateFormat().format(endDate));
  mj.getConsistencyChecker().setConfigurationName(mj.getConfigurationName());
  Migrator migrator = createMigrator(mj, ae);
  mj.setQuery(migrator.createRangeQuery(startDate, endDate));
  // At this point, mj.query contains the range query
  LOGGER.debug("Migration job query:{}", mj.getQuery());
  ret.add(mj);
  return ret;
}
java
/**
 * Looks for already-mapped buffer entries of the given file that cover the
 * range [beginOffset, beginOffset + size). The previous lookup result for the
 * same file (mapEntrySearchInfo) is used to narrow the search window, and a
 * binary search is used only when the window exceeds BINARY_SEARCH_THRESHOLD.
 *
 * @return the covering entries, or EMPTY_BUFFER_ENTRIES when none exist
 */
private OMMapBufferEntry[] searchAmongExisting(OFileMMap file, final OMMapBufferEntry[] fileEntries, final long beginOffset, final int size) {
  if (fileEntries.length == 0) {
    return EMPTY_BUFFER_ENTRIES;
  }

  // Requested range starts beyond the last mapped entry: nothing to reuse
  final OMMapBufferEntry lastEntry = fileEntries[fileEntries.length - 1];
  if (lastEntry.beginOffset + lastEntry.size <= beginOffset) {
    return EMPTY_BUFFER_ENTRIES;
  }

  // Narrow the search window using the previous lookup on the same file, if any
  final LastMMapEntrySearchInfo entrySearchInfo = mapEntrySearchInfo.get(file);
  final int beginSearchPosition;
  final int endSearchPosition;
  if (entrySearchInfo == null) {
    beginSearchPosition = 0;
    endSearchPosition = fileEntries.length - 1;
  } else {
    if (entrySearchInfo.requestedPosition <= beginOffset) {
      beginSearchPosition = entrySearchInfo.foundMmapIndex;
      endSearchPosition = fileEntries.length - 1;
    } else {
      beginSearchPosition = 0;
      endSearchPosition = entrySearchInfo.foundMmapIndex;
    }
  }

  final int resultFirstPosition;
  if (endSearchPosition - beginSearchPosition > BINARY_SEARCH_THRESHOLD)
    resultFirstPosition = binarySearch(fileEntries, beginOffset, beginSearchPosition, endSearchPosition);
  else
    resultFirstPosition = linearSearch(fileEntries, beginOffset, beginSearchPosition, endSearchPosition);

  // FIX: the original tested 'beginSearchPosition < 0', which can never be true
  // (it is always 0 or a previously found index). The value that can signal a
  // failed search is the search result itself.
  if (resultFirstPosition < 0)
    return EMPTY_BUFFER_ENTRIES;

  // Extend the result forward until the entries fully cover the requested range
  int resultLastPosition = fileEntries.length - 1;
  for (int i = resultFirstPosition; i <= resultLastPosition; i++) {
    final OMMapBufferEntry entry = fileEntries[i];
    if (entry.beginOffset + entry.size >= beginOffset + size) {
      resultLastPosition = i;
      break;
    }
  }

  final int length = resultLastPosition - resultFirstPosition + 1;
  final OMMapBufferEntry[] foundEntries = new OMMapBufferEntry[length];
  if (length > 0) {
    System.arraycopy(fileEntries, resultFirstPosition, foundEntries, 0, length);
    // Remember where we found the range to speed up the next lookup
    mapEntrySearchInfo.put(file, new LastMMapEntrySearchInfo(resultFirstPosition, beginOffset));
  }
  return foundEntries;
}
java
/**
 * Maps a new buffer entry covering the file from beginOffset to the current
 * end of file.
 * NOTE(review): the length is computed as fileSize - (int) beginOffset in two
 * places; this assumes offsets fit the int range -- confirm upstream
 * guarantees on maximum file size.
 */
private OMMapBufferEntry mapNew(final OFileMMap file, final long beginOffset) throws IOException {
  return new OMMapBufferEntry(file, file.map(beginOffset, file.getFileSize() - (int) beginOffset), beginOffset, file.getFileSize() - (int) beginOffset);
}
java
/**
 * Locks every entry for the given operation: writers take the exclusive lock
 * and mark the entry dirty (so it will be flushed), readers take the shared
 * lock.
 */
private void acquireLocksOnEntries(final OMMapBufferEntry[] entries, OPERATION_TYPE operationType) {
  final boolean forWrite = operationType == OPERATION_TYPE.WRITE;
  for (OMMapBufferEntry entry : entries) {
    if (forWrite) {
      entry.acquireWriteLock();
      entry.setDirty();
    } else {
      entry.acquireReadLock();
    }
  }
}
java
/**
 * Removes every given buffer entry. A null array (no entries mapped for the
 * file) is a no-op.
 */
private void removeFileEntries(OMMapBufferEntry[] fileEntries) {
  if (fileEntries == null)
    return;
  for (OMMapBufferEntry entry : fileEntries)
    removeEntry(entry);
}
java
/**
 * Returns the path of 'file' relative to 'srcDir', using URI relativization so
 * separators are normalized (the result uses '/' regardless of platform).
 *
 * @param file   file inside srcDir
 * @param srcDir base directory
 * @return relative path, or the file's full URI path when it is not under srcDir
 */
public static String getRelativePath(File file, File srcDir) {
  // The original rebuilt both File objects from their own getPath() values
  // before calling toURI() -- a redundant round-trip; relativize the URIs directly.
  return srcDir.toURI().relativize(file.toURI()).getPath();
}
java
public static void copyFile(File srcFile, File destFile) { OutputStream out = null; InputStream in = null; try { if (!destFile.getParentFile().exists()) destFile.getParentFile().mkdirs(); in = new FileInputStream(srcFile); out = new FileOutputStream(destFile); // Transfer bytes from in to out byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } in.close(); out.close(); } catch (IOException e) { throw new RuntimeException(e); } }
java
/**
 * Builds a ContextLoaderListener that hands Spring a pre-built
 * WebApplicationContext instead of constructing one from servlet-context
 * configuration -- useful when the context is assembled programmatically
 * (e.g. in embedded/test setups).
 */
public static ContextLoaderListener createSpringContextLoader(final WebApplicationContext webApplicationContext) {
  return new ContextLoaderListener() {
    @SuppressWarnings("unchecked")
    @Override
    protected WebApplicationContext createWebApplicationContext(ServletContext sc) {
      // Ignore the servlet context entirely and return the supplied instance
      return webApplicationContext;
    }
  };
}
java
/**
 * Walks the JSON document along the given path and returns the node at the
 * end, or null when any segment is missing. Note the delimiter set is both
 * '.' and ' ' (the original tokenizer splits on either character).
 */
public static JsonNode getFieldValue(JsonNode doc, String field) {
  JsonNode current = doc;
  StringTokenizer tokenizer = new StringTokenizer(field, ". ");
  while (current != null && tokenizer.hasMoreTokens()) {
    current = current.get(tokenizer.nextToken());
  }
  return current;
}
java
/**
 * Reads the physical-position record stored at iPPosition.clusterPosition and
 * fills the passed object in place (also returned for chaining). Record layout,
 * as read in order: [dataSegmentId:short][dataSegmentPos:long][recordType:byte][recordVersion:int].
 * Runs under the shared lock.
 */
public OPhysicalPosition getPhysicalPosition(final OPhysicalPosition iPPosition) throws IOException {
  final long filePosition = iPPosition.clusterPosition * RECORD_SIZE;
  acquireSharedLock();
  try {
    // Resolve which underlying file holds this offset, and the offset within it
    final long[] pos = fileSegment.getRelativePosition(filePosition);
    final OFile f = fileSegment.files[(int) pos[0]];
    long p = pos[1];
    // Each 'p += SIZE_x' advances the cursor past the field just read
    iPPosition.dataSegmentId = f.readShort(p);
    iPPosition.dataSegmentPos = f.readLong(p += OBinaryProtocol.SIZE_SHORT);
    iPPosition.recordType = f.readByte(p += OBinaryProtocol.SIZE_LONG);
    iPPosition.recordVersion = f.readInt(p += OBinaryProtocol.SIZE_BYTE);
    return iPPosition;
  } finally {
    releaseSharedLock();
  }
}
java
/**
 * Logically deletes the physical position at iPosition: the slot is pushed
 * onto the hole segment for reuse and the record's version field is replaced
 * by -(version + 1), which marks it deleted while preserving the version for
 * later recycling. Runs under the exclusive lock.
 */
public void removePhysicalPosition(final long iPosition) throws IOException {
  final long position = iPosition * RECORD_SIZE;
  acquireExclusiveLock();
  try {
    final long[] pos = fileSegment.getRelativePosition(position);
    final OFile file = fileSegment.files[(int) pos[0]];
    // Skip directly to the version field, past the short + long + byte header fields
    final long p = pos[1] + OBinaryProtocol.SIZE_SHORT + OBinaryProtocol.SIZE_LONG + OBinaryProtocol.SIZE_BYTE;
    holeSegment.pushPosition(position);
    // MARK DELETED SETTING VERSION TO NEGATIVE NUMBER
    final int version = file.readInt(p);
    file.writeInt(p, (version + 1) * -1);
    updateBoundsAfterDeletion(iPosition);
  } finally {
    releaseExclusiveLock();
  }
}
java
/**
 * Writes a new physical-position record, preferring a recycled hole over
 * allocating fresh space. When a hole is recycled, the stored (negative,
 * see removePhysicalPosition) version is negated back into a usable version;
 * fresh slots start at version 0. On return iPPosition.clusterPosition and
 * iPPosition.recordVersion are filled in. Runs under the exclusive lock.
 */
public void addPhysicalPosition(final OPhysicalPosition iPPosition) throws IOException {
  final long[] pos;
  final boolean recycled;
  long offset;
  acquireExclusiveLock();
  try {
    offset = holeSegment.popLastEntryPosition();
    if (offset > -1) {
      // REUSE THE HOLE
      pos = fileSegment.getRelativePosition(offset);
      recycled = true;
    } else {
      // NO HOLES FOUND: ALLOCATE MORE SPACE
      pos = allocateRecord();
      offset = fileSegment.getAbsolutePosition(pos);
      recycled = false;
    }
    final OFile file = fileSegment.files[(int) pos[0]];
    long p = pos[1];
    // Write the record fields in layout order; 'p +=' advances past the previous field
    file.writeShort(p, (short) iPPosition.dataSegmentId);
    file.writeLong(p += OBinaryProtocol.SIZE_SHORT, iPPosition.dataSegmentPos);
    file.writeByte(p += OBinaryProtocol.SIZE_LONG, iPPosition.recordType);
    if (recycled)
      // GET LAST VERSION
      iPPosition.recordVersion = file.readInt(p + OBinaryProtocol.SIZE_BYTE) * -1;
    else
      iPPosition.recordVersion = 0;
    file.writeInt(p + OBinaryProtocol.SIZE_BYTE, iPPosition.recordVersion);
    iPPosition.clusterPosition = offset / RECORD_SIZE;
    updateBoundsAfterInsertion(iPPosition.clusterPosition);
  } finally {
    releaseExclusiveLock();
  }
}
java
/**
 * Resolves the data segment name to its id, records it in the cluster
 * configuration and persists the storage configuration.
 */
private void setDataSegmentInternal(final String iName) {
  config.setDataSegmentId(storage.getDataSegmentIdByName(iName));
  storage.getConfiguration().update();
}
java
/**
 * Looks up the custom event-logger bean in the Mule registry. Returns whatever
 * the registry yields for that bean name (presumably null when no custom
 * logger is registered -- confirm lookupObject's contract).
 */
protected Object getCustomEventLoggerFromRegistry(MuleContext muleContext) {
  return muleContext.getRegistry().lookupObject(CUSTOM_EVENT_LOGGER_BEAN_NAME);
}
java
/**
 * Appends a sub-definition to this composite index definition. At most one
 * multi-value (collection) definition is allowed; its position is remembered
 * in multiValueDefinitionIndex.
 *
 * @throws OIndexException when a second multi-value definition is added
 */
public void addIndex(final OIndexDefinition indexDefinition) {
  indexDefinitions.add(indexDefinition);
  if (!(indexDefinition instanceof OIndexDefinitionMultiValue))
    return;
  if (multiValueDefinitionIndex != -1)
    throw new OIndexException("Composite key can not contain more than one collection item");
  multiValueDefinitionIndex = indexDefinitions.size() - 1;
}
java