method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
public static int readSize(ByteBuffer buf, boolean allowNull) throws IgniteCheckedException {
byte protoVer = checkProtocolVersion(buf.get(buf.position()), allowNull);
return sizeForVersion(protoVer);
}
|
static int function(ByteBuffer buf, boolean allowNull) throws IgniteCheckedException { byte protoVer = checkProtocolVersion(buf.get(buf.position()), allowNull); return sizeForVersion(protoVer); }
|
/**
* Gets needed buffer size to read the whole version instance.
* Does not change buffer position.
*
* @param buf Buffer.
* @param allowNull Is {@code null} version allowed.
* @return Size of serialized version.
* @throws IgniteCheckedException If failed.
*/
|
Gets needed buffer size to read the whole version instance. Does not change buffer position
|
readSize
|
{
"repo_name": "samaitra/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/tree/io/CacheVersionIO.java",
"license": "apache-2.0",
"size": 9799
}
|
[
"java.nio.ByteBuffer",
"org.apache.ignite.IgniteCheckedException"
] |
import java.nio.ByteBuffer; import org.apache.ignite.IgniteCheckedException;
|
import java.nio.*; import org.apache.ignite.*;
|
[
"java.nio",
"org.apache.ignite"
] |
java.nio; org.apache.ignite;
| 1,038,958
|
WithCreate withVirtualNetworkRules(List<VirtualNetworkRule> virtualNetworkRules);
}
interface WithMultipleLocations {
|
WithCreate withVirtualNetworkRules(List<VirtualNetworkRule> virtualNetworkRules); } interface WithMultipleLocations {
|
/**
* Specifies the list of Virtual Network ACL Rules for the CosmosDB account.
*
* @param virtualNetworkRules the list of Virtual Network ACL Rules.
* @return the next stage
*/
|
Specifies the list of Virtual Network ACL Rules for the CosmosDB account
|
withVirtualNetworkRules
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-cosmos/src/main/java/com/azure/resourcemanager/cosmos/models/CosmosDBAccount.java",
"license": "mit",
"size": 25538
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,663,028
|
@Test
public void multiMachineScaleDownOfMachinePool() throws Exception {
// set up initial pool
DateTime now = UtcTime.now();
Machine booting = machine("i-1", PENDING, now.minus(1));
Machine active1 = machine("i-2", RUNNING, now.minus(2));
Machine active2 = machine("i-3", RUNNING, now.minus(3));
Machine terminated = machine("i-4", TERMINATED, now.minus(4));
when(this.driverMock.listMachines()).thenReturn(machines(booting, active1, active2, terminated));
// run test that requests two machines to be terminated
this.cloudPool.configure(poolConfig(OLDEST));
this.cloudPool.start();
// effective size: 3 => ask for 1 machines
assertThat(this.cloudPool.getPoolSize().getDesiredSize(), is(3));
Future<?> update = this.cloudPool.setDesiredSize(1);
// wait for pool update to complete
update.get();
// verify that cloud driver was asked to terminate the two oldest
// active machines
verify(this.driverMock).terminateMachines(asList("i-3", "i-2"));
// verify event posted on event bus
verify(this.eventBusMock).post(argThat(isTerminationAlert("i-2", "i-3")));
assertThat(this.cloudPool.getPoolSize().getDesiredSize(), is(1));
}
|
void function() throws Exception { DateTime now = UtcTime.now(); Machine booting = machine("i-1", PENDING, now.minus(1)); Machine active1 = machine("i-2", RUNNING, now.minus(2)); Machine active2 = machine("i-3", RUNNING, now.minus(3)); Machine terminated = machine("i-4", TERMINATED, now.minus(4)); when(this.driverMock.listMachines()).thenReturn(machines(booting, active1, active2, terminated)); this.cloudPool.configure(poolConfig(OLDEST)); this.cloudPool.start(); assertThat(this.cloudPool.getPoolSize().getDesiredSize(), is(3)); Future<?> update = this.cloudPool.setDesiredSize(1); update.get(); verify(this.driverMock).terminateMachines(asList("i-3", "i-2")); verify(this.eventBusMock).post(argThat(isTerminationAlert("i-2", "i-3"))); assertThat(this.cloudPool.getPoolSize().getDesiredSize(), is(1)); }
|
/**
* Verify cloud pool behavior when scaling down the machine pool with
* several machines.
*/
|
Verify cloud pool behavior when scaling down the machine pool with several machines
|
multiMachineScaleDownOfMachinePool
|
{
"repo_name": "elastisys/scale.cloudpool",
"path": "commons/src/test/java/com/elastisys/scale/cloudpool/commons/basepool/TestBaseCloudPoolOperation.java",
"license": "apache-2.0",
"size": 79049
}
|
[
"com.elastisys.scale.cloudpool.api.types.Machine",
"com.elastisys.scale.cloudpool.commons.basepool.BasePoolTestUtils",
"com.elastisys.scale.commons.util.time.UtcTime",
"java.util.concurrent.Future",
"org.hamcrest.CoreMatchers",
"org.joda.time.DateTime",
"org.junit.Assert",
"org.mockito.Mockito"
] |
import com.elastisys.scale.cloudpool.api.types.Machine; import com.elastisys.scale.cloudpool.commons.basepool.BasePoolTestUtils; import com.elastisys.scale.commons.util.time.UtcTime; import java.util.concurrent.Future; import org.hamcrest.CoreMatchers; import org.joda.time.DateTime; import org.junit.Assert; import org.mockito.Mockito;
|
import com.elastisys.scale.cloudpool.api.types.*; import com.elastisys.scale.cloudpool.commons.basepool.*; import com.elastisys.scale.commons.util.time.*; import java.util.concurrent.*; import org.hamcrest.*; import org.joda.time.*; import org.junit.*; import org.mockito.*;
|
[
"com.elastisys.scale",
"java.util",
"org.hamcrest",
"org.joda.time",
"org.junit",
"org.mockito"
] |
com.elastisys.scale; java.util; org.hamcrest; org.joda.time; org.junit; org.mockito;
| 2,857,253
|
public RigidBody setAngularVelocity(@Const Vector3 angVel);
|
RigidBody function(@Const Vector3 angVel);
|
/**
* Set the angular velocity of this rigid body. The angular velocity is stored as the axis of rotation and
* its magnitude is the rate of rotation.
*
* @param angVel The new angular velocity
*
* @return This component
*/
|
Set the angular velocity of this rigid body. The angular velocity is stored as the axis of rotation and its magnitude is the rate of rotation
|
setAngularVelocity
|
{
"repo_name": "geronimo-iia/ferox",
"path": "ferox-physics/src/main/java/com/ferox/physics/dynamics/RigidBody.java",
"license": "bsd-2-clause",
"size": 4423
}
|
[
"com.ferox.math.Const",
"com.ferox.math.Vector3"
] |
import com.ferox.math.Const; import com.ferox.math.Vector3;
|
import com.ferox.math.*;
|
[
"com.ferox.math"
] |
com.ferox.math;
| 2,271,633
|
private void appendDocument(String string) {
EventQueue.invokeLater(new Appendix(this.document, string, this.attributeSet));
}
}
private static class Appendix implements Runnable {
private Document document;
private String string;
private AttributeSet attributeSet;
Appendix(Document document, String string, AttributeSet attributeSet) {
super();
this.document = document;
this.string = string;
this.attributeSet = attributeSet;
}
|
void function(String string) { EventQueue.invokeLater(new Appendix(this.document, string, this.attributeSet)); } } private static class Appendix implements Runnable { private Document document; private String string; private AttributeSet attributeSet; Appendix(Document document, String string, AttributeSet attributeSet) { super(); this.document = document; this.string = string; this.attributeSet = attributeSet; }
|
/**
* Place a task on the AWT Event Queue that will append
* the document with the specified string.
*/
|
Place a task on the AWT Event Queue that will append the document with the specified string
|
appendDocument
|
{
"repo_name": "bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs",
"path": "utils/eclipselink.utils.workbench/uitools/source/org/eclipse/persistence/tools/workbench/uitools/Console.java",
"license": "epl-1.0",
"size": 14715
}
|
[
"java.awt.EventQueue",
"javax.swing.text.AttributeSet",
"javax.swing.text.Document"
] |
import java.awt.EventQueue; import javax.swing.text.AttributeSet; import javax.swing.text.Document;
|
import java.awt.*; import javax.swing.text.*;
|
[
"java.awt",
"javax.swing"
] |
java.awt; javax.swing;
| 598,870
|
public void enterText(int index, String text) {
textEnterer.setEditText(waiter.waitForAndGetView(index, EditText.class), text);
}
|
void function(int index, String text) { textEnterer.setEditText(waiter.waitForAndGetView(index, EditText.class), text); }
|
/**
* Enters text in an EditText with a given index.
*
* @param index the index of the {@link EditText}. {@code 0} if only one is available
* @param text the text string to enter into the {@link EditText} field
*
*/
|
Enters text in an EditText with a given index
|
enterText
|
{
"repo_name": "moizjv/robotium",
"path": "robotium-solo/src/main/java/com/jayway/android/robotium/solo/Solo.java",
"license": "apache-2.0",
"size": 59557
}
|
[
"android.widget.EditText"
] |
import android.widget.EditText;
|
import android.widget.*;
|
[
"android.widget"
] |
android.widget;
| 1,458,783
|
@Override
public DrawingSupplier getDrawingSupplier() {
DrawingSupplier result = null;
CategoryPlot cp = getPlot();
if (cp != null) {
result = cp.getDrawingSupplier();
}
return result;
}
|
DrawingSupplier function() { DrawingSupplier result = null; CategoryPlot cp = getPlot(); if (cp != null) { result = cp.getDrawingSupplier(); } return result; }
|
/**
* Returns the drawing supplier from the plot.
*
* @return The drawing supplier (possibly <code>null</code>).
*/
|
Returns the drawing supplier from the plot
|
getDrawingSupplier
|
{
"repo_name": "akardapolov/ASH-Viewer",
"path": "jfreechart-fse/src/main/java/org/jfree/chart/renderer/category/AbstractCategoryItemRenderer.java",
"license": "gpl-3.0",
"size": 66756
}
|
[
"org.jfree.chart.plot.CategoryPlot",
"org.jfree.chart.plot.DrawingSupplier"
] |
import org.jfree.chart.plot.CategoryPlot; import org.jfree.chart.plot.DrawingSupplier;
|
import org.jfree.chart.plot.*;
|
[
"org.jfree.chart"
] |
org.jfree.chart;
| 2,446,333
|
public static boolean areDifferentDomains(String firstDomain, String secondDomain) {
try {
URL urlOne = new URL(firstDomain.toLowerCase());
URL urlTwo = new URL(secondDomain.toLowerCase());
if (urlOne.getHost().equals(urlTwo.getHost())) {
LOG.debug("Hosts "
+ urlOne.getHost()
+ " of domains "
+ firstDomain
+ " and "
+ secondDomain
+ " were determined to be equal");
return false;
} else {
LOG.debug("Hosts "
+ urlOne.getHost()
+ " of domains "
+ firstDomain
+ " and "
+ secondDomain
+ " are not equal");
return true;
}
} catch (MalformedURLException mue) {
LOG.error("Unable to successfully compare domains " + firstDomain + " and " + secondDomain);
}
return true;
}
|
static boolean function(String firstDomain, String secondDomain) { try { URL urlOne = new URL(firstDomain.toLowerCase()); URL urlTwo = new URL(secondDomain.toLowerCase()); if (urlOne.getHost().equals(urlTwo.getHost())) { LOG.debug(STR + urlOne.getHost() + STR + firstDomain + STR + secondDomain + STR); return false; } else { LOG.debug(STR + urlOne.getHost() + STR + firstDomain + STR + secondDomain + STR); return true; } } catch (MalformedURLException mue) { LOG.error(STR + firstDomain + STR + secondDomain); } return true; }
|
/**
* Determines whether two URLs point at the same domain.
*
* @param firstDomain first URL string to compare
* @param secondDomain second URL string to compare
* @return true if the domains are different, false otherwise
*/
|
Determines whether two URLs point at the same domain
|
areDifferentDomains
|
{
"repo_name": "ricepanda/rice",
"path": "rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/util/KRADUtils.java",
"license": "apache-2.0",
"size": 69178
}
|
[
"java.net.MalformedURLException"
] |
import java.net.MalformedURLException;
|
import java.net.*;
|
[
"java.net"
] |
java.net;
| 1,761,617
|
private WADOResponseObject getRemoteDICOMFile(String hostname, WADORequestObject req ) {
if ( "localhost".equals(hostname) ) {
log.warn("WADO request redirected to localhost! Return 'NOT FOUND' to avoid circular redirect!\n(Maybe a filesystem was removed from filesystem management but already exists in database!)");
return new WADOStreamResponseObjectImpl( null, CONTENT_TYPE_JPEG, HttpServletResponse.SC_NOT_FOUND, "Object not found (Circular redirect found)!");
}
if ( log.isInfoEnabled() ) log.info("WADO request redirected to hostname:"+hostname);
URL url = null;
try {
url = getRedirectURL( hostname, req );
if (log.isDebugEnabled() ) log.debug("redirect url:"+url );
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
String authHeader = (String)req.getRequestHeaders().get("Authorization");
if ( authHeader != null ) {
conn.addRequestProperty("Authorization", authHeader );
}
conn.connect();
if (log.isDebugEnabled() ) log.debug("conn.getResponseCode():"+conn.getResponseCode() );
if ( conn.getResponseCode() != HttpServletResponse.SC_OK ) {
if (log.isInfoEnabled() ) log.info("Remote WADO server responses with:"+conn.getResponseMessage() );
return new WADOStreamResponseObjectImpl( null, conn.getContentType(), conn.getResponseCode(), conn.getResponseMessage() );
}
InputStream is = conn.getInputStream();
if ( WADOCacheImpl.getWADOCache().isRedirectCaching() && CONTENT_TYPE_JPEG.equals( conn.getContentType() ) ) {
File file = WADOCacheImpl.getWADOCache().putStream( is, req.getStudyUID(),
req.getSeriesUID(),
req.getObjectUID(),
req.getRows(),
req.getColumns() );
is = new FileInputStream( file );
}
return new WADOStreamResponseObjectImpl( is, conn.getContentType(), HttpServletResponse.SC_OK, null);
} catch (Exception e) {
log.error("Can't connect to remote WADO service:"+url, e);
e.printStackTrace();
return null;
}
}
|
WADOResponseObject function(String hostname, WADORequestObject req ) { if ( STR.equals(hostname) ) { log.warn(STR); return new WADOStreamResponseObjectImpl( null, CONTENT_TYPE_JPEG, HttpServletResponse.SC_NOT_FOUND, STR); } if ( log.isInfoEnabled() ) log.info(STR+hostname); URL url = null; try { url = getRedirectURL( hostname, req ); if (log.isDebugEnabled() ) log.debug(STR+url ); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); String authHeader = (String)req.getRequestHeaders().get(STR); if ( authHeader != null ) { conn.addRequestProperty(STR, authHeader ); } conn.connect(); if (log.isDebugEnabled() ) log.debug(STR+conn.getResponseCode() ); if ( conn.getResponseCode() != HttpServletResponse.SC_OK ) { if (log.isInfoEnabled() ) log.info(STR+conn.getResponseMessage() ); return new WADOStreamResponseObjectImpl( null, conn.getContentType(), conn.getResponseCode(), conn.getResponseMessage() ); } InputStream is = conn.getInputStream(); if ( WADOCacheImpl.getWADOCache().isRedirectCaching() && CONTENT_TYPE_JPEG.equals( conn.getContentType() ) ) { File file = WADOCacheImpl.getWADOCache().putStream( is, req.getStudyUID(), req.getSeriesUID(), req.getObjectUID(), req.getRows(), req.getColumns() ); is = new FileInputStream( file ); } return new WADOStreamResponseObjectImpl( is, conn.getContentType(), HttpServletResponse.SC_OK, null); } catch (Exception e) { log.error(STR+url, e); e.printStackTrace(); return null; } }
|
/**
* Tries to get the DICOM file from an external WADO service.
*
* @param hostname Hostname of remote WADO service.
* @param studyUID Unique identifier of the study.
* @param seriesUID Unique identifier of the series.
* @param instanceUID Unique identifier of the instance.
*
* @return The File object or null if not found.
*/
|
Tries to get the DICOM file from an external WADO service
|
getRemoteDICOMFile
|
{
"repo_name": "medicayun/medicayundicom",
"path": "dcm4jboss-all/tags/DCM4CHEE_2_8_2/dcm4jboss-wado/src/java/org/dcm4chex/wado/mbean/WADOSupport.java",
"license": "apache-2.0",
"size": 34188
}
|
[
"java.io.File",
"java.io.FileInputStream",
"java.io.InputStream",
"java.net.HttpURLConnection",
"javax.servlet.http.HttpServletResponse",
"org.dcm4chex.wado.common.WADORequestObject",
"org.dcm4chex.wado.common.WADOResponseObject",
"org.dcm4chex.wado.mbean.cache.WADOCacheImpl"
] |
import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.net.HttpURLConnection; import javax.servlet.http.HttpServletResponse; import org.dcm4chex.wado.common.WADORequestObject; import org.dcm4chex.wado.common.WADOResponseObject; import org.dcm4chex.wado.mbean.cache.WADOCacheImpl;
|
import java.io.*; import java.net.*; import javax.servlet.http.*; import org.dcm4chex.wado.common.*; import org.dcm4chex.wado.mbean.cache.*;
|
[
"java.io",
"java.net",
"javax.servlet",
"org.dcm4chex.wado"
] |
java.io; java.net; javax.servlet; org.dcm4chex.wado;
| 2,201,216
|
@Override
ValueNode preprocess(int numTables,
FromList outerFromList,
SubqueryList outerSubqueryList,
PredicateList outerPredicateList)
throws StandardException
{
leftOperand = leftOperand.preprocess(numTables,
outerFromList, outerSubqueryList,
outerPredicateList);
rightOperand = rightOperand.preprocess(numTables,
outerFromList, outerSubqueryList,
outerPredicateList);
return this;
}
|
ValueNode preprocess(int numTables, FromList outerFromList, SubqueryList outerSubqueryList, PredicateList outerPredicateList) throws StandardException { leftOperand = leftOperand.preprocess(numTables, outerFromList, outerSubqueryList, outerPredicateList); rightOperand = rightOperand.preprocess(numTables, outerFromList, outerSubqueryList, outerPredicateList); return this; }
|
/**
* Preprocess an expression tree. We do a number of transformations
* here (including subqueries, IN lists, LIKE and BETWEEN) plus
* subquery flattening.
* NOTE: This is done before the outer ResultSetNode is preprocessed.
*
* @param numTables Number of tables in the DML Statement
* @param outerFromList FromList from outer query block
* @param outerSubqueryList SubqueryList from outer query block
* @param outerPredicateList PredicateList from outer query block
*
* @return The modified expression
*
* @exception StandardException Thrown on error
*/
|
Preprocess an expression tree. We do a number of transformations here (including subqueries, IN lists, LIKE and BETWEEN) plus subquery flattening
|
preprocess
|
{
"repo_name": "scnakandala/derby",
"path": "java/engine/org/apache/derby/impl/sql/compile/BinaryOperatorNode.java",
"license": "apache-2.0",
"size": 25697
}
|
[
"org.apache.derby.iapi.error.StandardException"
] |
import org.apache.derby.iapi.error.StandardException;
|
import org.apache.derby.iapi.error.*;
|
[
"org.apache.derby"
] |
org.apache.derby;
| 2,114,133
|
public void exec(int depth, int concurrencyDepth, AlluxioURI path) throws Exception {
if (depth < 1) {
return;
} else if (depth == 1) {
long fileId = mFsMaster.createFile(path, mCreateFileOptions);
Assert.assertEquals(fileId, mFsMaster.getFileId(path));
// verify the user permission for file
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0644, (short) fileInfo.getMode());
} else {
mFsMaster.createDirectory(path, CreateDirectoryOptions.defaults());
Assert.assertNotNull(mFsMaster.getFileId(path));
long dirId = mFsMaster.getFileId(path);
Assert.assertNotEquals(-1, dirId);
FileInfo dirInfo = mFsMaster.getFileInfo(dirId);
Assert.assertEquals("", dirInfo.getOwner());
Assert.assertEquals(0755, (short) dirInfo.getMode());
}
if (concurrencyDepth > 0) {
ExecutorService executor = Executors.newCachedThreadPool();
try {
ArrayList<Future<Void>> futures = new ArrayList<>(FILES_PER_NODE);
for (int i = 0; i < FILES_PER_NODE; i++) {
Callable<Void> call = (new ConcurrentCreator(depth - 1, concurrencyDepth - 1,
path.join(Integer.toString(i)), mCreateFileOptions));
futures.add(executor.submit(call));
}
for (Future<Void> f : futures) {
f.get();
}
} finally {
executor.shutdown();
}
} else {
for (int i = 0; i < FILES_PER_NODE; i++) {
exec(depth - 1, concurrencyDepth, path.join(Integer.toString(i)));
}
}
}
}
class ConcurrentFreer implements Callable<Void> {
private int mDepth;
private int mConcurrencyDepth;
private AlluxioURI mInitPath;
ConcurrentFreer(int depth, int concurrencyDepth, AlluxioURI initPath) {
mDepth = depth;
mConcurrencyDepth = concurrencyDepth;
mInitPath = initPath;
}
|
void function(int depth, int concurrencyDepth, AlluxioURI path) throws Exception { if (depth < 1) { return; } else if (depth == 1) { long fileId = mFsMaster.createFile(path, mCreateFileOptions); Assert.assertEquals(fileId, mFsMaster.getFileId(path)); FileInfo fileInfo = mFsMaster.getFileInfo(fileId); Assert.assertEquals(STR", dirInfo.getOwner()); Assert.assertEquals(0755, (short) dirInfo.getMode()); } if (concurrencyDepth > 0) { ExecutorService executor = Executors.newCachedThreadPool(); try { ArrayList<Future<Void>> futures = new ArrayList<>(FILES_PER_NODE); for (int i = 0; i < FILES_PER_NODE; i++) { Callable<Void> call = (new ConcurrentCreator(depth - 1, concurrencyDepth - 1, path.join(Integer.toString(i)), mCreateFileOptions)); futures.add(executor.submit(call)); } for (Future<Void> f : futures) { f.get(); } } finally { executor.shutdown(); } } else { for (int i = 0; i < FILES_PER_NODE; i++) { exec(depth - 1, concurrencyDepth, path.join(Integer.toString(i))); } } } } class ConcurrentFreer implements Callable<Void> { private int mDepth; private int mConcurrencyDepth; private AlluxioURI mInitPath; ConcurrentFreer(int depth, int concurrencyDepth, AlluxioURI initPath) { mDepth = depth; mConcurrencyDepth = concurrencyDepth; mInitPath = initPath; }
|
/**
* Executes the process of creating all files in one directory by multiple concurrent threads.
*
* @param depth the depth of files to be created in one directory
* @param concurrencyDepth the concurrency depth of files to be created in one directory
* @param path the directory of files to be created in
*/
|
Executes the process of creating all files in one directory by multiple concurrent threads
|
exec
|
{
"repo_name": "ShailShah/alluxio",
"path": "tests/src/test/java/alluxio/master/file/FileSystemMasterIntegrationTest.java",
"license": "apache-2.0",
"size": 45311
}
|
[
"java.util.ArrayList",
"java.util.concurrent.Callable",
"java.util.concurrent.ExecutorService",
"java.util.concurrent.Executors",
"java.util.concurrent.Future",
"org.junit.Assert"
] |
import java.util.ArrayList; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import org.junit.Assert;
|
import java.util.*; import java.util.concurrent.*; import org.junit.*;
|
[
"java.util",
"org.junit"
] |
java.util; org.junit;
| 433,946
|
boolean canDismiss(int position, Card card, CardWithList.ListObject listObject);
|
boolean canDismiss(int position, Card card, CardWithList.ListObject listObject);
|
/**
* Called to determine whether the given position can be dismissed.
*/
|
Called to determine whether the given position can be dismissed
|
canDismiss
|
{
"repo_name": "Daniele-Comi/System-Monitor",
"path": "cardLibrary/src/main/java/it/gmariotti/cardslib/library/prototypes/SwipeDismissListItemViewTouchListener.java",
"license": "gpl-2.0",
"size": 11124
}
|
[
"it.gmariotti.cardslib.library.internal.Card"
] |
import it.gmariotti.cardslib.library.internal.Card;
|
import it.gmariotti.cardslib.library.internal.*;
|
[
"it.gmariotti.cardslib"
] |
it.gmariotti.cardslib;
| 795,822
|
@Override
public final void requestCompleted(final HttpContext context) {
if (this.completed.compareAndSet(false, true)) {
try {
this.result = buildResult(context);
} catch (final Exception ex) {
this.ex = ex;
} finally {
releaseResources();
}
}
}
|
final void function(final HttpContext context) { if (this.completed.compareAndSet(false, true)) { try { this.result = buildResult(context); } catch (final Exception ex) { this.ex = ex; } finally { releaseResources(); } } }
|
/**
* Use {@link #buildResult(HttpContext)} instead.
*/
|
Use <code>#buildResult(HttpContext)</code> instead
|
requestCompleted
|
{
"repo_name": "cictourgune/MDP-Airbnb",
"path": "httpcomponents-core-4.4/httpcore-nio/src/main/java/org/apache/http/nio/protocol/AbstractAsyncRequestConsumer.java",
"license": "apache-2.0",
"size": 6456
}
|
[
"org.apache.http.protocol.HttpContext"
] |
import org.apache.http.protocol.HttpContext;
|
import org.apache.http.protocol.*;
|
[
"org.apache.http"
] |
org.apache.http;
| 1,769,829
|
public static long stringToHierarchicalLedgerId(String...levelNodes) throws IOException {
try {
StringBuilder sb = new StringBuilder();
for (String node : levelNodes) {
sb.append(node);
}
return Long.parseLong(sb.toString());
} catch (NumberFormatException e) {
throw new IOException(e);
}
}
|
static long function(String...levelNodes) throws IOException { try { StringBuilder sb = new StringBuilder(); for (String node : levelNodes) { sb.append(node); } return Long.parseLong(sb.toString()); } catch (NumberFormatException e) { throw new IOException(e); } }
|
/**
* Get ledger id.
*
* @param levelNodes
* level of the ledger path
* @return ledger id
* @throws IOException
*/
|
Get ledger id
|
stringToHierarchicalLedgerId
|
{
"repo_name": "sijie/bookkeeper",
"path": "bookkeeper-server/src/main/java/org/apache/bookkeeper/util/StringUtils.java",
"license": "apache-2.0",
"size": 6605
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,114,477
|
private void preMergeRegions(final MasterProcedureEnv env) throws IOException {
final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
if (cpHost != null) {
boolean ret = cpHost.preMergeRegionsAction(regionsToMerge, getUser());
if (ret) {
throw new IOException(
"Coprocessor bypassing regions " + HRegionInfo.getShortNameToLog(regionsToMerge) +
" merge.");
}
}
// TODO: Clean up split and merge. Currently all over the place.
try {
env.getMasterServices().getMasterQuotaManager().onRegionMerged(this.mergedRegion);
} catch (QuotaExceededException e) {
env.getAssignmentManager().getRegionNormalizer().planSkipped(this.mergedRegion,
NormalizationPlan.PlanType.MERGE);
throw e;
}
}
|
void function(final MasterProcedureEnv env) throws IOException { final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost(); if (cpHost != null) { boolean ret = cpHost.preMergeRegionsAction(regionsToMerge, getUser()); if (ret) { throw new IOException( STR + HRegionInfo.getShortNameToLog(regionsToMerge) + STR); } } try { env.getMasterServices().getMasterQuotaManager().onRegionMerged(this.mergedRegion); } catch (QuotaExceededException e) { env.getAssignmentManager().getRegionNormalizer().planSkipped(this.mergedRegion, NormalizationPlan.PlanType.MERGE); throw e; } }
|
/**
* Pre merge region action
* @param env MasterProcedureEnv
**/
|
Pre merge region action
|
preMergeRegions
|
{
"repo_name": "gustavoanatoly/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java",
"license": "apache-2.0",
"size": 31345
}
|
[
"java.io.IOException",
"org.apache.hadoop.hbase.HRegionInfo",
"org.apache.hadoop.hbase.master.MasterCoprocessorHost",
"org.apache.hadoop.hbase.master.normalizer.NormalizationPlan",
"org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv",
"org.apache.hadoop.hbase.quotas.QuotaExceededException"
] |
import java.io.IOException; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan; import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv; import org.apache.hadoop.hbase.quotas.QuotaExceededException;
|
import java.io.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.master.*; import org.apache.hadoop.hbase.master.normalizer.*; import org.apache.hadoop.hbase.master.procedure.*; import org.apache.hadoop.hbase.quotas.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 1,074,830
|
private static void assertWeekIterator(final Iterator<?> it, final Date start, final Date end) {
final Calendar calStart = Calendar.getInstance();
calStart.setTime(start);
final Calendar calEnd = Calendar.getInstance();
calEnd.setTime(end);
assertWeekIterator(it, calStart, calEnd);
}
|
static void function(final Iterator<?> it, final Date start, final Date end) { final Calendar calStart = Calendar.getInstance(); calStart.setTime(start); final Calendar calEnd = Calendar.getInstance(); calEnd.setTime(end); assertWeekIterator(it, calStart, calEnd); }
|
/**
* Convenience method for when working with Date objects
*/
|
Convenience method for when working with Date objects
|
assertWeekIterator
|
{
"repo_name": "xiwc/commons-lang",
"path": "src/test/java/org/apache/commons/lang3/time/DateUtilsTest.java",
"license": "apache-2.0",
"size": 77393
}
|
[
"java.util.Calendar",
"java.util.Date",
"java.util.Iterator"
] |
import java.util.Calendar; import java.util.Date; import java.util.Iterator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,542,638
|
public static <V> AttemptTimeLimiter<V> fixedTimeLimit(long duration, @Nonnull TimeUnit timeUnit) {
Preconditions.checkNotNull(timeUnit);
return new FixedAttemptTimeLimit<V>(duration, timeUnit);
}
|
static <V> AttemptTimeLimiter<V> function(long duration, @Nonnull TimeUnit timeUnit) { Preconditions.checkNotNull(timeUnit); return new FixedAttemptTimeLimit<V>(duration, timeUnit); }
|
/**
* For control over thread management, it is preferable to offer an {@link ExecutorService} through the other
* factory method, {@link #fixedTimeLimit(long, TimeUnit, ExecutorService)}. See the note on
* {@link SimpleTimeLimiter#SimpleTimeLimiter(ExecutorService)}, which this AttemptTimeLimiter uses.
*
* @param duration that an attempt may persist before being circumvented
* @param timeUnit of the 'duration' arg
* @param <V> the type of the computation result
* @return an {@link AttemptTimeLimiter} with a fixed time limit for each attempt
*/
|
For control over thread management, it is preferable to offer an <code>ExecutorService</code> through the other factory method, <code>#fixedTimeLimit(long, TimeUnit, ExecutorService)</code>. See the note on <code>SimpleTimeLimiter#SimpleTimeLimiter(ExecutorService)</code>, which this AttemptTimeLimiter uses
|
fixedTimeLimit
|
{
"repo_name": "rholder/guava-retrying",
"path": "src/main/java/com/github/rholder/retry/AttemptTimeLimiters.java",
"license": "apache-2.0",
"size": 4283
}
|
[
"com.google.common.base.Preconditions",
"java.util.concurrent.TimeUnit",
"javax.annotation.Nonnull"
] |
import com.google.common.base.Preconditions; import java.util.concurrent.TimeUnit; import javax.annotation.Nonnull;
|
import com.google.common.base.*; import java.util.concurrent.*; import javax.annotation.*;
|
[
"com.google.common",
"java.util",
"javax.annotation"
] |
com.google.common; java.util; javax.annotation;
| 1,452,922
|
NetworkReservedIPGetResponse get(String ipName) throws IOException, ServiceException, ParserConfigurationException, SAXException;
|
NetworkReservedIPGetResponse get(String ipName) throws IOException, ServiceException, ParserConfigurationException, SAXException;
|
/**
* The Get Reserved IP operation retrieves the details for the virtual IP
* reserved for the subscription.
*
* @param ipName Required. The name of the reserved IP to retrieve.
* @throws IOException Signals that an I/O exception of some sort has
* occurred. This class is the general class of exceptions produced by
* failed or interrupted I/O operations.
* @throws ServiceException Thrown if an unexpected response is found.
* @throws ParserConfigurationException Thrown if there was a serious
* configuration error with the document parser.
* @throws SAXException Thrown if there was an error parsing the XML
* response.
* @return A reserved IP associated with your subscription.
*/
|
The Get Reserved IP operation retrieves the details for the virtual IP reserved for the subscription
|
get
|
{
"repo_name": "oaastest/azure-sdk-for-java",
"path": "management-network/src/main/java/com/microsoft/windowsazure/management/network/ReservedIPOperations.java",
"license": "apache-2.0",
"size": 13902
}
|
[
"com.microsoft.windowsazure.exception.ServiceException",
"com.microsoft.windowsazure.management.network.models.NetworkReservedIPGetResponse",
"java.io.IOException",
"javax.xml.parsers.ParserConfigurationException",
"org.xml.sax.SAXException"
] |
import com.microsoft.windowsazure.exception.ServiceException; import com.microsoft.windowsazure.management.network.models.NetworkReservedIPGetResponse; import java.io.IOException; import javax.xml.parsers.ParserConfigurationException; import org.xml.sax.SAXException;
|
import com.microsoft.windowsazure.exception.*; import com.microsoft.windowsazure.management.network.models.*; import java.io.*; import javax.xml.parsers.*; import org.xml.sax.*;
|
[
"com.microsoft.windowsazure",
"java.io",
"javax.xml",
"org.xml.sax"
] |
com.microsoft.windowsazure; java.io; javax.xml; org.xml.sax;
| 758,241
|
public SyndFeed getRecommendedFeed(User user) throws AuraException, RemoteException {
return getRecommendedFeed(user, 20);
}
|
SyndFeed function(User user) throws AuraException, RemoteException { return getRecommendedFeed(user, 20); }
|
/**
* Gets the feed for the particular user
* @param user the user
* @return the feed
*/
|
Gets the feed for the particular user
|
getRecommendedFeed
|
{
"repo_name": "SunLabsAST/AURA",
"path": "aardvark/src/com/sun/labs/aura/aardvark/impl/AardvarkImpl.java",
"license": "gpl-2.0",
"size": 17947
}
|
[
"com.sun.labs.aura.datastore.User",
"com.sun.labs.aura.util.AuraException",
"com.sun.syndication.feed.synd.SyndFeed",
"java.rmi.RemoteException"
] |
import com.sun.labs.aura.datastore.User; import com.sun.labs.aura.util.AuraException; import com.sun.syndication.feed.synd.SyndFeed; import java.rmi.RemoteException;
|
import com.sun.labs.aura.datastore.*; import com.sun.labs.aura.util.*; import com.sun.syndication.feed.synd.*; import java.rmi.*;
|
[
"com.sun.labs",
"com.sun.syndication",
"java.rmi"
] |
com.sun.labs; com.sun.syndication; java.rmi;
| 1,392,516
|
default T findOrThrow(DeviceGroup _group, UUID _uuid) throws NotFoundException, GoneException {
T rtn = findByGroupAndUUID(_group, _uuid);
if (rtn == null) {
if (findDeletedByGroupAndUUID(_group, _uuid) == null)
throw new NotFoundException();
else
throw new GoneException();
}
return rtn;
}
|
default T findOrThrow(DeviceGroup _group, UUID _uuid) throws NotFoundException, GoneException { T rtn = findByGroupAndUUID(_group, _uuid); if (rtn == null) { if (findDeletedByGroupAndUUID(_group, _uuid) == null) throw new NotFoundException(); else throw new GoneException(); } return rtn; }
|
/**
* Find a item or throw an exception.
* @param _group The group, that contains the object to find.
* @param _uuid The uuid identifying the object in the group.
* @return The found object. Never null.
* @throws NotFoundException if item was not found and not deleted.
* @throws GoneException if item was deleted.
*/
|
Find a item or throw an exception
|
findOrThrow
|
{
"repo_name": "InstaList/instalist-server",
"path": "src/main/java/org/noorganization/instalist/server/controller/generic/IFinder.java",
"license": "apache-2.0",
"size": 4019
}
|
[
"javax.ws.rs.NotFoundException",
"org.noorganization.instalist.server.model.DeviceGroup",
"org.noorganization.instalist.server.support.exceptions.GoneException"
] |
import javax.ws.rs.NotFoundException; import org.noorganization.instalist.server.model.DeviceGroup; import org.noorganization.instalist.server.support.exceptions.GoneException;
|
import javax.ws.rs.*; import org.noorganization.instalist.server.model.*; import org.noorganization.instalist.server.support.exceptions.*;
|
[
"javax.ws",
"org.noorganization.instalist"
] |
javax.ws; org.noorganization.instalist;
| 1,568,142
|
LOG.debug("post() started");
// Only post transactions where:
// balance type code is AC or CB
// or where object type isn't FB and balance type code is EX, IE, PE and CE
if ((t.getFinancialBalanceTypeCode().equals(t.getOption().getActualFinancialBalanceTypeCd()) || t.getFinancialBalanceTypeCode().equals(t.getOption().getBudgetCheckingBalanceTypeCd())) || (t.getFinancialBalanceTypeCode().equals(t.getOption().getExtrnlEncumFinBalanceTypCd()) || t.getFinancialBalanceTypeCode().equals(t.getOption().getIntrnlEncumFinBalanceTypCd()) || t.getFinancialBalanceTypeCode().equals(t.getOption().getPreencumbranceFinBalTypeCd()) || t.getFinancialBalanceTypeCode().equals(t.getOption().getCostShareEncumbranceBalanceTypeCd())) && (!t.getFinancialObjectTypeCode().equals(t.getOption().getFinObjectTypeFundBalanceCd()))) {
// We are posting this transaction
String returnCode = GeneralLedgerConstants.UPDATE_CODE;
// Load it
AccountBalance ab = accountingCycleCachingService.getAccountBalance(t);
if (ab == null) {
returnCode = GeneralLedgerConstants.INSERT_CODE;
ab = new AccountBalance(t);
}
ab.setTimestamp(new java.sql.Date(postDate.getTime()));
if (!updateAccountBalanceReturn(t, ab)) {
return GeneralLedgerConstants.EMPTY_CODE;
}
if (returnCode.equals(GeneralLedgerConstants.INSERT_CODE)) {
accountingCycleCachingService.insertAccountBalance(ab);
} else {
accountingCycleCachingService.updateAccountBalance(ab);
}
return returnCode;
} else {
return GeneralLedgerConstants.EMPTY_CODE;
}
}
|
LOG.debug(STR); if ((t.getFinancialBalanceTypeCode().equals(t.getOption().getActualFinancialBalanceTypeCd()) t.getFinancialBalanceTypeCode().equals(t.getOption().getBudgetCheckingBalanceTypeCd())) (t.getFinancialBalanceTypeCode().equals(t.getOption().getExtrnlEncumFinBalanceTypCd()) t.getFinancialBalanceTypeCode().equals(t.getOption().getIntrnlEncumFinBalanceTypCd()) t.getFinancialBalanceTypeCode().equals(t.getOption().getPreencumbranceFinBalTypeCd()) t.getFinancialBalanceTypeCode().equals(t.getOption().getCostShareEncumbranceBalanceTypeCd())) && (!t.getFinancialObjectTypeCode().equals(t.getOption().getFinObjectTypeFundBalanceCd()))) { String returnCode = GeneralLedgerConstants.UPDATE_CODE; AccountBalance ab = accountingCycleCachingService.getAccountBalance(t); if (ab == null) { returnCode = GeneralLedgerConstants.INSERT_CODE; ab = new AccountBalance(t); } ab.setTimestamp(new java.sql.Date(postDate.getTime())); if (!updateAccountBalanceReturn(t, ab)) { return GeneralLedgerConstants.EMPTY_CODE; } if (returnCode.equals(GeneralLedgerConstants.INSERT_CODE)) { accountingCycleCachingService.insertAccountBalance(ab); } else { accountingCycleCachingService.updateAccountBalance(ab); } return returnCode; } else { return GeneralLedgerConstants.EMPTY_CODE; } }
|
/**
* Posts the transaction to the appropriate account balance record.
*
* @param t the transaction which is being posted
* @param mode the mode the poster is currently running in
* @param postDate the date this transaction should post to
* @param posterReportWriterService the writer service where the poster is writing its report
* @return the accomplished post type
* @see org.kuali.kfs.gl.batch.service.PostTransaction#post(org.kuali.kfs.gl.businessobject.Transaction, int, java.util.Date)
*/
|
Posts the transaction to the appropriate account balance record
|
post
|
{
"repo_name": "quikkian-ua-devops/will-financials",
"path": "kfs-core/src/main/java/org/kuali/kfs/gl/batch/service/impl/PostAccountBalance.java",
"license": "agpl-3.0",
"size": 8085
}
|
[
"java.util.Date",
"org.kuali.kfs.gl.GeneralLedgerConstants",
"org.kuali.kfs.gl.businessobject.AccountBalance"
] |
import java.util.Date; import org.kuali.kfs.gl.GeneralLedgerConstants; import org.kuali.kfs.gl.businessobject.AccountBalance;
|
import java.util.*; import org.kuali.kfs.gl.*; import org.kuali.kfs.gl.businessobject.*;
|
[
"java.util",
"org.kuali.kfs"
] |
java.util; org.kuali.kfs;
| 2,803,457
|
protected static boolean manageHeaderLeftClick(ScenarioGrid scenarioGrid, Integer uiColumnIndex, ScenarioGridColumn scenarioGridColumn, Integer uiHeaderRowIndex) {
if (!isEditableHeaderLocal(scenarioGridColumn, uiHeaderRowIndex)) {
return false;
}
ScenarioHeaderMetaData clickedScenarioHeaderMetadata = (ScenarioHeaderMetaData) scenarioGridColumn.getHeaderMetaData().get(uiHeaderRowIndex);
if (clickedScenarioHeaderMetadata == null) {
return false;
}
String group = ScenarioSimulationUtils.getOriginalColumnGroup(clickedScenarioHeaderMetadata.getColumnGroup());
switch (group) {
case "GIVEN":
case "EXPECT":
return manageGivenExpectHeaderLeftClick(scenarioGrid, clickedScenarioHeaderMetadata, uiColumnIndex, uiHeaderRowIndex);
default:
return false;
}
}
|
static boolean function(ScenarioGrid scenarioGrid, Integer uiColumnIndex, ScenarioGridColumn scenarioGridColumn, Integer uiHeaderRowIndex) { if (!isEditableHeaderLocal(scenarioGridColumn, uiHeaderRowIndex)) { return false; } ScenarioHeaderMetaData clickedScenarioHeaderMetadata = (ScenarioHeaderMetaData) scenarioGridColumn.getHeaderMetaData().get(uiHeaderRowIndex); if (clickedScenarioHeaderMetadata == null) { return false; } String group = ScenarioSimulationUtils.getOriginalColumnGroup(clickedScenarioHeaderMetadata.getColumnGroup()); switch (group) { case "GIVEN": case STR: return manageGivenExpectHeaderLeftClick(scenarioGrid, clickedScenarioHeaderMetadata, uiColumnIndex, uiHeaderRowIndex); default: return false; } }
|
/**
* This method check if the click happened on an <b>second level header</b> (i.e. the header of a specific column) cell. If it is so, manage it and returns <code>true</code>,
* otherwise returns <code>false</code>
* @param scenarioGrid
* @param uiColumnIndex
* @param scenarioGridColumn
* @param uiHeaderRowIndex
* @return
*/
|
This method check if the click happened on an second level header (i.e. the header of a specific column) cell. If it is so, manage it and returns <code>true</code>, otherwise returns <code>false</code>
|
manageHeaderLeftClick
|
{
"repo_name": "droolsjbpm/drools-wb",
"path": "drools-wb-screens/drools-wb-scenario-simulation-editor/drools-wb-scenario-simulation-editor-client/src/main/java/org/drools/workbench/screens/scenariosimulation/client/handlers/CommonEditHandler.java",
"license": "apache-2.0",
"size": 7525
}
|
[
"org.drools.workbench.screens.scenariosimulation.client.metadata.ScenarioHeaderMetaData",
"org.drools.workbench.screens.scenariosimulation.client.utils.ScenarioSimulationUtils",
"org.drools.workbench.screens.scenariosimulation.client.widgets.ScenarioGrid",
"org.drools.workbench.screens.scenariosimulation.client.widgets.ScenarioGridColumn"
] |
import org.drools.workbench.screens.scenariosimulation.client.metadata.ScenarioHeaderMetaData; import org.drools.workbench.screens.scenariosimulation.client.utils.ScenarioSimulationUtils; import org.drools.workbench.screens.scenariosimulation.client.widgets.ScenarioGrid; import org.drools.workbench.screens.scenariosimulation.client.widgets.ScenarioGridColumn;
|
import org.drools.workbench.screens.scenariosimulation.client.metadata.*; import org.drools.workbench.screens.scenariosimulation.client.utils.*; import org.drools.workbench.screens.scenariosimulation.client.widgets.*;
|
[
"org.drools.workbench"
] |
org.drools.workbench;
| 1,900,693
|
public RowIterator getAcCsi()
{
return (RowIterator)getAttributeInternal(ACCSI);
}
|
RowIterator function() { return (RowIterator)getAttributeInternal(ACCSI); }
|
/**
*
* Gets the associated entity oracle.jbo.RowIterator
*/
|
Gets the associated entity oracle.jbo.RowIterator
|
getAcCsi
|
{
"repo_name": "CBIIT/cadsr-util",
"path": "cadsrutil/src/java/gov/nih/nci/ncicb/cadsr/common/persistence/bc4j/CsCsiImpl.java",
"license": "bsd-3-clause",
"size": 10810
}
|
[
"oracle.jbo.RowIterator"
] |
import oracle.jbo.RowIterator;
|
import oracle.jbo.*;
|
[
"oracle.jbo"
] |
oracle.jbo;
| 2,121,542
|
public void updatePatient(StudyLocal study, Dataset attrs,
PatientMatching matching) throws FinderException, CreateException {
String pid = attrs.getString(Tags.PatientID);
// If the patient id is not included, then we don't have to do any
// patient update. Although patient id is type 2 in DICOM, but for DC,
// we enforce this.
if (pid == null || pid.length() == 0)
return;
PatientLocal newPatient = updateOrCreate(attrs, matching);
// Case 1: it's matching the same patient. Do nothing
if(study.getPatient().getPatientId().equals(pid))
return;
// Case 2: there's no matching, a new patient is created. The study is updated.
// Case 3: it's matching another existing patient. The study is updated.
study.setPatient(newPatient);
}
|
void function(StudyLocal study, Dataset attrs, PatientMatching matching) throws FinderException, CreateException { String pid = attrs.getString(Tags.PatientID); if (pid == null pid.length() == 0) return; PatientLocal newPatient = updateOrCreate(attrs, matching); if(study.getPatient().getPatientId().equals(pid)) return; study.setPatient(newPatient); }
|
/**
* Update patient data as well as relink study with the patient if the patient
* is different than original one.
* @throws CreateException
*
* @ejb.interface-method
*/
|
Update patient data as well as relink study with the patient if the patient is different than original one
|
updatePatient
|
{
"repo_name": "medicayun/medicayundicom",
"path": "dcm4jboss-all/tags/DCM4CHEE_2_14_6/dcm4jboss-ejb/src/java/org/dcm4chex/archive/ejb/session/PatientUpdateBean.java",
"license": "apache-2.0",
"size": 10973
}
|
[
"javax.ejb.CreateException",
"javax.ejb.FinderException",
"org.dcm4che.data.Dataset",
"org.dcm4che.dict.Tags",
"org.dcm4chex.archive.common.PatientMatching",
"org.dcm4chex.archive.ejb.interfaces.PatientLocal",
"org.dcm4chex.archive.ejb.interfaces.StudyLocal"
] |
import javax.ejb.CreateException; import javax.ejb.FinderException; import org.dcm4che.data.Dataset; import org.dcm4che.dict.Tags; import org.dcm4chex.archive.common.PatientMatching; import org.dcm4chex.archive.ejb.interfaces.PatientLocal; import org.dcm4chex.archive.ejb.interfaces.StudyLocal;
|
import javax.ejb.*; import org.dcm4che.data.*; import org.dcm4che.dict.*; import org.dcm4chex.archive.common.*; import org.dcm4chex.archive.ejb.interfaces.*;
|
[
"javax.ejb",
"org.dcm4che.data",
"org.dcm4che.dict",
"org.dcm4chex.archive"
] |
javax.ejb; org.dcm4che.data; org.dcm4che.dict; org.dcm4chex.archive;
| 2,179,406
|
void enterCreateOutputStreamStatement(@NotNull CQLParser.CreateOutputStreamStatementContext ctx);
void exitCreateOutputStreamStatement(@NotNull CQLParser.CreateOutputStreamStatementContext ctx);
|
void enterCreateOutputStreamStatement(@NotNull CQLParser.CreateOutputStreamStatementContext ctx); void exitCreateOutputStreamStatement(@NotNull CQLParser.CreateOutputStreamStatementContext ctx);
|
/**
* Exit a parse tree produced by {@link CQLParser#createOutputStreamStatement}.
*/
|
Exit a parse tree produced by <code>CQLParser#createOutputStreamStatement</code>
|
exitCreateOutputStreamStatement
|
{
"repo_name": "HuaweiBigData/StreamCQL",
"path": "cql/src/main/java/com/huawei/streaming/cql/semanticanalyzer/parser/CQLParserListener.java",
"license": "apache-2.0",
"size": 58667
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 2,798,845
|
private void createConfigurationDialogs(Composite pageComponent) {
pageComponent.setLayout(new GridLayout(3, false));
materialized = new Label(pageComponent, SWT.LEFT);
updateMaterialized();
new Label(pageComponent, SWT.LEFT);
new Label(pageComponent, SWT.LEFT);
Label ivmlLabel = new Label(pageComponent, SWT.LEFT);
ivmlLabel.setText("IVML path:");
ivmlInput = new Text(pageComponent, SWT.SINGLE | SWT.BORDER);
GridData gd = new GridData(SWT.FILL, SWT.FILL, true, false);
ivmlInput.setLayoutData(gd);
createPathSelectionButton(pageComponent, PathKind.VTL);
Composite composite = new Composite(getShell(), SWT.NONE);
GridLayout layout = new GridLayout(2, true);
layout.marginWidth = 0;
composite.setLayout(layout);
Label vilLabel = new Label(pageComponent, SWT.LEFT);
vilLabel.setText("VIL path:");
vilInput = new Text(pageComponent, SWT.BORDER);
vilInput.setLayoutData(gd);
createPathSelectionButton(pageComponent, PathKind.VTL);
Label vtlLabel = new Label(pageComponent, SWT.LEFT);
vtlLabel.setText("VTL path:");
vtlInput = new Text(pageComponent, SWT.BORDER);
vtlInput.setLayoutData(gd);
vtlInput.setEnabled(false);
createPathSelectionButton(pageComponent, PathKind.VTL).setEnabled(false);
setInputs(projectConfiguration);
}
private class ButtonListener implements Listener {
private PathKind kind;
ButtonListener(PathKind kind) {
this.kind = kind;
}
|
void function(Composite pageComponent) { pageComponent.setLayout(new GridLayout(3, false)); materialized = new Label(pageComponent, SWT.LEFT); updateMaterialized(); new Label(pageComponent, SWT.LEFT); new Label(pageComponent, SWT.LEFT); Label ivmlLabel = new Label(pageComponent, SWT.LEFT); ivmlLabel.setText(STR); ivmlInput = new Text(pageComponent, SWT.SINGLE SWT.BORDER); GridData gd = new GridData(SWT.FILL, SWT.FILL, true, false); ivmlInput.setLayoutData(gd); createPathSelectionButton(pageComponent, PathKind.VTL); Composite composite = new Composite(getShell(), SWT.NONE); GridLayout layout = new GridLayout(2, true); layout.marginWidth = 0; composite.setLayout(layout); Label vilLabel = new Label(pageComponent, SWT.LEFT); vilLabel.setText(STR); vilInput = new Text(pageComponent, SWT.BORDER); vilInput.setLayoutData(gd); createPathSelectionButton(pageComponent, PathKind.VTL); Label vtlLabel = new Label(pageComponent, SWT.LEFT); vtlLabel.setText(STR); vtlInput = new Text(pageComponent, SWT.BORDER); vtlInput.setLayoutData(gd); vtlInput.setEnabled(false); createPathSelectionButton(pageComponent, PathKind.VTL).setEnabled(false); setInputs(projectConfiguration); } private class ButtonListener implements Listener { private PathKind kind; ButtonListener(PathKind kind) { this.kind = kind; }
|
/**
* Create the dialogs.
* 3 labels for users info.
* 3 TextFields for user input.
* 1 Button for applying the input. The values will be stored in the preference store.
* 1 Button for restoring the default values.
*
* @param pageComponent The PageComponent.
*/
|
Create the dialogs. 3 labels for users info. 3 TextFields for user input. 1 Button for applying the input. The values will be stored in the preference store. 1 Button for restoring the default values
|
createConfigurationDialogs
|
{
"repo_name": "SSEHUB/EASyProducer",
"path": "Plugins/EASy-Producer/EASy-Producer.UI/src/net/ssehub/easy/producer/ui/configuration/PathPropertyPage.java",
"license": "apache-2.0",
"size": 8850
}
|
[
"net.ssehub.easy.producer.core.persistence.Configuration",
"org.eclipse.swt.layout.GridData",
"org.eclipse.swt.layout.GridLayout",
"org.eclipse.swt.widgets.Composite",
"org.eclipse.swt.widgets.Label",
"org.eclipse.swt.widgets.Listener",
"org.eclipse.swt.widgets.Text"
] |
import net.ssehub.easy.producer.core.persistence.Configuration; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Text;
|
import net.ssehub.easy.producer.core.persistence.*; import org.eclipse.swt.layout.*; import org.eclipse.swt.widgets.*;
|
[
"net.ssehub.easy",
"org.eclipse.swt"
] |
net.ssehub.easy; org.eclipse.swt;
| 2,033,305
|
public List<org.bukkit.entity.Entity> getNearbyEntities(double x, double y, double z);
|
List<org.bukkit.entity.Entity> function(double x, double y, double z);
|
/**
* Returns a list of entities within a bounding box centered around this
* entity
*
* @param x 1/2 the size of the box along x axis
* @param y 1/2 the size of the box along y axis
* @param z 1/2 the size of the box along z axis
* @return {@code List<Entity>} List of entities nearby
*/
|
Returns a list of entities within a bounding box centered around this entity
|
getNearbyEntities
|
{
"repo_name": "thedj21/bukkit--src",
"path": "src/main/java/org/bukkit/entity/Entity.java",
"license": "gpl-3.0",
"size": 9395
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 421,313
|
public SubscriptionEngine getSubscriptionEngine() {
return subscriptionEngine;
}
|
SubscriptionEngine function() { return subscriptionEngine; }
|
/**
* get subscription store
*
* @return subscription store
*/
|
get subscription store
|
getSubscriptionEngine
|
{
"repo_name": "hemikak/andes",
"path": "modules/andes-core/broker/src/main/java/org/wso2/andes/kernel/AndesContext.java",
"license": "apache-2.0",
"size": 7348
}
|
[
"org.wso2.andes.subscription.SubscriptionEngine"
] |
import org.wso2.andes.subscription.SubscriptionEngine;
|
import org.wso2.andes.subscription.*;
|
[
"org.wso2.andes"
] |
org.wso2.andes;
| 533,893
|
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj instanceof Map == false) {
return false;
}
Map map = (Map) obj;
if (map.size() != size()) {
return false;
}
MapIterator it = mapIterator();
try {
while (it.hasNext()) {
Object key = it.next();
Object value = it.getValue();
if (value == null) {
if (map.get(key) != null || map.containsKey(key) == false) {
return false;
}
} else {
if (value.equals(map.get(key)) == false) {
return false;
}
}
}
} catch (ClassCastException ignored) {
return false;
} catch (NullPointerException ignored) {
return false;
}
return true;
}
|
boolean function(Object obj) { if (obj == this) { return true; } if (obj instanceof Map == false) { return false; } Map map = (Map) obj; if (map.size() != size()) { return false; } MapIterator it = mapIterator(); try { while (it.hasNext()) { Object key = it.next(); Object value = it.getValue(); if (value == null) { if (map.get(key) != null map.containsKey(key) == false) { return false; } } else { if (value.equals(map.get(key)) == false) { return false; } } } } catch (ClassCastException ignored) { return false; } catch (NullPointerException ignored) { return false; } return true; }
|
/**
* Compares this map with another.
*
* @param obj the object to compare to
* @return true if equal
*/
|
Compares this map with another
|
equals
|
{
"repo_name": "mobile-event-processing/Asper",
"path": "source/src/com/espertech/esper/collection/apachecommons/AbstractHashedMap.java",
"license": "gpl-2.0",
"size": 46252
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,398,398
|
public Value getRed() throws DOMException {
return computedValue.getRed();
}
|
Value function() throws DOMException { return computedValue.getRed(); }
|
/**
* Implements {@link Value#getRed()}.
*/
|
Implements <code>Value#getRed()</code>
|
getRed
|
{
"repo_name": "Squeegee/batik",
"path": "sources/org/apache/batik/css/engine/value/ComputedValue.java",
"license": "apache-2.0",
"size": 4529
}
|
[
"org.w3c.dom.DOMException"
] |
import org.w3c.dom.DOMException;
|
import org.w3c.dom.*;
|
[
"org.w3c.dom"
] |
org.w3c.dom;
| 2,716,625
|
private List<String> getAggregateColumns( EventQueryParams params )
{
EventOutputType outputType = params.getOutputType();
if ( params.hasValueDimension() )
{
return Lists.newArrayList( statementBuilder.columnQuote( params.getValue().getUid() ) );
}
else if ( params.hasProgramIndicatorDimension() )
{
Set<String> uids = ProgramIndicator.getDataElementAndAttributeIdentifiers( params.getProgramIndicator().getExpression() );
return uids.stream().map( uid -> statementBuilder.columnQuote( uid ) ).collect( Collectors.toList() );
}
else
{
if ( EventOutputType.TRACKED_ENTITY_INSTANCE.equals( outputType ) && params.isProgramRegistration() )
{
return Lists.newArrayList( statementBuilder.columnQuote( "tei" ) );
}
else if ( EventOutputType.ENROLLMENT.equals( outputType ) )
{
return Lists.newArrayList( statementBuilder.columnQuote( "pi" ) );
}
}
return Lists.newArrayList();
}
|
List<String> function( EventQueryParams params ) { EventOutputType outputType = params.getOutputType(); if ( params.hasValueDimension() ) { return Lists.newArrayList( statementBuilder.columnQuote( params.getValue().getUid() ) ); } else if ( params.hasProgramIndicatorDimension() ) { Set<String> uids = ProgramIndicator.getDataElementAndAttributeIdentifiers( params.getProgramIndicator().getExpression() ); return uids.stream().map( uid -> statementBuilder.columnQuote( uid ) ).collect( Collectors.toList() ); } else { if ( EventOutputType.TRACKED_ENTITY_INSTANCE.equals( outputType ) && params.isProgramRegistration() ) { return Lists.newArrayList( statementBuilder.columnQuote( "tei" ) ); } else if ( EventOutputType.ENROLLMENT.equals( outputType ) ) { return Lists.newArrayList( statementBuilder.columnQuote( "pi" ) ); } } return Lists.newArrayList(); }
|
/**
* Returns columns based on value dimension and output type.
*/
|
Returns columns based on value dimension and output type
|
getAggregateColumns
|
{
"repo_name": "HRHR-project/palestine",
"path": "dhis-2/dhis-services/dhis-service-analytics/src/main/java/org/hisp/dhis/analytics/event/data/JdbcEventAnalyticsManager.java",
"license": "bsd-3-clause",
"size": 30132
}
|
[
"com.google.common.collect.Lists",
"java.util.List",
"java.util.Set",
"java.util.stream.Collectors",
"org.hisp.dhis.analytics.EventOutputType",
"org.hisp.dhis.analytics.event.EventQueryParams",
"org.hisp.dhis.program.ProgramIndicator"
] |
import com.google.common.collect.Lists; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.hisp.dhis.analytics.EventOutputType; import org.hisp.dhis.analytics.event.EventQueryParams; import org.hisp.dhis.program.ProgramIndicator;
|
import com.google.common.collect.*; import java.util.*; import java.util.stream.*; import org.hisp.dhis.analytics.*; import org.hisp.dhis.analytics.event.*; import org.hisp.dhis.program.*;
|
[
"com.google.common",
"java.util",
"org.hisp.dhis"
] |
com.google.common; java.util; org.hisp.dhis;
| 2,908,297
|
private MapItemRenderer.Instance getMapRendererInstance(MapData mapdataIn)
{
MapItemRenderer.Instance mapitemrenderer$instance = (MapItemRenderer.Instance)this.loadedMaps.get(mapdataIn.mapName);
if (mapitemrenderer$instance == null)
{
mapitemrenderer$instance = new MapItemRenderer.Instance(mapdataIn);
this.loadedMaps.put(mapdataIn.mapName, mapitemrenderer$instance);
}
return mapitemrenderer$instance;
}
|
MapItemRenderer.Instance function(MapData mapdataIn) { MapItemRenderer.Instance mapitemrenderer$instance = (MapItemRenderer.Instance)this.loadedMaps.get(mapdataIn.mapName); if (mapitemrenderer$instance == null) { mapitemrenderer$instance = new MapItemRenderer.Instance(mapdataIn); this.loadedMaps.put(mapdataIn.mapName, mapitemrenderer$instance); } return mapitemrenderer$instance; }
|
/**
* Returns {@link net.minecraft.client.gui.MapItemRenderer.Instance MapItemRenderer.Instance} with given map data
*/
|
Returns <code>net.minecraft.client.gui.MapItemRenderer.Instance MapItemRenderer.Instance</code> with given map data
|
getMapRendererInstance
|
{
"repo_name": "MartyParty21/AwakenDreamsClient",
"path": "mcp/src/minecraft/net/minecraft/client/gui/MapItemRenderer.java",
"license": "gpl-3.0",
"size": 6753
}
|
[
"net.minecraft.world.storage.MapData"
] |
import net.minecraft.world.storage.MapData;
|
import net.minecraft.world.storage.*;
|
[
"net.minecraft.world"
] |
net.minecraft.world;
| 2,630,276
|
private static String generateUniqueName(final String suffix) {
String name = UUID.randomUUID().toString().replaceAll("-", "");
if (suffix != null) name += suffix;
return name;
}
|
static String function(final String suffix) { String name = UUID.randomUUID().toString().replaceAll("-", ""); if (suffix != null) name += suffix; return name; }
|
/**
* Generate a unique file name, used by createTempName() and commitStoreFile()
* @param suffix extra information to append to the generated name
* @return Unique file name
*/
|
Generate a unique file name, used by createTempName() and commitStoreFile()
|
generateUniqueName
|
{
"repo_name": "ZhangXFeng/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java",
"license": "apache-2.0",
"size": 42258
}
|
[
"java.util.UUID"
] |
import java.util.UUID;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,689,809
|
private void computeReverseMap() {
reverseMap = new int[edgeCount];
Arrays.fill(reverseMap, -1);
for (int edgeId = 0; edgeId < edgeCount; ++edgeId) {
if(reverseMap[edgeId] >= 0) {
continue;
}
int trgt = this.getTarget(edgeId);
int src = this.getSource(edgeId);
for (int edgeNum = 0; edgeNum < this.getOutEdgeCount(trgt); edgeNum++){
int otherEdgeId = this.getOutEdgeId(trgt, edgeNum);
if(src == this.getTarget(otherEdgeId) && this.getDist(edgeId) == this.getDist(otherEdgeId)) {
reverseMap[edgeId] = otherEdgeId;
reverseMap[otherEdgeId] = edgeId;
break;
}
}
}
}
|
void function() { reverseMap = new int[edgeCount]; Arrays.fill(reverseMap, -1); for (int edgeId = 0; edgeId < edgeCount; ++edgeId) { if(reverseMap[edgeId] >= 0) { continue; } int trgt = this.getTarget(edgeId); int src = this.getSource(edgeId); for (int edgeNum = 0; edgeNum < this.getOutEdgeCount(trgt); edgeNum++){ int otherEdgeId = this.getOutEdgeId(trgt, edgeNum); if(src == this.getTarget(otherEdgeId) && this.getDist(edgeId) == this.getDist(otherEdgeId)) { reverseMap[edgeId] = otherEdgeId; reverseMap[otherEdgeId] = edgeId; break; } } } }
|
/**
* Compute the reverse map
*/
|
Compute the reverse map
|
computeReverseMap
|
{
"repo_name": "ToureNPlaner/tourenplaner-server",
"path": "src/main/java/de/tourenplaner/graphrep/GraphRep.java",
"license": "apache-2.0",
"size": 25853
}
|
[
"java.util.Arrays"
] |
import java.util.Arrays;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 544,965
|
public ResultSet select(String columns, String where) throws Exception{
return doSelect(false, columns, getTableName(), where);
}
|
ResultSet function(String columns, String where) throws Exception{ return doSelect(false, columns, getTableName(), where); }
|
/**
* Do a select on given columns for all data in table, with condition.
*
* @param columns columns to select
* @param where condition
* @return resultset of data
* @throws Exception if something goes wrong
*/
|
Do a select on given columns for all data in table, with condition
|
select
|
{
"repo_name": "automenta/adams-core",
"path": "src/main/java/adams/db/AbstractIndexedTable.java",
"license": "gpl-3.0",
"size": 11569
}
|
[
"java.sql.ResultSet"
] |
import java.sql.ResultSet;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 700,508
|
public List getNameDefinition() {
return field_13_name_definition;
}
|
List function() { return field_13_name_definition; }
|
/** gets the definition, reference (Formula)
* @return definition -- can be null if we cant parse ptgs
*/
|
gets the definition, reference (Formula)
|
getNameDefinition
|
{
"repo_name": "dgault/bioformats",
"path": "components/forks/poi/src/loci/poi/hssf/record/NameRecord.java",
"license": "gpl-2.0",
"size": 28888
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,793,643
|
private void discoverHost(PacketContext context, Ip4Address ipAssigned) {
if (!allowHostDiscovery) {
// host discovery is not allowed, do nothing
return;
}
Ethernet packet = context.inPacket().parsed();
MacAddress mac = packet.getSourceMAC();
VlanId vlanId = VlanId.vlanId(packet.getVlanID());
HostLocation hostLocation = new HostLocation(context.inPacket().receivedFrom(), 0);
Set<IpAddress> ips = new HashSet<>();
ips.add(ipAssigned);
HostId hostId = HostId.hostId(mac, vlanId);
DefaultHostDescription desc = new DefaultHostDescription(mac, vlanId, hostLocation, ips);
log.info("Discovered host {}", desc);
hostProviderService.hostDetected(hostId, desc, false);
}
|
void function(PacketContext context, Ip4Address ipAssigned) { if (!allowHostDiscovery) { return; } Ethernet packet = context.inPacket().parsed(); MacAddress mac = packet.getSourceMAC(); VlanId vlanId = VlanId.vlanId(packet.getVlanID()); HostLocation hostLocation = new HostLocation(context.inPacket().receivedFrom(), 0); Set<IpAddress> ips = new HashSet<>(); ips.add(ipAssigned); HostId hostId = HostId.hostId(mac, vlanId); DefaultHostDescription desc = new DefaultHostDescription(mac, vlanId, hostLocation, ips); log.info(STR, desc); hostProviderService.hostDetected(hostId, desc, false); }
|
/**
* Integrates hosts learned through DHCP into topology.
* @param context context of the incoming message
* @param ipAssigned IP Address assigned to the host by DHCP Manager
*/
|
Integrates hosts learned through DHCP into topology
|
discoverHost
|
{
"repo_name": "sdnwiselab/onos",
"path": "apps/dhcp/app/src/main/java/org/onosproject/dhcp/impl/DhcpManager.java",
"license": "apache-2.0",
"size": 30666
}
|
[
"java.util.HashSet",
"java.util.Set",
"org.onlab.packet.Ethernet",
"org.onlab.packet.Ip4Address",
"org.onlab.packet.IpAddress",
"org.onlab.packet.MacAddress",
"org.onlab.packet.VlanId",
"org.onosproject.net.HostId",
"org.onosproject.net.HostLocation",
"org.onosproject.net.host.DefaultHostDescription",
"org.onosproject.net.packet.PacketContext"
] |
import java.util.HashSet; import java.util.Set; import org.onlab.packet.Ethernet; import org.onlab.packet.Ip4Address; import org.onlab.packet.IpAddress; import org.onlab.packet.MacAddress; import org.onlab.packet.VlanId; import org.onosproject.net.HostId; import org.onosproject.net.HostLocation; import org.onosproject.net.host.DefaultHostDescription; import org.onosproject.net.packet.PacketContext;
|
import java.util.*; import org.onlab.packet.*; import org.onosproject.net.*; import org.onosproject.net.host.*; import org.onosproject.net.packet.*;
|
[
"java.util",
"org.onlab.packet",
"org.onosproject.net"
] |
java.util; org.onlab.packet; org.onosproject.net;
| 2,323,762
|
public static SkyKey workspaceKey(RootedPath workspacePath) {
return new SkyKey(SkyFunctions.WORKSPACE_FILE, workspacePath);
}
|
static SkyKey function(RootedPath workspacePath) { return new SkyKey(SkyFunctions.WORKSPACE_FILE, workspacePath); }
|
/**
* Returns a SkyKey to find the WORKSPACE file at the given path.
*/
|
Returns a SkyKey to find the WORKSPACE file at the given path
|
workspaceKey
|
{
"repo_name": "vt09/bazel",
"path": "src/main/java/com/google/devtools/build/lib/skyframe/PackageValue.java",
"license": "apache-2.0",
"size": 2315
}
|
[
"com.google.devtools.build.lib.vfs.RootedPath",
"com.google.devtools.build.skyframe.SkyKey"
] |
import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.skyframe.SkyKey;
|
import com.google.devtools.build.lib.vfs.*; import com.google.devtools.build.skyframe.*;
|
[
"com.google.devtools"
] |
com.google.devtools;
| 1,676,749
|
public Vector<Object> findBatchOfLocations(String searchValue, boolean includeRetired, Integer start, Integer length)
throws APIException {
Vector<Object> locationList = new Vector<Object>();
MessageSourceService mss = Context.getMessageSourceService();
try {
LocationService ls = Context.getLocationService();
List<Location> locations = ls.getLocations(searchValue, includeRetired, start, length);
locationList = new Vector<Object>(locations.size());
for (Location loc : locations) {
locationList.add(new LocationListItem(loc));
}
}
catch (Exception e) {
log.error(e);
locationList.add(mss.getMessage("Location.search.error") + " - " + e.getMessage());
}
if (locationList.size() == 0) {
locationList.add(mss.getMessage("Location.noLocationsFound"));
}
return locationList;
}
|
Vector<Object> function(String searchValue, boolean includeRetired, Integer start, Integer length) throws APIException { Vector<Object> locationList = new Vector<Object>(); MessageSourceService mss = Context.getMessageSourceService(); try { LocationService ls = Context.getLocationService(); List<Location> locations = ls.getLocations(searchValue, includeRetired, start, length); locationList = new Vector<Object>(locations.size()); for (Location loc : locations) { locationList.add(new LocationListItem(loc)); } } catch (Exception e) { log.error(e); locationList.add(mss.getMessage(STR) + STR + e.getMessage()); } if (locationList.size() == 0) { locationList.add(mss.getMessage(STR)); } return locationList; }
|
/**
* Returns a list of matching locations (depending on values of start and length parameters) if
* the length parameter is not specified, then all matches will be returned from the start index
* if specified.
*
* @param searchValue is the string used to search for locations
* @param includeRetired Specifies if retired locations should be returned
* @param start the beginning index
* @param length the number of matching locations to return
* @return list of the matching locations
* @throws APIException
* @since 1.8
*/
|
Returns a list of matching locations (depending on values of start and length parameters) if the length parameter is not specified, then all matches will be returned from the start index if specified
|
findBatchOfLocations
|
{
"repo_name": "milankarunarathne/openmrs-core",
"path": "web/src/main/java/org/openmrs/web/dwr/DWREncounterService.java",
"license": "mpl-2.0",
"size": 11316
}
|
[
"java.util.List",
"java.util.Vector",
"org.openmrs.Location",
"org.openmrs.api.APIException",
"org.openmrs.api.LocationService",
"org.openmrs.api.context.Context",
"org.openmrs.messagesource.MessageSourceService"
] |
import java.util.List; import java.util.Vector; import org.openmrs.Location; import org.openmrs.api.APIException; import org.openmrs.api.LocationService; import org.openmrs.api.context.Context; import org.openmrs.messagesource.MessageSourceService;
|
import java.util.*; import org.openmrs.*; import org.openmrs.api.*; import org.openmrs.api.context.*; import org.openmrs.messagesource.*;
|
[
"java.util",
"org.openmrs",
"org.openmrs.api",
"org.openmrs.messagesource"
] |
java.util; org.openmrs; org.openmrs.api; org.openmrs.messagesource;
| 2,716,835
|
public int findColumnInsenstive(String columnName) {
final Map workMap;
synchronized (this) {
if (columnNameMap==null) {
// updateXXX and getXXX methods are case insensitive and the
// first column should be returned. The loop goes backward to
// create a map which preserves this property.
Map map = new HashMap();
for (int i = getColumnCount(); i>=1; i--) {
final String key = StringUtil.
SQLToUpperCase(
getColumnDescriptor(i).getName());
final Integer value = ReuseFactory.getInteger(i);
map.put(key, value);
}
// Ensure this map can never change.
columnNameMap = Collections.unmodifiableMap(map);
}
workMap = columnNameMap;
}
Integer val = (Integer) workMap.get(columnName);
if (val==null) {
val = (Integer) workMap.get(StringUtil.SQLToUpperCase(columnName));
}
if (val==null) {
return -1;
} else {
return val.intValue();
}
}
|
int function(String columnName) { final Map workMap; synchronized (this) { if (columnNameMap==null) { Map map = new HashMap(); for (int i = getColumnCount(); i>=1; i--) { final String key = StringUtil. SQLToUpperCase( getColumnDescriptor(i).getName()); final Integer value = ReuseFactory.getInteger(i); map.put(key, value); } columnNameMap = Collections.unmodifiableMap(map); } workMap = columnNameMap; } Integer val = (Integer) workMap.get(columnName); if (val==null) { val = (Integer) workMap.get(StringUtil.SQLToUpperCase(columnName)); } if (val==null) { return -1; } else { return val.intValue(); } }
|
/**
* Find a column name based upon the JDBC rules for
* getXXX and setXXX. Name matching is case-insensitive,
* matching the first name (1-based) if there are multiple
* columns that map to the same name.
*/
|
Find a column name based upon the JDBC rules for getXXX and setXXX. Name matching is case-insensitive, matching the first name (1-based) if there are multiple columns that map to the same name
|
findColumnInsenstive
|
{
"repo_name": "lpxz/grail-derby104",
"path": "java/engine/org/apache/derby/impl/sql/GenericResultDescription.java",
"license": "apache-2.0",
"size": 7742
}
|
[
"java.util.Collections",
"java.util.HashMap",
"java.util.Map",
"org.apache.derby.iapi.util.ReuseFactory",
"org.apache.derby.iapi.util.StringUtil"
] |
import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.apache.derby.iapi.util.ReuseFactory; import org.apache.derby.iapi.util.StringUtil;
|
import java.util.*; import org.apache.derby.iapi.util.*;
|
[
"java.util",
"org.apache.derby"
] |
java.util; org.apache.derby;
| 2,405,650
|
public RestResponse<Job> search(ObjectMap params) throws ClientException {
params = params != null ? params : new ObjectMap();
return execute("jobs", null, null, null, "search", params, GET, Job.class);
}
|
RestResponse<Job> function(ObjectMap params) throws ClientException { params = params != null ? params : new ObjectMap(); return execute("jobs", null, null, null, STR, params, GET, Job.class); }
|
/**
* Job search method.
* @param params Map containing any of the following optional parameters.
* include: Fields included in the response, whole JSON path must be provided.
* exclude: Fields excluded in the response, whole JSON path must be provided.
* limit: Number of results to be returned.
* skip: Number of results to skip.
* count: Get the total number of results matching the query. Deactivated by default.
* study: Study [[user@]project:]study where study and project can be either the ID or UUID.
* otherStudies: Flag indicating the entries being queried can belong to any related study, not just the primary one.
* id: Job ID. It must be a unique string within the study. An id will be autogenerated automatically if not provided.
* tool: Tool executed by the job.
* userId: User that created the job.
* priority: Priority of the job.
* internal.status.name: Job internal status.
* creationDate: Creation date. Format: yyyyMMddHHmmss. Examples: >2018, 2017-2018, <201805.
* modificationDate: Modification date. Format: yyyyMMddHHmmss. Examples: >2018, 2017-2018, <201805.
* visited: Visited status of job.
* tags: Job tags.
* input: Comma separated list of file ids used as input.
* output: Comma separated list of file ids used as output.
* acl: Filter entries for which a user has the provided permissions. Format: acl={user}:{permissions}. Example:
* acl=john:WRITE,WRITE_ANNOTATIONS will return all entries for which user john has both WRITE and WRITE_ANNOTATIONS
* permissions. Only study owners or administrators can query by this field. .
* release: Release when it was created.
* deleted: Boolean to retrieve deleted jobs.
* @return a RestResponse object.
* @throws ClientException ClientException if there is any server error.
*/
|
Job search method
|
search
|
{
"repo_name": "j-coll/opencga",
"path": "opencga-client/src/main/java/org/opencb/opencga/client/rest/clients/JobClient.java",
"license": "apache-2.0",
"size": 12562
}
|
[
"org.opencb.commons.datastore.core.ObjectMap",
"org.opencb.opencga.client.exceptions.ClientException",
"org.opencb.opencga.core.models.job.Job",
"org.opencb.opencga.core.response.RestResponse"
] |
import org.opencb.commons.datastore.core.ObjectMap; import org.opencb.opencga.client.exceptions.ClientException; import org.opencb.opencga.core.models.job.Job; import org.opencb.opencga.core.response.RestResponse;
|
import org.opencb.commons.datastore.core.*; import org.opencb.opencga.client.exceptions.*; import org.opencb.opencga.core.models.job.*; import org.opencb.opencga.core.response.*;
|
[
"org.opencb.commons",
"org.opencb.opencga"
] |
org.opencb.commons; org.opencb.opencga;
| 1,388,253
|
@Override public void exitForInit(@NotNull Java7Parser.ForInitContext ctx) { }
|
@Override public void exitForInit(@NotNull Java7Parser.ForInitContext ctx) { }
|
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
|
The default implementation does nothing
|
enterForInit
|
{
"repo_name": "jsteenbeeke/antlr-java-parser",
"path": "src/main/java/com/github/antlrjavaparser/Java7ParserBaseListener.java",
"license": "lgpl-3.0",
"size": 53492
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 1,899,336
|
private void initTypedArray(Context context, AttributeSet attrs) {
final TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.rippleLayout);
mRippleColor = typedArray.getColor(R.styleable.rippleLayout_color, DEFAULT_RIPPLE_COLOR);
mStrokeWidth = typedArray.getDimension(R.styleable.rippleLayout_strokeWidth, DEFAULT_STROKE_WIDTH);
mRippleRadius = typedArray.getDimension(R.styleable.rippleLayout_radius, DEFAULT_RADIUS);
mAnimDuration = typedArray.getInt(R.styleable.rippleLayout_duration, DEFAULT_DURATION_TIME);
mRippleViewNums = typedArray.getInt(R.styleable.rippleLayout_rippleNums, DEFAULT_RIPPLE_COUNT);
mRippleScale = typedArray.getFloat(R.styleable.rippleLayout_scale, DEFAULT_SCALE);
typedArray.recycle();
}
|
void function(Context context, AttributeSet attrs) { final TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.rippleLayout); mRippleColor = typedArray.getColor(R.styleable.rippleLayout_color, DEFAULT_RIPPLE_COLOR); mStrokeWidth = typedArray.getDimension(R.styleable.rippleLayout_strokeWidth, DEFAULT_STROKE_WIDTH); mRippleRadius = typedArray.getDimension(R.styleable.rippleLayout_radius, DEFAULT_RADIUS); mAnimDuration = typedArray.getInt(R.styleable.rippleLayout_duration, DEFAULT_DURATION_TIME); mRippleViewNums = typedArray.getInt(R.styleable.rippleLayout_rippleNums, DEFAULT_RIPPLE_COUNT); mRippleScale = typedArray.getFloat(R.styleable.rippleLayout_scale, DEFAULT_SCALE); typedArray.recycle(); }
|
/**
* Initialize the custom attribute container
*
* @param context
* @param attrs
*/
|
Initialize the custom attribute container
|
initTypedArray
|
{
"repo_name": "coderJohnZhang/TvLauncher",
"path": "src/com/gotech/tv/launcher/anim/RippleLayout.java",
"license": "apache-2.0",
"size": 8406
}
|
[
"android.content.Context",
"android.content.res.TypedArray",
"android.util.AttributeSet"
] |
import android.content.Context; import android.content.res.TypedArray; import android.util.AttributeSet;
|
import android.content.*; import android.content.res.*; import android.util.*;
|
[
"android.content",
"android.util"
] |
android.content; android.util;
| 816,959
|
public static <T, R> Collection<R> doInParallel(
ExecutorService executorSvc,
Collection<T> srcDatas,
IgniteThrowableFunction<T, R> operation
) throws IgniteCheckedException, IgniteInterruptedCheckedException {
return doInParallel(srcDatas.size(), executorSvc, srcDatas, operation);
}
|
static <T, R> Collection<R> function( ExecutorService executorSvc, Collection<T> srcDatas, IgniteThrowableFunction<T, R> operation ) throws IgniteCheckedException, IgniteInterruptedCheckedException { return doInParallel(srcDatas.size(), executorSvc, srcDatas, operation); }
|
/**
* Execute operation on data in parallel.
*
* @param executorSvc Service for parallel execution.
* @param srcDatas List of data for parallelization.
* @param operation Logic for execution of on each item of data.
* @param <T> Type of data.
* @throws IgniteCheckedException if parallel execution was failed.
*/
|
Execute operation on data in parallel
|
doInParallel
|
{
"repo_name": "ascherbakoff/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java",
"license": "apache-2.0",
"size": 385578
}
|
[
"java.util.Collection",
"java.util.concurrent.ExecutorService",
"org.apache.ignite.IgniteCheckedException",
"org.apache.ignite.internal.IgniteInterruptedCheckedException",
"org.apache.ignite.internal.util.lang.IgniteThrowableFunction"
] |
import java.util.Collection; import java.util.concurrent.ExecutorService; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.util.lang.IgniteThrowableFunction;
|
import java.util.*; import java.util.concurrent.*; import org.apache.ignite.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.util.lang.*;
|
[
"java.util",
"org.apache.ignite"
] |
java.util; org.apache.ignite;
| 2,273,747
|
@Override
public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
if (itemPropertyDescriptors == null) {
super.getPropertyDescriptors(object);
}
return itemPropertyDescriptors;
}
|
List<IItemPropertyDescriptor> function(Object object) { if (itemPropertyDescriptors == null) { super.getPropertyDescriptors(object); } return itemPropertyDescriptors; }
|
/**
* This returns the property descriptors for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This returns the property descriptors for the adapted class.
|
getPropertyDescriptors
|
{
"repo_name": "patrickneubauer/XMLIntellEdit",
"path": "use-cases/TOSCA/eu.artist.tosca.edit/src/tosca/provider/ConstraintsTypeItemProvider.java",
"license": "mit",
"size": 4980
}
|
[
"java.util.List",
"org.eclipse.emf.edit.provider.IItemPropertyDescriptor"
] |
import java.util.List; import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
|
import java.util.*; import org.eclipse.emf.edit.provider.*;
|
[
"java.util",
"org.eclipse.emf"
] |
java.util; org.eclipse.emf;
| 2,882,736
|
@WebMethod(operationName="updateRole")
@WebResult(name = "role")
@CacheEvict(value={Permission.Cache.NAME, Responsibility.Cache.NAME, Role.Cache.NAME, RoleMembership.Cache.NAME, RoleMember.Cache.NAME, DelegateMember.Cache.NAME, RoleResponsibility.Cache.NAME, DelegateType.Cache.NAME}, allEntries = true)
Role updateRole(@WebParam(name = "role") Role role)
throws RiceIllegalArgumentException, RiceIllegalStateException;
|
@WebMethod(operationName=STR) @WebResult(name = "role") @CacheEvict(value={Permission.Cache.NAME, Responsibility.Cache.NAME, Role.Cache.NAME, RoleMembership.Cache.NAME, RoleMember.Cache.NAME, DelegateMember.Cache.NAME, RoleResponsibility.Cache.NAME, DelegateType.Cache.NAME}, allEntries = true) Role updateRole(@WebParam(name = "role") Role role) throws RiceIllegalArgumentException, RiceIllegalStateException;
|
/**
* This will update a {@link Role}.
*
* @param role the role to update
* @throws RiceIllegalArgumentException if the role is null
* @throws RiceIllegalStateException if the role does not exist in the system
*/
|
This will update a <code>Role</code>
|
updateRole
|
{
"repo_name": "ricepanda/rice-git3",
"path": "rice-middleware/kim/kim-api/src/main/java/org/kuali/rice/kim/api/role/RoleService.java",
"license": "apache-2.0",
"size": 48854
}
|
[
"javax.jws.WebMethod",
"javax.jws.WebParam",
"javax.jws.WebResult",
"org.kuali.rice.core.api.exception.RiceIllegalArgumentException",
"org.kuali.rice.core.api.exception.RiceIllegalStateException",
"org.kuali.rice.kim.api.common.delegate.DelegateMember",
"org.kuali.rice.kim.api.common.delegate.DelegateType",
"org.kuali.rice.kim.api.permission.Permission",
"org.kuali.rice.kim.api.responsibility.Responsibility",
"org.springframework.cache.annotation.CacheEvict"
] |
import javax.jws.WebMethod; import javax.jws.WebParam; import javax.jws.WebResult; import org.kuali.rice.core.api.exception.RiceIllegalArgumentException; import org.kuali.rice.core.api.exception.RiceIllegalStateException; import org.kuali.rice.kim.api.common.delegate.DelegateMember; import org.kuali.rice.kim.api.common.delegate.DelegateType; import org.kuali.rice.kim.api.permission.Permission; import org.kuali.rice.kim.api.responsibility.Responsibility; import org.springframework.cache.annotation.CacheEvict;
|
import javax.jws.*; import org.kuali.rice.core.api.exception.*; import org.kuali.rice.kim.api.common.delegate.*; import org.kuali.rice.kim.api.permission.*; import org.kuali.rice.kim.api.responsibility.*; import org.springframework.cache.annotation.*;
|
[
"javax.jws",
"org.kuali.rice",
"org.springframework.cache"
] |
javax.jws; org.kuali.rice; org.springframework.cache;
| 2,794,482
|
public LocalDate optDate(String key) {
Object object = this.get(key);
if (object == null) {
return null;
}
if (object instanceof LocalDate) {
return (LocalDate) object;
}
try {
return LocalDate.parse(object.toString());
} catch (Exception e) {
return null;
}
}
|
LocalDate function(String key) { Object object = this.get(key); if (object == null) { return null; } if (object instanceof LocalDate) { return (LocalDate) object; } try { return LocalDate.parse(object.toString()); } catch (Exception e) { return null; } }
|
/**
* Get an optional double associated with a key, or the defaultValue if
* there is no such key or if its value is not a number. If the value is a
* string, an attempt will be made to evaluate it as a number.
*
* @param key
* A key string.
* @return Date, or null if there is no value for this key
*/
|
Get an optional double associated with a key, or the defaultValue if there is no such key or if its value is not a number. If the value is a string, an attempt will be made to evaluate it as a number
|
optDate
|
{
"repo_name": "simplity/simplity",
"path": "simplity/core/src/main/java/org/simplity/json/JSONObject.java",
"license": "mit",
"size": 56094
}
|
[
"java.time.LocalDate"
] |
import java.time.LocalDate;
|
import java.time.*;
|
[
"java.time"
] |
java.time;
| 1,559,067
|
public static void checkService(Event e) throws InsufficientInformationException {
if (e == null) {
throw new NullPointerException("e is null");
} else if (e.getService() == null || e.getService().length() == 0) {
throw new InsufficientInformationException("service for event is unavailable");
}
}
|
static void function(Event e) throws InsufficientInformationException { if (e == null) { throw new NullPointerException(STR); } else if (e.getService() == null e.getService().length() == 0) { throw new InsufficientInformationException(STR); } }
|
/**
* Ensures that the given event has a service parameter
*
* @param e
* the event to check
* @throws org.opennms.netmgt.capsd.InsufficientInformationException
* if the event does not have a service
*/
|
Ensures that the given event has a service parameter
|
checkService
|
{
"repo_name": "bugcy013/opennms-tmp-tools",
"path": "opennms-services/src/main/java/org/opennms/netmgt/capsd/EventUtils.java",
"license": "gpl-2.0",
"size": 34931
}
|
[
"org.opennms.netmgt.xml.event.Event"
] |
import org.opennms.netmgt.xml.event.Event;
|
import org.opennms.netmgt.xml.event.*;
|
[
"org.opennms.netmgt"
] |
org.opennms.netmgt;
| 1,540,389
|
public int getRelativeTop(Window window) {
Rectangle bounds;
bounds = window.getGraphicsConfiguration().getBounds();
return bounds.y - window.getBounds().y;
}
|
int function(Window window) { Rectangle bounds; bounds = window.getGraphicsConfiguration().getBounds(); return bounds.y - window.getBounds().y; }
|
/**
* Returns the relative top Y position in its screen.
*
* @param window the window to determine the top Y for
* @return the top Y
*/
|
Returns the relative top Y position in its screen
|
getRelativeTop
|
{
"repo_name": "waikato-datamining/adams-base",
"path": "adams-core/src/main/java/adams/gui/core/GUIHelper.java",
"license": "gpl-3.0",
"size": 88762
}
|
[
"java.awt.Rectangle",
"java.awt.Window"
] |
import java.awt.Rectangle; import java.awt.Window;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 245,453
|
public List<Integer> getLines() {
final JSONArray json = jsonData.optJSONArray("lines");
if (json == null) {
return null;
}
final List<Integer> list = new ArrayList<>(json.length());
for (int i = 0; i < json.length(); i++) {
list.add(json.getInt(i));
}
return Collections.unmodifiableList(list);
}
|
List<Integer> function() { final JSONArray json = jsonData.optJSONArray("lines"); if (json == null) { return null; } final List<Integer> list = new ArrayList<>(json.length()); for (int i = 0; i < json.length(); i++) { list.add(json.getInt(i)); } return Collections.unmodifiableList(list); }
|
/**
* Deprecated: The code locations of the breakpoints.
*/
|
Deprecated: The code locations of the breakpoints
|
getLines
|
{
"repo_name": "smarr/Truffle",
"path": "tools/src/com.oracle.truffle.tools.dap/src/com/oracle/truffle/tools/dap/types/SetBreakpointsArguments.java",
"license": "gpl-2.0",
"size": 5767
}
|
[
"com.oracle.truffle.tools.utils.json.JSONArray",
"java.util.ArrayList",
"java.util.Collections",
"java.util.List"
] |
import com.oracle.truffle.tools.utils.json.JSONArray; import java.util.ArrayList; import java.util.Collections; import java.util.List;
|
import com.oracle.truffle.tools.utils.json.*; import java.util.*;
|
[
"com.oracle.truffle",
"java.util"
] |
com.oracle.truffle; java.util;
| 858,931
|
public static ArrayList<Notification> get10NotifOfUser(int userId,
int offset) {
ArrayList<Notification> notifications = new ArrayList<Notification>();
Hashtable<String, Object> hashTable = new Hashtable<String, Object>();
hashTable.put("usernotif", userId);
try {
// On r�cup�re toutes les notifications qui sont envoy� au user id
for (AbstractModel notification : persistance.selectMany(
Notification.class, hashTable, "", offset, "10")) {
notifications.add((Notification) notification);
}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return notifications;
}
private int numNotif;
private java.sql.Timestamp sendDateNotif;
private java.sql.Timestamp recepDateNotif;
private String objectNotif;
private String messageNotif;
private int usernotif;
public java.util.Collection notificationCenter = new java.util.TreeSet();
public Notification() {
this(-1);
}
// contructeurs
public Notification(int aNum) {
setNumNotif(aNum);
}
|
static ArrayList<Notification> function(int userId, int offset) { ArrayList<Notification> notifications = new ArrayList<Notification>(); Hashtable<String, Object> hashTable = new Hashtable<String, Object>(); hashTable.put(STR, userId); try { for (AbstractModel notification : persistance.selectMany( Notification.class, hashTable, STR10")) { notifications.add((Notification) notification); } } catch (SQLException e) { e.printStackTrace(); } return notifications; } private int numNotif; private java.sql.Timestamp sendDateNotif; private java.sql.Timestamp recepDateNotif; private String objectNotif; private String messageNotif; private int usernotif; public java.util.Collection notificationCenter = new java.util.TreeSet(); public Notification() { this(-1); } public Notification(int aNum) { setNumNotif(aNum); }
|
/**
* Gets the 10 notif of user.
*
* @param userId the user id
* @param offset the offset
* @return the 10 notif of user
*/
|
Gets the 10 notif of user
|
get10NotifOfUser
|
{
"repo_name": "florentdescroix/MCSE_ZenLounge",
"path": "src/BusinessLogic/Models/Notification.java",
"license": "gpl-2.0",
"size": 3884
}
|
[
"java.sql.SQLException",
"java.util.ArrayList",
"java.util.Hashtable"
] |
import java.sql.SQLException; import java.util.ArrayList; import java.util.Hashtable;
|
import java.sql.*; import java.util.*;
|
[
"java.sql",
"java.util"
] |
java.sql; java.util;
| 1,197,873
|
static DecoderConfig parseMP4DecoderSpecificInfo(byte[] data) throws AACException {
final BitStream in = new BitStream(data);
final DecoderConfig config = new DecoderConfig();
try {
config.profile = readProfile(in);
int sf = in.readBits(4);
if(sf==0xF) config.sampleFrequency = SampleFrequency.forFrequency(in.readBits(24));
else config.sampleFrequency = SampleFrequency.forInt(sf);
config.channelConfiguration = ChannelConfiguration.forInt(in.readBits(4));
switch(config.profile) {
case AAC_SBR:
config.extProfile = config.profile;
config.sbrPresent = true;
sf = in.readBits(4);
//TODO: 24 bits already read; read again?
//if(sf==0xF) config.sampleFrequency = SampleFrequency.forFrequency(in.readBits(24));
//if sample frequencies are the same: downsample SBR
config.downSampledSBR = config.sampleFrequency.getIndex()==sf;
config.sampleFrequency = SampleFrequency.forInt(sf);
config.profile = readProfile(in);
break;
case AAC_MAIN:
case AAC_LC:
case AAC_SSR:
case AAC_LTP:
case ER_AAC_LC:
case ER_AAC_LTP:
case ER_AAC_LD:
//ga-specific info:
config.frameLengthFlag = in.readBool();
if(config.frameLengthFlag) throw new AACException("config uses 960-sample frames, not yet supported"); //TODO: are 960-frames working yet?
config.dependsOnCoreCoder = in.readBool();
if(config.dependsOnCoreCoder) config.coreCoderDelay = in.readBits(14);
else config.coreCoderDelay = 0;
config.extensionFlag = in.readBool();
if(config.extensionFlag) {
if(config.profile.isErrorResilientProfile()) {
config.sectionDataResilience = in.readBool();
config.scalefactorResilience = in.readBool();
config.spectralDataResilience = in.readBool();
}
//extensionFlag3
in.skipBit();
}
if(config.channelConfiguration==ChannelConfiguration.CHANNEL_CONFIG_NONE) {
//TODO: is this working correct? -> ISO 14496-3 part 1: 1.A.4.3
in.skipBits(3); //PCE
PCE pce = new PCE();
pce.decode(in);
config.profile = pce.getProfile();
config.sampleFrequency = pce.getSampleFrequency();
config.channelConfiguration = ChannelConfiguration.forInt(pce.getChannelCount());
}
if(in.getBitsLeft()>10) readSyncExtension(in, config);
break;
default:
throw new AACException("profile not supported: "+config.profile.getIndex());
}
return config;
}
finally {
in.destroy();
}
}
|
static DecoderConfig parseMP4DecoderSpecificInfo(byte[] data) throws AACException { final BitStream in = new BitStream(data); final DecoderConfig config = new DecoderConfig(); try { config.profile = readProfile(in); int sf = in.readBits(4); if(sf==0xF) config.sampleFrequency = SampleFrequency.forFrequency(in.readBits(24)); else config.sampleFrequency = SampleFrequency.forInt(sf); config.channelConfiguration = ChannelConfiguration.forInt(in.readBits(4)); switch(config.profile) { case AAC_SBR: config.extProfile = config.profile; config.sbrPresent = true; sf = in.readBits(4); config.downSampledSBR = config.sampleFrequency.getIndex()==sf; config.sampleFrequency = SampleFrequency.forInt(sf); config.profile = readProfile(in); break; case AAC_MAIN: case AAC_LC: case AAC_SSR: case AAC_LTP: case ER_AAC_LC: case ER_AAC_LTP: case ER_AAC_LD: config.frameLengthFlag = in.readBool(); if(config.frameLengthFlag) throw new AACException(STR); config.dependsOnCoreCoder = in.readBool(); if(config.dependsOnCoreCoder) config.coreCoderDelay = in.readBits(14); else config.coreCoderDelay = 0; config.extensionFlag = in.readBool(); if(config.extensionFlag) { if(config.profile.isErrorResilientProfile()) { config.sectionDataResilience = in.readBool(); config.scalefactorResilience = in.readBool(); config.spectralDataResilience = in.readBool(); } in.skipBit(); } if(config.channelConfiguration==ChannelConfiguration.CHANNEL_CONFIG_NONE) { in.skipBits(3); PCE pce = new PCE(); pce.decode(in); config.profile = pce.getProfile(); config.sampleFrequency = pce.getSampleFrequency(); config.channelConfiguration = ChannelConfiguration.forInt(pce.getChannelCount()); } if(in.getBitsLeft()>10) readSyncExtension(in, config); break; default: throw new AACException(STR+config.profile.getIndex()); } return config; } finally { in.destroy(); } }
|
/**
* Parses the input arrays as a DecoderSpecificInfo, as used in MP4
* containers.
* @return a DecoderConfig
*/
|
Parses the input arrays as a DecoderSpecificInfo, as used in MP4 containers
|
parseMP4DecoderSpecificInfo
|
{
"repo_name": "dubenju/javay",
"path": "src/java/net/sourceforge/jaad/aac/DecoderConfig.java",
"license": "apache-2.0",
"size": 7384
}
|
[
"net.sourceforge.jaad.aac.syntax.BitStream"
] |
import net.sourceforge.jaad.aac.syntax.BitStream;
|
import net.sourceforge.jaad.aac.syntax.*;
|
[
"net.sourceforge.jaad"
] |
net.sourceforge.jaad;
| 1,777,585
|
public static double normalizeDOUBLE(double v) throws StandardException
{
boolean invalid = Double.isNaN(v) || Double.isInfinite(v);
if (v < Limits.DB2_SMALLEST_DOUBLE ||
v > Limits.DB2_LARGEST_DOUBLE ||
(v > 0 && v < Limits.DB2_SMALLEST_POSITIVE_DOUBLE) ||
(v < 0 && v > Limits.DB2_LARGEST_NEGATIVE_DOUBLE)) {
if (useDB2Limits()) {
invalid = true;
}
}
if (invalid) {
throw StandardException.newException(
SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.DOUBLE_NAME);
}
// Normalize negative doubles to be "positive" (can't detect easily without using Double object because -0.0f = 0.0f)
// DERBY-2447: It shouldn't matter whether we compare to 0.0d or -0.0d,
// both should match negative zero, but comparing to 0.0d triggered
// this JVM bug: http://bugs.sun.com/view_bug.do?bug_id=6833879
if (v == -0.0d) v = 0.0d;
return v;
}
|
static double function(double v) throws StandardException { boolean invalid = Double.isNaN(v) Double.isInfinite(v); if (v < Limits.DB2_SMALLEST_DOUBLE v > Limits.DB2_LARGEST_DOUBLE (v > 0 && v < Limits.DB2_SMALLEST_POSITIVE_DOUBLE) (v < 0 && v > Limits.DB2_LARGEST_NEGATIVE_DOUBLE)) { if (useDB2Limits()) { invalid = true; } } if (invalid) { throw StandardException.newException( SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.DOUBLE_NAME); } if (v == -0.0d) v = 0.0d; return v; }
|
/**
* normalizeDOUBLE normalizes the value, so that negative zero (-0.0)
* becomes positive.
* @throws StandardException if v is not a number (NaN) or is infinite.
*/
|
normalizeDOUBLE normalizes the value, so that negative zero (-0.0) becomes positive
|
normalizeDOUBLE
|
{
"repo_name": "apache/derby",
"path": "java/org.apache.derby.engine/org/apache/derby/iapi/types/NumberDataType.java",
"license": "apache-2.0",
"size": 17800
}
|
[
"org.apache.derby.shared.common.error.StandardException",
"org.apache.derby.shared.common.reference.Limits",
"org.apache.derby.shared.common.reference.SQLState"
] |
import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.shared.common.reference.Limits; import org.apache.derby.shared.common.reference.SQLState;
|
import org.apache.derby.shared.common.error.*; import org.apache.derby.shared.common.reference.*;
|
[
"org.apache.derby"
] |
org.apache.derby;
| 2,835,028
|
OneTimeTokenAccount create(String username);
|
OneTimeTokenAccount create(String username);
|
/**
* Create one time token account.
*
* @param username the username
* @return the one time token account
*/
|
Create one time token account
|
create
|
{
"repo_name": "apereo/cas",
"path": "support/cas-server-support-otp-mfa-core/src/main/java/org/apereo/cas/otp/repository/credentials/OneTimeTokenCredentialRepository.java",
"license": "apache-2.0",
"size": 2119
}
|
[
"org.apereo.cas.authentication.OneTimeTokenAccount"
] |
import org.apereo.cas.authentication.OneTimeTokenAccount;
|
import org.apereo.cas.authentication.*;
|
[
"org.apereo.cas"
] |
org.apereo.cas;
| 323,626
|
public void setProperty(final PersistentProperty<P> property) {
this.property = property;
}
|
void function(final PersistentProperty<P> property) { this.property = property; }
|
/**
* Sets a new value for the property field.
*
* @param property
* The new value for the property field.
*/
|
Sets a new value for the property field
|
setProperty
|
{
"repo_name": "lunarray-org/model-descriptor",
"path": "src/main/java/org/lunarray/model/descriptor/accessor/reference/property/impl/PropertyReference.java",
"license": "lgpl-3.0",
"size": 9152
}
|
[
"org.lunarray.model.descriptor.accessor.property.PersistentProperty"
] |
import org.lunarray.model.descriptor.accessor.property.PersistentProperty;
|
import org.lunarray.model.descriptor.accessor.property.*;
|
[
"org.lunarray.model"
] |
org.lunarray.model;
| 2,194,118
|
Lock getParentActiveWriteLock() {
if (this.parentAdvisor != null) {
return this.parentAdvisor.getActiveWriteLock();
}
return null;
}
|
Lock getParentActiveWriteLock() { if (this.parentAdvisor != null) { return this.parentAdvisor.getActiveWriteLock(); } return null; }
|
/**
* Returns the lock that prevents the parent's primary from moving while active writes are in
* progress. This should be locked before checking if the local bucket is primary.
*
* @return the lock for in-progress write operations
*/
|
Returns the lock that prevents the parent's primary from moving while active writes are in progress. This should be locked before checking if the local bucket is primary
|
getParentActiveWriteLock
|
{
"repo_name": "charliemblack/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/cache/BucketAdvisor.java",
"license": "apache-2.0",
"size": 99671
}
|
[
"java.util.concurrent.locks.Lock"
] |
import java.util.concurrent.locks.Lock;
|
import java.util.concurrent.locks.*;
|
[
"java.util"
] |
java.util;
| 2,239,778
|
private void register(Path dir) throws IOException {
WatchKey key = dir.register(_watcher, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY);
_keys.put(key, dir);
}
|
void function(Path dir) throws IOException { WatchKey key = dir.register(_watcher, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY); _keys.put(key, dir); }
|
/**
* Register the given directory with the WatchService
*/
|
Register the given directory with the WatchService
|
register
|
{
"repo_name": "PerfectCarl/ninja-cmd",
"path": "src/net_alchim31_livereload/Watcher.java",
"license": "apache-2.0",
"size": 5031
}
|
[
"java.io.IOException",
"java.nio.file.Path",
"java.nio.file.WatchKey"
] |
import java.io.IOException; import java.nio.file.Path; import java.nio.file.WatchKey;
|
import java.io.*; import java.nio.file.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 2,570,844
|
public void setButton(String type, String title, String url, String payload) {
HashMap<String, String> button = new HashMap<String, String>();
button.put("type", type);
button.put("title", title);
button.put("url", url);
button.put("payload", payload);
this.buttons.add(button);
}
|
void function(String type, String title, String url, String payload) { HashMap<String, String> button = new HashMap<String, String>(); button.put("type", type); button.put("title", title); button.put("url", url); button.put(STR, payload); this.buttons.add(button); }
|
/**
* Set Button
*
* @param type the button type
* @param title the button title
* @param url the button url
* @param payload the button payload
*/
|
Set Button
|
setButton
|
{
"repo_name": "Clivern/Racter",
"path": "src/main/java/com/clivern/racter/senders/templates/ListTemplate.java",
"license": "apache-2.0",
"size": 14999
}
|
[
"java.util.HashMap"
] |
import java.util.HashMap;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,132,660
|
public RectangleAnchor getTextAnchor() {
return this.textAnchor;
}
|
RectangleAnchor function() { return this.textAnchor; }
|
/**
* Returns the text anchor (never <code>null</code>).
*
* @return The text anchor.
*
* @since 1.0.13
*/
|
Returns the text anchor (never <code>null</code>)
|
getTextAnchor
|
{
"repo_name": "Mr-Steve/LTSpice_Library_Manager",
"path": "libs/jfreechart-1.0.16/source/org/jfree/chart/block/LabelBlock.java",
"license": "gpl-2.0",
"size": 13399
}
|
[
"org.jfree.ui.RectangleAnchor"
] |
import org.jfree.ui.RectangleAnchor;
|
import org.jfree.ui.*;
|
[
"org.jfree.ui"
] |
org.jfree.ui;
| 323,177
|
@Override public T visitLexerBlock(@NotNull ANTLRv4Parser.LexerBlockContext ctx) { return visitChildren(ctx); }
|
@Override public T visitLexerBlock(@NotNull ANTLRv4Parser.LexerBlockContext ctx) { return visitChildren(ctx); }
|
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
|
The default implementation returns the result of calling <code>#visitChildren</code> on ctx
|
visitRuleref
|
{
"repo_name": "ajosephau/generic_multiobjective_superoptimizer",
"path": "src/org/gso/antlrv4parser/ANTLRv4ParserBaseVisitor.java",
"license": "gpl-2.0",
"size": 15727
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 971,862
|
public Dfp solve(final int maxEval, final UnivariateDfpFunction f,
final Dfp min, final Dfp max, final AllowedSolution allowedSolution)
throws NullArgumentException, NoBracketingException {
return solve(maxEval, f, min, max, min.add(max).divide(2), allowedSolution);
}
|
Dfp function(final int maxEval, final UnivariateDfpFunction f, final Dfp min, final Dfp max, final AllowedSolution allowedSolution) throws NullArgumentException, NoBracketingException { return solve(maxEval, f, min, max, min.add(max).divide(2), allowedSolution); }
|
/**
* Solve for a zero in the given interval.
* A solver may require that the interval brackets a single zero root.
* Solvers that do require bracketing should be able to handle the case
* where one of the endpoints is itself a root.
*
* @param maxEval Maximum number of evaluations.
* @param f Function to solve.
* @param min Lower bound for the interval.
* @param max Upper bound for the interval.
* @param allowedSolution The kind of solutions that the root-finding algorithm may
* accept as solutions.
* @return a value where the function is zero.
* @exception NullArgumentException if f is null.
* @exception NoBracketingException if root cannot be bracketed
*/
|
Solve for a zero in the given interval. A solver may require that the interval brackets a single zero root. Solvers that do require bracketing should be able to handle the case where one of the endpoints is itself a root
|
solve
|
{
"repo_name": "charles-cooper/idylfin",
"path": "src/org/apache/commons/math3/dfp/BracketingNthOrderBrentSolverDFP.java",
"license": "apache-2.0",
"size": 16322
}
|
[
"org.apache.commons.math3.analysis.solvers.AllowedSolution",
"org.apache.commons.math3.exception.NoBracketingException",
"org.apache.commons.math3.exception.NullArgumentException"
] |
import org.apache.commons.math3.analysis.solvers.AllowedSolution; import org.apache.commons.math3.exception.NoBracketingException; import org.apache.commons.math3.exception.NullArgumentException;
|
import org.apache.commons.math3.analysis.solvers.*; import org.apache.commons.math3.exception.*;
|
[
"org.apache.commons"
] |
org.apache.commons;
| 1,173,697
|
public static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter,
Job parentJob, boolean loggingRemoteWork ) {
Result result = new Result();
//
// See if the remote transformations have finished.
// We could just look at the master, but I doubt that that is enough in all situations.
//
SlaveServer[] slaveServers = transSplitter.getSlaveTargets(); // <-- ask these guys
TransMeta[] slaves = transSplitter.getSlaves();
SlaveServer masterServer;
try {
masterServer = transSplitter.getMasterServer();
} catch ( KettleException e ) {
log.logError( "Error getting the master server", e );
masterServer = null;
result.setNrErrors( result.getNrErrors() + 1 );
}
TransMeta master = transSplitter.getMaster();
// Slaves first...
//
for ( int s = 0; s < slaveServers.length; s++ ) {
try {
// Get the detailed status of the slave transformation...
//
SlaveServerTransStatus transStatus = slaveServers[s].getTransStatus( slaves[s].getName(), "", 0 );
Result transResult = transStatus.getResult( slaves[s] );
result.add( transResult );
if ( loggingRemoteWork ) {
log.logBasic( "-- Slave : " + slaveServers[s].getName() );
log.logBasic( transStatus.getLoggingString() );
}
} catch ( Exception e ) {
result.setNrErrors( result.getNrErrors() + 1 );
log.logError( "Unable to contact slave server '" + slaveServers[s].getName()
+ "' to get result of slave transformation : " + e.toString() );
}
}
// Clean up the master too
//
if ( master != null && master.nrSteps() > 0 ) {
try {
// Get the detailed status of the slave transformation...
//
SlaveServerTransStatus transStatus = masterServer.getTransStatus( master.getName(), "", 0 );
Result transResult = transStatus.getResult( master );
result.add( transResult );
if ( loggingRemoteWork ) {
log.logBasic( "-- Master : " + masterServer.getName() );
log.logBasic( transStatus.getLoggingString() );
}
} catch ( Exception e ) {
result.setNrErrors( result.getNrErrors() + 1 );
log.logError( "Unable to contact master server '" + masterServer.getName()
+ "' to get result of master transformation : " + e.toString() );
}
}
return result;
}
|
static final Result function( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ) { Result result = new Result(); TransMeta[] slaves = transSplitter.getSlaves(); SlaveServer masterServer; try { masterServer = transSplitter.getMasterServer(); } catch ( KettleException e ) { log.logError( STR, e ); masterServer = null; result.setNrErrors( result.getNrErrors() + 1 ); } TransMeta master = transSplitter.getMaster(); try { Result transResult = transStatus.getResult( slaves[s] ); result.add( transResult ); if ( loggingRemoteWork ) { log.logBasic( STR + slaveServers[s].getName() ); log.logBasic( transStatus.getLoggingString() ); } } catch ( Exception e ) { result.setNrErrors( result.getNrErrors() + 1 ); log.logError( STR + slaveServers[s].getName() + STR + e.toString() ); } } try { Result transResult = transStatus.getResult( master ); result.add( transResult ); if ( loggingRemoteWork ) { log.logBasic( STR + masterServer.getName() ); log.logBasic( transStatus.getLoggingString() ); } } catch ( Exception e ) { result.setNrErrors( result.getNrErrors() + 1 ); log.logError( STR + masterServer.getName() + STR + e.toString() ); } } return result; }
|
/**
* Gets the clustered transformation result.
*
* @param log
* the log channel interface
* @param transSplitter
* the TransSplitter object
* @param parentJob
* the parent job
* @param loggingRemoteWork
* log remote execution logs locally
* @return the clustered transformation result
*/
|
Gets the clustered transformation result
|
getClusteredTransformationResult
|
{
"repo_name": "alina-ipatina/pentaho-kettle",
"path": "engine/src/org/pentaho/di/trans/Trans.java",
"license": "apache-2.0",
"size": 197880
}
|
[
"org.pentaho.di.cluster.SlaveServer",
"org.pentaho.di.core.Result",
"org.pentaho.di.core.exception.KettleException",
"org.pentaho.di.core.logging.LogChannelInterface",
"org.pentaho.di.job.Job",
"org.pentaho.di.trans.cluster.TransSplitter"
] |
import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.Result; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.job.Job; import org.pentaho.di.trans.cluster.TransSplitter;
|
import org.pentaho.di.cluster.*; import org.pentaho.di.core.*; import org.pentaho.di.core.exception.*; import org.pentaho.di.core.logging.*; import org.pentaho.di.job.*; import org.pentaho.di.trans.cluster.*;
|
[
"org.pentaho.di"
] |
org.pentaho.di;
| 2,578,112
|
@Test
public void testGetPathForMac()
{
Variables variables = new DefaultVariables();
InstallData installData = new AutomatedInstallData(variables, Platforms.MAC_OSX);
System.setProperty("user.dir", "userdir");
variables.set("DEFAULT_INSTALL_PATH", "default");
assertEquals("default", InstallPathHelper.getPath(installData));
// verify TargetPanel.dir.unix overrides DEFAULT_INSTALL_PATH and SYSTEM_user_dir
variables.set("TargetPanel.dir.unix", "1");
assertEquals("1", InstallPathHelper.getPath(installData));
// verify TargetPanel.dir.mac overrides TargetPanel.dir.unix
variables.set("TargetPanel.dir.mac", "2");
assertEquals("2", InstallPathHelper.getPath(installData));
// verify TargetPanel.dir.mac_osx overrides TargetPanel.dir.mac
variables.set("TargetPanel.dir.mac_osx", "3");
assertEquals("3", InstallPathHelper.getPath(installData));
}
|
void function() { Variables variables = new DefaultVariables(); InstallData installData = new AutomatedInstallData(variables, Platforms.MAC_OSX); System.setProperty(STR, STR); variables.set(STR, STR); assertEquals(STR, InstallPathHelper.getPath(installData)); variables.set(STR, "1"); assertEquals("1", InstallPathHelper.getPath(installData)); variables.set(STR, "2"); assertEquals("2", InstallPathHelper.getPath(installData)); variables.set(STR, "3"); assertEquals("3", InstallPathHelper.getPath(installData)); }
|
/**
* Tests the {@link InstallPathHelper#getPath(InstallData)} method for Mac.
* <p/>
* Mac OSX has two parent platforms, Mac and UNIX. This verifies that Mac overrides Unix.
*/
|
Tests the <code>InstallPathHelper#getPath(InstallData)</code> method for Mac. Mac OSX has two parent platforms, Mac and UNIX. This verifies that Mac overrides Unix
|
testGetPathForMac
|
{
"repo_name": "akuhtz/izpack",
"path": "izpack-installer/src/test/java/com/izforge/izpack/installer/util/InstallPathHelperTest.java",
"license": "apache-2.0",
"size": 5546
}
|
[
"com.izforge.izpack.api.data.AutomatedInstallData",
"com.izforge.izpack.api.data.InstallData",
"com.izforge.izpack.api.data.Variables",
"com.izforge.izpack.core.data.DefaultVariables",
"com.izforge.izpack.util.Platforms",
"org.junit.Assert"
] |
import com.izforge.izpack.api.data.AutomatedInstallData; import com.izforge.izpack.api.data.InstallData; import com.izforge.izpack.api.data.Variables; import com.izforge.izpack.core.data.DefaultVariables; import com.izforge.izpack.util.Platforms; import org.junit.Assert;
|
import com.izforge.izpack.api.data.*; import com.izforge.izpack.core.data.*; import com.izforge.izpack.util.*; import org.junit.*;
|
[
"com.izforge.izpack",
"org.junit"
] |
com.izforge.izpack; org.junit;
| 555,603
|
public CostCategoryDetail retrieveMatchingCostCategoryLevelAmongCodes(CostCategoryObjectLevel level);
|
CostCategoryDetail function(CostCategoryObjectLevel level);
|
/**
* Determines if a given cost category object level would have an object level unshared by any cost category object codes
* @param level the cost category object level to check
* @return null if the object level is unique among cost category object codes, otherwise the blocking cost category object code
*/
|
Determines if a given cost category object level would have an object level unshared by any cost category object codes
|
retrieveMatchingCostCategoryLevelAmongCodes
|
{
"repo_name": "kuali/kfs",
"path": "kfs-ar/src/main/java/org/kuali/kfs/module/ar/dataaccess/CostCategoryDao.java",
"license": "agpl-3.0",
"size": 7315
}
|
[
"org.kuali.kfs.module.ar.businessobject.CostCategoryDetail",
"org.kuali.kfs.module.ar.businessobject.CostCategoryObjectLevel"
] |
import org.kuali.kfs.module.ar.businessobject.CostCategoryDetail; import org.kuali.kfs.module.ar.businessobject.CostCategoryObjectLevel;
|
import org.kuali.kfs.module.ar.businessobject.*;
|
[
"org.kuali.kfs"
] |
org.kuali.kfs;
| 1,766,467
|
private static boolean isMultichannelConfigSupported(int channelConfig) {
// check for unsupported channels
if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
Log.e(TAG, "Channel configuration features unsupported channels");
return false;
}
// check for unsupported multichannel combinations:
// - FL/FR must be present
// - L/R channels must be paired (e.g. no single L channel)
final int frontPair =
AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
if ((channelConfig & frontPair) != frontPair) {
Log.e(TAG, "Front channels must be present in multichannel configurations");
return false;
}
final int backPair =
AudioFormat.CHANNEL_OUT_BACK_LEFT | AudioFormat.CHANNEL_OUT_BACK_RIGHT;
if ((channelConfig & backPair) != 0) {
if ((channelConfig & backPair) != backPair) {
Log.e(TAG, "Rear channels can't be used independently");
return false;
}
}
return true;
}
|
static boolean function(int channelConfig) { if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) { Log.e(TAG, STR); return false; } final int frontPair = AudioFormat.CHANNEL_OUT_FRONT_LEFT AudioFormat.CHANNEL_OUT_FRONT_RIGHT; if ((channelConfig & frontPair) != frontPair) { Log.e(TAG, STR); return false; } final int backPair = AudioFormat.CHANNEL_OUT_BACK_LEFT AudioFormat.CHANNEL_OUT_BACK_RIGHT; if ((channelConfig & backPair) != 0) { if ((channelConfig & backPair) != backPair) { Log.e(TAG, STR); return false; } } return true; }
|
/**
* Convenience method to check that the channel configuration (a.k.a channel mask) is supported
* @param channelConfig the mask to validate
* @return false if the AudioTrack can't be used with such a mask
*/
|
Convenience method to check that the channel configuration (a.k.a channel mask) is supported
|
isMultichannelConfigSupported
|
{
"repo_name": "haikuowuya/android_system_code",
"path": "src/android/media/AudioTrack.java",
"license": "apache-2.0",
"size": 49747
}
|
[
"android.util.Log"
] |
import android.util.Log;
|
import android.util.*;
|
[
"android.util"
] |
android.util;
| 609,649
|
public void setLocalNode(DiscoveryNode localNode) {
this.localNode = localNode;
}
|
void function(DiscoveryNode localNode) { this.localNode = localNode; }
|
/**
* makes the transport service aware of the local node. this allows it to optimize requests sent
* from the local node to it self and by pass the network stack/ serialization
*/
|
makes the transport service aware of the local node. this allows it to optimize requests sent from the local node to it self and by pass the network stack/ serialization
|
setLocalNode
|
{
"repo_name": "Kamapcuc/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/transport/TransportService.java",
"license": "apache-2.0",
"size": 36444
}
|
[
"org.elasticsearch.cluster.node.DiscoveryNode"
] |
import org.elasticsearch.cluster.node.DiscoveryNode;
|
import org.elasticsearch.cluster.node.*;
|
[
"org.elasticsearch.cluster"
] |
org.elasticsearch.cluster;
| 1,262,411
|
@Override
public void execute(JobExecutionContext context)
throws JobExecutionException {
broken = false;
// Get data about our job
name = context.getJobDetail().getName();
JobDataMap dataMap = context.getJobDetail().getJobDataMap();
// Make sure the URL is valid
String urlString = dataMap.getString("url");
try {
url = new URL(urlString);
} catch (MalformedURLException ex) {
// This should never fail, we validated the
// url back in housekeeping
broken = true;
log.error("URL is invalid: ", ex);
}
token = dataMap.getString("token");
// MD5 hash our token if we have it
if (token != null) {
token = DigestUtils.md5Hex(token);
}
if (!broken) {
runJob();
}
}
|
void function(JobExecutionContext context) throws JobExecutionException { broken = false; name = context.getJobDetail().getName(); JobDataMap dataMap = context.getJobDetail().getJobDataMap(); String urlString = dataMap.getString("url"); try { url = new URL(urlString); } catch (MalformedURLException ex) { broken = true; log.error(STR, ex); } token = dataMap.getString("token"); if (token != null) { token = DigestUtils.md5Hex(token); } if (!broken) { runJob(); } }
|
/**
* This method will be called by quartz when the job trigger fires.
*
* @param context The execution context of this job, including data.
*/
|
This method will be called by quartz when the job trigger fires
|
execute
|
{
"repo_name": "the-fascinator/fascinator-portal",
"path": "src/main/java/com/googlecode/fascinator/portal/quartz/ExternalJob.java",
"license": "gpl-2.0",
"size": 4436
}
|
[
"java.net.MalformedURLException",
"org.apache.commons.codec.digest.DigestUtils",
"org.quartz.JobDataMap",
"org.quartz.JobExecutionContext",
"org.quartz.JobExecutionException"
] |
import java.net.MalformedURLException; import org.apache.commons.codec.digest.DigestUtils; import org.quartz.JobDataMap; import org.quartz.JobExecutionContext; import org.quartz.JobExecutionException;
|
import java.net.*; import org.apache.commons.codec.digest.*; import org.quartz.*;
|
[
"java.net",
"org.apache.commons",
"org.quartz"
] |
java.net; org.apache.commons; org.quartz;
| 1,299,423
|
protected FocusListener createToolBarFocusListener()
{
return new ToolBarFocusListener();
}
|
FocusListener function() { return new ToolBarFocusListener(); }
|
/**
* This method creates a new FocusListener for the JToolBar.
*
* @return A new FocusListener for the JToolBar.
*/
|
This method creates a new FocusListener for the JToolBar
|
createToolBarFocusListener
|
{
"repo_name": "unofficial-opensource-apple/gcc_40",
"path": "libjava/javax/swing/plaf/basic/BasicToolBarUI.java",
"license": "gpl-2.0",
"size": 37188
}
|
[
"java.awt.event.FocusListener"
] |
import java.awt.event.FocusListener;
|
import java.awt.event.*;
|
[
"java.awt"
] |
java.awt;
| 2,524,296
|
void UpdateReadMessages(Context context, long time, int threadID);
|
void UpdateReadMessages(Context context, long time, int threadID);
|
/**
* Update all messages in the SMS/MMS repositories as read where the time <= the provided time as well as where the threadID matches the provided ID
* @param context The context to utilize
* @param time The 'time' in the SMS database to use when marking messages as read
* (NOTE* for MMS divide by 1000 as MMS repository stores times as 1/1000th of the SMS)
* @param threadID The ID of the thread in the SMS/MMS repositories to update
*/
|
Update all messages in the SMS/MMS repositories as read where the time <= the provided time as well as where the threadID matches the provided ID
|
UpdateReadMessages
|
{
"repo_name": "AdeebNqo/Thula",
"path": "Thula/src/main/java/com/mariussoft/endlessjabber/sdk/IEndlessJabberImplementation.java",
"license": "gpl-3.0",
"size": 2381
}
|
[
"android.content.Context"
] |
import android.content.Context;
|
import android.content.*;
|
[
"android.content"
] |
android.content;
| 1,340,936
|
private Name readIndexedQName() throws IOException {
if (version >= BundleBinding.VERSION_3) {
return readName();
}
int index = in.readInt();
if (index < 0) {
return null;
} else {
String uri = binding.nsIndex.indexToString(index);
String local = binding.nameIndex.indexToString(in.readInt());
return NameFactoryImpl.getInstance().create(uri, local);
}
}
|
Name function() throws IOException { if (version >= BundleBinding.VERSION_3) { return readName(); } int index = in.readInt(); if (index < 0) { return null; } else { String uri = binding.nsIndex.indexToString(index); String local = binding.nameIndex.indexToString(in.readInt()); return NameFactoryImpl.getInstance().create(uri, local); } }
|
/**
* Deserializes an indexed Name
*
* @return the qname
* @throws IOException in an I/O error occurs.
*/
|
Deserializes an indexed Name
|
readIndexedQName
|
{
"repo_name": "tripodsan/jackrabbit",
"path": "jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/util/BundleReader.java",
"license": "apache-2.0",
"size": 23080
}
|
[
"java.io.IOException",
"org.apache.jackrabbit.spi.Name",
"org.apache.jackrabbit.spi.commons.name.NameFactoryImpl"
] |
import java.io.IOException; import org.apache.jackrabbit.spi.Name; import org.apache.jackrabbit.spi.commons.name.NameFactoryImpl;
|
import java.io.*; import org.apache.jackrabbit.spi.*; import org.apache.jackrabbit.spi.commons.name.*;
|
[
"java.io",
"org.apache.jackrabbit"
] |
java.io; org.apache.jackrabbit;
| 639,931
|
public static <T, U extends OutputStream> T withStream(U os, @ClosureParams(value=FirstParam.class) Closure<T> closure) throws IOException {
try {
T result = closure.call(os);
os.flush();
OutputStream temp = os;
os = null;
temp.close();
return result;
} finally {
closeWithWarning(os);
}
}
|
static <T, U extends OutputStream> T function(U os, @ClosureParams(value=FirstParam.class) Closure<T> closure) throws IOException { try { T result = closure.call(os); os.flush(); OutputStream temp = os; os = null; temp.close(); return result; } finally { closeWithWarning(os); } }
|
/**
* Passes this OutputStream to the closure, ensuring that the stream
* is closed after the closure returns, regardless of errors.
*
* @param os the stream which is used and then closed
* @param closure the closure that the stream is passed into
* @return the value returned by the closure
* @throws IOException if an IOException occurs.
* @since 1.5.2
*/
|
Passes this OutputStream to the closure, ensuring that the stream is closed after the closure returns, regardless of errors
|
withStream
|
{
"repo_name": "PascalSchumacher/incubator-groovy",
"path": "src/main/org/codehaus/groovy/runtime/IOGroovyMethods.java",
"license": "apache-2.0",
"size": 62767
}
|
[
"groovy.lang.Closure",
"groovy.transform.stc.ClosureParams",
"groovy.transform.stc.FirstParam",
"java.io.IOException",
"java.io.OutputStream"
] |
import groovy.lang.Closure; import groovy.transform.stc.ClosureParams; import groovy.transform.stc.FirstParam; import java.io.IOException; import java.io.OutputStream;
|
import groovy.lang.*; import groovy.transform.stc.*; import java.io.*;
|
[
"groovy.lang",
"groovy.transform.stc",
"java.io"
] |
groovy.lang; groovy.transform.stc; java.io;
| 186,744
|
void createPostComment(@NonNull TGComment comment, @NonNull String postId, @NonNull TGRequestCallback<TGComment> callback);
|
void createPostComment(@NonNull TGComment comment, @NonNull String postId, @NonNull TGRequestCallback<TGComment> callback);
|
/**
* Create new comment for post
*
* @param comment
* @param postId
* @param callback
*/
|
Create new comment for post
|
createPostComment
|
{
"repo_name": "tapglue/android_sdk",
"path": "v1/tapglue-android-sdk/tapglue-android-sdk/src/main/java/com/tapglue/networking/TGRequests.java",
"license": "apache-2.0",
"size": 12785
}
|
[
"android.support.annotation.NonNull",
"com.tapglue.model.TGComment",
"com.tapglue.networking.requests.TGRequestCallback"
] |
import android.support.annotation.NonNull; import com.tapglue.model.TGComment; import com.tapglue.networking.requests.TGRequestCallback;
|
import android.support.annotation.*; import com.tapglue.model.*; import com.tapglue.networking.requests.*;
|
[
"android.support",
"com.tapglue.model",
"com.tapglue.networking"
] |
android.support; com.tapglue.model; com.tapglue.networking;
| 1,005,699
|
@SuppressWarnings("unchecked")
public final List<String> getList(String property) {
String retValue = getString(property, null);
return (List<String>) (retValue != null ? Arrays.asList(retValue.split(",")) : Collections.emptyList());
}
|
@SuppressWarnings(STR) final List<String> function(String property) { String retValue = getString(property, null); return (List<String>) (retValue != null ? Arrays.asList(retValue.split(",")) : Collections.emptyList()); }
|
/**
* Returns a list of Strings parsed from the comma delimited property value.
*
* @param property name
* @return list of Strings parsed from comma delimited property value
*/
|
Returns a list of Strings parsed from the comma delimited property value
|
getList
|
{
"repo_name": "joval/vngx-jsch",
"path": "src/main/java/org/vngx/jsch/config/JSchConfig.java",
"license": "bsd-3-clause",
"size": 13699
}
|
[
"java.util.Arrays",
"java.util.Collections",
"java.util.List"
] |
import java.util.Arrays; import java.util.Collections; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,693,217
|
public static long getEstimatedNodeCount(NodeState root, String path, boolean max) {
if (USE_OLD_COUNTER) {
return NodeCounterOld.getEstimatedNodeCount(root, path, max);
} else {
return doGetEstimatedNodeCount(root, path, max);
}
}
|
static long function(NodeState root, String path, boolean max) { if (USE_OLD_COUNTER) { return NodeCounterOld.getEstimatedNodeCount(root, path, max); } else { return doGetEstimatedNodeCount(root, path, max); } }
|
/**
* Get the estimated number of nodes for a given path.
*
* @param root the root
* @param path the path
* @param max whether to get the maximum expected number of nodes (the
* stored value plus the resolution)
* @return -1 if unknown, 0 if the node does not exist (or, if max is false,
* if there are probably not many descendant nodes), or the
* (maximum) estimated number of descendant nodes
*/
|
Get the estimated number of nodes for a given path
|
getEstimatedNodeCount
|
{
"repo_name": "mduerig/jackrabbit-oak",
"path": "oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/counter/jmx/NodeCounter.java",
"license": "apache-2.0",
"size": 9564
}
|
[
"org.apache.jackrabbit.oak.spi.state.NodeState"
] |
import org.apache.jackrabbit.oak.spi.state.NodeState;
|
import org.apache.jackrabbit.oak.spi.state.*;
|
[
"org.apache.jackrabbit"
] |
org.apache.jackrabbit;
| 1,987,024
|
private Reagent getReagent(int screenIndex, int reagentIndex)
{
LinkedHashMap<Index, Integer> indexes =
new LinkedHashMap<Index, Integer>();
indexes.put(Index.SCREEN_INDEX, screenIndex);
indexes.put(Index.REAGENT_INDEX, reagentIndex);
return getSourceObject(Reagent.class, indexes);
}
|
Reagent function(int screenIndex, int reagentIndex) { LinkedHashMap<Index, Integer> indexes = new LinkedHashMap<Index, Integer>(); indexes.put(Index.SCREEN_INDEX, screenIndex); indexes.put(Index.REAGENT_INDEX, reagentIndex); return getSourceObject(Reagent.class, indexes); }
|
/**
* Retrieve Reagent
* @param screenIndex the index of the screen
* @param reagentIndex the index of the reagent within the screen
* @return the reagent
*/
|
Retrieve Reagent
|
getReagent
|
{
"repo_name": "will-moore/openmicroscopy",
"path": "components/blitz/src/ome/formats/OMEROMetadataStoreClient.java",
"license": "gpl-2.0",
"size": 308130
}
|
[
"java.util.LinkedHashMap"
] |
import java.util.LinkedHashMap;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,079,998
|
public void setEnableGcDeletes(boolean enableGcDeletes) {
this.enableGcDeletes = enableGcDeletes;
}
/**
* Returns the initial index buffer size. This setting is only read on startup and otherwise controlled
* by {@link IndexingMemoryController}
|
void function(boolean enableGcDeletes) { this.enableGcDeletes = enableGcDeletes; } /** * Returns the initial index buffer size. This setting is only read on startup and otherwise controlled * by {@link IndexingMemoryController}
|
/**
* Enables / disables gc deletes
*
* @see #isEnableGcDeletes()
*/
|
Enables / disables gc deletes
|
setEnableGcDeletes
|
{
"repo_name": "crate/crate",
"path": "server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java",
"license": "apache-2.0",
"size": 14281
}
|
[
"org.elasticsearch.indices.IndexingMemoryController"
] |
import org.elasticsearch.indices.IndexingMemoryController;
|
import org.elasticsearch.indices.*;
|
[
"org.elasticsearch.indices"
] |
org.elasticsearch.indices;
| 2,419,758
|
private void updateNewsData(Map<String, Object> data){
News news = new News(data);
if (news.getIdx() == null)
return;
boolean isExist = false;
for (News item : newsList) {
if (item.getIdx().equals(news.getIdx())) {
item.updateData(data);
isExist = true;
break;
}
}
if (!isExist) {
newsList.add(news);
}
if (newsValueListenerMain != null) {
newsValueListenerMain.onUpdatedNewsList(newsList);
}
if (newsValueListenerForNews != null) {
newsValueListenerForNews.onUpdatedNewsList(newsList);
}
}
|
void function(Map<String, Object> data){ News news = new News(data); if (news.getIdx() == null) return; boolean isExist = false; for (News item : newsList) { if (item.getIdx().equals(news.getIdx())) { item.updateData(data); isExist = true; break; } } if (!isExist) { newsList.add(news); } if (newsValueListenerMain != null) { newsValueListenerMain.onUpdatedNewsList(newsList); } if (newsValueListenerForNews != null) { newsValueListenerForNews.onUpdatedNewsList(newsList); } }
|
/**
* this method is used to update existing news
* @param data : updated data
*/
|
this method is used to update existing news
|
updateNewsData
|
{
"repo_name": "AppHero2/Raffler-Android",
"path": "app/src/main/java/com/raffler/app/classes/AppManager.java",
"license": "mit",
"size": 18254
}
|
[
"com.raffler.app.models.News",
"java.util.Map"
] |
import com.raffler.app.models.News; import java.util.Map;
|
import com.raffler.app.models.*; import java.util.*;
|
[
"com.raffler.app",
"java.util"
] |
com.raffler.app; java.util;
| 1,312,817
|
@DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_ASSET,
defaultValue = "")
@SimpleProperty(
category = PropertyCategory.APPEARANCE,
description = "The screen background image.")
public void BackgroundImage(String path) {
backgroundImagePath = (path == null) ? "" : path;
try {
backgroundDrawable = MediaUtil.getBitmapDrawable(this, backgroundImagePath);
} catch (IOException ioe) {
Log.e(LOG_TAG, "Unable to load " + backgroundImagePath);
backgroundDrawable = null;
}
ViewUtil.setBackgroundImage(frameLayout, backgroundDrawable);
frameLayout.invalidate();
}
|
@DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_ASSET, defaultValue = STRThe screen background image.STRSTRUnable to load " + backgroundImagePath); backgroundDrawable = null; } ViewUtil.setBackgroundImage(frameLayout, backgroundDrawable); frameLayout.invalidate(); }
|
/**
* Specifies the path of the background image.
*
* <p/>See {@link MediaUtil#determineMediaSource} for information about what
* a path can be.
*
* @param path the path of the background image
*/
|
Specifies the path of the background image. See <code>MediaUtil#determineMediaSource</code> for information about what a path can be
|
BackgroundImage
|
{
"repo_name": "JDub20/CodeBlocks",
"path": "appinventor/components/src/com/google/appinventor/components/runtime/Form.java",
"license": "mit",
"size": 50911
}
|
[
"com.google.appinventor.components.annotations.DesignerProperty",
"com.google.appinventor.components.common.PropertyTypeConstants",
"com.google.appinventor.components.runtime.util.ViewUtil"
] |
import com.google.appinventor.components.annotations.DesignerProperty; import com.google.appinventor.components.common.PropertyTypeConstants; import com.google.appinventor.components.runtime.util.ViewUtil;
|
import com.google.appinventor.components.annotations.*; import com.google.appinventor.components.common.*; import com.google.appinventor.components.runtime.util.*;
|
[
"com.google.appinventor"
] |
com.google.appinventor;
| 504,411
|
public static <T> Object2DArrayAssert<T> then(T[][] actual) {
return assertThat(actual);
}
|
static <T> Object2DArrayAssert<T> function(T[][] actual) { return assertThat(actual); }
|
/**
* Creates a new instance of <code>{@link org.assertj.core.api.Object2DArrayAssert}</code>.
*
* @param <T> the actual elements type
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
|
Creates a new instance of <code><code>org.assertj.core.api.Object2DArrayAssert</code></code>
|
then
|
{
"repo_name": "hazendaz/assertj-core",
"path": "src/main/java/org/assertj/core/api/Java6BDDAssertions.java",
"license": "apache-2.0",
"size": 42180
}
|
[
"org.assertj.core.api.Java6Assertions"
] |
import org.assertj.core.api.Java6Assertions;
|
import org.assertj.core.api.*;
|
[
"org.assertj.core"
] |
org.assertj.core;
| 1,763,044
|
Collection<CaseInstance> getCaseInstances(List<CaseStatus> statuses, QueryContext queryContext);
|
Collection<CaseInstance> getCaseInstances(List<CaseStatus> statuses, QueryContext queryContext);
|
/**
* Returns all available active case instances that match given statuses
* @param statuses list of statuses that case should be in to match
* @param queryContext control parameters for the result e.g. sorting, paging
*
*/
|
Returns all available active case instances that match given statuses
|
getCaseInstances
|
{
"repo_name": "droolsjbpm/jbpm",
"path": "jbpm-case-mgmt/jbpm-case-mgmt-api/src/main/java/org/jbpm/casemgmt/api/CaseRuntimeDataService.java",
"license": "apache-2.0",
"size": 19580
}
|
[
"java.util.Collection",
"java.util.List",
"org.jbpm.casemgmt.api.model.CaseStatus",
"org.jbpm.casemgmt.api.model.instance.CaseInstance",
"org.kie.api.runtime.query.QueryContext"
] |
import java.util.Collection; import java.util.List; import org.jbpm.casemgmt.api.model.CaseStatus; import org.jbpm.casemgmt.api.model.instance.CaseInstance; import org.kie.api.runtime.query.QueryContext;
|
import java.util.*; import org.jbpm.casemgmt.api.model.*; import org.jbpm.casemgmt.api.model.instance.*; import org.kie.api.runtime.query.*;
|
[
"java.util",
"org.jbpm.casemgmt",
"org.kie.api"
] |
java.util; org.jbpm.casemgmt; org.kie.api;
| 1,497,179
|
public void remove(JsonPath path);
|
void function(JsonPath path);
|
/**
* Method to contain logic on how a REMOVE operation should be preformed for a resource. An {@link
* InvalidPatchPathException} should be thrown if the path given is not matched.
*
* @param path The {@link JsonPath} for the location of the value to be removed.
*/
|
Method to contain logic on how a REMOVE operation should be preformed for a resource. An <code>InvalidPatchPathException</code> should be thrown if the path given is not matched
|
remove
|
{
"repo_name": "tbugrara/dropwizard-patch",
"path": "src/main/java/io/progix/dropwizard/patch/operations/RemoveOperation.java",
"license": "apache-2.0",
"size": 1477
}
|
[
"io.progix.dropwizard.patch.JsonPath"
] |
import io.progix.dropwizard.patch.JsonPath;
|
import io.progix.dropwizard.patch.*;
|
[
"io.progix.dropwizard"
] |
io.progix.dropwizard;
| 683,496
|
public int getAllowScansWithTraffic() {
try {
return mService.getAllowScansWithTraffic();
} catch (RemoteException e) {
return 0;
}
}
|
int function() { try { return mService.getAllowScansWithTraffic(); } catch (RemoteException e) { return 0; } }
|
/**
* Get setting for allowing Scans when traffic is ongoing.
* @hide
*/
|
Get setting for allowing Scans when traffic is ongoing
|
getAllowScansWithTraffic
|
{
"repo_name": "Ant-Droid/android_frameworks_base_OLD",
"path": "wifi/java/android/net/wifi/WifiManager.java",
"license": "apache-2.0",
"size": 103390
}
|
[
"android.os.RemoteException"
] |
import android.os.RemoteException;
|
import android.os.*;
|
[
"android.os"
] |
android.os;
| 335,579
|
@Test(timeout=10000)
public void testPRBindingOnAttributesWithoutSetterUsingModifies(){
final String str =
"package org.drools.test;\n" +
"\n" +
"import " + Klass4.class.getCanonicalName() + ";\n" +
"\n" +
"global java.util.List list;\n" +
"\n" +
"rule \"Init\"\n" +
"when\n" +
"then\n" +
" insert( new Klass4( \"XXX\", \"White\" ) );\n" +
"end\n" +
"rule \"Get Person name\"\n" +
"salience 1\n" +
"when\n" +
" $x : Klass4( $fullName: fullName )\n" +
"then\n" +
" list.add( $fullName );\n" +
"end\n" +
"rule \"XXX -> Walter\"\n" +
"when\n" +
" $x : Klass4( name == 'XXX' )\n" +
"then\n" +
" modify($x){ setName('Walter') };\n" +
"end\n" +
"\n";
final KieBase kbase = loadKnowledgeBaseFromString(str);
final KieSession ksession = kbase.newKieSession();
final List<String> list = new ArrayList<String>();
ksession.setGlobal("list", list);
ksession.fireAllRules();
assertEquals( 2, list.size() );
assertEquals( Arrays.asList( "XXX White", "Walter White" ), list );
}
|
@Test(timeout=10000) void function(){ final String str = STR + "\n" + STR + Klass4.class.getCanonicalName() + ";\n" + "\n" + STR + "\n" + STRInit\"\n" + STR + STR + STRXXX\STRWhite\STR + "end\n" + STRGet Person name\"\n" + STR + STR + STR + STR + STR + "end\n" + STRXXX -> Walter\"\n" + STR + STR + STR + STR + "end\n" + "\n"; final KieBase kbase = loadKnowledgeBaseFromString(str); final KieSession ksession = kbase.newKieSession(); final List<String> list = new ArrayList<String>(); ksession.setGlobal("list", list); ksession.fireAllRules(); assertEquals( 2, list.size() ); assertEquals( Arrays.asList( STR, STR ), list ); }
|
/**
* Tests the use of PR on bindings involving 'virtual' properties
* of a POJO: calculated properties without a setter.
* getFullName doesn't have a setter but we are explicitly using @Modifies
* in Klass4's setName() and setLastName(). After the name of Kalss4
* instance is modified, rule 'Get Person name' must be re-activated.
*/
|
Tests the use of PR on bindings involving 'virtual' properties getFullName doesn't have a setter but we are explicitly using @Modifies in Klass4's setName() and setLastName(). After the name of Kalss4 instance is modified, rule 'Get Person name' must be re-activated
|
testPRBindingOnAttributesWithoutSetterUsingModifies
|
{
"repo_name": "ngs-mtech/drools",
"path": "drools-compiler/src/test/java/org/drools/compiler/integrationtests/PropertyReactivityTest.java",
"license": "apache-2.0",
"size": 59300
}
|
[
"java.util.ArrayList",
"java.util.Arrays",
"java.util.List",
"org.drools.compiler.Person",
"org.junit.Test",
"org.kie.api.KieBase",
"org.kie.api.runtime.KieSession"
] |
import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.drools.compiler.Person; import org.junit.Test; import org.kie.api.KieBase; import org.kie.api.runtime.KieSession;
|
import java.util.*; import org.drools.compiler.*; import org.junit.*; import org.kie.api.*; import org.kie.api.runtime.*;
|
[
"java.util",
"org.drools.compiler",
"org.junit",
"org.kie.api"
] |
java.util; org.drools.compiler; org.junit; org.kie.api;
| 2,583,029
|
@RequestMapping(value = "configuration/methodPaymentEdit/{id}", method = RequestMethod.POST)
public String chargeMethodPayment(@PathVariable Long id) {
methodPayment = methodPaymentService.findById(id);
return "redirect:/configuration/methodPaymentEdit";
}
|
@RequestMapping(value = STR, method = RequestMethod.POST) String function(@PathVariable Long id) { methodPayment = methodPaymentService.findById(id); return STR; }
|
/**
* Charge method payment.
*
* @param id the id
* @return the string
*/
|
Charge method payment
|
chargeMethodPayment
|
{
"repo_name": "pablogrela/members_cuacfm",
"path": "src/main/java/org/cuacfm/members/web/configuration/MethodPaymentEditController.java",
"license": "apache-2.0",
"size": 3880
}
|
[
"org.springframework.web.bind.annotation.PathVariable",
"org.springframework.web.bind.annotation.RequestMapping",
"org.springframework.web.bind.annotation.RequestMethod"
] |
import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod;
|
import org.springframework.web.bind.annotation.*;
|
[
"org.springframework.web"
] |
org.springframework.web;
| 2,386,646
|
public Period resizeTimeout() {
return this.resizeTimeout;
}
|
Period function() { return this.resizeTimeout; }
|
/**
* Get the default value is 15 minutes. The minimum value is 5 minutes. If you specify a value less than 5 minutes, the Batch service returns an error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request).
*
* @return the resizeTimeout value
*/
|
Get the default value is 15 minutes. The minimum value is 5 minutes. If you specify a value less than 5 minutes, the Batch service returns an error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request)
|
resizeTimeout
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/batch/microsoft-azure-batch/src/main/java/com/microsoft/azure/batch/protocol/models/NodeRemoveParameter.java",
"license": "mit",
"size": 3643
}
|
[
"org.joda.time.Period"
] |
import org.joda.time.Period;
|
import org.joda.time.*;
|
[
"org.joda.time"
] |
org.joda.time;
| 975,466
|
private ObjectNode encodeBandwidthConstraint() {
checkNotNull(constraint, "Bandwidth constraint cannot be null");
final BandwidthConstraint bandwidthConstraint =
(BandwidthConstraint) constraint;
return context.mapper().createObjectNode()
.put("bandwidth", bandwidthConstraint.bandwidth().bps());
}
|
ObjectNode function() { checkNotNull(constraint, STR); final BandwidthConstraint bandwidthConstraint = (BandwidthConstraint) constraint; return context.mapper().createObjectNode() .put(STR, bandwidthConstraint.bandwidth().bps()); }
|
/**
* Encodes a bandwidth constraint.
*
* @return JSON ObjectNode representing the constraint
*/
|
Encodes a bandwidth constraint
|
encodeBandwidthConstraint
|
{
"repo_name": "oplinkoms/onos",
"path": "core/common/src/main/java/org/onosproject/codec/impl/EncodeConstraintCodecHelper.java",
"license": "apache-2.0",
"size": 7994
}
|
[
"com.fasterxml.jackson.databind.node.ObjectNode",
"com.google.common.base.Preconditions",
"org.onosproject.net.intent.constraint.BandwidthConstraint"
] |
import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.base.Preconditions; import org.onosproject.net.intent.constraint.BandwidthConstraint;
|
import com.fasterxml.jackson.databind.node.*; import com.google.common.base.*; import org.onosproject.net.intent.constraint.*;
|
[
"com.fasterxml.jackson",
"com.google.common",
"org.onosproject.net"
] |
com.fasterxml.jackson; com.google.common; org.onosproject.net;
| 1,630,582
|
checkSolenoidModule(m_moduleNumber);
checkSolenoidChannel(m_channel);
ByteBuffer status = ByteBuffer.allocateDirect(4);
status.order(ByteOrder.LITTLE_ENDIAN);
ByteBuffer port = SolenoidJNI.getPortWithModule((byte) m_moduleNumber, (byte) m_channel);
m_solenoid_port = SolenoidJNI.initializeSolenoidPort(port, status.asIntBuffer());
HALUtil.checkStatus(status.asIntBuffer());
LiveWindow.addActuator("Solenoid", m_moduleNumber, m_channel, this);
UsageReporting.report(tResourceType.kResourceType_Solenoid, m_channel, m_moduleNumber);
}
public Solenoid(final int channel) {
super(getDefaultSolenoidModule());
m_channel = channel;
initSolenoid();
}
public Solenoid(final int moduleNumber, final int channel) {
super(moduleNumber);
m_channel = channel;
initSolenoid();
}
|
checkSolenoidModule(m_moduleNumber); checkSolenoidChannel(m_channel); ByteBuffer status = ByteBuffer.allocateDirect(4); status.order(ByteOrder.LITTLE_ENDIAN); ByteBuffer port = SolenoidJNI.getPortWithModule((byte) m_moduleNumber, (byte) m_channel); m_solenoid_port = SolenoidJNI.initializeSolenoidPort(port, status.asIntBuffer()); HALUtil.checkStatus(status.asIntBuffer()); LiveWindow.addActuator(STR, m_moduleNumber, m_channel, this); UsageReporting.report(tResourceType.kResourceType_Solenoid, m_channel, m_moduleNumber); } Solenoid(final int channel) { super(getDefaultSolenoidModule()); m_channel = channel; function(); } Solenoid(final int moduleNumber, final int channel) { super(moduleNumber); m_channel = channel; function(); }
|
/**
* Common function to implement constructor behavior.
*/
|
Common function to implement constructor behavior
|
initSolenoid
|
{
"repo_name": "trc492/Frc2015RecycleRush",
"path": "code/WPILibJ/Solenoid.java",
"license": "mit",
"size": 5223
}
|
[
"edu.wpi.first.wpilibj.communication.UsageReporting",
"edu.wpi.first.wpilibj.hal.HALUtil",
"edu.wpi.first.wpilibj.hal.SolenoidJNI",
"edu.wpi.first.wpilibj.livewindow.LiveWindow",
"java.nio.ByteBuffer",
"java.nio.ByteOrder"
] |
import edu.wpi.first.wpilibj.communication.UsageReporting; import edu.wpi.first.wpilibj.hal.HALUtil; import edu.wpi.first.wpilibj.hal.SolenoidJNI; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import java.nio.ByteBuffer; import java.nio.ByteOrder;
|
import edu.wpi.first.wpilibj.communication.*; import edu.wpi.first.wpilibj.hal.*; import edu.wpi.first.wpilibj.livewindow.*; import java.nio.*;
|
[
"edu.wpi.first",
"java.nio"
] |
edu.wpi.first; java.nio;
| 1,651,745
|
public void setClassLoader(ClassLoader classLoader) {
this.classLoader = classLoader;
}
/**
* Adds the given bundle to the bootstrap.
*
* @param bundle a {@link Bundle}
|
void function(ClassLoader classLoader) { this.classLoader = classLoader; } /** * Adds the given bundle to the bootstrap. * * @param bundle a {@link Bundle}
|
/**
* Sets the bootstrap's class loader.
*/
|
Sets the bootstrap's class loader
|
setClassLoader
|
{
"repo_name": "philandstuff/dropwizard",
"path": "dropwizard-core/src/main/java/io/dropwizard/setup/Bootstrap.java",
"license": "apache-2.0",
"size": 8092
}
|
[
"io.dropwizard.Bundle"
] |
import io.dropwizard.Bundle;
|
import io.dropwizard.*;
|
[
"io.dropwizard"
] |
io.dropwizard;
| 561,938
|
public int getDuration() {
return Dispatch.get(this, "Duration").toInt();
}
|
int function() { return Dispatch.get(this, STR).toInt(); }
|
/**
* Wrapper for calling the ActiveX-Method with input-parameter(s).
*
* @return the result is of type int
*/
|
Wrapper for calling the ActiveX-Method with input-parameter(s)
|
getDuration
|
{
"repo_name": "cpesch/MetaMusic",
"path": "itunes-com-library/src/main/java/slash/metamusic/itunes/com/binding/IITTrack.java",
"license": "gpl-2.0",
"size": 19298
}
|
[
"com.jacob.com.Dispatch"
] |
import com.jacob.com.Dispatch;
|
import com.jacob.com.*;
|
[
"com.jacob.com"
] |
com.jacob.com;
| 229,936
|
@NotNull
public static PsiElementPath forNamespace(@NotNull String namespace) {
try {
return new NamespacePath(namespace);
} catch (IllegalArgumentException e) {
return PsiElementPath.EMPTY;
}
}
|
static PsiElementPath function(@NotNull String namespace) { try { return new NamespacePath(namespace); } catch (IllegalArgumentException e) { return PsiElementPath.EMPTY; } }
|
/**
* Safely builds a new {@link net.venaglia.nondairy.soylang.elements.path.PsiElementPath} for the specified namespace
* name. If the namespace name is malformed, {@link net.venaglia.nondairy.soylang.elements.path.PsiElementPath#EMPTY} is
* returned.
* @param namespace The namespace name to navigate to.
* @return A path object to navigate to the specified nmespace.
*/
|
Safely builds a new <code>net.venaglia.nondairy.soylang.elements.path.PsiElementPath</code> for the specified namespace name. If the namespace name is malformed, <code>net.venaglia.nondairy.soylang.elements.path.PsiElementPath#EMPTY</code> is returned
|
forNamespace
|
{
"repo_name": "Arcnor/Non-Dairy-Soy-Plugin",
"path": "src/net/venaglia/nondairy/soylang/elements/path/NamespacePath.java",
"license": "apache-2.0",
"size": 2773
}
|
[
"org.jetbrains.annotations.NotNull"
] |
import org.jetbrains.annotations.NotNull;
|
import org.jetbrains.annotations.*;
|
[
"org.jetbrains.annotations"
] |
org.jetbrains.annotations;
| 1,865,626
|
public static HARegionQueue getHARegionQueueInstance(String regionName, InternalCache cache,
HARegionQueueAttributes hrqa,
final int haRgnQType,
final boolean isDurable, Map haContainer,
ClientProxyMembershipID clientProxyId,
final byte clientConflation,
boolean isPrimary,
boolean canHandleDelta,
StatisticsClock statisticsClock)
throws IOException, ClassNotFoundException, CacheException, InterruptedException {
HARegionQueue hrq;
switch (haRgnQType) {
case BLOCKING_HA_QUEUE:
if (!isDurable && !canHandleDelta) {
hrq = new BlockingHARegionQueue(regionName, cache, hrqa, haContainer, clientProxyId,
clientConflation, isPrimary, statisticsClock);
} else {
hrq = new DurableHARegionQueue(regionName, cache, hrqa, haContainer, clientProxyId,
clientConflation, isPrimary, statisticsClock);
}
break;
case NON_BLOCKING_HA_QUEUE:
hrq = new HARegionQueue(regionName, cache, haContainer, clientProxyId, clientConflation,
isPrimary, statisticsClock);
break;
default:
throw new IllegalArgumentException(
String.format("haRgnQType can either be BLOCKING ( %s ) or NON BLOCKING ( %s )",
BLOCKING_HA_QUEUE, NON_BLOCKING_HA_QUEUE));
}
if (!isDurable) {
Optional<Integer> expiryTime =
getProductIntegerProperty(HA_REGION_QUEUE_EXPIRY_TIME_PROPERTY);
hrqa.setExpiryTime(expiryTime.orElseGet(hrqa::getExpiryTime));
ExpirationAttributes ea =
new ExpirationAttributes(hrqa.getExpiryTime(), ExpirationAction.LOCAL_INVALIDATE);
hrq.region.getAttributesMutator().setEntryTimeToLive(ea);
}
return hrq;
}
|
static HARegionQueue function(String regionName, InternalCache cache, HARegionQueueAttributes hrqa, final int haRgnQType, final boolean isDurable, Map haContainer, ClientProxyMembershipID clientProxyId, final byte clientConflation, boolean isPrimary, boolean canHandleDelta, StatisticsClock statisticsClock) throws IOException, ClassNotFoundException, CacheException, InterruptedException { HARegionQueue hrq; switch (haRgnQType) { case BLOCKING_HA_QUEUE: if (!isDurable && !canHandleDelta) { hrq = new BlockingHARegionQueue(regionName, cache, hrqa, haContainer, clientProxyId, clientConflation, isPrimary, statisticsClock); } else { hrq = new DurableHARegionQueue(regionName, cache, hrqa, haContainer, clientProxyId, clientConflation, isPrimary, statisticsClock); } break; case NON_BLOCKING_HA_QUEUE: hrq = new HARegionQueue(regionName, cache, haContainer, clientProxyId, clientConflation, isPrimary, statisticsClock); break; default: throw new IllegalArgumentException( String.format(STR, BLOCKING_HA_QUEUE, NON_BLOCKING_HA_QUEUE)); } if (!isDurable) { Optional<Integer> expiryTime = getProductIntegerProperty(HA_REGION_QUEUE_EXPIRY_TIME_PROPERTY); hrqa.setExpiryTime(expiryTime.orElseGet(hrqa::getExpiryTime)); ExpirationAttributes ea = new ExpirationAttributes(hrqa.getExpiryTime(), ExpirationAction.LOCAL_INVALIDATE); hrq.region.getAttributesMutator().setEntryTimeToLive(ea); } return hrq; }
|
/**
* Creates a HARegionQueue object with default attributes
*
* @param regionName uniquely identifies the HARegionQueue in the VM.For HARegionQueues across the
* VM to communicate with each other , the name should be identical
* @param cache Gemfire Cache instance
* @param hrqa HARegionQueueAttribute instance used for configuring the HARegionQueue
* @param haRgnQType int identifying whether the HARegionQueue is of type blocking or non blocking
* @param isPrimary whether this is the primary queue for the client
* @param canHandleDelta boolean indicating whether the HARegionQueue can handle delta or not
* @return an instance of HARegionQueue
*/
|
Creates a HARegionQueue object with default attributes
|
getHARegionQueueInstance
|
{
"repo_name": "smgoller/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/cache/ha/HARegionQueue.java",
"license": "apache-2.0",
"size": 150325
}
|
[
"java.io.IOException",
"java.util.Map",
"java.util.Optional",
"org.apache.geode.cache.CacheException",
"org.apache.geode.cache.ExpirationAction",
"org.apache.geode.cache.ExpirationAttributes",
"org.apache.geode.internal.cache.InternalCache",
"org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID",
"org.apache.geode.internal.lang.SystemPropertyHelper",
"org.apache.geode.internal.statistics.StatisticsClock"
] |
import java.io.IOException; import java.util.Map; import java.util.Optional; import org.apache.geode.cache.CacheException; import org.apache.geode.cache.ExpirationAction; import org.apache.geode.cache.ExpirationAttributes; import org.apache.geode.internal.cache.InternalCache; import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID; import org.apache.geode.internal.lang.SystemPropertyHelper; import org.apache.geode.internal.statistics.StatisticsClock;
|
import java.io.*; import java.util.*; import org.apache.geode.cache.*; import org.apache.geode.internal.cache.*; import org.apache.geode.internal.cache.tier.sockets.*; import org.apache.geode.internal.lang.*; import org.apache.geode.internal.statistics.*;
|
[
"java.io",
"java.util",
"org.apache.geode"
] |
java.io; java.util; org.apache.geode;
| 1,851,830
|
public static void e(String tag, String msg, Exception e, int errorLevel) {
if (errorLevel >= Config.LOGGER_ERROR_LEVEL) {
Log.e(tag, msg, e);
}
}
|
static void function(String tag, String msg, Exception e, int errorLevel) { if (errorLevel >= Config.LOGGER_ERROR_LEVEL) { Log.e(tag, msg, e); } }
|
/**
* Metodo che stampa log di errore con annessa eccezione
*
* @param tag Tag del log
* @param msg Messaggio del log
* @param e Eccezione da loggare
* @param errorLevel livello di errore
*/
|
Metodo che stampa log di errore con annessa eccezione
|
e
|
{
"repo_name": "alessandro1105/Lewe2.0",
"path": "code/android/app/src/main/java/com/lewetechnologies/app/logger/Logger.java",
"license": "apache-2.0",
"size": 3554
}
|
[
"android.util.Log",
"com.lewetechnologies.app.configs.Config"
] |
import android.util.Log; import com.lewetechnologies.app.configs.Config;
|
import android.util.*; import com.lewetechnologies.app.configs.*;
|
[
"android.util",
"com.lewetechnologies.app"
] |
android.util; com.lewetechnologies.app;
| 688,329
|
public void setProperties(Hashtable<?, ?> props)
{
Hashtable<Object, Object> prop2 = (Hashtable<Object, Object>) props;
prop2.put("filters", "CropImageFilter");
if (consumer != null)
consumer.setProperties(prop2);
}
|
void function(Hashtable<?, ?> props) { Hashtable<Object, Object> prop2 = (Hashtable<Object, Object>) props; prop2.put(STR, STR); if (consumer != null) consumer.setProperties(prop2); }
|
/**
* An <code>ImageProducer</code> can set a list of properties
* associated with this image by using this method.
* <br>
* FIXME - What property is set for this class?
*
* @param props the list of properties associated with this image
*/
|
An <code>ImageProducer</code> can set a list of properties associated with this image by using this method. FIXME - What property is set for this class
|
setProperties
|
{
"repo_name": "SanDisk-Open-Source/SSD_Dashboard",
"path": "uefi/gcc/gcc-4.6.3/libjava/classpath/java/awt/image/CropImageFilter.java",
"license": "gpl-2.0",
"size": 7143
}
|
[
"java.util.Hashtable"
] |
import java.util.Hashtable;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 916,668
|
private void assertNumberOfAttributeValuesValid( Attribute attribute ) throws LdapInvalidAttributeValueException
{
if ( attribute.size() > 1 && attribute.getAttributeType().isSingleValued() )
{
throw new LdapInvalidAttributeValueException( ResultCodeEnum.CONSTRAINT_VIOLATION, I18n.err( I18n.ERR_278,
attribute.getUpId() ) );
}
}
|
void function( Attribute attribute ) throws LdapInvalidAttributeValueException { if ( attribute.size() > 1 && attribute.getAttributeType().isSingleValued() ) { throw new LdapInvalidAttributeValueException( ResultCodeEnum.CONSTRAINT_VIOLATION, I18n.err( I18n.ERR_278, attribute.getUpId() ) ); } }
|
/**
* Checks to see numbers of values of attributes conforms to the schema
*/
|
Checks to see numbers of values of attributes conforms to the schema
|
assertNumberOfAttributeValuesValid
|
{
"repo_name": "TremoloSecurity/MyVirtualDirectory",
"path": "server/src/main/java/org/apache/directory/server/core/schema/SchemaInterceptor.java",
"license": "apache-2.0",
"size": 62924
}
|
[
"org.apache.directory.api.ldap.model.entry.Attribute",
"org.apache.directory.api.ldap.model.exception.LdapInvalidAttributeValueException",
"org.apache.directory.api.ldap.model.message.ResultCodeEnum",
"org.apache.directory.server.i18n.I18n"
] |
import org.apache.directory.api.ldap.model.entry.Attribute; import org.apache.directory.api.ldap.model.exception.LdapInvalidAttributeValueException; import org.apache.directory.api.ldap.model.message.ResultCodeEnum; import org.apache.directory.server.i18n.I18n;
|
import org.apache.directory.api.ldap.model.entry.*; import org.apache.directory.api.ldap.model.exception.*; import org.apache.directory.api.ldap.model.message.*; import org.apache.directory.server.i18n.*;
|
[
"org.apache.directory"
] |
org.apache.directory;
| 1,051,148
|
@Deployment
public void testVariableUpdateOrderHistoricTaskInstance() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("historicTask");
org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
assertNotNull(task);
// Update task and process-variable 10 times
for (int i = 0; i < 10; i++) {
taskService.setVariableLocal(task.getId(), "taskVar", i);
runtimeService.setVariable(task.getExecutionId(), "procVar", i);
}
taskService.complete(task.getId());
// Check if all variables have the value for the latest revision
HistoricTaskInstance taskInstance = historyService.createHistoricTaskInstanceQuery()
.taskId(task.getId())
.includeProcessVariables()
.singleResult();
Object varValue = taskInstance.getProcessVariables().get("procVar");
assertEquals(9, varValue);
taskInstance = historyService.createHistoricTaskInstanceQuery()
.taskId(task.getId())
.includeTaskLocalVariables()
.singleResult();
varValue = taskInstance.getTaskLocalVariables().get("taskVar");
assertEquals(9, varValue);
}
|
void function() throws Exception { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey(STR); org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); assertNotNull(task); for (int i = 0; i < 10; i++) { taskService.setVariableLocal(task.getId(), STR, i); runtimeService.setVariable(task.getExecutionId(), STR, i); } taskService.complete(task.getId()); HistoricTaskInstance taskInstance = historyService.createHistoricTaskInstanceQuery() .taskId(task.getId()) .includeProcessVariables() .singleResult(); Object varValue = taskInstance.getProcessVariables().get(STR); assertEquals(9, varValue); taskInstance = historyService.createHistoricTaskInstanceQuery() .taskId(task.getId()) .includeTaskLocalVariables() .singleResult(); varValue = taskInstance.getTaskLocalVariables().get(STR); assertEquals(9, varValue); }
|
/**
* Test to validate fix for ACT-1939: HistoryService loads invalid task local variables for completed task
*/
|
Test to validate fix for ACT-1939: HistoryService loads invalid task local variables for completed task
|
testVariableUpdateOrderHistoricTaskInstance
|
{
"repo_name": "dbmalkovsky/flowable-engine",
"path": "modules/flowable5-test/src/test/java/org/activiti/engine/test/history/HistoricTaskInstanceTest.java",
"license": "apache-2.0",
"size": 40573
}
|
[
"org.flowable.engine.runtime.ProcessInstance",
"org.flowable.task.api.history.HistoricTaskInstance"
] |
import org.flowable.engine.runtime.ProcessInstance; import org.flowable.task.api.history.HistoricTaskInstance;
|
import org.flowable.engine.runtime.*; import org.flowable.task.api.history.*;
|
[
"org.flowable.engine",
"org.flowable.task"
] |
org.flowable.engine; org.flowable.task;
| 1,180,053
|
public static String parse(String sexpr, Map<String, Value> params) {
if (!isWellFormed(sexpr)) {
Log.e(TAG, "Sexpr " + sexpr + " not well formed.");
return null;
}
StreamTokenizer tokenizer = getTokenizer(sexpr);
boolean inExpression = false;
String command = null;
int token;
try {
while ((token = tokenizer.nextToken()) != StreamTokenizer.TT_EOF) {
switch (token) {
case '(':
if (inExpression) {
Log.e(TAG, "Unexpected subexpression!");
return null;
}
inExpression = true;
break;
case StreamTokenizer.TT_WORD:
if (!inExpression) {
Log.e(TAG, "Expression did not start with '('!");
return null;
} else if (command == null) {
command = tokenizer.sval.toLowerCase();
} else {
Log.e(TAG, "Unrecognized bareword [" + tokenizer.sval + "]");
return null;
}
break;
case StreamTokenizer.TT_NUMBER:
if (!inExpression) {
Log.e(TAG, "Expression did not start with '('!");
return null;
}
Log.e(TAG, "Unexpected number '" + tokenizer.nval + "'");
return null;
case '\'':
case ':':
if (!inExpression) {
Log.e(TAG, "Expression did not start with '('!");
return null;
}
// This is the start of the command in the plist. Continue so we can
// grab it the next time around.
if (command == null) {
continue;
}
String paramName = CommandParser.parseParamSymbol(tokenizer);
if (paramName == null) {
Log.e(TAG, "Couldn't parse param name.");
return null;
}
Value paramValue = null;
if (params.containsKey(paramName)) {
paramValue = params.get(paramName);
} else {
paramValue = new Value();
}
if (!CommandParser.parseParamValue(tokenizer, paramValue)) {
Log.e(TAG, "Couldn't parse param name or value.");
return null;
}
params.put(paramName, paramValue);
break;
case ')':
if (!inExpression) {
Log.e(TAG, "Premature end of expression -- did not start with '('!");
return null;
}
if (command == null) {
Log.e(TAG, "Premature end of expression -- no command specified!");
return null;
}
inExpression = false;
break;
default:
Log.v(TAG, "Tokenized unknown token [" + token + "].");
Log.e(TAG, "Unexpected character '" + tokenizer.sval + "'!");
return null;
}
}
if (inExpression) {
Log.e(TAG, "EOL encountered while waiting for end of expression!");
return null;
}
return command;
} catch (IOException e) {
Log.e(TAG, "Caught IOException during parse -- this shouldn't happen!");
return null;
}
}
|
static String function(String sexpr, Map<String, Value> params) { if (!isWellFormed(sexpr)) { Log.e(TAG, STR + sexpr + STR); return null; } StreamTokenizer tokenizer = getTokenizer(sexpr); boolean inExpression = false; String command = null; int token; try { while ((token = tokenizer.nextToken()) != StreamTokenizer.TT_EOF) { switch (token) { case '(': if (inExpression) { Log.e(TAG, STR); return null; } inExpression = true; break; case StreamTokenizer.TT_WORD: if (!inExpression) { Log.e(TAG, STR); return null; } else if (command == null) { command = tokenizer.sval.toLowerCase(); } else { Log.e(TAG, STR + tokenizer.sval + "]"); return null; } break; case StreamTokenizer.TT_NUMBER: if (!inExpression) { Log.e(TAG, STR); return null; } Log.e(TAG, STR + tokenizer.nval + "'"); return null; case '\'': case ':': if (!inExpression) { Log.e(TAG, STR); return null; } if (command == null) { continue; } String paramName = CommandParser.parseParamSymbol(tokenizer); if (paramName == null) { Log.e(TAG, STR); return null; } Value paramValue = null; if (params.containsKey(paramName)) { paramValue = params.get(paramName); } else { paramValue = new Value(); } if (!CommandParser.parseParamValue(tokenizer, paramValue)) { Log.e(TAG, STR); return null; } params.put(paramName, paramValue); break; case ')': if (!inExpression) { Log.e(TAG, STR); return null; } if (command == null) { Log.e(TAG, STR); return null; } inExpression = false; break; default: Log.v(TAG, STR + token + "]."); Log.e(TAG, STR + tokenizer.sval + "'!"); return null; } } if (inExpression) { Log.e(TAG, STR); return null; } return command; } catch (IOException e) { Log.e(TAG, STR); return null; } }
|
/**
* Returns the command and extracts all tagged parameters in an s-expression.
*
* params is updated atomically, and only if the sexpr passed in was formatted
* correctly.
*
* @param sexpr the s-expression to extract the tagged parameters from.
* @param params the {@link Map} to store each {@link Paramenter} in.
* @return a string containing the parsed command name, or null if the sexpr
* was invalid in some way.
*/
|
Returns the command and extracts all tagged parameters in an s-expression. params is updated atomically, and only if the sexpr passed in was formatted correctly
|
parse
|
{
"repo_name": "jtgans/gypsum",
"path": "src/com/theonelab/navi/gypsum/CommandParser.java",
"license": "bsd-2-clause",
"size": 15521
}
|
[
"android.util.Log",
"java.io.IOException",
"java.io.StreamTokenizer",
"java.util.Map"
] |
import android.util.Log; import java.io.IOException; import java.io.StreamTokenizer; import java.util.Map;
|
import android.util.*; import java.io.*; import java.util.*;
|
[
"android.util",
"java.io",
"java.util"
] |
android.util; java.io; java.util;
| 2,008,634
|
private Optional<String> extractIdFromInput(RouteDefinition route) {
if (route.getInput() == null) {
return Optional.empty();
}
FromDefinition from = route.getInput();
String uri = from.getUri();
String[] uriSplitted = uri.split(":");
// needs to have at least 3 fields
if (uriSplitted.length < 3) {
return Optional.empty();
}
String verb = uriSplitted[1];
String contextPath = uriSplitted[2];
String additionalUri = "";
if (uriSplitted.length > 3 && uriSplitted[3].startsWith("/")) {
additionalUri = uriSplitted[3];
}
StringBuilder routeId = new StringBuilder(verb.length() + contextPath.length() + additionalUri.length());
routeId.append(verb);
appendWithSeparator(routeId, prepareUri(contextPath));
if (additionalUri.length() > 0) {
appendWithSeparator(routeId, prepareUri(additionalUri));
}
return Optional.of(routeId.toString());
}
|
Optional<String> function(RouteDefinition route) { if (route.getInput() == null) { return Optional.empty(); } FromDefinition from = route.getInput(); String uri = from.getUri(); String[] uriSplitted = uri.split(":"); if (uriSplitted.length < 3) { return Optional.empty(); } String verb = uriSplitted[1]; String contextPath = uriSplitted[2]; String additionalUri = STR/")) { additionalUri = uriSplitted[3]; } StringBuilder routeId = new StringBuilder(verb.length() + contextPath.length() + additionalUri.length()); routeId.append(verb); appendWithSeparator(routeId, prepareUri(contextPath)); if (additionalUri.length() > 0) { appendWithSeparator(routeId, prepareUri(additionalUri)); } return Optional.of(routeId.toString()); }
|
/**
* Extract id from rest input uri.
*/
|
Extract id from rest input uri
|
extractIdFromInput
|
{
"repo_name": "punkhorn/camel-upstream",
"path": "core/camel-core/src/main/java/org/apache/camel/impl/RouteIdFactory.java",
"license": "apache-2.0",
"size": 6601
}
|
[
"java.util.Optional",
"org.apache.camel.model.FromDefinition",
"org.apache.camel.model.RouteDefinition"
] |
import java.util.Optional; import org.apache.camel.model.FromDefinition; import org.apache.camel.model.RouteDefinition;
|
import java.util.*; import org.apache.camel.model.*;
|
[
"java.util",
"org.apache.camel"
] |
java.util; org.apache.camel;
| 1,892,488
|
public BaseAdapterHelper setChecked(int viewId, boolean checked) {
Checkable view = (Checkable) retrieveView(viewId);
view.setChecked(checked);
return this;
}
|
BaseAdapterHelper function(int viewId, boolean checked) { Checkable view = (Checkable) retrieveView(viewId); view.setChecked(checked); return this; }
|
/**
* Sets the checked status of a checkable.
*
* @param viewId The view id.
* @param checked The checked status;
* @return The BaseAdapterHelper for chaining.
*/
|
Sets the checked status of a checkable
|
setChecked
|
{
"repo_name": "focus-forked-open-source-license/base-adapter-helper-recyclerview",
"path": "library/src/main/java/com/hhl/adapter/BaseAdapterHelper.java",
"license": "apache-2.0",
"size": 12123
}
|
[
"android.widget.Checkable"
] |
import android.widget.Checkable;
|
import android.widget.*;
|
[
"android.widget"
] |
android.widget;
| 2,143,682
|
public List<CmsResource> getUsersPubList(CmsRequestContext context) throws CmsException {
List<CmsResource> result = null;
CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
try {
result = m_driverManager.getUsersPubList(dbc, context.getCurrentUser().getId());
} catch (Exception e) {
dbc.report(
null,
Messages.get().container(Messages.ERR_READ_USER_PUBLIST_1, context.getCurrentUser().getName()),
e);
} finally {
dbc.clear();
}
return result;
}
|
List<CmsResource> function(CmsRequestContext context) throws CmsException { List<CmsResource> result = null; CmsDbContext dbc = m_dbContextFactory.getDbContext(context); try { result = m_driverManager.getUsersPubList(dbc, context.getCurrentUser().getId()); } catch (Exception e) { dbc.report( null, Messages.get().container(Messages.ERR_READ_USER_PUBLIST_1, context.getCurrentUser().getName()), e); } finally { dbc.clear(); } return result; }
|
/**
* Returns the current user's publish list.<p>
*
* @param context the request context
*
* @return the current user's publish list
*
* @throws CmsException if something goes wrong
*/
|
Returns the current user's publish list
|
getUsersPubList
|
{
"repo_name": "sbonoc/opencms-core",
"path": "src/org/opencms/db/CmsSecurityManager.java",
"license": "lgpl-2.1",
"size": 287876
}
|
[
"java.util.List",
"org.opencms.file.CmsRequestContext",
"org.opencms.file.CmsResource",
"org.opencms.main.CmsException"
] |
import java.util.List; import org.opencms.file.CmsRequestContext; import org.opencms.file.CmsResource; import org.opencms.main.CmsException;
|
import java.util.*; import org.opencms.file.*; import org.opencms.main.*;
|
[
"java.util",
"org.opencms.file",
"org.opencms.main"
] |
java.util; org.opencms.file; org.opencms.main;
| 2,580,081
|
@POST
public Response createInstallation(String message) {
return Utils.returnResponse(PrettyJSONPrinter.prettyPrint(new MongoInstallations().createInstallation(message)));
}
|
Response function(String message) { return Utils.returnResponse(PrettyJSONPrinter.prettyPrint(new MongoInstallations().createInstallation(message))); }
|
/**
*
* Create a installation
*/
|
Create a installation
|
createInstallation
|
{
"repo_name": "cassandra-project/platform",
"path": "src/eu/cassandra/server/api/Installations.java",
"license": "apache-2.0",
"size": 2555
}
|
[
"eu.cassandra.server.mongo.MongoInstallations",
"eu.cassandra.server.mongo.util.PrettyJSONPrinter",
"eu.cassandra.sim.utilities.Utils",
"javax.ws.rs.core.Response"
] |
import eu.cassandra.server.mongo.MongoInstallations; import eu.cassandra.server.mongo.util.PrettyJSONPrinter; import eu.cassandra.sim.utilities.Utils; import javax.ws.rs.core.Response;
|
import eu.cassandra.server.mongo.*; import eu.cassandra.server.mongo.util.*; import eu.cassandra.sim.utilities.*; import javax.ws.rs.core.*;
|
[
"eu.cassandra.server",
"eu.cassandra.sim",
"javax.ws"
] |
eu.cassandra.server; eu.cassandra.sim; javax.ws;
| 2,760,029
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.