| method stringlengths 13–441k | clean_method stringlengths 7–313k | doc stringlengths 17–17.3k | comment stringlengths 3–1.42k | method_name stringlengths 1–273 | extra dict | imports list | imports_info stringlengths 19–34.8k | cluster_imports_info stringlengths 15–3.66k | libraries list | libraries_info stringlengths 6–661 | id int64 0–2.92M |
|---|---|---|---|---|---|---|---|---|---|---|---|
private static List<StartNode> getNeverReleasedStartNodes(final IDProvider storeElement) {
List<StartNode> neverReleasedStartNodes = new ArrayList<>();
final IDProvider parentElement = storeElement.getParent();
if (parentElement != null && !"root".equals(parentElement.getUid())) {
neverReleasedStartNodes = getNeverReleasedStartNodes(parentElement);
}
if (storeElement instanceof SiteStoreFolder) {
final SiteStoreFolder storeElementFolder = (SiteStoreFolder) storeElement;
final StartNode startNode = storeElementFolder.getStartNode();
if (startNode != null && !startNode.isInReleaseStore()) {
neverReleasedStartNodes.add(startNode);
}
}
return neverReleasedStartNodes;
}
|
static List<StartNode> function(final IDProvider storeElement) { List<StartNode> neverReleasedStartNodes = new ArrayList<>(); final IDProvider parentElement = storeElement.getParent(); if (parentElement != null && !"root".equals(parentElement.getUid())) { neverReleasedStartNodes = getNeverReleasedStartNodes(parentElement); } if (storeElement instanceof SiteStoreFolder) { final SiteStoreFolder storeElementFolder = (SiteStoreFolder) storeElement; final StartNode startNode = storeElementFolder.getStartNode(); if (startNode != null && !startNode.isInReleaseStore()) { neverReleasedStartNodes.add(startNode); } } return neverReleasedStartNodes; }
|
/**
* Get never released start nodes for each parent sitestore folder.
*
* @param storeElement the store element the workflow started on
* @return list of never released start nodes
*/
|
Get never released start nodes for each parent sitestore folder
|
getNeverReleasedStartNodes
|
{
"repo_name": "e-Spirit/basicworkflows",
"path": "src/main/java/com/espirit/moddev/basicworkflows/release/WfReleaseExecutable.java",
"license": "apache-2.0",
"size": 13199
}
|
[
"de.espirit.firstspirit.access.store.IDProvider",
"de.espirit.firstspirit.access.store.sitestore.SiteStoreFolder",
"de.espirit.firstspirit.access.store.sitestore.StartNode",
"java.util.ArrayList",
"java.util.List"
] |
import de.espirit.firstspirit.access.store.IDProvider; import de.espirit.firstspirit.access.store.sitestore.SiteStoreFolder; import de.espirit.firstspirit.access.store.sitestore.StartNode; import java.util.ArrayList; import java.util.List;
|
import de.espirit.firstspirit.access.store.*; import de.espirit.firstspirit.access.store.sitestore.*; import java.util.*;
|
[
"de.espirit.firstspirit",
"java.util"
] |
de.espirit.firstspirit; java.util;
| 1,552,321
|
public MatrixBlock getMatrixBlock(String varname) {
Data dat = _out.get(varname);
if( dat == null )
throw new DMLException("Non-existent output variable: "+varname);
//basic checks for data type
if( !(dat instanceof MatrixObject) )
throw new DMLException("Expected matrix result '"+varname+"' not a matrix.");
//convert output matrix to double array
MatrixObject mo = (MatrixObject)dat;
MatrixBlock mb = mo.acquireRead();
mo.release();
return mb;
}
|
MatrixBlock function(String varname) { Data dat = _out.get(varname); if( dat == null ) throw new DMLException(STR+varname); if( !(dat instanceof MatrixObject) ) throw new DMLException(STR+varname+STR); MatrixObject mo = (MatrixObject)dat; MatrixBlock mb = mo.acquireRead(); mo.release(); return mb; }
|
/**
* Obtain the matrix represented by the given output variable.
* Calling this method avoids unnecessary output conversions.
*
* @param varname output variable name
* @return matrix as matrix block
*/
|
Obtain the matrix represented by the given output variable. Calling this method avoids unnecessary output conversions
|
getMatrixBlock
|
{
"repo_name": "deroneriksson/incubator-systemml",
"path": "src/main/java/org/apache/sysml/api/jmlc/ResultVariables.java",
"license": "apache-2.0",
"size": 6003
}
|
[
"org.apache.sysml.api.DMLException",
"org.apache.sysml.runtime.controlprogram.caching.MatrixObject",
"org.apache.sysml.runtime.instructions.cp.Data",
"org.apache.sysml.runtime.matrix.data.MatrixBlock"
] |
import org.apache.sysml.api.DMLException; import org.apache.sysml.runtime.controlprogram.caching.MatrixObject; import org.apache.sysml.runtime.instructions.cp.Data; import org.apache.sysml.runtime.matrix.data.MatrixBlock;
|
import org.apache.sysml.api.*; import org.apache.sysml.runtime.controlprogram.caching.*; import org.apache.sysml.runtime.instructions.cp.*; import org.apache.sysml.runtime.matrix.data.*;
|
[
"org.apache.sysml"
] |
org.apache.sysml;
| 2,515,662
|
@Override
public Tab createNewTab(LoadUrlParams loadUrlParams, @TabLaunchType int type, Tab parent) {
return createNewTab(loadUrlParams, type, parent, null);
}
|
Tab function(LoadUrlParams loadUrlParams, @TabLaunchType int type, Tab parent) { return createNewTab(loadUrlParams, type, parent, null); }
|
/**
* Creates a new tab and posts to UI.
* @param loadUrlParams parameters of the url load.
* @param type Information about how the tab was launched.
* @param parent the parent tab, if present.
* @return The new tab.
*/
|
Creates a new tab and posts to UI
|
createNewTab
|
{
"repo_name": "scheib/chromium",
"path": "chrome/android/java/src/org/chromium/chrome/browser/tabmodel/ChromeTabCreator.java",
"license": "bsd-3-clause",
"size": 24589
}
|
[
"org.chromium.chrome.browser.tab.Tab",
"org.chromium.chrome.browser.tab.TabLaunchType",
"org.chromium.content_public.browser.LoadUrlParams"
] |
import org.chromium.chrome.browser.tab.Tab; import org.chromium.chrome.browser.tab.TabLaunchType; import org.chromium.content_public.browser.LoadUrlParams;
|
import org.chromium.chrome.browser.tab.*; import org.chromium.content_public.browser.*;
|
[
"org.chromium.chrome",
"org.chromium.content_public"
] |
org.chromium.chrome; org.chromium.content_public;
| 265,321
|
@ExceptionHandler(RequestTimeoutException.class)
public ResponseEntity<?> requestTimeout(Exception ex, WebRequest webRequest) {
return handleExceptionInternal(ex, ex.getLocalizedMessage(), new HttpHeaders(), HttpStatus.REQUEST_TIMEOUT,
webRequest);
}
|
@ExceptionHandler(RequestTimeoutException.class) ResponseEntity<?> function(Exception ex, WebRequest webRequest) { return handleExceptionInternal(ex, ex.getLocalizedMessage(), new HttpHeaders(), HttpStatus.REQUEST_TIMEOUT, webRequest); }
|
/**
* 408 Request Timeout.
*/
|
408 Request Timeout
|
requestTimeout
|
{
"repo_name": "raulcsj/CPF",
"path": "plugin/src/main/java/core/plugin/spring/GlobalExceptionHandler.java",
"license": "apache-2.0",
"size": 9428
}
|
[
"org.springframework.http.HttpHeaders",
"org.springframework.http.HttpStatus",
"org.springframework.http.ResponseEntity",
"org.springframework.web.bind.annotation.ExceptionHandler",
"org.springframework.web.context.request.WebRequest"
] |
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.context.request.WebRequest;
|
import org.springframework.http.*; import org.springframework.web.bind.annotation.*; import org.springframework.web.context.request.*;
|
[
"org.springframework.http",
"org.springframework.web"
] |
org.springframework.http; org.springframework.web;
| 1,654,598
|
private void removeBadMatches(Dependency dependency) {
final Set<Identifier> identifiers = dependency.getIdentifiers();
final Iterator<Identifier> itr = identifiers.iterator();
//Set<Evidence> groupId = dependency.getVendorEvidence().getEvidence("pom", "groupid");
//Set<Evidence> artifactId = dependency.getVendorEvidence().getEvidence("pom", "artifactid");
while (itr.hasNext()) {
final Identifier i = itr.next();
//TODO move this startsWith expression to the base suppression file
if ("cpe".equals(i.getType())) {
if ((i.getValue().matches(".*c\\+\\+.*")
|| i.getValue().startsWith("cpe:/a:file:file")
|| i.getValue().startsWith("cpe:/a:mozilla:mozilla")
|| i.getValue().startsWith("cpe:/a:cvs:cvs")
|| i.getValue().startsWith("cpe:/a:ftp:ftp")
|| i.getValue().startsWith("cpe:/a:tcp:tcp")
|| i.getValue().startsWith("cpe:/a:ssh:ssh")
|| i.getValue().startsWith("cpe:/a:lookup:lookup"))
&& (dependency.getFileName().toLowerCase().endsWith(".jar")
|| dependency.getFileName().toLowerCase().endsWith("pom.xml")
|| dependency.getFileName().toLowerCase().endsWith(".dll")
|| dependency.getFileName().toLowerCase().endsWith(".exe")
|| dependency.getFileName().toLowerCase().endsWith(".nuspec")
|| dependency.getFileName().toLowerCase().endsWith(".zip")
|| dependency.getFileName().toLowerCase().endsWith(".sar")
|| dependency.getFileName().toLowerCase().endsWith(".apk")
|| dependency.getFileName().toLowerCase().endsWith(".tar")
|| dependency.getFileName().toLowerCase().endsWith(".gz")
|| dependency.getFileName().toLowerCase().endsWith(".tgz")
|| dependency.getFileName().toLowerCase().endsWith(".ear")
|| dependency.getFileName().toLowerCase().endsWith(".war"))) {
itr.remove();
} else if ((i.getValue().startsWith("cpe:/a:jquery:jquery")
|| i.getValue().startsWith("cpe:/a:prototypejs:prototype")
|| i.getValue().startsWith("cpe:/a:yahoo:yui"))
&& (dependency.getFileName().toLowerCase().endsWith(".jar")
|| dependency.getFileName().toLowerCase().endsWith("pom.xml")
|| dependency.getFileName().toLowerCase().endsWith(".dll")
|| dependency.getFileName().toLowerCase().endsWith(".exe"))) {
itr.remove();
} else if ((i.getValue().startsWith("cpe:/a:microsoft:excel")
|| i.getValue().startsWith("cpe:/a:microsoft:word")
|| i.getValue().startsWith("cpe:/a:microsoft:visio")
|| i.getValue().startsWith("cpe:/a:microsoft:powerpoint")
|| i.getValue().startsWith("cpe:/a:microsoft:office")
|| i.getValue().startsWith("cpe:/a:core_ftp:core_ftp"))
&& (dependency.getFileName().toLowerCase().endsWith(".jar")
|| dependency.getFileName().toLowerCase().endsWith(".ear")
|| dependency.getFileName().toLowerCase().endsWith(".war")
|| dependency.getFileName().toLowerCase().endsWith("pom.xml"))) {
itr.remove();
} else if (i.getValue().startsWith("cpe:/a:apache:maven")
&& !dependency.getFileName().toLowerCase().matches("maven-core-[\\d\\.]+\\.jar")) {
itr.remove();
} else if (i.getValue().startsWith("cpe:/a:m-core:m-core")
&& !dependency.getEvidenceUsed().containsUsedString("m-core")) {
itr.remove();
} else if (i.getValue().startsWith("cpe:/a:jboss:jboss")
&& !dependency.getFileName().toLowerCase().matches("jboss-?[\\d\\.-]+(GA)?\\.jar")) {
itr.remove();
}
}
}
}
|
void function(Dependency dependency) { final Set<Identifier> identifiers = dependency.getIdentifiers(); final Iterator<Identifier> itr = identifiers.iterator(); while (itr.hasNext()) { final Identifier i = itr.next(); if ("cpe".equals(i.getType())) { if ((i.getValue().matches(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR)) && (dependency.getFileName().toLowerCase().endsWith(".jar") dependency.getFileName().toLowerCase().endsWith(STR) dependency.getFileName().toLowerCase().endsWith(".dll") dependency.getFileName().toLowerCase().endsWith(".exe") dependency.getFileName().toLowerCase().endsWith(STR) dependency.getFileName().toLowerCase().endsWith(".zip") dependency.getFileName().toLowerCase().endsWith(".sar") dependency.getFileName().toLowerCase().endsWith(".apk") dependency.getFileName().toLowerCase().endsWith(".tar") dependency.getFileName().toLowerCase().endsWith(".gz") dependency.getFileName().toLowerCase().endsWith(".tgz") dependency.getFileName().toLowerCase().endsWith(".ear") dependency.getFileName().toLowerCase().endsWith(".war"))) { itr.remove(); } else if ((i.getValue().startsWith(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR)) && (dependency.getFileName().toLowerCase().endsWith(".jar") dependency.getFileName().toLowerCase().endsWith(STR) dependency.getFileName().toLowerCase().endsWith(".dll") dependency.getFileName().toLowerCase().endsWith(".exe"))) { itr.remove(); } else if ((i.getValue().startsWith(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR) i.getValue().startsWith(STR)) && (dependency.getFileName().toLowerCase().endsWith(".jar") dependency.getFileName().toLowerCase().endsWith(".ear") dependency.getFileName().toLowerCase().endsWith(".war") dependency.getFileName().toLowerCase().endsWith(STR))) { itr.remove(); } else if (i.getValue().startsWith(STR) && !dependency.getFileName().toLowerCase().matches(STR)) { itr.remove(); } else if (i.getValue().startsWith(STR) && !dependency.getEvidenceUsed().containsUsedString(STR)) { itr.remove(); } else if (i.getValue().startsWith(STR) && !dependency.getFileName().toLowerCase().matches(STR)) { itr.remove(); } } } }
|
/**
* Removes bad CPE matches for a dependency. Unfortunately, right now these are hard-coded patches for specific problems
* identified when testing this on a LARGE volume of jar files.
*
* @param dependency the dependency to analyze
*/
|
Removes bad CPE matches for a dependency. Unfortunately, right now these are hard-coded patches for specific problems identified when testing this on a LARGE volume of jar files
|
removeBadMatches
|
{
"repo_name": "sirkkalap/DependencyCheck",
"path": "dependency-check-core/src/main/java/org/owasp/dependencycheck/analyzer/FalsePositiveAnalyzer.java",
"license": "apache-2.0",
"size": 22246
}
|
[
"java.util.Iterator",
"java.util.Set",
"org.owasp.dependencycheck.dependency.Dependency",
"org.owasp.dependencycheck.dependency.Identifier"
] |
import java.util.Iterator; import java.util.Set; import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Identifier;
|
import java.util.*; import org.owasp.dependencycheck.dependency.*;
|
[
"java.util",
"org.owasp.dependencycheck"
] |
java.util; org.owasp.dependencycheck;
| 1,042,645
|
public Arc<T> readLastTargetArc(Arc<T> follow, Arc<T> arc, BytesReader in) throws IOException {
//System.out.println("readLast");
if (!targetHasArcs(follow)) {
//System.out.println(" end node");
assert follow.isFinal();
arc.label = END_LABEL;
arc.target = FINAL_END_NODE;
arc.output = follow.nextFinalOutput;
arc.flags = BIT_LAST_ARC;
return arc;
} else {
in.setPosition(getNodeAddress(follow.target));
arc.node = follow.target;
final byte b = in.readByte();
if (b == ARCS_AS_FIXED_ARRAY) {
// array: jump straight to end
arc.numArcs = in.readVInt();
if (packed || version >= VERSION_VINT_TARGET) {
arc.bytesPerArc = in.readVInt();
} else {
arc.bytesPerArc = in.readInt();
}
//System.out.println(" array numArcs=" + arc.numArcs + " bpa=" + arc.bytesPerArc);
arc.posArcsStart = in.getPosition();
arc.arcIdx = arc.numArcs - 2;
} else {
arc.flags = b;
// non-array: linear scan
arc.bytesPerArc = 0;
//System.out.println(" scan");
while(!arc.isLast()) {
// skip this arc:
readLabel(in);
if (arc.flag(BIT_ARC_HAS_OUTPUT)) {
outputs.skipOutput(in);
}
if (arc.flag(BIT_ARC_HAS_FINAL_OUTPUT)) {
outputs.skipFinalOutput(in);
}
if (arc.flag(BIT_STOP_NODE)) {
} else if (arc.flag(BIT_TARGET_NEXT)) {
} else if (packed) {
in.readVLong();
} else {
readUnpackedNodeTarget(in);
}
arc.flags = in.readByte();
}
// Undo the byte flags we read:
in.skipBytes(-1);
arc.nextArc = in.getPosition();
}
readNextRealArc(arc, in);
assert arc.isLast();
return arc;
}
}
|
Arc<T> function(Arc<T> follow, Arc<T> arc, BytesReader in) throws IOException { if (!targetHasArcs(follow)) { assert follow.isFinal(); arc.label = END_LABEL; arc.target = FINAL_END_NODE; arc.output = follow.nextFinalOutput; arc.flags = BIT_LAST_ARC; return arc; } else { in.setPosition(getNodeAddress(follow.target)); arc.node = follow.target; final byte b = in.readByte(); if (b == ARCS_AS_FIXED_ARRAY) { arc.numArcs = in.readVInt(); if (packed version >= VERSION_VINT_TARGET) { arc.bytesPerArc = in.readVInt(); } else { arc.bytesPerArc = in.readInt(); } arc.posArcsStart = in.getPosition(); arc.arcIdx = arc.numArcs - 2; } else { arc.flags = b; arc.bytesPerArc = 0; while(!arc.isLast()) { readLabel(in); if (arc.flag(BIT_ARC_HAS_OUTPUT)) { outputs.skipOutput(in); } if (arc.flag(BIT_ARC_HAS_FINAL_OUTPUT)) { outputs.skipFinalOutput(in); } if (arc.flag(BIT_STOP_NODE)) { } else if (arc.flag(BIT_TARGET_NEXT)) { } else if (packed) { in.readVLong(); } else { readUnpackedNodeTarget(in); } arc.flags = in.readByte(); } in.skipBytes(-1); arc.nextArc = in.getPosition(); } readNextRealArc(arc, in); assert arc.isLast(); return arc; } }
|
/** Follows the <code>follow</code> arc and reads the last
* arc of its target; this changes the provided
* <code>arc</code> (2nd arg) in-place and returns it.
*
* @return Returns the second argument
* (<code>arc</code>). */
|
Follows the <code>follow</code> arc and reads the last arc of its target; this changes the provided <code>arc</code> (2nd arg) in-place and returns it
|
readLastTargetArc
|
{
"repo_name": "q474818917/solr-5.2.0",
"path": "lucene/core/src/java/org/apache/lucene/util/fst/FST.java",
"license": "apache-2.0",
"size": 62253
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,114,177
|
@Test
public void testAlertHistoryPredicate() throws Exception {
m_helper.installHdfsService(m_cluster, m_serviceFactory,
m_componentFactory, m_schFactory, HOSTNAME);
m_alertHelper.populateData(m_cluster);
Predicate clusterPredicate = null;
Predicate hdfsPredicate = null;
Predicate yarnPredicate = null;
Predicate clusterAndHdfsPredicate = null;
Predicate clusterAndHdfsAndCriticalPredicate = null;
Predicate hdfsAndCriticalOrWarningPredicate = null;
Predicate alertNamePredicate = null;
Predicate historyIdPredicate = null;
clusterPredicate = new PredicateBuilder().property(
AlertHistoryResourceProvider.ALERT_HISTORY_CLUSTER_NAME).equals("c1").toPredicate();
AlertHistoryRequest request = new AlertHistoryRequest();
request.Predicate = clusterPredicate;
List<AlertHistoryEntity> histories = m_dao.findAll(request);
assertEquals(3, histories.size());
hdfsPredicate = new PredicateBuilder().property(
AlertHistoryResourceProvider.ALERT_HISTORY_SERVICE_NAME).equals("HDFS").toPredicate();
yarnPredicate = new PredicateBuilder().property(
AlertHistoryResourceProvider.ALERT_HISTORY_SERVICE_NAME).equals("YARN").toPredicate();
clusterAndHdfsPredicate = new PredicateBuilder().property(
AlertHistoryResourceProvider.ALERT_HISTORY_CLUSTER_NAME).equals("c1").and().property(
AlertHistoryResourceProvider.ALERT_HISTORY_SERVICE_NAME).equals("HDFS").toPredicate();
clusterAndHdfsAndCriticalPredicate = new PredicateBuilder().property(
AlertHistoryResourceProvider.ALERT_HISTORY_CLUSTER_NAME).equals("c1").and().property(
AlertHistoryResourceProvider.ALERT_HISTORY_SERVICE_NAME).equals("HDFS").and().property(
AlertHistoryResourceProvider.ALERT_HISTORY_STATE).equals(
AlertState.CRITICAL.name()).toPredicate();
hdfsAndCriticalOrWarningPredicate = new PredicateBuilder().begin().property(
AlertHistoryResourceProvider.ALERT_HISTORY_SERVICE_NAME).equals("HDFS").and().property(
AlertHistoryResourceProvider.ALERT_HISTORY_STATE).equals(
AlertState.CRITICAL.name()).end().or().property(
AlertHistoryResourceProvider.ALERT_HISTORY_STATE).equals(
AlertState.WARNING.name()).toPredicate();
alertNamePredicate = new PredicateBuilder().property(
AlertHistoryResourceProvider.ALERT_HISTORY_DEFINITION_NAME).equals(
"NAMENODE").toPredicate();
request.Predicate = hdfsPredicate;
histories = m_dao.findAll(request);
assertEquals(2, histories.size());
request.Predicate = yarnPredicate;
histories = m_dao.findAll(request);
assertEquals(1, histories.size());
request.Predicate = clusterAndHdfsPredicate;
histories = m_dao.findAll(request);
assertEquals(2, histories.size());
request.Predicate = clusterAndHdfsAndCriticalPredicate;
histories = m_dao.findAll(request);
assertEquals(0, histories.size());
request.Predicate = hdfsAndCriticalOrWarningPredicate;
histories = m_dao.findAll(request);
assertEquals(1, histories.size());
request.Predicate = alertNamePredicate;
histories = m_dao.findAll(request);
assertEquals(1, histories.size());
historyIdPredicate = new PredicateBuilder().property(
AlertHistoryResourceProvider.ALERT_HISTORY_ID).equals(
histories.get(0).getAlertId()).toPredicate();
request.Predicate = historyIdPredicate;
histories = m_dao.findAll(request);
assertEquals(1, histories.size());
}
|
void function() throws Exception { m_helper.installHdfsService(m_cluster, m_serviceFactory, m_componentFactory, m_schFactory, HOSTNAME); m_alertHelper.populateData(m_cluster); Predicate clusterPredicate = null; Predicate hdfsPredicate = null; Predicate yarnPredicate = null; Predicate clusterAndHdfsPredicate = null; Predicate clusterAndHdfsAndCriticalPredicate = null; Predicate hdfsAndCriticalOrWarningPredicate = null; Predicate alertNamePredicate = null; Predicate historyIdPredicate = null; clusterPredicate = new PredicateBuilder().property( AlertHistoryResourceProvider.ALERT_HISTORY_CLUSTER_NAME).equals("c1").toPredicate(); AlertHistoryRequest request = new AlertHistoryRequest(); request.Predicate = clusterPredicate; List<AlertHistoryEntity> histories = m_dao.findAll(request); assertEquals(3, histories.size()); hdfsPredicate = new PredicateBuilder().property( AlertHistoryResourceProvider.ALERT_HISTORY_SERVICE_NAME).equals("HDFS").toPredicate(); yarnPredicate = new PredicateBuilder().property( AlertHistoryResourceProvider.ALERT_HISTORY_SERVICE_NAME).equals("YARN").toPredicate(); clusterAndHdfsPredicate = new PredicateBuilder().property( AlertHistoryResourceProvider.ALERT_HISTORY_CLUSTER_NAME).equals("c1").and().property( AlertHistoryResourceProvider.ALERT_HISTORY_SERVICE_NAME).equals("HDFS").toPredicate(); clusterAndHdfsAndCriticalPredicate = new PredicateBuilder().property( AlertHistoryResourceProvider.ALERT_HISTORY_CLUSTER_NAME).equals("c1").and().property( AlertHistoryResourceProvider.ALERT_HISTORY_SERVICE_NAME).equals("HDFS").and().property( AlertHistoryResourceProvider.ALERT_HISTORY_STATE).equals( AlertState.CRITICAL.name()).toPredicate(); hdfsAndCriticalOrWarningPredicate = new PredicateBuilder().begin().property( AlertHistoryResourceProvider.ALERT_HISTORY_SERVICE_NAME).equals("HDFS").and().property( AlertHistoryResourceProvider.ALERT_HISTORY_STATE).equals( AlertState.CRITICAL.name()).end().or().property( AlertHistoryResourceProvider.ALERT_HISTORY_STATE).equals( AlertState.WARNING.name()).toPredicate(); alertNamePredicate = new PredicateBuilder().property( AlertHistoryResourceProvider.ALERT_HISTORY_DEFINITION_NAME).equals( STR).toPredicate(); request.Predicate = hdfsPredicate; histories = m_dao.findAll(request); assertEquals(2, histories.size()); request.Predicate = yarnPredicate; histories = m_dao.findAll(request); assertEquals(1, histories.size()); request.Predicate = clusterAndHdfsPredicate; histories = m_dao.findAll(request); assertEquals(2, histories.size()); request.Predicate = clusterAndHdfsAndCriticalPredicate; histories = m_dao.findAll(request); assertEquals(0, histories.size()); request.Predicate = hdfsAndCriticalOrWarningPredicate; histories = m_dao.findAll(request); assertEquals(1, histories.size()); request.Predicate = alertNamePredicate; histories = m_dao.findAll(request); assertEquals(1, histories.size()); historyIdPredicate = new PredicateBuilder().property( AlertHistoryResourceProvider.ALERT_HISTORY_ID).equals( histories.get(0).getAlertId()).toPredicate(); request.Predicate = historyIdPredicate; histories = m_dao.findAll(request); assertEquals(1, histories.size()); }
|
/**
* Tests that the Ambari {@link Predicate} can be converted and submitted to
* JPA correctly to return a restricted result set.
*
* @throws Exception
*/
|
Tests that the Ambari <code>Predicate</code> can be converted and submitted to JPA correctly to return a restricted result set
|
testAlertHistoryPredicate
|
{
"repo_name": "sekikn/ambari",
"path": "ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertsDAOTest.java",
"license": "apache-2.0",
"size": 55052
}
|
[
"java.util.List",
"org.apache.ambari.server.controller.AlertHistoryRequest",
"org.apache.ambari.server.controller.internal.AlertHistoryResourceProvider",
"org.apache.ambari.server.controller.spi.Predicate",
"org.apache.ambari.server.controller.utilities.PredicateBuilder",
"org.apache.ambari.server.orm.entities.AlertHistoryEntity",
"org.apache.ambari.server.state.AlertState",
"org.junit.Assert"
] |
import java.util.List; import org.apache.ambari.server.controller.AlertHistoryRequest; import org.apache.ambari.server.controller.internal.AlertHistoryResourceProvider; import org.apache.ambari.server.controller.spi.Predicate; import org.apache.ambari.server.controller.utilities.PredicateBuilder; import org.apache.ambari.server.orm.entities.AlertHistoryEntity; import org.apache.ambari.server.state.AlertState; import org.junit.Assert;
|
import java.util.*; import org.apache.ambari.server.controller.*; import org.apache.ambari.server.controller.internal.*; import org.apache.ambari.server.controller.spi.*; import org.apache.ambari.server.controller.utilities.*; import org.apache.ambari.server.orm.entities.*; import org.apache.ambari.server.state.*; import org.junit.*;
|
[
"java.util",
"org.apache.ambari",
"org.junit"
] |
java.util; org.apache.ambari; org.junit;
| 2,148,753
|
public DecJurPag getRetencionOsirisDeclarada(){
return GdeDAOFactory.getDecJurPagDAO().getByDecJurAndTipPagDecJur(getId(), TipPagDecJur.ID_RETENCION_OSIRIS);
}
|
DecJurPag function(){ return GdeDAOFactory.getDecJurPagDAO().getByDecJurAndTipPagDecJur(getId(), TipPagDecJur.ID_RETENCION_OSIRIS); }
|
/**
 * Obtains the Afip-declared withholding/perception (Retencion/Percepcion, made by Osiris) associated with the Declaracion Jurada
* @return
*/
|
Obtains the Afip-declared withholding/perception (Retencion/Percepcion, made by Osiris) associated with the Declaracion Jurada
|
getRetencionOsirisDeclarada
|
{
"repo_name": "avdata99/SIAT",
"path": "siat-1.0-SOURCE/src/buss/src/ar/gov/rosario/siat/gde/buss/bean/DecJur.java",
"license": "gpl-3.0",
"size": 30651
}
|
[
"ar.gov.rosario.siat.gde.buss.dao.GdeDAOFactory"
] |
import ar.gov.rosario.siat.gde.buss.dao.GdeDAOFactory;
|
import ar.gov.rosario.siat.gde.buss.dao.*;
|
[
"ar.gov.rosario"
] |
ar.gov.rosario;
| 2,466,230
|
public void setPortrayalCatalogueInfo(final Collection<? extends PortrayalCatalogueReference> newValues) {
portrayalCatalogueInfo = writeCollection(newValues, portrayalCatalogueInfo, PortrayalCatalogueReference.class);
}
|
void function(final Collection<? extends PortrayalCatalogueReference> newValues) { portrayalCatalogueInfo = writeCollection(newValues, portrayalCatalogueInfo, PortrayalCatalogueReference.class); }
|
/**
* Sets information about the catalogue of rules defined for the portrayal of a resource(s).
*
* @param newValues the new portrayal catalog info.
*/
|
Sets information about the catalogue of rules defined for the portrayal of a resource(s)
|
setPortrayalCatalogueInfo
|
{
"repo_name": "apache/sis",
"path": "core/sis-metadata/src/main/java/org/apache/sis/metadata/iso/DefaultMetadata.java",
"license": "apache-2.0",
"size": 75665
}
|
[
"java.util.Collection",
"org.opengis.metadata.PortrayalCatalogueReference"
] |
import java.util.Collection; import org.opengis.metadata.PortrayalCatalogueReference;
|
import java.util.*; import org.opengis.metadata.*;
|
[
"java.util",
"org.opengis.metadata"
] |
java.util; org.opengis.metadata;
| 2,709,072
|
public Date getCreatedTime() {
return mCreatedTime;
}
|
Date function() { return mCreatedTime; }
|
/**
* The time the video was initially published
*/
|
The time the video was initially published
|
getCreatedTime
|
{
"repo_name": "AllanWang/Facebook-Frost",
"path": "simple-facebook/src/main/java/com/sromku/simple/fb/entities/Video.java",
"license": "apache-2.0",
"size": 8508
}
|
[
"java.util.Date"
] |
import java.util.Date;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,313,724
|
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
db = new Database(this);
itemKey = getIntent().getStringExtra("com.gimranov.zandy.app.itemKey");
Item item = Item.load(itemKey, db);
if (item == null) {
Log.e(TAG, "Null item for key: " + itemKey);
finish();
}
itemTitle = item.getTitle();
this.setTitle(getResources().getString(R.string.collections_for_item, itemTitle));
ArrayList<ItemCollection> rows = ItemCollection.getCollections(item, db);
|
void function(Bundle savedInstanceState) { super.onCreate(savedInstanceState); db = new Database(this); itemKey = getIntent().getStringExtra(STR); Item item = Item.load(itemKey, db); if (item == null) { Log.e(TAG, STR + itemKey); finish(); } itemTitle = item.getTitle(); this.setTitle(getResources().getString(R.string.collections_for_item, itemTitle)); ArrayList<ItemCollection> rows = ItemCollection.getCollections(item, db);
|
/**
* Called when the activity is first created.
*/
|
Called when the activity is first created
|
onCreate
|
{
"repo_name": "avram/zandy",
"path": "src/main/java/com/gimranov/zandy/app/CollectionMembershipActivity.java",
"license": "agpl-3.0",
"size": 10676
}
|
[
"android.os.Bundle",
"android.util.Log",
"com.gimranov.zandy.app.data.Database",
"com.gimranov.zandy.app.data.Item",
"com.gimranov.zandy.app.data.ItemCollection",
"java.util.ArrayList"
] |
import android.os.Bundle; import android.util.Log; import com.gimranov.zandy.app.data.Database; import com.gimranov.zandy.app.data.Item; import com.gimranov.zandy.app.data.ItemCollection; import java.util.ArrayList;
|
import android.os.*; import android.util.*; import com.gimranov.zandy.app.data.*; import java.util.*;
|
[
"android.os",
"android.util",
"com.gimranov.zandy",
"java.util"
] |
android.os; android.util; com.gimranov.zandy; java.util;
| 359,922
|
@Override
public void init(IEditorSite site, IEditorInput editorInput) {
setSite(site);
setInputWithNotify(editorInput);
setPartName(editorInput.getName());
site.setSelectionProvider(this);
site.getPage().addPartListener(partListener);
ResourcesPlugin.getWorkspace().addResourceChangeListener(resourceChangeListener,
IResourceChangeEvent.POST_CHANGE);
}
|
void function(IEditorSite site, IEditorInput editorInput) { setSite(site); setInputWithNotify(editorInput); setPartName(editorInput.getName()); site.setSelectionProvider(this); site.getPage().addPartListener(partListener); ResourcesPlugin.getWorkspace().addResourceChangeListener(resourceChangeListener, IResourceChangeEvent.POST_CHANGE); }
|
/**
* This is called during startup.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
*
* @generated
*/
|
This is called during startup.
|
init
|
{
"repo_name": "ylussaud/M2Doc",
"path": "plugins/org.obeonetwork.m2doc.genconf.editor/src-gen/org/obeonetwork/m2doc/genconf/presentation/GenconfEditor.java",
"license": "epl-1.0",
"size": 76057
}
|
[
"org.eclipse.core.resources.IResourceChangeEvent",
"org.eclipse.core.resources.ResourcesPlugin",
"org.eclipse.ui.IEditorInput",
"org.eclipse.ui.IEditorSite"
] |
import org.eclipse.core.resources.IResourceChangeEvent; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.ui.IEditorInput; import org.eclipse.ui.IEditorSite;
|
import org.eclipse.core.resources.*; import org.eclipse.ui.*;
|
[
"org.eclipse.core",
"org.eclipse.ui"
] |
org.eclipse.core; org.eclipse.ui;
| 1,836,603
|
public CarbonReaderBuilder withHadoopConf(String key, String value) {
if (this.hadoopConf == null) {
this.hadoopConf = new Configuration();
}
this.hadoopConf.set(key, value);
return this;
}
|
CarbonReaderBuilder function(String key, String value) { if (this.hadoopConf == null) { this.hadoopConf = new Configuration(); } this.hadoopConf.set(key, value); return this; }
|
/**
* Updates the hadoop configuration with the given key value
*
* @param key key word
* @param value value
* @return this object
*/
|
Updates the hadoop configuration with the given key value
|
withHadoopConf
|
{
"repo_name": "jackylk/incubator-carbondata",
"path": "sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java",
"license": "apache-2.0",
"size": 15338
}
|
[
"org.apache.hadoop.conf.Configuration"
] |
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 1,441,306
|
@XmlElementWrapper(name = "maintenanceOwners")
public List<ApiEntityType> getMaintenanceOwners() {
return maintenanceOwners;
}
|
@XmlElementWrapper(name = STR) List<ApiEntityType> function() { return maintenanceOwners; }
|
/**
* Readonly. The list of objects that trigger this cluster to be in
* maintenance mode.
* Available since API v2.
*/
|
Readonly. The list of objects that trigger this cluster to be in maintenance mode. Available since API v2
|
getMaintenanceOwners
|
{
"repo_name": "justinhayes/cm_api",
"path": "java/src/main/java/com/cloudera/api/model/ApiCluster.java",
"license": "apache-2.0",
"size": 4839
}
|
[
"java.util.List",
"javax.xml.bind.annotation.XmlElementWrapper"
] |
import java.util.List; import javax.xml.bind.annotation.XmlElementWrapper;
|
import java.util.*; import javax.xml.bind.annotation.*;
|
[
"java.util",
"javax.xml"
] |
java.util; javax.xml;
| 207,164
|
public void setContentPane(final Container contentPane) {
getRootPane().setContentPane(contentPane);
}
|
void function(final Container contentPane) { getRootPane().setContentPane(contentPane); }
|
/**
* Sets contentPane property.
*
* @param contentPane - new contentPane property value
*/
|
Sets contentPane property
|
setContentPane
|
{
"repo_name": "skyHALud/codenameone",
"path": "Ports/iOSPort/xmlvm/apache-harmony-6.0-src-r991881/classlib/modules/swing/src/main/java/common/javax/swing/JFrame.java",
"license": "gpl-2.0",
"size": 13329
}
|
[
"java.awt.Container"
] |
import java.awt.Container;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 1,298,405
|
protected void assertHeavyHitterPresent(String heavyHitterOpCode) {
Set<String> heavyHitterOpCodes = Statistics.getCPHeavyHitterOpCodes();
Assert.assertTrue(heavyHitterOpCodes.contains(heavyHitterOpCode));
}
/**
* Runs a program on the CPU
*
* @param spark a valid {@link SparkSession}
|
void function(String heavyHitterOpCode) { Set<String> heavyHitterOpCodes = Statistics.getCPHeavyHitterOpCodes(); Assert.assertTrue(heavyHitterOpCodes.contains(heavyHitterOpCode)); } /** * Runs a program on the CPU * * @param spark a valid {@link SparkSession}
|
/**
* asserts that the expected op was executed
*
* @param heavyHitterOpCode opcode of the heavy hitter for the unary op
*/
|
asserts that the expected op was executed
|
assertHeavyHitterPresent
|
{
"repo_name": "asurve/systemml",
"path": "src/test/java/org/apache/sysml/test/gpu/GPUTests.java",
"license": "apache-2.0",
"size": 14015
}
|
[
"java.util.Set",
"org.apache.spark.sql.SparkSession",
"org.apache.sysml.utils.Statistics",
"org.junit.Assert"
] |
import java.util.Set; import org.apache.spark.sql.SparkSession; import org.apache.sysml.utils.Statistics; import org.junit.Assert;
|
import java.util.*; import org.apache.spark.sql.*; import org.apache.sysml.utils.*; import org.junit.*;
|
[
"java.util",
"org.apache.spark",
"org.apache.sysml",
"org.junit"
] |
java.util; org.apache.spark; org.apache.sysml; org.junit;
| 2,613,536
|
public Object invokeSingleBlockPool(final String bpId, RemoteMethod method)
throws IOException {
String nsId = getNameserviceForBlockPoolId(bpId);
return invokeSingle(nsId, method);
}
|
Object function(final String bpId, RemoteMethod method) throws IOException { String nsId = getNameserviceForBlockPoolId(bpId); return invokeSingle(nsId, method); }
|
/**
* Invokes a ClientProtocol method. Determines the target nameservice using
* the block pool id.
*
* Re-throws exceptions generated by the remote RPC call as either
* RemoteException or IOException.
*
* @param bpId Block pool identifier.
* @param method The remote method and parameters to invoke.
* @return The result of invoking the method.
* @throws IOException
*/
|
Invokes a ClientProtocol method. Determines the target nameservice using the block pool id. Re-throws exceptions generated by the remote RPC call as either RemoteException or IOException
|
invokeSingleBlockPool
|
{
"repo_name": "ChetnaChaudhari/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcClient.java",
"license": "apache-2.0",
"size": 43735
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,671,196
|
public ServiceFuture<Void> disconnectP2sVpnConnectionsAsync(String resourceGroupName, String p2sVpnGatewayName, List<String> vpnConnectionIds, final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromResponse(disconnectP2sVpnConnectionsWithServiceResponseAsync(resourceGroupName, p2sVpnGatewayName, vpnConnectionIds), serviceCallback);
}
|
ServiceFuture<Void> function(String resourceGroupName, String p2sVpnGatewayName, List<String> vpnConnectionIds, final ServiceCallback<Void> serviceCallback) { return ServiceFuture.fromResponse(disconnectP2sVpnConnectionsWithServiceResponseAsync(resourceGroupName, p2sVpnGatewayName, vpnConnectionIds), serviceCallback); }
|
/**
* Disconnect P2S vpn connections of the virtual wan P2SVpnGateway in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param p2sVpnGatewayName The name of the P2S Vpn Gateway.
* @param vpnConnectionIds List of p2s vpn connection Ids.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
|
Disconnect P2S vpn connections of the virtual wan P2SVpnGateway in the specified resource group
|
disconnectP2sVpnConnectionsAsync
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2019_11_01/src/main/java/com/microsoft/azure/management/network/v2019_11_01/implementation/P2sVpnGatewaysInner.java",
"license": "mit",
"size": 129361
}
|
[
"com.microsoft.rest.ServiceCallback",
"com.microsoft.rest.ServiceFuture",
"java.util.List"
] |
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture; import java.util.List;
|
import com.microsoft.rest.*; import java.util.*;
|
[
"com.microsoft.rest",
"java.util"
] |
com.microsoft.rest; java.util;
| 2,579,481
|
@Accessor(name = "size", type = Accessor.Type.Getter)
public static Object size(ExecutionContext cx, Object thisValue) {
SetObject s = thisSetObject(cx, thisValue);
LinkedMap<Object, Void> entries = s.getSetData();
return entries.size();
}
|
@Accessor(name = "size", type = Accessor.Type.Getter) static Object function(ExecutionContext cx, Object thisValue) { SetObject s = thisSetObject(cx, thisValue); LinkedMap<Object, Void> entries = s.getSetData(); return entries.size(); }
|
/**
* 23.2.3.9 get Set.prototype.size
*
* @param cx
* the execution context
* @param thisValue
* the function this-value
* @return the number of entries
*/
|
23.2.3.9 get Set.prototype.size
|
size
|
{
"repo_name": "jugglinmike/es6draft",
"path": "src/main/java/com/github/anba/es6draft/runtime/objects/collection/SetPrototype.java",
"license": "mit",
"size": 10544
}
|
[
"com.github.anba.es6draft.runtime.ExecutionContext",
"com.github.anba.es6draft.runtime.internal.LinkedMap",
"com.github.anba.es6draft.runtime.internal.Properties"
] |
import com.github.anba.es6draft.runtime.ExecutionContext; import com.github.anba.es6draft.runtime.internal.LinkedMap; import com.github.anba.es6draft.runtime.internal.Properties;
|
import com.github.anba.es6draft.runtime.*; import com.github.anba.es6draft.runtime.internal.*;
|
[
"com.github.anba"
] |
com.github.anba;
| 104,292
|
public void receiveUIUpdateEvent() throws RemoteException;
|
void function() throws RemoteException;
|
/**
 * Called when an update of the UI is necessary
* @throws RemoteException
*/
|
Called when an update of the UI is necessary
|
receiveUIUpdateEvent
|
{
"repo_name": "B-Stefan/Risiko",
"path": "Commons/src/main/java/commons/interfaces/IClient.java",
"license": "mit",
"size": 2553
}
|
[
"java.rmi.RemoteException"
] |
import java.rmi.RemoteException;
|
import java.rmi.*;
|
[
"java.rmi"
] |
java.rmi;
| 1,900,919
|
public void setInquiryPrimaryKeys(Map<String, String> inquiryPrimaryKeys) {
this.inquiryPrimaryKeys = inquiryPrimaryKeys;
}
|
void function(Map<String, String> inquiryPrimaryKeys) { this.inquiryPrimaryKeys = inquiryPrimaryKeys; }
|
/**
* Sets the map used to pass primary key values between invocations of the inquiry screens after the start method has been called.
*
* @param inquiryPrimaryKeys
*/
|
Sets the map used to pass primary key values between invocations of the inquiry screens after the start method has been called
|
setInquiryPrimaryKeys
|
{
"repo_name": "sbower/kuali-rice-1",
"path": "kns/src/main/java/org/kuali/rice/kns/web/struts/form/InquiryForm.java",
"license": "apache-2.0",
"size": 17299
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,314,293
|
protected ImportLocationSettings getImportSettings()
{
GroupData group = getSelectedGroup();
ExperimenterData user = getSelectedUser();
switch(dataType)
{
case Importer.PROJECT_TYPE:
DataNode project = getSelectedItem(projectsBox);
DataNode dataset = getSelectedItem(datasetsBox);
return new ProjectImportLocationSettings(group, user,
project, dataset);
case Importer.SCREEN_TYPE:
DataNode screen = getSelectedItem(screensBox);
return new ScreenImportLocationSettings(group, user, screen);
}
return new NullImportSettings(group, user);
}
|
ImportLocationSettings function() { GroupData group = getSelectedGroup(); ExperimenterData user = getSelectedUser(); switch(dataType) { case Importer.PROJECT_TYPE: DataNode project = getSelectedItem(projectsBox); DataNode dataset = getSelectedItem(datasetsBox); return new ProjectImportLocationSettings(group, user, project, dataset); case Importer.SCREEN_TYPE: DataNode screen = getSelectedItem(screensBox); return new ScreenImportLocationSettings(group, user, screen); } return new NullImportSettings(group, user); }
|
/**
 * Returns the import settings chosen by the user.
*
* @return The import settings selected by the user.
*/
|
Returns the import settings chosen by the user
|
getImportSettings
|
{
"repo_name": "stelfrich/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/agents/fsimporter/chooser/LocationDialog.java",
"license": "gpl-2.0",
"size": 56281
}
|
[
"org.openmicroscopy.shoola.agents.fsimporter.view.Importer",
"org.openmicroscopy.shoola.agents.util.browser.DataNode"
] |
import org.openmicroscopy.shoola.agents.fsimporter.view.Importer; import org.openmicroscopy.shoola.agents.util.browser.DataNode;
|
import org.openmicroscopy.shoola.agents.fsimporter.view.*; import org.openmicroscopy.shoola.agents.util.browser.*;
|
[
"org.openmicroscopy.shoola"
] |
org.openmicroscopy.shoola;
| 1,965,290
|
static protected void formatString(byte[] buf, int pos,
int len, String value) throws IOException
{
byte[] src = value.getBytes(ASCII);
int vlen = Math.min(len, src.length);
System.arraycopy(src, 0, buf, pos, vlen);
for(int i = vlen; i < len; i++)
buf[pos + i] = 0;
}
|
static void function(byte[] buf, int pos, int len, String value) throws IOException { byte[] src = value.getBytes(ASCII); int vlen = Math.min(len, src.length); System.arraycopy(src, 0, buf, pos, vlen); for(int i = vlen; i < len; i++) buf[pos + i] = 0; }
|
/** Format a string to a byte array.
* @param buf Buffer to store formatted value.
* @param pos Starting position in buffer.
* @param len Length of field in buffer.
* @param value Value to store in buffer. */
|
Format a string to a byte array
|
formatString
|
{
"repo_name": "crc-corp/iris-its",
"path": "src/us/mn/state/dot/tms/server/comm/ss125/SS125Property.java",
"license": "gpl-2.0",
"size": 12237
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 708,804
|
public History getHistory() {
return this.history;
}
|
History function() { return this.history; }
|
/**
* Get the history of the biological object.
* @return The history object of the object
*/
|
Get the history of the biological object
|
getHistory
|
{
"repo_name": "GenomicParisCentre/nividic",
"path": "src/main/java/fr/ens/transcriptome/nividic/om/impl/BioAssayImpl.java",
"license": "lgpl-2.1",
"size": 23124
}
|
[
"fr.ens.transcriptome.nividic.om.History"
] |
import fr.ens.transcriptome.nividic.om.History;
|
import fr.ens.transcriptome.nividic.om.*;
|
[
"fr.ens.transcriptome"
] |
fr.ens.transcriptome;
| 1,857,783
|
@After
public void tearDownTest() {
verify(mockGroupService);
verify(mockCoreService);
}
public static class GroupJsonMatcher extends TypeSafeMatcher<JsonObject> {
private final Group group;
private final String expectedAppId;
private String reason = "";
public GroupJsonMatcher(Group groupValue, String expectedAppIdValue) {
group = groupValue;
expectedAppId = expectedAppIdValue;
}
|
void function() { verify(mockGroupService); verify(mockCoreService); } public static class GroupJsonMatcher extends TypeSafeMatcher<JsonObject> { private final Group group; private final String expectedAppId; private String reason = ""; public GroupJsonMatcher(Group groupValue, String expectedAppIdValue) { group = groupValue; expectedAppId = expectedAppIdValue; }
|
/**
* Cleans up and verifies the mocks.
*/
|
Cleans up and verifies the mocks
|
tearDownTest
|
{
"repo_name": "gkatsikas/onos",
"path": "web/api/src/test/java/org/onosproject/rest/resources/GroupsResourceTest.java",
"license": "apache-2.0",
"size": 23803
}
|
[
"com.eclipsesource.json.JsonObject",
"org.easymock.EasyMock",
"org.hamcrest.TypeSafeMatcher",
"org.onosproject.net.group.Group"
] |
import com.eclipsesource.json.JsonObject; import org.easymock.EasyMock; import org.hamcrest.TypeSafeMatcher; import org.onosproject.net.group.Group;
|
import com.eclipsesource.json.*; import org.easymock.*; import org.hamcrest.*; import org.onosproject.net.group.*;
|
[
"com.eclipsesource.json",
"org.easymock",
"org.hamcrest",
"org.onosproject.net"
] |
com.eclipsesource.json; org.easymock; org.hamcrest; org.onosproject.net;
| 1,568,397
|
public void init() {
try {
// System.setProperty("javax.xml.transform.TransformerFactory", "org.apache.xalan.processor.TransformerFactoryImpl");
Source etramXSLTSource = new StreamSource(new File(getXSLFilePath("etram")));
TransformerFactory etramTransFact = TransformerFactory.newInstance();
etramCachedXSLT = etramTransFact.newTemplates(etramXSLTSource);
// Transformer trans = cachedXSLT.newTransformer();
// Source ecpXSLTSource = new StreamSource(new File(getXSLFilePath("ecp")));
// TransformerFactory ecpTransFact = TransformerFactory.newInstance();
// ecpCachedXSLT = ecpTransFact.newTemplates(ecpXSLTSource);
} catch (Exception e) {
log.error(e);
}
}
|
void function() { try { Source etramXSLTSource = new StreamSource(new File(getXSLFilePath("etram"))); TransformerFactory etramTransFact = TransformerFactory.newInstance(); etramCachedXSLT = etramTransFact.newTemplates(etramXSLTSource); } catch (Exception e) { log.error(e); } }
|
/**
* Initializes the servlet.
*/
|
Initializes the servlet
|
init
|
{
"repo_name": "ankitbaderiya/code-samples",
"path": "ETRAMInquiryControl.java",
"license": "mit",
"size": 96362
}
|
[
"java.io.File",
"javax.xml.transform.Source",
"javax.xml.transform.TransformerFactory",
"javax.xml.transform.stream.StreamSource"
] |
import java.io.File; import javax.xml.transform.Source; import javax.xml.transform.TransformerFactory; import javax.xml.transform.stream.StreamSource;
|
import java.io.*; import javax.xml.transform.*; import javax.xml.transform.stream.*;
|
[
"java.io",
"javax.xml"
] |
java.io; javax.xml;
| 1,429,256
|
public ServiceFuture<Void> deleteAuthorizationRuleAsync(String resourceGroupName, String namespaceName, String eventHubName, String authorizationRuleName, final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromResponse(deleteAuthorizationRuleWithServiceResponseAsync(resourceGroupName, namespaceName, eventHubName, authorizationRuleName), serviceCallback);
}
|
ServiceFuture<Void> function(String resourceGroupName, String namespaceName, String eventHubName, String authorizationRuleName, final ServiceCallback<Void> serviceCallback) { return ServiceFuture.fromResponse(deleteAuthorizationRuleWithServiceResponseAsync(resourceGroupName, namespaceName, eventHubName, authorizationRuleName), serviceCallback); }
|
/**
* Deletes an Event Hub AuthorizationRule.
*
* @param resourceGroupName Name of the resource group within the azure subscription.
* @param namespaceName The Namespace name
* @param eventHubName The Event Hub name
* @param authorizationRuleName The authorization rule name.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
|
Deletes an Event Hub AuthorizationRule
|
deleteAuthorizationRuleAsync
|
{
"repo_name": "martinsawicki/azure-sdk-for-java",
"path": "azure-mgmt-eventhub/src/main/java/com/microsoft/azure/management/eventhub/implementation/EventHubsInner.java",
"license": "mit",
"size": 94388
}
|
[
"com.microsoft.rest.ServiceCallback",
"com.microsoft.rest.ServiceFuture"
] |
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture;
|
import com.microsoft.rest.*;
|
[
"com.microsoft.rest"
] |
com.microsoft.rest;
| 452,871
|
public static LocalTime parse(CharSequence text, DateTimeFormatter formatter) {
Objects.requireNonNull(formatter, "formatter");
return formatter.parse(text, LocalTime::from);
}
|
static LocalTime function(CharSequence text, DateTimeFormatter formatter) { Objects.requireNonNull(formatter, STR); return formatter.parse(text, LocalTime::from); }
|
/**
* Obtains an instance of {@code LocalTime} from a text string using a specific formatter.
* <p>
* The text is parsed using the formatter, returning a time.
*
* @param text the text to parse, not null
* @param formatter the formatter to use, not null
* @return the parsed local time, not null
* @throws DateTimeParseException if the text cannot be parsed
*/
|
Obtains an instance of LocalTime from a text string using a specific formatter. The text is parsed using the formatter, returning a time
|
parse
|
{
"repo_name": "rokn/Count_Words_2015",
"path": "testing/openjdk2/jdk/src/share/classes/java/time/LocalTime.java",
"license": "mit",
"size": 72189
}
|
[
"java.time.format.DateTimeFormatter",
"java.util.Objects"
] |
import java.time.format.DateTimeFormatter; import java.util.Objects;
|
import java.time.format.*; import java.util.*;
|
[
"java.time",
"java.util"
] |
java.time; java.util;
| 342,337
|
return instance;
}
private GembleCraftingRecipes(){
this.addShapelessRecipe(new ItemStack(Blocks.obsidian, 1), new Object[] {SoulItems.CitrineAmuletStone.get(), Blocks.diamond_block});
this.addShapelessRecipe(new ItemStack(Blocks.diamond_block, 1), new Object[] {SoulItems.CitrineAmuletStone.get(), Blocks.gold_block});
this.addShapelessRecipe(new ItemStack(Blocks.gold_block, 1), new Object[] {SoulItems.CitrineAmuletStone.get(), Blocks.iron_block});
this.addShapelessRecipe(new ItemStack(SoulBlocks.BronzeBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.TinBlock.get()});
this.addShapelessRecipe(new ItemStack(SoulBlocks.BronzeBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.CopperBlock.get()});
this.addShapelessRecipe(new ItemStack(SoulBlocks.SilverBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.BronzeBlock.get()});
this.addShapelessRecipe(new ItemStack(SoulBlocks.CobaltBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.SilverBlock.get()});
this.addShapelessRecipe(new ItemStack(SoulBlocks.BlackdiamondBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.SteelBlock.get()});
this.addShapelessRecipe(new ItemStack(SoulBlocks.OnyxBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.CobaltBlock.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.BurnedMagicFragment.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.BurnedShard.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.EnlightedMagicFragmentHalf.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.LightShard.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.FrozenMagicFragmentHalf.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.FrostShard.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.StrengthMagicFragmentHalf.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.StrengthShard.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.BurnedMagicFragmentAdv.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.BurnedMagicFragment.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.EnlightedMagicFragment.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.EnlightedMagicFragmentHalf.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.FrozenMagicFragment.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.FrozenMagicFragmentHalf.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.StrengthMagicFragment.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.StrengthMagicFragmentHalf.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.EnlightedMagicFragmentAdv.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.EnlightedMagicFragment.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.FrozenMagicFragmentAdv.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.FrozenMagicFragment.get()});
this.addShapelessRecipe(new ItemStack(SoulItems.StrengthMagicFragmentAdv.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.StrengthMagicFragment.get()});
Collections.sort(this.recipes, new GembleTableRecipeSorter(this));
}
|
return instance; } private GembleCraftingRecipes(){ this.addShapelessRecipe(new ItemStack(Blocks.obsidian, 1), new Object[] {SoulItems.CitrineAmuletStone.get(), Blocks.diamond_block}); this.addShapelessRecipe(new ItemStack(Blocks.diamond_block, 1), new Object[] {SoulItems.CitrineAmuletStone.get(), Blocks.gold_block}); this.addShapelessRecipe(new ItemStack(Blocks.gold_block, 1), new Object[] {SoulItems.CitrineAmuletStone.get(), Blocks.iron_block}); this.addShapelessRecipe(new ItemStack(SoulBlocks.BronzeBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.TinBlock.get()}); this.addShapelessRecipe(new ItemStack(SoulBlocks.BronzeBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.CopperBlock.get()}); this.addShapelessRecipe(new ItemStack(SoulBlocks.SilverBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.BronzeBlock.get()}); this.addShapelessRecipe(new ItemStack(SoulBlocks.CobaltBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.SilverBlock.get()}); this.addShapelessRecipe(new ItemStack(SoulBlocks.BlackdiamondBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.SteelBlock.get()}); this.addShapelessRecipe(new ItemStack(SoulBlocks.OnyxBlock.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulBlocks.CobaltBlock.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.BurnedMagicFragment.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.BurnedShard.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.EnlightedMagicFragmentHalf.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.LightShard.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.FrozenMagicFragmentHalf.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.FrostShard.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.StrengthMagicFragmentHalf.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.StrengthShard.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.BurnedMagicFragmentAdv.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.BurnedMagicFragment.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.EnlightedMagicFragment.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.EnlightedMagicFragmentHalf.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.FrozenMagicFragment.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.FrozenMagicFragmentHalf.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.StrengthMagicFragment.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.StrengthMagicFragmentHalf.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.EnlightedMagicFragmentAdv.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.EnlightedMagicFragment.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.FrozenMagicFragmentAdv.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.FrozenMagicFragment.get()}); this.addShapelessRecipe(new ItemStack(SoulItems.StrengthMagicFragmentAdv.get(), 1), new Object[] {SoulItems.CitrineAmuletStone.get(), SoulItems.StrengthMagicFragment.get()}); Collections.sort(this.recipes, new GembleTableRecipeSorter(this)); }
|
/**
* Returns the static instance of this class
*/
|
Returns the static instance of this class
|
getInstance
|
{
"repo_name": "OwnAgePau/Soul-Forest",
"path": "src/main/java/com/Mod_Ores/Blocks/Recipes/GembleCraftingRecipes.java",
"license": "lgpl-2.1",
"size": 10007
}
|
[
"java.util.Collections",
"net.minecraft.init.Blocks",
"net.minecraft.item.ItemStack"
] |
import java.util.Collections; import net.minecraft.init.Blocks; import net.minecraft.item.ItemStack;
|
import java.util.*; import net.minecraft.init.*; import net.minecraft.item.*;
|
[
"java.util",
"net.minecraft.init",
"net.minecraft.item"
] |
java.util; net.minecraft.init; net.minecraft.item;
| 1,500,332
|
@PUT
@Path("namespaces/{namespace}/pods/{podId}")
@Consumes("application/json")
String updatePod(@PathParam("podId") @NotNull String podId, Pod entity, @PathParam("namespace") String namespace) throws Exception;
|
@Path(STR) @Consumes(STR) String updatePod(@PathParam("podId") @NotNull String podId, Pod entity, @PathParam(STR) String namespace) throws Exception;
|
/**
* Update a pod
* @param podId
* @param entity
* @param namespace
*/
|
Update a pod
|
updatePod
|
{
"repo_name": "hekonsek/fabric8",
"path": "components/kubernetes-api/src/main/java/io/fabric8/kubernetes/api/Kubernetes.java",
"license": "apache-2.0",
"size": 9137
}
|
[
"io.fabric8.kubernetes.api.model.Pod",
"javax.validation.constraints.NotNull",
"javax.ws.rs.Consumes",
"javax.ws.rs.Path",
"javax.ws.rs.PathParam"
] |
import io.fabric8.kubernetes.api.model.Pod; import javax.validation.constraints.NotNull; import javax.ws.rs.Consumes; import javax.ws.rs.Path; import javax.ws.rs.PathParam;
|
import io.fabric8.kubernetes.api.model.*; import javax.validation.constraints.*; import javax.ws.rs.*;
|
[
"io.fabric8.kubernetes",
"javax.validation",
"javax.ws"
] |
io.fabric8.kubernetes; javax.validation; javax.ws;
| 1,680,369
|
public static ScanConversion getInstance(InputStreamReader inputStreamReader) {
if (singletonScanConversion == null) {
singletonScanConversion = new ScanConversion(inputStreamReader);
compute_tables();
}
return singletonScanConversion;
}
|
static ScanConversion function(InputStreamReader inputStreamReader) { if (singletonScanConversion == null) { singletonScanConversion = new ScanConversion(inputStreamReader); compute_tables(); } return singletonScanConversion; }
|
/**
* Singleton getInstance method, with InputStreamReader argument
* inputStreamReader holds the simulated data stored in a csv format,
* in a file stored in the assets folder
* @param inputStreamReader
* @return
*/
|
Singleton getInstance method, with InputStreamReader argument inputStreamReader holds the simulated data stored in a csv format, in a file stored in the assets folder
|
getInstance
|
{
"repo_name": "benchoufi/PRJ-medtec_androidapp",
"path": "android-app/app/src/main/java/com/echopen/asso/echopen/preproc/ScanConversion.java",
"license": "bsd-3-clause",
"size": 20997
}
|
[
"java.io.InputStreamReader"
] |
import java.io.InputStreamReader;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,562,769
|
private CaArrayEntityReference searchForExperiment() throws RemoteException, InvalidInputException {
// Search for experiment with the given title.
ExperimentSearchCriteria experimentSearchCriteria = new ExperimentSearchCriteria();
experimentSearchCriteria.setTitle(EXPERIMENT_TITLE);
List<Experiment> experiments = (searchService.searchForExperiments(experimentSearchCriteria, null))
.getResults();
if (experiments == null || experiments.size() <= 0) {
return null;
}
// Multiple experiments with the same name can exist. Here, we're picking the first result.
Experiment experiment = experiments.iterator().next();
return experiment.getReference();
}
|
CaArrayEntityReference function() throws RemoteException, InvalidInputException { ExperimentSearchCriteria experimentSearchCriteria = new ExperimentSearchCriteria(); experimentSearchCriteria.setTitle(EXPERIMENT_TITLE); List<Experiment> experiments = (searchService.searchForExperiments(experimentSearchCriteria, null)) .getResults(); if (experiments == null experiments.size() <= 0) { return null; } Experiment experiment = experiments.iterator().next(); return experiment.getReference(); }
|
/**
* Search for an experiment based on its title.
*/
|
Search for an experiment based on its title
|
searchForExperiment
|
{
"repo_name": "NCIP/caarray",
"path": "api_client_examples/java_api_client_examples_v1_0/src/caarray/client/examples/java/LookUpEntities.java",
"license": "bsd-3-clause",
"size": 24748
}
|
[
"gov.nih.nci.caarray.external.v1_0.CaArrayEntityReference",
"gov.nih.nci.caarray.external.v1_0.experiment.Experiment",
"gov.nih.nci.caarray.external.v1_0.query.ExperimentSearchCriteria",
"gov.nih.nci.caarray.services.external.v1_0.InvalidInputException",
"java.rmi.RemoteException",
"java.util.List"
] |
import gov.nih.nci.caarray.external.v1_0.CaArrayEntityReference; import gov.nih.nci.caarray.external.v1_0.experiment.Experiment; import gov.nih.nci.caarray.external.v1_0.query.ExperimentSearchCriteria; import gov.nih.nci.caarray.services.external.v1_0.InvalidInputException; import java.rmi.RemoteException; import java.util.List;
|
import gov.nih.nci.caarray.external.v1_0.*; import gov.nih.nci.caarray.external.v1_0.experiment.*; import gov.nih.nci.caarray.external.v1_0.query.*; import gov.nih.nci.caarray.services.external.v1_0.*; import java.rmi.*; import java.util.*;
|
[
"gov.nih.nci",
"java.rmi",
"java.util"
] |
gov.nih.nci; java.rmi; java.util;
| 588,671
|
public Adapter createThoroughfareNumberRangeTypeAdapter() {
return null;
}
|
Adapter function() { return null; }
|
/**
* Creates a new adapter for an object of class '{@link org.oasis.xAL.ThoroughfareNumberRangeType <em>Thoroughfare Number Range Type</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.oasis.xAL.ThoroughfareNumberRangeType
* @generated
*/
|
Creates a new adapter for an object of class '<code>org.oasis.xAL.ThoroughfareNumberRangeType Thoroughfare Number Range Type</code>'. This default implementation returns null so that we can easily ignore cases; it's useful to ignore a case when inheritance will catch all the cases anyway.
|
createThoroughfareNumberRangeTypeAdapter
|
{
"repo_name": "markus1978/citygml4emf",
"path": "de.hub.citygml.emf.ecore/src/org/oasis/xAL/util/XALAdapterFactory.java",
"license": "apache-2.0",
"size": 61937
}
|
[
"org.eclipse.emf.common.notify.Adapter"
] |
import org.eclipse.emf.common.notify.Adapter;
|
import org.eclipse.emf.common.notify.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,353,582
|
@Override
public boolean modelGetterMethodGenerated(Method method, TopLevelClass topLevelClass,
IntrospectedColumn introspectedColumn, IntrospectedTable introspectedTable, ModelClassType modelClassType) {
return false;
}
|
boolean function(Method method, TopLevelClass topLevelClass, IntrospectedColumn introspectedColumn, IntrospectedTable introspectedTable, ModelClassType modelClassType) { return false; }
|
/**
* Prevents all getters from being generated. See SimpleModelGenerator
*
* @param method
* @param topLevelClass
* @param introspectedColumn
* @param introspectedTable
* @param modelClassType
* @return
*/
|
Prevents all getters from being generated. See SimpleModelGenerator
|
modelGetterMethodGenerated
|
{
"repo_name": "ishiitky/mybatis-generator-plugins",
"path": "src/main/java/mybatis/generator/plugins/LombokPlugin.java",
"license": "apache-2.0",
"size": 3009
}
|
[
"org.mybatis.generator.api.IntrospectedColumn",
"org.mybatis.generator.api.IntrospectedTable",
"org.mybatis.generator.api.dom.java.Method",
"org.mybatis.generator.api.dom.java.TopLevelClass"
] |
import org.mybatis.generator.api.IntrospectedColumn; import org.mybatis.generator.api.IntrospectedTable; import org.mybatis.generator.api.dom.java.Method; import org.mybatis.generator.api.dom.java.TopLevelClass;
|
import org.mybatis.generator.api.*; import org.mybatis.generator.api.dom.java.*;
|
[
"org.mybatis.generator"
] |
org.mybatis.generator;
| 320,619
|
private void overrideTransformationOptions(TransformationOptions options)
{
// Set any transformation options overrides if we can
if(options != null && transformationOptionOverrides != null)
{
for(String key : transformationOptionOverrides.keySet())
{
if(PropertyUtils.isWriteable(options, key))
{
try
{
PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(options, key);
Class<?> propertyClass = pd.getPropertyType();
Object value = transformationOptionOverrides.get(key);
if(value != null)
{
if(propertyClass.isInstance(value))
{
// Nothing to do
}
else if(value instanceof String && propertyClass.isInstance(Boolean.TRUE))
{
// Use relaxed converter
value = TransformationOptions.relaxedBooleanTypeConverter.convert((String)value);
}
else
{
value = DefaultTypeConverter.INSTANCE.convert(propertyClass, value);
}
}
PropertyUtils.setProperty(options, key, value);
}
catch(NoSuchMethodException nsme) {}
catch(InvocationTargetException ite) {}
catch(IllegalAccessException iae) {}
}
else
{
logger.warn("Unable to set override Transformation Option " + key + " on " + options);
}
}
}
}
|
void function(TransformationOptions options) { if(options != null && transformationOptionOverrides != null) { for(String key : transformationOptionOverrides.keySet()) { if(PropertyUtils.isWriteable(options, key)) { try { PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(options, key); Class<?> propertyClass = pd.getPropertyType(); Object value = transformationOptionOverrides.get(key); if(value != null) { if(propertyClass.isInstance(value)) { } else if(value instanceof String && propertyClass.isInstance(Boolean.TRUE)) { value = TransformationOptions.relaxedBooleanTypeConverter.convert((String)value); } else { value = DefaultTypeConverter.INSTANCE.convert(propertyClass, value); } } PropertyUtils.setProperty(options, key, value); } catch(NoSuchMethodException nsme) {} catch(InvocationTargetException ite) {} catch(IllegalAccessException iae) {} } else { logger.warn(STR + key + STR + options); } } } }
|
/**
* Sets any transformation option overrides it can.
*/
|
Sets any transformation option overrides it can
|
overrideTransformationOptions
|
{
"repo_name": "loftuxab/alfresco-community-loftux",
"path": "projects/repository/source/java/org/alfresco/repo/content/transform/ComplexContentTransformer.java",
"license": "lgpl-3.0",
"size": 22821
}
|
[
"java.beans.PropertyDescriptor",
"java.lang.reflect.InvocationTargetException",
"org.alfresco.service.cmr.repository.TransformationOptions",
"org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter",
"org.apache.commons.beanutils.PropertyUtils"
] |
import java.beans.PropertyDescriptor; import java.lang.reflect.InvocationTargetException; import org.alfresco.service.cmr.repository.TransformationOptions; import org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter; import org.apache.commons.beanutils.PropertyUtils;
|
import java.beans.*; import java.lang.reflect.*; import org.alfresco.service.cmr.repository.*; import org.alfresco.service.cmr.repository.datatype.*; import org.apache.commons.beanutils.*;
|
[
"java.beans",
"java.lang",
"org.alfresco.service",
"org.apache.commons"
] |
java.beans; java.lang; org.alfresco.service; org.apache.commons;
| 2,073,979
|
@Test
public void testConstructor06() {
// given
int session = 1;
List<FlowState> state = new ArrayList<>();
// when
WebFlow wf = new WebFlow(session, TestFlowEventProcessor.class, state);
// then
assertFalse(wf.hasPrevious());
}
|
void function() { int session = 1; List<FlowState> state = new ArrayList<>(); WebFlow wf = new WebFlow(session, TestFlowEventProcessor.class, state); assertFalse(wf.hasPrevious()); }
|
/**
* testConstructor06().
* hasPrevious()
*/
|
testConstructor06(). hasPrevious()
|
testConstructor06
|
{
"repo_name": "formkiq/formkiq-server",
"path": "webflow/src/test/java/com/formkiq/webflow/WebFlowTest.java",
"license": "apache-2.0",
"size": 4463
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.junit.Assert"
] |
import java.util.ArrayList; import java.util.List; import org.junit.Assert;
|
import java.util.*; import org.junit.*;
|
[
"java.util",
"org.junit"
] |
java.util; org.junit;
| 2,127,980
|
public void addIssuerSerial(String X509IssuerName, String X509SerialNumber) {
this.add(new XMLX509IssuerSerial(this.doc, X509IssuerName, X509SerialNumber));
}
|
void function(String X509IssuerName, String X509SerialNumber) { this.add(new XMLX509IssuerSerial(this.doc, X509IssuerName, X509SerialNumber)); }
|
/**
* Method addIssuerSerial
*
* @param X509IssuerName
* @param X509SerialNumber
*/
|
Method addIssuerSerial
|
addIssuerSerial
|
{
"repo_name": "rokn/Count_Words_2015",
"path": "testing/openjdk2/jdk/src/share/classes/com/sun/org/apache/xml/internal/security/keys/content/X509Data.java",
"license": "mit",
"size": 14847
}
|
[
"com.sun.org.apache.xml.internal.security.keys.content.x509.XMLX509IssuerSerial"
] |
import com.sun.org.apache.xml.internal.security.keys.content.x509.XMLX509IssuerSerial;
|
import com.sun.org.apache.xml.internal.security.keys.content.x509.*;
|
[
"com.sun.org"
] |
com.sun.org;
| 520,013
|
int getBundleStartLevel(Bundle bundle)
{
if (bundle.getState() == Bundle.UNINSTALLED)
{
throw new IllegalArgumentException("Bundle is uninstalled.");
}
return ((BundleImpl) bundle).getStartLevel(getInitialBundleStartLevel());
}
|
int getBundleStartLevel(Bundle bundle) { if (bundle.getState() == Bundle.UNINSTALLED) { throw new IllegalArgumentException(STR); } return ((BundleImpl) bundle).getStartLevel(getInitialBundleStartLevel()); }
|
/**
* Returns the start level for the specified bundle; this method
* implements functionality for the Start Level service.
* @param bundle The bundle to examine.
* @return The start level of the specified bundle.
* @throws java.lang.IllegalArgumentException If the specified
* bundle has been uninstalled.
**/
|
Returns the start level for the specified bundle; this method implements functionality for the Start Level service
|
getBundleStartLevel
|
{
"repo_name": "aosgi/org.apache.felix.framework",
"path": "src/main/java/org/apache/felix/framework/Felix.java",
"license": "apache-2.0",
"size": 200994
}
|
[
"org.osgi.framework.Bundle"
] |
import org.osgi.framework.Bundle;
|
import org.osgi.framework.*;
|
[
"org.osgi.framework"
] |
org.osgi.framework;
| 2,271,562
|
public User loadUserAuthentication(String username) throws RemoteException {
UserDetails userDetails = userDetailsService
.loadUserByUsername(username);
if (userDetails == null) {
throw new RemoteException(
"Cannot authorize. No security credential found for "
+ username);
}
User user = new User(userDetails.getUsername(), userDetails
.getPassword(), true, true, true, true, userDetails
.getAuthorities());
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
user, "password", user.getAuthorities());
SecurityContextHolder.getContext().setAuthentication(token);
return user;
}
|
User function(String username) throws RemoteException { UserDetails userDetails = userDetailsService .loadUserByUsername(username); if (userDetails == null) { throw new RemoteException( STR + username); } User user = new User(userDetails.getUsername(), userDetails .getPassword(), true, true, true, true, userDetails .getAuthorities()); UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken( user, STR, user.getAuthorities()); SecurityContextHolder.getContext().setAuthentication(token); return user; }
|
/**
* Load user authentication.
*
* @param username
* the username
*
* @return the user
*
* @throws RemoteException
* the remote exception
*/
|
Load user authentication
|
loadUserAuthentication
|
{
"repo_name": "NCIP/c3pr",
"path": "codebase/projects/core/src/java/edu/duke/cabig/c3pr/utils/GridSecurityUtils.java",
"license": "bsd-3-clause",
"size": 3467
}
|
[
"java.rmi.RemoteException",
"org.acegisecurity.context.SecurityContextHolder",
"org.acegisecurity.providers.UsernamePasswordAuthenticationToken",
"org.acegisecurity.userdetails.User",
"org.acegisecurity.userdetails.UserDetails"
] |
import java.rmi.RemoteException; import org.acegisecurity.context.SecurityContextHolder; import org.acegisecurity.providers.UsernamePasswordAuthenticationToken; import org.acegisecurity.userdetails.User; import org.acegisecurity.userdetails.UserDetails;
|
import java.rmi.*; import org.acegisecurity.context.*; import org.acegisecurity.providers.*; import org.acegisecurity.userdetails.*;
|
[
"java.rmi",
"org.acegisecurity.context",
"org.acegisecurity.providers",
"org.acegisecurity.userdetails"
] |
java.rmi; org.acegisecurity.context; org.acegisecurity.providers; org.acegisecurity.userdetails;
| 694,687
|
@DELETE
@RolesAllowed({ Role.ADMIN })
@Path("/{thingUID}")
@ApiOperation(value = "Removes a thing from the registry. Set \'force\' to __true__ if you want the thing te be removed immediately.")
@ApiResponses(value = { @ApiResponse(code = 200, message = "OK, was deleted."),
@ApiResponse(code = 202, message = "ACCEPTED for asynchronous deletion."),
@ApiResponse(code = 404, message = "Thing not found."),
@ApiResponse(code = 409, message = "Thing could not be deleted because it's not editable.") })
public Response remove(@HeaderParam(HttpHeaders.ACCEPT_LANGUAGE) @ApiParam(value = "language") String language,
@PathParam("thingUID") @ApiParam(value = "thingUID") String thingUID,
@DefaultValue("false") @QueryParam("force") @ApiParam(value = "force") boolean force) {
final Locale locale = localeService.getLocale(language);
ThingUID thingUIDObject = new ThingUID(thingUID);
// check whether thing exists and throw 404 if not
Thing thing = thingRegistry.get(thingUIDObject);
if (thing == null) {
logger.info("Received HTTP DELETE request for update at '{}' for the unknown thing '{}'.",
uriInfo.getPath(), thingUID);
return getThingNotFoundResponse(thingUID);
}
// ask whether the Thing exists as a managed thing, so it can get
// updated, 409 otherwise
Thing managed = managedThingProvider.get(thingUIDObject);
if (null == managed) {
logger.info("Received HTTP DELETE request for update at '{}' for an unmanaged thing '{}'.",
uriInfo.getPath(), thingUID);
return getThingResponse(Status.CONFLICT, thing, locale,
"Cannot delete Thing " + thingUID + " as it is not editable.");
}
// only move on if Thing is known to be managed, so it can get updated
if (force) {
if (null == thingRegistry.forceRemove(thingUIDObject)) {
return getThingResponse(Status.INTERNAL_SERVER_ERROR, thing, locale,
"Cannot delete Thing " + thingUID + " for unknown reasons.");
}
} else {
if (null != thingRegistry.remove(thingUIDObject)) {
return getThingResponse(Status.ACCEPTED, thing, locale, null);
}
}
return Response.ok(null, MediaType.TEXT_PLAIN).build();
}
|
@RolesAllowed({ Role.ADMIN }) @Path(STR) @ApiOperation(value = STR) @ApiResponses(value = { @ApiResponse(code = 200, message = STR), @ApiResponse(code = 202, message = STR), @ApiResponse(code = 404, message = STR), @ApiResponse(code = 409, message = STR) }) Response function(@HeaderParam(HttpHeaders.ACCEPT_LANGUAGE) @ApiParam(value = STR) String language, @PathParam(STR) @ApiParam(value = STR) String thingUID, @DefaultValue("false") @QueryParam("force") @ApiParam(value = "force") boolean force) { final Locale locale = localeService.getLocale(language); ThingUID thingUIDObject = new ThingUID(thingUID); Thing thing = thingRegistry.get(thingUIDObject); if (thing == null) { logger.info(STR, uriInfo.getPath(), thingUID); return getThingNotFoundResponse(thingUID); } Thing managed = managedThingProvider.get(thingUIDObject); if (null == managed) { logger.info(STR, uriInfo.getPath(), thingUID); return getThingResponse(Status.CONFLICT, thing, locale, STR + thingUID + STR); } if (force) { if (null == thingRegistry.forceRemove(thingUIDObject)) { return getThingResponse(Status.INTERNAL_SERVER_ERROR, thing, locale, STR + thingUID + STR); } } else { if (null != thingRegistry.remove(thingUIDObject)) { return getThingResponse(Status.ACCEPTED, thing, locale, null); } } return Response.ok(null, MediaType.TEXT_PLAIN).build(); }
|
/**
* Delete a Thing, if possible. Thing deletion might be impossible if the
* Thing is not managed, will return CONFLICT. Thing deletion might happen
* delayed, will return ACCEPTED.
*
* @param thingUID
* @param force
* @return Response with status/error information
*/
|
Delete a Thing, if possible. Thing deletion might be impossible if the Thing is not managed, will return CONFLICT. Thing deletion might happen delayed, will return ACCEPTED
|
remove
|
{
"repo_name": "Snickermicker/smarthome",
"path": "bundles/io/org.eclipse.smarthome.io.rest.core/src/main/java/org/eclipse/smarthome/io/rest/core/internal/thing/ThingResource.java",
"license": "epl-1.0",
"size": 42083
}
|
[
"io.swagger.annotations.ApiOperation",
"io.swagger.annotations.ApiParam",
"io.swagger.annotations.ApiResponse",
"io.swagger.annotations.ApiResponses",
"java.util.Locale",
"javax.annotation.security.RolesAllowed",
"javax.ws.rs.DefaultValue",
"javax.ws.rs.HeaderParam",
"javax.ws.rs.Path",
"javax.ws.rs.PathParam",
"javax.ws.rs.QueryParam",
"javax.ws.rs.core.HttpHeaders",
"javax.ws.rs.core.MediaType",
"javax.ws.rs.core.Response",
"org.eclipse.smarthome.core.auth.Role",
"org.eclipse.smarthome.core.thing.Thing",
"org.eclipse.smarthome.core.thing.ThingUID"
] |
import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; import java.util.Locale; import javax.annotation.security.RolesAllowed; import javax.ws.rs.DefaultValue; import javax.ws.rs.HeaderParam; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.QueryParam; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.eclipse.smarthome.core.auth.Role; import org.eclipse.smarthome.core.thing.Thing; import org.eclipse.smarthome.core.thing.ThingUID;
|
import io.swagger.annotations.*; import java.util.*; import javax.annotation.security.*; import javax.ws.rs.*; import javax.ws.rs.core.*; import org.eclipse.smarthome.core.auth.*; import org.eclipse.smarthome.core.thing.*;
|
[
"io.swagger.annotations",
"java.util",
"javax.annotation",
"javax.ws",
"org.eclipse.smarthome"
] |
io.swagger.annotations; java.util; javax.annotation; javax.ws; org.eclipse.smarthome;
| 2,027,718
|
public Collection<Router> changeUpdateJsonToSub(JsonNode subnode,
String routerId)
throws Exception {
checkNotNull(subnode, JSON_NOT_NULL);
checkNotNull(routerId, "routerId should not be null");
Map<RouterId, Router> subMap = new HashMap<RouterId, Router>();
JsonNode routerNode = subnode.get("router");
RouterId id = RouterId.valueOf(routerId);
Router sub = nullIsNotFound(get(RouterService.class).getRouter(id),
NOT_EXIST);
TenantId tenantId = sub.tenantId();
VirtualPortId gwPortId = null;
if (routerNode.hasNonNull("gw_port_id")) {
gwPortId = VirtualPortId
.portId(routerNode.get("gw_port_id").asText());
}
Status status = sub.status();
String routerName = routerNode.get("name").asText();
checkArgument(routerNode.get("admin_state_up").isBoolean(),
"admin_state_up should be boolean");
boolean adminStateUp = routerNode.get("admin_state_up").asBoolean();
boolean distributed = sub.distributed();
if (routerNode.hasNonNull("distributed")) {
distributed = routerNode.get("distributed").asBoolean();
}
RouterGateway gateway = sub.externalGatewayInfo();
if (routerNode.hasNonNull("external_gateway_info")) {
gateway = jsonNodeToGateway(routerNode
.get("external_gateway_info"));
}
List<String> routes = new ArrayList<String>();
DefaultRouter routerObj = new DefaultRouter(id, routerName,
adminStateUp, status,
distributed, gateway,
gwPortId, tenantId, routes);
subMap.put(id, routerObj);
return Collections.unmodifiableCollection(subMap.values());
}
|
Collection<Router> function(JsonNode subnode, String routerId) throws Exception { checkNotNull(subnode, JSON_NOT_NULL); checkNotNull(routerId, STR); Map<RouterId, Router> subMap = new HashMap<RouterId, Router>(); JsonNode routerNode = subnode.get(STR); RouterId id = RouterId.valueOf(routerId); Router sub = nullIsNotFound(get(RouterService.class).getRouter(id), NOT_EXIST); TenantId tenantId = sub.tenantId(); VirtualPortId gwPortId = null; if (routerNode.hasNonNull(STR)) { gwPortId = VirtualPortId .portId(routerNode.get(STR).asText()); } Status status = sub.status(); String routerName = routerNode.get("name").asText(); checkArgument(routerNode.get(STR).isBoolean(), STR); boolean adminStateUp = routerNode.get(STR).asBoolean(); boolean distributed = sub.distributed(); if (routerNode.hasNonNull(STR)) { distributed = routerNode.get(STR).asBoolean(); } RouterGateway gateway = sub.externalGatewayInfo(); if (routerNode.hasNonNull(STR)) { gateway = jsonNodeToGateway(routerNode .get(STR)); } List<String> routes = new ArrayList<String>(); DefaultRouter routerObj = new DefaultRouter(id, routerName, adminStateUp, status, distributed, gateway, gwPortId, tenantId, routes); subMap.put(id, routerObj); return Collections.unmodifiableCollection(subMap.values()); }
|
/**
* Returns a collection of floatingIps from floatingIpNodes.
*
* @param subnode the router json node
* @param routerId the router identify
* @return routers a collection of router
* @throws Exception when any argument is illegal
*/
|
Returns a collection of floatingIps from floatingIpNodes
|
changeUpdateJsonToSub
|
{
"repo_name": "donNewtonAlpha/onos",
"path": "apps/vtn/vtnweb/src/main/java/org/onosproject/vtnweb/resources/RouterWebResource.java",
"license": "apache-2.0",
"size": 21810
}
|
[
"com.fasterxml.jackson.databind.JsonNode",
"com.google.common.base.Preconditions",
"java.util.ArrayList",
"java.util.Collection",
"java.util.Collections",
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"org.onosproject.vtnrsc.DefaultRouter",
"org.onosproject.vtnrsc.Router",
"org.onosproject.vtnrsc.RouterGateway",
"org.onosproject.vtnrsc.RouterId",
"org.onosproject.vtnrsc.TenantId",
"org.onosproject.vtnrsc.VirtualPortId",
"org.onosproject.vtnrsc.router.RouterService"
] |
import com.fasterxml.jackson.databind.JsonNode; import com.google.common.base.Preconditions; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.onosproject.vtnrsc.DefaultRouter; import org.onosproject.vtnrsc.Router; import org.onosproject.vtnrsc.RouterGateway; import org.onosproject.vtnrsc.RouterId; import org.onosproject.vtnrsc.TenantId; import org.onosproject.vtnrsc.VirtualPortId; import org.onosproject.vtnrsc.router.RouterService;
|
import com.fasterxml.jackson.databind.*; import com.google.common.base.*; import java.util.*; import org.onosproject.vtnrsc.*; import org.onosproject.vtnrsc.router.*;
|
[
"com.fasterxml.jackson",
"com.google.common",
"java.util",
"org.onosproject.vtnrsc"
] |
com.fasterxml.jackson; com.google.common; java.util; org.onosproject.vtnrsc;
| 462,608
|
public void zoomDomainAxes(double lowerPercent, double upperPercent,
PlotRenderingInfo info, Point2D source) {
this.domainAxis.zoomRange(lowerPercent, upperPercent);
}
|
void function(double lowerPercent, double upperPercent, PlotRenderingInfo info, Point2D source) { this.domainAxis.zoomRange(lowerPercent, upperPercent); }
|
/**
* Zooms in on the domain axes.
*
* @param lowerPercent the new lower bound as a percentage of the current
* range.
* @param upperPercent the new upper bound as a percentage of the current
* range.
* @param info the plot rendering info.
* @param source the source point.
*/
|
Zooms in on the domain axes
|
zoomDomainAxes
|
{
"repo_name": "Mr-Steve/LTSpice_Library_Manager",
"path": "libs/jfreechart-1.0.16/source/org/jfree/chart/plot/FastScatterPlot.java",
"license": "gpl-2.0",
"size": 37300
}
|
[
"java.awt.geom.Point2D"
] |
import java.awt.geom.Point2D;
|
import java.awt.geom.*;
|
[
"java.awt"
] |
java.awt;
| 1,945,786
|
private BindingAction getBindingAction(Item item, String action) {
if (action == null) {
return null;
}
BindingAction bindingAction = BindingAction.parse(action);
if (bindingAction == null) {
logger.warn("Can't parse action {}, only {} and {} allowed. Ignoring action parameter in item {}!", action,
BindingAction.RELOAD_VARIABLES, BindingAction.RELOAD_DATAPOINTS, item.getName());
} else if (!acceptsOnOffType(item)) {
logger.warn(
"Actions can only be attached to items which accepts OnOffType commands, ignoring action attribute in item {}!",
action, item.getName());
bindingAction = null;
}
return bindingAction;
}
|
BindingAction function(Item item, String action) { if (action == null) { return null; } BindingAction bindingAction = BindingAction.parse(action); if (bindingAction == null) { logger.warn(STR, action, BindingAction.RELOAD_VARIABLES, BindingAction.RELOAD_DATAPOINTS, item.getName()); } else if (!acceptsOnOffType(item)) { logger.warn( STR, action, item.getName()); bindingAction = null; } return bindingAction; }
|
/**
* Parses the BindingAction if available.
*/
|
Parses the BindingAction if available
|
getBindingAction
|
{
"repo_name": "mattnl/openhab",
"path": "bundles/binding/org.openhab.binding.homematic/src/main/java/org/openhab/binding/homematic/internal/bus/BindingConfigParser.java",
"license": "epl-1.0",
"size": 6598
}
|
[
"org.openhab.binding.homematic.internal.config.BindingAction",
"org.openhab.core.items.Item"
] |
import org.openhab.binding.homematic.internal.config.BindingAction; import org.openhab.core.items.Item;
|
import org.openhab.binding.homematic.internal.config.*; import org.openhab.core.items.*;
|
[
"org.openhab.binding",
"org.openhab.core"
] |
org.openhab.binding; org.openhab.core;
| 1,027,131
|
@Exported(name="builtOn")
public String getBuiltOnStr() {
return builtOn;
}
|
@Exported(name=STR) String function() { return builtOn; }
|
/**
* Returns the name of the slave it was built on; null or "" if built by the master.
* (null happens when we read old record that didn't have this information.)
*/
|
Returns the name of the slave it was built on; null or "" if built by the master. (null happens when we read old record that didn't have this information.)
|
getBuiltOnStr
|
{
"repo_name": "jtnord/jenkins",
"path": "core/src/main/java/hudson/model/AbstractBuild.java",
"license": "mit",
"size": 48784
}
|
[
"org.kohsuke.stapler.export.Exported"
] |
import org.kohsuke.stapler.export.Exported;
|
import org.kohsuke.stapler.export.*;
|
[
"org.kohsuke.stapler"
] |
org.kohsuke.stapler;
| 170,017
|
@NotNull
Set<String> getHostnames();
|
Set<String> getHostnames();
|
/**
* Entries look like "example.com" or "foo.example.com" in lower case, without trailing dot.
* Empty if there is no known host name with reverse dns. For example DuckDuckGo currently doesn't provide this.
*/
|
Entries look like "example.com" or "foo.example.com" in lower case, without trailing dot. Empty if there is no known host name with reverse dns. For example DuckDuckGo currently doesn't provide this
|
getHostnames
|
{
"repo_name": "optimaize/webcrawler-verifier",
"path": "src/main/java/com/optimaize/webcrawlerverifier/bots/CrawlerData.java",
"license": "mit",
"size": 1596
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,140,772
|
public JsonWriter value(Number value) throws IOException {
if (value == null) {
return nullValue();
}
writeDeferredName();
String string = value.toString();
if (!lenient
&& (string.equals("-Infinity") || string.equals("Infinity") || string.equals("NaN"))) {
throw new IllegalArgumentException("Numeric values must be finite, but was " + value);
}
beforeValue(false);
sink.writeUtf8(string);
return this;
}
/**
* Ensures all buffered data is written to the underlying {@link Sink}
|
JsonWriter function(Number value) throws IOException { if (value == null) { return nullValue(); } writeDeferredName(); String string = value.toString(); if (!lenient && (string.equals(STR) string.equals(STR) string.equals("NaN"))) { throw new IllegalArgumentException(STR + value); } beforeValue(false); sink.writeUtf8(string); return this; } /** * Ensures all buffered data is written to the underlying {@link Sink}
|
/**
* Encodes {@code value}.
*
* @param value a finite value. May not be {@link Double#isNaN() NaNs} or
* {@link Double#isInfinite() infinities}.
* @return this writer.
*/
|
Encodes value
|
value
|
{
"repo_name": "maarcooliveira/498dm",
"path": "moshi/src/main/java/com/squareup/moshi/JsonWriter.java",
"license": "apache-2.0",
"size": 17402
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,898,777
|
public CmsWorkOrderSimple getWorkOrder(DelegateExecution exec, CmsWorkOrderSimple dpmtRec) {
Integer execOrder = (Integer) exec.getVariable(CmsConstants.EXEC_ORDER);
CmsDeployment dpmt = (CmsDeployment) exec.getVariable(DPMT);
logger.info("Geting work order pmtRec = " + dpmtRec.getDpmtRecordId() + " for dpmt id = " + dpmtRec.getDeploymentId() + " rfcId = " + dpmtRec.getRfcId() + " step #" + execOrder);
long startTime = System.currentTimeMillis();
try {
//CmsWorkOrderSimple wo = retryTemplate.execute(retryContext -> restTemplate.getForObject(serviceUrl + "dj/simple/deployments/{deploymentId}/workorders/{dpmtRecId}?execorder={execOrder}", CmsWorkOrderSimple.class, dpmtRec.getDeploymentId(), dpmtRec.getDpmtRecordId(), execOrder));
CmsWorkOrderSimple wo = cmsWoProvider.getWorkOrderSimple(dpmtRec.getDpmtRecordId(), null, execOrder);
final long woCreationtime = System.currentTimeMillis() - startTime;
wo.getSearchTags().put("woCrtTime",String.valueOf(woCreationtime));
logger.info("Time taked to get wo - " + woCreationtime + "ms; pmtRec = " + dpmtRec.getDpmtRecordId() + " for dpmt id = " + dpmtRec.getDeploymentId() + " rfcId = " + dpmtRec.getRfcId() + " step #" + execOrder);
if (wo != null) {
decryptWo(wo);
CmsWorkOrderSimple strippedWo = controllerUtil.stripWO(wo);
setOrCreateLocalVar(exec, "wo", strippedWo);
setOrCreateLocalVar(exec, WorkflowController.WO_STATE, WorkflowController.WO_RECEIVED);
logger.info("Set WO as activiti local var; pmtRec = " + dpmtRec.getDpmtRecordId() + " for dpmt id = " + dpmtRec.getDeploymentId() + " rfcId = " + dpmtRec.getRfcId() + " step #" + execOrder);
return wo;
} else {
String descr = dpmt.getDescription();
if (descr == null) {
descr = "";
}
descr += "\n Can not get workorder for rfc : " + dpmtRec.getRfcId() + "; execOrder : " + execOrder + ";\n ";
logger.error(descr);
handleGetWoError(exec, dpmt, dpmtRec, descr);
}
} catch (CmsBaseException e) {
logger.error("RestClientException rfc : " + dpmtRec.getRfcId() + "; execOrder : " + execOrder, e);
logger.error(e.getMessage());
String descr = dpmt.getDescription();
if (descr == null) {
descr = "";
}
descr += "\n Can not get workorder for rfc : " + dpmtRec.getRfcId() + "; execOrder : " + execOrder + ";\n " + e.getMessage();
handleGetWoError(exec, dpmt, dpmtRec, descr);
} catch (GeneralSecurityException e) {
logger.error("Failed to decrypt workorder for rfc : " + dpmtRec.getRfcId() + "; execOrder : " + execOrder, e);
String descr = dpmt.getDescription();
if (descr == null) {
descr = "";
}
descr += "\n Can not decrypt workorder for rfc : " + dpmtRec.getRfcId() + "; execOrder : " + execOrder + ";";
handleGetWoError(exec, dpmt, dpmtRec, descr);
}
return null;
}
|
CmsWorkOrderSimple function(DelegateExecution exec, CmsWorkOrderSimple dpmtRec) { Integer execOrder = (Integer) exec.getVariable(CmsConstants.EXEC_ORDER); CmsDeployment dpmt = (CmsDeployment) exec.getVariable(DPMT); logger.info(STR + dpmtRec.getDpmtRecordId() + STR + dpmtRec.getDeploymentId() + STR + dpmtRec.getRfcId() + STR + execOrder); long startTime = System.currentTimeMillis(); try { CmsWorkOrderSimple wo = cmsWoProvider.getWorkOrderSimple(dpmtRec.getDpmtRecordId(), null, execOrder); final long woCreationtime = System.currentTimeMillis() - startTime; wo.getSearchTags().put(STR,String.valueOf(woCreationtime)); logger.info(STR + woCreationtime + STR + dpmtRec.getDpmtRecordId() + STR + dpmtRec.getDeploymentId() + STR + dpmtRec.getRfcId() + STR + execOrder); if (wo != null) { decryptWo(wo); CmsWorkOrderSimple strippedWo = controllerUtil.stripWO(wo); setOrCreateLocalVar(exec, "wo", strippedWo); setOrCreateLocalVar(exec, WorkflowController.WO_STATE, WorkflowController.WO_RECEIVED); logger.info(STR + dpmtRec.getDpmtRecordId() + STR + dpmtRec.getDeploymentId() + STR + dpmtRec.getRfcId() + STR + execOrder); return wo; } else { String descr = dpmt.getDescription(); if (descr == null) { descr = STR\n Can not get workorder for rfc : STR; execOrder : STR;\n STRRestClientException rfc : STR; execOrder : " + execOrder, e); logger.error(e.getMessage()); String descr = dpmt.getDescription(); if (descr == null) { descr = STR\n Can not get workorder for rfc : STR; execOrder : STR;\n STRFailed to decrypt workorder for rfc : STR; execOrder : " + execOrder, e); String descr = dpmt.getDescription(); if (descr == null) { descr = STR\n Can not decrypt workorder for rfc : STR; execOrder : STR;"; handleGetWoError(exec, dpmt, dpmtRec, descr); } return null; }
|
/**
* Gets the work order.
*
* @param exec the exec
* @return the work orders
* @throws GeneralSecurityException the general security exception
*/
|
Gets the work order
|
getWorkOrder
|
{
"repo_name": "lkhusid/oneops",
"path": "controller/src/main/java/com/oneops/controller/cms/CMSClient.java",
"license": "apache-2.0",
"size": 44045
}
|
[
"com.oneops.cms.dj.domain.CmsDeployment",
"com.oneops.cms.simple.domain.CmsWorkOrderSimple",
"com.oneops.cms.util.CmsConstants",
"com.oneops.controller.workflow.WorkflowController",
"org.activiti.engine.delegate.DelegateExecution"
] |
import com.oneops.cms.dj.domain.CmsDeployment; import com.oneops.cms.simple.domain.CmsWorkOrderSimple; import com.oneops.cms.util.CmsConstants; import com.oneops.controller.workflow.WorkflowController; import org.activiti.engine.delegate.DelegateExecution;
|
import com.oneops.cms.dj.domain.*; import com.oneops.cms.simple.domain.*; import com.oneops.cms.util.*; import com.oneops.controller.workflow.*; import org.activiti.engine.delegate.*;
|
[
"com.oneops.cms",
"com.oneops.controller",
"org.activiti.engine"
] |
com.oneops.cms; com.oneops.controller; org.activiti.engine;
| 1,413,482
|
public Environment getEnvironmentIfOpen() {
if (environmentHome == null) {
return null;
}
return DbInternal.getEnvironmentShell(environmentHome);
}
|
Environment function() { if (environmentHome == null) { return null; } return DbInternal.getEnvironmentShell(environmentHome); }
|
/**
* Return an Environment only if the environment has already been opened
* in this process. A helper method for MBeans which want to only access
* open environments.
*
* @return Environment if already open, null if not open.
*/
|
Return an Environment only if the environment has already been opened in this process. A helper method for MBeans which want to only access open environments
|
getEnvironmentIfOpen
|
{
"repo_name": "prat0318/dbms",
"path": "mini_dbms/je-5.0.103/src/com/sleepycat/je/jmx/JEMBeanHelper.java",
"license": "mit",
"size": 31637
}
|
[
"com.sleepycat.je.DbInternal",
"com.sleepycat.je.Environment"
] |
import com.sleepycat.je.DbInternal; import com.sleepycat.je.Environment;
|
import com.sleepycat.je.*;
|
[
"com.sleepycat.je"
] |
com.sleepycat.je;
| 885,166
|
private static boolean validate(Response cached, Response network) {
if (network.code() == HTTP_NOT_MODIFIED) return true;
// The HTTP spec says that if the network's response is older than our
// cached response, we may return the cache's response. Like Chrome (but
// unlike Firefox), this client prefers to return the newer response.
Date lastModified = cached.headers().getDate("Last-Modified");
if (lastModified != null) {
Date networkLastModified = network.headers().getDate("Last-Modified");
if (networkLastModified != null
&& networkLastModified.getTime() < lastModified.getTime()) {
return true;
}
}
return false;
}
|
static boolean function(Response cached, Response network) { if (network.code() == HTTP_NOT_MODIFIED) return true; Date lastModified = cached.headers().getDate(STR); if (lastModified != null) { Date networkLastModified = network.headers().getDate(STR); if (networkLastModified != null && networkLastModified.getTime() < lastModified.getTime()) { return true; } } return false; }
|
/**
* Returns true if {@code cached} should be used; false if {@code network} response should be
* used.
*/
|
Returns true if cached should be used; false if network response should be used
|
validate
|
{
"repo_name": "yakatak/okhttp",
"path": "okhttp/src/main/java/okhttp3/internal/cache/CacheInterceptor.java",
"license": "apache-2.0",
"size": 10605
}
|
[
"java.util.Date"
] |
import java.util.Date;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,728,440
|
public static <T extends TNumber> Operand<T> mean(
Ops tf, Operand<T> x, Operand<? extends TIntegral> axes, boolean keepDims) {
if (axes == null) {
axes = allAxes(tf, x);
}
return tf.math.mean(x, axes, Mean.keepDims(keepDims));
}
|
static <T extends TNumber> Operand<T> function( Ops tf, Operand<T> x, Operand<? extends TIntegral> axes, boolean keepDims) { if (axes == null) { axes = allAxes(tf, x); } return tf.math.mean(x, axes, Mean.keepDims(keepDims)); }
|
/**
* Calculates the mean of the operand, alongside the specified axes.
*
* @param tf the TensorFlow Ops
* @param x the Operand used to calculate the mean
* @param axes Axes to compute the mean.
* @param keepDims Indicates whether to keep the dimensions or not. If {@code keepdims} is {@code
* false}, the rank of the tensor is reduced by 1 for each entry in {@code axes }. If {@code
* keepdims} is {@code true}, the reduced dimensions are retained with length 1.
* @param <T> the data type of the Operand
* @return the mean of elements of {@code x}.
*/
|
Calculates the mean of the operand, alongside the specified axes
|
mean
|
{
"repo_name": "tensorflow/java",
"path": "tensorflow-framework/src/main/java/org/tensorflow/framework/metrics/impl/MetricsHelper.java",
"license": "apache-2.0",
"size": 41042
}
|
[
"org.tensorflow.Operand",
"org.tensorflow.framework.losses.impl.LossesHelper",
"org.tensorflow.op.Ops",
"org.tensorflow.op.math.Mean",
"org.tensorflow.types.family.TIntegral",
"org.tensorflow.types.family.TNumber"
] |
import org.tensorflow.Operand; import org.tensorflow.framework.losses.impl.LossesHelper; import org.tensorflow.op.Ops; import org.tensorflow.op.math.Mean; import org.tensorflow.types.family.TIntegral; import org.tensorflow.types.family.TNumber;
|
import org.tensorflow.*; import org.tensorflow.framework.losses.impl.*; import org.tensorflow.op.*; import org.tensorflow.op.math.*; import org.tensorflow.types.family.*;
|
[
"org.tensorflow",
"org.tensorflow.framework",
"org.tensorflow.op",
"org.tensorflow.types"
] |
org.tensorflow; org.tensorflow.framework; org.tensorflow.op; org.tensorflow.types;
| 2,150,124
|
Object findCaller(RBaseNode call);
|
Object findCaller(RBaseNode call);
|
/**
* Used by error/warning handling to try to find the call that provoked the error/warning.
*
* If there is no caller, return {@link RNull#instance}, e.g. "call" of a builtin from the
* global env, otherwise return an {@code RPairList} instance that represents the call.
*
* @param call may be {@code null} or it may be the {@link Node} that was executing when the
* error.warning was generated (builtin or associated node).
*/
|
Used by error/warning handling to try to find the call that provoked the error/warning. If there is no caller, return <code>RNull#instance</code>, e.g. "call" of a builtin from the global env, otherwise return an RPairList instance that represents the call
|
findCaller
|
{
"repo_name": "graalvm/fastr",
"path": "com.oracle.truffle.r.runtime/src/com/oracle/truffle/r/runtime/RRuntimeASTAccess.java",
"license": "gpl-2.0",
"size": 7770
}
|
[
"com.oracle.truffle.r.runtime.nodes.RBaseNode"
] |
import com.oracle.truffle.r.runtime.nodes.RBaseNode;
|
import com.oracle.truffle.r.runtime.nodes.*;
|
[
"com.oracle.truffle"
] |
com.oracle.truffle;
| 1,658,998
|
public Observable<ServiceResponse<Page<USqlAssemblyClr>>> listAssembliesSinglePageAsync(final String accountName, final String databaseName, final String filter, final Integer top, final Integer skip, final String select, final String orderby, final Boolean count) {
if (accountName == null) {
throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
}
if (this.client.adlaCatalogDnsSuffix() == null) {
throw new IllegalArgumentException("Parameter this.client.adlaCatalogDnsSuffix() is required and cannot be null.");
}
if (databaseName == null) {
throw new IllegalArgumentException("Parameter databaseName is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
|
Observable<ServiceResponse<Page<USqlAssemblyClr>>> function(final String accountName, final String databaseName, final String filter, final Integer top, final Integer skip, final String select, final String orderby, final Boolean count) { if (accountName == null) { throw new IllegalArgumentException(STR); } if (this.client.adlaCatalogDnsSuffix() == null) { throw new IllegalArgumentException(STR); } if (databaseName == null) { throw new IllegalArgumentException(STR); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException(STR); }
|
/**
* Retrieves the list of assemblies from the Data Lake Analytics catalog.
*
ServiceResponse<PageImpl<USqlAssemblyClr>> * @param accountName The Azure Data Lake Analytics account upon which to execute catalog operations.
ServiceResponse<PageImpl<USqlAssemblyClr>> * @param databaseName The name of the database containing the assembly.
ServiceResponse<PageImpl<USqlAssemblyClr>> * @param filter OData filter. Optional.
ServiceResponse<PageImpl<USqlAssemblyClr>> * @param top The number of items to return. Optional.
ServiceResponse<PageImpl<USqlAssemblyClr>> * @param skip The number of items to skip over before returning elements. Optional.
ServiceResponse<PageImpl<USqlAssemblyClr>> * @param select OData Select statement. Limits the properties on each entry to just those requested, e.g. Categories?$select=CategoryName,Description. Optional.
ServiceResponse<PageImpl<USqlAssemblyClr>> * @param orderby OrderBy clause. One or more comma-separated expressions with an optional "asc" (the default) or "desc" depending on the order you'd like the values sorted, e.g. Categories?$orderby=CategoryName desc. Optional.
ServiceResponse<PageImpl<USqlAssemblyClr>> * @param count The Boolean value of true or false to request a count of the matching resources included with the resources in the response, e.g. Categories?$count=true. Optional.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the PagedList<USqlAssemblyClr> object wrapped in {@link ServiceResponse} if successful.
*/
|
Retrieves the list of assemblies from the Data Lake Analytics catalog
|
listAssembliesSinglePageAsync
|
{
"repo_name": "martinsawicki/azure-sdk-for-java",
"path": "azure-mgmt-datalake-analytics/src/main/java/com/microsoft/azure/management/datalake/analytics/implementation/CatalogsImpl.java",
"license": "mit",
"size": 687714
}
|
[
"com.microsoft.azure.Page",
"com.microsoft.azure.management.datalake.analytics.models.USqlAssemblyClr",
"com.microsoft.rest.ServiceResponse"
] |
import com.microsoft.azure.Page; import com.microsoft.azure.management.datalake.analytics.models.USqlAssemblyClr; import com.microsoft.rest.ServiceResponse;
|
import com.microsoft.azure.*; import com.microsoft.azure.management.datalake.analytics.models.*; import com.microsoft.rest.*;
|
[
"com.microsoft.azure",
"com.microsoft.rest"
] |
com.microsoft.azure; com.microsoft.rest;
| 396,734
|
public List<AvailabilityZone> describeAvailabilityZones(List<String> zones) throws EC2Exception {
Map<String, String> params = new HashMap<String, String>();
if (zones != null && zones.size() > 0) {
for (int i=0 ; i<zones.size(); i++) {
params.put("ZoneName."+(i+1), zones.get(i));
}
}
HttpGet method = new HttpGet();
DescribeAvailabilityZonesResponse response =
makeRequestInt(method, "DescribeAvailabilityZones", params, DescribeAvailabilityZonesResponse.class);
List<AvailabilityZone> ret = new ArrayList<AvailabilityZone>();
AvailabilityZoneSetType set = response.getAvailabilityZoneInfo();
Iterator set_iter = set.getItems().iterator();
while (set_iter.hasNext()) {
AvailabilityZoneItemType item = (AvailabilityZoneItemType) set_iter.next();
List<String> messages = new ArrayList<String>();
for (AvailabilityZoneMessageType msg : item.getMessageSet().getItems()) {
messages.add(msg.getMessage());
}
ret.add(new AvailabilityZone(item.getZoneName(), item.getZoneState(), item.getRegionName(), messages));
}
return ret;
}
|
List<AvailabilityZone> function(List<String> zones) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); if (zones != null && zones.size() > 0) { for (int i=0 ; i<zones.size(); i++) { params.put(STR+(i+1), zones.get(i)); } } HttpGet method = new HttpGet(); DescribeAvailabilityZonesResponse response = makeRequestInt(method, STR, params, DescribeAvailabilityZonesResponse.class); List<AvailabilityZone> ret = new ArrayList<AvailabilityZone>(); AvailabilityZoneSetType set = response.getAvailabilityZoneInfo(); Iterator set_iter = set.getItems().iterator(); while (set_iter.hasNext()) { AvailabilityZoneItemType item = (AvailabilityZoneItemType) set_iter.next(); List<String> messages = new ArrayList<String>(); for (AvailabilityZoneMessageType msg : item.getMessageSet().getItems()) { messages.add(msg.getMessage()); } ret.add(new AvailabilityZone(item.getZoneName(), item.getZoneState(), item.getRegionName(), messages)); } return ret; }
|
/**
* Returns a list of availability zones and their status.
*
* @param zones a list of zones to limit the results, or null
* @return a list of zones and their availability
* @throws EC2Exception wraps checked exceptions
*/
|
Returns a list of availability zones and their status
|
describeAvailabilityZones
|
{
"repo_name": "jonnyzzz/maragogype",
"path": "tags/v1.7.1/java/com/xerox/amazonws/ec2/Jec2.java",
"license": "apache-2.0",
"size": 97555
}
|
[
"com.xerox.amazonws.typica.jaxb.AvailabilityZoneItemType",
"com.xerox.amazonws.typica.jaxb.AvailabilityZoneMessageType",
"com.xerox.amazonws.typica.jaxb.AvailabilityZoneSetType",
"com.xerox.amazonws.typica.jaxb.DescribeAvailabilityZonesResponse",
"java.util.ArrayList",
"java.util.HashMap",
"java.util.Iterator",
"java.util.List",
"java.util.Map",
"org.apache.http.client.methods.HttpGet"
] |
import com.xerox.amazonws.typica.jaxb.AvailabilityZoneItemType; import com.xerox.amazonws.typica.jaxb.AvailabilityZoneMessageType; import com.xerox.amazonws.typica.jaxb.AvailabilityZoneSetType; import com.xerox.amazonws.typica.jaxb.DescribeAvailabilityZonesResponse; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.http.client.methods.HttpGet;
|
import com.xerox.amazonws.typica.jaxb.*; import java.util.*; import org.apache.http.client.methods.*;
|
[
"com.xerox.amazonws",
"java.util",
"org.apache.http"
] |
com.xerox.amazonws; java.util; org.apache.http;
| 2,364,621
|
protected List<Publication> parseResponse(String xml) throws CabinetException {
assert xml != null;
List<Publication> result = new ArrayList<Publication>();
//hook for titles with &
xml= xml.replace("&amp;", "&");
//log.debug("RESPONSE: "+xml);
//Create new document factory builder
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder;
try {
builder = factory.newDocumentBuilder();
} catch (ParserConfigurationException ex) {
throw new CabinetException("Error when creating newDocumentBuilder.", ex);
}
Document doc;
try {
doc = builder.parse(new InputSource(new StringReader(xml)));
} catch (SAXParseException ex) {
throw new CabinetException("Error when parsing uri by document builder.", ErrorCodes.MALFORMED_HTTP_RESPONSE, ex);
} catch (SAXException ex) {
throw new CabinetException("Problem with parsing is more complex, not only invalid characters.", ErrorCodes.MALFORMED_HTTP_RESPONSE, ex);
} catch (IOException ex) {
throw new CabinetException("Error when parsing uri by document builder. Problem with input or output.", ErrorCodes.MALFORMED_HTTP_RESPONSE, ex);
}
//Prepare xpath expression
XPathFactory xPathFactory = XPathFactory.newInstance();
XPath xpath = xPathFactory.newXPath();
XPathExpression publicationsQuery;
try {
publicationsQuery = xpath.compile("/responseWrapper/resultList/result");
} catch (XPathExpressionException ex) {
throw new CabinetException("Error when compiling xpath query.", ex);
}
NodeList nodeList;
try {
nodeList = (NodeList) publicationsQuery.evaluate(doc, XPathConstants.NODESET);
} catch (XPathExpressionException ex) {
throw new CabinetException("Error when evaluate xpath query on document.", ex);
}
//Test if there is any nodeset in result
if(nodeList.getLength() == 0) {
//There is no results, return empty subjects
return result;
}
//Iterate through nodes and convert them to Map<String,String>
for(int i=0; i<nodeList.getLength(); i++) {
Node singleNode = nodeList.item(i);
// remove node from original structure in order to keep access time constant (otherwise is exp.)
singleNode.getParentNode().removeChild(singleNode);
try {
Publication publication = convertNodeToPublication(singleNode, xPathFactory);
result.add(publication);
} catch (InternalErrorException ex) {
log.error("Unable to parse Publication:", ex);
}
}
return result;
}
|
List<Publication> function(String xml) throws CabinetException { assert xml != null; List<Publication> result = new ArrayList<Publication>(); xml= xml.replace("&", "&"); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder; try { builder = factory.newDocumentBuilder(); } catch (ParserConfigurationException ex) { throw new CabinetException(STR, ex); } Document doc; try { doc = builder.parse(new InputSource(new StringReader(xml))); } catch (SAXParseException ex) { throw new CabinetException(STR, ErrorCodes.MALFORMED_HTTP_RESPONSE, ex); } catch (SAXException ex) { throw new CabinetException(STR, ErrorCodes.MALFORMED_HTTP_RESPONSE, ex); } catch (IOException ex) { throw new CabinetException(STR, ErrorCodes.MALFORMED_HTTP_RESPONSE, ex); } XPathFactory xPathFactory = XPathFactory.newInstance(); XPath xpath = xPathFactory.newXPath(); XPathExpression publicationsQuery; try { publicationsQuery = xpath.compile(STR); } catch (XPathExpressionException ex) { throw new CabinetException(STR, ex); } NodeList nodeList; try { nodeList = (NodeList) publicationsQuery.evaluate(doc, XPathConstants.NODESET); } catch (XPathExpressionException ex) { throw new CabinetException(STR, ex); } if(nodeList.getLength() == 0) { return result; } for(int i=0; i<nodeList.getLength(); i++) { Node singleNode = nodeList.item(i); singleNode.getParentNode().removeChild(singleNode); try { Publication publication = convertNodeToPublication(singleNode, xPathFactory); result.add(publication); } catch (InternalErrorException ex) { log.error(STR, ex); } } return result; }
|
/**
* Parse String response as XML document and retrieve Publications from it.
* @param xml XML response from EuropePMC
* @return List of Publications
* @throws CabinetException If anything fails
*/
|
Parse String response as XML document and retrieve Publications from it
|
parseResponse
|
{
"repo_name": "licehammer/perun",
"path": "perun-cabinet/src/main/java/cz/metacentrum/perun/cabinet/strategy/impl/EuropePMCStrategy.java",
"license": "bsd-2-clause",
"size": 10509
}
|
[
"cz.metacentrum.perun.cabinet.bl.CabinetException",
"cz.metacentrum.perun.cabinet.bl.ErrorCodes",
"cz.metacentrum.perun.cabinet.model.Publication",
"cz.metacentrum.perun.core.api.exceptions.InternalErrorException",
"java.io.IOException",
"java.io.StringReader",
"java.util.ArrayList",
"java.util.List",
"javax.xml.parsers.DocumentBuilder",
"javax.xml.parsers.DocumentBuilderFactory",
"javax.xml.parsers.ParserConfigurationException",
"javax.xml.xpath.XPath",
"javax.xml.xpath.XPathConstants",
"javax.xml.xpath.XPathExpression",
"javax.xml.xpath.XPathExpressionException",
"javax.xml.xpath.XPathFactory",
"org.w3c.dom.Document",
"org.w3c.dom.Node",
"org.w3c.dom.NodeList",
"org.xml.sax.InputSource",
"org.xml.sax.SAXException",
"org.xml.sax.SAXParseException"
] |
import cz.metacentrum.perun.cabinet.bl.CabinetException; import cz.metacentrum.perun.cabinet.bl.ErrorCodes; import cz.metacentrum.perun.cabinet.model.Publication; import cz.metacentrum.perun.core.api.exceptions.InternalErrorException; import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; import java.util.List; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpression; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException;
|
import cz.metacentrum.perun.cabinet.bl.*; import cz.metacentrum.perun.cabinet.model.*; import cz.metacentrum.perun.core.api.exceptions.*; import java.io.*; import java.util.*; import javax.xml.parsers.*; import javax.xml.xpath.*; import org.w3c.dom.*; import org.xml.sax.*;
|
[
"cz.metacentrum.perun",
"java.io",
"java.util",
"javax.xml",
"org.w3c.dom",
"org.xml.sax"
] |
cz.metacentrum.perun; java.io; java.util; javax.xml; org.w3c.dom; org.xml.sax;
| 739,172
|
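A short illustrative sketch of the DocumentBuilder + XPath pattern used by the parseResponse record above. It is self-contained and parses an invented inline document; the element names and the /response/result expression are placeholders, not the real EuropePMC response schema.

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

public class XPathParseSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical response body, not a real EuropePMC payload.
        String xml = "<response><result><title>First</title></result>"
                + "<result><title>Second</title></result></response>";
        // Escape bare ampersands before parsing, mirroring the record above.
        xml = xml.replace("&", "&amp;");
        DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        Document doc = builder.parse(new InputSource(new StringReader(xml)));
        XPath xpath = XPathFactory.newInstance().newXPath();
        // Select every <result> element, then read its <title> child as text.
        NodeList results = (NodeList) xpath.evaluate("/response/result", doc, XPathConstants.NODESET);
        for (int i = 0; i < results.getLength(); i++) {
            Node result = results.item(i);
            System.out.println(xpath.evaluate("title", result));
        }
    }
}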
public Combo createCombo(Composite parent) {
return createCombo(parent, SWT.FLAT | SWT.READ_ONLY);
}
|
Combo function(Composite parent) { return createCombo(parent, SWT.FLAT SWT.READ_ONLY); }
|
/**
* Creates a combo box as a part of the form.
*
* @param parent
* the combo box parent.
* @return the combo box.
*/
|
Creates a combo box as a part of the form
|
createCombo
|
{
"repo_name": "OpenSoftwareSolutions/PDFReporter-Studio",
"path": "com.jaspersoft.studio.properties/src/com/jaspersoft/studio/properties/view/TabbedPropertySheetWidgetFactory.java",
"license": "lgpl-3.0",
"size": 13506
}
|
[
"org.eclipse.swt.widgets.Combo",
"org.eclipse.swt.widgets.Composite"
] |
import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite;
|
import org.eclipse.swt.widgets.*;
|
[
"org.eclipse.swt"
] |
org.eclipse.swt;
| 1,776,589
|
System.setProperty("tangosol.coherence.distributed.localstorage",
"true");
System.setProperty("tangosol.coherence.rwbm.requeue.delay", "10000");
DefaultCacheServer.main(args);
}
|
System.setProperty(STR, "true"); System.setProperty(STR, "10000"); DefaultCacheServer.main(args); }
|
/**
* Test main method to call
* {@link com.tangosol.net.DefaultCacheServer#main(String[])} in Coherence.
*
* @param args
* Passed to
* {@link com.tangosol.net.DefaultCacheServer#main(String[])}
*/
|
Test main method to call <code>com.tangosol.net.DefaultCacheServer#main(String[])</code> in Coherence
|
main
|
{
"repo_name": "simukappu/Coherence-tools",
"path": "write-behind-tools/src/test/java/test/com/simukappu/coherence/writequeue/server/CacheServer.java",
"license": "apache-2.0",
"size": 645
}
|
[
"com.tangosol.net.DefaultCacheServer"
] |
import com.tangosol.net.DefaultCacheServer;
|
import com.tangosol.net.*;
|
[
"com.tangosol.net"
] |
com.tangosol.net;
| 1,825,400
|
public PreparedStatement prepareAutoCloseStatement(final String sql) throws SQLException {
PreparedStatement stmt = prepareStatement(sql);
closePreviousStatement();
_stmt = stmt;
return stmt;
}
|
PreparedStatement function(final String sql) throws SQLException { PreparedStatement stmt = prepareStatement(sql); closePreviousStatement(); _stmt = stmt; return stmt; }
|
/**
* Prepares an auto close statement. The statement is closed automatically if it is
* retrieved with this method.
*
* @param sql sql String
* @return PreparedStatement
* @throws SQLException if problem with JDBC layer.
*
* @see java.sql.Connection
*/
|
Prepares an auto close statement. The statement is closed automatically if it is retrieved with this method
|
prepareAutoCloseStatement
|
{
"repo_name": "resmo/cloudstack",
"path": "framework/db/src/com/cloud/utils/db/TransactionLegacy.java",
"license": "apache-2.0",
"size": 48007
}
|
[
"java.sql.PreparedStatement",
"java.sql.SQLException"
] |
import java.sql.PreparedStatement; import java.sql.SQLException;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 117,477
|
public OneResponse instantiate(int nVMs, int templateId)
{
return instantiate(client, id, nVMs, templateId, "", false, "");
}
|
OneResponse function(int nVMs, int templateId) { return instantiate(client, id, nVMs, templateId, STR"); }
|
/**
* Creates VM instances from a VM Template. New VMs will be associated
* to this Virtual Router, and its Virtual Networks
*
* @param nVMs Number of VMs to instantiate
* @param templateId VM Template id to instantiate
* @return If an error occurs the error message contains the reason.
*/
|
Creates VM instances from a VM Template. New VMs will be associated to this Virtual Router, and its Virtual Networks
|
instantiate
|
{
"repo_name": "hsanjuan/one",
"path": "src/oca/java/src/org/opennebula/client/vrouter/VirtualRouter.java",
"license": "apache-2.0",
"size": 16570
}
|
[
"org.opennebula.client.OneResponse"
] |
import org.opennebula.client.OneResponse;
|
import org.opennebula.client.*;
|
[
"org.opennebula.client"
] |
org.opennebula.client;
| 1,062,478
|
Task phaseTask = info.getPhaseTask(phase);
if (phaseTask != null) {
if (vfsStatistics != null) {
vfsStatistics.addProfileInfo(workSpaceName, info);
}
wasExecuted = true;
long infoPhaseDuration = info.getPhaseDuration(phaseTask);
phaseDurationNanos += infoPhaseDuration;
List<Task> taskList = info.getTasksForPhase(phaseTask);
long duration = infoPhaseDuration;
for (Task task : taskList) {
// Tasks on the phaseTask thread already accounted for in the phaseDuration.
if (task.threadId != phaseTask.threadId) {
duration += task.durationNanos;
}
}
totalDurationNanos += duration;
for (ProfilerTask type : ProfilerTask.values()) {
AggregateAttr attr = info.getStatsForType(type, taskList);
long totalTime = Math.max(0, attr.totalTime);
long count = Math.max(0, attr.count);
add(taskCounts, type, count);
add(taskDurations, type, totalTime);
}
}
}
|
Task phaseTask = info.getPhaseTask(phase); if (phaseTask != null) { if (vfsStatistics != null) { vfsStatistics.addProfileInfo(workSpaceName, info); } wasExecuted = true; long infoPhaseDuration = info.getPhaseDuration(phaseTask); phaseDurationNanos += infoPhaseDuration; List<Task> taskList = info.getTasksForPhase(phaseTask); long duration = infoPhaseDuration; for (Task task : taskList) { if (task.threadId != phaseTask.threadId) { duration += task.durationNanos; } } totalDurationNanos += duration; for (ProfilerTask type : ProfilerTask.values()) { AggregateAttr attr = info.getStatsForType(type, taskList); long totalTime = Math.max(0, attr.totalTime); long count = Math.max(0, attr.count); add(taskCounts, type, count); add(taskDurations, type, totalTime); } } }
|
/**
* Add statistics from {@link ProfileInfo} to the ones already accumulated for this phase.
*/
|
Add statistics from <code>ProfileInfo</code> to the ones already accumulated for this phase
|
addProfileInfo
|
{
"repo_name": "dropbox/bazel",
"path": "src/main/java/com/google/devtools/build/lib/profiler/statistics/PhaseStatistics.java",
"license": "apache-2.0",
"size": 7222
}
|
[
"com.google.devtools.build.lib.profiler.ProfilerTask",
"com.google.devtools.build.lib.profiler.analysis.ProfileInfo",
"java.util.List"
] |
import com.google.devtools.build.lib.profiler.ProfilerTask; import com.google.devtools.build.lib.profiler.analysis.ProfileInfo; import java.util.List;
|
import com.google.devtools.build.lib.profiler.*; import com.google.devtools.build.lib.profiler.analysis.*; import java.util.*;
|
[
"com.google.devtools",
"java.util"
] |
com.google.devtools; java.util;
| 519,590
|
void resetTX(Transaction transaction);
|
void resetTX(Transaction transaction);
|
/**
 * To be used by protocol heads that need to control the transaction outside the session context.
*/
|
To be used by protocol heads that need to control the transaction outside the session context
|
resetTX
|
{
"repo_name": "lburgazzoli/apache-activemq-artemis",
"path": "artemis-server/src/main/java/org/apache/activemq/artemis/core/server/ServerSession.java",
"license": "apache-2.0",
"size": 6304
}
|
[
"org.apache.activemq.artemis.core.transaction.Transaction"
] |
import org.apache.activemq.artemis.core.transaction.Transaction;
|
import org.apache.activemq.artemis.core.transaction.*;
|
[
"org.apache.activemq"
] |
org.apache.activemq;
| 2,876,892
|
public static String format(Date date, String pattern) {
return new java.text.SimpleDateFormat(pattern).format(date);
}
|
static String function(Date date, String pattern) { return new java.text.SimpleDateFormat(pattern).format(date); }
|
/**
* format the date
*/
|
format the date
|
format
|
{
"repo_name": "xmx111/ssh4",
"path": "src/main/java/lab/s2jh/core/util/ExtStringUtils.java",
"license": "lgpl-3.0",
"size": 27964
}
|
[
"java.text.SimpleDateFormat",
"java.util.Date"
] |
import java.text.SimpleDateFormat; import java.util.Date;
|
import java.text.*; import java.util.*;
|
[
"java.text",
"java.util"
] |
java.text; java.util;
| 791,910
|
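The format record above is a thin wrapper around java.text.SimpleDateFormat. A minimal usage sketch; the pattern string is an arbitrary example:

import java.text.SimpleDateFormat;
import java.util.Date;

public class DateFormatSketch {
    public static void main(String[] args) {
        Date now = new Date();
        // Any SimpleDateFormat pattern works; this one prints e.g. 2024-01-31 13:45:07.
        String formatted = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(now);
        System.out.println(formatted);
    }
}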
public void insertEntity(BillBreakDown billBreakDown) {
billBreakDownDAO.insert(billBreakDown);
}
|
void function(BillBreakDown billBreakDown) { billBreakDownDAO.insert(billBreakDown); }
|
/**
* Insert billBreakDown.
*/
|
Insert billBreakDown
|
insertEntity
|
{
"repo_name": "terrex/tntconcept-materials-testing",
"path": "src/main/java/com/autentia/intra/manager/billing/BillBreakDownManager.java",
"license": "gpl-2.0",
"size": 3666
}
|
[
"com.autentia.intra.businessobject.BillBreakDown"
] |
import com.autentia.intra.businessobject.BillBreakDown;
|
import com.autentia.intra.businessobject.*;
|
[
"com.autentia.intra"
] |
com.autentia.intra;
| 835,725
|
public boolean determineIfRedeliveryIsEnabled() throws Exception {
// determine if redeliver is enabled either on error handler
if (getRedeliveryPolicy().getMaximumRedeliveries() != 0) {
// must check for != 0 as (-1 means redeliver forever)
return true;
}
if (retryWhilePolicy != null) {
return true;
}
// or on the exception policies
if (!exceptionPolicies.isEmpty()) {
// walk them to see if any of them have a maximum redeliveries > 0 or retry until set
for (OnExceptionDefinition def : exceptionPolicies.values()) {
String ref = def.getRedeliveryPolicyRef();
if (ref != null) {
// lookup in registry if ref provided
RedeliveryPolicy policy = CamelContextHelper.mandatoryLookup(camelContext, ref, RedeliveryPolicy.class);
if (policy.getMaximumRedeliveries() != 0) {
// must check for != 0 as (-1 means redeliver forever)
return true;
}
} else if (def.getRedeliveryPolicyType() != null) {
Integer max = CamelContextHelper.parseInteger(camelContext, def.getRedeliveryPolicyType().getMaximumRedeliveries());
if (max != null && max != 0) {
// must check for != 0 as (-1 means redeliver forever)
return true;
}
}
if (def.getRetryWhilePolicy() != null || def.getRetryWhile() != null) {
return true;
}
}
}
return false;
}
|
boolean function() throws Exception { if (getRedeliveryPolicy().getMaximumRedeliveries() != 0) { return true; } if (retryWhilePolicy != null) { return true; } if (!exceptionPolicies.isEmpty()) { for (OnExceptionDefinition def : exceptionPolicies.values()) { String ref = def.getRedeliveryPolicyRef(); if (ref != null) { RedeliveryPolicy policy = CamelContextHelper.mandatoryLookup(camelContext, ref, RedeliveryPolicy.class); if (policy.getMaximumRedeliveries() != 0) { return true; } } else if (def.getRedeliveryPolicyType() != null) { Integer max = CamelContextHelper.parseInteger(camelContext, def.getRedeliveryPolicyType().getMaximumRedeliveries()); if (max != null && max != 0) { return true; } } if (def.getRetryWhilePolicy() != null def.getRetryWhile() != null) { return true; } } } return false; }
|
/**
* Determines if redelivery is enabled by checking if any of the redelivery policy
* settings may allow redeliveries.
*
* @return <tt>true</tt> if redelivery is possible, <tt>false</tt> otherwise
* @throws Exception can be thrown
*/
|
Determines if redelivery is enabled by checking if any of the redelivery policy settings may allow redeliveries
|
determineIfRedeliveryIsEnabled
|
{
"repo_name": "punkhorn/camel-upstream",
"path": "core/camel-core/src/main/java/org/apache/camel/processor/RedeliveryErrorHandler.java",
"license": "apache-2.0",
"size": 62432
}
|
[
"org.apache.camel.model.OnExceptionDefinition",
"org.apache.camel.support.CamelContextHelper"
] |
import org.apache.camel.model.OnExceptionDefinition; import org.apache.camel.support.CamelContextHelper;
|
import org.apache.camel.model.*; import org.apache.camel.support.*;
|
[
"org.apache.camel"
] |
org.apache.camel;
| 2,055,525
|
Map<String, String> tags();
|
Map<String, String> tags();
|
/**
* Gets the tags property: Resource tags.
*
* @return the tags value.
*/
|
Gets the tags property: Resource tags
|
tags
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/mobilenetwork/azure-resourcemanager-mobilenetwork/src/main/java/com/azure/resourcemanager/mobilenetwork/models/Slice.java",
"license": "mit",
"size": 7567
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,866,074
|
public void resetHand() {
for (Card card : currentPlayer.getHand()) {
card.hasBeenUsed(false);
}
}
|
void function() { for (Card card : currentPlayer.getHand()) { card.hasBeenUsed(false); } }
|
/**
* Reset the has_been_used state of the cards in hand
*/
|
Reset the has_been_used state of the cards in hand
|
resetHand
|
{
"repo_name": "oyachai/HearthSim",
"path": "src/main/java/com/hearthsim/model/BoardModel.java",
"license": "mit",
"size": 25670
}
|
[
"com.hearthsim.card.Card"
] |
import com.hearthsim.card.Card;
|
import com.hearthsim.card.*;
|
[
"com.hearthsim.card"
] |
com.hearthsim.card;
| 2,777,192
|
public static boolean putInt(Context context, String key, int value) {
SharedPreferences settings = context.getSharedPreferences(PREFERENCE_NAME, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = settings.edit();
editor.putInt(key, value);
return editor.commit();
}
|
static boolean function(Context context, String key, int value) { SharedPreferences settings = context.getSharedPreferences(PREFERENCE_NAME, Context.MODE_PRIVATE); SharedPreferences.Editor editor = settings.edit(); editor.putInt(key, value); return editor.commit(); }
|
/**
* put int preferences
*
* @param context
* @param key The name of the preference to modify
* @param value The new value for the preference
* @return True if the new values were successfully written to persistent storage.
*/
|
put int preferences
|
putInt
|
{
"repo_name": "weiwenqiang/GitHub",
"path": "ShoppingCart/PVCloudGroupn-master/app/src/main/java/pv/com/pvcloudgo/utils/PreferencesUtils.java",
"license": "apache-2.0",
"size": 9646
}
|
[
"android.content.Context",
"android.content.SharedPreferences"
] |
import android.content.Context; import android.content.SharedPreferences;
|
import android.content.*;
|
[
"android.content"
] |
android.content;
| 709,253
|
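The putInt record above stores a single int in a named SharedPreferences file and reports whether the commit succeeded. The sketch below shows the write together with the matching read; the preference file name is a placeholder, not the PREFERENCE_NAME constant of the original PreferencesUtils class.

import android.content.Context;
import android.content.SharedPreferences;

public final class PrefsSketch {
    // Placeholder file name for illustration only.
    private static final String PREFS_FILE = "app_prefs";

    public static boolean putInt(Context context, String key, int value) {
        SharedPreferences settings =
                context.getSharedPreferences(PREFS_FILE, Context.MODE_PRIVATE);
        // commit() writes synchronously and returns true on success.
        return settings.edit().putInt(key, value).commit();
    }

    public static int getInt(Context context, String key, int defaultValue) {
        SharedPreferences settings =
                context.getSharedPreferences(PREFS_FILE, Context.MODE_PRIVATE);
        return settings.getInt(key, defaultValue);
    }
}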
public Observable<ServiceResponse<AssetInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String accountName, String assetName, AssetInner parameters) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (accountName == null) {
throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
}
if (assetName == null) {
throw new IllegalArgumentException("Parameter assetName is required and cannot be null.");
}
if (parameters == null) {
throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
|
Observable<ServiceResponse<AssetInner>> function(String resourceGroupName, String accountName, String assetName, AssetInner parameters) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (accountName == null) { throw new IllegalArgumentException(STR); } if (assetName == null) { throw new IllegalArgumentException(STR); } if (parameters == null) { throw new IllegalArgumentException(STR); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException(STR); }
|
/**
* Create or update an Asset.
* Creates or updates an Asset in the Media Services account.
*
* @param resourceGroupName The name of the resource group within the Azure subscription.
* @param accountName The Media Services account name.
* @param assetName The Asset name.
* @param parameters The request parameters
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the AssetInner object
*/
|
Create or update an Asset. Creates or updates an Asset in the Media Services account
|
createOrUpdateWithServiceResponseAsync
|
{
"repo_name": "navalev/azure-sdk-for-java",
"path": "sdk/mediaservices/mgmt-v2018_06_01_preview/src/main/java/com/microsoft/azure/management/mediaservices/v2018_06_01_preview/implementation/AssetsInner.java",
"license": "mit",
"size": 64209
}
|
[
"com.microsoft.rest.ServiceResponse"
] |
import com.microsoft.rest.ServiceResponse;
|
import com.microsoft.rest.*;
|
[
"com.microsoft.rest"
] |
com.microsoft.rest;
| 1,685,309
|
@Override
public Response apisCopyApiPost(String newVersion, String apiId){
URI newVersionedApiUri;
APIDTO newVersionedApi;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
API api = APIMappingUtil.getAPIFromApiIdOrUUID(apiId, tenantDomain);
APIIdentifier apiIdentifier = api.getId();
//creates the new version
apiProvider.createNewAPIVersion(api, newVersion);
//get newly created API to return as response
APIIdentifier apiNewVersionedIdentifier =
new APIIdentifier(apiIdentifier.getProviderName(), apiIdentifier.getApiName(), newVersion);
newVersionedApi = APIMappingUtil.fromAPItoDTO(apiProvider.getAPI(apiNewVersionedIdentifier));
//This URI used to set the location header of the POST response
newVersionedApiUri =
new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + newVersionedApi.getId());
return Response.created(newVersionedApiUri).entity(newVersionedApi).build();
} catch (APIManagementException | DuplicateAPIException e) {
if (RestApiUtil.isDueToResourceAlreadyExists(e)) {
String errorMessage = "Requested new version " + newVersion + " of API " + apiId + " already exists";
RestApiUtil.handleResourceAlreadyExistsError(errorMessage, e, log);
} else if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else {
String errorMessage = "Error while copying API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location of " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
|
Response function(String newVersion, String apiId){ URI newVersionedApiUri; APIDTO newVersionedApi; try { APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider(); String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain(); API api = APIMappingUtil.getAPIFromApiIdOrUUID(apiId, tenantDomain); APIIdentifier apiIdentifier = api.getId(); apiProvider.createNewAPIVersion(api, newVersion); APIIdentifier apiNewVersionedIdentifier = new APIIdentifier(apiIdentifier.getProviderName(), apiIdentifier.getApiName(), newVersion); newVersionedApi = APIMappingUtil.fromAPItoDTO(apiProvider.getAPI(apiNewVersionedIdentifier)); newVersionedApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + newVersionedApi.getId()); return Response.created(newVersionedApiUri).entity(newVersionedApi).build(); } catch (APIManagementException DuplicateAPIException e) { if (RestApiUtil.isDueToResourceAlreadyExists(e)) { String errorMessage = STR + newVersion + STR + apiId + STR; RestApiUtil.handleResourceAlreadyExistsError(errorMessage, e, log); } else if (RestApiUtil.isDueToResourceNotFound(e) RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else { String errorMessage = STR + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } } catch (URISyntaxException e) { String errorMessage = STR + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; }
|
/**
* Copy API and create a new version of the API
*
* @param apiId API Identifier
* @param newVersion new version of the API to be created
* @return API new version
*/
|
Copy API and create a new version of the API
|
apisCopyApiPost
|
{
"repo_name": "dhanuka84/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher/src/main/java/org/wso2/carbon/apimgt/rest/api/publisher/impl/ApisApiServiceImpl.java",
"license": "apache-2.0",
"size": 50401
}
|
[
"java.net.URISyntaxException",
"javax.ws.rs.core.Response",
"org.wso2.carbon.apimgt.api.APIManagementException",
"org.wso2.carbon.apimgt.api.APIProvider",
"org.wso2.carbon.apimgt.api.model.APIIdentifier",
"org.wso2.carbon.apimgt.api.model.DuplicateAPIException",
"org.wso2.carbon.apimgt.rest.api.publisher.utils.mappings.APIMappingUtil",
"org.wso2.carbon.apimgt.rest.api.util.RestApiConstants",
"org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil"
] |
import java.net.URISyntaxException; import javax.ws.rs.core.Response; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.APIProvider; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.DuplicateAPIException; import org.wso2.carbon.apimgt.rest.api.publisher.utils.mappings.APIMappingUtil; import org.wso2.carbon.apimgt.rest.api.util.RestApiConstants; import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil;
|
import java.net.*; import javax.ws.rs.core.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.api.model.*; import org.wso2.carbon.apimgt.rest.api.publisher.utils.mappings.*; import org.wso2.carbon.apimgt.rest.api.util.*; import org.wso2.carbon.apimgt.rest.api.util.utils.*;
|
[
"java.net",
"javax.ws",
"org.wso2.carbon"
] |
java.net; javax.ws; org.wso2.carbon;
| 295,647
|
public static boolean isPrivateInnerType(TypeElement type) {
switch (type.getNestingKind()) {
case ANONYMOUS:
case LOCAL:
return true;
case MEMBER:
return isPrivate(type) || isPrivateInnerType((TypeElement) type.getEnclosingElement());
case TOP_LEVEL:
return isPrivate(type);
}
throw new AssertionError("Unknown NestingKind");
}
|
static boolean function(TypeElement type) { switch (type.getNestingKind()) { case ANONYMOUS: case LOCAL: return true; case MEMBER: return isPrivate(type) isPrivateInnerType((TypeElement) type.getEnclosingElement()); case TOP_LEVEL: return isPrivate(type); } throw new AssertionError(STR); }
|
/**
 * Tests if this type element is private to its source file. A public type declared
* within a private type is considered private.
*/
|
Tests if this type element is private to its source file. A public type declared within a private type is considered private
|
isPrivateInnerType
|
{
"repo_name": "lukhnos/j2objc",
"path": "translator/src/main/java/com/google/devtools/j2objc/util/ElementUtil.java",
"license": "apache-2.0",
"size": 31553
}
|
[
"javax.lang.model.element.TypeElement"
] |
import javax.lang.model.element.TypeElement;
|
import javax.lang.model.element.*;
|
[
"javax.lang"
] |
javax.lang;
| 2,825,151
|
@Nonnull
public String getProperty(@Nonnull String propName) throws RemoteException;
|
String function(@Nonnull String propName) throws RemoteException;
|
/**
* Get the value of a particular property, which should be listed in the
* {@link #listProperties()} method.
*
* @param propName
* The name of the property to read.
* @return The value of the property.
* @throws RemoteException
* If anything goes wrong with the communication.
*/
|
Get the value of a particular property, which should be listed in the <code>#listProperties()</code> method
|
getProperty
|
{
"repo_name": "taverna/taverna-server",
"path": "server-runinterface/src/main/java/org/taverna/server/localworker/remote/RemoteListener.java",
"license": "lgpl-2.1",
"size": 2126
}
|
[
"java.rmi.RemoteException",
"javax.annotation.Nonnull"
] |
import java.rmi.RemoteException; import javax.annotation.Nonnull;
|
import java.rmi.*; import javax.annotation.*;
|
[
"java.rmi",
"javax.annotation"
] |
java.rmi; javax.annotation;
| 1,597,480
|
@Override
public void walkRelativeFileTree(Path path, FileVisitor<Path> fileVisitor) throws IOException {
Preconditions.checkArgument(!fileContents.containsKey(path),
"FakeProjectFilesystem only supports walkRelativeFileTree over directories.");
for (Path file : getFilesUnderDir(path)) {
fileVisitor.visitFile(file, DEFAULT_FILE_ATTRIBUTES);
}
}
|
void function(Path path, FileVisitor<Path> fileVisitor) throws IOException { Preconditions.checkArgument(!fileContents.containsKey(path), STR); for (Path file : getFilesUnderDir(path)) { fileVisitor.visitFile(file, DEFAULT_FILE_ATTRIBUTES); } }
|
/**
* TODO(natthu): (1) Also traverse the directories. (2) Do not ignore return value of
* {@code fileVisitor}.
*/
|
TODO(natthu): (1) Also traverse the directories. (2) Do not ignore return value of fileVisitor
|
walkRelativeFileTree
|
{
"repo_name": "mread/buck",
"path": "test/com/facebook/buck/testutil/FakeProjectFilesystem.java",
"license": "apache-2.0",
"size": 12619
}
|
[
"com.google.common.base.Preconditions",
"java.io.IOException",
"java.nio.file.FileVisitor",
"java.nio.file.Path"
] |
import com.google.common.base.Preconditions; import java.io.IOException; import java.nio.file.FileVisitor; import java.nio.file.Path;
|
import com.google.common.base.*; import java.io.*; import java.nio.file.*;
|
[
"com.google.common",
"java.io",
"java.nio"
] |
com.google.common; java.io; java.nio;
| 1,604,586
|
public void testAbsMathContextPos() {
String a = "123809648392384754573567356745735.63567890295784902768787678287E+21";
BigDecimal aNumber = new BigDecimal(a);
int precision = 41;
RoundingMode rm = RoundingMode.HALF_EVEN;
MathContext mc = new MathContext(precision, rm);
String result = "1.2380964839238475457356735674573563567890E+53";
int resScale = -13;
BigDecimal res = aNumber.abs(mc);
assertEquals("incorrect value", result, res.toString());
assertEquals("incorrect scale", resScale, res.scale());
}
|
void function() { String a = STR; BigDecimal aNumber = new BigDecimal(a); int precision = 41; RoundingMode rm = RoundingMode.HALF_EVEN; MathContext mc = new MathContext(precision, rm); String result = STR; int resScale = -13; BigDecimal res = aNumber.abs(mc); assertEquals(STR, result, res.toString()); assertEquals(STR, resScale, res.scale()); }
|
/**
* Abs(MathContext) of a positive BigDecimal
*/
|
Abs(MathContext) of a positive BigDecimal
|
testAbsMathContextPos
|
{
"repo_name": "shannah/cn1",
"path": "Ports/iOSPort/xmlvm/apache-harmony-6.0-src-r991881/classlib/modules/math/src/test/java/org/apache/harmony/tests/java/math/BigDecimalCompareTest.java",
"license": "gpl-2.0",
"size": 20607
}
|
[
"java.math.BigDecimal",
"java.math.MathContext",
"java.math.RoundingMode"
] |
import java.math.BigDecimal; import java.math.MathContext; import java.math.RoundingMode;
|
import java.math.*;
|
[
"java.math"
] |
java.math;
| 2,791,703
|
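The test record above exercises BigDecimal.abs(MathContext), which returns the absolute value rounded to the precision of the supplied context. A compact standalone sketch with made-up numbers:

import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;

public class AbsMathContextSketch {
    public static void main(String[] args) {
        BigDecimal value = new BigDecimal("-123.456789");
        // Round the absolute value to 5 significant digits using HALF_EVEN.
        MathContext mc = new MathContext(5, RoundingMode.HALF_EVEN);
        BigDecimal result = value.abs(mc);
        System.out.println(result);         // 123.46
        System.out.println(result.scale()); // 2
    }
}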
@Test
public void TestCommandLineXmlProcessorCalabash_setPipeline() throws URISyntaxException {
URI expected = new URI("/xproc/identity/copy_verbatim.xpl");
// Attempt to set the pipeline
processor.setPipeline(expected);
// Check that the active value has changed, as specified.
assertEquals(expected, processor.getPipeline());
}
|
void function() throws URISyntaxException { URI expected = new URI(STR); processor.setPipeline(expected); assertEquals(expected, processor.getPipeline()); }
|
/**
* Check that it's possible to set an input XML document.
* @throws URISyntaxException
*/
|
Check that it's possible to set an input XML document
|
TestCommandLineXmlProcessorCalabash_setPipeline
|
{
"repo_name": "martian-a/gourd",
"path": "src/test/java/com/kaikoda/gourd/TestCommandLineXmlProcessorCalabash.java",
"license": "gpl-3.0",
"size": 20598
}
|
[
"java.net.URISyntaxException",
"org.junit.Assert"
] |
import java.net.URISyntaxException; import org.junit.Assert;
|
import java.net.*; import org.junit.*;
|
[
"java.net",
"org.junit"
] |
java.net; org.junit;
| 2,701,632
|
public String getScreenshotAsBase64() throws Exception {
Class<? extends WebDriver> driverClass = driver.getClass();
// Check if the driver is implementing the interface TakesScreenshot
if(TakesScreenshot.class.isAssignableFrom(driverClass)) {
TakesScreenshot screenshotDriver = (TakesScreenshot) driver;
String screenFile = screenshotDriver.getScreenshotAs(OutputType.BASE64);
return screenFile;
} else {
throw new Exception("Can not take screenshots with driver '" + driverClass.getName() + "'");
}
}
public static class ElementNotFoundException extends Exception {
public ElementNotFoundException(String message) {
super(message);
}
}
|
String function() throws Exception { Class<? extends WebDriver> driverClass = driver.getClass(); if(TakesScreenshot.class.isAssignableFrom(driverClass)) { TakesScreenshot screenshotDriver = (TakesScreenshot) driver; String screenFile = screenshotDriver.getScreenshotAs(OutputType.BASE64); return screenFile; } else { throw new Exception(STR + driverClass.getName() + "'"); } } public static class ElementNotFoundException extends Exception { public ElementNotFoundException(String message) { super(message); } }
|
/**
* Generate a screenshot of the current view of the WebDriver and get it as a Base64 encoded string
*
* @return
* @throws Exception
*/
|
Generate a screenshot of the current view of the WebDriver and get it as a Base64 encoded string
|
getScreenshotAsBase64
|
{
"repo_name": "mbordas/qualify",
"path": "src/main/java/qualify/tools/TestToolSelenium.java",
"license": "bsd-3-clause",
"size": 25061
}
|
[
"org.openqa.selenium.OutputType",
"org.openqa.selenium.TakesScreenshot",
"org.openqa.selenium.WebDriver"
] |
import org.openqa.selenium.OutputType; import org.openqa.selenium.TakesScreenshot; import org.openqa.selenium.WebDriver;
|
import org.openqa.selenium.*;
|
[
"org.openqa.selenium"
] |
org.openqa.selenium;
| 2,157,540
|
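The getScreenshotAsBase64 record above works with any WebDriver that implements TakesScreenshot. A minimal usage sketch follows; the ChromeDriver and the URL are assumptions for illustration and require a chromedriver binary on the PATH.

import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;

public class ScreenshotSketch {
    public static void main(String[] args) {
        WebDriver driver = new ChromeDriver(); // assumes chromedriver is installed
        try {
            driver.get("https://example.com");
            if (driver instanceof TakesScreenshot) {
                // Same call as in the record above: capture the current view as Base64.
                String base64 = ((TakesScreenshot) driver).getScreenshotAs(OutputType.BASE64);
                System.out.println("screenshot length: " + base64.length());
            }
        } finally {
            driver.quit();
        }
    }
}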
public void fatalError(TransformerException exception)
throws TransformerException {
if (exception != null) throw(exception);
}
|
void function(TransformerException exception) throws TransformerException { if (exception != null) throw(exception); }
|
/**
* <p>Handle an error notification from the original TRAX transformer.</p>
*
* <p>This method simply throws the same exception passed in as a parameter.</p>
*
* @see ErrorListener#fatalError(TransformerException)
*/
|
Handle an error notification from the original TRAX transformer. This method simply throws the same exception passed in as a parameter
|
fatalError
|
{
"repo_name": "apache/cocoon",
"path": "blocks/cocoon-scratchpad/cocoon-scratchpad-impl/src/main/java/org/apache/cocoon/transformation/ErrorAwareTraxTransformer.java",
"license": "apache-2.0",
"size": 3144
}
|
[
"javax.xml.transform.TransformerException"
] |
import javax.xml.transform.TransformerException;
|
import javax.xml.transform.*;
|
[
"javax.xml"
] |
javax.xml;
| 267,214
|
public Printer printingEnumsAsInts() {
checkUnsetPrintingEnumsAsInts();
return new Printer(
registry,
alwaysOutputDefaultValueFields,
Collections.<FieldDescriptor>emptySet(),
preservingProtoFieldNames,
omittingInsignificantWhitespace,
true,
sortingMapKeys);
}
|
Printer function() { checkUnsetPrintingEnumsAsInts(); return new Printer( registry, alwaysOutputDefaultValueFields, Collections.<FieldDescriptor>emptySet(), preservingProtoFieldNames, omittingInsignificantWhitespace, true, sortingMapKeys); }
|
/**
* Creates a new {@link Printer} that will print enum field values as integers instead of as
* string.
* The new Printer clones all other configurations from the current
* {@link Printer}.
*/
|
Creates a new <code>Printer</code> that will print enum field values as integers instead of as string. The new Printer clones all other configurations from the current <code>Printer</code>
|
printingEnumsAsInts
|
{
"repo_name": "endlessm/chromium-browser",
"path": "third_party/protobuf/java/util/src/main/java/com/google/protobuf/util/JsonFormat.java",
"license": "bsd-3-clause",
"size": 74123
}
|
[
"com.google.protobuf.Descriptors",
"java.util.Collections"
] |
import com.google.protobuf.Descriptors; import java.util.Collections;
|
import com.google.protobuf.*; import java.util.*;
|
[
"com.google.protobuf",
"java.util"
] |
com.google.protobuf; java.util;
| 2,355,744
|
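The printingEnumsAsInts record above configures a protobuf JSON Printer to emit enum values as numbers rather than names. The sketch below contrasts it with the default printer, using the bundled well-known type com.google.protobuf.Field as a convenient message that has an enum field; it assumes protobuf-java and protobuf-java-util are on the classpath.

import com.google.protobuf.Field;
import com.google.protobuf.util.JsonFormat;

public class EnumsAsIntsSketch {
    public static void main(String[] args) throws Exception {
        Field field = Field.newBuilder()
                .setName("id")
                .setNumber(1)
                .setKind(Field.Kind.TYPE_INT64)
                .build();
        // Default printer renders the enum by name ("TYPE_INT64") ...
        System.out.println(JsonFormat.printer().print(field));
        // ... the enums-as-ints printer renders its numeric value instead.
        System.out.println(JsonFormat.printer().printingEnumsAsInts().print(field));
    }
}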
public Board getBoard() {
return board;
}
|
Board function() { return board; }
|
/**
* Get board object
*
* @return Board object
*/
|
Get board object
|
getBoard
|
{
"repo_name": "lkuza2/ECE497",
"path": "hw07/hw07/src/main/java/com/ece497/hw07/util/BoardUtil.java",
"license": "gpl-3.0",
"size": 1809
}
|
[
"io.silverspoon.bulldog.core.platform.Board"
] |
import io.silverspoon.bulldog.core.platform.Board;
|
import io.silverspoon.bulldog.core.platform.*;
|
[
"io.silverspoon.bulldog"
] |
io.silverspoon.bulldog;
| 2,733,618
|
public Calendar getCalendar() {
return _calendar;
}
|
Calendar function() { return _calendar; }
|
/**
* Gets the holiday calendar.
* @return The holiday calendar
*/
|
Gets the holiday calendar
|
getCalendar
|
{
"repo_name": "McLeodMoores/starling",
"path": "projects/analytics/src/main/java/com/opengamma/analytics/financial/commodity/multicurvecommodity/definition/CouponCommodityDefinition.java",
"license": "apache-2.0",
"size": 5264
}
|
[
"com.opengamma.financial.convention.calendar.Calendar"
] |
import com.opengamma.financial.convention.calendar.Calendar;
|
import com.opengamma.financial.convention.calendar.*;
|
[
"com.opengamma.financial"
] |
com.opengamma.financial;
| 2,065,670
|
public Color getColor() {
return this.color;
}
|
Color function() { return this.color; }
|
/**
* Return the icon's color. The color is used only if the icon is opaque.
*/
|
Return the icon's color. The color is used only if the icon is opaque
|
getColor
|
{
"repo_name": "bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs",
"path": "utils/eclipselink.utils.workbench/uitools/source/org/eclipse/persistence/tools/workbench/uitools/swing/ArrowIcon.java",
"license": "epl-1.0",
"size": 12681
}
|
[
"java.awt.Color"
] |
import java.awt.Color;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 2,690,079
|
@Test
public void testRemoteMultiKeyExecutionWithCollectorNoResult_byName() throws Exception {
VM accessor = getHost(0).getVM(3);
VM datastore0 = getHost(0).getVM(0);
VM datastore1 = getHost(0).getVM(1);
VM datastore2 = getHost(0).getVM(2);
accessor.invoke(() -> {
createPartitionedRegion(regionName, 0, 0);
});
datastore0.invoke(() -> {
createPartitionedRegion(regionName, 10, 0);
FunctionService.registerFunction(new TestFunction(false, TEST_FUNCTION7));
});
datastore1.invoke(() -> {
createPartitionedRegion(regionName, 10, 0);
FunctionService.registerFunction(new TestFunction(false, TEST_FUNCTION7));
});
datastore2.invoke(() -> {
createPartitionedRegion(regionName, 10, 0);
FunctionService.registerFunction(new TestFunction(false, TEST_FUNCTION7));
});
accessor.invoke(() -> {
PartitionedRegion pr = getPartitionedRegion(regionName);
Set<String> keySet = new HashSet<>();
for (int i = pr.getTotalNumberOfBuckets() * 2; i > 0; i--) {
keySet.add(STRING_KEY + i);
}
int valueIndex = 0;
for (String key : keySet) {
int value = valueIndex++;
pr.put(key, value);
}
Function<Object> function = new TestFunction<>(false, TEST_FUNCTION7);
FunctionService.registerFunction(function);
Execution<Boolean, Object, List<Object>> dataSet =
FunctionService.onRegion(pr).withCollector(new CustomResultCollector());
ResultCollector<Object, List<Object>> resultCollector =
dataSet.withFilter(keySet).setArguments(true).execute(function.getId());
assertThatThrownBy(resultCollector::getResult).isInstanceOf(FunctionException.class)
.hasMessageStartingWith(
String.format("Cannot %s result as the Function#hasResult() is false",
"return any"));
});
}
|
void function() throws Exception { VM accessor = getHost(0).getVM(3); VM datastore0 = getHost(0).getVM(0); VM datastore1 = getHost(0).getVM(1); VM datastore2 = getHost(0).getVM(2); accessor.invoke(() -> { createPartitionedRegion(regionName, 0, 0); }); datastore0.invoke(() -> { createPartitionedRegion(regionName, 10, 0); FunctionService.registerFunction(new TestFunction(false, TEST_FUNCTION7)); }); datastore1.invoke(() -> { createPartitionedRegion(regionName, 10, 0); FunctionService.registerFunction(new TestFunction(false, TEST_FUNCTION7)); }); datastore2.invoke(() -> { createPartitionedRegion(regionName, 10, 0); FunctionService.registerFunction(new TestFunction(false, TEST_FUNCTION7)); }); accessor.invoke(() -> { PartitionedRegion pr = getPartitionedRegion(regionName); Set<String> keySet = new HashSet<>(); for (int i = pr.getTotalNumberOfBuckets() * 2; i > 0; i--) { keySet.add(STRING_KEY + i); } int valueIndex = 0; for (String key : keySet) { int value = valueIndex++; pr.put(key, value); } Function<Object> function = new TestFunction<>(false, TEST_FUNCTION7); FunctionService.registerFunction(function); Execution<Boolean, Object, List<Object>> dataSet = FunctionService.onRegion(pr).withCollector(new CustomResultCollector()); ResultCollector<Object, List<Object>> resultCollector = dataSet.withFilter(keySet).setArguments(true).execute(function.getId()); assertThatThrownBy(resultCollector::getResult).isInstanceOf(FunctionException.class) .hasMessageStartingWith( String.format(STR, STR)); }); }
|
/**
* Test multi-key remote execution by a pure accessor which doesn't have the function factory
* present. ResultCollector = CustomResultCollector haveResults = false;
*/
|
Test multi-key remote execution by a pure accessor which doesn't have the function factory present. ResultCollector = CustomResultCollector haveResults = false
|
testRemoteMultiKeyExecutionWithCollectorNoResult_byName
|
{
"repo_name": "jdeppe-pivotal/geode",
"path": "geode-core/src/distributedTest/java/org/apache/geode/internal/cache/execute/PRFunctionExecutionDUnitTest.java",
"license": "apache-2.0",
"size": 88505
}
|
[
"java.util.HashSet",
"java.util.List",
"java.util.Set",
"org.apache.geode.cache.execute.Execution",
"org.apache.geode.cache.execute.Function",
"org.apache.geode.cache.execute.FunctionException",
"org.apache.geode.cache.execute.FunctionService",
"org.apache.geode.cache.execute.ResultCollector",
"org.apache.geode.internal.cache.PartitionedRegion",
"org.apache.geode.internal.cache.functions.TestFunction",
"org.apache.geode.test.dunit.Host",
"org.assertj.core.api.Assertions"
] |
import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.geode.cache.execute.Execution; import org.apache.geode.cache.execute.Function; import org.apache.geode.cache.execute.FunctionException; import org.apache.geode.cache.execute.FunctionService; import org.apache.geode.cache.execute.ResultCollector; import org.apache.geode.internal.cache.PartitionedRegion; import org.apache.geode.internal.cache.functions.TestFunction; import org.apache.geode.test.dunit.Host; import org.assertj.core.api.Assertions;
|
import java.util.*; import org.apache.geode.cache.execute.*; import org.apache.geode.internal.cache.*; import org.apache.geode.internal.cache.functions.*; import org.apache.geode.test.dunit.*; import org.assertj.core.api.*;
|
[
"java.util",
"org.apache.geode",
"org.assertj.core"
] |
java.util; org.apache.geode; org.assertj.core;
| 609,719
|
public PredictionModel getTrainedPredictionModel(String metricName) {
return trainedPredictionModel.get(metricName);
}
|
PredictionModel function(String metricName) { return trainedPredictionModel.get(metricName); }
|
/**
* Returns the trained prediction model.
*/
|
Returns the trained prediction model
|
getTrainedPredictionModel
|
{
"repo_name": "sajavadi/pinot",
"path": "thirdeye/thirdeye-pinot/src/main/java/com/linkedin/thirdeye/anomalydetection/context/AnomalyDetectionContext.java",
"license": "apache-2.0",
"size": 5584
}
|
[
"com.linkedin.thirdeye.anomalydetection.model.prediction.PredictionModel"
] |
import com.linkedin.thirdeye.anomalydetection.model.prediction.PredictionModel;
|
import com.linkedin.thirdeye.anomalydetection.model.prediction.*;
|
[
"com.linkedin.thirdeye"
] |
com.linkedin.thirdeye;
| 663,912
|
public void onBindViewHolder(final AllAppsGridAdapter.ViewHolder holder, final AppInfo item) { }
|
public void onBindViewHolder(final AllAppsGridAdapter.ViewHolder holder, final AppInfo item) { }
|
/**
* Called when a view holder is created for a remote app.
* @param holder remote view holder.
* @param viewType specific type of view holder.
*/
|
Called when a view holder is created for a remote app
|
onCreateViewHolder
|
{
"repo_name": "lcg833/Trebuchet",
"path": "Trebuchet/src/main/java/com/android/launcher3/RemoteFolderManager.java",
"license": "gpl-3.0",
"size": 5032
}
|
[
"com.android.launcher3.allapps.AllAppsGridAdapter"
] |
import com.android.launcher3.allapps.AllAppsGridAdapter;
|
import com.android.launcher3.allapps.*;
|
[
"com.android.launcher3"
] |
com.android.launcher3;
| 1,462,680
|
@Override
public void onServiceDisconnected(ComponentName className) {
}
};
|
void function(ComponentName className) { } };
|
/**
 * Called when the service is unbound
*
* @param className The component name
*/
|
Called when the service is unbound
|
onServiceDisconnected
|
{
"repo_name": "s20121035/rk3288_android5.1_repo",
"path": "packages/apps/FMRadio/src/com/android/fmradio/FmFavoriteActivity.java",
"license": "gpl-3.0",
"size": 22164
}
|
[
"android.content.ComponentName"
] |
import android.content.ComponentName;
|
import android.content.*;
|
[
"android.content"
] |
android.content;
| 1,784,089
|
EClass getVersionSpec();
|
EClass getVersionSpec();
|
/**
* Returns the meta object for class '{@link org.eclipse.emf.emfstore.internal.server.model.versioning.VersionSpec
* <em>Version Spec</em>}'.
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @return the meta object for class '<em>Version Spec</em>'.
* @see org.eclipse.emf.emfstore.internal.server.model.versioning.VersionSpec
* @generated
*/
|
Returns the meta object for class '<code>org.eclipse.emf.emfstore.internal.server.model.versioning.VersionSpec Version Spec</code>'.
|
getVersionSpec
|
{
"repo_name": "edgarmueller/emfstore-rest",
"path": "bundles/org.eclipse.emf.emfstore.server.model/src/org/eclipse/emf/emfstore/internal/server/model/versioning/VersioningPackage.java",
"license": "epl-1.0",
"size": 84798
}
|
[
"org.eclipse.emf.ecore.EClass"
] |
import org.eclipse.emf.ecore.EClass;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,308,477
|
private void setupPaints() {
barPaint.setColor(barColor);
barPaint.setAntiAlias(true);
barPaint.setStyle(Style.STROKE);
barPaint.setStrokeWidth(barWidth);
rimPaint.setColor(rimColor);
rimPaint.setAntiAlias(true);
rimPaint.setStyle(Style.STROKE);
rimPaint.setStrokeWidth(rimWidth);
}
|
void function() { barPaint.setColor(barColor); barPaint.setAntiAlias(true); barPaint.setStyle(Style.STROKE); barPaint.setStrokeWidth(barWidth); rimPaint.setColor(rimColor); rimPaint.setAntiAlias(true); rimPaint.setStyle(Style.STROKE); rimPaint.setStrokeWidth(rimWidth); }
|
/**
* Set the properties of the paints we're using to
* draw the progress wheel
*/
|
Set the properties of the paints we're using to draw the progress wheel
|
setupPaints
|
{
"repo_name": "XHidamariSketchX/NHentai-android",
"path": "app/src/main/java/moe/feng/nhentai/view/WheelProgressView.java",
"license": "gpl-3.0",
"size": 17438
}
|
[
"android.graphics.Paint"
] |
import android.graphics.Paint;
|
import android.graphics.*;
|
[
"android.graphics"
] |
android.graphics;
| 2,546,784
|
public Enumeration<String> attributeNames() {
// Create a new list, so that the references are copied
return Collections.enumeration(new LinkedList<String>(values.keySet()));
}
|
Enumeration<String> function() { return Collections.enumeration(new LinkedList<String>(values.keySet())); }
|
/**
* <p>
* Enumerates the attribute names.
* </p>
*
* @return An instance of {@link Enumeration}.
*/
|
Enumerates the attribute names.
|
attributeNames
|
{
"repo_name": "shannah/cn1",
"path": "Ports/iOSPort/xmlvm/apache-harmony-6.0-src-r991881/classlib/modules/beans/src/main/java/java/beans/FeatureDescriptor.java",
"license": "gpl-2.0",
"size": 6182
}
|
[
"java.util.Collections",
"java.util.Enumeration",
"java.util.LinkedList"
] |
import java.util.Collections; import java.util.Enumeration; import java.util.LinkedList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,888,212
|
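The attributeNames record above copies the key set into a new list before wrapping it with Collections.enumeration, so the caller gets a snapshot that is unaffected by later changes to the underlying map. A small sketch of that pattern:

import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;

public class EnumerationSnapshotSketch {
    public static void main(String[] args) {
        Map<String, Object> values = new HashMap<>();
        values.put("displayName", "Demo");
        values.put("expert", Boolean.FALSE);

        // Copy the keys first; the enumeration then stays valid even if the map changes.
        Enumeration<String> names =
                Collections.enumeration(new LinkedList<>(values.keySet()));
        values.put("hidden", Boolean.TRUE); // not visible through the snapshot

        while (names.hasMoreElements()) {
            System.out.println(names.nextElement());
        }
    }
}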
public void setExistingTags(Collection tags) {}
|
public void setExistingTags(Collection tags) {}
|
/**
* Implemented as specified by {@link Finder} I/F
* @see Finder#setResult(SecurityContext, Object)
*/
|
Implemented as specified by <code>Finder</code> I/F
|
setResult
|
{
"repo_name": "emilroz/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/agents/util/finder/QuickFinder.java",
"license": "gpl-2.0",
"size": 4860
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 670,530
|
void getRestoreInfo(CmsUUID structureId, AsyncCallback<CmsRestoreInfoBean> resultCallback);
|
void getRestoreInfo(CmsUUID structureId, AsyncCallback<CmsRestoreInfoBean> resultCallback);
|
/**
* Gets the information which is necessary for opening the 'Restore' dialog for a resource.<p>
*
* @param structureId the structure id of the resource
* @param resultCallback the callback for the result
*/
|
Gets the information which is necessary for opening the 'Restore' dialog for a resource
|
getRestoreInfo
|
{
"repo_name": "it-tavis/opencms-core",
"path": "src/org/opencms/gwt/shared/rpc/I_CmsVfsServiceAsync.java",
"license": "lgpl-2.1",
"size": 12380
}
|
[
"com.google.gwt.user.client.rpc.AsyncCallback",
"org.opencms.gwt.shared.CmsRestoreInfoBean",
"org.opencms.util.CmsUUID"
] |
import com.google.gwt.user.client.rpc.AsyncCallback; import org.opencms.gwt.shared.CmsRestoreInfoBean; import org.opencms.util.CmsUUID;
|
import com.google.gwt.user.client.rpc.*; import org.opencms.gwt.shared.*; import org.opencms.util.*;
|
[
"com.google.gwt",
"org.opencms.gwt",
"org.opencms.util"
] |
com.google.gwt; org.opencms.gwt; org.opencms.util;
| 550,504
|
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Void> deleteSqlUserDefinedFunctionAsync(
String resourceGroupName,
String accountName,
String databaseName,
String containerName,
String userDefinedFunctionName,
Context context) {
return beginDeleteSqlUserDefinedFunctionAsync(
resourceGroupName, accountName, databaseName, containerName, userDefinedFunctionName, context)
.last()
.flatMap(this.client::getLroFinalResultOrError);
}
|
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> function( String resourceGroupName, String accountName, String databaseName, String containerName, String userDefinedFunctionName, Context context) { return beginDeleteSqlUserDefinedFunctionAsync( resourceGroupName, accountName, databaseName, containerName, userDefinedFunctionName, context) .last() .flatMap(this.client::getLroFinalResultOrError); }
|
/**
* Deletes an existing Azure Cosmos DB SQL userDefinedFunction.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param accountName Cosmos DB database account name.
* @param databaseName Cosmos DB database name.
* @param containerName Cosmos DB container name.
* @param userDefinedFunctionName Cosmos DB userDefinedFunction name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
|
Deletes an existing Azure Cosmos DB SQL userDefinedFunction
|
deleteSqlUserDefinedFunctionAsync
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-cosmos/src/main/java/com/azure/resourcemanager/cosmos/implementation/SqlResourcesClientImpl.java",
"license": "mit",
"size": 547809
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.util.Context"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.util.Context;
|
import com.azure.core.annotation.*; import com.azure.core.util.*;
|
[
"com.azure.core"
] |
com.azure.core;
| 1,839,577
|
@Path("{clusterName}/widget_layouts")
public WidgetLayoutService getWidgetLayoutService(@Context javax.ws.rs.core.Request request,
@PathParam ("clusterName") String clusterName) {
return new WidgetLayoutService(clusterName);
}
|
@Path(STR) WidgetLayoutService function(@Context javax.ws.rs.core.Request request, @PathParam (STR) String clusterName) { return new WidgetLayoutService(clusterName); }
|
/**
* Gets the widget layout service
*/
|
Gets the widget layout service
|
getWidgetLayoutService
|
{
"repo_name": "zouzhberk/ambaridemo",
"path": "demo-server/src/main/java/org/apache/ambari/server/api/services/ClusterService.java",
"license": "apache-2.0",
"size": 23921
}
|
[
"javax.ws.rs.Path",
"javax.ws.rs.PathParam",
"javax.ws.rs.core.Context"
] |
import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.core.Context;
|
import javax.ws.rs.*; import javax.ws.rs.core.*;
|
[
"javax.ws"
] |
javax.ws;
| 2,841,108
|
public static void saveToFile(String configFile) {
try {
Hashtable <String,Object> hash = (Hashtable) _registry.get(HASHNAME);
BufferedWriter fileWriter = new BufferedWriter(new FileWriter(configFile, false));
Set<Map.Entry<String,Object>> set = hash.entrySet();
Iterator <Map.Entry<String,Object>> it = set.iterator();
Map.Entry<String,Object> entry;
while (it.hasNext()) {
entry = it.next();
fileWriter.write(entry.getKey());
fileWriter.write(" = ");
fileWriter.write(entry.getValue().toString());
fileWriter.newLine();
}
fileWriter.flush();
fileWriter.close();
} catch (IOException ex) {
Logger.getLogger(ReadWriteConfiguration.class.getName()).log(Level.SEVERE, null, ex);
}
}
|
static void function(String configFile) { try { Hashtable <String,Object> hash = (Hashtable) _registry.get(HASHNAME); BufferedWriter fileWriter = new BufferedWriter(new FileWriter(configFile, false)); Set<Map.Entry<String,Object>> set = hash.entrySet(); Iterator <Map.Entry<String,Object>> it = set.iterator(); Map.Entry<String,Object> entry; while (it.hasNext()) { entry = it.next(); fileWriter.write(entry.getKey()); fileWriter.write(STR); fileWriter.write(entry.getValue().toString()); fileWriter.newLine(); } fileWriter.flush(); fileWriter.close(); } catch (IOException ex) { Logger.getLogger(ReadWriteConfiguration.class.getName()).log(Level.SEVERE, null, ex); } }
|
/**
 * Saves the configuration to a file
* @param configFile
*/
|
Saves the configuration to a file
|
saveToFile
|
{
"repo_name": "genomeartist/genomeartist",
"path": "sources_java/guiTransposon/src/ro/genomeartist/gui/utils/ReadWriteConfiguration.java",
"license": "gpl-3.0",
"size": 6463
}
|
[
"java.io.BufferedWriter",
"java.io.FileWriter",
"java.io.IOException",
"java.util.Hashtable",
"java.util.Iterator",
"java.util.Map",
"java.util.Set",
"java.util.logging.Level",
"java.util.logging.Logger"
] |
import java.io.BufferedWriter; import java.io.FileWriter; import java.io.IOException; import java.util.Hashtable; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger;
|
import java.io.*; import java.util.*; import java.util.logging.*;
|
[
"java.io",
"java.util"
] |
java.io; java.util;
| 1,542,699
|
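The saveToFile record above writes one "key = value" line per entry of a hashtable kept in a registry. The simplified sketch below reproduces the idea with a plain Map and try-with-resources instead of manual flush/close; the file name and keys are placeholders.

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

public class ConfigWriterSketch {
    public static void main(String[] args) {
        Map<String, Object> config = new LinkedHashMap<>();
        config.put("threads", 4);
        config.put("outputDir", "/tmp/results");

        // try-with-resources closes the writer even if a write fails.
        try (BufferedWriter writer = new BufferedWriter(new FileWriter("demo.conf", false))) {
            for (Map.Entry<String, Object> entry : config.entrySet()) {
                writer.write(entry.getKey());
                writer.write(" = ");
                writer.write(entry.getValue().toString());
                writer.newLine();
            }
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }
}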
public final void setDrawTime(DrawTimeCallback drawTimeCallback) {
DRAW_TIME_PROPERTY_HANDLER.setCallback(this, AnnotationPlugin.ID, drawTimeCallback, drawTimeCallbackProxy.getProxy());
}
|
final void function(DrawTimeCallback drawTimeCallback) { DRAW_TIME_PROPERTY_HANDLER.setCallback(this, AnnotationPlugin.ID, drawTimeCallback, drawTimeCallbackProxy.getProxy()); }
|
/**
* Sets the callback to set the draw time which defines when the annotations are drawn.
*
* @param drawTimeCallback to set the draw time which defines when the annotations are drawn
*/
|
Sets the callback to set the draw time which defines when the annotations are drawn
|
setDrawTime
|
{
"repo_name": "pepstock-org/Charba",
"path": "src/org/pepstock/charba/client/annotation/AbstractAnnotation.java",
"license": "apache-2.0",
"size": 41191
}
|
[
"org.pepstock.charba.client.annotation.callbacks.DrawTimeCallback"
] |
import org.pepstock.charba.client.annotation.callbacks.DrawTimeCallback;
|
import org.pepstock.charba.client.annotation.callbacks.*;
|
[
"org.pepstock.charba"
] |
org.pepstock.charba;
| 1,030,171
|
public boolean isAvailable() {
return mSurface != null;
}
/**
* <p>Start editing the pixels in the surface. The returned Canvas can be used
* to draw into the surface's bitmap. A null is returned if the surface has
* not been created or otherwise cannot be edited. You will usually need
* to implement
* {@link SurfaceTextureListener#onSurfaceTextureAvailable(android.graphics.SurfaceTexture, int, int)}
|
boolean function() { return mSurface != null; } /** * <p>Start editing the pixels in the surface. The returned Canvas can be used * to draw into the surface's bitmap. A null is returned if the surface has * not been created or otherwise cannot be edited. You will usually need * to implement * {@link SurfaceTextureListener#onSurfaceTextureAvailable(android.graphics.SurfaceTexture, int, int)}
|
/**
* Returns true if the {@link SurfaceTexture} associated with this
* TextureView is available for rendering. When this method returns
* true, {@link #getSurfaceTexture()} returns a valid surface texture.
*/
|
Returns true if the <code>SurfaceTexture</code> associated with this TextureView is available for rendering. When this method returns true, <code>#getSurfaceTexture()</code> returns a valid surface texture
|
isAvailable
|
{
"repo_name": "lynnlyc/for-honeynet-reviewers",
"path": "CallbackDroid/android-environment/src/base/core/java/android/view/TextureView.java",
"license": "gpl-3.0",
"size": 23903
}
|
[
"android.graphics.Canvas",
"android.graphics.SurfaceTexture"
] |
import android.graphics.Canvas; import android.graphics.SurfaceTexture;
|
import android.graphics.*;
|
[
"android.graphics"
] |
android.graphics;
| 1,533,940
|
@Test
public void testMutateEdges() {
for (Class<? extends OutEdges> edgesClass : edgesClasses) {
testMutateEdgesClass(edgesClass);
}
}
|
void function() { for (Class<? extends OutEdges> edgesClass : edgesClasses) { testMutateEdgesClass(edgesClass); } }
|
/**
* Test in-place edge mutations via the iterable returned by {@link
* Vertex#getMutableEdges()}.
*/
|
Test in-place edge mutations via the iterable returned by <code>Vertex#getMutableEdges()</code>
|
testMutateEdges
|
{
"repo_name": "zfighter/giraph-research",
"path": "giraph-core/target/munged/test/org/apache/giraph/graph/TestVertexAndEdges.java",
"license": "apache-2.0",
"size": 19429
}
|
[
"org.apache.giraph.edge.OutEdges"
] |
import org.apache.giraph.edge.OutEdges;
|
import org.apache.giraph.edge.*;
|
[
"org.apache.giraph"
] |
org.apache.giraph;
| 697,449
|
ImmutableCollection<String> getAttributeNames();
|
ImmutableCollection<String> getAttributeNames();
|
/**
 * Returns the names of all attributes of this context.
 * @return the names of all attributes of this context.
*/
|
Returns the names of all attributes of this context
|
getAttributeNames
|
{
"repo_name": "gkzhong/checkstyle",
"path": "src/checkstyle/com/puppycrawl/tools/checkstyle/api/Context.java",
"license": "lgpl-2.1",
"size": 1829
}
|
[
"com.google.common.collect.ImmutableCollection"
] |
import com.google.common.collect.ImmutableCollection;
|
import com.google.common.collect.*;
|
[
"com.google.common"
] |
com.google.common;
| 593,307
|
public static void broadcastConnectFailure(Context context, ConnectionResult connectionResult, int resolutionType) {
Intent intent = new Intent(ACTION_API_CONNECT_FAILURE);
intent.putExtra(EXTRA_CONNECTION_RESULT, connectionResult);
intent.putExtra(EXTRA_RESOLUTION_TYPE, resolutionType);
LocalBroadcastManager.getInstance(context.getApplicationContext()).sendBroadcast(intent);
}
public PlacesBroadcastReceiver(Context context, PlacesBroadcastListener listener) {
applicationContext = context.getApplicationContext();
placesBroadcastListenerWeakRef = new WeakReference<>(listener);
//create intent filter
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction(ACTION_GET_PLACE_BY_ID_SUCCESS);
intentFilter.addAction(ACTION_GET_PLACE_BY_ID_FAILURE);
//inform the local broadcast manager that we are interested in this intent filter
LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(applicationContext);
localBroadcastManager.registerReceiver(this, intentFilter);
}
public PlacesBroadcastReceiver(Context context, PlacesConnectionBroadcastListener listener) {
applicationContext = context.getApplicationContext();
connectionBroadcastListenerWeakRef = new WeakReference<>(listener);
//create intent filter
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction(ACTION_API_CONNECT_SUCCESS);
intentFilter.addAction(ACTION_API_CONNECT_FAILURE);
//inform the local broadcast manager that we are interested in this intent filter
LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(applicationContext);
localBroadcastManager.registerReceiver(this, intentFilter);
}
|
static void function(Context context, ConnectionResult connectionResult, int resolutionType) { Intent intent = new Intent(ACTION_API_CONNECT_FAILURE); intent.putExtra(EXTRA_CONNECTION_RESULT, connectionResult); intent.putExtra(EXTRA_RESOLUTION_TYPE, resolutionType); LocalBroadcastManager.getInstance(context.getApplicationContext()).sendBroadcast(intent); } public PlacesBroadcastReceiver(Context context, PlacesBroadcastListener listener) { applicationContext = context.getApplicationContext(); placesBroadcastListenerWeakRef = new WeakReference<>(listener); IntentFilter intentFilter = new IntentFilter(); intentFilter.addAction(ACTION_GET_PLACE_BY_ID_SUCCESS); intentFilter.addAction(ACTION_GET_PLACE_BY_ID_FAILURE); LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(applicationContext); localBroadcastManager.registerReceiver(this, intentFilter); } public PlacesBroadcastReceiver(Context context, PlacesConnectionBroadcastListener listener) { applicationContext = context.getApplicationContext(); connectionBroadcastListenerWeakRef = new WeakReference<>(listener); IntentFilter intentFilter = new IntentFilter(); intentFilter.addAction(ACTION_API_CONNECT_SUCCESS); intentFilter.addAction(ACTION_API_CONNECT_FAILURE); LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(applicationContext); localBroadcastManager.registerReceiver(this, intentFilter); }
|
/**
* Broadcast google api client connect failure
* @param context
* @param connectionResult
*/
|
Broadcast google api client connect failure
|
broadcastConnectFailure
|
{
"repo_name": "yeelin/betweenus",
"path": "app/src/main/java/com/example/yeelin/projects/betweenus/receiver/PlacesBroadcastReceiver.java",
"license": "mit",
"size": 9722
}
|
[
"android.content.Context",
"android.content.Intent",
"android.content.IntentFilter",
"android.support.v4.content.LocalBroadcastManager",
"com.google.android.gms.common.ConnectionResult",
"java.lang.ref.WeakReference"
] |
import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.support.v4.content.LocalBroadcastManager; import com.google.android.gms.common.ConnectionResult; import java.lang.ref.WeakReference;
|
import android.content.*; import android.support.v4.content.*; import com.google.android.gms.common.*; import java.lang.ref.*;
|
[
"android.content",
"android.support",
"com.google.android",
"java.lang"
] |
android.content; android.support; com.google.android; java.lang;
| 1,159,505
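A stand-alone receiver sketch for the failure broadcast sent above. The action and extra names are placeholders standing in for the class's real constants (whose string values are not shown here); the LocalBroadcastManager registration mirrors the constructors above.

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.support.v4.content.LocalBroadcastManager;
import com.google.android.gms.common.ConnectionResult;

public class ConnectFailureReceiver extends BroadcastReceiver {
    // Placeholder values; the real constants live in PlacesBroadcastReceiver.
    static final String ACTION_API_CONNECT_FAILURE = "com.example.ACTION_API_CONNECT_FAILURE";
    static final String EXTRA_CONNECTION_RESULT = "com.example.EXTRA_CONNECTION_RESULT";
    static final String EXTRA_RESOLUTION_TYPE = "com.example.EXTRA_RESOLUTION_TYPE";

    // Registers this receiver for the connect-failure action.
    public void register(Context context) {
        LocalBroadcastManager.getInstance(context.getApplicationContext())
                .registerReceiver(this, new IntentFilter(ACTION_API_CONNECT_FAILURE));
    }

    @Override
    public void onReceive(Context context, Intent intent) {
        ConnectionResult result = intent.getParcelableExtra(EXTRA_CONNECTION_RESULT);
        int resolutionType = intent.getIntExtra(EXTRA_RESOLUTION_TYPE, -1);
        // React to the failed Google API connection here (e.g. notify a listener).
    }
}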
|
public static int analyzeSimplexWords(Lexicon lex, WordSet set,
Collection<Transform> learnedTransforms, boolean doubling,
TransformInference transInf) {
// Count of split compounds
int newAnalyses = 0;
// Make the possible fillers
// If null was passed for learnedTransforms, just make filler null,
// otherwise make the filler from the learned transforms
Filler filler = learnedTransforms == null ? null : new Filler(learnedTransforms);
// Loop over each word in the appropriate set and try to analyze it
for (Word word : lex.getSetWords(set)) {
// Skip words that are compounds or are too short
if (word.isCompound() || word.length() < MIN_COMPOUND_LENGTH)
continue;
// Get the best compounding hypothesis for each word
AnalysisResult bestHyp = analyzeWord(word, filler, lex, transInf, doubling);
// If the hypothesis is good, analyze the word
if (bestHyp != null) {
// Set the word's analysis
StringBuilder analysis = new StringBuilder(bestHyp.base.analyze());
for (Transform t : bestHyp.derivingTransforms) {
analysis.append(" " + t.analyze());
}
word.setExternalAnalysis(analysis.toString());
newAnalyses++;
}
}
return newAnalyses;
}
|
static int function(Lexicon lex, WordSet set, Collection<Transform> learnedTransforms, boolean doubling, TransformInference transInf) { int newAnalyses = 0; Filler filler = learnedTransforms == null ? null : new Filler(learnedTransforms); for (Word word : lex.getSetWords(set)) { if (word.isCompound() || word.length() < MIN_COMPOUND_LENGTH) continue; AnalysisResult bestHyp = analyzeWord(word, filler, lex, transInf, doubling); if (bestHyp != null) { StringBuilder analysis = new StringBuilder(bestHyp.base.analyze()); for (Transform t : bestHyp.derivingTransforms) { analysis.append(" " + t.analyze()); } word.setExternalAnalysis(analysis.toString()); newAnalyses++; } } return newAnalyses; }
|
/**
* Perform simplex word analysis. This is an experimental feature which tries
* to segment words without using base/derived pairs, just by splitting a word
* into morphemes if it remains unmodeled at the end of learning.
* @param lex the lexicon
* @param set the word set to examine
* @param learnedTransforms the learned transforms
* @param doubling as used by scoreWord
* @param transInf the inferred relationships between transforms
* @return the number of words analyzed
*/
|
Perform simplex word analysis. This is an experimental feature which tries to segment words without using base/derived pairs, just by splitting a word into morphemes if it remains unmodeled at the end of learning
|
analyzeSimplexWords
|
{
"repo_name": "ConstantineLignos/MORSEL",
"path": "src/edu/upenn/ircs/lignos/morsel/compound/Compounding.java",
"license": "gpl-3.0",
"size": 23891
}
|
[
"edu.upenn.ircs.lignos.morsel.TransformInference",
"edu.upenn.ircs.lignos.morsel.lexicon.Lexicon",
"edu.upenn.ircs.lignos.morsel.lexicon.Word",
"edu.upenn.ircs.lignos.morsel.lexicon.WordSet",
"edu.upenn.ircs.lignos.morsel.transform.Transform",
"java.util.Collection"
] |
import edu.upenn.ircs.lignos.morsel.TransformInference; import edu.upenn.ircs.lignos.morsel.lexicon.Lexicon; import edu.upenn.ircs.lignos.morsel.lexicon.Word; import edu.upenn.ircs.lignos.morsel.lexicon.WordSet; import edu.upenn.ircs.lignos.morsel.transform.Transform; import java.util.Collection;
|
import edu.upenn.ircs.lignos.morsel.*; import edu.upenn.ircs.lignos.morsel.lexicon.*; import edu.upenn.ircs.lignos.morsel.transform.*; import java.util.*;
|
[
"edu.upenn.ircs",
"java.util"
] |
edu.upenn.ircs; java.util;
| 241,000
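A hypothetical call site for the method above. The five arguments are assumed to come from an existing MORSEL learning run; only the call itself follows the documented signature, and the wrapper class is illustrative.

import edu.upenn.ircs.lignos.morsel.TransformInference;
import edu.upenn.ircs.lignos.morsel.compound.Compounding;
import edu.upenn.ircs.lignos.morsel.lexicon.Lexicon;
import edu.upenn.ircs.lignos.morsel.lexicon.WordSet;
import edu.upenn.ircs.lignos.morsel.transform.Transform;
import java.util.Collection;

public final class SimplexAnalysisStep {
    // Runs simplex-word analysis over the given set and reports how many words were analyzed.
    static int run(Lexicon lex, WordSet set,
            Collection<Transform> learnedTransforms, TransformInference transInf) {
        // Passing null for learnedTransforms would skip filler construction, as documented above.
        return Compounding.analyzeSimplexWords(lex, set, learnedTransforms, true, transInf);
    }
}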
|
// ------------------------------------------------------------------------
static void messageAndLog(Player player, String msg) {
System.out.println(PREFIX + msg);
player.sendMessage(PREFIX + "[sent to: " + player.getName() + "] " + msg);
}
|
static void messageAndLog(Player player, String msg) { System.out.println(PREFIX + msg); player.sendMessage(PREFIX + STR + player.getName() + STR + msg); }
|
/**
* Sends the message to the given player and logs it (and the player's name)
* to console.
*
* @param player the player.
* @param msg the message to log.
*/
|
Sends the message to the given player and logs it (and the player's name) to console
|
messageAndLog
|
{
"repo_name": "NerdNu/SafeBuckets",
"path": "src/nu/nerd/SafeBuckets/SafeBuckets.java",
"license": "lgpl-3.0",
"size": 12271
}
|
[
"org.bukkit.entity.Player"
] |
import org.bukkit.entity.Player;
|
import org.bukkit.entity.*;
|
[
"org.bukkit.entity"
] |
org.bukkit.entity;
| 157,524
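A tiny usage sketch; messageAndLog is package-private, so the call assumes a class in the same plugin package, and the player reference and message text are illustrative.

import org.bukkit.entity.Player;

final class SafeBucketsUsageExample {
    // Notifies the player and logs the same line (with the player's name) to console.
    static void onLiquidMadeSafe(Player player) {
        SafeBuckets.messageAndLog(player, "The liquid you placed has been made safe.");
    }
}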
|
public void visitLiteralThrows(DetailAST literalThrows) {
for (DetailAST childAST = literalThrows.getFirstChild();
childAST != null;
childAST = childAST.getNextSibling()) {
if (childAST.getType() != TokenTypes.COMMA) {
addReferencedClassName(childAST);
}
}
}
|
void function(DetailAST literalThrows) { for (DetailAST childAST = literalThrows.getFirstChild(); childAST != null; childAST = childAST.getNextSibling()) { if (childAST.getType() != TokenTypes.COMMA) { addReferencedClassName(childAST); } } }
|
/**
* Visits throws clause and collects all exceptions we throw.
* @param literalThrows throws to process.
*/
|
Visits throws clause and collects all exceptions we throw
|
visitLiteralThrows
|
{
"repo_name": "another-dave/checkstyle",
"path": "src/main/java/com/puppycrawl/tools/checkstyle/checks/metrics/AbstractClassCouplingCheck.java",
"license": "lgpl-2.1",
"size": 10278
}
|
[
"com.puppycrawl.tools.checkstyle.api.DetailAST",
"com.puppycrawl.tools.checkstyle.api.TokenTypes"
] |
import com.puppycrawl.tools.checkstyle.api.DetailAST; import com.puppycrawl.tools.checkstyle.api.TokenTypes;
|
import com.puppycrawl.tools.checkstyle.api.*;
|
[
"com.puppycrawl.tools"
] |
com.puppycrawl.tools;
| 1,622,073
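An illustrative helper using the same first-child/next-sibling walk as the visitor above, collecting the non-comma children of a throws clause instead of recording them directly.

import com.puppycrawl.tools.checkstyle.api.DetailAST;
import com.puppycrawl.tools.checkstyle.api.TokenTypes;
import java.util.ArrayList;
import java.util.List;

final class ThrowsClauseChildren {
    // Collects every child of a LITERAL_THROWS node that is not a comma token.
    static List<DetailAST> nonCommaChildren(DetailAST literalThrows) {
        List<DetailAST> children = new ArrayList<>();
        for (DetailAST child = literalThrows.getFirstChild();
                child != null;
                child = child.getNextSibling()) {
            if (child.getType() != TokenTypes.COMMA) {
                children.add(child);
            }
        }
        return children;
    }
}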
|
public void rename(String srcParent, String srcName, String dstParent,
String dstName)
{
if (srcParent == null || srcName == null || dstParent == null || dstName == null ||
!FileNameValidator.isValid(dstName))
{
throw new AVMBadArgumentException("Illegal argument.");
}
fAVMRepository.rename(srcParent, srcName, dstParent, dstName);
}
|
void function(String srcParent, String srcName, String dstParent, String dstName) { if (srcParent == null || srcName == null || dstParent == null || dstName == null || !FileNameValidator.isValid(dstName)) { throw new AVMBadArgumentException(STR); } fAVMRepository.rename(srcParent, srcName, dstParent, dstName); }
|
/**
* Rename a node.
* @param srcParent The path to the source parent.
* @param srcName The name of the source node.
* @param dstParent The path to the destination parent.
* @param dstName The name to give the renamed node.
*/
|
Rename a node
|
rename
|
{
"repo_name": "loftuxab/community-edition-old",
"path": "projects/repository/source/java/org/alfresco/repo/avm/AVMServiceImpl.java",
"license": "lgpl-3.0",
"size": 59118
}
|
[
"org.alfresco.service.cmr.avm.AVMBadArgumentException",
"org.alfresco.util.FileNameValidator"
] |
import org.alfresco.service.cmr.avm.AVMBadArgumentException; import org.alfresco.util.FileNameValidator;
|
import org.alfresco.service.cmr.avm.*; import org.alfresco.util.*;
|
[
"org.alfresco.service",
"org.alfresco.util"
] |
org.alfresco.service; org.alfresco.util;
| 118,867
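A hypothetical call site for the rename operation above, going through the AVMService interface; the store paths and file names are illustrative.

import org.alfresco.service.cmr.avm.AVMService;

final class AvmRenameExample {
    // Renames old.html to new.html within the same AVM directory.
    static void renamePage(AVMService avmService) {
        avmService.rename("main:/www/avm_webapps/ROOT", "old.html",
                          "main:/www/avm_webapps/ROOT", "new.html");
    }
}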
|