| method (string, 13–441k chars) | clean_method (string, 7–313k chars) | doc (string, 17–17.3k chars) | comment (string, 3–1.42k chars) | method_name (string, 1–273 chars) | extra (dict) | imports (list) | imports_info (string, 19–34.8k chars) | cluster_imports_info (string, 15–3.66k chars) | libraries (list) | libraries_info (string, 6–661 chars) | id (int64, 0–2.92M) |
|---|---|---|---|---|---|---|---|---|---|---|---|
private void computeAttachmentURL(Attachment attachment, UriInfo uriInfo) {
// LinkType != null -> asset is not stored in LARS
// Therefore there should be an external URL in the attachment
if (attachment.getLinkType() != null) {
return;
}
// For assets stored in LARS, we need to compute the URL and store it in the attachment
String encodedName;
try {
encodedName = URLEncoder.encode(attachment.getName(), "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new AssertionError("This should never happen.", e);
}
String url = configuration.getRestBaseUri(uriInfo) + "assets/" + attachment.getAssetId() + "/attachments/" + attachment.get_id() + "/" + encodedName;
attachment.setUrl(url);
}
|
void function(Attachment attachment, UriInfo uriInfo) { if (attachment.getLinkType() != null) { return; } String encodedName; try { encodedName = URLEncoder.encode(attachment.getName(), "UTF-8"); } catch (UnsupportedEncodingException e) { throw new AssertionError(STR, e); } String url = configuration.getRestBaseUri(uriInfo) + STR + attachment.getAssetId() + STR + attachment.get_id() + "/" + encodedName; attachment.setUrl(url); }
|
/**
* Computes and sets the URL for an attachment if the attachment's content is stored in lars.
* <p>
* If the attachment is stored externally, the URL is not changed.
* <p>
* The start of the URL is computed from the base URL of the request, unless it's overridden in
* the server.xml.
*
* @param attachment the attachment for which to update and set the URL
* @param uriInfo the UriInfo from the current request
*/
|
Computes and sets the URL for an attachment if the attachment's content is stored in lars. If the attachment is stored externally, the URL is not changed. The start of the URL is computed from the base URL of the request, unless it's overridden in the server.xml
|
computeAttachmentURL
|
{
"repo_name": "ashleyrobertson/tool.lars",
"path": "server/src/main/java/com/ibm/ws/lars/rest/AssetServiceLayer.java",
"license": "apache-2.0",
"size": 15756
}
|
[
"com.ibm.ws.lars.rest.model.Attachment",
"java.io.UnsupportedEncodingException",
"java.net.URLEncoder",
"javax.ws.rs.core.UriInfo"
] |
import com.ibm.ws.lars.rest.model.Attachment; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import javax.ws.rs.core.UriInfo;
|
import com.ibm.ws.lars.rest.model.*; import java.io.*; import java.net.*; import javax.ws.rs.core.*;
|
[
"com.ibm.ws",
"java.io",
"java.net",
"javax.ws"
] |
com.ibm.ws; java.io; java.net; javax.ws;
| 102,588
|
@Test
public void testSubtract()
{
assertEquals(-1.0, Angle.deg(2).subtract(Angle.deg(3)).degrees(), 0.0001);
assertEquals(160.0, Angle.deg(-100).subtract(Angle.deg(100)).degrees(), 0.0001);
}
|
void function() { assertEquals(-1.0, Angle.deg(2).subtract(Angle.deg(3)).degrees(), 0.0001); assertEquals(160.0, Angle.deg(-100).subtract(Angle.deg(100)).degrees(), 0.0001); }
|
/**
* Test method for {@link hso.autonomy.util.geometry.Angle#subtract(Angle)}.
*/
|
Test method for <code>hso.autonomy.util.geometry.Angle#subtract(Angle)</code>
|
testSubtract
|
{
"repo_name": "HSOAutonomy/base",
"path": "srcTest/hso/autonomy/util/geometry/AngleTest.java",
"license": "mit",
"size": 6667
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 167,896
|
if (instance == null) {
try {
// SECURE with SSL support (trust everything)
// SSL certificate validation must be configured, otherwise a
// "javax.net.ssl.SSLPeerUnverifiedException: peer not authenticated"
// exception is thrown.
SSLContext ctx = SSLContext.getInstance("TLS");
ctx.init(new KeyManager[0], new TrustManager[]{new DefaultTrustManager()}, new SecureRandom());
SSLContext.setDefault(ctx);
// SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER:
// Don't check if host name in certificate coincides with the server
// name of the Taverna Server, otherwise the exception
// "javax.net.ssl.SSLException: hostname in certificate didn't match"
// is thrown.
SSLSocketFactory sf = new SSLSocketFactory(ctx, SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
SchemeRegistry schemeRegistry = new SchemeRegistry();
Scheme httpScheme = new Scheme("http", 8080, sf);
Scheme httpsScheme = new Scheme("https", 8443, sf);
schemeRegistry.register(httpScheme);
schemeRegistry.register(httpsScheme);
//BasicClientConnectionManager cm = new BasicClientConnectionManager(schemeRegistry);
instance = new DefaultConnectionManager(schemeRegistry);
} catch (KeyManagementException ex) {
logger.error("Key Management Exception error", ex);
} catch (NoSuchAlgorithmException ex) {
logger.error("No Such Algorithm error", ex);
}
}
return instance;
}
private DefaultConnectionManager(SchemeRegistry schemeRegistry) {
super(schemeRegistry);
}
|
if (instance == null) { try { SSLContext ctx = SSLContext.getInstance("TLS"); ctx.init(new KeyManager[0], new TrustManager[]{new DefaultTrustManager()}, new SecureRandom()); SSLContext.setDefault(ctx); SSLSocketFactory sf = new SSLSocketFactory(ctx, SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER); SchemeRegistry schemeRegistry = new SchemeRegistry(); Scheme httpScheme = new Scheme("http", 8080, sf); Scheme httpsScheme = new Scheme("https", 8443, sf); schemeRegistry.register(httpScheme); schemeRegistry.register(httpsScheme); instance = new DefaultConnectionManager(schemeRegistry); } catch (KeyManagementException ex) { logger.error(STR, ex); } catch (NoSuchAlgorithmException ex) { logger.error(STR, ex); } } return instance; } private DefaultConnectionManager(SchemeRegistry schemeRegistry) { super(schemeRegistry); }
|
/**
* Getter for singleton instance
*
* @return Singleton instance
*/
|
Getter for singleton instance
|
getInstance
|
{
"repo_name": "openpreserve/scape-tavernahadoop-demonstrator",
"path": "defaulthttp-restclient/src/main/java/eu/scape_project/tb/rest/ssl/DefaultConnectionManager.java",
"license": "apache-2.0",
"size": 2824
}
|
[
"eu.scape_project.tb.rest.ssl.DefaultTrustManager",
"java.security.KeyManagementException",
"java.security.NoSuchAlgorithmException",
"java.security.SecureRandom",
"javax.net.ssl.KeyManager",
"javax.net.ssl.SSLContext",
"javax.net.ssl.TrustManager",
"org.apache.http.conn.scheme.Scheme",
"org.apache.http.conn.scheme.SchemeRegistry",
"org.apache.http.conn.ssl.SSLSocketFactory"
] |
import eu.scape_project.tb.rest.ssl.DefaultTrustManager; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.conn.ssl.SSLSocketFactory;
|
import eu.scape_project.tb.rest.ssl.*; import java.security.*; import javax.net.ssl.*; import org.apache.http.conn.scheme.*; import org.apache.http.conn.ssl.*;
|
[
"eu.scape_project.tb",
"java.security",
"javax.net",
"org.apache.http"
] |
eu.scape_project.tb; java.security; javax.net; org.apache.http;
| 2,627,667
|
public long getStartTime(DayPartType type) {
long start = 0;
for (DayPart daypart : dayparts) {
if (type.equals(daypart.getType())) {
return start;
}
start += daypart.getLength();
}
return start;
}
|
long function(DayPartType type) { long start = 0; for (DayPart daypart : dayparts) { if (type.equals(daypart.getType())) { return start; } start += daypart.getLength(); } return start; }
|
/**
* Gets the start time of the {@link DayPart}.
* @param type The {@link DayPartType} to get the start time from.
* @return The start time.
*/
|
Gets the start time of the <code>DayPart</code>
|
getStartTime
|
{
"repo_name": "InspireNXE/TimeWarp",
"path": "src/main/java/org/inspirenxe/timewarp/world/WorldDay.java",
"license": "mit",
"size": 7879
}
|
[
"org.inspirenxe.timewarp.daypart.DayPart",
"org.inspirenxe.timewarp.daypart.DayPartType"
] |
import org.inspirenxe.timewarp.daypart.DayPart; import org.inspirenxe.timewarp.daypart.DayPartType;
|
import org.inspirenxe.timewarp.daypart.*;
|
[
"org.inspirenxe.timewarp"
] |
org.inspirenxe.timewarp;
| 52,879
|
private static PlotGroup analyzeUtility(CSVFile file,
BenchmarkDataset data,
BenchmarkUtilityMeasure measure,
BenchmarkPrivacyModel model,
BenchmarkAlgorithm algorithm,
double suppressionLimit,
double gsFactor,
double gsFactorStepSize) throws ParseException {
// Selects according rows
Selector<String[]> selectorRGR = file.getSelectorBuilder()
.field("Dataset")
.equals(data.toString())
.and()
.field("UtilityMeasure")
.equals(measure.toString())
.and()
.field("PrivacyModel")
.equals(model.toString())
.and()
.field("Algorithm")
.equals(BenchmarkAlgorithm.RECURSIVE_GLOBAL_RECODING.toString())
.and()
.begin()
.field("SuppressionLimit")
.equals(String.valueOf(suppressionLimit))
.or()
.field("SuppressionLimit")
.equals("0.05")
.or()
.field("SuppressionLimit")
.equals("0.1")
.end()
.and()
.field("gsFactor")
.equals(String.valueOf(gsFactor))
.and()
.field("gsFactorStepSize")
.equals(String.valueOf(gsFactorStepSize))
.build();
// Selects according rows
Selector<String[]> selectorFlash = file.getSelectorBuilder()
.field("Dataset")
.equals(data.toString())
.and()
.field("UtilityMeasure")
.equals(measure.toString())
.and()
.field("PrivacyModel")
.equals(model.toString())
.and()
.field("Algorithm")
.equals(BenchmarkAlgorithm.FLASH.toString())
.and()
.begin()
.field("SuppressionLimit")
.equals(String.valueOf(suppressionLimit))
.or()
.field("SuppressionLimit")
.equals("0.05")
.or()
.field("SuppressionLimit")
.equals("0.1")
.end()
.and()
.field("gsFactor")
.equals("0.5")
.and()
.field("gsFactorStepSize")
.equals("0.0")
.build();
// Selects according rows
Selector<String[]> selectorTassa = file.getSelectorBuilder()
.field("Dataset")
.equals(data.toString())
.and()
.field("UtilityMeasure")
.equals(measure.toString())
.and()
.field("PrivacyModel")
.equals(model.toString())
.and()
.field("Algorithm")
.equals(BenchmarkAlgorithm.TASSA.toString())
.and()
.field("SuppressionLimit")
.equals("0.0")
.and()
.field("gsFactor")
.equals("0.0")
.and()
.field("gsFactorStepSize")
.equals("0.0")
.build();
// Read data into 2D series
Series2D rgrSeries = new Series2D(file,
selectorRGR,
new Field("SuppressionLimit"),
new Field("Utility", Analyzer.VALUE));
// Read data into 2D series
Series2D flashSeries = new Series2D(file,
selectorFlash,
new Field("SuppressionLimit"),
new Field("Utility", Analyzer.VALUE));
// Read data into 2D series
Series2D tassaSeries = new Series2D(file,
selectorTassa,
new Field("SuppressionLimit"),
new Field("Utility", Analyzer.VALUE));
// Dirty hack for creating a 3D series from two 2D series'
Series3D series = new Series3D(file, selectorRGR, new Field("Dataset"), // Cluster
new Field("UtilityMeasure"), // Type
new Field("PrivacyModel")); // Value
series.getData().clear();
for (Point2D point : tassaSeries.getData()) {
series.getData().add(new Point3D(point.x,
BenchmarkAlgorithm.TASSA.toString(),
String.valueOf(1 - Double.valueOf(point.y))));
}
for (Point2D point : rgrSeries.getData()) {
series.getData()
.add(new Point3D(point.x,
BenchmarkAlgorithm.RECURSIVE_GLOBAL_RECODING.toString(),
String.valueOf(1 - Double.valueOf(point.y))));
}
for (Point2D point : flashSeries.getData()) {
series.getData().add(new Point3D(point.x,
BenchmarkAlgorithm.FLASH.toString(),
String.valueOf(1 - Double.valueOf(point.y))));
}
// Plot
List<Plot<?>> plots = new ArrayList<Plot<?>>();
plots.add(new PlotHistogramClustered("Dataset: " + data.toString() + " / Measure: " +
measure.toString() + " / Suppression Limit: " +
suppressionLimit + " / gsFactor: " + gsFactor +
" / gsStepSize: " + gsFactorStepSize,
new Labels("Suppression Limit", "Utility [%]"),
series));
GnuPlotParams params = new GnuPlotParams();
params.printValues = false;
params.colorize = true;
params.rotateXTicks = 0;
params.keypos = KeyPos.TOP_RIGHT;
params.size = 1.0d;
params.ratio = 0.5d;
if (measure != BenchmarkUtilityMeasure.DISCERNIBILITY) {
params.minY = 0d;
}
params.maxY = 1d;
return new PlotGroup("Comparison of utility of RGR, Flash and Tassa with different K. gsFactor and gsStepSize only apply to RGR.",
plots,
params,
1.0d);
}
|
static PlotGroup function(CSVFile file, BenchmarkDataset data, BenchmarkUtilityMeasure measure, BenchmarkPrivacyModel model, BenchmarkAlgorithm algorithm, double suppressionLimit, double gsFactor, double gsFactorStepSize) throws ParseException { Selector<String[]> selectorRGR = file.getSelectorBuilder() .field(STR) .equals(data.toString()) .and() .field(STR) .equals(measure.toString()) .and() .field(STR) .equals(model.toString()) .and() .field(STR) .equals(BenchmarkAlgorithm.RECURSIVE_GLOBAL_RECODING.toString()) .and() .begin() .field(STR) .equals(String.valueOf(suppressionLimit)) .or() .field(STR) .equals("0.05") .or() .field(STR) .equals("0.1") .end() .and() .field(STR) .equals(String.valueOf(gsFactor)) .and() .field(STR) .equals(String.valueOf(gsFactorStepSize)) .build(); Selector<String[]> selectorFlash = file.getSelectorBuilder() .field(STR) .equals(data.toString()) .and() .field(STR) .equals(measure.toString()) .and() .field(STR) .equals(model.toString()) .and() .field(STR) .equals(BenchmarkAlgorithm.FLASH.toString()) .and() .begin() .field(STR) .equals(String.valueOf(suppressionLimit)) .or() .field(STR) .equals("0.05") .or() .field(STR) .equals("0.1") .end() .and() .field(STR) .equals("0.5") .and() .field(STR) .equals("0.0") .build(); Selector<String[]> selectorTassa = file.getSelectorBuilder() .field(STR) .equals(data.toString()) .and() .field(STR) .equals(measure.toString()) .and() .field(STR) .equals(model.toString()) .and() .field(STR) .equals(BenchmarkAlgorithm.TASSA.toString()) .and() .field(STR) .equals("0.0") .and() .field(STR) .equals("0.0") .and() .field(STR) .equals("0.0") .build(); Series2D rgrSeries = new Series2D(file, selectorRGR, new Field(STR), new Field(STR, Analyzer.VALUE)); Series2D flashSeries = new Series2D(file, selectorFlash, new Field(STR), new Field(STR, Analyzer.VALUE)); Series2D tassaSeries = new Series2D(file, selectorTassa, new Field(STR), new Field(STR, Analyzer.VALUE)); Series3D series = new Series3D(file, selectorRGR, new Field(STR), new Field(STR), new Field(STR)); series.getData().clear(); for (Point2D point : tassaSeries.getData()) { series.getData().add(new Point3D(point.x, BenchmarkAlgorithm.TASSA.toString(), String.valueOf(1 - Double.valueOf(point.y)))); } for (Point2D point : rgrSeries.getData()) { series.getData() .add(new Point3D(point.x, BenchmarkAlgorithm.RECURSIVE_GLOBAL_RECODING.toString(), String.valueOf(1 - Double.valueOf(point.y)))); } for (Point2D point : flashSeries.getData()) { series.getData().add(new Point3D(point.x, BenchmarkAlgorithm.FLASH.toString(), String.valueOf(1 - Double.valueOf(point.y)))); } List<Plot<?>> plots = new ArrayList<Plot<?>>(); plots.add(new PlotHistogramClustered(STR + data.toString() + STR + measure.toString() + STR + suppressionLimit + STR + gsFactor + STR + gsFactorStepSize, new Labels(STR, STR), series)); GnuPlotParams params = new GnuPlotParams(); params.printValues = false; params.colorize = true; params.rotateXTicks = 0; params.keypos = KeyPos.TOP_RIGHT; params.size = 1.0d; params.ratio = 0.5d; if (measure != BenchmarkUtilityMeasure.DISCERNIBILITY) { params.minY = 0d; } params.maxY = 1d; return new PlotGroup(STR, plots, params, 1.0d); }
|
/**
* Performs the analysis
*
* @param file
* @param suppressionLimit
* @param algorithm
* @param model
* @param measure
* @param data
* @return
* @throws ParseException
*/
|
Performs the analysis
|
analyzeUtility
|
{
"repo_name": "fstahnke/arx-new-algorithms",
"path": "src/org/deidentifier/arx/benchmark/BenchmarkAnalysisBoxPlotsSuppression.java",
"license": "apache-2.0",
"size": 41882
}
|
[
"de.linearbits.objectselector.Selector",
"de.linearbits.subframe.analyzer.Analyzer",
"de.linearbits.subframe.graph.Field",
"de.linearbits.subframe.graph.Labels",
"de.linearbits.subframe.graph.Plot",
"de.linearbits.subframe.graph.PlotHistogramClustered",
"de.linearbits.subframe.graph.Point2D",
"de.linearbits.subframe.graph.Point3D",
"de.linearbits.subframe.graph.Series2D",
"de.linearbits.subframe.graph.Series3D",
"de.linearbits.subframe.io.CSVFile",
"de.linearbits.subframe.render.GnuPlotParams",
"de.linearbits.subframe.render.PlotGroup",
"java.text.ParseException",
"java.util.ArrayList",
"java.util.List",
"org.deidentifier.arx.benchmark.BenchmarkSetup"
] |
import de.linearbits.objectselector.Selector; import de.linearbits.subframe.analyzer.Analyzer; import de.linearbits.subframe.graph.Field; import de.linearbits.subframe.graph.Labels; import de.linearbits.subframe.graph.Plot; import de.linearbits.subframe.graph.PlotHistogramClustered; import de.linearbits.subframe.graph.Point2D; import de.linearbits.subframe.graph.Point3D; import de.linearbits.subframe.graph.Series2D; import de.linearbits.subframe.graph.Series3D; import de.linearbits.subframe.io.CSVFile; import de.linearbits.subframe.render.GnuPlotParams; import de.linearbits.subframe.render.PlotGroup; import java.text.ParseException; import java.util.ArrayList; import java.util.List; import org.deidentifier.arx.benchmark.BenchmarkSetup;
|
import de.linearbits.objectselector.*; import de.linearbits.subframe.analyzer.*; import de.linearbits.subframe.graph.*; import de.linearbits.subframe.io.*; import de.linearbits.subframe.render.*; import java.text.*; import java.util.*; import org.deidentifier.arx.benchmark.*;
|
[
"de.linearbits.objectselector",
"de.linearbits.subframe",
"java.text",
"java.util",
"org.deidentifier.arx"
] |
de.linearbits.objectselector; de.linearbits.subframe; java.text; java.util; org.deidentifier.arx;
| 1,061,016
|
public ArrayList<MoodleSection> getCourseContents(int courseid) {
List<MoodleSection> sections = MoodleSection.find(MoodleSection.class,
"courseid = ? and siteid = ?", courseid + "", siteid + "");
// Add modules to sections
List<MoodleModule> dbModules;
List<MoodleModuleContent> dbContents;
for (int i = 0; i < sections.size(); i++) {
dbModules = MoodleModule.find(MoodleModule.class, "parentid = ?",
sections.get(i).getId() + "");
// Set module contents to modules
for (int j = 0; j < dbModules.size(); j++) {
dbContents = MoodleModuleContent.find(
MoodleModuleContent.class, "parentid = ?", dbModules
.get(j).getId() + "");
dbModules.get(j).setContents(dbContents);
}
sections.get(i).setModules(dbModules);
}
return new ArrayList<MoodleSection>(sections);
}
|
ArrayList<MoodleSection> function(int courseid) { List<MoodleSection> sections = MoodleSection.find(MoodleSection.class, STR, courseid + STRSTRparentid = ?STRSTRparentid = ?STR"); dbModules.get(j).setContents(dbContents); } sections.get(i).setModules(dbModules); } return new ArrayList<MoodleSection>(sections); }
|
/**
* Get a list of all sections in a Course. <br/>
* Note: Depending on the contents of a course, this could take some time as
* it runs many sql queries. It is recommended that this method is called
* from a background thread
*
* @param courseid
* Moodle courseid of the course
* @return List of sections
*
* @author Praveen Kumar Pendyala (praveen@praveenkumar.co.in)
*/
|
Get a list of all sections in a Course. Note: Depending on the contents of a course, this could take some time as it runs many sql queries. It is recommended that this method is called from a background thread
|
getCourseContents
|
{
"repo_name": "luxiaoming/MDroid",
"path": "app/src/main/java/in/co/praveenkumar/mdroid/task/CourseContentSyncTask.java",
"license": "gpl-3.0",
"size": 7807
}
|
[
"in.co.praveenkumar.mdroid.model.MoodleSection",
"java.util.ArrayList",
"java.util.List"
] |
import in.co.praveenkumar.mdroid.model.MoodleSection; import java.util.ArrayList; import java.util.List;
|
import in.co.praveenkumar.mdroid.model.*; import java.util.*;
|
[
"in.co.praveenkumar",
"java.util"
] |
in.co.praveenkumar; java.util;
| 939,228
|
@Test
public void testStopGatewaySender_onMember() throws Exception {
Integer locator1Port = locatorSite1.getPort();
// setup servers in Site #1 (London)
server1 = clusterStartupRule.startServerVM(3, locator1Port);
server1.invoke(() -> createSender("ln", 2, false, 100, 400, false, false, null, true));
server1.invoke(() -> startSender("ln"));
server1.invoke(() -> verifySenderState("ln", true, false));
final DistributedMember server1DM = (DistributedMember) server1.invoke(getMemberIdCallable());
pause(10000);
String command = CliStrings.STOP_GATEWAYSENDER + " --" + CliStrings.STOP_GATEWAYSENDER__ID
+ "=ln --" + CliStrings.MEMBER + "=" + server1DM.getId();
CommandResult cmdResult = executeCommandWithIgnoredExceptions(command);
if (cmdResult != null) {
String strCmdResult = cmdResult.toString();
getLogWriter().info("testStopGatewaySender stringResult : " + strCmdResult + ">>>>");
assertEquals(Result.Status.OK, cmdResult.getStatus());
assertTrue(strCmdResult.contains("is stopped on member"));
} else {
fail("testStopGatewaySender failed as did not get CommandResult");
}
server1.invoke(() -> verifySenderState("ln", false, false));
}
|
void function() throws Exception { Integer locator1Port = locatorSite1.getPort(); server1 = clusterStartupRule.startServerVM(3, locator1Port); server1.invoke(() -> createSender("ln", 2, false, 100, 400, false, false, null, true)); server1.invoke(() -> startSender("ln")); server1.invoke(() -> verifySenderState("ln", true, false)); final DistributedMember server1DM = (DistributedMember) server1.invoke(getMemberIdCallable()); pause(10000); String command = CliStrings.STOP_GATEWAYSENDER + STR + CliStrings.STOP_GATEWAYSENDER__ID + STR + CliStrings.MEMBER + "=" + server1DM.getId(); CommandResult cmdResult = executeCommandWithIgnoredExceptions(command); if (cmdResult != null) { String strCmdResult = cmdResult.toString(); getLogWriter().info(STR + strCmdResult + ">>>>"); assertEquals(Result.Status.OK, cmdResult.getStatus()); assertTrue(strCmdResult.contains(STR)); } else { fail(STR); } server1.invoke(() -> verifySenderState("ln", false, false)); }
|
/**
* test to validate that the stop gateway sender command stops the gateway sender on a member
*/
|
test to validate that the stop gateway sender command stops the gateway sender on a member
|
testStopGatewaySender_onMember
|
{
"repo_name": "smanvi-pivotal/geode",
"path": "geode-wan/src/test/java/org/apache/geode/internal/cache/wan/wancommand/StopGatewaySenderCommandDUnitTest.java",
"license": "apache-2.0",
"size": 13142
}
|
[
"org.apache.geode.distributed.DistributedMember",
"org.apache.geode.internal.cache.wan.wancommand.WANCommandUtils",
"org.apache.geode.management.cli.Result",
"org.apache.geode.management.internal.cli.i18n.CliStrings",
"org.apache.geode.management.internal.cli.result.CommandResult",
"org.apache.geode.test.dunit.Assert",
"org.apache.geode.test.dunit.LogWriterUtils",
"org.apache.geode.test.dunit.Wait"
] |
import org.apache.geode.distributed.DistributedMember; import org.apache.geode.internal.cache.wan.wancommand.WANCommandUtils; import org.apache.geode.management.cli.Result; import org.apache.geode.management.internal.cli.i18n.CliStrings; import org.apache.geode.management.internal.cli.result.CommandResult; import org.apache.geode.test.dunit.Assert; import org.apache.geode.test.dunit.LogWriterUtils; import org.apache.geode.test.dunit.Wait;
|
import org.apache.geode.distributed.*; import org.apache.geode.internal.cache.wan.wancommand.*; import org.apache.geode.management.cli.*; import org.apache.geode.management.internal.cli.i18n.*; import org.apache.geode.management.internal.cli.result.*; import org.apache.geode.test.dunit.*;
|
[
"org.apache.geode"
] |
org.apache.geode;
| 532,952
|
void serialize(DigitalObject obj,
OutputStream out,
String format,
String encoding,
int transContext) throws ObjectIntegrityException,
StreamIOException, UnsupportedTranslationException, ServerException;
|
void serialize(DigitalObject obj, OutputStream out, String format, String encoding, int transContext) throws ObjectIntegrityException, StreamIOException, UnsupportedTranslationException, ServerException;
|
/**
* Serializes the given object.
*
* @param obj
* the object to serialize.
* @param out
* where to send the output to (auto-closed when finished).
* @param encoding
* the character encoding if the format is text-based.
* @param transContext
* the translation context.
* @throws ObjectIntegrityException
* if the given object is in such a state that serialization can't
* be performed.
* @throws StreamIOException
* if there is an error writing to the stream.
* @throws ServerException
* if the translator is unable to serialize for any other reason.
* @throws UnsupportedEncodingException
* if the encoding is not supported by the JVM.
* @see DOTranslationUtility#SERIALIZE_EXPORT_ARCHIVE
* @see DOTranslationUtility#SERIALIZE_EXPORT_PUBLIC
* @see DOTranslationUtility#SERIALIZE_EXPORT_MIGRATE
* @see DOTranslationUtility#SERIALIZE_STORAGE_INTERNAL
*/
|
Serializes the given object
|
serialize
|
{
"repo_name": "fcrepo4-archive/fcrepo",
"path": "fcrepo-server/src/main/java/org/fcrepo/server/storage/translation/DOTranslator.java",
"license": "apache-2.0",
"size": 3368
}
|
[
"java.io.OutputStream",
"org.fcrepo.server.errors.ObjectIntegrityException",
"org.fcrepo.server.errors.ServerException",
"org.fcrepo.server.errors.StreamIOException",
"org.fcrepo.server.errors.UnsupportedTranslationException",
"org.fcrepo.server.storage.types.DigitalObject"
] |
import java.io.OutputStream; import org.fcrepo.server.errors.ObjectIntegrityException; import org.fcrepo.server.errors.ServerException; import org.fcrepo.server.errors.StreamIOException; import org.fcrepo.server.errors.UnsupportedTranslationException; import org.fcrepo.server.storage.types.DigitalObject;
|
import java.io.*; import org.fcrepo.server.errors.*; import org.fcrepo.server.storage.types.*;
|
[
"java.io",
"org.fcrepo.server"
] |
java.io; org.fcrepo.server;
| 2,654,942
|
public final ChannelFuture processHandshake(final Channel channel, HttpResponse response,
final ChannelPromise promise) {
if (response instanceof FullHttpResponse) {
try {
finishHandshake(channel, (FullHttpResponse) response);
promise.setSuccess();
} catch (Throwable cause) {
promise.setFailure(cause);
}
} else {
ChannelPipeline p = channel.pipeline();
ChannelHandlerContext ctx = p.context(HttpResponseDecoder.class);
if (ctx == null) {
ctx = p.context(HttpClientCodec.class);
if (ctx == null) {
return promise.setFailure(new IllegalStateException("ChannelPipeline does not contain " +
"a HttpResponseDecoder or HttpClientCodec"));
}
}
|
final ChannelFuture function(final Channel channel, HttpResponse response, final ChannelPromise promise) { if (response instanceof FullHttpResponse) { try { finishHandshake(channel, (FullHttpResponse) response); promise.setSuccess(); } catch (Throwable cause) { promise.setFailure(cause); } } else { ChannelPipeline p = channel.pipeline(); ChannelHandlerContext ctx = p.context(HttpResponseDecoder.class); if (ctx == null) { ctx = p.context(HttpClientCodec.class); if (ctx == null) { return promise.setFailure(new IllegalStateException(STR + STR)); } }
|
/**
* Process the opening handshake initiated by {@link #handshake}.
*
* @param channel
* Channel
* @param response
* HTTP response containing the closing handshake details
* @param promise
* the {@link ChannelPromise} to notify once the handshake completes.
* @return future
* the {@link ChannelFuture} which is notified once the handshake completes.
*/
|
Process the opening handshake initiated by <code>#handshake</code>
|
processHandshake
|
{
"repo_name": "wangcy6/storm_app",
"path": "frame/java/netty-4.1/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker.java",
"license": "apache-2.0",
"size": 19297
}
|
[
"io.netty.channel.Channel",
"io.netty.channel.ChannelFuture",
"io.netty.channel.ChannelHandlerContext",
"io.netty.channel.ChannelPipeline",
"io.netty.channel.ChannelPromise",
"io.netty.handler.codec.http.FullHttpResponse",
"io.netty.handler.codec.http.HttpClientCodec",
"io.netty.handler.codec.http.HttpResponse",
"io.netty.handler.codec.http.HttpResponseDecoder"
] |
import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPipeline; import io.netty.channel.ChannelPromise; import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.HttpClientCodec; import io.netty.handler.codec.http.HttpResponse; import io.netty.handler.codec.http.HttpResponseDecoder;
|
import io.netty.channel.*; import io.netty.handler.codec.http.*;
|
[
"io.netty.channel",
"io.netty.handler"
] |
io.netty.channel; io.netty.handler;
| 2,080,046
|
public DrawerBuilder withDrawerLayout(@NonNull DrawerLayout drawerLayout) {
this.mDrawerLayout = drawerLayout;
return this;
}
|
DrawerBuilder function(@NonNull DrawerLayout drawerLayout) { this.mDrawerLayout = drawerLayout; return this; }
|
/**
* Pass a custom DrawerLayout which will be used.
* NOTE: This requires the same structure as the drawer.xml
*
* @param drawerLayout
* @return
*/
|
Pass a custom DrawerLayout which will be used
|
withDrawerLayout
|
{
"repo_name": "democedes/MaterialDrawer",
"path": "library/src/main/java/com/mikepenz/materialdrawer/DrawerBuilder.java",
"license": "apache-2.0",
"size": 61635
}
|
[
"android.support.annotation.NonNull",
"android.support.v4.widget.DrawerLayout"
] |
import android.support.annotation.NonNull; import android.support.v4.widget.DrawerLayout;
|
import android.support.annotation.*; import android.support.v4.widget.*;
|
[
"android.support"
] |
android.support;
| 875,767
|
void enterArrayLiteralExpression(@NotNull ECMAScriptParser.ArrayLiteralExpressionContext ctx);
void exitArrayLiteralExpression(@NotNull ECMAScriptParser.ArrayLiteralExpressionContext ctx);
|
void enterArrayLiteralExpression(@NotNull ECMAScriptParser.ArrayLiteralExpressionContext ctx); void exitArrayLiteralExpression(@NotNull ECMAScriptParser.ArrayLiteralExpressionContext ctx);
|
/**
* Exit a parse tree produced by {@link ECMAScriptParser#ArrayLiteralExpression}.
* @param ctx the parse tree
*/
|
Exit a parse tree produced by <code>ECMAScriptParser#ArrayLiteralExpression</code>
|
exitArrayLiteralExpression
|
{
"repo_name": "IsThisThePayneResidence/intellidots",
"path": "src/main/java/ua/edu/hneu/ast/parsers/ECMAScriptListener.java",
"license": "gpl-3.0",
"size": 39591
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 44,122
|
public Future<Path> createTmpFile(String name, DistributedCacheEntry entry, JobID jobID, ExecutionAttemptID executionId) throws Exception {
synchronized (lock) {
Map<String, Future<Path>> jobEntries = entries.computeIfAbsent(jobID, k -> new HashMap<>());
// register reference holder
final Set<ExecutionAttemptID> refHolders = jobRefHolders.computeIfAbsent(jobID, id -> new HashSet<>());
refHolders.add(executionId);
Future<Path> fileEntry = jobEntries.get(name);
if (fileEntry != null) {
// file is already in the cache. return a future that
// immediately returns the file
return fileEntry;
} else {
// need to copy the file
// create the target path
File tempDirToUse = new File(storageDirectories[nextDirectory++], jobID.toString());
if (nextDirectory >= storageDirectories.length) {
nextDirectory = 0;
}
// kick off the copying
Callable<Path> cp;
if (entry.blobKey != null) {
cp = new CopyFromBlobProcess(entry, jobID, blobService, new Path(tempDirToUse.getAbsolutePath()));
} else {
cp = new CopyFromDFSProcess(entry, new Path(tempDirToUse.getAbsolutePath()));
}
FutureTask<Path> copyTask = new FutureTask<>(cp);
executorService.submit(copyTask);
// store our entry
jobEntries.put(name, copyTask);
return copyTask;
}
}
}
|
Future<Path> function(String name, DistributedCacheEntry entry, JobID jobID, ExecutionAttemptID executionId) throws Exception { synchronized (lock) { Map<String, Future<Path>> jobEntries = entries.computeIfAbsent(jobID, k -> new HashMap<>()); final Set<ExecutionAttemptID> refHolders = jobRefHolders.computeIfAbsent(jobID, id -> new HashSet<>()); refHolders.add(executionId); Future<Path> fileEntry = jobEntries.get(name); if (fileEntry != null) { return fileEntry; } else { File tempDirToUse = new File(storageDirectories[nextDirectory++], jobID.toString()); if (nextDirectory >= storageDirectories.length) { nextDirectory = 0; } Callable<Path> cp; if (entry.blobKey != null) { cp = new CopyFromBlobProcess(entry, jobID, blobService, new Path(tempDirToUse.getAbsolutePath())); } else { cp = new CopyFromDFSProcess(entry, new Path(tempDirToUse.getAbsolutePath())); } FutureTask<Path> copyTask = new FutureTask<>(cp); executorService.submit(copyTask); jobEntries.put(name, copyTask); return copyTask; } } }
|
/**
* If the file doesn't exist locally, retrieve the file from the blob-service.
*
* @param entry The cache entry descriptor (path, executable flag)
* @param jobID The ID of the job for which the file is copied.
* @return The handle to the task that copies the file.
*/
|
If the file doesn't exist locally, retrieve the file from the blob-service
|
createTmpFile
|
{
"repo_name": "jinglining/flink",
"path": "flink-runtime/src/main/java/org/apache/flink/runtime/filecache/FileCache.java",
"license": "apache-2.0",
"size": 11428
}
|
[
"java.io.File",
"java.util.HashMap",
"java.util.HashSet",
"java.util.Map",
"java.util.Set",
"java.util.concurrent.Callable",
"java.util.concurrent.Future",
"java.util.concurrent.FutureTask",
"org.apache.flink.api.common.JobID",
"org.apache.flink.api.common.cache.DistributedCache",
"org.apache.flink.core.fs.Path",
"org.apache.flink.runtime.executiongraph.ExecutionAttemptID"
] |
import java.io.File; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.Future; import java.util.concurrent.FutureTask; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.cache.DistributedCache; import org.apache.flink.core.fs.Path; import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
|
import java.io.*; import java.util.*; import java.util.concurrent.*; import org.apache.flink.api.common.*; import org.apache.flink.api.common.cache.*; import org.apache.flink.core.fs.*; import org.apache.flink.runtime.executiongraph.*;
|
[
"java.io",
"java.util",
"org.apache.flink"
] |
java.io; java.util; org.apache.flink;
| 2,380,165
|
public Object put(DatabaseField key, Object value) {
// Value may be a direct value, nested record, or collection of values.
XMLField field = convertToXMLField(key);
Object nodeValue = convertToNodeValue(value);
NodeList replaced = null;
boolean isEmptyCollection = false;
if (nodeValue instanceof Collection) {
isEmptyCollection = ((Collection)nodeValue).size() == 0;
replaced = XPathEngine.getInstance().replaceCollection(convertToXMLField(key), dom, (Collection)nodeValue, session);
} else {
replaced = XPathEngine.getInstance().replaceValue(convertToXMLField(key), dom, nodeValue, session);
}
if (replaced.getLength() == 0) {
// Replace does nothing if the node did not exist, return no nodes.
XPathEngine.getInstance().create(convertToXMLField(key), dom, nodeValue, lastUpdatedField, getDocPresPolicy(), session);
} else if (replaced.item(0) == getDOM()) {
// If the root element/record element was changed must update the record's reference.
setDOM(getDocument().getDocumentElement());
}
if(!field.getXPathFragment().isAttribute() && !field.getXPathFragment().nameIsText()) {
if(value != null && !isEmptyCollection) {
this.lastUpdatedField = field;
}
}
return replaced;
}
|
Object function(DatabaseField key, Object value) { XMLField field = convertToXMLField(key); Object nodeValue = convertToNodeValue(value); NodeList replaced = null; boolean isEmptyCollection = false; if (nodeValue instanceof Collection) { isEmptyCollection = ((Collection)nodeValue).size() == 0; replaced = XPathEngine.getInstance().replaceCollection(convertToXMLField(key), dom, (Collection)nodeValue, session); } else { replaced = XPathEngine.getInstance().replaceValue(convertToXMLField(key), dom, nodeValue, session); } if (replaced.getLength() == 0) { XPathEngine.getInstance().create(convertToXMLField(key), dom, nodeValue, lastUpdatedField, getDocPresPolicy(), session); } else if (replaced.item(0) == getDOM()) { setDOM(getDocument().getDocumentElement()); } if(!field.getXPathFragment().isAttribute() && !field.getXPathFragment().nameIsText()) { if(value != null && !isEmptyCollection) { this.lastUpdatedField = field; } } return replaced; }
|
/**
* INTERNAL:
* Set the field value into the DOM.
* The field name must be a valid simple XPath expression.
*/
|
Set the field value into the DOM. The field name must be a valid simple XPath expression
|
put
|
{
"repo_name": "RallySoftware/eclipselink.runtime",
"path": "foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/oxm/record/DOMRecord.java",
"license": "epl-1.0",
"size": 29768
}
|
[
"java.util.Collection",
"org.eclipse.persistence.internal.helper.DatabaseField",
"org.eclipse.persistence.internal.oxm.XPathEngine",
"org.eclipse.persistence.oxm.XMLField",
"org.w3c.dom.NodeList"
] |
import java.util.Collection; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.oxm.XPathEngine; import org.eclipse.persistence.oxm.XMLField; import org.w3c.dom.NodeList;
|
import java.util.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.oxm.*; import org.eclipse.persistence.oxm.*; import org.w3c.dom.*;
|
[
"java.util",
"org.eclipse.persistence",
"org.w3c.dom"
] |
java.util; org.eclipse.persistence; org.w3c.dom;
| 2,560,947
|
public void setDefaultContentType(MediaType defaultContentType) {
this.defaultContentType = defaultContentType;
}
|
void function(MediaType defaultContentType) { this.defaultContentType = defaultContentType; }
|
/**
* Set the default content type.
* <p>This content type will be used when neither the request path extension,
* nor a request parameter, nor the {@code Accept} header could help
* determine the requested content type.
*/
|
Set the default content type. This content type will be used when neither the request path extension, nor a request parameter, nor the Accept header could help determine the requested content type
|
setDefaultContentType
|
{
"repo_name": "deathspeeder/class-guard",
"path": "spring-framework-3.2.x/spring-web/src/main/java/org/springframework/web/accept/ContentNegotiationManagerFactoryBean.java",
"license": "gpl-2.0",
"size": 7902
}
|
[
"org.springframework.http.MediaType"
] |
import org.springframework.http.MediaType;
|
import org.springframework.http.*;
|
[
"org.springframework.http"
] |
org.springframework.http;
| 2,752,397
|
public double getStorageSize(String name, double defaultValue,
StorageUnit targetUnit) {
Preconditions.checkNotNull(targetUnit, "Conversion unit cannot be null.");
Preconditions.checkState(isNotBlank(name), "Name cannot be blank.");
String vString = get(name);
if (isBlank(vString)) {
return targetUnit.getDefault(defaultValue);
}
StorageSize measure = StorageSize.parse(vString);
return convertStorageUnit(measure.getValue(), measure.getUnit(),
targetUnit);
}
|
double function(String name, double defaultValue, StorageUnit targetUnit) { Preconditions.checkNotNull(targetUnit, STR); Preconditions.checkState(isNotBlank(name), STR); String vString = get(name); if (isBlank(vString)) { return targetUnit.getDefault(defaultValue); } StorageSize measure = StorageSize.parse(vString); return convertStorageUnit(measure.getValue(), measure.getUnit(), targetUnit); }
|
/**
* Gets storage size from a config file.
*
* @param name - Key to read.
* @param defaultValue - The default value to return in case the key is
* not present.
* @param targetUnit - The Storage unit that should be used
* for the return value.
* @return - double value in the Storage Unit specified.
*/
|
Gets storage size from a config file
|
getStorageSize
|
{
"repo_name": "yew1eb/flink",
"path": "flink-filesystems/flink-fs-hadoop-shaded/src/main/java/org/apache/hadoop/conf/Configuration.java",
"license": "apache-2.0",
"size": 112692
}
|
[
"com.google.common.base.Preconditions"
] |
import com.google.common.base.Preconditions;
|
import com.google.common.base.*;
|
[
"com.google.common"
] |
com.google.common;
| 216,031
|
public static Word2Vec readFromBinFile(File file)
throws IOException {
return readFromBinFile(file, detectByteOrder(file));
}
|
static Word2Vec function(File file) throws IOException { return readFromBinFile(file, detectByteOrder(file)); }
|
/**
* Read word2vec from the bin file. The default byte order is LITTLE_ENDIAN.
*
* @param file the bin file
* @return word2vec
* @throws IOException
*/
|
Read word2vec from the bin file. The default byte order is LITTLE_ENDIAN
|
readFromBinFile
|
{
"repo_name": "yfpeng/pengyifan-word2vec",
"path": "src/main/java/com/pengyifan/word2vec/io/Word2VecUtils.java",
"license": "bsd-3-clause",
"size": 7928
}
|
[
"com.pengyifan.word2vec.Word2Vec",
"java.io.File",
"java.io.IOException"
] |
import com.pengyifan.word2vec.Word2Vec; import java.io.File; import java.io.IOException;
|
import com.pengyifan.word2vec.*; import java.io.*;
|
[
"com.pengyifan.word2vec",
"java.io"
] |
com.pengyifan.word2vec; java.io;
| 1,475,596
|
public static Logger getLogger(Class<?> callerClass) {
return Logger.getLogger(callerClass);
}
|
static Logger function(Class<?> callerClass) { return Logger.getLogger(callerClass); }
|
/**
* Creates reference to logger with given class name.
*
* @param callerClass class, from which logger is called.
* @return log4j logger.
*/
|
Creates reference to logger with given class name
|
getLogger
|
{
"repo_name": "LV-eMeS/eMeS_Libraries",
"path": "src/main/java/lv/emes/libraries/tools/logging/MS_Log4Java.java",
"license": "mit",
"size": 2857
}
|
[
"org.apache.log4j.Logger"
] |
import org.apache.log4j.Logger;
|
import org.apache.log4j.*;
|
[
"org.apache.log4j"
] |
org.apache.log4j;
| 185,575
|
@Test
@InSequence(3)
public void remove() throws DaoException {
currentEntity = getEntity();
currentEntity = facade.findById(currentEntity.getId());
facade.remove(currentEntity);
Assert.assertNotNull(currentEntity.getId());
log.info(currentEntity.getClass() + " was deleted with id "
+ currentEntity.getId());
}
|
@InSequence(3) void function() throws DaoException { currentEntity = getEntity(); currentEntity = facade.findById(currentEntity.getId()); facade.remove(currentEntity); Assert.assertNotNull(currentEntity.getId()); log.info(currentEntity.getClass() + STR + currentEntity.getId()); }
|
/**
* Test remove Entity
*/
|
Test remove Entity
|
remove
|
{
"repo_name": "templarfelix/framework",
"path": "base-ejb/src/main/test/io.easycm.framework.base/test/impl/CrudTestImpl.java",
"license": "gpl-2.0",
"size": 2023
}
|
[
"br.com.streamsoft.framework.base.dao.exception.DaoException",
"junit.framework.Assert",
"org.jboss.arquillian.junit.InSequence"
] |
import br.com.streamsoft.framework.base.dao.exception.DaoException; import junit.framework.Assert; import org.jboss.arquillian.junit.InSequence;
|
import br.com.streamsoft.framework.base.dao.exception.*; import junit.framework.*; import org.jboss.arquillian.junit.*;
|
[
"br.com.streamsoft",
"junit.framework",
"org.jboss.arquillian"
] |
br.com.streamsoft; junit.framework; org.jboss.arquillian;
| 561,395
|
public static void addCollision(@NotNull UpdateOp op,
@NotNull Revision revision,
@NotNull Revision other) {
checkNotNull(op).setMapEntry(COLLISIONS, checkNotNull(revision),
other.toString());
}
|
static void function(@NotNull UpdateOp op, @NotNull Revision revision, @NotNull Revision other) { checkNotNull(op).setMapEntry(COLLISIONS, checkNotNull(revision), other.toString()); }
|
/**
* Add a collision marker for the given {@code revision}.
*
* @param op the update operation.
* @param revision the commit for which a collision was detected.
* @param other the revision for the commit, which detected the collision.
*/
|
Add a collision marker for the given revision
|
addCollision
|
{
"repo_name": "trekawek/jackrabbit-oak",
"path": "oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java",
"license": "apache-2.0",
"size": 92264
}
|
[
"com.google.common.base.Preconditions",
"org.jetbrains.annotations.NotNull"
] |
import com.google.common.base.Preconditions; import org.jetbrains.annotations.NotNull;
|
import com.google.common.base.*; import org.jetbrains.annotations.*;
|
[
"com.google.common",
"org.jetbrains.annotations"
] |
com.google.common; org.jetbrains.annotations;
| 1,558,501
|
public synchronized boolean isLockOwner(Connection conn, String lockName) {
lockName = lockName.intern();
return getThreadLocks().contains(lockName);
}
|
synchronized boolean function(Connection conn, String lockName) { lockName = lockName.intern(); return getThreadLocks().contains(lockName); }
|
/**
* Determine whether the calling thread owns a lock on the identified
* resource.
*/
|
Determine whether the calling thread owns a lock on the identified resource
|
isLockOwner
|
{
"repo_name": "chandrasekhar4u/opensymphony-quartz-backup",
"path": "trunk/src/java/org/quartz/impl/jdbcjobstore/SimpleSemaphore.java",
"license": "apache-2.0",
"size": 5192
}
|
[
"java.sql.Connection"
] |
import java.sql.Connection;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 2,904,276
|
@Override
public void close() { // called by SubscriberSampler#threadFinished()
log.debug("close()");
try {
if(connection != null && connectionStarted) {
connection.stop();
connectionStarted = false;
}
} catch (JMSException e) {
log.warn("Stopping connection throws exception, message: {}", e.getMessage(), e);
}
Utils.close(subscriber, log);
Utils.close(session, log);
Utils.close(connection, log);
}
/**
* {@inheritDoc}
|
void function() { log.debug(STR); try { if(connection != null && connectionStarted) { connection.stop(); connectionStarted = false; } } catch (JMSException e) { log.warn(STR, e.getMessage(), e); } Utils.close(subscriber, log); Utils.close(session, log); Utils.close(connection, log); } /** * {@inheritDoc}
|
/**
* close() will stop the connection first.
* Then it closes the subscriber, session and connection.
*/
|
close() will stop the connection first. Then it closes the subscriber, session and connection
|
close
|
{
"repo_name": "ufctester/apache-jmeter",
"path": "src/protocol/jms/org/apache/jmeter/protocol/jms/client/ReceiveSubscriber.java",
"license": "apache-2.0",
"size": 16026
}
|
[
"javax.jms.JMSException",
"org.apache.jmeter.protocol.jms.Utils"
] |
import javax.jms.JMSException; import org.apache.jmeter.protocol.jms.Utils;
|
import javax.jms.*; import org.apache.jmeter.protocol.jms.*;
|
[
"javax.jms",
"org.apache.jmeter"
] |
javax.jms; org.apache.jmeter;
| 777,865
|
public TextureData renderProjectedAsTexture(int startZ, int endZ,
int stepping, int type, List<Integer> channels)
throws RenderingServiceException, DSOutOfServiceException
{
isSessionAlive();
List<Integer> active = getActiveChannels();
for (int i = 0; i < getPixelsDimensionsC(); i++)
setActive(i, false);
Iterator<Integer> j = channels.iterator();
while (j.hasNext())
setActive(j.next(), true);
TextureData img;
if (isCompressed())
img = renderProjectedCompressedAsTexture(startZ, endZ, stepping,
type);
else img = renderProjectedUncompressedAsTexture(startZ, endZ, stepping,
type);
//reset
j = active.iterator();
while (j.hasNext())
setActive(j.next(), true);
return img;
}
|
TextureData function(int startZ, int endZ, int stepping, int type, List<Integer> channels) throws RenderingServiceException, DSOutOfServiceException { isSessionAlive(); List<Integer> active = getActiveChannels(); for (int i = 0; i < getPixelsDimensionsC(); i++) setActive(i, false); Iterator<Integer> j = channels.iterator(); while (j.hasNext()) setActive(j.next(), true); TextureData img; if (isCompressed()) img = renderProjectedCompressedAsTexture(startZ, endZ, stepping, type); else img = renderProjectedUncompressedAsTexture(startZ, endZ, stepping, type); j = active.iterator(); while (j.hasNext()) setActive(j.next(), true); return img; }
|
/**
* Implemented as specified by {@link RenderingControl}.
* @see RenderingControl#renderProjectedAsTexture(int, int, int, int, List)
*/
|
Implemented as specified by <code>RenderingControl</code>
|
renderProjectedAsTexture
|
{
"repo_name": "rleigh-dundee/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/env/rnd/RenderingControlProxy.java",
"license": "gpl-2.0",
"size": 64159
}
|
[
"com.sun.opengl.util.texture.TextureData",
"java.util.Iterator",
"java.util.List",
"org.openmicroscopy.shoola.env.data.DSOutOfServiceException"
] |
import com.sun.opengl.util.texture.TextureData; import java.util.Iterator; import java.util.List; import org.openmicroscopy.shoola.env.data.DSOutOfServiceException;
|
import com.sun.opengl.util.texture.*; import java.util.*; import org.openmicroscopy.shoola.env.data.*;
|
[
"com.sun.opengl",
"java.util",
"org.openmicroscopy.shoola"
] |
com.sun.opengl; java.util; org.openmicroscopy.shoola;
| 779,927
|
protected void removePreviousDialog(String tag) {
Fragment fragment = mFragmentManager.findFragmentByTag(tag);
if (fragment != null) {
FragmentTransaction transaction = mFragmentManager
.beginTransaction();
transaction.remove(fragment);
transaction.commitAllowingStateLoss();
}
}
|
void function(String tag) { Fragment fragment = mFragmentManager.findFragmentByTag(tag); if (fragment != null) { FragmentTransaction transaction = mFragmentManager .beginTransaction(); transaction.remove(fragment); transaction.commitAllowingStateLoss(); } }
|
/**
* Removes the previously shown dialog, if any.
*
* @param tag
*/
|
Removes the previously shown dialog, if any
|
removePreviousDialog
|
{
"repo_name": "ribbon-xx/VTVPro-Plus",
"path": "VtvPro/src/mdn/vtvsport/fragment/BaseFragmentPlayerDetail.java",
"license": "apache-2.0",
"size": 1823
}
|
[
"android.support.v4.app.Fragment",
"android.support.v4.app.FragmentTransaction"
] |
import android.support.v4.app.Fragment; import android.support.v4.app.FragmentTransaction;
|
import android.support.v4.app.*;
|
[
"android.support"
] |
android.support;
| 2,496,752
|
Path getLocalCache(URI cache, Configuration conf,
String subDir, FileStatus fileStatus,
boolean isArchive, long confFileStamp,
Path currentWorkDir, boolean honorSymLinkConf, boolean isPublic)
throws IOException {
String key;
key = getKey(cache, conf, confFileStamp, getLocalizedCacheOwner(isPublic));
CacheStatus lcacheStatus;
Path localizedPath = null;
synchronized (cachedArchives) {
lcacheStatus = cachedArchives.get(key);
if (lcacheStatus == null) {
// was never localized
String uniqueString = String.valueOf(random.nextLong());
String cachePath = new Path (subDir,
new Path(uniqueString, makeRelative(cache, conf))).toString();
Path localPath = lDirAllocator.getLocalPathForWrite(cachePath,
fileStatus.getLen(), trackerConf);
lcacheStatus = new CacheStatus(new Path(localPath.toString().replace(
cachePath, "")), localPath, new Path(subDir), uniqueString);
cachedArchives.put(key, lcacheStatus);
}
//mark the cache for use.
lcacheStatus.refcount++;
}
boolean initSuccessful = false;
try {
// do the localization, after releasing the global lock
synchronized (lcacheStatus) {
if (!lcacheStatus.isInited()) {
FileSystem fs = FileSystem.get(cache, conf);
checkStampSinceJobStarted(conf, fs, cache, confFileStamp,
lcacheStatus, fileStatus);
localizedPath = localizeCache(conf, cache, confFileStamp,
lcacheStatus, isArchive, isPublic);
lcacheStatus.initComplete();
} else {
localizedPath = checkCacheStatusValidity(conf, cache, confFileStamp,
lcacheStatus, fileStatus, isArchive);
}
createSymlink(conf, cache, lcacheStatus, isArchive, currentWorkDir,
honorSymLinkConf);
}
initSuccessful = true;
return localizedPath;
} finally {
if (!initSuccessful) {
synchronized (cachedArchives) {
lcacheStatus.refcount--;
}
}
}
}
|
Path getLocalCache(URI cache, Configuration conf, String subDir, FileStatus fileStatus, boolean isArchive, long confFileStamp, Path currentWorkDir, boolean honorSymLinkConf, boolean isPublic) throws IOException { String key; key = getKey(cache, conf, confFileStamp, getLocalizedCacheOwner(isPublic)); CacheStatus lcacheStatus; Path localizedPath = null; synchronized (cachedArchives) { lcacheStatus = cachedArchives.get(key); if (lcacheStatus == null) { String uniqueString = String.valueOf(random.nextLong()); String cachePath = new Path (subDir, new Path(uniqueString, makeRelative(cache, conf))).toString(); Path localPath = lDirAllocator.getLocalPathForWrite(cachePath, fileStatus.getLen(), trackerConf); lcacheStatus = new CacheStatus(new Path(localPath.toString().replace( cachePath, "")), localPath, new Path(subDir), uniqueString); cachedArchives.put(key, lcacheStatus); } lcacheStatus.refcount++; } boolean initSuccessful = false; try { synchronized (lcacheStatus) { if (!lcacheStatus.isInited()) { FileSystem fs = FileSystem.get(cache, conf); checkStampSinceJobStarted(conf, fs, cache, confFileStamp, lcacheStatus, fileStatus); localizedPath = localizeCache(conf, cache, confFileStamp, lcacheStatus, isArchive, isPublic); lcacheStatus.initComplete(); } else { localizedPath = checkCacheStatusValidity(conf, cache, confFileStamp, lcacheStatus, fileStatus, isArchive); } createSymlink(conf, cache, lcacheStatus, isArchive, currentWorkDir, honorSymLinkConf); } initSuccessful = true; return localizedPath; } finally { if (!initSuccessful) { synchronized (cachedArchives) { lcacheStatus.refcount--; } } } }
|
/**
* Get the locally cached file or archive; it could either be
* previously cached (and valid) or copy it from the {@link FileSystem} now.
*
* @param cache the cache to be localized, this should be specified as
* new URI(scheme://scheme-specific-part/absolute_path_to_file#LINKNAME).
* @param conf The Configuration file which contains the filesystem
* @param subDir The base cache subDir where you want to localize the
* files/archives
* @param fileStatus The file status on the dfs.
* @param isArchive if the cache is an archive or a file. In case it is an
* archive with a .zip or .jar or .tar or .tgz or .tar.gz extension it will
* be unzipped/unjarred/untarred automatically
* and the directory where the archive is unzipped/unjarred/untarred is
* returned as the Path.
* In case of a file, the path to the file is returned
* @param confFileStamp this is the hdfs file modification timestamp to verify
* that the file to be cached hasn't changed since the job started
* @param currentWorkDir this is the directory where you would want to create
* symlinks for the locally cached files/archives
* @param honorSymLinkConf if this is false, then the symlinks are not
* created even if conf says so (this is required for an optimization in task
* launches).
* NOTE: This is effectively always on since r696957, since there is no code
* path that does not use this.
* @param isPublic to know the cache file is accessible to public or private
* @return the path to directory where the archives are unjarred in case of
* archives, the path to the file where the file is copied locally
* @throws IOException
*/
|
Get the locally cached file or archive; it could either be previously cached (and valid) or copy it from the <code>FileSystem</code> now
|
getLocalCache
|
{
"repo_name": "steveloughran/hadoop-mapreduce",
"path": "src/java/org/apache/hadoop/mapreduce/filecache/TrackerDistributedCacheManager.java",
"license": "apache-2.0",
"size": 29118
}
|
[
"java.io.IOException",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.FileStatus",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path"
] |
import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path;
|
import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 2,751,228
|
protected void positionHelper() {
if (m_dragHelper == null) {
// should never happen
CmsDebugLog.getInstance().printLine("Drag helper is null");
return;
}
Element parentElement = m_dragHelper.getParentElement();
int left = CmsDomUtil.getRelativeX(m_clientX, parentElement) - m_cursorOffsetX;
int top = CmsDomUtil.getRelativeY(m_clientY, parentElement) - m_cursorOffsetY;
m_dragHelper.getStyle().setLeft(left, Unit.PX);
m_dragHelper.getStyle().setTop(top, Unit.PX);
}
|
void function() { if (m_dragHelper == null) { CmsDebugLog.getInstance().printLine(STR); return; } Element parentElement = m_dragHelper.getParentElement(); int left = CmsDomUtil.getRelativeX(m_clientX, parentElement) - m_cursorOffsetX; int top = CmsDomUtil.getRelativeY(m_clientY, parentElement) - m_cursorOffsetY; m_dragHelper.getStyle().setLeft(left, Unit.PX); m_dragHelper.getStyle().setTop(top, Unit.PX); }
|
/**
* Positions an element depending on the current events client position and the cursor offset. This method assumes that the element parent is positioned relative.<p>
*/
|
Positions an element depending on the current events client position and the cursor offset. This method assumes that the element parent is positioned relative
|
positionHelper
|
{
"repo_name": "ggiudetti/opencms-core",
"path": "src-gwt/org/opencms/gwt/client/dnd/CmsDNDHandler.java",
"license": "lgpl-2.1",
"size": 36514
}
|
[
"com.google.gwt.dom.client.Element",
"com.google.gwt.dom.client.Style",
"org.opencms.gwt.client.util.CmsDebugLog",
"org.opencms.gwt.client.util.CmsDomUtil"
] |
import com.google.gwt.dom.client.Element; import com.google.gwt.dom.client.Style; import org.opencms.gwt.client.util.CmsDebugLog; import org.opencms.gwt.client.util.CmsDomUtil;
|
import com.google.gwt.dom.client.*; import org.opencms.gwt.client.util.*;
|
[
"com.google.gwt",
"org.opencms.gwt"
] |
com.google.gwt; org.opencms.gwt;
| 1,478,064
|
private void removeAndReconnect(MapReduceOper mr, MapReduceOper newMR) throws VisitorException {
List<MapReduceOper> mapperSuccs = getPlan().getSuccessors(mr);
List<MapReduceOper> mapperPreds = getPlan().getPredecessors(mr);
// make a copy before removing operator
ArrayList<MapReduceOper> succsCopy = null;
ArrayList<MapReduceOper> predsCopy = null;
if (mapperSuccs != null) {
succsCopy = new ArrayList<MapReduceOper>(mapperSuccs);
}
if (mapperPreds != null) {
predsCopy = new ArrayList<MapReduceOper>(mapperPreds);
}
getPlan().remove(mr);
// reconnect the mapper's successors
if (succsCopy != null) {
for (MapReduceOper succ : succsCopy) {
try {
getPlan().connect(newMR, succ);
} catch (PlanException e) {
int errCode = 2133;
String msg = "Internal Error. Unable to connect map plan with successors for optimization.";
throw new OptimizerException(msg, errCode, PigException.BUG, e);
}
}
}
// reconnect the mapper's predecessors
if (predsCopy != null) {
for (MapReduceOper pred : predsCopy) {
if (newMR.getOperatorKey().equals(pred.getOperatorKey())) {
continue;
}
try {
getPlan().connect(pred, newMR);
} catch (PlanException e) {
int errCode = 2134;
String msg = "Internal Error. Unable to connect map plan with predecessors for optimization.";
throw new OptimizerException(msg, errCode, PigException.BUG, e);
}
}
}
mergeMROperProperties(mr, newMR);
}
|
void function(MapReduceOper mr, MapReduceOper newMR) throws VisitorException { List<MapReduceOper> mapperSuccs = getPlan().getSuccessors(mr); List<MapReduceOper> mapperPreds = getPlan().getPredecessors(mr); ArrayList<MapReduceOper> succsCopy = null; ArrayList<MapReduceOper> predsCopy = null; if (mapperSuccs != null) { succsCopy = new ArrayList<MapReduceOper>(mapperSuccs); } if (mapperPreds != null) { predsCopy = new ArrayList<MapReduceOper>(mapperPreds); } getPlan().remove(mr); if (succsCopy != null) { for (MapReduceOper succ : succsCopy) { try { getPlan().connect(newMR, succ); } catch (PlanException e) { int errCode = 2133; String msg = STR; throw new OptimizerException(msg, errCode, PigException.BUG, e); } } } if (predsCopy != null) { for (MapReduceOper pred : predsCopy) { if (newMR.getOperatorKey().equals(pred.getOperatorKey())) { continue; } try { getPlan().connect(pred, newMR); } catch (PlanException e) { int errCode = 2134; String msg = STR; throw new OptimizerException(msg, errCode, PigException.BUG, e); } } } mergeMROperProperties(mr, newMR); }
|
/**
* Removes the specified MR operator from the plan after the merge.
* Connects its predecessors and successors to the merged MR operator
*
* @param mr the MR operator to remove
* @param newMR the MR operator to be connected to the predecessors and
* the successors of the removed operator
* @throws VisitorException if connect operation fails
*/
|
Removes the specified MR operator from the plan after the merge. Connects its predecessors and successors to the merged MR operator
|
removeAndReconnect
|
{
"repo_name": "piaozhexiu/apache-pig",
"path": "src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MultiQueryOptimizer.java",
"license": "apache-2.0",
"size": 49740
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.apache.pig.PigException",
"org.apache.pig.impl.plan.PlanException",
"org.apache.pig.impl.plan.VisitorException",
"org.apache.pig.impl.plan.optimizer.OptimizerException"
] |
import java.util.ArrayList; import java.util.List; import org.apache.pig.PigException; import org.apache.pig.impl.plan.PlanException; import org.apache.pig.impl.plan.VisitorException; import org.apache.pig.impl.plan.optimizer.OptimizerException;
|
import java.util.*; import org.apache.pig.*; import org.apache.pig.impl.plan.*; import org.apache.pig.impl.plan.optimizer.*;
|
[
"java.util",
"org.apache.pig"
] |
java.util; org.apache.pig;
| 2,498,235
|
public BigDecimal getDiscount ()
{
BigDecimal bd = (BigDecimal)get_Value(COLUMNNAME_Discount);
if (bd == null)
return Env.ZERO;
return bd;
}
|
BigDecimal function () { BigDecimal bd = (BigDecimal)get_Value(COLUMNNAME_Discount); if (bd == null) return Env.ZERO; return bd; }
|
/** Get Discount %.
@return Discount in percent
*/
|
Get Discount %
|
getDiscount
|
{
"repo_name": "arthurmelo88/palmetalADP",
"path": "adempiere_360/base/src/org/compiere/model/X_C_RfQResponseLineQty.java",
"license": "gpl-2.0",
"size": 6387
}
|
[
"java.math.BigDecimal",
"org.compiere.util.Env"
] |
import java.math.BigDecimal; import org.compiere.util.Env;
|
import java.math.*; import org.compiere.util.*;
|
[
"java.math",
"org.compiere.util"
] |
java.math; org.compiere.util;
| 2,804,233
|
@Test
public void testRegionIterators() throws CacheException {
final Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
final int numberOfEntries = 10;
final String[] queries = new String[] {
"SELECT entry.value FROM " + this.regName + ".entries entry WHERE entry.value.id > 0",
"SELECT entry.value FROM " + this.regName + ".entries entry WHERE entry.key = 'key-1'",
"SELECT e.value FROM " + this.regName + ".entrySet e where e.value.id >= 0",
"SELECT * FROM " + this.regName + ".values p WHERE p.ticker = 'vmware'",};
|
void function() throws CacheException { final Host host = Host.getHost(0); VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); VM vm2 = host.getVM(2); VM vm3 = host.getVM(3); final int numberOfEntries = 10; final String[] queries = new String[] { STR + this.regName + STR, STR + this.regName + STR, STR + this.regName + STR, STR + this.regName + STR,};
|
/**
* Tests client-server query with region iterators.
*/
|
Tests client-server query with region iterators
|
testRegionIterators
|
{
"repo_name": "smgoller/geode",
"path": "geode-core/src/distributedTest/java/org/apache/geode/cache/query/dunit/PdxQueryDUnitTest.java",
"license": "apache-2.0",
"size": 140780
}
|
[
"org.apache.geode.cache.CacheException",
"org.apache.geode.test.dunit.Host"
] |
import org.apache.geode.cache.CacheException; import org.apache.geode.test.dunit.Host;
|
import org.apache.geode.cache.*; import org.apache.geode.test.dunit.*;
|
[
"org.apache.geode"
] |
org.apache.geode;
| 1,368,479
|
private static Budget createBudget(
AdWordsServicesInterface adWordsServices, AdWordsSession session) throws RemoteException {
BudgetServiceInterface budgetService =
adWordsServices.get(session, BudgetServiceInterface.class);
// Create a budget.
Budget budget = new Budget();
budget.setName("Interplanetary Cruise #" + System.currentTimeMillis());
Money budgetAmount = new Money();
// This budget equals 50.00 units of your account's currency, e.g.,
// 50 USD if your currency is USD.
budgetAmount.setMicroAmount(50_000_000L);
budget.setAmount(budgetAmount);
budget.setDeliveryMethod(BudgetBudgetDeliveryMethod.STANDARD);
// Non-shared budgets are required for Smart Shopping campaigns.
budget.setIsExplicitlyShared(false);
// Create operation.
BudgetOperation budgetOperation = new BudgetOperation();
budgetOperation.setOperand(budget);
budgetOperation.setOperator(Operator.ADD);
// Add the budget.
Budget newBudget = budgetService.mutate(new BudgetOperation[] {budgetOperation}).getValue(0);
System.out.printf(
"Budget with name '%s' and ID %d was added.%n",
newBudget.getName(), newBudget.getBudgetId());
return newBudget;
}
|
static Budget function( AdWordsServicesInterface adWordsServices, AdWordsSession session) throws RemoteException { BudgetServiceInterface budgetService = adWordsServices.get(session, BudgetServiceInterface.class); Budget budget = new Budget(); budget.setName(STR + System.currentTimeMillis()); Money budgetAmount = new Money(); budgetAmount.setMicroAmount(50_000_000L); budget.setAmount(budgetAmount); budget.setDeliveryMethod(BudgetBudgetDeliveryMethod.STANDARD); budget.setIsExplicitlyShared(false); BudgetOperation budgetOperation = new BudgetOperation(); budgetOperation.setOperand(budget); budgetOperation.setOperator(Operator.ADD); Budget newBudget = budgetService.mutate(new BudgetOperation[] {budgetOperation}).getValue(0); System.out.printf( STR, newBudget.getName(), newBudget.getBudgetId()); return newBudget; }
|
/**
* Creates a non-shared budget for a Smart Shopping campaign. Smart Shopping campaigns support
* only non-shared budgets.
*/
|
Creates a non-shared budget for a Smart Shopping campaign. Smart Shopping campaigns support only non-shared budgets
|
createBudget
|
{
"repo_name": "googleads/googleads-java-lib",
"path": "examples/adwords_axis/src/main/java/adwords/axis/v201809/shoppingcampaigns/AddSmartShoppingAd.java",
"license": "apache-2.0",
"size": 16088
}
|
[
"com.google.api.ads.adwords.axis.v201809.cm.Budget",
"com.google.api.ads.adwords.axis.v201809.cm.BudgetBudgetDeliveryMethod",
"com.google.api.ads.adwords.axis.v201809.cm.BudgetOperation",
"com.google.api.ads.adwords.axis.v201809.cm.BudgetServiceInterface",
"com.google.api.ads.adwords.axis.v201809.cm.Money",
"com.google.api.ads.adwords.axis.v201809.cm.Operator",
"com.google.api.ads.adwords.lib.client.AdWordsSession",
"com.google.api.ads.adwords.lib.factory.AdWordsServicesInterface",
"java.rmi.RemoteException"
] |
import com.google.api.ads.adwords.axis.v201809.cm.Budget; import com.google.api.ads.adwords.axis.v201809.cm.BudgetBudgetDeliveryMethod; import com.google.api.ads.adwords.axis.v201809.cm.BudgetOperation; import com.google.api.ads.adwords.axis.v201809.cm.BudgetServiceInterface; import com.google.api.ads.adwords.axis.v201809.cm.Money; import com.google.api.ads.adwords.axis.v201809.cm.Operator; import com.google.api.ads.adwords.lib.client.AdWordsSession; import com.google.api.ads.adwords.lib.factory.AdWordsServicesInterface; import java.rmi.RemoteException;
|
import com.google.api.ads.adwords.axis.v201809.cm.*; import com.google.api.ads.adwords.lib.client.*; import com.google.api.ads.adwords.lib.factory.*; import java.rmi.*;
|
[
"com.google.api",
"java.rmi"
] |
com.google.api; java.rmi;
| 2,644,296
|
public static JButton newDiceRollButton(final BoardCanvas canvas){
final JButton button = new JButton("Roll Dice");
button.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {canvas.getGameBoard().rollDice();}
|
static JButton function(final BoardCanvas canvas){ final JButton button = new JButton(STR); button.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent arg0) {canvas.getGameBoard().rollDice();}
|
/**
* Method which creates "Roll Dice" Button with its logic, and returns it
* @param canvas
* @return JButton containing logic for rolling dice
*/
|
Method which creates "Roll Dice" Button with its logic, and returns it
|
newDiceRollButton
|
{
"repo_name": "Sy4z/Cluedo",
"path": "src/ui/BoardFrame.java",
"license": "mit",
"size": 12938
}
|
[
"java.awt.event.ActionEvent",
"java.awt.event.ActionListener",
"javax.swing.JButton"
] |
import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.JButton;
|
import java.awt.event.*; import javax.swing.*;
|
[
"java.awt",
"javax.swing"
] |
java.awt; javax.swing;
| 1,700,766
|
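The newDiceRollButton entry above wires a Swing JButton to game logic through an ActionListener. The same pattern in a self-contained, JDK-only sketch, with a printed message standing in for canvas.getGameBoard().rollDice():

import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.SwingUtilities;

public class RollButtonSketch {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("Dice");
            JButton roll = new JButton("Roll Dice");
            // The listener body is where the real code calls rollDice().
            roll.addActionListener(e -> System.out.println("dice rolled"));
            frame.add(roll);
            frame.pack();
            frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
            frame.setVisible(true);
        });
    }
}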
@Test
public void testAddUpdateRemoveSyncItem() {
try {
ldapInterface = new OpenLdapInterface("ldap://be-mmt.babel.it/"+PSROOT, "", DM_USER, DM_PASS, false,false, piTypeCdao);
ldapInterface.setLdapId(ldapInterface.getCdao().getRdnAttribute());
String vcards[] = {
"vcard-1.vcf"
, "vcard-2.vcf","vcard-3.vcf", "vcard-4.vcf", "vcard-5.vcf"
};
Timestamp t0 = new Timestamp(System.currentTimeMillis());
for (String vcf : vcards) {
try {
SyncItem item = getResourceAsSyncItem(FCTF_BASIC + vcf, TYPE_VCF2);
String key = ldapInterface.addNewEntry(item);
addedEntries.add(String.format("(%s=%s)", ldapInterface.getLdapId(), key));
assertNotNull(key);
item.setTimestamp(t0);
item.getKey().setKeyValue(key);
ldapInterface.updateEntry(item);
} catch (Exception e) {
logger.error("missing file: "+ e.getMessage());
}
}
} catch (LDAPAccessException e) {
fail(e.getMessage());
}
}
|
void function() { try { ldapInterface = new OpenLdapInterface(STR+PSROOT, "", DM_USER, DM_PASS, false,false, piTypeCdao); ldapInterface.setLdapId(ldapInterface.getCdao().getRdnAttribute()); String vcards[] = { STR , STR,STR, STR, STR }; Timestamp t0 = new Timestamp(System.currentTimeMillis()); for (String vcf : vcards) { try { SyncItem item = getResourceAsSyncItem(FCTF_BASIC + vcf, TYPE_VCF2); String key = ldapInterface.addNewEntry(item); addedEntries.add(String.format(STR, ldapInterface.getLdapId(), key)); assertNotNull(key); item.setTimestamp(t0); item.getKey().setKeyValue(key); ldapInterface.updateEntry(item); } catch (Exception e) { logger.error(STR + e.getMessage()); } } } catch (LDAPAccessException e) { fail(e.getMessage()); } }
|
/**
* this should create a new syncItemKey to be returned
* @param si
* @return
*/
|
this should create a new syncItemKey to be returned
|
testAddUpdateRemoveSyncItem
|
{
"repo_name": "ioggstream/funambol-ldap",
"path": "src/test/java/com/funambol/LDAP/manager/OpenLdapInterfaceTest.java",
"license": "gpl-2.0",
"size": 14046
}
|
[
"com.funambol.LDAP"
] |
import com.funambol.LDAP;
|
import com.funambol.*;
|
[
"com.funambol"
] |
com.funambol;
| 2,619,578
|
public boolean removeCollisionGroup(CollisionManager collisionGroup) {
for (int i=0;i < collisions.length;i++) {
if (collisions[i] == collisionGroup) {
collisions = (CollisionManager[]) Utility.cut(collisions,i);
return true; // successfully removed
}
}
return false;
}
|
boolean function(CollisionManager collisionGroup) { for (int i=0;i < collisions.length;i++) { if (collisions[i] == collisionGroup) { collisions = (CollisionManager[]) Utility.cut(collisions,i); return true; } } return false; }
|
/**
* Removes specified collision group from this playfield.
*
* @return true, if the collision group is successfully removed.
*/
|
Removes specified collision group from this playfield
|
removeCollisionGroup
|
{
"repo_name": "idega/com.idega.games",
"path": "docs/GTGE/tutorials/src/GTGE/PlayField.java",
"license": "gpl-3.0",
"size": 14726
}
|
[
"com.golden.gamedev.util.Utility"
] |
import com.golden.gamedev.util.Utility;
|
import com.golden.gamedev.util.*;
|
[
"com.golden.gamedev"
] |
com.golden.gamedev;
| 2,472,851
|
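removeCollisionGroup above delegates the actual array shrinking to Utility.cut from the GTGE library. To make the entry easier to follow, here is a plain-JDK sketch of what such a cut operation does; the helper below is illustrative and is not the library's implementation.

import java.util.Arrays;

public class ArrayCutSketch {
    // Returns a copy of src with the element at index removed.
    static <T> T[] cut(T[] src, int index) {
        T[] result = Arrays.copyOf(src, src.length - 1);
        // Shift everything after the removed slot one position to the left.
        System.arraycopy(src, index + 1, result, index, src.length - index - 1);
        return result;
    }

    public static void main(String[] args) {
        String[] groups = {"a", "b", "c"};
        System.out.println(Arrays.toString(cut(groups, 1))); // prints [a, c]
    }
}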
public void deleteRadiologyForm(RadiologyForm form);
|
void function(RadiologyForm form);
|
/**
* Delete radiology form
*
* @param form
*/
|
Delete radiology form
|
deleteRadiologyForm
|
{
"repo_name": "kenyaehrs/hospitalcore",
"path": "api/src/main/java/org/openmrs/module/hospitalcore/db/RadiologyDAO.java",
"license": "gpl-2.0",
"size": 7014
}
|
[
"org.openmrs.module.hospitalcore.form.RadiologyForm"
] |
import org.openmrs.module.hospitalcore.form.RadiologyForm;
|
import org.openmrs.module.hospitalcore.form.*;
|
[
"org.openmrs.module"
] |
org.openmrs.module;
| 2,470,578
|
public static SpeechletResponse newTellResponse(final OutputSpeech outputSpeech, final Card card) {
if (card == null) {
throw new IllegalArgumentException("Card cannot be null");
}
SpeechletResponse response = newTellResponse(outputSpeech);
response.setCard(card);
return response;
}
|
static SpeechletResponse function(final OutputSpeech outputSpeech, final Card card) { if (card == null) { throw new IllegalArgumentException(STR); } SpeechletResponse response = newTellResponse(outputSpeech); response.setCard(card); return response; }
|
/**
* Creates and returns a response intended to tell the user something, both in speech and with a
* graphical card in the companion app. After the tell output is read to the user, the session
* ends.
* <p>
* All arguments in this method are required and cannot be null.
*
* @param outputSpeech
* output speech content for the tell voice response
* @param card
* card to display in the companion application
* @return SpeechletResponse spoken and visual response for the given input
*/
|
Creates and returns a response intended to tell the user something, both in speech and with a graphical card in the companion app. After the tell output is read to the user, the session ends. All arguments in this method are required and cannot be null
|
newTellResponse
|
{
"repo_name": "wesleywillis/you-know-nothing",
"path": "youKnowNothingOnShow/src/com/amazon/speech/speechlet/SpeechletResponse.java",
"license": "mit",
"size": 7786
}
|
[
"com.amazon.speech.ui.Card",
"com.amazon.speech.ui.OutputSpeech"
] |
import com.amazon.speech.ui.Card; import com.amazon.speech.ui.OutputSpeech;
|
import com.amazon.speech.ui.*;
|
[
"com.amazon.speech"
] |
com.amazon.speech;
| 1,489,661
|
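The newTellResponse entry above builds a spoken response plus a companion-app card. A hedged usage sketch follows; it assumes the PlainTextOutputSpeech and SimpleCard classes from the same com.amazon.speech.ui package the entry imports from, as in the Alexa Skills Kit SDK this file was copied from.

import com.amazon.speech.speechlet.SpeechletResponse;
import com.amazon.speech.ui.PlainTextOutputSpeech;
import com.amazon.speech.ui.SimpleCard;

public class TellResponseSketch {
    static SpeechletResponse goodbye() {
        PlainTextOutputSpeech speech = new PlainTextOutputSpeech();
        speech.setText("Goodbye.");
        SimpleCard card = new SimpleCard();
        card.setTitle("Session ended");
        card.setContent("Goodbye.");
        // The session ends after the speech is read and the card is shown.
        return SpeechletResponse.newTellResponse(speech, card);
    }
}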
EClass getPanel();
|
EClass getPanel();
|
/**
* Returns the meta object for class '{@link com.wireframesketcher.model.story.Panel <em>Panel</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>Panel</em>'.
* @see com.wireframesketcher.model.story.Panel
* @generated
*/
|
Returns the meta object for class '<code>com.wireframesketcher.model.story.Panel Panel</code>'.
|
getPanel
|
{
"repo_name": "Jose-Badeau/boomslang-wireframesketcher",
"path": "com.wireframesketcher.model/src/java/com/wireframesketcher/model/story/StoryPackage.java",
"license": "epl-1.0",
"size": 9121
}
|
[
"org.eclipse.emf.ecore.EClass"
] |
import org.eclipse.emf.ecore.EClass;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 773,516
|
public static String getChangelogString(Context context) {
return getStringFromRawResource(context, R.raw.changelog);
}
|
static String function(Context context) { return getStringFromRawResource(context, R.raw.changelog); }
|
/**
* Load the changelog string.
*
* @param context
* The current context.
* @return The changelog string.
*/
|
Load the changelog string
|
getChangelogString
|
{
"repo_name": "tjulk/gaeproxy",
"path": "src/org/gaeproxy/zirco/utils/ApplicationUtils.java",
"license": "gpl-3.0",
"size": 19052
}
|
[
"android.content.Context"
] |
import android.content.Context;
|
import android.content.*;
|
[
"android.content"
] |
android.content;
| 366,081
|
private boolean checkRefreshOrLoading(final WXComponent child) {
|
boolean function(final WXComponent child) {
|
/**
* Setting refresh view and loading view
* @param child the refresh_view or loading_view
*/
|
Setting refresh view and loading view
|
checkRefreshOrLoading
|
{
"repo_name": "HJ-StevenSun/Weex-TestDemo",
"path": "weex-dev/android/sdk/src/main/java/com/taobao/weex/ui/component/list/WXListComponent.java",
"license": "apache-2.0",
"size": 42840
}
|
[
"com.taobao.weex.ui.component.WXComponent"
] |
import com.taobao.weex.ui.component.WXComponent;
|
import com.taobao.weex.ui.component.*;
|
[
"com.taobao.weex"
] |
com.taobao.weex;
| 1,366,988
|
public void setupDatabase() {
try {
if (DBFunc.dbManager != null) {
DBFunc.dbManager.close();
}
Database database;
if (Storage.MySQL.USE) {
database = new com.intellectualcrafters.plot.database.MySQL(Storage.MySQL.HOST, Storage.MySQL.PORT, Storage.MySQL.DATABASE,
Storage.MySQL.USER, Storage.MySQL.PASSWORD);
} else if (Storage.SQLite.USE) {
File file = MainUtil.getFile(IMP.getDirectory(), Storage.SQLite.DB + ".db");
database = new com.intellectualcrafters.plot.database.SQLite(file);
} else {
PS.log(C.PREFIX + "&cNo storage type is set!");
this.IMP.disable();
return;
}
DBFunc.dbManager = new SQLManager(database, Storage.PREFIX, false);
this.plots_tmp = DBFunc.getPlots();
this.clusters_tmp = DBFunc.getClusters();
} catch (ClassNotFoundException | SQLException e) {
PS.log(C.PREFIX + "&cFailed to open DATABASE connection. The plugin will disable itself.");
if (Storage.MySQL.USE) {
PS.log("$4MYSQL");
} else if (Storage.SQLite.USE) {
PS.log("$4SQLITE");
}
PS.log("&d==== Here is an ugly stacktrace, if you are interested in those things ===");
e.printStackTrace();
PS.log("&d==== End of stacktrace ====");
PS.log("&6Please go to the " + IMP.getPluginName() + " 'storage.yml' and configure the database correctly.");
this.IMP.disable();
}
}
|
void function() { try { if (DBFunc.dbManager != null) { DBFunc.dbManager.close(); } Database database; if (Storage.MySQL.USE) { database = new com.intellectualcrafters.plot.database.MySQL(Storage.MySQL.HOST, Storage.MySQL.PORT, Storage.MySQL.DATABASE, Storage.MySQL.USER, Storage.MySQL.PASSWORD); } else if (Storage.SQLite.USE) { File file = MainUtil.getFile(IMP.getDirectory(), Storage.SQLite.DB + ".db"); database = new com.intellectualcrafters.plot.database.SQLite(file); } else { PS.log(C.PREFIX + STR); this.IMP.disable(); return; } DBFunc.dbManager = new SQLManager(database, Storage.PREFIX, false); this.plots_tmp = DBFunc.getPlots(); this.clusters_tmp = DBFunc.getClusters(); } catch (ClassNotFoundException | SQLException e) { PS.log(C.PREFIX + STR); if (Storage.MySQL.USE) { PS.log(STR); } else if (Storage.SQLite.USE) { PS.log(STR); } PS.log(STR); e.printStackTrace(); PS.log(STR); PS.log(STR + IMP.getPluginName() + STR); this.IMP.disable(); } }
|
/**
* Setup the database connection.
*/
|
Setup the database connection
|
setupDatabase
|
{
"repo_name": "manuelgu/PlotSquared",
"path": "Core/src/main/java/com/intellectualcrafters/plot/PS.java",
"license": "gpl-3.0",
"size": 81536
}
|
[
"com.intellectualcrafters.plot.config.Storage",
"com.intellectualcrafters.plot.database.DBFunc",
"com.intellectualcrafters.plot.database.Database",
"com.intellectualcrafters.plot.database.SQLManager",
"com.intellectualcrafters.plot.util.MainUtil",
"java.io.File",
"java.sql.SQLException"
] |
import com.intellectualcrafters.plot.config.Storage; import com.intellectualcrafters.plot.database.DBFunc; import com.intellectualcrafters.plot.database.Database; import com.intellectualcrafters.plot.database.SQLManager; import com.intellectualcrafters.plot.util.MainUtil; import java.io.File; import java.sql.SQLException;
|
import com.intellectualcrafters.plot.config.*; import com.intellectualcrafters.plot.database.*; import com.intellectualcrafters.plot.util.*; import java.io.*; import java.sql.*;
|
[
"com.intellectualcrafters.plot",
"java.io",
"java.sql"
] |
com.intellectualcrafters.plot; java.io; java.sql;
| 2,800,331
|
paintBackGround(buffer,Ecran);
for(int i=0;i<note.size();i++){
note.get(i).draw(time,buffer);
}
paintAdd(buffer, Ecran);
getContentPane().paintComponents(buffer);
pointeur.draw(time,buffer);
buffer.setColor(Color.white);
buffer.setFont(Content.font.deriveFont((float)Ecran.getHeight()/20));
buffer.drawString("score: "+ scorePercent+"%", (int)(Ecran.getWidth()*0.80), (int)Ecran.getHeight()/20);
g.drawImage(ArrierePlan,0,0,this);
}
|
paintBackGround(buffer,Ecran); for(int i=0;i<note.size();i++){ note.get(i).draw(time,buffer); } paintAdd(buffer, Ecran); getContentPane().paintComponents(buffer); pointeur.draw(time,buffer); buffer.setColor(Color.white); buffer.setFont(Content.font.deriveFont((float)Ecran.getHeight()/20)); buffer.drawString(STR+ scorePercent+"%", (int)(Ecran.getWidth()*0.80), (int)Ecran.getHeight()/20); g.drawImage(ArrierePlan,0,0,this); }
|
/**Override paint method
* paint all the components and all objects on the frame
 * @param g Graphics
*/
|
Override paint method paint all the components and all objects on the frame
|
paint
|
{
"repo_name": "kara71/KaraOK",
"path": "GameWindow.java",
"license": "gpl-2.0",
"size": 11134
}
|
[
"java.awt.Color"
] |
import java.awt.Color;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 1,651,928
|
@Test
public void testTransmitLoopback1() {
TransmitLoopbackInput lbTr1 = new DefaultTransmitLoopbackInput();
lbTr1.maintenanceDomain(Short.valueOf((short) 1));
lbTr1.maintenanceAssociation(Short.valueOf((short) 2));
lbTr1.maintenanceAssociationEndPoint(Short.valueOf((short) 3));
DefaultTargetAddress ta = new DefaultTargetAddress();
DefaultMepId mepId = new DefaultMepId();
mepId.mepId(MepIdType.of(4));
ta.addressType(mepId);
lbTr1.targetAddress(ta);
// lbTr1.dataTlv(new byte[]{0x01, 0x02, 0x03}); Not supported in onos-yang-tools just yet
lbTr1.numberOfMessages(10);
lbTr1.vlanDropEligible(true);
lbTr1.vlanPriority(PriorityType.of((short) 1));
try {
mseaCfmService.transmitLoopback(lbTr1, session);
} catch (NetconfException e) {
fail("Calling of TransmitLoopback failed: " + e.getMessage());
}
}
|
void function() { TransmitLoopbackInput lbTr1 = new DefaultTransmitLoopbackInput(); lbTr1.maintenanceDomain(Short.valueOf((short) 1)); lbTr1.maintenanceAssociation(Short.valueOf((short) 2)); lbTr1.maintenanceAssociationEndPoint(Short.valueOf((short) 3)); DefaultTargetAddress ta = new DefaultTargetAddress(); DefaultMepId mepId = new DefaultMepId(); mepId.mepId(MepIdType.of(4)); ta.addressType(mepId); lbTr1.targetAddress(ta); lbTr1.numberOfMessages(10); lbTr1.vlanDropEligible(true); lbTr1.vlanPriority(PriorityType.of((short) 1)); try { mseaCfmService.transmitLoopback(lbTr1, session); } catch (NetconfException e) { fail(STR + e.getMessage()); } }
|
/**
 * Using the remote MEP ID and all arguments.
*/
|
Using the remote MEP ID and all arguments
|
testTransmitLoopback1
|
{
"repo_name": "kuujo/onos",
"path": "drivers/microsemi/ea1000/src/test/java/org/onosproject/drivers/microsemi/yang/MseaCfmManagerTest.java",
"license": "apache-2.0",
"size": 19577
}
|
[
"org.junit.Assert",
"org.onosproject.netconf.NetconfException",
"org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.targetaddressgroup.addresstype.DefaultMepId",
"org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.transmitloopback.DefaultTransmitLoopbackInput",
"org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.transmitloopback.TransmitLoopbackInput",
"org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.transmitloopback.transmitloopbackinput.DefaultTargetAddress",
"org.onosproject.yang.gen.v1.mseatypes.rev20160229.mseatypes.MepIdType",
"org.onosproject.yang.gen.v1.mseatypes.rev20160229.mseatypes.PriorityType"
] |
import org.junit.Assert; import org.onosproject.netconf.NetconfException; import org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.targetaddressgroup.addresstype.DefaultMepId; import org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.transmitloopback.DefaultTransmitLoopbackInput; import org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.transmitloopback.TransmitLoopbackInput; import org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.transmitloopback.transmitloopbackinput.DefaultTargetAddress; import org.onosproject.yang.gen.v1.mseatypes.rev20160229.mseatypes.MepIdType; import org.onosproject.yang.gen.v1.mseatypes.rev20160229.mseatypes.PriorityType;
|
import org.junit.*; import org.onosproject.netconf.*; import org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.targetaddressgroup.addresstype.*; import org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.transmitloopback.*; import org.onosproject.yang.gen.v1.mseacfm.rev20160229.mseacfm.transmitloopback.transmitloopbackinput.*; import org.onosproject.yang.gen.v1.mseatypes.rev20160229.mseatypes.*;
|
[
"org.junit",
"org.onosproject.netconf",
"org.onosproject.yang"
] |
org.junit; org.onosproject.netconf; org.onosproject.yang;
| 1,214,763
|
public static <T> T createComponent(Class<T> componentClass, Object... dependencies) {
String fqn = componentClass.getName();
String packageName = componentClass.getPackage().getName();
// Accounts for inner classes, ie MyApplication$Component
String simpleName = fqn.substring(packageName.length() + 1);
String generatedName = (packageName + ".Dagger" + simpleName).replace('$', '_');
try {
Class<?> generatedClass = Class.forName(generatedName);
Object builder = generatedClass.getMethod("builder").invoke(null);
for (Method method : builder.getClass().getDeclaredMethods()) {
Class<?>[] params = method.getParameterTypes();
if (params.length == 1) {
Class<?> dependencyClass = params[0];
for (Object dependency : dependencies) {
if (dependencyClass.isAssignableFrom(dependency.getClass())) {
method.invoke(builder, dependency);
break;
}
}
}
}
//noinspection unchecked
return (T) builder.getClass().getMethod("build").invoke(builder);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
|
static <T> T function(Class<T> componentClass, Object... dependencies) { String fqn = componentClass.getName(); String packageName = componentClass.getPackage().getName(); String simpleName = fqn.substring(packageName.length() + 1); String generatedName = (packageName + STR + simpleName).replace('$', '_'); try { Class<?> generatedClass = Class.forName(generatedName); Object builder = generatedClass.getMethod(STR).invoke(null); for (Method method : builder.getClass().getDeclaredMethods()) { Class<?>[] params = method.getParameterTypes(); if (params.length == 1) { Class<?> dependencyClass = params[0]; for (Object dependency : dependencies) { if (dependencyClass.isAssignableFrom(dependency.getClass())) { method.invoke(builder, dependency); break; } } } } return (T) builder.getClass().getMethod("build").invoke(builder); } catch (Exception e) { throw new RuntimeException(e); } }
|
/**
* Magic method that creates a component with its dependencies set, by reflection. Relies on
* Dagger2 naming conventions.
*/
|
Magic method that creates a component with its dependencies set, by reflection. Relies on Dagger2 naming conventions
|
createComponent
|
{
"repo_name": "OpenSilk/Orpheus",
"path": "common-core/src/main/java/org/opensilk/common/core/mortar/DaggerService.java",
"license": "gpl-3.0",
"size": 2776
}
|
[
"java.lang.reflect.Method"
] |
import java.lang.reflect.Method;
|
import java.lang.reflect.*;
|
[
"java.lang"
] |
java.lang;
| 1,582,082
|
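createComponent above relies on Dagger 2's convention that a component interface pkg.Outer$Component is generated as pkg.DaggerOuter_Component. The name derivation on its own, as a JDK-only sketch with a local nested interface standing in for a real @Component (no generated class is loaded here):

package sketch;

public class DaggerNameSketch {
    interface Component {} // stand-in for an annotated component interface

    static String generatedName(Class<?> componentClass) {
        String fqn = componentClass.getName();
        String packageName = componentClass.getPackage().getName();
        String simpleName = fqn.substring(packageName.length() + 1);
        // The inner-class '$' becomes '_' in the generated class name.
        return (packageName + ".Dagger" + simpleName).replace('$', '_');
    }

    public static void main(String[] args) {
        // Prints sketch.DaggerDaggerNameSketch_Component
        System.out.println(generatedName(Component.class));
    }
}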
public void setAlbumService(
org.liferay.jukebox.service.AlbumService albumService) {
this.albumService = albumService;
}
|
void function( org.liferay.jukebox.service.AlbumService albumService) { this.albumService = albumService; }
|
/**
* Sets the album remote service.
*
* @param albumService the album remote service
*/
|
Sets the album remote service
|
setAlbumService
|
{
"repo_name": "juliocamarero/jukebox-portlet",
"path": "docroot/WEB-INF/src/org/liferay/jukebox/service/base/AlbumServiceBaseImpl.java",
"license": "gpl-2.0",
"size": 30416
}
|
[
"org.liferay.jukebox.service.AlbumService"
] |
import org.liferay.jukebox.service.AlbumService;
|
import org.liferay.jukebox.service.*;
|
[
"org.liferay.jukebox"
] |
org.liferay.jukebox;
| 11,739
|
public static synchronized String getLocalAddress() {
long now = System.currentTimeMillis();
if (cachedLocalAddress != null) {
if (cachedLocalAddressTime + CACHE_MILLIS > now) {
return cachedLocalAddress;
}
}
InetAddress bind = null;
boolean useLocalhost = false;
try {
bind = getBindAddress();
if (bind == null) {
useLocalhost = true;
}
} catch (UnknownHostException e) {
// ignore
}
if (useLocalhost) {
try {
bind = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
throw DbException.convert(e);
}
}
String address = bind == null ? "localhost" : getHostAddress(bind);
if (address.equals("127.0.0.1")) {
address = "localhost";
}
cachedLocalAddress = address;
cachedLocalAddressTime = now;
return address;
}
|
static synchronized String function() { long now = System.currentTimeMillis(); if (cachedLocalAddress != null) { if (cachedLocalAddressTime + CACHE_MILLIS > now) { return cachedLocalAddress; } } InetAddress bind = null; boolean useLocalhost = false; try { bind = getBindAddress(); if (bind == null) { useLocalhost = true; } } catch (UnknownHostException e) { } if (useLocalhost) { try { bind = InetAddress.getLocalHost(); } catch (UnknownHostException e) { throw DbException.convert(e); } } String address = bind == null ? STR : getHostAddress(bind); if (address.equals(STR)) { address = STR; } cachedLocalAddress = address; cachedLocalAddressTime = now; return address; }
|
/**
* Get the local host address as a string.
* For performance, the result is cached for one second.
*
* @return the local host address
*/
|
Get the local host address as a string. For performance, the result is cached for one second
|
getLocalAddress
|
{
"repo_name": "titus08/frostwire-desktop",
"path": "lib/jars-src/h2-1.3.164/org/h2/util/NetUtils.java",
"license": "gpl-3.0",
"size": 8951
}
|
[
"java.net.InetAddress",
"java.net.UnknownHostException",
"org.h2.message.DbException"
] |
import java.net.InetAddress; import java.net.UnknownHostException; import org.h2.message.DbException;
|
import java.net.*; import org.h2.message.*;
|
[
"java.net",
"org.h2.message"
] |
java.net; org.h2.message;
| 1,939,678
|
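getLocalAddress above caches the resolved host address so repeated callers avoid a lookup; the doc states a one-second window. The same time-based caching idea reduced to a JDK-only sketch:

import java.net.InetAddress;
import java.net.UnknownHostException;

public class LocalAddressCacheSketch {
    private static final long CACHE_MILLIS = 1000; // the documented one-second window
    private static String cachedAddress;
    private static long cachedAt;

    public static synchronized String getLocalAddress() throws UnknownHostException {
        long now = System.currentTimeMillis();
        if (cachedAddress != null && cachedAt + CACHE_MILLIS > now) {
            return cachedAddress; // still fresh, skip the lookup
        }
        cachedAddress = InetAddress.getLocalHost().getHostAddress();
        cachedAt = now;
        return cachedAddress;
    }

    public static void main(String[] args) throws UnknownHostException {
        System.out.println(getLocalAddress());
    }
}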
protected void initialize(Class targetType, Class fieldType,
List<Field> data, Map<Field, Integer> defaultFields,
EquivalentConverter<TField> converter, Map<Class, StructureModifier> subTypeCache,
boolean useStructureCompiler) {
this.targetType = targetType;
this.fieldType = fieldType;
this.data = data;
this.defaultFields = defaultFields;
this.converter = converter;
this.subtypeCache = subTypeCache;
this.useStructureCompiler = useStructureCompiler;
}
|
void function(Class targetType, Class fieldType, List<Field> data, Map<Field, Integer> defaultFields, EquivalentConverter<TField> converter, Map<Class, StructureModifier> subTypeCache, boolean useStructureCompiler) { this.targetType = targetType; this.fieldType = fieldType; this.data = data; this.defaultFields = defaultFields; this.converter = converter; this.subtypeCache = subTypeCache; this.useStructureCompiler = useStructureCompiler; }
|
/**
* Initialize every field of this class.
* @param targetType - type of the object we're reading and writing from.
* @param fieldType - the common type of the fields we're modifying.
* @param data - list of fields to modify.
* @param defaultFields - list of fields that will be automatically initialized.
* @param converter - converts between the common field type and the actual type the consumer expects.
* @param subTypeCache - a structure modifier cache.
* @param useStructureCompiler - whether or not to automatically compile this structure modifier.
*/
|
Initialize every field of this class
|
initialize
|
{
"repo_name": "ewized/ProtocolLib",
"path": "ProtocolLib/src/main/java/com/comphenix/protocol/reflect/StructureModifier.java",
"license": "gpl-2.0",
"size": 24208
}
|
[
"java.lang.reflect.Field",
"java.util.List",
"java.util.Map"
] |
import java.lang.reflect.Field; import java.util.List; import java.util.Map;
|
import java.lang.reflect.*; import java.util.*;
|
[
"java.lang",
"java.util"
] |
java.lang; java.util;
| 632,389
|
public static void AESEncrypt(ArrayList<String> plaintext) {
try {
System.out.println("Running AES Encryption");
// Get an instance of the AES key generator
KeyGenerator kpg = KeyGenerator.getInstance("AES");
kpg.init(128);
SecretKey SecKey = kpg.generateKey();
KeyDump("AESKey", SecKey.getEncoded());
// Get cipher instance for encryption
Cipher c = Cipher.getInstance("AES");
c.init(Cipher.ENCRYPT_MODE,SecKey);
//c.init(Cipher.ENCRYPT_MODE, SecKey, iv);
System.out.println("Private key generated: ");
System.out.println(SecKey);
for (int i=0; i<plaintext.size(); i++) {
String word = plaintext.get(i);
byte[] text = word.getBytes();
byte[] textEncrypted = c.doFinal(text);
// output as hex to avoid String removal of characters.
plaintext.set(i, DatatypeConverter.printHexBinary(textEncrypted));
}
} catch(NoSuchAlgorithmException e){
e.printStackTrace();
} catch(NoSuchPaddingException e){
e.printStackTrace();
} catch(InvalidKeyException e){
e.printStackTrace();
} catch(IllegalBlockSizeException e){
e.printStackTrace();
} catch(BadPaddingException e){
e.printStackTrace();
}
}
|
static void function(ArrayList<String> plaintext) { try { System.out.println(STR); KeyGenerator kpg = KeyGenerator.getInstance("AES"); kpg.init(128); SecretKey SecKey = kpg.generateKey(); KeyDump(STR, SecKey.getEncoded()); Cipher c = Cipher.getInstance("AES"); c.init(Cipher.ENCRYPT_MODE,SecKey); System.out.println(STR); System.out.println(SecKey); for (int i=0; i<plaintext.size(); i++) { String word = plaintext.get(i); byte[] text = word.getBytes(); byte[] textEncrypted = c.doFinal(text); plaintext.set(i, DatatypeConverter.printHexBinary(textEncrypted)); } } catch(NoSuchAlgorithmException e){ e.printStackTrace(); } catch(NoSuchPaddingException e){ e.printStackTrace(); } catch(InvalidKeyException e){ e.printStackTrace(); } catch(IllegalBlockSizeException e){ e.printStackTrace(); } catch(BadPaddingException e){ e.printStackTrace(); } }
|
/** AES encryption function
*/
|
AES encryption function
|
AESEncrypt
|
{
"repo_name": "justincely/classwork",
"path": "JHU/foundations_algorithms/hw4-6/src/Encryption.java",
"license": "bsd-3-clause",
"size": 8489
}
|
[
"java.security.InvalidKeyException",
"java.security.NoSuchAlgorithmException",
"java.util.ArrayList",
"javax.crypto.BadPaddingException",
"javax.crypto.Cipher",
"javax.crypto.IllegalBlockSizeException",
"javax.crypto.KeyGenerator",
"javax.crypto.NoSuchPaddingException",
"javax.crypto.SecretKey",
"javax.xml.bind.DatatypeConverter"
] |
import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import javax.crypto.BadPaddingException; import javax.crypto.Cipher; import javax.crypto.IllegalBlockSizeException; import javax.crypto.KeyGenerator; import javax.crypto.NoSuchPaddingException; import javax.crypto.SecretKey; import javax.xml.bind.DatatypeConverter;
|
import java.security.*; import java.util.*; import javax.crypto.*; import javax.xml.bind.*;
|
[
"java.security",
"java.util",
"javax.crypto",
"javax.xml"
] |
java.security; java.util; javax.crypto; javax.xml;
| 483,729
|
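The AESEncrypt entry above only encrypts and prints hex. For reference, a self-contained JCE round trip (encrypt, then decrypt with the same key) is sketched below; it is not taken from the repository. Note that the bare "AES" transformation used here, as in the entry, typically resolves to ECB with PKCS5 padding, which is acceptable for a sketch but not recommended in production.

import java.nio.charset.StandardCharsets;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.xml.bind.DatatypeConverter;

public class AesRoundTripSketch {
    public static void main(String[] args) throws Exception {
        KeyGenerator kg = KeyGenerator.getInstance("AES");
        kg.init(128);
        SecretKey key = kg.generateKey();

        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(Cipher.ENCRYPT_MODE, key);
        byte[] ct = cipher.doFinal("hello".getBytes(StandardCharsets.UTF_8));
        System.out.println(DatatypeConverter.printHexBinary(ct));

        cipher.init(Cipher.DECRYPT_MODE, key);
        // Same key and transformation, so the original plaintext comes back.
        System.out.println(new String(cipher.doFinal(ct), StandardCharsets.UTF_8));
    }
}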
@VisibleForTesting
synchronized void updateUsersGroups(boolean force) {
final String METHOD = "updateUsersGroups";
LOGGER.entering(CLASS_NAME, METHOD);
try {
LOGGER.fine("Forcing cache update: " + force);
if (!setUpResources(force)) {
return;
}
// Pass 0 - Reset domain cache
List<String> userUnids =
getViewUnids(directoryDatabase, NCCONST.DIRVIEW_VIMUSERS);
updateNotesDomainNames(userUnids);
// Pass 1 - Update groups
List<String> groupUnids =
getViewUnids(directoryDatabase, NCCONST.DIRVIEW_VIMGROUPS);
updateGroups(groupUnids);
// Pass 2 - Update people
updateUsers(userUnids);
// Pass 3 - Update roles
// Role update is moved from the maintenance thread to the traversal
// thread so that the update only occurs when the database ACL is updated.
// Pass 4 - Delete any users that no longer exist
checkUserDeletions();
// Pass 5 - Delete any groups that no longer exist
checkGroupDeletions();
setLastCacheUpdate();
setCacheInitialized();
} catch (Exception e) {
LOGGER.logp(Level.SEVERE, CLASS_NAME, METHOD,
"Failure updating user/group cache", e);
} finally {
releaseResources();
LOGGER.exiting(CLASS_NAME, METHOD);
}
}
|
synchronized void updateUsersGroups(boolean force) { final String METHOD = STR; LOGGER.entering(CLASS_NAME, METHOD); try { LOGGER.fine(STR + force); if (!setUpResources(force)) { return; } List<String> userUnids = getViewUnids(directoryDatabase, NCCONST.DIRVIEW_VIMUSERS); updateNotesDomainNames(userUnids); List<String> groupUnids = getViewUnids(directoryDatabase, NCCONST.DIRVIEW_VIMGROUPS); updateGroups(groupUnids); updateUsers(userUnids); checkUserDeletions(); checkGroupDeletions(); setLastCacheUpdate(); setCacheInitialized(); } catch (Exception e) { LOGGER.logp(Level.SEVERE, CLASS_NAME, METHOD, STR, e); } finally { releaseResources(); LOGGER.exiting(CLASS_NAME, METHOD); } }
|
/**
* Updates the cached lists of people and groups. When force is
 * true, the configured cache update interval is ignored and the
* user and group cache is updated.
*
* @param force if true, force an update
*/
|
Updates the cached lists of people and groups. When force is true, the configured cache update interval is ignored and the user and group cache is updated
|
updateUsersGroups
|
{
"repo_name": "xingh/google-enterprise-connector-notes",
"path": "projects/notes-core/source/java/com/google/enterprise/connector/notes/NotesUserGroupManager.java",
"license": "apache-2.0",
"size": 90723
}
|
[
"java.util.List",
"java.util.logging.Level"
] |
import java.util.List; import java.util.logging.Level;
|
import java.util.*; import java.util.logging.*;
|
[
"java.util"
] |
java.util;
| 1,618,932
|
if (StringUtils.isBlank(text)) {
return text;
}
if (StringUtils.isBlank(fromEncoding)) {
logger.warn("No character encoding specified to convert from, using UTF-8");
fromEncoding = "UTF-8";
}
if (StringUtils.isBlank(toEncoding)) {
logger.warn("No character encoding specified to convert to, using UTF-8");
toEncoding = "UTF-8";
}
try {
text = new String(text.getBytes(fromEncoding), toEncoding);
} catch (UnsupportedEncodingException e) {
// bad encoding
logger.warn("Unable to convert value " + text + " from " + fromEncoding + " to " + toEncoding, e);
}
return text;
}
|
if (StringUtils.isBlank(text)) { return text; } if (StringUtils.isBlank(fromEncoding)) { logger.warn(STR); fromEncoding = "UTF-8"; } if (StringUtils.isBlank(toEncoding)) { logger.warn(STR); toEncoding = "UTF-8"; } try { text = new String(text.getBytes(fromEncoding), toEncoding); } catch (UnsupportedEncodingException e) { logger.warn(STR + text + STR + fromEncoding + STR + toEncoding, e); } return text; }
|
/**
* Convert a string value from one encoding to another.
*
* @param text The string that is to be converted.
* @param fromEncoding The encoding that the string is currently encoded in.
* @param toEncoding The encoding that the string is to be encoded to.
* @return The encoded string.
*/
|
Convert a string value from one encoding to another
|
convertEncoding
|
{
"repo_name": "opendatakraken/openbiwiki",
"path": "openbiwiki-core/src/main/java/org/jamwiki/utils/Utilities.java",
"license": "mit",
"size": 17621
}
|
[
"java.io.UnsupportedEncodingException",
"org.apache.commons.lang3.StringUtils"
] |
import java.io.UnsupportedEncodingException; import org.apache.commons.lang3.StringUtils;
|
import java.io.*; import org.apache.commons.lang3.*;
|
[
"java.io",
"org.apache.commons"
] |
java.io; org.apache.commons;
| 1,289,187
|
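convertEncoding above re-encodes a String through getBytes/new String. A minimal JDK-only illustration of that round trip, shown for the common repair case where UTF-8 bytes were wrongly decoded as ISO-8859-1; StandardCharsets avoids the UnsupportedEncodingException handling the entry needs for free-form encoding names.

import java.nio.charset.StandardCharsets;

public class EncodingSketch {
    public static void main(String[] args) {
        String garbled = "cafÃ©"; // "café" whose UTF-8 bytes were decoded as ISO-8859-1
        // Re-encode as ISO-8859-1, then decode as UTF-8: the same
        // getBytes/new String conversion the entry performs.
        String repaired = new String(garbled.getBytes(StandardCharsets.ISO_8859_1),
                StandardCharsets.UTF_8);
        System.out.println(repaired); // prints café
    }
}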
public ArrayList<String> getZettelForms(int nr) {
// retrieve entry content
return Tools.getFormsFromString(getZettelContent(nr));
}
|
ArrayList<String> function(int nr) { return Tools.getFormsFromString(getZettelContent(nr)); }
|
/**
 * This method extracts all occurrences of possible form-tags (Laws of Form)
* from an entry with the index-number {@code nr} and returns them as an
* array list of strings.
*
* @param nr the index-number of the entry where the forms should be
* retrieved
* @return an array list with all form-tags of that entry as strings, or
* {@code null} if no form tag was found.
*/
|
This method extracts all occurrences of possible form-tags (Laws of Form) from an entry with the index-number nr and returns them as an array list of strings
|
getZettelForms
|
{
"repo_name": "sjPlot/Zettelkasten",
"path": "src/main/java/de/danielluedecke/zettelkasten/database/Daten.java",
"license": "gpl-3.0",
"size": 336724
}
|
[
"de.danielluedecke.zettelkasten.util.Tools",
"java.util.ArrayList"
] |
import de.danielluedecke.zettelkasten.util.Tools; import java.util.ArrayList;
|
import de.danielluedecke.zettelkasten.util.*; import java.util.*;
|
[
"de.danielluedecke.zettelkasten",
"java.util"
] |
de.danielluedecke.zettelkasten; java.util;
| 181,428
|
void onOpen( Response response);
|
void onOpen( Response response);
|
/**
* Called when the request has successfully been upgraded to a web socket.
*/
|
Called when the request has successfully been upgraded to a web socket
|
onOpen
|
{
"repo_name": "rouzwawi/kubernetes-client",
"path": "kubernetes-client/src/main/java/io/fabric8/kubernetes/client/dsl/ExecListener.java",
"license": "apache-2.0",
"size": 1715
}
|
[
"com.squareup.okhttp.Response"
] |
import com.squareup.okhttp.Response;
|
import com.squareup.okhttp.*;
|
[
"com.squareup.okhttp"
] |
com.squareup.okhttp;
| 2,674,263
|
protected ExecutorService getEventExecutor() {
return eventExecutor;
}
|
ExecutorService function() { return eventExecutor; }
|
/**
* Gets the {@link ExecutorService} that executes listeners' code.
*
* @return the {@link ExecutorService}.
*/
|
Gets the <code>ExecutorService</code> that executes listeners' code
|
getEventExecutor
|
{
"repo_name": "dizitart/nitrite-database",
"path": "nitrite/src/main/java/org/dizitart/no2/event/NitriteEventBus.java",
"license": "apache-2.0",
"size": 2422
}
|
[
"java.util.concurrent.ExecutorService"
] |
import java.util.concurrent.ExecutorService;
|
import java.util.concurrent.*;
|
[
"java.util"
] |
java.util;
| 2,548,766
|
private boolean needsUpdate(FileStatus srcstatus,
FileSystem dstfs, Path dstpath) throws IOException {
return update && !sameFile(srcstatus.getPath().getFileSystem(job),
srcstatus, dstfs, dstpath, skipCRCCheck);
}
|
boolean function(FileStatus srcstatus, FileSystem dstfs, Path dstpath) throws IOException { return update && !sameFile(srcstatus.getPath().getFileSystem(job), srcstatus, dstfs, dstpath, skipCRCCheck); }
|
/**
* Return true if dst should be replaced by src and the update flag is set.
* Right now, this merely checks that the src and dst len are not equal.
* This should be improved on once modification times, CRCs, etc. can
* be meaningful in this context.
* @throws IOException
*/
|
Return true if dst should be replaced by src and the update flag is set. Right now, this merely checks that the src and dst len are not equal. This should be improved on once modification times, CRCs, etc. can be meaningful in this context
|
needsUpdate
|
{
"repo_name": "iVCE/RDFS",
"path": "src/tools/org/apache/hadoop/tools/DistCp.java",
"license": "apache-2.0",
"size": 107503
}
|
[
"java.io.IOException",
"org.apache.hadoop.fs.FileStatus",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path"
] |
import java.io.IOException; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path;
|
import java.io.*; import org.apache.hadoop.fs.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 496,909
|
public void setSwitchId(SwitchId switchId) {
if (switchId == null) {
throw new IllegalArgumentException("need to set a switch_id");
} else if (!Utils.validateSwitchId(switchId)) {
throw new IllegalArgumentException("need to set valid value for switch_id");
}
this.switchId = switchId;
}
|
void function(SwitchId switchId) { if (switchId == null) { throw new IllegalArgumentException(STR); } else if (!Utils.validateSwitchId(switchId)) { throw new IllegalArgumentException(STR); } this.switchId = switchId; }
|
/**
* Sets switch id.
*
* @param switchId switch id
*/
|
Sets switch id
|
setSwitchId
|
{
"repo_name": "jonvestal/open-kilda",
"path": "src-java/floodlight-service/floodlight-api/src/main/java/org/openkilda/messaging/command/stats/MeterConfigStatsRequest.java",
"license": "apache-2.0",
"size": 2037
}
|
[
"org.openkilda.messaging.Utils",
"org.openkilda.model.SwitchId"
] |
import org.openkilda.messaging.Utils; import org.openkilda.model.SwitchId;
|
import org.openkilda.messaging.*; import org.openkilda.model.*;
|
[
"org.openkilda.messaging",
"org.openkilda.model"
] |
org.openkilda.messaging; org.openkilda.model;
| 1,439,648
|
public File writeToTempFile(CatalogType catalog_obj) {
return (this.writeToTempFile(catalog_obj.fullName(), null));
}
|
File function(CatalogType catalog_obj) { return (this.writeToTempFile(catalog_obj.fullName(), null)); }
|
/**
 * Convenience method to write the GraphvizExport handle to a file in the temporary directory
* @param catalog_obj
* @return
* @throws Exception
*/
|
Convenience method to write the GraphvizExport handle to a file in the temporary directory
|
writeToTempFile
|
{
"repo_name": "malin1993ml/h-store",
"path": "src/frontend/edu/brown/graphs/GraphvizExport.java",
"license": "gpl-3.0",
"size": 18748
}
|
[
"java.io.File",
"org.voltdb.catalog.CatalogType"
] |
import java.io.File; import org.voltdb.catalog.CatalogType;
|
import java.io.*; import org.voltdb.catalog.*;
|
[
"java.io",
"org.voltdb.catalog"
] |
java.io; org.voltdb.catalog;
| 2,526,904
|
public CloudAppendBlob getAppendBlobReference(final String blobName) throws URISyntaxException, StorageException {
return this.getAppendBlobReference(blobName, null);
}
|
CloudAppendBlob function(final String blobName) throws URISyntaxException, StorageException { return this.getAppendBlobReference(blobName, null); }
|
/**
* Returns a reference to a {@link CloudAppendBlob} object that represents an append blob in the directory.
*
* @param blobName
* A <code>String</code> that represents the name of the blob.
*
* @return A {@link CloudAppendBlob} object that represents a reference to the specified append blob.
*
* @throws StorageException
* If a storage service error occurred.
* @throws URISyntaxException
* If the resource URI is invalid.
*/
|
Returns a reference to a <code>CloudAppendBlob</code> object that represents an append blob in the directory
|
getAppendBlobReference
|
{
"repo_name": "risezhang/azure-storage-cli",
"path": "src/main/java/com/microsoft/azure/storage/blob/CloudBlobDirectory.java",
"license": "mit",
"size": 21112
}
|
[
"com.microsoft.azure.storage.StorageException",
"java.net.URISyntaxException"
] |
import com.microsoft.azure.storage.StorageException; import java.net.URISyntaxException;
|
import com.microsoft.azure.storage.*; import java.net.*;
|
[
"com.microsoft.azure",
"java.net"
] |
com.microsoft.azure; java.net;
| 455,760
|
return new BundleBuilder();
}
public static class BundleBuilder {
private Bundle mBundle;
private BundleBuilder() {
mBundle = mock(Bundle.class);
}
|
return new BundleBuilder(); } public static class BundleBuilder { private Bundle mBundle; private BundleBuilder() { mBundle = mock(Bundle.class); }
|
/**
* Bundle builder factory method.
*
* @return The bundle builder created.
*/
|
Bundle builder factory method
|
builder
|
{
"repo_name": "mobgen/halo-android",
"path": "sdk/halo-testing/src/main/java/com/mobgen/halo/android/testing/BundleTestUtils.java",
"license": "apache-2.0",
"size": 5555
}
|
[
"android.os.Bundle",
"org.mockito.Mockito"
] |
import android.os.Bundle; import org.mockito.Mockito;
|
import android.os.*; import org.mockito.*;
|
[
"android.os",
"org.mockito"
] |
android.os; org.mockito;
| 2,309,015
|
public String getAllowInsertDescription() {
return EnumYesNo.decodeValue(this.allowInsert);
}
|
String function() { return EnumYesNo.decodeValue(this.allowInsert); }
|
/**
* Gets the property description.
*
* @return the property description.
*/
|
Gets the property description
|
getAllowInsertDescription
|
{
"repo_name": "aosolorzano/hiperium-model",
"path": "src/main/java/com/hiperium/model/security/ProfileFunctionality.java",
"license": "lgpl-3.0",
"size": 12559
}
|
[
"com.hiperium.model.EnumYesNo"
] |
import com.hiperium.model.EnumYesNo;
|
import com.hiperium.model.*;
|
[
"com.hiperium.model"
] |
com.hiperium.model;
| 26,101
|
List<String> getDatabases(String databasePattern) throws MetaException, TException;
|
List<String> getDatabases(String databasePattern) throws MetaException, TException;
|
/**
* Get the names of all databases in the MetaStore that match the given pattern.
* @param databasePattern
* @return List of database names.
* @throws MetaException
* @throws TException
*/
|
Get the names of all databases in the MetaStore that match the given pattern
|
getDatabases
|
{
"repo_name": "winningsix/hive",
"path": "metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java",
"license": "apache-2.0",
"size": 53383
}
|
[
"java.util.List",
"org.apache.hadoop.hive.metastore.api.MetaException",
"org.apache.thrift.TException"
] |
import java.util.List; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.thrift.TException;
|
import java.util.*; import org.apache.hadoop.hive.metastore.api.*; import org.apache.thrift.*;
|
[
"java.util",
"org.apache.hadoop",
"org.apache.thrift"
] |
java.util; org.apache.hadoop; org.apache.thrift;
| 2,371,840
|
public ResourceUtilization getAggregatedContainersUtilization() {
return this.containersUtilization;
}
|
ResourceUtilization function() { return this.containersUtilization; }
|
/**
* Get the resource utilization of the containers in the node.
* @return Resource utilization of the containers.
*/
|
Get the resource utilization of the containers in the node
|
getAggregatedContainersUtilization
|
{
"repo_name": "aliyun-beta/aliyun-oss-hadoop-fs",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerNode.java",
"license": "apache-2.0",
"size": 12125
}
|
[
"org.apache.hadoop.yarn.api.records.ResourceUtilization"
] |
import org.apache.hadoop.yarn.api.records.ResourceUtilization;
|
import org.apache.hadoop.yarn.api.records.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 1,100,970
|
public void setSelectedIndexWithCallback(int index) {
float angle;
if (isStepped) {
if (index < 0) index = 0;
else if (index > stepValues.size()-1) index = stepValues.size()-1;
setInternalIndex(index);
angle = index*stepSize-180+startGap;
} else {
if (index < 0) index = 0;
else if (index > 100) index = 100;
setInternalIndex(index);
angle = index*stepSize-180+startGap;
}
elCenter.setLocalRotation(elCenter.getLocalRotation().fromAngleAxis(-(angle*FastMath.DEG_TO_RAD), Vector3f.UNIT_Z));
}
|
void function(int index) { float angle; if (isStepped) { if (index < 0) index = 0; else if (index > stepValues.size()-1) index = stepValues.size()-1; setInternalIndex(index); angle = index*stepSize-180+startGap; } else { if (index < 0) index = 0; else if (index > 100) index = 100; setInternalIndex(index); angle = index*stepSize-180+startGap; } elCenter.setLocalRotation(elCenter.getLocalRotation().fromAngleAxis(-(angle*FastMath.DEG_TO_RAD), Vector3f.UNIT_Z)); }
|
/**
* Sets the selected index for both free-floating and stepped Dials
* @param index float
*/
|
Sets the selected index for both free-floating and stepped Dials
|
setSelectedIndexWithCallback
|
{
"repo_name": "sytrox/fortgeschrittene-Spieleenticklung",
"path": "src/tonegod/gui/controls/lists/Dial.java",
"license": "apache-2.0",
"size": 14484
}
|
[
"com.jme3.math.FastMath",
"com.jme3.math.Vector3f"
] |
import com.jme3.math.FastMath; import com.jme3.math.Vector3f;
|
import com.jme3.math.*;
|
[
"com.jme3.math"
] |
com.jme3.math;
| 729,485
|
public Element toElement() {
return myElement;
}
@NotThreadSafe
public static class Builder {
private final String myFieldName;
public Builder(final String fieldName) {
myFieldName = fieldName;
}
|
Element function() { return myElement; } public static class Builder { private final String myFieldName; public Builder(final String fieldName) { myFieldName = fieldName; }
|
/**
* Returns the element for the group operator's field.
*
* @return The element for the group operator's field.
*/
|
Returns the element for the group operator's field
|
toElement
|
{
"repo_name": "allanbank/mongodb-async-driver",
"path": "src/main/java/com/allanbank/mongodb/builder/AggregationGroupField.java",
"license": "apache-2.0",
"size": 15395
}
|
[
"com.allanbank.mongodb.bson.Element"
] |
import com.allanbank.mongodb.bson.Element;
|
import com.allanbank.mongodb.bson.*;
|
[
"com.allanbank.mongodb"
] |
com.allanbank.mongodb;
| 838,957
|
public Iterable<Item> modifyShop(Iterable<Item> shop);
|
Iterable<Item> function(Iterable<Item> shop);
|
/**
* Transforms a list of items.
*
* A default implementation is to return the parameter.
*
* @param shop the items
* @return the modified list
*/
|
Transforms a list of items. A default implementation is to return the parameter
|
modifyShop
|
{
"repo_name": "eliatlarge/FEMultiPlayer-V2",
"path": "src/net/fe/modifier/Modifier.java",
"license": "gpl-3.0",
"size": 1144
}
|
[
"net.fe.unit.Item"
] |
import net.fe.unit.Item;
|
import net.fe.unit.*;
|
[
"net.fe.unit"
] |
net.fe.unit;
| 2,726,749
|
public static void putPFStat( long id, Stat type, double s)
{
if( _disabled )
return; // do nothing
//check if parfor exists
if( !_pfstats.containsKey(id) )
_pfstats.put(id, new HashMap<Stat,LinkedList<Double>>());
HashMap<Stat,LinkedList<Double>> allstats = _pfstats.get(id);
//check if stat type exists
if( !allstats.containsKey(type) )
allstats.put(type, new LinkedList<Double>());
LinkedList<Double> stats = allstats.get(type);
//add new stat
stats.addLast(s);
}
|
static void function( long id, Stat type, double s) { if( _disabled ) return; if( !_pfstats.containsKey(id) ) _pfstats.put(id, new HashMap<Stat,LinkedList<Double>>()); HashMap<Stat,LinkedList<Double>> allstats = _pfstats.get(id); if( !allstats.containsKey(type) ) allstats.put(type, new LinkedList<Double>()); LinkedList<Double> stats = allstats.get(type); stats.addLast(s); }
|
/**
* Puts a specific parfor statistic for future analysis into the repository.
*
* @param id ?
* @param type parfor statistic type
* @param s ?
*/
|
Puts a specific parfor statistic for future analysis into the repository
|
putPFStat
|
{
"repo_name": "apache/incubator-systemml",
"path": "src/main/java/org/apache/sysds/runtime/controlprogram/parfor/stat/StatisticMonitor.java",
"license": "apache-2.0",
"size": 8357
}
|
[
"java.util.HashMap",
"java.util.LinkedList"
] |
import java.util.HashMap; import java.util.LinkedList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 472,471
|
void validate(String name, String value, ParameterDescription pd) throws ParameterException;
|
void validate(String name, String value, ParameterDescription pd) throws ParameterException;
|
/**
* Validate the parameter.
*
* @param name The name of the parameter (e.g. "-host").
* @param value The value of the parameter that we need to validate
* @param pd The description of this parameter
*
* @throws ParameterException Thrown if the value of the parameter is invalid.
*/
|
Validate the parameter
|
validate
|
{
"repo_name": "jbunting/cli4j",
"path": "src/main/java/io/bunting/cli4j/parser/IParameterValidator2.java",
"license": "apache-2.0",
"size": 1403
}
|
[
"io.bunting.cli4j.parser.ParameterDescription",
"io.bunting.cli4j.parser.ParameterException"
] |
import io.bunting.cli4j.parser.ParameterDescription; import io.bunting.cli4j.parser.ParameterException;
|
import io.bunting.cli4j.parser.*;
|
[
"io.bunting.cli4j"
] |
io.bunting.cli4j;
| 405,901
|
public Description description() {
return description;
}
|
Description function() { return description; }
|
/**
* Returns the description of this condition.
*
* @return the description of this condition.
*/
|
Returns the description of this condition
|
description
|
{
"repo_name": "xasx/assertj-core",
"path": "src/main/java/org/assertj/core/api/Condition.java",
"license": "apache-2.0",
"size": 5080
}
|
[
"org.assertj.core.description.Description"
] |
import org.assertj.core.description.Description;
|
import org.assertj.core.description.*;
|
[
"org.assertj.core"
] |
org.assertj.core;
| 892,039
|
default GoogleCalendarStreamEndpointBuilder scopes(List<String> scopes) {
doSetProperty("scopes", scopes);
return this;
}
|
default GoogleCalendarStreamEndpointBuilder scopes(List<String> scopes) { doSetProperty(STR, scopes); return this; }
|
/**
* Specifies the level of permissions you want a calendar application to
* have to a user account. See
* https://developers.google.com/calendar/auth for more info.
*
* The option is a: <code>java.util.List<java.lang.String></code>
* type.
*
* Group: consumer
*/
|
Specifies the level of permissions you want a calendar application to have to a user account. See HREF for more info. The option is a: <code>java.util.List<java.lang.String></code> type. Group: consumer
|
scopes
|
{
"repo_name": "DariusX/camel",
"path": "core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GoogleCalendarStreamEndpointBuilderFactory.java",
"license": "apache-2.0",
"size": 33861
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,068,324
|
@Test
public void toEquationUnarySine()
{
// Setup.
final TreeNode<Integer> child = new ConstantTerminal<Integer>(converterInteger, 3);
final TreeNode<Integer> function = new SineFunction<Integer>(converterInteger, child);
// Run.
final String result = InfixNotationVisitor.toEquation(function);
// Verify.
assertNotNull(result);
final String expected = "sin(3)";
// System.out.println("expected = " + expected);
// System.out.println("result = " + result);
assertThat(result, is(expected));
}
|
void function() { final TreeNode<Integer> child = new ConstantTerminal<Integer>(converterInteger, 3); final TreeNode<Integer> function = new SineFunction<Integer>(converterInteger, child); final String result = InfixNotationVisitor.toEquation(function); assertNotNull(result); final String expected = STR; assertThat(result, is(expected)); }
|
/**
* Test the <code>toEquation()</code> method.
*/
|
Test the <code>toEquation()</code> method
|
toEquationUnarySine
|
{
"repo_name": "jmthompson2015/vizzini",
"path": "ai/src/test/java/org/vizzini/ai/geneticalgorithm/geneticprogramming/InfixNotationVisitorTest.java",
"license": "mit",
"size": 6082
}
|
[
"org.hamcrest.CoreMatchers",
"org.junit.Assert"
] |
import org.hamcrest.CoreMatchers; import org.junit.Assert;
|
import org.hamcrest.*; import org.junit.*;
|
[
"org.hamcrest",
"org.junit"
] |
org.hamcrest; org.junit;
| 1,628,683
|
public double getYearOnYearConvexityAdjustment(final CouponInflationYearOnYearMonthly coupon,
final InflationConvexityAdjustmentProviderInterface inflationConvexity) {
ArgumentChecker.notNull(coupon, "Coupon");
ArgumentChecker.notNull(inflationConvexity, "Inflation");
final double firstFixingTime = coupon.getReferenceStartTime();
final double secondFixingTime = coupon.getReferenceEndTime();
final double firstNaturalPaymentTime = coupon.getNaturalPaymentStartTime();
final double secondNaturalPaymentTime = coupon.getNaturalPaymentEndTime();
final double paymentTime = coupon.getPaymentTime();
final double volatilityStart = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexAtmVolatility()[0];
final double volatilityEnd = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexAtmVolatility()[1];
final double correlationInflation = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexCorrelation().getZValue(firstFixingTime,
secondFixingTime);
final double correlationInflationRateStart = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexRateCorrelation()
.getYValue(firstFixingTime);
final double correlationInflationRateEnd = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexRateCorrelation()
.getYValue(secondFixingTime);
final double volBondForwardStart = getVolBondForward(firstNaturalPaymentTime, paymentTime, inflationConvexity);
final double volBondForwardEnd = getVolBondForward(secondNaturalPaymentTime, paymentTime, inflationConvexity);
final double adjustment = volatilityStart * (volatilityStart - volatilityEnd * correlationInflation - volBondForwardStart * correlationInflationRateStart)
* firstNaturalPaymentTime
+ volatilityEnd * volBondForwardEnd * correlationInflationRateEnd * secondNaturalPaymentTime;
return Math.exp(adjustment);
}
|
double function(final CouponInflationYearOnYearMonthly coupon, final InflationConvexityAdjustmentProviderInterface inflationConvexity) { ArgumentChecker.notNull(coupon, STR); ArgumentChecker.notNull(inflationConvexity, STR); final double firstFixingTime = coupon.getReferenceStartTime(); final double secondFixingTime = coupon.getReferenceEndTime(); final double firstNaturalPaymentTime = coupon.getNaturalPaymentStartTime(); final double secondNaturalPaymentTime = coupon.getNaturalPaymentEndTime(); final double paymentTime = coupon.getPaymentTime(); final double volatilityStart = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexAtmVolatility()[0]; final double volatilityEnd = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexAtmVolatility()[1]; final double correlationInflation = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexCorrelation().getZValue(firstFixingTime, secondFixingTime); final double correlationInflationRateStart = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexRateCorrelation() .getYValue(firstFixingTime); final double correlationInflationRateEnd = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexRateCorrelation() .getYValue(secondFixingTime); final double volBondForwardStart = getVolBondForward(firstNaturalPaymentTime, paymentTime, inflationConvexity); final double volBondForwardEnd = getVolBondForward(secondNaturalPaymentTime, paymentTime, inflationConvexity); final double adjustment = volatilityStart * (volatilityStart - volatilityEnd * correlationInflation - volBondForwardStart * correlationInflationRateStart) * firstNaturalPaymentTime + volatilityEnd * volBondForwardEnd * correlationInflationRateEnd * secondNaturalPaymentTime; return Math.exp(adjustment); }
|
/**
* Computes the convexity adjustment for year on year inflation coupon with a monthly index.
*
* @param coupon
* The year on year coupon.
* @param inflationConvexity
* The inflation provider.
* @return The convexity adjustment.
*/
|
Computes the convexity adjustment for year on year inflation coupon with a monthly index
|
getYearOnYearConvexityAdjustment
|
{
"repo_name": "McLeodMoores/starling",
"path": "projects/analytics/src/main/java/com/opengamma/analytics/financial/interestrate/inflation/provider/InflationMarketModelConvexityAdjustmentForCoupon.java",
"license": "apache-2.0",
"size": 19616
}
|
[
"com.opengamma.analytics.financial.interestrate.inflation.derivative.CouponInflationYearOnYearMonthly",
"com.opengamma.analytics.financial.provider.description.inflation.InflationConvexityAdjustmentProviderInterface",
"com.opengamma.util.ArgumentChecker"
] |
import com.opengamma.analytics.financial.interestrate.inflation.derivative.CouponInflationYearOnYearMonthly; import com.opengamma.analytics.financial.provider.description.inflation.InflationConvexityAdjustmentProviderInterface; import com.opengamma.util.ArgumentChecker;
|
import com.opengamma.analytics.financial.interestrate.inflation.derivative.*; import com.opengamma.analytics.financial.provider.description.inflation.*; import com.opengamma.util.*;
|
[
"com.opengamma.analytics",
"com.opengamma.util"
] |
com.opengamma.analytics; com.opengamma.util;
| 166,667
|
void addCommand(Command cmd) throws CamiException;
|
void addCommand(Command cmd) throws CamiException;
|
/**
* Adds a new command to the Cami commands repository maps.
*
* @param cmd
* permit to add a command in Cami format
*
* @throws CamiException
* if the insertion was unsuccessfull, especially if the
* command was not recognized. <br>
* NOTE: This methods doesn't allow re-insertion of CA and
* CN commands with the same id, for they must be unique.
* Multiple insertion of CM and PI commands is allowed. In
* that case, the key is the command itself and the value
* the associated node (or arc) id. A re-insertion of a Po,
* Pt or Ct command overwrites the previous one with the
* same id. So beware of endless loops !
* @see fr.lip6.move.pnml.cpnami.cami.CamiRepository#removeCommand(Command,
* int)
*
*/
|
Adds a new command to the Cami commands repository maps
|
addCommand
|
{
"repo_name": "lhillah/camipnml",
"path": "cpnami2-cpnami2/src/fr/lip6/move/pnml/cpnami/cami/CamiRepository.java",
"license": "epl-1.0",
"size": 14631
}
|
[
"fr.lip6.move.pnml.cpnami.exceptions.CamiException"
] |
import fr.lip6.move.pnml.cpnami.exceptions.CamiException;
|
import fr.lip6.move.pnml.cpnami.exceptions.*;
|
[
"fr.lip6.move"
] |
fr.lip6.move;
| 2,679,432
|
private static List<DetailAST> getParameters(DetailAST ast) {
final DetailAST params = ast.findFirstToken(TokenTypes.PARAMETERS);
final List<DetailAST> returnValue = Lists.newArrayList();
DetailAST child = params.getFirstChild();
while (child != null) {
if (child.getType() == TokenTypes.PARAMETER_DEF) {
final DetailAST ident = child.findFirstToken(TokenTypes.IDENT);
returnValue.add(ident);
}
child = child.getNextSibling();
}
return returnValue;
}
|
static List<DetailAST> function(DetailAST ast) { final DetailAST params = ast.findFirstToken(TokenTypes.PARAMETERS); final List<DetailAST> returnValue = Lists.newArrayList(); DetailAST child = params.getFirstChild(); while (child != null) { if (child.getType() == TokenTypes.PARAMETER_DEF) { final DetailAST ident = child.findFirstToken(TokenTypes.IDENT); returnValue.add(ident); } child = child.getNextSibling(); } return returnValue; }
|
/**
* Computes the parameter nodes for a method.
*
* @param ast the method node.
* @return the list of parameter nodes for ast.
*/
|
Computes the parameter nodes for a method
|
getParameters
|
{
"repo_name": "baratali/checkstyle",
"path": "src/main/java/com/puppycrawl/tools/checkstyle/checks/javadoc/JavadocMethodCheck.java",
"license": "lgpl-2.1",
"size": 37083
}
|
[
"com.google.common.collect.Lists",
"com.puppycrawl.tools.checkstyle.api.DetailAST",
"com.puppycrawl.tools.checkstyle.api.TokenTypes",
"java.util.List"
] |
import com.google.common.collect.Lists; import com.puppycrawl.tools.checkstyle.api.DetailAST; import com.puppycrawl.tools.checkstyle.api.TokenTypes; import java.util.List;
|
import com.google.common.collect.*; import com.puppycrawl.tools.checkstyle.api.*; import java.util.*;
|
[
"com.google.common",
"com.puppycrawl.tools",
"java.util"
] |
com.google.common; com.puppycrawl.tools; java.util;
| 2,050,403
|
@Nonnull
public static Iterator<Object[]> cartesianProduct(@Nonnull final Set<?>... sets) {
return FluentIterable.from(Sets.cartesianProduct(sets)).transform(List::toArray).iterator();
}
|
static Iterator<Object[]> function(@Nonnull final Set<?>... sets) { return FluentIterable.from(Sets.cartesianProduct(sets)).transform(List::toArray).iterator(); }
|
/**
* produces a cartesian product of the provided sets, see {@link Sets#cartesianProduct#cartesianProduct} for
* more info
*/
|
produces a cartesian product of the provided sets, see <code>Sets#cartesianProduct#cartesianProduct</code> for more info
|
cartesianProduct
|
{
"repo_name": "bjrke/just-java-toolbox",
"path": "just-java-test-toolbox/src/main/java/de/justsoftware/toolbox/testng/DataProviders.java",
"license": "mit",
"size": 3248
}
|
[
"com.google.common.collect.FluentIterable",
"com.google.common.collect.Sets",
"java.util.Iterator",
"java.util.List",
"java.util.Set",
"javax.annotation.Nonnull"
] |
import com.google.common.collect.FluentIterable; import com.google.common.collect.Sets; import java.util.Iterator; import java.util.List; import java.util.Set; import javax.annotation.Nonnull;
|
import com.google.common.collect.*; import java.util.*; import javax.annotation.*;
|
[
"com.google.common",
"java.util",
"javax.annotation"
] |
com.google.common; java.util; javax.annotation;
| 1,287,293
|
public static DateFormat summaryFormatter(Context context) {
return android.text.format.DateFormat.getTimeFormat(context);
}
|
static DateFormat function(Context context) { return android.text.format.DateFormat.getTimeFormat(context); }
|
/**
* Produces the date formatter used for showing the time in the summary. Override this to change it.
*
* @return the SimpleDateFormat used for summary dates
*/
|
Produces the date formatter used for showing the time in the summary. Override this to change it
|
summaryFormatter
|
{
"repo_name": "NACC-Aus/Photomon-Android",
"path": "AndroidStudio/NACC/app/src/main/java/com/appiphany/nacc/ui/controls/TimePreference.java",
"license": "gpl-3.0",
"size": 8277
}
|
[
"android.content.Context",
"java.text.DateFormat"
] |
import android.content.Context; import java.text.DateFormat;
|
import android.content.*; import java.text.*;
|
[
"android.content",
"java.text"
] |
android.content; java.text;
| 2,531,673
|
public GitClient invokeGenerically(InvocationData data) {
boolean failOnErrOut = Boolean.parseBoolean(data.getFailOnErrOut());
String stdOut = invokeGenericallyAndGetStdOut(data.getProcessName(), failOnErrOut, CollectionUtil.toArray(data.getArgs()));
data.setStdOut(stdOut);
return this;
}
// internal helper methods -------------------------------------------------
|
GitClient function(InvocationData data) { boolean failOnErrOut = Boolean.parseBoolean(data.getFailOnErrOut()); String stdOut = invokeGenericallyAndGetStdOut(data.getProcessName(), failOnErrOut, CollectionUtil.toArray(data.getArgs())); data.setStdOut(stdOut); return this; }
|
/** Provides individually parameterized git invocations.
* @param data the invocation data
* @return a reference to <code>this</code> */
|
Provides individually parameterized git invocations
|
invokeGenerically
|
{
"repo_name": "AludraTest/aludratest",
"path": "src/main/java/org/aludratest/service/gitclient/GitClient.java",
"license": "apache-2.0",
"size": 28327
}
|
[
"org.aludratest.service.gitclient.data.InvocationData",
"org.databene.commons.CollectionUtil"
] |
import org.aludratest.service.gitclient.data.InvocationData; import org.databene.commons.CollectionUtil;
|
import org.aludratest.service.gitclient.data.*; import org.databene.commons.*;
|
[
"org.aludratest.service",
"org.databene.commons"
] |
org.aludratest.service; org.databene.commons;
| 1,188,029
|
public static MozuUrl getDocumentListTypesUrl(Integer pageSize, String responseFields, Integer startIndex)
{
UrlFormatter formatter = new UrlFormatter("/api/content/documentlistTypes/{documentListTypeFQN}?responseFields={responseFields}");
formatter.formatUrl("pageSize", pageSize);
formatter.formatUrl("responseFields", responseFields);
formatter.formatUrl("startIndex", startIndex);
return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD) ;
}
|
static MozuUrl function(Integer pageSize, String responseFields, Integer startIndex) { UrlFormatter formatter = new UrlFormatter(STR); formatter.formatUrl(STR, pageSize); formatter.formatUrl(STR, responseFields); formatter.formatUrl(STR, startIndex); return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD) ; }
|
/**
* Get Resource Url for GetDocumentListTypes
* @param pageSize When creating paged results from a query, this value indicates the zero-based offset in the complete result set where the returned entities begin. For example, with this parameter set to 25, to get the 51st through the 75th items, set startIndex to 50.
* @param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object. This parameter should only be used to retrieve data. Attempting to update data using this parameter may cause data loss.
* @param startIndex When creating paged results from a query, this value indicates the zero-based offset in the complete result set where the returned entities begin. For example, with pageSize set to 25, to get the 51st through the 75th items, set this parameter to 50.
* @return String Resource Url
*/
|
Get Resource Url for GetDocumentListTypes
|
getDocumentListTypesUrl
|
{
"repo_name": "Mozu/mozu-java",
"path": "mozu-javaasync-core/src/main/java/com/mozu/api/urls/content/DocumentListTypeUrl.java",
"license": "mit",
"size": 4148
}
|
[
"com.mozu.api.MozuUrl",
"com.mozu.api.utils.UrlFormatter"
] |
import com.mozu.api.MozuUrl; import com.mozu.api.utils.UrlFormatter;
|
import com.mozu.api.*; import com.mozu.api.utils.*;
|
[
"com.mozu.api"
] |
com.mozu.api;
| 1,920,610
|
@Override
public void init() throws ServletException {
// Retrieve the MBean server
mBeanServer = ManagementFactory.getPlatformMBeanServer();
this.jsonBeanWriter = new JSONBean();
}
|
void function() throws ServletException { mBeanServer = ManagementFactory.getPlatformMBeanServer(); this.jsonBeanWriter = new JSONBean(); }
|
/**
* Initialize this servlet.
*/
|
Initialize this servlet
|
init
|
{
"repo_name": "mahak/hbase",
"path": "hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java",
"license": "apache-2.0",
"size": 9261
}
|
[
"java.lang.management.ManagementFactory",
"javax.servlet.ServletException",
"org.apache.hadoop.hbase.util.JSONBean"
] |
import java.lang.management.ManagementFactory; import javax.servlet.ServletException; import org.apache.hadoop.hbase.util.JSONBean;
|
import java.lang.management.*; import javax.servlet.*; import org.apache.hadoop.hbase.util.*;
|
[
"java.lang",
"javax.servlet",
"org.apache.hadoop"
] |
java.lang; javax.servlet; org.apache.hadoop;
| 767,745
|
public final static int extractFlagsFromType2Message(byte[] msg) {
byte[] flagsBytes = new byte[4];
System.arraycopy(msg, 20, flagsBytes, 0, 4);
ByteUtilities.changeWordEndianess(flagsBytes, 0, 4);
return ByteUtilities.makeIntFromByte4(flagsBytes);
}
|
final static int function(byte[] msg) { byte[] flagsBytes = new byte[4]; System.arraycopy(msg, 20, flagsBytes, 0, 4); ByteUtilities.changeWordEndianess(flagsBytes, 0, 4); return ByteUtilities.makeIntFromByte4(flagsBytes); }
|
/**
* Extracts the NTLM flags from the type 2 message.
*
* @param msg the type 2 message byte array
* @return the proxy flags as an int
*/
|
Extracts the NTLM flags from the type 2 message
|
extractFlagsFromType2Message
|
{
"repo_name": "DL7AD/SSR-Receiver",
"path": "src/org/apache/mina/proxy/handlers/http/ntlm/NTLMUtilities.java",
"license": "gpl-3.0",
"size": 18751
}
|
[
"org.apache.mina.proxy.utils.ByteUtilities"
] |
import org.apache.mina.proxy.utils.ByteUtilities;
|
import org.apache.mina.proxy.utils.*;
|
[
"org.apache.mina"
] |
org.apache.mina;
| 1,449,607
|
private void recordPrototypeSet(String className, String prototypeProperty,
Node node) {
JsName name = getName(className, false);
if (name != null) {
name.prototypeNames.add(prototypeProperty);
refNodes.add(new PrototypeSetNode(name, node));
recordWriteOnProperties(className);
}
}
|
void function(String className, String prototypeProperty, Node node) { JsName name = getName(className, false); if (name != null) { name.prototypeNames.add(prototypeProperty); refNodes.add(new PrototypeSetNode(name, node)); recordWriteOnProperties(className); } }
|
/**
* Records the assignment to a prototype property of a global name,
* if possible.
*
* @param className The name of the class.
* @param prototypeProperty The name of the prototype property.
* @param node The top node representing the name (GETPROP)
*/
|
Records the assignment to a prototype property of a global name, if possible
|
recordPrototypeSet
|
{
"repo_name": "110035/kissy",
"path": "tools/module-compiler/src/com/google/javascript/jscomp/NameAnalyzer.java",
"license": "mit",
"size": 55994
}
|
[
"com.google.javascript.rhino.Node"
] |
import com.google.javascript.rhino.Node;
|
import com.google.javascript.rhino.*;
|
[
"com.google.javascript"
] |
com.google.javascript;
| 545,866
|
public static java.util.Set extractInPatientADTMessageQueueSet(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.ifInpatientADTVoCollection voCollection)
{
return extractInPatientADTMessageQueueSet(domainFactory, voCollection, null, new HashMap());
}
|
static java.util.Set function(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.ifInpatientADTVoCollection voCollection) { return extractInPatientADTMessageQueueSet(domainFactory, voCollection, null, new HashMap()); }
|
/**
* Create the ims.hl7adtout.domain.objects.InPatientADTMessageQueue set from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/
|
Create the ims.hl7adtout.domain.objects.InPatientADTMessageQueue set from the value object collection
|
extractInPatientADTMessageQueueSet
|
{
"repo_name": "open-health-hub/openMAXIMS",
"path": "openmaxims_workspace/ValueObjects/src/ims/emergency/vo/domain/ifInpatientADTVoAssembler.java",
"license": "agpl-3.0",
"size": 17512
}
|
[
"java.util.HashMap"
] |
import java.util.HashMap;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 526,877
|
public void syncData()
{
if (JOptionPane
.showConfirmDialog(
null,
"SYNC all clients?",
"Sync all?",
JOptionPane.YES_NO_OPTION)
== JOptionPane.OK_OPTION)
{
CPacket p = new CPacket();
p.m_action = CProtokoll.SYNC;
sendPacketToAll(p);
}
}
|
void function() { if (JOptionPane .showConfirmDialog( null, STR, STR, JOptionPane.YES_NO_OPTION) == JOptionPane.OK_OPTION) { CPacket p = new CPacket(); p.m_action = CProtokoll.SYNC; sendPacketToAll(p); } }
|
/**
* This method is called by the MainFrame, if the Sync menuitem has been selected.
* The server sents SYNC to all clients.
* @see CProtokoll
*/
|
This method is called by the MainFrame, if the Sync menuitem has been selected. The server sents SYNC to all clients
|
syncData
|
{
"repo_name": "luckyduck/collaborative-editor",
"path": "mpedit/net/CServerManager.java",
"license": "gpl-2.0",
"size": 14802
}
|
[
"javax.swing.JOptionPane"
] |
import javax.swing.JOptionPane;
|
import javax.swing.*;
|
[
"javax.swing"
] |
javax.swing;
| 2,602,530
|
public void setMtLrValue(YangEnumeration mtLrValue) throws JNCException {
setLeafValue(Epc.NAMESPACE,
"mt-lr",
mtLrValue,
childrenNames());
}
|
void function(YangEnumeration mtLrValue) throws JNCException { setLeafValue(Epc.NAMESPACE, "mt-lr", mtLrValue, childrenNames()); }
|
/**
* Sets the value for child leaf "mt-lr",
* using instance of generated typedef class.
* @param mtLrValue The value to set.
* @param mtLrValue used during instantiation.
*/
|
Sets the value for child leaf "mt-lr", using instance of generated typedef class
|
setMtLrValue
|
{
"repo_name": "jnpr-shinma/yangfile",
"path": "hitel/src/hctaEpc/mmeSgsn/subscriber/MmeSubscriberPlmn.java",
"license": "apache-2.0",
"size": 40584
}
|
[
"com.tailf.jnc.YangEnumeration"
] |
import com.tailf.jnc.YangEnumeration;
|
import com.tailf.jnc.*;
|
[
"com.tailf.jnc"
] |
com.tailf.jnc;
| 2,525,646
|
void mouseDragged(BitPointInt point);
|
void mouseDragged(BitPointInt point);
|
/**
* mouse moved while button is down
*
* @param point
*/
|
mouse moved while button is down
|
mouseDragged
|
{
"repo_name": "bitDecayGames/Jump",
"path": "jump-leveleditor/src/main/java/com/bitdecay/jump/leveleditor/render/mouse/MouseMode.java",
"license": "mit",
"size": 665
}
|
[
"com.bitdecay.jump.geom.BitPointInt"
] |
import com.bitdecay.jump.geom.BitPointInt;
|
import com.bitdecay.jump.geom.*;
|
[
"com.bitdecay.jump"
] |
com.bitdecay.jump;
| 455,159
|
myUnitDirectionToRepel = new Vec2((float)1.0, (float)0.0);
}
|
myUnitDirectionToRepel = new Vec2((float)1.0, (float)0.0); }
|
/**
* sets unit vector to 01in the x direction, 0 in the y direction
*/
|
sets unit vector to 01in the x direction, 0 in the y direction
|
setUnitVector
|
{
"repo_name": "kjian279/springies",
"path": "src/objects/Wall/LeftWall.java",
"license": "mit",
"size": 706
}
|
[
"org.jbox2d.common.Vec2"
] |
import org.jbox2d.common.Vec2;
|
import org.jbox2d.common.*;
|
[
"org.jbox2d.common"
] |
org.jbox2d.common;
| 2,561,331
|
public void addCheckingPath(TreePath path);
|
void function(TreePath path);
|
/**
* add a path to the checked paths set
*
* @param path the path to be added.
*/
|
add a path to the checked paths set
|
addCheckingPath
|
{
"repo_name": "virtualcitySYSTEMS/importer-exporter",
"path": "src/org/citydb/gui/components/checkboxtree/TreeCheckingModel.java",
"license": "lgpl-3.0",
"size": 7391
}
|
[
"javax.swing.tree.TreePath"
] |
import javax.swing.tree.TreePath;
|
import javax.swing.tree.*;
|
[
"javax.swing"
] |
javax.swing;
| 691,655
|
private void sendUpstreamMessage(byte[] message) {
NetworkEndpointId endpointId = getNetworkEndpointId(this);
if (endpointId == null) {
logger.info("Buffering message to the data center: no GCM registration id");
AndroidChannelPreferences.bufferMessage(message);
return;
}
Bundle dataBundle = new Bundle();
// Add the encoded android endpoint id to the bundle
dataBundle.putString(GcmSharedConstants.NETWORK_ENDPOINT_ID_KEY,
base64Encode(endpointId.toByteArray()));
// Add the encoded message to the bundle
dataBundle.putString(GcmSharedConstants.CLIENT_TO_SERVER_MESSAGE_KEY,
base64Encode(message));
logger.info("Encoded message: %s", base64Encode(message));
// Currently we do not check for message size limits since this will be run as an experiment.
// Feedback from the experiment will be used to decide whether handling of message size
// limit is required.
deliverMessage(GcmSharedConstants.GCM_UPDATED_SENDER_ID + "@google.com", dataBundle);
}
|
void function(byte[] message) { NetworkEndpointId endpointId = getNetworkEndpointId(this); if (endpointId == null) { logger.info(STR); AndroidChannelPreferences.bufferMessage(message); return; } Bundle dataBundle = new Bundle(); dataBundle.putString(GcmSharedConstants.NETWORK_ENDPOINT_ID_KEY, base64Encode(endpointId.toByteArray())); dataBundle.putString(GcmSharedConstants.CLIENT_TO_SERVER_MESSAGE_KEY, base64Encode(message)); logger.info(STR, base64Encode(message)); deliverMessage(GcmSharedConstants.GCM_UPDATED_SENDER_ID + STR, dataBundle); }
|
/**
* Creates the Bundle for sending the {@code message}. Encodes the message and the network
* endpoint id and adds it to the bundle.
*/
|
Creates the Bundle for sending the message. Encodes the message and the network endpoint id and adds it to the bundle
|
sendUpstreamMessage
|
{
"repo_name": "endlessm/chromium-browser",
"path": "third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/ticl/android2/channel/GcmUpstreamSenderService.java",
"license": "bsd-3-clause",
"size": 6446
}
|
[
"android.os.Bundle",
"com.google.ipc.invalidation.common.GcmSharedConstants",
"com.google.ipc.invalidation.ticl.proto.ChannelCommon"
] |
import android.os.Bundle; import com.google.ipc.invalidation.common.GcmSharedConstants; import com.google.ipc.invalidation.ticl.proto.ChannelCommon;
|
import android.os.*; import com.google.ipc.invalidation.common.*; import com.google.ipc.invalidation.ticl.proto.*;
|
[
"android.os",
"com.google.ipc"
] |
android.os; com.google.ipc;
| 2,517,293
|
private boolean parse(int ch) throws IOException {
if (Character.isLetterOrDigit((char) ch)) {
string_value.setLength(0);
string_value.append((char) ch);
while (-1 != (ch = cin.read())
&& Character.isLetterOrDigit((char) ch))
string_value.append((char) ch);
cin.unread(ch);
final String s = string_value.toString();
final Matcher mat = floatPat.matcher(s);
if (mat.matches()) {
number_value = Double.parseDouble(s);
curr_tok = NUMBER;
} else
curr_tok = NAME;
return true;
}
return false;
}
|
boolean function(int ch) throws IOException { if (Character.isLetterOrDigit((char) ch)) { string_value.setLength(0); string_value.append((char) ch); while (-1 != (ch = cin.read()) && Character.isLetterOrDigit((char) ch)) string_value.append((char) ch); cin.unread(ch); final String s = string_value.toString(); final Matcher mat = floatPat.matcher(s); if (mat.matches()) { number_value = Double.parseDouble(s); curr_tok = NUMBER; } else curr_tok = NAME; return true; } return false; }
|
/**
* read a number or name.
*
* @param ch
* @return false: neither found.
* @throws IOException
*/
|
read a number or name
|
parse
|
{
"repo_name": "mro/jcurl",
"path": "java-core/jc-core/src/test/java/org/jcurl/mr/math/Calc0.java",
"license": "gpl-2.0",
"size": 4596
}
|
[
"java.io.IOException",
"java.util.regex.Matcher"
] |
import java.io.IOException; import java.util.regex.Matcher;
|
import java.io.*; import java.util.regex.*;
|
[
"java.io",
"java.util"
] |
java.io; java.util;
| 2,833,864
|
public void sendEnterCombat()
{
super.sendEnterCombat();
this.connection.sendPacket(new SPacketCombatEvent(this.getCombatTracker(), SPacketCombatEvent.Event.ENTER_COMBAT));
}
|
void function() { super.sendEnterCombat(); this.connection.sendPacket(new SPacketCombatEvent(this.getCombatTracker(), SPacketCombatEvent.Event.ENTER_COMBAT)); }
|
/**
* Sends an ENTER_COMBAT packet to the client
*/
|
Sends an ENTER_COMBAT packet to the client
|
sendEnterCombat
|
{
"repo_name": "Severed-Infinity/technium",
"path": "build/tmp/recompileMc/sources/net/minecraft/entity/player/EntityPlayerMP.java",
"license": "gpl-3.0",
"size": 58179
}
|
[
"net.minecraft.network.play.server.SPacketCombatEvent"
] |
import net.minecraft.network.play.server.SPacketCombatEvent;
|
import net.minecraft.network.play.server.*;
|
[
"net.minecraft.network"
] |
net.minecraft.network;
| 2,493,911
|
@Test
public void stopWithSingleImageAndNoApplicablePattern() throws IOException, MojoExecutionException, ExecException {
givenProjectWithResolvedImage(singleImageWithBuildAndStopNamePattern(" , , "));
whenMojoExecutes();
thenNoContainerLookupByImageOccurs();
thenListContainersIsNotCalled();
thenNoContainerIsStopped();
}
|
void function() throws IOException, MojoExecutionException, ExecException { givenProjectWithResolvedImage(singleImageWithBuildAndStopNamePattern(STR)); whenMojoExecutes(); thenNoContainerLookupByImageOccurs(); thenListContainersIsNotCalled(); thenNoContainerIsStopped(); }
|
/**
* Mock project with one image, query service indicates running image, but it is not labelled.
* The stopNamePattern is set for images only and does not match.
*
* @throws IOException
* @throws MojoExecutionException
* @throws ExecException
*/
|
Mock project with one image, query service indicates running image, but it is not labelled. The stopNamePattern is set for images only and does not match
|
stopWithSingleImageAndNoApplicablePattern
|
{
"repo_name": "rhuss/docker-maven-plugin",
"path": "src/test/java/io/fabric8/maven/docker/StopMojoTest.java",
"license": "apache-2.0",
"size": 26740
}
|
[
"io.fabric8.maven.docker.access.ExecException",
"java.io.IOException",
"org.apache.maven.plugin.MojoExecutionException"
] |
import io.fabric8.maven.docker.access.ExecException; import java.io.IOException; import org.apache.maven.plugin.MojoExecutionException;
|
import io.fabric8.maven.docker.access.*; import java.io.*; import org.apache.maven.plugin.*;
|
[
"io.fabric8.maven",
"java.io",
"org.apache.maven"
] |
io.fabric8.maven; java.io; org.apache.maven;
| 1,725,172
|
public List<Prospect> getProspects() {
if (prospects == null) {
prospects = new Prospects();
}
return prospects.getProspects();
}
|
List<Prospect> function() { if (prospects == null) { prospects = new Prospects(); } return prospects.getProspects(); }
|
/**
* List of Prospects related to this opportunity.
*
* @return related prospects.
*/
|
List of Prospects related to this opportunity
|
getProspects
|
{
"repo_name": "Crim/pardot-java-client",
"path": "src/main/java/com/darksci/pardot/api/response/opportunity/Opportunity.java",
"license": "mit",
"size": 5208
}
|
[
"com.darksci.pardot.api.response.prospect.Prospect",
"java.util.List"
] |
import com.darksci.pardot.api.response.prospect.Prospect; import java.util.List;
|
import com.darksci.pardot.api.response.prospect.*; import java.util.*;
|
[
"com.darksci.pardot",
"java.util"
] |
com.darksci.pardot; java.util;
| 2,242,400
|
@Test
public void testNotifyTopologyChangedToUpdate() throws Exception {
createPowerSpy();
Topology prev = new Topology();
Topology curr = new Topology();
Response result = Whitebox.invokeMethod(target,
"notifyTopologyChangedToUpdate", prev, curr);
PowerMockito.verifyPrivate(target).invoke("notifyTopologyChanged",
prev, curr, TopologyChanged.Action.update);
assertThat(result.statusCode, is(Response.ACCEPTED));
}
|
void function() throws Exception { createPowerSpy(); Topology prev = new Topology(); Topology curr = new Topology(); Response result = Whitebox.invokeMethod(target, STR, prev, curr); PowerMockito.verifyPrivate(target).invoke(STR, prev, curr, TopologyChanged.Action.update); assertThat(result.statusCode, is(Response.ACCEPTED)); }
|
/**
* Test method for {@link org.o3project.odenos.core.component.network.Network#notifyTopologyChangedToUpdate(Topology, Topology)}.
*
* @throws Exception
*/
|
Test method for <code>org.o3project.odenos.core.component.network.Network#notifyTopologyChangedToUpdate(Topology, Topology)</code>
|
testNotifyTopologyChangedToUpdate
|
{
"repo_name": "y-higuchi/odenos",
"path": "src/test/java/org/o3project/odenos/core/component/network/NetworkTest.java",
"license": "apache-2.0",
"size": 116642
}
|
[
"org.hamcrest.CoreMatchers",
"org.junit.Assert",
"org.o3project.odenos.core.component.network.topology.Topology",
"org.o3project.odenos.core.component.network.topology.TopologyChanged",
"org.o3project.odenos.remoteobject.message.Response",
"org.powermock.api.mockito.PowerMockito",
"org.powermock.reflect.Whitebox"
] |
import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.o3project.odenos.core.component.network.topology.Topology; import org.o3project.odenos.core.component.network.topology.TopologyChanged; import org.o3project.odenos.remoteobject.message.Response; import org.powermock.api.mockito.PowerMockito; import org.powermock.reflect.Whitebox;
|
import org.hamcrest.*; import org.junit.*; import org.o3project.odenos.core.component.network.topology.*; import org.o3project.odenos.remoteobject.message.*; import org.powermock.api.mockito.*; import org.powermock.reflect.*;
|
[
"org.hamcrest",
"org.junit",
"org.o3project.odenos",
"org.powermock.api",
"org.powermock.reflect"
] |
org.hamcrest; org.junit; org.o3project.odenos; org.powermock.api; org.powermock.reflect;
| 1,632,236
|
private static final AtomicInteger sNextGeneratedId = new AtomicInteger(1);
public static int generateViewId(){
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
for (;;) {
final int result = sNextGeneratedId.get();
// aapt-generated IDs have the high byte nonzero; clamp to the range under that.
int newValue = result + 1;
if (newValue > 0x00FFFFFF) newValue = 1; // Roll over to 1, not 0.
if (sNextGeneratedId.compareAndSet(result, newValue)) {
return result;
}
}
} else {
return View.generateViewId();
}
}
|
static final AtomicInteger sNextGeneratedId = new AtomicInteger(1); public static int function(){ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) { for (;;) { final int result = sNextGeneratedId.get(); int newValue = result + 1; if (newValue > 0x00FFFFFF) newValue = 1; if (sNextGeneratedId.compareAndSet(result, newValue)) { return result; } } } else { return View.generateViewId(); } }
|
/**
* Generate a value suitable for use in {@link View#setId(int)}.
* This value will not collide with ID values generated at build time by aapt for R.id.
*
* @return a generated ID value
*/
|
Generate a value suitable for use in <code>View#setId(int)</code>. This value will not collide with ID values generated at build time by aapt for R.id
|
generateViewId
|
{
"repo_name": "crgarridos/android_utils",
"path": "src/main/java/com/cgarrido/android/utils/ResUtils.java",
"license": "cc0-1.0",
"size": 2641
}
|
[
"android.os.Build",
"android.view.View",
"java.util.concurrent.atomic.AtomicInteger"
] |
import android.os.Build; import android.view.View; import java.util.concurrent.atomic.AtomicInteger;
|
import android.os.*; import android.view.*; import java.util.concurrent.atomic.*;
|
[
"android.os",
"android.view",
"java.util"
] |
android.os; android.view; java.util;
| 2,737,747
|
public static byte[] get_first_n_bits(byte in[], int n) {
int remainder = n % 8;
byte[] out;
if (remainder == 0) {
out = Arrays.copyOfRange(in, 0, n / 8);
} else {
// f.e. remainder=3: 1<<3=0000 1000(bin)
// 0000 1000-1 = 0000 0111 --> 0000 0111<<5 = 1110 0000
// logical AND to get the first N bits
out = Arrays.copyOfRange(in, 0, n / 8 + 1);
int mask = ((1 << remainder) - 1) << (8 - remainder);
out[out.length - 1] &= mask;
}
return out;
}
|
static byte[] function(byte in[], int n) { int remainder = n % 8; byte[] out; if (remainder == 0) { out = Arrays.copyOfRange(in, 0, n / 8); } else { out = Arrays.copyOfRange(in, 0, n / 8 + 1); int mask = ((1 << remainder) - 1) << (8 - remainder); out[out.length - 1] &= mask; } return out; }
|
/**
* Provides a simple way to get the first n bits of value in
*
* @param in the value
* @param n count of the first bits
* @return the first n bits of in
*/
|
Provides a simple way to get the first n bits of value in
|
get_first_n_bits
|
{
"repo_name": "jcryptool/crypto",
"path": "org.jcryptool.visual.sphincsplus/src/org/jcryptool/visual/sphincsplus/algorithm/Utils.java",
"license": "epl-1.0",
"size": 5214
}
|
[
"java.util.Arrays"
] |
import java.util.Arrays;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,702,249
|
private void replySuccess() {
NettyUtils.enableAutoRead(mChannel);
mChannel.writeAndFlush(RPCProtoMessage.createOkResponse(null))
.addListeners(ChannelFutureListener.CLOSE_ON_FAILURE);
}
|
void function() { NettyUtils.enableAutoRead(mChannel); mChannel.writeAndFlush(RPCProtoMessage.createOkResponse(null)) .addListeners(ChannelFutureListener.CLOSE_ON_FAILURE); }
|
/**
* Writes a response to signify the success of the write request.
*/
|
Writes a response to signify the success of the write request
|
replySuccess
|
{
"repo_name": "WilliamZapata/alluxio",
"path": "core/server/worker/src/main/java/alluxio/worker/netty/DataServerWriteHandler.java",
"license": "apache-2.0",
"size": 16271
}
|
[
"io.netty.channel.ChannelFutureListener"
] |
import io.netty.channel.ChannelFutureListener;
|
import io.netty.channel.*;
|
[
"io.netty.channel"
] |
io.netty.channel;
| 1,135,281
|
public void testOfferLastNull() {
try {
ConcurrentLinkedDeque q = new ConcurrentLinkedDeque();
q.offerLast(null);
shouldThrow();
} catch (NullPointerException success) {}
}
|
void function() { try { ConcurrentLinkedDeque q = new ConcurrentLinkedDeque(); q.offerLast(null); shouldThrow(); } catch (NullPointerException success) {} }
|
/**
* offerLast(null) throws NPE
*/
|
offerLast(null) throws NPE
|
testOfferLastNull
|
{
"repo_name": "AdmireTheDistance/android_libcore",
"path": "jsr166-tests/src/test/java/jsr166/ConcurrentLinkedDequeTest.java",
"license": "gpl-2.0",
"size": 26434
}
|
[
"java.util.concurrent.ConcurrentLinkedDeque"
] |
import java.util.concurrent.ConcurrentLinkedDeque;
|
import java.util.concurrent.*;
|
[
"java.util"
] |
java.util;
| 1,196,723
|
public boolean readWindowFromSingleFile( int targetChr,
List<BreakRegion> br ) throws IOException {
return readWindowFromSingleFile( targetChr, targetChr, -1, -1, br );
}
|
boolean function( int targetChr, List<BreakRegion> br ) throws IOException { return readWindowFromSingleFile( targetChr, targetChr, -1, -1, br ); }
|
/**
* This version is for CGH data since no lmin/lmax will be required. Also,
* since CGH is always for a single chromosome, pass in same value for both
* left and right chr.
*/
|
This version is for CGH data since no lmin/lmax will be required. Also, since CGH is always for a single chromosome, pass in same value for both left and right chr
|
readWindowFromSingleFile
|
{
"repo_name": "rhilker/ReadXplorer",
"path": "readxplorer-tools-gasv/src/main/java/gasv/main/ReadInput.java",
"license": "gpl-3.0",
"size": 30602
}
|
[
"java.io.IOException",
"java.util.List"
] |
import java.io.IOException; import java.util.List;
|
import java.io.*; import java.util.*;
|
[
"java.io",
"java.util"
] |
java.io; java.util;
| 1,046,840
|
@ViewDebug.ExportedProperty
public boolean isSmoothScrollbarEnabled() {
return mSmoothScrollbarEnabled;
}
|
@ViewDebug.ExportedProperty boolean function() { return mSmoothScrollbarEnabled; }
|
/**
* Returns the current state of the fast scroll feature.
*
* @return True if smooth scrollbar is enabled is enabled, false otherwise.
*
* @see #setSmoothScrollbarEnabled(boolean)
*/
|
Returns the current state of the fast scroll feature
|
isSmoothScrollbarEnabled
|
{
"repo_name": "junchenChow/exciting-app",
"path": "mohosupportlib/src/main/java/it/sephiroth/android/library/widget/AbsHListView.java",
"license": "apache-2.0",
"size": 177577
}
|
[
"android.view.ViewDebug"
] |
import android.view.ViewDebug;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 689,796
|
public int getSize() {
LinkedHashSet<PastryContact> l = new LinkedHashSet<PastryContact>();
l.addAll(ccwNodes);
l.addAll(cwNodes);
return l.size();
}
|
int function() { LinkedHashSet<PastryContact> l = new LinkedHashSet<PastryContact>(); l.addAll(ccwNodes); l.addAll(cwNodes); return l.size(); }
|
/**
* Calculates the number of distinct contacts in the set.
*
* @return the number of distinct contacts
*/
|
Calculates the number of distinct contacts in the set
|
getSize
|
{
"repo_name": "flyroom/PeerfactSimKOM_Clone",
"path": "src/org/peerfact/impl/overlay/dht/pastry/nodestate/LeafSet.java",
"license": "gpl-2.0",
"size": 10635
}
|
[
"java.util.LinkedHashSet",
"org.peerfact.impl.overlay.dht.pastry.components.PastryContact"
] |
import java.util.LinkedHashSet; import org.peerfact.impl.overlay.dht.pastry.components.PastryContact;
|
import java.util.*; import org.peerfact.impl.overlay.dht.pastry.components.*;
|
[
"java.util",
"org.peerfact.impl"
] |
java.util; org.peerfact.impl;
| 2,703,453
|
public String getName() {
try {
return Val.chkStr((String) newXPath.evaluate("Name", ndLayer, XPathConstants.STRING));
} catch (XPathExpressionException ex) {
return "";
}
}
|
String function() { try { return Val.chkStr((String) newXPath.evaluate("Name", ndLayer, XPathConstants.STRING)); } catch (XPathExpressionException ex) { return ""; } }
|
/**
* Gets layer name.
* @return layer name
*/
|
Gets layer name
|
getName
|
{
"repo_name": "GeoinformationSystems/geoportal-server",
"path": "geoportal/src/com/esri/gpt/catalog/publication/WMSProcessor.java",
"license": "apache-2.0",
"size": 14122
}
|
[
"com.esri.gpt.framework.util.Val",
"javax.xml.xpath.XPathConstants",
"javax.xml.xpath.XPathExpressionException"
] |
import com.esri.gpt.framework.util.Val; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException;
|
import com.esri.gpt.framework.util.*; import javax.xml.xpath.*;
|
[
"com.esri.gpt",
"javax.xml"
] |
com.esri.gpt; javax.xml;
| 228,876
|
public void softDispose(RuntimeEngine runtimeEngine) {
}
|
void function(RuntimeEngine runtimeEngine) { }
|
/**
* Soft dispose means it will be invoked as sort of preparation step before actual dispose.
* Mainly used with transaction synchronization to be invoked as part of beforeCompletion
* to clean up any thread state - like thread local settings as afterCompletion can be invoked from another thread
*/
|
Soft dispose means it will be invoked as sort of preparation step before actual dispose. Mainly used with transaction synchronization to be invoked as part of beforeCompletion to clean up any thread state - like thread local settings as afterCompletion can be invoked from another thread
|
softDispose
|
{
"repo_name": "droolsjbpm/jbpm",
"path": "jbpm-runtime-manager/src/main/java/org/jbpm/runtime/manager/impl/AbstractRuntimeManager.java",
"license": "apache-2.0",
"size": 21835
}
|
[
"org.kie.api.runtime.manager.RuntimeEngine"
] |
import org.kie.api.runtime.manager.RuntimeEngine;
|
import org.kie.api.runtime.manager.*;
|
[
"org.kie.api"
] |
org.kie.api;
| 1,552,311
|
private void replaceWithLazyClient() {
// this is a defensive operation to avoid client is closed by accident, the initial state of the client is false
URL lazyUrl = URLBuilder.from(url)
.addParameter(LAZY_CONNECT_INITIAL_STATE_KEY, Boolean.FALSE)
.addParameter(RECONNECT_KEY, Boolean.FALSE)
.addParameter(SEND_RECONNECT_KEY, Boolean.TRUE.toString())
.addParameter("warning", Boolean.TRUE.toString())
.addParameter(LazyConnectExchangeClient.REQUEST_WITH_WARNING_KEY, true)
.addParameter("_client_memo", "referencecounthandler.replacewithlazyclient")
.build();
if (!(client instanceof LazyConnectExchangeClient) || client.isClosed()) {
client = new LazyConnectExchangeClient(lazyUrl, client.getExchangeHandler());
}
}
|
void function() { URL lazyUrl = URLBuilder.from(url) .addParameter(LAZY_CONNECT_INITIAL_STATE_KEY, Boolean.FALSE) .addParameter(RECONNECT_KEY, Boolean.FALSE) .addParameter(SEND_RECONNECT_KEY, Boolean.TRUE.toString()) .addParameter(STR, Boolean.TRUE.toString()) .addParameter(LazyConnectExchangeClient.REQUEST_WITH_WARNING_KEY, true) .addParameter(STR, STR) .build(); if (!(client instanceof LazyConnectExchangeClient) client.isClosed()) { client = new LazyConnectExchangeClient(lazyUrl, client.getExchangeHandler()); } }
|
/**
* when closing the client, the client needs to be set to LazyConnectExchangeClient, and if a new call is made,
* the client will "resurrect".
*
* @return
*/
|
when closing the client, the client needs to be set to LazyConnectExchangeClient, and if a new call is made, the client will "resurrect"
|
replaceWithLazyClient
|
{
"repo_name": "aglne/dubbo",
"path": "dubbo-rpc/dubbo-rpc-dubbo/src/main/java/org/apache/dubbo/rpc/protocol/dubbo/ReferenceCountExchangeClient.java",
"license": "apache-2.0",
"size": 5780
}
|
[
"org.apache.dubbo.common.URLBuilder"
] |
import org.apache.dubbo.common.URLBuilder;
|
import org.apache.dubbo.common.*;
|
[
"org.apache.dubbo"
] |
org.apache.dubbo;
| 2,783,747
|
public void testIsClassArray() {
assertFalse(ConstructorUtils.isClassArray(String.class));
assertFalse(ConstructorUtils.isClassArray(int.class));
assertFalse(ConstructorUtils.isClassArray(List.class));
assertFalse(ConstructorUtils.isClassArray(Set.class));
assertFalse(ConstructorUtils.isClassArray(Map.class));
assertFalse(ConstructorUtils.isClassArray(Object.class));
assertTrue(ConstructorUtils.isClassArray(String[].class));
assertTrue(ConstructorUtils.isClassArray(int[].class));
}
|
void function() { assertFalse(ConstructorUtils.isClassArray(String.class)); assertFalse(ConstructorUtils.isClassArray(int.class)); assertFalse(ConstructorUtils.isClassArray(List.class)); assertFalse(ConstructorUtils.isClassArray(Set.class)); assertFalse(ConstructorUtils.isClassArray(Map.class)); assertFalse(ConstructorUtils.isClassArray(Object.class)); assertTrue(ConstructorUtils.isClassArray(String[].class)); assertTrue(ConstructorUtils.isClassArray(int[].class)); }
|
/**
* Test method for {@link org.azeckoski.reflectutils.ConstructorUtils#isClassArray(java.lang.Class)}.
*/
|
Test method for <code>org.azeckoski.reflectutils.ConstructorUtils#isClassArray(java.lang.Class)</code>
|
testIsClassArray
|
{
"repo_name": "kevintcl/reflectutils",
"path": "src/test/java/org/azeckoski/reflectutils/ConstructorUtilsTest.java",
"license": "apache-2.0",
"size": 19229
}
|
[
"java.util.List",
"java.util.Map",
"java.util.Set"
] |
import java.util.List; import java.util.Map; import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 877,269
|
protected Level createEmptyLevelFromType(final int lineNumber, final LevelType levelType, final String input) {
// Create the level based on the type
switch (levelType) {
case APPENDIX:
return new Appendix(null, lineNumber, input);
case CHAPTER:
return new Chapter(null, lineNumber, input);
case SECTION:
return new Section(null, lineNumber, input);
case PART:
return new Part(null, lineNumber, input);
case PROCESS:
return new Process(null, lineNumber, input);
case INITIAL_CONTENT:
return new InitialContent(lineNumber, input);
default:
return new Level(null, lineNumber, input, levelType);
}
}
|
Level function(final int lineNumber, final LevelType levelType, final String input) { switch (levelType) { case APPENDIX: return new Appendix(null, lineNumber, input); case CHAPTER: return new Chapter(null, lineNumber, input); case SECTION: return new Section(null, lineNumber, input); case PART: return new Part(null, lineNumber, input); case PROCESS: return new Process(null, lineNumber, input); case INITIAL_CONTENT: return new InitialContent(lineNumber, input); default: return new Level(null, lineNumber, input, levelType); } }
|
/**
* Creates an empty Level using the LevelType to determine which Level subclass to instantiate.
*
* @param lineNumber The line number of the level.
* @param levelType The Level Type.
* @param input The string that represents the level, if one exists,
* @return The empty Level subclass object, or a plain Level object if no type matches a subclass.
*/
|
Creates an empty Level using the LevelType to determine which Level subclass to instantiate
|
createEmptyLevelFromType
|
{
"repo_name": "pressgang-ccms/PressGangCCMSContentSpecProcessor",
"path": "src/main/java/org/jboss/pressgang/ccms/contentspec/processor/ContentSpecParser.java",
"license": "gpl-3.0",
"size": 111420
}
|
[
"org.jboss.pressgang.ccms.contentspec.Appendix",
"org.jboss.pressgang.ccms.contentspec.Chapter",
"org.jboss.pressgang.ccms.contentspec.InitialContent",
"org.jboss.pressgang.ccms.contentspec.Level",
"org.jboss.pressgang.ccms.contentspec.Part",
"org.jboss.pressgang.ccms.contentspec.Process",
"org.jboss.pressgang.ccms.contentspec.Section",
"org.jboss.pressgang.ccms.contentspec.enums.LevelType"
] |
import org.jboss.pressgang.ccms.contentspec.Appendix; import org.jboss.pressgang.ccms.contentspec.Chapter; import org.jboss.pressgang.ccms.contentspec.InitialContent; import org.jboss.pressgang.ccms.contentspec.Level; import org.jboss.pressgang.ccms.contentspec.Part; import org.jboss.pressgang.ccms.contentspec.Process; import org.jboss.pressgang.ccms.contentspec.Section; import org.jboss.pressgang.ccms.contentspec.enums.LevelType;
|
import org.jboss.pressgang.ccms.contentspec.*; import org.jboss.pressgang.ccms.contentspec.enums.*;
|
[
"org.jboss.pressgang"
] |
org.jboss.pressgang;
| 1,172,166
|