method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
/**
 * Looks up the {@link TestProject} with the given name.
 *
 * @param projectName
 *            name of the project to find
 * @return the matching {@link TestProject}, or null if no project has that name
 */
protected TestProject getProject(String projectName) {
    for (TestProject candidate : getProjects()) {
        if (candidate.getName().equals(projectName)) {
            return candidate;
        }
    }
    return null;
}
|
TestProject function(String projectName) { List<TestProject> projects = getProjects(); for (TestProject project : projects) { if (project.getName().equals(projectName)) { return project; } } return null; }
|
/**
* gets the {@link TestProject} by the given name.
*
* @param projectName
* name of the project
* @return {@link TestProject} or null
*/
|
gets the <code>TestProject</code> by the given name
|
getProject
|
{
"repo_name": "franzbecker/test-editor",
"path": "core/org.testeditor.core/src/org/testeditor/core/services/impl/TestProjectServiceImpl.java",
"license": "epl-1.0",
"size": 38038
}
|
[
"java.util.List",
"org.testeditor.core.model.teststructure.TestProject"
] |
import java.util.List; import org.testeditor.core.model.teststructure.TestProject;
|
import java.util.*; import org.testeditor.core.model.teststructure.*;
|
[
"java.util",
"org.testeditor.core"
] |
java.util; org.testeditor.core;
| 1,110,371
|
/**
 * Registers a child {@code BackgroundInitializer} under the given name.
 * Registration is only legal before {@code start()} has been invoked; the
 * check and the insertion happen under the object's monitor so they are
 * atomic with respect to starting.
 *
 * @param name the name of the initializer (must not be null)
 * @param backgroundInitializer the initializer to add (must not be null)
 * @throws IllegalArgumentException if a required parameter is missing
 * @throws IllegalStateException if {@code start()} has already been called
 */
public void addInitializer(final String name, final BackgroundInitializer<?> backgroundInitializer) {
    Validate.notNull(name, "name");
    Validate.notNull(backgroundInitializer, "backgroundInitializer");
    synchronized (this) {
        if (!isStarted()) {
            childInitializers.put(name, backgroundInitializer);
        } else {
            throw new IllegalStateException("addInitializer() must not be called after start()!");
        }
    }
}
/**
* Returns the number of tasks needed for executing all child {@code
* BackgroundInitializer} objects in parallel. This implementation sums up
* the required tasks for all child initializers (which is necessary if one
* of the child initializers is itself a {@code MultiBackgroundInitializer}
|
void function(final String name, final BackgroundInitializer<?> backgroundInitializer) { Validate.notNull(name, "name"); Validate.notNull(backgroundInitializer, STR); synchronized (this) { if (isStarted()) { throw new IllegalStateException(STR); } childInitializers.put(name, backgroundInitializer); } } /** * Returns the number of tasks needed for executing all child { * BackgroundInitializer} objects in parallel. This implementation sums up * the required tasks for all child initializers (which is necessary if one * of the child initializers is itself a {@code MultiBackgroundInitializer}
|
/**
* Adds a new {@code BackgroundInitializer} to this object. When this
* {@code MultiBackgroundInitializer} is started, the given initializer will
* be processed. This method must not be called after {@link #start()} has
* been invoked.
*
* @param name the name of the initializer (must not be <b>null</b>)
* @param backgroundInitializer the {@code BackgroundInitializer} to add (must not be
* <b>null</b>)
* @throws IllegalArgumentException if a required parameter is missing
* @throws IllegalStateException if {@code start()} has already been called
*/
|
Adds a new BackgroundInitializer to this object. When this MultiBackgroundInitializer is started, the given initializer will be processed. This method must not be called after <code>#start()</code> has been invoked
|
addInitializer
|
{
"repo_name": "apache/commons-lang",
"path": "src/main/java/org/apache/commons/lang3/concurrent/MultiBackgroundInitializer.java",
"license": "apache-2.0",
"size": 14783
}
|
[
"org.apache.commons.lang3.Validate"
] |
import org.apache.commons.lang3.Validate;
|
import org.apache.commons.lang3.*;
|
[
"org.apache.commons"
] |
org.apache.commons;
| 481,659
|
/**
 * Stores the SqoopRecord instance that should be passed from the mapper to
 * the reducer.
 *
 * @param record the record to hold on this object
 */
public void setSqoopRecord(SqoopRecord record) {
    sqoopRecord = record;
}
@Override
/**
* {@inheritDoc}
|
void function(SqoopRecord record) { this.sqoopRecord = record; } /** * {@inheritDoc}
|
/**
* Set the SqoopRecord instance we should pass from the mapper to the
* reducer.
*/
|
Set the SqoopRecord instance we should pass from the mapper to the reducer
|
setSqoopRecord
|
{
"repo_name": "icoding/sqoop",
"path": "src/java/org/apache/sqoop/mapreduce/MergeRecord.java",
"license": "apache-2.0",
"size": 3689
}
|
[
"com.cloudera.sqoop.lib.SqoopRecord"
] |
import com.cloudera.sqoop.lib.SqoopRecord;
|
import com.cloudera.sqoop.lib.*;
|
[
"com.cloudera.sqoop"
] |
com.cloudera.sqoop;
| 1,829,029
|
/**
 * Given different local error values, verifies that the selected subpixel
 * disparity is biased towards the neighboring disparity with the smaller
 * error score (first towards 4, then towards 6).
 */
@Test
public void addSubpixelBias() {
    SelectSparseStandardSubpixel.F32 alg = new SelectSparseStandardSubpixel.F32(-1, -1);

    // Idiomatic Java array declaration (was C-style "float scores[]").
    float[] scores = new float[30];
    Arrays.fill(scores, 0, 10, 500);

    // should be biased towards 4
    scores[4] = 100;
    scores[5] = 50;
    scores[6] = 200;
    assertTrue(alg.select(scores, 10));
    double found = alg.getDisparity();
    assertTrue(found < 5 && found > 4);

    // now biased towards 6
    scores[4] = 200;
    scores[6] = 100;
    assertTrue(alg.select(scores, 10));
    found = alg.getDisparity();
    assertTrue(found < 6 && found > 5);
}
|
void function() { SelectSparseStandardSubpixel.F32 alg = new SelectSparseStandardSubpixel.F32(-1,-1); float scores[] = new float[30]; Arrays.fill(scores,0,10,500); scores[4] = 100; scores[5] = 50; scores[6] = 200; assertTrue(alg.select(scores, 10)); double found = alg.getDisparity(); assertTrue( found < 5 && found > 4); scores[4] = 200; scores[6] = 100; assertTrue(alg.select(scores, 10)); found = alg.getDisparity(); assertTrue( found < 6 && found > 5); }
|
/**
* Given different local error values see if it is closer to the value with a smaller error
*/
|
Given different local error values see if it is closer to the value with a smaller error
|
addSubpixelBias
|
{
"repo_name": "intrack/BoofCV-master",
"path": "main/feature/test/boofcv/alg/feature/disparity/impl/TestSelectSparseStandardSubpixel_F32.java",
"license": "apache-2.0",
"size": 1935
}
|
[
"java.util.Arrays",
"org.junit.Assert"
] |
import java.util.Arrays; import org.junit.Assert;
|
import java.util.*; import org.junit.*;
|
[
"java.util",
"org.junit"
] |
java.util; org.junit;
| 1,628,110
|
/**
 * Adds an item point to this party, persists it to the party config file,
 * and refreshes the item point list.
 *
 * @param itemPoint The item point to add.
 */
public void addItemPoint(DPItemPoint itemPoint) {
    itemPoints.add(itemPoint);

    // Append the serialized item point to the configured list and save.
    String path = "Parties." + partyName + ".itempoints";
    FileConfiguration partyConfig = dropParty.getConfigManager().getConfig(ConfigType.PARTY);
    List<String> serialized = partyConfig.getStringList(path);
    serialized.add(itemPoint.toConfig());
    partyConfig.set(path, serialized);
    dropParty.getConfigManager().getConfigAccessor(ConfigType.PARTY).saveConfig();

    updateItemPointList();
}
|
void function(DPItemPoint itemPoint) { itemPoints.add(itemPoint); FileConfiguration partyConfig = dropParty.getConfigManager().getConfig(ConfigType.PARTY); String path = STR + partyName + STR; List<String> dpItemPoints = partyConfig.getStringList(path); dpItemPoints.add(itemPoint.toConfig()); partyConfig.set(path, dpItemPoints); dropParty.getConfigManager().getConfigAccessor(ConfigType.PARTY).saveConfig(); updateItemPointList(); }
|
/**
* Adds an item point.
*
* @param itemPoint The item point.
*/
|
Adds an item point
|
addItemPoint
|
{
"repo_name": "ampayne2/DropParty",
"path": "src/main/java/ninja/amp/dropparty/parties/Party.java",
"license": "lgpl-3.0",
"size": 27228
}
|
[
"java.util.List",
"ninja.amp.dropparty.DPItemPoint",
"ninja.amp.dropparty.config.ConfigType",
"org.bukkit.configuration.file.FileConfiguration"
] |
import java.util.List; import ninja.amp.dropparty.DPItemPoint; import ninja.amp.dropparty.config.ConfigType; import org.bukkit.configuration.file.FileConfiguration;
|
import java.util.*; import ninja.amp.dropparty.*; import ninja.amp.dropparty.config.*; import org.bukkit.configuration.file.*;
|
[
"java.util",
"ninja.amp.dropparty",
"org.bukkit.configuration"
] |
java.util; ninja.amp.dropparty; org.bukkit.configuration;
| 2,113,027
|
/**
 * Deletes a resource by ID as a long-running operation.
 *
 * @param resourceId the fully qualified ID of the resource to delete
 * @param apiVersion the API version to use for the operation
 * @return the {@link SyncPoller} for polling of the long-running delete
 */
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
public SyncPoller<PollResult<Void>, Void> beginDeleteById(String resourceId, String apiVersion) {
    final PollerFlux<PollResult<Void>, Void> pollerFlux = beginDeleteByIdAsync(resourceId, apiVersion);
    return pollerFlux.getSyncPoller();
}
|
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) SyncPoller<PollResult<Void>, Void> function(String resourceId, String apiVersion) { return beginDeleteByIdAsync(resourceId, apiVersion).getSyncPoller(); }
|
/**
* Deletes a resource by ID.
*
* @param resourceId The fully qualified ID of the resource, including the resource name and resource type. Use the
* format,
* /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
* @param apiVersion The API version to use for the operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of long-running operation.
*/
|
Deletes a resource by ID
|
beginDeleteById
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-resources/src/main/java/com/azure/resourcemanager/resources/implementation/ResourcesClientImpl.java",
"license": "mit",
"size": 230225
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.management.polling.PollResult",
"com.azure.core.util.polling.SyncPoller"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.management.polling.PollResult; import com.azure.core.util.polling.SyncPoller;
|
import com.azure.core.annotation.*; import com.azure.core.management.polling.*; import com.azure.core.util.polling.*;
|
[
"com.azure.core"
] |
com.azure.core;
| 573,600
|
/**
 * Return the set of hosts on which this VM may run, by dispatching the
 * {@code VM.get_possible_hosts} XML-RPC call over the given connection.
 *
 * @param c the connection used to dispatch the call
 * @return the possible hosts
 * @throws BadServerResponse if the server returns an unexpected response
 * @throws XenAPIException if the API call fails
 * @throws XmlRpcException if the XML-RPC transport fails
 */
public Set<Host> getPossibleHosts(Connection c) throws
   BadServerResponse,
   XenAPIException,
   XmlRpcException {
    String method_call = "VM.get_possible_hosts";
    String session = c.getSessionReference();
    Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)};
    // Unbounded wildcard instead of a raw Map; only get("Value") is needed.
    Map<?, ?> response = c.dispatch(method_call, method_params);
    Object result = response.get("Value");
    return Types.toSetOfHost(result);
}
|
Set<Host> function(Connection c) throws BadServerResponse, XenAPIException, XmlRpcException { String method_call = STR; String session = c.getSessionReference(); Object[] method_params = {Marshalling.toXMLRPC(session), Marshalling.toXMLRPC(this.ref)}; Map response = c.dispatch(method_call, method_params); Object result = response.get("Value"); return Types.toSetOfHost(result); }
|
/**
* Return the list of hosts on which this VM may run.
*
* @return The possible hosts
*/
|
Return the list of hosts on which this VM may run
|
getPossibleHosts
|
{
"repo_name": "cinderella/incubator-cloudstack",
"path": "deps/XenServerJava/com/xensource/xenapi/VM.java",
"license": "apache-2.0",
"size": 169722
}
|
[
"com.xensource.xenapi.Types",
"java.util.Map",
"java.util.Set",
"org.apache.xmlrpc.XmlRpcException"
] |
import com.xensource.xenapi.Types; import java.util.Map; import java.util.Set; import org.apache.xmlrpc.XmlRpcException;
|
import com.xensource.xenapi.*; import java.util.*; import org.apache.xmlrpc.*;
|
[
"com.xensource.xenapi",
"java.util",
"org.apache.xmlrpc"
] |
com.xensource.xenapi; java.util; org.apache.xmlrpc;
| 1,830,864
|
/**
 * Pings the database by executing the configured scalar ping query.
 *
 * @return the ping query result converted to an Integer
 */
@Override
public Integer doPingDB() {
    // NOTE(review): Yank.queryScalar could return null if the ping query
    // yields no row, which would make intValue() throw an NPE — confirm
    // SQL_PING_DB always produces a scalar result.
    return Yank.queryScalar(SQL_PING_DB, Long.class, null).intValue();
}
|
Integer function() { return Yank.queryScalar(SQL_PING_DB, Long.class, null).intValue(); }
|
/**************
* ITEMS DAOs *
**************/
|
ITEMS DAOs
|
doPingDB
|
{
"repo_name": "druciak/openhab",
"path": "bundles/persistence/org.openhab.persistence.jdbc/java/org/openhab/persistence/jdbc/db/JdbcMariadbDAO.java",
"license": "epl-1.0",
"size": 2942
}
|
[
"org.knowm.yank.Yank"
] |
import org.knowm.yank.Yank;
|
import org.knowm.yank.*;
|
[
"org.knowm.yank"
] |
org.knowm.yank;
| 1,905,244
|
/**
 * Removes and returns the given overlay from the given cell. If no overlay
 * is supplied, every overlay on the cell is removed via removeCellOverlays.
 *
 * @param cell
 *            Cell whose overlay should be removed.
 * @param overlay
 *            Optional overlay to be removed; null removes all overlays.
 * @return the overlay that was passed in (possibly null).
 */
public mxICellOverlay removeCellOverlay(Object cell, mxICellOverlay overlay)
{
	if (overlay == null)
	{
		removeCellOverlays(cell);
		return overlay;
	}

	mxICellOverlay[] existing = getCellOverlays(cell);

	if (existing != null)
	{
		// TODO: Use arraycopy from/to same array to speed this up
		List<mxICellOverlay> remaining = new ArrayList<mxICellOverlay>(
				Arrays.asList(existing));

		if (remaining.remove(overlay))
		{
			removeCellOverlayComponent(overlay, cell);
		}

		overlays.put(cell, remaining.toArray(new mxICellOverlay[remaining.size()]));
	}

	return overlay;
}
|
mxICellOverlay function(Object cell, mxICellOverlay overlay) { if (overlay == null) { removeCellOverlays(cell); } else { mxICellOverlay[] arr = getCellOverlays(cell); if (arr != null) { List<mxICellOverlay> list = new ArrayList<mxICellOverlay>( Arrays.asList(arr)); if (list.remove(overlay)) { removeCellOverlayComponent(overlay, cell); } arr = list.toArray(new mxICellOverlay[list.size()]); overlays.put(cell, arr); } } return overlay; }
|
/**
* Removes and returns the given overlay from the given cell. This method
* fires a remove overlay event. If no overlay is given, then all overlays
* are removed using removeOverlays.
*
* @param cell
* Cell whose overlay should be removed.
* @param overlay
* Optional overlay to be removed.
*/
|
Removes and returns the given overlay from the given cell. This method fires a remove overlay event. If no overlay is given, then all overlays are removed using removeOverlays
|
removeCellOverlay
|
{
"repo_name": "dpisarewski/gka_wise12",
"path": "src/com/mxgraph/swing/mxGraphComponent.java",
"license": "lgpl-2.1",
"size": 102389
}
|
[
"java.util.ArrayList",
"java.util.Arrays",
"java.util.List"
] |
import java.util.ArrayList; import java.util.Arrays; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,816,619
|
/**
 * Perform a sanity check that the system clock reports a reasonably current
 * year, guarding against issues in third party libraries when the clock is
 * badly stale.
 */
public static void verifySystemTimeIsReasonable()
{
    final int year = DateTime.now().year().get();
    if (year < 2019) {
        failRequirement("Presto requires the system time to be current (found year %s)", year);
    }
}
|
static void function() { int currentYear = DateTime.now().year().get(); if (currentYear < 2019) { failRequirement(STR, currentYear); } }
|
/**
* Perform a sanity check to make sure that the year is reasonably current, to guard against
* issues in third party libraries.
*/
|
Perform a sanity check to make sure that the year is reasonably current, to guard against issues in third party libraries
|
verifySystemTimeIsReasonable
|
{
"repo_name": "treasure-data/presto",
"path": "presto-main/src/main/java/io/prestosql/server/PrestoSystemRequirements.java",
"license": "apache-2.0",
"size": 6511
}
|
[
"org.joda.time.DateTime"
] |
import org.joda.time.DateTime;
|
import org.joda.time.*;
|
[
"org.joda.time"
] |
org.joda.time;
| 1,217,359
|
/**
 * Converts a java primitive type expression to its Java object form by
 * delegating to the wrapped writer.
 *
 * @param value the java expression to be converted
 * @param javaType the type of the converted expression
 * @throws IOException if the underlying writer fails
 */
public void printJavaTypeToObject(String value, JClass javaType)
  throws IOException
{
  _writer.printJavaTypeToObject(value, javaType);
}
|
void function(String value, JClass javaType) throws IOException { _writer.printJavaTypeToObject(value, javaType); }
|
/**
* Converts a java primitive type to a Java object.
*
* @param value the java expression to be converted
* @param javaType the type of the converted expression.
*/
|
Converts a java primitive type to a Java object
|
printJavaTypeToObject
|
{
"repo_name": "dwango/quercus",
"path": "src/main/java/com/caucho/java/gen/JavaWriterWrapper.java",
"license": "gpl-2.0",
"size": 5994
}
|
[
"com.caucho.bytecode.JClass",
"java.io.IOException"
] |
import com.caucho.bytecode.JClass; import java.io.IOException;
|
import com.caucho.bytecode.*; import java.io.*;
|
[
"com.caucho.bytecode",
"java.io"
] |
com.caucho.bytecode; java.io;
| 2,096,398
|
/**
 * Persists a new extension link instance within a transaction.
 *
 * @param link
 *          the extension link to persist (not {@code null})
 * @throws AmbariException declared by the signature for persistence failures
 */
@Transactional
public void create(ExtensionLinkEntity link)
    throws AmbariException {
  entityManagerProvider.get().persist(link);
}
|
void function(ExtensionLinkEntity link) throws AmbariException { EntityManager entityManager = entityManagerProvider.get(); entityManager.persist(link); }
|
/**
* Persists a new extension link instance.
*
* @param link
* the extension link to persist (not {@code null}).
*/
|
Persists a new extension link instance
|
create
|
{
"repo_name": "arenadata/ambari",
"path": "ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ExtensionLinkDAO.java",
"license": "apache-2.0",
"size": 9041
}
|
[
"javax.persistence.EntityManager",
"org.apache.ambari.server.AmbariException",
"org.apache.ambari.server.orm.entities.ExtensionLinkEntity"
] |
import javax.persistence.EntityManager; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.orm.entities.ExtensionLinkEntity;
|
import javax.persistence.*; import org.apache.ambari.server.*; import org.apache.ambari.server.orm.entities.*;
|
[
"javax.persistence",
"org.apache.ambari"
] |
javax.persistence; org.apache.ambari;
| 2,323,852
|
/**
 * Filters the given items, keeping only those whose filename matches the
 * specified predicate.
 *
 * @param items the items to filter
 * @param predicate the predicate applied to each item's filename
 * @return an {@code Iterable} over the matching items
 */
public static <T extends HasFilename> Iterable<T> filter(final Iterable<T> items,
    final Predicate<String> predicate) {
  final Predicate<T> matchesFilename = typeMatchingPredicateFrom(predicate);
  return Iterables.filter(items, matchesFilename);
}
|
static <T extends HasFilename> Iterable<T> function(final Iterable<T> items, final Predicate<String> predicate) { return Iterables.filter(items, typeMatchingPredicateFrom(predicate)); }
|
/**
* A filter for Iterable<? extends HasFileType> that returns only those whose FileType matches the
* specified Predicate.
*/
|
A filter for Iterable that returns only those whose FileType matches the specified Predicate
|
filter
|
{
"repo_name": "Digas29/bazel",
"path": "src/main/java/com/google/devtools/build/lib/util/FileType.java",
"license": "apache-2.0",
"size": 8453
}
|
[
"com.google.common.base.Predicate",
"com.google.common.collect.Iterables"
] |
import com.google.common.base.Predicate; import com.google.common.collect.Iterables;
|
import com.google.common.base.*; import com.google.common.collect.*;
|
[
"com.google.common"
] |
com.google.common;
| 2,766,192
|
/**
 * Applies a primary-key partition transformation to guarantee the strict
 * ordering of changelog messages per key.
 *
 * @param changelogMode the changelog mode produced by the input
 * @param inputTransform the upstream transformation feeding the sink
 * @param primaryKeys indices of the primary key fields; must be non-empty
 *     whenever a repartition is required
 * @param sinkParallelism the configured parallelism of the sink
 * @param upsertMaterialize whether upsert materialization is enabled, which
 *     forces keyed partitioning even when parallelism is unchanged
 * @return the input transformation, or a keyed {@code PartitionTransformation}
 *     with the sink's parallelism
 * @throws TableException if repartitioning is needed but no primary key exists
 */
private Transformation<RowData> applyKeyBy(
ChangelogMode changelogMode,
Transformation<RowData> inputTransform,
int[] primaryKeys,
int sinkParallelism,
boolean upsertMaterialize) {
final int inputParallelism = inputTransform.getParallelism();
// No shuffle needed: parallelism is unchanged or the stream is insert-only,
// and upsert materialization does not force keyed partitioning.
if ((inputParallelism == sinkParallelism || changelogMode.containsOnly(RowKind.INSERT))
&& !upsertMaterialize) {
return inputTransform;
}
// Repartitioning a non-insert-only changelog without a primary key cannot
// preserve per-key ordering, so fail with an explanatory error.
if (primaryKeys.length == 0) {
throw new TableException(
String.format(
"The sink for table '%s' has a configured parallelism of %s, while the input parallelism is %s. "
+ "Since the configured parallelism is different from the input's parallelism and "
+ "the changelog mode is not insert-only, a primary key is required but could not "
+ "be found.",
tableSinkSpec.getObjectIdentifier().asSummaryString(),
sinkParallelism,
inputParallelism));
}
// Partition by primary key so that all changes for a given key are routed
// to the same sink subtask.
final RowDataKeySelector selector =
KeySelectorUtil.getRowDataSelector(primaryKeys, getInputTypeInfo());
final KeyGroupStreamPartitioner<RowData, RowData> partitioner =
new KeyGroupStreamPartitioner<>(
selector, KeyGroupRangeAssignment.DEFAULT_LOWER_BOUND_MAX_PARALLELISM);
Transformation<RowData> partitionedTransform =
new PartitionTransformation<>(inputTransform, partitioner);
partitionedTransform.setParallelism(sinkParallelism);
return partitionedTransform;
}
|
Transformation<RowData> function( ChangelogMode changelogMode, Transformation<RowData> inputTransform, int[] primaryKeys, int sinkParallelism, boolean upsertMaterialize) { final int inputParallelism = inputTransform.getParallelism(); if ((inputParallelism == sinkParallelism changelogMode.containsOnly(RowKind.INSERT)) && !upsertMaterialize) { return inputTransform; } if (primaryKeys.length == 0) { throw new TableException( String.format( STR + STR + STR + STR, tableSinkSpec.getObjectIdentifier().asSummaryString(), sinkParallelism, inputParallelism)); } final RowDataKeySelector selector = KeySelectorUtil.getRowDataSelector(primaryKeys, getInputTypeInfo()); final KeyGroupStreamPartitioner<RowData, RowData> partitioner = new KeyGroupStreamPartitioner<>( selector, KeyGroupRangeAssignment.DEFAULT_LOWER_BOUND_MAX_PARALLELISM); Transformation<RowData> partitionedTransform = new PartitionTransformation<>(inputTransform, partitioner); partitionedTransform.setParallelism(sinkParallelism); return partitionedTransform; }
|
/**
* Apply a primary key partition transformation to guarantee the strict ordering of changelog
* messages.
*/
|
Apply a primary key partition transformation to guarantee the strict ordering of changelog messages
|
applyKeyBy
|
{
"repo_name": "StephanEwen/incubator-flink",
"path": "flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecSink.java",
"license": "apache-2.0",
"size": 17822
}
|
[
"org.apache.flink.api.dag.Transformation",
"org.apache.flink.runtime.state.KeyGroupRangeAssignment",
"org.apache.flink.streaming.api.transformations.PartitionTransformation",
"org.apache.flink.streaming.runtime.partitioner.KeyGroupStreamPartitioner",
"org.apache.flink.table.api.TableException",
"org.apache.flink.table.connector.ChangelogMode",
"org.apache.flink.table.data.RowData",
"org.apache.flink.table.planner.plan.utils.KeySelectorUtil",
"org.apache.flink.table.runtime.keyselector.RowDataKeySelector",
"org.apache.flink.types.RowKind"
] |
import org.apache.flink.api.dag.Transformation; import org.apache.flink.runtime.state.KeyGroupRangeAssignment; import org.apache.flink.streaming.api.transformations.PartitionTransformation; import org.apache.flink.streaming.runtime.partitioner.KeyGroupStreamPartitioner; import org.apache.flink.table.api.TableException; import org.apache.flink.table.connector.ChangelogMode; import org.apache.flink.table.data.RowData; import org.apache.flink.table.planner.plan.utils.KeySelectorUtil; import org.apache.flink.table.runtime.keyselector.RowDataKeySelector; import org.apache.flink.types.RowKind;
|
import org.apache.flink.api.dag.*; import org.apache.flink.runtime.state.*; import org.apache.flink.streaming.api.transformations.*; import org.apache.flink.streaming.runtime.partitioner.*; import org.apache.flink.table.api.*; import org.apache.flink.table.connector.*; import org.apache.flink.table.data.*; import org.apache.flink.table.planner.plan.utils.*; import org.apache.flink.table.runtime.keyselector.*; import org.apache.flink.types.*;
|
[
"org.apache.flink"
] |
org.apache.flink;
| 419,357
|
/**
 * Runs one parfor repeated-optimization test configuration against both the
 * DML runtime and an equivalent R script, then compares the resulting
 * matrices and the number of executed MR jobs.
 *
 * @param reusePartitionedData whether partitioned variables may be reused across parfor invocations
 * @param update true to run the "update" variant (TEST_NAME2)
 * @param changedDim true to run the "changed dimensions" variant (TEST_NAME3); if both
 *        flags are false the base variant (TEST_NAME1) is used
 * @param et execution type; MR maps to the HADOOP platform, anything else to HYBRID
 * @param numExpectedMR expected number of executed MR jobs, asserted via Statistics
 */
private void runParForRepeatedOptTest( boolean reusePartitionedData, boolean update, boolean changedDim, ExecType et, int numExpectedMR )
{
// Save global optimizer/runtime state so it can be restored in the finally block.
RUNTIME_PLATFORM platformOld = rtplatform;
double memfactorOld = OptimizerUtils.MEM_UTIL_FACTOR;
boolean reuseOld = ParForProgramBlock.ALLOW_REUSE_PARTITION_VARS;
String TEST_NAME = update ? TEST_NAME2 : ( changedDim ? TEST_NAME3 : TEST_NAME1);
TestConfiguration config = getTestConfiguration(TEST_NAME);
config.addVariable("rows", rows);
config.addVariable("cols", cols);
try
{
rtplatform = (et==ExecType.MR) ? RUNTIME_PLATFORM.HADOOP : RUNTIME_PLATFORM.HYBRID;
OptimizerUtils.MEM_UTIL_FACTOR = computeMemoryUtilFactor( 70 ); //force partitioning
ParForProgramBlock.ALLOW_REUSE_PARTITION_VARS = reusePartitionedData;
String HOME = SCRIPT_DIR + TEST_DIR;
fullDMLScriptName = HOME + TEST_NAME + ".dml";
// Last argument tells the script whether data/dims change between iterations.
programArgs = new String[]{"-stats","-args", HOME + INPUT_DIR + "V" ,
Integer.toString(rows),
Integer.toString(cols),
HOME + OUTPUT_DIR + "R",
Integer.toString((update||changedDim)?1:0)};
fullRScriptName = HOME + TEST_NAME + ".R";
rCmd = "Rscript" + " " + fullRScriptName + " " +
HOME + INPUT_DIR + " " + HOME + EXPECTED_DIR + " " + Integer.toString((update||changedDim)?1:0);
loadTestConfiguration(config);
// Generate a random input matrix with fixed seed 7 for reproducibility.
double[][] V = getRandomMatrix(rows, cols, 0, 1, sparsity, 7);
writeInputMatrix("V", V, true);
runTest(true, false, null, -1);
runRScript(true);
Assert.assertEquals("Unexpected number of executed MR jobs.", numExpectedMR, Statistics.getNoOfExecutedMRJobs());
//compare matrices
HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R");
TestUtils.compareMatrices(dmlfile, rfile, eps, "DML", "R");
}
finally
{
//reset optimizer flags to pre-test configuration
rtplatform = platformOld;
OptimizerUtils.MEM_UTIL_FACTOR = memfactorOld;
ParForProgramBlock.ALLOW_REUSE_PARTITION_VARS = reuseOld;
}
}
|
void function( boolean reusePartitionedData, boolean update, boolean changedDim, ExecType et, int numExpectedMR ) { RUNTIME_PLATFORM platformOld = rtplatform; double memfactorOld = OptimizerUtils.MEM_UTIL_FACTOR; boolean reuseOld = ParForProgramBlock.ALLOW_REUSE_PARTITION_VARS; String TEST_NAME = update ? TEST_NAME2 : ( changedDim ? TEST_NAME3 : TEST_NAME1); TestConfiguration config = getTestConfiguration(TEST_NAME); config.addVariable("rows", rows); config.addVariable("cols", cols); try { rtplatform = (et==ExecType.MR) ? RUNTIME_PLATFORM.HADOOP : RUNTIME_PLATFORM.HYBRID; OptimizerUtils.MEM_UTIL_FACTOR = computeMemoryUtilFactor( 70 ); ParForProgramBlock.ALLOW_REUSE_PARTITION_VARS = reusePartitionedData; String HOME = SCRIPT_DIR + TEST_DIR; fullDMLScriptName = HOME + TEST_NAME + ".dml"; programArgs = new String[]{STR,"-args", HOME + INPUT_DIR + "V" , Integer.toString(rows), Integer.toString(cols), HOME + OUTPUT_DIR + "R", Integer.toString((update changedDim)?1:0)}; fullRScriptName = HOME + TEST_NAME + ".R"; rCmd = STR + " " + fullRScriptName + " " + HOME + INPUT_DIR + " " + HOME + EXPECTED_DIR + " " + Integer.toString((update changedDim)?1:0); loadTestConfiguration(config); double[][] V = getRandomMatrix(rows, cols, 0, 1, sparsity, 7); writeInputMatrix("V", V, true); runTest(true, false, null, -1); runRScript(true); Assert.assertEquals(STR, numExpectedMR, Statistics.getNoOfExecutedMRJobs()); HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R"); HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R"); TestUtils.compareMatrices(dmlfile, rfile, eps, "DML", "R"); } finally { rtplatform = platformOld; OptimizerUtils.MEM_UTIL_FACTOR = memfactorOld; ParForProgramBlock.ALLOW_REUSE_PARTITION_VARS = reuseOld; } }
|
/**
* update, refers to changing data
* changed dim, refers to changing dimensions and changing parfor predicate
*
*
* @param outer execution mode of outer parfor loop
* @param inner execution mode of inner parfor loop
* @param instType execution mode of instructions
*/
|
update, refers to changing data changed dim, refers to changing dimensions and changing parfor predicate
|
runParForRepeatedOptTest
|
{
"repo_name": "fmakari/systemml",
"path": "system-ml/src/test/java/com/ibm/bi/dml/test/integration/functions/parfor/ParForRepeatedOptimizationTest.java",
"license": "apache-2.0",
"size": 6254
}
|
[
"com.ibm.bi.dml.hops.OptimizerUtils",
"com.ibm.bi.dml.lops.LopProperties",
"com.ibm.bi.dml.runtime.controlprogram.ParForProgramBlock",
"com.ibm.bi.dml.runtime.matrix.data.MatrixValue",
"com.ibm.bi.dml.test.integration.TestConfiguration",
"com.ibm.bi.dml.test.utils.TestUtils",
"com.ibm.bi.dml.utils.Statistics",
"java.util.HashMap",
"org.junit.Assert"
] |
import com.ibm.bi.dml.hops.OptimizerUtils; import com.ibm.bi.dml.lops.LopProperties; import com.ibm.bi.dml.runtime.controlprogram.ParForProgramBlock; import com.ibm.bi.dml.runtime.matrix.data.MatrixValue; import com.ibm.bi.dml.test.integration.TestConfiguration; import com.ibm.bi.dml.test.utils.TestUtils; import com.ibm.bi.dml.utils.Statistics; import java.util.HashMap; import org.junit.Assert;
|
import com.ibm.bi.dml.hops.*; import com.ibm.bi.dml.lops.*; import com.ibm.bi.dml.runtime.controlprogram.*; import com.ibm.bi.dml.runtime.matrix.data.*; import com.ibm.bi.dml.test.integration.*; import com.ibm.bi.dml.test.utils.*; import com.ibm.bi.dml.utils.*; import java.util.*; import org.junit.*;
|
[
"com.ibm.bi",
"java.util",
"org.junit"
] |
com.ibm.bi; java.util; org.junit;
| 515,585
|
/**
 * Sets the state of the given VM to {@code State.Starting}.
 *
 * @param vmName name of the VM whose state should be updated
 */
public void setVmStateStarting(String vmName) {
    setVmState(vmName, State.Starting);
}
|
void function(String vmName) { setVmState(vmName, State.Starting); }
|
/**
* vmStateStarting: set the state of a vm to starting
*
* @param vmName
*/
|
vmStateStarting: set the state of a vm to starting
|
setVmStateStarting
|
{
"repo_name": "ikoula/cloudstack",
"path": "plugins/hypervisors/ovm3/src/main/java/com/cloud/hypervisor/ovm3/resources/helpers/Ovm3HypervisorSupport.java",
"license": "gpl-2.0",
"size": 30635
}
|
[
"com.cloud.vm.VirtualMachine"
] |
import com.cloud.vm.VirtualMachine;
|
import com.cloud.vm.*;
|
[
"com.cloud.vm"
] |
com.cloud.vm;
| 2,029,836
|
/**
 * Sends a list of OAuth parameters in the body of the given Http Servlet
 * Response, form-encoded as {@code name=value} pairs joined with '&'.
 *
 * @param resp the response whose writer receives the encoded pairs
 * @param params alternating name/value strings; the length must be even
 * @throws IOException if the response writer cannot be obtained
 * @throws IllegalArgumentException if params has an odd number of entries
 */
public static void sendValues(HttpServletResponse resp, String... params) throws IOException {
    PrintWriter writer = resp.getWriter();
    if ((params.length % 2) != 0) {
        throw new IllegalArgumentException(Messages.MESSAGES.argumentsShouldBeNameValue());
    }
    for (int i = 0; i < params.length; i += 2) {
        if (i > 0) {
            writer.append('&');
        }
        writer.append(encodeForOAuth(params[i])).append('=').append(encodeForOAuth(params[i + 1]));
    }
}
|
static void function(HttpServletResponse resp, String... params) throws IOException { PrintWriter writer = resp.getWriter(); if((params.length % 2) != 0) throw new IllegalArgumentException(Messages.MESSAGES.argumentsShouldBeNameValue()); for(int i=0;i<params.length;i+=2){ if(i > 0) writer.append('&'); writer.append(encodeForOAuth(params[i])); writer.append('='); writer.append(encodeForOAuth(params[i+1])); } }
|
/**
* Sends a list of OAuth parameters in the body of the given Http Servlet Response
* @param params a list of <name, value> parameters
*/
|
Sends a list of OAuth parameters in the body of the given Http Servlet Response
|
sendValues
|
{
"repo_name": "soul2zimate/resteasy2",
"path": "security/resteasy-oauth/src/main/java/org/jboss/resteasy/auth/oauth/OAuthUtils.java",
"license": "apache-2.0",
"size": 9154
}
|
[
"java.io.IOException",
"java.io.PrintWriter",
"javax.servlet.http.HttpServletResponse",
"org.jboss.resteasy.auth.oauth.i18n.Messages"
] |
import java.io.IOException; import java.io.PrintWriter; import javax.servlet.http.HttpServletResponse; import org.jboss.resteasy.auth.oauth.i18n.Messages;
|
import java.io.*; import javax.servlet.http.*; import org.jboss.resteasy.auth.oauth.i18n.*;
|
[
"java.io",
"javax.servlet",
"org.jboss.resteasy"
] |
java.io; javax.servlet; org.jboss.resteasy;
| 1,065,942
|
/**
 * Returns scaled copies of the buffered images of the given field, each
 * resized towards the requested width/height via a bilinear affine transform.
 * <p>
 * NOTE(review): the original header marks this variant as "fast, but
 * complete black (not working)" — treat it as experimental.
 *
 * @param field index of the image field to load via getImageField
 * @param width target width in pixels
 * @param height target height in pixels
 * @return the scaled images, or null if loading the field failed before the
 *         result array was allocated
 */
public BufferedImage[] getScaledBufferedImageField_fastTesting(int field,int width,int height) {
BufferedImage[] bufferedImagesBefore = null;
BufferedImage[] scaledBufferedImages = null;
try {
bufferedImagesBefore = getImageField(field);
scaledBufferedImages = new BufferedImage[bufferedImagesBefore.length];
// scaleWidth/scaleHeight first hold the source dimensions, then are
// reused for the scale factors (target / source).
double scaleWidth = 0;
double scaleHeight = 0;
for (int i = 0;i < scaledBufferedImages.length;i++) {
scaleWidth = bufferedImagesBefore[i].getWidth();
scaleHeight = bufferedImagesBefore[i].getHeight();
// NOTE(review): the target image is created with the SOURCE dimensions
// (truncated), not the requested width/height — confirm this is intended.
scaledBufferedImages[i] = new BufferedImage((int)scaleWidth,(int)scaleHeight,BufferedImage.TYPE_INT_ARGB);
scaleWidth = width / scaleWidth;
scaleHeight = height / scaleHeight;
AffineTransform at = new AffineTransform();
at.scale(scaleWidth, scaleHeight);
AffineTransformOp scaleOp = new AffineTransformOp(at,AffineTransformOp.TYPE_BILINEAR);
scaledBufferedImages[i] = scaleOp.filter(bufferedImagesBefore[i], scaledBufferedImages[i]);
}
} catch(Exception e) {
// NOTE(review): broad catch logs the failure and falls through, returning
// whatever was built so far (possibly null) — callers must handle null.
logger.error("Fehler in getScaledBufferedImageField("+field+","+width+","+height+"): "+e);
}
return scaledBufferedImages;
}
|
BufferedImage[] function(int field,int width,int height) { BufferedImage[] bufferedImagesBefore = null; BufferedImage[] scaledBufferedImages = null; try { bufferedImagesBefore = getImageField(field); scaledBufferedImages = new BufferedImage[bufferedImagesBefore.length]; double scaleWidth = 0; double scaleHeight = 0; for (int i = 0;i < scaledBufferedImages.length;i++) { scaleWidth = bufferedImagesBefore[i].getWidth(); scaleHeight = bufferedImagesBefore[i].getHeight(); scaledBufferedImages[i] = new BufferedImage((int)scaleWidth,(int)scaleHeight,BufferedImage.TYPE_INT_ARGB); scaleWidth = width / scaleWidth; scaleHeight = height / scaleHeight; AffineTransform at = new AffineTransform(); at.scale(scaleWidth, scaleHeight); AffineTransformOp scaleOp = new AffineTransformOp(at,AffineTransformOp.TYPE_BILINEAR); scaledBufferedImages[i] = scaleOp.filter(bufferedImagesBefore[i], scaledBufferedImages[i]); } } catch(Exception e) { logger.error(STR+field+","+width+","+height+STR+e); } return scaledBufferedImages; }
|
/**
* <b>Fast, but complete black (eq not working), maybe taking another look at it, may be the better solution</b>
* @param field
* @param width
* @param height
* @return
*/
|
Fast, but complete black (eq not working), maybe taking another look at it, may be the better solution
|
getScaledBufferedImageField_fastTesting
|
{
"repo_name": "samp1044/SpaceGame",
"path": "src/support/Resources.java",
"license": "gpl-3.0",
"size": 37101
}
|
[
"java.awt.geom.AffineTransform",
"java.awt.image.AffineTransformOp",
"java.awt.image.BufferedImage"
] |
import java.awt.geom.AffineTransform; import java.awt.image.AffineTransformOp; import java.awt.image.BufferedImage;
|
import java.awt.geom.*; import java.awt.image.*;
|
[
"java.awt"
] |
java.awt;
| 773,680
|
protected void handleRecordException(
@NotNull final Exception e,
@Nullable final FileWALPointer ptr) {
if (log.isInfoEnabled())
log.info("Stopping WAL iteration due to an exception: " + e.getMessage() + ", ptr=" + ptr);
}
|
void function( @NotNull final Exception e, @Nullable final FileWALPointer ptr) { if (log.isInfoEnabled()) log.info(STR + e.getMessage() + STR + ptr); }
|
/**
* Handler for record deserialization exception
* @param e problem from records reading
* @param ptr file pointer was accessed
*/
|
Handler for record deserialization exception
|
handleRecordException
|
{
"repo_name": "voipp/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/wal/AbstractWalRecordsIterator.java",
"license": "apache-2.0",
"size": 13576
}
|
[
"org.jetbrains.annotations.NotNull",
"org.jetbrains.annotations.Nullable"
] |
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable;
|
import org.jetbrains.annotations.*;
|
[
"org.jetbrains.annotations"
] |
org.jetbrains.annotations;
| 1,330,709
|
public void fixJUnitClassPath(IJavaProject project) {
IPath junitPath = JUnitCore.JUNIT4_CONTAINER_PATH;
boolean hasJUnit = false;
boolean hasEvoSuite = false;
boolean hasOldEvoSuite = false;
try {
Path containerPath = new Path("org.evosuite.eclipse.classpathContainerInitializer");
IClasspathContainer container = JavaCore.getClasspathContainer(containerPath, project);
System.out.println("EvoSuite JAR at: " + container.getPath().toOSString());
IClasspathEntry[] oldEntries = project.getRawClasspath();
ArrayList<IClasspathEntry> newEntries = new ArrayList<IClasspathEntry>(oldEntries.length + 1);
IClasspathEntry cpentry = JavaCore.newContainerEntry(junitPath);
for (int i = 0; i < oldEntries.length; i++) {
IClasspathEntry curr = oldEntries[i];
// Check if JUnit is already in the build path
if (curr.getEntryKind() == IClasspathEntry.CPE_CONTAINER) {
IPath path = curr.getPath();
if (path.equals(cpentry.getPath())) {
hasJUnit = true;
}
if (path.equals(container.getPath())) {
hasEvoSuite = true;
}
} else if (curr.getEntryKind() == IClasspathEntry.CPE_LIBRARY) {
// Check for older EvoSuite entries
IPath path = curr.getPath();
if (path.toFile().getName().equals(Activator.EVOSUITE_JAR)) {
if (path.equals(container.getPath())) {
System.out.println("Is current evosuite!");
hasEvoSuite = true;
} else {
System.out.println("Is NOT current evosuite!");
hasOldEvoSuite = true;
continue;
}
}
if (path.equals(cpentry.getPath())) {
hasJUnit = true;
}
if (path.equals(container.getPath())) {
hasEvoSuite = true;
}
}
if (curr != null) {
newEntries.add(curr);
}
}
if (hasJUnit && hasEvoSuite && !hasOldEvoSuite) {
return;
}
// add the entry
if (!hasJUnit) {
newEntries.add(cpentry);
}
if (!hasEvoSuite && container != null) {
for (IClasspathEntry entry : container.getClasspathEntries()) {
newEntries.add(entry);
}
}
System.out.println("New classpath: " + newEntries);
// newEntries.add(JavaCore.newContainerEntry(EvoSuiteClasspathContainer.ID));
// Convert newEntries to an array
IClasspathEntry[] newCPEntries = newEntries.toArray(new IClasspathEntry[newEntries.size()]);
project.setRawClasspath(newCPEntries, null);
} catch (JavaModelException e) {
e.printStackTrace();
}
}
|
void function(IJavaProject project) { IPath junitPath = JUnitCore.JUNIT4_CONTAINER_PATH; boolean hasJUnit = false; boolean hasEvoSuite = false; boolean hasOldEvoSuite = false; try { Path containerPath = new Path(STR); IClasspathContainer container = JavaCore.getClasspathContainer(containerPath, project); System.out.println(STR + container.getPath().toOSString()); IClasspathEntry[] oldEntries = project.getRawClasspath(); ArrayList<IClasspathEntry> newEntries = new ArrayList<IClasspathEntry>(oldEntries.length + 1); IClasspathEntry cpentry = JavaCore.newContainerEntry(junitPath); for (int i = 0; i < oldEntries.length; i++) { IClasspathEntry curr = oldEntries[i]; if (curr.getEntryKind() == IClasspathEntry.CPE_CONTAINER) { IPath path = curr.getPath(); if (path.equals(cpentry.getPath())) { hasJUnit = true; } if (path.equals(container.getPath())) { hasEvoSuite = true; } } else if (curr.getEntryKind() == IClasspathEntry.CPE_LIBRARY) { IPath path = curr.getPath(); if (path.toFile().getName().equals(Activator.EVOSUITE_JAR)) { if (path.equals(container.getPath())) { System.out.println(STR); hasEvoSuite = true; } else { System.out.println(STR); hasOldEvoSuite = true; continue; } } if (path.equals(cpentry.getPath())) { hasJUnit = true; } if (path.equals(container.getPath())) { hasEvoSuite = true; } } if (curr != null) { newEntries.add(curr); } } if (hasJUnit && hasEvoSuite && !hasOldEvoSuite) { return; } if (!hasJUnit) { newEntries.add(cpentry); } if (!hasEvoSuite && container != null) { for (IClasspathEntry entry : container.getClasspathEntries()) { newEntries.add(entry); } } System.out.println(STR + newEntries); IClasspathEntry[] newCPEntries = newEntries.toArray(new IClasspathEntry[newEntries.size()]); project.setRawClasspath(newCPEntries, null); } catch (JavaModelException e) { e.printStackTrace(); } }
|
/**
* If we generate JUnit tests, we need to make sure that JUnit is on the
* classpath of the project, otherwise we will see compile errors
*
* @param project
*/
|
If we generate JUnit tests, we need to make sure that JUnit is on the classpath of the project, otherwise we will see compile errors
|
fixJUnitClassPath
|
{
"repo_name": "sefaakca/EvoSuite-Sefa",
"path": "plugins/eclipse/evosuite-eclipse-core/src/main/java/org/evosuite/eclipse/popup/actions/TestGenerationAction.java",
"license": "lgpl-3.0",
"size": 14777
}
|
[
"java.util.ArrayList",
"org.eclipse.core.runtime.IPath",
"org.eclipse.core.runtime.Path",
"org.eclipse.jdt.core.IClasspathContainer",
"org.eclipse.jdt.core.IClasspathEntry",
"org.eclipse.jdt.core.IJavaProject",
"org.eclipse.jdt.core.JavaCore",
"org.eclipse.jdt.core.JavaModelException",
"org.eclipse.jdt.junit.JUnitCore",
"org.evosuite.eclipse.Activator"
] |
import java.util.ArrayList; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.Path; import org.eclipse.jdt.core.IClasspathContainer; import org.eclipse.jdt.core.IClasspathEntry; import org.eclipse.jdt.core.IJavaProject; import org.eclipse.jdt.core.JavaCore; import org.eclipse.jdt.core.JavaModelException; import org.eclipse.jdt.junit.JUnitCore; import org.evosuite.eclipse.Activator;
|
import java.util.*; import org.eclipse.core.runtime.*; import org.eclipse.jdt.core.*; import org.eclipse.jdt.junit.*; import org.evosuite.eclipse.*;
|
[
"java.util",
"org.eclipse.core",
"org.eclipse.jdt",
"org.evosuite.eclipse"
] |
java.util; org.eclipse.core; org.eclipse.jdt; org.evosuite.eclipse;
| 1,252,403
|
public DomainRegistry<DomainObjectReference> replicate(Object domainObject) {
DomainRegistryReplication reuse = new DomainRegistryReplication(domainObject);
DomainRegistry<DomainObjectReference> replicant = new DomainRegistry<DomainObjectReference>();
// Cast in call of replicate() keeps the compiler from complaining
// We assume the the caller ensures that he has a Domain*OBJECT*Reference typed registry
// at hand. This should be pretty sure as the caller must pass a domain object for replication
Collection<DomainElementI<DomainObjectReference>> replicatedElements = reuse
.replicate((Collection) getElements());
for (DomainElementI<DomainObjectReference> element : replicatedElements) {
replicant.addElement(element);
}
return replicant;
}
|
DomainRegistry<DomainObjectReference> function(Object domainObject) { DomainRegistryReplication reuse = new DomainRegistryReplication(domainObject); DomainRegistry<DomainObjectReference> replicant = new DomainRegistry<DomainObjectReference>(); Collection<DomainElementI<DomainObjectReference>> replicatedElements = reuse .replicate((Collection) getElements()); for (DomainElementI<DomainObjectReference> element : replicatedElements) { replicant.addElement(element); } return replicant; }
|
/**
* This method works only for domain registries of DomainObjectReferences.
* Therefore it is not typed to <E>
*/
|
This method works only for domain registries of DomainObjectReferences. Therefore it is not typed to
|
replicate
|
{
"repo_name": "Nocket/nocket",
"path": "src/java/org/nocket/gen/domain/visitor/registry/DomainRegistry.java",
"license": "mit",
"size": 2190
}
|
[
"java.util.Collection",
"org.nocket.gen.domain.element.DomainElementI"
] |
import java.util.Collection; import org.nocket.gen.domain.element.DomainElementI;
|
import java.util.*; import org.nocket.gen.domain.element.*;
|
[
"java.util",
"org.nocket.gen"
] |
java.util; org.nocket.gen;
| 504,255
|
private void write(Kim kim, Huff huff, Huff ext) throws JSONException {
for (int at = 0; at < kim.length; at += 1) {
int c = kim.get(at);
write(c, huff);
while ((c & 128) == 128) {
at += 1;
c = kim.get(at);
write(c, ext);
}
}
}
|
void function(Kim kim, Huff huff, Huff ext) throws JSONException { for (int at = 0; at < kim.length; at += 1) { int c = kim.get(at); write(c, huff); while ((c & 128) == 128) { at += 1; c = kim.get(at); write(c, ext); } } }
|
/**
* Write each of the bytes in a kim with Huffman encoding.
*
* @param kim
* A kim containing the bytes to be written.
* @param huff
* The Huffman encoder.
* @param ext
* The Huffman encoder for the extended bytes.
* @throws JSONException
*/
|
Write each of the bytes in a kim with Huffman encoding
|
write
|
{
"repo_name": "anneomcl/DotAMapper",
"path": "cs467-2/Part 2 - DotA Map Visualization (Code)/org/Zipper.java",
"license": "mit",
"size": 14205
}
|
[
"org.json.JSONException",
"org.json.Kim"
] |
import org.json.JSONException; import org.json.Kim;
|
import org.json.*;
|
[
"org.json"
] |
org.json;
| 2,858,255
|
public SignedShort getTypeNum() {
return TYPENUM;
}
|
SignedShort function() { return TYPENUM; }
|
/**
* return uniuque type number.
* @return SignedShort holding unique type number
*/
|
return uniuque type number
|
getTypeNum
|
{
"repo_name": "mksmbrtsh/LLRPexplorer",
"path": "src/org/llrp/ltk/generated/messages/STOP_ROSPEC_RESPONSE.java",
"license": "apache-2.0",
"size": 11322
}
|
[
"org.llrp.ltk.types.SignedShort"
] |
import org.llrp.ltk.types.SignedShort;
|
import org.llrp.ltk.types.*;
|
[
"org.llrp.ltk"
] |
org.llrp.ltk;
| 766,782
|
JSubroutineParameters jSubroutineRequest = new JSubroutineParameters();
jSubroutineRequest.add(new JDynArray(""));
return jSubroutineRequest;
}
|
JSubroutineParameters jSubroutineRequest = new JSubroutineParameters(); jSubroutineRequest.add(new JDynArray("")); return jSubroutineRequest; }
|
/**
* constructs jSubroutineRequest, which is passed as an argument to
* jConnection.
*
* @return
*/
|
constructs jSubroutineRequest, which is passed as an argument to jConnection
|
constructjSubroutineRequest
|
{
"repo_name": "debabratahazra/DS",
"path": "designstudio/components/integrationfwk/ui/com.odcgroup.integrationfwk/src/com/odcgroup/integrationfwk/ui/t24connectivity/landscapeservice/CreateDataLibraryService.java",
"license": "epl-1.0",
"size": 1819
}
|
[
"com.jbase.jremote.JDynArray",
"com.jbase.jremote.JSubroutineParameters"
] |
import com.jbase.jremote.JDynArray; import com.jbase.jremote.JSubroutineParameters;
|
import com.jbase.jremote.*;
|
[
"com.jbase.jremote"
] |
com.jbase.jremote;
| 516,038
|
public List<Metric.AggregateFunction> getSupportedAggregateFunctions() {
if (!aggregateFunctionSupported) {
return new ArrayList<Metric.AggregateFunction>(0);
} else {
return Arrays.asList(new Metric.AggregateFunction[] { AggregateFunction.SUM,
AggregateFunction.MAXIMUM,
AggregateFunction.ARITHMETIC_MEAN,
AggregateFunction.GEOMETRIC_MEAN,
AggregateFunction.RANK });
}
}
|
List<Metric.AggregateFunction> function() { if (!aggregateFunctionSupported) { return new ArrayList<Metric.AggregateFunction>(0); } else { return Arrays.asList(new Metric.AggregateFunction[] { AggregateFunction.SUM, AggregateFunction.MAXIMUM, AggregateFunction.ARITHMETIC_MEAN, AggregateFunction.GEOMETRIC_MEAN, AggregateFunction.RANK }); } }
|
/**
* Returns a list of all supported aggregate functions.
*
* @return
*/
|
Returns a list of all supported aggregate functions
|
getSupportedAggregateFunctions
|
{
"repo_name": "RaffaelBild/arx",
"path": "src/main/org/deidentifier/arx/metric/MetricDescription.java",
"license": "apache-2.0",
"size": 5444
}
|
[
"java.util.ArrayList",
"java.util.Arrays",
"java.util.List",
"org.deidentifier.arx.metric.Metric"
] |
import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.deidentifier.arx.metric.Metric;
|
import java.util.*; import org.deidentifier.arx.metric.*;
|
[
"java.util",
"org.deidentifier.arx"
] |
java.util; org.deidentifier.arx;
| 200,306
|
protected PluginsService getPluginsService() {
return pluginsService;
}
|
PluginsService function() { return pluginsService; }
|
/**
* The {@link PluginsService} used to build this node's components.
*/
|
The <code>PluginsService</code> used to build this node's components
|
getPluginsService
|
{
"repo_name": "nezirus/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/node/Node.java",
"license": "apache-2.0",
"size": 53944
}
|
[
"org.elasticsearch.plugins.PluginsService"
] |
import org.elasticsearch.plugins.PluginsService;
|
import org.elasticsearch.plugins.*;
|
[
"org.elasticsearch.plugins"
] |
org.elasticsearch.plugins;
| 2,336,445
|
private PaletteContainer createConnections2Group() {
PaletteDrawer paletteContainer = new PaletteDrawer(
umlClassMetaModel.diagram.part.Messages.Connections2Group_title);
paletteContainer.setId("createConnections2Group"); //$NON-NLS-1$
paletteContainer.add(createIncomingRelations1CreationTool());
paletteContainer.add(createOutgoingRelations2CreationTool());
return paletteContainer;
}
|
PaletteContainer function() { PaletteDrawer paletteContainer = new PaletteDrawer( umlClassMetaModel.diagram.part.Messages.Connections2Group_title); paletteContainer.setId(STR); paletteContainer.add(createIncomingRelations1CreationTool()); paletteContainer.add(createOutgoingRelations2CreationTool()); return paletteContainer; }
|
/**
* Creates "Connections" palette tool group
* @generated
*/
|
Creates "Connections" palette tool group
|
createConnections2Group
|
{
"repo_name": "KuehneThomas/model-to-model-transformation-generator",
"path": "src/MetaModels.UmlClass.diagram/src/umlClassMetaModel/diagram/part/UmlClassMetaModelPaletteFactory.java",
"license": "mit",
"size": 5815
}
|
[
"org.eclipse.gef.palette.PaletteContainer",
"org.eclipse.gef.palette.PaletteDrawer"
] |
import org.eclipse.gef.palette.PaletteContainer; import org.eclipse.gef.palette.PaletteDrawer;
|
import org.eclipse.gef.palette.*;
|
[
"org.eclipse.gef"
] |
org.eclipse.gef;
| 2,217,231
|
Set<VariableValueSource> getVariables(String valueTableName);
|
Set<VariableValueSource> getVariables(String valueTableName);
|
/**
* Returns custom variables belonging to the specified value table.
*
* @param valueTableName value table name
* @return custom variable in the specified value table (or an empty set if none)
*/
|
Returns custom variables belonging to the specified value table
|
getVariables
|
{
"repo_name": "apruden/onyx",
"path": "onyx-core/src/main/java/org/obiba/onyx/magma/CustomVariablesRegistry.java",
"license": "gpl-3.0",
"size": 929
}
|
[
"java.util.Set",
"org.obiba.magma.VariableValueSource"
] |
import java.util.Set; import org.obiba.magma.VariableValueSource;
|
import java.util.*; import org.obiba.magma.*;
|
[
"java.util",
"org.obiba.magma"
] |
java.util; org.obiba.magma;
| 2,391,558
|
public IDataSource newDataSource(String question, String category, String openAnswer) {
return ConditionBuilder.createQuestionCondition(this, question, category, openAnswer).getElement();
}
|
IDataSource function(String question, String category, String openAnswer) { return ConditionBuilder.createQuestionCondition(this, question, category, openAnswer).getElement(); }
|
/**
* Build a data source that gives the open answer in current questionnaire.
* @param question
* @param category
* @param openAnswer
* @return
*/
|
Build a data source that gives the open answer in current questionnaire
|
newDataSource
|
{
"repo_name": "apruden/onyx",
"path": "onyx-modules/quartz/quartz-core/src/main/java/org/obiba/onyx/quartz/core/engine/questionnaire/util/QuestionnaireBuilder.java",
"license": "gpl-3.0",
"size": 10984
}
|
[
"org.obiba.onyx.core.data.IDataSource",
"org.obiba.onyx.quartz.core.engine.questionnaire.util.builder.ConditionBuilder"
] |
import org.obiba.onyx.core.data.IDataSource; import org.obiba.onyx.quartz.core.engine.questionnaire.util.builder.ConditionBuilder;
|
import org.obiba.onyx.core.data.*; import org.obiba.onyx.quartz.core.engine.questionnaire.util.builder.*;
|
[
"org.obiba.onyx"
] |
org.obiba.onyx;
| 2,848,507
|
@Test
public void testPostiveManagersAccess() throws Exception {
ClientConfig config = new ClientConfig();
BasicAuthSecurityHandler secHandler = new BasicAuthSecurityHandler();
//secHandler.setSSLRequired(false);
secHandler.setUserName("user2");
secHandler.setPassword("user2pwd");
config.handlers(secHandler);
RestClient client = new RestClient(config);
//final URI uri = UriBuilder.fromUri(TestUtils.getBaseTestUri(CONTEXT_ROOT, "rolesallowed")).path(OnlyManagersResource.class).build();
final URI uri = URI.create(TestUtils.getBaseTestUri(CONTEXT_ROOT, "rolesallowed") + "/managersonly");
Resource res = client.resource(uri);
ClientResponse response = res.delete();
assertEquals(Status.NO_CONTENT, response.getStatusCode());
response = res.get();
assertEquals(Status.NO_CONTENT, response.getStatusCode());
String data = "My Hello World String!";
response = res.post(data);
assertEquals(Status.OK, response.getStatusCode());
assertEquals(data, response.getEntity(String.class));
assertNotNull("RSInHandler1 not invoked", server.waitForStringInLog("in RSInHandler1 handleMessage method"));
assertNotNull("RSInHandler2 not invoked", server.waitForStringInLog("in RSInHandler2 handleMessage method"));
response = res.get();
assertEquals(Status.OK, response.getStatusCode());
assertEquals(data, response.getEntity(String.class));
data = "Goodbye String!";
response = res.post(data);
assertEquals(Status.OK, response.getStatusCode());
assertEquals(data, response.getEntity(String.class));
response = res.get();
assertEquals(Status.OK, response.getStatusCode());
assertEquals(data, response.getEntity(String.class));
}
|
void function() throws Exception { ClientConfig config = new ClientConfig(); BasicAuthSecurityHandler secHandler = new BasicAuthSecurityHandler(); secHandler.setUserName("user2"); secHandler.setPassword(STR); config.handlers(secHandler); RestClient client = new RestClient(config); final URI uri = URI.create(TestUtils.getBaseTestUri(CONTEXT_ROOT, STR) + STR); Resource res = client.resource(uri); ClientResponse response = res.delete(); assertEquals(Status.NO_CONTENT, response.getStatusCode()); response = res.get(); assertEquals(Status.NO_CONTENT, response.getStatusCode()); String data = STR; response = res.post(data); assertEquals(Status.OK, response.getStatusCode()); assertEquals(data, response.getEntity(String.class)); assertNotNull(STR, server.waitForStringInLog(STR)); assertNotNull(STR, server.waitForStringInLog(STR)); response = res.get(); assertEquals(Status.OK, response.getStatusCode()); assertEquals(data, response.getEntity(String.class)); data = STR; response = res.post(data); assertEquals(Status.OK, response.getStatusCode()); assertEquals(data, response.getEntity(String.class)); response = res.get(); assertEquals(Status.OK, response.getStatusCode()); assertEquals(data, response.getEntity(String.class)); }
|
/**
* Tests that an authenticated and authorized user can invoke the JAX-RS
* resource methods.
*
* @throws Exception
*/
|
Tests that an authenticated and authorized user can invoke the JAX-RS resource methods
|
testPostiveManagersAccess
|
{
"repo_name": "OpenLiberty/open-liberty",
"path": "dev/io.openliberty.ws.jaxrs.global.handler.internal_fat/fat/src/com/ibm/ws/webservices/handler/fat/CDIRolesAllowedTest.java",
"license": "epl-1.0",
"size": 17593
}
|
[
"java.net.URI",
"org.apache.wink.client.ClientConfig",
"org.apache.wink.client.ClientResponse",
"org.apache.wink.client.Resource",
"org.apache.wink.client.RestClient",
"org.apache.wink.client.handlers.BasicAuthSecurityHandler",
"org.junit.Assert"
] |
import java.net.URI; import org.apache.wink.client.ClientConfig; import org.apache.wink.client.ClientResponse; import org.apache.wink.client.Resource; import org.apache.wink.client.RestClient; import org.apache.wink.client.handlers.BasicAuthSecurityHandler; import org.junit.Assert;
|
import java.net.*; import org.apache.wink.client.*; import org.apache.wink.client.handlers.*; import org.junit.*;
|
[
"java.net",
"org.apache.wink",
"org.junit"
] |
java.net; org.apache.wink; org.junit;
| 1,253,217
|
public int getIndexOfAttributeChildView(View viewAtIndex) {
return ll_Root_ParentHolderLayout.indexOfChild(viewAtIndex);
}
|
int function(View viewAtIndex) { return ll_Root_ParentHolderLayout.indexOfChild(viewAtIndex); }
|
/**
* Returns the row/index-position of the View/Layout param within the parent layout
* NOTE: This function only checks children whose parent is the root-parent holder layout
* @param viewAtIndex
* @return
*/
|
Returns the row/index-position of the View/Layout param within the parent layout
|
getIndexOfAttributeChildView
|
{
"repo_name": "Aeonitis/GW-Android",
"path": "app/src/main/java/com/gw/view/tagattributeedit/AttributeEditForm.java",
"license": "apache-2.0",
"size": 17734
}
|
[
"android.view.View"
] |
import android.view.View;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 2,613,582
|
public boolean verify()
throws NoSuchAlgorithmException, NoSuchProviderException,
InvalidKeyException, SignatureException
{
return verify(BouncyCastleProvider.PROVIDER_NAME);
}
|
boolean function() throws NoSuchAlgorithmException, NoSuchProviderException, InvalidKeyException, SignatureException { return verify(BouncyCastleProvider.PROVIDER_NAME); }
|
/**
* verify the request using the BC provider.
*/
|
verify the request using the BC provider
|
verify
|
{
"repo_name": "bullda/DroidText",
"path": "src/bouncycastle/repack/org/bouncycastle/jce/PKCS10CertificationRequest.java",
"license": "lgpl-3.0",
"size": 24440
}
|
[
"java.security.InvalidKeyException",
"java.security.NoSuchAlgorithmException",
"java.security.NoSuchProviderException",
"java.security.SignatureException"
] |
import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.security.SignatureException;
|
import java.security.*;
|
[
"java.security"
] |
java.security;
| 2,485,393
|
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<String> getMemberGroups(String objectId, boolean securityEnabledOnly) {
return new PagedIterable<>(getMemberGroupsAsync(objectId, securityEnabledOnly));
}
|
@ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<String> function(String objectId, boolean securityEnabledOnly) { return new PagedIterable<>(getMemberGroupsAsync(objectId, securityEnabledOnly)); }
|
/**
* Gets a collection that contains the object IDs of the groups of which the user is a member.
*
* @param objectId The object ID of the user for which to get group membership.
* @param securityEnabledOnly If true, only membership in security-enabled groups should be checked. Otherwise,
* membership in all groups should be checked.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws GraphErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a collection that contains the object IDs of the groups of which the user is a member.
*/
|
Gets a collection that contains the object IDs of the groups of which the user is a member
|
getMemberGroups
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/implementation/UsersClientImpl.java",
"license": "mit",
"size": 50884
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedIterable"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedIterable;
|
import com.azure.core.annotation.*; import com.azure.core.http.rest.*;
|
[
"com.azure.core"
] |
com.azure.core;
| 1,714,799
|
public Font getMainFont() {
String style = getMainfont(FONTSTYLE);
String weight = getMainfont(FONTWEIGHT);
// init default values
int fstyle = Font.PLAIN;
// convert the css-string-style into a font-integer-style
switch (style) {
case "normal":
fstyle = Font.PLAIN;
break;
case "italic":
fstyle = Font.ITALIC;
break;
}
// in css, the bold-property is not a style-attribute, but a font-weight-attribute
// that's why we have separated this here
if (weight.equals("bold")) {
fstyle = fstyle + Font.BOLD;
}
// convert the size
int fsize = Integer.parseInt(getMainfont(FONTSIZE));
return new Font(getMainfont(FONTNAME), fstyle, fsize);
}
|
Font function() { String style = getMainfont(FONTSTYLE); String weight = getMainfont(FONTWEIGHT); int fstyle = Font.PLAIN; switch (style) { case STR: fstyle = Font.PLAIN; break; case STR: fstyle = Font.ITALIC; break; } if (weight.equals("bold")) { fstyle = fstyle + Font.BOLD; } int fsize = Integer.parseInt(getMainfont(FONTSIZE)); return new Font(getMainfont(FONTNAME), fstyle, fsize); }
|
/**
* Retrieves the main font as font-object.
*
* @return the main-font as {@code Font} variable.
*/
|
Retrieves the main font as font-object
|
getMainFont
|
{
"repo_name": "RalfBarkow/Zettelkasten",
"path": "src/main/java/de/danielluedecke/zettelkasten/database/Settings.java",
"license": "gpl-3.0",
"size": 218921
}
|
[
"java.awt.Font"
] |
import java.awt.Font;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 897,878
|
// Create logical node
DefaultYpmNode rootNode = new DefaultYpmNode(logicalName);
// Create module node with moduleName1
rootNode.addChild(moduleName1); // child to logical node
YpmContext moduleNode = rootNode.getChild(moduleName1);
moduleNode.addChild(xNodeName); // child to module node
moduleNode.addChild(yNodeName); // sibling node to child node "x"
YpmContext xNode = moduleNode.getChild("x");
xNode.addSibling(zNodeName); // sibling node to child node "x"
xNode.addChild(x1NodeName); // child to node x
xNode.addChild(x2NodeName); // child to node x
YpmContext yNode = moduleNode.getChild(yNodeName);
yNode.addChild(y1NodeName); // child to node y
yNode.addChild(y2NodeName); // child to node y
YpmContext zNode = moduleNode.getChild(zNodeName);
zNode.addChild(z1NodeName); // child to node z
zNode.addChild(z2NodeName); // child to node z
return rootNode;
}
|
DefaultYpmNode rootNode = new DefaultYpmNode(logicalName); rootNode.addChild(moduleName1); YpmContext moduleNode = rootNode.getChild(moduleName1); moduleNode.addChild(xNodeName); moduleNode.addChild(yNodeName); YpmContext xNode = moduleNode.getChild("x"); xNode.addSibling(zNodeName); xNode.addChild(x1NodeName); xNode.addChild(x2NodeName); YpmContext yNode = moduleNode.getChild(yNodeName); yNode.addChild(y1NodeName); yNode.addChild(y2NodeName); YpmContext zNode = moduleNode.getChild(zNodeName); zNode.addChild(z1NodeName); zNode.addChild(z2NodeName); return rootNode; }
|
/**
* Constructs ypm tree with single module.
*
* @return ypm tree root node
*/
|
Constructs ypm tree with single module
|
constructYpmTreeSingleModule
|
{
"repo_name": "sdnwiselab/onos",
"path": "apps/yms/ut/src/test/java/org/onosproject/yms/app/ypm/DefaultYpmNodeTest.java",
"license": "apache-2.0",
"size": 8886
}
|
[
"org.onosproject.yms.ypm.DefaultYpmNode",
"org.onosproject.yms.ypm.YpmContext"
] |
import org.onosproject.yms.ypm.DefaultYpmNode; import org.onosproject.yms.ypm.YpmContext;
|
import org.onosproject.yms.ypm.*;
|
[
"org.onosproject.yms"
] |
org.onosproject.yms;
| 269,738
|
Set<String> findAllDownServers() throws Exception;
|
Set<String> findAllDownServers() throws Exception;
|
/**
* get all down group servers
* @return group server ip list
* @throws Exception
*/
|
get all down group servers
|
findAllDownServers
|
{
"repo_name": "sdgdsffdsfff/zeus",
"path": "slb/src/main/java/com/ctrip/zeus/service/status/StatusService.java",
"license": "apache-2.0",
"size": 2943
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 609,586
|
EReference getElement_Next();
|
EReference getElement_Next();
|
/**
* Returns the meta object for the containment reference '{@link sequence_bdsl.Element#getNext <em>Next</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference '<em>Next</em>'.
* @see sequence_bdsl.Element#getNext()
* @see #getElement()
* @generated
*/
|
Returns the meta object for the containment reference '<code>sequence_bdsl.Element#getNext Next</code>'.
|
getElement_Next
|
{
"repo_name": "jesusc/bento",
"path": "tests/test-outputs/bento.sirius.tests.metamodels.output/src/sequence_bdsl/Sequence_bdslPackage.java",
"license": "epl-1.0",
"size": 38904
}
|
[
"org.eclipse.emf.ecore.EReference"
] |
import org.eclipse.emf.ecore.EReference;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,677,093
|
boolean hasNext() throws IOException;
|
boolean hasNext() throws IOException;
|
/**
* Does another set of attributes exist in this reader?
*
* @return <code>true</code> if additional content exists for AttributeReader
*/
|
Does another set of attributes exist in this reader
|
hasNext
|
{
"repo_name": "geotools/geotools",
"path": "modules/library/main/src/main/java/org/geotools/data/AttributeReader.java",
"license": "lgpl-2.1",
"size": 2147
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 768,595
|
public static void openDialog(Component parent) {
JOptionPane.showMessageDialog(parent, new PNLNoAgentModeMessage());
}
|
static void function(Component parent) { JOptionPane.showMessageDialog(parent, new PNLNoAgentModeMessage()); }
|
/**
* Open dialog
*/
|
Open dialog
|
openDialog
|
{
"repo_name": "tuxedo0801/swing-explorer",
"path": "swing-explorer-core/src/org/swingexplorer/PNLNoAgentModeMessage.java",
"license": "lgpl-3.0",
"size": 4303
}
|
[
"java.awt.Component",
"javax.swing.JOptionPane"
] |
import java.awt.Component; import javax.swing.JOptionPane;
|
import java.awt.*; import javax.swing.*;
|
[
"java.awt",
"javax.swing"
] |
java.awt; javax.swing;
| 2,232,180
|
public void testBuildUriMatcher_allMovies() {
UriMatcher testMatcher = MovieProvider.buildUriMatcher();
Assert.assertEquals("All movies URI must match expected constant."
, testMatcher.match(CachedMovieEntry.CONTENT_URI)
, MovieProvider.CACHED_MOVIE);
}
|
void function() { UriMatcher testMatcher = MovieProvider.buildUriMatcher(); Assert.assertEquals(STR , testMatcher.match(CachedMovieEntry.CONTENT_URI) , MovieProvider.CACHED_MOVIE); }
|
/**
* Verifies that {@link MovieProvider#buildUriMatcher()} creates an
* {@link android.content.UriMatcher} that maps correctly the URIs to
* the expected constants. Case for the URI that identifies all movies.
*/
|
Verifies that <code>MovieProvider#buildUriMatcher()</code> creates an <code>android.content.UriMatcher</code> that maps correctly the URIs to the expected constants. Case for the URI that identifies all movies
|
testBuildUriMatcher_allMovies
|
{
"repo_name": "adolfogp/PopularMovies",
"path": "app/src/androidTest/java/mx/com/adolfogarcia/popularmovies/data/MovieProviderTest.java",
"license": "apache-2.0",
"size": 33598
}
|
[
"android.content.UriMatcher",
"junit.framework.Assert",
"mx.com.adolfogarcia.popularmovies.data.MovieContract"
] |
import android.content.UriMatcher; import junit.framework.Assert; import mx.com.adolfogarcia.popularmovies.data.MovieContract;
|
import android.content.*; import junit.framework.*; import mx.com.adolfogarcia.popularmovies.data.*;
|
[
"android.content",
"junit.framework",
"mx.com.adolfogarcia"
] |
android.content; junit.framework; mx.com.adolfogarcia;
| 1,106,395
|
public static void setStickyFooterSelection(DrawerBuilder drawer, int position, Boolean fireOnClick) {
if (position > -1) {
if (drawer.mStickyFooterView != null && drawer.mStickyFooterView instanceof LinearLayout) {
LinearLayout footer = (LinearLayout) drawer.mStickyFooterView;
if (footer.getChildCount() > position && position >= 0) {
IDrawerItem drawerItem = (IDrawerItem) footer.getChildAt(position).getTag();
onFooterDrawerItemClick(drawer, drawerItem, footer.getChildAt(position), fireOnClick);
}
}
}
}
|
static void function(DrawerBuilder drawer, int position, Boolean fireOnClick) { if (position > -1) { if (drawer.mStickyFooterView != null && drawer.mStickyFooterView instanceof LinearLayout) { LinearLayout footer = (LinearLayout) drawer.mStickyFooterView; if (footer.getChildCount() > position && position >= 0) { IDrawerItem drawerItem = (IDrawerItem) footer.getChildAt(position).getTag(); onFooterDrawerItemClick(drawer, drawerItem, footer.getChildAt(position), fireOnClick); } } } }
|
/**
* helper method to set the selection of the footer
*
* @param drawer
* @param position
* @param fireOnClick
*/
|
helper method to set the selection of the footer
|
setStickyFooterSelection
|
{
"repo_name": "Ryan---Yang/MaterialDrawer",
"path": "library/src/main/java/com/mikepenz/materialdrawer/DrawerUtils.java",
"license": "apache-2.0",
"size": 19916
}
|
[
"android.widget.LinearLayout",
"com.mikepenz.materialdrawer.model.interfaces.IDrawerItem"
] |
import android.widget.LinearLayout; import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem;
|
import android.widget.*; import com.mikepenz.materialdrawer.model.interfaces.*;
|
[
"android.widget",
"com.mikepenz.materialdrawer"
] |
android.widget; com.mikepenz.materialdrawer;
| 2,386,182
|
HtmlCanvas htmlCanvas = new BootstrapHtmlCanvas(true);
root.getAspect(ASPECT_FLAT_VIEW_GENERATOR).ifPresent(i -> i.writeOnCanvas(htmlCanvas, new FlatViewContext(root)));
return htmlCanvas.build();
}
|
HtmlCanvas htmlCanvas = new BootstrapHtmlCanvas(true); root.getAspect(ASPECT_FLAT_VIEW_GENERATOR).ifPresent(i -> i.writeOnCanvas(htmlCanvas, new FlatViewContext(root))); return htmlCanvas.build(); }
|
/**
* This is a method default that used BootstrapHtmlCanvas to generate the html of the requirement.
*
* @param root doc in the super class.
* @return doc in the super class.
*/
|
This is a method default that used BootstrapHtmlCanvas to generate the html of the requirement
|
generate
|
{
"repo_name": "opensingular/singular-server",
"path": "requirement/requirement-module/src/main/java/org/opensingular/requirement/module/extrato/ExtratoGeneratorImpl.java",
"license": "apache-2.0",
"size": 1580
}
|
[
"org.opensingular.form.flatview.FlatViewContext",
"org.opensingular.lib.commons.canvas.HtmlCanvas",
"org.opensingular.lib.commons.canvas.bootstrap.BootstrapHtmlCanvas"
] |
import org.opensingular.form.flatview.FlatViewContext; import org.opensingular.lib.commons.canvas.HtmlCanvas; import org.opensingular.lib.commons.canvas.bootstrap.BootstrapHtmlCanvas;
|
import org.opensingular.form.flatview.*; import org.opensingular.lib.commons.canvas.*; import org.opensingular.lib.commons.canvas.bootstrap.*;
|
[
"org.opensingular.form",
"org.opensingular.lib"
] |
org.opensingular.form; org.opensingular.lib;
| 1,872,081
|
@Test
public void checkConstruction() {
assertThat(data1.state(), is(IntentState.INSTALLED));
assertThat(data1.version(), is(timestamp1));
assertThat(data1.intent(), is(intent1));
}
|
void function() { assertThat(data1.state(), is(IntentState.INSTALLED)); assertThat(data1.version(), is(timestamp1)); assertThat(data1.intent(), is(intent1)); }
|
/**
* Checks that intent data objects are properly constructed.
*/
|
Checks that intent data objects are properly constructed
|
checkConstruction
|
{
"repo_name": "kuangrewawa/onos",
"path": "core/api/src/test/java/org/onosproject/net/intent/IntentDataTest.java",
"license": "apache-2.0",
"size": 3154
}
|
[
"org.hamcrest.MatcherAssert",
"org.hamcrest.Matchers"
] |
import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers;
|
import org.hamcrest.*;
|
[
"org.hamcrest"
] |
org.hamcrest;
| 1,405,153
|
public static MessageDigest sha256() {
return get(SHA_256_DIGEST);
}
|
static MessageDigest function() { return get(SHA_256_DIGEST); }
|
/**
* Returns a {@link MessageDigest} instance for SHA-256 digests;
* note that the instance returned is thread local and must not be
* shared amongst threads.
*
* @return a thread local {@link MessageDigest} instance that
* provides SHA-256 message digest functionality.
*/
|
Returns a <code>MessageDigest</code> instance for SHA-256 digests; note that the instance returned is thread local and must not be shared amongst threads
|
sha256
|
{
"repo_name": "gingerwizard/elasticsearch",
"path": "server/src/main/java/org/elasticsearch/common/hash/MessageDigests.java",
"license": "apache-2.0",
"size": 4289
}
|
[
"java.security.MessageDigest"
] |
import java.security.MessageDigest;
|
import java.security.*;
|
[
"java.security"
] |
java.security;
| 688,793
|
void create_10_2_system_procedures(
TransactionController tc,
HashSet newlyCreatedRoutines,
UUID sysUtilUUID)
throws StandardException
{
// void SYSCS_UTIL.BACKUP_DATABASE_NOWAIT(
// IN BACKUPDIR VARCHAR(Limits.DB2_VARCHAR_MAXWIDTH)
// )
{
// procedure argument names
String[] arg_names = {"BACKUPDIR"};
// procedure argument types
TypeDescriptor[] arg_types = {
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH)
};
createSystemProcedureOrFunction(
"BACKUP_DATABASE_NOWAIT",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
(TypeDescriptor) null,
newlyCreatedRoutines, tc);
}
// void
// SYSCS_UTIL.BACKUP_DATABASE_AND_ENABLE_LOG_ARCHIVE_MODE_NOWAIT(
// IN BACKUPDIR VARCHAR(Limits.DB2_VARCHAR_MAXWIDTH),
// IN DELETE_ARCHIVED_LOG_FILES SMALLINT
// )
{
// procedure argument names
String[] arg_names =
{"BACKUPDIR", "DELETE_ARCHIVED_LOG_FILES"};
// procedure argument types
TypeDescriptor[] arg_types = {
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH),
TypeDescriptor.SMALLINT
};
createSystemProcedureOrFunction(
"BACKUP_DATABASE_AND_ENABLE_LOG_ARCHIVE_MODE_NOWAIT",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
(TypeDescriptor) null,
newlyCreatedRoutines, tc);
}
// SYSIBM.SQLFUNCTIONS(VARCHAR(128), VARCHAR(128), VARCHAR(128),
// VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMANAME",
"FUNCNAME",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLFUNCTIONS",
getSysIBMSchemaDescriptor().getUUID(),
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
(TypeDescriptor) null,
newlyCreatedRoutines, tc);
}
// SYSIBM.SQLFUNCTIONPARAMS(VARCHAR(128), VARCHAR(128),
// VARCHAR(128), VARCHAR(128), VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMANAME",
"FUNCNAME",
"PARAMNAME",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLFUNCTIONPARAMS",
getSysIBMSchemaDescriptor().getUUID(),
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
(TypeDescriptor) null,
newlyCreatedRoutines, tc);
}
}
|
void create_10_2_system_procedures( TransactionController tc, HashSet newlyCreatedRoutines, UUID sysUtilUUID) throws StandardException { { String[] arg_names = {STR}; TypeDescriptor[] arg_types = { DataTypeDescriptor.getCatalogType( Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH) }; createSystemProcedureOrFunction( STR, sysUtilUUID, arg_names, arg_types, 0, 0, RoutineAliasInfo.MODIFIES_SQL_DATA, (TypeDescriptor) null, newlyCreatedRoutines, tc); } { String[] arg_names = {STR, STR}; TypeDescriptor[] arg_types = { DataTypeDescriptor.getCatalogType( Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH), TypeDescriptor.SMALLINT }; createSystemProcedureOrFunction( STR, sysUtilUUID, arg_names, arg_types, 0, 0, RoutineAliasInfo.MODIFIES_SQL_DATA, (TypeDescriptor) null, newlyCreatedRoutines, tc); } { String[] arg_names = { STR, STR, STR, STR}; TypeDescriptor[] arg_types = { CATALOG_TYPE_SYSTEM_IDENTIFIER, CATALOG_TYPE_SYSTEM_IDENTIFIER, CATALOG_TYPE_SYSTEM_IDENTIFIER, DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)}; createSystemProcedureOrFunction( STR, getSysIBMSchemaDescriptor().getUUID(), arg_names, arg_types, 0, 1, RoutineAliasInfo.READS_SQL_DATA, (TypeDescriptor) null, newlyCreatedRoutines, tc); } { String[] arg_names = { STR, STR, STR, STR, STR}; TypeDescriptor[] arg_types = { CATALOG_TYPE_SYSTEM_IDENTIFIER, CATALOG_TYPE_SYSTEM_IDENTIFIER, CATALOG_TYPE_SYSTEM_IDENTIFIER, CATALOG_TYPE_SYSTEM_IDENTIFIER, DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)}; createSystemProcedureOrFunction( STR, getSysIBMSchemaDescriptor().getUUID(), arg_names, arg_types, 0, 1, RoutineAliasInfo.READS_SQL_DATA, (TypeDescriptor) null, newlyCreatedRoutines, tc); } }
|
/**
* Create system procedures added in version 10.2.
* <p>
* Create 10.2 system procedures, called by either code creating new
* database, or code doing hard upgrade from previous version.
* <p>
* @param newlyCreatedRoutines TODO
* @param sysUtilUUID uuid of the SYSUTIL schema.
*
* @exception StandardException Standard exception policy.
**/
|
Create system procedures added in version 10.2. Create 10.2 system procedures, called by either code creating new database, or code doing hard upgrade from previous version.
|
create_10_2_system_procedures
|
{
"repo_name": "papicella/snappy-store",
"path": "gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/impl/sql/catalog/DataDictionaryImpl.java",
"license": "apache-2.0",
"size": 403048
}
|
[
"com.pivotal.gemfirexd.internal.catalog.TypeDescriptor",
"com.pivotal.gemfirexd.internal.catalog.types.RoutineAliasInfo",
"com.pivotal.gemfirexd.internal.iapi.error.StandardException",
"com.pivotal.gemfirexd.internal.iapi.reference.Limits",
"com.pivotal.gemfirexd.internal.iapi.store.access.TransactionController",
"com.pivotal.gemfirexd.internal.iapi.types.DataTypeDescriptor",
"java.sql.Types",
"java.util.HashSet"
] |
import com.pivotal.gemfirexd.internal.catalog.TypeDescriptor; import com.pivotal.gemfirexd.internal.catalog.types.RoutineAliasInfo; import com.pivotal.gemfirexd.internal.iapi.error.StandardException; import com.pivotal.gemfirexd.internal.iapi.reference.Limits; import com.pivotal.gemfirexd.internal.iapi.store.access.TransactionController; import com.pivotal.gemfirexd.internal.iapi.types.DataTypeDescriptor; import java.sql.Types; import java.util.HashSet;
|
import com.pivotal.gemfirexd.internal.catalog.*; import com.pivotal.gemfirexd.internal.catalog.types.*; import com.pivotal.gemfirexd.internal.iapi.error.*; import com.pivotal.gemfirexd.internal.iapi.reference.*; import com.pivotal.gemfirexd.internal.iapi.store.access.*; import com.pivotal.gemfirexd.internal.iapi.types.*; import java.sql.*; import java.util.*;
|
[
"com.pivotal.gemfirexd",
"java.sql",
"java.util"
] |
com.pivotal.gemfirexd; java.sql; java.util;
| 1,057,455
|
private void analyzeUnlockTable(ASTNode ast)
throws SemanticException {
String tableName = getUnescapedName((ASTNode) ast.getChild(0));
List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
// We only can have a single partition spec
assert (partSpecs.size() <= 1);
Map<String, String> partSpec = null;
if (partSpecs.size() > 0) {
partSpec = partSpecs.get(0);
}
UnlockTableDesc unlockTblDesc = new UnlockTableDesc(tableName, partSpec);
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
unlockTblDesc), conf));
// Need to initialize the lock manager
ctx.setNeedLockMgr(true);
}
|
void function(ASTNode ast) throws SemanticException { String tableName = getUnescapedName((ASTNode) ast.getChild(0)); List<Map<String, String>> partSpecs = getPartitionSpecs(ast); assert (partSpecs.size() <= 1); Map<String, String> partSpec = null; if (partSpecs.size() > 0) { partSpec = partSpecs.get(0); } UnlockTableDesc unlockTblDesc = new UnlockTableDesc(tableName, partSpec); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), unlockTblDesc), conf)); ctx.setNeedLockMgr(true); }
|
/**
* Add the task according to the parsed command tree. This is used for the CLI
* command "UNLOCK TABLE ..;".
*
* @param ast
* The parsed command tree.
* @throws SemanticException
* Parsing failed
*/
|
Add the task according to the parsed command tree. This is used for the CLI command "UNLOCK TABLE ..;"
|
analyzeUnlockTable
|
{
"repo_name": "cschenyuan/hive-hack",
"path": "ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java",
"license": "apache-2.0",
"size": 137225
}
|
[
"java.util.List",
"java.util.Map",
"org.apache.hadoop.hive.ql.exec.TaskFactory",
"org.apache.hadoop.hive.ql.plan.DDLWork",
"org.apache.hadoop.hive.ql.plan.UnlockTableDesc"
] |
import java.util.List; import java.util.Map; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
|
import java.util.*; import org.apache.hadoop.hive.ql.exec.*; import org.apache.hadoop.hive.ql.plan.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 2,644,843
|
public static void writeChartAsPDF(OutputStream aOS, JFreeChart chart, int aWidth, int aHeight)
throws IOException
{
// Create an instance of the SVG Generator
PDFDocumentGraphics2D pdfGenerator = new PDFDocumentGraphics2D(true, aOS, aWidth, aHeight);
pdfGenerator.setDeviceDPI(PDFDocumentGraphics2D.NORMAL_PDF_RESOLUTION);
pdfGenerator.setGraphicContext(new GraphicContext());
pdfGenerator.setSVGDimension(aWidth, aHeight);
pdfGenerator.setClip(0, 0, aWidth, aHeight);
pdfGenerator.setRenderingHint(KEY_ANTIALIASING, VALUE_ANTIALIAS_ON);
pdfGenerator.setRenderingHint(KEY_INTERPOLATION, VALUE_INTERPOLATION_BILINEAR);
chart.setBackgroundPaint(Color.white);
chart.getPlot().setBackgroundPaint(Color.white);
// draw the chart in the SVG generator
chart.draw(pdfGenerator, new Rectangle(aWidth, aHeight));
pdfGenerator.finish();
}
|
static void function(OutputStream aOS, JFreeChart chart, int aWidth, int aHeight) throws IOException { PDFDocumentGraphics2D pdfGenerator = new PDFDocumentGraphics2D(true, aOS, aWidth, aHeight); pdfGenerator.setDeviceDPI(PDFDocumentGraphics2D.NORMAL_PDF_RESOLUTION); pdfGenerator.setGraphicContext(new GraphicContext()); pdfGenerator.setSVGDimension(aWidth, aHeight); pdfGenerator.setClip(0, 0, aWidth, aHeight); pdfGenerator.setRenderingHint(KEY_ANTIALIASING, VALUE_ANTIALIAS_ON); pdfGenerator.setRenderingHint(KEY_INTERPOLATION, VALUE_INTERPOLATION_BILINEAR); chart.setBackgroundPaint(Color.white); chart.getPlot().setBackgroundPaint(Color.white); chart.draw(pdfGenerator, new Rectangle(aWidth, aHeight)); pdfGenerator.finish(); }
|
/**
* Exports a JFreeChart to a scalable PDF file.
*
* @param chart JFreeChart to export
* @param aOS stream to write to.
* @param aWidth width of the chart in pixels
* @param aHeight height of the chart in pixels
* @throws IOException if writing the svgFile fails.
*/
|
Exports a JFreeChart to a scalable PDF file
|
writeChartAsPDF
|
{
"repo_name": "dkpro/dkpro-lab",
"path": "dkpro-lab-core/src/main/java/org/dkpro/lab/reporting/ChartUtil.java",
"license": "apache-2.0",
"size": 3829
}
|
[
"java.awt.Color",
"java.awt.Rectangle",
"java.io.IOException",
"java.io.OutputStream",
"org.apache.fop.svg.PDFDocumentGraphics2D",
"org.apache.xmlgraphics.java2d.GraphicContext",
"org.jfree.chart.JFreeChart"
] |
import java.awt.Color; import java.awt.Rectangle; import java.io.IOException; import java.io.OutputStream; import org.apache.fop.svg.PDFDocumentGraphics2D; import org.apache.xmlgraphics.java2d.GraphicContext; import org.jfree.chart.JFreeChart;
|
import java.awt.*; import java.io.*; import org.apache.fop.svg.*; import org.apache.xmlgraphics.java2d.*; import org.jfree.chart.*;
|
[
"java.awt",
"java.io",
"org.apache.fop",
"org.apache.xmlgraphics",
"org.jfree.chart"
] |
java.awt; java.io; org.apache.fop; org.apache.xmlgraphics; org.jfree.chart;
| 2,417,365
|
public static Kml unmarshal(final File file, final boolean validate) {
try {
Unmarshaller unmarshaller = JAXBContext.newInstance((Kml.class)).createUnmarshaller();
if (validate == true) {
Kml.validate(unmarshaller);
}
InputSource input = new InputSource(new FileReader(file));
SAXSource saxSource = new SAXSource(new NamespaceFilterXMLReader(validate), input);
Kml jaxbRootElement = ((Kml) unmarshaller.unmarshal(saxSource));
return jaxbRootElement;
} catch (SAXException _x) {
_x.printStackTrace();
} catch (ParserConfigurationException _x) {
_x.printStackTrace();
} catch (JAXBException _x) {
_x.printStackTrace();
} catch (FileNotFoundException _x) {
_x.printStackTrace();
}
return null;
}
|
static Kml function(final File file, final boolean validate) { try { Unmarshaller unmarshaller = JAXBContext.newInstance((Kml.class)).createUnmarshaller(); if (validate == true) { Kml.validate(unmarshaller); } InputSource input = new InputSource(new FileReader(file)); SAXSource saxSource = new SAXSource(new NamespaceFilterXMLReader(validate), input); Kml jaxbRootElement = ((Kml) unmarshaller.unmarshal(saxSource)); return jaxbRootElement; } catch (SAXException _x) { _x.printStackTrace(); } catch (ParserConfigurationException _x) { _x.printStackTrace(); } catch (JAXBException _x) { _x.printStackTrace(); } catch (FileNotFoundException _x) { _x.printStackTrace(); } return null; }
|
/**
* KML to Java
* KML given as a file object is transformed into a graph of Java objects.
* The boolean value indicates, whether the File object should be validated
* automatically during unmarshalling and be checked if the object graph meets
* all constraints defined in OGC's KML schema specification.
*
*/
|
KML to Java KML given as a file object is transformed into a graph of Java objects. The boolean value indicates, whether the File object should be validated automatically during unmarshalling and be checked if the object graph meets all constraints defined in OGC's KML schema specification
|
unmarshal
|
{
"repo_name": "micromata/javaapiforkml",
"path": "src/main/java/de/micromata/opengis/kml/v_2_2_0/Kml.java",
"license": "bsd-3-clause",
"size": 31924
}
|
[
"java.io.File",
"java.io.FileNotFoundException",
"java.io.FileReader",
"javax.xml.bind.JAXBContext",
"javax.xml.bind.JAXBException",
"javax.xml.bind.Unmarshaller",
"javax.xml.parsers.ParserConfigurationException",
"javax.xml.transform.sax.SAXSource",
"org.xml.sax.InputSource",
"org.xml.sax.SAXException"
] |
import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.sax.SAXSource; import org.xml.sax.InputSource; import org.xml.sax.SAXException;
|
import java.io.*; import javax.xml.bind.*; import javax.xml.parsers.*; import javax.xml.transform.sax.*; import org.xml.sax.*;
|
[
"java.io",
"javax.xml",
"org.xml.sax"
] |
java.io; javax.xml; org.xml.sax;
| 850,916
|
@Override
public void onClick(View v) {
mSnackbars.removeCurrentDueToAction();
updateView();
}
|
void function(View v) { mSnackbars.removeCurrentDueToAction(); updateView(); }
|
/**
* Handles click event for action button at end of snackbar.
*/
|
Handles click event for action button at end of snackbar
|
onClick
|
{
"repo_name": "endlessm/chromium-browser",
"path": "chrome/browser/ui/messages/android/java/src/org/chromium/chrome/browser/ui/messages/snackbar/SnackbarManager.java",
"license": "bsd-3-clause",
"size": 10388
}
|
[
"android.view.View"
] |
import android.view.View;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 22,228
|
void onUpstreamFormatChanged(Format format);
}
private static final int INITIAL_SCRATCH_SIZE = 32;
private final Allocator allocator;
private final int allocationLength;
private final SampleMetadataQueue metadataQueue;
private final SampleExtrasHolder extrasHolder;
private final ParsableByteArray scratch;
// References into the linked list of allocations.
private AllocationNode firstAllocationNode;
private AllocationNode readAllocationNode;
private AllocationNode writeAllocationNode;
// Accessed only by the consuming thread.
private Format downstreamFormat;
// Accessed only by the loading thread (or the consuming thread when there is no loading thread).
private boolean pendingFormatAdjustment;
private Format lastUnadjustedFormat;
private long sampleOffsetUs;
private long totalBytesWritten;
private boolean pendingSplice;
private UpstreamFormatChangedListener upstreamFormatChangeListener;
public SampleQueue(Allocator allocator) {
this.allocator = allocator;
allocationLength = allocator.getIndividualAllocationLength();
metadataQueue = new SampleMetadataQueue();
extrasHolder = new SampleExtrasHolder();
scratch = new ParsableByteArray(INITIAL_SCRATCH_SIZE);
firstAllocationNode = new AllocationNode(0, allocationLength);
readAllocationNode = firstAllocationNode;
writeAllocationNode = firstAllocationNode;
}
// Called by the consuming thread, but only when there is no loading thread.
|
void onUpstreamFormatChanged(Format format); } private static final int INITIAL_SCRATCH_SIZE = 32; private final Allocator allocator; private final int allocationLength; private final SampleMetadataQueue metadataQueue; private final SampleExtrasHolder extrasHolder; private final ParsableByteArray scratch; private AllocationNode firstAllocationNode; private AllocationNode readAllocationNode; private AllocationNode writeAllocationNode; private Format downstreamFormat; private boolean pendingFormatAdjustment; private Format lastUnadjustedFormat; private long sampleOffsetUs; private long totalBytesWritten; private boolean pendingSplice; private UpstreamFormatChangedListener upstreamFormatChangeListener; public SampleQueue(Allocator allocator) { this.allocator = allocator; allocationLength = allocator.getIndividualAllocationLength(); metadataQueue = new SampleMetadataQueue(); extrasHolder = new SampleExtrasHolder(); scratch = new ParsableByteArray(INITIAL_SCRATCH_SIZE); firstAllocationNode = new AllocationNode(0, allocationLength); readAllocationNode = firstAllocationNode; writeAllocationNode = firstAllocationNode; }
|
/**
* Called on the loading thread when an upstream format change occurs.
*
* @param format The new upstream format.
*/
|
Called on the loading thread when an upstream format change occurs
|
onUpstreamFormatChanged
|
{
"repo_name": "filipefilardi/Telegram",
"path": "TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/source/SampleQueue.java",
"license": "gpl-2.0",
"size": 26015
}
|
[
"org.telegram.messenger.exoplayer2.Format",
"org.telegram.messenger.exoplayer2.source.SampleMetadataQueue",
"org.telegram.messenger.exoplayer2.upstream.Allocator",
"org.telegram.messenger.exoplayer2.util.ParsableByteArray"
] |
import org.telegram.messenger.exoplayer2.Format; import org.telegram.messenger.exoplayer2.source.SampleMetadataQueue; import org.telegram.messenger.exoplayer2.upstream.Allocator; import org.telegram.messenger.exoplayer2.util.ParsableByteArray;
|
import org.telegram.messenger.exoplayer2.*; import org.telegram.messenger.exoplayer2.source.*; import org.telegram.messenger.exoplayer2.upstream.*; import org.telegram.messenger.exoplayer2.util.*;
|
[
"org.telegram.messenger"
] |
org.telegram.messenger;
| 1,437,703
|
public static Element image(
final SImage s)
{
NullCheck.notNull(s, "Image");
final Element e = new Element("s:image", SXML.XML_URI.toString());
final Attribute at =
new Attribute("s:source", SXML.XML_URI.toString(), s
.getURI()
.toString());
e.appendChild(s.getText());
e.addAttribute(at);
s.getType().map(new TypeAdder(e));
|
static Element function( final SImage s) { NullCheck.notNull(s, "Image"); final Element e = new Element(STR, SXML.XML_URI.toString()); final Attribute at = new Attribute(STR, SXML.XML_URI.toString(), s .getURI() .toString()); e.appendChild(s.getText()); e.addAttribute(at); s.getType().map(new TypeAdder(e));
|
/**
* Serialize the given element to XML.
*
* @param s
* The element
* @return An XML element
*/
|
Serialize the given element to XML
|
image
|
{
"repo_name": "io7m/jstructural",
"path": "io7m-jstructural-xom/src/main/java/com/io7m/jstructural/xom/SDocumentSerializer.java",
"license": "isc",
"size": 36816
}
|
[
"com.io7m.jnull.NullCheck",
"com.io7m.jstructural.core.SImage",
"nu.xom.Attribute",
"nu.xom.Element"
] |
import com.io7m.jnull.NullCheck; import com.io7m.jstructural.core.SImage; import nu.xom.Attribute; import nu.xom.Element;
|
import com.io7m.jnull.*; import com.io7m.jstructural.core.*; import nu.xom.*;
|
[
"com.io7m.jnull",
"com.io7m.jstructural",
"nu.xom"
] |
com.io7m.jnull; com.io7m.jstructural; nu.xom;
| 710,533
|
public Fixture getFixtureB() {
return m_fixtureB;
}
|
Fixture function() { return m_fixtureB; }
|
/**
* Get the second fixture in this contact.
*
* @return
*/
|
Get the second fixture in this contact
|
getFixtureB
|
{
"repo_name": "RedTriplane/RedTriplane",
"path": "r3-box2d-jf/src/org/jbox2d/f/dynamics/contacts/Contact.java",
"license": "unlicense",
"size": 10011
}
|
[
"org.jbox2d.f.dynamics.Fixture"
] |
import org.jbox2d.f.dynamics.Fixture;
|
import org.jbox2d.f.dynamics.*;
|
[
"org.jbox2d.f"
] |
org.jbox2d.f;
| 2,900,621
|
final BootstrapContext getBootstrapContext() {
return _bootstrapContext;
}
|
final BootstrapContext getBootstrapContext() { return _bootstrapContext; }
|
/**
* Returns the bootstrap context passed on <code>start</code>.
*
* @return the bootstrap context
*/
|
Returns the bootstrap context passed on <code>start</code>
|
getBootstrapContext
|
{
"repo_name": "OpenLiberty/open-liberty",
"path": "dev/com.ibm.ws.messaging.jms.2.0/src/com/ibm/ws/sib/ra/inbound/impl/SibRaResourceAdapterImpl.java",
"license": "epl-1.0",
"size": 17297
}
|
[
"javax.resource.spi.BootstrapContext"
] |
import javax.resource.spi.BootstrapContext;
|
import javax.resource.spi.*;
|
[
"javax.resource"
] |
javax.resource;
| 31,896
|
public static <T extends TableFactory> T find(
Class<T> factoryClass, Descriptor descriptor, ClassLoader classLoader) {
Preconditions.checkNotNull(descriptor);
Preconditions.checkNotNull(classLoader);
return findSingleInternal(
factoryClass, descriptor.toProperties(), Optional.of(classLoader));
}
|
static <T extends TableFactory> T function( Class<T> factoryClass, Descriptor descriptor, ClassLoader classLoader) { Preconditions.checkNotNull(descriptor); Preconditions.checkNotNull(classLoader); return findSingleInternal( factoryClass, descriptor.toProperties(), Optional.of(classLoader)); }
|
/**
* Finds a table factory of the given class, descriptor, and classloader.
*
* @param factoryClass desired factory class
* @param descriptor descriptor describing the factory configuration
* @param classLoader classloader for service loading
* @param <T> factory class type
* @return the matching factory
*/
|
Finds a table factory of the given class, descriptor, and classloader
|
find
|
{
"repo_name": "StephanEwen/incubator-flink",
"path": "flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java",
"license": "apache-2.0",
"size": 20338
}
|
[
"java.util.Optional",
"org.apache.flink.table.descriptors.Descriptor",
"org.apache.flink.util.Preconditions"
] |
import java.util.Optional; import org.apache.flink.table.descriptors.Descriptor; import org.apache.flink.util.Preconditions;
|
import java.util.*; import org.apache.flink.table.descriptors.*; import org.apache.flink.util.*;
|
[
"java.util",
"org.apache.flink"
] |
java.util; org.apache.flink;
| 2,191,998
|
//TODO @Test
public void JwtConsumerApiConfigTests_SigAlgPS384_trustAliasMisMatchES() throws Exception {
String consumerId = JwtConsumerConstants.SIGALG_PS384 + "_trustedAliasES";
String jwtToken = actions.getJwtTokenUsingBuilder(_testName, consumerServer, "sigAlg_PS384", null);
Expectations expectations = consumerHelpers.buildNegativeAttributeExpectations(JwtConsumerMessageConstants.CWWKS6029E_NO_SIGNING_KEY, currentAction, consumerServer, consumerId);
Page response = actions.invokeJwtConsumer(_testName, consumerServer, consumerId, jwtToken);
validationUtils.validateResult(response, currentAction, expectations);
}
|
String consumerId = JwtConsumerConstants.SIGALG_PS384 + STR; String jwtToken = actions.getJwtTokenUsingBuilder(_testName, consumerServer, STR, null); Expectations expectations = consumerHelpers.buildNegativeAttributeExpectations(JwtConsumerMessageConstants.CWWKS6029E_NO_SIGNING_KEY, currentAction, consumerServer, consumerId); Page response = actions.invokeJwtConsumer(_testName, consumerServer, consumerId, jwtToken); validationUtils.validateResult(response, currentAction, expectations); }
|
/**
* server.xml has a config that specifies a signature algorithm of PS384, but a trustAlias referring to an ES384 public key.
* This test ensures that after building a jwt using the PS384 private key, we can NOT use a consumer with a mismatch between
* its sigAlg and public key
*
* @throws Exception
*/
|
server.xml has a config that specifies a signature algorithm of PS384, but a trustAlias referring to an ES384 public key. This test ensures that after building a jwt using the PS384 private key, we can NOT use a consumer with a mismatch between its sigAlg and public key
|
JwtConsumerApiConfigTests_SigAlgPS384_trustAliasMisMatchES
|
{
"repo_name": "OpenLiberty/open-liberty",
"path": "dev/com.ibm.ws.security.jwt_fat.consumer/fat/src/com/ibm/ws/security/jwt/fat/consumer/JwtConsumerApiConfigTests.java",
"license": "epl-1.0",
"size": 109831
}
|
[
"com.gargoylesoftware.htmlunit.Page",
"com.ibm.ws.security.fat.common.expectations.Expectations",
"com.ibm.ws.security.jwt.fat.consumer.utils.JwtConsumerMessageConstants"
] |
import com.gargoylesoftware.htmlunit.Page; import com.ibm.ws.security.fat.common.expectations.Expectations; import com.ibm.ws.security.jwt.fat.consumer.utils.JwtConsumerMessageConstants;
|
import com.gargoylesoftware.htmlunit.*; import com.ibm.ws.security.fat.common.expectations.*; import com.ibm.ws.security.jwt.fat.consumer.utils.*;
|
[
"com.gargoylesoftware.htmlunit",
"com.ibm.ws"
] |
com.gargoylesoftware.htmlunit; com.ibm.ws;
| 663,261
|
@ApiModelProperty(example = "null", required = true, value = "Width of the watermark (in Pts, 1/72 of an inch)")
public String getWidth() {
return width;
}
|
@ApiModelProperty(example = "null", required = true, value = STR) String function() { return width; }
|
/**
* Width of the watermark (in Pts, 1/72 of an inch)
* @return width
**/
|
Width of the watermark (in Pts, 1/72 of an inch)
|
getWidth
|
{
"repo_name": "Muhimbi/PDF-Converter-Services-Online",
"path": "clients/v1/java/client/src/main/java/com/muhimbi/online/client/model/RectangleWatermarkData.java",
"license": "apache-2.0",
"size": 20701
}
|
[
"io.swagger.annotations.ApiModelProperty"
] |
import io.swagger.annotations.ApiModelProperty;
|
import io.swagger.annotations.*;
|
[
"io.swagger.annotations"
] |
io.swagger.annotations;
| 2,460,506
|
ImmutableMap<Action, SkyframeActionExecutor.ConflictException> findArtifactConflicts()
throws InterruptedException {
if (skyframeBuildView.isSomeConfiguredTargetEvaluated()
|| skyframeBuildView.isSomeConfiguredTargetInvalidated()) {
// This operation is somewhat expensive, so we only do it if the graph might have changed in
// some way -- either we analyzed a new target or we invalidated an old one.
try (AutoProfiler p = AutoProfiler.logged("discovering artifact conflicts", LOG)) {
skyframeActionExecutor.findAndStoreArtifactConflicts(getActionLookupValues());
skyframeBuildView.resetEvaluatedConfiguredTargetFlag();
// The invalidated configured targets flag will be reset later in the evaluate() call.
}
}
return skyframeActionExecutor.badActions();
}
|
ImmutableMap<Action, SkyframeActionExecutor.ConflictException> findArtifactConflicts() throws InterruptedException { if (skyframeBuildView.isSomeConfiguredTargetEvaluated() skyframeBuildView.isSomeConfiguredTargetInvalidated()) { try (AutoProfiler p = AutoProfiler.logged(STR, LOG)) { skyframeActionExecutor.findAndStoreArtifactConflicts(getActionLookupValues()); skyframeBuildView.resetEvaluatedConfiguredTargetFlag(); } } return skyframeActionExecutor.badActions(); }
|
/**
* Checks the actions in Skyframe for conflicts between their output artifacts. Delegates to
* {@link SkyframeActionExecutor#findAndStoreArtifactConflicts} to do the work, since any
* conflicts found will only be reported during execution.
*/
|
Checks the actions in Skyframe for conflicts between their output artifacts. Delegates to <code>SkyframeActionExecutor#findAndStoreArtifactConflicts</code> to do the work, since any conflicts found will only be reported during execution
|
findArtifactConflicts
|
{
"repo_name": "joshua0pang/bazel",
"path": "src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java",
"license": "apache-2.0",
"size": 76718
}
|
[
"com.google.common.collect.ImmutableMap",
"com.google.devtools.build.lib.actions.Action",
"com.google.devtools.build.lib.profiler.AutoProfiler"
] |
import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.profiler.AutoProfiler;
|
import com.google.common.collect.*; import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.profiler.*;
|
[
"com.google.common",
"com.google.devtools"
] |
com.google.common; com.google.devtools;
| 1,966,606
|
private static ImmutableSetMultimap<String, PBXTarget> mapFromSchemeToPBXProject(
ImmutableSetMultimap<String, ? extends TargetNode<?>> schemeToTargetNodes,
ImmutableMap<BuildTarget, PBXTarget> buildTargetToPBXTarget) {
ImmutableSetMultimap<String, PBXTarget> schemeToPBXProject =
ImmutableSetMultimap.copyOf(
schemeToTargetNodes.entries().stream()
.map(
stringTargetNodeEntry ->
Maps.immutableEntry(
stringTargetNodeEntry.getKey(),
buildTargetToPBXTarget.get(
stringTargetNodeEntry.getValue().getBuildTarget())))
.filter(
stringPBXTargetEntry -> {
return stringPBXTargetEntry.getValue() != null;
})
.collect(Collectors.toList()));
return schemeToPBXProject;
}
|
static ImmutableSetMultimap<String, PBXTarget> function( ImmutableSetMultimap<String, ? extends TargetNode<?>> schemeToTargetNodes, ImmutableMap<BuildTarget, PBXTarget> buildTargetToPBXTarget) { ImmutableSetMultimap<String, PBXTarget> schemeToPBXProject = ImmutableSetMultimap.copyOf( schemeToTargetNodes.entries().stream() .map( stringTargetNodeEntry -> Maps.immutableEntry( stringTargetNodeEntry.getKey(), buildTargetToPBXTarget.get( stringTargetNodeEntry.getValue().getBuildTarget()))) .filter( stringPBXTargetEntry -> { return stringPBXTargetEntry.getValue() != null; }) .collect(Collectors.toList())); return schemeToPBXProject; }
|
/**
* Transform a map from scheme name to `TargetNode` to scheme name to the associated `PBXProject`.
*
* @param schemeToTargetNodes Map to transform.
* @return Map of scheme name to associated `PXBProject`s.
*/
|
Transform a map from scheme name to `TargetNode` to scheme name to the associated `PBXProject`
|
mapFromSchemeToPBXProject
|
{
"repo_name": "nguyentruongtho/buck",
"path": "src/com/facebook/buck/features/apple/project/WorkspaceAndProjectGenerator.java",
"license": "apache-2.0",
"size": 57021
}
|
[
"com.facebook.buck.apple.xcode.xcodeproj.PBXTarget",
"com.facebook.buck.core.model.BuildTarget",
"com.facebook.buck.core.model.targetgraph.TargetNode",
"com.google.common.collect.ImmutableMap",
"com.google.common.collect.ImmutableSetMultimap",
"com.google.common.collect.Maps",
"java.util.stream.Collectors"
] |
import com.facebook.buck.apple.xcode.xcodeproj.PBXTarget; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.model.targetgraph.TargetNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.Maps; import java.util.stream.Collectors;
|
import com.facebook.buck.apple.xcode.xcodeproj.*; import com.facebook.buck.core.model.*; import com.facebook.buck.core.model.targetgraph.*; import com.google.common.collect.*; import java.util.stream.*;
|
[
"com.facebook.buck",
"com.google.common",
"java.util"
] |
com.facebook.buck; com.google.common; java.util;
| 2,565,008
|
public WikiStyleRenderer getWikiStyleRenderer() {
if (wikiStyleRenderer != null) {
return wikiStyleRenderer;
}
wikiStyleRenderer = (WikiStyleRenderer) ContainerManager.getComponent("wikiStyleRenderer");
return wikiStyleRenderer;
}
|
WikiStyleRenderer function() { if (wikiStyleRenderer != null) { return wikiStyleRenderer; } wikiStyleRenderer = (WikiStyleRenderer) ContainerManager.getComponent(STR); return wikiStyleRenderer; }
|
/**
* <p>Getter for the field <code>wikiStyleRenderer</code>.</p>
*
* @return a {@link com.atlassian.renderer.WikiStyleRenderer} object.
*/
|
Getter for the field <code>wikiStyleRenderer</code>
|
getWikiStyleRenderer
|
{
"repo_name": "strator-dev/greenpepper-open",
"path": "confluence/greenpepper-confluence-code/src/main/java/com/greenpepper/confluence/velocity/ConfluenceGreenPepper.java",
"license": "apache-2.0",
"size": 48805
}
|
[
"com.atlassian.renderer.WikiStyleRenderer",
"com.atlassian.spring.container.ContainerManager"
] |
import com.atlassian.renderer.WikiStyleRenderer; import com.atlassian.spring.container.ContainerManager;
|
import com.atlassian.renderer.*; import com.atlassian.spring.container.*;
|
[
"com.atlassian.renderer",
"com.atlassian.spring"
] |
com.atlassian.renderer; com.atlassian.spring;
| 2,247,656
|
public Set<T> resolve(Matcher<T> matcher) throws IOException {
Set<T> classes = new HashSet<T>();
for (URL url : paths) {
if (url.toString().endsWith(".jar")) {
// System.out.println(url);
JarFile jarFile = new JarFile(URLDecoder.decode(url.getPath(), "UTF-8"));
Manifest manifest = jarFile.getManifest();
if (manifest != null) {
// System.out.println(manifest);
Attributes mainAttributes = manifest.getMainAttributes();
if (mainAttributes != null) {
// System.out.println(mainAttributes);
String classpath = mainAttributes.getValue(Name.CLASS_PATH);
if (classpath != null) {
// System.out.println(classpath);
StringTokenizer stringTokenizer = new StringTokenizer(classpath);
while (stringTokenizer.hasMoreTokens()) {
String token = stringTokenizer.nextToken();
URL entry = new URL(url, token);
if (entry.toString().endsWith("/")) {
// System.out.println(entry);
classes.addAll(getMatchedClasses(matcher, new File(URLDecoder.decode(entry.getPath(), "UTF-8"))));
} else {
// System.out.println(entry);
classes.addAll(getMatchedClasses(matcher, new JarFile(URLDecoder.decode(entry.getPath(), "UTF-8"))));
}
}
}
}
}
classes.addAll(getMatchedClasses(matcher, jarFile));
} else {
File base = new File(URLDecoder.decode(url.getPath(), "UTF-8"));
classes.addAll(getMatchedClasses(matcher, base));
}
}
return classes;
}
|
Set<T> function(Matcher<T> matcher) throws IOException { Set<T> classes = new HashSet<T>(); for (URL url : paths) { if (url.toString().endsWith(".jar")) { JarFile jarFile = new JarFile(URLDecoder.decode(url.getPath(), "UTF-8")); Manifest manifest = jarFile.getManifest(); if (manifest != null) { Attributes mainAttributes = manifest.getMainAttributes(); if (mainAttributes != null) { String classpath = mainAttributes.getValue(Name.CLASS_PATH); if (classpath != null) { StringTokenizer stringTokenizer = new StringTokenizer(classpath); while (stringTokenizer.hasMoreTokens()) { String token = stringTokenizer.nextToken(); URL entry = new URL(url, token); if (entry.toString().endsWith("/")) { classes.addAll(getMatchedClasses(matcher, new File(URLDecoder.decode(entry.getPath(), "UTF-8")))); } else { classes.addAll(getMatchedClasses(matcher, new JarFile(URLDecoder.decode(entry.getPath(), "UTF-8")))); } } } } } classes.addAll(getMatchedClasses(matcher, jarFile)); } else { File base = new File(URLDecoder.decode(url.getPath(), "UTF-8")); classes.addAll(getMatchedClasses(matcher, base)); } } return classes; }
|
/**
* Resolves the {@code Class}es that matches to the specified {@code Matcher}.
*
* @param matcher {@code Matcher}.
* @return {@code Class}es that matches to the specified {@code Matcher}.
* @throws IOException If any I/O access fails while traversing the search
* path.
*/
|
Resolves the Classes that matches to the specified Matcher
|
resolve
|
{
"repo_name": "eiichiro/reverb",
"path": "src/main/java/org/eiichiro/reverb/lang/ClassResolver.java",
"license": "apache-2.0",
"size": 8877
}
|
[
"java.io.File",
"java.io.IOException",
"java.net.URLDecoder",
"java.util.HashSet",
"java.util.Set",
"java.util.StringTokenizer",
"java.util.jar.Attributes",
"java.util.jar.JarFile",
"java.util.jar.Manifest"
] |
import java.io.File; import java.io.IOException; import java.net.URLDecoder; import java.util.HashSet; import java.util.Set; import java.util.StringTokenizer; import java.util.jar.Attributes; import java.util.jar.JarFile; import java.util.jar.Manifest;
|
import java.io.*; import java.net.*; import java.util.*; import java.util.jar.*;
|
[
"java.io",
"java.net",
"java.util"
] |
java.io; java.net; java.util;
| 1,196,491
|
private static void addClasspathEntries(final Object owner, final ClassLoader classLoader,
final ClasspathOrder classpathOrderOut, final ScanSpec scanSpec, final LogNode log) {
// type ClasspathEntry[]
final Object entries = ReflectionUtils.getFieldVal(owner, "entries", false);
if (entries != null) {
for (int i = 0, n = Array.getLength(entries); i < n; i++) {
// type ClasspathEntry
final Object entry = Array.get(entries, i);
// type BundleFile
final Object bundlefile = ReflectionUtils.getFieldVal(entry, "bundlefile", false);
addBundleFile(bundlefile, new HashSet<>(), classLoader, classpathOrderOut, scanSpec, log);
}
}
}
|
static void function(final Object owner, final ClassLoader classLoader, final ClasspathOrder classpathOrderOut, final ScanSpec scanSpec, final LogNode log) { final Object entries = ReflectionUtils.getFieldVal(owner, STR, false); if (entries != null) { for (int i = 0, n = Array.getLength(entries); i < n; i++) { final Object entry = Array.get(entries, i); final Object bundlefile = ReflectionUtils.getFieldVal(entry, STR, false); addBundleFile(bundlefile, new HashSet<>(), classLoader, classpathOrderOut, scanSpec, log); } } }
|
/**
* Adds the classpath entries.
*
* @param owner
* the owner
* @param classLoader
* the class loader
* @param classpathOrderOut
* the classpath order out
* @param scanSpec
* the scan spec
* @param log
* the log
*/
|
Adds the classpath entries
|
addClasspathEntries
|
{
"repo_name": "lukehutch/fast-classpath-scanner",
"path": "src/main/java/nonapi/io/github/classgraph/classloaderhandler/EquinoxClassLoaderHandler.java",
"license": "mit",
"size": 11288
}
|
[
"java.lang.reflect.Array",
"java.util.HashSet"
] |
import java.lang.reflect.Array; import java.util.HashSet;
|
import java.lang.reflect.*; import java.util.*;
|
[
"java.lang",
"java.util"
] |
java.lang; java.util;
| 1,186,364
|
protected void playerWantsToGiveItems() {
final ConversationStates[] states = new ConversationStates[] {ConversationStates.ATTENDING, ConversationStates.QUESTION_1};
concreteQuest.getNPC().add(states,
ConversationPhrases.YES_MESSAGES,
new QuestActiveCondition(concreteQuest.getSlotName()),
ConversationStates.QUESTION_1, concreteQuest.askForItemsAfterPlayerSaidHeHasItems(),
null);
}
|
void function() { final ConversationStates[] states = new ConversationStates[] {ConversationStates.ATTENDING, ConversationStates.QUESTION_1}; concreteQuest.getNPC().add(states, ConversationPhrases.YES_MESSAGES, new QuestActiveCondition(concreteQuest.getSlotName()), ConversationStates.QUESTION_1, concreteQuest.askForItemsAfterPlayerSaidHeHasItems(), null); }
|
/**
* Player says he has a required item with him.
*/
|
Player says he has a required item with him
|
playerWantsToGiveItems
|
{
"repo_name": "sourceress-project/archestica",
"path": "src/games/stendhal/server/maps/quests/logic/BringListOfItemsQuestLogic.java",
"license": "gpl-2.0",
"size": 12617
}
|
[
"games.stendhal.server.entity.npc.ConversationPhrases",
"games.stendhal.server.entity.npc.ConversationStates",
"games.stendhal.server.entity.npc.condition.QuestActiveCondition"
] |
import games.stendhal.server.entity.npc.ConversationPhrases; import games.stendhal.server.entity.npc.ConversationStates; import games.stendhal.server.entity.npc.condition.QuestActiveCondition;
|
import games.stendhal.server.entity.npc.*; import games.stendhal.server.entity.npc.condition.*;
|
[
"games.stendhal.server"
] |
games.stendhal.server;
| 2,516,422
|
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
try {
processRequest(request, response);
} catch (CerberusException ex) {
LOG.warn(ex);
}
}
|
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { processRequest(request, response); } catch (CerberusException ex) { LOG.warn(ex); } }
|
/**
* Handles the HTTP <code>POST</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
|
Handles the HTTP <code>POST</code> method
|
doPost
|
{
"repo_name": "vertigo17/Cerberus",
"path": "source/src/main/java/org/cerberus/servlet/crud/countryenvironment/ReadCountryEnvDeployType.java",
"license": "gpl-3.0",
"size": 9127
}
|
[
"java.io.IOException",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"org.cerberus.exception.CerberusException"
] |
import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.cerberus.exception.CerberusException;
|
import java.io.*; import javax.servlet.*; import javax.servlet.http.*; import org.cerberus.exception.*;
|
[
"java.io",
"javax.servlet",
"org.cerberus.exception"
] |
java.io; javax.servlet; org.cerberus.exception;
| 121,279
|
public String userReference(String id)
{
// clean up the id
id = cleanId(id);
return getAccessPoint(true) + Entity.SEPARATOR + ((id == null) ? "" : id);
}
|
String function(String id) { id = cleanId(id); return getAccessPoint(true) + Entity.SEPARATOR + ((id == null) ? "" : id); }
|
/**
* Access the internal reference which can be used to access the resource from within the system.
*
* @param id
* The user id string.
* @return The the internal reference which can be used to access the resource from within the system.
*/
|
Access the internal reference which can be used to access the resource from within the system
|
userReference
|
{
"repo_name": "OpenCollabZA/sakai",
"path": "kernel/kernel-impl/src/main/java/org/sakaiproject/user/impl/BaseUserDirectoryService.java",
"license": "apache-2.0",
"size": 83614
}
|
[
"org.sakaiproject.entity.api.Entity"
] |
import org.sakaiproject.entity.api.Entity;
|
import org.sakaiproject.entity.api.*;
|
[
"org.sakaiproject.entity"
] |
org.sakaiproject.entity;
| 2,431,498
|
@Override
protected Collection<? extends ProcessModel> extractModelsFromDirectory(ModelDirectory directory) {
ProcessModel scenario = wb.getSelectedModel();
if (!(scenario instanceof PCMScenario)) {
System.err.println("Model has to be a PCM Scenario");
return new HashSet<>();
}
Collection<PCMFragment> fragments = new HashSet<>();
for (ProcessNode fragment : scenario.getNodesByClass(PCMFragmentNode.class)) {
fragmentIDs.add(fragment.getProperty(PCMFragmentNode.PROP_FRAGMENT_MID));
}
for (ModelDirectoryEntry modelDirectoryEntry : directory.getEntries()) {
if (modelDirectoryEntry instanceof ModelDirectory) {
fragments.addAll((Collection<? extends PCMFragment>)
extractModelsFromDirectory((ModelDirectory) modelDirectoryEntry));
} else if (modelDirectoryEntry instanceof ModelDescription) {
try {
ProcessModel model = ((ModelDescription) modelDirectoryEntry).getHead().getProcessModel();
if (model instanceof PCMFragment && fragmentIDs.contains(model.getId())) {
fragments.add((PCMFragment) model);
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
return fragments;
}
|
Collection<? extends ProcessModel> function(ModelDirectory directory) { ProcessModel scenario = wb.getSelectedModel(); if (!(scenario instanceof PCMScenario)) { System.err.println(STR); return new HashSet<>(); } Collection<PCMFragment> fragments = new HashSet<>(); for (ProcessNode fragment : scenario.getNodesByClass(PCMFragmentNode.class)) { fragmentIDs.add(fragment.getProperty(PCMFragmentNode.PROP_FRAGMENT_MID)); } for (ModelDirectoryEntry modelDirectoryEntry : directory.getEntries()) { if (modelDirectoryEntry instanceof ModelDirectory) { fragments.addAll((Collection<? extends PCMFragment>) extractModelsFromDirectory((ModelDirectory) modelDirectoryEntry)); } else if (modelDirectoryEntry instanceof ModelDescription) { try { ProcessModel model = ((ModelDescription) modelDirectoryEntry).getHead().getProcessModel(); if (model instanceof PCMFragment && fragmentIDs.contains(model.getId())) { fragments.add((PCMFragment) model); } } catch (Exception e) { e.printStackTrace(); } } } return fragments; }
|
/**
* Extracts all PCMFragments from the remote repository whichs id is
* a inside the fragmentIDs collection.
* @param directory The directory to check.
* @return The List of ProcessModels which has been extracted.
*/
|
Extracts all PCMFragments from the remote repository whichs id is a inside the fragmentIDs collection
|
extractModelsFromDirectory
|
{
"repo_name": "bptlab/processeditor",
"path": "src/de/uni_potsdam/hpi/bpt/bp2014/jeditor/plugins/pcm/generation/GenerateOLCsFromScenario.java",
"license": "apache-2.0",
"size": 9798
}
|
[
"com.inubit.research.client.ModelDescription",
"com.inubit.research.client.ModelDirectory",
"com.inubit.research.client.ModelDirectoryEntry",
"de.uni_potsdam.hpi.bpt.bp2014.jeditor.visualization.pcm.PCMFragment",
"de.uni_potsdam.hpi.bpt.bp2014.jeditor.visualization.pcm.PCMFragmentNode",
"de.uni_potsdam.hpi.bpt.bp2014.jeditor.visualization.pcm.PCMScenario",
"java.util.Collection",
"java.util.HashSet",
"net.frapu.code.visualization.ProcessModel",
"net.frapu.code.visualization.ProcessNode"
] |
import com.inubit.research.client.ModelDescription; import com.inubit.research.client.ModelDirectory; import com.inubit.research.client.ModelDirectoryEntry; import de.uni_potsdam.hpi.bpt.bp2014.jeditor.visualization.pcm.PCMFragment; import de.uni_potsdam.hpi.bpt.bp2014.jeditor.visualization.pcm.PCMFragmentNode; import de.uni_potsdam.hpi.bpt.bp2014.jeditor.visualization.pcm.PCMScenario; import java.util.Collection; import java.util.HashSet; import net.frapu.code.visualization.ProcessModel; import net.frapu.code.visualization.ProcessNode;
|
import com.inubit.research.client.*; import de.uni_potsdam.hpi.bpt.bp2014.jeditor.visualization.pcm.*; import java.util.*; import net.frapu.code.visualization.*;
|
[
"com.inubit.research",
"de.uni_potsdam.hpi",
"java.util",
"net.frapu.code"
] |
com.inubit.research; de.uni_potsdam.hpi; java.util; net.frapu.code;
| 92,736
|
@SuppressWarnings("unchecked")
public static <T extends Number> T convertNumberToTargetClass(Number number, Class<T> targetClass)
throws IllegalArgumentException {
Assert.notNull(number, "Number must not be null");
Assert.notNull(targetClass, "Target class must not be null");
if (targetClass.isInstance(number)) {
return (T) number;
}
else if (targetClass.equals(Byte.class)) {
long value = number.longValue();
if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
raiseOverflowException(number, targetClass);
}
return (T) new Byte(number.byteValue());
}
else if (targetClass.equals(Short.class)) {
long value = number.longValue();
if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
raiseOverflowException(number, targetClass);
}
return (T) new Short(number.shortValue());
}
else if (targetClass.equals(Integer.class)) {
long value = number.longValue();
if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) {
raiseOverflowException(number, targetClass);
}
return (T) new Integer(number.intValue());
}
else if (targetClass.equals(Long.class)) {
return (T) new Long(number.longValue());
}
else if (targetClass.equals(BigInteger.class)) {
if (number instanceof BigDecimal) {
// do not lose precision - use BigDecimal's own conversion
return (T) ((BigDecimal) number).toBigInteger();
}
else {
// original value is not a Big* number - use standard long conversion
return (T) BigInteger.valueOf(number.longValue());
}
}
else if (targetClass.equals(Float.class)) {
return (T) new Float(number.floatValue());
}
else if (targetClass.equals(Double.class)) {
return (T) new Double(number.doubleValue());
}
else if (targetClass.equals(BigDecimal.class)) {
// always use BigDecimal(String) here to avoid unpredictability of BigDecimal(double)
// (see BigDecimal javadoc for details)
return (T) new BigDecimal(number.toString());
}
else {
throw new IllegalArgumentException("Could not convert number [" + number + "] of type [" +
number.getClass().getName() + "] to unknown target class [" + targetClass.getName() + "]");
}
}
|
@SuppressWarnings(STR) static <T extends Number> T function(Number number, Class<T> targetClass) throws IllegalArgumentException { Assert.notNull(number, STR); Assert.notNull(targetClass, STR); if (targetClass.isInstance(number)) { return (T) number; } else if (targetClass.equals(Byte.class)) { long value = number.longValue(); if (value < Byte.MIN_VALUE value > Byte.MAX_VALUE) { raiseOverflowException(number, targetClass); } return (T) new Byte(number.byteValue()); } else if (targetClass.equals(Short.class)) { long value = number.longValue(); if (value < Short.MIN_VALUE value > Short.MAX_VALUE) { raiseOverflowException(number, targetClass); } return (T) new Short(number.shortValue()); } else if (targetClass.equals(Integer.class)) { long value = number.longValue(); if (value < Integer.MIN_VALUE value > Integer.MAX_VALUE) { raiseOverflowException(number, targetClass); } return (T) new Integer(number.intValue()); } else if (targetClass.equals(Long.class)) { return (T) new Long(number.longValue()); } else if (targetClass.equals(BigInteger.class)) { if (number instanceof BigDecimal) { return (T) ((BigDecimal) number).toBigInteger(); } else { return (T) BigInteger.valueOf(number.longValue()); } } else if (targetClass.equals(Float.class)) { return (T) new Float(number.floatValue()); } else if (targetClass.equals(Double.class)) { return (T) new Double(number.doubleValue()); } else if (targetClass.equals(BigDecimal.class)) { return (T) new BigDecimal(number.toString()); } else { throw new IllegalArgumentException(STR + number + STR + number.getClass().getName() + STR + targetClass.getName() + "]"); } }
|
/**
* Convert the given number into an instance of the given target class.
* @param number the number to convert
* @param targetClass the target class to convert to
* @return the converted number
* @throws IllegalArgumentException if the target class is not supported
* (i.e. not a standard Number subclass as included in the JDK)
* @see Byte
* @see Short
* @see Integer
* @see Long
* @see BigInteger
* @see Float
* @see Double
* @see BigDecimal
*/
|
Convert the given number into an instance of the given target class
|
convertNumberToTargetClass
|
{
"repo_name": "aoenang/seny-devpkg",
"path": "devpkg-android/src/main/java/org/springframework/util/NumberUtils.java",
"license": "apache-2.0",
"size": 9430
}
|
[
"java.math.BigDecimal",
"java.math.BigInteger"
] |
import java.math.BigDecimal; import java.math.BigInteger;
|
import java.math.*;
|
[
"java.math"
] |
java.math;
| 1,040,573
|
protected void addValuePropertyDescriptor(Object object)
{
itemPropertyDescriptors.add(createItemPropertyDescriptor(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(),
getString("_UI_EclipseIniTask_value_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_EclipseIniTask_value_feature", "_UI_EclipseIniTask_type"),
SetupPackage.Literals.ECLIPSE_INI_TASK__VALUE, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null));
}
|
void function(Object object) { itemPropertyDescriptors.add(createItemPropertyDescriptor(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), SetupPackage.Literals.ECLIPSE_INI_TASK__VALUE, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); }
|
/**
* This adds a property descriptor for the Value feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This adds a property descriptor for the Value feature.
|
addValuePropertyDescriptor
|
{
"repo_name": "peterkir/org.eclipse.oomph",
"path": "plugins/org.eclipse.oomph.setup.edit/src/org/eclipse/oomph/setup/provider/EclipseIniTaskItemProvider.java",
"license": "epl-1.0",
"size": 6069
}
|
[
"org.eclipse.emf.edit.provider.ComposeableAdapterFactory",
"org.eclipse.emf.edit.provider.ItemPropertyDescriptor",
"org.eclipse.oomph.setup.SetupPackage"
] |
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor; import org.eclipse.oomph.setup.SetupPackage;
|
import org.eclipse.emf.edit.provider.*; import org.eclipse.oomph.setup.*;
|
[
"org.eclipse.emf",
"org.eclipse.oomph"
] |
org.eclipse.emf; org.eclipse.oomph;
| 1,202,010
|
public void setProfileService(ProfilesManagerRemoteServiceAsync profilesManagerServiceRemote)
{
this.profilesService = profilesManagerServiceRemote;
}
|
void function(ProfilesManagerRemoteServiceAsync profilesManagerServiceRemote) { this.profilesService = profilesManagerServiceRemote; }
|
/**
* Sets the profile service.
*
* @param profilesManagerServiceRemote
* the new profile service
*/
|
Sets the profile service
|
setProfileService
|
{
"repo_name": "andrefilo/geofence",
"path": "src/gui/core/plugin/userui/src/main/java/org/geoserver/geofence/gui/client/widget/EditRuleWidget.java",
"license": "gpl-2.0",
"size": 72949
}
|
[
"org.geoserver.geofence.gui.client.service.ProfilesManagerRemoteServiceAsync"
] |
import org.geoserver.geofence.gui.client.service.ProfilesManagerRemoteServiceAsync;
|
import org.geoserver.geofence.gui.client.service.*;
|
[
"org.geoserver.geofence"
] |
org.geoserver.geofence;
| 1,784,066
|
public void showTargetFeedback(Request request) {
if (REQ_CONNECTION_START.equals(request.getType())
|| REQ_CONNECTION_END.equals(request.getType())
|| REQ_RECONNECT_SOURCE.equals(request.getType())
|| REQ_RECONNECT_TARGET.equals(request.getType()))
showTargetConnectionFeedback((DropRequest) request);
}
|
void function(Request request) { if (REQ_CONNECTION_START.equals(request.getType()) REQ_CONNECTION_END.equals(request.getType()) REQ_RECONNECT_SOURCE.equals(request.getType()) REQ_RECONNECT_TARGET.equals(request.getType())) showTargetConnectionFeedback((DropRequest) request); }
|
/**
* Calls {@link #showTargetConnectionFeedback(DropRequest)} when
* appropriate.
*
* @see org.eclipse.gef.EditPolicy#showTargetFeedback(Request)
*/
|
Calls <code>#showTargetConnectionFeedback(DropRequest)</code> when appropriate
|
showTargetFeedback
|
{
"repo_name": "opensagres/xdocreport.eclipse",
"path": "rap/org.eclipse.gef/src/org/eclipse/gef/editpolicies/GraphicalNodeEditPolicy.java",
"license": "lgpl-2.1",
"size": 10380
}
|
[
"org.eclipse.gef.Request",
"org.eclipse.gef.requests.DropRequest"
] |
import org.eclipse.gef.Request; import org.eclipse.gef.requests.DropRequest;
|
import org.eclipse.gef.*; import org.eclipse.gef.requests.*;
|
[
"org.eclipse.gef"
] |
org.eclipse.gef;
| 1,837,108
|
public void setETag(String eTag) {
    // A null value is stored as-is (clears the header) without validation.
    if (eTag != null) {
        // ETags must be quoted; weak validators carry a W/ prefix (RFC 7232).
        boolean validPrefix = eTag.startsWith("\"") || eTag.startsWith("W/");
        Assert.isTrue(validPrefix, "Invalid eTag, does not start with W/ or \"");
        Assert.isTrue(eTag.endsWith("\""), "Invalid eTag, does not end with \"");
    }
    set(ETAG, eTag);
}
|
void function(String eTag) { if (eTag != null) { Assert.isTrue(eTag.startsWith("\"STRW/STRInvalid eTag, does not start with W/ or \STR\"STRInvalid eTag, does not end with \""); } set(ETAG, eTag); }
|
/**
* Set the (new) entity tag of the body, as specified by the {@code ETag} header.
*/
|
Set the (new) entity tag of the body, as specified by the ETag header
|
setETag
|
{
"repo_name": "spring-projects/spring-android",
"path": "spring-android-rest-template/src/main/java/org/springframework/http/HttpHeaders.java",
"license": "apache-2.0",
"size": 34317
}
|
[
"org.springframework.util.Assert"
] |
import org.springframework.util.Assert;
|
import org.springframework.util.*;
|
[
"org.springframework.util"
] |
org.springframework.util;
| 1,278,663
|
private void applyPropertyPattern(RuleCharacterIterator chars,
        StringBuffer rebuiltPat, SymbolTable symbols) {
    // Peek at the remaining pattern text and attempt to parse a property
    // pattern from its start; a parse position of 0 means nothing matched.
    String remaining = chars.lookahead();
    ParsePosition parsePos = new ParsePosition(0);
    applyPropertyPattern(remaining, parsePos, symbols);
    int consumed = parsePos.getIndex();
    if (consumed == 0) {
        syntaxError(chars, "Invalid property pattern");
    }
    // Advance the iterator past the parsed span and copy it into the
    // rebuilt pattern.
    chars.jumpahead(consumed);
    rebuiltPat.append(remaining.substring(0, consumed));
}
//----------------------------------------------------------------
// Case folding API
//----------------------------------------------------------------
public static final int IGNORE_SPACE = 1;
|
void function(RuleCharacterIterator chars, StringBuffer rebuiltPat, SymbolTable symbols) { String patStr = chars.lookahead(); ParsePosition pos = new ParsePosition(0); applyPropertyPattern(patStr, pos, symbols); if (pos.getIndex() == 0) { syntaxError(chars, STR); } chars.jumpahead(pos.getIndex()); rebuiltPat.append(patStr.substring(0, pos.getIndex())); } public static final int IGNORE_SPACE = 1;
|
/**
* Parse a property pattern.
* @param chars iterator over the pattern characters. Upon return
* it will be advanced to the first character after the parsed
* pattern, or the end of the iteration if all characters are
* parsed.
* @param rebuiltPat the pattern that was parsed, rebuilt or
* copied from the input pattern, as appropriate.
* @param symbols TODO
*/
|
Parse a property pattern
|
applyPropertyPattern
|
{
"repo_name": "TheTypoMaster/Scaper",
"path": "openjdk/jdk/src/share/classes/sun/text/normalizer/UnicodeSet.java",
"license": "gpl-2.0",
"size": 70393
}
|
[
"java.text.ParsePosition"
] |
import java.text.ParsePosition;
|
import java.text.*;
|
[
"java.text"
] |
java.text;
| 500,225
|
interface WithDnsConfig {
WithCreate withDnsServerNames(List<String> dnsServerNames);
|
interface WithDnsConfig { WithCreate withDnsServerNames(List<String> dnsServerNames);
|
/**
* Specifies the DNS servers for the container group.
*
* @param dnsServerNames the names of the DNS servers
* @return the next stage of the definition
*/
|
Specifies the DNS servers for the container group
|
withDnsServerNames
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-containerinstance/src/main/java/com/azure/resourcemanager/containerinstance/models/ContainerGroup.java",
"license": "mit",
"size": 53775
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 553,501
|
String asEndpointUri(String scheme, Map<String, String> properties, boolean encode) throws URISyntaxException;
|
String asEndpointUri(String scheme, Map<String, String> properties, boolean encode) throws URISyntaxException;
|
/**
* Creates an endpoint uri in Java style from the information from the properties
*
* @param scheme the endpoint schema
* @param properties the properties as key value pairs
* @param encode whether to URL encode the returned uri or not
* @return the constructed endpoint uri
* @throws java.net.URISyntaxException is thrown if there is encoding error
*/
|
Creates an endpoint uri in Java style from the information from the properties
|
asEndpointUri
|
{
"repo_name": "isavin/camel",
"path": "camel-core/src/main/java/org/apache/camel/runtimecatalog/RuntimeCamelCatalog.java",
"license": "apache-2.0",
"size": 9590
}
|
[
"java.net.URISyntaxException",
"java.util.Map"
] |
import java.net.URISyntaxException; import java.util.Map;
|
import java.net.*; import java.util.*;
|
[
"java.net",
"java.util"
] |
java.net; java.util;
| 18,485
|
/**
 * Applies the given update to the first document matching the filter and
 * logs how many documents were modified.
 */
private UpdateResult updateOne(Bson filter, Bson update) {
    UpdateResult outcome = db.getCollection(MongoDB.COLLECTION_NAME)
            .updateOne(filter, update);
    logResultCount(outcome.getModifiedCount());
    return outcome;
}
|
UpdateResult function(Bson filter, Bson update) { UpdateResult result = db.getCollection(MongoDB.COLLECTION_NAME).updateOne(filter, update); logResultCount(result.getModifiedCount()); return result; }
|
/**
* Update the first found element
* @param filter
* @param update
* @return
*/
|
Update the first found element
|
updateOne
|
{
"repo_name": "tiefenauer/mongodb-java-examples",
"path": "src/main/java/info/tiefenauer/mongodb/operations/MongoUpdate.java",
"license": "mit",
"size": 2237
}
|
[
"com.mongodb.client.result.UpdateResult",
"info.tiefenauer.mongodb.MongoDB",
"org.bson.conversions.Bson"
] |
import com.mongodb.client.result.UpdateResult; import info.tiefenauer.mongodb.MongoDB; import org.bson.conversions.Bson;
|
import com.mongodb.client.result.*; import info.tiefenauer.mongodb.*; import org.bson.conversions.*;
|
[
"com.mongodb.client",
"info.tiefenauer.mongodb",
"org.bson.conversions"
] |
com.mongodb.client; info.tiefenauer.mongodb; org.bson.conversions;
| 2,198,251
|
/**
 * Cancels the queries with the given IDs; a no-op when the indexing
 * module is disabled.
 *
 * @param queries query IDs to cancel
 */
public void cancelQueries(Collection<Long> queries) {
    if (moduleEnabled()) {
        idx.cancelQueries(queries);
    }
}
|
void function(Collection<Long> queries) { if (moduleEnabled()) idx.cancelQueries(queries); }
|
/**
* Cancel specified queries.
*
* @param queries Queries ID's to cancel.
*/
|
Cancel specified queries
|
cancelQueries
|
{
"repo_name": "nivanov/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/query/GridQueryProcessor.java",
"license": "apache-2.0",
"size": 29178
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,732,145
|
/**
 * Asynchronously retrieves the facet set for a category via the Mozu Facet
 * client and executes the request with the supplied callback.
 *
 * @param categoryId unique identifier of the category to query
 * @param includeAvailable presumably includes attributes/categories not yet
 *        defined as facets when true — confirm against the Mozu API docs
 * @param validate presumably validates that the associated category is
 *        active — confirm against the Mozu API docs
 * @param responseFields filtering syntax to shape the returned JSON
 * @param callback handler invoked when the asynchronous request completes
 * @return a latch that is counted down when the request finishes
 * @throws Exception if the request cannot be executed
 */
public CountDownLatch getFacetCategoryListAsync(Integer categoryId, Boolean includeAvailable, Boolean validate, String responseFields, AsyncCallback<com.mozu.api.contracts.productadmin.FacetSet> callback) throws Exception
{
	MozuClient<com.mozu.api.contracts.productadmin.FacetSet> client = com.mozu.api.clients.commerce.catalog.admin.FacetClient.getFacetCategoryListClient( categoryId, includeAvailable, validate, responseFields);
	client.setContext(_apiContext);
	return client.executeRequest(callback);
}
|
CountDownLatch function(Integer categoryId, Boolean includeAvailable, Boolean validate, String responseFields, AsyncCallback<com.mozu.api.contracts.productadmin.FacetSet> callback) throws Exception { MozuClient<com.mozu.api.contracts.productadmin.FacetSet> client = com.mozu.api.clients.commerce.catalog.admin.FacetClient.getFacetCategoryListClient( categoryId, includeAvailable, validate, responseFields); client.setContext(_apiContext); return client.executeRequest(callback); }
|
/**
*
* <p><pre><code>
* Facet facet = new Facet();
* CountDownLatch latch = facet.getFacetCategoryList( categoryId, includeAvailable, validate, responseFields, callback );
* latch.await() * </code></pre></p>
* @param categoryId Unique identifier of the category to modify.
* @param includeAvailable If true, returns a list of the attributes and categories associated with a product type that have not been defined as a facet for the category.
* @param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object. This parameter should only be used to retrieve data. Attempting to update data using this parameter may cause data loss.
* @param validate Validates that the product category associated with a facet is active. System-supplied and read only.
* @param callback callback handler for asynchronous operations
* @return com.mozu.api.contracts.productadmin.FacetSet
* @see com.mozu.api.contracts.productadmin.FacetSet
*/
|
<code>Facet facet = new Facet(); CountDownLatch latch = facet.getFacetCategoryList( categoryId, includeAvailable, validate, responseFields, callback ); latch.await()</code>
 |
|
getFacetCategoryListAsync
|
{
"repo_name": "Mozu/mozu-java",
"path": "mozu-javaasync-core/src/main/java/com/mozu/api/resources/commerce/catalog/admin/FacetResource.java",
"license": "mit",
"size": 16781
}
|
[
"com.mozu.api.AsyncCallback",
"com.mozu.api.MozuClient",
"java.util.concurrent.CountDownLatch"
] |
import com.mozu.api.AsyncCallback; import com.mozu.api.MozuClient; import java.util.concurrent.CountDownLatch;
|
import com.mozu.api.*; import java.util.concurrent.*;
|
[
"com.mozu.api",
"java.util"
] |
com.mozu.api; java.util;
| 477,934
|
/**
 * Returns the resource IDs of all items currently stored, as reported by
 * the micro-inventory.
 *
 * @return a set of stored item resource IDs
 */
public Set<Integer> getItemResourceIDs() {
    return this.microInventory.getItemsStored();
}
|
Set<Integer> function() { return microInventory.getItemsStored(); }
|
/**
* Gets a list of all stored item resources
*
* @return a list of resource ids
*/
|
Gets a list of all stored item resources
|
getItemResourceIDs
|
{
"repo_name": "mars-sim/mars-sim",
"path": "mars-sim-core/src/main/java/org/mars_sim/msp/core/equipment/EVASuit.java",
"license": "gpl-3.0",
"size": 21551
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,542,636
|
/**
 * Returns all network nodes as the values view of the internal node map.
 *
 * @return the collection of nodes in the network
 */
public Collection<Node> getNodes() {
    return nodes.values();
}
|
Collection<Node> function() { return this.nodes.values(); }
|
/**
* To obtain a collection of network nodes
*
* @return Collection of nodes in the network
*/
|
To obtain a collection of network nodes
|
getNodes
|
{
"repo_name": "ccascone/JNetMan",
"path": "src/jnetman/network/Network.java",
"license": "apache-2.0",
"size": 5971
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,837,426
|
/**
 * Lazily builds the scribe {@link OAuthService} on first use and caches it
 * in {@code mService}; subsequent calls return the cached instance.
 */
private OAuthService createOAuthService() {
    if (mService != null) {
        return mService;
    }
    mService = new ServiceBuilder()
            .provider(OpenppOAuthApi.class)
            .apiKey(mApiKey)
            .apiSecret(mApiSecret)
            .callback(getCallbackUri())
            .build();
    return mService;
}
|
OAuthService function() { if (mService == null) { mService = new ServiceBuilder() .provider(OpenppOAuthApi.class) .apiKey(mApiKey) .apiSecret(mApiSecret) .callback(getCallbackUri()) .build(); } return mService; }
|
/**
* Creates the OAuthService.
* @return
*/
|
Creates the OAuthService
|
createOAuthService
|
{
"repo_name": "webwarejp/openpp-android-sdk",
"path": "openpp/src/main/java/net/openpp/android/auth/OpenppAuthManager.java",
"license": "apache-2.0",
"size": 14491
}
|
[
"org.scribe.builder.ServiceBuilder",
"org.scribe.oauth.OAuthService"
] |
import org.scribe.builder.ServiceBuilder; import org.scribe.oauth.OAuthService;
|
import org.scribe.builder.*; import org.scribe.oauth.*;
|
[
"org.scribe.builder",
"org.scribe.oauth"
] |
org.scribe.builder; org.scribe.oauth;
| 1,869,738
|
/**
 * Serializes the metric-of-reachability values into their byte
 * representation: the default metric, one flag byte each for the delay,
 * expense and error metrics (internal vs. external), followed by the
 * neighbor ID bytes.
 *
 * @return the serialized metric-of-reachability bytes
 */
public byte[] asBytes() {
    List<Byte> bytes = new ArrayList<>();
    bytes.add((byte) this.defaultMetric());
    // The three metric flags share identical internal/external encoding;
    // the original triplicated this branch — factored into one helper.
    bytes.add(metricFlagByte(this.isDelayIsInternal()));
    bytes.add(metricFlagByte(this.isExpenseIsInternal()));
    bytes.add(metricFlagByte(this.isErrorIsInternal()));
    bytes.addAll(IsisUtil.sourceAndLanIdToBytes(this.neighborId()));
    return Bytes.toArray(bytes);
}

/**
 * Encodes one metric flag: value1 when the metric is internal, value2
 * otherwise (both are numeric strings — same parsing as the original).
 */
private byte metricFlagByte(boolean internal) {
    return (byte) Integer.parseInt(internal ? value1 : value2);
}
|
byte[] function() { List<Byte> bytes = new ArrayList<>(); bytes.add((byte) this.defaultMetric()); if (this.isDelayIsInternal()) { bytes.add((byte) Integer.parseInt(value1)); } else { bytes.add((byte) Integer.parseInt(value2)); } if (this.isExpenseIsInternal()) { bytes.add((byte) Integer.parseInt(value1)); } else { bytes.add((byte) Integer.parseInt(value2)); } if (this.isErrorIsInternal()) { bytes.add((byte) Integer.parseInt(value1)); } else { bytes.add((byte) Integer.parseInt(value2)); } bytes.addAll(IsisUtil.sourceAndLanIdToBytes(this.neighborId())); return Bytes.toArray(bytes); }
|
/**
* Returns metric of reachability values as bytes of metric of reachability.
*
* @return byteArray metric of reachability values as bytes of metric of reachability
*/
|
Serializes the metric-of-reachability values into their byte-array representation
 |
|
asBytes
|
{
"repo_name": "donNewtonAlpha/onos",
"path": "protocols/isis/isisio/src/main/java/org/onosproject/isis/io/isispacket/tlv/MetricsOfReachability.java",
"license": "apache-2.0",
"size": 10727
}
|
[
"com.google.common.primitives.Bytes",
"java.util.ArrayList",
"java.util.List",
"org.onosproject.isis.io.util.IsisUtil"
] |
import com.google.common.primitives.Bytes; import java.util.ArrayList; import java.util.List; import org.onosproject.isis.io.util.IsisUtil;
|
import com.google.common.primitives.*; import java.util.*; import org.onosproject.isis.io.util.*;
|
[
"com.google.common",
"java.util",
"org.onosproject.isis"
] |
com.google.common; java.util; org.onosproject.isis;
| 991,360
|
/**
 * Translates the SWRL rules contained in the ontology and returns a datalog
 * program holding the supported datalog facts.
 *
 * @param onto the OWL ontology to translate
 * @return the resulting {@link DatalogProgram}
 */
public DatalogProgram createDatalog(OWLOntology onto) {
    for (OWLAxiom axiom : onto.getAxioms()) {
        // Only SWRL rule axioms are translated; skip everything else.
        if (!AxiomType.SWRL_RULE.equals(axiom.getAxiomType())) {
            continue;
        }
        ((SWRLRule) axiom).accept(this);
        if (notSupported) {
            // Report constructs the visitor could not translate, then reset.
            log.warn("Not Supported Translation of: " + errors);
            errors.clear();
        }
    }
    DatalogProgram program = fac.getDatalogProgram();
    program.appendRule(facts);
    return program;
}
|
DatalogProgram function(OWLOntology onto) { for (OWLAxiom axiom: onto.getAxioms()){ if(axiom.getAxiomType().equals(AxiomType.SWRL_RULE)){ SWRLRule rule =(SWRLRule) axiom; rule.accept(this); if (notSupported){ log.warn(STR+ errors); errors.clear(); } } } DatalogProgram dp = fac.getDatalogProgram(); dp.appendRule(facts); return dp; }
|
/**
* Translate the swrl_rules contained in the ontology
* Return a datalog program containing the supported datalog facts
* @param onto an OWLOntology
* @return DatalogProgram
*/
|
Translate the swrl_rules contained in the ontology Return a datalog program containing the supported datalog facts
|
createDatalog
|
{
"repo_name": "srapisarda/ontop",
"path": "obdalib-owlapi3/src/main/java/it/unibz/inf/ontop/owlapi/swrl/SWRLVisitor.java",
"license": "apache-2.0",
"size": 7567
}
|
[
"it.unibz.inf.ontop.model.DatalogProgram",
"org.semanticweb.owlapi.model.AxiomType",
"org.semanticweb.owlapi.model.OWLAxiom",
"org.semanticweb.owlapi.model.OWLOntology",
"org.semanticweb.owlapi.model.SWRLRule"
] |
import it.unibz.inf.ontop.model.DatalogProgram; import org.semanticweb.owlapi.model.AxiomType; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.SWRLRule;
|
import it.unibz.inf.ontop.model.*; import org.semanticweb.owlapi.model.*;
|
[
"it.unibz.inf",
"org.semanticweb.owlapi"
] |
it.unibz.inf; org.semanticweb.owlapi;
| 1,855,742
|
/**
 * Verifies that every LibUsb constant matches the numeric value defined by
 * the native libusb headers (log levels, speeds, standard requests, request
 * types, recipients, error codes, capabilities, class codes, descriptor
 * types, endpoint directions, transfer types, and ISO sync/usage types).
 */
@Test
public void testConstants()
{
    // Log levels
    assertEquals(0, LibUsb.LOG_LEVEL_NONE);
    assertEquals(1, LibUsb.LOG_LEVEL_ERROR);
    assertEquals(2, LibUsb.LOG_LEVEL_WARNING);
    assertEquals(3, LibUsb.LOG_LEVEL_INFO);
    assertEquals(4, LibUsb.LOG_LEVEL_DEBUG);
    // Speed codes
    assertEquals(0, LibUsb.SPEED_UNKNOWN);
    assertEquals(1, LibUsb.SPEED_LOW);
    assertEquals(2, LibUsb.SPEED_FULL);
    assertEquals(3, LibUsb.SPEED_HIGH);
    assertEquals(4, LibUsb.SPEED_SUPER);
    // Standard requests
    assertEquals(0x00, LibUsb.REQUEST_GET_STATUS);
    assertEquals(0x01, LibUsb.REQUEST_CLEAR_FEATURE);
    assertEquals(0x03, LibUsb.REQUEST_SET_FEATURE);
    assertEquals(0x05, LibUsb.REQUEST_SET_ADDRESS);
    assertEquals(0x06, LibUsb.REQUEST_GET_DESCRIPTOR);
    assertEquals(0x07, LibUsb.REQUEST_SET_DESCRIPTOR);
    assertEquals(0x08, LibUsb.REQUEST_GET_CONFIGURATION);
    assertEquals(0x09, LibUsb.REQUEST_SET_CONFIGURATION);
    assertEquals(0x0A, LibUsb.REQUEST_GET_INTERFACE);
    assertEquals(0x0B, LibUsb.REQUEST_SET_INTERFACE);
    assertEquals(0x0C, LibUsb.REQUEST_SYNCH_FRAME);
    assertEquals(0x30, LibUsb.REQUEST_SET_SEL);
    assertEquals(0x31, LibUsb.SET_ISOCH_DELAY);
    // Request type (bits 5..6 of bmRequestType)
    assertEquals(0x00 << 5, LibUsb.REQUEST_TYPE_STANDARD);
    assertEquals(0x01 << 5, LibUsb.REQUEST_TYPE_CLASS);
    assertEquals(0x02 << 5, LibUsb.REQUEST_TYPE_VENDOR);
    assertEquals(0x03 << 5, LibUsb.REQUEST_TYPE_RESERVED);
    // Recipient bits
    assertEquals(0x00, LibUsb.RECIPIENT_DEVICE);
    assertEquals(0x01, LibUsb.RECIPIENT_INTERFACE);
    assertEquals(0x02, LibUsb.RECIPIENT_ENDPOINT);
    assertEquals(0x03, LibUsb.RECIPIENT_OTHER);
    // Error codes (negative, matching libusb_error)
    assertEquals(0, LibUsb.SUCCESS);
    assertEquals(-1, LibUsb.ERROR_IO);
    assertEquals(-2, LibUsb.ERROR_INVALID_PARAM);
    assertEquals(-3, LibUsb.ERROR_ACCESS);
    assertEquals(-4, LibUsb.ERROR_NO_DEVICE);
    assertEquals(-5, LibUsb.ERROR_NOT_FOUND);
    assertEquals(-6, LibUsb.ERROR_BUSY);
    assertEquals(-7, LibUsb.ERROR_TIMEOUT);
    assertEquals(-8, LibUsb.ERROR_OVERFLOW);
    assertEquals(-9, LibUsb.ERROR_PIPE);
    assertEquals(-10, LibUsb.ERROR_INTERRUPTED);
    assertEquals(-11, LibUsb.ERROR_NO_MEM);
    assertEquals(-12, LibUsb.ERROR_NOT_SUPPORTED);
    assertEquals(-99, LibUsb.ERROR_OTHER);
    // Capabilities
    assertEquals(0, LibUsb.CAP_HAS_CAPABILITY);
    // Device and/or Interface class codes
    assertEquals(0, LibUsb.CLASS_PER_INTERFACE);
    assertEquals(1, LibUsb.CLASS_AUDIO);
    assertEquals(2, LibUsb.CLASS_COMM);
    assertEquals(3, LibUsb.CLASS_HID);
    assertEquals(5, LibUsb.CLASS_PHYSICAL);
    assertEquals(7, LibUsb.CLASS_PRINTER);
    assertEquals(6, LibUsb.CLASS_PTP);
    assertEquals(6, LibUsb.CLASS_IMAGE);
    assertEquals(8, LibUsb.CLASS_MASS_STORAGE);
    assertEquals(9, LibUsb.CLASS_HUB);
    assertEquals(10, LibUsb.CLASS_DATA);
    assertEquals(0x0B, LibUsb.CLASS_SMART_CARD);
    assertEquals(0x0D, LibUsb.CLASS_CONTENT_SECURITY);
    assertEquals(0x0E, LibUsb.CLASS_VIDEO);
    assertEquals(0x0F, LibUsb.CLASS_PERSONAL_HEALTHCARE);
    assertEquals((byte) 0xDC, LibUsb.CLASS_DIAGNOSTIC_DEVICE);
    assertEquals((byte) 0xE0, LibUsb.CLASS_WIRELESS);
    assertEquals((byte) 0xFE, LibUsb.CLASS_APPLICATION);
    assertEquals((byte) 0xFF, LibUsb.CLASS_VENDOR_SPEC);
    // Descriptor types
    assertEquals(0x01, LibUsb.DT_DEVICE);
    assertEquals(0x02, LibUsb.DT_CONFIG);
    assertEquals(0x03, LibUsb.DT_STRING);
    assertEquals(0x04, LibUsb.DT_INTERFACE);
    assertEquals(0x05, LibUsb.DT_ENDPOINT);
    assertEquals(0x21, LibUsb.DT_HID);
    assertEquals(0x22, LibUsb.DT_REPORT);
    assertEquals(0x23, LibUsb.DT_PHYSICAL);
    assertEquals(0x29, LibUsb.DT_HUB);
    assertEquals(0x2A, LibUsb.DT_SUPERSPEED_HUB);
    // Endpoint direction (bit 7 of bEndpointAddress)
    assertEquals((byte) 0x80, LibUsb.ENDPOINT_IN);
    assertEquals(0x00, LibUsb.ENDPOINT_OUT);
    // Transfer types
    assertEquals(0, LibUsb.TRANSFER_TYPE_CONTROL);
    assertEquals(1, LibUsb.TRANSFER_TYPE_ISOCHRONOUS);
    assertEquals(2, LibUsb.TRANSFER_TYPE_BULK);
    assertEquals(3, LibUsb.TRANSFER_TYPE_INTERRUPT);
    // ISO Sync types
    assertEquals(0, LibUsb.ISO_SYNC_TYPE_NONE);
    assertEquals(1, LibUsb.ISO_SYNC_TYPE_ASYNC);
    assertEquals(2, LibUsb.ISO_SYNC_TYPE_ADAPTIVE);
    assertEquals(3, LibUsb.ISO_SYNC_TYPE_SYNC);
    // ISO usage types
    assertEquals(0, LibUsb.ISO_USAGE_TYPE_DATA);
    assertEquals(1, LibUsb.ISO_USAGE_TYPE_FEEDBACK);
    assertEquals(2, LibUsb.ISO_USAGE_TYPE_IMPLICIT);
}
|
void function() { assertEquals(0, LibUsb.LOG_LEVEL_NONE); assertEquals(1, LibUsb.LOG_LEVEL_ERROR); assertEquals(2, LibUsb.LOG_LEVEL_WARNING); assertEquals(3, LibUsb.LOG_LEVEL_INFO); assertEquals(4, LibUsb.LOG_LEVEL_DEBUG); assertEquals(0, LibUsb.SPEED_UNKNOWN); assertEquals(1, LibUsb.SPEED_LOW); assertEquals(2, LibUsb.SPEED_FULL); assertEquals(3, LibUsb.SPEED_HIGH); assertEquals(4, LibUsb.SPEED_SUPER); assertEquals(0x00, LibUsb.REQUEST_GET_STATUS); assertEquals(0x01, LibUsb.REQUEST_CLEAR_FEATURE); assertEquals(0x03, LibUsb.REQUEST_SET_FEATURE); assertEquals(0x05, LibUsb.REQUEST_SET_ADDRESS); assertEquals(0x06, LibUsb.REQUEST_GET_DESCRIPTOR); assertEquals(0x07, LibUsb.REQUEST_SET_DESCRIPTOR); assertEquals(0x08, LibUsb.REQUEST_GET_CONFIGURATION); assertEquals(0x09, LibUsb.REQUEST_SET_CONFIGURATION); assertEquals(0x0A, LibUsb.REQUEST_GET_INTERFACE); assertEquals(0x0B, LibUsb.REQUEST_SET_INTERFACE); assertEquals(0x0C, LibUsb.REQUEST_SYNCH_FRAME); assertEquals(0x30, LibUsb.REQUEST_SET_SEL); assertEquals(0x31, LibUsb.SET_ISOCH_DELAY); assertEquals(0x00 << 5, LibUsb.REQUEST_TYPE_STANDARD); assertEquals(0x01 << 5, LibUsb.REQUEST_TYPE_CLASS); assertEquals(0x02 << 5, LibUsb.REQUEST_TYPE_VENDOR); assertEquals(0x03 << 5, LibUsb.REQUEST_TYPE_RESERVED); assertEquals(0x00, LibUsb.RECIPIENT_DEVICE); assertEquals(0x01, LibUsb.RECIPIENT_INTERFACE); assertEquals(0x02, LibUsb.RECIPIENT_ENDPOINT); assertEquals(0x03, LibUsb.RECIPIENT_OTHER); assertEquals(0, LibUsb.SUCCESS); assertEquals(-1, LibUsb.ERROR_IO); assertEquals(-2, LibUsb.ERROR_INVALID_PARAM); assertEquals(-3, LibUsb.ERROR_ACCESS); assertEquals(-4, LibUsb.ERROR_NO_DEVICE); assertEquals(-5, LibUsb.ERROR_NOT_FOUND); assertEquals(-6, LibUsb.ERROR_BUSY); assertEquals(-7, LibUsb.ERROR_TIMEOUT); assertEquals(-8, LibUsb.ERROR_OVERFLOW); assertEquals(-9, LibUsb.ERROR_PIPE); assertEquals(-10, LibUsb.ERROR_INTERRUPTED); assertEquals(-11, LibUsb.ERROR_NO_MEM); assertEquals(-12, LibUsb.ERROR_NOT_SUPPORTED); assertEquals(-99, 
LibUsb.ERROR_OTHER); assertEquals(0, LibUsb.CAP_HAS_CAPABILITY); assertEquals(0, LibUsb.CLASS_PER_INTERFACE); assertEquals(1, LibUsb.CLASS_AUDIO); assertEquals(2, LibUsb.CLASS_COMM); assertEquals(3, LibUsb.CLASS_HID); assertEquals(5, LibUsb.CLASS_PHYSICAL); assertEquals(7, LibUsb.CLASS_PRINTER); assertEquals(6, LibUsb.CLASS_PTP); assertEquals(6, LibUsb.CLASS_IMAGE); assertEquals(8, LibUsb.CLASS_MASS_STORAGE); assertEquals(9, LibUsb.CLASS_HUB); assertEquals(10, LibUsb.CLASS_DATA); assertEquals(0x0B, LibUsb.CLASS_SMART_CARD); assertEquals(0x0D, LibUsb.CLASS_CONTENT_SECURITY); assertEquals(0x0E, LibUsb.CLASS_VIDEO); assertEquals(0x0F, LibUsb.CLASS_PERSONAL_HEALTHCARE); assertEquals((byte) 0xDC, LibUsb.CLASS_DIAGNOSTIC_DEVICE); assertEquals((byte) 0xE0, LibUsb.CLASS_WIRELESS); assertEquals((byte) 0xFE, LibUsb.CLASS_APPLICATION); assertEquals((byte) 0xFF, LibUsb.CLASS_VENDOR_SPEC); assertEquals(0x01, LibUsb.DT_DEVICE); assertEquals(0x02, LibUsb.DT_CONFIG); assertEquals(0x03, LibUsb.DT_STRING); assertEquals(0x04, LibUsb.DT_INTERFACE); assertEquals(0x05, LibUsb.DT_ENDPOINT); assertEquals(0x21, LibUsb.DT_HID); assertEquals(0x22, LibUsb.DT_REPORT); assertEquals(0x23, LibUsb.DT_PHYSICAL); assertEquals(0x29, LibUsb.DT_HUB); assertEquals(0x2A, LibUsb.DT_SUPERSPEED_HUB); assertEquals((byte) 0x80, LibUsb.ENDPOINT_IN); assertEquals(0x00, LibUsb.ENDPOINT_OUT); assertEquals(0, LibUsb.TRANSFER_TYPE_CONTROL); assertEquals(1, LibUsb.TRANSFER_TYPE_ISOCHRONOUS); assertEquals(2, LibUsb.TRANSFER_TYPE_BULK); assertEquals(3, LibUsb.TRANSFER_TYPE_INTERRUPT); assertEquals(0, LibUsb.ISO_SYNC_TYPE_NONE); assertEquals(1, LibUsb.ISO_SYNC_TYPE_ASYNC); assertEquals(2, LibUsb.ISO_SYNC_TYPE_ADAPTIVE); assertEquals(3, LibUsb.ISO_SYNC_TYPE_SYNC); assertEquals(0, LibUsb.ISO_USAGE_TYPE_DATA); assertEquals(1, LibUsb.ISO_USAGE_TYPE_FEEDBACK); assertEquals(2, LibUsb.ISO_USAGE_TYPE_IMPLICIT); }
|
/**
* Tests the constant values.
*/
|
Tests the constant values
|
testConstants
|
{
"repo_name": "usb4java/usb4java",
"path": "src/test/java/org/usb4java/LibUsbTest.java",
"license": "mit",
"size": 45109
}
|
[
"org.junit.Assert",
"org.usb4java.LibUsb"
] |
import org.junit.Assert; import org.usb4java.LibUsb;
|
import org.junit.*; import org.usb4java.*;
|
[
"org.junit",
"org.usb4java"
] |
org.junit; org.usb4java;
| 534,648
|
/**
 * Synchronizes the generated source code of a microservice repository with an
 * updated microservice model: loads the code templates from the template
 * repository, renames files whose names changed between the old and new
 * model, regenerates or deletes the database artifacts as needed, re-renders
 * every traced file against its file trace model, and finally commits the
 * updated traced files back to the repository.
 *
 * Fix: the switch over template file names was missing a {@code break;}
 * after the ServiceClass.properties case, so that case fell through and
 * clobbered {@code genericTestCase} with the properties-file content
 * whenever the properties file was walked.
 *
 * @param microservice the updated microservice model
 * @param oldMicroservice the previous microservice model (for renames/deletes)
 * @param files the traced files (file name -> JSON with base64 content and traces)
 * @param gitAdapter adapter for the git host holding the repositories
 * @param service the service used to commit the updated files
 * @param metadataDoc metadata document passed through to code generation
 * @throws ModelParseException if the model cannot be processed
 */
public static void synchronizeSourceCode(Microservice microservice, Microservice oldMicroservice,
    HashMap<String, JSONObject> files, BaseGitHostAdapter gitAdapter, Service service, String metadataDoc) throws ModelParseException {
  // first load the needed templates from the template repository
  // variables holding the template source code
  String serviceClass = null;
  String serviceTest = null;
  String serviceProperties = null;
  String genericHttpMethod = null;
  String genericHttpMethodBody = null;
  String genericApiResponse = null;
  String genericHttpResponse = null;
  String genericTestCase = null;
  String databaseConfig = null;
  String databaseInstantiation = null;
  String serviceInvocation = null;
  String databaseScript = null;
  String genericTable = null;
  String databaseManager = null;
  String guidances = null;
  // to generate schema file
  String classes = null;
  String genericClassBody = null;
  String genericClassProperty = null;
  try (TreeWalk treeWalk =
      getTemplateRepositoryContent(gitAdapter)) {
    // now load the TreeWalk containing the template repository content
    treeWalk.setFilter(PathFilter.create("backend/"));
    ObjectReader reader = treeWalk.getObjectReader();
    // walk through the tree and retrieve the needed templates
    while (treeWalk.next()) {
      ObjectId objectId = treeWalk.getObjectId(0);
      ObjectLoader loader = reader.open(objectId);
      switch (treeWalk.getNameString()) {
        case "genericHTTPMethod.txt":
          genericHttpMethod = new String(loader.getBytes(), "UTF-8");
          break;
        case "genericHTTPMethodBody.txt":
          genericHttpMethodBody = new String(loader.getBytes(), "UTF-8");
          break;
        case "genericHTTPResponse.txt":
          genericHttpResponse = new String(loader.getBytes(), "UTF-8");
          break;
        case "genericApiResponse.txt":
          genericApiResponse = new String(loader.getBytes(), "UTF-8");
          break;
        case "ServiceClass.java":
          serviceClass = new String(loader.getBytes(), "UTF-8");
          break;
        case "ServiceTest.java":
          serviceTest = new String(loader.getBytes(), "UTF-8");
          break;
        case "i5.las2peer.services.servicePackage.ServiceClass.properties":
          serviceProperties = new String(loader.getBytes(), "UTF-8");
          // BUGFIX: break was missing here, falling through into the
          // genericTestMethod.txt case and overwriting genericTestCase.
          break;
        case "genericTestMethod.txt":
          genericTestCase = new String(loader.getBytes(), "UTF-8");
          break;
        case "guidances.json":
          guidances = new String(loader.getBytes(), "UTF-8");
          break;
        case "databaseConfig.txt":
          databaseConfig = new String(loader.getBytes(), "UTF-8");
          break;
        case "databaseInstantiation.txt":
          databaseInstantiation = new String(loader.getBytes(), "UTF-8");
          break;
        case "genericServiceInvocation.txt":
          serviceInvocation = new String(loader.getBytes(), "UTF-8");
          break;
        case "genericTable.txt":
          genericTable = new String(loader.getBytes(), "UTF-8");
          break;
        case "DatabaseManager.java":
          databaseManager = new String(loader.getBytes(), "UTF-8");
          break;
        case "database.sql":
          databaseScript = new String(loader.getBytes(), "UTF-8");
          break;
        case "Classes.java":
          classes = new String(loader.getBytes(), "UTF-8");
          break;
        case "genericClassBody.txt":
          genericClassBody = new String(loader.getBytes(), "UTF-8");
          break;
        case "genericClassProperty.txt":
          genericClassProperty = new String(loader.getBytes(), "UTF-8");
          break;
      }
    }
  } catch (Exception e) {
    logger.printStackTrace(e);
  }
  // new file names
  String serviceFileName = getServiceFileName(microservice);
  String servicePropertiesFileName = getServicePropertiesFileName(microservice);
  String serviceTestFileName = getServiceTestFileName(microservice);
  String databaseScriptFileName = getDatabaseScriptFileName(microservice);
  String newDatabaseManagerFileName = "src/main/i5/las2peer/services/"
      + getPackageName(microservice) + "/database/DatabaseManager.java";
  String newClassesFileName = getClassesFileName(microservice);
  // old file names
  String serviceOldFileName = getServiceFileName(oldMicroservice);
  String serviceOldPropertiesFileName = getServicePropertiesFileName(oldMicroservice);
  String serviceOldTestFileName = getServiceTestFileName(oldMicroservice);
  String databaseOldScriptFileName = getDatabaseScriptFileName(oldMicroservice);
  String oldDatabaseManagerFileName = "src/main/i5/las2peer/services/"
      + getPackageName(oldMicroservice) + "/database/DatabaseManager.java";
  String oldClassesFileName = getClassesFileName(oldMicroservice);
  // if the old service file was renamed, we need to rename it in the local repo
  if (!serviceFileName.equals(serviceOldFileName)) {
    renameFileInRepository(getRepositoryName(oldMicroservice), serviceFileName,
        serviceOldFileName);
  }
  // if the old service test file was renamed, we need to rename it in the local repo
  if (!serviceTestFileName.equals(serviceOldTestFileName)) {
    renameFileInRepository(getRepositoryName(oldMicroservice), serviceTestFileName,
        serviceOldTestFileName);
  }
  // if the old classes file was renamed, we need to rename it in the local repo
  if (!newClassesFileName.equals(oldClassesFileName)) {
    renameFileInRepository(getRepositoryName(oldMicroservice), newClassesFileName,
        oldClassesFileName);
  }
  // if the old service properties file was renamed, we need to rename it in the local repo
  if (!servicePropertiesFileName.equals(serviceOldPropertiesFileName)) {
    renameFileInRepository(getRepositoryName(oldMicroservice), servicePropertiesFileName,
        serviceOldPropertiesFileName);
  }
  // now loop through the traced files and synchronize them
  Iterator<String> it = files.keySet().iterator();
  TraceModel traceModel = new TraceModel();
  // special case for database manager, as it is not always traced
  if (!files.containsKey(oldDatabaseManagerFileName) && microservice.getDatabase() != null) {
    generateOtherArtifacts(
        Template.createInitialTemplateEngine(traceModel, newDatabaseManagerFileName),
        microservice, gitAdapter.getGitOrganization(), databaseManager);
  } else if (files.containsKey(oldDatabaseManagerFileName)
      && microservice.getDatabase() == null) {
    deleteFileInLocalRepository(getRepositoryName(oldMicroservice), oldDatabaseManagerFileName);
  } else if (!oldDatabaseManagerFileName.equals(newDatabaseManagerFileName)) {
    renameFileInRepository(getRepositoryName(oldMicroservice), newDatabaseManagerFileName,
        oldDatabaseManagerFileName);
  }
  // special case for database script, as it is not always traced
  if (!files.containsKey(databaseOldScriptFileName) && microservice.getDatabase() != null) {
    FileTraceModel databaseScriptTraceModel =
        new FileTraceModel(traceModel, getDatabaseScriptFileName(microservice));
    traceModel.addFileTraceModel(databaseScriptTraceModel);
    TemplateEngine databaseScriptTemplateEngine =
        new TemplateEngine(new InitialGenerationStrategy(), databaseScriptTraceModel);
    generateDatabaseScript(databaseScriptTemplateEngine, databaseScript, genericTable,
        microservice);
  } else if (files.containsKey(databaseOldScriptFileName) && microservice.getDatabase() == null) {
    deleteFileInLocalRepository(getRepositoryName(oldMicroservice), databaseOldScriptFileName);
  } else if (!databaseOldScriptFileName.equals(databaseScriptFileName)) {
    renameFileInRepository(getRepositoryName(oldMicroservice), databaseScriptFileName,
        databaseOldScriptFileName);
  }
  while (it.hasNext()) {
    String fileName = it.next();
    JSONObject fileObject = files.get(fileName);
    String content = (String) fileObject.get("content");
    byte[] base64decodedBytes = Base64.getDecoder().decode(content);
    try {
      Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "Synchronizing " + fileName + " now ...");
      content = new String(base64decodedBytes, "utf-8");
      JSONObject fileTraces = (JSONObject) fileObject.get("fileTraces");
      FileTraceModel oldFileTraceModel = FileTraceModelFactory
          .createFileTraceModelFromJSON(content, fileTraces, traceModel, fileName);
      TemplateStrategy strategy = new SynchronizationStrategy(oldFileTraceModel);
      TemplateEngine templateEngine = new TemplateEngine(strategy, oldFileTraceModel);
      if (fileName.equals(serviceOldFileName)) {
        oldFileTraceModel.setFileName(serviceFileName);
        String repositoryLocation =
            gitAdapter.getBaseURL() + gitAdapter.getGitOrganization() + "/" + getRepositoryName(microservice);
        generateNewServiceClass(templateEngine, serviceClass, microservice, repositoryLocation,
            genericHttpMethod, genericHttpMethodBody, genericApiResponse, genericHttpResponse,
            databaseConfig, databaseInstantiation, serviceInvocation, metadataDoc);
      } else if (fileName.equals(serviceOldTestFileName)) {
        oldFileTraceModel.setFileName(serviceTestFileName);
        generateNewServiceTest(templateEngine, serviceTest, microservice, genericTestCase);
      } else if (fileName.equals(oldClassesFileName)) {
        oldFileTraceModel.setFileName(newClassesFileName);
        String repositoryLocation =
            gitAdapter.getBaseURL() + gitAdapter.getGitOrganization() + "/" + getRepositoryName(microservice);
        generateNewClasses(templateEngine, classes, microservice, repositoryLocation,genericClassBody, genericClassProperty, metadataDoc);
      } else if (fileName.equals(databaseOldScriptFileName)) {
        if (microservice.getDatabase() == null) {
          templateEngine = null;
        } else {
          oldFileTraceModel.setFileName(databaseScriptFileName);
          generateDatabaseScript(templateEngine, databaseScript, genericTable, microservice);
        }
      } else if (fileName.equals(oldDatabaseManagerFileName)) {
        if (microservice.getDatabase() == null) {
          templateEngine = null;
        } else {
          oldFileTraceModel.setFileName(newDatabaseManagerFileName);
          generateOtherArtifacts(templateEngine, microservice, gitAdapter.getGitOrganization(), content);
        }
      } else if (fileName.equals(serviceOldPropertiesFileName)) {
        content = serviceProperties;
        oldFileTraceModel.setFileName(servicePropertiesFileName);
        generateOtherArtifacts(templateEngine, microservice, gitAdapter.getGitOrganization(), content);
      } else {
        generateOtherArtifacts(templateEngine, microservice, gitAdapter.getGitOrganization(), content);
      }
      Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "... " + fileName + " synchronized.");
      // finally add the file trace model to the global trace model
      if (templateEngine != null) {
        traceModel.addFileTraceModel(templateEngine.getFileTraceModel());
      }
    } catch (UnsupportedEncodingException e) {
      logger.printStackTrace(e);
    }
  }
  try {
    // commit changes
    updateTracedFilesInRepository(getUpdatedTracedFilesForRepository(traceModel, guidances),
        getRepositoryName(microservice), service);
  } catch (UnsupportedEncodingException e) {
    logger.printStackTrace(e);
  }
}
|
static void function(Microservice microservice, Microservice oldMicroservice, HashMap<String, JSONObject> files, BaseGitHostAdapter gitAdapter, Service service, String metadataDoc) throws ModelParseException { String serviceClass = null; String serviceTest = null; String serviceProperties = null; String genericHttpMethod = null; String genericHttpMethodBody = null; String genericApiResponse = null; String genericHttpResponse = null; String genericTestCase = null; String databaseConfig = null; String databaseInstantiation = null; String serviceInvocation = null; String databaseScript = null; String genericTable = null; String databaseManager = null; String guidances = null; String classes = null; String genericClassBody = null; String genericClassProperty = null; try (TreeWalk treeWalk = getTemplateRepositoryContent(gitAdapter)) { treeWalk.setFilter(PathFilter.create(STR)); ObjectReader reader = treeWalk.getObjectReader(); while (treeWalk.next()) { ObjectId objectId = treeWalk.getObjectId(0); ObjectLoader loader = reader.open(objectId); switch (treeWalk.getNameString()) { case STR: genericHttpMethod = new String(loader.getBytes(), "UTF-8"); break; case STR: genericHttpMethodBody = new String(loader.getBytes(), "UTF-8"); break; case STR: genericHttpResponse = new String(loader.getBytes(), "UTF-8"); break; case STR: genericApiResponse = new String(loader.getBytes(), "UTF-8"); break; case STR: serviceClass = new String(loader.getBytes(), "UTF-8"); break; case STR: serviceTest = new String(loader.getBytes(), "UTF-8"); break; case STR: serviceProperties = new String(loader.getBytes(), "UTF-8"); case STR: genericTestCase = new String(loader.getBytes(), "UTF-8"); break; case STR: guidances = new String(loader.getBytes(), "UTF-8"); break; case STR: databaseConfig = new String(loader.getBytes(), "UTF-8"); break; case STR: databaseInstantiation = new String(loader.getBytes(), "UTF-8"); break; case STR: serviceInvocation = new String(loader.getBytes(), "UTF-8"); break; case 
STR: genericTable = new String(loader.getBytes(), "UTF-8"); break; case STR: databaseManager = new String(loader.getBytes(), "UTF-8"); break; case STR: databaseScript = new String(loader.getBytes(), "UTF-8"); break; case STR: classes = new String(loader.getBytes(), "UTF-8"); break; case STR: genericClassBody = new String(loader.getBytes(), "UTF-8"); break; case STR: genericClassProperty = new String(loader.getBytes(), "UTF-8"); break; } } } catch (Exception e) { logger.printStackTrace(e); } String serviceFileName = getServiceFileName(microservice); String servicePropertiesFileName = getServicePropertiesFileName(microservice); String serviceTestFileName = getServiceTestFileName(microservice); String databaseScriptFileName = getDatabaseScriptFileName(microservice); String newDatabaseManagerFileName = STR + getPackageName(microservice) + STR; String newClassesFileName = getClassesFileName(microservice); String serviceOldFileName = getServiceFileName(oldMicroservice); String serviceOldPropertiesFileName = getServicePropertiesFileName(oldMicroservice); String serviceOldTestFileName = getServiceTestFileName(oldMicroservice); String databaseOldScriptFileName = getDatabaseScriptFileName(oldMicroservice); String oldDatabaseManagerFileName = STR + getPackageName(oldMicroservice) + STR; String oldClassesFileName = getClassesFileName(oldMicroservice); if (!serviceFileName.equals(serviceOldFileName)) { renameFileInRepository(getRepositoryName(oldMicroservice), serviceFileName, serviceOldFileName); } if (!serviceTestFileName.equals(serviceOldTestFileName)) { renameFileInRepository(getRepositoryName(oldMicroservice), serviceTestFileName, serviceOldTestFileName); } if (!newClassesFileName.equals(oldClassesFileName)) { renameFileInRepository(getRepositoryName(oldMicroservice), newClassesFileName, oldClassesFileName); } if (!servicePropertiesFileName.equals(serviceOldPropertiesFileName)) { renameFileInRepository(getRepositoryName(oldMicroservice), servicePropertiesFileName, 
serviceOldPropertiesFileName); } Iterator<String> it = files.keySet().iterator(); TraceModel traceModel = new TraceModel(); if (!files.containsKey(oldDatabaseManagerFileName) && microservice.getDatabase() != null) { generateOtherArtifacts( Template.createInitialTemplateEngine(traceModel, newDatabaseManagerFileName), microservice, gitAdapter.getGitOrganization(), databaseManager); } else if (files.containsKey(oldDatabaseManagerFileName) && microservice.getDatabase() == null) { deleteFileInLocalRepository(getRepositoryName(oldMicroservice), oldDatabaseManagerFileName); } else if (!oldDatabaseManagerFileName.equals(newDatabaseManagerFileName)) { renameFileInRepository(getRepositoryName(oldMicroservice), newDatabaseManagerFileName, oldDatabaseManagerFileName); } if (!files.containsKey(databaseOldScriptFileName) && microservice.getDatabase() != null) { FileTraceModel databaseScriptTraceModel = new FileTraceModel(traceModel, getDatabaseScriptFileName(microservice)); traceModel.addFileTraceModel(databaseScriptTraceModel); TemplateEngine databaseScriptTemplateEngine = new TemplateEngine(new InitialGenerationStrategy(), databaseScriptTraceModel); generateDatabaseScript(databaseScriptTemplateEngine, databaseScript, genericTable, microservice); } else if (files.containsKey(databaseOldScriptFileName) && microservice.getDatabase() == null) { deleteFileInLocalRepository(getRepositoryName(oldMicroservice), databaseOldScriptFileName); } else if (!databaseOldScriptFileName.equals(databaseScriptFileName)) { renameFileInRepository(getRepositoryName(oldMicroservice), databaseScriptFileName, databaseOldScriptFileName); } while (it.hasNext()) { String fileName = it.next(); JSONObject fileObject = files.get(fileName); String content = (String) fileObject.get(STR); byte[] base64decodedBytes = Base64.getDecoder().decode(content); try { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, STR + fileName + STR); content = new String(base64decodedBytes, "utf-8"); JSONObject fileTraces 
= (JSONObject) fileObject.get(STR); FileTraceModel oldFileTraceModel = FileTraceModelFactory .createFileTraceModelFromJSON(content, fileTraces, traceModel, fileName); TemplateStrategy strategy = new SynchronizationStrategy(oldFileTraceModel); TemplateEngine templateEngine = new TemplateEngine(strategy, oldFileTraceModel); if (fileName.equals(serviceOldFileName)) { oldFileTraceModel.setFileName(serviceFileName); String repositoryLocation = gitAdapter.getBaseURL() + gitAdapter.getGitOrganization() + "/" + getRepositoryName(microservice); generateNewServiceClass(templateEngine, serviceClass, microservice, repositoryLocation, genericHttpMethod, genericHttpMethodBody, genericApiResponse, genericHttpResponse, databaseConfig, databaseInstantiation, serviceInvocation, metadataDoc); } else if (fileName.equals(serviceOldTestFileName)) { oldFileTraceModel.setFileName(serviceTestFileName); generateNewServiceTest(templateEngine, serviceTest, microservice, genericTestCase); } else if (fileName.equals(oldClassesFileName)) { oldFileTraceModel.setFileName(newClassesFileName); String repositoryLocation = gitAdapter.getBaseURL() + gitAdapter.getGitOrganization() + "/" + getRepositoryName(microservice); generateNewClasses(templateEngine, classes, microservice, repositoryLocation,genericClassBody, genericClassProperty, metadataDoc); } else if (fileName.equals(databaseOldScriptFileName)) { if (microservice.getDatabase() == null) { templateEngine = null; } else { oldFileTraceModel.setFileName(databaseScriptFileName); generateDatabaseScript(templateEngine, databaseScript, genericTable, microservice); } } else if (fileName.equals(oldDatabaseManagerFileName)) { if (microservice.getDatabase() == null) { templateEngine = null; } else { oldFileTraceModel.setFileName(newDatabaseManagerFileName); generateOtherArtifacts(templateEngine, microservice, gitAdapter.getGitOrganization(), content); } } else if (fileName.equals(serviceOldPropertiesFileName)) { content = serviceProperties; 
oldFileTraceModel.setFileName(servicePropertiesFileName); generateOtherArtifacts(templateEngine, microservice, gitAdapter.getGitOrganization(), content); } else { generateOtherArtifacts(templateEngine, microservice, gitAdapter.getGitOrganization(), content); } Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, STR + fileName + STR); if (templateEngine != null) { traceModel.addFileTraceModel(templateEngine.getFileTraceModel()); } } catch (UnsupportedEncodingException e) { logger.printStackTrace(e); } } try { updateTracedFilesInRepository(getUpdatedTracedFilesForRepository(traceModel, guidances), getRepositoryName(microservice), service); } catch (UnsupportedEncodingException e) { logger.printStackTrace(e); } }
|
/**
* Synchronize the source code of a
* {@link i5.las2peer.services.codeGenerationService.models.microservice.Microservice} model with
* an updated version of that model
*
* @param microservice The updated microservice model
* @param oldMicroservice The current/old microservice model
* @param files The traced files with the current source code
* @param gitAdapter adapter for git
* @param service name of the service
* @param metadataDoc metadata string from swagger
* @throws ModelParseException thrown incase of error in model parsing
*/
|
Synchronize the source code of a <code>i5.las2peer.services.codeGenerationService.models.microservice.Microservice</code> model with an updated version of that model
|
synchronizeSourceCode
|
{
"repo_name": "PedeLa/CAE-Code-Generation-Service",
"path": "src/main/i5/las2peer/services/codeGenerationService/generators/MicroserviceSynchronization.java",
"license": "bsd-3-clause",
"size": 14634
}
|
[
"java.io.UnsupportedEncodingException",
"java.util.Base64",
"java.util.HashMap",
"java.util.Iterator",
"org.eclipse.jgit.lib.ObjectId",
"org.eclipse.jgit.lib.ObjectLoader",
"org.eclipse.jgit.lib.ObjectReader",
"org.eclipse.jgit.treewalk.TreeWalk",
"org.eclipse.jgit.treewalk.filter.PathFilter",
"org.json.simple.JSONObject"
] |
import java.io.UnsupportedEncodingException; import java.util.Base64; import java.util.HashMap; import java.util.Iterator; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.ObjectReader; import org.eclipse.jgit.treewalk.TreeWalk; import org.eclipse.jgit.treewalk.filter.PathFilter; import org.json.simple.JSONObject;
|
import java.io.*; import java.util.*; import org.eclipse.jgit.lib.*; import org.eclipse.jgit.treewalk.*; import org.eclipse.jgit.treewalk.filter.*; import org.json.simple.*;
|
[
"java.io",
"java.util",
"org.eclipse.jgit",
"org.json.simple"
] |
java.io; java.util; org.eclipse.jgit; org.json.simple;
| 1,196,634
|
/**
 * DER ordering comparison of two TLV tags: the primary sort key is the
 * encoded class byte (interpreted as an unsigned value), the secondary
 * key is the indicated tag number.
 *
 * @param tlvTag1 tag 1
 * @param tlvTag2 tag 2
 * @return a negative, zero, or positive value per the Comparator contract
 */
private int compare(TlvTag tlvTag1, TlvTag tlvTag2) {
	short class1 = Utils.maskUnsignedByteToShort(tlvTag1.getEncodedClass());
	short class2 = Utils.maskUnsignedByteToShort(tlvTag2.getEncodedClass());
	int classDiff = Short.compare(class1, class2);
	if(classDiff != 0) {
		return classDiff;
	} else{
		// Integer.compare avoids the overflow that plain subtraction could
		// cause for very large multi-byte tag numbers.
		return Integer.compare(tlvTag1.getIndicatedTagNo(), tlvTag2.getIndicatedTagNo());
	}
}
|
int function(TlvTag tlvTag1, TlvTag tlvTag2) { short class1 = Utils.maskUnsignedByteToShort(tlvTag1.getEncodedClass()); short class2 = Utils.maskUnsignedByteToShort(tlvTag2.getEncodedClass()); short classDiff = (short) (class1 - class2); if(classDiff != 0) { return classDiff; } else{ return tlvTag1.getIndicatedTagNo() - tlvTag2.getIndicatedTagNo(); } }
|
/**
* Performs Comparator's sorting based on tags
* @param tlvTag1 tag 1
* @param tlvTag2 tag 2
* @return the Comparator's compare result
*/
|
Performs Comparator's sorting based on tags
|
compare
|
{
"repo_name": "JGoeke/de.persosim.simulator",
"path": "de.persosim.simulator/src/de/persosim/simulator/tlv/TlvDataObjectComparatorDer.java",
"license": "gpl-3.0",
"size": 1507
}
|
[
"de.persosim.simulator.utils.Utils"
] |
import de.persosim.simulator.utils.Utils;
|
import de.persosim.simulator.utils.*;
|
[
"de.persosim.simulator"
] |
de.persosim.simulator;
| 2,901,728
|
/**
 * Computes the next execution time for this yearly schedule: the configured
 * month/day/hour/minute of the current year, or of next year if that moment
 * has already passed. If the retry flag was set by a failed execution, the
 * next run may instead be one hour from now (whichever comes first).
 *
 * @return the next scheduled (or retry) execution time
 */
@Override
protected Date getNextExecutionTime() {
    // calculate next scheduled execution, starting from the beginning of
    // today so the explicit month/day/hour/minute fields below fully
    // determine the resulting time
    TruncatedDate truncated = new TruncatedDate();
    truncated.truncate(TruncatedDate.TruncateLevel.DAY); //trim to beginning of today
    Calendar scheduled = new GregorianCalendar();
    scheduled.setTime(truncated);
    // map this mixin's month enum onto the corresponding Calendar constant
    switch (monthOfYear) {
    case JANUARY :
        scheduled.set(Calendar.MONTH, Calendar.JANUARY);
        break;
    case FEBRUARY :
        scheduled.set(Calendar.MONTH, Calendar.FEBRUARY);
        break;
    case MARCH :
        scheduled.set(Calendar.MONTH, Calendar.MARCH);
        break;
    case APRIL :
        scheduled.set(Calendar.MONTH, Calendar.APRIL);
        break;
    case MAY :
        scheduled.set(Calendar.MONTH, Calendar.MAY);
        break;
    case JUNE :
        scheduled.set(Calendar.MONTH, Calendar.JUNE);
        break;
    case JULY :
        scheduled.set(Calendar.MONTH, Calendar.JULY);
        break;
    case AUGUST :
        scheduled.set(Calendar.MONTH, Calendar.AUGUST);
        break;
    case SEPTEMBER :
        scheduled.set(Calendar.MONTH, Calendar.SEPTEMBER);
        break;
    case OCTOBER :
        scheduled.set(Calendar.MONTH, Calendar.OCTOBER);
        break;
    case NOVEMBER :
        scheduled.set(Calendar.MONTH, Calendar.NOVEMBER);
        break;
    case DECEMBER :
        scheduled.set(Calendar.MONTH, Calendar.DECEMBER);
        break;
    }
    // we are scheduling a particular day of month each year;
    // presumably MonthUtils clamps values like 31 to the last valid day of
    // the target month — TODO confirm against MonthUtils.calculateDayOfMonth
    this.dayOfMonth = MonthUtils.calculateDayOfMonth(this.dayOfMonth, scheduled.getTime());
    scheduled.set(Calendar.DAY_OF_MONTH, this.dayOfMonth);
    scheduled.set(Calendar.HOUR_OF_DAY, this.hourOfDay);
    scheduled.set(Calendar.MINUTE, this.minuteOfHour);
    if (scheduled.getTime().before(new Date())) {
        // the configured moment already passed this year;
        // next execution needs to be next year
        scheduled.add(Calendar.YEAR, 1);
    }
    Date nextScheduledExecution = scheduled.getTime();
    // determine if task needs to retry sooner (flag set by a failed run)
    if (this.retry) {
        this.retry = false; //clear the flag so the retry happens only once
        // retry in an hour if it's before the
        // regularly scheduled time
        Calendar retry = new GregorianCalendar();
        retry.add(Calendar.HOUR, 1);
        if (retry.before(scheduled)) {
            // execute retry instead of scheduled
            nextScheduledExecution = retry.getTime();
        }
    }
    return nextScheduledExecution;
}
|
Date function() { TruncatedDate truncated = new TruncatedDate(); truncated.truncate(TruncatedDate.TruncateLevel.DAY); Calendar scheduled = new GregorianCalendar(); scheduled.setTime(truncated); switch (monthOfYear) { case JANUARY : scheduled.set(Calendar.MONTH, Calendar.JANUARY); break; case FEBRUARY : scheduled.set(Calendar.MONTH, Calendar.FEBRUARY); break; case MARCH : scheduled.set(Calendar.MONTH, Calendar.MARCH); break; case APRIL : scheduled.set(Calendar.MONTH, Calendar.APRIL); break; case MAY : scheduled.set(Calendar.MONTH, Calendar.MAY); break; case JUNE : scheduled.set(Calendar.MONTH, Calendar.JUNE); break; case JULY : scheduled.set(Calendar.MONTH, Calendar.JULY); break; case AUGUST : scheduled.set(Calendar.MONTH, Calendar.AUGUST); break; case SEPTEMBER : scheduled.set(Calendar.MONTH, Calendar.SEPTEMBER); break; case OCTOBER : scheduled.set(Calendar.MONTH, Calendar.OCTOBER); break; case NOVEMBER : scheduled.set(Calendar.MONTH, Calendar.NOVEMBER); break; case DECEMBER : scheduled.set(Calendar.MONTH, Calendar.DECEMBER); break; } this.dayOfMonth = MonthUtils.calculateDayOfMonth(this.dayOfMonth, scheduled.getTime()); scheduled.set(Calendar.DAY_OF_MONTH, this.dayOfMonth); scheduled.set(Calendar.HOUR_OF_DAY, this.hourOfDay); scheduled.set(Calendar.MINUTE, this.minuteOfHour); if (scheduled.getTime().before(new Date())) { scheduled.add(Calendar.YEAR, 1); } Date nextScheduledExecution = scheduled.getTime(); if (this.retry) { this.retry = false; Calendar retry = new GregorianCalendar(); retry.add(Calendar.HOUR, 1); if (retry.before(scheduled)) { nextScheduledExecution = retry.getTime(); } } return nextScheduledExecution; }
|
/**
* Called by the framework to determine the next time this task should execute.
* If the 'retry' flag is set (because execution failed), then next execution will
* be rescheduled in an hour.
*
* @see ScheduledTask#getNextExecutionTime()
*/
|
Called by the framework to determine the next time this task should execute. If the 'retry' flag is set (because execution failed), then next execution will be rescheduled in an hour
|
getNextExecutionTime
|
{
"repo_name": "wheresmybrain/syp-scheduler",
"path": "src/main/java/com/wheresmybrain/syp/scheduler/mixins/YearlyScheduleMixin.java",
"license": "lgpl-3.0",
"size": 7296
}
|
[
"com.wheresmybrain.syp.scheduler.utils.TruncatedDate",
"java.util.Calendar",
"java.util.Date",
"java.util.GregorianCalendar"
] |
import com.wheresmybrain.syp.scheduler.utils.TruncatedDate; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar;
|
import com.wheresmybrain.syp.scheduler.utils.*; import java.util.*;
|
[
"com.wheresmybrain.syp",
"java.util"
] |
com.wheresmybrain.syp; java.util;
| 592,396
|
/**
 * Returns the issuer distinguished name of this CRL as an
 * {@code X500Principal}, computing and caching it on first use.
 *
 * @return an {@code X500Principal} representing the issuer DN
 */
public X500Principal getIssuerX500Principal() {
    X500Principal principal = issuerPrincipal;
    if (principal == null) {
        principal = X509CRLImpl.getIssuerX500Principal(this);
        issuerPrincipal = principal;
    }
    return principal;
}
/**
* Gets the {@code thisUpdate} date from the CRL.
* The ASN.1 definition for this is:
* <pre>
* thisUpdate ChoiceOfTime
* ChoiceOfTime ::= CHOICE {
* utcTime UTCTime,
* generalTime GeneralizedTime }
|
X500Principal function() { if (issuerPrincipal == null) { issuerPrincipal = X509CRLImpl.getIssuerX500Principal(this); } return issuerPrincipal; } /** * Gets the {@code thisUpdate} date from the CRL. * The ASN.1 definition for this is: * <pre> * thisUpdate ChoiceOfTime * ChoiceOfTime ::= CHOICE { * utcTime UTCTime, * generalTime GeneralizedTime }
|
/**
* Returns the issuer (issuer distinguished name) value from the
* CRL as an {@code X500Principal}.
* <p>
* It is recommended that subclasses override this method.
*
* @return an {@code X500Principal} representing the issuer
* distinguished name
* @since 1.4
*/
|
Returns the issuer (issuer distinguished name) value from the CRL as an X500Principal. It is recommended that subclasses override this method
|
getIssuerX500Principal
|
{
"repo_name": "shun634501730/java_source_cn",
"path": "src_en/java/security/cert/X509CRL.java",
"license": "apache-2.0",
"size": 16112
}
|
[
"javax.security.auth.x500.X500Principal"
] |
import javax.security.auth.x500.X500Principal;
|
import javax.security.auth.x500.*;
|
[
"javax.security"
] |
javax.security;
| 376,498
|
/**
 * Copy all files in the directories collected in this bucket into the single
 * HDFS output file {@code targetFile} (merge). Directory entries are skipped;
 * each regular file's bytes are appended in turn. No-op when the bucket holds
 * fewer than two paths, since a single path needs no merging.
 *
 * @throws IOException if reading a source file or writing the target fails
 */
public void copyMerge() throws IOException {
    // if there is only one path/dir in the bucket, then there is no need to merge it
    if (size() < 2) {
        return;
    }
    // here size() >= 2
    Path hdfsTargetFile = new Path(targetFile);
    OutputStream out = fs.create(hdfsTargetFile);
    try {
        for (Path dir : bucket) {
            FileStatus[] contents = fs.listStatus(dir);
            for (FileStatus status : contents) {
                // skip sub-directories; only regular files are merged
                // (isDirectory() replaces the deprecated isDir())
                if (!status.isDirectory()) {
                    InputStream in = fs.open(status.getPath());
                    try {
                        IOUtils.copyBytes(in, out, conf, false);
                    }
                    finally {
                        InputOutputUtil.close(in);
                    }
                }
            }
        }
    }
    finally {
        InputOutputUtil.close(out);
    }
}
|
void function() throws IOException { if ( size() < 2 ) { return; } Path hdfsTargetFile = new Path(targetFile); OutputStream out = fs.create(hdfsTargetFile); try { for (int i = 0; i < bucket.size(); i++) { FileStatus contents[] = fs.listStatus(bucket.get(i)); for (int k = 0; k < contents.length; k++) { if (!contents[k].isDir()) { InputStream in = fs.open(contents[k].getPath()); try { IOUtils.copyBytes(in, out, conf, false); } finally { InputOutputUtil.close(in); } } } } } finally { InputOutputUtil.close(out); } }
|
/**
* Copy all files in several directories to one output file (merge).
*
* parentDir will be "/tmp/<uuid>/"
* targetDir will be "/tmp/<uuid>/id/"
* targetFile will be "/tmp/<uuid>/id/id"
*
* merge all paths in bucket and return a new directory (targetDir), which holds merged paths
*/
|
Copy all files in several directories to one output file (merge). parentDir will be "/tmp/<uuid>/", targetDir will be "/tmp/<uuid>/id/", targetFile will be "/tmp/<uuid>/id/id". Merge all paths in bucket and return a new directory (targetDir), which holds merged paths
|
copyMerge
|
{
"repo_name": "batermj/algorithm-challenger",
"path": "Interviews/Basics/MapReduce/ISBN978-7-5123-9594-7/data-algorithms-book-master/src/main/java/org/dataalgorithms/chap29/combinesmallfilesbybuckets/BucketThread.java",
"license": "apache-2.0",
"size": 5140
}
|
[
"java.io.IOException",
"java.io.InputStream",
"java.io.OutputStream",
"org.apache.hadoop.fs.FileStatus",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.io.IOUtils",
"org.dataalgorithms.util.InputOutputUtil"
] |
import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; import org.dataalgorithms.util.InputOutputUtil;
|
import java.io.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.dataalgorithms.util.*;
|
[
"java.io",
"org.apache.hadoop",
"org.dataalgorithms.util"
] |
java.io; org.apache.hadoop; org.dataalgorithms.util;
| 2,386,619
|
/**
 * Adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the
 * children that can be created under a DataStoreReference: a DataState plus
 * the three SecureBPMN item-aware-element action kinds.
 * NOTE(review): EMF-generated code — keep code changes out of this method.
 */
@Override
protected void collectNewChildDescriptors(
        Collection<Object> newChildDescriptors, Object object) {
    super.collectNewChildDescriptors(newChildDescriptors, object);
    // descriptor for the DataState child of the ITEM_AWARE_ELEMENT__DATA_STATE feature
    newChildDescriptors.add(createChildParameter(
            Bpmn2Package.Literals.ITEM_AWARE_ELEMENT__DATA_STATE,
            Bpmn2Factory.eINSTANCE.createDataState()));
    // descriptors for the three SecureBPMN action types on the
    // ITEM_AWARE_ELEMENT__ITEM_AWARE_ELEMENT_ACTIONS feature
    newChildDescriptors
            .add(createChildParameter(
                    Bpmn2Package.Literals.ITEM_AWARE_ELEMENT__ITEM_AWARE_ELEMENT_ACTIONS,
                    Securebpmn2Factory.eINSTANCE
                            .createItemAwareElementAction()));
    newChildDescriptors
            .add(createChildParameter(
                    Bpmn2Package.Literals.ITEM_AWARE_ELEMENT__ITEM_AWARE_ELEMENT_ACTIONS,
                    Securebpmn2Factory.eINSTANCE
                            .createAtomicItemAwareElementAction()));
    newChildDescriptors
            .add(createChildParameter(
                    Bpmn2Package.Literals.ITEM_AWARE_ELEMENT__ITEM_AWARE_ELEMENT_ACTIONS,
                    Securebpmn2Factory.eINSTANCE
                            .createCompositeItemAwareElementAction()));
}
|
void function( Collection<Object> newChildDescriptors, Object object) { super.collectNewChildDescriptors(newChildDescriptors, object); newChildDescriptors.add(createChildParameter( Bpmn2Package.Literals.ITEM_AWARE_ELEMENT__DATA_STATE, Bpmn2Factory.eINSTANCE.createDataState())); newChildDescriptors .add(createChildParameter( Bpmn2Package.Literals.ITEM_AWARE_ELEMENT__ITEM_AWARE_ELEMENT_ACTIONS, Securebpmn2Factory.eINSTANCE .createItemAwareElementAction())); newChildDescriptors .add(createChildParameter( Bpmn2Package.Literals.ITEM_AWARE_ELEMENT__ITEM_AWARE_ELEMENT_ACTIONS, Securebpmn2Factory.eINSTANCE .createAtomicItemAwareElementAction())); newChildDescriptors .add(createChildParameter( Bpmn2Package.Literals.ITEM_AWARE_ELEMENT__ITEM_AWARE_ELEMENT_ACTIONS, Securebpmn2Factory.eINSTANCE .createCompositeItemAwareElementAction())); }
|
/**
* This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
* that can be created under this object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This adds <code>org.eclipse.emf.edit.command.CommandParameter</code>s describing the children that can be created under this object.
|
collectNewChildDescriptors
|
{
"repo_name": "adbrucker/SecureBPMN",
"path": "designer/src/org.activiti.designer.model.edit/src/org/eclipse/bpmn2/provider/DataStoreReferenceItemProvider.java",
"license": "apache-2.0",
"size": 7719
}
|
[
"java.util.Collection",
"org.eclipse.bpmn2.Bpmn2Factory",
"org.eclipse.bpmn2.Bpmn2Package",
"org.eclipse.securebpmn2.Securebpmn2Factory"
] |
import java.util.Collection; import org.eclipse.bpmn2.Bpmn2Factory; import org.eclipse.bpmn2.Bpmn2Package; import org.eclipse.securebpmn2.Securebpmn2Factory;
|
import java.util.*; import org.eclipse.bpmn2.*; import org.eclipse.securebpmn2.*;
|
[
"java.util",
"org.eclipse.bpmn2",
"org.eclipse.securebpmn2"
] |
java.util; org.eclipse.bpmn2; org.eclipse.securebpmn2;
| 841,079
|
/**
 * Replaces the events of this {@link NotificationConfiguration} with the
 * given set and returns this object for chaining.
 *
 * @param events the set of events for the notification configuration
 * @return this object, for method chaining
 */
public NotificationConfiguration withEvents(Set<String> events) {
    // Guard against aliasing: if the caller passes this object's own event
    // set, clear() would wipe the input before addAll() could copy it.
    if (this.events != events) {
        this.events.clear();
        this.events.addAll(events);
    }
    return this;
}
|
NotificationConfiguration function(Set<String> events) { this.events.clear(); this.events.addAll(events); return this; }
|
/**
* Sets the given events in this {@link NotificationConfiguration} object and returns this
* object.
*
* @param events
* the set of events for the notification configuration.
*/
|
Sets the given events in this <code>NotificationConfiguration</code> object and returns this object
|
withEvents
|
{
"repo_name": "loremipsumdolor/CastFast",
"path": "src/com/amazonaws/services/s3/model/NotificationConfiguration.java",
"license": "mit",
"size": 6010
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 767,278
|
/**
 * Validates the specified file as an SQL file. Beyond the checks done by
 * {@code validateFileOrDirectory}, this verifies that the path denotes a
 * regular file (not a directory) with an SQL extension.
 *
 * @param aFile the file to validate
 * @throws IllegalArgumentException when the file is not a regular SQL file
 * @throws IOException when the file does not exist or is not readable
 */
public static void validateSQLFile( final File aFile ) throws IllegalArgumentException, IOException
{
	validateFileOrDirectory( aFile );
	final boolean isRegularSqlFile = aFile.isFile() && SQLParserUtil.isSQLFile( aFile );
	if ( !isRegularSqlFile )
	{
		throw new IllegalArgumentException( "Not an SQL file: " + aFile );
	}
}
|
static void function( final File aFile ) throws IllegalArgumentException, IOException { validateFileOrDirectory( aFile ); if ( !aFile.isFile() !SQLParserUtil.isSQLFile( aFile ) ) { throw new IllegalArgumentException( STR + aFile ); } }
|
/**
* Validates the specified file as SQL file. In addition to the conditions in <code>validateFileOrDirectory</code>
* it also checks the following conditions: <br/>
* <br/>
* - the file is an actual file (and not a directory) <br/>
* - the file is an SQL file <br/>
*
* @param aFile the file to validate
* @throws IllegalArgumentException when the file is undefined (null) or not an SQL file (but a directory or file
* with an extension other than .sql)
* @throws IOException when the file does not exists or is not readable
* @see nl.hugojanssen.sqlgraph.shared.SQLParserUtil.validateFileOrDirectory
*/
|
Validates the specified file as SQL file. In addition to the conditions in <code>validateFileOrDirectory</code> it also checks the following conditions: - the file is an actual file (and not a directory) - the file is an SQL file
|
validateSQLFile
|
{
"repo_name": "nl-hugo/sqlgraph",
"path": "src/main/java/nl/hugojanssen/sqlgraph/shared/SQLParserUtil.java",
"license": "mit",
"size": 4470
}
|
[
"java.io.File",
"java.io.IOException"
] |
import java.io.File; import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,343,595
|
/**
 * Creates the domain object from the value object by delegating to the
 * map-taking overload with a fresh identity map.
 *
 * @param domainFactory used to create existing (persistent) domain objects
 * @param valueObject the value object to extract the domain fields from
 * @return the extracted domain object
 */
public static ims.hl7adtout.domain.objects.InPatientADTMessageQueue extractInPatientADTMessageQueue(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.ifInpatientADTVo valueObject)
{
	// diamond operator instead of a raw HashMap avoids the unchecked warning
	return extractInPatientADTMessageQueue(domainFactory, valueObject, new HashMap<>());
}
|
static ims.hl7adtout.domain.objects.InPatientADTMessageQueue function(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.ifInpatientADTVo valueObject) { return extractInPatientADTMessageQueue(domainFactory, valueObject, new HashMap()); }
|
/**
* Create the domain object from the value object.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param valueObject - extract the domain object fields from this.
*/
|
Create the domain object from the value object
|
extractInPatientADTMessageQueue
|
{
"repo_name": "open-health-hub/openMAXIMS",
"path": "openmaxims_workspace/ValueObjects/src/ims/emergency/vo/domain/ifInpatientADTVoAssembler.java",
"license": "agpl-3.0",
"size": 17512
}
|
[
"java.util.HashMap"
] |
import java.util.HashMap;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 526,879
|
/**
 * Recursively copies the structure (object, array, or scalar) at the
 * parser's current token into the given generator. Low level implementation
 * detail of {@code XContentGenerator#copyCurrentStructure(XContentParser)}.
 *
 * @param destination the generator to write to
 * @param parser the parser positioned on the token to copy
 * @throws IOException if reading from the parser or writing fails
 */
public static void copyCurrentStructure(XContentGenerator destination, XContentParser parser) throws IOException {
    XContentParser.Token token = parser.currentToken();
    // A field name is written first; the associated value is copied below.
    if (token == XContentParser.Token.FIELD_NAME) {
        destination.writeFieldName(parser.currentName());
        token = parser.nextToken();
    }
    if (token == XContentParser.Token.START_ARRAY) {
        destination.writeStartArray();
        while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
            copyCurrentStructure(destination, parser);
        }
        destination.writeEndArray();
    } else if (token == XContentParser.Token.START_OBJECT) {
        destination.writeStartObject();
        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
            copyCurrentStructure(destination, parser);
        }
        destination.writeEndObject();
    } else {
        // scalars and other simple tokens are copied as a single event
        copyCurrentEvent(destination, parser);
    }
}
|
static void function(XContentGenerator destination, XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); if (token == XContentParser.Token.FIELD_NAME) { destination.writeFieldName(parser.currentName()); token = parser.nextToken(); } switch (token) { case START_ARRAY: destination.writeStartArray(); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { copyCurrentStructure(destination, parser); } destination.writeEndArray(); break; case START_OBJECT: destination.writeStartObject(); while (parser.nextToken() != XContentParser.Token.END_OBJECT) { copyCurrentStructure(destination, parser); } destination.writeEndObject(); break; default: copyCurrentEvent(destination, parser); } }
|
/**
* Low level implementation detail of {@link XContentGenerator#copyCurrentStructure(XContentParser)}.
*/
|
Low level implementation detail of <code>XContentGenerator#copyCurrentStructure(XContentParser)</code>
|
copyCurrentStructure
|
{
"repo_name": "nilabhsagar/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java",
"license": "apache-2.0",
"size": 21284
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,097,197
|
/**
 * Called whenever a message is sent to a channel. Intentionally a no-op:
 * this is an extension hook that subclasses override to react to messages.
 *
 * @param evDate the time the event was received
 * @param channel the channel to which the message was sent
 * @param sender the nick of the person who sent the message
 * @param login the login of the person who sent the message
 * @param hostname the hostname of the person who sent the message
 * @param message the actual message sent to the channel
 */
protected void onMessage(Date evDate, String channel, String sender, String login, String hostname, String message) {
    // no-op by design; override in subclasses
}
|
void function(Date evDate, String channel, String sender, String login, String hostname, String message) { }
|
/**
* This method is called whenever a message is sent to a channel.
* <p/>
* The implementation of this method in the PircBot abstract class
* performs no actions and may be overridden as required.
*
* @param channel The channel to which the message was sent.
* @param sender The nick of the person who sent the message.
* @param login The login of the person who sent the message.
* @param hostname The hostname of the person who sent the message.
* @param message The actual message sent to the channel.
*/
|
This method is called whenever a message is sent to a channel. The implementation of this method in the PircBot abstract class performs no actions and may be overridden as required
|
onMessage
|
{
"repo_name": "indrora/Atomic",
"path": "application/src/main/java/org/jibble/pircbot/PircBot.java",
"license": "gpl-3.0",
"size": 122077
}
|
[
"java.util.Date"
] |
import java.util.Date;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,462,112
|
/**
 * Test case to check the iterator: verifies that it yields every added
 * element (in the order the original test established) and is exhausted
 * once all four elements have been consumed.
 */
@Test
public void testBagIterator() {
    CustomBagLL<String> bag = new CustomBagLL<>();
    Assert.assertTrue(bag.isEmpty());
    bag.add("A");
    bag.add("B");
    bag.add("B");
    bag.add("C");
    Iterator<String> itr = bag.iterator();
    // JUnit's assertEquals takes (expected, actual) — expected value first.
    Assert.assertTrue(itr.hasNext());
    Assert.assertEquals("A", itr.next());
    Assert.assertTrue(itr.hasNext());
    Assert.assertEquals("B", itr.next());
    Assert.assertTrue(itr.hasNext());
    Assert.assertEquals("B", itr.next());
    Assert.assertTrue(itr.hasNext());
    Assert.assertEquals("C", itr.next());
    // the iterator must be exhausted after all four elements
    Assert.assertFalse(itr.hasNext());
}
|
void function() { CustomBagLL<String> bag = new CustomBagLL<>(); Assert.assertTrue(bag.isEmpty()); bag.add("A"); bag.add("B"); bag.add("B"); bag.add("C"); Iterator<String> itr = bag.iterator(); Assert.assertTrue(itr.hasNext()); Assert.assertEquals(itr.next(), "A"); Assert.assertTrue(itr.hasNext()); Assert.assertEquals(itr.next(), "B"); Assert.assertTrue(itr.hasNext()); Assert.assertEquals(itr.next(), "B"); Assert.assertTrue(itr.hasNext()); }
|
/**
* Test case to check iterator
*/
|
Test case to check iterator
|
testBagIterator
|
{
"repo_name": "deepak-malik/Data-Structures-In-Java",
"path": "test/com/deepak/data/structures/Bag/CustomBagLLTest.java",
"license": "mit",
"size": 3471
}
|
[
"java.util.Iterator",
"org.junit.Assert"
] |
import java.util.Iterator; import org.junit.Assert;
|
import java.util.*; import org.junit.*;
|
[
"java.util",
"org.junit"
] |
java.util; org.junit;
| 612,790
|
/**
 * The premium content resource, exposed as an observable {@link LiveData}
 * stream of {@link ContentResource} values.
 */
LiveData<ContentResource> getPremiumContent();
|
LiveData<ContentResource> getPremiumContent();
|
/**
* The premium content URL.
*/
|
The premium content URL
|
getPremiumContent
|
{
"repo_name": "android/play-billing-samples",
"path": "ClassyTaxiJava/app/src/main/java/com/sample/android/classytaxijava/data/network/firebase/ServerFunctions.java",
"license": "apache-2.0",
"size": 2282
}
|
[
"androidx.lifecycle.LiveData",
"com.sample.android.classytaxijava.data.ContentResource"
] |
import androidx.lifecycle.LiveData; import com.sample.android.classytaxijava.data.ContentResource;
|
import androidx.lifecycle.*; import com.sample.android.classytaxijava.data.*;
|
[
"androidx.lifecycle",
"com.sample.android"
] |
androidx.lifecycle; com.sample.android;
| 838,868
|
/**
 * Returns the EPSG identifier for a crs object.
 *
 * <p>Recognizes names of the form {@code "epsg:<number>"} (prefix matched
 * case-insensitively). Any other name — including one with a non-numeric
 * suffix — yields {@code null}.
 *
 * @param crs the coordinate reference system to inspect
 * @return the epsg identifier, or null if the CRS has no epsg code
 */
public static Integer epsgCode(CoordinateReferenceSystem crs) {
    String name = crs.getName();
    if (name != null) {
        String[] split = name.split(":");
        if (split.length == 2 && "epsg".equalsIgnoreCase(split[0])) {
            try {
                return Integer.parseInt(split[1]);
            } catch (NumberFormatException e) {
                // A name such as "epsg:foo" previously escaped as an
                // unchecked parse exception; honor the documented
                // "null when no epsg code" contract instead.
                return null;
            }
        }
    }
    return null;
}
|
static Integer function(CoordinateReferenceSystem crs) { String name = crs.getName(); if (name != null) { String[] split = name.split(":"); if (split.length == 2 && "epsg".equalsIgnoreCase(split[0])) { return Integer.parseInt(split[1]); } } return null; }
|
/**
* Returns the EPSG identifier for a crs object.
*
* @return The epsg identifier, or null if the CRS has no epsg code.
*/
|
Returns the EPSG identifier for a crs object
|
epsgCode
|
{
"repo_name": "ryantxu/jeo",
"path": "core/src/main/java/io/jeo/proj/Proj.java",
"license": "apache-2.0",
"size": 15807
}
|
[
"org.osgeo.proj4j.CoordinateReferenceSystem"
] |
import org.osgeo.proj4j.CoordinateReferenceSystem;
|
import org.osgeo.proj4j.*;
|
[
"org.osgeo.proj4j"
] |
org.osgeo.proj4j;
| 2,512,845
|
/**
 * Sets the timeout used while waiting for the Evictor to shut down when
 * this pool is closed.
 *
 * @param evictorShutdownTimeout the Evictor shutdown timeout;
 *        PoolImplUtils.nonNull presumably substitutes
 *        {@code BaseObjectPoolConfig.DEFAULT_EVICTOR_SHUTDOWN_TIMEOUT}
 *        when this is {@code null} — confirm against that helper
 */
public final void setEvictorShutdownTimeout(final Duration evictorShutdownTimeout) {
    this.evictorShutdownTimeoutDuration = PoolImplUtils.nonNull(evictorShutdownTimeout, BaseObjectPoolConfig.DEFAULT_EVICTOR_SHUTDOWN_TIMEOUT);
}
|
final void function(final Duration evictorShutdownTimeout) { this.evictorShutdownTimeoutDuration = PoolImplUtils.nonNull(evictorShutdownTimeout, BaseObjectPoolConfig.DEFAULT_EVICTOR_SHUTDOWN_TIMEOUT); }
|
/**
 * Sets the timeout that will be used when waiting for the Evictor to shut down if this pool is closed and it is the
 * only pool still using the value for the Evictor.
 *
 * @param evictorShutdownTimeout the timeout that will be used while waiting for the Evictor
 * to shut down.
* @since 2.10.0
*/
|
Sets the timeout that will be used when waiting for the Evictor to shut down if this pool is closed and it is the only pool still using the value for the Evictor
|
setEvictorShutdownTimeout
|
{
"repo_name": "apache/tomcat",
"path": "java/org/apache/tomcat/dbcp/pool2/impl/BaseGenericObjectPool.java",
"license": "apache-2.0",
"size": 75879
}
|
[
"java.time.Duration"
] |
import java.time.Duration;
|
import java.time.*;
|
[
"java.time"
] |
java.time;
| 196,508
|
/**
 * Adds a Recipe for the Forming Press.
 *
 * @param aItemToImprint the item to be imprinted
 * @param aForm          the form/mold applied to the item
 * @param aImprintedItem the resulting imprinted item
 * @param aDuration      recipe duration (presumably in ticks — confirm against other recipe adders)
 * @param aEUt           energy usage (presumably EU per tick — confirm)
 * @return presumably true if the recipe was added successfully — confirm against implementation
 */
public boolean addFormingPressRecipe(ItemStack aItemToImprint, ItemStack aForm, ItemStack aImprintedItem, int aDuration, int aEUt);
|
boolean function(ItemStack aItemToImprint, ItemStack aForm, ItemStack aImprintedItem, int aDuration, int aEUt);
|
/**
* Adds a Recipe for the Forming Press
*/
|
Adds a Recipe for the Forming Press
|
addFormingPressRecipe
|
{
"repo_name": "Tictim/TTMPMOD",
"path": "libs_n/gregtech/api/interfaces/internal/IGT_RecipeAdder.java",
"license": "lgpl-2.1",
"size": 20136
}
|
[
"net.minecraft.item.ItemStack"
] |
import net.minecraft.item.ItemStack;
|
import net.minecraft.item.*;
|
[
"net.minecraft.item"
] |
net.minecraft.item;
| 1,139,219
|
/**
 * Convenience overload of the two-argument
 * {@code findMapDataByRootPrimaryKey(String, boolean)} with the boolean
 * flag fixed to {@code false}.
 *
 * <p>NOTE(review): the flag's meaning is defined by the two-argument
 * sibling, which is not visible here — confirm its semantics there.
 *
 * @param primaryKey the root primaryKey
 * @return the key-value data resolved for the given root primaryKey
 */
@Transactional(readOnly = true)
public Map<String, String> findMapDataByRootPrimaryKey(String primaryKey) {
    return findMapDataByRootPrimaryKey(primaryKey, false);
}
|
@Transactional(readOnly = true) Map<String, String> function(String primaryKey) { return findMapDataByRootPrimaryKey(primaryKey, false); }
|
/**
* Map directly returns the corresponding key-value data structure based on the root primaryKey
 * Note: if child nodes are returned along with the root, ensure the primaryKey of every node is unique; otherwise entries may unexpectedly overwrite one another in the returned map.
 * @param primaryKey the root primaryKey
* @return
*/
|
Map directly returns the corresponding key-value data structure based on the root primaryKey. Note: if child nodes are returned along with the root, ensure the primaryKey of every node is unique; otherwise entries may unexpectedly overwrite one another in the returned map
|
findMapDataByRootPrimaryKey
|
{
"repo_name": "mugenya/arch_app",
"path": "src/main/java/lab/s2jh/module/sys/service/DataDictService.java",
"license": "lgpl-3.0",
"size": 5942
}
|
[
"java.util.Map",
"org.springframework.transaction.annotation.Transactional"
] |
import java.util.Map; import org.springframework.transaction.annotation.Transactional;
|
import java.util.*; import org.springframework.transaction.annotation.*;
|
[
"java.util",
"org.springframework.transaction"
] |
java.util; org.springframework.transaction;
| 1,027,508
|
/**
 * Load VCard information for a connected user. XMPPConnection should be
 * authenticated and not anonymous.
 *
 * <p>Delegates to the two-argument {@code load} overload with a
 * {@code null} second argument.
 *
 * @param connection the authenticated connection to load the VCard over
 * @throws NoResponseException   if the server does not respond
 * @throws XMPPErrorException    if the server returns an error
 * @throws NotConnectedException if the connection is not connected
 * @throws InterruptedException  if the calling thread is interrupted
 * @deprecated use {@code VCardManager#loadVCard()} instead.
 */
@Deprecated
public void load(XMPPConnection connection) throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    load(connection, null);
}
|
void function(XMPPConnection connection) throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException { load(connection, null); }
|
/**
* Load VCard information for a connected user. XMPPConnection should be authenticated
* and not anonymous.
* @throws XMPPErrorException
* @throws NoResponseException
* @throws NotConnectedException
* @throws InterruptedException
* @deprecated use {@link VCardManager#loadVCard()} instead.
*/
|
Load VCard information for a connected user. XMPPConnection should be authenticated and not anonymous
|
load
|
{
"repo_name": "andrey42/Smack",
"path": "smack-extensions/src/main/java/org/jivesoftware/smackx/vcardtemp/packet/VCard.java",
"license": "apache-2.0",
"size": 25445
}
|
[
"org.jivesoftware.smack.SmackException",
"org.jivesoftware.smack.XMPPConnection",
"org.jivesoftware.smack.XMPPException"
] |
import org.jivesoftware.smack.SmackException; import org.jivesoftware.smack.XMPPConnection; import org.jivesoftware.smack.XMPPException;
|
import org.jivesoftware.smack.*;
|
[
"org.jivesoftware.smack"
] |
org.jivesoftware.smack;
| 1,430,756
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.