| method (string, 13–441k chars) | clean_method (string, 7–313k chars) | doc (string, 17–17.3k chars) | comment (string, 3–1.42k chars) | method_name (string, 1–273 chars) | extra (dict) | imports (list) | imports_info (string, 19–34.8k chars) | cluster_imports_info (string, 15–3.66k chars) | libraries (list) | libraries_info (string, 6–661 chars) | id (int64, 0–2.92M) |
|---|---|---|---|---|---|---|---|---|---|---|---|
public void execute(JobExecutionContext context)
throws JobExecutionException {
LOG.info("TRIGGER: " + context.getTrigger().getKey());
}
|
void function(JobExecutionContext context) throws JobExecutionException { LOG.info(STR + context.getTrigger().getKey()); }
|
/**
* <p>
* Called by the <code>{@link org.quartz.Scheduler}</code> when a
* <code>{@link org.quartz.Trigger}</code> fires that is associated with
* the <code>Job</code>.
* </p>
*
* @throws JobExecutionException
* if there is an exception while executing the job.
*/
|
Called by the <code>org.quartz.Scheduler</code> when a <code>org.quartz.Trigger</code> fires that is associated with the <code>Job</code>.
|
execute
|
{
"repo_name": "zhongfuhua/tsp-quartz",
"path": "tsp-quartz-parent/tsp-quartz-distribution/examples/src/main/java/org/quartz/examples/example14/TriggerEchoJob.java",
"license": "apache-2.0",
"size": 1845
}
|
[
"org.quartz.JobExecutionContext",
"org.quartz.JobExecutionException"
] |
import org.quartz.JobExecutionContext; import org.quartz.JobExecutionException;
|
import org.quartz.*;
|
[
"org.quartz"
] |
org.quartz;
| 1,845,917
|
public static java.util.List extractPrognosticGroupingConfigList(ims.domain.ILightweightDomainFactory domainFactory, ims.clinicaladmin.vo.PrognosticGroupingCongfigVoCollection voCollection)
{
return extractPrognosticGroupingConfigList(domainFactory, voCollection, null, new HashMap());
}
|
static java.util.List function(ims.domain.ILightweightDomainFactory domainFactory, ims.clinicaladmin.vo.PrognosticGroupingCongfigVoCollection voCollection) { return extractPrognosticGroupingConfigList(domainFactory, voCollection, null, new HashMap()); }
|
/**
* Create the ims.oncology.configuration.domain.objects.PrognosticGroupingConfig list from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/
|
Create the ims.oncology.configuration.domain.objects.PrognosticGroupingConfig list from the value object collection
|
extractPrognosticGroupingConfigList
|
{
"repo_name": "open-health-hub/openmaxims-linux",
"path": "openmaxims_workspace/ValueObjects/src/ims/clinicaladmin/vo/domain/PrognosticGroupingCongfigVoAssembler.java",
"license": "agpl-3.0",
"size": 28758
}
|
[
"java.util.HashMap"
] |
import java.util.HashMap;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 248,967
|
public Observable<ServiceResponse<Void>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String virtualHubName, String routeTableName) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (virtualHubName == null) {
throw new IllegalArgumentException("Parameter virtualHubName is required and cannot be null.");
}
if (routeTableName == null) {
throw new IllegalArgumentException("Parameter routeTableName is required and cannot be null.");
}
|
Observable<ServiceResponse<Void>> function(String resourceGroupName, String virtualHubName, String routeTableName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (virtualHubName == null) { throw new IllegalArgumentException(STR); } if (routeTableName == null) { throw new IllegalArgumentException(STR); }
|
/**
* Deletes a RouteTable.
*
* @param resourceGroupName The resource group name of the RouteTable.
* @param virtualHubName The name of the VirtualHub.
* @param routeTableName The name of the RouteTable.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceResponse} object if successful.
*/
|
Deletes a RouteTable
|
beginDeleteWithServiceResponseAsync
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2020_05_01/src/main/java/com/microsoft/azure/management/network/v2020_05_01/implementation/HubRouteTablesInner.java",
"license": "mit",
"size": 45135
}
|
[
"com.microsoft.rest.ServiceResponse"
] |
import com.microsoft.rest.ServiceResponse;
|
import com.microsoft.rest.*;
|
[
"com.microsoft.rest"
] |
com.microsoft.rest;
| 1,081,610
|
public Date getLatestDate() {
CriteriaBuilder cb = em.getCriteriaBuilder();
CriteriaQuery<GitlabMetricMeasurement> query =
cb.createQuery(GitlabMetricMeasurement.class);
Root<GitlabMetricMeasurement> root =
query.from(GitlabMetricMeasurement.class);
query.select(root);
query.orderBy(cb.desc(root.get(GitlabMetricMeasurement_.timeStamp)));
Date latest;
try {
GitlabMetricMeasurement m = em.createQuery(query).setMaxResults(1).getSingleResult();
latest = m.getTimeStamp();
} catch (NoResultException nre) {
latest = null;
}
return latest;
}
|
Date function() { CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery<GitlabMetricMeasurement> query = cb.createQuery(GitlabMetricMeasurement.class); Root<GitlabMetricMeasurement> root = query.from(GitlabMetricMeasurement.class); query.select(root); query.orderBy(cb.desc(root.get(GitlabMetricMeasurement_.timeStamp))); Date latest; try { GitlabMetricMeasurement m = em.createQuery(query).setMaxResults(1).getSingleResult(); latest = m.getTimeStamp(); } catch (NoResultException nre) { latest = null; } return latest; }
|
/**
* Get the latest date of measurement snapshots
* @return
*/
|
Get the latest date of measurement snapshots
|
getLatestDate
|
{
"repo_name": "schlotze/u-qasar.platform",
"path": "src/main/java/eu/uqasar/service/dataadapter/GitlabDataService.java",
"license": "apache-2.0",
"size": 10637
}
|
[
"eu.uqasar.model.measure.GitlabMetricMeasurement",
"java.util.Date",
"javax.persistence.NoResultException",
"javax.persistence.criteria.CriteriaBuilder",
"javax.persistence.criteria.CriteriaQuery",
"javax.persistence.criteria.Root"
] |
import eu.uqasar.model.measure.GitlabMetricMeasurement; import java.util.Date; import javax.persistence.NoResultException; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Root;
|
import eu.uqasar.model.measure.*; import java.util.*; import javax.persistence.*; import javax.persistence.criteria.*;
|
[
"eu.uqasar.model",
"java.util",
"javax.persistence"
] |
eu.uqasar.model; java.util; javax.persistence;
| 2,155,426
|
void generate(BlockManager blockManager, CoreChunk view, Random rand, int posX, int posY, int posZ);
|
void generate(BlockManager blockManager, CoreChunk view, Random rand, int posX, int posY, int posZ);
|
/**
* Generates a tree at the given position.
*
* @param blockManager the block manager to resolve the block uris
* @param view Chunk view
* @param rand The random number generator
* @param posX Relative position on the x-axis (wrt. the chunk)
* @param posY Relative position on the y-axis (wrt. the chunk)
* @param posZ Relative position on the z-axis (wrt. the chunk)
*/
|
Generates a tree at the given position
|
generate
|
{
"repo_name": "Vizaxo/Terasology",
"path": "modules/Core/src/main/java/org/terasology/core/world/generator/trees/TreeGenerator.java",
"license": "apache-2.0",
"size": 1432
}
|
[
"org.terasology.utilities.random.Random",
"org.terasology.world.block.BlockManager",
"org.terasology.world.chunks.CoreChunk"
] |
import org.terasology.utilities.random.Random; import org.terasology.world.block.BlockManager; import org.terasology.world.chunks.CoreChunk;
|
import org.terasology.utilities.random.*; import org.terasology.world.block.*; import org.terasology.world.chunks.*;
|
[
"org.terasology.utilities",
"org.terasology.world"
] |
org.terasology.utilities; org.terasology.world;
| 1,083,482
|
private BitSetSupport performAND(BitSetSupport tidsetI, BitSetSupport tidsetJ) {
// Create the new diffset
BitSetSupport bitsetSupportIJ = new BitSetSupport();
// Calculate the diffset
bitsetSupportIJ.bitset = (BitSet)tidsetJ.bitset.clone();
bitsetSupportIJ.bitset.andNot(tidsetI.bitset);
// Calculate the support
bitsetSupportIJ.support = tidsetI.support - bitsetSupportIJ.bitset.cardinality();
// return the new diffset
return bitsetSupportIJ;
}
|
BitSetSupport function(BitSetSupport tidsetI, BitSetSupport tidsetJ) { BitSetSupport bitsetSupportIJ = new BitSetSupport(); bitsetSupportIJ.bitset = (BitSet)tidsetJ.bitset.clone(); bitsetSupportIJ.bitset.andNot(tidsetI.bitset); bitsetSupportIJ.support = tidsetI.support - bitsetSupportIJ.bitset.cardinality(); return bitsetSupportIJ; }
|
/**
* Perform the intersection of two diffsets for itemsets containing more than one item.
* @param tidsetI the first diffset
* @param tidsetJ the second diffset
* @return the resulting diffset and its support
*/
|
Perform the intersection of two diffsets for itemsets containing more than one item
|
performAND
|
{
"repo_name": "pommedeterresautee/spmf",
"path": "ca/pfv/spmf/algorithms/frequentpatterns/eclat/AlgoDEclat_Bitset.java",
"license": "gpl-3.0",
"size": 6609
}
|
[
"java.util.BitSet"
] |
import java.util.BitSet;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,851,946
|
public static List<FunctionDecl> retrieveUsedStoredFunctions(MetadataProvider metadataProvider,
Expression expression, List<FunctionSignature> declaredFunctions, List<FunctionDecl> inputFunctionDecls,
IFunctionCollector functionCollector, IFunctionParser functionParser,
IFunctionNormalizer functionNormalizer) throws CompilationException {
List<FunctionDecl> functionDecls =
inputFunctionDecls == null ? new ArrayList<>() : new ArrayList<>(inputFunctionDecls);
if (expression == null) {
return functionDecls;
}
String value = (String) metadataProvider.getConfig().get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS);
boolean includePrivateFunctions = (value != null) ? Boolean.valueOf(value.toLowerCase()) : false;
Set<CallExpr> functionCalls = functionCollector.getFunctionCalls(expression);
for (CallExpr functionCall : functionCalls) {
FunctionSignature signature = functionCall.getFunctionSignature();
if (declaredFunctions != null && declaredFunctions.contains(signature)) {
continue;
}
if (signature.getNamespace() == null) {
signature.setNamespace(metadataProvider.getDefaultDataverseName());
}
String namespace = signature.getNamespace();
// Checks the existence of the referred dataverse.
try {
if (!namespace.equals(FunctionConstants.ASTERIX_NS)
&& !namespace.equals(AlgebricksBuiltinFunctions.ALGEBRICKS_NS)
&& metadataProvider.findDataverse(namespace) == null) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, functionCall.getSourceLocation(),
"In function call \"" + namespace + "." + signature.getName() + "(...)\", the dataverse \""
+ namespace + "\" cannot be found!");
}
} catch (AlgebricksException e) {
throw new CompilationException(e);
}
Function function;
try {
function = lookupUserDefinedFunctionDecl(metadataProvider.getMetadataTxnContext(), signature);
} catch (AlgebricksException e) {
throw new CompilationException(e);
}
if (function == null) {
FunctionSignature normalizedSignature = functionNormalizer == null ? signature
: functionNormalizer.normalizeBuiltinFunctionSignature(signature,
functionCall.getSourceLocation());
if (BuiltinFunctions.isBuiltinCompilerFunction(normalizedSignature, includePrivateFunctions)) {
continue;
}
StringBuilder messageBuilder = new StringBuilder();
if (!functionDecls.isEmpty()) {
messageBuilder.append("function " + functionDecls.get(functionDecls.size() - 1).getSignature()
+ " depends upon function " + signature + " which is undefined");
} else {
messageBuilder.append("function " + signature + " is not defined");
}
throw new CompilationException(ErrorCode.COMPILATION_ERROR, functionCall.getSourceLocation(),
messageBuilder.toString());
}
if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)
|| function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_SQLPP)) {
FunctionDecl functionDecl = functionParser.getFunctionDecl(function);
if (functionDecl != null) {
if (functionDecls.contains(functionDecl)) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, functionCall.getSourceLocation(),
"Recursive invocation " + functionDecls.get(functionDecls.size() - 1).getSignature()
+ " <==> " + functionDecl.getSignature());
}
functionDecls.add(functionDecl);
functionDecls = retrieveUsedStoredFunctions(metadataProvider, functionDecl.getFuncBody(),
declaredFunctions, functionDecls, functionCollector, functionParser, functionNormalizer);
}
}
}
return functionDecls;
}
|
static List<FunctionDecl> function(MetadataProvider metadataProvider, Expression expression, List<FunctionSignature> declaredFunctions, List<FunctionDecl> inputFunctionDecls, IFunctionCollector functionCollector, IFunctionParser functionParser, IFunctionNormalizer functionNormalizer) throws CompilationException { List<FunctionDecl> functionDecls = inputFunctionDecls == null ? new ArrayList<>() : new ArrayList<>(inputFunctionDecls); if (expression == null) { return functionDecls; } String value = (String) metadataProvider.getConfig().get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS); boolean includePrivateFunctions = (value != null) ? Boolean.valueOf(value.toLowerCase()) : false; Set<CallExpr> functionCalls = functionCollector.getFunctionCalls(expression); for (CallExpr functionCall : functionCalls) { FunctionSignature signature = functionCall.getFunctionSignature(); if (declaredFunctions != null && declaredFunctions.contains(signature)) { continue; } if (signature.getNamespace() == null) { signature.setNamespace(metadataProvider.getDefaultDataverseName()); } String namespace = signature.getNamespace(); try { if (!namespace.equals(FunctionConstants.ASTERIX_NS) && !namespace.equals(AlgebricksBuiltinFunctions.ALGEBRICKS_NS) && metadataProvider.findDataverse(namespace) == null) { throw new CompilationException(ErrorCode.COMPILATION_ERROR, functionCall.getSourceLocation(), STRSTR.STR(...)\STRSTR\STR); } } catch (AlgebricksException e) { throw new CompilationException(e); } Function function; try { function = lookupUserDefinedFunctionDecl(metadataProvider.getMetadataTxnContext(), signature); } catch (AlgebricksException e) { throw new CompilationException(e); } if (function == null) { FunctionSignature normalizedSignature = functionNormalizer == null ? signature : functionNormalizer.normalizeBuiltinFunctionSignature(signature, functionCall.getSourceLocation()); if (BuiltinFunctions.isBuiltinCompilerFunction(normalizedSignature, includePrivateFunctions)) { continue; } StringBuilder messageBuilder = new StringBuilder(); if (!functionDecls.isEmpty()) { messageBuilder.append(STR + functionDecls.get(functionDecls.size() - 1).getSignature() + STR + signature + STR); } else { messageBuilder.append(STR + signature + STR); } throw new CompilationException(ErrorCode.COMPILATION_ERROR, functionCall.getSourceLocation(), messageBuilder.toString()); } if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL) function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_SQLPP)) { FunctionDecl functionDecl = functionParser.getFunctionDecl(function); if (functionDecl != null) { if (functionDecls.contains(functionDecl)) { throw new CompilationException(ErrorCode.COMPILATION_ERROR, functionCall.getSourceLocation(), STR + functionDecls.get(functionDecls.size() - 1).getSignature() + STR + functionDecl.getSignature()); } functionDecls.add(functionDecl); functionDecls = retrieveUsedStoredFunctions(metadataProvider, functionDecl.getFuncBody(), declaredFunctions, functionDecls, functionCollector, functionParser, functionNormalizer); } } } return functionDecls; }
|
/**
* Retrieve stored functions (from CREATE FUNCTION statements) that have been
* used in an expression.
*
* @param metadataProvider,
* the metadata provider
* @param expression,
* the expression for analysis
* @param declaredFunctions,
* a set of declared functions in the query, which can potentially
* override stored functions.
* @param functionCollector,
* for collecting function calls in the <code>expression</code>
* @param functionParser,
 *            for parsing stored functions in the string representation.
* @param functionNormalizer,
* for normalizing function names.
* @throws CompilationException
*/
|
Retrieve stored functions (from CREATE FUNCTION statements) that have been used in an expression
|
retrieveUsedStoredFunctions
|
{
"repo_name": "ecarm002/incubator-asterixdb",
"path": "asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java",
"license": "apache-2.0",
"size": 10528
}
|
[
"java.util.ArrayList",
"java.util.List",
"java.util.Set",
"org.apache.asterix.common.exceptions.CompilationException",
"org.apache.asterix.common.exceptions.ErrorCode",
"org.apache.asterix.common.functions.FunctionConstants",
"org.apache.asterix.common.functions.FunctionSignature",
"org.apache.asterix.lang.common.base.Expression",
"org.apache.asterix.lang.common.expression.CallExpr",
"org.apache.asterix.lang.common.statement.FunctionDecl",
"org.apache.asterix.metadata.declared.MetadataProvider",
"org.apache.asterix.metadata.entities.Function",
"org.apache.asterix.om.functions.BuiltinFunctions",
"org.apache.hyracks.algebricks.common.exceptions.AlgebricksException",
"org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions"
] |
import java.util.ArrayList; import java.util.List; import java.util.Set; import org.apache.asterix.common.exceptions.CompilationException; import org.apache.asterix.common.exceptions.ErrorCode; import org.apache.asterix.common.functions.FunctionConstants; import org.apache.asterix.common.functions.FunctionSignature; import org.apache.asterix.lang.common.base.Expression; import org.apache.asterix.lang.common.expression.CallExpr; import org.apache.asterix.lang.common.statement.FunctionDecl; import org.apache.asterix.metadata.declared.MetadataProvider; import org.apache.asterix.metadata.entities.Function; import org.apache.asterix.om.functions.BuiltinFunctions; import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException; import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
|
import java.util.*; import org.apache.asterix.common.exceptions.*; import org.apache.asterix.common.functions.*; import org.apache.asterix.lang.common.base.*; import org.apache.asterix.lang.common.expression.*; import org.apache.asterix.lang.common.statement.*; import org.apache.asterix.metadata.declared.*; import org.apache.asterix.metadata.entities.*; import org.apache.asterix.om.functions.*; import org.apache.hyracks.algebricks.common.exceptions.*; import org.apache.hyracks.algebricks.core.algebra.functions.*;
|
[
"java.util",
"org.apache.asterix",
"org.apache.hyracks"
] |
java.util; org.apache.asterix; org.apache.hyracks;
| 44,772
|
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
String trackLine = null;
String nextLine;
final List<String> errors = new ArrayList<String>();
final HashMap<String, List<Track>> loadedTracks = new HashMap();
List<ResourceLocator> aSync = new ArrayList();
// Index aware sessions do not have means to set genome, or if they do we don't use it
Genome genome = GenomeManager.getInstance().getCurrentGenome();
if (genome != null) {
IGV.getInstance().setGenomeTracks(genome.getGeneTrack());
}
while ((nextLine = reader.readLine()) != null) {
ResourceLocator locator = null;
try {
if (nextLine.startsWith("#")) {
continue;
} else if (nextLine.startsWith("browser")) {
parseBrowserLine(nextLine, session);
} else if (nextLine.startsWith("track")) {
trackLine = nextLine;
String dataURL = getDataURL(trackLine);
if (dataURL != null) {
locator = new ResourceLocator(dataURL);
String indexURL = getIndexURL(trackLine);
if (indexURL != null) {
locator.setIndexPath(indexURL);
}
String coverageURL = getCoverageURL(trackLine);
if (coverageURL != null) {
locator.setCoverage(coverageURL);
}
loadedTracks.put(dataURL, igv.load(locator));
}
} else {
locator = parseResourceLine(nextLine);
}
if (locator != null) {
locator.setTrackLine(trackLine);
// Alignment tracks must be loaded synchronously
if (isAlignmentFile(locator.getPath())) {
TrackPanel panel = igv.getPanelFor(locator);
panel.addTracks(igv.load(locator));
} else {
aSync.add(locator);
}
trackLine = null; // Reset for next time
locator = null;
}
} catch (Exception e) {
log.error("Error loading resource " + locator.getPath(), e);
String ms = "<b>" + locator.getPath() + "</b><br> " + e.toString() + "<br>";
errors.add(ms);
}
}
loadAsynchronous(aSync, loadedTracks, errors);
if (errors.size() > 0) {
displayErrors(errors);
}
}
|
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); String trackLine = null; String nextLine; final List<String> errors = new ArrayList<String>(); final HashMap<String, List<Track>> loadedTracks = new HashMap(); List<ResourceLocator> aSync = new ArrayList(); Genome genome = GenomeManager.getInstance().getCurrentGenome(); if (genome != null) { IGV.getInstance().setGenomeTracks(genome.getGeneTrack()); } while ((nextLine = reader.readLine()) != null) { ResourceLocator locator = null; try { if (nextLine.startsWith("#")) { continue; } else if (nextLine.startsWith(STR)) { parseBrowserLine(nextLine, session); } else if (nextLine.startsWith("track")) { trackLine = nextLine; String dataURL = getDataURL(trackLine); if (dataURL != null) { locator = new ResourceLocator(dataURL); String indexURL = getIndexURL(trackLine); if (indexURL != null) { locator.setIndexPath(indexURL); } String coverageURL = getCoverageURL(trackLine); if (coverageURL != null) { locator.setCoverage(coverageURL); } loadedTracks.put(dataURL, igv.load(locator)); } } else { locator = parseResourceLine(nextLine); } if (locator != null) { locator.setTrackLine(trackLine); if (isAlignmentFile(locator.getPath())) { TrackPanel panel = igv.getPanelFor(locator); panel.addTracks(igv.load(locator)); } else { aSync.add(locator); } trackLine = null; locator = null; } } catch (Exception e) { log.error(STR + locator.getPath(), e); String ms = "<b>" + locator.getPath() + STR + e.toString() + "<br>"; errors.add(ms); } } loadAsynchronous(aSync, loadedTracks, errors); if (errors.size() > 0) { displayErrors(errors); } }
|
/**
 * Load an index-aware session from the given stream.
*
* @param inputStream
* @param session
* @param sessionPath
* @throws IOException
*/
|
Load an index-aware session from the given stream
|
loadSession
|
{
"repo_name": "popitsch/varan-gie",
"path": "src/org/broad/igv/session/IndexAwareSessionReader.java",
"license": "mit",
"size": 9195
}
|
[
"java.io.BufferedReader",
"java.io.InputStreamReader",
"java.util.ArrayList",
"java.util.HashMap",
"java.util.List",
"org.broad.igv.feature.genome.Genome",
"org.broad.igv.feature.genome.GenomeManager",
"org.broad.igv.track.Track",
"org.broad.igv.ui.IGV",
"org.broad.igv.ui.panel.TrackPanel",
"org.broad.igv.util.ResourceLocator"
] |
import java.io.BufferedReader; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.broad.igv.feature.genome.Genome; import org.broad.igv.feature.genome.GenomeManager; import org.broad.igv.track.Track; import org.broad.igv.ui.IGV; import org.broad.igv.ui.panel.TrackPanel; import org.broad.igv.util.ResourceLocator;
|
import java.io.*; import java.util.*; import org.broad.igv.feature.genome.*; import org.broad.igv.track.*; import org.broad.igv.ui.*; import org.broad.igv.ui.panel.*; import org.broad.igv.util.*;
|
[
"java.io",
"java.util",
"org.broad.igv"
] |
java.io; java.util; org.broad.igv;
| 2,070,030
|
@ThreadConfined(type = ThreadType.JFX)
public void markGroupSeen(DrawableGroup group, boolean seen) {
db.markGroupSeen(group.getGroupKey(), seen);
group.setSeen(seen);
if (seen) {
unSeenGroups.removeAll(group);
} else if (unSeenGroups.contains(group) == false) {
unSeenGroups.add(group);
FXCollections.sort(unSeenGroups, sortBy.getGrpComparator(sortOrder));
}
}
|
@ThreadConfined(type = ThreadType.JFX) void function(DrawableGroup group, boolean seen) { db.markGroupSeen(group.getGroupKey(), seen); group.setSeen(seen); if (seen) { unSeenGroups.removeAll(group); } else if (unSeenGroups.contains(group) == false) { unSeenGroups.add(group); FXCollections.sort(unSeenGroups, sortBy.getGrpComparator(sortOrder)); } }
|
/**
* 'mark' the given group as seen. This removes it from the queue of groups
* to review, and is persisted in the drawable db.
*
* @param group the {@link DrawableGroup} to mark as seen
*/
|
'mark' the given group as seen. This removes it from the queue of groups to review, and is persisted in the drawable db
|
markGroupSeen
|
{
"repo_name": "sidheshenator/autopsy",
"path": "ImageGallery/src/org/sleuthkit/autopsy/imagegallery/grouping/GroupManager.java",
"license": "apache-2.0",
"size": 30074
}
|
[
"org.sleuthkit.autopsy.coreutils.ThreadConfined"
] |
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
|
import org.sleuthkit.autopsy.coreutils.*;
|
[
"org.sleuthkit.autopsy"
] |
org.sleuthkit.autopsy;
| 1,473,017
|
@SuppressWarnings("unused")
protected String getActualDefaultValue(RealName tableName, ColumnBuilder column, ResultSet columnResultSet) throws SQLException {
final String actualDefaultValue = columnResultSet.getString(COLUMN_DEFAULT_EXPR);
// columns that never had DEFAULT
if (actualDefaultValue == null)
return "";
final String trimedActualDefaultValue = actualDefaultValue.trim();
// columns that previously had DEFAULT and were set to DEFAULT NULL
if ("NULL".equalsIgnoreCase(trimedActualDefaultValue))
return "";
// other values returned with just a bit of trimming
// - note that these are Oracle expressions, not actual default values
// - simple decimals come back as decimals,
// - strings come back wrapped in single quotes,
// - functions come back as expressions,
// - as specified in the last alter statement
return trimedActualDefaultValue;
}
|
@SuppressWarnings(STR) String function(RealName tableName, ColumnBuilder column, ResultSet columnResultSet) throws SQLException { final String actualDefaultValue = columnResultSet.getString(COLUMN_DEFAULT_EXPR); if (actualDefaultValue == null) return STRNULLSTR"; return trimedActualDefaultValue; }
|
/**
* Reads the actual default value in the database.
*
* @param tableName Name of the table.
* @param column Column builder to set to.
* @param columnResultSet Result set to be read.
* @return The default value, usually as an expression.
* @throws SQLException Upon errors.
*/
|
Reads the actual default value in the database
|
getActualDefaultValue
|
{
"repo_name": "alfasoftware/morf",
"path": "morf-core/src/main/java/org/alfasoftware/morf/jdbc/DatabaseMetaDataProvider.java",
"license": "apache-2.0",
"size": 38526
}
|
[
"java.sql.ResultSet",
"java.sql.SQLException",
"org.alfasoftware.morf.metadata.SchemaUtils"
] |
import java.sql.ResultSet; import java.sql.SQLException; import org.alfasoftware.morf.metadata.SchemaUtils;
|
import java.sql.*; import org.alfasoftware.morf.metadata.*;
|
[
"java.sql",
"org.alfasoftware.morf"
] |
java.sql; org.alfasoftware.morf;
| 1,207,573
|
public ValidationRunContext build()
{
Validate
.notNull( this.context.periodTypeExtendedMap, "Missing required property 'periodTypeExtendedMap'" );
Validate.notNull( this.context.constantMap, "Missing required property 'constantMap'" );
Validate.notNull( this.context.dimensionItems, "Missing required property 'dimensionItems'" );
Validate.notEmpty( this.context.sourceXs, "Missing required property 'sourceXs'" );
return this.context;
}
// -------------------------------------------------------------------------
// Setter methods
// -------------------------------------------------------------------------
|
ValidationRunContext function() { Validate .notNull( this.context.periodTypeExtendedMap, STR ); Validate.notNull( this.context.constantMap, STR ); Validate.notNull( this.context.dimensionItems, STR ); Validate.notEmpty( this.context.sourceXs, STR ); return this.context; }
|
/**
* Builds the actual ValidationRunContext object configured with the builder
*
* @return a new ValidationParam based on the builders configuration
*/
|
Builds the actual ValidationRunContext object configured with the builder
|
build
|
{
"repo_name": "vmluan/dhis2-core",
"path": "dhis-2/dhis-services/dhis-service-reporting/src/main/java/org/hisp/dhis/validation/ValidationRunContext.java",
"license": "bsd-3-clause",
"size": 8286
}
|
[
"org.apache.commons.lang3.Validate"
] |
import org.apache.commons.lang3.Validate;
|
import org.apache.commons.lang3.*;
|
[
"org.apache.commons"
] |
org.apache.commons;
| 2,597,013
|
public static Action getAction() {
return (Action) getNewComponentInstance(ACTION);
}
|
static Action function() { return (Action) getNewComponentInstance(ACTION); }
|
/**
* Gets the action
*
* @return action
*/
|
Gets the action
|
getAction
|
{
"repo_name": "ricepanda/rice",
"path": "rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/uif/util/ComponentFactory.java",
"license": "apache-2.0",
"size": 47540
}
|
[
"org.kuali.rice.krad.uif.element.Action"
] |
import org.kuali.rice.krad.uif.element.Action;
|
import org.kuali.rice.krad.uif.element.*;
|
[
"org.kuali.rice"
] |
org.kuali.rice;
| 425,192
|
private void addIntoCorruptedBlockMap(ExtendedBlock blk, DatanodeInfo node,
Map<ExtendedBlock, Set<DatanodeInfo>> corruptedBlockMap) {
Set<DatanodeInfo> dnSet = null;
if((corruptedBlockMap.containsKey(blk))) {
dnSet = corruptedBlockMap.get(blk);
}else {
dnSet = new HashSet<DatanodeInfo>();
}
if (!dnSet.contains(node)) {
dnSet.add(node);
corruptedBlockMap.put(blk, dnSet);
}
}
|
void function(ExtendedBlock blk, DatanodeInfo node, Map<ExtendedBlock, Set<DatanodeInfo>> corruptedBlockMap) { Set<DatanodeInfo> dnSet = null; if((corruptedBlockMap.containsKey(blk))) { dnSet = corruptedBlockMap.get(blk); }else { dnSet = new HashSet<DatanodeInfo>(); } if (!dnSet.contains(node)) { dnSet.add(node); corruptedBlockMap.put(blk, dnSet); } }
|
/**
* Add corrupted block replica into map.
*/
|
Add corrupted block replica into map
|
addIntoCorruptedBlockMap
|
{
"repo_name": "tseen/Federated-HDFS",
"path": "tseenliu/FedHDFS-hadoop-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java",
"license": "apache-2.0",
"size": 62498
}
|
[
"java.util.HashSet",
"java.util.Map",
"java.util.Set",
"org.apache.hadoop.hdfs.protocol.DatanodeInfo",
"org.apache.hadoop.hdfs.protocol.ExtendedBlock"
] |
import java.util.HashSet; import java.util.Map; import java.util.Set; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
|
import java.util.*; import org.apache.hadoop.hdfs.protocol.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 2,242,043
|
public List<T> allResults() {
List<T> results = null;
if (responses != null && responses.size() > 0) {
// We first calculate the total size needed
int totalSize = allResultsSize();
// We init the list and copy data
results = new ArrayList<>(totalSize);
for (DataResult<T> dataResult : responses) {
results.addAll(dataResult.getResults());
}
}
return results;
}
|
List<T> function() { List<T> results = null; if (responses != null && responses.size() > 0) { int totalSize = allResultsSize(); results = new ArrayList<>(totalSize); for (DataResult<T> dataResult : responses) { results.addAll(dataResult.getResults()); } } return results; }
|
/**
 * This method flattens the two levels (DataResponse and DataResult) into a single list of T.
* @return a single list with all the results, or null if no response exists
*/
|
This method flattens the two levels (DataResponse and DataResult) into a single list of T
|
allResults
|
{
"repo_name": "opencb/java-common-libs",
"path": "commons-datastore/commons-datastore-core/src/main/java/org/opencb/commons/datastore/core/DataResponse.java",
"license": "apache-2.0",
"size": 5539
}
|
[
"java.util.ArrayList",
"java.util.List"
] |
import java.util.ArrayList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,971,740
|
protected void writeCallBackHandlers() throws Exception {
if (codeGenConfiguration.isAsyncOn()) {
Document interfaceModel = createDOMDocumentForCallbackHandler();
debugLogDocument("Document for callback handler:", interfaceModel);
CallbackHandlerWriter callbackWriter =
new CallbackHandlerWriter(
codeGenConfiguration.isFlattenFiles() ?
getOutputDirectory(codeGenConfiguration.getOutputLocation(),
null) :
getOutputDirectory(codeGenConfiguration.getOutputLocation(),
codeGenConfiguration.getSourceLocation()),
codeGenConfiguration.getOutputLanguage());
callbackWriter.setOverride(codeGenConfiguration.isOverride());
writeFile(interfaceModel, callbackWriter);
}
}
|
void function() throws Exception { if (codeGenConfiguration.isAsyncOn()) { Document interfaceModel = createDOMDocumentForCallbackHandler(); debugLogDocument(STR, interfaceModel); CallbackHandlerWriter callbackWriter = new CallbackHandlerWriter( codeGenConfiguration.isFlattenFiles() ? getOutputDirectory(codeGenConfiguration.getOutputLocation(), null) : getOutputDirectory(codeGenConfiguration.getOutputLocation(), codeGenConfiguration.getSourceLocation()), codeGenConfiguration.getOutputLanguage()); callbackWriter.setOverride(codeGenConfiguration.isOverride()); writeFile(interfaceModel, callbackWriter); } }
|
/**
* Writes the callback handlers.
*/
|
Writes the callback handlers
|
writeCallBackHandlers
|
{
"repo_name": "arunasujith/wso2-axis2",
"path": "modules/codegen/src/org/apache/axis2/wsdl/codegen/emitter/AxisServiceBasedMultiLanguageEmitter.java",
"license": "apache-2.0",
"size": 144631
}
|
[
"org.apache.axis2.wsdl.codegen.writer.CallbackHandlerWriter",
"org.w3c.dom.Document"
] |
import org.apache.axis2.wsdl.codegen.writer.CallbackHandlerWriter; import org.w3c.dom.Document;
|
import org.apache.axis2.wsdl.codegen.writer.*; import org.w3c.dom.*;
|
[
"org.apache.axis2",
"org.w3c.dom"
] |
org.apache.axis2; org.w3c.dom;
| 2,329,193
|
protected boolean shouldOutputHeader(org.apache.camel.Message camelMessage, String headerName,
Object headerValue, Exchange exchange) {
return headerFilterStrategy == null
|| !headerFilterStrategy.applyFilterToCamelHeaders(headerName, headerValue, exchange);
}
|
boolean function(org.apache.camel.Message camelMessage, String headerName, Object headerValue, Exchange exchange) { return headerFilterStrategy == null !headerFilterStrategy.applyFilterToCamelHeaders(headerName, headerValue, exchange); }
|
/**
* Strategy to allow filtering of headers which are put on the JMS message
* <p/>
* <b>Note</b>: Currently only supports sending java identifiers as keys
*/
|
Strategy to allow filtering of headers which are put on the JMS message Note: Currently only supports sending java identifiers as keys
|
shouldOutputHeader
|
{
"repo_name": "CodeSmell/camel",
"path": "components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java",
"license": "apache-2.0",
"size": 35836
}
|
[
"javax.jms.Message",
"org.apache.camel.Exchange",
"org.apache.camel.component.jms.JmsMessageType"
] |
import javax.jms.Message; import org.apache.camel.Exchange; import org.apache.camel.component.jms.JmsMessageType;
|
import javax.jms.*; import org.apache.camel.*; import org.apache.camel.component.jms.*;
|
[
"javax.jms",
"org.apache.camel"
] |
javax.jms; org.apache.camel;
| 155,601
|
private List<LocaleServiceProvider> findProviders(Locale locale, boolean isObjectProvider) {
List<LocaleServiceProvider> providersList = providersCache.get(locale);
if (providersList == null) {
for (LocaleProviderAdapter.Type type : LocaleProviderAdapter.getAdapterPreference()) {
LocaleProviderAdapter lda = LocaleProviderAdapter.forType(type);
if (lda != null) {
LocaleServiceProvider lsp = lda.getLocaleServiceProvider(providerClass);
if (lsp != null) {
if (lsp.isSupportedLocale(locale)) {
if (providersList == null) {
providersList = new ArrayList<>(2);
}
providersList.add(lsp);
if (isObjectProvider) {
break;
}
}
}
}
}
if (providersList == null) {
providersList = NULL_LIST;
}
List<LocaleServiceProvider> val = providersCache.putIfAbsent(locale, providersList);
if (val != null) {
providersList = val;
}
}
return providersList;
}
|
List<LocaleServiceProvider> function(Locale locale, boolean isObjectProvider) { List<LocaleServiceProvider> providersList = providersCache.get(locale); if (providersList == null) { for (LocaleProviderAdapter.Type type : LocaleProviderAdapter.getAdapterPreference()) { LocaleProviderAdapter lda = LocaleProviderAdapter.forType(type); if (lda != null) { LocaleServiceProvider lsp = lda.getLocaleServiceProvider(providerClass); if (lsp != null) { if (lsp.isSupportedLocale(locale)) { if (providersList == null) { providersList = new ArrayList<>(2); } providersList.add(lsp); if (isObjectProvider) { break; } } } } } if (providersList == null) { providersList = NULL_LIST; } List<LocaleServiceProvider> val = providersCache.putIfAbsent(locale, providersList); if (val != null) { providersList = val; } } return providersList; }
|
/**
* Returns the list of locale service provider instances that support
* the specified locale.
*
* @param locale the given locale
* @return the list of locale data adapter types
*/
|
Returns the list of locale service provider instances that support the specified locale
|
findProviders
|
{
"repo_name": "mirkosertic/Bytecoder",
"path": "classlib/java.base/src/main/resources/META-INF/modules/java.base/classes/sun/util/locale/provider/LocaleServiceProviderPool.java",
"license": "apache-2.0",
"size": 15752
}
|
[
"java.util.ArrayList",
"java.util.List",
"java.util.Locale",
"java.util.spi.LocaleServiceProvider"
] |
import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.spi.LocaleServiceProvider;
|
import java.util.*; import java.util.spi.*;
|
[
"java.util"
] |
java.util;
| 2,611,746
|
public void serializeStationChart(DataChart oChart, AcronetDaemonConfiguration oConfig, String sStationCode, String sChartName, DateFormat oDateFormat) {
try {
Date oDate = new Date();
String sFullPath = getSubPath(oConfig.getFileRepositoryPath()+"/charts" ,oDate) + "/" + sStationCode + "/" + sChartName;
File oPath = new File(sFullPath);
if (!oPath.exists())
{
oPath.mkdirs();
}
if (sFullPath != null) {
String sFileName = sChartName+oDateFormat.format(oDate)+".xml";
SerializationUtils.serializeObjectToXML(sFullPath+"/"+sFileName, oChart);
}
}
catch(Exception oEx) {
oEx.printStackTrace();
}
}
|
void function(DataChart oChart, AcronetDaemonConfiguration oConfig, String sStationCode, String sChartName, DateFormat oDateFormat) { try { Date oDate = new Date(); String sFullPath = getSubPath(oConfig.getFileRepositoryPath()+STR ,oDate) + "/" + sStationCode + "/" + sChartName; File oPath = new File(sFullPath); if (!oPath.exists()) { oPath.mkdirs(); } if (sFullPath != null) { String sFileName = sChartName+oDateFormat.format(oDate)+".xml"; SerializationUtils.serializeObjectToXML(sFullPath+"/"+sFileName, oChart); } } catch(Exception oEx) { oEx.printStackTrace(); } }
|
/**
* Serializes a station Chart on disk
* @param oChart
* @param oConfig
* @param sStationCode
* @param sChartName
* @param oDateFormat
*/
|
Serializes a station Chart on disk
|
serializeStationChart
|
{
"repo_name": "fadeoutsoftware/acronetwork",
"path": "AcronetworkServer/AcronetDaemon/src/it/fadeout/acronet/daemon/AcronetDaemon.java",
"license": "apache-2.0",
"size": 38208
}
|
[
"it.fadeout.acronetwork.business.DataChart",
"java.io.File",
"java.text.DateFormat",
"java.util.Date"
] |
import it.fadeout.acronetwork.business.DataChart; import java.io.File; import java.text.DateFormat; import java.util.Date;
|
import it.fadeout.acronetwork.business.*; import java.io.*; import java.text.*; import java.util.*;
|
[
"it.fadeout.acronetwork",
"java.io",
"java.text",
"java.util"
] |
it.fadeout.acronetwork; java.io; java.text; java.util;
| 1,009,820
|
public void onFileOpen()
{
JFileChooser open = createFileChooser();
int status = open.showOpenDialog(this.ui.frame);
if (status == JFileChooser.APPROVE_OPTION) {
if (!open.getSelectedFile().exists() || !open.getSelectedFile().canRead() || !open.getSelectedFile().canWrite()) {
this.ui.displayErrorDialog("File does not exist or is not read/write", "Error");
return;
}
this.openFile = open.getSelectedFile();
loadFile(this.openFile);
}
}
|
void function() { JFileChooser open = createFileChooser(); int status = open.showOpenDialog(this.ui.frame); if (status == JFileChooser.APPROVE_OPTION) { if (!open.getSelectedFile().exists() !open.getSelectedFile().canRead() !open.getSelectedFile().canWrite()) { this.ui.displayErrorDialog(STR, "Error"); return; } this.openFile = open.getSelectedFile(); loadFile(this.openFile); } }
|
/**
* Open a file. Invoked from the File menu, Open item.
*/
|
Open a file. Invoked from the File menu, Open item
|
onFileOpen
|
{
"repo_name": "grmcdorman/SunlessSeaSaveEditor",
"path": "src/org/gmc/ssseditor/SSSaveEditor.java",
"license": "gpl-3.0",
"size": 23744
}
|
[
"javax.swing.JFileChooser"
] |
import javax.swing.JFileChooser;
|
import javax.swing.*;
|
[
"javax.swing"
] |
javax.swing;
| 1,018,332
|
@Test
public void testNoCopyFromJar() throws Exception {
final Deadline deadline = TEST_TIMEOUT.fromNow();
TestingCluster flink = null;
final Configuration configuration = new Configuration();
// activate the web monitor
configuration.setBoolean(LOCAL_START_WEBSERVER, true);
configuration.setInteger(WebOptions.PORT, 0);
try {
flink = new TestingCluster(configuration);
flink.start(true);
WebRuntimeMonitor webMonitor = ((WebRuntimeMonitor) flink.webMonitor().get());
try (HttpTestClient client = new HttpTestClient("localhost", webMonitor.getServerPort())) {
String expectedIndex = new Scanner(new File(mainResourcesPath + "/index.html"))
.useDelimiter("\\A").next();
// 1) Request index.html from web server
client.sendGetRequest("index.html", deadline.timeLeft());
HttpTestClient.SimpleHttpResponse response = client.getNextResponse(deadline.timeLeft());
assertEquals(HttpResponseStatus.OK, response.getStatus());
assertEquals(response.getType(), MimeTypes.getMimeTypeForExtension("html"));
assertEquals(expectedIndex, response.getContent());
// 2) Request file from class loader
client.sendGetRequest("../log4j-test.properties", deadline.timeLeft());
response = client.getNextResponse(deadline.timeLeft());
assertEquals(
"Returned status code " + response.getStatus() + " for file outside of web root.",
HttpResponseStatus.NOT_FOUND,
response.getStatus());
assertFalse("Did not respond with the file, but still copied it from the JAR.",
new File(webMonitor.getBaseDir(new Configuration()), "log4j-test.properties").exists());
// 3) Request non-existing file
client.sendGetRequest("not-existing-resource", deadline.timeLeft());
response = client.getNextResponse(deadline.timeLeft());
assertEquals(
"Unexpected status code " + response.getStatus() + " for file outside of web root.",
HttpResponseStatus.NOT_FOUND,
response.getStatus());
}
} finally {
if (flink != null) {
flink.shutdown();
}
}
}
// ------------------------------------------------------------------------
|
void function() throws Exception { final Deadline deadline = TEST_TIMEOUT.fromNow(); TestingCluster flink = null; final Configuration configuration = new Configuration(); configuration.setBoolean(LOCAL_START_WEBSERVER, true); configuration.setInteger(WebOptions.PORT, 0); try { flink = new TestingCluster(configuration); flink.start(true); WebRuntimeMonitor webMonitor = ((WebRuntimeMonitor) flink.webMonitor().get()); try (HttpTestClient client = new HttpTestClient(STR, webMonitor.getServerPort())) { String expectedIndex = new Scanner(new File(mainResourcesPath + STR)) .useDelimiter("\\A").next(); client.sendGetRequest(STR, deadline.timeLeft()); HttpTestClient.SimpleHttpResponse response = client.getNextResponse(deadline.timeLeft()); assertEquals(HttpResponseStatus.OK, response.getStatus()); assertEquals(response.getType(), MimeTypes.getMimeTypeForExtension("html")); assertEquals(expectedIndex, response.getContent()); client.sendGetRequest(STR, deadline.timeLeft()); response = client.getNextResponse(deadline.timeLeft()); assertEquals( STR + response.getStatus() + STR, HttpResponseStatus.NOT_FOUND, response.getStatus()); assertFalse(STR, new File(webMonitor.getBaseDir(new Configuration()), STR).exists()); client.sendGetRequest(STR, deadline.timeLeft()); response = client.getNextResponse(deadline.timeLeft()); assertEquals( STR + response.getStatus() + STR, HttpResponseStatus.NOT_FOUND, response.getStatus()); } } finally { if (flink != null) { flink.shutdown(); } } }
|
/**
* Files are copied from the flink-dist jar to a temporary directory and
* then served from there. Only allow to copy files from <code>flink-dist.jar:/web</code>
*/
|
Files are copied from the flink-dist jar to a temporary directory and then served from there. Only allow to copy files from <code>flink-dist.jar:/web</code>
|
testNoCopyFromJar
|
{
"repo_name": "haohui/flink",
"path": "flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/WebRuntimeMonitorITCase.java",
"license": "apache-2.0",
"size": 18549
}
|
[
"java.io.File",
"java.util.Scanner",
"org.apache.flink.configuration.Configuration",
"org.apache.flink.configuration.WebOptions",
"org.apache.flink.runtime.rest.handler.util.MimeTypes",
"org.apache.flink.runtime.testingUtils.TestingCluster",
"org.apache.flink.runtime.webmonitor.testutils.HttpTestClient",
"org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus",
"org.junit.Assert"
] |
import java.io.File; import java.util.Scanner; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.WebOptions; import org.apache.flink.runtime.rest.handler.util.MimeTypes; import org.apache.flink.runtime.testingUtils.TestingCluster; import org.apache.flink.runtime.webmonitor.testutils.HttpTestClient; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus; import org.junit.Assert;
|
import java.io.*; import java.util.*; import org.apache.flink.configuration.*; import org.apache.flink.runtime.*; import org.apache.flink.runtime.rest.handler.util.*; import org.apache.flink.runtime.webmonitor.testutils.*; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.*; import org.junit.*;
|
[
"java.io",
"java.util",
"org.apache.flink",
"org.junit"
] |
java.io; java.util; org.apache.flink; org.junit;
| 2,362,529
|
public static Element getLastChildElement(Node parent) {
// search for node
Node child = parent.getLastChild();
while (child != null) {
if (child.getNodeType() == Node.ELEMENT_NODE) {
return (Element)child;
}
child = child.getPreviousSibling();
}
// not found
return null;
} // getLastChildElement(Node):Element
|
static Element function(Node parent) { Node child = parent.getLastChild(); while (child != null) { if (child.getNodeType() == Node.ELEMENT_NODE) { return (Element)child; } child = child.getPreviousSibling(); } return null; }
|
/** Finds and returns the last child element node.
* Overload previous method for non-Xerces node impl.
*/
|
Finds and returns the last child element node. Overload previous method for non-Xerces node impl
|
getLastChildElement
|
{
"repo_name": "lostdj/Jaklin-OpenJDK-JAXP",
"path": "src/java.xml/share/classes/com/sun/org/apache/xerces/internal/util/DOMUtil.java",
"license": "gpl-2.0",
"size": 31144
}
|
[
"org.w3c.dom.Element",
"org.w3c.dom.Node"
] |
import org.w3c.dom.Element; import org.w3c.dom.Node;
|
import org.w3c.dom.*;
|
[
"org.w3c.dom"
] |
org.w3c.dom;
| 10,610
|
AccountingLineViewCurrentBaseAmount layoutElement = new AccountingLineViewCurrentBaseAmount();
layoutElement.setBaseAmountField(createFieldForPropertyName(baseAmountPropertyName, accountingLineClass));
layoutElement.setBaseAmountFieldDefinition(createFieldDefinitionForProperty(baseAmountPropertyName));
layoutElement.setCurrentAmountField(createFieldForPropertyName(currentAmountPropertyName, accountingLineClass));
layoutElement.setCurrentAmountFieldDefinition(createFieldDefinitionForProperty(currentAmountPropertyName));
layoutElement.setDefinition(this);
return layoutElement;
}
|
AccountingLineViewCurrentBaseAmount layoutElement = new AccountingLineViewCurrentBaseAmount(); layoutElement.setBaseAmountField(createFieldForPropertyName(baseAmountPropertyName, accountingLineClass)); layoutElement.setBaseAmountFieldDefinition(createFieldDefinitionForProperty(baseAmountPropertyName)); layoutElement.setCurrentAmountField(createFieldForPropertyName(currentAmountPropertyName, accountingLineClass)); layoutElement.setCurrentAmountFieldDefinition(createFieldDefinitionForProperty(currentAmountPropertyName)); layoutElement.setDefinition(this); return layoutElement; }
|
/**
* Creates a property initiated AccountingLineViewCurrentBaseAmount element
* @see org.kuali.kfs.sys.document.datadictionary.AccountingLineViewRenderableElementDefinition#createLayoutElement(java.lang.Class)
*/
|
Creates a property initiated AccountingLineViewCurrentBaseAmount element
|
createLayoutElement
|
{
"repo_name": "Ariah-Group/Finance",
"path": "af_webapp/src/main/java/org/kuali/kfs/sys/document/datadictionary/AccountingLineViewCurrentBaseAmountFieldDefinition.java",
"license": "apache-2.0",
"size": 7926
}
|
[
"org.kuali.kfs.sys.document.web.AccountingLineViewCurrentBaseAmount"
] |
import org.kuali.kfs.sys.document.web.AccountingLineViewCurrentBaseAmount;
|
import org.kuali.kfs.sys.document.web.*;
|
[
"org.kuali.kfs"
] |
org.kuali.kfs;
| 1,767,696
|
Scanner numberFromKeyboard = new Scanner(System.in);
System.out.println("Insert number:");
int number;
int sumofMultiplier3,sumofMultiplier5,sumofMultiplier15,sumofMultiplier = 0;
number = numberFromKeyboard.nextInt();
sumofMultiplier3 =((number/3 * (number/3 + 1)) / 2) *3;
sumofMultiplier5 =((number/5 * (number/5 + 1)) / 2) *5;
sumofMultiplier15 =((number/15 * (number/15 + 1)) / 2) *15;
sumofMultiplier= sumofMultiplier3 + sumofMultiplier5 - sumofMultiplier15;
System.out.println("SumofMultipliers of 3 or 5 smaller than " + number);
System.out.println(sumofMultiplier);
numberFromKeyboard.close();
}
|
Scanner numberFromKeyboard = new Scanner(System.in); System.out.println(STR); int number; int sumofMultiplier3,sumofMultiplier5,sumofMultiplier15,sumofMultiplier = 0; number = numberFromKeyboard.nextInt(); sumofMultiplier3 =((number/3 * (number/3 + 1)) / 2) *3; sumofMultiplier5 =((number/5 * (number/5 + 1)) / 2) *5; sumofMultiplier15 =((number/15 * (number/15 + 1)) / 2) *15; sumofMultiplier= sumofMultiplier3 + sumofMultiplier5 - sumofMultiplier15; System.out.println(STR + number); System.out.println(sumofMultiplier); numberFromKeyboard.close(); }
|
/**
* . javadoc comment
*/
|
. javadoc comment
|
main
|
{
"repo_name": "gergo13/JavaMainRepo",
"path": "Students/Toderici Dan/Pack1/SumofMultipliers2.java",
"license": "apache-2.0",
"size": 1125
}
|
[
"java.util.Scanner"
] |
import java.util.Scanner;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,843,402
|
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<CloudServiceInner> list(Context context) {
return new PagedIterable<>(listAsync(context));
}
|
@ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<CloudServiceInner> function(Context context) { return new PagedIterable<>(listAsync(context)); }
|
/**
* Gets a list of all cloud services in the subscription, regardless of the associated resource group. Use nextLink
* property in the response to get the next page of Cloud Services. Do this till nextLink is null to fetch all the
* Cloud Services.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ApiErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a list of all cloud services in the subscription, regardless of the associated resource group as
* paginated response with {@link PagedIterable}.
*/
|
Gets a list of all cloud services in the subscription, regardless of the associated resource group. Use nextLink property in the response to get the next page of Cloud Services. Do this till nextLink is null to fetch all the Cloud Services
|
list
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-compute/src/main/java/com/azure/resourcemanager/compute/implementation/CloudServicesClientImpl.java",
"license": "mit",
"size": 179410
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedIterable",
"com.azure.core.util.Context",
"com.azure.resourcemanager.compute.fluent.models.CloudServiceInner"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedIterable; import com.azure.core.util.Context; import com.azure.resourcemanager.compute.fluent.models.CloudServiceInner;
|
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.compute.fluent.models.*;
|
[
"com.azure.core",
"com.azure.resourcemanager"
] |
com.azure.core; com.azure.resourcemanager;
| 30,219
|
public void readCITelephoneTypeElements(XMLStreamReader reader, CITelephone bean) throws XMLStreamException
{
boolean found;
// voice
do
{
found = checkElementName(reader, "voice");
if (found)
{
reader.nextTag();
String voice = ns1Bindings.readCharacterString(reader);
if (voice != null)
bean.addVoice(voice);
reader.nextTag(); // end property tag
reader.nextTag();
}
}
while (found);
// facsimile
do
{
found = checkElementName(reader, "facsimile");
if (found)
{
reader.nextTag();
String facsimile = ns1Bindings.readCharacterString(reader);
if (facsimile != null)
bean.addFacsimile(facsimile);
reader.nextTag(); // end property tag
reader.nextTag();
}
}
while (found);
}
|
void function(XMLStreamReader reader, CITelephone bean) throws XMLStreamException { boolean found; do { found = checkElementName(reader, "voice"); if (found) { reader.nextTag(); String voice = ns1Bindings.readCharacterString(reader); if (voice != null) bean.addVoice(voice); reader.nextTag(); reader.nextTag(); } } while (found); do { found = checkElementName(reader, STR); if (found) { reader.nextTag(); String facsimile = ns1Bindings.readCharacterString(reader); if (facsimile != null) bean.addFacsimile(facsimile); reader.nextTag(); reader.nextTag(); } } while (found); }
|
/**
* Reads elements of CITelephoneType complex type
*/
|
Reads elements of CITelephoneType complex type
|
readCITelephoneTypeElements
|
{
"repo_name": "sensiasoft/lib-sensorml",
"path": "sensorml-core/src/main/java/org/isotc211/v2005/gmd/bind/XMLStreamBindings.java",
"license": "mpl-2.0",
"size": 80004
}
|
[
"javax.xml.stream.XMLStreamException",
"javax.xml.stream.XMLStreamReader",
"org.isotc211.v2005.gmd.CITelephone"
] |
import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import org.isotc211.v2005.gmd.CITelephone;
|
import javax.xml.stream.*; import org.isotc211.v2005.gmd.*;
|
[
"javax.xml",
"org.isotc211.v2005"
] |
javax.xml; org.isotc211.v2005;
| 2,643,575
|
public SelenideElement pathInput() {
return $("#path");
}
|
SelenideElement function() { return $("#path"); }
|
/**
* Path input inside Add Authorization Rule box
* @return element
*/
|
Path input inside Add Authorization Rule box
|
pathInput
|
{
"repo_name": "apiman/apiman-test",
"path": "apiman-it-ui/src/test/java/io/apiman/test/integration/ui/support/selenide/pages/policies/AddAuthorizationPolicyPage.java",
"license": "apache-2.0",
"size": 3451
}
|
[
"com.codeborne.selenide.SelenideElement"
] |
import com.codeborne.selenide.SelenideElement;
|
import com.codeborne.selenide.*;
|
[
"com.codeborne.selenide"
] |
com.codeborne.selenide;
| 1,589,610
|
void loadSnapshotFrom(File source) throws IOException;
|
void loadSnapshotFrom(File source) throws IOException;
|
/**
* Load records from a snapshot file.
*
* @param source the source file.
* @throws IOException on failure.
* @see kyotocabinet.DB#load_snapshot(String)
*/
|
Load records from a snapshot file
|
loadSnapshotFrom
|
{
"repo_name": "lastfm/lastcommons-kyoto",
"path": "src/main/java/fm/last/commons/kyoto/KyotoDb.java",
"license": "apache-2.0",
"size": 31888
}
|
[
"java.io.File",
"java.io.IOException"
] |
import java.io.File; import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 569,478
|
private void inflateAbsList(LayoutInflater inflater, ViewGroup container,
boolean inflateListView) {
if (inflateListView) {
mAbsListView = (AbsListView) inflater.inflate(R.layout.fragment_meat_list,
container, false);
mAdapter = new MeatAdapter(inflater, R.layout.item_meat_list);
} else {
mAbsListView = (AbsListView) inflater.inflate(R.layout.fragment_meat_grid,
container, false);
mAdapter = new MeatAdapter(inflater, R.layout.item_meat_grid);
}
mAbsListView.setAdapter(mAdapter);
mAbsListView.setOnItemClickListener(mAdapter);
}
|
void function(LayoutInflater inflater, ViewGroup container, boolean inflateListView) { if (inflateListView) { mAbsListView = (AbsListView) inflater.inflate(R.layout.fragment_meat_list, container, false); mAdapter = new MeatAdapter(inflater, R.layout.item_meat_list); } else { mAbsListView = (AbsListView) inflater.inflate(R.layout.fragment_meat_grid, container, false); mAdapter = new MeatAdapter(inflater, R.layout.item_meat_grid); } mAbsListView.setAdapter(mAdapter); mAbsListView.setOnItemClickListener(mAdapter); }
|
/**
* Inflate a ListView or a GridView with a corresponding ListAdapter.
*
* @param inflater The LayoutInflater.
* @param container The ViewGroup that contains this AbsListView. The AbsListView won't be
* attached to it.
* @param inflateListView Pass true to inflate a ListView, or false to inflate a GridView.
*/
|
Inflate a ListView or a GridView with a corresponding ListAdapter
|
inflateAbsList
|
{
"repo_name": "s20121035/rk3288_android5.1_repo",
"path": "developers/samples/android/ui/transition/AdapterTransition/Application/src/main/java/com/example/android/adaptertransition/AdapterTransitionFragment.java",
"license": "gpl-3.0",
"size": 10278
}
|
[
"android.view.LayoutInflater",
"android.view.ViewGroup",
"android.widget.AbsListView"
] |
import android.view.LayoutInflater; import android.view.ViewGroup; import android.widget.AbsListView;
|
import android.view.*; import android.widget.*;
|
[
"android.view",
"android.widget"
] |
android.view; android.widget;
| 2,452,311
|
@Nullable public static <R> R wrapThreadLoader(ClassLoader ldr, IgniteOutClosure<R> c) {
Thread curThread = Thread.currentThread();
// Get original context class loader.
ClassLoader ctxLdr = curThread.getContextClassLoader();
try {
curThread.setContextClassLoader(ldr);
return c.apply();
}
finally {
// Set the original class loader back.
curThread.setContextClassLoader(ctxLdr);
}
}
|
@Nullable static <R> R function(ClassLoader ldr, IgniteOutClosure<R> c) { Thread curThread = Thread.currentThread(); ClassLoader ctxLdr = curThread.getContextClassLoader(); try { curThread.setContextClassLoader(ldr); return c.apply(); } finally { curThread.setContextClassLoader(ctxLdr); } }
|
/**
* Sets thread context class loader to the given loader, executes the closure, and then
* resets thread context class loader to its initial value.
*
* @param ldr Class loader to run the closure under.
* @param c Closure to run.
* @param <R> Return type.
* @return Return value.
*/
|
Sets thread context class loader to the given loader, executes the closure, and then resets thread context class loader to its initial value
|
wrapThreadLoader
|
{
"repo_name": "apache/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java",
"license": "apache-2.0",
"size": 387878
}
|
[
"org.apache.ignite.lang.IgniteOutClosure",
"org.jetbrains.annotations.Nullable"
] |
import org.apache.ignite.lang.IgniteOutClosure; import org.jetbrains.annotations.Nullable;
|
import org.apache.ignite.lang.*; import org.jetbrains.annotations.*;
|
[
"org.apache.ignite",
"org.jetbrains.annotations"
] |
org.apache.ignite; org.jetbrains.annotations;
| 1,234,811
|
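A minimal, self-contained sketch of the context-class-loader swap pattern documented in the wrapThreadLoader record above, written against plain JDK types; the ClassLoaderScope class name and the Supplier-based signature are illustrative assumptions, not Ignite's actual API.
import java.util.function.Supplier;
public final class ClassLoaderScope {
    // Runs the supplier with the given loader as the thread context class loader,
    // then restores the previous loader even if the supplier throws.
    public static <R> R withContextClassLoader(ClassLoader ldr, Supplier<R> body) {
        Thread cur = Thread.currentThread();
        ClassLoader prev = cur.getContextClassLoader();
        try {
            cur.setContextClassLoader(ldr);
            return body.get();
        } finally {
            cur.setContextClassLoader(prev);
        }
    }
}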
public void setRepeatToggleModes(@RepeatModeUtil.RepeatToggleModes int repeatToggleModes) {
this.repeatToggleModes = repeatToggleModes;
if (player != null) {
@Player.RepeatMode int currentMode = player.getRepeatMode();
if (repeatToggleModes == RepeatModeUtil.REPEAT_TOGGLE_MODE_NONE
&& currentMode != Player.REPEAT_MODE_OFF) {
controlDispatcher.dispatchSetRepeatMode(player, Player.REPEAT_MODE_OFF);
} else if (repeatToggleModes == RepeatModeUtil.REPEAT_TOGGLE_MODE_ONE
&& currentMode == Player.REPEAT_MODE_ALL) {
controlDispatcher.dispatchSetRepeatMode(player, Player.REPEAT_MODE_ONE);
} else if (repeatToggleModes == RepeatModeUtil.REPEAT_TOGGLE_MODE_ALL
&& currentMode == Player.REPEAT_MODE_ONE) {
controlDispatcher.dispatchSetRepeatMode(player, Player.REPEAT_MODE_ALL);
}
}
}
|
void function(@RepeatModeUtil.RepeatToggleModes int repeatToggleModes) { this.repeatToggleModes = repeatToggleModes; if (player != null) { @Player.RepeatMode int currentMode = player.getRepeatMode(); if (repeatToggleModes == RepeatModeUtil.REPEAT_TOGGLE_MODE_NONE && currentMode != Player.REPEAT_MODE_OFF) { controlDispatcher.dispatchSetRepeatMode(player, Player.REPEAT_MODE_OFF); } else if (repeatToggleModes == RepeatModeUtil.REPEAT_TOGGLE_MODE_ONE && currentMode == Player.REPEAT_MODE_ALL) { controlDispatcher.dispatchSetRepeatMode(player, Player.REPEAT_MODE_ONE); } else if (repeatToggleModes == RepeatModeUtil.REPEAT_TOGGLE_MODE_ALL && currentMode == Player.REPEAT_MODE_ONE) { controlDispatcher.dispatchSetRepeatMode(player, Player.REPEAT_MODE_ALL); } } }
|
/**
* Sets which repeat toggle modes are enabled.
*
* @param repeatToggleModes A set of {@link RepeatModeUtil.RepeatToggleModes}.
*/
|
Sets which repeat toggle modes are enabled
|
setRepeatToggleModes
|
{
"repo_name": "Zooc/AnDevHelper",
"path": "app/src/main/java/an/devhp/widget/PlayCtrlView.java",
"license": "apache-2.0",
"size": 37498
}
|
[
"com.google.android.exoplayer2.Player",
"com.google.android.exoplayer2.util.RepeatModeUtil"
] |
import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.util.RepeatModeUtil;
|
import com.google.android.exoplayer2.*; import com.google.android.exoplayer2.util.*;
|
[
"com.google.android"
] |
com.google.android;
| 745,450
|
public void removePropertyChangeListener(PropertyChangeListener listener);
|
void function(PropertyChangeListener listener);
|
/**
* Removes a property change listener.
*
* @param listener
* the property change listener.
*/
|
Removes a property change listener
|
removePropertyChangeListener
|
{
"repo_name": "fsaravia/JCalendar",
"path": "src/com/toedter/calendar/IDateEditor.java",
"license": "lgpl-2.1",
"size": 4813
}
|
[
"java.beans.PropertyChangeListener"
] |
import java.beans.PropertyChangeListener;
|
import java.beans.*;
|
[
"java.beans"
] |
java.beans;
| 2,281,791
|
public long getInt(String varName) {
ScopeSymbol ssym = getScope().getSymbol(varName);
return (Long) ssym.getValue();
}
|
long function(String varName) { ScopeSymbol ssym = getScope().getSymbol(varName); return (Long) ssym.getValue(); }
|
/**
* Get variable's value as an int
*/
|
Get variable's value as an int
|
getInt
|
{
"repo_name": "leepc12/BigDataScript",
"path": "src/org/bds/run/BdsThread.java",
"license": "apache-2.0",
"size": 38184
}
|
[
"org.bds.scope.ScopeSymbol"
] |
import org.bds.scope.ScopeSymbol;
|
import org.bds.scope.*;
|
[
"org.bds.scope"
] |
org.bds.scope;
| 2,907,857
|
@Override
public synchronized IAccountInfo loginSilent() throws ClientException {
if (!mInitialized) {
throw new IllegalStateException("init must be called");
}
mLogger.logDebug("Starting login silent");
final AccountType accountType = getAccountTypeInPreferences();
if (accountType != null) {
mLogger.logDebug(String.format("Expecting %s type of account", accountType));
}
mLogger.logDebug("Checking MSA");
IAccountInfo accountInfo = mMSAAuthenticator.loginSilent();
if (accountInfo != null) {
mLogger.logDebug("Found account info in MSA");
setAccountTypeInPreferences(accountType);
mAccountInfo.set(accountInfo);
return accountInfo;
}
mLogger.logDebug("Checking ADAL");
accountInfo = mADALAuthenticator.loginSilent();
mAccountInfo.set(accountInfo);
if (accountInfo != null) {
mLogger.logDebug("Found account info in ADAL");
setAccountTypeInPreferences(accountType);
}
return mAccountInfo.get();
}
|
synchronized IAccountInfo function() throws ClientException { if (!mInitialized) { throw new IllegalStateException(STR); } mLogger.logDebug(STR); final AccountType accountType = getAccountTypeInPreferences(); if (accountType != null) { mLogger.logDebug(String.format(STR, accountType)); } mLogger.logDebug(STR); IAccountInfo accountInfo = mMSAAuthenticator.loginSilent(); if (accountInfo != null) { mLogger.logDebug(STR); setAccountTypeInPreferences(accountType); mAccountInfo.set(accountInfo); return accountInfo; } mLogger.logDebug(STR); accountInfo = mADALAuthenticator.loginSilent(); mAccountInfo.set(accountInfo); if (accountInfo != null) { mLogger.logDebug(STR); setAccountTypeInPreferences(accountType); } return mAccountInfo.get(); }
|
/**
* Starts a silent login.
* @return The account info.
* @throws ClientException Exception occurs if the login was unable to complete for any reason.
*/
|
Starts a silent login
|
loginSilent
|
{
"repo_name": "daboxu/onedrive-sdk-android",
"path": "onedrivesdk/src/main/java/com/onedrive/sdk/authentication/DisambiguationAuthenticator.java",
"license": "mit",
"size": 14423
}
|
[
"com.onedrive.sdk.core.ClientException"
] |
import com.onedrive.sdk.core.ClientException;
|
import com.onedrive.sdk.core.*;
|
[
"com.onedrive.sdk"
] |
com.onedrive.sdk;
| 812,866
|
protected void checkIsPresent(EObject issueTarget, Multimap<String, JSONValue> documentValues,
String propertyPath) {
if (!documentValues.containsKey(propertyPath)) {
addIssue(JSONIssueCodes.getMessageForJSON_MISSING_PROPERTY(propertyPath), issueTarget,
JSONIssueCodes.JSON_MISSING_PROPERTY);
}
}
|
void function(EObject issueTarget, Multimap<String, JSONValue> documentValues, String propertyPath) { if (!documentValues.containsKey(propertyPath)) { addIssue(JSONIssueCodes.getMessageForJSON_MISSING_PROPERTY(propertyPath), issueTarget, JSONIssueCodes.JSON_MISSING_PROPERTY); } }
|
/**
* Checks that in the given JSON {@code documentValues}, a value has been set for the given property path.
*
* Adds an {@code IssueCodes#JSON_MISSING_PROPERTY} issue to {@code issueTarget} otherwise.
*/
|
Checks that in the given JSON documentValues, a value has been set for the given property path. Adds an IssueCodes#JSON_MISSING_PROPERTY issue to issueTarget otherwise
|
checkIsPresent
|
{
"repo_name": "lbeurerkellner/n4js",
"path": "plugins/org.eclipse.n4js/src/org/eclipse/n4js/validation/validators/packagejson/AbstractJSONValidatorExtension.java",
"license": "epl-1.0",
"size": 16169
}
|
[
"com.google.common.collect.Multimap",
"org.eclipse.emf.ecore.EObject",
"org.eclipse.n4js.json.JSON",
"org.eclipse.n4js.json.validation.JSONIssueCodes"
] |
import com.google.common.collect.Multimap; import org.eclipse.emf.ecore.EObject; import org.eclipse.n4js.json.JSON; import org.eclipse.n4js.json.validation.JSONIssueCodes;
|
import com.google.common.collect.*; import org.eclipse.emf.ecore.*; import org.eclipse.n4js.json.*; import org.eclipse.n4js.json.validation.*;
|
[
"com.google.common",
"org.eclipse.emf",
"org.eclipse.n4js"
] |
com.google.common; org.eclipse.emf; org.eclipse.n4js;
| 7,723
|
public JRCrosstabRowGroup removeRowGroup(String groupName)
{
JRCrosstabRowGroup removed = null;
Integer idx = rowGroupsMap.remove(groupName);
if (idx != null)
{
removed = rowGroups.remove(idx.intValue());
for (ListIterator<JRCrosstabRowGroup> it = rowGroups.listIterator(idx.intValue()); it.hasNext();)
{
JRCrosstabRowGroup group = it.next();
rowGroupsMap.put(group.getName(), Integer.valueOf(it.previousIndex()));
}
for (Iterator<JRCrosstabCell> it = cellsList.iterator(); it.hasNext();)
{
JRCrosstabCell cell = it.next();
String rowTotalGroup = cell.getRowTotalGroup();
if (rowTotalGroup != null && rowTotalGroup.equals(groupName))
{
it.remove();
cellsMap.remove(new Pair<String,String>(rowTotalGroup, cell.getColumnTotalGroup()));
getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_CELLS, cell, -1);
}
}
removeRowGroupVars(removed);
getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_ROW_GROUPS, removed, idx.intValue());
}
return removed;
}
|
JRCrosstabRowGroup function(String groupName) { JRCrosstabRowGroup removed = null; Integer idx = rowGroupsMap.remove(groupName); if (idx != null) { removed = rowGroups.remove(idx.intValue()); for (ListIterator<JRCrosstabRowGroup> it = rowGroups.listIterator(idx.intValue()); it.hasNext();) { JRCrosstabRowGroup group = it.next(); rowGroupsMap.put(group.getName(), Integer.valueOf(it.previousIndex())); } for (Iterator<JRCrosstabCell> it = cellsList.iterator(); it.hasNext();) { JRCrosstabCell cell = it.next(); String rowTotalGroup = cell.getRowTotalGroup(); if (rowTotalGroup != null && rowTotalGroup.equals(groupName)) { it.remove(); cellsMap.remove(new Pair<String,String>(rowTotalGroup, cell.getColumnTotalGroup())); getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_CELLS, cell, -1); } } removeRowGroupVars(removed); getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_ROW_GROUPS, removed, idx.intValue()); } return removed; }
|
/**
* Removes a row group.
*
* @param groupName the group name
* @return the removed group
*/
|
Removes a row group
|
removeRowGroup
|
{
"repo_name": "sikachu/jasperreports",
"path": "src/net/sf/jasperreports/crosstabs/design/JRDesignCrosstab.java",
"license": "lgpl-3.0",
"size": 51176
}
|
[
"java.util.Iterator",
"java.util.ListIterator",
"net.sf.jasperreports.crosstabs.JRCrosstabCell",
"net.sf.jasperreports.crosstabs.JRCrosstabRowGroup",
"net.sf.jasperreports.engine.util.Pair"
] |
import java.util.Iterator; import java.util.ListIterator; import net.sf.jasperreports.crosstabs.JRCrosstabCell; import net.sf.jasperreports.crosstabs.JRCrosstabRowGroup; import net.sf.jasperreports.engine.util.Pair;
|
import java.util.*; import net.sf.jasperreports.crosstabs.*; import net.sf.jasperreports.engine.util.*;
|
[
"java.util",
"net.sf.jasperreports"
] |
java.util; net.sf.jasperreports;
| 2,757,260
|
public Poly.Type getStyle() { return style; }
|
public Poly.Type getStyle() { return style; }
|
/**
 * Returns the distance from the center of the standard ArcInst to the outside of this ArcLayer in grid units.
 * The distance from the center of arbitrary ArcInst ai to the outside of its ArcLayer is
 * ai.getD().getExtendOverMin() + arcLayer.getGridExtend()
 * @return the distance from the outside of the ArcInst to this ArcLayer in grid units.
 */
|
Returns the distance from the center of the standard ArcInst to the outside of this ArcLayer in grid units. The distance from the center of arbitrary ArcInst ai to the outside of its ArcLayer is ai.getD().getExtendOverMin() + arcLayer.getGridExtend()
|
getGridExtend
|
{
"repo_name": "imr/Electric8",
"path": "com/sun/electric/technology/Technology.java",
"license": "gpl-3.0",
"size": 194212
}
|
[
"com.sun.electric.database.geometry.Poly"
] |
import com.sun.electric.database.geometry.Poly;
|
import com.sun.electric.database.geometry.*;
|
[
"com.sun.electric"
] |
com.sun.electric;
| 2,180,314
|
public Block writeBlock(long blockAddress, int blockOffset,
char []buffer, int offset, int charLength)
throws IOException
{
int length = 2 * charLength;
if (BLOCK_SIZE - blockOffset < length)
throw new IllegalArgumentException(L.l("write offset {0} length {1} too long",
blockOffset, length));
Block block = readBlock(addressToBlockId(blockAddress));
try {
Lock lock = block.getWriteLock();
lock.tryLock(_blockLockTimeout, TimeUnit.MILLISECONDS);
try {
byte []blockBuffer = block.getBuffer();
int blockTail = blockOffset;
for (int i = 0; i < charLength; i++) {
char ch = buffer[offset + i];
blockBuffer[blockTail] = (byte) (ch >> 8);
blockBuffer[blockTail + 1] = (byte) (ch);
blockTail += 2;
}
block.setDirty(blockOffset, blockTail);
return block;
} finally {
lock.unlock();
}
} catch (InterruptedException e) {
throw new IllegalStateException(e);
} finally {
block.free();
}
}
|
Block function(long blockAddress, int blockOffset, char []buffer, int offset, int charLength) throws IOException { int length = 2 * charLength; if (BLOCK_SIZE - blockOffset < length) throw new IllegalArgumentException(L.l(STR, blockOffset, length)); Block block = readBlock(addressToBlockId(blockAddress)); try { Lock lock = block.getWriteLock(); lock.tryLock(_blockLockTimeout, TimeUnit.MILLISECONDS); try { byte []blockBuffer = block.getBuffer(); int blockTail = blockOffset; for (int i = 0; i < charLength; i++) { char ch = buffer[offset + i]; blockBuffer[blockTail] = (byte) (ch >> 8); blockBuffer[blockTail + 1] = (byte) (ch); blockTail += 2; } block.setDirty(blockOffset, blockTail); return block; } finally { lock.unlock(); } } catch (InterruptedException e) { throw new IllegalStateException(e); } finally { block.free(); } }
|
/**
* Writes a character based block
*
* @param blockAddress the fragment to write
* @param blockOffset the offset into the fragment
* @param buffer the write buffer
* @param offset offset into the write buffer
* @param length the number of bytes to write
*/
|
Writes a character based block
|
writeBlock
|
{
"repo_name": "mdaniel/svn-caucho-com-resin",
"path": "modules/resin/src/com/caucho/db/block/BlockStore.java",
"license": "gpl-2.0",
"size": 51974
}
|
[
"com.caucho.db.lock.Lock",
"java.io.IOException",
"java.util.concurrent.TimeUnit",
"java.util.concurrent.locks.Lock"
] |
import com.caucho.db.lock.Lock; import java.io.IOException; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock;
|
import com.caucho.db.lock.*; import java.io.*; import java.util.concurrent.*; import java.util.concurrent.locks.*;
|
[
"com.caucho.db",
"java.io",
"java.util"
] |
com.caucho.db; java.io; java.util;
| 2,156,518
|
public float getMaxAngleStop() {
return Ode.dJointGetHinge2Param( jointId, OdeConstants.dParamHiStop );
}
|
float function() { return Ode.dJointGetHinge2Param( jointId, OdeConstants.dParamHiStop ); }
|
/**
* Fetch the currently set maximum angle stop for axis 1 from this joint.
*
 * @return An angle in radians in the range [-π,+π] or
* Float.POSITIVE_INFINITY
*/
|
Fetch the currently set maximum angle stop for axis 1 from this joint
|
getMaxAngleStop
|
{
"repo_name": "jogjayr/InTEL-Project",
"path": "JMEPhysics/src/org/odejava/JointHinge2.java",
"license": "gpl-3.0",
"size": 28413
}
|
[
"org.odejava.ode.Ode",
"org.odejava.ode.OdeConstants"
] |
import org.odejava.ode.Ode; import org.odejava.ode.OdeConstants;
|
import org.odejava.ode.*;
|
[
"org.odejava.ode"
] |
org.odejava.ode;
| 2,320,689
|
Set<Application> findByNameAndStatuses(String partialName, ApplicationStatus ...statuses) throws TechnicalException;
|
Set<Application> findByNameAndStatuses(String partialName, ApplicationStatus ...statuses) throws TechnicalException;
|
/**
* find applications by name. Support partial name (works like `contains`)
* @param partialName
* @return applications
* @throws TechnicalException
*/
|
find applications by name. Support partial name (works like `contains`)
|
findByNameAndStatuses
|
{
"repo_name": "gravitee-io/gravitee-repository",
"path": "src/main/java/io/gravitee/repository/management/api/ApplicationRepository.java",
"license": "apache-2.0",
"size": 2927
}
|
[
"io.gravitee.repository.exceptions.TechnicalException",
"io.gravitee.repository.management.model.Application",
"io.gravitee.repository.management.model.ApplicationStatus",
"java.util.Set"
] |
import io.gravitee.repository.exceptions.TechnicalException; import io.gravitee.repository.management.model.Application; import io.gravitee.repository.management.model.ApplicationStatus; import java.util.Set;
|
import io.gravitee.repository.exceptions.*; import io.gravitee.repository.management.model.*; import java.util.*;
|
[
"io.gravitee.repository",
"java.util"
] |
io.gravitee.repository; java.util;
| 2,081,376
|
@Nullable
public ITextComponent getTabListDisplayName()
{
return null;
}
|
ITextComponent function() { return null; }
|
/**
* Returns null which indicates the tab list should just display the player's name, return a different value to
* display the specified text instead of the player's name
*/
|
Returns null which indicates the tab list should just display the player's name, return a different value to display the specified text instead of the player's name
|
getTabListDisplayName
|
{
"repo_name": "Severed-Infinity/technium",
"path": "build/tmp/recompileMc/sources/net/minecraft/entity/player/EntityPlayerMP.java",
"license": "gpl-3.0",
"size": 58179
}
|
[
"net.minecraft.util.text.ITextComponent"
] |
import net.minecraft.util.text.ITextComponent;
|
import net.minecraft.util.text.*;
|
[
"net.minecraft.util"
] |
net.minecraft.util;
| 2,493,939
|
public void add(XMLX509IssuerSerial xmlX509IssuerSerial) {
appendSelf(xmlX509IssuerSerial);
addReturnToSelf();
}
|
void function(XMLX509IssuerSerial xmlX509IssuerSerial) { appendSelf(xmlX509IssuerSerial); addReturnToSelf(); }
|
/**
* Method add
*
* @param xmlX509IssuerSerial
*/
|
Method add
|
add
|
{
"repo_name": "apache/santuario-java",
"path": "src/main/java/org/apache/xml/security/keys/content/X509Data.java",
"license": "apache-2.0",
"size": 13758
}
|
[
"org.apache.xml.security.keys.content.x509.XMLX509IssuerSerial"
] |
import org.apache.xml.security.keys.content.x509.XMLX509IssuerSerial;
|
import org.apache.xml.security.keys.content.x509.*;
|
[
"org.apache.xml"
] |
org.apache.xml;
| 1,385,058
|
public static <T> List<T> readCollection(PortableRawReaderEx reader, @Nullable PlatformReaderClosure<T> readClo) {
int cnt = reader.readInt();
List<T> res = new ArrayList<>(cnt);
if (readClo == null) {
for (int i = 0; i < cnt; i++)
res.add((T)reader.readObjectDetached());
}
else {
for (int i = 0; i < cnt; i++)
res.add(readClo.read(reader));
}
return res;
}
|
static <T> List<T> function(PortableRawReaderEx reader, @Nullable PlatformReaderClosure<T> readClo) { int cnt = reader.readInt(); List<T> res = new ArrayList<>(cnt); if (readClo == null) { for (int i = 0; i < cnt; i++) res.add((T)reader.readObjectDetached()); } else { for (int i = 0; i < cnt; i++) res.add(readClo.read(reader)); } return res; }
|
/**
* Read collection.
*
* @param reader Reader.
* @param readClo Optional reader closure.
* @return List.
*/
|
Read collection
|
readCollection
|
{
"repo_name": "agoncharuk/ignite",
"path": "modules/platform/src/main/java/org/apache/ignite/internal/processors/platform/utils/PlatformUtils.java",
"license": "apache-2.0",
"size": 22836
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.apache.ignite.internal.portable.PortableRawReaderEx",
"org.jetbrains.annotations.Nullable"
] |
import java.util.ArrayList; import java.util.List; import org.apache.ignite.internal.portable.PortableRawReaderEx; import org.jetbrains.annotations.Nullable;
|
import java.util.*; import org.apache.ignite.internal.portable.*; import org.jetbrains.annotations.*;
|
[
"java.util",
"org.apache.ignite",
"org.jetbrains.annotations"
] |
java.util; org.apache.ignite; org.jetbrains.annotations;
| 587,388
|
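A library-free sketch of the length-prefixed collection read pattern shown in the readCollection record above (count first, then one element per iteration); DataInputStream and the readStrings helper are illustrative stand-ins for Ignite's portable reader, not its real API.
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public final class LengthPrefixedReader {
    // Reads an int count followed by that many UTF strings.
    public static List<String> readStrings(DataInputStream in) throws IOException {
        int cnt = in.readInt();
        List<String> res = new ArrayList<>(cnt);
        for (int i = 0; i < cnt; i++) {
            res.add(in.readUTF());
        }
        return res;
    }
}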
public static void writeChartAsPNG(OutputStream out, JFreeChart chart,
int width, int height) throws IOException {
// defer argument checking...
writeChartAsPNG(out, chart, width, height, null);
}
|
static void function(OutputStream out, JFreeChart chart, int width, int height) throws IOException { writeChartAsPNG(out, chart, width, height, null); }
|
/**
* Writes a chart to an output stream in PNG format.
*
* @param out the output stream (<code>null</code> not permitted).
* @param chart the chart (<code>null</code> not permitted).
* @param width the image width.
* @param height the image height.
*
* @throws IOException if there are any I/O errors.
*/
|
Writes a chart to an output stream in PNG format
|
writeChartAsPNG
|
{
"repo_name": "oskopek/jfreechart-fse",
"path": "src/main/java/org/jfree/chart/ChartUtilities.java",
"license": "lgpl-2.1",
"size": 29584
}
|
[
"java.io.IOException",
"java.io.OutputStream"
] |
import java.io.IOException; import java.io.OutputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 837,301
|
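A hedged usage sketch for the writeChartAsPNG overload above; chart construction is elided, and the ChartPngWriter wrapper, output file name, and 600x400 size are illustrative assumptions.
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.jfree.chart.ChartUtilities;
import org.jfree.chart.JFreeChart;
public final class ChartPngWriter {
    // Writes an already-built chart to disk as a 600x400 PNG.
    public static void savePng(JFreeChart chart) throws IOException {
        try (OutputStream out = new FileOutputStream("chart.png")) {
            ChartUtilities.writeChartAsPNG(out, chart, 600, 400);
        }
    }
}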
private List<Integer> getDistinctEdgeColumnsLeft() {
return getDistinctEdgeColumns(getLeftChild().getEmbeddingMetaData());
}
|
List<Integer> function() { return getDistinctEdgeColumns(getLeftChild().getEmbeddingMetaData()); }
|
/**
* According to the specified {@link ValueJoinNode#edgeStrategy}, the method returns
* the columns that need to contain distinct entries in the left embedding.
*
* @return distinct edge columns of the left embedding
*/
|
According to the specified <code>ValueJoinNode#edgeStrategy</code>, the method returns the columns that need to contain distinct entries in the left embedding
|
getDistinctEdgeColumnsLeft
|
{
"repo_name": "niklasteichmann/gradoop",
"path": "gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/matching/single/cypher/planning/queryplan/binary/ValueJoinNode.java",
"license": "apache-2.0",
"size": 9393
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,305,242
|
public void testSize() throws Exception {
//SystemScope ss = new SystemScope("SystemScope");
assertEquals(0, ss.size());
ss.addIdentity(new IdentityScopeStub("aaa"));
assertEquals(1, ss.size());
ss.addIdentity(new IdentityScopeStub("bbb"));
assertEquals(2, ss.size());
}
|
void function() throws Exception { assertEquals(0, ss.size()); ss.addIdentity(new IdentityScopeStub("aaa")); assertEquals(1, ss.size()); ss.addIdentity(new IdentityScopeStub("bbb")); assertEquals(2, ss.size()); }
|
/**
* verify SystemScope.size() returns number of Identities
*/
|
verify SystemScope.size() returns number of Identities
|
testSize
|
{
"repo_name": "freeVM/freeVM",
"path": "enhanced/archive/classlib/java6/modules/security/src/test/impl/java/org/apache/harmony/security/tests/SystemScopeTest.java",
"license": "apache-2.0",
"size": 6388
}
|
[
"org.apache.harmony.security.tests.support.IdentityScopeStub"
] |
import org.apache.harmony.security.tests.support.IdentityScopeStub;
|
import org.apache.harmony.security.tests.support.*;
|
[
"org.apache.harmony"
] |
org.apache.harmony;
| 2,596,414
|
public void testConversionsMultiple()
throws NotExecutableException, RepositoryException {
PropertyDefinition propDef =
NodeTypeUtil.locatePropertyDef(session, PropertyType.BOOLEAN, true, false, false, false);
if (propDef == null) {
throw new NotExecutableException("No multiple boolean property def that meets the " +
"requirements of the test has been found");
}
NodeType nodeType = propDef.getDeclaringNodeType();
Value booleanValue = NodeTypeUtil.getValueOfType(session, PropertyType.BOOLEAN);
Value stringValue = NodeTypeUtil.getValueOfType(session, PropertyType.STRING);
Value stringValues[] = new Value[] {stringValue};
assertTrue("canSetProperty(String propertyName, Value[] values) must return " +
"true if the property is of type Boolean and values are of type StringValue",
nodeType.canSetProperty(propDef.getName(), stringValues));
Value binaryValue = NodeTypeUtil.getValueOfType(session, PropertyType.BINARY);
Value binaryValues[] = new Value[] {binaryValue};
assertTrue("canSetProperty(String propertyName, Value[] values) must return " +
"true if the property is of type Boolean and values are of type BinaryValue",
nodeType.canSetProperty(propDef.getName(), binaryValues));
Value dateValue = NodeTypeUtil.getValueOfType(session, PropertyType.DATE);
Value dateValues[] = new Value[] {booleanValue, dateValue};
assertFalse("canSetProperty(String propertyName, Value[] values) must return " +
"false if the property is of type Boolean and values are of type DateValue",
nodeType.canSetProperty(propDef.getName(), dateValues));
Value doubleValue = NodeTypeUtil.getValueOfType(session, PropertyType.DOUBLE);
Value doubleValues[] = new Value[] {booleanValue, doubleValue};
assertFalse("canSetProperty(String propertyName, Value[] values) must return " +
"false if the property is of type Boolean and values are of type DoubleValue",
nodeType.canSetProperty(propDef.getName(), doubleValues));
Value longValue = NodeTypeUtil.getValueOfType(session, PropertyType.LONG);
Value longValues[] = new Value[] {booleanValue, longValue};
assertFalse("canSetProperty(String propertyName, Value[] values) must return " +
"false if the property is of type Boolean and values are of type LongValue",
nodeType.canSetProperty(propDef.getName(), longValues));
Value booleanValues[] = new Value[] {booleanValue};
assertTrue("canSetProperty(String propertyName, Value[] values) must return " +
"true if the property is of type Boolean and values are of type BooleanValue",
nodeType.canSetProperty(propDef.getName(), booleanValues));
Value nameValue = NodeTypeUtil.getValueOfType(session, PropertyType.NAME);
Value nameValues[] = new Value[] {booleanValue, nameValue};
assertFalse("canSetProperty(String propertyName, Value[] values) must return " +
"false if the property is of type Boolean and values are of type NameValue",
nodeType.canSetProperty(propDef.getName(), nameValues));
Value pathValue = NodeTypeUtil.getValueOfType(session, PropertyType.PATH);
Value pathValues[] = new Value[] {booleanValue, pathValue};
assertFalse("canSetProperty(String propertyName, Value[] values) must return " +
"false if the property is of type Boolean and values are of type PathValue",
nodeType.canSetProperty(propDef.getName(), pathValues));
}
|
void function() throws NotExecutableException, RepositoryException { PropertyDefinition propDef = NodeTypeUtil.locatePropertyDef(session, PropertyType.BOOLEAN, true, false, false, false); if (propDef == null) { throw new NotExecutableException(STR + STR); } NodeType nodeType = propDef.getDeclaringNodeType(); Value booleanValue = NodeTypeUtil.getValueOfType(session, PropertyType.BOOLEAN); Value stringValue = NodeTypeUtil.getValueOfType(session, PropertyType.STRING); Value stringValues[] = new Value[] {stringValue}; assertTrue(STR + STR, nodeType.canSetProperty(propDef.getName(), stringValues)); Value binaryValue = NodeTypeUtil.getValueOfType(session, PropertyType.BINARY); Value binaryValues[] = new Value[] {binaryValue}; assertTrue(STR + STR, nodeType.canSetProperty(propDef.getName(), binaryValues)); Value dateValue = NodeTypeUtil.getValueOfType(session, PropertyType.DATE); Value dateValues[] = new Value[] {booleanValue, dateValue}; assertFalse(STR + STR, nodeType.canSetProperty(propDef.getName(), dateValues)); Value doubleValue = NodeTypeUtil.getValueOfType(session, PropertyType.DOUBLE); Value doubleValues[] = new Value[] {booleanValue, doubleValue}; assertFalse(STR + STR, nodeType.canSetProperty(propDef.getName(), doubleValues)); Value longValue = NodeTypeUtil.getValueOfType(session, PropertyType.LONG); Value longValues[] = new Value[] {booleanValue, longValue}; assertFalse(STR + STR, nodeType.canSetProperty(propDef.getName(), longValues)); Value booleanValues[] = new Value[] {booleanValue}; assertTrue(STR + STR, nodeType.canSetProperty(propDef.getName(), booleanValues)); Value nameValue = NodeTypeUtil.getValueOfType(session, PropertyType.NAME); Value nameValues[] = new Value[] {booleanValue, nameValue}; assertFalse(STR + STR, nodeType.canSetProperty(propDef.getName(), nameValues)); Value pathValue = NodeTypeUtil.getValueOfType(session, PropertyType.PATH); Value pathValues[] = new Value[] {booleanValue, pathValue}; assertFalse(STR + STR, nodeType.canSetProperty(propDef.getName(), pathValues)); }
|
/**
* Tests if NodeType.canSetProperty(String propertyName, Value[] values)
* returns true if all values and its types are convertible to
* BooleanValue.
*/
|
Tests if NodeType.canSetProperty(String propertyName, Value[] values) returns true if all values and its types are convertible to BooleanValue
|
testConversionsMultiple
|
{
"repo_name": "jalkanen/Priha",
"path": "tests/tck/org/apache/jackrabbit/test/api/nodetype/CanSetPropertyBooleanTest.java",
"license": "apache-2.0",
"size": 11995
}
|
[
"javax.jcr.PropertyType",
"javax.jcr.RepositoryException",
"javax.jcr.Value",
"javax.jcr.nodetype.NodeType",
"javax.jcr.nodetype.PropertyDefinition",
"org.apache.jackrabbit.test.NotExecutableException"
] |
import javax.jcr.PropertyType; import javax.jcr.RepositoryException; import javax.jcr.Value; import javax.jcr.nodetype.NodeType; import javax.jcr.nodetype.PropertyDefinition; import org.apache.jackrabbit.test.NotExecutableException;
|
import javax.jcr.*; import javax.jcr.nodetype.*; import org.apache.jackrabbit.test.*;
|
[
"javax.jcr",
"org.apache.jackrabbit"
] |
javax.jcr; org.apache.jackrabbit;
| 1,711,919
|
public DateTimeField minuteOfDay() {
return UnsupportedDateTimeField.getInstance(DateTimeFieldType.minuteOfDay(), minutes());
}
|
DateTimeField function() { return UnsupportedDateTimeField.getInstance(DateTimeFieldType.minuteOfDay(), minutes()); }
|
/**
* Get the minute of day field for this chronology.
*
* @return DateTimeField or UnsupportedDateTimeField if unsupported
*/
|
Get the minute of day field for this chronology
|
minuteOfDay
|
{
"repo_name": "charles-cooper/idylfin",
"path": "src/org/joda/time/chrono/BaseChronology.java",
"license": "apache-2.0",
"size": 25218
}
|
[
"org.joda.time.DateTimeField",
"org.joda.time.DateTimeFieldType",
"org.joda.time.field.UnsupportedDateTimeField"
] |
import org.joda.time.DateTimeField; import org.joda.time.DateTimeFieldType; import org.joda.time.field.UnsupportedDateTimeField;
|
import org.joda.time.*; import org.joda.time.field.*;
|
[
"org.joda.time"
] |
org.joda.time;
| 547,852
|
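A small usage sketch for the minuteOfDay chronology field above, assuming Joda-Time's ISOChronology (which supports the field) rather than the UnsupportedDateTimeField fallback; the MinuteOfDayDemo class is illustrative only.
import org.joda.time.Chronology;
import org.joda.time.chrono.ISOChronology;
public final class MinuteOfDayDemo {
    public static void main(String[] args) {
        Chronology chrono = ISOChronology.getInstanceUTC();
        long nowMillis = System.currentTimeMillis();
        // minuteOfDay() yields a DateTimeField; get() extracts the value for an instant.
        int minuteOfDay = chrono.minuteOfDay().get(nowMillis);
        System.out.println("Minute of day (UTC): " + minuteOfDay);
    }
}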
@Override
public IOObject[] apply() throws OperatorException {
// get example set
ExampleSet exampleSet = this.getInput(ExampleSet.class);
int exampleSetSize = exampleSet.size();
this.logNote("Input example-set has " + exampleSetSize + " elements.");
// get parameters
String metaFileName = this.getParameterAsString(PARAMETER_META_FILENAME);
String selectorFileName = this.getParameterAsString(PARAMETER_SELECTOR_FILENAME);
MetaConfig mc = MetaConfig.load(metaFileName);
String snmiColumnName = mc.getSnmiColumnName();
String clusterColumnPrefix = mc.getClusteringColumnPrefix();
int sampleSize = this.getParameterAsInt(PARAMETER_SAMPLE_SIZE);
if (sampleSize < 1 || sampleSize > exampleSetSize) {
throw new UserError(this, 116, new Object[] { PARAMETER_SAMPLE_SIZE, sampleSize });
}
this.logNote("Requested clustering sample size: " + sampleSize);
// create attributes for the selection flag and for a general order of selection
Attribute jcOrderAttr = AttributeFactory.createAttribute(JOINT_CRITERION_COLUMN_NAME_ORDER, Ontology.INTEGER);
Attribute jcSelectedAttr = AttributeFactory.createAttribute(JOINT_CRITERION_COLUMN_NAME_SELECTED,
Ontology.NOMINAL);
exampleSet.getExampleTable().addAttribute(jcOrderAttr);
exampleSet.getExampleTable().addAttribute(jcSelectedAttr);
// add attribute to view
exampleSet.getAttributes().setSpecialAttribute(jcOrderAttr, JOINT_CRITERION_COLUMN_NAME_ORDER);
exampleSet.getAttributes().setSpecialAttribute(jcSelectedAttr, JOINT_CRITERION_COLUMN_NAME_SELECTED);
// add dummy attribute-column (only to the table)
Attribute workingSNMIAttr = AttributeFactory.createAttribute(WORKING_COLUMN_NAME + "1", Ontology.REAL);
Attribute workingNMIAttr = AttributeFactory.createAttribute(WORKING_COLUMN_NAME + "2", Ontology.REAL);
Attribute workingSumAttr = AttributeFactory.createAttribute(WORKING_COLUMN_NAME + "3", Ontology.REAL);
exampleSet.getExampleTable().addAttribute(workingSNMIAttr);
exampleSet.getExampleTable().addAttribute(workingNMIAttr);
exampleSet.getExampleTable().addAttribute(workingSumAttr);
// get a sorted iterator over the snmi-column of the nmi-csv-file
Attribute snmiAttr = exampleSet.getAttributes().get(snmiColumnName);
if (snmiAttr == null) {
throw new UserError(this, 111, snmiColumnName);
}
List<SortingIndex> sortedIndex = new ArrayList<SortingIndex>(exampleSetSize);
int counter = 0;
Iterator<Example> it = exampleSet.iterator();
Example example = null;
while (it.hasNext()) {
example = it.next();
sortedIndex.add(new SortingIndex(Double.valueOf(example.getNumericalValue(snmiAttr)), counter));
counter++;
}
Collections.sort(sortedIndex);
// setting first selected element-index (the one with the highest snmi)
int selectedIndex = sortedIndex.get(exampleSetSize - 1).getIndex();
// a list with all (so far) selected indices
List<Integer> selectedIndices = new LinkedList<Integer>();
// fill selection attributes
int order = 0;
double sumSNMIs = 0.0d;
double sumNMIs = 0.0d;
double curSNMI, curNMI, newSumSNMIs, newSumNMIs;
DataRow selectedRow = null;
DataRow curRow = null;
for (int i = 0; i < exampleSetSize; i++) {
for (int j = 0; j < exampleSetSize; j++) {
if (selectedIndices.contains(j)) {
// not relevant anymore
continue;
}
curRow = exampleSet.getExampleTable().getDataRow(j);
curSNMI = curRow.get(snmiAttr);
curNMI = 0.0d;
for (Attribute attr : exampleSet.getAttributes()) {
if (!attr.getName().startsWith(clusterColumnPrefix)) {
// not relevant
continue;
}
if (selectedIndices.contains(Integer.valueOf(attr.getName().substring(clusterColumnPrefix.length())))) {
curNMI += 1.0d - curRow.get(attr);
}
}
newSumSNMIs = sumSNMIs + curSNMI;
newSumNMIs = sumNMIs + curNMI;
curRow.set(workingSNMIAttr, newSumSNMIs);
curRow.set(workingNMIAttr, newSumNMIs);
curRow.set(workingSumAttr, ALPHA * newSumSNMIs + (1.0d - ALPHA) * newSumNMIs);
}
// find maximum
sortedIndex = new ArrayList<SortingIndex>(exampleSetSize);
counter = 0;
it = exampleSet.iterator();
while (it.hasNext()) {
example = it.next();
sortedIndex.add(new SortingIndex(Double.valueOf(example.getNumericalValue(workingSumAttr)), counter));
counter++;
}
Collections.sort(sortedIndex);
// set new selected index
selectedIndex = sortedIndex.get(exampleSetSize - 1).getIndex();
selectedIndices.add(selectedIndex);
// get new basic values for the calculation
selectedRow = exampleSet.getExampleTable().getDataRow(selectedIndex);
sumSNMIs = selectedRow.get(workingSNMIAttr);
sumNMIs = selectedRow.get(workingNMIAttr);
// reset working values of this selected index so that they are no longer relevant
selectedRow.set(workingSNMIAttr, Double.NEGATIVE_INFINITY);
selectedRow.set(workingNMIAttr, Double.NEGATIVE_INFINITY);
selectedRow.set(workingSumAttr, Double.NEGATIVE_INFINITY);
// set ordering-index
selectedRow.set(jcOrderAttr, order);
// set selected flag
if (order < sampleSize) {
exampleSet.getExample(selectedIndex).setValue(jcSelectedAttr, "true");
}
else {
exampleSet.getExample(selectedIndex).setValue(jcSelectedAttr, "false");
}
order++;
}
// remove working attributes
exampleSet.getExampleTable().removeAttribute(workingSNMIAttr);
exampleSet.getExampleTable().removeAttribute(workingNMIAttr);
exampleSet.getExampleTable().removeAttribute(workingSumAttr);
// write meta config
mc.setSelectorFileName(selectorFileName);
ClusteringInfo ci = new ClusteringInfo();
ci.setInfoColumnName(JOINT_CRITERION_COLUMN_NAME_ORDER);
ci.setSelectedColumnName(JOINT_CRITERION_COLUMN_NAME_SELECTED);
ci.setSampleSize(sampleSize);
mc.getClusteringInfo().put(this.getClass().getName(), ci);
mc.save(metaFileName);
return new IOObject[] { exampleSet };
}
|
IOObject[] function() throws OperatorException { ExampleSet exampleSet = this.getInput(ExampleSet.class); int exampleSetSize = exampleSet.size(); this.logNote(STR + exampleSetSize + STR); String metaFileName = this.getParameterAsString(PARAMETER_META_FILENAME); String selectorFileName = this.getParameterAsString(PARAMETER_SELECTOR_FILENAME); MetaConfig mc = MetaConfig.load(metaFileName); String snmiColumnName = mc.getSnmiColumnName(); String clusterColumnPrefix = mc.getClusteringColumnPrefix(); int sampleSize = this.getParameterAsInt(PARAMETER_SAMPLE_SIZE); if (sampleSize < 1 || sampleSize > exampleSetSize) { throw new UserError(this, 116, new Object[] { PARAMETER_SAMPLE_SIZE, sampleSize }); } this.logNote(STR + sampleSize); Attribute jcOrderAttr = AttributeFactory.createAttribute(JOINT_CRITERION_COLUMN_NAME_ORDER, Ontology.INTEGER); Attribute jcSelectedAttr = AttributeFactory.createAttribute(JOINT_CRITERION_COLUMN_NAME_SELECTED, Ontology.NOMINAL); exampleSet.getExampleTable().addAttribute(jcOrderAttr); exampleSet.getExampleTable().addAttribute(jcSelectedAttr); exampleSet.getAttributes().setSpecialAttribute(jcOrderAttr, JOINT_CRITERION_COLUMN_NAME_ORDER); exampleSet.getAttributes().setSpecialAttribute(jcSelectedAttr, JOINT_CRITERION_COLUMN_NAME_SELECTED); Attribute workingSNMIAttr = AttributeFactory.createAttribute(WORKING_COLUMN_NAME + "1", Ontology.REAL); Attribute workingNMIAttr = AttributeFactory.createAttribute(WORKING_COLUMN_NAME + "2", Ontology.REAL); Attribute workingSumAttr = AttributeFactory.createAttribute(WORKING_COLUMN_NAME + "3", Ontology.REAL); exampleSet.getExampleTable().addAttribute(workingSNMIAttr); exampleSet.getExampleTable().addAttribute(workingNMIAttr); exampleSet.getExampleTable().addAttribute(workingSumAttr); Attribute snmiAttr = exampleSet.getAttributes().get(snmiColumnName); if (snmiAttr == null) { throw new UserError(this, 111, snmiColumnName); } List<SortingIndex> sortedIndex = new ArrayList<SortingIndex>(exampleSetSize); int counter = 0; Iterator<Example> it = exampleSet.iterator(); Example example = null; while (it.hasNext()) { example = it.next(); sortedIndex.add(new SortingIndex(Double.valueOf(example.getNumericalValue(snmiAttr)), counter)); counter++; } Collections.sort(sortedIndex); int selectedIndex = sortedIndex.get(exampleSetSize - 1).getIndex(); List<Integer> selectedIndices = new LinkedList<Integer>(); int order = 0; double sumSNMIs = 0.0d; double sumNMIs = 0.0d; double curSNMI, curNMI, newSumSNMIs, newSumNMIs; DataRow selectedRow = null; DataRow curRow = null; for (int i = 0; i < exampleSetSize; i++) { for (int j = 0; j < exampleSetSize; j++) { if (selectedIndices.contains(j)) { continue; } curRow = exampleSet.getExampleTable().getDataRow(j); curSNMI = curRow.get(snmiAttr); curNMI = 0.0d; for (Attribute attr : exampleSet.getAttributes()) { if (!attr.getName().startsWith(clusterColumnPrefix)) { continue; } if (selectedIndices.contains(Integer.valueOf(attr.getName().substring(clusterColumnPrefix.length())))) { curNMI += 1.0d - curRow.get(attr); } } newSumSNMIs = sumSNMIs + curSNMI; newSumNMIs = sumNMIs + curNMI; curRow.set(workingSNMIAttr, newSumSNMIs); curRow.set(workingNMIAttr, newSumNMIs); curRow.set(workingSumAttr, ALPHA * newSumSNMIs + (1.0d - ALPHA) * newSumNMIs); } sortedIndex = new ArrayList<SortingIndex>(exampleSetSize); counter = 0; it = exampleSet.iterator(); while (it.hasNext()) { example = it.next(); sortedIndex.add(new SortingIndex(Double.valueOf(example.getNumericalValue(workingSumAttr)), counter)); counter++; } Collections.sort(sortedIndex); selectedIndex = sortedIndex.get(exampleSetSize - 1).getIndex(); selectedIndices.add(selectedIndex); selectedRow = exampleSet.getExampleTable().getDataRow(selectedIndex); sumSNMIs = selectedRow.get(workingSNMIAttr); sumNMIs = selectedRow.get(workingNMIAttr); selectedRow.set(workingSNMIAttr, Double.NEGATIVE_INFINITY); selectedRow.set(workingNMIAttr, Double.NEGATIVE_INFINITY); selectedRow.set(workingSumAttr, Double.NEGATIVE_INFINITY); selectedRow.set(jcOrderAttr, order); if (order < sampleSize) { exampleSet.getExample(selectedIndex).setValue(jcSelectedAttr, "true"); } else { exampleSet.getExample(selectedIndex).setValue(jcSelectedAttr, "false"); } order++; } exampleSet.getExampleTable().removeAttribute(workingSNMIAttr); exampleSet.getExampleTable().removeAttribute(workingNMIAttr); exampleSet.getExampleTable().removeAttribute(workingSumAttr); mc.setSelectorFileName(selectorFileName); ClusteringInfo ci = new ClusteringInfo(); ci.setInfoColumnName(JOINT_CRITERION_COLUMN_NAME_ORDER); ci.setSelectedColumnName(JOINT_CRITERION_COLUMN_NAME_SELECTED); ci.setSampleSize(sampleSize); mc.getClusteringInfo().put(this.getClass().getName(), ci); mc.save(metaFileName); return new IOObject[] { exampleSet }; }
|
/************************************************************************************************
* PUBLIC METHODS
***********************************************************************************************/
|
PUBLIC METHODS
|
apply
|
{
"repo_name": "ntj/ComplexRapidMiner",
"path": "src/de/tud/inf/operator/mm/JointCriterionSelector.java",
"license": "gpl-2.0",
"size": 9702
}
|
[
"com.rapidminer.example.Attribute",
"com.rapidminer.example.Example",
"com.rapidminer.example.ExampleSet",
"com.rapidminer.example.table.AttributeFactory",
"com.rapidminer.example.table.DataRow",
"com.rapidminer.operator.IOObject",
"com.rapidminer.operator.OperatorException",
"com.rapidminer.operator.UserError",
"com.rapidminer.tools.Ontology",
"de.tud.inf.operator.mm.util.ClusteringInfo",
"de.tud.inf.operator.mm.util.MetaConfig",
"de.tud.inf.operator.mm.util.SortingIndex",
"java.util.ArrayList",
"java.util.Collections",
"java.util.Iterator",
"java.util.LinkedList",
"java.util.List"
] |
import com.rapidminer.example.Attribute; import com.rapidminer.example.Example; import com.rapidminer.example.ExampleSet; import com.rapidminer.example.table.AttributeFactory; import com.rapidminer.example.table.DataRow; import com.rapidminer.operator.IOObject; import com.rapidminer.operator.OperatorException; import com.rapidminer.operator.UserError; import com.rapidminer.tools.Ontology; import de.tud.inf.operator.mm.util.ClusteringInfo; import de.tud.inf.operator.mm.util.MetaConfig; import de.tud.inf.operator.mm.util.SortingIndex; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List;
|
import com.rapidminer.example.*; import com.rapidminer.example.table.*; import com.rapidminer.operator.*; import com.rapidminer.tools.*; import de.tud.inf.operator.mm.util.*; import java.util.*;
|
[
"com.rapidminer.example",
"com.rapidminer.operator",
"com.rapidminer.tools",
"de.tud.inf",
"java.util"
] |
com.rapidminer.example; com.rapidminer.operator; com.rapidminer.tools; de.tud.inf; java.util;
| 416,491
|
@Test(expected = IllegalArgumentException.class)
public void testGetPerso_ValidIdentifier() throws Exception {
CommandParser.getPerso("01");
}
|
@Test(expected = IllegalArgumentException.class) void function() throws Exception { CommandParser.getPerso("01"); }
|
/**
* Negative test case: parse personalization from a perso identifier without OSGi-Context
* @throws Exception
*/
|
Negative test case: parse personalization from a perso identifier without OSGi-Context
|
testGetPerso_ValidIdentifier
|
{
"repo_name": "halemmerich/de.persosim.simulator",
"path": "de.persosim.simulator.test/src/de/persosim/simulator/CommandParserTest.java",
"license": "gpl-3.0",
"size": 3167
}
|
[
"org.junit.Test"
] |
import org.junit.Test;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 686,143
|
List<String> getFileExtensions();
|
List<String> getFileExtensions();
|
/**
* Returns file extensions.
*
* @return file extensions.
*/
|
Returns file extensions
|
getFileExtensions
|
{
"repo_name": "softelnet/sponge",
"path": "sponge-api/src/main/java/org/openksavi/sponge/kb/KnowledgeBaseType.java",
"license": "apache-2.0",
"size": 1323
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,374,480
|
public void testAddCanvas() {
DrawingCanvas canvas2 = this.canvas.newTile();
DrawingCanvas canvas3 = canvas2.newTile();
Rectangle r1 = new Rectangle(100, 100, 100, 100, Color.BLUE);
Rectangle r2 = new Rectangle(100, 100, 100, 100, Color.RED);
Rectangle r3 = new Rectangle(100, 100, 100, 100, Color.GREEN);
canvas3.add(r3);
canvas2.add(canvas3);
canvas2.add(r2);
this.canvas.add(r1);
this.canvas.add(canvas2, 100, 100);
GraphicFileUtils.write(TEST_DIR, "add.png",
this.canvas.toBufferedImage(), RasterGraphicFileType.PNG);
}
|
void function() { DrawingCanvas canvas2 = this.canvas.newTile(); DrawingCanvas canvas3 = canvas2.newTile(); Rectangle r1 = new Rectangle(100, 100, 100, 100, Color.BLUE); Rectangle r2 = new Rectangle(100, 100, 100, 100, Color.RED); Rectangle r3 = new Rectangle(100, 100, 100, 100, Color.GREEN); canvas3.add(r3); canvas2.add(canvas3); canvas2.add(r2); this.canvas.add(r1); this.canvas.add(canvas2, 100, 100); GraphicFileUtils.write(TEST_DIR, STR, this.canvas.toBufferedImage(), RasterGraphicFileType.PNG); }
|
/**
* Tests adding a child to a parent canvas.
*
*/
|
Tests adding a child to a parent canvas
|
testAddCanvas
|
{
"repo_name": "NCIP/webgenome",
"path": "tags/WEBGENOME_R3.2_6MAR2009_BUILD1/java/core/junit/org/rti/webgenome/graphics/primitive/RasterDrawingCanvasTester.java",
"license": "bsd-3-clause",
"size": 6441
}
|
[
"java.awt.Color",
"org.rti.webgenome.graphics.DrawingCanvas",
"org.rti.webgenome.graphics.io.GraphicFileUtils",
"org.rti.webgenome.graphics.io.RasterGraphicFileType",
"org.rti.webgenome.graphics.primitive.Rectangle"
] |
import java.awt.Color; import org.rti.webgenome.graphics.DrawingCanvas; import org.rti.webgenome.graphics.io.GraphicFileUtils; import org.rti.webgenome.graphics.io.RasterGraphicFileType; import org.rti.webgenome.graphics.primitive.Rectangle;
|
import java.awt.*; import org.rti.webgenome.graphics.*; import org.rti.webgenome.graphics.io.*; import org.rti.webgenome.graphics.primitive.*;
|
[
"java.awt",
"org.rti.webgenome"
] |
java.awt; org.rti.webgenome;
| 2,335
|
public IDataset getDescription();
|
IDataset function();
|
/**
* This should describe the reason for including this reference.
* For example: The dataset in this group was normalised using the method
* which is described in detail in this reference.
* <p>
* <b>Type:</b> NX_CHAR
* </p>
*
* @return the value.
*/
|
This should describe the reason for including this reference. For example: The dataset in this group was normalised using the method which is described in detail in this reference. Type: NX_CHAR
|
getDescription
|
{
"repo_name": "Anthchirp/dawnsci",
"path": "org.eclipse.dawnsci.nexus/src/org/eclipse/dawnsci/nexus/NXcite.java",
"license": "epl-1.0",
"size": 1940
}
|
[
"org.eclipse.dawnsci.analysis.api.dataset.IDataset"
] |
import org.eclipse.dawnsci.analysis.api.dataset.IDataset;
|
import org.eclipse.dawnsci.analysis.api.dataset.*;
|
[
"org.eclipse.dawnsci"
] |
org.eclipse.dawnsci;
| 746,639
|
public List<String> getArguments();
public static class EngineArgument {
private String keyWord;
private AtomValue<?> atom;
public EngineArgument(String keyWord, AtomValue<?> atom) {
this.keyWord = keyWord;
this.atom = atom;
}
|
List<String> function(); public static class EngineArgument { private String keyWord; private AtomValue<?> atom; public EngineArgument(String keyWord, AtomValue<?> atom) { this.keyWord = keyWord; this.atom = atom; }
|
/**
* Provides the ordered names of the arguments
*
* @return
*/
|
Provides the ordered names of the arguments
|
getArguments
|
{
"repo_name": "notaql/notaql",
"path": "src/main/java/notaql/engines/Engine.java",
"license": "apache-2.0",
"size": 2526
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 817,151
|
public CreateSimpleLexer langHandlerForExtension(String extension, String source) {
if (!(extension != null && langHandlerRegistry.get(extension) != null)) {
// Treat it as markup if the first non whitespace character is a < and
// the last non-whitespace character is a >.
extension = Util.test(Pattern.compile("^\\s*<"), source)
? "default-markup"
: "default-code";
}
Object handler = langHandlerRegistry.get(extension);
if (handler instanceof CreateSimpleLexer) {
return (CreateSimpleLexer) handler;
} else {
CreateSimpleLexer _simpleLexer;
try {
Lang _lang = ((Class<Lang>) handler).newInstance();
_simpleLexer = new CreateSimpleLexer(_lang.getShortcutStylePatterns(), _lang.getFallthroughStylePatterns());
List<Lang> extendedLangs = _lang.getExtendedLangs();
for (Lang _extendedLang : extendedLangs) {
register(_extendedLang.getClass());
}
List<String> fileExtensions = getFileExtensionsFromClass((Class<Lang>) handler);
for (String _extension : fileExtensions) {
langHandlerRegistry.put(_extension, _simpleLexer);
}
} catch (Exception ex) {
LOG.log(Level.SEVERE, null, ex);
return null;
}
return _simpleLexer;
}
}
|
CreateSimpleLexer function(String extension, String source) { if (!(extension != null && langHandlerRegistry.get(extension) != null)) { extension = Util.test(Pattern.compile(STR), source) ? STR : STR; } Object handler = langHandlerRegistry.get(extension); if (handler instanceof CreateSimpleLexer) { return (CreateSimpleLexer) handler; } else { CreateSimpleLexer _simpleLexer; try { Lang _lang = ((Class<Lang>) handler).newInstance(); _simpleLexer = new CreateSimpleLexer(_lang.getShortcutStylePatterns(), _lang.getFallthroughStylePatterns()); List<Lang> extendedLangs = _lang.getExtendedLangs(); for (Lang _extendedLang : extendedLangs) { register(_extendedLang.getClass()); } List<String> fileExtensions = getFileExtensionsFromClass((Class<Lang>) handler); for (String _extension : fileExtensions) { langHandlerRegistry.put(_extension, _simpleLexer); } } catch (Exception ex) { LOG.log(Level.SEVERE, null, ex); return null; } return _simpleLexer; } }
|
/**
* Get the parser for the extension specified.
* @param extension the file extension, if null, default parser will be returned
* @param source the source code
* @return the parser
*/
|
Get the parser for the extension specified
|
langHandlerForExtension
|
{
"repo_name": "GerritCodeReview/java-prettify",
"path": "src/prettify/parser/Prettify.java",
"license": "apache-2.0",
"size": 43810
}
|
[
"java.util.List",
"java.util.logging.Level",
"java.util.regex.Pattern"
] |
import java.util.List; import java.util.logging.Level; import java.util.regex.Pattern;
|
import java.util.*; import java.util.logging.*; import java.util.regex.*;
|
[
"java.util"
] |
java.util;
| 1,626,002
|
public ConcurrencyLimit getConcurrencyLimit() {
return new ConcurrencyLimit(
getDelegate().getNumThreads(),
getResourceAllocationFairness(),
getManagedThreadCount(),
getDefaultResourceAmounts(),
getMaximumResourceAmounts());
}
|
ConcurrencyLimit function() { return new ConcurrencyLimit( getDelegate().getNumThreads(), getResourceAllocationFairness(), getManagedThreadCount(), getDefaultResourceAmounts(), getMaximumResourceAmounts()); }
|
/**
* Construct a default ConcurrencyLimit instance from this config.
*
* @return New instance of ConcurrencyLimit.
*/
|
Construct a default ConcurrencyLimit instance from this config
|
getConcurrencyLimit
|
{
"repo_name": "LegNeato/buck",
"path": "src/com/facebook/buck/config/resources/AbstractResourcesConfig.java",
"license": "apache-2.0",
"size": 5549
}
|
[
"com.facebook.buck.util.concurrent.ConcurrencyLimit"
] |
import com.facebook.buck.util.concurrent.ConcurrencyLimit;
|
import com.facebook.buck.util.concurrent.*;
|
[
"com.facebook.buck"
] |
com.facebook.buck;
| 2,874,851
|
@Deprecated
public synchronized boolean saveToRepositoryAsGone() throws ContentEncodingException, IOException {
setSaveType(SaveType.REPOSITORY);
return saveAsGone();
}
|
synchronized boolean function() throws ContentEncodingException, IOException { setSaveType(SaveType.REPOSITORY); return saveAsGone(); }
|
/**
* Deprecated; use either object defaults or setRepositorySave() to indicate writes
* should go to a repository, then call save() to write.
* If raw=true or DEFAULT_RAW=true specified, this must be the first call to save made
* for this object.
* @throws ContentEncodingException if there is an error encoding the content
* @throws IOException if there is an error reading the content from the network
*/
|
Deprecated; use either object defaults or setRepositorySave() to indicate writes should go to a repository, then call save() to write. If raw=true or DEFAULT_RAW=true specified, this must be the first call to save made for this object
|
saveToRepositoryAsGone
|
{
"repo_name": "StefanoSalsano/alien-ofelia-conet-ccnx",
"path": "javasrc/src/org/ccnx/ccn/io/content/CCNNetworkObject.java",
"license": "lgpl-2.1",
"size": 68811
}
|
[
"java.io.IOException",
"org.ccnx.ccn.impl.CCNFlowControl"
] |
import java.io.IOException; import org.ccnx.ccn.impl.CCNFlowControl;
|
import java.io.*; import org.ccnx.ccn.impl.*;
|
[
"java.io",
"org.ccnx.ccn"
] |
java.io; org.ccnx.ccn;
| 180,314
|
@Test
public void shouldFormatAssigneed() {
Notification notification = new Notification("review-changed")
.setFieldValue("reviewId", "1")
.setFieldValue("project", "Sonar")
.setFieldValue("resource", "org.sonar.server.ui.DefaultPages")
.setFieldValue("title", "Utility classes should not have a public or default constructor.")
.setFieldValue("author", "freddy.mallet")
.setFieldValue("old.assignee", null)
.setFieldValue("new.assignee", "evgeny.mandrikov");
EmailMessage message = template.format(notification);
assertThat(message.getMessageId(), is("review/1"));
assertThat(message.getSubject(), is("Review #1"));
assertThat(message.getFrom(), is("Freddy Mallet"));
assertThat(message.getMessage(), is("" +
"Project: Sonar\n" +
"Resource: org.sonar.server.ui.DefaultPages\n" +
"\n" +
"Utility classes should not have a public or default constructor.\n" +
"\n" +
"Assignee: Evgeny Mandrikov\n" +
"\n" +
"See it in Sonar: http://nemo.sonarsource.org/reviews/view/1\n"));
}
|
void function() { Notification notification = new Notification(STR) .setFieldValue(STR, "1") .setFieldValue(STR, "Sonar") .setFieldValue(STR, STR) .setFieldValue("title", STR) .setFieldValue(STR, STR) .setFieldValue(STR, null) .setFieldValue(STR, STR); EmailMessage message = template.format(notification); assertThat(message.getMessageId(), is(STR)); assertThat(message.getSubject(), is(STR)); assertThat(message.getFrom(), is(STR)); assertThat(message.getMessage(), is(STRProject: Sonar\nSTRResource: org.sonar.server.ui.DefaultPages\nSTR\nSTRUtility classes should not have a public or default constructor.\nSTR\nSTRAssignee: Evgeny Mandrikov\nSTR\nSTRSee it in Sonar: http: }
|
/**
* <pre>
* Subject: Review #1
* From: Freddy Mallet
*
* Project: Sonar
* Resource: org.sonar.server.ui.DefaultPages
*
* Utility classes should not have a public or default constructor.
*
* Assignee: Evgeny Mandrikov
*
* See it in Sonar: http://nemo.sonarsource.org/review/view/1
* </pre>
*/
|
<code> Subject: Review #1 From: Freddy Mallet Project: Sonar Resource: org.sonar.server.ui.DefaultPages Utility classes should not have a public or default constructor. Assignee: Evgeny Mandrikov See it in Sonar: HREF </code>
|
shouldFormatAssigneed
|
{
"repo_name": "jmecosta/sonar",
"path": "plugins/sonar-email-notifications-plugin/src/test/java/org/sonar/plugins/emailnotifications/templates/reviews/ReviewEmailTemplateTest.java",
"license": "lgpl-3.0",
"size": 18251
}
|
[
"org.hamcrest.Matchers",
"org.junit.Assert",
"org.sonar.api.notifications.Notification",
"org.sonar.plugins.emailnotifications.api.EmailMessage"
] |
import org.hamcrest.Matchers; import org.junit.Assert; import org.sonar.api.notifications.Notification; import org.sonar.plugins.emailnotifications.api.EmailMessage;
|
import org.hamcrest.*; import org.junit.*; import org.sonar.api.notifications.*; import org.sonar.plugins.emailnotifications.api.*;
|
[
"org.hamcrest",
"org.junit",
"org.sonar.api",
"org.sonar.plugins"
] |
org.hamcrest; org.junit; org.sonar.api; org.sonar.plugins;
| 872,510
|
@Override
public JClustersTestItem rename(Name name) {
return new JClustersTestItem(name, null);
}
// -------------------------------------------------------------------------
// Row2 type methods
// -------------------------------------------------------------------------
|
JClustersTestItem function(Name name) { return new JClustersTestItem(name, null); }
|
/**
* Rename this table
*/
|
Rename this table
|
rename
|
{
"repo_name": "reportportal/commons-dao",
"path": "src/main/java/com/epam/ta/reportportal/jooq/tables/JClustersTestItem.java",
"license": "apache-2.0",
"size": 4310
}
|
[
"org.jooq.Name"
] |
import org.jooq.Name;
|
import org.jooq.*;
|
[
"org.jooq"
] |
org.jooq;
| 951,893
|
public void draw() {
_parent.pushStyle();
_parent.pushMatrix();
_parent.translate(position.x, position.y, position.z);
// Align all the text by centre point
_parent.textAlign(PConstants.CENTER);
// Draw Axis line
PVector length = PVector.mult(_unitVec, _length);
if (_drawLine) {
_parent.stroke(_axisColour.toInt());
_parent.fill(_fontColour.toInt());
_parent.strokeWeight(_axisLineWidth);
_parent.line(0, 0, 0, length.x, length.y, length.z);
}
		float longestLabel = 0; // width of the longest tick label, used later for axis label placement
PVector pos = null;
PVector eye = null;
PVector XZVector = null;
float angleCamZAxis = 0;
float angleCamXZPlane = 0;
if (_drawTicks || _drawTickLabels) {
// Start to add ticks on axis by initialising a bunch of variables
// find distance between ticks on axis
PVector dvTick = PVector.div(length, _majorTicks); // distance
// between major
// ticks
PVector dvTickMinor = PVector.div(dvTick, _minorTicks + 1); // distance
// between
// minor
// ticks
PVector tickPos = new PVector(0, 0, 0);
PVector tickEnd = new PVector(0, 0, 0);
PVector tickPosMinor = new PVector(0, 0, 0);
PVector tickEndMinor = new PVector(0, 0, 0);
PVector tickLblPos;
String tickLbl = null;
float dValue = (float) ((_maxShow - _minShow) / (_majorTicks)); // difference
// in
// label
// text
// between
// major
// ticks
_parent.strokeWeight(_axisTickLineWidth);
_parent.textFont(_font, _axisTickLblSize * 2);
			// calc eye vector only once as it won't change
if (_textBillboard) {
eye = Camera3D.getEyeVector().get();
eye.normalize();
// calculate billboarding angle only once for all ticks
pos = Camera3D.getPosition().get();
pos.sub(Camera3D.getLookat());
pos.sub(position);
XZVector = new PVector(pos.x, 0, pos.z);
XZVector.normalize();
angleCamZAxis = (float) Math.atan2(pos.x, pos.z);
angleCamXZPlane = (float) Math.acos(XZVector.dot(eye));
}
for (int i = 0; i <= _majorTicks; i++) {
// Write tick label
switch (_tickLblType) {
case DECIMAL:
tickLbl = String.format("% ." + _accuracy + "f",
(float) (_minShow + i * dValue));
break;
case EXPONENT:
tickLbl = String.format("% ." + _accuracy + "E",
(float) (_minShow + i * dValue));
break;
case INTEGER:
tickLbl = String.format("% d", Math.round(_minShow + i
* dValue));
break;
}
// determine if this is the longest label for main axis label
// placement later
if (tickLbl.length() > longestLabel) {
longestLabel = _parent.textWidth(tickLbl);
}
// to define where the major tick label is, we take the current
// tick position
// and move in the label direction depending on width of the
// text and the major
// tick size
tickLblPos = PVector.add(tickPos, PVector.mult(_labelDirection,
1 + _parent.textWidth(tickLbl) / 2
+ Math.abs(_majorTickSize)));
// as the text is not vertically centred about the render point
// we offset slightly
// depending on the size of the font so label is approx centred
// to tick
tickLblPos.add(PVector.mult(_unitVec,
(float) ((float) _axisTickLblSize * 1.2)));
// also determine point where the tick should end, based on
// label direction and length
tickEnd = PVector.add(tickPos, PVector.mult(_labelDirection,
_majorTickSize));
// next draw the tick label
if (_drawTickLabels) {
_parent.pushMatrix();
_parent.translate(tickLblPos.x, tickLblPos.y, tickLblPos.z);
if (_textBillboard) {
_parent.rotateZ(PI);
_parent.rotateY(-angleCamZAxis);
if (Math.signum(pos.y) == 1) {
_parent.rotateX(PI - angleCamXZPlane);
} else {
_parent.rotateX(PI + angleCamXZPlane);
}
} else {
_parent.rotateZ(_axisTickLblRotation.z);
_parent.rotateX(_axisTickLblRotation.x);
_parent.rotateY(_axisTickLblRotation.y);
}
_parent.text(tickLbl, 0, (0.25f * _font.getSize()), 0);
_parent.popMatrix();
}
if (_drawTicks) {
// draw the tick line
_parent.line(tickPos.x, tickPos.y, tickPos.z, tickEnd.x, tickEnd.y, tickEnd.z);
// Draw minor ticks
					if (i != 0) {// don't draw minor ticks on the first iteration:
						// the minor ticks get drawn in the -ive axis
						// direction from the major tick in question
						// (the alternative would be drawing in the +ve
						// axis direction and missing out the last set).
tickPosMinor = tickPos.get();
for (int j = 0; j < _minorTicks; j++) {
// same as with major ticks, get start and end
// points
tickPosMinor = PVector.sub(tickPosMinor,
dvTickMinor);
tickEndMinor = PVector.add(tickPosMinor, PVector.mult(_labelDirection, _minorTickSize));
_parent.line(tickPosMinor.x, tickPosMinor.y,
tickPosMinor.z, tickEndMinor.x, tickEndMinor.y,
tickEndMinor.z);
}
}
}
// position of tick along the axis
tickPos.add(dvTick);
}
}
_parent.popMatrix();
if (_drawName) {
// Add Label
_parent.textFont(_font, _axisLblSize);
// label position will be half way along the axis
PVector lblPos = PVector.add(position, PVector.div(length, 2));
// next need to offset in the label direction
lblPos.add(PVector.mult(_labelDirection, longestLabel
+ _axisLblSize + _axisLblOffset));
// Draw the label
_parent.pushMatrix();
// move into a local space about the label centre so we can easily
// rotate it
_parent.translate(lblPos.x, lblPos.y, lblPos.z);
_parent.rotateZ(_axisLblRotation.z);
_parent.rotateX(_axisLblRotation.x);
_parent.rotateY(_axisLblRotation.y);
_parent.text(String.valueOf(_label), 0, 0.25f * _font.getSize(), 0);
_parent.popMatrix();
}
_parent.popStyle();
}
|
void function() { _parent.pushStyle(); _parent.pushMatrix(); _parent.translate(position.x, position.y, position.z); _parent.textAlign(PConstants.CENTER); PVector length = PVector.mult(_unitVec, _length); if (_drawLine) { _parent.stroke(_axisColour.toInt()); _parent.fill(_fontColour.toInt()); _parent.strokeWeight(_axisLineWidth); _parent.line(0, 0, 0, length.x, length.y, length.z); } float longestLabel = 0; PVector pos = null; PVector eye = null; PVector XZVector = null; float angleCamZAxis = 0; float angleCamXZPlane = 0; if (_drawTicks _drawTickLabels) { PVector dvTick = PVector.div(length, _majorTicks); PVector dvTickMinor = PVector.div(dvTick, _minorTicks + 1); PVector tickPos = new PVector(0, 0, 0); PVector tickEnd = new PVector(0, 0, 0); PVector tickPosMinor = new PVector(0, 0, 0); PVector tickEndMinor = new PVector(0, 0, 0); PVector tickLblPos; String tickLbl = null; float dValue = (float) ((_maxShow - _minShow) / (_majorTicks)); _parent.strokeWeight(_axisTickLineWidth); _parent.textFont(_font, _axisTickLblSize * 2); if (_textBillboard) { eye = Camera3D.getEyeVector().get(); eye.normalize(); pos = Camera3D.getPosition().get(); pos.sub(Camera3D.getLookat()); pos.sub(position); XZVector = new PVector(pos.x, 0, pos.z); XZVector.normalize(); angleCamZAxis = (float) Math.atan2(pos.x, pos.z); angleCamXZPlane = (float) Math.acos(XZVector.dot(eye)); } for (int i = 0; i <= _majorTicks; i++) { switch (_tickLblType) { case DECIMAL: tickLbl = String.format(STR + _accuracy + "f", (float) (_minShow + i * dValue)); break; case EXPONENT: tickLbl = String.format(STR + _accuracy + "E", (float) (_minShow + i * dValue)); break; case INTEGER: tickLbl = String.format(STR, Math.round(_minShow + i * dValue)); break; } if (tickLbl.length() > longestLabel) { longestLabel = _parent.textWidth(tickLbl); } tickLblPos = PVector.add(tickPos, PVector.mult(_labelDirection, 1 + _parent.textWidth(tickLbl) / 2 + Math.abs(_majorTickSize))); tickLblPos.add(PVector.mult(_unitVec, (float) ((float) _axisTickLblSize * 1.2))); tickEnd = PVector.add(tickPos, PVector.mult(_labelDirection, _majorTickSize)); if (_drawTickLabels) { _parent.pushMatrix(); _parent.translate(tickLblPos.x, tickLblPos.y, tickLblPos.z); if (_textBillboard) { _parent.rotateZ(PI); _parent.rotateY(-angleCamZAxis); if (Math.signum(pos.y) == 1) { _parent.rotateX(PI - angleCamXZPlane); } else { _parent.rotateX(PI + angleCamXZPlane); } } else { _parent.rotateZ(_axisTickLblRotation.z); _parent.rotateX(_axisTickLblRotation.x); _parent.rotateY(_axisTickLblRotation.y); } _parent.text(tickLbl, 0, (0.25f * _font.getSize()), 0); _parent.popMatrix(); } if (_drawTicks) { _parent.line(tickPos.x, tickPos.y, tickPos.z, tickEnd.x, tickEnd.y, tickEnd.z); if (i != 0) { tickPosMinor = tickPos.get(); for (int j = 0; j < _minorTicks; j++) { tickPosMinor = PVector.sub(tickPosMinor, dvTickMinor); tickEndMinor = PVector.add(tickPosMinor, PVector.mult(_labelDirection, _minorTickSize)); _parent.line(tickPosMinor.x, tickPosMinor.y, tickPosMinor.z, tickEndMinor.x, tickEndMinor.y, tickEndMinor.z); } } } tickPos.add(dvTick); } } _parent.popMatrix(); if (_drawName) { _parent.textFont(_font, _axisLblSize); PVector lblPos = PVector.add(position, PVector.div(length, 2)); lblPos.add(PVector.mult(_labelDirection, longestLabel + _axisLblSize + _axisLblOffset)); _parent.pushMatrix(); _parent.translate(lblPos.x, lblPos.y, lblPos.z); _parent.rotateZ(_axisLblRotation.z); _parent.rotateX(_axisLblRotation.x); _parent.rotateY(_axisLblRotation.y); _parent.text(String.valueOf(_label), 0, 0.25f * _font.getSize(), 0); _parent.popMatrix(); } _parent.popStyle(); }
|
/**
* This function starts rendering the axis to the properties that have been
* specified.
*/
|
This function starts rendering the axis to the properties that have been specified
|
draw
|
{
"repo_name": "OpenBCI/OpenBCI_Processing",
"path": "libraries/gwoptics/src/org/gwoptics/graphics/graph3D/Axis3D.java",
"license": "mit",
"size": 14273
}
|
[
"org.gwoptics.graphics.camera.Camera3D"
] |
import org.gwoptics.graphics.camera.Camera3D;
|
import org.gwoptics.graphics.camera.*;
|
[
"org.gwoptics.graphics"
] |
org.gwoptics.graphics;
| 2,672,225
|
private void checkSimpleViews() throws SQLException {
// ................................................................
// SELECT *
checkViewTranslationAndContent(
"S1", null, "SELECT * FROM ABC",
"SELECT ABC.ID,ABC.A,ABC.B,ABC.C FROM ABC",
"ABC");
// adding a column to "ABC" should succeed, and still leave the view with the columns
// before the addition
executeStatement("ALTER TABLE ABC ADD COLUMN D VARCHAR(50)");
ensureTableColumns("ABC", new String[] {
"ID","A", "B", "C", "D"
});
ensureTableColumns("S1", new String[] {
"ID", "A", "B", "C"
});
// dropping the column which is not part of the view should be possible
executeStatement("ALTER TABLE ABC DROP COLUMN D");
// dropping another column which *is* part of the view shouldn't
executeStatement("ALTER TABLE ABC DROP COLUMN C",
Trace.COLUMN_IS_REFERENCED);
// finally, dropping the column with CASCADE should work, and also drop the view
//executeStatement("ALTER TABLE ABC DROP COLUMN C CASCADE");
// DROP COLUMN c CASCADE not implemented, yet
// ................................................................
// same as S1, but this time add a LIMIT clause to the statement
checkViewTranslationAndContent(
"S2", null, "SELECT LIMIT 0 2 * FROM ABC ORDER BY ID",
"SELECT LIMIT 0 2 ABC.ID,ABC.A,ABC.B,ABC.C FROM ABC ORDER BY ID",
"ABC");
// ................................................................
// same as S1, but this time add a TOP clause to the statement
checkViewTranslationAndContent(
"S3", null, "SELECT TOP 2 * FROM ABC ORDER BY ID",
"SELECT TOP 2 ABC.ID,ABC.A,ABC.B,ABC.C FROM ABC ORDER BY ID",
"ABC");
// ................................................................
// same as S1, but this time add a DISTINCT clause to the statement
checkViewTranslationAndContent(
"S4", null, "SELECT DISTINCT * FROM ABC",
"SELECT DISTINCT ABC.ID,ABC.A,ABC.B,ABC.C FROM ABC",
"ABC");
// ................................................................
// same as S1, but this time qualifying the asterisk
checkViewTranslationAndContent(
"S5", null, "SELECT ABC.* FROM ABC",
"SELECT ABC.ID,ABC.A,ABC.B,ABC.C FROM ABC",
"ABC");
// ................................................................
// same as S5, but this time also giving the table an alias
checkViewTranslationAndContent(
"S6", null, "SELECT \"A\".* FROM ABC AS A",
"SELECT \"A\".ID,\"A\".A,\"A\".B,\"A\".C FROM ABC AS A", "ABC");
// ................................................................
// same as S1, but bracket the SELECT definition
checkViewTranslationAndContent(
"S7", null, "( SELECT * FROM ABC )",
"( SELECT ABC.ID,ABC.A,ABC.B,ABC.C FROM ABC )",
"ABC");
}
|
void function() throws SQLException { checkViewTranslationAndContent( "S1", null, STR, STR, "ABC"); executeStatement(STR); ensureTableColumns("ABC", new String[] { "ID","A", "B", "C", "D" }); ensureTableColumns("S1", new String[] { "ID", "A", "B", "C" }); executeStatement(STR); executeStatement(STR, Trace.COLUMN_IS_REFERENCED); checkViewTranslationAndContent( "S2", null, STR, STR, "ABC"); checkViewTranslationAndContent( "S3", null, STR, STR, "ABC"); checkViewTranslationAndContent( "S4", null, STR, STR, "ABC"); checkViewTranslationAndContent( "S5", null, STR, STR, "ABC"); checkViewTranslationAndContent( "S6", null, STRA\STR, STRA\".ID,\"A\".A,\"A\".B,\"A\STR, "ABC"); checkViewTranslationAndContent( "S7", null, STR, STR, "ABC"); }
|
/**
* checks views selecting an asterisk from a table, in different flavours
*/
|
checks views selecting an asterisk from a table, in different flavours
|
checkSimpleViews
|
{
"repo_name": "minghao7896321/canyin",
"path": "hsqldb/src/org/hsqldb/test/TestViewAsterisks.java",
"license": "apache-2.0",
"size": 23579
}
|
[
"java.sql.SQLException",
"org.hsqldb.Trace"
] |
import java.sql.SQLException; import org.hsqldb.Trace;
|
import java.sql.*; import org.hsqldb.*;
|
[
"java.sql",
"org.hsqldb"
] |
java.sql; org.hsqldb;
| 1,053,036
|
public DeployArchiveBuilder setJavaStartClass(@Nullable String javaStartClass) {
this.javaStartClass = javaStartClass;
return this;
}
|
DeployArchiveBuilder function(@Nullable String javaStartClass) { this.javaStartClass = javaStartClass; return this; }
|
/**
* Sets the class to launch the Java application.
*/
|
Sets the class to launch the Java application
|
setJavaStartClass
|
{
"repo_name": "kamalmarhubi/bazel",
"path": "src/main/java/com/google/devtools/build/lib/rules/java/DeployArchiveBuilder.java",
"license": "apache-2.0",
"size": 8851
}
|
[
"javax.annotation.Nullable"
] |
import javax.annotation.Nullable;
|
import javax.annotation.*;
|
[
"javax.annotation"
] |
javax.annotation;
| 391,014
|
@BeforeAll
public static void beforeTests()
{
Medias.setResourcesDirectory(System.getProperty("java.io.tmpdir"));
}
|
static void function() { Medias.setResourcesDirectory(System.getProperty(STR)); }
|
/**
* Prepare test.
*/
|
Prepare test
|
beforeTests
|
{
"repo_name": "b3dgs/lionengine",
"path": "lionengine-game/src/test/java/com/b3dgs/lionengine/game/feature/LayerableConfigTest.java",
"license": "gpl-3.0",
"size": 3591
}
|
[
"com.b3dgs.lionengine.Medias"
] |
import com.b3dgs.lionengine.Medias;
|
import com.b3dgs.lionengine.*;
|
[
"com.b3dgs.lionengine"
] |
com.b3dgs.lionengine;
| 2,249,570
|
public boolean hasBound(Bound b);
|
boolean function(Bound b);
|
/**
* Checks if the specified bound is set or not.
* @param b the bound type
* @return <code>true</code> if the specified bound is set, <code>false</code> otherwise
*/
|
Checks if the specified bound is set or not
|
hasBound
|
{
"repo_name": "fengshao0907/cassandra-1",
"path": "src/java/org/apache/cassandra/cql3/restrictions/Restriction.java",
"license": "apache-2.0",
"size": 5322
}
|
[
"org.apache.cassandra.cql3.statements.Bound"
] |
import org.apache.cassandra.cql3.statements.Bound;
|
import org.apache.cassandra.cql3.statements.*;
|
[
"org.apache.cassandra"
] |
org.apache.cassandra;
| 2,770,615
|
public void verifyCompressionEmulation(JobConf origJobConf,
JobConf simuJobConf,Counters counters,
Map<String, Long> origReduceCounters,
Map<String, Long> origMapJobCounters)
throws ParseException,IOException {
if (simuJobConf.getBoolean(compEmulKey, false)) {
String inputDir = origJobConf.get(fileInputFormatKey);
Assert.assertNotNull(fileInputFormatKey + " is Null",inputDir);
long simMapInputBytes = getCounterValue(counters, "HDFS_BYTES_READ");
long uncompressedInputSize = origMapJobCounters.get("MAP_INPUT_BYTES");
long simReduceInputBytes =
getCounterValue(counters, "REDUCE_SHUFFLE_BYTES");
long simMapOutputBytes = getCounterValue(counters, "MAP_OUTPUT_BYTES");
      // Verify whether input compression is enabled or not.
if (inputDir.contains(".gz") || inputDir.contains(".tgz")
|| inputDir.contains(".bz")) {
Assert.assertTrue("Input decompression attribute has been not set for "
+ "for compressed input",
simuJobConf.getBoolean(inputDecompKey, false));
float INPUT_COMP_RATIO =
getExpectedCompressionRatio(simuJobConf, mapInputCompRatio);
float INTERMEDIATE_COMP_RATIO =
getExpectedCompressionRatio(simuJobConf, mapOutputCompRatio);
// Verify Map Input Compression Ratio.
assertMapInputCompressionRatio(simMapInputBytes, uncompressedInputSize,
INPUT_COMP_RATIO);
// Verify Map Output Compression Ratio.
assertMapOuputCompressionRatio(simReduceInputBytes, simMapOutputBytes,
INTERMEDIATE_COMP_RATIO);
} else {
Assert.assertEquals("MAP input bytes has not matched.",
convertBytes(uncompressedInputSize),
convertBytes(simMapInputBytes));
}
Assert.assertEquals("Simulated job output format has not matched with "
+ "original job output format.",
origJobConf.getBoolean(fileOutputFormatKey,false),
simuJobConf.getBoolean(fileOutputFormatKey,false));
if (simuJobConf.getBoolean(fileOutputFormatKey,false)) {
float OUTPUT_COMP_RATIO =
getExpectedCompressionRatio(simuJobConf, reduceOutputCompRatio);
//Verify reduce output compression ratio.
long simReduceOutputBytes =
getCounterValue(counters, "HDFS_BYTES_WRITTEN");
long origReduceOutputBytes =
origReduceCounters.get("REDUCE_OUTPUT_BYTES");
assertReduceOutputCompressionRatio(simReduceOutputBytes,
origReduceOutputBytes,
OUTPUT_COMP_RATIO);
}
}
}
|
void function(JobConf origJobConf, JobConf simuJobConf,Counters counters, Map<String, Long> origReduceCounters, Map<String, Long> origMapJobCounters) throws ParseException,IOException { if (simuJobConf.getBoolean(compEmulKey, false)) { String inputDir = origJobConf.get(fileInputFormatKey); Assert.assertNotNull(fileInputFormatKey + STR,inputDir); long simMapInputBytes = getCounterValue(counters, STR); long uncompressedInputSize = origMapJobCounters.get(STR); long simReduceInputBytes = getCounterValue(counters, STR); long simMapOutputBytes = getCounterValue(counters, STR); if (inputDir.contains(".gz") inputDir.contains(".tgz") inputDir.contains(".bz")) { Assert.assertTrue(STR + STR, simuJobConf.getBoolean(inputDecompKey, false)); float INPUT_COMP_RATIO = getExpectedCompressionRatio(simuJobConf, mapInputCompRatio); float INTERMEDIATE_COMP_RATIO = getExpectedCompressionRatio(simuJobConf, mapOutputCompRatio); assertMapInputCompressionRatio(simMapInputBytes, uncompressedInputSize, INPUT_COMP_RATIO); assertMapOuputCompressionRatio(simReduceInputBytes, simMapOutputBytes, INTERMEDIATE_COMP_RATIO); } else { Assert.assertEquals(STR, convertBytes(uncompressedInputSize), convertBytes(simMapInputBytes)); } Assert.assertEquals(STR + STR, origJobConf.getBoolean(fileOutputFormatKey,false), simuJobConf.getBoolean(fileOutputFormatKey,false)); if (simuJobConf.getBoolean(fileOutputFormatKey,false)) { float OUTPUT_COMP_RATIO = getExpectedCompressionRatio(simuJobConf, reduceOutputCompRatio); long simReduceOutputBytes = getCounterValue(counters, STR); long origReduceOutputBytes = origReduceCounters.get(STR); assertReduceOutputCompressionRatio(simReduceOutputBytes, origReduceOutputBytes, OUTPUT_COMP_RATIO); } } }
|
/**
* It verifies the compression ratios of mapreduce jobs.
* @param origJobConf - original job configuration.
* @param simuJobConf - simulated job configuration.
* @param counters - simulated job counters.
* @param origReduceCounters - original job reduce counters.
   * @param origMapJobCounters - original job map counters.
* @throws ParseException - if a parser error occurs.
* @throws IOException - if an I/O error occurs.
*/
|
It verifies the compression ratios of mapreduce jobs
|
verifyCompressionEmulation
|
{
"repo_name": "rekhajoshm/mapreduce-fork",
"path": "src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java",
"license": "apache-2.0",
"size": 34287
}
|
[
"java.io.IOException",
"java.text.ParseException",
"java.util.Map",
"org.apache.hadoop.mapred.JobConf",
"org.apache.hadoop.mapreduce.Counters",
"org.junit.Assert"
] |
import java.io.IOException; import java.text.ParseException; import java.util.Map; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.Counters; import org.junit.Assert;
|
import java.io.*; import java.text.*; import java.util.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.mapreduce.*; import org.junit.*;
|
[
"java.io",
"java.text",
"java.util",
"org.apache.hadoop",
"org.junit"
] |
java.io; java.text; java.util; org.apache.hadoop; org.junit;
| 1,651,018
|
public ListFoldersResult listFoldersContinue(String cursor)
throws ListFoldersContinueException, DbxException
{
ListFoldersContinueArg arg = new ListFoldersContinueArg(cursor);
return listFoldersContinue(arg);
}
public static class GetFolderMetadataException extends DbxApiException {
public final SharedFolderAccessError errorValue;
public GetFolderMetadataException(String requestId, LocalizedText userMessage, SharedFolderAccessError errorValue) {
super(requestId, userMessage, buildMessage("get_folder_metadata", userMessage, errorValue));
this.errorValue = errorValue;
}
}
|
ListFoldersResult function(String cursor) throws ListFoldersContinueException, DbxException { ListFoldersContinueArg arg = new ListFoldersContinueArg(cursor); return listFoldersContinue(arg); } public static class GetFolderMetadataException extends DbxApiException { public final SharedFolderAccessError errorValue; public GetFolderMetadataException(String requestId, LocalizedText userMessage, SharedFolderAccessError errorValue) { super(requestId, userMessage, buildMessage(STR, userMessage, errorValue)); this.errorValue = errorValue; } }
|
/**
* Once a cursor has been retrieved from {@link DbxSharing#listFolders}, use
* this to paginate through all shared folders. Apps must have full Dropbox
* access to use this endpoint. Warning: This endpoint is in beta and is
* subject to minor but possibly backwards-incompatible changes.
*
* @param cursor The cursor returned by your last call to {@link
* DbxSharing#listFolders} or {@link
* DbxSharing#listFoldersContinue(String)}. {@code cursor} must not be
* {@code null}.
*
* @throws IllegalArgumentException if any argument does not meet its
* preconditions.
*/
|
Once a cursor has been retrieved from <code>DbxSharing#listFolders</code>, use this to paginate through all shared folders. Apps must have full Dropbox access to use this endpoint. Warning: This endpoint is in beta and is subject to minor but possibly backwards-incompatible changes
|
listFoldersContinue
|
{
"repo_name": "hunchee/dropbox-sdk-java",
"path": "src/com/dropbox/core/v2/DbxSharing.java",
"license": "mit",
"size": 808400
}
|
[
"com.dropbox.core.DbxApiException",
"com.dropbox.core.DbxException",
"com.dropbox.core.LocalizedText"
] |
import com.dropbox.core.DbxApiException; import com.dropbox.core.DbxException; import com.dropbox.core.LocalizedText;
|
import com.dropbox.core.*;
|
[
"com.dropbox.core"
] |
com.dropbox.core;
| 2,703,143
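A minimal pagination sketch for the listFolders/listFoldersContinue pair described in the record above. Everything except those two calls is an assumption: the client variable sharing, the entry type, and the accessors getEntries()/getCursor() on ListFoldersResult are hypothetical names, and exception handling is omitted.

    // Hypothetical sketch only: accessor names and the entry type are assumed, not taken from the SDK.
    ListFoldersResult page = sharing.listFolders();                  // first page (method from this record's class)
    while (page != null) {
        for (Object entry : page.getEntries()) {                     // getEntries() is an assumed accessor
            System.out.println(entry);                               // process one shared folder entry
        }
        String cursor = page.getCursor();                            // getCursor() is an assumed accessor
        page = (cursor == null) ? null : sharing.listFoldersContinue(cursor);
    }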
|
@Description("enable the driver")
public boolean start();
|
@Description(STR) boolean function();
|
/**
* Enables the driver.
*/
|
Enables the driver
|
start
|
{
"repo_name": "dlitz/resin",
"path": "modules/resin/src/com/caucho/management/server/JdbcDriverMXBean.java",
"license": "gpl-2.0",
"size": 2590
}
|
[
"com.caucho.jmx.Description"
] |
import com.caucho.jmx.Description;
|
import com.caucho.jmx.*;
|
[
"com.caucho.jmx"
] |
com.caucho.jmx;
| 2,782,919
|
@Test
public void testApacheHttpClient4ExecutorNonSharedHttpClientClose() throws Throwable
{
ApacheHttpClient4Engine engine = new ApacheHttpClient4Engine();
ResteasyClient client = new ResteasyClientBuilder().httpEngine(engine).build();
Response response = client.target(generateURL("/test")).request().post(null);
Assert.assertEquals(204, response.getStatus());
engine.close();
HttpClient httpClient = engine.getHttpClient();
HttpPost post = new HttpPost(generateURL("/test"));
try
{
httpClient.execute(post);
fail("Expected IllegalStateException");
}
catch (IllegalStateException e)
{
log.info("Got expected IllegalStateException");
}
}
|
void function() throws Throwable { ApacheHttpClient4Engine engine = new ApacheHttpClient4Engine(); ResteasyClient client = new ResteasyClientBuilder().httpEngine(engine).build(); Response response = client.target(generateURL("/test")).request().post(null); Assert.assertEquals(204, response.getStatus()); engine.close(); HttpClient httpClient = engine.getHttpClient(); HttpPost post = new HttpPost(generateURL("/test")); try { httpClient.execute(post); fail(STR); } catch (IllegalStateException e) { log.info(STR); } }
|
/**
* Verify that if ApacheHttpClient4Executor creates its own HttpClient,
* then ApacheHttpClient4Executor.close() will close the HttpClient's
* org.apache.http.conn.ClientConnectionManager.
*/
|
Verify that if ApacheHttpClient4Executor creates its own HttpClient, then ApacheHttpClient4Executor.close() will close the HttpClient's org.apache.http.conn.ClientConnectionManager
|
testApacheHttpClient4ExecutorNonSharedHttpClientClose
|
{
"repo_name": "raphaelning/resteasy-client-android",
"path": "jaxrs/resteasy-jaxrs-testsuite/src/test/java/org/jboss/resteasy/test/nextgen/client/ClientExecutorShutdownTest.java",
"license": "apache-2.0",
"size": 5569
}
|
[
"javax.ws.rs.core.Response",
"org.apache.http.client.HttpClient",
"org.apache.http.client.methods.HttpPost",
"org.jboss.resteasy.client.jaxrs.ResteasyClient",
"org.jboss.resteasy.client.jaxrs.ResteasyClientBuilder",
"org.jboss.resteasy.client.jaxrs.engines.ApacheHttpClient4Engine",
"org.junit.Assert"
] |
import javax.ws.rs.core.Response; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.jboss.resteasy.client.jaxrs.ResteasyClient; import org.jboss.resteasy.client.jaxrs.ResteasyClientBuilder; import org.jboss.resteasy.client.jaxrs.engines.ApacheHttpClient4Engine; import org.junit.Assert;
|
import javax.ws.rs.core.*; import org.apache.http.client.*; import org.apache.http.client.methods.*; import org.jboss.resteasy.client.jaxrs.*; import org.jboss.resteasy.client.jaxrs.engines.*; import org.junit.*;
|
[
"javax.ws",
"org.apache.http",
"org.jboss.resteasy",
"org.junit"
] |
javax.ws; org.apache.http; org.jboss.resteasy; org.junit;
| 740,979
|
protected static <T> List<T> copyOf(List<T> source) {
return source == null ? null : new ArrayList<T>(source);
}
|
static <T> List<T> function(List<T> source) { return source == null ? null : new ArrayList<T>(source); }
|
/**
* Utility method to return a mutable copy of a given List. Used by generated code.
*/
|
Utility method to return a mutable copy of a given List. Used by generated code
|
copyOf
|
{
"repo_name": "wakandan/wire",
"path": "wire-runtime/src/main/java/com/squareup/wire/Message.java",
"license": "apache-2.0",
"size": 11846
}
|
[
"java.util.ArrayList",
"java.util.List"
] |
import java.util.ArrayList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,412,318
|
@Override
public void assignSlots(Topologies topologies, Map<String, Collection<WorkerSlot>> slotsForTopologiesNeedingAssignments) {
if (slotsForTopologiesNeedingAssignments.isEmpty()) {
LOG.info("assignSlots: no slots passed in, nothing to do");
return;
}
// This is purely to print the debug information. Otherwise, the following for loop is unnecessary.
for (Map.Entry<String, Collection<WorkerSlot>> topologyToSlots : slotsForTopologiesNeedingAssignments.entrySet()) {
String topologyId = topologyToSlots.getKey();
List<String> topologySlotAssignmentStrings = new ArrayList<String>();
String info = "assignSlots: " + topologyId + " being assigned to " + topologyToSlots.getValue().size() + " slots (worker:port, cpu, mem) as follows: ";
for (WorkerSlot slot : topologyToSlots.getValue()) {
TopologyDetails details = topologies.getById(topologyId);
topologySlotAssignmentStrings.add("(" + slot + ", " + MesosCommon.topologyWorkerCpu(mesosStormConf, details) + ", " + MesosCommon.topologyWorkerMem(mesosStormConf, details) + ")");
}
if (!topologyToSlots.getValue().isEmpty()) {
info += StringUtils.join(topologySlotAssignmentStrings, ", ");
LOG.info(info);
}
}
synchronized (_offersLock) {
Map<String, AggregatedOffers> aggregatedOffersPerNode = MesosCommon.getAggregatedOffersPerNode(_offers);
Map<String, List<TaskInfo>> tasksToLaunchPerNode = getTasksToLaunch(topologies, slotsForTopologiesNeedingAssignments, aggregatedOffersPerNode);
for (String node : tasksToLaunchPerNode.keySet()) {
List<OfferID> offerIDList = aggregatedOffersPerNode.get(node).getOfferIDList();
List<TaskInfo> taskInfoList = tasksToLaunchPerNode.get(node);
LOG.info("Using offerIDs: {} on host: {} to launch tasks: {}", offerIDListToString(offerIDList), node, taskInfoListToString(taskInfoList));
_driver.launchTasks(offerIDList, taskInfoList);
for (OfferID offerID: offerIDList) {
_offers.remove(offerID);
}
}
}
}
|
void function(Topologies topologies, Map<String, Collection<WorkerSlot>> slotsForTopologiesNeedingAssignments) { if (slotsForTopologiesNeedingAssignments.isEmpty()) { LOG.info(STR); return; } for (Map.Entry<String, Collection<WorkerSlot>> topologyToSlots : slotsForTopologiesNeedingAssignments.entrySet()) { String topologyId = topologyToSlots.getKey(); List<String> topologySlotAssignmentStrings = new ArrayList<String>(); String info = STR + topologyId + STR + topologyToSlots.getValue().size() + STR; for (WorkerSlot slot : topologyToSlots.getValue()) { TopologyDetails details = topologies.getById(topologyId); topologySlotAssignmentStrings.add("(" + slot + STR + MesosCommon.topologyWorkerCpu(mesosStormConf, details) + STR + MesosCommon.topologyWorkerMem(mesosStormConf, details) + ")"); } if (!topologyToSlots.getValue().isEmpty()) { info += StringUtils.join(topologySlotAssignmentStrings, STR); LOG.info(info); } } synchronized (_offersLock) { Map<String, AggregatedOffers> aggregatedOffersPerNode = MesosCommon.getAggregatedOffersPerNode(_offers); Map<String, List<TaskInfo>> tasksToLaunchPerNode = getTasksToLaunch(topologies, slotsForTopologiesNeedingAssignments, aggregatedOffersPerNode); for (String node : tasksToLaunchPerNode.keySet()) { List<OfferID> offerIDList = aggregatedOffersPerNode.get(node).getOfferIDList(); List<TaskInfo> taskInfoList = tasksToLaunchPerNode.get(node); LOG.info(STR, offerIDListToString(offerIDList), node, taskInfoListToString(taskInfoList)); _driver.launchTasks(offerIDList, taskInfoList); for (OfferID offerID: offerIDList) { _offers.remove(offerID); } } } }
|
/**
* This method is invoked after IScheduler.schedule assigns the worker slots to the topologies that need assignments
*
* @param topologies - Information about all topologies
* @param slotsForTopologiesNeedingAssignments - A map of topology name and collection of worker slots that are assigned to the topologies
* that need assignments
*/
|
This method is invoked after IScheduler.schedule assigns the worker slots to the topologies that need assignments
|
assignSlots
|
{
"repo_name": "erikdw/storm-mesos",
"path": "storm/src/main/storm/mesos/MesosNimbus.java",
"license": "apache-2.0",
"size": 42847
}
|
[
"java.util.ArrayList",
"java.util.Collection",
"java.util.List",
"java.util.Map",
"org.apache.commons.lang3.StringUtils",
"org.apache.mesos.Protos",
"org.apache.storm.scheduler.Topologies",
"org.apache.storm.scheduler.TopologyDetails",
"org.apache.storm.scheduler.WorkerSlot"
] |
import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; import org.apache.mesos.Protos; import org.apache.storm.scheduler.Topologies; import org.apache.storm.scheduler.TopologyDetails; import org.apache.storm.scheduler.WorkerSlot;
|
import java.util.*; import org.apache.commons.lang3.*; import org.apache.mesos.*; import org.apache.storm.scheduler.*;
|
[
"java.util",
"org.apache.commons",
"org.apache.mesos",
"org.apache.storm"
] |
java.util; org.apache.commons; org.apache.mesos; org.apache.storm;
| 855,520
|
public void addData(Actor actor, int position) {
mAccount.addActor(actor);
notifyItemInserted(position);
}
|
void function(Actor actor, int position) { mAccount.addActor(actor); notifyItemInserted(position); }
|
/**
     * Adds an item into the underlying data set
* at the position passed into the method.
*
* @param actor The item to add to the data set.
     * @param position The position at which to insert the item.
*/
|
Adds an item into the underlying data set at the position passed into the method
|
addData
|
{
"repo_name": "thunerd/shreck",
"path": "app/src/main/java/com/thunerd/shreck/views/adapters/ActorListAdapter.java",
"license": "gpl-2.0",
"size": 3773
}
|
[
"com.thunerd.shreck.models.Actor"
] |
import com.thunerd.shreck.models.Actor;
|
import com.thunerd.shreck.models.*;
|
[
"com.thunerd.shreck"
] |
com.thunerd.shreck;
| 161,537
|
public ServerName getServerHoldingMeta() throws IOException {
return getServerHoldingRegion(TableName.META_TABLE_NAME,
HRegionInfo.FIRST_META_REGIONINFO.getRegionName());
}
|
ServerName function() throws IOException { return getServerHoldingRegion(TableName.META_TABLE_NAME, HRegionInfo.FIRST_META_REGIONINFO.getRegionName()); }
|
/**
* Get the ServerName of region server serving the first hbase:meta region
*/
|
Get the ServerName of region server serving the first hbase:meta region
|
getServerHoldingMeta
|
{
"repo_name": "Guavus/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java",
"license": "apache-2.0",
"size": 10469
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,852,635
|
void txApplyDestroyPart2(RegionEntry re, Object key, boolean inTokenMode, boolean clearConflict)
{
if (this.testCallable != null) {
this.testCallable.call(this, Operation.DESTROY, re);
}
if (inTokenMode) {
getImageState().addDestroyedEntry(key);
}
else {
updateStatsForDestroy();
}
if (this.entryUserAttributes != null) {
this.entryUserAttributes.remove(key);
}
}
|
void txApplyDestroyPart2(RegionEntry re, Object key, boolean inTokenMode, boolean clearConflict) { if (this.testCallable != null) { this.testCallable.call(this, Operation.DESTROY, re); } if (inTokenMode) { getImageState().addDestroyedEntry(key); } else { updateStatsForDestroy(); } if (this.entryUserAttributes != null) { this.entryUserAttributes.remove(key); } }
|
/**
   * Called by lower levels, while still holding the write sync lock, after the
   * low level has completed its part of the basic destroy
*/
|
Called by lower levels, while still holding the write sync lock, after the low level has completed its part of the basic destroy
|
txApplyDestroyPart2
|
{
"repo_name": "kidaa/incubator-geode",
"path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/LocalRegion.java",
"license": "apache-2.0",
"size": 457988
}
|
[
"com.gemstone.gemfire.cache.Operation"
] |
import com.gemstone.gemfire.cache.Operation;
|
import com.gemstone.gemfire.cache.*;
|
[
"com.gemstone.gemfire"
] |
com.gemstone.gemfire;
| 1,666,388
|
public Index<T> unique(Boolean unique)
{
childNode.attribute("unique", unique);
return this;
}
|
Index<T> function(Boolean unique) { childNode.attribute(STR, unique); return this; }
|
/**
* Sets the <code>unique</code> attribute
* @param unique the value for the attribute <code>unique</code>
* @return the current instance of <code>Index<T></code>
*/
|
Sets the <code>unique</code> attribute
|
unique
|
{
"repo_name": "forge/javaee-descriptors",
"path": "impl/src/main/java/org/jboss/shrinkwrap/descriptor/impl/orm21/IndexImpl.java",
"license": "epl-1.0",
"size": 6022
}
|
[
"org.jboss.shrinkwrap.descriptor.api.orm21.Index"
] |
import org.jboss.shrinkwrap.descriptor.api.orm21.Index;
|
import org.jboss.shrinkwrap.descriptor.api.orm21.*;
|
[
"org.jboss.shrinkwrap"
] |
org.jboss.shrinkwrap;
| 2,301,269
|
if(references > 0) {
references--;
} else {
Log.w(TAG, "decrementReferences() called and references is 0");
}
|
if(references > 0) { references--; } else { Log.w(TAG, STR); }
|
/**
* Decrement reference count. When this gets to zero, we set a timer, and if
* nobody else has increased the reference count (by binding to the service,
* for example) when the timer expires, we stop the service.
*/
|
Decrement reference count. When this gets to zero, we set a timer, and if nobody else has increased the reference count (by binding to the service, for example) when the timer expires, we stop the service
|
decrementReferences
|
{
"repo_name": "routemaster/routemaster-android",
"path": "src/org/lumeh/routemaster/service/TrackingService.java",
"license": "gpl-3.0",
"size": 12379
}
|
[
"android.util.Log"
] |
import android.util.Log;
|
import android.util.*;
|
[
"android.util"
] |
android.util;
| 1,963,282
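A generic sketch of the reference-count-plus-idle-timer pattern that the comment above describes. This is not the TrackingService code; the class, the delay constant, and the stop() stand-in for stopping the service are all illustrative.

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.ScheduledFuture;
    import java.util.concurrent.TimeUnit;

    // Illustrative only: when the count reaches zero an idle timer is armed; taking a new
    // reference before it fires cancels the pending shutdown.
    class RefCountedService {
        private static final long IDLE_DELAY_MS = 30_000;   // hypothetical idle timeout
        private final ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor();
        private int references = 0;
        private ScheduledFuture<?> pendingStop;

        synchronized void incrementReferences() {
            references++;
            if (pendingStop != null) {                       // someone bound again: cancel the pending shutdown
                pendingStop.cancel(false);
                pendingStop = null;
            }
        }

        synchronized void decrementReferences() {
            if (references > 0) {
                references--;
            }
            if (references == 0) {                           // nothing holds us: arm the idle timer
                pendingStop = timer.schedule(this::stop, IDLE_DELAY_MS, TimeUnit.MILLISECONDS);
            }
        }

        private void stop() {
            System.out.println("stopping service after idle timeout"); // stand-in for stopSelf()
        }
    }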
|
private JMenu getQueryAddMenu() {
if (queryAddMenu == null) {
queryAddMenu = new JMenu();
queryAddMenu.setText("Add");
queryAddMenu.add(getAddGroupMenuItem());
queryAddMenu.add(getAddAssociationMenuItem());
queryAddMenu.add(getAddAttributeMenuItem());
}
return queryAddMenu;
}
|
JMenu function() { if (queryAddMenu == null) { queryAddMenu = new JMenu(); queryAddMenu.setText("Add"); queryAddMenu.add(getAddGroupMenuItem()); queryAddMenu.add(getAddAssociationMenuItem()); queryAddMenu.add(getAddAttributeMenuItem()); } return queryAddMenu; }
|
/**
* This method initializes queryAddMenu
*
* @return javax.swing.JMenu
*/
|
This method initializes queryAddMenu
|
getQueryAddMenu
|
{
"repo_name": "NCIP/cagrid",
"path": "cagrid/Software/core/caGrid/projects/data/src/java/tools/gov/nih/nci/cagrid/data/utilities/vizquery/VisualQueryBuilder.java",
"license": "bsd-3-clause",
"size": 38642
}
|
[
"javax.swing.JMenu"
] |
import javax.swing.JMenu;
|
import javax.swing.*;
|
[
"javax.swing"
] |
javax.swing;
| 1,349,694
|
@Nonnull
public UnifiedRoleDefinitionCollectionRequest filter(@Nonnull final String value) {
addFilterOption(value);
return this;
}
|
UnifiedRoleDefinitionCollectionRequest function(@Nonnull final String value) { addFilterOption(value); return this; }
|
/**
* Sets the filter clause for the request
*
* @param value the filter clause
* @return the updated request
*/
|
Sets the filter clause for the request
|
filter
|
{
"repo_name": "microsoftgraph/msgraph-sdk-java",
"path": "src/main/java/com/microsoft/graph/requests/UnifiedRoleDefinitionCollectionRequest.java",
"license": "mit",
"size": 6059
}
|
[
"com.microsoft.graph.requests.UnifiedRoleDefinitionCollectionRequest",
"javax.annotation.Nonnull"
] |
import com.microsoft.graph.requests.UnifiedRoleDefinitionCollectionRequest; import javax.annotation.Nonnull;
|
import com.microsoft.graph.requests.*; import javax.annotation.*;
|
[
"com.microsoft.graph",
"javax.annotation"
] |
com.microsoft.graph; javax.annotation;
| 1,076,649
|
public void setExceptionTypes(Type... types) {
exceptions = types;
}
|
void function(Type... types) { exceptions = types; }
|
/**
* Sets the types of the exceptions this method throws
* @param types the types of the exceptions this method throws
*/
|
Sets the types of the exceptions this method throws
|
setExceptionTypes
|
{
"repo_name": "mhems/jhelp",
"path": "src/com/binghamton/jhelp/symbols/MyMethodSymbol.java",
"license": "bsd-3-clause",
"size": 3801
}
|
[
"com.binghamton.jhelp.types.Type"
] |
import com.binghamton.jhelp.types.Type;
|
import com.binghamton.jhelp.types.*;
|
[
"com.binghamton.jhelp"
] |
com.binghamton.jhelp;
| 2,391,151
|
public void startDocument()
throws SAXException {
try {
super.startDocument();
} catch (Exception exc) {
log.error(exc.getMessage(), exc);
}
} // startDocument
|
void function() throws SAXException { try { super.startDocument(); } catch (Exception exc) { log.error(exc.getMessage(), exc); } }
|
/** Receive notification of the beginning of the document,
* and initialize the outgoing handler for a filter.
* @throws SAXException - any SAX exception,
* possibly wrapping another exception
*/
|
Receive notification of the beginning of the document, and initialize the outgoing handler for a filter
|
startDocument
|
{
"repo_name": "gfis/xtrans",
"path": "src/main/java/org/teherba/xtrans/CharTransformer.java",
"license": "apache-2.0",
"size": 7888
}
|
[
"org.xml.sax.SAXException"
] |
import org.xml.sax.SAXException;
|
import org.xml.sax.*;
|
[
"org.xml.sax"
] |
org.xml.sax;
| 592,743
|
public List<Expression> getArguments() {
return arguments;
}
|
List<Expression> function() { return arguments; }
|
/** get the List of parameters
	 *
	 * @return the parameters */
|
get the List of parameters
|
getArguments
|
{
"repo_name": "hewie/moco",
"path": "src/main/java/de/uni/bremen/monty/moco/ast/expression/FunctionCall.java",
"license": "gpl-3.0",
"size": 3390
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 317,215
|
void setVariablesLocal(String executionId, Map<String, ? extends Object> variables);
|
void setVariablesLocal(String executionId, Map<String, ? extends Object> variables);
|
/**
     * Update or create given variables for an execution (not considering parent scopes). If the variables do not already exist, they will be created in the given execution.
*
* @param executionId
* id of the execution, cannot be null.
* @param variables
* map containing name (key) and value of variables, can be null.
* @throws FlowableObjectNotFoundException
* when no execution is found for the given executionId.
*/
|
Update or create given variables for an execution (not considering parent scopes). If the variables do not already exist, they will be created in the given execution
|
setVariablesLocal
|
{
"repo_name": "paulstapleton/flowable-engine",
"path": "modules/flowable-engine/src/main/java/org/flowable/engine/RuntimeService.java",
"license": "apache-2.0",
"size": 63251
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 177,244
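A minimal usage sketch for the setVariablesLocal method above, assuming a RuntimeService instance and a valid execution id are already at hand; the variable names and values are made up.

    import java.util.HashMap;
    import java.util.Map;
    import org.flowable.engine.RuntimeService;

    // Set two execution-local variables without touching any parent scope.
    static void tagExecution(RuntimeService runtimeService, String executionId) {
        Map<String, Object> variables = new HashMap<>();
        variables.put("retryCount", 3);          // hypothetical variable
        variables.put("lastError", "timeout");   // hypothetical variable
        runtimeService.setVariablesLocal(executionId, variables);
    }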
|
public void send(LLRPMessage message){
if (session == null){
log.warn("session is not yet established");
endpoint.errorOccured("session is not yet established");
return;
}
if(!session.isConnected()){
if(reconnect()){
session.write(message);
}else{
log.info("session is not yet connected");
endpoint.errorOccured("session is not yet connected");
}
}else{
session.write(message);
}
}
|
void function(LLRPMessage message){ if (session == null){ log.warn(STR); endpoint.errorOccured(STR); return; } if(!session.isConnected()){ if(reconnect()){ session.write(message); }else{ log.info(STR); endpoint.errorOccured(STR); } }else{ session.write(message); } }
|
/**
* sends an LLRP message without waiting for a response message.
*
* @param message LLRP message to be sent
*/
|
sends an LLRP message without waiting for a response message
|
send
|
{
"repo_name": "icunning/fcvtc",
"path": "src/LTK/LTKJava/src/main/java/org/llrp/ltk/net/LLRPConnection.java",
"license": "apache-2.0",
"size": 7031
}
|
[
"org.llrp.ltk.types.LLRPMessage"
] |
import org.llrp.ltk.types.LLRPMessage;
|
import org.llrp.ltk.types.*;
|
[
"org.llrp.ltk"
] |
org.llrp.ltk;
| 2,840,840
|
public boolean onKeyDown(final int keyCode, final KeyEvent event, final MapView mapView) {
return false;
}
|
boolean function(final int keyCode, final KeyEvent event, final MapView mapView) { return false; }
|
/**
* By default does nothing (<code>return false</code>). If you handled the Event, return
* <code>true</code>, otherwise return <code>false</code>. If you returned <code>true</code>
* none of the following Overlays or the underlying {@link MapView} has the chance to handle
* this event.
*/
|
By default does nothing (<code>return false</code>). If you handled the Event, return <code>true</code>, otherwise return <code>false</code>. If you returned <code>true</code> none of the following Overlays or the underlying <code>MapView</code> has the chance to handle this event
|
onKeyDown
|
{
"repo_name": "RoProducts/rastertheque",
"path": "MapboxLibrary/src/com/mapbox/mapboxsdk/overlay/Overlay.java",
"license": "gpl-2.0",
"size": 10882
}
|
[
"android.view.KeyEvent",
"com.mapbox.mapboxsdk.views.MapView"
] |
import android.view.KeyEvent; import com.mapbox.mapboxsdk.views.MapView;
|
import android.view.*; import com.mapbox.mapboxsdk.views.*;
|
[
"android.view",
"com.mapbox.mapboxsdk"
] |
android.view; com.mapbox.mapboxsdk;
| 190,308
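A small sketch of overriding the onKeyDown contract above in an Overlay subclass so that one key is consumed (return true) and every other key falls through (return false). The subclass name and the chosen key are illustrative, and any other abstract members of Overlay (such as its draw method) are omitted here.

    import android.view.KeyEvent;
    import com.mapbox.mapboxsdk.overlay.Overlay;
    import com.mapbox.mapboxsdk.views.MapView;

    // Illustrative subclass: consume BACK, let everything else reach later overlays / the MapView.
    class BackKeyOverlay extends Overlay {   // other abstract Overlay members omitted for brevity
        @Override
        public boolean onKeyDown(final int keyCode, final KeyEvent event, final MapView mapView) {
            if (keyCode == KeyEvent.KEYCODE_BACK) {
                // ... handle the key here ...
                return true;    // handled: stop propagation
            }
            return false;       // not handled: keep the default behaviour
        }
    }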
|
public DateTime ifModifiedSince() {
if (this.ifModifiedSince == null) {
return null;
}
return this.ifModifiedSince.getDateTime();
}
|
DateTime function() { if (this.ifModifiedSince == null) { return null; } return this.ifModifiedSince.getDateTime(); }
|
/**
* Get the ifModifiedSince value.
*
* @return the ifModifiedSince value
*/
|
Get the ifModifiedSince value
|
ifModifiedSince
|
{
"repo_name": "pomortaz/azure-sdk-for-java",
"path": "azure-batch/src/main/java/com/microsoft/azure/batch/protocol/models/JobScheduleDeleteOptions.java",
"license": "mit",
"size": 6927
}
|
[
"org.joda.time.DateTime"
] |
import org.joda.time.DateTime;
|
import org.joda.time.*;
|
[
"org.joda.time"
] |
org.joda.time;
| 2,812,248
|
@Test
public void testNoPropertyQualifierOverlap() throws Exception {
String url = null;
MvcResult result = null;
String content = null;
// Get properties
url = metaBaseUrl + "/ncit/properties";
log.info("Testing url - " + url);
result = mvc.perform(get(url)).andExpect(status().isOk()).andReturn();
content = result.getResponse().getContentAsString();
log.info(" content = " + content);
final List<Concept> list1 =
new ObjectMapper().readValue(content, new TypeReference<List<Concept>>() {
// n/a
});
assertThat(list1).isNotEmpty();
// Get qualifiers
url = metaBaseUrl + "/ncit/qualifiers";
log.info("Testing url - " + url);
result = mvc.perform(get(url)).andExpect(status().isOk()).andReturn();
content = result.getResponse().getContentAsString();
log.info(" content = " + content);
final List<Concept> list2 =
new ObjectMapper().readValue(content, new TypeReference<List<Concept>>() {
// n/a
});
assertThat(list2).isNotEmpty();
// list1 and list2 should not have any codes in common
final Set<String> codes1 = list1.stream().map(c -> c.getCode()).collect(Collectors.toSet());
final Set<String> codes2 = list2.stream().map(c -> c.getCode()).collect(Collectors.toSet());
assertThat(Sets.intersection(codes1, codes2).size()).isEqualTo(0);
// list1 and list2 should not have any names in common
final Set<String> names1 = list1.stream().map(c -> c.getName()).collect(Collectors.toSet());
final Set<String> names2 = list2.stream().map(c -> c.getName()).collect(Collectors.toSet());
assertThat(Sets.intersection(names1, names2).size()).isEqualTo(0);
}
|
void function() throws Exception { String url = null; MvcResult result = null; String content = null; url = metaBaseUrl + STR; log.info(STR + url); result = mvc.perform(get(url)).andExpect(status().isOk()).andReturn(); content = result.getResponse().getContentAsString(); log.info(STR + content); final List<Concept> list1 = new ObjectMapper().readValue(content, new TypeReference<List<Concept>>() { }); assertThat(list1).isNotEmpty(); url = metaBaseUrl + STR; log.info(STR + url); result = mvc.perform(get(url)).andExpect(status().isOk()).andReturn(); content = result.getResponse().getContentAsString(); log.info(STR + content); final List<Concept> list2 = new ObjectMapper().readValue(content, new TypeReference<List<Concept>>() { }); assertThat(list2).isNotEmpty(); final Set<String> codes1 = list1.stream().map(c -> c.getCode()).collect(Collectors.toSet()); final Set<String> codes2 = list2.stream().map(c -> c.getCode()).collect(Collectors.toSet()); assertThat(Sets.intersection(codes1, codes2).size()).isEqualTo(0); final Set<String> names1 = list1.stream().map(c -> c.getName()).collect(Collectors.toSet()); final Set<String> names2 = list2.stream().map(c -> c.getName()).collect(Collectors.toSet()); assertThat(Sets.intersection(names1, names2).size()).isEqualTo(0); }
|
/**
* Test no property qualifier overlap.
*
* @throws Exception the exception
*/
|
Test no property qualifier overlap
|
testNoPropertyQualifierOverlap
|
{
"repo_name": "NCIEVS/evsrestapi",
"path": "src/test/java/gov/nih/nci/evs/api/controller/QualifierTests.java",
"license": "bsd-3-clause",
"size": 16828
}
|
[
"com.fasterxml.jackson.core.type.TypeReference",
"com.fasterxml.jackson.databind.ObjectMapper",
"com.google.common.collect.Sets",
"gov.nih.nci.evs.api.model.Concept",
"java.util.List",
"java.util.Set",
"java.util.stream.Collectors",
"org.assertj.core.api.Assertions",
"org.springframework.test.web.servlet.MvcResult"
] |
import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Sets; import gov.nih.nci.evs.api.model.Concept; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.assertj.core.api.Assertions; import org.springframework.test.web.servlet.MvcResult;
|
import com.fasterxml.jackson.core.type.*; import com.fasterxml.jackson.databind.*; import com.google.common.collect.*; import gov.nih.nci.evs.api.model.*; import java.util.*; import java.util.stream.*; import org.assertj.core.api.*; import org.springframework.test.web.servlet.*;
|
[
"com.fasterxml.jackson",
"com.google.common",
"gov.nih.nci",
"java.util",
"org.assertj.core",
"org.springframework.test"
] |
com.fasterxml.jackson; com.google.common; gov.nih.nci; java.util; org.assertj.core; org.springframework.test;
| 162,488
|
@Override
public void onClick(View view) {
int clickedPosition = getAdapterPosition();
Article article = listOfArticles.get(clickedPosition);
mClickHandler.onClick(article);
}
}
|
void function(View view) { int clickedPosition = getAdapterPosition(); Article article = listOfArticles.get(clickedPosition); mClickHandler.onClick(article); } }
|
/**
* This gets called by the child views during a click.
*
* @param view The View that was clicked
*/
|
This gets called by the child views during a click
|
onClick
|
{
"repo_name": "ansh94/DailyTech",
"path": "app/src/main/java/com/anshdeep/dailytech/ui/main/ArticlesAdapter.java",
"license": "apache-2.0",
"size": 3394
}
|
[
"android.view.View",
"com.anshdeep.dailytech.data.model.Article"
] |
import android.view.View; import com.anshdeep.dailytech.data.model.Article;
|
import android.view.*; import com.anshdeep.dailytech.data.model.*;
|
[
"android.view",
"com.anshdeep.dailytech"
] |
android.view; com.anshdeep.dailytech;
| 327,244
|
public Set<String> getUserReferencesForEntityReference(String reference, String permission);
|
Set<String> function(String reference, String permission);
|
/**
     * Get the user references which have the given permission in the given entity reference.
     * This is most commonly used to get the users which have a permission in a site, but it should
     * work for any entity type which uses Sakai permissions
*
* @param reference a globally unique reference to an entity,
* consists of the entity prefix and optional segments (normally the id at least)
* @param permission a permission string constant
* @return a set of user entity references (e.g. /user/{userId} - not id, eid, or username)
*/
|
Get the user references which have the given permission in the given entity reference. This is most commonly used to get the users which have a permission in a site, but it should work for any entity type which uses Sakai permissions
|
getUserReferencesForEntityReference
|
{
"repo_name": "marktriggs/nyu-sakai-10.4",
"path": "entitybroker/api/src/java/org/sakaiproject/entitybroker/DeveloperHelperService.java",
"license": "apache-2.0",
"size": 25539
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,486,902
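A brief usage sketch for the service method above. How the DeveloperHelperService is obtained, the "/site/" reference format, and the permission string are assumptions for illustration only.

    import java.util.Set;
    import org.sakaiproject.entitybroker.DeveloperHelperService;

    // Find the user references holding a given permission in one site (names are placeholders).
    static Set<String> findUsersWithPermission(DeveloperHelperService helper, String siteId) {
        String siteReference = "/site/" + siteId;   // assumed site reference format
        String permission = "site.upd";             // hypothetical permission constant
        return helper.getUserReferencesForEntityReference(siteReference, permission);
    }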
|
@IncompatibleChange(release = "5.4", details = "Added method")
void addHeader(String name, String value);
|
@IncompatibleChange(release = "5.4", details = STR) void addHeader(String name, String value);
|
/**
* Adds a response header with the given name and value, not overwriting any previous values which
* may have already been added.
*
* @param name
* the name of the header to add
* @param value
* the assigned value
* @since 5.4
*/
|
Adds a response header with the given name and value, not overwriting any previous values which may have already been added
|
addHeader
|
{
"repo_name": "apache/tapestry-5",
"path": "tapestry-http/src/main/java/org/apache/tapestry5/http/services/Response.java",
"license": "apache-2.0",
"size": 6667
}
|
[
"org.apache.tapestry5.ioc.annotations.IncompatibleChange"
] |
import org.apache.tapestry5.ioc.annotations.IncompatibleChange;
|
import org.apache.tapestry5.ioc.annotations.*;
|
[
"org.apache.tapestry5"
] |
org.apache.tapestry5;
| 750,033
|
@Test
public void testToString() {
String expected = "あいうえお";
UniCord str = new UniCord(expected);
String actual = str.toString();
assertThat(actual, is(expected));
}
|
void function() { String expected = "あいうえお"; UniCord str = new UniCord(expected); String actual = str.toString(); assertThat(actual, is(expected)); }
|
/**
* Test of toString method, of class UniCord.
*/
|
Test of toString method, of class UniCord
|
testToString
|
{
"repo_name": "enlo/jmt-projects",
"path": "jmt-core/src/test/java/info/naiv/lab/java/jmt/text/UniCordTest.java",
"license": "mit",
"size": 19003
}
|
[
"org.hamcrest.Matchers",
"org.junit.Assert"
] |
import org.hamcrest.Matchers; import org.junit.Assert;
|
import org.hamcrest.*; import org.junit.*;
|
[
"org.hamcrest",
"org.junit"
] |
org.hamcrest; org.junit;
| 1,120,986
|
public String toString() {
if ( !Utils.isEmpty( filename ) ) {
if ( Utils.isEmpty( name ) ) {
return filename;
} else {
return filename + " : " + name;
}
}
if ( name != null ) {
if ( directory != null ) {
String path = directory.getPath();
if ( path.endsWith( RepositoryDirectory.DIRECTORY_SEPARATOR ) ) {
return path + name;
} else {
return path + RepositoryDirectory.DIRECTORY_SEPARATOR + name;
}
} else {
return name;
}
} else {
return JobMeta.class.getName();
}
}
|
String function() { if ( !Utils.isEmpty( filename ) ) { if ( Utils.isEmpty( name ) ) { return filename; } else { return filename + STR + name; } } if ( name != null ) { if ( directory != null ) { String path = directory.getPath(); if ( path.endsWith( RepositoryDirectory.DIRECTORY_SEPARATOR ) ) { return path + name; } else { return path + RepositoryDirectory.DIRECTORY_SEPARATOR + name; } } else { return name; } } else { return JobMeta.class.getName(); } }
|
/**
* Gets a textual representation of the job. If its name has been set, it will be returned, otherwise the classname is
* returned.
*
* @return the textual representation of the job.
*/
|
Gets a textual representation of the job. If its name has been set, it will be returned, otherwise the classname is returned
|
toString
|
{
"repo_name": "lgrill-pentaho/pentaho-kettle",
"path": "engine/src/main/java/org/pentaho/di/job/JobMeta.java",
"license": "apache-2.0",
"size": 88937
}
|
[
"org.pentaho.di.core.util.Utils",
"org.pentaho.di.repository.RepositoryDirectory"
] |
import org.pentaho.di.core.util.Utils; import org.pentaho.di.repository.RepositoryDirectory;
|
import org.pentaho.di.core.util.*; import org.pentaho.di.repository.*;
|
[
"org.pentaho.di"
] |
org.pentaho.di;
| 553,206
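A hedged, worked illustration of the return forms of JobMeta.toString() above; the setter calls and printed output are assumptions (standard PDI setters, Kettle environment initialization omitted), so treat this as a sketch rather than project code.

import org.pentaho.di.job.JobMeta;

public class JobMetaToStringDemo {
    public static void main(String[] args) {
        JobMeta jobMeta = new JobMeta();
        jobMeta.setName("load_warehouse");
        System.out.println(jobMeta); // name only -> "load_warehouse"
        jobMeta.setFilename("/etl/load_warehouse.kjb");
        System.out.println(jobMeta); // filename and name -> "/etl/load_warehouse.kjb : load_warehouse"
        // With a filename but an empty name, only the filename would be printed.
    }
}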
|
public Map<String, String> getTags() {
return this.tags;
}
|
Map<String, String> function() { return this.tags; }
|
/**
* Get the tags property: The tags property.
*
* @return the tags value.
*/
|
Get the tags property: The tags property
|
getTags
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/storage/azure-storage-blob/src/main/java/com/azure/storage/blob/models/TaggedBlobItem.java",
"license": "mit",
"size": 1750
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,036,883
|
private void printVars(String portName, ImmutableElectricObject d) {
// write the variables
for(Iterator<Variable> it = d.getVariables(); it.hasNext(); ) {
Variable var = it.next();
printVar(portName, var);
}
}
|
void function(String portName, ImmutableElectricObject d) { for(Iterator<Variable> it = d.getVariables(); it.hasNext(); ) { Variable var = it.next(); printVar(portName, var); } }
|
/**
* Method to write the variables on an object.
*/
|
Method to write the variables on an object
|
printVars
|
{
"repo_name": "imr/Electric8",
"path": "com/sun/electric/tool/io/output/JELIB.java",
"license": "gpl-3.0",
"size": 34105
}
|
[
"com.sun.electric.database.ImmutableElectricObject",
"com.sun.electric.database.variable.Variable",
"java.util.Iterator"
] |
import com.sun.electric.database.ImmutableElectricObject; import com.sun.electric.database.variable.Variable; import java.util.Iterator;
|
import com.sun.electric.database.*; import com.sun.electric.database.variable.*; import java.util.*;
|
[
"com.sun.electric",
"java.util"
] |
com.sun.electric; java.util;
| 1,184,061
|
public static APIMgtAdminService getAPIMgtAdminService() throws APIManagementException {
return APIManagerFactory.getInstance().getAPIMgtAdminService();
}
|
static APIMgtAdminService function() throws APIManagementException { return APIManagerFactory.getInstance().getAPIMgtAdminService(); }
|
/**
* Returns an APIMgtAdminService.
*
* @return API Management Admin Service
* @throws APIManagementException If failed to retrieve admin service.
*/
|
Returns an APIMgtAdminService
|
getAPIMgtAdminService
|
{
"repo_name": "sambaheerathan/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.rest.api.commons/src/main/java/org/wso2/carbon/apimgt/rest/api/common/util/RestApiUtil.java",
"license": "apache-2.0",
"size": 17206
}
|
[
"org.wso2.carbon.apimgt.core.api.APIMgtAdminService",
"org.wso2.carbon.apimgt.core.exception.APIManagementException",
"org.wso2.carbon.apimgt.core.impl.APIManagerFactory"
] |
import org.wso2.carbon.apimgt.core.api.APIMgtAdminService; import org.wso2.carbon.apimgt.core.exception.APIManagementException; import org.wso2.carbon.apimgt.core.impl.APIManagerFactory;
|
import org.wso2.carbon.apimgt.core.api.*; import org.wso2.carbon.apimgt.core.exception.*; import org.wso2.carbon.apimgt.core.impl.*;
|
[
"org.wso2.carbon"
] |
org.wso2.carbon;
| 1,437,667
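A minimal hedged sketch of calling the RestApiUtil helper above; the wrapper class, main method and error handling are illustrative assumptions, not WSO2 code.

import org.wso2.carbon.apimgt.core.api.APIMgtAdminService;
import org.wso2.carbon.apimgt.core.exception.APIManagementException;
import org.wso2.carbon.apimgt.rest.api.common.util.RestApiUtil;

public class AdminServiceExample {
    public static void main(String[] args) {
        try {
            APIMgtAdminService adminService = RestApiUtil.getAPIMgtAdminService();
            // Use adminService for admin-level API management operations here.
        } catch (APIManagementException e) {
            // Retrieval of the admin service failed; report and abort.
            e.printStackTrace();
        }
    }
}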
|
public void createClauseLogicPageView(CellTree cellTree) {
this.cellTree = cellTree;
mainPanel.clear();
//mainPanel.setStyleName("div-wrapper"); //main div
SimplePanel leftPanel = new SimplePanel();
leftPanel.getElement().setId("leftPanel_SimplePanelCW");
leftPanel.setStyleName("div-first bottomPadding10px"); //left side div which will have tree
VerticalPanel treePanel = new VerticalPanel();
treePanel.getElement().setId("treePanel_VerticalPanelCW");
HorizontalPanel expandCollapse = new HorizontalPanel();
expandCollapse.getElement().setId("expandCollapse_HorizontalPanelCW");
expandCollapse.setStyleName("leftAndTopPadding");
expandCollapse.setSize("100px", "20px");
buttonExpandClauseWorkSpace.setStylePrimaryName("expandAllButton");
buttonCollapseClauseWorkSpace.setStylePrimaryName("collapseAllButton");
buttonExpandClauseWorkSpace.setTitle("Expand All (Shift +)");
buttonCollapseClauseWorkSpace.setTitle("Collapse All (Shift -)");
expandCollapse.add(buttonExpandClauseWorkSpace);
expandCollapse.add(buttonCollapseClauseWorkSpace);
buttonExpandClauseWorkSpace.getElement().setId("buttonExpandClauseWorkSpace_Button");
buttonCollapseClauseWorkSpace.getElement().setId("buttonCollapseClauseWorkSpace_Button");
buttonExpandClauseWorkSpace.setFocus(true);
buttonCollapseClauseWorkSpace.setVisible(true);
if (cellTree != null) {
treePanel.add(expandCollapse);
treePanel.add(cellTree);
addCWExpandCollapseButtonHandler();
cellTreeHandlers();
} else {
treePanel.setHeight("100%");
}
leftPanel.add(treePanel);
mainPanel.add(leftPanel);
focusPanel.addKeyDownHandler(this);
focusPanel.addFocusHandler(this);
}
|
void function(CellTree cellTree) { this.cellTree = cellTree; mainPanel.clear(); SimplePanel leftPanel = new SimplePanel(); leftPanel.getElement().setId(STR); leftPanel.setStyleName(STR); VerticalPanel treePanel = new VerticalPanel(); treePanel.getElement().setId(STR); HorizontalPanel expandCollapse = new HorizontalPanel(); expandCollapse.getElement().setId(STR); expandCollapse.setStyleName(STR); expandCollapse.setSize("100px", "20px"); buttonExpandClauseWorkSpace.setStylePrimaryName(STR); buttonCollapseClauseWorkSpace.setStylePrimaryName(STR); buttonExpandClauseWorkSpace.setTitle(STR); buttonCollapseClauseWorkSpace.setTitle(STR); expandCollapse.add(buttonExpandClauseWorkSpace); expandCollapse.add(buttonCollapseClauseWorkSpace); buttonExpandClauseWorkSpace.getElement().setId(STR); buttonCollapseClauseWorkSpace.getElement().setId(STR); buttonExpandClauseWorkSpace.setFocus(true); buttonCollapseClauseWorkSpace.setVisible(true); if (cellTree != null) { treePanel.add(expandCollapse); treePanel.add(cellTree); addCWExpandCollapseButtonHandler(); cellTreeHandlers(); } else { treePanel.setHeight("100%"); } leftPanel.add(treePanel); mainPanel.add(leftPanel); focusPanel.addKeyDownHandler(this); focusPanel.addFocusHandler(this); }
|
/**
* Creates the clause logic page view.
*
* @param cellTree the cell tree
*/
|
Creates the clause logic page view
|
createClauseLogicPageView
|
{
"repo_name": "MeasureAuthoringTool/MeasureAuthoringTool_Release",
"path": "mat/src/main/java/mat/client/clause/clauseworkspace/view/XmlTreeView.java",
"license": "cc0-1.0",
"size": 103469
}
|
[
"com.google.gwt.user.cellview.client.CellTree",
"com.google.gwt.user.client.ui.HorizontalPanel",
"com.google.gwt.user.client.ui.SimplePanel",
"com.google.gwt.user.client.ui.VerticalPanel"
] |
import com.google.gwt.user.cellview.client.CellTree; import com.google.gwt.user.client.ui.HorizontalPanel; import com.google.gwt.user.client.ui.SimplePanel; import com.google.gwt.user.client.ui.VerticalPanel;
|
import com.google.gwt.user.cellview.client.*; import com.google.gwt.user.client.ui.*;
|
[
"com.google.gwt"
] |
com.google.gwt;
| 2,020,486
|
static TreeMultimap<Integer, Long> createDataForIFile(int keyCount, int repeatCount) {
TreeMultimap<Integer, Long> dataSet = TreeMultimap.create();
Random rnd = new Random();
for (int i = 0; i < keyCount; i++) {
if (repeatCount > 0 && (rnd.nextInt(keyCount) % 2 == 0)) {
//repeat this key
for (int j = 0; j < repeatCount; j++) {
IntWritable key = new IntWritable(rnd.nextInt(keyCount));
LongWritable value = new LongWritable(System.nanoTime());
dataSet.put(key.get(), value.get());
}
i += repeatCount;
LOG.info("Repeated key count=" + (repeatCount));
} else {
IntWritable key = new IntWritable(rnd.nextInt(keyCount));
LongWritable value = new LongWritable(System.nanoTime());
dataSet.put(key.get(), value.get());
}
}
for (Integer key : dataSet.keySet()) {
for (Long value : dataSet.get(key)) {
LOG.info("Key=" + key + ", val=" + value);
}
}
LOG.info("=============");
return dataSet;
}
|
static TreeMultimap<Integer, Long> createDataForIFile(int keyCount, int repeatCount) { TreeMultimap<Integer, Long> dataSet = TreeMultimap.create(); Random rnd = new Random(); for (int i = 0; i < keyCount; i++) { if (repeatCount > 0 && (rnd.nextInt(keyCount) % 2 == 0)) { for (int j = 0; j < repeatCount; j++) { IntWritable key = new IntWritable(rnd.nextInt(keyCount)); LongWritable value = new LongWritable(System.nanoTime()); dataSet.put(key.get(), value.get()); } i += repeatCount; LOG.info(STR + (repeatCount)); } else { IntWritable key = new IntWritable(rnd.nextInt(keyCount)); LongWritable value = new LongWritable(System.nanoTime()); dataSet.put(key.get(), value.get()); } } for (Integer key : dataSet.keySet()) { for (Long value : dataSet.get(key)) { LOG.info("Key=" + key + STR + value); } } LOG.info(STR); return dataSet; }
|
/**
* Generate data set for ifile. Create repeated keys if needed.
*
* @param keyCount approximate number of keys to be created
* @param repeatCount number of times a key should be repeated
* @return
*/
|
Generate data set for ifile. Create repeated keys if needed
|
createDataForIFile
|
{
"repo_name": "sidseth/tez",
"path": "tez-runtime-library/src/test/java/org/apache/tez/runtime/library/common/sort/impl/TestTezMerger.java",
"license": "apache-2.0",
"size": 26742
}
|
[
"com.google.common.collect.TreeMultimap",
"java.util.Random",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.LongWritable"
] |
import com.google.common.collect.TreeMultimap; import java.util.Random; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable;
|
import com.google.common.collect.*; import java.util.*; import org.apache.hadoop.io.*;
|
[
"com.google.common",
"java.util",
"org.apache.hadoop"
] |
com.google.common; java.util; org.apache.hadoop;
| 2,088,761
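The test helper above leans on Guava's TreeMultimap keeping keys and per-key values sorted; this standalone sketch (with made-up values) shows that ordering behaviour in isolation.

import com.google.common.collect.TreeMultimap;

public class TreeMultimapDemo {
    public static void main(String[] args) {
        TreeMultimap<Integer, Long> data = TreeMultimap.create();
        data.put(3, 30L);
        data.put(1, 10L);
        data.put(3, 20L); // repeated key, extra value

        // Prints keys in ascending order, and each key's values in ascending order:
        // Key=1, val=10 / Key=3, val=20 / Key=3, val=30
        for (Integer key : data.keySet()) {
            for (Long value : data.get(key)) {
                System.out.println("Key=" + key + ", val=" + value);
            }
        }
    }
}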
|
protected void prettyPrint(File file) {
try {
SAXParserFactory factory = SAXParserFactory.newInstance();
factory.setNamespaceAware(false);
SAXParser parser = factory.newSAXParser();
File prettyFile = enunciate.createTempFile("enunciate", file.getName());
parser.parse(file, new PrettyPrinter(prettyFile));
if (file.delete()) {
enunciate.copyFile(prettyFile, file);
}
else {
warn("Unable to delete %s. Skipping pretty-print transformation....", file);
}
}
catch (Exception e) {
//fall through... skip pretty printing.
warn("Unable to pretty-print %s (%s).", file, e.getMessage());
if (enunciate.isDebug()) {
e.printStackTrace(System.err);
}
}
}
|
void function(File file) { try { SAXParserFactory factory = SAXParserFactory.newInstance(); factory.setNamespaceAware(false); SAXParser parser = factory.newSAXParser(); File prettyFile = enunciate.createTempFile(STR, file.getName()); parser.parse(file, new PrettyPrinter(prettyFile)); if (file.delete()) { enunciate.copyFile(prettyFile, file); } else { warn(STR, file); } } catch (Exception e) { warn(STR, file, e.getMessage()); if (enunciate.isDebug()) { e.printStackTrace(System.err); } } }
|
/**
* Pretty-prints the specified xml file.
*
* @param file The file to pretty-print.
*/
|
Pretty-prints the specified xml file
|
prettyPrint
|
{
"repo_name": "garyhodgson/enunciate",
"path": "xml/src/main/java/org/codehaus/enunciate/modules/xml/XMLDeploymentModule.java",
"license": "apache-2.0",
"size": 19703
}
|
[
"java.io.File",
"javax.xml.parsers.SAXParser",
"javax.xml.parsers.SAXParserFactory"
] |
import java.io.File; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory;
|
import java.io.*; import javax.xml.parsers.*;
|
[
"java.io",
"javax.xml"
] |
java.io; javax.xml;
| 1,250,494
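Enunciate's PrettyPrinter handler is project-specific, so as a hedged, generic sketch this shows the same SAX wiring with an invented stand-in handler that merely counts start tags; the class names here are assumptions, not Enunciate code.

import java.io.File;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;

public class SaxSketch {
    public static void main(String[] args) throws Exception {
        SAXParserFactory factory = SAXParserFactory.newInstance();
        factory.setNamespaceAware(false);
        SAXParser parser = factory.newSAXParser();
        DefaultHandler handler = new DefaultHandler() {
            private int elements;
            @Override
            public void startElement(String uri, String localName, String qName, Attributes atts) {
                elements++; // PrettyPrinter would instead re-emit the element, indented
            }
            @Override
            public void endDocument() {
                System.out.println("Elements seen: " + elements);
            }
        };
        parser.parse(new File(args[0]), handler);
    }
}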
|
public IBoxResourceManager getResourceManagerWithSharedLinkAuth(BoxResourceType type, String sharedLink, String password) {
switch (type) {
case FILE:
return getSharedFilesManager(sharedLink, password);
case FOLDER:
return getSharedFoldersManager(sharedLink, password);
case COMMENT:
return getSharedCommentsManager(sharedLink, password);
default:
throw new NotImplementedException();
}
}
|
IBoxResourceManager function(BoxResourceType type, String sharedLink, String password) { switch (type) { case FILE: return getSharedFilesManager(sharedLink, password); case FOLDER: return getSharedFoldersManager(sharedLink, password); case COMMENT: return getSharedCommentsManager(sharedLink, password); default: throw new NotImplementedException(); } }
|
/**
* A generic way to get a resourceManager with shared link auth. Currently only supports file, folder and comment endpoints.
*
* @param type
* @param sharedLink
* @param password
* @return
*/
|
A generic way to get a resourceManager with shared link auth. Currently only supports file, folder and comment endpoints
|
getResourceManagerWithSharedLinkAuth
|
{
"repo_name": "shelsonjava/box-java-sdk-v2",
"path": "BoxJavaLibraryV2/src/com/box/boxjavalibv2/BoxClient.java",
"license": "apache-2.0",
"size": 24646
}
|
[
"com.box.boxjavalibv2.dao.BoxResourceType",
"com.box.boxjavalibv2.resourcemanagers.IBoxResourceManager",
"org.apache.commons.lang.NotImplementedException"
] |
import com.box.boxjavalibv2.dao.BoxResourceType; import com.box.boxjavalibv2.resourcemanagers.IBoxResourceManager; import org.apache.commons.lang.NotImplementedException;
|
import com.box.boxjavalibv2.dao.*; import com.box.boxjavalibv2.resourcemanagers.*; import org.apache.commons.lang.*;
|
[
"com.box.boxjavalibv2",
"org.apache.commons"
] |
com.box.boxjavalibv2; org.apache.commons;
| 70,114
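A hedged sketch of calling the Box shared-link helper above; the wrapper class and method are assumptions, and the BoxClient instance is taken as already constructed and authenticated elsewhere.

import com.box.boxjavalibv2.BoxClient;
import com.box.boxjavalibv2.dao.BoxResourceType;
import com.box.boxjavalibv2.resourcemanagers.IBoxResourceManager;

public class SharedLinkExample {
    // "client" is assumed to be an already-constructed BoxClient.
    static IBoxResourceManager sharedFileManager(BoxClient client, String sharedLink, String password) {
        // Only FILE, FOLDER and COMMENT are supported; pass null for password on unprotected links.
        return client.getResourceManagerWithSharedLinkAuth(BoxResourceType.FILE, sharedLink, password);
    }
}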
|
public static void setPaginationParams(APIListDTO apiListDTO, String query, int offset, int limit, int size) {
Map<String, Integer> paginatedParams = RestApiCommonUtil.getPaginationParams(offset, limit, size);
String paginatedPrevious = "";
String paginatedNext = "";
if (paginatedParams.get(RestApiConstants.PAGINATION_PREVIOUS_OFFSET) != null) {
paginatedPrevious = RestApiCommonUtil
.getAPIPaginatedURL(paginatedParams.get(RestApiConstants.PAGINATION_PREVIOUS_OFFSET),
paginatedParams.get(RestApiConstants.PAGINATION_PREVIOUS_LIMIT), query);
}
if (paginatedParams.get(RestApiConstants.PAGINATION_NEXT_OFFSET) != null) {
paginatedNext = RestApiCommonUtil
.getAPIPaginatedURL(paginatedParams.get(RestApiConstants.PAGINATION_NEXT_OFFSET),
paginatedParams.get(RestApiConstants.PAGINATION_NEXT_LIMIT), query);
}
PaginationDTO paginationDTO = CommonMappingUtil
.getPaginationDTO(limit, offset, size, paginatedNext, paginatedPrevious);
apiListDTO.setPagination(paginationDTO);
}
|
static void function(APIListDTO apiListDTO, String query, int offset, int limit, int size) { Map<String, Integer> paginatedParams = RestApiCommonUtil.getPaginationParams(offset, limit, size); String paginatedPrevious = STR; String paginatedNext = STR; if (paginatedParams.get(RestApiConstants.PAGINATION_PREVIOUS_OFFSET) != null) { paginatedPrevious = RestApiCommonUtil .getAPIPaginatedURL(paginatedParams.get(RestApiConstants.PAGINATION_PREVIOUS_OFFSET), paginatedParams.get(RestApiConstants.PAGINATION_PREVIOUS_LIMIT), query); } if (paginatedParams.get(RestApiConstants.PAGINATION_NEXT_OFFSET) != null) { paginatedNext = RestApiCommonUtil .getAPIPaginatedURL(paginatedParams.get(RestApiConstants.PAGINATION_NEXT_OFFSET), paginatedParams.get(RestApiConstants.PAGINATION_NEXT_LIMIT), query); } PaginationDTO paginationDTO = CommonMappingUtil .getPaginationDTO(limit, offset, size, paginatedNext, paginatedPrevious); apiListDTO.setPagination(paginationDTO); }
|
/**
* Sets pagination urls for a APIListDTO object given pagination parameters and url parameters
*
* @param apiListDTO APIListDTO object to which pagination urls need to be set
* @param query query parameter
* @param offset starting index
* @param limit max number of returned objects
* @param size max offset
*/
|
Sets pagination urls for a APIListDTO object given pagination parameters and url parameters
|
setPaginationParams
|
{
"repo_name": "isharac/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.rest.api.store.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/store/v1/mappings/APIMappingUtil.java",
"license": "apache-2.0",
"size": 49022
}
|
[
"java.util.Map",
"org.wso2.carbon.apimgt.rest.api.common.RestApiCommonUtil",
"org.wso2.carbon.apimgt.rest.api.common.RestApiConstants",
"org.wso2.carbon.apimgt.rest.api.store.v1.dto.APIListDTO",
"org.wso2.carbon.apimgt.rest.api.store.v1.dto.PaginationDTO"
] |
import java.util.Map; import org.wso2.carbon.apimgt.rest.api.common.RestApiCommonUtil; import org.wso2.carbon.apimgt.rest.api.common.RestApiConstants; import org.wso2.carbon.apimgt.rest.api.store.v1.dto.APIListDTO; import org.wso2.carbon.apimgt.rest.api.store.v1.dto.PaginationDTO;
|
import java.util.*; import org.wso2.carbon.apimgt.rest.api.common.*; import org.wso2.carbon.apimgt.rest.api.store.v1.dto.*;
|
[
"java.util",
"org.wso2.carbon"
] |
java.util; org.wso2.carbon;
| 656,780
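A hedged sketch of a typical call site for setPaginationParams above; the wrapper class and the note about reading the result back through the DTO's pagination object are assumptions based only on the documented signature.

import org.wso2.carbon.apimgt.rest.api.store.v1.dto.APIListDTO;
import org.wso2.carbon.apimgt.rest.api.store.v1.mappings.APIMappingUtil;

public class PaginationExample {
    static APIListDTO paginate(APIListDTO apiListDTO, String query, int offset, int limit, int total) {
        // After this call the DTO's pagination carries next/previous URLs
        // whenever more results exist beyond the current page.
        APIMappingUtil.setPaginationParams(apiListDTO, query, offset, limit, total);
        return apiListDTO;
    }
}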
|
public float getOverlayCenterY() {
return overlay.getHeight()/2;
}
}
public GraphicOverlay(Context context, AttributeSet attrs) {
super(context, attrs);
}
|
float function() { return overlay.getHeight()/2; } } public GraphicOverlay(Context context, AttributeSet attrs) { super(context, attrs); }
|
/**
* Outputs the overlay y center according to the preview coordinate system.
*/
|
Outputs the overlay y center according to the preview coordinate system
|
getOverlayCenterY
|
{
"repo_name": "crashlytics/18thScale",
"path": "app/src/main/java/com/firebase/hackweek/tank18thscale/common/GraphicOverlay.java",
"license": "apache-2.0",
"size": 6808
}
|
[
"android.content.Context",
"android.util.AttributeSet"
] |
import android.content.Context; import android.util.AttributeSet;
|
import android.content.*; import android.util.*;
|
[
"android.content",
"android.util"
] |
android.content; android.util;
| 382,253
|
public RWAction createCreateEnvelopeAction(RWList tags) {
RWAction action = createDefaultAction(true);
action.add(R.string.rw_key_label,
R.string.roundware_notification_announcing_recording)
.add(R.string.rw_key_operation, R.string.rw_op_create_envelope);
addTags(action, tags);
if ((mService != null) && (mService.getConfiguration().isUsingLocationBasedSpeak())) {
addCoordinates(action);
}
return action;
}
|
RWAction function(RWList tags) { RWAction action = createDefaultAction(true); action.add(R.string.rw_key_label, R.string.roundware_notification_announcing_recording) .add(R.string.rw_key_operation, R.string.rw_op_create_envelope); addTags(action, tags); if ((mService != null) && (mService.getConfiguration().isUsingLocationBasedSpeak())) { addCoordinates(action); } return action; }
|
/**
* Creates an action to create an envelope on the server, based on
* the included tags selections and the coordinates of the current
* location of the user / device that will be included automatically
* if available.
*
* @param tags to include in the call
* @return RWAction instance for the server call
*/
|
Creates an action to create an envelope on the server, based on the included tags selections and the coordinates of the current location of the user / device that will be included automatically if available
|
createCreateEnvelopeAction
|
{
"repo_name": "Sillson/roundware-android",
"path": "rwservice/src/main/java/org/roundware/service/RWActionFactory.java",
"license": "gpl-3.0",
"size": 19589
}
|
[
"org.roundware.service.util.RWList"
] |
import org.roundware.service.util.RWList;
|
import org.roundware.service.util.*;
|
[
"org.roundware.service"
] |
org.roundware.service;
| 1,848,897
|
@Before
public void setupEnv() throws IOException
{
// Create a local temp directory.
localTempPath = Files.createTempDirectory(null);
}
|
void function() throws IOException { localTempPath = Files.createTempDirectory(null); }
|
/**
* Sets up the test environment.
*/
|
Sets up the test environment
|
setupEnv
|
{
"repo_name": "seoj/herd",
"path": "herd-code/herd-dao/src/test/java/org/finra/herd/dao/GlacierDaoTest.java",
"license": "apache-2.0",
"size": 2889
}
|
[
"java.io.IOException",
"java.nio.file.Files"
] |
import java.io.IOException; import java.nio.file.Files;
|
import java.io.*; import java.nio.file.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 1,153,706
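A hedged companion sketch for the @Before method above: the matching @After teardown one would typically pair with it, using a generic recursive delete. This is not taken from the herd tests; the class and field names are assumptions.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Comparator;
import java.util.stream.Stream;
import org.junit.After;

public class TempDirCleanup {
    private Path localTempPath; // created by the @Before setup

    @After
    public void cleanEnv() throws IOException {
        // Delete children before parents so the whole temp tree is removed.
        try (Stream<Path> paths = Files.walk(localTempPath)) {
            paths.sorted(Comparator.reverseOrder())
                 .forEach(p -> p.toFile().delete());
        }
    }
}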
|