method stringlengths 13 441k | clean_method stringlengths 7 313k | doc stringlengths 17 17.3k | comment stringlengths 3 1.42k | method_name stringlengths 1 273 | extra dict | imports list | imports_info stringlengths 19 34.8k | cluster_imports_info stringlengths 15 3.66k | libraries list | libraries_info stringlengths 6 661 | id int64 0 2.92M |
|---|---|---|---|---|---|---|---|---|---|---|---|
private String getOverheadFromSourceTasks(String counterGroupName, TaskAttemptInfo attemptInfo) {
String firstEventReceived = getCounterValue(TaskCounter.FIRST_EVENT_RECEIVED,
counterGroupName, attemptInfo);
String lastEventReceived = getCounterValue(TaskCounter.LAST_EVENT_RECEIVED,
counterGroupName, attemptInfo);
if (!Strings.isNullOrEmpty(firstEventReceived) && !Strings.isNullOrEmpty(lastEventReceived)) {
return Long.toString(Long.parseLong(lastEventReceived) - Long.parseLong(firstEventReceived));
} else {
return "";
}
} | String function(String counterGroupName, TaskAttemptInfo attemptInfo) { String firstEventReceived = getCounterValue(TaskCounter.FIRST_EVENT_RECEIVED, counterGroupName, attemptInfo); String lastEventReceived = getCounterValue(TaskCounter.LAST_EVENT_RECEIVED, counterGroupName, attemptInfo); if (!Strings.isNullOrEmpty(firstEventReceived) && !Strings.isNullOrEmpty(lastEventReceived)) { return Long.toString(Long.parseLong(lastEventReceived) - Long.parseLong(firstEventReceived)); } else { return ""; } } | /**
* Time taken to receive all events from source tasks
*
* @param counterGroupName
* @param attemptInfo
* @return String
*/ | Time taken to receive all events from source tasks | getOverheadFromSourceTasks | {
"repo_name": "guiling/tez",
"path": "tez-tools/analyzers/job-analyzer/src/main/java/org/apache/tez/analyzer/plugins/ShuffleTimeAnalyzer.java",
"license": "apache-2.0",
"size": 8439
} | [
"com.google.common.base.Strings",
"org.apache.tez.common.counters.TaskCounter",
"org.apache.tez.history.parser.datamodel.TaskAttemptInfo"
] | import com.google.common.base.Strings; import org.apache.tez.common.counters.TaskCounter; import org.apache.tez.history.parser.datamodel.TaskAttemptInfo; | import com.google.common.base.*; import org.apache.tez.common.counters.*; import org.apache.tez.history.parser.datamodel.*; | [
"com.google.common",
"org.apache.tez"
] | com.google.common; org.apache.tez; | 540,428 |
EAttribute getDesignatorDescriptor_CheckMethod(); | EAttribute getDesignatorDescriptor_CheckMethod(); | /**
* Returns the meta object for the attribute '{@link uk.ac.lancs.comp.vmlLangInst.DesignatorDescriptor#getCheckMethod <em>Check Method</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Check Method</em>'.
* @see uk.ac.lancs.comp.vmlLangInst.DesignatorDescriptor#getCheckMethod()
* @see #getDesignatorDescriptor()
* @generated
*/ | Returns the meta object for the attribute '<code>uk.ac.lancs.comp.vmlLangInst.DesignatorDescriptor#getCheckMethod Check Method</code>'. | getDesignatorDescriptor_CheckMethod | {
"repo_name": "szschaler/vml_star",
"path": "uk.ac.lancs.comp.vmlstar.lang_inst.model/src/uk/ac/lancs/comp/vmlLangInst/VmlLangInstPackage.java",
"license": "mit",
"size": 27322
} | [
"org.eclipse.emf.ecore.EAttribute"
] | import org.eclipse.emf.ecore.EAttribute; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 2,149,411 |
Effects fadeToggle(int millisecs, Function... f); | Effects fadeToggle(int millisecs, Function... f); | /**
* Toggle the visibility of all matched elements by adjusting their opacity and firing an optional
* callback after completion. Only the opacity is adjusted for this animation, meaning that all of
* the matched elements should already have some form of height and width associated with them.
*/ | Toggle the visibility of all matched elements by adjusting their opacity and firing an optional callback after completion. Only the opacity is adjusted for this animation, meaning that all of the matched elements should already have some form of height and width associated with them | fadeToggle | {
"repo_name": "lucasam/gwtquery",
"path": "gwtquery-core/src/main/java/com/google/gwt/query/client/LazyGQuery.java",
"license": "mit",
"size": 90576
} | [
"com.google.gwt.query.client.plugins.Effects"
] | import com.google.gwt.query.client.plugins.Effects; | import com.google.gwt.query.client.plugins.*; | [
"com.google.gwt"
] | com.google.gwt; | 1,459,748 |
public Response post(URI url, InputStream input) throws IOException; | Response function(URI url, InputStream input) throws IOException; | /**
* Perform a POST-request against the given URL
*
* @param url URL to perform request against
* @param input Input stream to use for reading POST-data from
* @return HTTP response
* @throws IOException
*/ | Perform a POST-request against the given URL | post | {
"repo_name": "imbo/imboclient-java",
"path": "src/main/java/io/imbo/client/Http/HttpClient.java",
"license": "mit",
"size": 8922
} | [
"java.io.IOException",
"java.io.InputStream"
] | import java.io.IOException; import java.io.InputStream; | import java.io.*; | [
"java.io"
] | java.io; | 1,983,013 |
@Override
void generateTypeClassReference(ModuleName moduleName, String unqualifiedName) {
String typeClassLabel = labelMaker.getTypeClassLabel(unqualifiedName);
String appropriatelyQualifiedName = getAppropriatelyQualifiedName(moduleName, unqualifiedName);
if (!isDocForTypeClassGenerated(moduleName, unqualifiedName)) {
currentPage.addText(appropriatelyQualifiedName);
} else {
generateNonLocalReference(currentPage, relativePathToBaseDirectory, moduleName, typeClassLabel, appropriatelyQualifiedName, getFullyQualifiedNameString(moduleName, unqualifiedName));
}
}
| void generateTypeClassReference(ModuleName moduleName, String unqualifiedName) { String typeClassLabel = labelMaker.getTypeClassLabel(unqualifiedName); String appropriatelyQualifiedName = getAppropriatelyQualifiedName(moduleName, unqualifiedName); if (!isDocForTypeClassGenerated(moduleName, unqualifiedName)) { currentPage.addText(appropriatelyQualifiedName); } else { generateNonLocalReference(currentPage, relativePathToBaseDirectory, moduleName, typeClassLabel, appropriatelyQualifiedName, getFullyQualifiedNameString(moduleName, unqualifiedName)); } } | /**
* Generates a reference to a type class, hyperlinked as non-local references.
*
* @param moduleName the module name of the type class.
* @param unqualifiedName the unqualified name of the type class.
*/ | Generates a reference to a type class, hyperlinked as non-local references | generateTypeClassReference | {
"repo_name": "levans/Open-Quark",
"path": "src/CAL_Platform/src/org/openquark/cal/caldoc/HTMLDocumentationGenerator.java",
"license": "bsd-3-clause",
"size": 414134
} | [
"org.openquark.cal.compiler.ModuleName"
] | import org.openquark.cal.compiler.ModuleName; | import org.openquark.cal.compiler.*; | [
"org.openquark.cal"
] | org.openquark.cal; | 669,276 |
public static String getRealFileName(File name, String format) {
return getRealFileName(name.getAbsolutePath(), format);
}
| static String function(File name, String format) { return getRealFileName(name.getAbsolutePath(), format); } | /**
* Returns the real file name as filename.fileformat
*
* @param name
* @param format a format starting with a dot for example .pdf
* @return
*/ | Returns the real file name as filename.fileformat | getRealFileName | {
"repo_name": "du-lab/mzmine2",
"path": "src/main/java/net/sf/mzmine/util/files/FileAndPathUtil.java",
"license": "gpl-2.0",
"size": 13773
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 339,623 |
private void init(File file,File index) throws IOException {
setSourceKey(new SAMKey(file.toString()));
size = file.length();
content = new SeekableFileStream(file);
this.index =index;
}
public SAMDataSource(Locator data, Locator index) {
super(data);
if (data == null || index == null)
throw new RuntimeException("Either data or index are not provided: " + data + "; " + index);
if (data.isURL()) {
try {
if(index.isURL())
init(data.url(), index.url());
else
init(data.url(), index.file());
} catch (MalformedURLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ReadFailedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (URISyntaxException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
try {
init(data.file(),index.file());
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
} | void function(File file,File index) throws IOException { setSourceKey(new SAMKey(file.toString())); size = file.length(); content = new SeekableFileStream(file); this.index =index; } SAMDataSource(Locator data, Locator index) { super(data); if (data == null index == null) throw new RuntimeException(STR + data + STR + index); if (data.isURL()) { try { if(index.isURL()) function(data.url(), index.url()); else init(data.url(), index.file()); } catch (MalformedURLException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } catch (ReadFailedException e) { e.printStackTrace(); } catch (URISyntaxException e) { e.printStackTrace(); } } else { try { init(data.file(),index.file()); } catch (IOException e) { e.printStackTrace(); } } } | /**
* BAM file
*
* @param file
* @throws IOException
*/ | BAM file | init | {
"repo_name": "GenomeView/genomeview",
"path": "src/net/sf/jannot/source/SAMDataSource.java",
"license": "gpl-3.0",
"size": 6666
} | [
"java.io.File",
"java.io.IOException",
"java.net.MalformedURLException",
"java.net.URISyntaxException",
"net.sf.jannot.exception.ReadFailedException",
"net.sf.samtools.seekablestream.SeekableFileStream"
] | import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; import net.sf.jannot.exception.ReadFailedException; import net.sf.samtools.seekablestream.SeekableFileStream; | import java.io.*; import java.net.*; import net.sf.jannot.exception.*; import net.sf.samtools.seekablestream.*; | [
"java.io",
"java.net",
"net.sf.jannot",
"net.sf.samtools"
] | java.io; java.net; net.sf.jannot; net.sf.samtools; | 347,950 |
EAttribute getAttribute(); | EAttribute getAttribute(); | /**
* Returns the value of the '<em><b>Attribute</b></em>' reference.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Attribute</em>' reference isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Attribute</em>' reference.
* @see #setAttribute(EAttribute)
* @see mutatorenvironment.MutatorenvironmentPackage#getObjectAttributeType_Attribute()
* @model required="true"
* @generated
*/ | Returns the value of the 'Attribute' reference. If the meaning of the 'Attribute' reference isn't clear, there really should be more of a description here... | getAttribute | {
"repo_name": "gomezabajo/Wodel",
"path": "wodel.models/src/mutatorenvironment/ObjectAttributeType.java",
"license": "epl-1.0",
"size": 3475
} | [
"org.eclipse.emf.ecore.EAttribute"
] | import org.eclipse.emf.ecore.EAttribute; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 383,587 |
protected Theme getTheme() throws IOException {
return session.theme().getTheme(Theme.Type.LOGIN);
} | Theme function() throws IOException { return session.theme().getTheme(Theme.Type.LOGIN); } | /**
* Get Theme used for page rendering.
*
* @return theme for page rendering, never null
* @throws IOException in case of Theme loading problem
*/ | Get Theme used for page rendering | getTheme | {
"repo_name": "srose/keycloak",
"path": "services/src/main/java/org/keycloak/forms/login/freemarker/FreeMarkerLoginFormsProvider.java",
"license": "apache-2.0",
"size": 32049
} | [
"java.io.IOException",
"org.keycloak.theme.Theme"
] | import java.io.IOException; import org.keycloak.theme.Theme; | import java.io.*; import org.keycloak.theme.*; | [
"java.io",
"org.keycloak.theme"
] | java.io; org.keycloak.theme; | 703,218 |
@Value.Parameter
Path sourceRoot(); | @Value.Parameter Path sourceRoot(); | /**
* The source root directory. If a filter causes files to be open, they must be descendants of
* this directory.
*
* @return A source root directory
*/ | The source root directory. If a filter causes files to be open, they must be descendants of this directory | sourceRoot | {
"repo_name": "io7m/smf",
"path": "com.io7m.smfj.processing.api/src/main/java/com/io7m/smfj/processing/api/SMFFilterCommandContextType.java",
"license": "isc",
"size": 2848
} | [
"java.nio.file.Path",
"org.immutables.value.Value"
] | import java.nio.file.Path; import org.immutables.value.Value; | import java.nio.file.*; import org.immutables.value.*; | [
"java.nio",
"org.immutables.value"
] | java.nio; org.immutables.value; | 2,087,690 |
List<CategoricalDistribution> convert(List<Matrix> matrixs); | List<CategoricalDistribution> convert(List<Matrix> matrixs); | /**
* Convert a list of column vectors in Matrix form into a corresponding list
* of Categorical Distributions.
*
* @param matrixs
* the column vectors in matrix form to be converted.
* @return a corresponding list of Categorical Distribution representation
* of the passed in column vectors.
*/ | Convert a list of column vectors in Matrix form into a corresponding list of Categorical Distributions | convert | {
"repo_name": "Iroxsmyth/Brisca-AI-2017",
"path": "src/main/java/aima/core/probability/hmm/HiddenMarkovModel.java",
"license": "mit",
"size": 3613
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,698,594 |
public void testMultiFactory() {
// init TilesPlugin
module1 = createModuleConfig("/module1", "tiles-defs.xml", true);
module2 = createModuleConfig("/module2", "tiles-defs.xml", true);
initModulePlugIns(module1);
initModulePlugIns(module2);
// mock request context
request.setAttribute(Globals.MODULE_KEY, module1);
request.setPathElements("/myapp", "/module1/foo.do", null, null);
// Retrieve factory for module1
DefinitionsFactory factory1 =
TilesUtil.getDefinitionsFactory(request, context);
assertNotNull("factory found", factory1);
assertEquals(
"factory name",
"/module1",
factory1.getConfig().getFactoryName());
// mock request context
request.setAttribute(Globals.MODULE_KEY, module2);
request.setPathElements("/myapp", "/module2/foo.do", null, null);
// Retrieve factory for module2
DefinitionsFactory factory2 =
TilesUtil.getDefinitionsFactory(request, context);
assertNotNull("factory found", factory2);
assertEquals(
"factory name",
"/module2",
factory2.getConfig().getFactoryName());
// Check that factory are different
assertNotSame("Factory from different modules", factory1, factory2);
} | void function() { module1 = createModuleConfig(STR, STR, true); module2 = createModuleConfig(STR, STR, true); initModulePlugIns(module1); initModulePlugIns(module2); request.setAttribute(Globals.MODULE_KEY, module1); request.setPathElements(STR, STR, null, null); DefinitionsFactory factory1 = TilesUtil.getDefinitionsFactory(request, context); assertNotNull(STR, factory1); assertEquals( STR, STR, factory1.getConfig().getFactoryName()); request.setAttribute(Globals.MODULE_KEY, module2); request.setPathElements(STR, STR, null, null); DefinitionsFactory factory2 = TilesUtil.getDefinitionsFactory(request, context); assertNotNull(STR, factory2); assertEquals( STR, STR, factory2.getConfig().getFactoryName()); assertNotSame(STR, factory1, factory2); } | /**
* Test multi factory creation when moduleAware=true.
*/ | Test multi factory creation when moduleAware=true | testMultiFactory | {
"repo_name": "codelibs/cl-struts",
"path": "src/test/org/apache/struts/tiles/TestTilesPlugin.java",
"license": "apache-2.0",
"size": 15195
} | [
"org.apache.struts.Globals"
] | import org.apache.struts.Globals; | import org.apache.struts.*; | [
"org.apache.struts"
] | org.apache.struts; | 2,512,240 |
protected PlanNode attachSubqueries( QueryContext context,
PlanNode plan,
Map<String, Subquery> subqueriesByVariableName ) {
// Order the variable names in reverse order ...
List<String> varNames = new ArrayList<String>(subqueriesByVariableName.keySet());
Collections.sort(varNames);
Collections.reverse(varNames);
for (String varName : varNames) {
Subquery subquery = subqueriesByVariableName.get(varName);
// Plan out the subquery ...
PlanNode subqueryNode = createPlan(context, subquery.getQuery());
setSubqueryVariableName(subqueryNode, varName);
// Create a DEPENDENT_QUERY node, with the subquery on the LHS (so it is executed first) ...
PlanNode depQuery = new PlanNode(Type.DEPENDENT_QUERY);
depQuery.addChildren(subqueryNode, plan);
depQuery.addSelectors(subqueryNode.getSelectors());
depQuery.addSelectors(plan.getSelectors());
plan = depQuery;
}
return plan;
} | PlanNode function( QueryContext context, PlanNode plan, Map<String, Subquery> subqueriesByVariableName ) { List<String> varNames = new ArrayList<String>(subqueriesByVariableName.keySet()); Collections.sort(varNames); Collections.reverse(varNames); for (String varName : varNames) { Subquery subquery = subqueriesByVariableName.get(varName); PlanNode subqueryNode = createPlan(context, subquery.getQuery()); setSubqueryVariableName(subqueryNode, varName); PlanNode depQuery = new PlanNode(Type.DEPENDENT_QUERY); depQuery.addChildren(subqueryNode, plan); depQuery.addSelectors(subqueryNode.getSelectors()); depQuery.addSelectors(plan.getSelectors()); plan = depQuery; } return plan; } | /**
* Attach plan nodes for each subquery, resulting with the first subquery at the top of the plan tree.
*
* @param context the context in which the query is being planned
* @param plan the existing plan
* @param subqueriesByVariableName the queries by the variable name used in substitution
* @return the updated plan, or the existing plan if there were no limits
*/ | Attach plan nodes for each subquery, resulting with the first subquery at the top of the plan tree | attachSubqueries | {
"repo_name": "rhauch/modeshape",
"path": "modeshape-jcr/src/main/java/org/modeshape/jcr/query/plan/CanonicalPlanner.java",
"license": "apache-2.0",
"size": 26348
} | [
"java.util.ArrayList",
"java.util.Collections",
"java.util.List",
"java.util.Map",
"org.modeshape.jcr.query.QueryContext",
"org.modeshape.jcr.query.model.Subquery",
"org.modeshape.jcr.query.plan.PlanNode"
] | import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import org.modeshape.jcr.query.QueryContext; import org.modeshape.jcr.query.model.Subquery; import org.modeshape.jcr.query.plan.PlanNode; | import java.util.*; import org.modeshape.jcr.query.*; import org.modeshape.jcr.query.model.*; import org.modeshape.jcr.query.plan.*; | [
"java.util",
"org.modeshape.jcr"
] | java.util; org.modeshape.jcr; | 1,015,035 |
Symbol access(Symbol sym, int pos, Type site, Name name, boolean qualified,
List argtypes) {
if (sym.kind >= AMBIGUOUS) {
if (!site.isErroneous() && !Type.isErroneous(argtypes))
((ResolveError) sym).report(log, pos, site, name, argtypes);
do {
sym = ((ResolveError) sym).sym;
} while (sym.kind >= AMBIGUOUS);
if (sym == syms.errSymbol)
sym = new ErrorType(name, qualified ? site.tsym : syms.noSymbol).tsym;
}
return sym;
} | Symbol access(Symbol sym, int pos, Type site, Name name, boolean qualified, List argtypes) { if (sym.kind >= AMBIGUOUS) { if (!site.isErroneous() && !Type.isErroneous(argtypes)) ((ResolveError) sym).report(log, pos, site, name, argtypes); do { sym = ((ResolveError) sym).sym; } while (sym.kind >= AMBIGUOUS); if (sym == syms.errSymbol) sym = new ErrorType(name, qualified ? site.tsym : syms.noSymbol).tsym; } return sym; } | /**
* If `sym' is a bad symbol: report error and return errSymbol else pass
* through unchanged, additional arguments duplicate what has been used in
* trying to find the symbol (--> flyweight pattern). This improves
* performance since we expect misses to happen frequently.
*
* @param sym
* The symbol that was found, or a ResolveError.
* @param pos
* The position to use for error reporting.
* @param site
* The original type from where the selection took place.
* @param name
* The symbol's name.
* @param argtypes
* The invocation's value parameters, if we looked for a method.
*/ | If `sym' is a bad symbol: report error and return errSymbol else pass through unchanged, additional arguments duplicate what has been used in trying to find the symbol (--> flyweight pattern). This improves performance since we expect misses to happen frequently | access | {
"repo_name": "nileshpatelksy/hello-pod-cast",
"path": "archive/FILE/Compiler/java_GJC1.42_src/src/com/sun/tools/javac/v8/comp/Resolve.java",
"license": "apache-2.0",
"size": 42456
} | [
"com.sun.tools.javac.v8.code.Symbol",
"com.sun.tools.javac.v8.code.Type",
"com.sun.tools.javac.v8.util.List",
"com.sun.tools.javac.v8.util.Name"
] | import com.sun.tools.javac.v8.code.Symbol; import com.sun.tools.javac.v8.code.Type; import com.sun.tools.javac.v8.util.List; import com.sun.tools.javac.v8.util.Name; | import com.sun.tools.javac.v8.code.*; import com.sun.tools.javac.v8.util.*; | [
"com.sun.tools"
] | com.sun.tools; | 2,693,593 |
public GitClient getCurrentBranch(StringData data) {
BranchListData list = new BranchListData();
listBranches(list);
data.setValue(list.getCurrentBranch());
return this;
} | GitClient function(StringData data) { BranchListData list = new BranchListData(); listBranches(list); data.setValue(list.getCurrentBranch()); return this; } | /** Provides the name of the current branch.
* @param data a StringData object that receives the operations result
* @return a reference to <code>this</code> */ | Provides the name of the current branch | getCurrentBranch | {
"repo_name": "AludraTest/aludratest",
"path": "src/main/java/org/aludratest/service/gitclient/GitClient.java",
"license": "apache-2.0",
"size": 28327
} | [
"org.aludratest.service.gitclient.data.BranchListData",
"org.aludratest.util.data.StringData"
] | import org.aludratest.service.gitclient.data.BranchListData; import org.aludratest.util.data.StringData; | import org.aludratest.service.gitclient.data.*; import org.aludratest.util.data.*; | [
"org.aludratest.service",
"org.aludratest.util"
] | org.aludratest.service; org.aludratest.util; | 1,188,013 |
String getLatestText(EMMessage message, int fromUsersNum, int messageNum); | String getLatestText(EMMessage message, int fromUsersNum, int messageNum); | /**
* set the notification content: such as "you received 5 message from 2 contacts"
*
* @param message
* @param fromUsersNum- number of message sender
* @param messageNum -number of messages
* @return null-will use the default text
*/ | set the notification content: such as "you received 5 message from 2 contacts" | getLatestText | {
"repo_name": "CinderellaCJ/ARCard",
"path": "easeui/src/com/hyphenate/easeui/model/EaseNotifier.java",
"license": "apache-2.0",
"size": 14427
} | [
"com.hyphenate.chat.EMMessage"
] | import com.hyphenate.chat.EMMessage; | import com.hyphenate.chat.*; | [
"com.hyphenate.chat"
] | com.hyphenate.chat; | 2,839,799 |
public void generatePdfReport(String marshalledSimpleCertificateReport, Result result) throws IOException, TransformerException {
Transformer transformer = SimpleCertificateReportXmlDefiner.getPdfTemplates().newTransformer();
transformer.transform(new StreamSource(new StringReader(marshalledSimpleCertificateReport)), result);
} | void function(String marshalledSimpleCertificateReport, Result result) throws IOException, TransformerException { Transformer transformer = SimpleCertificateReportXmlDefiner.getPdfTemplates().newTransformer(); transformer.transform(new StreamSource(new StringReader(marshalledSimpleCertificateReport)), result); } | /**
* Generates a PDF Detailed report
*
* @param marshalledSimpleCertificateReport {@link String} marshalled report
* @param result {@link Result} to write the report into
* @throws IOException if an IOException occurs
* @throws TransformerException if an TransformerException occurs
*/ | Generates a PDF Detailed report | generatePdfReport | {
"repo_name": "esig/dss",
"path": "dss-simple-certificate-report-jaxb/src/main/java/eu/europa/esig/dss/simplecertificatereport/SimpleCertificateReportFacade.java",
"license": "lgpl-2.1",
"size": 6589
} | [
"java.io.IOException",
"java.io.StringReader",
"javax.xml.transform.Result",
"javax.xml.transform.Transformer",
"javax.xml.transform.TransformerException",
"javax.xml.transform.stream.StreamSource"
] | import java.io.IOException; import java.io.StringReader; import javax.xml.transform.Result; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.stream.StreamSource; | import java.io.*; import javax.xml.transform.*; import javax.xml.transform.stream.*; | [
"java.io",
"javax.xml"
] | java.io; javax.xml; | 1,542,774 |
public CmsUUID getStructureId() {
return m_structureId;
}
}
private static final Log LOG = CmsLog.getLog(CmsMappingResolutionContext.class);
private CmsObject m_cms;
private List<InternalUrlNameMappingEntry> m_urlNameMappingEntries = Lists.newArrayList();
public CmsMappingResolutionContext() {
// empty
} | CmsUUID function() { return m_structureId; } } private static final Log LOG = CmsLog.getLog(CmsMappingResolutionContext.class); private CmsObject m_cms; private List<InternalUrlNameMappingEntry> m_urlNameMappingEntries = Lists.newArrayList(); public CmsMappingResolutionContext() { } | /**
* Returns the structureId.<p>
*
* @return the structureId
*/ | Returns the structureId | getStructureId | {
"repo_name": "ggiudetti/opencms-core",
"path": "src/org/opencms/xml/content/CmsMappingResolutionContext.java",
"license": "lgpl-2.1",
"size": 6230
} | [
"com.google.common.collect.Lists",
"java.util.List",
"org.apache.commons.logging.Log",
"org.opencms.file.CmsObject",
"org.opencms.main.CmsLog",
"org.opencms.util.CmsUUID"
] | import com.google.common.collect.Lists; import java.util.List; import org.apache.commons.logging.Log; import org.opencms.file.CmsObject; import org.opencms.main.CmsLog; import org.opencms.util.CmsUUID; | import com.google.common.collect.*; import java.util.*; import org.apache.commons.logging.*; import org.opencms.file.*; import org.opencms.main.*; import org.opencms.util.*; | [
"com.google.common",
"java.util",
"org.apache.commons",
"org.opencms.file",
"org.opencms.main",
"org.opencms.util"
] | com.google.common; java.util; org.apache.commons; org.opencms.file; org.opencms.main; org.opencms.util; | 1,357,163 |
Map<String, List<ChatRequest>> buildTagMap(List<ChatRequest> requestList) {
Map<String, List<ChatRequest>> tagMap = new HashMap<>();
for (ChatRequest request : requestList) {
if (request.getTags().isEmpty()) {
List<ChatRequest> randomReqs = tagMap.getOrDefault(MATCH_RANDOM_TAG, new ArrayList<>());
randomReqs.add(request);
tagMap.put(MATCH_RANDOM_TAG, randomReqs);
} else {
for (String tag : request.getTags()) {
List<ChatRequest> tagReqs = tagMap.getOrDefault(tag, new ArrayList<>());
tagReqs.add(request);
tagMap.put(tag, tagReqs);
}
}
}
return tagMap;
} | Map<String, List<ChatRequest>> buildTagMap(List<ChatRequest> requestList) { Map<String, List<ChatRequest>> tagMap = new HashMap<>(); for (ChatRequest request : requestList) { if (request.getTags().isEmpty()) { List<ChatRequest> randomReqs = tagMap.getOrDefault(MATCH_RANDOM_TAG, new ArrayList<>()); randomReqs.add(request); tagMap.put(MATCH_RANDOM_TAG, randomReqs); } else { for (String tag : request.getTags()) { List<ChatRequest> tagReqs = tagMap.getOrDefault(tag, new ArrayList<>()); tagReqs.add(request); tagMap.put(tag, tagReqs); } } } return tagMap; } | /**
* Creates mapping from tags in given requests, to all requests in given list which share the same
* tag.
*
* @param requestList List of ChatRequests to be grouped by tag.
* @return Mapping of tag's string to list of ChatRequests with matching tag.
*/ | Creates mapping from tags in given requests, to all requests in given list which share the same tag | buildTagMap | {
"repo_name": "googleinterns/step250-2020",
"path": "coffee-chats/src/main/java/com/google/step/coffee/tasks/RequestMatcher.java",
"license": "apache-2.0",
"size": 18885
} | [
"com.google.step.coffee.entity.ChatRequest",
"java.util.ArrayList",
"java.util.HashMap",
"java.util.List",
"java.util.Map"
] | import com.google.step.coffee.entity.ChatRequest; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; | import com.google.step.coffee.entity.*; import java.util.*; | [
"com.google.step",
"java.util"
] | com.google.step; java.util; | 641,233 |
EList<Classifier> getClassifier();
| EList<Classifier> getClassifier(); | /**
* Returns the value of the '<em><b>Classifier</b></em>' containment reference list.
* The list contents are of type {@link ClassMM.Classifier}.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Classifier</em>' containment reference list isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Classifier</em>' containment reference list.
* @see ClassMM.ClassMMPackage#getClassModel_Classifier()
* @model containment="true"
* @generated
*/ | Returns the value of the 'Classifier' containment reference list. The list contents are of type <code>ClassMM.Classifier</code>. If the meaning of the 'Classifier' containment reference list isn't clear, there really should be more of a description here... | getClassifier | {
"repo_name": "diverse-project/k3",
"path": "k3-samples-incomplete/class2rdbms/fr.inria.triskell.k3.sample.class2rdbms.classmm.model/src/ClassMM/ClassModel.java",
"license": "epl-1.0",
"size": 1857
} | [
"org.eclipse.emf.common.util.EList"
] | import org.eclipse.emf.common.util.EList; | import org.eclipse.emf.common.util.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 2,770,849 |
public void serialize( final File rootDir ) throws IOException
{
final String fileName = getType().replace( '.', '/' ).concat( ".meta" );
final String depsName = getType().replace( '.', '/' ).concat( ".deps" );
// Write out the meta file if it has changed.
writeIfChanged( m_attributes, new File( rootDir, fileName ),
"Meta information for " + getType() );
final File depsFile = new File( rootDir, depsName );
if ( m_dependencies.size() > 0 )
{
final PrintWriter writer = new PrintWriter( new OutputStreamWriter(
new ChangedFileOutputStream( depsFile ), "UTF-8" ) );
try
{
for ( Iterator iter = m_dependencies.iterator(); iter.hasNext(); )
{
Service service = (Service)iter.next();
writer.println( service.getType() );
}
}
finally
{
writer.close();
}
}
else if ( depsFile.exists() )
{
depsFile.delete();
}
} | void function( final File rootDir ) throws IOException { final String fileName = getType().replace( '.', '/' ).concat( ".meta" ); final String depsName = getType().replace( '.', '/' ).concat( ".deps" ); writeIfChanged( m_attributes, new File( rootDir, fileName ), STR + getType() ); final File depsFile = new File( rootDir, depsName ); if ( m_dependencies.size() > 0 ) { final PrintWriter writer = new PrintWriter( new OutputStreamWriter( new ChangedFileOutputStream( depsFile ), "UTF-8" ) ); try { for ( Iterator iter = m_dependencies.iterator(); iter.hasNext(); ) { Service service = (Service)iter.next(); writer.println( service.getType() ); } } finally { writer.close(); } } else if ( depsFile.exists() ) { depsFile.delete(); } } | /**
* Output the meta information.
*
* @param rootDir
* @throws IOException
*/ | Output the meta information | serialize | {
"repo_name": "eva-xuyen/excalibur",
"path": "fortress/meta/src/java/org/apache/avalon/fortress/tools/Component.java",
"license": "apache-2.0",
"size": 15536
} | [
"java.io.File",
"java.io.IOException",
"java.io.OutputStreamWriter",
"java.io.PrintWriter",
"java.util.Iterator"
] | import java.io.File; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.util.Iterator; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 2,524,426 |
public final void teardown() throws Throwable {
LOGGER.info("Stop the server...");
if (mTomcat.getServer() != null && mTomcat.getServer().getState() != LifecycleState.DESTROYED) {
if (mTomcat.getServer().getState() != LifecycleState.STOPPED) {
mTomcat.stop();
}
mTomcat.destroy();
}
} | final void function() throws Throwable { LOGGER.info(STR); if (mTomcat.getServer() != null && mTomcat.getServer().getState() != LifecycleState.DESTROYED) { if (mTomcat.getServer().getState() != LifecycleState.STOPPED) { mTomcat.stop(); } mTomcat.destroy(); } } | /**
* Stops the tomcat server.
*
* @throws Throwable
* if anything goes wrong.
*/ | Stops the tomcat server | teardown | {
"repo_name": "nathieb/red5_embedded",
"path": "src/main/java/com/red5/EmbeddedTomcat.java",
"license": "gpl-3.0",
"size": 3341
} | [
"org.apache.catalina.LifecycleState"
] | import org.apache.catalina.LifecycleState; | import org.apache.catalina.*; | [
"org.apache.catalina"
] | org.apache.catalina; | 870,551 |
public JavaClass findClass( String className ) {
SoftReference ref = (SoftReference) _loadedClasses.get(className);
if (ref == null) {
return null;
}
return (JavaClass) ref.get();
} | JavaClass function( String className ) { SoftReference ref = (SoftReference) _loadedClasses.get(className); if (ref == null) { return null; } return (JavaClass) ref.get(); } | /**
* Find an already defined (cached) JavaClass object by name.
*/ | Find an already defined (cached) JavaClass object by name | findClass | {
"repo_name": "Xyene/JBL",
"path": "src/test/java/benchmark/bcel/util/SyntheticRepository.java",
"license": "lgpl-3.0",
"size": 6386
} | [
"java.lang.ref.SoftReference"
] | import java.lang.ref.SoftReference; | import java.lang.ref.*; | [
"java.lang"
] | java.lang; | 322,433 |
@Override
public void dropBlockAsItemWithChance(World worldIn, BlockPos pos, IBlockState state, float chance, int fortune)
{
super.dropBlockAsItemWithChance(worldIn, pos, state, chance, fortune);
}
| void function(World worldIn, BlockPos pos, IBlockState state, float chance, int fortune) { super.dropBlockAsItemWithChance(worldIn, pos, state, chance, fortune); } | /**
* Spawns this Block's drops into the World as EntityItems.
*/ | Spawns this Block's drops into the World as EntityItems | dropBlockAsItemWithChance | {
"repo_name": "Alec-WAM/CrystalMod",
"path": "src/main/java/alec_wam/CrystalMod/blocks/crops/BlockCrystalPlant.java",
"license": "mit",
"size": 10097
} | [
"net.minecraft.block.state.IBlockState",
"net.minecraft.util.math.BlockPos",
"net.minecraft.world.World"
] | import net.minecraft.block.state.IBlockState; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; | import net.minecraft.block.state.*; import net.minecraft.util.math.*; import net.minecraft.world.*; | [
"net.minecraft.block",
"net.minecraft.util",
"net.minecraft.world"
] | net.minecraft.block; net.minecraft.util; net.minecraft.world; | 2,054,364 |
public final Bundle call(Uri uri, String method, String arg, Bundle extras) {
if (uri == null) {
throw new NullPointerException("uri == null");
}
if (method == null) {
throw new NullPointerException("method == null");
}
IContentProvider provider = acquireProvider(uri);
if (provider == null) {
throw new IllegalArgumentException("Unknown URI " + uri);
}
try {
return provider.call(method, arg, extras);
} catch (RemoteException e) {
// Arbitrary and not worth documenting, as Activity
// Manager will kill this process shortly anyway.
return null;
} finally {
releaseProvider(provider);
}
} | final Bundle function(Uri uri, String method, String arg, Bundle extras) { if (uri == null) { throw new NullPointerException(STR); } if (method == null) { throw new NullPointerException(STR); } IContentProvider provider = acquireProvider(uri); if (provider == null) { throw new IllegalArgumentException(STR + uri); } try { return provider.call(method, arg, extras); } catch (RemoteException e) { return null; } finally { releaseProvider(provider); } } | /**
* Call a provider-defined method. This can be used to implement
* read or write interfaces which are cheaper than using a Cursor and/or
* do not fit into the traditional table model.
*
* @param method provider-defined method name to call. Opaque to
* framework, but must be non-null.
* @param arg provider-defined String argument. May be null.
* @param extras provider-defined Bundle argument. May be null.
* @return a result Bundle, possibly null. Will be null if the ContentProvider
* does not implement call.
* @throws NullPointerException if uri or method is null
* @throws IllegalArgumentException if uri is not known
*/ | Call a provider-defined method. This can be used to implement read or write interfaces which are cheaper than using a Cursor and/or do not fit into the traditional table model | call | {
"repo_name": "haikuowuya/android_system_code",
"path": "src/android/content/ContentResolver.java",
"license": "apache-2.0",
"size": 82308
} | [
"android.net.Uri",
"android.os.Bundle",
"android.os.RemoteException"
] | import android.net.Uri; import android.os.Bundle; import android.os.RemoteException; | import android.net.*; import android.os.*; | [
"android.net",
"android.os"
] | android.net; android.os; | 428,835 |
@Override public T visitArithOpExpr(@NotNull BooleanExpressions2Parser.ArithOpExprContext ctx) { return visitChildren(ctx); } | @Override public T visitArithOpExpr(@NotNull BooleanExpressions2Parser.ArithOpExprContext ctx) { return visitChildren(ctx); } | /**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/ | The default implementation returns the result of calling <code>#visitChildren</code> on ctx | visitBoolOpExpr | {
"repo_name": "giovannibotta/java-sandbox",
"path": "antlr-examples/src/main/java/net/giovannibotta/antlr/example/bool2/BooleanExpressions2BaseVisitor.java",
"license": "apache-2.0",
"size": 3525
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 315,810 |
public SoyGeneralOptions setExperimentalFeatures(List<String> experimentalFeatures) {
this.experimentalFeatures = ImmutableList.copyOf(experimentalFeatures);
return this;
} | SoyGeneralOptions function(List<String> experimentalFeatures) { this.experimentalFeatures = ImmutableList.copyOf(experimentalFeatures); return this; } | /**
* Sets experimental features. These features are unreleased and are not generally available.
*
* @param experimentalFeatures
*/ | Sets experimental features. These features are unreleased and are not generally available | setExperimentalFeatures | {
"repo_name": "rpatil26/closure-templates",
"path": "java/src/com/google/template/soy/shared/SoyGeneralOptions.java",
"license": "apache-2.0",
"size": 8474
} | [
"com.google.common.collect.ImmutableList",
"java.util.List"
] | import com.google.common.collect.ImmutableList; import java.util.List; | import com.google.common.collect.*; import java.util.*; | [
"com.google.common",
"java.util"
] | com.google.common; java.util; | 808,201 |
@Test
public void testMaxPendingJobs() throws Exception {
LOG.info("Test testMaxPendingJobs started.");
long blockSize = 8192L;
int stripeLength = 3;
mySetup(stripeLength, -1); // never har
Path file1 = new Path("/user/dhruba/raidtest/file1");
Path file2 = new Path("/user/dhruba/raidtest/file2");
Path destPath = new Path("/destraid/user/dhruba/raidtest");
long crc1 = TestRaidDfs.createTestFilePartialLastBlock(fileSys, file1,
1, 20, blockSize);
long crc2 = TestRaidDfs.createTestFilePartialLastBlock(fileSys, file2,
1, 20, blockSize);
long file1Len = fileSys.getFileStatus(file1).getLen();
long file2Len = fileSys.getFileStatus(file2).getLen();
LOG.info("Test testMaxPendingJobs created test files");
// create an instance of the RaidNode
Configuration localConf = new Configuration(conf);
localConf.set(RaidNode.RAID_LOCATION_KEY, "/destraid");
localConf.setInt("raid.blockfix.interval", 1000);
localConf.set("raid.blockfix.classname",
"org.apache.hadoop.raid.DistBlockFixer");
localConf.setLong("raid.blockfix.filespertask", 2L);
localConf.setLong("raid.blockfix.maxpendingjobs", 1L);
try {
cnode = RaidNode.createRaidNode(null, localConf);
TestRaidDfs.waitForFileRaided(LOG, fileSys, file1, destPath);
TestRaidDfs.waitForFileRaided(LOG, fileSys, file2, destPath);
cnode.stop(); cnode.join();
FileStatus file1Stat = fileSys.getFileStatus(file1);
FileStatus file2Stat = fileSys.getFileStatus(file2);
DistributedFileSystem dfs = (DistributedFileSystem)fileSys;
LocatedBlocks file1Loc =
RaidDFSUtil.getBlockLocations(dfs, file1.toUri().getPath(),
0, file1Stat.getLen());
LocatedBlocks file2Loc =
RaidDFSUtil.getBlockLocations(dfs, file2.toUri().getPath(),
0, file2Stat.getLen());
String[] corruptFiles = DFSUtil.getCorruptFiles(dfs);
assertEquals("no corrupt files expected", 0, corruptFiles.length);
assertEquals("filesFixed() should return 0 before fixing files",
0, cnode.blockFixer.filesFixed());
// corrupt file1
int[] corruptBlockIdxs = new int[]{0, 4, 6};
for (int idx: corruptBlockIdxs)
corruptBlock(file1Loc.get(idx).getBlock().getBlockName());
reportCorruptBlocks(dfs, file1, corruptBlockIdxs, blockSize);
corruptFiles = DFSUtil.getCorruptFiles(dfs);
cnode = RaidNode.createRaidNode(null, localConf);
DistBlockFixer blockFixer = (DistBlockFixer) cnode.blockFixer;
long start = System.currentTimeMillis();
while (blockFixer.jobsRunning() < 1 &&
System.currentTimeMillis() - start < 240000) {
LOG.info("Test testBlockFix waiting for fixing job 1 to start");
Thread.sleep(10);
}
assertEquals("job not running", 1, blockFixer.jobsRunning());
// corrupt file2
for (int idx: corruptBlockIdxs)
corruptBlock(file2Loc.get(idx).getBlock().getBlockName());
reportCorruptBlocks(dfs, file2, corruptBlockIdxs, blockSize);
corruptFiles = DFSUtil.getCorruptFiles(dfs);
// wait until both files are fixed
while (blockFixer.filesFixed() < 2 &&
System.currentTimeMillis() - start < 240000) {
// make sure the block fixer does not start a second job while
// the first one is still running
assertTrue("too many jobs running", blockFixer.jobsRunning() <= 1);
Thread.sleep(10);
}
assertEquals("files not fixed", 2, blockFixer.filesFixed());
dfs = getDFS(conf, dfs);
try {
Thread.sleep(5*1000);
} catch (InterruptedException ignore) {
}
assertTrue("file not fixed",
TestRaidDfs.validateFile(dfs, file1, file1Len, crc1));
assertTrue("file not fixed",
TestRaidDfs.validateFile(dfs, file2, file2Len, crc2));
} catch (Exception e) {
LOG.info("Test testMaxPendingJobs exception " + e +
StringUtils.stringifyException(e));
throw e;
} finally {
myTearDown();
}
}
static class FakeDistBlockFixer extends DistBlockFixer {
Map<String, List<String>> submittedJobs =
new HashMap<String, List<String>>();
FakeDistBlockFixer(Configuration conf) {
super(conf);
} | void function() throws Exception { LOG.info(STR); long blockSize = 8192L; int stripeLength = 3; mySetup(stripeLength, -1); Path file1 = new Path(STR); Path file2 = new Path(STR); Path destPath = new Path(STR); long crc1 = TestRaidDfs.createTestFilePartialLastBlock(fileSys, file1, 1, 20, blockSize); long crc2 = TestRaidDfs.createTestFilePartialLastBlock(fileSys, file2, 1, 20, blockSize); long file1Len = fileSys.getFileStatus(file1).getLen(); long file2Len = fileSys.getFileStatus(file2).getLen(); LOG.info(STR); Configuration localConf = new Configuration(conf); localConf.set(RaidNode.RAID_LOCATION_KEY, STR); localConf.setInt(STR, 1000); localConf.set(STR, STR); localConf.setLong(STR, 2L); localConf.setLong(STR, 1L); try { cnode = RaidNode.createRaidNode(null, localConf); TestRaidDfs.waitForFileRaided(LOG, fileSys, file1, destPath); TestRaidDfs.waitForFileRaided(LOG, fileSys, file2, destPath); cnode.stop(); cnode.join(); FileStatus file1Stat = fileSys.getFileStatus(file1); FileStatus file2Stat = fileSys.getFileStatus(file2); DistributedFileSystem dfs = (DistributedFileSystem)fileSys; LocatedBlocks file1Loc = RaidDFSUtil.getBlockLocations(dfs, file1.toUri().getPath(), 0, file1Stat.getLen()); LocatedBlocks file2Loc = RaidDFSUtil.getBlockLocations(dfs, file2.toUri().getPath(), 0, file2Stat.getLen()); String[] corruptFiles = DFSUtil.getCorruptFiles(dfs); assertEquals(STR, 0, corruptFiles.length); assertEquals(STR, 0, cnode.blockFixer.filesFixed()); int[] corruptBlockIdxs = new int[]{0, 4, 6}; for (int idx: corruptBlockIdxs) corruptBlock(file1Loc.get(idx).getBlock().getBlockName()); reportCorruptBlocks(dfs, file1, corruptBlockIdxs, blockSize); corruptFiles = DFSUtil.getCorruptFiles(dfs); cnode = RaidNode.createRaidNode(null, localConf); DistBlockFixer blockFixer = (DistBlockFixer) cnode.blockFixer; long start = System.currentTimeMillis(); while (blockFixer.jobsRunning() < 1 && System.currentTimeMillis() - start < 240000) { LOG.info(STR); Thread.sleep(10); } 
assertEquals(STR, 1, blockFixer.jobsRunning()); for (int idx: corruptBlockIdxs) corruptBlock(file2Loc.get(idx).getBlock().getBlockName()); reportCorruptBlocks(dfs, file2, corruptBlockIdxs, blockSize); corruptFiles = DFSUtil.getCorruptFiles(dfs); while (blockFixer.filesFixed() < 2 && System.currentTimeMillis() - start < 240000) { assertTrue(STR, blockFixer.jobsRunning() <= 1); Thread.sleep(10); } assertEquals(STR, 2, blockFixer.filesFixed()); dfs = getDFS(conf, dfs); try { Thread.sleep(5*1000); } catch (InterruptedException ignore) { } assertTrue(STR, TestRaidDfs.validateFile(dfs, file1, file1Len, crc1)); assertTrue(STR, TestRaidDfs.validateFile(dfs, file2, file2Len, crc2)); } catch (Exception e) { LOG.info(STR + e + StringUtils.stringifyException(e)); throw e; } finally { myTearDown(); } } static class FakeDistBlockFixer extends DistBlockFixer { Map<String, List<String>> submittedJobs = new HashMap<String, List<String>>(); FakeDistBlockFixer(Configuration conf) { super(conf); } | /**
* tests that the distributed block fixer obeys
* the limit on how many jobs to submit simultaneously.
*/ | tests that the distributed block fixer obeys the limit on how many jobs to submit simultaneously | testMaxPendingJobs | {
"repo_name": "jchen123/hadoop-20-warehouse-fix",
"path": "src/contrib/raid/src/test/org/apache/hadoop/raid/TestBlockFixer.java",
"license": "apache-2.0",
"size": 40571
} | [
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.FileStatus",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hdfs.DFSUtil",
"org.apache.hadoop.hdfs.DistributedFileSystem",
"org.apache.hadoop.hdfs.RaidDFSUtil",
"org.apache.hadoop.hdfs.TestRaidDfs",
"org.apache.hadoop.hdfs.protocol.LocatedBlocks",
"org.apache.hadoop.util.StringUtils"
] | import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.RaidDFSUtil; import org.apache.hadoop.hdfs.TestRaidDfs; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.util.StringUtils; | import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.util.*; | [
"java.util",
"org.apache.hadoop"
] | java.util; org.apache.hadoop; | 2,858,916 |
public void startcutcopyservice(String targetfolder) {
List<String> cutcopylist = new ArrayList<String>();
Intent cutcopyintent = new Intent(act.get(), CutCopyService.class);
cutcopyintent.putExtra("action", currentaction);
for (int i = 0; i < operationqueue.size(); i++) {
cutcopylist.add(operationqueue.get(i).getAbsolutePath());
}
cutcopyintent.putStringArrayListExtra("filelist",
(ArrayList<String>) cutcopylist);
cutcopyintent.putExtra("targetfolder", targetfolder);
act.get().startService(cutcopyintent);
cutcopylist.clear();
}
| void function(String targetfolder) { List<String> cutcopylist = new ArrayList<String>(); Intent cutcopyintent = new Intent(act.get(), CutCopyService.class); cutcopyintent.putExtra(STR, currentaction); for (int i = 0; i < operationqueue.size(); i++) { cutcopylist.add(operationqueue.get(i).getAbsolutePath()); } cutcopyintent.putStringArrayListExtra(STR, (ArrayList<String>) cutcopylist); cutcopyintent.putExtra(STR, targetfolder); act.get().startService(cutcopyintent); cutcopylist.clear(); } | /**
* Start the service to copy or move files
* @param destination folder
*/ | Start the service to copy or move files | startcutcopyservice | {
"repo_name": "ashmikuz/Open-file-manager",
"path": "src/com/open/file/manager/FileOperations.java",
"license": "gpl-3.0",
"size": 16454
} | [
"android.content.Intent",
"java.util.ArrayList",
"java.util.List"
] | import android.content.Intent; import java.util.ArrayList; import java.util.List; | import android.content.*; import java.util.*; | [
"android.content",
"java.util"
] | android.content; java.util; | 1,204,637 |
public OutputStream getOutputStream() throws IOException {
return mOutputStream;
} | OutputStream function() throws IOException { return mOutputStream; } | /**
* Get the output stream associated with this socket.
* <p>The output stream will be returned even if the socket is not yet
* connected, but operations on that stream will throw IOException until
* the associated socket is connected.
* @return OutputStream
*/ | Get the output stream associated with this socket. The output stream will be returned even if the socket is not yet connected, but operations on that stream will throw IOException until the associated socket is connected | getOutputStream | {
"repo_name": "mateor/pdroid",
"path": "android-4.0.3_r1/trunk/frameworks/base/core/java/android/bluetooth/BluetoothSocket.java",
"license": "gpl-3.0",
"size": 15340
} | [
"java.io.IOException",
"java.io.OutputStream"
] | import java.io.IOException; import java.io.OutputStream; | import java.io.*; | [
"java.io"
] | java.io; | 1,518,508 |
public static ILateralCacheListener getInstance( ILateralCacheAttributes ilca, ICompositeCacheManager cacheMgr )
{
//throws IOException, NotBoundException
ILateralCacheListener ins = (ILateralCacheListener) instances.get( ilca.getJGChannelProperties() );
synchronized ( LateralCacheJGListener.class )
{
if ( ins == null )
{
ins = new LateralCacheJGListener( ilca );
ins.setCacheManager( cacheMgr );
ins.init();
}
if ( log.isInfoEnabled() )
{
log.info( "created new listener " + ilca.getJGChannelProperties() );
}
instances.put( ilca.getJGChannelProperties(), ins );
}
return ins;
} | static ILateralCacheListener function( ILateralCacheAttributes ilca, ICompositeCacheManager cacheMgr ) { ILateralCacheListener ins = (ILateralCacheListener) instances.get( ilca.getJGChannelProperties() ); synchronized ( LateralCacheJGListener.class ) { if ( ins == null ) { ins = new LateralCacheJGListener( ilca ); ins.setCacheManager( cacheMgr ); ins.init(); } if ( log.isInfoEnabled() ) { log.info( STR + ilca.getJGChannelProperties() ); } instances.put( ilca.getJGChannelProperties(), ins ); } return ins; } | /**
* Gets the instance attribute of the LateralCacheJGListener class
*
* @return The instance value
* @param ilca
* @param cacheMgr
*/ | Gets the instance attribute of the LateralCacheJGListener class | getInstance | {
"repo_name": "tikue/jcs2-snapshot",
"path": "auxiliary-builds/jdk14/src/java/org/apache/commons/jcs/auxiliary/lateral/javagroups/LateralCacheJGListener.java",
"license": "apache-2.0",
"size": 8470
} | [
"org.apache.commons.jcs.auxiliary.lateral.behavior.ILateralCacheAttributes",
"org.apache.commons.jcs.auxiliary.lateral.behavior.ILateralCacheListener",
"org.apache.commons.jcs.engine.behavior.ICompositeCacheManager"
] | import org.apache.commons.jcs.auxiliary.lateral.behavior.ILateralCacheAttributes; import org.apache.commons.jcs.auxiliary.lateral.behavior.ILateralCacheListener; import org.apache.commons.jcs.engine.behavior.ICompositeCacheManager; | import org.apache.commons.jcs.auxiliary.lateral.behavior.*; import org.apache.commons.jcs.engine.behavior.*; | [
"org.apache.commons"
] | org.apache.commons; | 867,170 |
public List<ExprUnion> getAllExprUnions();
}
// -----------------------------------------------------------------------------------------------
public static interface MsgBlockNode extends BlockNode {}
// -----------------------------------------------------------------------------------------------
public static interface MsgPlaceholderInitialContentNode extends StandaloneNode { | List<ExprUnion> function(); } public static interface MsgBlockNode extends BlockNode {} public static interface MsgPlaceholderInitialContentNode extends StandaloneNode { | /**
* Gets the list of expressions in this node.
* @return The list of expressions in this node.
*/ | Gets the list of expressions in this node | getAllExprUnions | {
"repo_name": "wimm/google-closure-templates",
"path": "java/src/com/google/template/soy/soytree/SoyNode.java",
"license": "apache-2.0",
"size": 11263
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,895,487 |
@Override
public void stop(BundleContext context) throws Exception {
AstroActivator.context = null;
logger.debug("Astro binding has been stopped.");
} | void function(BundleContext context) throws Exception { AstroActivator.context = null; logger.debug(STR); } | /**
* Called whenever the OSGi framework stops our bundle
*/ | Called whenever the OSGi framework stops our bundle | stop | {
"repo_name": "idserda/openhab",
"path": "bundles/binding/org.openhab.binding.astro/src/main/java/org/openhab/binding/astro/internal/bus/AstroActivator.java",
"license": "epl-1.0",
"size": 1506
} | [
"org.osgi.framework.BundleContext"
] | import org.osgi.framework.BundleContext; | import org.osgi.framework.*; | [
"org.osgi.framework"
] | org.osgi.framework; | 959,466 |
public Driver createDriver(Properties props) throws SQLException {
Driver driver = newJdbcDriver();
setBeanProperties(driver, props);
return driver;
} | Driver function(Properties props) throws SQLException { Driver driver = newJdbcDriver(); setBeanProperties(driver, props); return driver; } | /**
* Creates a new JDBC Driver instance.
*
* @param props The properties used to configure the Driver. {@literal null} indicates no properties. If the
* property cannot be set on the Driver being created then an SQLException must be thrown.
* @return A configured java.sql.Driver.
* @throws SQLException If the driver instance cannot be created
*/ | Creates a new JDBC Driver instance | createDriver | {
"repo_name": "wisdom-framework/wisdom-jdbc",
"path": "wisdom-jdbc-drivers/abstract-jdbc-driver/src/main/java/org/wisdom/jdbc/driver/helpers/AbstractDataSourceFactory.java",
"license": "apache-2.0",
"size": 6060
} | [
"java.sql.Driver",
"java.sql.SQLException",
"java.util.Properties"
] | import java.sql.Driver; import java.sql.SQLException; import java.util.Properties; | import java.sql.*; import java.util.*; | [
"java.sql",
"java.util"
] | java.sql; java.util; | 941,229 |
public static String getParam(HttpServletRequest req, String strParam, String strDefault)
{
String strParams[] = null;
if (req == null)
return null;
strParams = req.getParameterValues(strParam); // Menu page
if (DBParams.URL.equals(strParam))
{
strParams = new String[1];
strParams[0] = req.getRequestURL().toString(); // Special param (url).
}
if (DBParams.CODEBASE.equals(strParam))
{
strParams = new String[1];
strParams[0] = req.getRequestURL().toString(); // Special param (url).
if (strParams[0].endsWith(Constants.DEFAULT_SERVLET))
strParams[0] = strParams[0].substring(0, strParams[0].length() - Constants.DEFAULT_SERVLET.length());
if (!strParams[0].endsWith("/"))
strParams[0] = strParams[0] + "/"; // Codebase should always end with this
}
if (DBParams.DOMAIN.equals(strParam))
{
strParams = new String[1];
strParams[0] = req.getRequestURL().toString(); // Special param (url).
return Utility.getDomainFromURL(strParams[0], null);
}
if (strParams == null)
if (DBParams.DATATYPE.equals(strParam))
{
strParams = new String[1];
strParams[0] = req.getRequestURL().toString(); // Special param (url).
if ((strParams[0] != null)
&& (strParams[0].lastIndexOf('/') != -1))
{
strParams[0] = strParams[0].substring(strParams[0].lastIndexOf('/') + 1);
if (strParams[0].indexOf('.') != -1)
strParams[0] = strParams[0].substring(0, strParams[0].indexOf('.'));
if ((!DBParams.TABLE_PARAM.equalsIgnoreCase(strParams[0]))
&& (!DBParams.WEBSTART_PARAM.equalsIgnoreCase(strParams[0]))
&& (!DBParams.WEBSTART_APPLET_PARAM.equalsIgnoreCase(strParams[0]))
&& (!DBParams.WSDL_PARAM.equalsIgnoreCase(strParams[0]))
&& (!DBParams.IMAGE_PATH.equalsIgnoreCase(strParams[0])))
strParams = null; // Must be one of these
}
else
strParams = null;
}
if (strParams == null)
return strDefault;
if (strParams.length == 0)
return Constants.BLANK;
if (strParams[0] == null)
return Constants.BLANK;
return strParams[0];
} | static String function(HttpServletRequest req, String strParam, String strDefault) { String strParams[] = null; if (req == null) return null; strParams = req.getParameterValues(strParam); if (DBParams.URL.equals(strParam)) { strParams = new String[1]; strParams[0] = req.getRequestURL().toString(); } if (DBParams.CODEBASE.equals(strParam)) { strParams = new String[1]; strParams[0] = req.getRequestURL().toString(); if (strParams[0].endsWith(Constants.DEFAULT_SERVLET)) strParams[0] = strParams[0].substring(0, strParams[0].length() - Constants.DEFAULT_SERVLET.length()); if (!strParams[0].endsWith("/")) strParams[0] = strParams[0] + "/"; } if (DBParams.DOMAIN.equals(strParam)) { strParams = new String[1]; strParams[0] = req.getRequestURL().toString(); return Utility.getDomainFromURL(strParams[0], null); } if (strParams == null) if (DBParams.DATATYPE.equals(strParam)) { strParams = new String[1]; strParams[0] = req.getRequestURL().toString(); if ((strParams[0] != null) && (strParams[0].lastIndexOf('/') != -1)) { strParams[0] = strParams[0].substring(strParams[0].lastIndexOf('/') + 1); if (strParams[0].indexOf('.') != -1) strParams[0] = strParams[0].substring(0, strParams[0].indexOf('.')); if ((!DBParams.TABLE_PARAM.equalsIgnoreCase(strParams[0])) && (!DBParams.WEBSTART_PARAM.equalsIgnoreCase(strParams[0])) && (!DBParams.WEBSTART_APPLET_PARAM.equalsIgnoreCase(strParams[0])) && (!DBParams.WSDL_PARAM.equalsIgnoreCase(strParams[0])) && (!DBParams.IMAGE_PATH.equalsIgnoreCase(strParams[0]))) strParams = null; } else strParams = null; } if (strParams == null) return strDefault; if (strParams.length == 0) return Constants.BLANK; if (strParams[0] == null) return Constants.BLANK; return strParams[0]; } | /**
* Get the first occurrence of this parameter.
*/ | Get the first occurrence of this parameter | getParam | {
"repo_name": "jbundle/jbundle",
"path": "base/screen/control/servlet/src/main/java/org/jbundle/base/screen/control/servlet/BaseHttpTask.java",
"license": "gpl-3.0",
"size": 41977
} | [
"javax.servlet.http.HttpServletRequest",
"org.jbundle.base.model.DBParams",
"org.jbundle.base.model.Utility",
"org.jbundle.thin.base.db.Constants"
] | import javax.servlet.http.HttpServletRequest; import org.jbundle.base.model.DBParams; import org.jbundle.base.model.Utility; import org.jbundle.thin.base.db.Constants; | import javax.servlet.http.*; import org.jbundle.base.model.*; import org.jbundle.thin.base.db.*; | [
"javax.servlet",
"org.jbundle.base",
"org.jbundle.thin"
] | javax.servlet; org.jbundle.base; org.jbundle.thin; | 1,691,479 |
public Hashtable getEnvironment()
{
return env;
} | Hashtable function() { return env; } | /**
* Gets derived Script environment
*
* @return Script environment
*
*/ | Gets derived Script environment | getEnvironment | {
"repo_name": "Netprophets/JBOSSWEB_7_0_13_FINAL",
"path": "java/org/jboss/web/php/ScriptEnvironment.java",
"license": "lgpl-3.0",
"size": 25947
} | [
"java.util.Hashtable"
] | import java.util.Hashtable; | import java.util.*; | [
"java.util"
] | java.util; | 829,243 |
public void testMapValue()
{
try
{
PersistenceManager pm = pmf.getPersistenceManager();
Transaction tx = pm.currentTransaction();
try
{
tx.begin();
MapConverterHolder h1 = new MapConverterHolder(1, "First");
h1.getConvertedValueMap().put("AB", new MyType1("A", "B"));
h1.getConvertedValueMap().put("CD", new MyType1("C", "D"));
pm.makePersistent(h1);
MapConverterHolder h2 = new MapConverterHolder(2, "Second");
pm.makePersistent(h2);
tx.commit();
}
finally
{
if (tx.isActive())
{
tx.rollback();
}
pm.close();
}
pmf.getDataStoreCache().evictAll();
pm = pmf.getPersistenceManager();
tx = pm.currentTransaction();
try
{
tx.begin();
Query<MapConverterHolder> q = pm.newQuery(MapConverterHolder.class, "this.name == :name");
Map<String, Object> params = new HashMap<String, Object>();
params.put("name", "First");
q.setNamedParameters(params);
List<MapConverterHolder> results = q.executeList();
assertEquals(1, results.size());
MapConverterHolder h = results.get(0);
Map<String, MyType1> hmap = h.getConvertedValueMap();
assertNotNull(hmap);
assertEquals(2, hmap.size());
assertTrue(hmap.containsKey("AB"));
assertTrue(hmap.containsKey("CD"));
MyType1 val1 = hmap.get("AB");
assertEquals("A", val1.getName1());
assertEquals("B", val1.getName2());
MyType1 val2 = hmap.get("CD");
assertEquals("C", val2.getName1());
assertEquals("D", val2.getName2());
MapConverterHolder h2 = pm.getObjectById(MapConverterHolder.class, 2);
Map<String, MyType1> h2map = h2.getConvertedValueMap();
assertNotNull(h2map);
h2map.put("HJ", new MyType1("H", "J"));
tx.commit();
}
finally
{
if (tx.isActive())
{
tx.rollback();
}
pm.close();
}
pmf.getDataStoreCache().evictAll();
pm = pmf.getPersistenceManager();
tx = pm.currentTransaction();
try
{
tx.begin();
MapConverterHolder h2 = pm.getObjectById(MapConverterHolder.class, 2);
Map<String, MyType1> h2map = h2.getConvertedValueMap();
assertNotNull(h2map);
assertEquals(1, h2map.size());
assertTrue(h2map.containsKey("HJ"));
MyType1 val1 = h2map.get("HJ");
assertEquals("H", val1.getName1());
assertEquals("J", val1.getName2());
tx.commit();
}
finally
{
if (tx.isActive())
{
tx.rollback();
}
pm.close();
}
}
finally
{
// Cleanup
clean(MapConverterHolder.class);
}
}
| void function() { try { PersistenceManager pm = pmf.getPersistenceManager(); Transaction tx = pm.currentTransaction(); try { tx.begin(); MapConverterHolder h1 = new MapConverterHolder(1, "First"); h1.getConvertedValueMap().put("AB", new MyType1("A", "B")); h1.getConvertedValueMap().put("CD", new MyType1("C", "D")); pm.makePersistent(h1); MapConverterHolder h2 = new MapConverterHolder(2, STR); pm.makePersistent(h2); tx.commit(); } finally { if (tx.isActive()) { tx.rollback(); } pm.close(); } pmf.getDataStoreCache().evictAll(); pm = pmf.getPersistenceManager(); tx = pm.currentTransaction(); try { tx.begin(); Query<MapConverterHolder> q = pm.newQuery(MapConverterHolder.class, STR); Map<String, Object> params = new HashMap<String, Object>(); params.put("name", "First"); q.setNamedParameters(params); List<MapConverterHolder> results = q.executeList(); assertEquals(1, results.size()); MapConverterHolder h = results.get(0); Map<String, MyType1> hmap = h.getConvertedValueMap(); assertNotNull(hmap); assertEquals(2, hmap.size()); assertTrue(hmap.containsKey("AB")); assertTrue(hmap.containsKey("CD")); MyType1 val1 = hmap.get("AB"); assertEquals("A", val1.getName1()); assertEquals("B", val1.getName2()); MyType1 val2 = hmap.get("CD"); assertEquals("C", val2.getName1()); assertEquals("D", val2.getName2()); MapConverterHolder h2 = pm.getObjectById(MapConverterHolder.class, 2); Map<String, MyType1> h2map = h2.getConvertedValueMap(); assertNotNull(h2map); h2map.put("HJ", new MyType1("H", "J")); tx.commit(); } finally { if (tx.isActive()) { tx.rollback(); } pm.close(); } pmf.getDataStoreCache().evictAll(); pm = pmf.getPersistenceManager(); tx = pm.currentTransaction(); try { tx.begin(); MapConverterHolder h2 = pm.getObjectById(MapConverterHolder.class, 2); Map<String, MyType1> h2map = h2.getConvertedValueMap(); assertNotNull(h2map); assertEquals(1, h2map.size()); assertTrue(h2map.containsKey("HJ")); MyType1 val1 = h2map.get("HJ"); assertEquals("H", val1.getName1()); 
assertEquals("J", val1.getName2()); tx.commit(); } finally { if (tx.isActive()) { tx.rollback(); } pm.close(); } } finally { clean(MapConverterHolder.class); } } | /**
* Tests for conversion of a value of a map.
*/ | Tests for conversion of a value of a map | testMapValue | {
"repo_name": "datanucleus/tests",
"path": "jdo/general/src/test/org/datanucleus/tests/types/AttributeConverterTest.java",
"license": "apache-2.0",
"size": 19328
} | [
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"javax.jdo.PersistenceManager",
"javax.jdo.Query",
"javax.jdo.Transaction",
"org.datanucleus.samples.types.converters.MapConverterHolder",
"org.datanucleus.samples.types.converters.MyType1"
] | import java.util.HashMap; import java.util.List; import java.util.Map; import javax.jdo.PersistenceManager; import javax.jdo.Query; import javax.jdo.Transaction; import org.datanucleus.samples.types.converters.MapConverterHolder; import org.datanucleus.samples.types.converters.MyType1; | import java.util.*; import javax.jdo.*; import org.datanucleus.samples.types.converters.*; | [
"java.util",
"javax.jdo",
"org.datanucleus.samples"
] | java.util; javax.jdo; org.datanucleus.samples; | 576,525 |
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(
String resourceGroupName, String publicIpAddressName, Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (publicIpAddressName == null) {
return Mono
.error(new IllegalArgumentException("Parameter publicIpAddressName is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
final String apiVersion = "2021-05-01";
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.delete(
this.client.getEndpoint(),
resourceGroupName,
publicIpAddressName,
apiVersion,
this.client.getSubscriptionId(),
accept,
context);
} | @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Flux<ByteBuffer>>> function( String resourceGroupName, String publicIpAddressName, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (publicIpAddressName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( STR)); } final String apiVersion = STR; final String accept = STR; context = this.client.mergeContext(context); return service .delete( this.client.getEndpoint(), resourceGroupName, publicIpAddressName, apiVersion, this.client.getSubscriptionId(), accept, context); } | /**
* Deletes the specified public IP address.
*
* @param resourceGroupName The name of the resource group.
* @param publicIpAddressName The name of the public IP address.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link Response} on successful completion of {@link Mono}.
*/ | Deletes the specified public IP address | deleteWithResponseAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/implementation/PublicIpAddressesClientImpl.java",
"license": "mit",
"size": 177318
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"com.azure.core.util.Context",
"java.nio.ByteBuffer"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import java.nio.ByteBuffer; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import java.nio.*; | [
"com.azure.core",
"java.nio"
] | com.azure.core; java.nio; | 1,220,325 |
public IspResponse isp(InetAddress ipAddress) throws IOException, GeoIp2Exception {
return this.get(ipAddress, IspResponse.class, false, "GeoIP2-ISP");
} | IspResponse function(InetAddress ipAddress) throws IOException, GeoIp2Exception { return this.get(ipAddress, IspResponse.class, false, STR); } | /**
* Look up an IP address in a GeoIP2 ISP database.
*
* @param ipAddress IPv4 or IPv6 address to lookup.
* @return an IspResponse for the requested IP address.
* @throws GeoIp2Exception if there is an error looking up the IP
* @throws IOException if there is an IO error
*/ | Look up an IP address in a GeoIP2 ISP database | isp | {
"repo_name": "jtstorck/nifi",
"path": "nifi-nar-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/src/main/java/org/apache/nifi/lookup/maxmind/DatabaseReader.java",
"license": "apache-2.0",
"size": 9797
} | [
"com.maxmind.geoip2.exception.GeoIp2Exception",
"com.maxmind.geoip2.model.IspResponse",
"java.io.IOException",
"java.net.InetAddress"
] | import com.maxmind.geoip2.exception.GeoIp2Exception; import com.maxmind.geoip2.model.IspResponse; import java.io.IOException; import java.net.InetAddress; | import com.maxmind.geoip2.exception.*; import com.maxmind.geoip2.model.*; import java.io.*; import java.net.*; | [
"com.maxmind.geoip2",
"java.io",
"java.net"
] | com.maxmind.geoip2; java.io; java.net; | 2,346,925 |
void removeAttributes(PerunSession sess, Vo vo, List<? extends AttributeDefinition> attributes) throws PrivilegeException, AttributeNotExistsException, VoNotExistsException, WrongAttributeAssignmentException, WrongAttributeValueException, WrongReferenceAttributeValueException; | void removeAttributes(PerunSession sess, Vo vo, List<? extends AttributeDefinition> attributes) throws PrivilegeException, AttributeNotExistsException, VoNotExistsException, WrongAttributeAssignmentException, WrongAttributeValueException, WrongReferenceAttributeValueException; | /**
* PRIVILEGE: Remove attributes only when principal has access to write on them.
* <p>
* Batch version of removeAttribute. This method automatically skip all core attributes which can't be removed this way.
*
* @throws AttributeNotExistsException if the any of attributes doesn't exists in underlying data source
* @see cz.metacentrum.perun.core.api.AttributesManager#removeAttribute(PerunSession, Vo, AttributeDefinition)
*/ | Batch version of removeAttribute. This method automatically skip all core attributes which can't be removed this way | removeAttributes | {
"repo_name": "zoraseb/perun",
"path": "perun-core/src/main/java/cz/metacentrum/perun/core/api/AttributesManager.java",
"license": "bsd-2-clause",
"size": 265364
} | [
"cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException",
"cz.metacentrum.perun.core.api.exceptions.PrivilegeException",
"cz.metacentrum.perun.core.api.exceptions.VoNotExistsException",
"cz.metacentrum.perun.core.api.exceptions.WrongAttributeAssignmentException",
"cz.metacentrum.perun.core.api.exceptions.WrongAttributeValueException",
"cz.metacentrum.perun.core.api.exceptions.WrongReferenceAttributeValueException",
"java.util.List"
] | import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException; import cz.metacentrum.perun.core.api.exceptions.PrivilegeException; import cz.metacentrum.perun.core.api.exceptions.VoNotExistsException; import cz.metacentrum.perun.core.api.exceptions.WrongAttributeAssignmentException; import cz.metacentrum.perun.core.api.exceptions.WrongAttributeValueException; import cz.metacentrum.perun.core.api.exceptions.WrongReferenceAttributeValueException; import java.util.List; | import cz.metacentrum.perun.core.api.exceptions.*; import java.util.*; | [
"cz.metacentrum.perun",
"java.util"
] | cz.metacentrum.perun; java.util; | 1,122,925 |
@Test
public void testFinalizeWithHostsAlreadyCurrent() throws Exception {
String hostName = "h1";
createUpgradeCluster(repositoryVersion2110, hostName);
createHostVersions(repositoryVersion2111, hostName);
// move the old version from CURRENT to INSTALLED and the new version from
// UPGRADED to CURRENT - this will simulate what happens when a host is
// removed before finalization and all hosts transition to CURRENT
List<HostVersionEntity> hostVersions = hostVersionDAO.findAll();
for (HostVersionEntity hostVersion : hostVersions) {
if (hostVersion.getState() == RepositoryVersionState.CURRENT) {
hostVersion.setState(RepositoryVersionState.INSTALLED);
} else {
hostVersion.setState(RepositoryVersionState.CURRENT);
}
hostVersionDAO.merge(hostVersion);
}
// Verify the repo before calling Finalize
Cluster cluster = clusters.getCluster(clusterName);
createUpgrade(cluster, repositoryVersion2111);
// Finalize the upgrade
Map<String, String> commandParams = new HashMap<>();
ExecutionCommand executionCommand = new ExecutionCommand();
executionCommand.setCommandParams(commandParams);
executionCommand.setClusterName(clusterName);
HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
finalizeUpgradeAction.setExecutionCommand(executionCommand);
finalizeUpgradeAction.setHostRoleCommand(hostRoleCommand);
CommandReport report = finalizeUpgradeAction.execute(null);
assertNotNull(report);
assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
} | void function() throws Exception { String hostName = "h1"; createUpgradeCluster(repositoryVersion2110, hostName); createHostVersions(repositoryVersion2111, hostName); List<HostVersionEntity> hostVersions = hostVersionDAO.findAll(); for (HostVersionEntity hostVersion : hostVersions) { if (hostVersion.getState() == RepositoryVersionState.CURRENT) { hostVersion.setState(RepositoryVersionState.INSTALLED); } else { hostVersion.setState(RepositoryVersionState.CURRENT); } hostVersionDAO.merge(hostVersion); } Cluster cluster = clusters.getCluster(clusterName); createUpgrade(cluster, repositoryVersion2111); Map<String, String> commandParams = new HashMap<>(); ExecutionCommand executionCommand = new ExecutionCommand(); executionCommand.setCommandParams(commandParams); executionCommand.setClusterName(clusterName); HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null); hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand)); finalizeUpgradeAction.setExecutionCommand(executionCommand); finalizeUpgradeAction.setHostRoleCommand(hostRoleCommand); CommandReport report = finalizeUpgradeAction.execute(null); assertNotNull(report); assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus()); } | /**
* Tests that finalize still works when there are hosts which are already
* {@link RepositoryVersionState#CURRENT}.
*
* @throws Exception
*/ | Tests that finalize still works when there are hosts which are already <code>RepositoryVersionState#CURRENT</code> | testFinalizeWithHostsAlreadyCurrent | {
"repo_name": "arenadata/ambari",
"path": "ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java",
"license": "apache-2.0",
"size": 28265
} | [
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"org.apache.ambari.server.actionmanager.ExecutionCommandWrapper",
"org.apache.ambari.server.actionmanager.HostRoleCommand",
"org.apache.ambari.server.actionmanager.HostRoleStatus",
"org.apache.ambari.server.agent.CommandReport",
"org.apache.ambari.server.agent.ExecutionCommand",
"org.apache.ambari.server.orm.entities.HostVersionEntity",
"org.apache.ambari.server.state.Cluster",
"org.apache.ambari.server.state.RepositoryVersionState",
"org.junit.Assert"
] | import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper; import org.apache.ambari.server.actionmanager.HostRoleCommand; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.agent.CommandReport; import org.apache.ambari.server.agent.ExecutionCommand; import org.apache.ambari.server.orm.entities.HostVersionEntity; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.RepositoryVersionState; import org.junit.Assert; | import java.util.*; import org.apache.ambari.server.actionmanager.*; import org.apache.ambari.server.agent.*; import org.apache.ambari.server.orm.entities.*; import org.apache.ambari.server.state.*; import org.junit.*; | [
"java.util",
"org.apache.ambari",
"org.junit"
] | java.util; org.apache.ambari; org.junit; | 814,761 |
public PChargingVectorHeader createChargingVectorHeader(String icid) throws ParseException; | PChargingVectorHeader function(String icid) throws ParseException; | /**
* P-Charging-Vector header
*
* @param icid -
* icid string
* @return newly created P-Charging-Vector header
* @throws NullPointerException
* @throws ParseException
*/ | P-Charging-Vector header | createChargingVectorHeader | {
"repo_name": "fhg-fokus-nubomedia/signaling-plane",
"path": "modules/lib-sip/src/main/java/gov/nist/javax/sip/header/HeaderFactoryExt.java",
"license": "apache-2.0",
"size": 9223
} | [
"gov.nist.javax.sip.header.ims.PChargingVectorHeader",
"java.text.ParseException"
] | import gov.nist.javax.sip.header.ims.PChargingVectorHeader; import java.text.ParseException; | import gov.nist.javax.sip.header.ims.*; import java.text.*; | [
"gov.nist.javax",
"java.text"
] | gov.nist.javax; java.text; | 678,605 |
@Override
public boolean attackEntityFrom(DamageSource par1DamageSource, float par2)
{
return this.isEntityInvulnerable() ? false : (this.parentBody == null ? false : this.parentBody.attackEntityFrom(par1DamageSource, par2));
} | boolean function(DamageSource par1DamageSource, float par2) { return this.isEntityInvulnerable() ? false : (this.parentBody == null ? false : this.parentBody.attackEntityFrom(par1DamageSource, par2)); } | /**
* Called when the entity is attacked.
*/ | Called when the entity is attacked | attackEntityFrom | {
"repo_name": "KILLER-CHIEF/Halocraft-KCWM",
"path": "java/net/killerchief/halocraft/entities/vehicles/EntityPassengerSeat.java",
"license": "gpl-2.0",
"size": 5355
} | [
"net.minecraft.util.DamageSource"
] | import net.minecraft.util.DamageSource; | import net.minecraft.util.*; | [
"net.minecraft.util"
] | net.minecraft.util; | 1,011,193 |
public static Session getCurrentSession(FacesContext ctx) {
return (Session)ctx.getApplication().getVariableResolver().resolveVariable(ctx, "session");
}
| static Session function(FacesContext ctx) { return (Session)ctx.getApplication().getVariableResolver().resolveVariable(ctx, STR); } | /**
* Return the current session.
*/ | Return the current session | getCurrentSession | {
"repo_name": "LonDC/demos-xpages",
"path": "template/WebContent/WEB-INF/src/com/ibm/xsp/extlib/util/ExtLibUtil.java",
"license": "mit",
"size": 27969
} | [
"javax.faces.context.FacesContext"
] | import javax.faces.context.FacesContext; | import javax.faces.context.*; | [
"javax.faces"
] | javax.faces; | 1,780,320 |
protected void serverinfo(PrintWriter writer, StringManager smClient) {
if (debug >= 1)
log("serverinfo");
try {
StringBuilder props = new StringBuilder();
props.append("OK - Server info");
props.append("\nTomcat Version: ");
props.append(ServerInfo.getServerInfo());
props.append("\nOS Name: ");
props.append(System.getProperty("os.name"));
props.append("\nOS Version: ");
props.append(System.getProperty("os.version"));
props.append("\nOS Architecture: ");
props.append(System.getProperty("os.arch"));
props.append("\nJVM Version: ");
props.append(System.getProperty("java.runtime.version"));
props.append("\nJVM Vendor: ");
props.append(System.getProperty("java.vm.vendor"));
writer.println(props.toString());
} catch (Throwable t) {
ExceptionUtils.handleThrowable(t);
getServletContext().log("ManagerServlet.serverinfo",t);
writer.println(smClient.getString("managerServlet.exception",
t.toString()));
}
} | void function(PrintWriter writer, StringManager smClient) { if (debug >= 1) log(STR); try { StringBuilder props = new StringBuilder(); props.append(STR); props.append(STR); props.append(ServerInfo.getServerInfo()); props.append(STR); props.append(System.getProperty(STR)); props.append(STR); props.append(System.getProperty(STR)); props.append(STR); props.append(System.getProperty(STR)); props.append(STR); props.append(System.getProperty(STR)); props.append(STR); props.append(System.getProperty(STR)); writer.println(props.toString()); } catch (Throwable t) { ExceptionUtils.handleThrowable(t); getServletContext().log(STR,t); writer.println(smClient.getString(STR, t.toString())); } } | /**
* Writes System OS and JVM properties.
* @param writer Writer to render to
*/ | Writes System OS and JVM properties | serverinfo | {
"repo_name": "plumer/codana",
"path": "tomcat_files/8.0.22/ManagerServlet.java",
"license": "mit",
"size": 61543
} | [
"java.io.PrintWriter",
"org.apache.catalina.util.ServerInfo",
"org.apache.tomcat.util.ExceptionUtils",
"org.apache.tomcat.util.res.StringManager"
] | import java.io.PrintWriter; import org.apache.catalina.util.ServerInfo; import org.apache.tomcat.util.ExceptionUtils; import org.apache.tomcat.util.res.StringManager; | import java.io.*; import org.apache.catalina.util.*; import org.apache.tomcat.util.*; import org.apache.tomcat.util.res.*; | [
"java.io",
"org.apache.catalina",
"org.apache.tomcat"
] | java.io; org.apache.catalina; org.apache.tomcat; | 120,211 |
private void invalidateContainingBuildFile(
DaemonicCellState state, Cell cell, BuildFileTree buildFiles, Path path) {
LOG.verbose("Invalidating rules dependent on change to %s in cell %s", path, cell);
Set<Path> packageBuildFiles = new HashSet<>();
// Find the closest ancestor package for the input path. We'll definitely need to invalidate
// that.
Optional<Path> packageBuildFile = buildFiles.getBasePathOfAncestorTarget(path);
if (packageBuildFile.isPresent()) {
packageBuildFiles.add(cell.getFilesystem().resolve(packageBuildFile.get()));
}
// If we're *not* enforcing package boundary checks, it's possible for multiple ancestor
// packages to reference the same file
if (cell.getBuckConfigView(ParserConfig.class).getPackageBoundaryEnforcementPolicy(path)
!= ParserConfig.PackageBoundaryEnforcement.ENFORCE) {
while (packageBuildFile.isPresent() && packageBuildFile.get().getParent() != null) {
packageBuildFile =
buildFiles.getBasePathOfAncestorTarget(packageBuildFile.get().getParent());
if (packageBuildFile.isPresent()) {
packageBuildFiles.add(packageBuildFile.get());
}
}
}
if (packageBuildFiles.isEmpty()) {
LOG.debug(
"%s is not owned by any build file. Not invalidating anything.",
cell.getFilesystem().resolve(path).toAbsolutePath().toString());
return;
}
buildFilesInvalidatedByFileAddOrRemoveCounter.inc(packageBuildFiles.size());
pathsAddedOrRemovedInvalidatingBuildFiles.add(path.toString());
// Invalidate all the packages we found.
for (Path buildFile : packageBuildFiles) {
invalidatePath(
state, buildFile.resolve(cell.getBuckConfigView(ParserConfig.class).getBuildFileName()));
}
} | void function( DaemonicCellState state, Cell cell, BuildFileTree buildFiles, Path path) { LOG.verbose(STR, path, cell); Set<Path> packageBuildFiles = new HashSet<>(); Optional<Path> packageBuildFile = buildFiles.getBasePathOfAncestorTarget(path); if (packageBuildFile.isPresent()) { packageBuildFiles.add(cell.getFilesystem().resolve(packageBuildFile.get())); } if (cell.getBuckConfigView(ParserConfig.class).getPackageBoundaryEnforcementPolicy(path) != ParserConfig.PackageBoundaryEnforcement.ENFORCE) { while (packageBuildFile.isPresent() && packageBuildFile.get().getParent() != null) { packageBuildFile = buildFiles.getBasePathOfAncestorTarget(packageBuildFile.get().getParent()); if (packageBuildFile.isPresent()) { packageBuildFiles.add(packageBuildFile.get()); } } } if (packageBuildFiles.isEmpty()) { LOG.debug( STR, cell.getFilesystem().resolve(path).toAbsolutePath().toString()); return; } buildFilesInvalidatedByFileAddOrRemoveCounter.inc(packageBuildFiles.size()); pathsAddedOrRemovedInvalidatingBuildFiles.add(path.toString()); for (Path buildFile : packageBuildFiles) { invalidatePath( state, buildFile.resolve(cell.getBuckConfigView(ParserConfig.class).getBuildFileName())); } } | /**
* Finds the build file responsible for the given {@link Path} and invalidates all of the cached
* rules dependent on it.
*
* @param path A {@link Path}, relative to the project root and "contained" within the build file
* to find and invalidate.
*/ | Finds the build file responsible for the given <code>Path</code> and invalidates all of the cached rules dependent on it | invalidateContainingBuildFile | {
"repo_name": "zpao/buck",
"path": "src/com/facebook/buck/parser/DaemonicParserState.java",
"license": "apache-2.0",
"size": 31526
} | [
"com.facebook.buck.core.cell.Cell",
"com.facebook.buck.core.model.BuildFileTree",
"com.facebook.buck.parser.config.ParserConfig",
"java.nio.file.Path",
"java.util.HashSet",
"java.util.Optional",
"java.util.Set"
] | import com.facebook.buck.core.cell.Cell; import com.facebook.buck.core.model.BuildFileTree; import com.facebook.buck.parser.config.ParserConfig; import java.nio.file.Path; import java.util.HashSet; import java.util.Optional; import java.util.Set; | import com.facebook.buck.core.cell.*; import com.facebook.buck.core.model.*; import com.facebook.buck.parser.config.*; import java.nio.file.*; import java.util.*; | [
"com.facebook.buck",
"java.nio",
"java.util"
] | com.facebook.buck; java.nio; java.util; | 2,514,732 |
private void numberOfCombosNeeded()
{
if(dataMap.size() > 0)
{
Map map = dataMap;
while(map instanceof Map)
{
numberOfCombosNeeded++;
Set kSet = map.keySet();
Object[] kSetArray = kSet.toArray();
Object firstVal = map.get(kSetArray[0]);
if(firstVal instanceof Map)
{
map = (Map)firstVal;
}else
{
numberOfCombosNeeded++;
break;
}
}
}else
{
numberOfCombosNeeded = Integer.parseInt(noOfEmptyCombos);
}
}
| void function() { if(dataMap.size() > 0) { Map map = dataMap; while(map instanceof Map) { numberOfCombosNeeded++; Set kSet = map.keySet(); Object[] kSetArray = kSet.toArray(); Object firstVal = map.get(kSetArray[0]); if(firstVal instanceof Map) { map = (Map)firstVal; }else { numberOfCombosNeeded++; break; } } }else { numberOfCombosNeeded = Integer.parseInt(noOfEmptyCombos); } } | /**
* A utility function to find nunber of combos needed from a recursive map data structure.
*/ | A utility function to find nunber of combos needed from a recursive map data structure | numberOfCombosNeeded | {
"repo_name": "NCIP/wustl-common-package",
"path": "src/edu/wustl/common/util/tag/NLevelCustomCombo.java",
"license": "bsd-3-clause",
"size": 16933
} | [
"java.util.Map",
"java.util.Set"
] | import java.util.Map; import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 146,869 |
public void broadcast(String text) {
synchronized (connections) {
for (Iterator iterator = connections.iterator(); iterator.hasNext();) {
Connection connection = (Connection) iterator.next();
connection.send(text);
}
}
}
| void function(String text) { synchronized (connections) { for (Iterator iterator = connections.iterator(); iterator.hasNext();) { Connection connection = (Connection) iterator.next(); connection.send(text); } } } | /**
* send a message to all known connections
*
* @param text
* content of the message
*/ | send a message to all known connections | broadcast | {
"repo_name": "SergiyKolesnikov/fuji",
"path": "examples/Chat_casestudies/chat-maik-lampe/epmd_chat/build/Base/Base/Server.java",
"license": "lgpl-3.0",
"size": 2306
} | [
"java.util.Iterator"
] | import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 2,675,485 |
public static Map<TypeVariable<?>, Type> getTypeArguments(final ParameterizedType type) {
return getTypeArguments(type, getRawType(type), null);
}
/**
* <p>Gets the type arguments of a class/interface based on a subtype. For
* instance, this method will determine that both of the parameters for the
* interface {@link Map} are {@link Object} for the subtype
* {@link java.util.Properties Properties} even though the subtype does not
* directly implement the {@code Map} interface.</p>
* <p>This method returns {@code null} if {@code type} is not assignable to
* {@code toClass}. It returns an empty map if none of the classes or
* interfaces in its inheritance hierarchy specify any type arguments.</p>
* <p>A side effect of this method is that it also retrieves the type
* arguments for the classes and interfaces that are part of the hierarchy
* between {@code type} and {@code toClass}. So with the above
* example, this method will also determine that the type arguments for
* {@link java.util.Hashtable Hashtable} are also both {@code Object}.
* In cases where the interface specified by {@code toClass} is
* (indirectly) implemented more than once (e.g. where {@code toClass}
* specifies the interface {@link java.lang.Iterable Iterable} and
* {@code type} specifies a parameterized type that implements both
* {@link java.util.Set Set} and {@link java.util.Collection Collection}),
* this method will look at the inheritance hierarchy of only one of the
* implementations/subclasses; the first interface encountered that isn't a
* subinterface to one of the others in the {@code type} to
* {@code toClass} hierarchy.</p>
*
* @param type the type from which to determine the type parameters of
* {@code toClass}
* @param toClass the class whose type parameters are to be determined based
* on the subtype {@code type} | static Map<TypeVariable<?>, Type> function(final ParameterizedType type) { return getTypeArguments(type, getRawType(type), null); } /** * <p>Gets the type arguments of a class/interface based on a subtype. For * instance, this method will determine that both of the parameters for the * interface {@link Map} are {@link Object} for the subtype * {@link java.util.Properties Properties} even though the subtype does not * directly implement the {@code Map} interface.</p> * <p>This method returns {@code null} if {@code type} is not assignable to * {@code toClass}. It returns an empty map if none of the classes or * interfaces in its inheritance hierarchy specify any type arguments.</p> * <p>A side effect of this method is that it also retrieves the type * arguments for the classes and interfaces that are part of the hierarchy * between {@code type} and {@code toClass}. So with the above * example, this method will also determine that the type arguments for * {@link java.util.Hashtable Hashtable} are also both {@code Object}. * In cases where the interface specified by {@code toClass} is * (indirectly) implemented more than once (e.g. where {@code toClass} * specifies the interface {@link java.lang.Iterable Iterable} and * {@code type} specifies a parameterized type that implements both * {@link java.util.Set Set} and {@link java.util.Collection Collection}), * this method will look at the inheritance hierarchy of only one of the * implementations/subclasses; the first interface encountered that isn't a * subinterface to one of the others in the {@code type} to * {@code toClass} hierarchy.</p> * * @param type the type from which to determine the type parameters of * {@code toClass} * @param toClass the class whose type parameters are to be determined based * on the subtype {@code type} | /**
* <p>Retrieves all the type arguments for this parameterized type
* including owner hierarchy arguments such as
* {@code Outer<K,V>.Inner<T>.DeepInner<E>} .
* The arguments are returned in a
* {@link Map} specifying the argument type for each {@link TypeVariable}.
* </p>
*
* @param type specifies the subject parameterized type from which to
* harvest the parameters.
* @return a {@code Map} of the type arguments to their respective type
* variables.
*/ | Retrieves all the type arguments for this parameterized type including owner hierarchy arguments such as Outer.Inner.DeepInner . The arguments are returned in a <code>Map</code> specifying the argument type for each <code>TypeVariable</code>. | getTypeArguments | {
"repo_name": "rikles/commons-lang",
"path": "src/main/java/org/apache/commons/lang3/reflect/TypeUtils.java",
"license": "apache-2.0",
"size": 70048
} | [
"java.lang.reflect.ParameterizedType",
"java.lang.reflect.Type",
"java.lang.reflect.TypeVariable",
"java.util.Map",
"java.util.Set"
] | import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.util.Map; import java.util.Set; | import java.lang.reflect.*; import java.util.*; | [
"java.lang",
"java.util"
] | java.lang; java.util; | 2,126,463 |
public int onNotificationRemoved(@NonNull OpenNotification n);
}
/**
 * Creates an empty notification list.
 *
 * @param listener callback invoked when the list changes, or {@code null} for none
 */
public NotificationList(@Nullable OnNotificationListChangedListener listener) {
mListener = listener;
// small initial capacity; notification counts are typically low
mList = new ArrayList<>(10);
}
* Called when notification was removed from list.
*
* @param n removed notification
*/ | Called when notification was removed from list | onNotificationRemoved | {
"repo_name": "0359xiaodong/AcDisplay",
"path": "project/app/src/main/java/com/achep/acdisplay/notifications/NotificationList.java",
"license": "gpl-2.0",
"size": 6249
} | [
"android.support.annotation.NonNull",
"android.support.annotation.Nullable",
"java.util.ArrayList"
] | import android.support.annotation.NonNull; import android.support.annotation.Nullable; import java.util.ArrayList; | import android.support.annotation.*; import java.util.*; | [
"android.support",
"java.util"
] | android.support; java.util; | 2,073,501 |
/**
 * Tells whether an action must be performed before the editor is opened,
 * depending on the resource to edit and the original request parameters.
 *
 * @param resource the resource to be edited
 * @param dialog the dialog instance
 * @param originalParams the original request parameters as String passed to the editor
 * @return true if an action has to be performed before opening the editor
 * @throws Exception if something goes wrong
 */
boolean doPreAction(CmsResource resource, CmsDialog dialog, String originalParams) throws Exception;
* Returns if an action has to be performed before opening the editor depending on the resource to edit
* and eventual request parameter values.<p>
*
* @param resource the resource to be edited
* @param dialog the dialog instance
* @param originalParams the original request parameters as String passed to the editor
* @return true if an action has to be performed before opening the editor
* @throws Exception if something goes wrong
*/ | Returns if an action has to be performed before opening the editor depending on the resource to edit and eventual request parameter values | doPreAction | {
"repo_name": "mediaworx/opencms-core",
"path": "src/org/opencms/workplace/editors/I_CmsPreEditorActionDefinition.java",
"license": "lgpl-2.1",
"size": 2735
} | [
"org.opencms.file.CmsResource",
"org.opencms.workplace.CmsDialog"
] | import org.opencms.file.CmsResource; import org.opencms.workplace.CmsDialog; | import org.opencms.file.*; import org.opencms.workplace.*; | [
"org.opencms.file",
"org.opencms.workplace"
] | org.opencms.file; org.opencms.workplace; | 2,370,276 |
/**
 * Blocks until the next in-order data packet is available, then copies its
 * decoded payload into {@code buffer} and resets {@code bufferPointer}.
 *
 * @return true if data was loaded into the buffer; false if the stream was
 *         closed or the current thread was interrupted before data arrived
 * @throws IOException if a packet arrives out of sequence (the session is closed)
 * @throws SocketTimeoutException if a read timeout is configured and expires
 */
private synchronized boolean loadBuffer() throws IOException {
// wait until data is available or stream is closed
DataPacketExtension data = null;
try {
if (this.readTimeout == 0) {
// no timeout configured: poll in 1s slices so a concurrent close is noticed
while (data == null) {
if (isClosed && this.dataQueue.isEmpty()) {
return false;
}
data = this.dataQueue.poll(1000, TimeUnit.MILLISECONDS);
}
}
else {
data = this.dataQueue.poll(this.readTimeout, TimeUnit.MILLISECONDS);
if (data == null) {
throw new SocketTimeoutException();
}
}
}
catch (InterruptedException e) {
// Restore the interrupted status
Thread.currentThread().interrupt();
return false;
}
// handle sequence overflow (sequence is 16-bit and wraps after 65535)
if (this.seq == 65535) {
this.seq = -1;
}
// check if data packets sequence is successor of last seen sequence
long seq = data.getSeq();
if (seq - 1 != this.seq) {
// packets out of order; close stream/session
InBandBytestreamSession.this.close();
throw new IOException("Packets out of sequence");
}
else {
this.seq = seq;
}
// set buffer to decoded data
buffer = data.getDecodedData();
bufferPointer = 0;
return true;
}
* This method blocks until a data stanza(/packet) is received, the stream is closed or the current
* thread is interrupted.
*
* @return <code>true</code> if data was received, otherwise <code>false</code>
* @throws IOException if data packets are out of sequence
*/ | This method blocks until a data stanza(/packet) is received, the stream is closed or the current thread is interrupted | loadBuffer | {
"repo_name": "esl/Smack",
"path": "smack-extensions/src/main/java/org/jivesoftware/smackx/bytestreams/ibb/InBandBytestreamSession.java",
"license": "apache-2.0",
"size": 29879
} | [
"java.io.IOException",
"java.net.SocketTimeoutException",
"java.util.concurrent.TimeUnit",
"org.jivesoftware.smackx.bytestreams.ibb.packet.DataPacketExtension"
] | import java.io.IOException; import java.net.SocketTimeoutException; import java.util.concurrent.TimeUnit; import org.jivesoftware.smackx.bytestreams.ibb.packet.DataPacketExtension; | import java.io.*; import java.net.*; import java.util.concurrent.*; import org.jivesoftware.smackx.bytestreams.ibb.packet.*; | [
"java.io",
"java.net",
"java.util",
"org.jivesoftware.smackx"
] | java.io; java.net; java.util; org.jivesoftware.smackx; | 316,424 |
protected double booleanSelectivity(Optimizable optTable)
throws StandardException
{
TypeId typeId = null;
double retval = -1.0d;
int columnSide;
columnSide = columnOnOneSide(optTable);
if (columnSide == LEFT)
typeId = leftOperand.getTypeId();
else if (columnSide == RIGHT)
typeId = rightOperand.getTypeId();
if (typeId != null && (typeId.getJDBCTypeId() == Types.BIT ||
typeId.getJDBCTypeId() == Types.BOOLEAN))
retval = 0.5d;
return retval;
} | double function(Optimizable optTable) throws StandardException { TypeId typeId = null; double retval = -1.0d; int columnSide; columnSide = columnOnOneSide(optTable); if (columnSide == LEFT) typeId = leftOperand.getTypeId(); else if (columnSide == RIGHT) typeId = rightOperand.getTypeId(); if (typeId != null && (typeId.getJDBCTypeId() == Types.BIT typeId.getJDBCTypeId() == Types.BOOLEAN)) retval = 0.5d; return retval; } | /**
* Return 50% if this is a comparison with a boolean column, a negative
* selectivity otherwise.
*/ | Return 50% if this is a comparison with a boolean column, a negative selectivity otherwise | booleanSelectivity | {
"repo_name": "lpxz/grail-derby104",
"path": "java/engine/org/apache/derby/impl/sql/compile/BinaryRelationalOperatorNode.java",
"license": "apache-2.0",
"size": 52744
} | [
"java.sql.Types",
"org.apache.derby.iapi.error.StandardException",
"org.apache.derby.iapi.sql.compile.Optimizable",
"org.apache.derby.iapi.types.TypeId"
] | import java.sql.Types; import org.apache.derby.iapi.error.StandardException; import org.apache.derby.iapi.sql.compile.Optimizable; import org.apache.derby.iapi.types.TypeId; | import java.sql.*; import org.apache.derby.iapi.error.*; import org.apache.derby.iapi.sql.compile.*; import org.apache.derby.iapi.types.*; | [
"java.sql",
"org.apache.derby"
] | java.sql; org.apache.derby; | 410,982 |
@Test
public void getDigitsInteger() {
Field field = InvocationUtil.getField(ValidationConstraint.class, "noConstraint");
Assert.assertNull(AnnotationUtil.getDigitsInteger(field));
field = InvocationUtil.getField(ValidationConstraint.class, "digits");
Assert.assertTrue(AnnotationUtil.getDigitsInteger(field) == 1);
} | void function() { Field field = InvocationUtil.getField(ValidationConstraint.class, STR); Assert.assertNull(AnnotationUtil.getDigitsInteger(field)); field = InvocationUtil.getField(ValidationConstraint.class, STR); Assert.assertTrue(AnnotationUtil.getDigitsInteger(field) == 1); } | /**
* Test getDigitsInteger method.
*/ | Test getDigitsInteger method | getDigitsInteger | {
"repo_name": "qjafcunuas/jbromo",
"path": "jbromo-lib/src/test/java/org/jbromo/common/invocation/AnnotationUtilTest.java",
"license": "apache-2.0",
"size": 14957
} | [
"java.lang.reflect.Field",
"org.junit.Assert"
] | import java.lang.reflect.Field; import org.junit.Assert; | import java.lang.reflect.*; import org.junit.*; | [
"java.lang",
"org.junit"
] | java.lang; org.junit; | 1,368,314 |
public void changeMode(String filename, int mode)
throws FileResourceException {
String cmd = "chmod " + mode + " " + filename; // or something else
try {
ftpClient.site(cmd);
}
catch (Exception e) {
throw translateException("Cannot change the file permissions", e);
}
} | void function(String filename, int mode) throws FileResourceException { String cmd = STR + mode + " " + filename; try { ftpClient.site(cmd); } catch (Exception e) { throw translateException(STR, e); } } | /**
* Changes the permissions on the file if authorized to do so
*/ | Changes the permissions on the file if authorized to do so | changeMode | {
"repo_name": "swift-lang/swift-k",
"path": "cogkit/modules/provider-gt2/src/org/globus/cog/abstraction/impl/file/ftp/FileResourceImpl.java",
"license": "apache-2.0",
"size": 20617
} | [
"org.globus.cog.abstraction.impl.file.FileResourceException"
] | import org.globus.cog.abstraction.impl.file.FileResourceException; | import org.globus.cog.abstraction.impl.file.*; | [
"org.globus.cog"
] | org.globus.cog; | 1,078,204 |
@Override
public B permission(@Nonnull final FsPermission perm) {
checkNotNull(perm);
permission = perm;
return getThisBuilder();
} | B function(@Nonnull final FsPermission perm) { checkNotNull(perm); permission = perm; return getThisBuilder(); } | /**
* Set permission for the file.
*/ | Set permission for the file | permission | {
"repo_name": "steveloughran/hadoop",
"path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java",
"license": "apache-2.0",
"size": 5506
} | [
"com.google.common.base.Preconditions",
"javax.annotation.Nonnull",
"org.apache.hadoop.fs.permission.FsPermission"
] | import com.google.common.base.Preconditions; import javax.annotation.Nonnull; import org.apache.hadoop.fs.permission.FsPermission; | import com.google.common.base.*; import javax.annotation.*; import org.apache.hadoop.fs.permission.*; | [
"com.google.common",
"javax.annotation",
"org.apache.hadoop"
] | com.google.common; javax.annotation; org.apache.hadoop; | 327,298 |
@Override
public void removeInstanceSnapshot(final MachineSource machineSource) throws SnapshotException {
// use registry API directly because docker doesn't have such API yet
// https://github.com/docker/docker-registry/issues/45
final DockerMachineSource dockerMachineSource;
try {
dockerMachineSource = new DockerMachineSource(machineSource);
} catch (MachineException e) {
throw new SnapshotException(e);
}
if (!snapshotUseRegistry) {
try {
docker.removeImage(RemoveImageParams.create(dockerMachineSource.getLocation(false)));
} catch (IOException ignore) {
}
return;
}
final String repository = dockerMachineSource.getRepository();
if (repository == null) {
LOG.error("Failed to remove instance snapshot: invalid machine source: {}", dockerMachineSource);
throw new SnapshotException("Snapshot removing failed. Snapshot attributes are not valid");
}
if (DEFAULT_REGISTRY_SYNONYMS.contains(dockerMachineSource.getRegistry())) {
removeSnapshotFromDockerHub(repository);
} else {
removeSnapshotFromRegistry(dockerMachineSource);
}
} | void function(final MachineSource machineSource) throws SnapshotException { final DockerMachineSource dockerMachineSource; try { dockerMachineSource = new DockerMachineSource(machineSource); } catch (MachineException e) { throw new SnapshotException(e); } if (!snapshotUseRegistry) { try { docker.removeImage(RemoveImageParams.create(dockerMachineSource.getLocation(false))); } catch (IOException ignore) { } return; } final String repository = dockerMachineSource.getRepository(); if (repository == null) { LOG.error(STR, dockerMachineSource); throw new SnapshotException(STR); } if (DEFAULT_REGISTRY_SYNONYMS.contains(dockerMachineSource.getRegistry())) { removeSnapshotFromDockerHub(repository); } else { removeSnapshotFromRegistry(dockerMachineSource); } } | /**
* Removes snapshot of the instance in implementation specific way.
*
* @param machineSource
* contains implementation specific key of the snapshot of the instance that should be removed
* @throws SnapshotException
* if exception occurs on instance snapshot removal
*/ | Removes snapshot of the instance in implementation specific way | removeInstanceSnapshot | {
"repo_name": "snjeza/che",
"path": "plugins/plugin-docker/che-plugin-docker-machine/src/main/java/org/eclipse/che/plugin/docker/machine/DockerInstanceProvider.java",
"license": "epl-1.0",
"size": 10540
} | [
"java.io.IOException",
"org.eclipse.che.api.core.model.machine.MachineSource",
"org.eclipse.che.api.machine.server.exception.MachineException",
"org.eclipse.che.api.machine.server.exception.SnapshotException",
"org.eclipse.che.plugin.docker.client.params.RemoveImageParams"
] | import java.io.IOException; import org.eclipse.che.api.core.model.machine.MachineSource; import org.eclipse.che.api.machine.server.exception.MachineException; import org.eclipse.che.api.machine.server.exception.SnapshotException; import org.eclipse.che.plugin.docker.client.params.RemoveImageParams; | import java.io.*; import org.eclipse.che.api.core.model.machine.*; import org.eclipse.che.api.machine.server.exception.*; import org.eclipse.che.plugin.docker.client.params.*; | [
"java.io",
"org.eclipse.che"
] | java.io; org.eclipse.che; | 305,727 |
/**
 * Verifies and assigns the root region, retrying on transient failures.
 * Retry count and wait time come from configuration
 * ("hbase.catalog.verification.retries" / "hbase.catalog.verification.timeout").
 * A KeeperException, an interrupt, or exhausting all retries aborts the server.
 *
 * @throws IOException if the server is aborting or the thread was interrupted
 */
private void verifyAndAssignRootWithRetries() throws IOException {
int iTimes = this.server.getConfiguration().getInt(
"hbase.catalog.verification.retries", 10);
long waitTime = this.server.getConfiguration().getLong(
"hbase.catalog.verification.timeout", 1000);
int iFlag = 0;
while (true) {
try {
verifyAndAssignRoot();
break;
} catch (KeeperException e) {
// ZooKeeper failures are not retried; abort immediately
this.server.abort("In server shutdown processing, assigning root", e);
throw new IOException("Aborting", e);
} catch (Exception e) {
if (iFlag >= iTimes) {
this.server.abort("verifyAndAssignRoot failed after" + iTimes
+ " times retries, aborting", e);
throw new IOException("Aborting", e);
}
try {
Thread.sleep(waitTime);
} catch (InterruptedException e1) {
LOG.warn("Interrupted when is the thread sleep", e1);
// restore interrupt status before propagating
Thread.currentThread().interrupt();
throw new IOException("Interrupted", e1);
}
iFlag++;
}
}
}
* Failed many times, shutdown processing
* @throws IOException
*/ | Failed many times, shutdown processing | verifyAndAssignRootWithRetries | {
"repo_name": "matteobertozzi/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/ServerShutdownHandler.java",
"license": "apache-2.0",
"size": 22286
} | [
"java.io.IOException",
"org.apache.zookeeper.KeeperException"
] | import java.io.IOException; import org.apache.zookeeper.KeeperException; | import java.io.*; import org.apache.zookeeper.*; | [
"java.io",
"org.apache.zookeeper"
] | java.io; org.apache.zookeeper; | 1,205,087 |
public Collection<DbAndTable> getTables() throws IOException {
List<DbAndTable> tables = Lists.newArrayList(); | Collection<DbAndTable> function() throws IOException { List<DbAndTable> tables = Lists.newArrayList(); | /**
* Get all tables in db with given table pattern.
*/ | Get all tables in db with given table pattern | getTables | {
"repo_name": "jinhyukchang/gobblin",
"path": "gobblin-data-management/src/main/java/org/apache/gobblin/data/management/copy/hive/HiveDatasetFinder.java",
"license": "apache-2.0",
"size": 14532
} | [
"com.google.common.collect.Lists",
"java.io.IOException",
"java.util.Collection",
"java.util.List"
] | import com.google.common.collect.Lists; import java.io.IOException; import java.util.Collection; import java.util.List; | import com.google.common.collect.*; import java.io.*; import java.util.*; | [
"com.google.common",
"java.io",
"java.util"
] | com.google.common; java.io; java.util; | 1,521,951 |
private boolean servicesConnected() {
// Check that Google Play services is available
int resultCode = GooglePlayServicesUtil
.isGooglePlayServicesAvailable(this);
// If Google Play services is available
if (ConnectionResult.SUCCESS == resultCode) {
// In debug mode, log the status
Log.d(GeofenceUtils.APPTAG,
getString(R.string.play_services_available));
// Continue
return true;
// Google Play services was not available for some reason
}
else {
// Display an error dialog
Dialog dialog = GooglePlayServicesUtil.getErrorDialog(resultCode,
this, 0);
if (dialog != null) {
ErrorDialogFragment errorFragment = new ErrorDialogFragment();
errorFragment.setDialog(dialog);
errorFragment.show(getFragmentManager(), GeofenceUtils.APPTAG);
}
return false;
}
} | boolean function() { int resultCode = GooglePlayServicesUtil .isGooglePlayServicesAvailable(this); if (ConnectionResult.SUCCESS == resultCode) { Log.d(GeofenceUtils.APPTAG, getString(R.string.play_services_available)); return true; } else { Dialog dialog = GooglePlayServicesUtil.getErrorDialog(resultCode, this, 0); if (dialog != null) { ErrorDialogFragment errorFragment = new ErrorDialogFragment(); errorFragment.setDialog(dialog); errorFragment.show(getFragmentManager(), GeofenceUtils.APPTAG); } return false; } } | /**
* Verify that Google Play services is available before making a request.
*
* @return true if Google Play services is available, otherwise false
*/ | Verify that Google Play services is available before making a request | servicesConnected | {
"repo_name": "zycbobby/NotePad",
"path": "app/src/play/java/com/nononsenseapps/notepad/ActivityLocation.java",
"license": "gpl-3.0",
"size": 25899
} | [
"android.app.Dialog",
"android.util.Log",
"com.google.android.gms.common.ConnectionResult",
"com.google.android.gms.common.GooglePlayServicesUtil",
"com.nononsenseapps.util.GeofenceUtils"
] | import android.app.Dialog; import android.util.Log; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GooglePlayServicesUtil; import com.nononsenseapps.util.GeofenceUtils; | import android.app.*; import android.util.*; import com.google.android.gms.common.*; import com.nononsenseapps.util.*; | [
"android.app",
"android.util",
"com.google.android",
"com.nononsenseapps.util"
] | android.app; android.util; com.google.android; com.nononsenseapps.util; | 1,134,090 |
/**
 * Registers an assertion that applies the given {@link SerializableMatcher}
 * to the collection's elements as an {@code Iterable}.
 *
 * @param matcher matcher applied to all elements of the collection
 * @return this assert, for method chaining
 */
PCollectionContentsAssert<T> satisfies(
final SerializableMatcher<Iterable<? extends T>> matcher) {
// Safe covariant cast. Could be elided by changing a lot of this file to use
// more flexible bounds.
@SuppressWarnings({
"rawtypes", // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
"unchecked"
})
SerializableFunction<Iterable<T>, Void> checkerFn =
(SerializableFunction) new MatcherCheckerFn<>(matcher);
actual.apply(
"PAssert$" + (assertCount++),
new GroupThenAssert<>(checkerFn, rewindowingStrategy, paneExtractor, site));
return this;
}
protected static class MatcherCheckerFn<T> implements SerializableFunction<T, Void> {
private SerializableMatcher<T> matcher;
public MatcherCheckerFn(SerializableMatcher<T> matcher) {
this.matcher = matcher;
} | PCollectionContentsAssert<T> satisfies( final SerializableMatcher<Iterable<? extends T>> matcher) { @SuppressWarnings({ STR, STR }) SerializableFunction<Iterable<T>, Void> checkerFn = (SerializableFunction) new MatcherCheckerFn<>(matcher); actual.apply( STR + (assertCount++), new GroupThenAssert<>(checkerFn, rewindowingStrategy, paneExtractor, site)); return this; } protected static class MatcherCheckerFn<T> implements SerializableFunction<T, Void> { private SerializableMatcher<T> matcher; public MatcherCheckerFn(SerializableMatcher<T> matcher) { this.matcher = matcher; } | /**
* Applies a {@link SerializableMatcher} to check the elements of the {@code Iterable}.
*
* <p>Returns this {@code IterableAssert}.
*/ | Applies a <code>SerializableMatcher</code> to check the elements of the Iterable. Returns this IterableAssert | satisfies | {
"repo_name": "robertwb/incubator-beam",
"path": "sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java",
"license": "apache-2.0",
"size": 70194
} | [
"org.apache.beam.sdk.transforms.SerializableFunction"
] | import org.apache.beam.sdk.transforms.SerializableFunction; | import org.apache.beam.sdk.transforms.*; | [
"org.apache.beam"
] | org.apache.beam; | 474,997 |
/**
 * Collapses the given items, firing a collapse event for each one.
 * Items that are already collapsed are left untouched.
 *
 * @param items the collection of items to collapse
 */
public void collapse(Collection<T> items) {
    final HierarchicalDataCommunicator<T> dataCommunicator = getDataCommunicator();
    for (T item : items) {
        if (dataCommunicator.isExpanded(item)) {
            dataCommunicator.collapse(item);
            fireCollapseEvent(item, false);
        }
    }
}
/**
* Collapse the given items and their children recursively until the given
* depth.
* <p>
* {@code depth} describes the maximum distance between a given item and its
* descendant, meaning that {@code collapseRecursively(items, 0)} collapses
* only the given items while {@code collapseRecursively(items, 2)} | void function(Collection<T> items) { HierarchicalDataCommunicator<T> communicator = getDataCommunicator(); items.forEach(item -> { if (communicator.isExpanded(item)) { communicator.collapse(item); fireCollapseEvent(item, false); } }); } /** * Collapse the given items and their children recursively until the given * depth. * <p> * {@code depth} describes the maximum distance between a given item and its * descendant, meaning that {@code collapseRecursively(items, 0)} collapses * only the given items while {@code collapseRecursively(items, 2)} | /**
* Collapse the given items.
* <p>
* For items that are already collapsed, does nothing.
*
* @param items
* the collection of items to collapse
*/ | Collapse the given items. For items that are already collapsed, does nothing | collapse | {
"repo_name": "mstahv/framework",
"path": "server/src/main/java/com/vaadin/ui/TreeGrid.java",
"license": "apache-2.0",
"size": 22963
} | [
"com.vaadin.data.provider.HierarchicalDataCommunicator",
"java.util.Collection"
] | import com.vaadin.data.provider.HierarchicalDataCommunicator; import java.util.Collection; | import com.vaadin.data.provider.*; import java.util.*; | [
"com.vaadin.data",
"java.util"
] | com.vaadin.data; java.util; | 2,612,969 |
@Nullable
public MusicWorld create(World world, MusicPlayList playList) {
PreCon.notNull(world);
PreCon.notNull(playList);
if (contains(world.getName()))
return null;
MusicWorld musicWorld = new MusicWorld(world,
playList.getName(), getNode(world.getName()));
add(musicWorld);
return musicWorld;
} | MusicWorld function(World world, MusicPlayList playList) { PreCon.notNull(world); PreCon.notNull(playList); if (contains(world.getName())) return null; MusicWorld musicWorld = new MusicWorld(world, playList.getName(), getNode(world.getName())); add(musicWorld); return musicWorld; } | /**
* Create a new world playlist binding.
*
* @param world The world.
* @param playList The play list.
*
* @return The music world playlist binding that was created or null
* if the world already has a binding.
*/ | Create a new world playlist binding | create | {
"repo_name": "JCThePants/MusicalRegions",
"path": "src/com/jcwhatever/musical/worlds/WorldManager.java",
"license": "mit",
"size": 2419
} | [
"com.jcwhatever.musical.playlists.MusicPlayList",
"com.jcwhatever.nucleus.utils.PreCon",
"org.bukkit.World"
] | import com.jcwhatever.musical.playlists.MusicPlayList; import com.jcwhatever.nucleus.utils.PreCon; import org.bukkit.World; | import com.jcwhatever.musical.playlists.*; import com.jcwhatever.nucleus.utils.*; import org.bukkit.*; | [
"com.jcwhatever.musical",
"com.jcwhatever.nucleus",
"org.bukkit"
] | com.jcwhatever.musical; com.jcwhatever.nucleus; org.bukkit; | 2,094,577 |
public PhraseSuggestionBuilder collateParams(Map<String, Object> collateParams) {
Objects.requireNonNull(collateParams, "collate parameters cannot be null.");
this.collateParams = new HashMap<>(collateParams);
return this;
} | PhraseSuggestionBuilder function(Map<String, Object> collateParams) { Objects.requireNonNull(collateParams, STR); this.collateParams = new HashMap<>(collateParams); return this; } | /**
* Adds additional parameters for collate scripts. Previously added parameters on the
* same builder will be overwritten.
*/ | Adds additional parameters for collate scripts. Previously added parameters on the same builder will be overwritten | collateParams | {
"repo_name": "xuzha/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java",
"license": "apache-2.0",
"size": 32412
} | [
"java.util.HashMap",
"java.util.Map",
"java.util.Objects"
] | import java.util.HashMap; import java.util.Map; import java.util.Objects; | import java.util.*; | [
"java.util"
] | java.util; | 335,325 |
/**
 * Extracts from the matching word the regex capture groups whose indexes are
 * listed in {@code references}, preserving the order of the index array.
 * Every index value of -1 or lower yields an empty string; an in-range
 * index yields the corresponding group; an out-of-range index yields nothing.
 *
 * @param sobituvSona word that matches this regular-expression template
 * @param references indexes of the capture groups to extract
 * @return list of the extracted substrings
 */
public List<String> parsiSobitunudAlamgrupid(AjavtSona sobituvSona, int references []){
    List<String> extracted = new LinkedList<String>();
    if (references == null){
        return extracted;
    }
    Matcher matcher = regexp.matcher(sobituvSona.getAlgSonaYmbritsevateMarkideta());
    if (matcher.matches()){
        // Group indexing example for ((A)(B(C))):
        //   1 -> ((A)(B(C))), 2 -> (A), 3 -> (B(C)), 4 -> (C)
        for (int reference : references) {
            if (reference < 0){
                extracted.add("");
            } else if (reference <= matcher.groupCount()){
                extracted.add(matcher.group(reference));
            }
        }
    }
    return extracted;
}
| List<String> function(AjavtSona sobituvSona, int references []){ List<String> alamSoned = new LinkedList<String>(); if (references != null){ Matcher m = regexp.matcher(sobituvSona.getAlgSonaYmbritsevateMarkideta()); if (m.matches()){ for (int i = 0; i < references.length; i++) { int reference = references[i]; if (reference > -1){ if (reference <= m.groupCount()){ alamSoned.add( m.group(reference) ); } } else { alamSoned.add(""); } } } } return alamSoned; } | /**
* Parsib selle regulaaravaldis-malliga sobituvast sonast <tt>viimaneSobitunudSona</tt> k6ik alamgrupid,
* mille indeksid on toodud massiivis <tt>references</tt>. Tagastab listi parsitud alams6nedest, sellises
* j2rjekorras, nagu indeksid olid toodud massiivis <tt>references</tt>. NB! Iga massiivi <tt>references</tt>
* element, mille v22rtus < -1, saab uues massiivis v22rtuse <tt>""</tt>.
*
* @param sobituvSona sona, mis sobitub antud regulaaravaldis-malliga
* @param references regulaaravaldise poolt eraldatud alamgruppide indeksid, mis tuleb v2lja parsida
* @return tagastab listi parsitud alams6nedest
*/ | Parsib selle regulaaravaldis-malliga sobituvast sonast viimaneSobitunudSona k6ik alamgrupid, mille indeksid on toodud massiivis references. Tagastab listi parsitud alams6nedest, sellises j2rjekorras, nagu indeksid olid toodud massiivis references. NB! Iga massiivi references element, mille v22rtus < -1, saab uues massiivis v22rtuse "" | parsiSobitunudAlamgrupid | {
"repo_name": "soras/Ajavt",
"path": "src/ee/ut/soras/ajavtV2/mudel/sonamallid/RegExpSonaMall.java",
"license": "gpl-2.0",
"size": 4470
} | [
"ee.ut.soras.ajavtV2.mudel.AjavtSona",
"java.util.LinkedList",
"java.util.List",
"java.util.regex.Matcher"
] | import ee.ut.soras.ajavtV2.mudel.AjavtSona; import java.util.LinkedList; import java.util.List; import java.util.regex.Matcher; | import ee.ut.soras.*; import java.util.*; import java.util.regex.*; | [
"ee.ut.soras",
"java.util"
] | ee.ut.soras; java.util; | 2,080,715 |
/**
 * Returns a serializable converter between strings and floats, backed by
 * {@code Float.valueOf} and {@code Float.toString()}.
 *
 * @return the shared converter instance
 */
@Beta
public static Converter<String, Float> stringConverter() {
return FloatConverter.INSTANCE;
}
/**
* Returns an array containing the same values as {@code array}, but
* guaranteed to be of a specified minimum length. If {@code array} already
* has a length of at least {@code minLength}, it is returned directly.
* Otherwise, a new array of size {@code minLength + padding} is returned,
* containing the values of {@code array}, and zeroes in the remaining places.
*
* @param array the source array
* @param minLength the minimum length the returned array must guarantee
* @param padding an extra amount to "grow" the array by if growth is
* necessary
* @throws IllegalArgumentException if {@code minLength} or {@code padding} is
* negative
* @return an array containing the values of {@code array}, with guaranteed
* minimum length {@code minLength} | static Converter<String, Float> function() { return FloatConverter.INSTANCE; } /** * Returns an array containing the same values as {@code array}, but * guaranteed to be of a specified minimum length. If {@code array} already * has a length of at least {@code minLength}, it is returned directly. * Otherwise, a new array of size {@code minLength + padding} is returned, * containing the values of {@code array}, and zeroes in the remaining places. * * @param array the source array * @param minLength the minimum length the returned array must guarantee * @param padding an extra amount to "grow" the array by if growth is * necessary * @throws IllegalArgumentException if {@code minLength} or {@code padding} is * negative * @return an array containing the values of {@code array}, with guaranteed * minimum length {@code minLength} | /**
* Returns a serializable converter object that converts between strings and
* floats using {@link Float#valueOf} and {@link Float#toString()}.
*
* @since 16.0
*/ | Returns a serializable converter object that converts between strings and floats using <code>Float#valueOf</code> and <code>Float#toString()</code> | stringConverter | {
"repo_name": "10045125/guava",
"path": "guava-gwt/src-super/com/google/common/primitives/super/com/google/common/primitives/Floats.java",
"license": "apache-2.0",
"size": 18997
} | [
"com.google.common.base.Converter"
] | import com.google.common.base.Converter; | import com.google.common.base.*; | [
"com.google.common"
] | com.google.common; | 2,626,769 |
/**
 * Characterization test: incrementing {@code deltasPrepared} is only
 * possible by invoking {@code endDeltaPrepared}.
 */
@Test
public void endDeltaPreparedIncrementsDeltasPrepared() {
// act: record one prepared delta
cachePerfStats.endDeltaPrepared(1);
// assert: the deltasPrepared statistic now reads 1
assertThat(statistics.getInt(deltasPreparedId)).isEqualTo(1);
}
} | void function() { cachePerfStats.endDeltaPrepared(1); assertThat(statistics.getInt(deltasPreparedId)).isEqualTo(1); } | /**
* Characterization test: Note that the only way to increment {@code deltasPrepared} is to invoke
* {@code endDeltaPrepared}.
*/ | Characterization test: Note that the only way to increment deltasPrepared is to invoke endDeltaPrepared | endDeltaPreparedIncrementsDeltasPrepared | {
"repo_name": "davebarnes97/geode",
"path": "geode-core/src/test/java/org/apache/geode/internal/cache/CachePerfStatsTest.java",
"license": "apache-2.0",
"size": 34945
} | [
"org.assertj.core.api.Assertions"
] | import org.assertj.core.api.Assertions; | import org.assertj.core.api.*; | [
"org.assertj.core"
] | org.assertj.core; | 996,718 |
public void showForm(PDFormXObject form) throws IOException {
if (currentPage == null) {
throw new IllegalStateException("No current page, call " + "#processChildStream(PDContentStream, PDPage) instead");
}
processStream(form);
} | void function(PDFormXObject form) throws IOException { if (currentPage == null) { throw new IllegalStateException(STR + STR); } processStream(form); } | /**
* Shows a form from the content stream.
*
* @param form form XObject
* @throws IOException if the form cannot be processed
*/ | Shows a form from the content stream | showForm | {
"repo_name": "gavanx/pdflearn",
"path": "pdfbox/src/main/java/org/apache/pdfbox/contentstream/PDFStreamEngine.java",
"license": "apache-2.0",
"size": 32281
} | [
"java.io.IOException",
"org.apache.pdfbox.pdmodel.graphics.form.PDFormXObject"
] | import java.io.IOException; import org.apache.pdfbox.pdmodel.graphics.form.PDFormXObject; | import java.io.*; import org.apache.pdfbox.pdmodel.graphics.form.*; | [
"java.io",
"org.apache.pdfbox"
] | java.io; org.apache.pdfbox; | 1,436,920 |
public Observable<ServiceResponse<Page<VirtualMachineInner>>> listByResourceGroupNextSinglePageAsync(final String nextPageLink) {
if (nextPageLink == null) {
throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
} | Observable<ServiceResponse<Page<VirtualMachineInner>>> function(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException(STR); } | /**
* Lists all of the virtual machines in the specified resource group. Use the nextLink property in the response to get the next page of virtual machines.
*
ServiceResponse<PageImpl1<VirtualMachineInner>> * @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the PagedList<VirtualMachineInner> object wrapped in {@link ServiceResponse} if successful.
*/ | Lists all of the virtual machines in the specified resource group. Use the nextLink property in the response to get the next page of virtual machines | listByResourceGroupNextSinglePageAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/compute/mgmt-v2020_06_01/src/main/java/com/microsoft/azure/management/compute/v2020_06_01/implementation/VirtualMachinesInner.java",
"license": "mit",
"size": 261375
} | [
"com.microsoft.azure.Page",
"com.microsoft.rest.ServiceResponse"
] | import com.microsoft.azure.Page; import com.microsoft.rest.ServiceResponse; | import com.microsoft.azure.*; import com.microsoft.rest.*; | [
"com.microsoft.azure",
"com.microsoft.rest"
] | com.microsoft.azure; com.microsoft.rest; | 1,926,652 |
public Set<RexNode> getExpressionLineage(Join rel, RelMetadataQuery mq,
RexNode outputExpression) {
final RexBuilder rexBuilder = rel.getCluster().getRexBuilder();
final RelNode leftInput = rel.getLeft();
final RelNode rightInput = rel.getRight();
final int nLeftColumns = leftInput.getRowType().getFieldList().size();
// Extract input fields referenced by expression
final ImmutableBitSet inputFieldsUsed = extractInputRefs(outputExpression);
if (rel.getJoinType().isOuterJoin()) {
// If we reference the inner side, we will bail out
if (rel.getJoinType() == JoinRelType.LEFT) {
ImmutableBitSet rightFields = ImmutableBitSet.range(
nLeftColumns, rel.getRowType().getFieldCount());
if (inputFieldsUsed.intersects(rightFields)) {
// We cannot map origin of this expression.
return null;
}
} else if (rel.getJoinType() == JoinRelType.RIGHT) {
ImmutableBitSet leftFields = ImmutableBitSet.range(
0, nLeftColumns);
if (inputFieldsUsed.intersects(leftFields)) {
// We cannot map origin of this expression.
return null;
}
} else {
// We cannot map origin of this expression.
return null;
}
}
// Gather table references
final Set<RelTableRef> leftTableRefs = mq.getTableReferences(leftInput);
if (leftTableRefs == null) {
// Bail out
return null;
}
final Set<RelTableRef> rightTableRefs = mq.getTableReferences(rightInput);
if (rightTableRefs == null) {
// Bail out
return null;
}
final Multimap<List<String>, RelTableRef> qualifiedNamesToRefs = HashMultimap.create();
final Map<RelTableRef, RelTableRef> currentTablesMapping = new HashMap<>();
for (RelTableRef leftRef : leftTableRefs) {
qualifiedNamesToRefs.put(leftRef.getQualifiedName(), leftRef);
}
for (RelTableRef rightRef : rightTableRefs) {
int shift = 0;
Collection<RelTableRef> lRefs = qualifiedNamesToRefs.get(
rightRef.getQualifiedName());
if (lRefs != null) {
shift = lRefs.size();
}
currentTablesMapping.put(rightRef,
RelTableRef.of(rightRef.getTable(), shift + rightRef.getEntityNumber()));
}
// Infer column origin expressions for given references
final Map<RexInputRef, Set<RexNode>> mapping = new LinkedHashMap<>();
for (int idx : inputFieldsUsed) {
if (idx < nLeftColumns) {
final RexInputRef inputRef = RexInputRef.of(idx, leftInput.getRowType().getFieldList());
final Set<RexNode> originalExprs = mq.getExpressionLineage(leftInput, inputRef);
if (originalExprs == null) {
// Bail out
return null;
}
// Left input references remain unchanged
mapping.put(RexInputRef.of(idx, rel.getRowType().getFieldList()), originalExprs);
} else {
// Right input.
final RexInputRef inputRef = RexInputRef.of(idx - nLeftColumns,
rightInput.getRowType().getFieldList());
final Set<RexNode> originalExprs = mq.getExpressionLineage(rightInput, inputRef);
if (originalExprs == null) {
// Bail out
return null;
}
// Right input references might need to be updated if there are
// table names clashes with left input
final RelDataType fullRowType = SqlValidatorUtil.createJoinType(
rexBuilder.getTypeFactory(),
rel.getLeft().getRowType(),
rel.getRight().getRowType(),
null,
ImmutableList.of());
final Set<RexNode> updatedExprs = ImmutableSet.copyOf(
Iterables.transform(originalExprs, e ->
RexUtil.swapTableReferences(rexBuilder, e,
currentTablesMapping)));
mapping.put(RexInputRef.of(idx, fullRowType), updatedExprs);
}
}
// Return result
return createAllPossibleExpressions(rexBuilder, outputExpression, mapping);
} | Set<RexNode> function(Join rel, RelMetadataQuery mq, RexNode outputExpression) { final RexBuilder rexBuilder = rel.getCluster().getRexBuilder(); final RelNode leftInput = rel.getLeft(); final RelNode rightInput = rel.getRight(); final int nLeftColumns = leftInput.getRowType().getFieldList().size(); final ImmutableBitSet inputFieldsUsed = extractInputRefs(outputExpression); if (rel.getJoinType().isOuterJoin()) { if (rel.getJoinType() == JoinRelType.LEFT) { ImmutableBitSet rightFields = ImmutableBitSet.range( nLeftColumns, rel.getRowType().getFieldCount()); if (inputFieldsUsed.intersects(rightFields)) { return null; } } else if (rel.getJoinType() == JoinRelType.RIGHT) { ImmutableBitSet leftFields = ImmutableBitSet.range( 0, nLeftColumns); if (inputFieldsUsed.intersects(leftFields)) { return null; } } else { return null; } } final Set<RelTableRef> leftTableRefs = mq.getTableReferences(leftInput); if (leftTableRefs == null) { return null; } final Set<RelTableRef> rightTableRefs = mq.getTableReferences(rightInput); if (rightTableRefs == null) { return null; } final Multimap<List<String>, RelTableRef> qualifiedNamesToRefs = HashMultimap.create(); final Map<RelTableRef, RelTableRef> currentTablesMapping = new HashMap<>(); for (RelTableRef leftRef : leftTableRefs) { qualifiedNamesToRefs.put(leftRef.getQualifiedName(), leftRef); } for (RelTableRef rightRef : rightTableRefs) { int shift = 0; Collection<RelTableRef> lRefs = qualifiedNamesToRefs.get( rightRef.getQualifiedName()); if (lRefs != null) { shift = lRefs.size(); } currentTablesMapping.put(rightRef, RelTableRef.of(rightRef.getTable(), shift + rightRef.getEntityNumber())); } final Map<RexInputRef, Set<RexNode>> mapping = new LinkedHashMap<>(); for (int idx : inputFieldsUsed) { if (idx < nLeftColumns) { final RexInputRef inputRef = RexInputRef.of(idx, leftInput.getRowType().getFieldList()); final Set<RexNode> originalExprs = mq.getExpressionLineage(leftInput, inputRef); if (originalExprs == null) { return null; } 
mapping.put(RexInputRef.of(idx, rel.getRowType().getFieldList()), originalExprs); } else { final RexInputRef inputRef = RexInputRef.of(idx - nLeftColumns, rightInput.getRowType().getFieldList()); final Set<RexNode> originalExprs = mq.getExpressionLineage(rightInput, inputRef); if (originalExprs == null) { return null; } final RelDataType fullRowType = SqlValidatorUtil.createJoinType( rexBuilder.getTypeFactory(), rel.getLeft().getRowType(), rel.getRight().getRowType(), null, ImmutableList.of()); final Set<RexNode> updatedExprs = ImmutableSet.copyOf( Iterables.transform(originalExprs, e -> RexUtil.swapTableReferences(rexBuilder, e, currentTablesMapping))); mapping.put(RexInputRef.of(idx, fullRowType), updatedExprs); } } return createAllPossibleExpressions(rexBuilder, outputExpression, mapping); } | /**
* Expression lineage from {@link Join}.
*
* <p>We only extract the lineage for INNER joins.
*/ | Expression lineage from <code>Join</code>. We only extract the lineage for INNER joins | getExpressionLineage | {
"repo_name": "xhoong/incubator-calcite",
"path": "core/src/main/java/org/apache/calcite/rel/metadata/RelMdExpressionLineage.java",
"license": "apache-2.0",
"size": 18850
} | [
"com.google.common.collect.HashMultimap",
"com.google.common.collect.ImmutableList",
"com.google.common.collect.ImmutableSet",
"com.google.common.collect.Iterables",
"com.google.common.collect.Multimap",
"java.util.Collection",
"java.util.HashMap",
"java.util.LinkedHashMap",
"java.util.List",
"java.util.Map",
"java.util.Set",
"org.apache.calcite.rel.RelNode",
"org.apache.calcite.rel.core.Join",
"org.apache.calcite.rel.core.JoinRelType",
"org.apache.calcite.rel.type.RelDataType",
"org.apache.calcite.rex.RexBuilder",
"org.apache.calcite.rex.RexInputRef",
"org.apache.calcite.rex.RexNode",
"org.apache.calcite.rex.RexTableInputRef",
"org.apache.calcite.rex.RexUtil",
"org.apache.calcite.sql.validate.SqlValidatorUtil",
"org.apache.calcite.util.ImmutableBitSet"
] | import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Multimap; import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.Join; import org.apache.calcite.rel.core.JoinRelType; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexTableInputRef; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.sql.validate.SqlValidatorUtil; import org.apache.calcite.util.ImmutableBitSet; | import com.google.common.collect.*; import java.util.*; import org.apache.calcite.rel.*; import org.apache.calcite.rel.core.*; import org.apache.calcite.rel.type.*; import org.apache.calcite.rex.*; import org.apache.calcite.sql.validate.*; import org.apache.calcite.util.*; | [
"com.google.common",
"java.util",
"org.apache.calcite"
] | com.google.common; java.util; org.apache.calcite; | 2,306,805 |
public void addDecoratorOnce(IDecorator decorator) {
if (decorators == null)
decorators = new ArrayList<IDecorator>();
if (!decorators.contains(decorator))
decorators.add(decorator);
} | void function(IDecorator decorator) { if (decorators == null) decorators = new ArrayList<IDecorator>(); if (!decorators.contains(decorator)) decorators.add(decorator); } | /**
* Add a decorator to the decorators list, but only if it isn't already present
*
* @param decorator
* the decorator to add
*/ | Add a decorator to the decorators list, but only if it isn't already present | addDecoratorOnce | {
"repo_name": "OpenSoftwareSolutions/PDFReporter-Studio",
"path": "com.jaspersoft.studio/src/com/jaspersoft/studio/editor/gef/figures/ComponentFigure.java",
"license": "lgpl-3.0",
"size": 4462
} | [
"com.jaspersoft.studio.editor.gef.decorator.IDecorator",
"java.util.ArrayList"
] | import com.jaspersoft.studio.editor.gef.decorator.IDecorator; import java.util.ArrayList; | import com.jaspersoft.studio.editor.gef.decorator.*; import java.util.*; | [
"com.jaspersoft.studio",
"java.util"
] | com.jaspersoft.studio; java.util; | 2,670,947 |
@Test
public void evaluate_OriginalHasNoTypes() {
Scope s = new Scope();
Monitor m = new Monitor();
Location loc = new Location("myfile.treo", 1, 1);
List<TermExpression> indices = new ArrayList<TermExpression>();
VariableExpression vark = new VariableExpression("k", indices, loc);
TermExpression t1 = new IntegerValue(1);
TermExpression t3 = new IntegerValue(3);
TermExpression k = new VariableTermExpression(vark);
Range rng = new Range(t1, t3);
ListExpression list = new ListExpression(Arrays.asList(rng));
Identifier x = new Identifier("x");
PredicateExpression P1 = new Membership(x, list);
PredicateExpression P2 = new Relation(RelationSymbol.EQ, Arrays.asList(k, t3), loc);
Conjunction c = new Conjunction(Arrays.asList(P1, P2));
List<Scope> scopes = c.evaluate(s, m);
assertEquals(scopes.size(), 3);
assertEquals(scopes.get(0).get(x), new IntegerValue(1));
assertEquals(scopes.get(1).get(x), new IntegerValue(2));
assertEquals(scopes.get(2).get(x), new IntegerValue(3));
} | void function() { Scope s = new Scope(); Monitor m = new Monitor(); Location loc = new Location(STR, 1, 1); List<TermExpression> indices = new ArrayList<TermExpression>(); VariableExpression vark = new VariableExpression("k", indices, loc); TermExpression t1 = new IntegerValue(1); TermExpression t3 = new IntegerValue(3); TermExpression k = new VariableTermExpression(vark); Range rng = new Range(t1, t3); ListExpression list = new ListExpression(Arrays.asList(rng)); Identifier x = new Identifier("x"); PredicateExpression P1 = new Membership(x, list); PredicateExpression P2 = new Relation(RelationSymbol.EQ, Arrays.asList(k, t3), loc); Conjunction c = new Conjunction(Arrays.asList(P1, P2)); List<Scope> scopes = c.evaluate(s, m); assertEquals(scopes.size(), 3); assertEquals(scopes.get(0).get(x), new IntegerValue(1)); assertEquals(scopes.get(1).get(x), new IntegerValue(2)); assertEquals(scopes.get(2).get(x), new IntegerValue(3)); } | /**
* Evaluate original has no types.
*/ | Evaluate original has no types | evaluate_OriginalHasNoTypes | {
"repo_name": "kasperdokter/Reo-compiler",
"path": "reo-interpreter/src/test/java/nl/cwi/reo/interpret/statements/ConjunctionTests.java",
"license": "mit",
"size": 3085
} | [
"java.util.ArrayList",
"java.util.Arrays",
"java.util.List",
"nl.cwi.reo.interpret.Scope",
"nl.cwi.reo.interpret.terms.ListExpression",
"nl.cwi.reo.interpret.terms.Range",
"nl.cwi.reo.interpret.terms.TermExpression",
"nl.cwi.reo.interpret.terms.VariableTermExpression",
"nl.cwi.reo.interpret.values.IntegerValue",
"nl.cwi.reo.interpret.variables.Identifier",
"nl.cwi.reo.interpret.variables.VariableExpression",
"nl.cwi.reo.util.Location",
"nl.cwi.reo.util.Monitor",
"org.junit.Assert"
] | import java.util.ArrayList; import java.util.Arrays; import java.util.List; import nl.cwi.reo.interpret.Scope; import nl.cwi.reo.interpret.terms.ListExpression; import nl.cwi.reo.interpret.terms.Range; import nl.cwi.reo.interpret.terms.TermExpression; import nl.cwi.reo.interpret.terms.VariableTermExpression; import nl.cwi.reo.interpret.values.IntegerValue; import nl.cwi.reo.interpret.variables.Identifier; import nl.cwi.reo.interpret.variables.VariableExpression; import nl.cwi.reo.util.Location; import nl.cwi.reo.util.Monitor; import org.junit.Assert; | import java.util.*; import nl.cwi.reo.interpret.*; import nl.cwi.reo.interpret.terms.*; import nl.cwi.reo.interpret.values.*; import nl.cwi.reo.interpret.variables.*; import nl.cwi.reo.util.*; import org.junit.*; | [
"java.util",
"nl.cwi.reo",
"org.junit"
] | java.util; nl.cwi.reo; org.junit; | 1,525,597 |
public static void checkNCName(String name) {
if (!XML11Char.isXML11ValidNCName(name)) {
runTimeError(INVALID_NCNAME_ERR,name);
}
}
| static void function(String name) { if (!XML11Char.isXML11ValidNCName(name)) { runTimeError(INVALID_NCNAME_ERR,name); } } | /**
* Utility function to check if a name is a valid ncname
* This method should only be invoked if the attribute value is an AVT
*/ | Utility function to check if a name is a valid ncname This method should only be invoked if the attribute value is an AVT | checkNCName | {
"repo_name": "kcsl/immutability-benchmark",
"path": "benchmark-applications/reiminfer-oopsla-2012/source/Xalan/src/org/apache/xalan/xsltc/runtime/BasisLibrary.java",
"license": "mit",
"size": 57511
} | [
"org.apache.xml.utils.XML11Char"
] | import org.apache.xml.utils.XML11Char; | import org.apache.xml.utils.*; | [
"org.apache.xml"
] | org.apache.xml; | 395,224 |
public static RexNode projectNonColumnEquiConditions(ProjectFactory factory, RelNode[] inputRels,
List<RexNode> leftJoinKeys, List<RexNode> rightJoinKeys, int systemColCount,
List<Integer> leftKeys, List<Integer> rightKeys) {
RelNode leftRel = inputRels[0];
RelNode rightRel = inputRels[1];
RexBuilder rexBuilder = leftRel.getCluster().getRexBuilder();
RexNode outJoinCond = null;
int origLeftInputSize = leftRel.getRowType().getFieldCount();
int origRightInputSize = rightRel.getRowType().getFieldCount();
List<RexNode> newLeftFields = new ArrayList<RexNode>();
List<String> newLeftFieldNames = new ArrayList<String>();
List<RexNode> newRightFields = new ArrayList<RexNode>();
List<String> newRightFieldNames = new ArrayList<String>();
int leftKeyCount = leftJoinKeys.size();
int i;
for (i = 0; i < origLeftInputSize; i++) {
final RelDataTypeField field = leftRel.getRowType().getFieldList().get(i);
newLeftFields.add(rexBuilder.makeInputRef(field.getType(), i));
newLeftFieldNames.add(field.getName());
}
for (i = 0; i < origRightInputSize; i++) {
final RelDataTypeField field = rightRel.getRowType().getFieldList().get(i);
newRightFields.add(rexBuilder.makeInputRef(field.getType(), i));
newRightFieldNames.add(field.getName());
}
ImmutableBitSet.Builder origColEqCondsPosBuilder = ImmutableBitSet.builder();
int newKeyCount = 0;
List<Pair<Integer, Integer>> origColEqConds = new ArrayList<Pair<Integer, Integer>>();
for (i = 0; i < leftKeyCount; i++) {
RexNode leftKey = leftJoinKeys.get(i);
RexNode rightKey = rightJoinKeys.get(i);
if (leftKey instanceof RexInputRef && rightKey instanceof RexInputRef) {
origColEqConds.add(Pair.of(((RexInputRef) leftKey).getIndex(),
((RexInputRef) rightKey).getIndex()));
origColEqCondsPosBuilder.set(i);
} else {
newLeftFields.add(leftKey);
newLeftFieldNames.add(null);
newRightFields.add(rightKey);
newRightFieldNames.add(null);
newKeyCount++;
}
}
ImmutableBitSet origColEqCondsPos = origColEqCondsPosBuilder.build();
for (i = 0; i < origColEqConds.size(); i++) {
Pair<Integer, Integer> p = origColEqConds.get(i);
int condPos = origColEqCondsPos.nth(i);
RexNode leftKey = leftJoinKeys.get(condPos);
RexNode rightKey = rightJoinKeys.get(condPos);
leftKeys.add(p.left);
rightKeys.add(p.right);
RexNode cond = rexBuilder.makeCall(
SqlStdOperatorTable.EQUALS,
rexBuilder.makeInputRef(leftKey.getType(), systemColCount + p.left),
rexBuilder.makeInputRef(rightKey.getType(), systemColCount + origLeftInputSize
+ newKeyCount + p.right));
if (outJoinCond == null) {
outJoinCond = cond;
} else {
outJoinCond = rexBuilder.makeCall(SqlStdOperatorTable.AND, outJoinCond, cond);
}
}
if (newKeyCount == 0) {
return outJoinCond;
}
int newLeftOffset = systemColCount + origLeftInputSize;
int newRightOffset = systemColCount + origLeftInputSize + origRightInputSize + newKeyCount;
for (i = 0; i < newKeyCount; i++) {
leftKeys.add(origLeftInputSize + i);
rightKeys.add(origRightInputSize + i);
RexNode cond = rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
rexBuilder.makeInputRef(newLeftFields.get(origLeftInputSize + i).getType(), newLeftOffset + i),
rexBuilder.makeInputRef(newRightFields.get(origRightInputSize + i).getType(), newRightOffset + i));
if (outJoinCond == null) {
outJoinCond = cond;
} else {
outJoinCond = rexBuilder.makeCall(SqlStdOperatorTable.AND, outJoinCond, cond);
}
}
// added project if need to produce new keys than the original input
// fields
if (newKeyCount > 0) {
leftRel = factory.createProject(leftRel, newLeftFields,
SqlValidatorUtil.uniquify(newLeftFieldNames));
rightRel = factory.createProject(rightRel, newRightFields,
SqlValidatorUtil.uniquify(newRightFieldNames));
}
inputRels[0] = leftRel;
inputRels[1] = rightRel;
return outJoinCond;
}
public static class JoinPredicateInfo {
private final ImmutableList<JoinLeafPredicateInfo> nonEquiJoinPredicateElements;
private final ImmutableList<JoinLeafPredicateInfo> equiJoinPredicateElements;
private final ImmutableList<Set<Integer>> projsJoinKeysInChildSchema;
private final ImmutableList<Set<Integer>> projsJoinKeysInJoinSchema;
private final ImmutableMap<Integer, ImmutableList<JoinLeafPredicateInfo>> mapOfProjIndxInJoinSchemaToLeafPInfo;
public JoinPredicateInfo(List<JoinLeafPredicateInfo> nonEquiJoinPredicateElements,
List<JoinLeafPredicateInfo> equiJoinPredicateElements,
List<Set<Integer>> projsJoinKeysInChildSchema,
List<Set<Integer>> projsJoinKeysInJoinSchema,
Map<Integer, ImmutableList<JoinLeafPredicateInfo>> mapOfProjIndxInJoinSchemaToLeafPInfo) {
this.nonEquiJoinPredicateElements = ImmutableList.copyOf(nonEquiJoinPredicateElements);
this.equiJoinPredicateElements = ImmutableList.copyOf(equiJoinPredicateElements);
this.projsJoinKeysInChildSchema = ImmutableList
.copyOf(projsJoinKeysInChildSchema);
this.projsJoinKeysInJoinSchema = ImmutableList
.copyOf(projsJoinKeysInJoinSchema);
this.mapOfProjIndxInJoinSchemaToLeafPInfo = ImmutableMap
.copyOf(mapOfProjIndxInJoinSchemaToLeafPInfo);
} | static RexNode function(ProjectFactory factory, RelNode[] inputRels, List<RexNode> leftJoinKeys, List<RexNode> rightJoinKeys, int systemColCount, List<Integer> leftKeys, List<Integer> rightKeys) { RelNode leftRel = inputRels[0]; RelNode rightRel = inputRels[1]; RexBuilder rexBuilder = leftRel.getCluster().getRexBuilder(); RexNode outJoinCond = null; int origLeftInputSize = leftRel.getRowType().getFieldCount(); int origRightInputSize = rightRel.getRowType().getFieldCount(); List<RexNode> newLeftFields = new ArrayList<RexNode>(); List<String> newLeftFieldNames = new ArrayList<String>(); List<RexNode> newRightFields = new ArrayList<RexNode>(); List<String> newRightFieldNames = new ArrayList<String>(); int leftKeyCount = leftJoinKeys.size(); int i; for (i = 0; i < origLeftInputSize; i++) { final RelDataTypeField field = leftRel.getRowType().getFieldList().get(i); newLeftFields.add(rexBuilder.makeInputRef(field.getType(), i)); newLeftFieldNames.add(field.getName()); } for (i = 0; i < origRightInputSize; i++) { final RelDataTypeField field = rightRel.getRowType().getFieldList().get(i); newRightFields.add(rexBuilder.makeInputRef(field.getType(), i)); newRightFieldNames.add(field.getName()); } ImmutableBitSet.Builder origColEqCondsPosBuilder = ImmutableBitSet.builder(); int newKeyCount = 0; List<Pair<Integer, Integer>> origColEqConds = new ArrayList<Pair<Integer, Integer>>(); for (i = 0; i < leftKeyCount; i++) { RexNode leftKey = leftJoinKeys.get(i); RexNode rightKey = rightJoinKeys.get(i); if (leftKey instanceof RexInputRef && rightKey instanceof RexInputRef) { origColEqConds.add(Pair.of(((RexInputRef) leftKey).getIndex(), ((RexInputRef) rightKey).getIndex())); origColEqCondsPosBuilder.set(i); } else { newLeftFields.add(leftKey); newLeftFieldNames.add(null); newRightFields.add(rightKey); newRightFieldNames.add(null); newKeyCount++; } } ImmutableBitSet origColEqCondsPos = origColEqCondsPosBuilder.build(); for (i = 0; i < origColEqConds.size(); i++) { Pair<Integer, 
Integer> p = origColEqConds.get(i); int condPos = origColEqCondsPos.nth(i); RexNode leftKey = leftJoinKeys.get(condPos); RexNode rightKey = rightJoinKeys.get(condPos); leftKeys.add(p.left); rightKeys.add(p.right); RexNode cond = rexBuilder.makeCall( SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(leftKey.getType(), systemColCount + p.left), rexBuilder.makeInputRef(rightKey.getType(), systemColCount + origLeftInputSize + newKeyCount + p.right)); if (outJoinCond == null) { outJoinCond = cond; } else { outJoinCond = rexBuilder.makeCall(SqlStdOperatorTable.AND, outJoinCond, cond); } } if (newKeyCount == 0) { return outJoinCond; } int newLeftOffset = systemColCount + origLeftInputSize; int newRightOffset = systemColCount + origLeftInputSize + origRightInputSize + newKeyCount; for (i = 0; i < newKeyCount; i++) { leftKeys.add(origLeftInputSize + i); rightKeys.add(origRightInputSize + i); RexNode cond = rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(newLeftFields.get(origLeftInputSize + i).getType(), newLeftOffset + i), rexBuilder.makeInputRef(newRightFields.get(origRightInputSize + i).getType(), newRightOffset + i)); if (outJoinCond == null) { outJoinCond = cond; } else { outJoinCond = rexBuilder.makeCall(SqlStdOperatorTable.AND, outJoinCond, cond); } } if (newKeyCount > 0) { leftRel = factory.createProject(leftRel, newLeftFields, SqlValidatorUtil.uniquify(newLeftFieldNames)); rightRel = factory.createProject(rightRel, newRightFields, SqlValidatorUtil.uniquify(newRightFieldNames)); } inputRels[0] = leftRel; inputRels[1] = rightRel; return outJoinCond; } public static class JoinPredicateInfo { private final ImmutableList<JoinLeafPredicateInfo> nonEquiJoinPredicateElements; private final ImmutableList<JoinLeafPredicateInfo> equiJoinPredicateElements; private final ImmutableList<Set<Integer>> projsJoinKeysInChildSchema; private final ImmutableList<Set<Integer>> projsJoinKeysInJoinSchema; private final ImmutableMap<Integer, 
ImmutableList<JoinLeafPredicateInfo>> mapOfProjIndxInJoinSchemaToLeafPInfo; public JoinPredicateInfo(List<JoinLeafPredicateInfo> nonEquiJoinPredicateElements, List<JoinLeafPredicateInfo> equiJoinPredicateElements, List<Set<Integer>> projsJoinKeysInChildSchema, List<Set<Integer>> projsJoinKeysInJoinSchema, Map<Integer, ImmutableList<JoinLeafPredicateInfo>> mapOfProjIndxInJoinSchemaToLeafPInfo) { this.nonEquiJoinPredicateElements = ImmutableList.copyOf(nonEquiJoinPredicateElements); this.equiJoinPredicateElements = ImmutableList.copyOf(equiJoinPredicateElements); this.projsJoinKeysInChildSchema = ImmutableList .copyOf(projsJoinKeysInChildSchema); this.projsJoinKeysInJoinSchema = ImmutableList .copyOf(projsJoinKeysInJoinSchema); this.mapOfProjIndxInJoinSchemaToLeafPInfo = ImmutableMap .copyOf(mapOfProjIndxInJoinSchemaToLeafPInfo); } | /**
* Push any equi join conditions that are not column references as Projections
* on top of the children.
*
* @param factory
* Project factory to use.
* @param inputRels
* inputs to a join
* @param leftJoinKeys
* expressions for LHS of join key
* @param rightJoinKeys
* expressions for RHS of join key
* @param systemColCount
* number of system columns, usually zero. These columns are
* projected at the leading edge of the output row.
* @param leftKeys
* on return this contains the join key positions from the new
* project rel on the LHS.
* @param rightKeys
* on return this contains the join key positions from the new
* project rel on the RHS.
* @return the join condition after the equi expressions pushed down.
*/ | Push any equi join conditions that are not column references as Projections on top of the children | projectNonColumnEquiConditions | {
"repo_name": "vergilchiu/hive",
"path": "ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java",
"license": "apache-2.0",
"size": 42761
} | [
"com.google.common.collect.ImmutableList",
"com.google.common.collect.ImmutableMap",
"java.util.ArrayList",
"java.util.List",
"java.util.Map",
"java.util.Set",
"org.apache.calcite.rel.RelNode",
"org.apache.calcite.rel.core.RelFactories",
"org.apache.calcite.rel.type.RelDataTypeField",
"org.apache.calcite.rex.RexBuilder",
"org.apache.calcite.rex.RexInputRef",
"org.apache.calcite.rex.RexNode",
"org.apache.calcite.sql.fun.SqlStdOperatorTable",
"org.apache.calcite.sql.validate.SqlValidatorUtil",
"org.apache.calcite.util.ImmutableBitSet",
"org.apache.calcite.util.Pair"
] | import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.RelFactories; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.validate.SqlValidatorUtil; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; | import com.google.common.collect.*; import java.util.*; import org.apache.calcite.rel.*; import org.apache.calcite.rel.core.*; import org.apache.calcite.rel.type.*; import org.apache.calcite.rex.*; import org.apache.calcite.sql.fun.*; import org.apache.calcite.sql.validate.*; import org.apache.calcite.util.*; | [
"com.google.common",
"java.util",
"org.apache.calcite"
] | com.google.common; java.util; org.apache.calcite; | 1,082,423 |
@Override
public Map<String, Object> getFingerprint() {
Map<String, Object> map = new HashMap<String, Object>();
List<String> families = new ArrayList<String>();
if(this.familyMap.size() == 0) {
map.put("families", "ALL");
return map;
} else {
map.put("families", families);
}
for (Map.Entry<byte [], NavigableSet<byte[]>> entry :
this.familyMap.entrySet()) {
families.add(Bytes.toStringBinary(entry.getKey()));
}
return map;
} | Map<String, Object> function() { Map<String, Object> map = new HashMap<String, Object>(); List<String> families = new ArrayList<String>(); if(this.familyMap.size() == 0) { map.put(STR, "ALL"); return map; } else { map.put(STR, families); } for (Map.Entry<byte [], NavigableSet<byte[]>> entry : this.familyMap.entrySet()) { families.add(Bytes.toStringBinary(entry.getKey())); } return map; } | /**
* Compile the table and column family (i.e. schema) information
* into a String. Useful for parsing and aggregation by debugging,
* logging, and administration tools.
* @return Map
*/ | Compile the table and column family (i.e. schema) information into a String. Useful for parsing and aggregation by debugging, logging, and administration tools | getFingerprint | {
"repo_name": "bcopeland/hbase-thrift",
"path": "src/main/java/org/apache/hadoop/hbase/client/Scan.java",
"license": "apache-2.0",
"size": 21473
} | [
"java.util.ArrayList",
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"java.util.NavigableSet",
"org.apache.hadoop.hbase.util.Bytes"
] | import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.NavigableSet; import org.apache.hadoop.hbase.util.Bytes; | import java.util.*; import org.apache.hadoop.hbase.util.*; | [
"java.util",
"org.apache.hadoop"
] | java.util; org.apache.hadoop; | 1,276,255 |
public void setDataSource(String path, String[] keys, String[] values) throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
final Uri uri = Uri.parse(path);
if ("file".equals(uri.getScheme())) {
path = uri.getPath();
}
final File file = new File(path);
if (file.exists()) {
FileInputStream is = new FileInputStream(file);
FileDescriptor fd = is.getFD();
setDataSource(fd);
is.close();
} else {
_setDataSource(path, keys, values);
}
} | void function(String path, String[] keys, String[] values) throws IOException, IllegalArgumentException, SecurityException, IllegalStateException { final Uri uri = Uri.parse(path); if ("file".equals(uri.getScheme())) { path = uri.getPath(); } final File file = new File(path); if (file.exists()) { FileInputStream is = new FileInputStream(file); FileDescriptor fd = is.getFD(); setDataSource(fd); is.close(); } else { _setDataSource(path, keys, values); } } | /**
* Sets the data source (file-path or http/rtsp URL) to use.
*
* @param path the path of the file, or the http/rtsp URL of the stream you want to play
* @param keys AVOption key
* @param values AVOption value
* @throws IllegalStateException if it is called in an invalid state
*/ | Sets the data source (file-path or http/rtsp URL) to use | setDataSource | {
"repo_name": "liaozhoubei/NetEasyNews",
"path": "library/InitActivity/src/io/vov/vitamio/MediaPlayer.java",
"license": "gpl-3.0",
"size": 57992
} | [
"android.net.Uri",
"java.io.File",
"java.io.FileDescriptor",
"java.io.FileInputStream",
"java.io.IOException"
] | import android.net.Uri; import java.io.File; import java.io.FileDescriptor; import java.io.FileInputStream; import java.io.IOException; | import android.net.*; import java.io.*; | [
"android.net",
"java.io"
] | android.net; java.io; | 2,360,413 |
private void writeSQLDHROW(int holdability) throws DRDAProtocolException,SQLException
{
if (JVMInfo.JDK_ID < 2) //write null indicator for SQLDHROW because there is no holdability support prior to jdk1.3
{
writer.writeByte(CodePoint.NULLDATA);
return;
}
writer.writeByte(0); // SQLDHROW INDICATOR
//SQLDHOLD
writer.writeShort(holdability);
//SQLDRETURN
writer.writeShort(0);
//SQLDSCROLL
writer.writeShort(0);
//SQLDSENSITIVE
writer.writeShort(0);
//SQLDFCODE
writer.writeShort(0);
//SQLDKEYTYPE
writer.writeShort(0);
//SQLRDBNAME
writer.writeShort(0); //CCC on Windows somehow does not take any dbname
//SQLDSCHEMA
writeVCMorVCS(null);
} | void function(int holdability) throws DRDAProtocolException,SQLException { if (JVMInfo.JDK_ID < 2) { writer.writeByte(CodePoint.NULLDATA); return; } writer.writeByte(0); writer.writeShort(holdability); writer.writeShort(0); writer.writeShort(0); writer.writeShort(0); writer.writeShort(0); writer.writeShort(0); writer.writeShort(0); writeVCMorVCS(null); } | /**
* Holdability passed in as it can represent the holdability of
* the statement or a specific result set.
* @param holdability HOLD_CURSORS_OVER_COMMIT or CLOSE_CURSORS_AT_COMMIT
* @throws DRDAProtocolException
* @throws SQLException
*/ | Holdability passed in as it can represent the holdability of the statement or a specific result set | writeSQLDHROW | {
"repo_name": "lpxz/grail-derby104",
"path": "java/drda/org/apache/derby/impl/drda/DRDAConnThread.java",
"license": "apache-2.0",
"size": 273542
} | [
"java.sql.SQLException",
"org.apache.derby.iapi.services.info.JVMInfo"
] | import java.sql.SQLException; import org.apache.derby.iapi.services.info.JVMInfo; | import java.sql.*; import org.apache.derby.iapi.services.info.*; | [
"java.sql",
"org.apache.derby"
] | java.sql; org.apache.derby; | 1,765,846 |
return new Header(requireNonNull(name), requireNonNull(value));
} | return new Header(requireNonNull(name), requireNonNull(value)); } | /**
* Create new header.
* @param name header name
* @param value header value
* @return header
*/ | Create new header | of | {
"repo_name": "clearthesky/requests",
"path": "src/main/java/net/dongliu/requests/Header.java",
"license": "bsd-2-clause",
"size": 1142
} | [
"java.util.Objects"
] | import java.util.Objects; | import java.util.*; | [
"java.util"
] | java.util; | 609,983 |
public void setMonth13Amount(KualiDecimal month13Amount); | void function(KualiDecimal month13Amount); | /**
* Sets the month13Amount attribute value.
*
* @param month13Amount The month13Amount to set.
*/ | Sets the month13Amount attribute value | setMonth13Amount | {
"repo_name": "quikkian-ua-devops/will-financials",
"path": "kfs-core/src/main/java/org/kuali/kfs/integration/ld/LaborLedgerBalance.java",
"license": "agpl-3.0",
"size": 16145
} | [
"org.kuali.rice.core.api.util.type.KualiDecimal"
] | import org.kuali.rice.core.api.util.type.KualiDecimal; | import org.kuali.rice.core.api.util.type.*; | [
"org.kuali.rice"
] | org.kuali.rice; | 1,417,119 |
public synchronized void activateOptions() {
// First ensure host, port and topic are set
if (!verifyOptions()) {
return;
}
try {
itsHostName = InetAddress.getLocalHost().getHostName();
} catch (Exception e) {
itsHostName = "unknown";
}
// Map all log4j log levels to ASKAP/ICE log levels so a log4j event can
// be turned into an ASKAP LogEvent
if (itsLevelMap.isEmpty()) {
itsLevelMap.put(Level.TRACE, askap.interfaces.logging.LogLevel.TRACE);
itsLevelMap.put(Level.DEBUG, askap.interfaces.logging.LogLevel.DEBUG);
itsLevelMap.put(Level.INFO, askap.interfaces.logging.LogLevel.INFO);
itsLevelMap.put(Level.WARN, askap.interfaces.logging.LogLevel.WARN);
itsLevelMap.put(Level.ERROR, askap.interfaces.logging.LogLevel.ERROR);
itsLevelMap.put(Level.FATAL, askap.interfaces.logging.LogLevel.FATAL);
}
itsIceLoggerThread = new IceLoggerThread(itsLocatorHost, itsLocatorPort,
itsTopic, itsTopicManager);
itsIceLoggerThread.start();
} | synchronized void function() { if (!verifyOptions()) { return; } try { itsHostName = InetAddress.getLocalHost().getHostName(); } catch (Exception e) { itsHostName = STR; } if (itsLevelMap.isEmpty()) { itsLevelMap.put(Level.TRACE, askap.interfaces.logging.LogLevel.TRACE); itsLevelMap.put(Level.DEBUG, askap.interfaces.logging.LogLevel.DEBUG); itsLevelMap.put(Level.INFO, askap.interfaces.logging.LogLevel.INFO); itsLevelMap.put(Level.WARN, askap.interfaces.logging.LogLevel.WARN); itsLevelMap.put(Level.ERROR, askap.interfaces.logging.LogLevel.ERROR); itsLevelMap.put(Level.FATAL, askap.interfaces.logging.LogLevel.FATAL); } itsIceLoggerThread = new IceLoggerThread(itsLocatorHost, itsLocatorPort, itsTopic, itsTopicManager); itsIceLoggerThread.start(); } | /**
* Called once all the options have been set. This is where ICE can be
* initialized and the topic created, since the configuration options have
* now been set hence we know the locator host, locator port and logger
* topic name.
*/ | Called once all the options have been set. This is where ICE can be initialized and the topic created, since the configuration options have now been set hence we know the locator host, locator port and logger topic name | activateOptions | {
"repo_name": "ATNF/askapsdp",
"path": "Code/Base/java-logappenders/current/src/askap/IceAppender.java",
"license": "gpl-2.0",
"size": 6873
} | [
"java.net.InetAddress",
"org.apache.log4j.Level"
] | import java.net.InetAddress; import org.apache.log4j.Level; | import java.net.*; import org.apache.log4j.*; | [
"java.net",
"org.apache.log4j"
] | java.net; org.apache.log4j; | 971,884 |
public static Configuration create(final Configuration that) {
Configuration conf = create();
merge(conf, that);
return conf;
} | static Configuration function(final Configuration that) { Configuration conf = create(); merge(conf, that); return conf; } | /**
* Creates a clone of passed configuration.
* @param that Configuration to clone.
* @return a Configuration created with the hbase-*.xml files plus
* the given configuration.
*/ | Creates a clone of passed configuration | create | {
"repo_name": "bcopeland/hbase-thrift",
"path": "src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java",
"license": "apache-2.0",
"size": 4605
} | [
"org.apache.hadoop.conf.Configuration"
] | import org.apache.hadoop.conf.Configuration; | import org.apache.hadoop.conf.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 1,985,672 |
public ArrayList<Plan> getPlans(final UUID project) {
final UUID locationId = UUID.fromString("0b42cb47-cd73-4810-ac90-19c9ba147453"); //$NON-NLS-1$
final ApiResourceVersion apiVersion = new ApiResourceVersion("3.1-preview.1"); //$NON-NLS-1$
final Map<String, Object> routeValues = new HashMap<String, Object>();
routeValues.put("project", project); //$NON-NLS-1$
final VssRestRequest httpRequest = super.createRequest(HttpMethod.GET,
locationId,
routeValues,
apiVersion,
VssMediaTypes.APPLICATION_JSON_TYPE);
return super.sendRequest(httpRequest, new TypeReference<ArrayList<Plan>>() {});
} | ArrayList<Plan> function(final UUID project) { final UUID locationId = UUID.fromString(STR); final ApiResourceVersion apiVersion = new ApiResourceVersion(STR); final Map<String, Object> routeValues = new HashMap<String, Object>(); routeValues.put(STR, project); final VssRestRequest httpRequest = super.createRequest(HttpMethod.GET, locationId, routeValues, apiVersion, VssMediaTypes.APPLICATION_JSON_TYPE); return super.sendRequest(httpRequest, new TypeReference<ArrayList<Plan>>() {}); } | /**
* [Preview API 3.1-preview.1]
*
* @param project
* Project ID
* @return ArrayList<Plan>
*/ | [Preview API 3.1-preview.1] | getPlans | {
"repo_name": "Microsoft/vso-httpclient-java",
"path": "Rest/alm-tfs-client/src/main/generated/com/microsoft/alm/teamfoundation/work/webapi/WorkHttpClientBase.java",
"license": "mit",
"size": 234377
} | [
"com.fasterxml.jackson.core.type.TypeReference",
"com.microsoft.alm.client.HttpMethod",
"com.microsoft.alm.client.VssMediaTypes",
"com.microsoft.alm.client.VssRestRequest",
"com.microsoft.alm.teamfoundation.work.webapi.Plan",
"com.microsoft.alm.visualstudio.services.webapi.ApiResourceVersion",
"java.util.ArrayList",
"java.util.HashMap",
"java.util.Map",
"java.util.UUID"
] | import com.fasterxml.jackson.core.type.TypeReference; import com.microsoft.alm.client.HttpMethod; import com.microsoft.alm.client.VssMediaTypes; import com.microsoft.alm.client.VssRestRequest; import com.microsoft.alm.teamfoundation.work.webapi.Plan; import com.microsoft.alm.visualstudio.services.webapi.ApiResourceVersion; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.UUID; | import com.fasterxml.jackson.core.type.*; import com.microsoft.alm.client.*; import com.microsoft.alm.teamfoundation.work.webapi.*; import com.microsoft.alm.visualstudio.services.webapi.*; import java.util.*; | [
"com.fasterxml.jackson",
"com.microsoft.alm",
"java.util"
] | com.fasterxml.jackson; com.microsoft.alm; java.util; | 880,853 |
public void testGetSetAlpha() {
// we use ValueMarker for the tests, because we need a concrete
// subclass...
ValueMarker m = new ValueMarker(1.1);
m.addChangeListener(this);
this.lastEvent = null;
assertEquals(0.8f, m.getAlpha(), EPSILON);
m.setAlpha(0.5f);
assertEquals(0.5f, m.getAlpha(), EPSILON);
assertEquals(m, this.lastEvent.getMarker());
} | void function() { ValueMarker m = new ValueMarker(1.1); m.addChangeListener(this); this.lastEvent = null; assertEquals(0.8f, m.getAlpha(), EPSILON); m.setAlpha(0.5f); assertEquals(0.5f, m.getAlpha(), EPSILON); assertEquals(m, this.lastEvent.getMarker()); } | /**
* Some checks for the getAlpha() and setAlpha() methods.
*/ | Some checks for the getAlpha() and setAlpha() methods | testGetSetAlpha | {
"repo_name": "nologic/nabs",
"path": "client/trunk/shared/libraries/jfreechart-1.0.5/tests/org/jfree/chart/plot/junit/MarkerTests.java",
"license": "gpl-2.0",
"size": 14251
} | [
"org.jfree.chart.plot.ValueMarker"
] | import org.jfree.chart.plot.ValueMarker; | import org.jfree.chart.plot.*; | [
"org.jfree.chart"
] | org.jfree.chart; | 1,948,588 |
public void checkBlock(ExtendedBlock b, long minLength, ReplicaState state)
throws ReplicaNotFoundException, UnexpectedReplicaStateException,
FileNotFoundException, EOFException, IOException;
| void function(ExtendedBlock b, long minLength, ReplicaState state) throws ReplicaNotFoundException, UnexpectedReplicaStateException, FileNotFoundException, EOFException, IOException; | /**
* Check if a block is valid.
*
* @param b The block to check.
* @param minLength The minimum length that the block must have. May be 0.
* @param state If this is null, it is ignored. If it is non-null, we
* will check that the replica has this state.
*
* @throws ReplicaNotFoundException If the replica is not found
*
* @throws UnexpectedReplicaStateException If the replica is not in the
* expected state.
* @throws FileNotFoundException If the block file is not found or there
* was an error locating it.
* @throws EOFException If the replica length is too short.
*
* @throws IOException May be thrown from the methods called.
*/ | Check if a block is valid | checkBlock | {
"repo_name": "vesense/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsDatasetSpi.java",
"license": "apache-2.0",
"size": 18892
} | [
"java.io.EOFException",
"java.io.FileNotFoundException",
"java.io.IOException",
"org.apache.hadoop.hdfs.protocol.ExtendedBlock",
"org.apache.hadoop.hdfs.server.common.HdfsServerConstants",
"org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException",
"org.apache.hadoop.hdfs.server.datanode.UnexpectedReplicaStateException"
] | import java.io.EOFException; import java.io.FileNotFoundException; import java.io.IOException; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException; import org.apache.hadoop.hdfs.server.datanode.UnexpectedReplicaStateException; | import java.io.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.common.*; import org.apache.hadoop.hdfs.server.datanode.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 2,499,813 |
public static void verifyRomeosEntry(final RosterEntry romeo) {
assertNotNull("Can't get Romeo's roster entry!", romeo);
assertSame("Setup wrong name for Romeo!",
"Romeo",
romeo.getName());
assertSame("Setup wrong subscription status for Romeo!",
ItemType.both,
romeo.getType());
assertSame("Romeo should be member of exactly one group!",
1,
romeo.getGroups().size());
assertSame("Setup wrong group name for Romeo!",
"Friends",
romeo.getGroups().iterator().next().getName());
} | static void function(final RosterEntry romeo) { assertNotNull(STR, romeo); assertSame(STR, "Romeo", romeo.getName()); assertSame(STR, ItemType.both, romeo.getType()); assertSame(STR, 1, romeo.getGroups().size()); assertSame(STR, STR, romeo.getGroups().iterator().next().getName()); } | /**
* Check Romeo's roster entry according to the example in
* <a href="http://xmpp.org/rfcs/rfc3921.html#roster-login"
* >RFC3921: Retrieving One's Roster on Login</a>.
*
* @param romeo the roster entry which should be verified.
*/ | Check Romeo's roster entry according to the example in RFC3921: Retrieving One's Roster on Login | verifyRomeosEntry | {
"repo_name": "vito-c/Smack",
"path": "smack-core/src/test/java/org/jivesoftware/smack/RosterTest.java",
"license": "apache-2.0",
"size": 32878
} | [
"org.jivesoftware.smack.packet.RosterPacket",
"org.junit.Assert"
] | import org.jivesoftware.smack.packet.RosterPacket; import org.junit.Assert; | import org.jivesoftware.smack.packet.*; import org.junit.*; | [
"org.jivesoftware.smack",
"org.junit"
] | org.jivesoftware.smack; org.junit; | 2,587,082 |
public void subscribeToDeviceStatus(String deviceType, String deviceId) {
try {
String newTopic = "iot-2/type/"+deviceType+"/id/"+deviceId+"/mon";
subscriptions.put(newTopic, new Integer(0));
mqttAsyncClient.subscribe(newTopic, 0);
} catch (MqttException e) {
e.printStackTrace();
}
}
| void function(String deviceType, String deviceId) { try { String newTopic = STR+deviceType+"/id/"+deviceId+"/mon"; subscriptions.put(newTopic, new Integer(0)); mqttAsyncClient.subscribe(newTopic, 0); } catch (MqttException e) { e.printStackTrace(); } } | /**
* Subscribe to device status of the IBM Internet of Things Foundation. <br>
*
* @param deviceType
* object of String which denotes deviceType
* @param deviceId
* object of String which denotes deviceId
*/ | Subscribe to device status of the IBM Internet of Things Foundation. | subscribeToDeviceStatus | {
"repo_name": "BorisDaich/iot-java",
"path": "src/com/ibm/iotf/client/app/ApplicationClient.java",
"license": "epl-1.0",
"size": 24050
} | [
"org.eclipse.paho.client.mqttv3.MqttException"
] | import org.eclipse.paho.client.mqttv3.MqttException; | import org.eclipse.paho.client.mqttv3.*; | [
"org.eclipse.paho"
] | org.eclipse.paho; | 1,013,489 |
public ApplicationDetail getAppDetail(Id.Application appId) throws ApplicationNotFoundException {
ApplicationSpecification appSpec = store.getApplication(appId);
if (appSpec == null) {
throw new ApplicationNotFoundException(appId);
}
return ApplicationDetail.fromSpec(appSpec);
} | ApplicationDetail function(Id.Application appId) throws ApplicationNotFoundException { ApplicationSpecification appSpec = store.getApplication(appId); if (appSpec == null) { throw new ApplicationNotFoundException(appId); } return ApplicationDetail.fromSpec(appSpec); } | /**
* Get detail about the specified application
*
* @param appId the id of the application to get
* @return detail about the specified application
* @throws ApplicationNotFoundException if the specified application does not exist
*/ | Get detail about the specified application | getAppDetail | {
"repo_name": "chtyim/cdap",
"path": "cdap-app-fabric/src/main/java/co/cask/cdap/internal/app/services/ApplicationLifecycleService.java",
"license": "apache-2.0",
"size": 31945
} | [
"co.cask.cdap.api.app.ApplicationSpecification",
"co.cask.cdap.common.ApplicationNotFoundException",
"co.cask.cdap.proto.ApplicationDetail",
"co.cask.cdap.proto.Id"
] | import co.cask.cdap.api.app.ApplicationSpecification; import co.cask.cdap.common.ApplicationNotFoundException; import co.cask.cdap.proto.ApplicationDetail; import co.cask.cdap.proto.Id; | import co.cask.cdap.api.app.*; import co.cask.cdap.common.*; import co.cask.cdap.proto.*; | [
"co.cask.cdap"
] | co.cask.cdap; | 1,155,959 |
@Test
public void cycleWithMultipleUnfinishedChildren() throws Exception {
graph = new DeterministicHelper.DeterministicProcessableGraph(new InMemoryGraphImpl());
SkyKey cycleKey = GraphTester.toSkyKey("zcycle");
SkyKey midKey = GraphTester.toSkyKey("mid");
SkyKey topKey = GraphTester.toSkyKey("top");
SkyKey selfEdge1 = GraphTester.toSkyKey("selfEdge1");
SkyKey selfEdge2 = GraphTester.toSkyKey("selfEdge2");
tester.getOrCreate(topKey).addDependency(midKey).setComputedValue(CONCATENATE);
// selfEdge* come before cycleKey, so cycleKey's path will be checked first (LIFO), and the
// cycle with mid will be detected before the selfEdge* cycles are.
tester.getOrCreate(midKey).addDependency(selfEdge1).addDependency(selfEdge2)
.addDependency(cycleKey)
.setComputedValue(CONCATENATE);
tester.getOrCreate(cycleKey).addDependency(midKey);
tester.getOrCreate(selfEdge1).addDependency(selfEdge1);
tester.getOrCreate(selfEdge2).addDependency(selfEdge2);
EvaluationResult<StringValue> result = eval(true, ImmutableSet.of(topKey));
assertThat(result.errorMap().keySet()).containsExactly(topKey);
Iterable<CycleInfo> cycleInfos = result.getError(topKey).getCycleInfo();
CycleInfo cycleInfo = Iterables.getOnlyElement(cycleInfos);
assertThat(cycleInfo.getPathToCycle()).containsExactly(topKey);
assertThat(cycleInfo.getCycle()).containsExactly(midKey, cycleKey);
} | void function() throws Exception { graph = new DeterministicHelper.DeterministicProcessableGraph(new InMemoryGraphImpl()); SkyKey cycleKey = GraphTester.toSkyKey(STR); SkyKey midKey = GraphTester.toSkyKey("mid"); SkyKey topKey = GraphTester.toSkyKey("top"); SkyKey selfEdge1 = GraphTester.toSkyKey(STR); SkyKey selfEdge2 = GraphTester.toSkyKey(STR); tester.getOrCreate(topKey).addDependency(midKey).setComputedValue(CONCATENATE); tester.getOrCreate(midKey).addDependency(selfEdge1).addDependency(selfEdge2) .addDependency(cycleKey) .setComputedValue(CONCATENATE); tester.getOrCreate(cycleKey).addDependency(midKey); tester.getOrCreate(selfEdge1).addDependency(selfEdge1); tester.getOrCreate(selfEdge2).addDependency(selfEdge2); EvaluationResult<StringValue> result = eval(true, ImmutableSet.of(topKey)); assertThat(result.errorMap().keySet()).containsExactly(topKey); Iterable<CycleInfo> cycleInfos = result.getError(topKey).getCycleInfo(); CycleInfo cycleInfo = Iterables.getOnlyElement(cycleInfos); assertThat(cycleInfo.getPathToCycle()).containsExactly(topKey); assertThat(cycleInfo.getCycle()).containsExactly(midKey, cycleKey); } | /**
* Make sure that multiple unfinished children can be cleared from a cycle value.
*/ | Make sure that multiple unfinished children can be cleared from a cycle value | cycleWithMultipleUnfinishedChildren | {
"repo_name": "Asana/bazel",
"path": "src/test/java/com/google/devtools/build/skyframe/ParallelEvaluatorTest.java",
"license": "apache-2.0",
"size": 97707
} | [
"com.google.common.collect.ImmutableSet",
"com.google.common.collect.Iterables",
"com.google.common.truth.Truth",
"com.google.devtools.build.skyframe.GraphTester"
] | import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.truth.Truth; import com.google.devtools.build.skyframe.GraphTester; | import com.google.common.collect.*; import com.google.common.truth.*; import com.google.devtools.build.skyframe.*; | [
"com.google.common",
"com.google.devtools"
] | com.google.common; com.google.devtools; | 913,316 |
@Abstract(ifExported = "readHashValue")
public boolean isHashEntryReadable(Object receiver, Object key) {
return false;
} | @Abstract(ifExported = STR) boolean function(Object receiver, Object key) { return false; } | /**
* Returns {@code true} if mapping for the specified key exists and is
* {@link #readHashValue(Object, Object) readable}. This method may only return {@code true} if
* {@link #hasHashEntries(Object)} returns {@code true} as well and
* {@link #isHashEntryInsertable(Object, Object)} returns {@code false}. Invoking this message
* does not cause any observable side-effects. Returns {@code false} by default.
*
* @see #readHashValue(Object, Object)
* @since 21.1
*/ | Returns true if mapping for the specified key exists and is <code>#readHashValue(Object, Object) readable</code>. This method may only return true if <code>#hasHashEntries(Object)</code> returns true as well and <code>#isHashEntryInsertable(Object, Object)</code> returns false. Invoking this message does not cause any observable side-effects. Returns false by default | isHashEntryReadable | {
"repo_name": "smarr/Truffle",
"path": "truffle/src/com.oracle.truffle.api.interop/src/com/oracle/truffle/api/interop/InteropLibrary.java",
"license": "gpl-2.0",
"size": 247630
} | [
"com.oracle.truffle.api.library.GenerateLibrary"
] | import com.oracle.truffle.api.library.GenerateLibrary; | import com.oracle.truffle.api.library.*; | [
"com.oracle.truffle"
] | com.oracle.truffle; | 430,500 |
public static LocalAddress cacheStaticMethod(int methodOffset, int classStaticTocOffset) {
if (VM.VerifyAssertions) {
VM._assert(VM.NOT_REACHED);
}
return null;
} | static LocalAddress function(int methodOffset, int classStaticTocOffset) { if (VM.VerifyAssertions) { VM._assert(VM.NOT_REACHED); } return null; } | /**
* On Cell Spu, caches a method in local store.
*
* @param methodOffset offset of method in JTOC
* @return address of entry point to method in local memory if cached already, else null
*/ | On Cell Spu, caches a method in local store | cacheStaticMethod | {
"repo_name": "rmcilroy/HeraJVM",
"path": "rvm/src/org/jikesrvm/runtime/VM_Magic.java",
"license": "epl-1.0",
"size": 37346
} | [
"org.vmmagic.unboxed.LocalAddress"
] | import org.vmmagic.unboxed.LocalAddress; | import org.vmmagic.unboxed.*; | [
"org.vmmagic.unboxed"
] | org.vmmagic.unboxed; | 236,564 |
public ServiceFuture<Void> stopAsync(String resourceGroupName, String accountName, String liveEventName, final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromResponse(stopWithServiceResponseAsync(resourceGroupName, accountName, liveEventName), serviceCallback);
} | ServiceFuture<Void> function(String resourceGroupName, String accountName, String liveEventName, final ServiceCallback<Void> serviceCallback) { return ServiceFuture.fromResponse(stopWithServiceResponseAsync(resourceGroupName, accountName, liveEventName), serviceCallback); } | /**
* Stop Live Event.
* Stops an existing Live Event.
*
* @param resourceGroupName The name of the resource group within the Azure subscription.
* @param accountName The Media Services account name.
* @param liveEventName The name of the Live Event.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/ | Stop Live Event. Stops an existing Live Event | stopAsync | {
"repo_name": "hovsepm/azure-sdk-for-java",
"path": "mediaservices/resource-manager/v2018_30_30_preview/src/main/java/com/microsoft/azure/management/mediaservices/v2018_30_30_preview/implementation/LiveEventsInner.java",
"license": "mit",
"size": 111938
} | [
"com.microsoft.rest.ServiceCallback",
"com.microsoft.rest.ServiceFuture"
] | import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture; | import com.microsoft.rest.*; | [
"com.microsoft.rest"
] | com.microsoft.rest; | 87,322 |
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<NamedValuesGetEntityTagResponse> getEntityTagWithResponseAsync(
String resourceGroupName, String serviceName, String namedValueId, Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (serviceName == null) {
return Mono.error(new IllegalArgumentException("Parameter serviceName is required and cannot be null."));
}
if (namedValueId == null) {
return Mono.error(new IllegalArgumentException("Parameter namedValueId is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.getEntityTag(
this.client.getEndpoint(),
resourceGroupName,
serviceName,
namedValueId,
this.client.getApiVersion(),
this.client.getSubscriptionId(),
accept,
context);
} | @ServiceMethod(returns = ReturnType.SINGLE) Mono<NamedValuesGetEntityTagResponse> function( String resourceGroupName, String serviceName, String namedValueId, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (serviceName == null) { return Mono.error(new IllegalArgumentException(STR)); } if (namedValueId == null) { return Mono.error(new IllegalArgumentException(STR)); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( STR)); } final String accept = STR; context = this.client.mergeContext(context); return service .getEntityTag( this.client.getEndpoint(), resourceGroupName, serviceName, namedValueId, this.client.getApiVersion(), this.client.getSubscriptionId(), accept, context); } | /**
* Gets the entity state (Etag) version of the named value specified by its identifier.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param namedValueId Identifier of the NamedValue.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the entity state (Etag) version of the named value specified by its identifier.
*/ | Gets the entity state (Etag) version of the named value specified by its identifier | getEntityTagWithResponseAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/apimanagement/azure-resourcemanager-apimanagement/src/main/java/com/azure/resourcemanager/apimanagement/implementation/NamedValuesClientImpl.java",
"license": "mit",
"size": 110327
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.util.Context",
"com.azure.resourcemanager.apimanagement.models.NamedValuesGetEntityTagResponse"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.util.Context; import com.azure.resourcemanager.apimanagement.models.NamedValuesGetEntityTagResponse; | import com.azure.core.annotation.*; import com.azure.core.util.*; import com.azure.resourcemanager.apimanagement.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 1,688,573 |
@Override
protected void setBorderToNormal(Component c) {} | protected void setBorderToNormal(Component c) {} | /**
* This implementation does nothing, because the {@code rollover}
* property of the {@code JToolBar} class is not used
* in the Synth Look and Feel.
*/ | This implementation does nothing, because the rollover property of the JToolBar class is not used in the Synth Look and Feel | setBorderToRollover | {
"repo_name": "haikuowuya/android_system_code",
"path": "src/javax/swing/plaf/synth/SynthToolBarUI.java",
"license": "apache-2.0",
"size": 20137
} | [
"java.awt.Component"
] | import java.awt.Component; | import java.awt.*; | [
"java.awt"
] | java.awt; | 1,572,084 |
public boolean removeMember(
final String containerId,
final String memberId) {
final UUID locationId = UUID.fromString("8ba35978-138e-41f8-8963-7b1ea2c5f775"); //$NON-NLS-1$
final ApiResourceVersion apiVersion = new ApiResourceVersion("3.1-preview.1"); //$NON-NLS-1$
final Map<String, Object> routeValues = new HashMap<String, Object>();
routeValues.put("containerId", containerId); //$NON-NLS-1$
routeValues.put("memberId", memberId); //$NON-NLS-1$
final VssRestRequest httpRequest = super.createRequest(HttpMethod.DELETE,
locationId,
routeValues,
apiVersion,
VssMediaTypes.APPLICATION_JSON_TYPE);
return super.sendRequest(httpRequest, boolean.class);
} | boolean function( final String containerId, final String memberId) { final UUID locationId = UUID.fromString(STR); final ApiResourceVersion apiVersion = new ApiResourceVersion(STR); final Map<String, Object> routeValues = new HashMap<String, Object>(); routeValues.put(STR, containerId); routeValues.put(STR, memberId); final VssRestRequest httpRequest = super.createRequest(HttpMethod.DELETE, locationId, routeValues, apiVersion, VssMediaTypes.APPLICATION_JSON_TYPE); return super.sendRequest(httpRequest, boolean.class); } | /**
* [Preview API 3.1-preview.1]
*
* @param containerId
*
* @param memberId
*
* @return boolean
*/ | [Preview API 3.1-preview.1] | removeMember | {
"repo_name": "Microsoft/vso-httpclient-java",
"path": "Rest/alm-vss-client/src/main/generated/com/microsoft/alm/visualstudio/services/identity/client/IdentityHttpClientBase.java",
"license": "mit",
"size": 37296
} | [
"com.microsoft.alm.client.HttpMethod",
"com.microsoft.alm.client.VssMediaTypes",
"com.microsoft.alm.client.VssRestRequest",
"com.microsoft.alm.visualstudio.services.webapi.ApiResourceVersion",
"java.util.HashMap",
"java.util.Map",
"java.util.UUID"
] | import com.microsoft.alm.client.HttpMethod; import com.microsoft.alm.client.VssMediaTypes; import com.microsoft.alm.client.VssRestRequest; import com.microsoft.alm.visualstudio.services.webapi.ApiResourceVersion; import java.util.HashMap; import java.util.Map; import java.util.UUID; | import com.microsoft.alm.client.*; import com.microsoft.alm.visualstudio.services.webapi.*; import java.util.*; | [
"com.microsoft.alm",
"java.util"
] | com.microsoft.alm; java.util; | 2,694,178 |
public VirtualMachineInner withAdditionalCapabilities(AdditionalCapabilities additionalCapabilities) {
this.additionalCapabilities = additionalCapabilities;
return this;
} | VirtualMachineInner function(AdditionalCapabilities additionalCapabilities) { this.additionalCapabilities = additionalCapabilities; return this; } | /**
* Set the additionalCapabilities property: Specifies additional capabilities enabled or disabled on the virtual
* machine.
*
* @param additionalCapabilities the additionalCapabilities value to set.
* @return the VirtualMachineInner object itself.
*/ | Set the additionalCapabilities property: Specifies additional capabilities enabled or disabled on the virtual machine | withAdditionalCapabilities | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-compute/src/main/java/com/azure/resourcemanager/compute/fluent/models/VirtualMachineInner.java",
"license": "mit",
"size": 28975
} | [
"com.azure.resourcemanager.compute.models.AdditionalCapabilities"
] | import com.azure.resourcemanager.compute.models.AdditionalCapabilities; | import com.azure.resourcemanager.compute.models.*; | [
"com.azure.resourcemanager"
] | com.azure.resourcemanager; | 584,962 |
private static void oneTimeSetUp() {
leccCALServices = CALServicesTestUtilities.getCommonCALServices(MachineType.LECC, "cal.platform.test.cws");
}
| static void function() { leccCALServices = CALServicesTestUtilities.getCommonCALServices(MachineType.LECC, STR); } | /**
* Performs the setup for the test suite.
*/ | Performs the setup for the test suite | oneTimeSetUp | {
"repo_name": "levans/Open-Quark",
"path": "src/CAL_Platform/test/org/openquark/cal/internal/machine/lecc/CALClassLoader_Test.java",
"license": "bsd-3-clause",
"size": 10874
} | [
"org.openquark.cal.runtime.MachineType",
"org.openquark.cal.services.CALServicesTestUtilities"
] | import org.openquark.cal.runtime.MachineType; import org.openquark.cal.services.CALServicesTestUtilities; | import org.openquark.cal.runtime.*; import org.openquark.cal.services.*; | [
"org.openquark.cal"
] | org.openquark.cal; | 1,685,279 |
@ServiceMethod(returns = ReturnType.SINGLE)
Response<Void> getGenerateStatusWithResponse(UUID operationId, Context context); | @ServiceMethod(returns = ReturnType.SINGLE) Response<Void> getGenerateStatusWithResponse(UUID operationId, Context context); | /**
* Retrieves the status of the recommendation computation or generation process. Invoke this API after calling the
* generation recommendation. The URI of this API is returned in the Location field of the response header.
*
* @param operationId The operation ID, which can be found from the Location field in the generate recommendation
* response header.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response.
*/ | Retrieves the status of the recommendation computation or generation process. Invoke this API after calling the generation recommendation. The URI of this API is returned in the Location field of the response header | getGenerateStatusWithResponse | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/advisor/azure-resourcemanager-advisor/src/main/java/com/azure/resourcemanager/advisor/fluent/RecommendationsClient.java",
"license": "mit",
"size": 7225
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"com.azure.core.util.Context"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; | [
"com.azure.core"
] | com.azure.core; | 656,079 |
public InputChannel monitorInput(String inputChannelName) {
if (inputChannelName == null) {
throw new IllegalArgumentException("inputChannelName must not be null.");
}
InputChannel[] inputChannels = InputChannel.openInputChannelPair(inputChannelName);
nativeRegisterInputChannel(mPtr, inputChannels[0], null, true);
inputChannels[0].dispose(); // don't need to retain the Java object reference
return inputChannels[1];
} | InputChannel function(String inputChannelName) { if (inputChannelName == null) { throw new IllegalArgumentException(STR); } InputChannel[] inputChannels = InputChannel.openInputChannelPair(inputChannelName); nativeRegisterInputChannel(mPtr, inputChannels[0], null, true); inputChannels[0].dispose(); return inputChannels[1]; } | /**
* Creates an input channel that will receive all input from the input dispatcher.
* @param inputChannelName The input channel name.
* @return The input channel.
*/ | Creates an input channel that will receive all input from the input dispatcher | monitorInput | {
"repo_name": "OmniEvo/android_frameworks_base",
"path": "services/core/java/com/android/server/input/InputManagerService.java",
"license": "gpl-3.0",
"size": 78202
} | [
"android.view.InputChannel"
] | import android.view.InputChannel; | import android.view.*; | [
"android.view"
] | android.view; | 195,226 |
public boolean canCreateWarpSign(Player player) {
return player == null ? false : player.hasPermission("signprintf.createwarp");
}
| boolean function(Player player) { return player == null ? false : player.hasPermission(STR); } | /**
* Check if a player can create a warp sign.
*
* @param player The player to check.
* @return True if the player can create a warp sign, otherwise false.
*/ | Check if a player can create a warp sign | canCreateWarpSign | {
"repo_name": "Coryf88/SignPrintf",
"path": "src/main/java/com/coryf88/bukkit/signprintf/Settings.java",
"license": "gpl-3.0",
"size": 2981
} | [
"org.bukkit.entity.Player"
] | import org.bukkit.entity.Player; | import org.bukkit.entity.*; | [
"org.bukkit.entity"
] | org.bukkit.entity; | 895,412 |
boolean touch(String resourceId, Object debugInfo) throws IOException; | boolean touch(String resourceId, Object debugInfo) throws IOException; | /**
* Does a resource with this name exist? If so, update the last-accessed time for the
* resource
* @param resourceId id of the resource
* @param debugInfo helper object for debugging
* @return true, if the resource is present in the storage, false otherwise
* @throws IOException
*/ | Does a resource with this name exist? If so, update the last-accessed time for the resource | touch | {
"repo_name": "MaTriXy/fresco",
"path": "imagepipeline-base/src/main/java/com/facebook/cache/disk/DiskStorage.java",
"license": "bsd-3-clause",
"size": 5980
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 337,612 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.