| method (string, 13–441k chars) | clean_method (string, 7–313k chars) | doc (string, 17–17.3k chars) | comment (string, 3–1.42k chars) | method_name (string, 1–273 chars) | extra (dict) | imports (list) | imports_info (string, 19–34.8k chars) | cluster_imports_info (string, 15–3.66k chars) | libraries (list) | libraries_info (string, 6–661 chars) | id (int64, 0–2.92M) |
|---|---|---|---|---|---|---|---|---|---|---|---|
default Reader toReaderUtf8() {
return toReader(StandardCharsets.UTF_8);
}
|
default Reader toReaderUtf8() { return toReader(StandardCharsets.UTF_8); }
|
/**
* Returns a new {@link Reader} that is sourced from this data and decoded using
* {@link StandardCharsets#UTF_8}.
*/
|
Returns a new <code>Reader</code> that is sourced from this data and decoded using <code>StandardCharsets#UTF_8</code>
|
toReaderUtf8
|
{
"repo_name": "anuraaga/armeria",
"path": "core/src/main/java/com/linecorp/armeria/common/HttpData.java",
"license": "apache-2.0",
"size": 13337
}
|
[
"java.io.Reader",
"java.nio.charset.StandardCharsets"
] |
import java.io.Reader; import java.nio.charset.StandardCharsets;
|
import java.io.*; import java.nio.charset.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 2,381,267
|
private static void extractJarEntry(String jarFile, String entryName, String outFile) throws IOException {
JarFile jf = new JarFile(jarFile);
ZipEntry zipEntry = jf.getEntry(entryName);
if (zipEntry == null) {
throw new IOException("Entry not found: " + entryName);
}
InputStream in = null;
OutputStream out = null;
try {
in = jf.getInputStream(zipEntry);
out = new FileOutputStream(outFile);
IOUtil.copy(in, out);
} finally {
IOUtil.closeQuietly(in);
IOUtil.closeQuietly(out);
}
}
|
static void function(String jarFile, String entryName, String outFile) throws IOException { JarFile jf = new JarFile(jarFile); ZipEntry zipEntry = jf.getEntry(entryName); if (zipEntry == null) { throw new IOException(STR + entryName); } InputStream in = null; OutputStream out = null; try { in = jf.getInputStream(zipEntry); out = new FileOutputStream(outFile); IOUtil.copy(in, out); } finally { IOUtil.closeQuietly(in); IOUtil.closeQuietly(out); } }
|
/**
* Extract an entry from a jarfile to a local file.
*
* @param jarFile the jarfile
* @param entryName the entry
* @param outFile the local file
* @throws IOException
*/
|
Extract an entry from a jarfile to a local file
|
extractJarEntry
|
{
"repo_name": "vjpudelski/CloudCoder",
"path": "CloudCoderBuilder2/src/org/cloudcoder/builder2/server/Builder2DaemonController.java",
"license": "agpl-3.0",
"size": 8986
}
|
[
"java.io.FileOutputStream",
"java.io.IOException",
"java.io.InputStream",
"java.io.OutputStream",
"java.util.jar.JarFile",
"java.util.zip.ZipEntry",
"org.cloudcoder.daemon.IOUtil"
] |
import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.jar.JarFile; import java.util.zip.ZipEntry; import org.cloudcoder.daemon.IOUtil;
|
import java.io.*; import java.util.jar.*; import java.util.zip.*; import org.cloudcoder.daemon.*;
|
[
"java.io",
"java.util",
"org.cloudcoder.daemon"
] |
java.io; java.util; org.cloudcoder.daemon;
| 1,333,979
|
public ColGroupDataHolder getRowIdRlePage() {
// not required for column group storage
return null;
}
|
ColGroupDataHolder function() { return null; }
|
/**
* for column group storage its not required
*/
|
for column group storage its not required
|
getRowIdRlePage
|
{
"repo_name": "shivangi1015/incubator-carbondata",
"path": "processing/src/main/java/org/apache/carbondata/core/datastore/columnar/ColGroupBlockStorage.java",
"license": "apache-2.0",
"size": 2910
}
|
[
"org.apache.carbondata.processing.store.colgroup.ColGroupDataHolder"
] |
import org.apache.carbondata.processing.store.colgroup.ColGroupDataHolder;
|
import org.apache.carbondata.processing.store.colgroup.*;
|
[
"org.apache.carbondata"
] |
org.apache.carbondata;
| 2,148,126
|
public Exchange getExchange();
|
Exchange function();
|
/**
* Return exchange
*
* @return
*/
|
Return exchange
|
getExchange
|
{
"repo_name": "bcvsolutions/CzechIdMng",
"path": "Realization/backend/core/core-impl/src/main/java/eu/bcvsolutions/idm/core/notification/domain/SendOperation.java",
"license": "mit",
"size": 580
}
|
[
"org.apache.camel.Exchange"
] |
import org.apache.camel.Exchange;
|
import org.apache.camel.*;
|
[
"org.apache.camel"
] |
org.apache.camel;
| 2,714,733
|
public @NonNull List<String> getLog(int maxLines) throws IOException {
if (maxLines == 0) {
return Collections.emptyList();
}
int lines = 0;
long filePointer;
final List<String> lastLines = new ArrayList<>(Math.min(maxLines, 128));
final List<Byte> bytes = new ArrayList<>();
try (RandomAccessFile fileHandler = new RandomAccessFile(getLogFile(), "r")) {
long fileLength = fileHandler.length() - 1;
for (filePointer = fileLength; filePointer != -1 && maxLines != lines; filePointer--) {
fileHandler.seek(filePointer);
byte readByte = fileHandler.readByte();
if (readByte == 0x0A) {
if (filePointer < fileLength) {
lines = lines + 1;
lastLines.add(convertBytesToString(bytes));
bytes.clear();
}
} else if (readByte != 0xD) {
bytes.add(readByte);
}
}
}
if (lines != maxLines) {
lastLines.add(convertBytesToString(bytes));
}
Collections.reverse(lastLines);
// If the log has been truncated, include that information.
// Use set (replaces the first element) rather than add so that
// the list doesn't grow beyond the specified maximum number of lines.
if (lines == maxLines) {
lastLines.set(0, "[...truncated " + Functions.humanReadableByteSize(filePointer)+ "...]");
}
return ConsoleNote.removeNotes(lastLines);
}
|
@NonNull List<String> function(int maxLines) throws IOException { if (maxLines == 0) { return Collections.emptyList(); } int lines = 0; long filePointer; final List<String> lastLines = new ArrayList<>(Math.min(maxLines, 128)); final List<Byte> bytes = new ArrayList<>(); try (RandomAccessFile fileHandler = new RandomAccessFile(getLogFile(), "r")) { long fileLength = fileHandler.length() - 1; for (filePointer = fileLength; filePointer != -1 && maxLines != lines; filePointer--) { fileHandler.seek(filePointer); byte readByte = fileHandler.readByte(); if (readByte == 0x0A) { if (filePointer < fileLength) { lines = lines + 1; lastLines.add(convertBytesToString(bytes)); bytes.clear(); } } else if (readByte != 0xD) { bytes.add(readByte); } } } if (lines != maxLines) { lastLines.add(convertBytesToString(bytes)); } Collections.reverse(lastLines); if (lines == maxLines) { lastLines.set(0, STR + Functions.humanReadableByteSize(filePointer)+ "...]"); } return ConsoleNote.removeNotes(lastLines); }
|
/**
* Gets the log of the build as a list of strings (one per log line).
* The number of lines returned is constrained by the maxLines parameter.
*
* @param maxLines The maximum number of log lines to return. If the log
* is bigger than this, only the most recent lines are returned.
* @return A list of log lines. Will have no more than maxLines elements.
* @throws IOException If there is a problem reading the log file.
*/
|
Gets the log of the build as a list of strings (one per log line). The number of lines returned is constrained by the maxLines parameter
|
getLog
|
{
"repo_name": "rsandell/jenkins",
"path": "core/src/main/java/hudson/model/Run.java",
"license": "mit",
"size": 96962
}
|
[
"edu.umd.cs.findbugs.annotations.NonNull",
"hudson.console.ConsoleNote",
"java.io.IOException",
"java.io.RandomAccessFile",
"java.util.ArrayList",
"java.util.Collections",
"java.util.List"
] |
import edu.umd.cs.findbugs.annotations.NonNull; import hudson.console.ConsoleNote; import java.io.IOException; import java.io.RandomAccessFile; import java.util.ArrayList; import java.util.Collections; import java.util.List;
|
import edu.umd.cs.findbugs.annotations.*; import hudson.console.*; import java.io.*; import java.util.*;
|
[
"edu.umd.cs",
"hudson.console",
"java.io",
"java.util"
] |
edu.umd.cs; hudson.console; java.io; java.util;
| 2,082,118
|
@Override
public Date getDate(int columnIndex) throws SQLException {
return (Date) get(columnIndex);
}
|
Date function(int columnIndex) throws SQLException { return (Date) get(columnIndex); }
|
/**
* Returns the value as an java.sql.Date.
*
* @param columnIndex (1,2,...)
* @return the value
*/
|
Returns the value as an java.sql.Date
|
getDate
|
{
"repo_name": "miloszpiglas/h2mod",
"path": "src/main/org/h2/tools/SimpleResultSet.java",
"license": "mpl-2.0",
"size": 55168
}
|
[
"java.sql.Date",
"java.sql.SQLException"
] |
import java.sql.Date; import java.sql.SQLException;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 1,053,695
|
public void setSearchResults(List<SearchResult> searchResults) {
this.searchResults = searchResults;
}
|
void function(List<SearchResult> searchResults) { this.searchResults = searchResults; }
|
/**
* Set list of data sets
* @param datasets list of data sets
*/
|
Set list of data sets
|
setSearchResults
|
{
"repo_name": "AKSW/Tapioca",
"path": "Tapioca_STP_code/tapioca.webinterface/src/main/java/org/aksw/simba/tapioca/webinterface/DataBean.java",
"license": "lgpl-3.0",
"size": 6500
}
|
[
"java.util.List",
"org.aksw.simba.tapioca.server.data.SearchResult"
] |
import java.util.List; import org.aksw.simba.tapioca.server.data.SearchResult;
|
import java.util.*; import org.aksw.simba.tapioca.server.data.*;
|
[
"java.util",
"org.aksw.simba"
] |
java.util; org.aksw.simba;
| 502,855
|
@Test
public void testGetSetDrawAsLine() {
CategoryMarker m = new CategoryMarker("X");
m.addChangeListener(this);
this.lastEvent = null;
assertEquals(false, m.getDrawAsLine());
m.setDrawAsLine(true);
assertEquals(true, m.getDrawAsLine());
assertEquals(m, this.lastEvent.getMarker());
}
|
void function() { CategoryMarker m = new CategoryMarker("X"); m.addChangeListener(this); this.lastEvent = null; assertEquals(false, m.getDrawAsLine()); m.setDrawAsLine(true); assertEquals(true, m.getDrawAsLine()); assertEquals(m, this.lastEvent.getMarker()); }
|
/**
* Some checks for the getDrawAsLine() and setDrawAsLine() methods.
*/
|
Some checks for the getDrawAsLine() and setDrawAsLine() methods
|
testGetSetDrawAsLine
|
{
"repo_name": "oskopek/jfreechart-fse",
"path": "src/test/java/org/jfree/chart/plot/CategoryMarkerTest.java",
"license": "lgpl-2.1",
"size": 7874
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 1,476,120
|
public static void safeTransform(@Nonnull Source source, @Nonnull Result out) throws TransformerException,
SAXException {
InputSource src = SAXSource.sourceToInputSource(source);
if (src != null) {
SAXTransformerFactory stFactory = (SAXTransformerFactory) TransformerFactory.newInstance();
stFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
try {
xmlReader.setFeature(FEATURE_HTTP_XML_ORG_SAX_FEATURES_EXTERNAL_GENERAL_ENTITIES, false);
}
catch (SAXException ignored) { }
try {
xmlReader.setFeature(FEATURE_HTTP_XML_ORG_SAX_FEATURES_EXTERNAL_PARAMETER_ENTITIES, false);
}
catch (SAXException ignored) { }
// defend against XXE
// the above features should strip out entities - however the feature may not be supported depending
// on the xml implementation used and this is out of our control.
// So add a fallback plan if all else fails.
xmlReader.setEntityResolver(RestrictiveEntityResolver.INSTANCE);
SAXSource saxSource = new SAXSource(xmlReader, src);
_transform(saxSource, out);
}
else {
// for some reason we could not convert source
// this applies to DOMSource and StAXSource - and possibly 3rd party implementations...
// a DOMSource can already be compromised as it is parsed by the time it gets to us.
if (SystemProperties.getBoolean(DISABLED_PROPERTY_NAME)) {
LOGGER.log(Level.WARNING, "XML external entity (XXE) prevention has been disabled by the system " +
"property {0}=true Your system may be vulnerable to XXE attacks.", DISABLED_PROPERTY_NAME);
if (LOGGER.isLoggable(Level.FINE)) {
LOGGER.log(Level.FINE, "Caller stack trace: ", new Exception("XXE Prevention caller history"));
}
_transform(source, out);
}
else {
throw new TransformerException("Could not convert source of type " + source.getClass() + " and " +
"XXEPrevention is enabled.");
}
}
}
|
static void function(@Nonnull Source source, @Nonnull Result out) throws TransformerException, SAXException { InputSource src = SAXSource.sourceToInputSource(source); if (src != null) { SAXTransformerFactory stFactory = (SAXTransformerFactory) TransformerFactory.newInstance(); stFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); XMLReader xmlReader = XMLReaderFactory.createXMLReader(); try { xmlReader.setFeature(FEATURE_HTTP_XML_ORG_SAX_FEATURES_EXTERNAL_GENERAL_ENTITIES, false); } catch (SAXException ignored) { } try { xmlReader.setFeature(FEATURE_HTTP_XML_ORG_SAX_FEATURES_EXTERNAL_PARAMETER_ENTITIES, false); } catch (SAXException ignored) { } xmlReader.setEntityResolver(RestrictiveEntityResolver.INSTANCE); SAXSource saxSource = new SAXSource(xmlReader, src); _transform(saxSource, out); } else { if (SystemProperties.getBoolean(DISABLED_PROPERTY_NAME)) { LOGGER.log(Level.WARNING, STR + STR, DISABLED_PROPERTY_NAME); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.log(Level.FINE, STR, new Exception(STR)); } _transform(source, out); } else { throw new TransformerException(STR + source.getClass() + STR + STR); } } }
|
/**
* Transform the source to the output in a manner that is protected against XXE attacks.
* If the transform can not be completed safely then an IOException is thrown.
* Note - to turn off safety set the system property <code>disableXXEPrevention</code> to <code>true</code>.
* @param source The XML input to transform. - This should be a <code>StreamSource</code> or a
* <code>SAXSource</code> in order to be able to prevent XXE attacks.
* @param out The Result of transforming the <code>source</code>.
*/
|
Transform the source to the output in a manner that is protected against XXE attacks. If the transform can not be completed safely then an IOException is thrown. Note - to turn off safety set the system property <code>disableXXEPrevention</code> to <code>true</code>
|
safeTransform
|
{
"repo_name": "aldaris/jenkins",
"path": "core/src/main/java/jenkins/util/xml/XMLUtils.java",
"license": "mit",
"size": 11553
}
|
[
"java.util.logging.Level",
"javax.annotation.Nonnull",
"javax.xml.XMLConstants",
"javax.xml.transform.Result",
"javax.xml.transform.Source",
"javax.xml.transform.TransformerException",
"javax.xml.transform.TransformerFactory",
"javax.xml.transform.sax.SAXSource",
"javax.xml.transform.sax.SAXTransformerFactory",
"org.xml.sax.InputSource",
"org.xml.sax.SAXException",
"org.xml.sax.XMLReader",
"org.xml.sax.helpers.XMLReaderFactory"
] |
import java.util.logging.Level; import javax.annotation.Nonnull; import javax.xml.XMLConstants; import javax.xml.transform.Result; import javax.xml.transform.Source; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.sax.SAXSource; import javax.xml.transform.sax.SAXTransformerFactory; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.XMLReaderFactory;
|
import java.util.logging.*; import javax.annotation.*; import javax.xml.*; import javax.xml.transform.*; import javax.xml.transform.sax.*; import org.xml.sax.*; import org.xml.sax.helpers.*;
|
[
"java.util",
"javax.annotation",
"javax.xml",
"org.xml.sax"
] |
java.util; javax.annotation; javax.xml; org.xml.sax;
| 1,855,016
|
public void run( String[] args ) throws Exception {
// Check
File baseDirectory = null;
if( args.length != 2
|| ! (baseDirectory = new File( args[ 0 ])).exists())
throw new RuntimeException( "The path of the module's directory was expected as an argument." );
// Generate
File f = new File( baseDirectory, "target/docs/apidocs/ui/swagger-websocket.json" );
generate( args[1], f );
}
|
void function( String[] args ) throws Exception { File baseDirectory = null; if( args.length != 2 ! (baseDirectory = new File( args[ 0 ])).exists()) throw new RuntimeException( STR ); File f = new File( baseDirectory, STR ); generate( args[1], f ); }
|
/**
* The method that does the job.
* @param args
* @throws Exception
*/
|
The method that does the job
|
run
|
{
"repo_name": "gibello/roboconf",
"path": "miscellaneous/roboconf-swagger/src/main/java/net/roboconf/swagger/GenerateSwaggerJsonForWebSockets.java",
"license": "apache-2.0",
"size": 9597
}
|
[
"java.io.File"
] |
import java.io.File;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,229,821
|
private Map<String, Map<String, Integer>> getCompRoleCountMap() {
Map<String, Map<String, Integer>> compRoleCountMap = new HashMap<String, Map<String, Integer>>();
for (NodeConfig nodeConf : this.clusterConf.getNodes().values()) {
for (String component : nodeConf.getRoles().keySet()) {
Map<String, Integer> roleCountMap = new HashMap<String, Integer>();
Set<String> compRole = nodeConf.getRoles().get(component);
if (compRoleCountMap.get(component) == null) {
for (String role : compRole) {
roleCountMap.put(role, 1);
}
} else {
roleCountMap = compRoleCountMap.get(component);
for (String role : compRole) {
if (roleCountMap.get(role) == null) {
roleCountMap.put(role, 1);
} else {
int count = roleCountMap.get(role);
count++;
roleCountMap.put(role, count);
}
}
}
compRoleCountMap.put(component, roleCountMap);
}
}
return compRoleCountMap;
}
|
Map<String, Map<String, Integer>> function() { Map<String, Map<String, Integer>> compRoleCountMap = new HashMap<String, Map<String, Integer>>(); for (NodeConfig nodeConf : this.clusterConf.getNodes().values()) { for (String component : nodeConf.getRoles().keySet()) { Map<String, Integer> roleCountMap = new HashMap<String, Integer>(); Set<String> compRole = nodeConf.getRoles().get(component); if (compRoleCountMap.get(component) == null) { for (String role : compRole) { roleCountMap.put(role, 1); } } else { roleCountMap = compRoleCountMap.get(component); for (String role : compRole) { if (roleCountMap.get(role) == null) { roleCountMap.put(role, 1); } else { int count = roleCountMap.get(role); count++; roleCountMap.put(role, count); } } } compRoleCountMap.put(component, roleCountMap); } } return compRoleCountMap; }
|
/**
* Gets the comp role count map.
*
* @return the comp role count map
*/
|
Gets the comp role count map
|
getCompRoleCountMap
|
{
"repo_name": "zengzhaozheng/ankush",
"path": "ankush/src/main/java/com/impetus/ankush2/framework/monitor/AbstractMonitor.java",
"license": "lgpl-3.0",
"size": 63629
}
|
[
"com.impetus.ankush2.framework.config.NodeConfig",
"java.util.HashMap",
"java.util.Map",
"java.util.Set"
] |
import com.impetus.ankush2.framework.config.NodeConfig; import java.util.HashMap; import java.util.Map; import java.util.Set;
|
import com.impetus.ankush2.framework.config.*; import java.util.*;
|
[
"com.impetus.ankush2",
"java.util"
] |
com.impetus.ankush2; java.util;
| 1,111,766
|
private void doUpdate() throws SchedulerException {
if ("SIMPLE".equalsIgnoreCase(this.timerOptions.getType())) {
scheduleSimpleInterval(
this.timerOptions.getSimpleInterval() * this.timerOptions.getSimpleTimeUnitMultiplier());
return;
}
final String cronExpression = this.timerOptions.getCronExpression();
scheduleCronInterval(cronExpression);
}
|
void function() throws SchedulerException { if (STR.equalsIgnoreCase(this.timerOptions.getType())) { scheduleSimpleInterval( this.timerOptions.getSimpleInterval() * this.timerOptions.getSimpleTimeUnitMultiplier()); return; } final String cronExpression = this.timerOptions.getCronExpression(); scheduleCronInterval(cronExpression); }
|
/**
* Perform update operation which internally emits a {@link WireRecord} every
* interval
*
* @throws SchedulerException
* if job scheduling fails
*/
|
Perform update operation which internally emits a <code>WireRecord</code> every interval
|
doUpdate
|
{
"repo_name": "cdealti/kura",
"path": "kura/org.eclipse.kura.wire.component.provider/src/main/java/org/eclipse/kura/internal/wire/timer/Timer.java",
"license": "epl-1.0",
"size": 9486
}
|
[
"org.quartz.SchedulerException"
] |
import org.quartz.SchedulerException;
|
import org.quartz.*;
|
[
"org.quartz"
] |
org.quartz;
| 2,089,028
|
private List sortPlane(Collection nodes)
{
List<Object> l = new ArrayList<Object>();
if (nodes == null) return l;
Iterator i = nodes.iterator();
while (i.hasNext()) {
l.add(i.next());
}
|
List function(Collection nodes) { List<Object> l = new ArrayList<Object>(); if (nodes == null) return l; Iterator i = nodes.iterator(); while (i.hasNext()) { l.add(i.next()); }
|
/**
* Sorts the passed nodes by row.
*
* @param nodes The nodes to sort.
* @return See above.
*/
|
Sorts the passed nodes by row
|
sortPlane
|
{
"repo_name": "tp81/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/agents/metadata/editor/EditorModel.java",
"license": "gpl-2.0",
"size": 128116
}
|
[
"java.util.ArrayList",
"java.util.Collection",
"java.util.Iterator",
"java.util.List"
] |
import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,084,707
|
@Generated
@Selector("minimumFontSize")
@NFloat
public native double minimumFontSize();
|
@Selector(STR) native double function();
|
/**
* default is 0.0. actual min may be pinned to something readable. used if adjustsFontSizeToFitWidth is YES
*/
|
default is 0.0. actual min may be pinned to something readable. used if adjustsFontSizeToFitWidth is YES
|
minimumFontSize
|
{
"repo_name": "multi-os-engine/moe-core",
"path": "moe.apple/moe.platform.ios/src/main/java/apple/uikit/UITextField.java",
"license": "apache-2.0",
"size": 46138
}
|
[
"org.moe.natj.objc.ann.Selector"
] |
import org.moe.natj.objc.ann.Selector;
|
import org.moe.natj.objc.ann.*;
|
[
"org.moe.natj"
] |
org.moe.natj;
| 2,335,790
|
public String getGrammarName( Grammar<PasswordModifyResponseContainer> grammar )
{
return "PASSWORD_MODIFY_RESPONSE_GRAMMER";
}
|
String function( Grammar<PasswordModifyResponseContainer> grammar ) { return STR; }
|
/**
* Get the grammar name
*
* @param grammar The grammar class
* @return The grammar name
*/
|
Get the grammar name
|
getGrammarName
|
{
"repo_name": "darranl/directory-shared",
"path": "ldap/extras/codec/src/main/java/org/apache/directory/api/ldap/extras/extended/ads_impl/pwdModify/PasswordModifyResponseStatesEnum.java",
"license": "apache-2.0",
"size": 2644
}
|
[
"org.apache.directory.api.asn1.ber.grammar.Grammar"
] |
import org.apache.directory.api.asn1.ber.grammar.Grammar;
|
import org.apache.directory.api.asn1.ber.grammar.*;
|
[
"org.apache.directory"
] |
org.apache.directory;
| 2,845,627
|
@NotNull
EntityIterable findStartingWith(@NotNull final String entityType,
@NotNull final String propertyName,
@NotNull final String value);
|
EntityIterable findStartingWith(@NotNull final String entityType, @NotNull final String propertyName, @NotNull final String value);
|
/**
* Returns {@linkplain EntityIterable} with entities of specified type which have {@linkplain String} values of
* specified property starting with specified {@code value}.
*
* @param entityType entity type
* @param propertyName name of the property to search for
* @param value {@linkplain String} value which searched properties are starting with
* @return {@linkplain EntityIterable} instance
* @see EntityIterable
*/
|
Returns EntityIterable with entities of specified type which have String values of specified property starting with specified value
|
findStartingWith
|
{
"repo_name": "JetBrains/xodus",
"path": "openAPI/src/main/java/jetbrains/exodus/entitystore/StoreTransaction.java",
"license": "apache-2.0",
"size": 26380
}
|
[
"org.jetbrains.annotations.NotNull"
] |
import org.jetbrains.annotations.NotNull;
|
import org.jetbrains.annotations.*;
|
[
"org.jetbrains.annotations"
] |
org.jetbrains.annotations;
| 1,414,973
|
public static void setMaxConnectionsPerHost(HttpClient httpClient,
int max) {
// would have been nice if there was a common interface
if (httpClient.getConnectionManager() instanceof ThreadSafeClientConnManager) {
ThreadSafeClientConnManager mgr = (ThreadSafeClientConnManager)httpClient.getConnectionManager();
mgr.setDefaultMaxPerRoute(max);
} else if (httpClient.getConnectionManager() instanceof PoolingClientConnectionManager) {
PoolingClientConnectionManager mgr = (PoolingClientConnectionManager)httpClient.getConnectionManager();
mgr.setDefaultMaxPerRoute(max);
}
}
|
static void function(HttpClient httpClient, int max) { if (httpClient.getConnectionManager() instanceof ThreadSafeClientConnManager) { ThreadSafeClientConnManager mgr = (ThreadSafeClientConnManager)httpClient.getConnectionManager(); mgr.setDefaultMaxPerRoute(max); } else if (httpClient.getConnectionManager() instanceof PoolingClientConnectionManager) { PoolingClientConnectionManager mgr = (PoolingClientConnectionManager)httpClient.getConnectionManager(); mgr.setDefaultMaxPerRoute(max); } }
|
/**
* Set max connections allowed per host. This call will only work when
* {@link ThreadSafeClientConnManager} or
* {@link PoolingClientConnectionManager} is used.
*/
|
Set max connections allowed per host. This call will only work when <code>ThreadSafeClientConnManager</code> or <code>PoolingClientConnectionManager</code> is used
|
setMaxConnectionsPerHost
|
{
"repo_name": "netboynb/netty-tool",
"path": "src/main/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java",
"license": "gpl-2.0",
"size": 11349
}
|
[
"org.apache.http.client.HttpClient",
"org.apache.http.impl.conn.PoolingClientConnectionManager"
] |
import org.apache.http.client.HttpClient; import org.apache.http.impl.conn.PoolingClientConnectionManager;
|
import org.apache.http.client.*; import org.apache.http.impl.conn.*;
|
[
"org.apache.http"
] |
org.apache.http;
| 1,439,734
|
public JSONObject getUserByEmail(final String email) throws ServiceException {
try {
return userRepository.getByEmail(email);
} catch (final RepositoryException e) {
LOGGER.log(Level.ERROR, "Gets user by email[" + email + "] failed", e);
throw new ServiceException(e);
}
}
/**
* Gets users by the specified request json object.
*
* @param requestJSONObject the specified request json object, for example,
* <pre>
* {
* "paginationCurrentPageNum": 1,
* "paginationPageSize": 20,
* "paginationWindowSize": 10,
* }, see {@link Pagination} for more details
* </pre>
* @return for example,
* <pre>
* {
* "pagination": {
* "paginationPageCount": 100,
* "paginationPageNums": [1, 2, 3, 4, 5]
* },
* "users": [{
* "oId": "",
* "userName": "",
* "userEmail": "",
* "userPassword": "",
* "roleName": ""
* }, ....]
* }
|
JSONObject function(final String email) throws ServiceException { try { return userRepository.getByEmail(email); } catch (final RepositoryException e) { LOGGER.log(Level.ERROR, STR + email + STR, e); throw new ServiceException(e); } } /** * Gets users by the specified request json object. * * @param requestJSONObject the specified request json object, for example, * <pre> * { * STR: 1, * STR: 20, * STR: 10, * }, see {@link Pagination} for more details * </pre> * @return for example, * <pre> * { * STR: { * STR: 100, * STR: [1, 2, 3, 4, 5] * }, * "users": [{ * "oId": STRuserName": STRuserEmail": STRuserPassword": STRroleNameSTR" * }, ....] * }
|
/**
* Gets a user by the specified email.
*
* @param email the specified email
* @return user, returns {@code null} if not found
* @throws ServiceException service exception
*/
|
Gets a user by the specified email
|
getUserByEmail
|
{
"repo_name": "sshiting/solo",
"path": "src/main/java/org/b3log/solo/service/UserQueryService.java",
"license": "apache-2.0",
"size": 10032
}
|
[
"org.b3log.latke.logging.Level",
"org.b3log.latke.model.Pagination",
"org.b3log.latke.repository.RepositoryException",
"org.b3log.latke.service.ServiceException",
"org.json.JSONObject"
] |
import org.b3log.latke.logging.Level; import org.b3log.latke.model.Pagination; import org.b3log.latke.repository.RepositoryException; import org.b3log.latke.service.ServiceException; import org.json.JSONObject;
|
import org.b3log.latke.logging.*; import org.b3log.latke.model.*; import org.b3log.latke.repository.*; import org.b3log.latke.service.*; import org.json.*;
|
[
"org.b3log.latke",
"org.json"
] |
org.b3log.latke; org.json;
| 2,361,597
|
@Override
public List<InsightsDataArchivalConfig> getAllArchivalRecord() throws InsightsCustomException {
List<InsightsDataArchivalConfig> archivedRecordList = new ArrayList<>();
try {
archivedRecordList = dataArchivalConfigdal.getAllArchivalRecord();
} catch (Exception e) {
log.error("Error getting all archival records {}", e.getMessage());
throw new InsightsCustomException(e.getMessage());
}
return archivedRecordList;
}
|
List<InsightsDataArchivalConfig> function() throws InsightsCustomException { List<InsightsDataArchivalConfig> archivedRecordList = new ArrayList<>(); try { archivedRecordList = dataArchivalConfigdal.getAllArchivalRecord(); } catch (Exception e) { log.error(STR, e.getMessage()); throw new InsightsCustomException(e.getMessage()); } return archivedRecordList; }
|
/**
*
* Method to get all Data Archival records
*
* @return List<InsightsDataArchivalConfig>
* @throws InsightsCustomException
*/
|
Method to get all Data Archival records
|
getAllArchivalRecord
|
{
"repo_name": "CognizantOneDevOps/Insights",
"path": "PlatformService/src/main/java/com/cognizant/devops/platformservice/dataarchival/service/DataArchivalServiceImpl.java",
"license": "apache-2.0",
"size": 10701
}
|
[
"com.cognizant.devops.platformcommons.exception.InsightsCustomException",
"com.cognizant.devops.platformdal.dataArchivalConfig.InsightsDataArchivalConfig",
"java.util.ArrayList",
"java.util.List"
] |
import com.cognizant.devops.platformcommons.exception.InsightsCustomException; import com.cognizant.devops.platformdal.dataArchivalConfig.InsightsDataArchivalConfig; import java.util.ArrayList; import java.util.List;
|
import com.cognizant.devops.platformcommons.exception.*; import com.cognizant.devops.platformdal.*; import java.util.*;
|
[
"com.cognizant.devops",
"java.util"
] |
com.cognizant.devops; java.util;
| 1,200,477
|
private DefaultFullHttpResponse responseFor(HttpVersion httpVersion,
HttpResponseStatus status) {
return responseFor(httpVersion, status, (ByteBuf) null, 0);
}
|
DefaultFullHttpResponse function(HttpVersion httpVersion, HttpResponseStatus status) { return responseFor(httpVersion, status, (ByteBuf) null, 0); }
|
/**
* Factory for {@link DefaultFullHttpResponse}s.
*
* @param httpVersion
* @param status
* @return
*/
|
Factory for <code>DefaultFullHttpResponse</code>s
|
responseFor
|
{
"repo_name": "Banno/LittleProxy",
"path": "src/main/java/org/littleshoot/proxy/impl/ClientToProxyConnection.java",
"license": "apache-2.0",
"size": 49847
}
|
[
"io.netty.buffer.ByteBuf",
"io.netty.handler.codec.http.DefaultFullHttpResponse",
"io.netty.handler.codec.http.HttpResponseStatus",
"io.netty.handler.codec.http.HttpVersion"
] |
import io.netty.buffer.ByteBuf; import io.netty.handler.codec.http.DefaultFullHttpResponse; import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpVersion;
|
import io.netty.buffer.*; import io.netty.handler.codec.http.*;
|
[
"io.netty.buffer",
"io.netty.handler"
] |
io.netty.buffer; io.netty.handler;
| 1,734,616
|
private HttpURLConnection getHttpConnection(URL url) throws IOException, ProtocolException {
HttpURLConnection con = (HttpURLConnection) url.openConnection();
con.setDoInput(true);
con.setDoOutput(true);
con.setUseCaches(false);
con.setRequestMethod("GET");
return con;
}
|
HttpURLConnection function(URL url) throws IOException, ProtocolException { HttpURLConnection con = (HttpURLConnection) url.openConnection(); con.setDoInput(true); con.setDoOutput(true); con.setUseCaches(false); con.setRequestMethod("GET"); return con; }
|
/**
* This method creates a connection to a webpage and then returns the connection
*
* @param url The Http Address to connect to
* @return The connection to the Http address
*/
|
This method creates a connection to a webpage and then returns the connection
|
getHttpConnection
|
{
"repo_name": "kgibm/open-liberty",
"path": "dev/com.ibm.ws.request.timing_fat/fat/src/com/ibm/ws/request/timing/fat/SlowRequestTiming.java",
"license": "epl-1.0",
"size": 40760
}
|
[
"java.io.IOException",
"java.net.HttpURLConnection",
"java.net.ProtocolException"
] |
import java.io.IOException; import java.net.HttpURLConnection; import java.net.ProtocolException;
|
import java.io.*; import java.net.*;
|
[
"java.io",
"java.net"
] |
java.io; java.net;
| 6,595
|
public void terminate() {
synchronized (deviceAddressGattMap) {
for (BluetoothGatt bluetoothGatt : deviceAddressGattMap.values()) {
bluetoothGatt.disconnect();
bluetoothGatt.close();
}
deviceAddressGattMap.clear();
}
synchronized (midiInputDevicesMap) {
for (Set<MidiInputDevice> midiInputDevices : midiInputDevicesMap.values()) {
for (MidiInputDevice midiInputDevice : midiInputDevices) {
midiInputDevice.setOnMidiInputEventListener(null);
}
midiInputDevices.clear();
}
midiInputDevicesMap.clear();
}
synchronized (midiOutputDevicesMap) {
midiOutputDevicesMap.clear();
}
if (bondingBroadcastReceiver != null) {
context.unregisterReceiver(bondingBroadcastReceiver);
bondingBroadcastReceiver = null;
}
}
private BondingBroadcastReceiver bondingBroadcastReceiver;
|
void function() { synchronized (deviceAddressGattMap) { for (BluetoothGatt bluetoothGatt : deviceAddressGattMap.values()) { bluetoothGatt.disconnect(); bluetoothGatt.close(); } deviceAddressGattMap.clear(); } synchronized (midiInputDevicesMap) { for (Set<MidiInputDevice> midiInputDevices : midiInputDevicesMap.values()) { for (MidiInputDevice midiInputDevice : midiInputDevices) { midiInputDevice.setOnMidiInputEventListener(null); } midiInputDevices.clear(); } midiInputDevicesMap.clear(); } synchronized (midiOutputDevicesMap) { midiOutputDevicesMap.clear(); } if (bondingBroadcastReceiver != null) { context.unregisterReceiver(bondingBroadcastReceiver); bondingBroadcastReceiver = null; } } private BondingBroadcastReceiver bondingBroadcastReceiver;
|
/**
* Terminates callback
*/
|
Terminates callback
|
terminate
|
{
"repo_name": "ioriwellings/BLE-MIDI-for-Android",
"path": "library/src/main/java/jp/kshoji/blemidi/central/BleMidiCallback.java",
"license": "apache-2.0",
"size": 22583
}
|
[
"android.bluetooth.BluetoothGatt",
"java.util.Set",
"jp.kshoji.blemidi.device.MidiInputDevice"
] |
import android.bluetooth.BluetoothGatt; import java.util.Set; import jp.kshoji.blemidi.device.MidiInputDevice;
|
import android.bluetooth.*; import java.util.*; import jp.kshoji.blemidi.device.*;
|
[
"android.bluetooth",
"java.util",
"jp.kshoji.blemidi"
] |
android.bluetooth; java.util; jp.kshoji.blemidi;
| 502,967
|
public ActivityMonitor addMonitor(
IntentFilter filter, ActivityResult result, boolean block) {
ActivityMonitor am = new ActivityMonitor(filter, result, block);
addMonitor(am);
return am;
}
|
ActivityMonitor function( IntentFilter filter, ActivityResult result, boolean block) { ActivityMonitor am = new ActivityMonitor(filter, result, block); addMonitor(am); return am; }
|
/**
* A convenience wrapper for {@link #addMonitor(ActivityMonitor)} that
* creates an intent filter matching {@link ActivityMonitor} for you and
* returns it.
*
* @param filter The set of intents this monitor is responsible for.
* @param result A canned result to return if the monitor is hit; can
* be null.
* @param block Controls whether the monitor should block the activity
* start (returning its canned result) or let the call
* proceed.
*
* @return The newly created and added activity monitor.
*
* @see #addMonitor(ActivityMonitor)
* @see #checkMonitorHit
*/
|
A convenience wrapper for <code>#addMonitor(ActivityMonitor)</code> that creates an intent filter matching <code>ActivityMonitor</code> for you and returns it
|
addMonitor
|
{
"repo_name": "cuplv/droidel",
"path": "stubs/src/android/app/Instrumentation.java",
"license": "apache-2.0",
"size": 69632
}
|
[
"android.content.IntentFilter"
] |
import android.content.IntentFilter;
|
import android.content.*;
|
[
"android.content"
] |
android.content;
| 1,564,575
|
@ActionDoc(text = "Sends a Scene invocation to a MiOS Unit.")
public static boolean sendMiosScene(
@ParamDoc(name = "item", text = "The Item used to determine the MiOS Unit Address information for sending the Action call.") String itemName) {
return sendMiosSceneInternal(itemName);
}
|
@ActionDoc(text = STR) static boolean function( @ParamDoc(name = "item", text = STR) String itemName) { return sendMiosSceneInternal(itemName); }
|
/**
* Sends a Scene invocation to a MiOS Unit.
*/
|
Sends a Scene invocation to a MiOS Unit
|
sendMiosScene
|
{
"repo_name": "openhab/openhab",
"path": "bundles/action/org.openhab.action.mios/src/main/java/org/openhab/action/mios/internal/MiosAction.java",
"license": "epl-1.0",
"size": 7717
}
|
[
"org.openhab.core.scriptengine.action.ActionDoc",
"org.openhab.core.scriptengine.action.ParamDoc"
] |
import org.openhab.core.scriptengine.action.ActionDoc; import org.openhab.core.scriptengine.action.ParamDoc;
|
import org.openhab.core.scriptengine.action.*;
|
[
"org.openhab.core"
] |
org.openhab.core;
| 424,651
|
@Override
public String getAlertFiredHistory()
{
try
{
ObjectMapper mapper = new ObjectMapper();
SerializationConfig serializationConfig = mapper.getSerializationConfig();
serializationConfig.set(SerializationConfig.Feature.INDENT_OUTPUT, true);
StringWriter sw = new StringWriter();
mapper.writeValue(sw, _alertHistory);
return sw.toString();
}
catch(Exception e)
{
_logger.warn("", e);
return "";
}
}
}
public ClusterAlertMBeanCollection()
{
_beanServer = ManagementFactory.getPlatformMBeanServer();
}
|
String function() { try { ObjectMapper mapper = new ObjectMapper(); SerializationConfig serializationConfig = mapper.getSerializationConfig(); serializationConfig.set(SerializationConfig.Feature.INDENT_OUTPUT, true); StringWriter sw = new StringWriter(); mapper.writeValue(sw, _alertHistory); return sw.toString(); } catch(Exception e) { _logger.warn(STR"; } } } public ClusterAlertMBeanCollection() { _beanServer = ManagementFactory.getPlatformMBeanServer(); }
|
/**
* Returns the previous 100 alert mbean turn on / off history
* */
|
Returns the previous 100 alert mbean turn on / off history
|
getAlertFiredHistory
|
{
"repo_name": "kishoreg/incubator-helix",
"path": "helix-core/src/main/java/org/apache/helix/monitoring/mbeans/ClusterAlertMBeanCollection.java",
"license": "apache-2.0",
"size": 10013
}
|
[
"java.io.StringWriter",
"java.lang.management.ManagementFactory",
"org.codehaus.jackson.map.ObjectMapper",
"org.codehaus.jackson.map.SerializationConfig"
] |
import java.io.StringWriter; import java.lang.management.ManagementFactory; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.SerializationConfig;
|
import java.io.*; import java.lang.management.*; import org.codehaus.jackson.map.*;
|
[
"java.io",
"java.lang",
"org.codehaus.jackson"
] |
java.io; java.lang; org.codehaus.jackson;
| 394,383
|
VH onCreateViewHolder(ViewGroup parent, int viewType, @LayoutRes int layoutId);
}
@SuppressWarnings({"JavaDoc", "unused"})
public BaseRecyclerViewAdapter(@NonNull final BaseCacheAdapter<T, R, E, D> baseCacheAdapter,
@NonNull final DataBinder<T> dataBinder) {
this(baseCacheAdapter, dataBinder, Core.NOT_VALID_RES_ID);
}
@SuppressWarnings("JavaDoc")
public BaseRecyclerViewAdapter(@NonNull final BaseCacheAdapter<T, R, E, D> baseCacheAdapter,
@NonNull final DataBinder<T> dataBinder,
@LayoutRes final int layoutId) {
mBaseCacheAdapter = baseCacheAdapter;
mDataBinder = dataBinder;
mLayoutId = layoutId;
}
|
VH onCreateViewHolder(ViewGroup parent, int viewType, @LayoutRes int layoutId); } @SuppressWarnings({STR, STR}) public BaseRecyclerViewAdapter(@NonNull final BaseCacheAdapter<T, R, E, D> baseCacheAdapter, @NonNull final DataBinder<T> dataBinder) { this(baseCacheAdapter, dataBinder, Core.NOT_VALID_RES_ID); } @SuppressWarnings(STR) public BaseRecyclerViewAdapter(@NonNull final BaseCacheAdapter<T, R, E, D> baseCacheAdapter, @NonNull final DataBinder<T> dataBinder, @LayoutRes final int layoutId) { mBaseCacheAdapter = baseCacheAdapter; mDataBinder = dataBinder; mLayoutId = layoutId; }
|
/**
* Called when RecyclerView needs a new RecyclerView.ViewHolder of the given type
* to represent an item.
*
* @param parent
* The ViewGroup into which the new View will be added
*
* @param viewType
* The view type of the new View
*
* @param layoutId
* The resource identifier of a layout file that defines the views
*
* @return A new ViewHolder that holds a View of the given view type
*/
|
Called when RecyclerView needs a new RecyclerView.ViewHolder of the given type to represent an item
|
onCreateViewHolder
|
{
"repo_name": "akhasoft/Yakhont",
"path": "yakhont/src/main/java/akha/yakhont/adapter/BaseRecyclerViewAdapter.java",
"license": "apache-2.0",
"size": 5643
}
|
[
"android.support.annotation.LayoutRes",
"android.support.annotation.NonNull",
"android.view.ViewGroup"
] |
import android.support.annotation.LayoutRes; import android.support.annotation.NonNull; import android.view.ViewGroup;
|
import android.support.annotation.*; import android.view.*;
|
[
"android.support",
"android.view"
] |
android.support; android.view;
| 177,594
|
//===========================================
void insert(DeviceData deviceData, DevVarDoubleStringArray argin);
|
void insert(DeviceData deviceData, DevVarDoubleStringArray argin);
|
/**
* Insert method for argin is DevVarDoubleStringArray.
*
* @param argin argin value for next command.
*/
|
Insert method for argin is DevVarDoubleStringArray
|
insert
|
{
"repo_name": "tango-controls/JTango",
"path": "common/src/main/java/fr/esrf/TangoApi/IDeviceDataDAO.java",
"license": "lgpl-3.0",
"size": 14055
}
|
[
"fr.esrf.Tango"
] |
import fr.esrf.Tango;
|
import fr.esrf.*;
|
[
"fr.esrf"
] |
fr.esrf;
| 2,099,249
|
private void startForegroundCompat(Notification notification) {
// If we have the new startForeground API, then use it.
if (mStartForeground != null) {
mStartForegroundArgs[0] = Integer.valueOf(mNotificationId);
mStartForegroundArgs[1] = notification;
try {
mStartForeground.invoke(this, mStartForegroundArgs);
} catch (Exception e) {
Log.e(e);
}
return;
}
// Fall back on the old API.
setForeground(true);
if (notification != null) {
mNotificationManager.notify(mNotificationId, notification);
}
}
|
void function(Notification notification) { if (mStartForeground != null) { mStartForegroundArgs[0] = Integer.valueOf(mNotificationId); mStartForegroundArgs[1] = notification; try { mStartForeground.invoke(this, mStartForegroundArgs); } catch (Exception e) { Log.e(e); } return; } setForeground(true); if (notification != null) { mNotificationManager.notify(mNotificationId, notification); } }
|
/**
* This is a wrapper around the new startForeground method, using the older APIs if it is not
* available.
*/
|
This is a wrapper around the new startForeground method, using the older APIs if it is not available
|
startForegroundCompat
|
{
"repo_name": "damonkohler/sl4a",
"path": "android/Utils/src/com/googlecode/android_scripting/ForegroundService.java",
"license": "apache-2.0",
"size": 2848
}
|
[
"android.app.Notification"
] |
import android.app.Notification;
|
import android.app.*;
|
[
"android.app"
] |
android.app;
| 113,898
|
public static void write(WritableByteChannel outputChannel,
MessageBuilder message) throws IOException {
ByteBuffer[] segments = message.getSegmentsForOutput();
writeSegmentTable(outputChannel, segments);
for (ByteBuffer buffer : segments) {
while(buffer.hasRemaining()) {
outputChannel.write(buffer);
}
}
}
|
static void function(WritableByteChannel outputChannel, MessageBuilder message) throws IOException { ByteBuffer[] segments = message.getSegmentsForOutput(); writeSegmentTable(outputChannel, segments); for (ByteBuffer buffer : segments) { while(buffer.hasRemaining()) { outputChannel.write(buffer); } } }
|
/**
* Serializes a MessageBuilder to a WritableByteChannel.
*/
|
Serializes a MessageBuilder to a WritableByteChannel
|
write
|
{
"repo_name": "dwrensha/capnproto-java",
"path": "runtime/src/main/java/org/capnproto/Serialize.java",
"license": "mit",
"size": 9081
}
|
[
"java.io.IOException",
"java.nio.ByteBuffer",
"java.nio.channels.WritableByteChannel"
] |
import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.WritableByteChannel;
|
import java.io.*; import java.nio.*; import java.nio.channels.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 2,457,107
|
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<SBSubscriptionInner> listByTopicAsync(
String resourceGroupName, String namespaceName, String topicName, Integer skip, Integer top) {
return new PagedFlux<>(
() -> listByTopicSinglePageAsync(resourceGroupName, namespaceName, topicName, skip, top),
nextLink -> listByTopicNextSinglePageAsync(nextLink));
}
|
@ServiceMethod(returns = ReturnType.COLLECTION) PagedFlux<SBSubscriptionInner> function( String resourceGroupName, String namespaceName, String topicName, Integer skip, Integer top) { return new PagedFlux<>( () -> listByTopicSinglePageAsync(resourceGroupName, namespaceName, topicName, skip, top), nextLink -> listByTopicNextSinglePageAsync(nextLink)); }
|
/**
* List all the subscriptions under a specified topic.
*
* @param resourceGroupName Name of the Resource group within the Azure subscription.
* @param namespaceName The namespace name.
* @param topicName The topic name.
* @param skip Skip is only used if a previous operation returned a partial result. If a previous response contains
* a nextLink element, the value of the nextLink element will include a skip parameter that specifies a starting
* point to use for subsequent calls.
* @param top May be used to limit the number of results to the most recent N usageDetails.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response to the List Subscriptions operation.
*/
|
List all the subscriptions under a specified topic
|
listByTopicAsync
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-servicebus/src/main/java/com/azure/resourcemanager/servicebus/implementation/SubscriptionsClientImpl.java",
"license": "mit",
"size": 50121
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedFlux",
"com.azure.resourcemanager.servicebus.fluent.models.SBSubscriptionInner"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedFlux; import com.azure.resourcemanager.servicebus.fluent.models.SBSubscriptionInner;
|
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.servicebus.fluent.models.*;
|
[
"com.azure.core",
"com.azure.resourcemanager"
] |
com.azure.core; com.azure.resourcemanager;
| 1,392,319
|
List<T> result = extract(newArrayList(objects), extractor);
return toArray(result);
}
/**
* Behavior is described in {@link AbstractIterableAssert#extracting(Extractor)}
|
List<T> result = extract(newArrayList(objects), extractor); return toArray(result); } /** * Behavior is described in {@link AbstractIterableAssert#extracting(Extractor)}
|
/**
* Call {@link #extract(Iterable, Extractor)} after converting objects to an iterable.
* <p>
* Behavior is described in javadoc {@link AbstractObjectArrayAssert#extracting(Extractor)}
*/
|
Call <code>#extract(Iterable, Extractor)</code> after converting objects to an iterable. Behavior is described in javadoc <code>AbstractObjectArrayAssert#extracting(Extractor)</code>
|
extract
|
{
"repo_name": "AlexBischof/assertj-core",
"path": "src/main/java/org/assertj/core/groups/FieldsOrPropertiesExtractor.java",
"license": "apache-2.0",
"size": 2165
}
|
[
"java.util.List",
"org.assertj.core.api.AbstractIterableAssert",
"org.assertj.core.api.iterable.Extractor",
"org.assertj.core.util.Iterables"
] |
import java.util.List; import org.assertj.core.api.AbstractIterableAssert; import org.assertj.core.api.iterable.Extractor; import org.assertj.core.util.Iterables;
|
import java.util.*; import org.assertj.core.api.*; import org.assertj.core.api.iterable.*; import org.assertj.core.util.*;
|
[
"java.util",
"org.assertj.core"
] |
java.util; org.assertj.core;
| 2,840,778
|
public void init(FilterConfig filterConfig) throws ServletException {
this.filterConfig = filterConfig;
this.encoding = filterConfig.getInitParameter("encoding");
String value = filterConfig.getInitParameter("ignore");
if (value == null)
this.ignore = true;
else if (value.equalsIgnoreCase("true"))
this.ignore = true;
else if (value.equalsIgnoreCase("yes"))
this.ignore = true;
else
this.ignore = false;
}
// ------------------------------------------------------ Protected Methods
|
void function(FilterConfig filterConfig) throws ServletException { this.filterConfig = filterConfig; this.encoding = filterConfig.getInitParameter(STR); String value = filterConfig.getInitParameter(STR); if (value == null) this.ignore = true; else if (value.equalsIgnoreCase("true")) this.ignore = true; else if (value.equalsIgnoreCase("yes")) this.ignore = true; else this.ignore = false; }
|
/**
* Place this filter into service.
*
* @param filterConfig The filter configuration object
*/
|
Place this filter into service
|
init
|
{
"repo_name": "IntecsSPA/buddata-ebxml-registry",
"path": "Installer/distribution/bundled-tomcat/apache-tomcat-5.5.28/webapps/servlets-examples/WEB-INF/classes/filters/SetCharacterEncodingFilter.java",
"license": "gpl-3.0",
"size": 5954
}
|
[
"javax.servlet.FilterConfig",
"javax.servlet.ServletException"
] |
import javax.servlet.FilterConfig; import javax.servlet.ServletException;
|
import javax.servlet.*;
|
[
"javax.servlet"
] |
javax.servlet;
| 2,284,982
|
@Test
@Transactional(propagation = Propagation.NOT_SUPPORTED)
public void testTimerJobLowLevelActiviti() throws Exception
{
try
{// Read workflow XML from classpath and deploy it.
activitiRepositoryService.createDeployment().addClasspathResource(ACTIVITI_XML_HERD_TIMER).deploy();
// Set workflow variables for no other reason than to make sure it doesn't cause any problems.
Map<String, Object> variables = new HashMap<>();
variables.put("key1", "value1");
// Execute the workflow.
ProcessInstance processInstance = activitiRuntimeService.startProcessInstanceByKey("testNamespace.testHerdWorkflow", variables);
// Wait a reasonable amount of time for the process to finish.
waitUntilAllProcessCompleted();
// Get the history for the process and ensure the workflow completed.
HistoricProcessInstance historicProcessInstance =
activitiHistoryService.createHistoricProcessInstanceQuery().processInstanceId(processInstance.getProcessInstanceId()).singleResult();
Assert.assertNotNull(historicProcessInstance);
Assert.assertNotNull(historicProcessInstance.getEndTime());
}
finally
{
deleteActivitiDeployments();
}
}
|
@Transactional(propagation = Propagation.NOT_SUPPORTED) void function() throws Exception { try { activitiRepositoryService.createDeployment().addClasspathResource(ACTIVITI_XML_HERD_TIMER).deploy(); Map<String, Object> variables = new HashMap<>(); variables.put("key1", STR); ProcessInstance processInstance = activitiRuntimeService.startProcessInstanceByKey(STR, variables); waitUntilAllProcessCompleted(); HistoricProcessInstance historicProcessInstance = activitiHistoryService.createHistoricProcessInstanceQuery().processInstanceId(processInstance.getProcessInstanceId()).singleResult(); Assert.assertNotNull(historicProcessInstance); Assert.assertNotNull(historicProcessInstance.getEndTime()); } finally { deleteActivitiDeployments(); } }
|
/**
* This is an alternative way of running the timer test above which doesn't go through the herd job infrastructure, but rather ensures that Activiti can run
* the timer job successfully. This is accomplished by invoking Activiti API's directly and not running within our own transaction.
*
* @throws Exception
*/
|
This is an alternative way of running the timer test above which doesn't go through the herd job infrastructure, but rather ensures that Activiti can run the timer job successfully. This is accomplished by invoking Activiti API's directly and not running within our own transaction
|
testTimerJobLowLevelActiviti
|
{
"repo_name": "FINRAOS/herd",
"path": "herd-code/herd-service/src/test/java/org/finra/herd/service/activiti/ActivitiDelegateTest.java",
"license": "apache-2.0",
"size": 12491
}
|
[
"java.util.HashMap",
"java.util.Map",
"org.activiti.engine.history.HistoricProcessInstance",
"org.activiti.engine.runtime.ProcessInstance",
"org.junit.Assert",
"org.springframework.transaction.annotation.Propagation",
"org.springframework.transaction.annotation.Transactional"
] |
import java.util.HashMap; import java.util.Map; import org.activiti.engine.history.HistoricProcessInstance; import org.activiti.engine.runtime.ProcessInstance; import org.junit.Assert; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional;
|
import java.util.*; import org.activiti.engine.history.*; import org.activiti.engine.runtime.*; import org.junit.*; import org.springframework.transaction.annotation.*;
|
[
"java.util",
"org.activiti.engine",
"org.junit",
"org.springframework.transaction"
] |
java.util; org.activiti.engine; org.junit; org.springframework.transaction;
| 2,002,207
|
public static MUDFile read(File file) {
return null;
}
|
static MUDFile function(File file) { return null; }
|
/**
* Reads in a MUDFile from its file form. Note that no data is loaded but the entries that reside in
* the MUDFile as a result of this operation.
* @param file
* @return
*/
|
Reads in a MUDFile from its file form. Note that no data is loaded but the entries that reside in the MUDFile as a result of this operation
|
read
|
{
"repo_name": "ProjectMoon/ringmud",
"path": "src/ring/compiler/MUDFile.java",
"license": "lgpl-3.0",
"size": 4005
}
|
[
"java.io.File"
] |
import java.io.File;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,058,751
|
public static String getLocale() {
return BaseApplication.getSharedPreferences().getString(PREF_LOCALE, null);
}
|
static String function() { return BaseApplication.getSharedPreferences().getString(PREF_LOCALE, null); }
|
/**
* Get current locale
*
* @return Current device's locale
*/
|
Get current locale
|
getLocale
|
{
"repo_name": "literacyapp-org/literacyapp-appstore",
"path": "app/src/main/java/ai/elimu/appstore/util/AppPrefs.java",
"license": "apache-2.0",
"size": 2794
}
|
[
"ai.elimu.appstore.BaseApplication"
] |
import ai.elimu.appstore.BaseApplication;
|
import ai.elimu.appstore.*;
|
[
"ai.elimu.appstore"
] |
ai.elimu.appstore;
| 789,661
|
public synchronized void printResults() throws Exception {
printHeading("Transaction Results");
BenchmarkCallback.printAllResults();
ClientStats stats = fullStatsContext.fetch().getStats();
// 3. Performance statistics
printHeading("Client Workload Statistics");
System.out.printf("Average throughput: %,9d txns/sec\n", stats.getTxnThroughput());
// cast stats.getAverateLatency from long to double
System.out.printf("Average latency: %,9.2f ms\n", stats.getAverageLatency());
//System.out.printf("Average latency: %,9d ms\n", stats.getAverageLatency());
System.out.printf("95th percentile latency: %,9d ms\n", stats.kPercentileLatency(.95));
System.out.printf("99th percentile latency: %,9d ms\n", stats.kPercentileLatency(.99));
printHeading("System Server Statistics");
// cast stats.getAverageInternalLatency from long to double
System.out.printf("Reported Internal Avg Latency: %,9.2f ms\n", stats.getAverageInternalLatency());
//System.out.printf("Reported Internal Avg Latency: %,9d ms\n", stats.getAverageInternalLatency());
// 4. Write stats to file if requested
client.writeSummaryCSV(stats, config.statsfile);
}
|
synchronized void function() throws Exception { printHeading(STR); BenchmarkCallback.printAllResults(); ClientStats stats = fullStatsContext.fetch().getStats(); printHeading(STR); System.out.printf(STR, stats.getTxnThroughput()); System.out.printf(STR, stats.getAverageLatency()); System.out.printf(STR, stats.kPercentileLatency(.95)); System.out.printf(STR, stats.kPercentileLatency(.99)); printHeading(STR); System.out.printf(STR, stats.getAverageInternalLatency()); client.writeSummaryCSV(stats, config.statsfile); }
|
/**
* Prints the results of the voting simulation and statistics
* about performance.
*
* @throws Exception if anything unexpected happens.
*/
|
Prints the results of the voting simulation and statistics about performance
|
printResults
|
{
"repo_name": "simonzhangsm/voltdb",
"path": "examples/positionkeeper/client/positionkeeper/PositionsBenchmark.java",
"license": "agpl-3.0",
"size": 18729
}
|
[
"org.voltdb.client.ClientStats"
] |
import org.voltdb.client.ClientStats;
|
import org.voltdb.client.*;
|
[
"org.voltdb.client"
] |
org.voltdb.client;
| 1,039,753
|
private List<SAMRecord> fetchByReadName(final PeekableIterator<SAMRecord> iterator) {
final List<SAMRecord> out = new ArrayList<>();
if (iterator.hasNext()) {
final SAMRecord first = iterator.next();
out.add(first);
while (iterator.hasNext() && iterator.peek().getReadName().equals(first.getReadName())) {
out.add(iterator.next());
}
}
return out;
}
|
List<SAMRecord> function(final PeekableIterator<SAMRecord> iterator) { final List<SAMRecord> out = new ArrayList<>(); if (iterator.hasNext()) { final SAMRecord first = iterator.next(); out.add(first); while (iterator.hasNext() && iterator.peek().getReadName().equals(first.getReadName())) { out.add(iterator.next()); } } return out; }
|
/**
* Generates a list by consuming from the iterator in order starting with the first available
* read and continuing while subsequent reads share the same read name. If there are no reads
* remaining returns an empty list.
*/
|
Generates a list by consuming from the iterator in order starting with the first available read and continuing while subsequent reads share the same read name. If there are no reads remaining returns an empty list
|
fetchByReadName
|
{
"repo_name": "annkupi/picard",
"path": "src/main/java/picard/sam/RevertSam.java",
"license": "mit",
"size": 36146
}
|
[
"java.util.ArrayList",
"java.util.List"
] |
import java.util.ArrayList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,392,059
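The consume-while-equal grouping used above can be shown without htsjdk. Below is a minimal, self-contained Java sketch of the same pattern using plain read-name strings instead of SAMRecords; the one-element lookahead array is a stand-in for PeekableIterator.peek()/next() and, like the class and method names, is an assumption of this sketch rather than part of the original code.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

// Sketch of the consume-while-equal grouping pattern: take the first available
// element, then keep taking elements while they match it, pushing back the first
// mismatch so the next call starts from it.
public class GroupByNameSketch {
    static List<String> fetchByName(Iterator<String> it, String[] lookahead) {
        List<String> out = new ArrayList<>();
        if (lookahead[0] == null && it.hasNext()) {
            lookahead[0] = it.next();          // refill the one-element lookahead
        }
        if (lookahead[0] != null) {
            String first = lookahead[0];
            out.add(first);
            lookahead[0] = null;
            while (it.hasNext()) {
                String next = it.next();
                if (next.equals(first)) {
                    out.add(next);
                } else {
                    lookahead[0] = next;       // push back the non-matching element
                    break;
                }
            }
        }
        return out;
    }

    public static void main(String[] args) {
        Iterator<String> it = Arrays.asList("r1", "r1", "r2", "r3", "r3").iterator();
        String[] lookahead = new String[1];
        List<String> group;
        while (!(group = fetchByName(it, lookahead)).isEmpty()) {
            System.out.println(group);         // [r1, r1], then [r2], then [r3, r3]
        }
    }
}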
|
@SuppressWarnings({ "serial", "unchecked" })
@Test
public void testNotYetReplicatedErrors() throws IOException
{
final String exceptionMsg = "Nope, not replicated yet...";
final int maxRetries = 1; // Allow one retry (total of two calls)
conf.setInt(HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_RETRIES_KEY, maxRetries);
NamenodeProtocols mockNN = mock(NamenodeProtocols.class);
Answer<Object> answer = new ThrowsException(new IOException()) {
int retryCount = 0;
|
@SuppressWarnings({ STR, STR }) void function() throws IOException { final String exceptionMsg = STR; final int maxRetries = 1; conf.setInt(HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_RETRIES_KEY, maxRetries); NamenodeProtocols mockNN = mock(NamenodeProtocols.class); Answer<Object> answer = new ThrowsException(new IOException()) { int retryCount = 0;
|
/**
* Verify that client will correctly give up after the specified number
* of times trying to add a block
*/
|
Verify that client will correctly give up after the specified number of times trying to add a block
|
testNotYetReplicatedErrors
|
{
"repo_name": "soumabrata-chakraborty/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java",
"license": "apache-2.0",
"size": 45709
}
|
[
"java.io.IOException",
"org.apache.hadoop.hdfs.client.HdfsClientConfigKeys",
"org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols",
"org.mockito.Mockito",
"org.mockito.internal.stubbing.answers.ThrowsException",
"org.mockito.stubbing.Answer"
] |
import java.io.IOException; import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; import org.mockito.Mockito; import org.mockito.internal.stubbing.answers.ThrowsException; import org.mockito.stubbing.Answer;
|
import java.io.*; import org.apache.hadoop.hdfs.client.*; import org.apache.hadoop.hdfs.server.protocol.*; import org.mockito.*; import org.mockito.internal.stubbing.answers.*; import org.mockito.stubbing.*;
|
[
"java.io",
"org.apache.hadoop",
"org.mockito",
"org.mockito.internal",
"org.mockito.stubbing"
] |
java.io; org.apache.hadoop; org.mockito; org.mockito.internal; org.mockito.stubbing;
| 283,137
|
@Override
synchronized public void onManagerEvent(final ManagerEvent event)
{
if (event instanceof HangupEvent)
{
final HangupEvent hangupEvt = (HangupEvent) event;
final Channel hangupChannel = hangupEvt.getChannel();
if ((this.newChannel != null) && (hangupChannel.isSame(this.newChannel)))
{
this.originateSuccess = false;
OriginateBaseClass.logger.error("Dest channel " + this.newChannel + " hungup after answer"); //$NON-NLS-1$//$NON-NLS-2$
originateLatch.countDown();
}
if ((this.monitorChannel1 != null) && (hangupChannel.isSame(this.monitorChannel1)))
{
this.originateSuccess = false;
this.hungup = true;
if (this.newChannel != null)
{
OriginateBaseClass.logger.debug("hanging up " + this.newChannel);//$NON-NLS-1$
this.result.setChannelHungup(true);
PBX pbx = PBXFactory.getActivePBX();
try
{
pbx.hangup(this.newChannel);
}
catch (IllegalArgumentException | IllegalStateException | PBXException e)
{
logger.error(e, e);
}
}
OriginateBaseClass.logger.debug("notify channel 1 hungup");//$NON-NLS-1$
originateLatch.countDown();
}
if ((this.monitorChannel2 != null) && (hangupChannel.isSame(this.monitorChannel2)))
{
this.originateSuccess = false;
this.hungup = true;
if (this.newChannel != null)
{
OriginateBaseClass.logger.debug("Hanging up channel " + this.newChannel);//$NON-NLS-1$
this.result.setChannelHungup(true);
PBX pbx = PBXFactory.getActivePBX();
try
{
pbx.hangup(this.newChannel);
}
catch (IllegalArgumentException | IllegalStateException | PBXException e)
{
logger.error(e, e);
}
}
OriginateBaseClass.logger.debug("Notify channel 2 (" + this.monitorChannel2 + ") hungup");//$NON-NLS-1$//$NON-NLS-2$
originateLatch.countDown();
}
}
if (event instanceof OriginateResponseEvent)
{
OriginateBaseClass.logger.debug("response : " + this.newChannel); //$NON-NLS-1$
final OriginateResponseEvent response = (OriginateResponseEvent) event;
OriginateBaseClass.logger.debug("OriginateResponseEvent: channel=" //$NON-NLS-1$
+ (response.isChannel() ? response.getChannel() : response.getEndPoint()) + " originateID:" //$NON-NLS-1$
+ this.originateID);
OriginateBaseClass.logger.debug("{" + response.getReason() + ":" + response.getResponse() + "}"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
if (this.originateID != null)
{
if (this.originateID.compareToIgnoreCase(response.getActionId()) == 0)
{
this.originateSuccess = response.isSuccess();
OriginateBaseClass.logger.debug("OriginateResponse: matched actionId, success=" + this.originateSuccess //$NON-NLS-1$
+ " channelSeen=" + this.channelSeen); //$NON-NLS-1$
this.originateSeen = true;
// if we have also seen the channel then we can notify
// the
// originate() method
// that the call is up. Otherwise we will rely on the
// NewChannelEvent doing the
// notify.
if (this.channelSeen == true)
{
OriginateBaseClass.logger.debug("notify originate response event 305 " + this.originateSuccess);//$NON-NLS-1$
originateLatch.countDown();
}
}
}
else
{
OriginateBaseClass.logger.warn("actionid is null");//$NON-NLS-1$
}
}
// Look for the channel events that tell us that both sides of the
// call
// are up.
// We will see a number of channels come up as the call progresses.
// The LOCAL/ channels are just internal workings of Asterisk so we
// need
// to ignore these.
if (event instanceof NewChannelEvent)
{
final NewChannelEvent newState = (NewChannelEvent) event;
final Channel channel = newState.getChannel();
final GetVarAction var = new GetVarAction(channel, OriginateBaseClass.NJR_ORIGINATE_ID);
OriginateBaseClass.logger.debug("new channel event :" + channel + " context = " + newState.getContext() //$NON-NLS-1$//$NON-NLS-2$
+ " state =" + newState.getChannelStateDesc() + " state =" + newState.getChannelState()); //$NON-NLS-1$ //$NON-NLS-2$
// Now try to get the NJR_ORIGINATE_ID's value to see if this is
// an
// event for our channel
// If it is for our channel then the NJR_ORIGINATE_ID will match
// our
// originateID.
// We need to try several times as it can take some time to
// appear
// within asterisk.
int ctr = 0;
String __originateID = null;
while ((ctr < 5) && (__originateID == null))
{
try
{
ctr++;
Thread.sleep(100);
AsteriskPBX pbx = (AsteriskPBX) PBXFactory.getActivePBX();
final ManagerResponse response = pbx.sendAction(var, 500);
__originateID = response.getAttribute("value"); //$NON-NLS-1$
if ((__originateID != null))
{
// Check if the event is for our channel by checking
// the
// originateIDs match.
if (__originateID.compareToIgnoreCase(this.originateID) == 0)
{
if ((this.newChannel == null) && !channel.isLocal())
{
this.newChannel = channel;
this.channelSeen = true;
OriginateBaseClass.logger.debug("new channel name " + channel); //$NON-NLS-1$ }
if (this.listener != null)
{
this.listener.channelUpdate(channel);
}
if (this.originateSeen == true)
{
OriginateBaseClass.logger.debug("notifying success 362");//$NON-NLS-1$
originateLatch.countDown();
}
}
}
else
{
// So we got an originate but it wasn't for us which
// means this channel isn't ours.
// so we can ignore the event.
OriginateBaseClass.logger.debug("originateID " + __originateID); //$NON-NLS-1$
}
}
}
catch (final Exception e)
{
// We only care about error if we are on the last
// attempt.
if ((this.originateSuccess == false) && (ctr == 4))
{
OriginateBaseClass.logger.error(e, e);
}
}
}
}
// Look for the channel events that tell us that both sides of the
// call
// are up.
// We will see a number of channels come up as the call progresses.
// The LOCAL/ channels are just internal workings of Asterisk so we
// need
// to ignore these.
if (event instanceof BridgeEvent)
{
final BridgeEvent bridgeEvent = (BridgeEvent) event;
Channel channel = bridgeEvent.getChannel1();
if (bridgeEvent.getChannel1().isLocal())
{
channel = bridgeEvent.getChannel2();
}
final GetVarAction var = new GetVarAction(channel, OriginateBaseClass.NJR_ORIGINATE_ID);
OriginateBaseClass.logger.debug("new channel event :" + channel + " channel1 = " + bridgeEvent.getChannel1() //$NON-NLS-1$//$NON-NLS-2$
+ " channel2 =" + bridgeEvent.getChannel2()); //$NON-NLS-1$
// Now try to get the NJR_ORIGINATE_ID's value to see if this is
// an
// event for our channel
// If it is for our channel then the NJR_ORIGINATE_ID will match
// our
// originateID.
// We need to try several times as it can take some time to
// appear
// within asterisk.
int ctr = 0;
String __originateID = null;
while ((ctr < 5) && (__originateID == null))
{
try
{
ctr++;
Thread.sleep(100);
AsteriskPBX pbx = (AsteriskPBX) PBXFactory.getActivePBX();
final ManagerResponse response = pbx.sendAction(var, 500);
__originateID = response.getAttribute("value"); //$NON-NLS-1$
if ((__originateID != null))
{
// Check if the event is for our channel by checking
// the
// originateIDs match.
if ((this.originateID != null) && (__originateID.compareToIgnoreCase(this.originateID) == 0))
{
if ((this.newChannel == null) && !channel.isLocal())
{
this.newChannel = channel;
this.channelSeen = true;
OriginateBaseClass.logger.debug("new channel name " + channel); //$NON-NLS-1$ }
if (this.listener != null)
{
this.listener.channelUpdate(channel);
}
if (this.originateSeen == true)
{
OriginateBaseClass.logger.debug("notifying success 362");//$NON-NLS-1$
originateLatch.countDown();
}
}
}
}
else
{
// So we got an originate but it wasn't for us which
// means this channel isn't ours.
// so we can ignore the event.
OriginateBaseClass.logger.debug("originateID " + __originateID); //$NON-NLS-1$
}
}
catch (final Exception e)
{
// We only care about error if we are on the last
// attempt.
if ((this.originateSuccess == false) && (ctr == 4))
{
OriginateBaseClass.logger.error(e, e);
}
}
}
}
}
|
synchronized void function(final ManagerEvent event) { if (event instanceof HangupEvent) { final HangupEvent hangupEvt = (HangupEvent) event; final Channel hangupChannel = hangupEvt.getChannel(); if ((this.newChannel != null) && (hangupChannel.isSame(this.newChannel))) { this.originateSuccess = false; OriginateBaseClass.logger.error(STR + this.newChannel + STR); originateLatch.countDown(); } if ((this.monitorChannel1 != null) && (hangupChannel.isSame(this.monitorChannel1))) { this.originateSuccess = false; this.hungup = true; if (this.newChannel != null) { OriginateBaseClass.logger.debug(STR + this.newChannel); this.result.setChannelHungup(true); PBX pbx = PBXFactory.getActivePBX(); try { pbx.hangup(this.newChannel); } catch (IllegalArgumentException IllegalStateException PBXException e) { logger.error(e, e); } } OriginateBaseClass.logger.debug(STR); originateLatch.countDown(); } if ((this.monitorChannel2 != null) && (hangupChannel.isSame(this.monitorChannel2))) { this.originateSuccess = false; this.hungup = true; if (this.newChannel != null) { OriginateBaseClass.logger.debug(STR + this.newChannel); this.result.setChannelHungup(true); PBX pbx = PBXFactory.getActivePBX(); try { pbx.hangup(this.newChannel); } catch (IllegalArgumentException IllegalStateException PBXException e) { logger.error(e, e); } } OriginateBaseClass.logger.debug(STR + this.monitorChannel2 + STR); originateLatch.countDown(); } } if (event instanceof OriginateResponseEvent) { OriginateBaseClass.logger.debug(STR + this.newChannel); final OriginateResponseEvent response = (OriginateResponseEvent) event; OriginateBaseClass.logger.debug(STR + (response.isChannel() ? response.getChannel() : response.getEndPoint()) + STR + this.originateID); OriginateBaseClass.logger.debug("{" + response.getReason() + ":" + response.getResponse() + "}"); if (this.originateID != null) { if (this.originateID.compareToIgnoreCase(response.getActionId()) == 0) { this.originateSuccess = response.isSuccess(); OriginateBaseClass.logger.debug(STR + this.originateSuccess + STR + this.channelSeen); this.originateSeen = true; if (this.channelSeen == true) { OriginateBaseClass.logger.debug(STR + this.originateSuccess); originateLatch.countDown(); } } } else { OriginateBaseClass.logger.warn(STR); } } if (event instanceof NewChannelEvent) { final NewChannelEvent newState = (NewChannelEvent) event; final Channel channel = newState.getChannel(); final GetVarAction var = new GetVarAction(channel, OriginateBaseClass.NJR_ORIGINATE_ID); OriginateBaseClass.logger.debug(STR + channel + STR + newState.getContext() + STR + newState.getChannelStateDesc() + STR + newState.getChannelState()); int ctr = 0; String __originateID = null; while ((ctr < 5) && (__originateID == null)) { try { ctr++; Thread.sleep(100); AsteriskPBX pbx = (AsteriskPBX) PBXFactory.getActivePBX(); final ManagerResponse response = pbx.sendAction(var, 500); __originateID = response.getAttribute("value"); if ((__originateID != null)) { if (__originateID.compareToIgnoreCase(this.originateID) == 0) { if ((this.newChannel == null) && !channel.isLocal()) { this.newChannel = channel; this.channelSeen = true; OriginateBaseClass.logger.debug(STR + channel); if (this.listener != null) { this.listener.channelUpdate(channel); } if (this.originateSeen == true) { OriginateBaseClass.logger.debug(STR); originateLatch.countDown(); } } } else { OriginateBaseClass.logger.debug(STR + __originateID); } } } catch (final Exception e) { if ((this.originateSuccess == false) && (ctr == 4)) { OriginateBaseClass.logger.error(e, 
e); } } } } if (event instanceof BridgeEvent) { final BridgeEvent bridgeEvent = (BridgeEvent) event; Channel channel = bridgeEvent.getChannel1(); if (bridgeEvent.getChannel1().isLocal()) { channel = bridgeEvent.getChannel2(); } final GetVarAction var = new GetVarAction(channel, OriginateBaseClass.NJR_ORIGINATE_ID); OriginateBaseClass.logger.debug(STR + channel + STR + bridgeEvent.getChannel1() + STR + bridgeEvent.getChannel2()); int ctr = 0; String __originateID = null; while ((ctr < 5) && (__originateID == null)) { try { ctr++; Thread.sleep(100); AsteriskPBX pbx = (AsteriskPBX) PBXFactory.getActivePBX(); final ManagerResponse response = pbx.sendAction(var, 500); __originateID = response.getAttribute("value"); if ((__originateID != null)) { if ((this.originateID != null) && (__originateID.compareToIgnoreCase(this.originateID) == 0)) { if ((this.newChannel == null) && !channel.isLocal()) { this.newChannel = channel; this.channelSeen = true; OriginateBaseClass.logger.debug(STR + channel); if (this.listener != null) { this.listener.channelUpdate(channel); } if (this.originateSeen == true) { OriginateBaseClass.logger.debug(STR); originateLatch.countDown(); } } } } else { OriginateBaseClass.logger.debug(STR + __originateID); } } catch (final Exception e) { if ((this.originateSuccess == false) && (ctr == 4)) { OriginateBaseClass.logger.error(e, e); } } } } }
|
/**
* It is important that this method is synchronised as there is some
* interaction between the events and we need to ensure we process one at a
* time.
*/
|
It is important that this method is synchronised as there is some interaction between the events and we need to ensure we process one at a time
|
onManagerEvent
|
{
"repo_name": "scgm11/asterisk-java",
"path": "src/main/java/org/asteriskjava/pbx/internal/managerAPI/OriginateBaseClass.java",
"license": "apache-2.0",
"size": 23921
}
|
[
"org.asteriskjava.pbx.Channel",
"org.asteriskjava.pbx.PBXException",
"org.asteriskjava.pbx.PBXFactory",
"org.asteriskjava.pbx.asterisk.wrap.actions.GetVarAction",
"org.asteriskjava.pbx.asterisk.wrap.events.BridgeEvent",
"org.asteriskjava.pbx.asterisk.wrap.events.HangupEvent",
"org.asteriskjava.pbx.asterisk.wrap.events.ManagerEvent",
"org.asteriskjava.pbx.asterisk.wrap.events.NewChannelEvent",
"org.asteriskjava.pbx.asterisk.wrap.events.OriginateResponseEvent",
"org.asteriskjava.pbx.asterisk.wrap.response.ManagerResponse",
"org.asteriskjava.pbx.internal.core.AsteriskPBX"
] |
import org.asteriskjava.pbx.Channel; import org.asteriskjava.pbx.PBXException; import org.asteriskjava.pbx.PBXFactory; import org.asteriskjava.pbx.asterisk.wrap.actions.GetVarAction; import org.asteriskjava.pbx.asterisk.wrap.events.BridgeEvent; import org.asteriskjava.pbx.asterisk.wrap.events.HangupEvent; import org.asteriskjava.pbx.asterisk.wrap.events.ManagerEvent; import org.asteriskjava.pbx.asterisk.wrap.events.NewChannelEvent; import org.asteriskjava.pbx.asterisk.wrap.events.OriginateResponseEvent; import org.asteriskjava.pbx.asterisk.wrap.response.ManagerResponse; import org.asteriskjava.pbx.internal.core.AsteriskPBX;
|
import org.asteriskjava.pbx.*; import org.asteriskjava.pbx.asterisk.wrap.actions.*; import org.asteriskjava.pbx.asterisk.wrap.events.*; import org.asteriskjava.pbx.asterisk.wrap.response.*; import org.asteriskjava.pbx.internal.core.*;
|
[
"org.asteriskjava.pbx"
] |
org.asteriskjava.pbx;
| 280,035
|
static List<String> trimLabels( List< String > labels ) {
//special case: if there are 1 or fewer labels, we cannot do redundance
//checking or precision checking because there aren't enough
//labels to compare with each other
if ( labels.size() <= 1 ) {
return labels;
}
//first, we calculate the greatest changing precision amongst all the
//labels. This is the precision that needs to be maintained throughout
//all the labels
int greatestChangingPrecision = calculateGreatestChangingField( labels );
ArrayList< String > rtn = new ArrayList< String >( labels.size() );
DateTrimmer firstDate = new DateTrimmer( labels.get( 0 ) );
//the first date will need to display all information, even if it is
//redundant; however, we can drop some trailing 0s, up to the
//precision that is changing
rtn.add( firstDate.getCompactForm( DateTrimmer.NO_PRECISION , greatestChangingPrecision ) );
for ( int i=1 ; i<labels.size() ; i++ ) {
String prevLabel = labels.get( i-1 );
String nextLabel = labels.get( i );
int redundancePrecision = greatestRedundancePrecision( prevLabel , nextLabel );
DateTrimmer f = new DateTrimmer( nextLabel );
String trimmedLabel = f.getCompactForm( redundancePrecision , greatestChangingPrecision );
rtn.add( trimmedLabel );
}
return rtn;
}
|
static List<String> trimLabels( List< String > labels ) { if ( labels.size() <= 1 ) { return labels; } int greatestChangingPrecision = calculateGreatestChangingField( labels ); ArrayList< String > rtn = new ArrayList< String >( labels.size() ); DateTrimmer firstDate = new DateTrimmer( labels.get( 0 ) ); rtn.add( firstDate.getCompactForm( DateTrimmer.NO_PRECISION , greatestChangingPrecision ) ); for ( int i=1 ; i<labels.size() ; i++ ) { String prevLabel = labels.get( i-1 ); String nextLabel = labels.get( i ); int redundancePrecision = greatestRedundancePrecision( prevLabel , nextLabel ); DateTrimmer f = new DateTrimmer( nextLabel ); String trimmedLabel = f.getCompactForm( redundancePrecision , greatestChangingPrecision ); rtn.add( trimmedLabel ); } return rtn; }
|
/**
* Trims a list of time axis labels to remove unnecessary redundancy.
*
* @param labels a list of dates that will be displayed on a time axis
* @return a list of modified dates without redundancy that can be displayed
* on a time axis
*/
|
Trims a list of time axis labels to remove unnecessary redundancy
|
trimLabels
|
{
"repo_name": "diirt/diirt",
"path": "graphene/graphene/src/main/java/org/diirt/graphene/TimeScales.java",
"license": "mit",
"size": 36132
}
|
[
"java.util.ArrayList",
"java.util.List"
] |
import java.util.ArrayList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,482,383
|
private static void writeToResult(Result res, MeasureDefinition measure) {
Document modelDoc = getDocument(measure);
if (modelDoc == null) {
return;
}
try {
Transformer transformer = TransformerFactory.newInstance().newTransformer();
transformer.setOutputProperty("indent", "yes");
transformer.setOutputProperty("encoding", ENCODING);
transformer.transform(new DOMSource(modelDoc), res);
} catch (TransformerConfigurationException e) {
e.printStackTrace();
} catch (TransformerFactoryConfigurationError transformerFactoryConfigurationError) {
transformerFactoryConfigurationError.printStackTrace();
} catch (TransformerException e) {
e.printStackTrace();
}
}
|
static void function(Result res, MeasureDefinition measure) { Document modelDoc = getDocument(measure); if (modelDoc == null) { return; } try { Transformer transformer = TransformerFactory.newInstance().newTransformer(); transformer.setOutputProperty(STR, "yes"); transformer.setOutputProperty(STR, ENCODING); transformer.transform(new DOMSource(modelDoc), res); } catch (TransformerConfigurationException e) { e.printStackTrace(); } catch (TransformerFactoryConfigurationError transformerFactoryConfigurationError) { transformerFactoryConfigurationError.printStackTrace(); } catch (TransformerException e) { e.printStackTrace(); } }
|
/**
* Helper method used to call transformer to build up an XML file from a Document
* @param res Result where created xml should be put
* @param measure data structure
*/
|
Helper method used to call transformer to build up an XML file from a Document
|
writeToResult
|
{
"repo_name": "HOMlab/QN-ACTR-Release",
"path": "QN-ACTR Java/src/jmt/gui/common/xml/XMLResultsWriter.java",
"license": "lgpl-3.0",
"size": 9505
}
|
[
"javax.xml.transform.Result",
"javax.xml.transform.Transformer",
"javax.xml.transform.TransformerConfigurationException",
"javax.xml.transform.TransformerException",
"javax.xml.transform.TransformerFactory",
"javax.xml.transform.TransformerFactoryConfigurationError",
"javax.xml.transform.dom.DOMSource",
"org.w3c.dom.Document"
] |
import javax.xml.transform.Result; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.TransformerFactoryConfigurationError; import javax.xml.transform.dom.DOMSource; import org.w3c.dom.Document;
|
import javax.xml.transform.*; import javax.xml.transform.dom.*; import org.w3c.dom.*;
|
[
"javax.xml",
"org.w3c.dom"
] |
javax.xml; org.w3c.dom;
| 807,524
|
public String getVersion() {
return Config.get().getString("java.apiversion");
}
|
String function() { return Config.get().getString(STR); }
|
/**
* Returns the api version. Called as: api.get_version
* @return the api version.
*
* @xmlrpc.doc Returns the version of the API. Since Spacewalk 0.4
* (Satellite 5.3) it is no more related to server version.
* @xmlrpc.returntype string
*/
|
Returns the api version. Called as: api.get_version
|
getVersion
|
{
"repo_name": "xkollar/spacewalk",
"path": "java/code/src/com/redhat/rhn/frontend/xmlrpc/api/ApiHandler.java",
"license": "gpl-2.0",
"size": 8232
}
|
[
"com.redhat.rhn.common.conf.Config"
] |
import com.redhat.rhn.common.conf.Config;
|
import com.redhat.rhn.common.conf.*;
|
[
"com.redhat.rhn"
] |
com.redhat.rhn;
| 123,077
|
List<Tscreenpanel> loadAll();
|
List<Tscreenpanel> loadAll();
|
/**
* Loads all fields from TScreenPanel table
*
* @return
*/
|
Loads all fields from TScreenPanel table
|
loadAll
|
{
"repo_name": "trackplus/Genji",
"path": "src/main/java/com/trackplus/dao/ScreenPanelDAO.java",
"license": "gpl-3.0",
"size": 1928
}
|
[
"com.trackplus.model.Tscreenpanel",
"java.util.List"
] |
import com.trackplus.model.Tscreenpanel; import java.util.List;
|
import com.trackplus.model.*; import java.util.*;
|
[
"com.trackplus.model",
"java.util"
] |
com.trackplus.model; java.util;
| 109,810
|
private Referenceable getTableReference(String dbName, String tableName) throws Exception {
LOG.debug("Getting reference for table {}.{}", dbName, tableName);
String typeName = HiveDataTypes.HIVE_TABLE.getName();
String entityName = getTableQualifiedName(clusterName, dbName, tableName);
String dslQuery = String.format("%s as t where name = '%s'", typeName, entityName);
return getEntityReferenceFromDSL(typeName, dslQuery);
}
|
Referenceable function(String dbName, String tableName) throws Exception { LOG.debug(STR, dbName, tableName); String typeName = HiveDataTypes.HIVE_TABLE.getName(); String entityName = getTableQualifiedName(clusterName, dbName, tableName); String dslQuery = String.format(STR, typeName, entityName); return getEntityReferenceFromDSL(typeName, dslQuery); }
|
/**
* Gets reference for the table
*
* @param dbName database name
* @param tableName table name
* @return table reference if exists, else null
* @throws Exception
*/
|
Gets reference for the table
|
getTableReference
|
{
"repo_name": "SarahMehddi/HelloWorld",
"path": "addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java",
"license": "apache-2.0",
"size": 22246
}
|
[
"org.apache.atlas.hive.model.HiveDataTypes",
"org.apache.atlas.typesystem.Referenceable"
] |
import org.apache.atlas.hive.model.HiveDataTypes; import org.apache.atlas.typesystem.Referenceable;
|
import org.apache.atlas.hive.model.*; import org.apache.atlas.typesystem.*;
|
[
"org.apache.atlas"
] |
org.apache.atlas;
| 1,527,500
|
// Process destruction occurs in a separate thread, as in some (rare)
// cases, deadlocks will occur while trying to kill a native process.
// An example of that is executing <code>echo blah | ssh localhost ls -l</code>
// under MAC OS X.
// Using a separate thread allows muCommander to continue working properly even
// when that occurs.
new Thread(() -> {
// Closes the process' streams.
LOGGER.debug("Destroying process...");
stdoutMonitor.stopMonitoring();
if (stderrMonitor != null) {
stderrMonitor.stopMonitoring();
}
// Destroys the process.
try {
destroyProcess();
} catch(IOException e) {
LOGGER.debug("IOException caught", e);
}
}).start();
}
|
new Thread(() -> { LOGGER.debug(STR); stdoutMonitor.stopMonitoring(); if (stderrMonitor != null) { stderrMonitor.stopMonitoring(); } try { destroyProcess(); } catch(IOException e) { LOGGER.debug(STR, e); } }).start(); }
|
/**
* Kills the process.
*/
|
Kills the process
|
destroy
|
{
"repo_name": "Keltek/mucommander",
"path": "src/main/com/mucommander/process/AbstractProcess.java",
"license": "gpl-3.0",
"size": 6947
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,377,187
|
public void error(String msg, Object arg0, Object arg1) {
logIfEnabled(Level.ERROR, null, msg, arg0, arg1, UNKNOWN_ARG, null);
}
|
void function(String msg, Object arg0, Object arg1) { logIfEnabled(Level.ERROR, null, msg, arg0, arg1, UNKNOWN_ARG, null); }
|
/**
 * Log an error message.
*/
|
Log an error message
|
error
|
{
"repo_name": "dankito/ormlite-jpa-core",
"path": "src/main/java/com/j256/ormlite/logger/Logger.java",
"license": "isc",
"size": 17794
}
|
[
"com.j256.ormlite.logger.Log"
] |
import com.j256.ormlite.logger.Log;
|
import com.j256.ormlite.logger.*;
|
[
"com.j256.ormlite"
] |
com.j256.ormlite;
| 1,024,536
|
static Calendar getDayCopy(Calendar rawCalendar) {
Calendar rawCalendarInUtc = getUtcCalendarOf(rawCalendar);
Calendar utcCalendar = getUtcCalendar();
utcCalendar.set(
rawCalendarInUtc.get(Calendar.YEAR),
rawCalendarInUtc.get(Calendar.MONTH),
rawCalendarInUtc.get(Calendar.DAY_OF_MONTH));
return utcCalendar;
}
|
static Calendar getDayCopy(Calendar rawCalendar) { Calendar rawCalendarInUtc = getUtcCalendarOf(rawCalendar); Calendar utcCalendar = getUtcCalendar(); utcCalendar.set( rawCalendarInUtc.get(Calendar.YEAR), rawCalendarInUtc.get(Calendar.MONTH), rawCalendarInUtc.get(Calendar.DAY_OF_MONTH)); return utcCalendar; }
|
/**
* Returns a Calendar object in UTC time zone representing the start of day in UTC represented in
* the input Calendar object, i.e., the time (fields smaller than a day) is stripped based on the
* UTC time zone.
*
* @param rawCalendar the Calendar object representing the moment to process.
* @return A Calendar object representing the start of day in UTC time zone.
*/
|
Returns a Calendar object in UTC time zone representing the start of day in UTC represented in the input Calendar object, i.e., the time (fields smaller than a day) is stripped based on the UTC time zone
|
getDayCopy
|
{
"repo_name": "material-components/material-components-android",
"path": "lib/java/com/google/android/material/datepicker/UtcDates.java",
"license": "apache-2.0",
"size": 9688
}
|
[
"java.util.Calendar"
] |
import java.util.Calendar;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,447,838
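The UTC day-truncation described above can be sketched with nothing but java.util.Calendar and TimeZone. The helper name dayCopyUtc below is a placeholder, and the library's getUtcCalendar/getUtcCalendarOf utilities are replaced with inline Calendar.getInstance calls, so this is an approximation of the idea rather than the class's actual code.

import java.util.Calendar;
import java.util.TimeZone;

// Re-express an arbitrary Calendar instant in UTC, then keep only its
// year/month/day fields so the result is midnight UTC of that day.
public class UtcDayCopySketch {
    static Calendar dayCopyUtc(Calendar raw) {
        Calendar inUtc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        inUtc.setTimeInMillis(raw.getTimeInMillis());

        Calendar day = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        day.clear();                            // drop time-of-day and sub-day fields
        day.set(inUtc.get(Calendar.YEAR), inUtc.get(Calendar.MONTH), inUtc.get(Calendar.DAY_OF_MONTH));
        return day;
    }

    public static void main(String[] args) {
        Calendar now = Calendar.getInstance();  // local time zone
        System.out.println(dayCopyUtc(now).getTime());  // midnight UTC of the current UTC day
    }
}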
|
public static <E extends Throwable> E withCause(E e, @Nullable Throwable cause) {
assert e != null;
if (cause != null)
e.initCause(cause);
return e;
}
|
static <E extends Throwable> E function(E e, @Nullable Throwable cause) { assert e != null; if (cause != null) e.initCause(cause); return e; }
|
/**
* Utility method that sets cause into exception and returns it.
*
* @param e Exception to set cause to and return.
* @param cause Optional cause to set (if not {@code null}).
* @param <E> Type of the exception.
* @return Passed in exception with optionally set cause.
*/
|
Utility method that sets cause into exception and returns it
|
withCause
|
{
"repo_name": "WilliamDo/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java",
"license": "apache-2.0",
"size": 325083
}
|
[
"org.jetbrains.annotations.Nullable"
] |
import org.jetbrains.annotations.Nullable;
|
import org.jetbrains.annotations.*;
|
[
"org.jetbrains.annotations"
] |
org.jetbrains.annotations;
| 1,202,621
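A small usage sketch for the cause-attaching helper above; the exception types and messages are arbitrary placeholders. Throwable.initCause may be called at most once and only when no cause was passed at construction, which is why the helper only skips a null cause and otherwise assumes the target exception has no cause yet.

// Wrap a low-level failure in a higher-level exception while preserving the chain.
public class WithCauseSketch {
    static <E extends Throwable> E withCause(E e, Throwable cause) {
        if (cause != null) {
            e.initCause(cause);
        }
        return e;
    }

    public static void main(String[] args) {
        try {
            try {
                throw new java.io.IOException("disk unavailable");
            } catch (java.io.IOException io) {
                throw withCause(new IllegalStateException("cache write failed"), io);
            }
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage() + " <- " + e.getCause().getMessage());
        }
    }
}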
|
@Override
public DateTime roundFloor(DateTime dateTime) {
return baseTimeGrain.roundFloor(dateTime.withZone(timeZone)).withZone(timeZone);
}
|
DateTime function(DateTime dateTime) { return baseTimeGrain.roundFloor(dateTime.withZone(timeZone)).withZone(timeZone); }
|
/**
* Use the inner grain's round function to round a {@link org.joda.time.DateTime}, but use the bucketing of this
* grain's timezone rather than the one from the date time itself.
*
* @param dateTime The time being rounded
*
* @return the time, as rounded by the inner time grain and adjusted into this grain's time zone.
*/
|
Use the inner grain's round function to round a <code>org.joda.time.DateTime</code>, but use the bucketing of this grain's timezone rather than the one from the date time itself
|
roundFloor
|
{
"repo_name": "yahoo/fili",
"path": "fili-core/src/main/java/com/yahoo/bard/webservice/data/time/ZonedTimeGrain.java",
"license": "apache-2.0",
"size": 4542
}
|
[
"org.joda.time.DateTime"
] |
import org.joda.time.DateTime;
|
import org.joda.time.*;
|
[
"org.joda.time"
] |
org.joda.time;
| 2,699,950
|
@Reference(
name = "oauth.config.service",
service = OAuthServerConfiguration.class,
cardinality = ReferenceCardinality.MANDATORY,
policy = ReferencePolicy.DYNAMIC,
unbind = "unsetOauthServerConfiguration")
protected void setOauthServerConfiguration(OAuthServerConfiguration oauthServerConfiguration) {
ServiceReferenceHolder.getInstance().setOauthServerConfiguration(oauthServerConfiguration);
}
|
@Reference( name = STR, service = OAuthServerConfiguration.class, cardinality = ReferenceCardinality.MANDATORY, policy = ReferencePolicy.DYNAMIC, unbind = STR) void function(OAuthServerConfiguration oauthServerConfiguration) { ServiceReferenceHolder.getInstance().setOauthServerConfiguration(oauthServerConfiguration); }
|
/**
 * Initialize the OAuth Server configuration Service dependency
 *
 * @param oauthServerConfiguration OAuth Server configuration service reference
*/
|
Initialize the OAuth Server configuration Service dependency
|
setOauthServerConfiguration
|
{
"repo_name": "bhathiya/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/internal/APIManagerComponent.java",
"license": "apache-2.0",
"size": 54315
}
|
[
"org.osgi.service.component.annotations.Reference",
"org.osgi.service.component.annotations.ReferenceCardinality",
"org.osgi.service.component.annotations.ReferencePolicy",
"org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration"
] |
import org.osgi.service.component.annotations.Reference; import org.osgi.service.component.annotations.ReferenceCardinality; import org.osgi.service.component.annotations.ReferencePolicy; import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration;
|
import org.osgi.service.component.annotations.*; import org.wso2.carbon.identity.oauth.config.*;
|
[
"org.osgi.service",
"org.wso2.carbon"
] |
org.osgi.service; org.wso2.carbon;
| 303,079
|
public boolean createNewTable(String tableName, String hashKeyName, String hashKeyType, String rangeKeyName, String rangeKeyType, long readCapacity,
long writeCapacity) {
ArrayList<AttributeDefinition> attributeDefinition = new ArrayList<AttributeDefinition>();
attributeDefinition.add(new AttributeDefinition().withAttributeName(hashKeyName).withAttributeType(hashKeyType));
if (rangeKeyName != null)
attributeDefinition.add(new AttributeDefinition().withAttributeName(rangeKeyName).withAttributeType(rangeKeyType));
ArrayList<KeySchemaElement> keySchema = new ArrayList<KeySchemaElement>();
keySchema.add(new KeySchemaElement().withAttributeName(hashKeyName).withKeyType(KeyType.HASH));
if (rangeKeyName != null)
keySchema.add(new KeySchemaElement().withAttributeName(rangeKeyName).withKeyType(KeyType.RANGE));
ProvisionedThroughput provisionedThroughput = new ProvisionedThroughput().withReadCapacityUnits(readCapacity).withWriteCapacityUnits(writeCapacity);
CreateTableRequest request = new CreateTableRequest().withTableName(tableName).withAttributeDefinitions(attributeDefinition).withKeySchema(keySchema)
.withProvisionedThroughput(provisionedThroughput);
try {
client.createTable(request);
waitForTableToBecomeAvailable(tableName);
System.out.println("Table " + tableName + " created.");
} catch (ResourceInUseException riue) {
System.out.println("Table " + tableName + " already existed.");
return false;
} catch (Exception e) {
System.out.println(e.getMessage());
return false;
}
return true;
}
|
boolean function(String tableName, String hashKeyName, String hashKeyType, String rangeKeyName, String rangeKeyType, long readCapacity, long writeCapacity) { ArrayList<AttributeDefinition> attributeDefinition = new ArrayList<AttributeDefinition>(); attributeDefinition.add(new AttributeDefinition().withAttributeName(hashKeyName).withAttributeType(hashKeyType)); if (rangeKeyName != null) attributeDefinition.add(new AttributeDefinition().withAttributeName(rangeKeyName).withAttributeType(rangeKeyType)); ArrayList<KeySchemaElement> keySchema = new ArrayList<KeySchemaElement>(); keySchema.add(new KeySchemaElement().withAttributeName(hashKeyName).withKeyType(KeyType.HASH)); if (rangeKeyName != null) keySchema.add(new KeySchemaElement().withAttributeName(rangeKeyName).withKeyType(KeyType.RANGE)); ProvisionedThroughput provisionedThroughput = new ProvisionedThroughput().withReadCapacityUnits(readCapacity).withWriteCapacityUnits(writeCapacity); CreateTableRequest request = new CreateTableRequest().withTableName(tableName).withAttributeDefinitions(attributeDefinition).withKeySchema(keySchema) .withProvisionedThroughput(provisionedThroughput); try { client.createTable(request); waitForTableToBecomeAvailable(tableName); System.out.println(STR + tableName + STR); } catch (ResourceInUseException riue) { System.out.println(STR + tableName + STR); return false; } catch (Exception e) { System.out.println(e.getMessage()); return false; } return true; }
|
/**
*
* Create table with given name and schema
*
*/
|
Create table with given name and schema
|
createNewTable
|
{
"repo_name": "awslabs/dynamodb-online-index-violation-detector",
"path": "src/test/java/com/amazonaws/services/dynamodbv2/online/index/integration/tests/TableManager.java",
"license": "apache-2.0",
"size": 10377
}
|
[
"com.amazonaws.services.dynamodbv2.model.AttributeDefinition",
"com.amazonaws.services.dynamodbv2.model.CreateTableRequest",
"com.amazonaws.services.dynamodbv2.model.KeySchemaElement",
"com.amazonaws.services.dynamodbv2.model.KeyType",
"com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput",
"com.amazonaws.services.dynamodbv2.model.ResourceInUseException",
"java.util.ArrayList"
] |
import com.amazonaws.services.dynamodbv2.model.AttributeDefinition; import com.amazonaws.services.dynamodbv2.model.CreateTableRequest; import com.amazonaws.services.dynamodbv2.model.KeySchemaElement; import com.amazonaws.services.dynamodbv2.model.KeyType; import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput; import com.amazonaws.services.dynamodbv2.model.ResourceInUseException; import java.util.ArrayList;
|
import com.amazonaws.services.dynamodbv2.model.*; import java.util.*;
|
[
"com.amazonaws.services",
"java.util"
] |
com.amazonaws.services; java.util;
| 1,465,709
|
public String getPropertiesAsTags()
{
StringBuffer sb = new StringBuffer();
for(Iterator iter = this.propertyBeans.values().iterator(); iter.hasNext();)
{
CFCPropertyBean bean = (CFCPropertyBean)iter.next();
sb.append("\t");
sb.append("<cfproperty name=\"");
sb.append(bean.getName() + "\"");
if(bean.getDisplayName().length() > 0)
sb.append(" displayname=\"" + bean.getDisplayName() + "\"");
if(bean.getHint().length() > 0)
sb.append(" hint=\"" + bean.getHint() + "\"");
if(bean.getType().length() > 0)
sb.append(" type=\"" + bean.getType() + "\"");
if(bean.getDefaultVal().length() > 0)
sb.append(" default=\"" + bean.getDefaultVal() + "\"");
sb.append(" />");
sb.append("\n");
}
return sb.toString();
}
|
String function() { StringBuffer sb = new StringBuffer(); for(Iterator iter = this.propertyBeans.values().iterator(); iter.hasNext();) { CFCPropertyBean bean = (CFCPropertyBean)iter.next(); sb.append("\t"); sb.append(STRSTR\STR displayname=\STR\STR hint=\STR\STR type=\STR\STR default=\STR\STR />STR\n"); } return sb.toString(); }
|
/**
* This is used to get the CFML representation of the properties that were
* collected on this page
*
* @return the CFML code
*/
|
This is used to get the CFML representation of the properties that were collected on this page
|
getPropertiesAsTags
|
{
"repo_name": "cybersonic/org.cfeclipse.cfml",
"path": "src/org/cfeclipse/cfml/wizards/cfcwizard/NewCFCWizardProperties.java",
"license": "mit",
"size": 26694
}
|
[
"java.util.Iterator"
] |
import java.util.Iterator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,347,003
|
@Override
public Object clone() {
MaleAdolescentParameters clone = null;
try {
clone = (MaleAdolescentParameters) super.clone();
for (String pKey: setOfParamKeys) {
clone.setValue(pKey,this.getValue(pKey));
}
for (String fcKey: setOfFunctionCategories) {
Set<String> fKeys = this.getIBMFunctionNamesByCategory(fcKey);
IBMFunctionInterface sfi = this.getSelectedIBMFunctionForCategory(fcKey);
for (String fKey: fKeys){
IBMFunctionInterface tfi = this.getIBMFunction(fcKey, fKey);
IBMFunctionInterface cfi = clone.getIBMFunction(fcKey,fKey);
Set<String> pKeys = tfi.getParameterNames();
for (String pKey: pKeys) {
cfi.setParameterValue(pKey, tfi.getParameter(pKey).getValue());
}
if (sfi==tfi) clone.selectIBMFunctionForCategory(fcKey, fKey);
}
}
clone.propertySupport = new PropertyChangeSupport(clone);
} catch (CloneNotSupportedException ex) {
ex.printStackTrace();
}
return clone;
}
|
Object function() { MaleAdolescentParameters clone = null; try { clone = (MaleAdolescentParameters) super.clone(); for (String pKey: setOfParamKeys) { clone.setValue(pKey,this.getValue(pKey)); } for (String fcKey: setOfFunctionCategories) { Set<String> fKeys = this.getIBMFunctionNamesByCategory(fcKey); IBMFunctionInterface sfi = this.getSelectedIBMFunctionForCategory(fcKey); for (String fKey: fKeys){ IBMFunctionInterface tfi = this.getIBMFunction(fcKey, fKey); IBMFunctionInterface cfi = clone.getIBMFunction(fcKey,fKey); Set<String> pKeys = tfi.getParameterNames(); for (String pKey: pKeys) { cfi.setParameterValue(pKey, tfi.getParameter(pKey).getValue()); } if (sfi==tfi) clone.selectIBMFunctionForCategory(fcKey, fKey); } } clone.propertySupport = new PropertyChangeSupport(clone); } catch (CloneNotSupportedException ex) { ex.printStackTrace(); } return clone; }
|
/**
* Returns a deep copy of the instance. Values are copied.
* Any listeners on 'this' are not(?) copied, so these need to be hooked up.
* @return - the clone.
*/
|
Returns a deep copy of the instance. Values are copied. Any listeners on 'this' are not(?) copied, so these need to be hooked up
|
clone
|
{
"repo_name": "wStockhausen/DisMELS_SnowCrab",
"path": "src/wts/models/DisMELS/IBMs/SnowCrab/MaleAdolescent/MaleAdolescentParameters.java",
"license": "mit",
"size": 16010
}
|
[
"java.beans.PropertyChangeSupport",
"java.util.Set"
] |
import java.beans.PropertyChangeSupport; import java.util.Set;
|
import java.beans.*; import java.util.*;
|
[
"java.beans",
"java.util"
] |
java.beans; java.util;
| 460,778
|
public String signature() {
return this.type + '#' +
PropertiesUtils.getSignature(this.name, this.properties) + "#" +
this.additionalSignature();
}
|
String function() { return this.type + '#' + PropertiesUtils.getSignature(this.name, this.properties) + "#" + this.additionalSignature(); }
|
/**
* Creates the annotator's signature given the current properties.
* We use this to understand if the user wants to recreate
* the same annotator type but with different parameters.
*/
|
Creates the annotator's signature given the current properties. We use this to understand if the user wants to recreate the same annotator type but with different parameters
|
signature
|
{
"repo_name": "intfloat/CoreNLP",
"path": "src/edu/stanford/nlp/pipeline/AnnotatorFactory.java",
"license": "gpl-2.0",
"size": 2825
}
|
[
"edu.stanford.nlp.util.PropertiesUtils"
] |
import edu.stanford.nlp.util.PropertiesUtils;
|
import edu.stanford.nlp.util.*;
|
[
"edu.stanford.nlp"
] |
edu.stanford.nlp;
| 2,301,374
|
public SearchRequestBuilder setTemplate(Template template) {
request.template(template);
return this;
}
|
SearchRequestBuilder function(Template template) { request.template(template); return this; }
|
/**
 * Sets the search template for this request.
*/
|
Sets the search template for this request
|
setTemplate
|
{
"repo_name": "baishuo/elasticsearch_v2.1.0-baishuo",
"path": "core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java",
"license": "apache-2.0",
"size": 34250
}
|
[
"org.elasticsearch.script.Template"
] |
import org.elasticsearch.script.Template;
|
import org.elasticsearch.script.*;
|
[
"org.elasticsearch.script"
] |
org.elasticsearch.script;
| 2,374,970
|
public String generateWorkflowExecutionFile(armadillo_workflow armadillo, Workflows workflow) {
StringBuilder st=new StringBuilder();
int count=0;
for (workflow_object obj:armadillo.workflow.outputExecution()) {
workflow_properties properties=obj.getProperties();
if (properties.get("ObjectType").equals("Program")) {
count++;
properties.getStatus();
st.append(count+" "+properties.getName()+" "+getHtmlStatus(properties));
//--
String filename= count+"_"+properties.getName()+"_"+properties.getID();
filename = filename.replaceAll(" ", "_");
String results_dir=config.resultsDir()+File.separator+projectName+File.separator+"results"+File.separator+filename;
config.createDir(results_dir);
config.createDir(results_dir+File.separator+"input");
config.createDir(results_dir+File.separator+"output");
 //--Note: this might fail if we output All!
for (String output_type:properties.Outputed()) {
Vector<Integer> ids=properties.getOutputID(output_type, null);
for (int id:ids) {
if (id!=0) {
//st.append(" "+output_type+properties.getID().replaceAll(" ", "_")+".html\">"+(output_type.equals("OutputText")?"Software output":output_type)+"</a></li>\n");
Output output=new Output();
output.setType(output_type);
output.setTypeid(id);
Biologic bio=output.getBiologic();
String filename2=results_dir+File.separator+"output"+File.separator+output_type+properties.getID().replaceAll(" ", "_")+".txt";
this.generateBiologicalContentFile(filename2, bio);
}
}
}
 //--Note: this might fail if we input All!
for (String input_type:properties.Inputed()) {
Vector<Integer> ids=properties.getInputID(input_type, null);
for (int id:ids) {
if (id!=0) {
//st.append(" "+input_type+properties.getID().replaceAll(" ", "_")+".html\">"+(input_type.equals("OutputText")?"Software output":input_type)+"</a></li>\n");
Output output=new Output();
output.setType(input_type);
output.setTypeid(id);
Biologic bio=output.getBiologic();
String filename2=results_dir+File.separator+"input"+File.separator+input_type+properties.getID().replaceAll(" ", "_")+".txt";
this.generateBiologicalContentFile(filename2, bio);
}
}
}
}
// if (properties.get("ObjectType").equals("OutputDatabase")) {
// count++;
// for (String output_type:properties.Outputed()) {
// Vector<Integer> ids=properties.getOutputID(output_type, null);
// for (int id:ids) {
// if (id!=0) {
// st.append(output_type+properties.getID().replaceAll(" ", "_")+".html\">"+output_type+"</a></li>\n");
// Output output=new Output();
// output.setType(output_type);
// output.setTypeid(id);
// Object bio=output.getBiologic();
// generateBiologicalContentPage(output_type+properties.getID(),bio);
// }
// }
// }
// }
} //--End Workflow List
return st.toString();
}
|
String function(armadillo_workflow armadillo, Workflows workflow) { StringBuilder st=new StringBuilder(); int count=0; for (workflow_object obj:armadillo.workflow.outputExecution()) { workflow_properties properties=obj.getProperties(); if (properties.get(STR).equals(STR)) { count++; properties.getStatus(); st.append(count+" "+properties.getName()+" "+getHtmlStatus(properties)); String filename= count+"_"+properties.getName()+"_"+properties.getID(); filename = filename.replaceAll(" ", "_"); String results_dir=config.resultsDir()+File.separator+projectName+File.separator+STR+File.separator+filename; config.createDir(results_dir); config.createDir(results_dir+File.separator+"input"); config.createDir(results_dir+File.separator+STR); for (String output_type:properties.Outputed()) { Vector<Integer> ids=properties.getOutputID(output_type, null); for (int id:ids) { if (id!=0) { Output output=new Output(); output.setType(output_type); output.setTypeid(id); Biologic bio=output.getBiologic(); String filename2=results_dir+File.separator+STR+File.separator+output_type+properties.getID().replaceAll(" ", "_")+".txt"; this.generateBiologicalContentFile(filename2, bio); } } } for (String input_type:properties.Inputed()) { Vector<Integer> ids=properties.getInputID(input_type, null); for (int id:ids) { if (id!=0) { Output output=new Output(); output.setType(input_type); output.setTypeid(id); Biologic bio=output.getBiologic(); String filename2=results_dir+File.separator+"input"+File.separator+input_type+properties.getID().replaceAll(" ", "_")+".txt"; this.generateBiologicalContentFile(filename2, bio); } } } } } return st.toString(); }
|
/**
 * This generates all the input-output files for the workflow and a web page
* @param armadillo
* @param workflow
* @return
*/
|
This generates all the input-output files for the workflow and a web page
|
generateWorkflowExecutionFile
|
{
"repo_name": "armadilloUQAM/armadillo2",
"path": "src/results/report_list_workflows.java",
"license": "gpl-3.0",
"size": 39213
}
|
[
"java.io.File",
"java.util.Vector"
] |
import java.io.File; import java.util.Vector;
|
import java.io.*; import java.util.*;
|
[
"java.io",
"java.util"
] |
java.io; java.util;
| 466,709
|
public static DateTimeFormatter ty() {
return DateTimeFormatter.ofPattern("yyyy");
}
|
static DateTimeFormatter function() { return DateTimeFormatter.ofPattern("yyyy"); }
|
/**
* Standard yearly date format
* @return DateTimeFormatter class object for formatting a date value as
* a string
*/
|
Standard yearly date format
|
ty
|
{
"repo_name": "wbuchanan/StataJavaUtilities",
"path": "src/main/java/org/paces/Stata/Utilities/StataDateDisplayFormats.java",
"license": "gpl-2.0",
"size": 2403
}
|
[
"java.time.format.DateTimeFormatter"
] |
import java.time.format.DateTimeFormatter;
|
import java.time.format.*;
|
[
"java.time"
] |
java.time;
| 1,006,566
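A quick usage sketch of the yearly pattern above; the date value is an arbitrary example.

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

// Formatting a date with the "yyyy" pattern keeps only the year component.
public class YearFormatSketch {
    public static void main(String[] args) {
        DateTimeFormatter ty = DateTimeFormatter.ofPattern("yyyy");
        System.out.println(ty.format(LocalDate.of(2024, 3, 15)));  // 2024
    }
}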
|
public static Bitmap decodeSampledBitmapFromFile(String filename,
int reqWidth, int reqHeight, ImageCache cache) {
// First decode with inJustDecodeBounds=true to check dimensions
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(filename, options);
// Calculate inSampleSize
options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);
// If we're running on Honeycomb or newer, try to use inBitmap
if (Utils.hasHoneycomb()) {
addInBitmapOptions(options, cache);
}
// Decode bitmap with inSampleSize set
options.inJustDecodeBounds = false;
return BitmapFactory.decodeFile(filename, options);
}
|
static Bitmap function(String filename, int reqWidth, int reqHeight, ImageCache cache) { final BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; BitmapFactory.decodeFile(filename, options); options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight); if (Utils.hasHoneycomb()) { addInBitmapOptions(options, cache); } options.inJustDecodeBounds = false; return BitmapFactory.decodeFile(filename, options); }
|
/**
* Decode and sample down a bitmap from a file to the requested width and height.
*
* @param filename The full path of the file to decode
* @param reqWidth The requested width of the resulting bitmap
* @param reqHeight The requested height of the resulting bitmap
* @param cache The ImageCache used to find candidate bitmaps for use with inBitmap
* @return A bitmap sampled down from the original with the same aspect ratio and dimensions
* that are equal to or greater than the requested width and height
*/
|
Decode and sample down a bitmap from a file to the requested width and height
|
decodeSampledBitmapFromFile
|
{
"repo_name": "jaydeepw/android-multiple-image-picker-eclipse",
"path": "Lib/src/com/giljulio/imagepicker/utils/ImageResizer.java",
"license": "mit",
"size": 12178
}
|
[
"android.graphics.Bitmap",
"android.graphics.BitmapFactory"
] |
import android.graphics.Bitmap; import android.graphics.BitmapFactory;
|
import android.graphics.*;
|
[
"android.graphics"
] |
android.graphics;
| 2,522,899
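The two-pass decode the method above relies on can be sketched as follows. This version inlines a typical power-of-two calculateInSampleSize and leaves out the Honeycomb inBitmap/ImageCache reuse, so it is an approximation of the pattern rather than the library's exact implementation.

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;

// Pass 1 reads only the image bounds; pass 2 decodes with a power-of-two
// sample size so the result is at least as large as the requested dimensions.
public final class SampledDecodeSketch {
    public static Bitmap decodeSampled(String path, int reqWidth, int reqHeight) {
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;      // no pixel allocation on this pass
        BitmapFactory.decodeFile(path, options);

        int inSampleSize = 1;
        int halfWidth = options.outWidth / 2;
        int halfHeight = options.outHeight / 2;
        while ((halfWidth / inSampleSize) >= reqWidth && (halfHeight / inSampleSize) >= reqHeight) {
            inSampleSize *= 2;                  // halve until just above the request
        }

        options.inSampleSize = inSampleSize;
        options.inJustDecodeBounds = false;     // real decode, sampled down
        return BitmapFactory.decodeFile(path, options);
    }
}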
|
T visitFuncExtern(@NotNull jazzikParser.FuncExternContext ctx);
|
T visitFuncExtern(@NotNull jazzikParser.FuncExternContext ctx);
|
/**
* Visit a parse tree produced by {@link jazzikParser#FuncExtern}.
* @param ctx the parse tree
* @return the visitor result
*/
|
Visit a parse tree produced by <code>jazzikParser#FuncExtern</code>
|
visitFuncExtern
|
{
"repo_name": "petersch/jazzik",
"path": "src/parser/jazzikVisitor.java",
"license": "gpl-3.0",
"size": 10234
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 1,726,745
|
private void addPivotDoc(SolrClient client, Object... fields)
throws IOException, SolrServerException {
indexDoc(client, params(), sdoc(fields));
}
private int docNumber = 0;
|
void function(SolrClient client, Object... fields) throws IOException, SolrServerException { indexDoc(client, params(), sdoc(fields)); } private int docNumber = 0;
|
/**
* Builds up a SolrInputDocument using the specified fields, then adds it to the
* specified client as well as the control client
* @see #indexDoc(org.apache.solr.client.solrj.SolrClient,SolrParams,SolrInputDocument...)
* @see #sdoc
*/
|
Builds up a SolrInputDocument using the specified fields, then adds it to the specified client as well as the control client
|
addPivotDoc
|
{
"repo_name": "visouza/solr-5.0.0",
"path": "solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java",
"license": "apache-2.0",
"size": 40551
}
|
[
"java.io.IOException",
"org.apache.solr.client.solrj.SolrClient",
"org.apache.solr.client.solrj.SolrServerException"
] |
import java.io.IOException; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException;
|
import java.io.*; import org.apache.solr.client.solrj.*;
|
[
"java.io",
"org.apache.solr"
] |
java.io; org.apache.solr;
| 350,077
|
public static ArrayList<BaseFile> getFilesList(String path, boolean showHidden) {
File f = new File(path);
ArrayList<BaseFile> files = new ArrayList<>();
try {
if (f.exists() && f.isDirectory()) {
for (File x : f.listFiles()) {
long size = 0;
if (!x.isDirectory()) size = x.length();
BaseFile baseFile=new BaseFile(x.getPath(), parseFilePermission(x),
x.lastModified(), size, x.isDirectory());
baseFile.setName(x.getName());
baseFile.setMode(OpenMode.FILE);
if (showHidden) {
files.add(baseFile);
} else {
if (!x.isHidden()) {
files.add(baseFile);
}
}
}
}
} catch (Exception e) {
}
return files;
}
|
static ArrayList<BaseFile> function(String path, boolean showHidden) { File f = new File(path); ArrayList<BaseFile> files = new ArrayList<>(); try { if (f.exists() && f.isDirectory()) { for (File x : f.listFiles()) { long size = 0; if (!x.isDirectory()) size = x.length(); BaseFile baseFile=new BaseFile(x.getPath(), parseFilePermission(x), x.lastModified(), size, x.isDirectory()); baseFile.setName(x.getName()); baseFile.setMode(OpenMode.FILE); if (showHidden) { files.add(baseFile); } else { if (!x.isHidden()) { files.add(baseFile); } } } } } catch (Exception e) { } return files; }
|
/**
* Loads files in a path using basic filesystem callbacks
* @param path the path
* @param showHidden
* @return
*/
|
Loads files in a path using basic filesystem callbacks
|
getFilesList
|
{
"repo_name": "ugurtufekci/AmazeFileManager",
"path": "src/main/java/com/amaze/filemanager/filesystem/RootHelper.java",
"license": "gpl-3.0",
"size": 17413
}
|
[
"com.amaze.filemanager.utils.OpenMode",
"java.io.File",
"java.util.ArrayList"
] |
import com.amaze.filemanager.utils.OpenMode; import java.io.File; import java.util.ArrayList;
|
import com.amaze.filemanager.utils.*; import java.io.*; import java.util.*;
|
[
"com.amaze.filemanager",
"java.io",
"java.util"
] |
com.amaze.filemanager; java.io; java.util;
| 802,679
|
@Override
public void setDrawDate(java.util.Date drawDate) {
_draw.setDrawDate(drawDate);
}
|
void function(java.util.Date drawDate) { _draw.setDrawDate(drawDate); }
|
/**
* Sets the draw date of this draw.
*
* @param drawDate the draw date of this draw
*/
|
Sets the draw date of this draw
|
setDrawDate
|
{
"repo_name": "aritzg/EuroMillionGame-portlet",
"path": "docroot/WEB-INF/service/net/sareweb/emg/model/DrawWrapper.java",
"license": "gpl-3.0",
"size": 13351
}
|
[
"java.util.Date"
] |
import java.util.Date;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,767,113
|
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(
String resourceGroupName, String applicationSecurityGroupName);
|
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync( String resourceGroupName, String applicationSecurityGroupName);
|
/**
* Deletes the specified application security group.
*
* @param resourceGroupName The name of the resource group.
* @param applicationSecurityGroupName The name of the application security group.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link Response} on successful completion of {@link Mono}.
*/
|
Deletes the specified application security group
|
deleteWithResponseAsync
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/fluent/ApplicationSecurityGroupsClient.java",
"license": "mit",
"size": 25248
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"java.nio.ByteBuffer"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import java.nio.ByteBuffer;
|
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import java.nio.*;
|
[
"com.azure.core",
"java.nio"
] |
com.azure.core; java.nio;
| 1,594,514
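A minimal call sketch for the delete operation above (the client variable is an assumed, already-built ApplicationSecurityGroupsClient, and the resource names are placeholders):

import com.azure.core.http.rest.Response;
import java.nio.ByteBuffer;
import reactor.core.publisher.Flux;

// Issue the delete and block for the initial service response
// (blocking is only appropriate outside of reactive pipelines, e.g. in a CLI tool).
Response<Flux<ByteBuffer>> response =
        client.deleteWithResponseAsync("my-resource-group", "my-app-security-group").block();
System.out.println("HTTP status: " + response.getStatusCode());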
|
public static NodeList getNodeList(final Node xmlNode, final String xPathString) throws DSSException {
try {
final XPathExpression expr = createXPathExpression(xPathString);
final NodeList evaluated = (NodeList) expr.evaluate(xmlNode, XPathConstants.NODESET);
return evaluated;
} catch (XPathExpressionException e) {
throw new DSSException(e);
}
}
|
static NodeList function(final Node xmlNode, final String xPathString) throws DSSException { try { final XPathExpression expr = createXPathExpression(xPathString); final NodeList evaluated = (NodeList) expr.evaluate(xmlNode, XPathConstants.NODESET); return evaluated; } catch (XPathExpressionException e) { throw new DSSException(e); } }
|
/**
* Returns the NodeList corresponding to the XPath query.
*
* @param xmlNode
* The node where the search should be performed.
* @param xPathString
* XPath query string
* @return the NodeList corresponding to the XPath query
* @throws DSSException
* if an error occurred
*/
|
Returns the NodeList corresponding to the XPath query
|
getNodeList
|
{
"repo_name": "zsoltii/dss",
"path": "dss-spi/src/main/java/eu/europa/esig/dss/DomUtils.java",
"license": "lgpl-2.1",
"size": 17953
}
|
[
"javax.xml.xpath.XPathConstants",
"javax.xml.xpath.XPathExpression",
"javax.xml.xpath.XPathExpressionException",
"org.w3c.dom.Node",
"org.w3c.dom.NodeList"
] |
import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpression; import javax.xml.xpath.XPathExpressionException; import org.w3c.dom.Node; import org.w3c.dom.NodeList;
|
import javax.xml.xpath.*; import org.w3c.dom.*;
|
[
"javax.xml",
"org.w3c.dom"
] |
javax.xml; org.w3c.dom;
| 1,534,054
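A short usage sketch for DomUtils.getNodeList above (the parsed document and the XPath expression are illustrative; local-name() is used to stay namespace-agnostic):

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

// Collect every Reference element of a signature document and print its URI attribute.
NodeList references = DomUtils.getNodeList(document, "//*[local-name()='Reference']");
for (int i = 0; i < references.getLength(); i++) {
    Element reference = (Element) references.item(i);
    System.out.println(reference.getAttribute("URI"));
}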
|
@Override
protected boolean includeOperation(Method method, String beanKey) {
return isPublicInInterface(method, beanKey);
}
|
boolean function(Method method, String beanKey) { return isPublicInInterface(method, beanKey); }
|
/**
* Check to see if the {@code Method} is declared in
* one of the configured interfaces and that it is public.
* @param method the operation {@code Method}.
* @param beanKey the key associated with the MBean in the
* {@code beans} {@code Map}.
* @return {@code true} if the {@code Method} is declared in one of the
* configured interfaces, otherwise {@code false}.
*/
|
Check to see if the Method is declared in one of the configured interfaces and that it is public
|
includeOperation
|
{
"repo_name": "spring-projects/spring-framework",
"path": "spring-context/src/main/java/org/springframework/jmx/export/assembler/InterfaceBasedMBeanInfoAssembler.java",
"license": "apache-2.0",
"size": 8961
}
|
[
"java.lang.reflect.Method"
] |
import java.lang.reflect.Method;
|
import java.lang.reflect.*;
|
[
"java.lang"
] |
java.lang;
| 72,563
|
private String insertActionsAndSteps() {
String sql = null;
if (DbConnectionFactory.isMySql()) {
sql = MYSQL_INSERT_INTO_INTERMEDIATE_TABLE;
} else if (DbConnectionFactory.isPostgres()) {
sql = POSTGRES_INSERT_INTO_INTERMEDIATE_TABLE;
} else if (DbConnectionFactory.isMsSql()) {
sql = MSSQL_INSERT_INTO_INTERMEDIATE_TABLE;
} else if (DbConnectionFactory.isOracle()) {
sql = ORACLE_INSERT_INTO_INTERMEDIATE_TABLE;
}
return sql;
}
|
String function() { String sql = null; if (DbConnectionFactory.isMySql()) { sql = MYSQL_INSERT_INTO_INTERMEDIATE_TABLE; } else if (DbConnectionFactory.isPostgres()) { sql = POSTGRES_INSERT_INTO_INTERMEDIATE_TABLE; } else if (DbConnectionFactory.isMsSql()) { sql = MSSQL_INSERT_INTO_INTERMEDIATE_TABLE; } else if (DbConnectionFactory.isOracle()) { sql = ORACLE_INSERT_INTO_INTERMEDIATE_TABLE; } return sql; }
|
/**
* Inserts the relationships between workflow actions and the workflow steps where they can
* be used in the new intermediate table.
* @return
*/
|
Inserts the relationships between workflow actions and the workflow steps where they can be used in the new intermediate table
|
insertActionsAndSteps
|
{
"repo_name": "dotCMS/core",
"path": "dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task04305UpdateWorkflowActionTable.java",
"license": "gpl-3.0",
"size": 36884
}
|
[
"com.dotmarketing.db.DbConnectionFactory"
] |
import com.dotmarketing.db.DbConnectionFactory;
|
import com.dotmarketing.db.*;
|
[
"com.dotmarketing.db"
] |
com.dotmarketing.db;
| 840,810
|
protected void doRebind(WebdavRequest request, WebdavResponse response,
DavResource resource) throws IOException, DavException {
if (!resource.exists()) {
response.sendError(DavServletResponse.SC_NOT_FOUND);
}
RebindInfo rebindInfo = request.getRebindInfo();
DavResource oldBinding = getResourceFactory().createResource(request.getHrefLocator(rebindInfo.getHref()), request, response);
if (!(oldBinding instanceof BindableResource)) {
response.sendError(DavServletResponse.SC_METHOD_NOT_ALLOWED);
return;
}
DavResource newBinding = getResourceFactory().createResource(request.getMemberLocator(rebindInfo.getSegment()), request, response);
int status = validateDestination(newBinding, request, false);
if (status > DavServletResponse.SC_NO_CONTENT) {
response.sendError(status);
return;
}
((BindableResource) oldBinding).rebind(resource, newBinding);
response.setStatus(status);
}
|
void function(WebdavRequest request, WebdavResponse response, DavResource resource) throws IOException, DavException { if (!resource.exists()) { response.sendError(DavServletResponse.SC_NOT_FOUND); } RebindInfo rebindInfo = request.getRebindInfo(); DavResource oldBinding = getResourceFactory().createResource(request.getHrefLocator(rebindInfo.getHref()), request, response); if (!(oldBinding instanceof BindableResource)) { response.sendError(DavServletResponse.SC_METHOD_NOT_ALLOWED); return; } DavResource newBinding = getResourceFactory().createResource(request.getMemberLocator(rebindInfo.getSegment()), request, response); int status = validateDestination(newBinding, request, false); if (status > DavServletResponse.SC_NO_CONTENT) { response.sendError(status); return; } ((BindableResource) oldBinding).rebind(resource, newBinding); response.setStatus(status); }
|
/**
* The REBIND method
*
* @param request
* @param response
* @param resource the collection resource to which a new member will be added
* @throws IOException
* @throws DavException
*/
|
The REBIND method
|
doRebind
|
{
"repo_name": "SylvesterAbreu/jackrabbit",
"path": "jackrabbit-webdav/src/main/java/org/apache/jackrabbit/webdav/server/AbstractWebdavServlet.java",
"license": "apache-2.0",
"size": 51686
}
|
[
"java.io.IOException",
"org.apache.jackrabbit.webdav.DavException",
"org.apache.jackrabbit.webdav.DavResource",
"org.apache.jackrabbit.webdav.DavServletResponse",
"org.apache.jackrabbit.webdav.WebdavRequest",
"org.apache.jackrabbit.webdav.WebdavResponse",
"org.apache.jackrabbit.webdav.bind.BindableResource",
"org.apache.jackrabbit.webdav.bind.RebindInfo"
] |
import java.io.IOException; import org.apache.jackrabbit.webdav.DavException; import org.apache.jackrabbit.webdav.DavResource; import org.apache.jackrabbit.webdav.DavServletResponse; import org.apache.jackrabbit.webdav.WebdavRequest; import org.apache.jackrabbit.webdav.WebdavResponse; import org.apache.jackrabbit.webdav.bind.BindableResource; import org.apache.jackrabbit.webdav.bind.RebindInfo;
|
import java.io.*; import org.apache.jackrabbit.webdav.*; import org.apache.jackrabbit.webdav.bind.*;
|
[
"java.io",
"org.apache.jackrabbit"
] |
java.io; org.apache.jackrabbit;
| 2,785,921
|
protected void bindEventAndShow(final View v, ImageView targetImageView){
final BaseSliderView me = this;
|
void function(final View v, ImageView targetImageView){ final BaseSliderView me = this;
|
/**
* When you want to implement your own slider view, please call this method in the end in `getView()` method
* @param v the whole view
* @param targetImageView where to place image
*/
|
When you want to implement your own slider view, please call this method in the end in `getView()` method
|
bindEventAndShow
|
{
"repo_name": "HKMOpen/SogoShoesBoxes",
"path": "shoebox/daimajiaslider/src/main/java/com/daimajia/slider/library/SliderTypes/BaseSliderView.java",
"license": "mit",
"size": 7225
}
|
[
"android.view.View",
"android.widget.ImageView"
] |
import android.view.View; import android.widget.ImageView;
|
import android.view.*; import android.widget.*;
|
[
"android.view",
"android.widget"
] |
android.view; android.widget;
| 2,789,390
|
public void runPartitionContiguityEngine() throws EvaluationException, CloneNotSupportedException{
ConfigFlags.partitionAttribute = this.nfa.getPartitionAttribute();
ConfigFlags.hasPartitionAttribute = true;
this.activeRunsByPartition = new HashMap<Integer, ArrayList<Run>>();
ConfigFlags.timeWindow = this.nfa.getTimeWindow();
ConfigFlags.sequenceLength = this.nfa.getSize();
ConfigFlags.selectionStrategy = this.nfa.getSelectionStrategy();
Event e = null;
long currentTime = 0;
while((e = this.input.popEvent())!= null){
currentTime = System.nanoTime();
this.evaluateRunsForPartitionContiguity(e);
if(this.toDeleteRuns.size() > 0){
this.cleanRunsByPartition();
}
this.createNewRunByPartition(e);
Profiling.totalRunTime += (System.nanoTime() - currentTime);
Profiling.numberOfEvents += 1;
}
}
|
void function() throws EvaluationException, CloneNotSupportedException{ ConfigFlags.partitionAttribute = this.nfa.getPartitionAttribute(); ConfigFlags.hasPartitionAttribute = true; this.activeRunsByPartition = new HashMap<Integer, ArrayList<Run>>(); ConfigFlags.timeWindow = this.nfa.getTimeWindow(); ConfigFlags.sequenceLength = this.nfa.getSize(); ConfigFlags.selectionStrategy = this.nfa.getSelectionStrategy(); Event e = null; long currentTime = 0; while((e = this.input.popEvent())!= null){ currentTime = System.nanoTime(); this.evaluateRunsForPartitionContiguity(e); if(this.toDeleteRuns.size() > 0){ this.cleanRunsByPartition(); } this.createNewRunByPartition(e); Profiling.totalRunTime += (System.nanoTime() - currentTime); Profiling.numberOfEvents += 1; } }
|
/**
* This method is called when the query uses the partition-contiguity selection strategy
* @throws CloneNotSupportedException
*
*/
|
This method is called when the query uses the partition-contiguity selection strategy
|
runPartitionContiguityEngine
|
{
"repo_name": "jonyt/sase",
"path": "src/main/java/edu/umass/cs/sase/engine/Engine.java",
"license": "mit",
"size": 43179
}
|
[
"edu.umass.cs.sase.stream.Event",
"java.util.ArrayList",
"java.util.HashMap",
"net.sourceforge.jeval.EvaluationException"
] |
import edu.umass.cs.sase.stream.Event; import java.util.ArrayList; import java.util.HashMap; import net.sourceforge.jeval.EvaluationException;
|
import edu.umass.cs.sase.stream.*; import java.util.*; import net.sourceforge.jeval.*;
|
[
"edu.umass.cs",
"java.util",
"net.sourceforge.jeval"
] |
edu.umass.cs; java.util; net.sourceforge.jeval;
| 2,756,388
|
public boolean hasNearCache(@Nullable String cacheName, AffinityTopologyVersion topVer) {
return resolveDiscoCache(cacheName, topVer).hasNearCache(cacheName);
}
|
boolean function(@Nullable String cacheName, AffinityTopologyVersion topVer) { return resolveDiscoCache(cacheName, topVer).hasNearCache(cacheName); }
|
/**
* Checks if cache with given name has at least one node with near cache enabled.
*
* @param cacheName Cache name.
* @param topVer Topology version.
* @return {@code True} if cache with given name has at least one node with near cache enabled.
*/
|
Checks if cache with given name has at least one node with near cache enabled
|
hasNearCache
|
{
"repo_name": "ryanzz/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java",
"license": "apache-2.0",
"size": 107855
}
|
[
"org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion",
"org.jetbrains.annotations.Nullable"
] |
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.jetbrains.annotations.Nullable;
|
import org.apache.ignite.internal.processors.affinity.*; import org.jetbrains.annotations.*;
|
[
"org.apache.ignite",
"org.jetbrains.annotations"
] |
org.apache.ignite; org.jetbrains.annotations;
| 2,004,779
|
private void emailZIP(final byte[] zip, final DocbookBuildingOptions docbookBuildingOptions, final String bookName, final String locale)
{
NotificationUtilities.dumpMessageToStdOut("Doing Distribution Pass - Emailing To " + docbookBuildingOptions.getEmailTo());
// create a date formatter
final SimpleDateFormat formatter = new SimpleDateFormat(CommonConstants.FILTER_DISPLAY_DATE_FORMAT);
// Get system properties
final Properties properties = System.getProperties();
// Recipient's email ID needs to be mentioned.
final String to = docbookBuildingOptions.getEmailTo();
// Sender's email ID needs to be mentioned
final String from = "donotreply@redhat.com";
// Get the default Session object.
final Session session = Session.getDefaultInstance(properties);
try
{
// Create a default MimeMessage object.
final MimeMessage message = new MimeMessage(session);
// Set From: header field of the header.
message.setFrom(new InternetAddress(from));
// Set To: header field of the header.
message.addRecipient(Message.RecipientType.TO, new InternetAddress(to));
// Set Subject: header field
message.setSubject("Skynet Docbook Build");
// Create the message part
final BodyPart messageBodyPart = new MimeBodyPart();
// Fill the message
messageBodyPart.setText("The attached file was generated at " + formatter.format(new Date()));
// Create a multipart message
final Multipart multipart = new MimeMultipart();
// Set text message part
multipart.addBodyPart(messageBodyPart);
// Create the attachment
final BodyPart attachmentBodyPart = new MimeBodyPart();
final String filename = bookName + "-" + locale + ".zip";
attachmentBodyPart.setContent(zip, CommonConstants.ZIP_MIME_TYPE);
attachmentBodyPart.setFileName(filename);
// Set text attachment part
multipart.addBodyPart(attachmentBodyPart);
// Send the complete message parts
message.setContent(multipart);
// Send message
Transport.send(message);
}
catch (final MessagingException mex)
{
mex.printStackTrace();
}
}
|
void function(final byte[] zip, final DocbookBuildingOptions docbookBuildingOptions, final String bookName, final String locale) { NotificationUtilities.dumpMessageToStdOut(STR + docbookBuildingOptions.getEmailTo()); final SimpleDateFormat formatter = new SimpleDateFormat(CommonConstants.FILTER_DISPLAY_DATE_FORMAT); final Properties properties = System.getProperties(); final String to = docbookBuildingOptions.getEmailTo(); final String from = STR; final Session session = Session.getDefaultInstance(properties); try { final MimeMessage message = new MimeMessage(session); message.setFrom(new InternetAddress(from)); message.addRecipient(Message.RecipientType.TO, new InternetAddress(to)); message.setSubject(STR); final BodyPart messageBodyPart = new MimeBodyPart(); messageBodyPart.setText(STR + formatter.format(new Date())); final Multipart multipart = new MimeMultipart(); multipart.addBodyPart(messageBodyPart); final BodyPart attachmentBodyPart = new MimeBodyPart(); final String filename = bookName + "-" + locale + ".zip"; attachmentBodyPart.setContent(zip, CommonConstants.ZIP_MIME_TYPE); attachmentBodyPart.setFileName(filename); multipart.addBodyPart(attachmentBodyPart); message.setContent(multipart); Transport.send(message); } catch (final MessagingException mex) { mex.printStackTrace(); } }
|
/**
* This function emails the ZIP file to the user
*
* @param topics
* The collection of topics to process
*/
|
This function emails the ZIP file to the user
|
emailZIP
|
{
"repo_name": "pressgang-ccms/PressGangCCMSServices",
"path": "docbook-builder-component/src/main/java/org/jboss/pressgang/ccms/services/docbookbuilder/DocbookBuildingThread.java",
"license": "gpl-3.0",
"size": 24980
}
|
[
"java.text.SimpleDateFormat",
"java.util.Date",
"java.util.Properties",
"javax.mail.BodyPart",
"javax.mail.Message",
"javax.mail.MessagingException",
"javax.mail.Multipart",
"javax.mail.Session",
"javax.mail.Transport",
"javax.mail.internet.InternetAddress",
"javax.mail.internet.MimeBodyPart",
"javax.mail.internet.MimeMessage",
"javax.mail.internet.MimeMultipart",
"org.jboss.pressgang.ccms.docbook.compiling.DocbookBuildingOptions",
"org.jboss.pressgang.ccms.utils.common.NotificationUtilities",
"org.jboss.pressgang.ccms.utils.constants.CommonConstants"
] |
import java.text.SimpleDateFormat; import java.util.Date; import java.util.Properties; import javax.mail.BodyPart; import javax.mail.Message; import javax.mail.MessagingException; import javax.mail.Multipart; import javax.mail.Session; import javax.mail.Transport; import javax.mail.internet.InternetAddress; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; import org.jboss.pressgang.ccms.docbook.compiling.DocbookBuildingOptions; import org.jboss.pressgang.ccms.utils.common.NotificationUtilities; import org.jboss.pressgang.ccms.utils.constants.CommonConstants;
|
import java.text.*; import java.util.*; import javax.mail.*; import javax.mail.internet.*; import org.jboss.pressgang.ccms.docbook.compiling.*; import org.jboss.pressgang.ccms.utils.common.*; import org.jboss.pressgang.ccms.utils.constants.*;
|
[
"java.text",
"java.util",
"javax.mail",
"org.jboss.pressgang"
] |
java.text; java.util; javax.mail; org.jboss.pressgang;
| 1,385,874
|
@NotAuditable
UserTransaction getUserTransaction(boolean readOnly, boolean ignoreSystemReadOnly);
|
UserTransaction getUserTransaction(boolean readOnly, boolean ignoreSystemReadOnly);
|
/**
* Gets a user transaction that supports transaction propagation.
* This is like the EJB <b>REQUIRED</b> transaction attribute.
*
* @param readOnly Set true for a READONLY transaction instance, false otherwise.
* @param ignoreSystemReadOnly <tt>true</tt> to force the read-only flag to be respected regardless
* of the system read-only mode.
* @return the user transaction
*/
|
Gets a user transaction that supports transaction propagation. This is like the EJB REQUIRED transaction attribute
|
getUserTransaction
|
{
"repo_name": "nguyentienlong/community-edition",
"path": "projects/repository/source/java/org/alfresco/service/transaction/TransactionService.java",
"license": "lgpl-3.0",
"size": 5660
}
|
[
"javax.transaction.UserTransaction"
] |
import javax.transaction.UserTransaction;
|
import javax.transaction.*;
|
[
"javax.transaction"
] |
javax.transaction;
| 1,444,468
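A minimal usage sketch of the propagating transaction above (the injected transactionService and the body of the work are assumptions; error handling is reduced to the rollback path):

import javax.transaction.UserTransaction;
import org.alfresco.service.transaction.TransactionService;

void readOnlyWork(TransactionService transactionService) throws Exception {
    // REQUIRED-style, read-only transaction; see the javadoc above for the second flag.
    UserTransaction tx = transactionService.getUserTransaction(true, true);
    tx.begin();
    try {
        // ... repository reads go here ...
        tx.commit();
    } catch (Exception e) {
        tx.rollback();   // do not leave the transaction open on failure
        throw e;
    }
}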
|
public final Pointcut buildSafePointcut() {
Pointcut pc = getPointcut();
MethodMatcher safeMethodMatcher = MethodMatchers.intersection(
new AdviceExcludingMethodMatcher(this.aspectJAdviceMethod), pc.getMethodMatcher());
return new ComposablePointcut(pc.getClassFilter(), safeMethodMatcher);
}
|
final Pointcut function() { Pointcut pc = getPointcut(); MethodMatcher safeMethodMatcher = MethodMatchers.intersection( new AdviceExcludingMethodMatcher(this.aspectJAdviceMethod), pc.getMethodMatcher()); return new ComposablePointcut(pc.getClassFilter(), safeMethodMatcher); }
|
/**
* Build a 'safe' pointcut that excludes the AspectJ advice method itself.
* @return a composable pointcut that builds on the original AspectJ expression pointcut
* @see #getPointcut()
*/
|
Build a 'safe' pointcut that excludes the AspectJ advice method itself
|
buildSafePointcut
|
{
"repo_name": "shivpun/spring-framework",
"path": "spring-aop/src/main/java/org/springframework/aop/aspectj/AbstractAspectJAdvice.java",
"license": "apache-2.0",
"size": 25032
}
|
[
"org.springframework.aop.MethodMatcher",
"org.springframework.aop.Pointcut",
"org.springframework.aop.support.ComposablePointcut",
"org.springframework.aop.support.MethodMatchers"
] |
import org.springframework.aop.MethodMatcher; import org.springframework.aop.Pointcut; import org.springframework.aop.support.ComposablePointcut; import org.springframework.aop.support.MethodMatchers;
|
import org.springframework.aop.*; import org.springframework.aop.support.*;
|
[
"org.springframework.aop"
] |
org.springframework.aop;
| 550,218
|
WorkflowResult<String> create(AnyObjectTO anyObjectTO);
|
WorkflowResult<String> create(AnyObjectTO anyObjectTO);
|
/**
* Create a anyObject.
*
* @param anyObjectTO anyObject to be created and whether to propagate it as active
* @return anyObject just created
*/
|
Create a anyObject
|
create
|
{
"repo_name": "tmess567/syncope",
"path": "core/workflow-api/src/main/java/org/apache/syncope/core/workflow/api/AnyObjectWorkflowAdapter.java",
"license": "apache-2.0",
"size": 2069
}
|
[
"org.apache.syncope.common.lib.to.AnyObjectTO",
"org.apache.syncope.core.provisioning.api.WorkflowResult"
] |
import org.apache.syncope.common.lib.to.AnyObjectTO; import org.apache.syncope.core.provisioning.api.WorkflowResult;
|
import org.apache.syncope.common.lib.to.*; import org.apache.syncope.core.provisioning.api.*;
|
[
"org.apache.syncope"
] |
org.apache.syncope;
| 2,807,023
|
@Test(timeout = 2000)
public void testAppStateXML() throws Exception {
String appId = submitApplication();
List<AppState> responses = performGetCalls(
RM_WEB_SERVICE_PATH + format(APPS_APPID_STATE, appId),
AppState.class, null, null);
AppState routerResponse = responses.get(0);
AppState rmResponse = responses.get(1);
assertNotNull(routerResponse);
assertNotNull(rmResponse);
assertEquals(
rmResponse.getState(),
routerResponse.getState());
}
|
@Test(timeout = 2000) void function() throws Exception { String appId = submitApplication(); List<AppState> responses = performGetCalls( RM_WEB_SERVICE_PATH + format(APPS_APPID_STATE, appId), AppState.class, null, null); AppState routerResponse = responses.get(0); AppState rmResponse = responses.get(1); assertNotNull(routerResponse); assertNotNull(rmResponse); assertEquals( rmResponse.getState(), routerResponse.getState()); }
|
/**
* This test validates the correctness of
* {@link RMWebServiceProtocol#getAppState()} inside Router.
*/
|
This test validates the correctness of <code>RMWebServiceProtocol#getAppState()</code> inside Router
|
testAppStateXML
|
{
"repo_name": "dennishuo/hadoop",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/test/java/org/apache/hadoop/yarn/server/router/webapp/TestRouterWebServicesREST.java",
"license": "apache-2.0",
"size": 48882
}
|
[
"java.util.List",
"org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppState",
"org.junit.Assert",
"org.junit.Test"
] |
import java.util.List; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppState; import org.junit.Assert; import org.junit.Test;
|
import java.util.*; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.*; import org.junit.*;
|
[
"java.util",
"org.apache.hadoop",
"org.junit"
] |
java.util; org.apache.hadoop; org.junit;
| 999,718
|
public void err(String message, Locator overrideLocator) throws SAXException {
if (errorHandler != null) {
SAXParseException spe = new SAXParseException(message, overrideLocator);
errorHandler.error(spe);
}
}
|
void function(String message, Locator overrideLocator) throws SAXException { if (errorHandler != null) { SAXParseException spe = new SAXParseException(message, overrideLocator); errorHandler.error(spe); } }
|
/**
* Emit an error with specified locator.
*
* @param message the error message
* @throws SAXException if something goes wrong
*/
|
Emit an error with specified locator
|
err
|
{
"repo_name": "YOTOV-LIMITED/validator",
"path": "src/nu/validator/checker/Checker.java",
"license": "mit",
"size": 6859
}
|
[
"org.xml.sax.Locator",
"org.xml.sax.SAXException",
"org.xml.sax.SAXParseException"
] |
import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException;
|
import org.xml.sax.*;
|
[
"org.xml.sax"
] |
org.xml.sax;
| 714,093
|
public boolean process(Person person, long time) {
return process(person, time, true);
}
|
boolean function(Person person, long time) { return process(person, time, true); }
|
/**
* Process this Module with the given Person at the specified time within the simulation.
* Processing will complete if the person dies.
*
* @param person
* : the person being simulated
* @param time
* : the date within the simulated world
* @return completed : whether or not this Module completed.
*/
|
Process this Module with the given Person at the specified time within the simulation. Processing will complete if the person dies
|
process
|
{
"repo_name": "synthetichealth/synthea",
"path": "src/main/java/org/mitre/synthea/engine/Module.java",
"license": "apache-2.0",
"size": 17766
}
|
[
"org.mitre.synthea.world.agents.Person"
] |
import org.mitre.synthea.world.agents.Person;
|
import org.mitre.synthea.world.agents.*;
|
[
"org.mitre.synthea"
] |
org.mitre.synthea;
| 2,746,483
|
WebSiteIsHostnameAvailableResponse isHostnameAvailable(String webSiteName) throws IOException, ServiceException, ParserConfigurationException, SAXException;
|
WebSiteIsHostnameAvailableResponse isHostnameAvailable(String webSiteName) throws IOException, ServiceException, ParserConfigurationException, SAXException;
|
/**
* Determines if a host name is available.
*
* @param webSiteName Required. The name of the web site.
* @throws IOException Signals that an I/O exception of some sort has
* occurred. This class is the general class of exceptions produced by
* failed or interrupted I/O operations.
* @throws ServiceException Thrown if an unexpected response is found.
* @throws ParserConfigurationException Thrown if there was a serious
* configuration error with the document parser.
* @throws SAXException Thrown if there was an error parsing the XML
* response.
* @return The Is Hostname Available Web Site operation response.
*/
|
Determines if a host name is available
|
isHostnameAvailable
|
{
"repo_name": "southworkscom/azure-sdk-for-java",
"path": "service-management/azure-svc-mgmt-websites/src/main/java/com/microsoft/windowsazure/management/websites/WebSiteOperations.java",
"license": "apache-2.0",
"size": 61544
}
|
[
"com.microsoft.windowsazure.exception.ServiceException",
"com.microsoft.windowsazure.management.websites.models.WebSiteIsHostnameAvailableResponse",
"java.io.IOException",
"javax.xml.parsers.ParserConfigurationException",
"org.xml.sax.SAXException"
] |
import com.microsoft.windowsazure.exception.ServiceException; import com.microsoft.windowsazure.management.websites.models.WebSiteIsHostnameAvailableResponse; import java.io.IOException; import javax.xml.parsers.ParserConfigurationException; import org.xml.sax.SAXException;
|
import com.microsoft.windowsazure.exception.*; import com.microsoft.windowsazure.management.websites.models.*; import java.io.*; import javax.xml.parsers.*; import org.xml.sax.*;
|
[
"com.microsoft.windowsazure",
"java.io",
"javax.xml",
"org.xml.sax"
] |
com.microsoft.windowsazure; java.io; javax.xml; org.xml.sax;
| 1,223,766
|
public void choosePhotoFromGallery() {
Intent galleryIntent = new Intent(Intent.ACTION_PICK,
android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(galleryIntent, PICTURE_FROM_GALLERY);
}
|
void function() { Intent galleryIntent = new Intent(Intent.ACTION_PICK, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI); startActivityForResult(galleryIntent, PICTURE_FROM_GALLERY); }
|
/**
* Request to pick image from Gallery.
*/
|
Request to pick image from Gallery
|
choosePhotoFromGallery
|
{
"repo_name": "mlperf/training_results_v0.6",
"path": "Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/apps/android_deploy/app/src/main/java/ml/dmlc/tvm/android/demo/MainActivity.java",
"license": "apache-2.0",
"size": 26571
}
|
[
"android.content.Intent",
"android.provider.MediaStore"
] |
import android.content.Intent; import android.provider.MediaStore;
|
import android.content.*; import android.provider.*;
|
[
"android.content",
"android.provider"
] |
android.content; android.provider;
| 2,274,094
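The picked image is delivered back through onActivityResult; a minimal companion sketch for the method above (the request-code constant is the one used in the entry, and the URI handling is left as a placeholder):

import android.content.Intent;
import android.net.Uri;

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == PICTURE_FROM_GALLERY && resultCode == RESULT_OK && data != null) {
        Uri imageUri = data.getData();   // content:// URI of the selected picture
        // decode or display the image from imageUri here
    }
}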
|
public void init(SlingHttpServletRequest request, SlingHttpServletResponse response) throws IOException, JsonException {
max = request.getParameter(PARAM_SIZE) != null ? Integer.parseInt(request.getParameter(PARAM_SIZE)) : NB_MAX;
if (max < 0) {
max = Integer.MAX_VALUE;
}
initInternal(request, response);
}
|
void function(SlingHttpServletRequest request, SlingHttpServletResponse response) throws IOException, JsonException { max = request.getParameter(PARAM_SIZE) != null ? Integer.parseInt(request.getParameter(PARAM_SIZE)) : NB_MAX; if (max < 0) { max = Integer.MAX_VALUE; } initInternal(request, response); }
|
/**
* Init the writer, writes beginning of the output
* @param request request from which writer will output
* @param response response on which writer will output
* @throws IOException error handling streams
* @throws JSONException in case invalid json is written
*/
|
Init the writer, writes beginning of the output
|
init
|
{
"repo_name": "anchela/sling",
"path": "contrib/extensions/sling-pipes/src/main/java/org/apache/sling/pipes/OutputWriter.java",
"license": "apache-2.0",
"size": 3552
}
|
[
"java.io.IOException",
"javax.json.JsonException",
"org.apache.sling.api.SlingHttpServletRequest",
"org.apache.sling.api.SlingHttpServletResponse"
] |
import java.io.IOException; import javax.json.JsonException; import org.apache.sling.api.SlingHttpServletRequest; import org.apache.sling.api.SlingHttpServletResponse;
|
import java.io.*; import javax.json.*; import org.apache.sling.api.*;
|
[
"java.io",
"javax.json",
"org.apache.sling"
] |
java.io; javax.json; org.apache.sling;
| 256,023
|
public void visit(Pipeline.PipelineVisitor visitor,
Set<PValue> visitedValues) {
if (!finishedSpecifying) {
finishSpecifying();
}
// Visit inputs.
for (Map.Entry<PValue, TransformTreeNode> entry : inputs.entrySet()) {
if (visitedValues.add(entry.getKey())) {
visitor.visitValue(entry.getKey(), entry.getValue());
}
}
if (isCompositeNode()) {
visitor.enterCompositeTransform(this);
for (TransformTreeNode child : parts) {
child.visit(visitor, visitedValues);
}
visitor.leaveCompositeTransform(this);
} else {
visitor.visitTransform(this);
}
// Visit outputs.
for (PValue pValue : getExpandedOutputs()) {
if (visitedValues.add(pValue)) {
visitor.visitValue(pValue, this);
}
}
}
|
void function(Pipeline.PipelineVisitor visitor, Set<PValue> visitedValues) { if (!finishedSpecifying) { finishSpecifying(); } for (Map.Entry<PValue, TransformTreeNode> entry : inputs.entrySet()) { if (visitedValues.add(entry.getKey())) { visitor.visitValue(entry.getKey(), entry.getValue()); } } if (isCompositeNode()) { visitor.enterCompositeTransform(this); for (TransformTreeNode child : parts) { child.visit(visitor, visitedValues); } visitor.leaveCompositeTransform(this); } else { visitor.visitTransform(this); } for (PValue pValue : getExpandedOutputs()) { if (visitedValues.add(pValue)) { visitor.visitValue(pValue, this); } } }
|
/**
* Visit the transform node.
*
* <p> Provides an ordered visit of the input values, the primitive
* transform (or child nodes for composite transforms), then the
* output values.
*/
|
Visit the transform node. Provides an ordered visit of the input values, the primitive transform (or child nodes for composite transforms), then the output values
|
visit
|
{
"repo_name": "haocafes/DataflowJavaSDK",
"path": "sdk/src/main/java/com/google/cloud/dataflow/sdk/runners/TransformTreeNode.java",
"license": "apache-2.0",
"size": 6674
}
|
[
"com.google.cloud.dataflow.sdk.Pipeline",
"com.google.cloud.dataflow.sdk.values.PValue",
"java.util.Map",
"java.util.Set"
] |
import com.google.cloud.dataflow.sdk.Pipeline; import com.google.cloud.dataflow.sdk.values.PValue; import java.util.Map; import java.util.Set;
|
import com.google.cloud.dataflow.sdk.*; import com.google.cloud.dataflow.sdk.values.*; import java.util.*;
|
[
"com.google.cloud",
"java.util"
] |
com.google.cloud; java.util;
| 233,656
|
@Override
public int getSingleElement() {
for (int i = 0; i < (bits.length << LOG_BITS); i++) {
if (member(i)) {
return i;
}
}
return Label.INVALID;
}
|
int function() { for (int i = 0; i < (bits.length << LOG_BITS); i++) { if (member(i)) { return i; } } return Label.INVALID; }
|
/** Get the first element you find and return it. Return Label.INVALID
* otherwise.
*/
|
Get the first element you find and return it. Return Label.INVALID otherwise
|
getSingleElement
|
{
"repo_name": "ekcs/congress",
"path": "thirdparty/antlr3-antlr-3.5/tool/src/main/java/org/antlr/misc/BitSet.java",
"license": "apache-2.0",
"size": 16376
}
|
[
"org.antlr.analysis.Label"
] |
import org.antlr.analysis.Label;
|
import org.antlr.analysis.*;
|
[
"org.antlr.analysis"
] |
org.antlr.analysis;
| 2,593,070
|
private static StatisticsItem getItem(String className, String methodName) {
StatisticsItem item = null;
synchronized (list) {
for (Iterator it = list.iterator(); it.hasNext(); ) {
item = (StatisticsItem)it.next();
if (item.className.equals(className)
&& item.methodName.equals(methodName))
{
return item;
}
}
item = new StatisticsItem(className, methodName);
list.add(item);
}
return item;
}
|
static StatisticsItem function(String className, String methodName) { StatisticsItem item = null; synchronized (list) { for (Iterator it = list.iterator(); it.hasNext(); ) { item = (StatisticsItem)it.next(); if (item.className.equals(className) && item.methodName.equals(methodName)) { return item; } } item = new StatisticsItem(className, methodName); list.add(item); } return item; }
|
/**
* Retrieve a StatisticsItem from the list, creating a new one with a
* count of zero if the item doesn't exist.
*
* @param className class name of item to find
* @param methodName method name of item to find
*
* @return a StatisticsItem matching the given parameters, or a new
* StatisticsItem representing the given parameters
*/
|
Retrieve a StatisticsItem from the list, creating a new one with a count of zero if the item doesn't exist
|
getItem
|
{
"repo_name": "cdegroot/river",
"path": "qa/src/com/sun/jini/test/impl/end2end/jssewrapper/Statistics.java",
"license": "apache-2.0",
"size": 5417
}
|
[
"java.util.Iterator"
] |
import java.util.Iterator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,500,455
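The doc above describes a get-or-create lookup guarded by synchronization; the same pattern can be sketched with a keyed concurrent map (an alternative illustration, not the class's actual storage):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

private static final Map<String, StatisticsItem> items = new ConcurrentHashMap<>();

private static StatisticsItem getOrCreate(String className, String methodName) {
    // computeIfAbsent performs the lookup and the lazy creation atomically.
    return items.computeIfAbsent(className + "#" + methodName,
            key -> new StatisticsItem(className, methodName));
}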
|
public boolean processAddProtocolPersonnelEvent(AddProtocolPersonnelEventBase addProtocolPersonnelEvent) {
boolean isValid = true;
ProtocolPersonBase protocolPerson = addProtocolPersonnelEvent.getProtocolPerson();
if (isEmptyPersonOrRole(protocolPerson)) {
isValid = false;
} else {
List<ProtocolPersonBase> protocolPersons = getProtocolPersons(addProtocolPersonnelEvent);
isValid &= !isDuplicateInvestigator(protocolPerson, protocolPersons, true);
isValid &= !isPISameAsCoI(protocolPerson, protocolPersons);
isValid &= !isDuplicatePerson(protocolPerson, addProtocolPersonnelEvent);
}
return isValid;
}
|
boolean function(AddProtocolPersonnelEventBase addProtocolPersonnelEvent) { boolean isValid = true; ProtocolPersonBase protocolPerson = addProtocolPersonnelEvent.getProtocolPerson(); if (isEmptyPersonOrRole(protocolPerson)) { isValid = false; } else { List<ProtocolPersonBase> protocolPersons = getProtocolPersons(addProtocolPersonnelEvent); isValid &= !isDuplicateInvestigator(protocolPerson, protocolPersons, true); isValid &= !isPISameAsCoI(protocolPerson, protocolPersons); isValid &= !isDuplicatePerson(protocolPerson, addProtocolPersonnelEvent); } return isValid; }
|
/**
* Runs the rules for adding a protocol personnel.
* @param addProtocolPersonnelEvent The event invoking the add protocol personnel rules
* @return True if the protocol personnel is valid, false otherwise
*/
|
Runs the rules for adding a protocol personnel
|
processAddProtocolPersonnelEvent
|
{
"repo_name": "sanjupolus/KC6.oLatest",
"path": "coeus-impl/src/main/java/org/kuali/kra/protocol/personnel/ProtocolPersonnelRuleBase.java",
"license": "agpl-3.0",
"size": 13741
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,488,743
|
public static ClientConfigurationFactory createClientConfigurationFactory() {
return DescriptionFactoryImpl.getClientConfigurationFactory();
}
|
static ClientConfigurationFactory function() { return DescriptionFactoryImpl.getClientConfigurationFactory(); }
|
/**
* Creates Client ConfigurationFactory used to create AxisConfiguration.
*
* @return A Client Configuration Factory's new instance. ClinetConfigurationFactory is
* Singleton.
*/
|
Creates Client ConfigurationFactory used to create AxisConfiguration
|
createClientConfigurationFactory
|
{
"repo_name": "intalio/axis2",
"path": "modules/metadata/src/org/apache/axis2/jaxws/description/DescriptionFactory.java",
"license": "apache-2.0",
"size": 32732
}
|
[
"org.apache.axis2.jaxws.ClientConfigurationFactory",
"org.apache.axis2.jaxws.description.impl.DescriptionFactoryImpl"
] |
import org.apache.axis2.jaxws.ClientConfigurationFactory; import org.apache.axis2.jaxws.description.impl.DescriptionFactoryImpl;
|
import org.apache.axis2.jaxws.*; import org.apache.axis2.jaxws.description.impl.*;
|
[
"org.apache.axis2"
] |
org.apache.axis2;
| 1,644,007
|
public void deletePseudoFile(SrvSession sess, TreeConnection tree, String path)
{
// Access the device context
ContentContext ctx = (ContentContext) tree.getContext();
// Get the file state for the parent folder
String[] paths = FileName.splitPath( path);
FileState fstate = getStateForPath( ctx, paths[0]);
// Check if the folder has any pseudo files
if ( fstate == null || fstate.hasPseudoFiles() == false)
return;
// Remove the pseudo file from the list
fstate.getPseudoFileList().removeFile( paths[1], false);
}
|
void function(SrvSession sess, TreeConnection tree, String path) { ContentContext ctx = (ContentContext) tree.getContext(); String[] paths = FileName.splitPath( path); FileState fstate = getStateForPath( ctx, paths[0]); if ( fstate == null || fstate.hasPseudoFiles() == false) return; fstate.getPseudoFileList().removeFile( paths[1], false); }
|
/**
* Delete a pseudo file
*
* @param sess SrvSession
* @param tree TreeConnection
* @param path String
*/
|
Delete a pseudo file
|
deletePseudoFile
|
{
"repo_name": "nguyentienlong/community-edition",
"path": "projects/repository/source/java/org/alfresco/filesys/alfresco/PseudoFileImpl.java",
"license": "lgpl-3.0",
"size": 9819
}
|
[
"org.alfresco.filesys.repo.ContentContext",
"org.alfresco.jlan.server.SrvSession",
"org.alfresco.jlan.server.filesys.FileName",
"org.alfresco.jlan.server.filesys.TreeConnection",
"org.alfresco.jlan.server.filesys.cache.FileState"
] |
import org.alfresco.filesys.repo.ContentContext; import org.alfresco.jlan.server.SrvSession; import org.alfresco.jlan.server.filesys.FileName; import org.alfresco.jlan.server.filesys.TreeConnection; import org.alfresco.jlan.server.filesys.cache.FileState;
|
import org.alfresco.filesys.repo.*; import org.alfresco.jlan.server.*; import org.alfresco.jlan.server.filesys.*; import org.alfresco.jlan.server.filesys.cache.*;
|
[
"org.alfresco.filesys",
"org.alfresco.jlan"
] |
org.alfresco.filesys; org.alfresco.jlan;
| 1,632,704
|
private void writeObject(java.io.ObjectOutputStream s) throws IOException {
Iterator<Entry> i = (size > 0) ? entrySet0().iterator() : null;
// Write out the threshold, loadfactor, and any hidden stuff
s.defaultWriteObject();
// Write out number of buckets
s.writeInt(table.length);
// Write out size (number of Mappings)
s.writeInt(size);
// Write out keys and values (alternating)
if (i != null) {
while (i.hasNext()) {
Entry e = i.next();
s.writeObject(e.getKey());
s.writeInt(e.getValue());
}
}
}
|
void function(java.io.ObjectOutputStream s) throws IOException { Iterator<Entry> i = (size > 0) ? entrySet0().iterator() : null; s.defaultWriteObject(); s.writeInt(table.length); s.writeInt(size); if (i != null) { while (i.hasNext()) { Entry e = i.next(); s.writeObject(e.getKey()); s.writeInt(e.getValue()); } } }
|
/**
* Save the state of the <tt>HashMap</tt> instance to a stream (i.e., serialize it).
*
* @serialData The <i>capacity</i> of the IntHashMap (the length of the bucket array) is emitted
* (int), followed by the <i>size</i> (an int, the number of key-value mappings),
* followed by the key (Object) and value (Object) for each key-value mapping. The
* key-value mappings are emitted in no particular order.
*/
|
Save the state of the HashMap instance to a stream (i.e., serialize it)
|
writeObject
|
{
"repo_name": "prasi-in/geode",
"path": "geode-core/src/main/java/org/apache/geode/cache/query/internal/ObjectIntHashMap.java",
"license": "apache-2.0",
"size": 35898
}
|
[
"java.io.IOException",
"java.util.Iterator"
] |
import java.io.IOException; import java.util.Iterator;
|
import java.io.*; import java.util.*;
|
[
"java.io",
"java.util"
] |
java.io; java.util;
| 487,842
|
private void detachPool() {
ServerRegionProxy serverRegionProxy = getServerProxy();
if (serverRegionProxy != null) {
InternalCache internalCache = getCache();
String poolName = this.getPoolName();
PoolImpl pool = (PoolImpl) PoolManager.find(this.getPoolName());
if (poolName != null && pool != null) {
serverRegionProxy
.detach(internalCache.keepDurableSubscriptionsAlive() || pool.getKeepAlive());
} else {
serverRegionProxy.detach(internalCache.keepDurableSubscriptionsAlive());
}
}
}
|
void function() { ServerRegionProxy serverRegionProxy = getServerProxy(); if (serverRegionProxy != null) { InternalCache internalCache = getCache(); String poolName = this.getPoolName(); PoolImpl pool = (PoolImpl) PoolManager.find(this.getPoolName()); if (poolName != null && pool != null) { serverRegionProxy .detach(internalCache.keepDurableSubscriptionsAlive() || pool.getKeepAlive()); } else { serverRegionProxy.detach(internalCache.keepDurableSubscriptionsAlive()); } } }
|
/**
* Release the client connection pool if we have one
*
* @since GemFire 5.7
*/
|
Release the client connection pool if we have one
|
detachPool
|
{
"repo_name": "charliemblack/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/cache/LocalRegion.java",
"license": "apache-2.0",
"size": 428144
}
|
[
"org.apache.geode.cache.client.PoolManager",
"org.apache.geode.cache.client.internal.PoolImpl",
"org.apache.geode.cache.client.internal.ServerRegionProxy"
] |
import org.apache.geode.cache.client.PoolManager; import org.apache.geode.cache.client.internal.PoolImpl; import org.apache.geode.cache.client.internal.ServerRegionProxy;
|
import org.apache.geode.cache.client.*; import org.apache.geode.cache.client.internal.*;
|
[
"org.apache.geode"
] |
org.apache.geode;
| 1,883,723
|
@Test
public void testCreateInstance()
{
Lookup prefLook1 = createLookup();
Lookup prefLook2 = createLookup();
Lookup defLook1 = createLookup();
Lookup defLook2 = createLookup();
ConfigurationInterpolator interpolator =
createMock(ConfigurationInterpolator.class);
ConfigurationInterpolator parent =
createMock(ConfigurationInterpolator.class);
InterpolatorSpecification spec =
builder.withPrefixLookup(PREFIX1, prefLook1)
.withDefaultLookup(defLook1)
.withPrefixLookup(PREFIX2, prefLook2)
.withParentInterpolator(parent)
.withDefaultLookup(defLook2)
.withInterpolator(interpolator).create();
assertSame("Wrong interpolator", interpolator, spec.getInterpolator());
assertSame("Wrong parent interpolator", parent,
spec.getParentInterpolator());
checkPrefixLookups(spec, prefLook1, prefLook2);
checkDefaultLookups(spec, defLook1, defLook2);
}
|
void function() { Lookup prefLook1 = createLookup(); Lookup prefLook2 = createLookup(); Lookup defLook1 = createLookup(); Lookup defLook2 = createLookup(); ConfigurationInterpolator interpolator = createMock(ConfigurationInterpolator.class); ConfigurationInterpolator parent = createMock(ConfigurationInterpolator.class); InterpolatorSpecification spec = builder.withPrefixLookup(PREFIX1, prefLook1) .withDefaultLookup(defLook1) .withPrefixLookup(PREFIX2, prefLook2) .withParentInterpolator(parent) .withDefaultLookup(defLook2) .withInterpolator(interpolator).create(); assertSame(STR, interpolator, spec.getInterpolator()); assertSame(STR, parent, spec.getParentInterpolator()); checkPrefixLookups(spec, prefLook1, prefLook2); checkDefaultLookups(spec, defLook1, defLook2); }
|
/**
* Tests whether an instance with all possible properties can be set.
*/
|
Tests whether an instance with all possible properties can be set
|
testCreateInstance
|
{
"repo_name": "mohanaraosv/commons-configuration",
"path": "src/test/java/org/apache/commons/configuration2/interpol/TestInterpolatorSpecification.java",
"license": "apache-2.0",
"size": 9125
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 2,741,596
|
protected TextView createDefaultTabView(Context context) {
TextView textView = new TextView(context);
textView.setGravity(Gravity.CENTER);
textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, TAB_VIEW_TEXT_SIZE_SP);
textView.setTypeface(Typeface.DEFAULT_BOLD);
textView.setLayoutParams(new LinearLayout.LayoutParams(
ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
TypedValue outValue = new TypedValue();
getContext().getTheme().resolveAttribute(android.R.attr.selectableItemBackground,
outValue, true);
textView.setBackgroundResource(outValue.resourceId);
textView.setAllCaps(true);
int padding = (int) (TAB_VIEW_PADDING_DIPS * getResources().getDisplayMetrics().density);
textView.setPadding(padding, padding, padding, padding);
return textView;
}
|
TextView function(Context context) { TextView textView = new TextView(context); textView.setGravity(Gravity.CENTER); textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, TAB_VIEW_TEXT_SIZE_SP); textView.setTypeface(Typeface.DEFAULT_BOLD); textView.setLayoutParams(new LinearLayout.LayoutParams( ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); TypedValue outValue = new TypedValue(); getContext().getTheme().resolveAttribute(android.R.attr.selectableItemBackground, outValue, true); textView.setBackgroundResource(outValue.resourceId); textView.setAllCaps(true); int padding = (int) (TAB_VIEW_PADDING_DIPS * getResources().getDisplayMetrics().density); textView.setPadding(padding, padding, padding, padding); return textView; }
|
/**
* Create a default view to be used for tabs. This is called if a custom tab view is not set via
* {@link #setCustomTabView(int, int)}.
*/
|
Create a default view to be used for tabs. This is called if a custom tab view is not set via <code>#setCustomTabView(int, int)</code>
|
createDefaultTabView
|
{
"repo_name": "draekko/afwall",
"path": "aFWall/src/main/java/dev/ukanth/ufirewall/ui/about/SlidingTabLayout.java",
"license": "gpl-3.0",
"size": 11063
}
|
[
"android.content.Context",
"android.graphics.Typeface",
"android.util.TypedValue",
"android.view.Gravity",
"android.view.ViewGroup",
"android.widget.LinearLayout",
"android.widget.TextView"
] |
import android.content.Context; import android.graphics.Typeface; import android.util.TypedValue; import android.view.Gravity; import android.view.ViewGroup; import android.widget.LinearLayout; import android.widget.TextView;
|
import android.content.*; import android.graphics.*; import android.util.*; import android.view.*; import android.widget.*;
|
[
"android.content",
"android.graphics",
"android.util",
"android.view",
"android.widget"
] |
android.content; android.graphics; android.util; android.view; android.widget;
| 2,093,952
|
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
Preconditions.checkNotNull(split);
if (!(split instanceof DatastoreInputSplit)) {
throw new IOException(
getClass().getName() + " initialized with non-DatastoreInputSplit");
}
this.split = (DatastoreInputSplit) split;
}
|
void function(InputSplit split, TaskAttemptContext context) throws IOException { Preconditions.checkNotNull(split); if (!(split instanceof DatastoreInputSplit)) { throw new IOException( getClass().getName() + STR); } this.split = (DatastoreInputSplit) split; }
|
/**
* Initialize the reader.
*
* @throws IOException if the split provided isn't a DatastoreInputSplit
*/
|
Initialize the reader
|
initialize
|
{
"repo_name": "mikea/appengine-mapreduce",
"path": "java/src/com/google/appengine/tools/mapreduce/DatastoreRecordReader.java",
"license": "apache-2.0",
"size": 4703
}
|
[
"com.google.common.base.Preconditions",
"java.io.IOException",
"org.apache.hadoop.mapreduce.InputSplit",
"org.apache.hadoop.mapreduce.TaskAttemptContext"
] |
import com.google.common.base.Preconditions; import java.io.IOException; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.TaskAttemptContext;
|
import com.google.common.base.*; import java.io.*; import org.apache.hadoop.mapreduce.*;
|
[
"com.google.common",
"java.io",
"org.apache.hadoop"
] |
com.google.common; java.io; org.apache.hadoop;
| 289,007
|
public List getPageData() {
return pageData;
}
|
List function() { return pageData; }
|
/**
* Returns the data in use for the current page
* @return list of data
*/
|
Returns the data in use for the current page
|
getPageData
|
{
"repo_name": "colloquium/spacewalk",
"path": "java/code/src/com/redhat/rhn/frontend/taglibs/list/ListTag.java",
"license": "gpl-2.0",
"size": 35442
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,717,243
|
public void setMultiQueryIndex(int index) throws ExecException {
setIndex(index, true);
}
|
void function(int index) throws ExecException { setIndex(index, true); }
|
/**
* Sets the multi-query index of this operator
*
* @param index the position of the parent plan of this operator
* in the enclosed split operator
* @throws ExecException if the index value is bigger then 0x7F
*/
|
Sets the multi-query index of this operator
|
setMultiQueryIndex
|
{
"repo_name": "rekhajoshm/pigfork",
"path": "src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/POLocalRearrange.java",
"license": "apache-2.0",
"size": 31817
}
|
[
"org.apache.pig.backend.executionengine.ExecException"
] |
import org.apache.pig.backend.executionengine.ExecException;
|
import org.apache.pig.backend.executionengine.*;
|
[
"org.apache.pig"
] |
org.apache.pig;
| 32,259
|
@Adjacency(label = JAXWS_INTERFACE, direction = Direction.OUT)
public JavaClassModel getInterface();
|
@Adjacency(label = JAXWS_INTERFACE, direction = Direction.OUT) JavaClassModel function();
|
/**
* Contains the JAX-WS implementation class
*/
|
Contains the JAX-WS implementation class
|
getInterface
|
{
"repo_name": "lincolnthree/windup",
"path": "rules-java-ee/addon/src/main/java/org/jboss/windup/rules/apps/javaee/model/JaxWSWebServiceModel.java",
"license": "epl-1.0",
"size": 1439
}
|
[
"com.tinkerpop.blueprints.Direction",
"com.tinkerpop.frames.Adjacency",
"org.jboss.windup.rules.apps.java.model.JavaClassModel"
] |
import com.tinkerpop.blueprints.Direction; import com.tinkerpop.frames.Adjacency; import org.jboss.windup.rules.apps.java.model.JavaClassModel;
|
import com.tinkerpop.blueprints.*; import com.tinkerpop.frames.*; import org.jboss.windup.rules.apps.java.model.*;
|
[
"com.tinkerpop.blueprints",
"com.tinkerpop.frames",
"org.jboss.windup"
] |
com.tinkerpop.blueprints; com.tinkerpop.frames; org.jboss.windup;
| 400,445
|
public int getAvailable() throws IOException {
if (!_didGet)
getConnInput();
// php/164q
if (_isRequestDone)
return 0;
else if (_contentLength > 0)
return _contentLength;
else
return _rs.getAvailable();
}
|
int function() throws IOException { if (!_didGet) getConnInput(); if (_isRequestDone) return 0; else if (_contentLength > 0) return _contentLength; else return _rs.getAvailable(); }
|
/**
* Returns the bytes still available.
*/
|
Returns the bytes still available
|
getAvailable
|
{
"repo_name": "CleverCloud/Bianca",
"path": "bianca/src/main/java/com/clevercloud/vfs/HttpStream.java",
"license": "gpl-2.0",
"size": 23618
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 416,518
|
@Test
public void testGetLongProperty()
{
try
{
Message message = senderSession.createMessage();
message.getLongProperty("prop");
Assert.fail("Should raise a NumberFormatException.\n");
}
catch (NumberFormatException e)
{
}
catch (JMSException e)
{
fail(e);
}
}
|
void function() { try { Message message = senderSession.createMessage(); message.getLongProperty("prop"); Assert.fail(STR); } catch (NumberFormatException e) { } catch (JMSException e) { fail(e); } }
|
/**
* Test that an attempt to get a <code>long</code> property which does not exist throw
* a <code>java.lang.NumberFormatException</code>
*/
|
Test that an attempt to get a <code>long</code> property which does not exist throw a <code>java.lang.NumberFormatException</code>
|
testGetLongProperty
|
{
"repo_name": "ryanemerson/activemq-artemis",
"path": "tests/joram-tests/src/test/java/org/objectweb/jtests/jms/conform/message/properties/MessagePropertyTest.java",
"license": "apache-2.0",
"size": 10943
}
|
[
"javax.jms.JMSException",
"javax.jms.Message",
"org.junit.Assert"
] |
import javax.jms.JMSException; import javax.jms.Message; import org.junit.Assert;
|
import javax.jms.*; import org.junit.*;
|
[
"javax.jms",
"org.junit"
] |
javax.jms; org.junit;
| 289,597
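Client code usually avoids the NumberFormatException this test asserts by probing for the property first; a minimal sketch (the received message variable is assumed, and both calls can still throw JMSException):

long value;
if (message.propertyExists("prop")) {
    value = message.getLongProperty("prop");
} else {
    value = -1L;   // illustrative default when the property is absent
}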
|
@Test
public void testSetFloatGetLegal() throws Exception {
JmsMapMessage mapMessage = factory.createMapMessage();
String name = "myName";
float value = Float.MAX_VALUE;
mapMessage.setFloat(name, value);
assertEquals(value, mapMessage.getFloat(name), 0.0);
assertGetMapEntryEquals(mapMessage, name, String.valueOf(value), String.class);
assertGetMapEntryEquals(mapMessage, name, Double.valueOf(value), Double.class);
}
|
void function() throws Exception { JmsMapMessage mapMessage = factory.createMapMessage(); String name = STR; float value = Float.MAX_VALUE; mapMessage.setFloat(name, value); assertEquals(value, mapMessage.getFloat(name), 0.0); assertGetMapEntryEquals(mapMessage, name, String.valueOf(value), String.class); assertGetMapEntryEquals(mapMessage, name, Double.valueOf(value), Double.class); }
|
/**
* Set a float, then retrieve it as all of the legal type combinations to verify it is
* parsed correctly
*/
|
Set a float, then retrieve it as all of the legal type combinations to verify it is parsed correctly
|
testSetFloatGetLegal
|
{
"repo_name": "avranju/qpid-jms",
"path": "qpid-jms-client/src/test/java/org/apache/qpid/jms/message/JmsMapMessageTest.java",
"license": "apache-2.0",
"size": 35216
}
|
[
"org.junit.Assert"
] |
import org.junit.Assert;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 1,482,598
|
private boolean canStartTaskTracker(JobConf conf) throws IOException, InterruptedException {
TaskTracker tt = null;
try {
tt = new TaskTracker(conf);
} catch(IOException e) {
if (e instanceof java.net.BindException)
return false;
throw e;
}
tt.shutdown();
return true;
}
|
boolean function(JobConf conf) throws IOException, InterruptedException { TaskTracker tt = null; try { tt = new TaskTracker(conf); } catch(IOException e) { if (e instanceof java.net.BindException) return false; throw e; } tt.shutdown(); return true; }
|
/**
* Check whether the TaskTracker can be started.
*/
|
Check whether the TaskTracker can be started
|
canStartTaskTracker
|
{
"repo_name": "kl0u/visco",
"path": "src/test/org/apache/hadoop/mapred/TestMRServerPorts.java",
"license": "apache-2.0",
"size": 7041
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,892,481
|
public IntegrationAccountAgreementInner withHostIdentity(BusinessIdentity hostIdentity) {
this.hostIdentity = hostIdentity;
return this;
}
|
IntegrationAccountAgreementInner function(BusinessIdentity hostIdentity) { this.hostIdentity = hostIdentity; return this; }
|
/**
* Set the hostIdentity value.
*
* @param hostIdentity the hostIdentity value to set
* @return the IntegrationAccountAgreementInner object itself.
*/
|
Set the hostIdentity value
|
withHostIdentity
|
{
"repo_name": "martinsawicki/azure-sdk-for-java",
"path": "azure-mgmt-logic/src/main/java/com/microsoft/azure/management/logic/implementation/IntegrationAccountAgreementInner.java",
"license": "mit",
"size": 6281
}
|
[
"com.microsoft.azure.management.logic.BusinessIdentity"
] |
import com.microsoft.azure.management.logic.BusinessIdentity;
|
import com.microsoft.azure.management.logic.*;
|
[
"com.microsoft.azure"
] |
com.microsoft.azure;
| 1,439,469
|
public void takeSnapshot(@NonNull final SnapshotListener listener) {
if (mGoogleMap == null) {
return;
}
|
void function(@NonNull final SnapshotListener listener) { if (mGoogleMap == null) { return; }
|
/**
* Takes a screenshot of the map.
*/
|
Takes a screenshot of the map
|
takeSnapshot
|
{
"repo_name": "cohenadair/anglers-log",
"path": "android/app/src/main/java/com/cohenadair/anglerslog/fragments/DraggableMapFragment.java",
"license": "gpl-3.0",
"size": 14359
}
|
[
"android.support.annotation.NonNull"
] |
import android.support.annotation.NonNull;
|
import android.support.annotation.*;
|
[
"android.support"
] |
android.support;
| 2,045,508
|