method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
@RequestMapping(value = "/logout", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
public @ResponseBody Map<String, String> logout(HttpSession session) {
SessionData.newSession(session);
return success().build();
}
|
@RequestMapping(value = STR, method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE) @ResponseBody Map<String, String> function(HttpSession session) { SessionData.newSession(session); return success().build(); }
|
/**
* Logs out by overwriting all session data.
*/
|
Logs out by overwriting all session data
|
logout
|
{
"repo_name": "BlueWizardHat/2fa-demo",
"path": "2fa-demo-webapp/src/main/java/net/bluewizardhat/tfa/web/controller/UserController.java",
"license": "mit",
"size": 8598
}
|
[
"java.util.Map",
"javax.servlet.http.HttpSession",
"net.bluewizardhat.tfa.web.util.SessionData",
"org.springframework.http.MediaType",
"org.springframework.web.bind.annotation.RequestMapping",
"org.springframework.web.bind.annotation.RequestMethod",
"org.springframework.web.bind.annotation.ResponseBody"
] |
import java.util.Map; import javax.servlet.http.HttpSession; import net.bluewizardhat.tfa.web.util.SessionData; import org.springframework.http.MediaType; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseBody;
|
import java.util.*; import javax.servlet.http.*; import net.bluewizardhat.tfa.web.util.*; import org.springframework.http.*; import org.springframework.web.bind.annotation.*;
|
[
"java.util",
"javax.servlet",
"net.bluewizardhat.tfa",
"org.springframework.http",
"org.springframework.web"
] |
java.util; javax.servlet; net.bluewizardhat.tfa; org.springframework.http; org.springframework.web;
| 1,563,629
|
@GET
@Path("session/details/{host}/{application}/{session}")
@Produces(MediaType.APPLICATION_JSON)
SessionDetails getSessionDetails(@PathParam("host") String host, @PathParam("application") String application, @PathParam("session") String id) throws AgentException;
|
@Path(STR) @Produces(MediaType.APPLICATION_JSON) SessionDetails getSessionDetails(@PathParam("host") String host, @PathParam(STR) String application, @PathParam(STR) String id) throws AgentException;
|
/**
* Gets the session details.
*
* @param host the host.
* @param application the application.
* @param id the id.
* @return the session details.
* @throws AgentException if the method fails.
*/
|
Gets the session details
|
getSessionDetails
|
{
"repo_name": "lorislab/smonitor",
"path": "smonitor-agent/src/main/java/org/lorislab/smonitor/agent/rs/service/ApplicationService.java",
"license": "apache-2.0",
"size": 6472
}
|
[
"javax.ws.rs.Path",
"javax.ws.rs.PathParam",
"javax.ws.rs.Produces",
"javax.ws.rs.core.MediaType",
"org.lorislab.smonitor.agent.rs.exception.AgentException",
"org.lorislab.smonitor.connector.model.SessionDetails"
] |
import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import org.lorislab.smonitor.agent.rs.exception.AgentException; import org.lorislab.smonitor.connector.model.SessionDetails;
|
import javax.ws.rs.*; import javax.ws.rs.core.*; import org.lorislab.smonitor.agent.rs.exception.*; import org.lorislab.smonitor.connector.model.*;
|
[
"javax.ws",
"org.lorislab.smonitor"
] |
javax.ws; org.lorislab.smonitor;
| 2,511,737
|
private void checkRandomBytes(CryptoRandom random, int len) {
byte[] bytes = new byte[len];
byte[] bytes1 = new byte[len];
random.nextBytes(bytes);
random.nextBytes(bytes1);
while (Arrays.equals(bytes1, new byte[len]) || Arrays.equals(bytes, bytes1)) {
random.nextBytes(bytes1);
}
}
|
void function(CryptoRandom random, int len) { byte[] bytes = new byte[len]; byte[] bytes1 = new byte[len]; random.nextBytes(bytes); random.nextBytes(bytes1); while (Arrays.equals(bytes1, new byte[len]) Arrays.equals(bytes, bytes1)) { random.nextBytes(bytes1); } }
|
/**
* Test will timeout if secure random implementation always returns a
* constant value.
*/
|
Test will timeout if secure random implementation always returns a constant value
|
checkRandomBytes
|
{
"repo_name": "kexianda/commons-crypto",
"path": "src/test/java/org/apache/commons/crypto/random/AbstractRandomTest.java",
"license": "apache-2.0",
"size": 2959
}
|
[
"java.util.Arrays"
] |
import java.util.Arrays;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 831,929
|
// check if matcher already exists
TransitionRedefinesAnotherMatcher matcher = engine.getExistingMatcher(querySpecification());
if (matcher == null) {
matcher = new TransitionRedefinesAnotherMatcher(engine);
// do not have to "put" it into engine.matchers, reportMatcherInitialized() will take care of it
}
return matcher;
}
private final static int POSITION_TR = 0;
private final static Logger LOGGER = IncQueryLoggingUtil.getLogger(TransitionRedefinesAnotherMatcher.class);
@Deprecated
public TransitionRedefinesAnotherMatcher(final Notifier emfRoot) throws IncQueryException {
this(IncQueryEngine.on(emfRoot));
}
@Deprecated
public TransitionRedefinesAnotherMatcher(final IncQueryEngine engine) throws IncQueryException {
super(engine, querySpecification());
}
|
TransitionRedefinesAnotherMatcher matcher = engine.getExistingMatcher(querySpecification()); if (matcher == null) { matcher = new TransitionRedefinesAnotherMatcher(engine); } return matcher; } private final static int POSITION_TR = 0; private final static Logger LOGGER = IncQueryLoggingUtil.getLogger(TransitionRedefinesAnotherMatcher.class); public TransitionRedefinesAnotherMatcher(final Notifier emfRoot) throws IncQueryException { this(IncQueryEngine.on(emfRoot)); } public TransitionRedefinesAnotherMatcher(final IncQueryEngine engine) throws IncQueryException { super(engine, querySpecification()); }
|
/**
* Initializes the pattern matcher within an existing EMF-IncQuery engine.
* If the pattern matcher is already constructed in the engine, only a light-weight reference is returned.
* The match set will be incrementally refreshed upon updates.
* @param engine the existing EMF-IncQuery engine in which this matcher will be created.
* @throws IncQueryException if an error occurs during pattern matcher creation
*
*/
|
Initializes the pattern matcher within an existing EMF-IncQuery engine. If the pattern matcher is already constructed in the engine, only a light-weight reference is returned. The match set will be incrementally refreshed upon updates
|
on
|
{
"repo_name": "ELTE-Soft/xUML-RT-Executor",
"path": "plugins/hu.eltesoft.modelexecution.validation/src-gen/hu/eltesoft/modelexecution/validation/TransitionRedefinesAnotherMatcher.java",
"license": "epl-1.0",
"size": 10501
}
|
[
"org.apache.log4j.Logger",
"org.eclipse.emf.common.notify.Notifier",
"org.eclipse.incquery.runtime.api.IncQueryEngine",
"org.eclipse.incquery.runtime.exception.IncQueryException",
"org.eclipse.incquery.runtime.util.IncQueryLoggingUtil"
] |
import org.apache.log4j.Logger; import org.eclipse.emf.common.notify.Notifier; import org.eclipse.incquery.runtime.api.IncQueryEngine; import org.eclipse.incquery.runtime.exception.IncQueryException; import org.eclipse.incquery.runtime.util.IncQueryLoggingUtil;
|
import org.apache.log4j.*; import org.eclipse.emf.common.notify.*; import org.eclipse.incquery.runtime.api.*; import org.eclipse.incquery.runtime.exception.*; import org.eclipse.incquery.runtime.util.*;
|
[
"org.apache.log4j",
"org.eclipse.emf",
"org.eclipse.incquery"
] |
org.apache.log4j; org.eclipse.emf; org.eclipse.incquery;
| 2,014,512
|
return Configuration.getInstance().create(MediaContract.class);
}
|
return Configuration.getInstance().create(MediaContract.class); }
|
/**
* Creates an instance of the <code>MediaServicesContract</code> API.
*
*/
|
Creates an instance of the <code>MediaServicesContract</code> API
|
create
|
{
"repo_name": "flydream2046/azure-sdk-for-java",
"path": "services/azure-media/src/main/java/com/microsoft/windowsazure/services/media/MediaService.java",
"license": "apache-2.0",
"size": 2197
}
|
[
"com.microsoft.windowsazure.Configuration"
] |
import com.microsoft.windowsazure.Configuration;
|
import com.microsoft.windowsazure.*;
|
[
"com.microsoft.windowsazure"
] |
com.microsoft.windowsazure;
| 448,416
|
protected ELContext getELContext() {
return getCurrentInstance().getELContext();
}
|
ELContext function() { return getCurrentInstance().getELContext(); }
|
/**
* Obtains the current JSF EL context.
*
* @return The ELContext object associated with the current request.
*/
|
Obtains the current JSF EL context
|
getELContext
|
{
"repo_name": "nemo-ufes/nemo-utils",
"path": "nemo-utils-jee-wp/src/main/java/br/ufes/inf/nemo/util/ejb3/controller/JSFController.java",
"license": "gpl-2.0",
"size": 27470
}
|
[
"javax.el.ELContext"
] |
import javax.el.ELContext;
|
import javax.el.*;
|
[
"javax.el"
] |
javax.el;
| 2,790,431
|
@Override
@Description("Count of pages replaced by other pages")
public long getEvictCount() {
return recent().getEvictCount();
}
|
@Description(STR) long function() { return recent().getEvictCount(); }
|
/**
* Get the count of valid pages evicted from this <code>BufferPool</code> to
* make room for newly read or created pages.
*
* @return The evicted page count
*/
|
Get the count of valid pages evicted from this <code>BufferPool</code> to make room for newly read or created pages
|
getEvictCount
|
{
"repo_name": "jaytaylor/persistit",
"path": "src/main/java/com/persistit/BufferPoolMXBeanImpl.java",
"license": "epl-1.0",
"size": 7375
}
|
[
"com.persistit.mxbeans.Description"
] |
import com.persistit.mxbeans.Description;
|
import com.persistit.mxbeans.*;
|
[
"com.persistit.mxbeans"
] |
com.persistit.mxbeans;
| 1,695,957
|
public static void analyse(Database db, boolean full) throws SQLException {
Connection conn = db.getConnection();
boolean autoCommit = conn.getAutoCommit();
try {
conn.setAutoCommit(true);
Statement s = conn.createStatement();
if (full) {
s.execute("VACUUM FULL ANALYSE");
} else {
s.execute("ANALYSE");
}
conn.setAutoCommit(autoCommit);
} finally {
conn.setAutoCommit(autoCommit);
conn.close();
}
}
|
static void function(Database db, boolean full) throws SQLException { Connection conn = db.getConnection(); boolean autoCommit = conn.getAutoCommit(); try { conn.setAutoCommit(true); Statement s = conn.createStatement(); if (full) { s.execute(STR); } else { s.execute(STR); } conn.setAutoCommit(autoCommit); } finally { conn.setAutoCommit(autoCommit); conn.close(); } }
|
/**
* Analyse given database, perform vacuum full analyse if full parameter true.
* WARNING: currently PostgreSQL specific
* @param db the database to analyse
* @param full if true perform VACUUM FULL ANALYSE
* @throws SQLException if db problem
*/
|
Analyse given database, perform vacuum full analyse if full parameter true
|
analyse
|
{
"repo_name": "JoeCarlson/intermine",
"path": "intermine/objectstore/main/src/org/intermine/sql/DatabaseUtil.java",
"license": "lgpl-2.1",
"size": 34017
}
|
[
"java.sql.Connection",
"java.sql.SQLException",
"java.sql.Statement"
] |
import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 889,829
|
List<Service> getServicesBlockedOnFacility(PerunSession perunSession, Facility facility);
|
List<Service> getServicesBlockedOnFacility(PerunSession perunSession, Facility facility);
|
/**
* List all the Services that are banned on this facility.
*
* @param perunSession
* @param facility
* @return a list of Services that are denied on the facility
*
*/
|
List all the Services that are banned on this facility
|
getServicesBlockedOnFacility
|
{
"repo_name": "zlamalp/perun",
"path": "perun-core/src/main/java/cz/metacentrum/perun/core/bl/ServicesManagerBl.java",
"license": "bsd-2-clause",
"size": 31710
}
|
[
"cz.metacentrum.perun.core.api.Facility",
"cz.metacentrum.perun.core.api.PerunSession",
"cz.metacentrum.perun.core.api.Service",
"java.util.List"
] |
import cz.metacentrum.perun.core.api.Facility; import cz.metacentrum.perun.core.api.PerunSession; import cz.metacentrum.perun.core.api.Service; import java.util.List;
|
import cz.metacentrum.perun.core.api.*; import java.util.*;
|
[
"cz.metacentrum.perun",
"java.util"
] |
cz.metacentrum.perun; java.util;
| 2,269,737
|
public boolean usesLocalFiles() throws SQLException {
return false;
}
//
// JDBC-4.0 functions that aren't reliant on Java6
//
|
boolean function() throws SQLException { return false; } //
|
/**
* Does the database store tables in a local file?
*
* @return true if so
* @throws SQLException DOCUMENT ME!
*/
|
Does the database store tables in a local file
|
usesLocalFiles
|
{
"repo_name": "hongliangpan/manydesigns.cn",
"path": "trunk/portofino-database/mysql.src/com/mysql/jdbc/DatabaseMetaData.java",
"license": "lgpl-3.0",
"size": 275823
}
|
[
"java.sql.SQLException"
] |
import java.sql.SQLException;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 2,357,005
|
private Class<?> loadClass(final String classname) throws PluginException {
if (null == classname) {
throw new IllegalArgumentException("A null java class name was specified.");
}
if (null != classCache.get(classname)) {
debug("(loadClass) " + classname + ": " + pluginJar);
return classCache.get(classname);
}
debug(String.format("Deleting dependency lib cache %s", getFileCacheDir()));
FileUtils.deleteDir(getFileCacheDir());
File cachedJar = createCachedJar();
debug("loadClass! " + classname + ": " + cachedJar);
final ClassLoader parent = JarPluginProviderLoader.class.getClassLoader();
final Class<?> cls;
// if jar manifest declares secondary lib deps, expand lib into cachedir, and setup classloader to use the libs
Collection<File> extlibs = null;
try {
extlibs = extractDependentLibs();
} catch (IOException e) {
throw new PluginException("Unable to expand plugin libs: " + e.getMessage(), e);
}
try {
final URL url = cachedJar.toURI().toURL();
final URL[] urlarray;
if (null != extlibs && extlibs.size() > 0) {
final ArrayList<URL> urls = new ArrayList<URL>();
urls.add(url);
for (final File extlib : extlibs) {
urls.add(extlib.toURI().toURL());
}
urlarray = urls.toArray(new URL[urls.size()]);
} else {
urlarray = new URL[] { url };
}
final URLClassLoader urlClassLoader = loadLibsFirst ? LocalFirstClassLoader.newInstance(urlarray,
parent) : URLClassLoader.newInstance(urlarray, parent);
cls = Class.forName(classname, true, urlClassLoader);
classCache.put(classname, cls);
} catch (ClassNotFoundException e) {
throw new PluginException("Class not found: " + classname, e);
} catch (MalformedURLException e) {
throw new PluginException("Error loading class: " + classname, e);
} catch (Throwable t) {
throw new PluginException("Error loading class: " + classname, t);
}
return cls;
}
|
Class<?> function(final String classname) throws PluginException { if (null == classname) { throw new IllegalArgumentException(STR); } if (null != classCache.get(classname)) { debug(STR + classname + STR + pluginJar); return classCache.get(classname); } debug(String.format(STR, getFileCacheDir())); FileUtils.deleteDir(getFileCacheDir()); File cachedJar = createCachedJar(); debug(STR + classname + STR + cachedJar); final ClassLoader parent = JarPluginProviderLoader.class.getClassLoader(); final Class<?> cls; Collection<File> extlibs = null; try { extlibs = extractDependentLibs(); } catch (IOException e) { throw new PluginException(STR + e.getMessage(), e); } try { final URL url = cachedJar.toURI().toURL(); final URL[] urlarray; if (null != extlibs && extlibs.size() > 0) { final ArrayList<URL> urls = new ArrayList<URL>(); urls.add(url); for (final File extlib : extlibs) { urls.add(extlib.toURI().toURL()); } urlarray = urls.toArray(new URL[urls.size()]); } else { urlarray = new URL[] { url }; } final URLClassLoader urlClassLoader = loadLibsFirst ? LocalFirstClassLoader.newInstance(urlarray, parent) : URLClassLoader.newInstance(urlarray, parent); cls = Class.forName(classname, true, urlClassLoader); classCache.put(classname, cls); } catch (ClassNotFoundException e) { throw new PluginException(STR + classname, e); } catch (MalformedURLException e) { throw new PluginException(STR + classname, e); } catch (Throwable t) { throw new PluginException(STR + classname, t); } return cls; }
|
/**
* Load a class from the jar file by name
*/
|
Load a class from the jar file by name
|
loadClass
|
{
"repo_name": "tjordanchat/rundeck",
"path": "core/src/main/java/com/dtolabs/rundeck/core/plugins/JarPluginProviderLoader.java",
"license": "apache-2.0",
"size": 24079
}
|
[
"com.dtolabs.rundeck.core.utils.FileUtils",
"java.io.File",
"java.io.IOException",
"java.net.MalformedURLException",
"java.net.URLClassLoader",
"java.util.ArrayList",
"java.util.Collection"
] |
import com.dtolabs.rundeck.core.utils.FileUtils; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URLClassLoader; import java.util.ArrayList; import java.util.Collection;
|
import com.dtolabs.rundeck.core.utils.*; import java.io.*; import java.net.*; import java.util.*;
|
[
"com.dtolabs.rundeck",
"java.io",
"java.net",
"java.util"
] |
com.dtolabs.rundeck; java.io; java.net; java.util;
| 2,479,475
|
public static void convertTypes(GeneratorAdapter gen, Type from, Type to) {
if (isPrimitive(from) && isPrimitive(to)) {
gen.cast(from, to);
} else {
if (isPrimitive(from)) {
Type boxed = boxedType(from);
box(gen, boxed);
if (!isAssignableFrom(to, boxed)) {
gen.checkCast(to);
}
} else if (isPrimitive(to)) {
Type boxed = boxedType(to);
if (!isAssignableFrom(to, from)) {
gen.checkCast(boxed);
}
unbox(gen, to);
} else {
if (!isAssignableFrom(to, from)) {
gen.checkCast(to);
}
}
}
}
|
static void function(GeneratorAdapter gen, Type from, Type to) { if (isPrimitive(from) && isPrimitive(to)) { gen.cast(from, to); } else { if (isPrimitive(from)) { Type boxed = boxedType(from); box(gen, boxed); if (!isAssignableFrom(to, boxed)) { gen.checkCast(to); } } else if (isPrimitive(to)) { Type boxed = boxedType(to); if (!isAssignableFrom(to, from)) { gen.checkCast(boxed); } unbox(gen, to); } else { if (!isAssignableFrom(to, from)) { gen.checkCast(to); } } } }
|
/**
* <p>Convert from one type to another.</p>
* <p>The following conversions are allowed:</p>
* <ul>
* <p>primitive -> primitive</p>
* <p>primitive -> wrapper (only direct conversion from a primitive to it's wrapper)</p>
* <p>wrapper -> primitive (again only direct conversion)</p>
* <p>class -> class (may be a no-op or a checkcast)</p>
* </ul>
* @param gen the generator adapter
* @param from the type on top of the stack
* @param to the type to convert to
*/
|
Convert from one type to another. The following conversions are allowed: primitive -> primitive primitive -> wrapper (only direct conversion from a primitive to it's wrapper) wrapper -> primitive (again only direct conversion) class -> class (may be a no-op or a checkcast)
|
convertTypes
|
{
"repo_name": "diesieben07/SevenCommons",
"path": "src/main/java/de/take_weiland/mods/commons/asm/ASMUtils.java",
"license": "lgpl-3.0",
"size": 8654
}
|
[
"org.objectweb.asm.Type",
"org.objectweb.asm.commons.GeneratorAdapter"
] |
import org.objectweb.asm.Type; import org.objectweb.asm.commons.GeneratorAdapter;
|
import org.objectweb.asm.*; import org.objectweb.asm.commons.*;
|
[
"org.objectweb.asm"
] |
org.objectweb.asm;
| 2,152,048
|
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setRet(long value) {
this.ret = value;
}
|
@Generated(value = STR, date = STR, comments = STR) void function(long value) { this.ret = value; }
|
/**
* Sets the value of the ret property.
*
*/
|
Sets the value of the ret property
|
setRet
|
{
"repo_name": "kanonirov/lanb-client",
"path": "src/main/java/ru/lanbilling/webservice/wsdl/DelSharedPostResponse.java",
"license": "mit",
"size": 1717
}
|
[
"javax.annotation.Generated"
] |
import javax.annotation.Generated;
|
import javax.annotation.*;
|
[
"javax.annotation"
] |
javax.annotation;
| 2,764,354
|
public Configuration getFlinkConfiguration() {
return flinkConfig.clone();
}
|
Configuration function() { return flinkConfig.clone(); }
|
/**
* Return the Flink configuration object.
* @return The Flink configuration object
*/
|
Return the Flink configuration object
|
getFlinkConfiguration
|
{
"repo_name": "mylog00/flink",
"path": "flink-clients/src/main/java/org/apache/flink/client/program/ClusterClient.java",
"license": "apache-2.0",
"size": 41167
}
|
[
"org.apache.flink.configuration.Configuration"
] |
import org.apache.flink.configuration.Configuration;
|
import org.apache.flink.configuration.*;
|
[
"org.apache.flink"
] |
org.apache.flink;
| 1,059,251
|
@Override
public String getJMSProviderName() {
return MetaDataSupport.PROVIDER_NAME;
}
|
String function() { return MetaDataSupport.PROVIDER_NAME; }
|
/**
* Gets the JMS provider name.
*
* @return the JMS provider name
*/
|
Gets the JMS provider name
|
getJMSProviderName
|
{
"repo_name": "apache/qpid-jms",
"path": "qpid-jms-client/src/main/java/org/apache/qpid/jms/JmsConnectionMetaData.java",
"license": "apache-2.0",
"size": 3243
}
|
[
"org.apache.qpid.jms.util.MetaDataSupport"
] |
import org.apache.qpid.jms.util.MetaDataSupport;
|
import org.apache.qpid.jms.util.*;
|
[
"org.apache.qpid"
] |
org.apache.qpid;
| 207,873
|
private char parseSeparator(final String separatorOption, File input) throws IOException {
if (separatorOption.length() == 1) {
return separatorOption.charAt(0);
} else if (separatorOption.equalsIgnoreCase("DETECT")) {
return detectSeparator(input);
} else {
throw new IllegalArgumentException("only a single character or the keyword 'DETECT' is allowed");
}
}
|
char function(final String separatorOption, File input) throws IOException { if (separatorOption.length() == 1) { return separatorOption.charAt(0); } else if (separatorOption.equalsIgnoreCase(STR)) { return detectSeparator(input); } else { throw new IllegalArgumentException(STR); } }
|
/**
* Parses the separator option and returns the separator char.
*
* @param separatorOption the separator option
* @param input the input
* @return the char
* @throws IOException Signals that an I/O exception has occurred.
*/
|
Parses the separator option and returns the separator char
|
parseSeparator
|
{
"repo_name": "arx-deidentifier/arx-cli",
"path": "src/org/deidentifier/arx/cli/CommandLineInterface.java",
"license": "gpl-3.0",
"size": 34483
}
|
[
"java.io.File",
"java.io.IOException"
] |
import java.io.File; import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 418,916
|
public Map<String, DataObjectRelationship> getRelationshipMetadata() {
LOG.debug("getRelationshipMetadata() start");
PersistenceStructureService persistenceStructureService = SpringContext.getBean(PersistenceStructureService.class);
Map<String, DataObjectRelationship> relationshipMetadata = new HashMap<String, DataObjectRelationship>();
for (String attributeName : this.getInquirableFieldNames()) {
Map<String, Class<? extends BusinessObject>> primitiveReference = LookupUtils.getPrimitiveReference(newDetailLine, attributeName);
if (primitiveReference != null && !primitiveReference.isEmpty()) {
DataObjectRelationship primitiveRelationship = this.getPrimitiveDataObjectRelationship(persistenceStructureService.getRelationshipMetadata(newDetailLine.getClass(), attributeName));
relationshipMetadata.put(attributeName, primitiveRelationship);
}
}
return relationshipMetadata;
}
|
Map<String, DataObjectRelationship> function() { LOG.debug(STR); PersistenceStructureService persistenceStructureService = SpringContext.getBean(PersistenceStructureService.class); Map<String, DataObjectRelationship> relationshipMetadata = new HashMap<String, DataObjectRelationship>(); for (String attributeName : this.getInquirableFieldNames()) { Map<String, Class<? extends BusinessObject>> primitiveReference = LookupUtils.getPrimitiveReference(newDetailLine, attributeName); if (primitiveReference != null && !primitiveReference.isEmpty()) { DataObjectRelationship primitiveRelationship = this.getPrimitiveDataObjectRelationship(persistenceStructureService.getRelationshipMetadata(newDetailLine.getClass(), attributeName)); relationshipMetadata.put(attributeName, primitiveRelationship); } } return relationshipMetadata; }
|
/**
* get the relationship metadata for the detail line fields
*
* @return the relationship metadata for the detail line fields
*/
|
get the relationship metadata for the detail line fields
|
getRelationshipMetadata
|
{
"repo_name": "Ariah-Group/Finance",
"path": "af_webapp/src/main/java/org/kuali/kfs/module/ec/document/web/struts/EffortCertificationForm.java",
"license": "apache-2.0",
"size": 22754
}
|
[
"java.util.HashMap",
"java.util.Map",
"org.kuali.kfs.sys.context.SpringContext",
"org.kuali.rice.kns.lookup.LookupUtils",
"org.kuali.rice.krad.bo.BusinessObject",
"org.kuali.rice.krad.bo.DataObjectRelationship",
"org.kuali.rice.krad.service.PersistenceStructureService"
] |
import java.util.HashMap; import java.util.Map; import org.kuali.kfs.sys.context.SpringContext; import org.kuali.rice.kns.lookup.LookupUtils; import org.kuali.rice.krad.bo.BusinessObject; import org.kuali.rice.krad.bo.DataObjectRelationship; import org.kuali.rice.krad.service.PersistenceStructureService;
|
import java.util.*; import org.kuali.kfs.sys.context.*; import org.kuali.rice.kns.lookup.*; import org.kuali.rice.krad.bo.*; import org.kuali.rice.krad.service.*;
|
[
"java.util",
"org.kuali.kfs",
"org.kuali.rice"
] |
java.util; org.kuali.kfs; org.kuali.rice;
| 2,157,115
|
@SuppressWarnings("unchecked")
public Object getObject(String s){
if(s == null){
throw new NullPointerException("Key may not be null.");
}
String[] parts = s.split("\\.");
Map<String, Object> last = map;
for(int i = 0; i < parts.length - 1; i++){
Object q = last.get(parts[i]);
if(q == null || q instanceof Map == false){
return null;
}
last = (Map<String, Object>) q;
}
Object o = last.get(parts[parts.length - 1]);
if(o instanceof Map){
HashMap<String, Object> result = new HashMap<String, Object>();
for(Entry<?, ?> e : ((Map<?, ?>) o).entrySet()){
if(e.getKey() instanceof String == false) return o; //Key is not String, we can't help.
result.put((String) e.getKey(), e.getValue());
}
return new ConfigSection(result);
}
return o;
}
|
@SuppressWarnings(STR) Object function(String s){ if(s == null){ throw new NullPointerException(STR); } String[] parts = s.split("\\."); Map<String, Object> last = map; for(int i = 0; i < parts.length - 1; i++){ Object q = last.get(parts[i]); if(q == null q instanceof Map == false){ return null; } last = (Map<String, Object>) q; } Object o = last.get(parts[parts.length - 1]); if(o instanceof Map){ HashMap<String, Object> result = new HashMap<String, Object>(); for(Entry<?, ?> e : ((Map<?, ?>) o).entrySet()){ if(e.getKey() instanceof String == false) return o; result.put((String) e.getKey(), e.getValue()); } return new ConfigSection(result); } return o; }
|
/**
* Fetches the raw object at the given location.
* @param s the key to search for, may contain '.' for subsections.
* @return the object
*/
|
Fetches the raw object at the given location
|
getObject
|
{
"repo_name": "tehnewb/Titan",
"path": "src/org/maxgamer/rs/structure/configs/ConfigSection.java",
"license": "gpl-3.0",
"size": 15001
}
|
[
"java.util.HashMap",
"java.util.Map"
] |
import java.util.HashMap; import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 140,538
|
@Override
public Adapter createProxyServiceAdapter() {
if (proxyServiceItemProvider == null) {
proxyServiceItemProvider = new ProxyServiceItemProvider(this);
}
return proxyServiceItemProvider;
}
protected ProxyWsdlConfigurationItemProvider proxyWsdlConfigurationItemProvider;
|
Adapter function() { if (proxyServiceItemProvider == null) { proxyServiceItemProvider = new ProxyServiceItemProvider(this); } return proxyServiceItemProvider; } protected ProxyWsdlConfigurationItemProvider proxyWsdlConfigurationItemProvider;
|
/**
* This creates an adapter for a {@link org.wso2.developerstudio.eclipse.esb.ProxyService}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This creates an adapter for a <code>org.wso2.developerstudio.eclipse.esb.ProxyService</code>.
|
createProxyServiceAdapter
|
{
"repo_name": "chanakaudaya/developer-studio",
"path": "esb/org.wso2.developerstudio.eclipse.esb.edit/src/org/wso2/developerstudio/eclipse/esb/provider/EsbItemProviderAdapterFactory.java",
"license": "apache-2.0",
"size": 35203
}
|
[
"org.eclipse.emf.common.notify.Adapter"
] |
import org.eclipse.emf.common.notify.Adapter;
|
import org.eclipse.emf.common.notify.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 2,588,160
|
@Value.Check
protected void check() {
switch (getKind()) {
case SINGLE:
Preconditions.checkArgument(!getLocalNameAndFlavors().equals(""));
Preconditions.checkArgument(!getLocalNameAndFlavors().startsWith("#"));
break;
case PACKAGE:
case RECURSIVE:
Preconditions.checkArgument(getLocalNameAndFlavors().equals(""));
break;
}
}
|
@Value.Check void function() { switch (getKind()) { case SINGLE: Preconditions.checkArgument(!getLocalNameAndFlavors().equals(STR#STR")); break; } }
|
/**
* Validate that target name is only present when necessary
*
* <p>Should we move it to factory {@link BuildTargetPatternParser}?
*/
|
Validate that target name is only present when necessary Should we move it to factory <code>BuildTargetPatternParser</code>
|
check
|
{
"repo_name": "zpao/buck",
"path": "src/com/facebook/buck/core/parser/buildtargetpattern/BuildTargetPattern.java",
"license": "apache-2.0",
"size": 3408
}
|
[
"com.google.common.base.Preconditions",
"org.immutables.value.Value"
] |
import com.google.common.base.Preconditions; import org.immutables.value.Value;
|
import com.google.common.base.*; import org.immutables.value.*;
|
[
"com.google.common",
"org.immutables.value"
] |
com.google.common; org.immutables.value;
| 471,572
|
private boolean addRelatedClass(final RelType relType, final ClassInfo classInfo) {
Set<ClassInfo> classInfoSet = relatedTypeToClassInfoSet.get(relType);
if (classInfoSet == null) {
relatedTypeToClassInfoSet.put(relType, classInfoSet = new HashSet<>(4));
}
return classInfoSet.add(classInfo);
}
|
boolean function(final RelType relType, final ClassInfo classInfo) { Set<ClassInfo> classInfoSet = relatedTypeToClassInfoSet.get(relType); if (classInfoSet == null) { relatedTypeToClassInfoSet.put(relType, classInfoSet = new HashSet<>(4)); } return classInfoSet.add(classInfo); }
|
/**
* Add a class with a given relationship type. Test whether the collection changed as a result of the call.
*/
|
Add a class with a given relationship type. Test whether the collection changed as a result of the call
|
addRelatedClass
|
{
"repo_name": "CiNC0/Cartier",
"path": "cartier-classpath-scanner/src/main/java/xyz/vopen/cartier/classpathscanner/scanner/ClassInfo.java",
"license": "apache-2.0",
"size": 88074
}
|
[
"java.util.HashSet",
"java.util.Set"
] |
import java.util.HashSet; import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,249,986
|
ChangeStreamPublisher<Document> watch(ClientSession clientSession);
|
ChangeStreamPublisher<Document> watch(ClientSession clientSession);
|
/**
* Creates a change stream for this database.
*
* @param clientSession the client session with which to associate this operation
* @return the change stream iterable
* @since 1.9
* @mongodb.server.release 4.0
* @mongodb.driver.dochub core/changestreams Change Streams
*/
|
Creates a change stream for this database
|
watch
|
{
"repo_name": "rozza/mongo-java-driver-reactivestreams",
"path": "driver/src/main/com/mongodb/reactivestreams/client/MongoDatabase.java",
"license": "apache-2.0",
"size": 22823
}
|
[
"org.bson.Document"
] |
import org.bson.Document;
|
import org.bson.*;
|
[
"org.bson"
] |
org.bson;
| 2,553,931
|
public void removeAcls(List<OzoneAcl> removeAcls) throws IOException {
Preconditions.checkNotNull(proxy, "Client proxy is not set.");
Preconditions.checkNotNull(removeAcls);
proxy.removeBucketAcls(volumeName, name, removeAcls);
acls.removeAll(removeAcls);
}
|
void function(List<OzoneAcl> removeAcls) throws IOException { Preconditions.checkNotNull(proxy, STR); Preconditions.checkNotNull(removeAcls); proxy.removeBucketAcls(volumeName, name, removeAcls); acls.removeAll(removeAcls); }
|
/**
* Removes ACLs from the bucket.
* @param removeAcls ACLs to be removed
* @throws IOException
*/
|
Removes ACLs from the bucket
|
removeAcls
|
{
"repo_name": "ChetnaChaudhari/hadoop",
"path": "hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneBucket.java",
"license": "apache-2.0",
"size": 10777
}
|
[
"com.google.common.base.Preconditions",
"java.io.IOException",
"java.util.List",
"org.apache.hadoop.ozone.OzoneAcl"
] |
import com.google.common.base.Preconditions; import java.io.IOException; import java.util.List; import org.apache.hadoop.ozone.OzoneAcl;
|
import com.google.common.base.*; import java.io.*; import java.util.*; import org.apache.hadoop.ozone.*;
|
[
"com.google.common",
"java.io",
"java.util",
"org.apache.hadoop"
] |
com.google.common; java.io; java.util; org.apache.hadoop;
| 1,376,984
|
static public <T> IPosOnlyFeedback read(BufferedReader reader, IEntityMapping user_mapping, IEntityMapping item_mapping, boolean ignore_first_line) throws Exception {
if (user_mapping == null) user_mapping = new IdentityMapping();
if (item_mapping == null) item_mapping = new IdentityMapping();
if (ignore_first_line) reader.readLine();
PosOnlyFeedback<SparseBooleanMatrix> feedback = new PosOnlyFeedback<SparseBooleanMatrix>(SparseBooleanMatrix.class);
String line;
while ((line = reader.readLine()) != null ) {
line = line.trim();
if(line.length() == 0) continue;
String[] tokens = line.split(Constants.SPLIT_CHARS, 0);
if(tokens.length < 2) throw new IOException("Expected at least two columns: " + line);
int user_id = user_mapping.toInternalID((tokens[0]));
int item_id = item_mapping.toInternalID((tokens[1]));
feedback.add(user_id, item_id);
}
return feedback;
}
|
static <T> IPosOnlyFeedback function(BufferedReader reader, IEntityMapping user_mapping, IEntityMapping item_mapping, boolean ignore_first_line) throws Exception { if (user_mapping == null) user_mapping = new IdentityMapping(); if (item_mapping == null) item_mapping = new IdentityMapping(); if (ignore_first_line) reader.readLine(); PosOnlyFeedback<SparseBooleanMatrix> feedback = new PosOnlyFeedback<SparseBooleanMatrix>(SparseBooleanMatrix.class); String line; while ((line = reader.readLine()) != null ) { line = line.trim(); if(line.length() == 0) continue; String[] tokens = line.split(Constants.SPLIT_CHARS, 0); if(tokens.length < 2) throw new IOException(STR + line); int user_id = user_mapping.toInternalID((tokens[0])); int item_id = item_mapping.toInternalID((tokens[1])); feedback.add(user_id, item_id); } return feedback; }
|
/**
* Read in implicit feedback data from a TextReader.
*
* Each line must consist of at least two fields, the first being a user identifier, the second
* being an item identifier. Additional fields and empty lines are ignored.
*
* See Constants.SPLIT_CHARS for details of the permissible field separators.
*
* @param reader the TextReader to be read from
* @param user_mapping a user IEntityMapping object
* @param item_mapping an item IEntityMapping object
* @param ignore_first_line if true, ignore the first line
* @return a PosOnlyFeedback object with the user-wise collaborative data
*/
|
Read in implicit feedback data from a TextReader. Each line must consist of at least two fields, the first being a user identifier, the second being an item identifier. Additional fields and empty lines are ignored. See Constants.SPLIT_CHARS for details of the permissible field separators
|
read
|
{
"repo_name": "GaoZhenGit/UISMF",
"path": "MyMediaLite/src/org/mymedialite/io/ItemData.java",
"license": "gpl-3.0",
"size": 4060
}
|
[
"java.io.BufferedReader",
"java.io.IOException",
"org.mymedialite.data.IEntityMapping",
"org.mymedialite.data.IPosOnlyFeedback",
"org.mymedialite.data.IdentityMapping",
"org.mymedialite.data.PosOnlyFeedback",
"org.mymedialite.datatype.SparseBooleanMatrix"
] |
import java.io.BufferedReader; import java.io.IOException; import org.mymedialite.data.IEntityMapping; import org.mymedialite.data.IPosOnlyFeedback; import org.mymedialite.data.IdentityMapping; import org.mymedialite.data.PosOnlyFeedback; import org.mymedialite.datatype.SparseBooleanMatrix;
|
import java.io.*; import org.mymedialite.data.*; import org.mymedialite.datatype.*;
|
[
"java.io",
"org.mymedialite.data",
"org.mymedialite.datatype"
] |
java.io; org.mymedialite.data; org.mymedialite.datatype;
| 1,887,659
|
@Adaptive(CACHE_KEY)
Cache getCache(URL url, Invocation invocation);
|
@Adaptive(CACHE_KEY) Cache getCache(URL url, Invocation invocation);
|
/**
* CacheFactory implementation class needs to implement this return underlying cache instance for method against
* url and invocation.
* @param url
* @param invocation
* @return Instance of Cache containing cached value against method url and invocation.
*/
|
CacheFactory implementation class needs to implement this return underlying cache instance for method against url and invocation
|
getCache
|
{
"repo_name": "lovepoem/dubbo",
"path": "dubbo-filter/dubbo-filter-cache/src/main/java/org/apache/dubbo/cache/CacheFactory.java",
"license": "apache-2.0",
"size": 1885
}
|
[
"org.apache.dubbo.common.extension.Adaptive",
"org.apache.dubbo.rpc.Invocation"
] |
import org.apache.dubbo.common.extension.Adaptive; import org.apache.dubbo.rpc.Invocation;
|
import org.apache.dubbo.common.extension.*; import org.apache.dubbo.rpc.*;
|
[
"org.apache.dubbo"
] |
org.apache.dubbo;
| 2,348,477
|
private void updateInterface(Connection dbc, Date now, DbNodeEntry node,
InetAddress target, InetAddress ifaddr, List<SupportedProtocol> protocols,
IfSnmpCollector snmpc, boolean doesSnmp) throws SQLException {
final String ifaddrString = str(ifaddr);
if (log().isDebugEnabled()) {
log().debug("updateInterface: updating interface "
+ ifaddrString + "(targetIf="
+ str(target) + ")");
if (doesSnmp) {
log().debug("updateInterface: the SNMP collection passed in is "
+ "collected via"
+ (snmpc == null ? "No SnmpCollection passed in (snmpc == null)" : str(snmpc.getCollectorTargetAddress())));
}
}
boolean reparentFlag = false;
boolean newIpIfEntry = false;
int ifIndex = -1;
DbIpInterfaceEntry dbIpIfEntry =
DbIpInterfaceEntry.get(dbc,node.getNodeId(), ifaddr);
if (doesSnmp && snmpc != null && snmpc.hasIpAddrTable()) {
// Attempt to load IP Interface entry from the database
ifIndex = snmpc.getIfIndex(ifaddr);
if (log().isDebugEnabled()) {
log().debug("updateInterface: interface = "
+ ifaddrString + " ifIndex = " + ifIndex
+ ". Checking for this address on other nodes.");
}
// Verify that SNMP collection contains ipAddrTable entries
IpAddrTable ipAddrTable = null;
ipAddrTable = snmpc.getIpAddrTable();
if (ipAddrTable == null) {
log().error("updateInterface: null ipAddrTable in the SNMP "
+ "collection");
} else {
if (ifaddrString.equals("0.0.0.0") || ifaddr.isLoopbackAddress()) {
if (log().isDebugEnabled()) {
log().debug("updateInterface: Skipping address from "
+ "snmpc ipAddrTable "
+ ifaddrString);
}
} else {
if (log().isDebugEnabled()) {
log().debug("updateInterface: Checking address from "
+ "snmpc ipAddrTable "
+ ifaddrString);
}
PreparedStatement stmt = null;
final DBUtils d = new DBUtils(RescanProcessor.class);
try {
stmt = dbc.prepareStatement(SQL_DB_RETRIEVE_OTHER_NODES);
d.watch(stmt);
stmt.setString(1, ifaddrString);
stmt.setInt(2, node.getNodeId());
ResultSet rs = stmt.executeQuery();
d.watch(rs);
while (rs.next()) {
int existingNodeId = rs.getInt(1);
if (log().isDebugEnabled()) {
log().debug("updateInterface: ckecking for "
+ ifaddrString
+ " on existing nodeid "
+ existingNodeId);
}
DbNodeEntry suspectNodeEntry = DbNodeEntry.get(dbc, existingNodeId);
if (suspectNodeEntry == null) {
// This can happen if a node has been deleted.
continue;
}
DbIpInterfaceEntry[] tmpIfArray = suspectNodeEntry.getInterfaces(dbc);
if (areDbInterfacesInSnmpCollection(tmpIfArray, snmpc)) {
for (int i = 0; i < tmpIfArray.length; i++) {
InetAddress addr = tmpIfArray[i].getIfAddress();
int index = snmpc.getIfIndex(addr);
// Skip non-IP or loopback interfaces
final String addrString = str(addr);
if (addrString == null || addrString.equals("0.0.0.0") || addr.isLoopbackAddress()) {
continue;
}
if (log().isDebugEnabled()) {
log().debug("updateInterface: "
+ "reparenting interface "
+ addrString
+ " under node: "
+ node.getNodeId()
+ " from existing node: "
+ existingNodeId);
}
reparentInterface(dbc, addr, index, node.getNodeId(), existingNodeId);
// Create interfaceReparented event
createInterfaceReparentedEvent(node, existingNodeId, addr);
}
if (log().isDebugEnabled()) {
log().debug("updateInterface: interface "
+ ifaddrString
+ " is added to node: "
+ node.getNodeId()
+ " by reparenting from existing "
+ "node: " + existingNodeId);
}
dbIpIfEntry = DbIpInterfaceEntry.get(dbc, node.getNodeId(), ifaddr);
reparentFlag = true;
// delete duplicate node after reparenting.
deleteDuplicateNode(dbc, suspectNodeEntry);
createDuplicateNodeDeletedEvent(suspectNodeEntry);
}
}
}
catch (SQLException e) {
log().error("SQLException while updating interface: " + ifaddrString + " on nodeid: " + node.getNodeId());
throw e;
} finally {
d.cleanUp();
}
}
}
}
if (dbIpIfEntry == null) {
if (log().isDebugEnabled()) {
log().debug("updateInterface: interface " + ifaddr + " ifIndex "
+ ifIndex + " not in database under nodeid "
+ node.getNodeId()
+ ", creating new interface object.");
}
if (ifIndex == -1 && !doesSnmp) {
dbIpIfEntry = DbIpInterfaceEntry.create(node.getNodeId(),
ifaddr);
} else {
dbIpIfEntry = DbIpInterfaceEntry.create(node.getNodeId(),
ifaddr, ifIndex);
dbIpIfEntry.setIfIndex(ifIndex);
}
if (isDuplicateInterface(dbc, ifaddr, node.getNodeId())) {
m_eventList.add(createDuplicateIpAddressEvent(dbIpIfEntry));
}
newIpIfEntry = true;
}
DbIpInterfaceEntry currIpIfEntry =
getNewDbIpInterfaceEntry(node, snmpc, doesSnmp, ifaddr);
if (doesSnmp && snmpc != null) {
// update SNMP info if available
updateSnmpInfo(dbc, node, snmpc, currIpIfEntry.getIfAddress(),
currIpIfEntry.getIfIndex());
}
// update ipinterface for the updating interface
updateInterfaceInfo(dbc, now, node, dbIpIfEntry, currIpIfEntry,
newIpIfEntry, reparentFlag);
// update IfServices for the updating interface
updateServiceInfo(dbc, node, dbIpIfEntry, newIpIfEntry, protocols);
}
|
void function(Connection dbc, Date now, DbNodeEntry node, InetAddress target, InetAddress ifaddr, List<SupportedProtocol> protocols, IfSnmpCollector snmpc, boolean doesSnmp) throws SQLException { final String ifaddrString = str(ifaddr); if (log().isDebugEnabled()) { log().debug(STR + ifaddrString + STR + str(target) + ")"); if (doesSnmp) { log().debug(STR + STR + (snmpc == null ? STR : str(snmpc.getCollectorTargetAddress()))); } } boolean reparentFlag = false; boolean newIpIfEntry = false; int ifIndex = -1; DbIpInterfaceEntry dbIpIfEntry = DbIpInterfaceEntry.get(dbc,node.getNodeId(), ifaddr); if (doesSnmp && snmpc != null && snmpc.hasIpAddrTable()) { ifIndex = snmpc.getIfIndex(ifaddr); if (log().isDebugEnabled()) { log().debug(STR + ifaddrString + STR + ifIndex + STR); } IpAddrTable ipAddrTable = null; ipAddrTable = snmpc.getIpAddrTable(); if (ipAddrTable == null) { log().error(STR + STR); } else { if (ifaddrString.equals(STR) ifaddr.isLoopbackAddress()) { if (log().isDebugEnabled()) { log().debug(STR + STR + ifaddrString); } } else { if (log().isDebugEnabled()) { log().debug(STR + STR + ifaddrString); } PreparedStatement stmt = null; final DBUtils d = new DBUtils(RescanProcessor.class); try { stmt = dbc.prepareStatement(SQL_DB_RETRIEVE_OTHER_NODES); d.watch(stmt); stmt.setString(1, ifaddrString); stmt.setInt(2, node.getNodeId()); ResultSet rs = stmt.executeQuery(); d.watch(rs); while (rs.next()) { int existingNodeId = rs.getInt(1); if (log().isDebugEnabled()) { log().debug(STR + ifaddrString + STR + existingNodeId); } DbNodeEntry suspectNodeEntry = DbNodeEntry.get(dbc, existingNodeId); if (suspectNodeEntry == null) { continue; } DbIpInterfaceEntry[] tmpIfArray = suspectNodeEntry.getInterfaces(dbc); if (areDbInterfacesInSnmpCollection(tmpIfArray, snmpc)) { for (int i = 0; i < tmpIfArray.length; i++) { InetAddress addr = tmpIfArray[i].getIfAddress(); int index = snmpc.getIfIndex(addr); final String addrString = str(addr); if (addrString == null 
addrString.equals(STR) addr.isLoopbackAddress()) { continue; } if (log().isDebugEnabled()) { log().debug(STR + STR + addrString + STR + node.getNodeId() + STR + existingNodeId); } reparentInterface(dbc, addr, index, node.getNodeId(), existingNodeId); createInterfaceReparentedEvent(node, existingNodeId, addr); } if (log().isDebugEnabled()) { log().debug(STR + ifaddrString + STR + node.getNodeId() + STR + STR + existingNodeId); } dbIpIfEntry = DbIpInterfaceEntry.get(dbc, node.getNodeId(), ifaddr); reparentFlag = true; deleteDuplicateNode(dbc, suspectNodeEntry); createDuplicateNodeDeletedEvent(suspectNodeEntry); } } } catch (SQLException e) { log().error(STR + ifaddrString + STR + node.getNodeId()); throw e; } finally { d.cleanUp(); } } } } if (dbIpIfEntry == null) { if (log().isDebugEnabled()) { log().debug(STR + ifaddr + STR + ifIndex + STR + node.getNodeId() + STR); } if (ifIndex == -1 && !doesSnmp) { dbIpIfEntry = DbIpInterfaceEntry.create(node.getNodeId(), ifaddr); } else { dbIpIfEntry = DbIpInterfaceEntry.create(node.getNodeId(), ifaddr, ifIndex); dbIpIfEntry.setIfIndex(ifIndex); } if (isDuplicateInterface(dbc, ifaddr, node.getNodeId())) { m_eventList.add(createDuplicateIpAddressEvent(dbIpIfEntry)); } newIpIfEntry = true; } DbIpInterfaceEntry currIpIfEntry = getNewDbIpInterfaceEntry(node, snmpc, doesSnmp, ifaddr); if (doesSnmp && snmpc != null) { updateSnmpInfo(dbc, node, snmpc, currIpIfEntry.getIfAddress(), currIpIfEntry.getIfIndex()); } updateInterfaceInfo(dbc, now, node, dbIpIfEntry, currIpIfEntry, newIpIfEntry, reparentFlag); updateServiceInfo(dbc, node, dbIpIfEntry, newIpIfEntry, protocols); }
|
/**
* This method is responsible for updating the ipInterface table entry for a
* specific interface.
*
* @param dbc
* Database Connection
* @param now
* Date/time to be associated with the update.
* @param node
* Node entry for the node being rescanned
* @param target
* Target interface (from IfCollector.getTarget())
* @param ifaddr
* Interface being updated.
* @param protocols
* Protocols supported by the interface.
* @param snmpc
* SNMP collector or null if SNMP not supported.
* @param doesSnmp
* Indicates that the interface supports SNMP
*
* @throws SQLException
* if there is a problem updating the ipInterface table.
*/
|
This method is responsible for updating the ipInterface table entry for a specific interface
|
updateInterface
|
{
"repo_name": "vishwaAbhinav/OpenNMS",
"path": "opennms-services/src/main/java/org/opennms/netmgt/capsd/RescanProcessor.java",
"license": "gpl-2.0",
"size": 164395
}
|
[
"java.net.InetAddress",
"java.sql.Connection",
"java.sql.PreparedStatement",
"java.sql.ResultSet",
"java.sql.SQLException",
"java.util.Date",
"java.util.List",
"org.opennms.core.utils.DBUtils",
"org.opennms.core.utils.InetAddressUtils",
"org.opennms.netmgt.capsd.IfCollector",
"org.opennms.netmgt.capsd.snmp.IpAddrTable"
] |
import java.net.InetAddress; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Date; import java.util.List; import org.opennms.core.utils.DBUtils; import org.opennms.core.utils.InetAddressUtils; import org.opennms.netmgt.capsd.IfCollector; import org.opennms.netmgt.capsd.snmp.IpAddrTable;
|
import java.net.*; import java.sql.*; import java.util.*; import org.opennms.core.utils.*; import org.opennms.netmgt.capsd.*; import org.opennms.netmgt.capsd.snmp.*;
|
[
"java.net",
"java.sql",
"java.util",
"org.opennms.core",
"org.opennms.netmgt"
] |
java.net; java.sql; java.util; org.opennms.core; org.opennms.netmgt;
| 364,176
|
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
try {
return super.onInterceptTouchEvent(ev);
} catch (IllegalArgumentException e) {
e.printStackTrace();
return false;
}
}
|
boolean function(MotionEvent ev) { try { return super.onInterceptTouchEvent(ev); } catch (IllegalArgumentException e) { e.printStackTrace(); return false; } }
|
/**
* Bugfix explained in https://github.com/chrisbanes/PhotoView
*/
|
Bugfix explained in HREF
|
onInterceptTouchEvent
|
{
"repo_name": "jinsedeyuzhou/NewsClient",
"path": "library/src/main/java/es/voghdev/pdfviewpager/library/PDFViewPagerZoom.java",
"license": "gpl-3.0",
"size": 2661
}
|
[
"android.view.MotionEvent"
] |
import android.view.MotionEvent;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 2,760,555
|
void fsError(StaffAttemptID staffId, String message) throws IOException;
|
void fsError(StaffAttemptID staffId, String message) throws IOException;
|
/**
* Report that the staff encounted a local fileSystem error.
* @param staffId the id of staff
* @param message error messages
*/
|
Report that the staff encounted a local fileSystem error
|
fsError
|
{
"repo_name": "LiuJianan/Graduate-Graph",
"path": "src/java/com/chinamobile/bcbsp/workermanager/WorkerAgentProtocol.java",
"license": "apache-2.0",
"size": 3813
}
|
[
"com.chinamobile.bcbsp.util.StaffAttemptID",
"java.io.IOException"
] |
import com.chinamobile.bcbsp.util.StaffAttemptID; import java.io.IOException;
|
import com.chinamobile.bcbsp.util.*; import java.io.*;
|
[
"com.chinamobile.bcbsp",
"java.io"
] |
com.chinamobile.bcbsp; java.io;
| 435,714
|
private Class findBaseParent(final Class type) {
return (Objects.isNull(type) || Object.class.equals(type)
? null
: Optional.ofNullable(type)
.map(Class::getSuperclass)
.filter(this::isBaseType)
.orElse(findBaseParent(type.getSuperclass())));
}
|
Class function(final Class type) { return (Objects.isNull(type) Object.class.equals(type) ? null : Optional.ofNullable(type) .map(Class::getSuperclass) .filter(this::isBaseType) .orElse(findBaseParent(type.getSuperclass()))); }
|
/**
* Find on the parent hierarchy of the given type the Class that is a BaseType
* @param type
* @return
*/
|
Find on the parent hierarchy of the given type the Class that is a BaseType
|
findBaseParent
|
{
"repo_name": "jhrcek/kie-wb-common",
"path": "kie-wb-common-stunner/kie-wb-common-stunner-core/kie-wb-common-stunner-commons/kie-wb-common-stunner-backend-common/src/main/java/org/kie/workbench/common/stunner/core/backend/definition/adapter/reflect/BackendDefinitionAdapter.java",
"license": "apache-2.0",
"size": 11408
}
|
[
"java.util.Objects",
"java.util.Optional"
] |
import java.util.Objects; import java.util.Optional;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 283,144
|
@Test
public void testChallengePOST() {
final String securityPackage = "Negotiate";
IWindowsCredentialsHandle clientCredentials = null;
WindowsSecurityContextImpl clientContext = null;
try {
// client credentials handle
clientCredentials = WindowsCredentialsHandleImpl.getCurrent(securityPackage);
clientCredentials.initialize();
// initial client security context
clientContext = new WindowsSecurityContextImpl();
clientContext.setPrincipalName(WindowsAccountImpl.getCurrentUsername());
clientContext.setCredentialsHandle(clientCredentials.getHandle());
clientContext.setSecurityPackage(securityPackage);
clientContext.initialize(null, null, WindowsAccountImpl.getCurrentUsername());
final SimpleHttpRequest request = new SimpleHttpRequest();
request.setMethod("POST");
request.setContentLength(0);
final String clientToken = BaseEncoding.base64().encode(clientContext.getToken());
request.addHeader("Authorization", securityPackage + " " + clientToken);
final SimpleHttpResponse response = new SimpleHttpResponse();
this.authenticator.authenticate(request, response, null);
Assert.assertTrue(response.getHeader("WWW-Authenticate").startsWith(securityPackage + " "));
Assert.assertEquals("keep-alive", response.getHeader("Connection"));
Assert.assertEquals(2, response.getHeaderNames().length);
Assert.assertEquals(401, response.getStatus());
} finally {
if (clientContext != null) {
clientContext.dispose();
}
if (clientCredentials != null) {
clientCredentials.dispose();
}
}
}
|
void function() { final String securityPackage = STR; IWindowsCredentialsHandle clientCredentials = null; WindowsSecurityContextImpl clientContext = null; try { clientCredentials = WindowsCredentialsHandleImpl.getCurrent(securityPackage); clientCredentials.initialize(); clientContext = new WindowsSecurityContextImpl(); clientContext.setPrincipalName(WindowsAccountImpl.getCurrentUsername()); clientContext.setCredentialsHandle(clientCredentials.getHandle()); clientContext.setSecurityPackage(securityPackage); clientContext.initialize(null, null, WindowsAccountImpl.getCurrentUsername()); final SimpleHttpRequest request = new SimpleHttpRequest(); request.setMethod("POST"); request.setContentLength(0); final String clientToken = BaseEncoding.base64().encode(clientContext.getToken()); request.addHeader(STR, securityPackage + " " + clientToken); final SimpleHttpResponse response = new SimpleHttpResponse(); this.authenticator.authenticate(request, response, null); Assert.assertTrue(response.getHeader(STR).startsWith(securityPackage + " ")); Assert.assertEquals(STR, response.getHeader(STR)); Assert.assertEquals(2, response.getHeaderNames().length); Assert.assertEquals(401, response.getStatus()); } finally { if (clientContext != null) { clientContext.dispose(); } if (clientCredentials != null) { clientCredentials.dispose(); } } }
|
/**
* Test challenge post.
*/
|
Test challenge post
|
testChallengePOST
|
{
"repo_name": "victorbriz/waffle",
"path": "Source/JNA/waffle-tomcat6/src/test/java/waffle/apache/NegotiateAuthenticatorTests.java",
"license": "epl-1.0",
"size": 13241
}
|
[
"com.google.common.io.BaseEncoding",
"org.junit.Assert"
] |
import com.google.common.io.BaseEncoding; import org.junit.Assert;
|
import com.google.common.io.*; import org.junit.*;
|
[
"com.google.common",
"org.junit"
] |
com.google.common; org.junit;
| 1,600,483
|
public int regenerateNeededCache(User loggedInUser, String channelLabel) {
channelAdminPermCheck(loggedInUser);
Channel chan = lookupChannelByLabel(loggedInUser, channelLabel);
List<Long> chanList = new ArrayList<Long>();
chanList.add(chan.getId());
ErrataCacheManager.updateCacheForChannelsAsync(chanList);
return 1;
}
|
int function(User loggedInUser, String channelLabel) { channelAdminPermCheck(loggedInUser); Channel chan = lookupChannelByLabel(loggedInUser, channelLabel); List<Long> chanList = new ArrayList<Long>(); chanList.add(chan.getId()); ErrataCacheManager.updateCacheForChannelsAsync(chanList); return 1; }
|
/**
* Regenerate the errata cache for all the systems subscribed to a particular channel
* @param loggedInUser The current user
* @param channelLabel the channel label
* @return int - 1 on success!
*
* @xmlrpc.doc Completely clear and regenerate the needed Errata and Package
* cache for all systems subscribed to the specified channel. This should
* be used only if you believe your cache is incorrect for all the systems
* in a given channel. This will schedule an asynchronous action to actually
* do the processing.
* @xmlrpc.param #session_key()
* @xmlrpc.param #param_desc("string", "channelLabel", "the label of the
* channel")
* @xmlrpc.returntype #return_int_success()
*
*/
|
Regenerate the errata cache for all the systems subscribed to a particular channel
|
regenerateNeededCache
|
{
"repo_name": "ogajduse/spacewalk",
"path": "java/code/src/com/redhat/rhn/frontend/xmlrpc/channel/software/ChannelSoftwareHandler.java",
"license": "gpl-2.0",
"size": 133982
}
|
[
"com.redhat.rhn.domain.channel.Channel",
"com.redhat.rhn.domain.user.User",
"com.redhat.rhn.manager.errata.cache.ErrataCacheManager",
"java.util.ArrayList",
"java.util.List"
] |
import com.redhat.rhn.domain.channel.Channel; import com.redhat.rhn.domain.user.User; import com.redhat.rhn.manager.errata.cache.ErrataCacheManager; import java.util.ArrayList; import java.util.List;
|
import com.redhat.rhn.domain.channel.*; import com.redhat.rhn.domain.user.*; import com.redhat.rhn.manager.errata.cache.*; import java.util.*;
|
[
"com.redhat.rhn",
"java.util"
] |
com.redhat.rhn; java.util;
| 566,366
|
public OutputStream newOutputStream(int index) throws IOException {
synchronized (DiskLruCache.this) {
if (entry.currentEditor != this) {
throw new IllegalStateException();
}
return new FaultHidingOutputStream(new FileOutputStream(entry.getDirtyFile(index)));
}
}
|
OutputStream function(int index) throws IOException { synchronized (DiskLruCache.this) { if (entry.currentEditor != this) { throw new IllegalStateException(); } return new FaultHidingOutputStream(new FileOutputStream(entry.getDirtyFile(index))); } }
|
/**
* Returns a new unbuffered output stream to write the value at
* {@code index}. If the underlying output stream encounters errors
* when writing to the filesystem, this edit will be aborted when
* {@link #commit} is called. The returned output stream does not throw
* IOExceptions.
*/
|
Returns a new unbuffered output stream to write the value at index. If the underlying output stream encounters errors when writing to the filesystem, this edit will be aborted when <code>#commit</code> is called. The returned output stream does not throw IOExceptions
|
newOutputStream
|
{
"repo_name": "niostar/DouFM",
"path": "app/src/main/java/info/doufm/android/io/DiskLruCache.java",
"license": "mit",
"size": 33957
}
|
[
"java.io.FileOutputStream",
"java.io.IOException",
"java.io.OutputStream"
] |
import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 7,639
|
@Override
@Test(expected = UnsupportedOperationException.class)
public void testPersistNull() {
getDao().persist(null);
}
|
@Test(expected = UnsupportedOperationException.class) void function() { getDao().persist(null); }
|
/**
* Overwritten method from {@link GenericDaoTest}. This method has to be called in a different way because an
* {@link UnsupportedOperationException} is expected.
*/
|
Overwritten method from <code>GenericDaoTest</code>. This method has to be called in a different way because an <code>UnsupportedOperationException</code> is expected
|
testPersistNull
|
{
"repo_name": "physalix-enrollment/physalix",
"path": "User/src/test/java/hsa/awp/user/dao/TestSingleUserDirectoryDao.java",
"license": "gpl-3.0",
"size": 15979
}
|
[
"org.junit.Test"
] |
import org.junit.Test;
|
import org.junit.*;
|
[
"org.junit"
] |
org.junit;
| 259,676
|
@Override
public void flush() throws IOException {
flush(true);
}
|
void function() throws IOException { flush(true); }
|
/**
* Flushes this output stream and forces any buffered output bytes to be written out to the stream.
*
* @throws IOException
* if an I/O error occurs.
*/
|
Flushes this output stream and forces any buffered output bytes to be written out to the stream
|
flush
|
{
"repo_name": "yersan/wildfly-core",
"path": "process-controller/src/main/java/org/jboss/as/process/stdin/BaseNCodecOutputStream.java",
"license": "lgpl-2.1",
"size": 5341
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 66,278
|
public static String getFileSHA1Token(Long fileId, String filename,
User user, Long expire, String type) {
List<String> data = new ArrayList<String>();
data.add(expire.toString());
data.add(user.getId().toString());
data.add(fileId.toString());
data.add(filename);
data.add(type);
return SessionSwap.rhnHmacData(data);
}
|
static String function(Long fileId, String filename, User user, Long expire, String type) { List<String> data = new ArrayList<String>(); data.add(expire.toString()); data.add(user.getId().toString()); data.add(fileId.toString()); data.add(filename); data.add(type); return SessionSwap.rhnHmacData(data); }
|
/**
* get the Hmac SHA1 token use in constructing a package download url
* also useful if verifying a package download url
* @param fileId the file id
* @param filename the filename of the file
* @param user the user requesting the file
* @param expire the expire time
* @param type the type of the download (i.e. package, iso, etc..)
* @return a string representing the hash
*/
|
get the Hmac SHA1 token use in constructing a package download url also useful if verifying a package download url
|
getFileSHA1Token
|
{
"repo_name": "PaulWay/spacewalk",
"path": "java/code/src/com/redhat/rhn/manager/download/DownloadManager.java",
"license": "gpl-2.0",
"size": 7058
}
|
[
"com.redhat.rhn.common.security.SessionSwap",
"com.redhat.rhn.domain.user.User",
"java.util.ArrayList",
"java.util.List"
] |
import com.redhat.rhn.common.security.SessionSwap; import com.redhat.rhn.domain.user.User; import java.util.ArrayList; import java.util.List;
|
import com.redhat.rhn.common.security.*; import com.redhat.rhn.domain.user.*; import java.util.*;
|
[
"com.redhat.rhn",
"java.util"
] |
com.redhat.rhn; java.util;
| 1,898,904
|
private void activate(String id) throws InterruptedException, KeeperException {
log.debug("Activating job: "+id);
//remove job request node
try {
zk.delete(newPath+"/"+id, -1);
} catch (KeeperException.NoNodeException e) {
// Parent gone with the job? Ignore.
}
//create active job node
zk.create(activePath+"/"+id, null, Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
}
|
void function(String id) throws InterruptedException, KeeperException { log.debug(STR+id); try { zk.delete(newPath+"/"+id, -1); } catch (KeeperException.NoNodeException e) { } zk.create(activePath+"/"+id, null, Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL); }
|
/**
* Mark job as active
*
* @param id Job id
* @throws InterruptedException
* @throws KeeperException
*/
|
Mark job as active
|
activate
|
{
"repo_name": "vytautas/nfdist",
"path": "src/nfdist/zookeeper/JobQueue.java",
"license": "bsd-2-clause",
"size": 5686
}
|
[
"org.apache.zookeeper.CreateMode",
"org.apache.zookeeper.KeeperException",
"org.apache.zookeeper.ZooDefs"
] |
import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZooDefs;
|
import org.apache.zookeeper.*;
|
[
"org.apache.zookeeper"
] |
org.apache.zookeeper;
| 1,454,934
|
private void writeObject(OutputStream output, Object object, boolean topLevel)
throws IOException
{
if (object == null)
{
return;
}
if (object instanceof Operator)
{
writeOperator(output, (Operator) object);
return;
}
if (!(object instanceof COSBase))
{
throw new IOException("Error: Unknown type in object stream:" + object);
}
COSBase base = object instanceof COSObject ? ((COSObject) object).getObject()
: (COSBase) object;
if (base == null)
{
// the object reference can't be dereferenced
// be lenient and write the reference nevertheless
if (!topLevel && object instanceof COSObject)
{
writeObjectReference(output, ((COSObject) object).getKey());
}
return;
}
if (!topLevel && this.compressionPool.contains(base))
{
COSObjectKey key = this.compressionPool.getKey(base);
if (key == null)
{
throw new IOException(
"Error: Adding unknown object reference to object stream:" + object);
}
writeObjectReference(output, key);
}
else if (base instanceof COSString)
{
writeCOSString(output, (COSString) base);
}
else if (base instanceof COSFloat)
{
writeCOSFloat(output, (COSFloat) base);
}
else if (base instanceof COSInteger)
{
writeCOSInteger(output, (COSInteger) base);
}
else if (base instanceof COSBoolean)
{
writeCOSBoolean(output, (COSBoolean) base);
}
else if (base instanceof COSName)
{
writeCOSName(output, (COSName) base);
}
else if (base instanceof COSArray)
{
writeCOSArray(output, (COSArray) base);
}
else if (base instanceof COSDictionary)
{
writeCOSDictionary(output, (COSDictionary) base);
}
else if (base instanceof COSNull)
{
writeCOSNull(output);
}
else
{
throw new IOException("Error: Unknown type in object stream:" + object);
}
}
|
void function(OutputStream output, Object object, boolean topLevel) throws IOException { if (object == null) { return; } if (object instanceof Operator) { writeOperator(output, (Operator) object); return; } if (!(object instanceof COSBase)) { throw new IOException(STR + object); } COSBase base = object instanceof COSObject ? ((COSObject) object).getObject() : (COSBase) object; if (base == null) { if (!topLevel && object instanceof COSObject) { writeObjectReference(output, ((COSObject) object).getKey()); } return; } if (!topLevel && this.compressionPool.contains(base)) { COSObjectKey key = this.compressionPool.getKey(base); if (key == null) { throw new IOException( STR + object); } writeObjectReference(output, key); } else if (base instanceof COSString) { writeCOSString(output, (COSString) base); } else if (base instanceof COSFloat) { writeCOSFloat(output, (COSFloat) base); } else if (base instanceof COSInteger) { writeCOSInteger(output, (COSInteger) base); } else if (base instanceof COSBoolean) { writeCOSBoolean(output, (COSBoolean) base); } else if (base instanceof COSName) { writeCOSName(output, (COSName) base); } else if (base instanceof COSArray) { writeCOSArray(output, (COSArray) base); } else if (base instanceof COSDictionary) { writeCOSDictionary(output, (COSDictionary) base); } else if (base instanceof COSNull) { writeCOSNull(output); } else { throw new IOException(STR + object); } }
|
/**
* This method prepares and writes COS data to the object stream by selecting appropriate specialized methods for
* the content.
*
* @param output The stream, that shall be written to.
* @param object The content, that shall be written.
* @param topLevel True, if the currently written object is a top level entry of this object stream.
* @throws IOException Shall be thrown, when an exception occurred for the write operation.
*/
|
This method prepares and writes COS data to the object stream by selecting appropriate specialized methods for the content
|
writeObject
|
{
"repo_name": "apache/pdfbox",
"path": "pdfbox/src/main/java/org/apache/pdfbox/pdfwriter/compress/COSWriterObjectStream.java",
"license": "apache-2.0",
"size": 15033
}
|
[
"java.io.IOException",
"java.io.OutputStream",
"org.apache.pdfbox.contentstream.operator.Operator",
"org.apache.pdfbox.cos.COSArray",
"org.apache.pdfbox.cos.COSBase",
"org.apache.pdfbox.cos.COSBoolean",
"org.apache.pdfbox.cos.COSDictionary",
"org.apache.pdfbox.cos.COSFloat",
"org.apache.pdfbox.cos.COSInteger",
"org.apache.pdfbox.cos.COSName",
"org.apache.pdfbox.cos.COSNull",
"org.apache.pdfbox.cos.COSObject",
"org.apache.pdfbox.cos.COSObjectKey",
"org.apache.pdfbox.cos.COSString"
] |
import java.io.IOException; import java.io.OutputStream; import org.apache.pdfbox.contentstream.operator.Operator; import org.apache.pdfbox.cos.COSArray; import org.apache.pdfbox.cos.COSBase; import org.apache.pdfbox.cos.COSBoolean; import org.apache.pdfbox.cos.COSDictionary; import org.apache.pdfbox.cos.COSFloat; import org.apache.pdfbox.cos.COSInteger; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.cos.COSNull; import org.apache.pdfbox.cos.COSObject; import org.apache.pdfbox.cos.COSObjectKey; import org.apache.pdfbox.cos.COSString;
|
import java.io.*; import org.apache.pdfbox.contentstream.operator.*; import org.apache.pdfbox.cos.*;
|
[
"java.io",
"org.apache.pdfbox"
] |
java.io; org.apache.pdfbox;
| 2,095,883
|
public MetricsResultInfo withSegments(List<MetricsSegmentInfo> segments) {
this.segments = segments;
return this;
}
|
MetricsResultInfo function(List<MetricsSegmentInfo> segments) { this.segments = segments; return this; }
|
/**
* Set segmented metric data (if segmented).
*
* @param segments the segments value to set
* @return the MetricsResultInfo object itself.
*/
|
Set segmented metric data (if segmented)
|
withSegments
|
{
"repo_name": "navalev/azure-sdk-for-java",
"path": "sdk/applicationinsights/microsoft-azure-applicationinsights-query/src/main/java/com/microsoft/azure/applicationinsights/query/models/MetricsResultInfo.java",
"license": "mit",
"size": 3701
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 71,576
|
@Path("resolve")
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces({MediaType.APPLICATION_JSON, MediaType.TEXT_PLAIN})
public CLIOutputResponseList resolve(final ResolveRequest request) throws ApiException, IOException {
request.setProjectPath(getAbsoluteProjectPath(request.getProjectPath()));
return subversionApi.resolve(request);
}
|
@Path(STR) @Consumes(MediaType.APPLICATION_JSON) @Produces({MediaType.APPLICATION_JSON, MediaType.TEXT_PLAIN}) CLIOutputResponseList function(final ResolveRequest request) throws ApiException, IOException { request.setProjectPath(getAbsoluteProjectPath(request.getProjectPath())); return subversionApi.resolve(request); }
|
/**
* Resolve conflicts.
*
* @param request
* the resolve conflicts request
* @return the resolve conflicts response
* @throws IOException
* if there is a problem executing the command
* @throws SubversionException
* if there is a Subversion issue
*/
|
Resolve conflicts
|
resolve
|
{
"repo_name": "gazarenkov/che-sketch",
"path": "plugins/plugin-svn/che-plugin-svn-ext-server/src/main/java/org/eclipse/che/plugin/svn/server/rest/SubversionService.java",
"license": "epl-1.0",
"size": 20504
}
|
[
"java.io.IOException",
"javax.ws.rs.Consumes",
"javax.ws.rs.Path",
"javax.ws.rs.Produces",
"javax.ws.rs.core.MediaType",
"org.eclipse.che.api.core.ApiException",
"org.eclipse.che.plugin.svn.shared.CLIOutputResponseList",
"org.eclipse.che.plugin.svn.shared.ResolveRequest"
] |
import java.io.IOException; import javax.ws.rs.Consumes; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import org.eclipse.che.api.core.ApiException; import org.eclipse.che.plugin.svn.shared.CLIOutputResponseList; import org.eclipse.che.plugin.svn.shared.ResolveRequest;
|
import java.io.*; import javax.ws.rs.*; import javax.ws.rs.core.*; import org.eclipse.che.api.core.*; import org.eclipse.che.plugin.svn.shared.*;
|
[
"java.io",
"javax.ws",
"org.eclipse.che"
] |
java.io; javax.ws; org.eclipse.che;
| 2,399,289
|
EReference getEntitlements_Entitlements();
|
EReference getEntitlements_Entitlements();
|
/**
* Returns the meta object for the containment reference list '{@link io.opensemantics.semiotics.model.assessment.Entitlements#getEntitlements <em>Entitlements</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference list '<em>Entitlements</em>'.
* @see io.opensemantics.semiotics.model.assessment.Entitlements#getEntitlements()
* @see #getEntitlements()
* @generated
*/
|
Returns the meta object for the containment reference list '<code>io.opensemantics.semiotics.model.assessment.Entitlements#getEntitlements Entitlements</code>'.
|
getEntitlements_Entitlements
|
{
"repo_name": "CoastalHacking/semiotics-main",
"path": "bundles/io.opensemantics.semiotics.model.assessment/src-gen/io/opensemantics/semiotics/model/assessment/AssessmentPackage.java",
"license": "apache-2.0",
"size": 151116
}
|
[
"org.eclipse.emf.ecore.EReference"
] |
import org.eclipse.emf.ecore.EReference;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,795,331
|
private List<File> getFileListFromNode(CheckableTreeNode node) {
List<File> filesList = new ArrayList<>();
Enumeration<CheckableTreeNode> children = node.depthFirstEnumeration();
List<CheckableTreeNode> nodesToRemove = new ArrayList<>();
for (CheckableTreeNode child : Collections.list(children)) {
if (child.isLeaf() && child.isSelected()) {
File nodeFile = ((FileNodeWrapper) child.getUserObject()).file;
if ((nodeFile != null) && nodeFile.isFile()) {
filesList.add(nodeFile);
nodesToRemove.add(child);
}
}
}
// remove imported files from tree
DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
for (CheckableTreeNode nodeToRemove : nodesToRemove) {
DefaultMutableTreeNode parent = (DefaultMutableTreeNode) nodeToRemove.getParent();
model.removeNodeFromParent(nodeToRemove);
// remove empty parent node
while ((parent != null) && parent.isLeaf()) {
DefaultMutableTreeNode pp = (DefaultMutableTreeNode) parent.getParent();
if (pp != null) {
model.removeNodeFromParent(parent);
}
parent = pp;
}
// TODO: update counter / see: getTreeCellRendererComponent for label generation
}
tree.invalidate();
tree.repaint();
return filesList;
}
|
List<File> function(CheckableTreeNode node) { List<File> filesList = new ArrayList<>(); Enumeration<CheckableTreeNode> children = node.depthFirstEnumeration(); List<CheckableTreeNode> nodesToRemove = new ArrayList<>(); for (CheckableTreeNode child : Collections.list(children)) { if (child.isLeaf() && child.isSelected()) { File nodeFile = ((FileNodeWrapper) child.getUserObject()).file; if ((nodeFile != null) && nodeFile.isFile()) { filesList.add(nodeFile); nodesToRemove.add(child); } } } DefaultTreeModel model = (DefaultTreeModel) tree.getModel(); for (CheckableTreeNode nodeToRemove : nodesToRemove) { DefaultMutableTreeNode parent = (DefaultMutableTreeNode) nodeToRemove.getParent(); model.removeNodeFromParent(nodeToRemove); while ((parent != null) && parent.isLeaf()) { DefaultMutableTreeNode pp = (DefaultMutableTreeNode) parent.getParent(); if (pp != null) { model.removeNodeFromParent(parent); } parent = pp; } } tree.invalidate(); tree.repaint(); return filesList; }
|
/**
* Creates a list of {@link File}s for all leaf nodes in the tree structure
* <code>node</code>, which have been marked as <i>selected</i>. <br>
* <br>
* <code>Selected</code> nodes correspond to those entries in the tree,
* whose checkbox is <code>checked</code>.
*
* SIDE EFFECT: The checked nodes are removed from the tree.
*
* @param node
* The root node representing a tree structure.
* @return A list of files of all checked leaf nodes.
*/
|
Creates a list of <code>File</code>s for all leaf nodes in the tree structure <code>node</code>, which have been marked as selected. <code>Selected</code> nodes correspond to those entries in the tree, whose checkbox is <code>checked</code>
|
getFileListFromNode
|
{
"repo_name": "iksmada/DC-UFSCar-ES2-201601-GrupoDilema",
"path": "src/main/java/net/sf/jabref/gui/FindUnlinkedFilesDialog.java",
"license": "gpl-2.0",
"size": 47714
}
|
[
"java.io.File",
"java.util.ArrayList",
"java.util.Collections",
"java.util.Enumeration",
"java.util.List",
"javax.swing.tree.DefaultMutableTreeNode",
"javax.swing.tree.DefaultTreeModel"
] |
import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.List; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.DefaultTreeModel;
|
import java.io.*; import java.util.*; import javax.swing.tree.*;
|
[
"java.io",
"java.util",
"javax.swing"
] |
java.io; java.util; javax.swing;
| 966,527
|
private void object2View() {
Employee employee = new Employee();
try {
employee.setId(this.getDataFile().getResultSet().getLong("id"));
employee.setFirstname(this.getDataFile().getResultSet().getString("firstname"));
employee.setLastname(this.getDataFile().getResultSet().getString("lastname"));
employee.setPhoto(
(this.getDataFile().getResultSet().getString("photo") == null)
? "" : this.getDataFile().getResultSet().getString("photo"));
employee.setGender(this.getDataFile().getResultSet().getInt("gender"));
employee.setBornDate(this.getDataFile().getResultSet().getDate("bornDate"));
employee.setHiredDate(this.getDataFile().getResultSet().getDate("hiredDate"));
employee.setSalary(this.getDataFile().getResultSet().getInt("salary"));
this.object2View(employee);
} catch (SQLException ex) {
Logger.getLogger(FraEmployee.class.getName()).log(Level.SEVERE, null, ex);
}
}
|
void function() { Employee employee = new Employee(); try { employee.setId(this.getDataFile().getResultSet().getLong("id")); employee.setFirstname(this.getDataFile().getResultSet().getString(STR)); employee.setLastname(this.getDataFile().getResultSet().getString(STR)); employee.setPhoto( (this.getDataFile().getResultSet().getString("photo") == null) ? STRphotoSTRgenderSTRbornDateSTRhiredDateSTRsalary")); this.object2View(employee); } catch (SQLException ex) { Logger.getLogger(FraEmployee.class.getName()).log(Level.SEVERE, null, ex); } }
|
/**
* Result set -> object
*/
|
Result set -> object
|
object2View
|
{
"repo_name": "jfmendozam/BillApp",
"path": "BillApp/src/billapp/view/FraEmployee.java",
"license": "apache-2.0",
"size": 38363
}
|
[
"java.sql.SQLException",
"java.util.logging.Level",
"java.util.logging.Logger"
] |
import java.sql.SQLException; import java.util.logging.Level; import java.util.logging.Logger;
|
import java.sql.*; import java.util.logging.*;
|
[
"java.sql",
"java.util"
] |
java.sql; java.util;
| 327,203
|
@Test
public void testEmptyValues() throws IOException {
// Initialize data
Table table = getDefaultTable();
int numValues = 10;
byte[] rowKey = dataHelper.randomData("testrow-");
byte[][] quals = dataHelper.randomData("qual-", numValues);
// Insert empty values. Null and byte[0] are interchangeable for puts (but not gets).
Put put = new Put(rowKey);
for (int i = 0; i < numValues; ++i) {
put.addColumn(SharedTestEnvRule.COLUMN_FAMILY, quals[i], i % 2 == 1 ? null : new byte[0]);
}
table.put(put);
// Check values
Get get = new Get(rowKey);
get.addFamily(SharedTestEnvRule.COLUMN_FAMILY);
Result result = table.get(get);
for (int i = 0; i < numValues; ++i) {
Assert.assertTrue(result.containsColumn(SharedTestEnvRule.COLUMN_FAMILY, quals[i]));
Assert.assertArrayEquals(new byte[0], result.getValue(SharedTestEnvRule.COLUMN_FAMILY, quals[i]));
}
// Cleanup
Delete delete = new Delete(rowKey);
table.delete(delete);
table.close();
}
|
void function() throws IOException { Table table = getDefaultTable(); int numValues = 10; byte[] rowKey = dataHelper.randomData(STR); byte[][] quals = dataHelper.randomData("qual-", numValues); Put put = new Put(rowKey); for (int i = 0; i < numValues; ++i) { put.addColumn(SharedTestEnvRule.COLUMN_FAMILY, quals[i], i % 2 == 1 ? null : new byte[0]); } table.put(put); Get get = new Get(rowKey); get.addFamily(SharedTestEnvRule.COLUMN_FAMILY); Result result = table.get(get); for (int i = 0; i < numValues; ++i) { Assert.assertTrue(result.containsColumn(SharedTestEnvRule.COLUMN_FAMILY, quals[i])); Assert.assertArrayEquals(new byte[0], result.getValue(SharedTestEnvRule.COLUMN_FAMILY, quals[i])); } Delete delete = new Delete(rowKey); table.delete(delete); table.close(); }
|
/**
* Requirement 3.11 - Result can contain empty values. (zero-length byte[]).
*/
|
Requirement 3.11 - Result can contain empty values. (zero-length byte[])
|
testEmptyValues
|
{
"repo_name": "kevinsi4508/cloud-bigtable-client",
"path": "bigtable-hbase-1.x-parent/bigtable-hbase-1.x-integration-tests/src/test/java/com/google/cloud/bigtable/hbase/TestGet.java",
"license": "apache-2.0",
"size": 21083
}
|
[
"com.google.cloud.bigtable.hbase.test_env.SharedTestEnvRule",
"java.io.IOException",
"org.apache.hadoop.hbase.client.Delete",
"org.apache.hadoop.hbase.client.Get",
"org.apache.hadoop.hbase.client.Put",
"org.apache.hadoop.hbase.client.Result",
"org.apache.hadoop.hbase.client.Table",
"org.junit.Assert"
] |
import com.google.cloud.bigtable.hbase.test_env.SharedTestEnvRule; import java.io.IOException; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; import org.junit.Assert;
|
import com.google.cloud.bigtable.hbase.test_env.*; import java.io.*; import org.apache.hadoop.hbase.client.*; import org.junit.*;
|
[
"com.google.cloud",
"java.io",
"org.apache.hadoop",
"org.junit"
] |
com.google.cloud; java.io; org.apache.hadoop; org.junit;
| 2,294,724
|
@Authorized(RadiologyPrivileges.GET_RADIOLOGY_STUDIES)
public RadiologyStudy getRadiologyStudy(Integer studyId);
|
@Authorized(RadiologyPrivileges.GET_RADIOLOGY_STUDIES) RadiologyStudy function(Integer studyId);
|
/**
* Get the {@code RadiologyStudy} by its {@code studyId}.
*
* @param studyId the study id of the wanted study
* @return the radiology study matching given study id
* @throws IllegalArgumentException if given null
* @should return radiology study matching given study id
* @should return null if no match was found
* @should throw illegal argument exception if given null
*/
|
Get the RadiologyStudy by its studyId
|
getRadiologyStudy
|
{
"repo_name": "openmrs/openmrs-module-radiologydcm4chee",
"path": "api/src/main/java/org/openmrs/module/radiology/study/RadiologyStudyService.java",
"license": "mpl-2.0",
"size": 3956
}
|
[
"org.openmrs.annotation.Authorized",
"org.openmrs.module.radiology.RadiologyPrivileges"
] |
import org.openmrs.annotation.Authorized; import org.openmrs.module.radiology.RadiologyPrivileges;
|
import org.openmrs.annotation.*; import org.openmrs.module.radiology.*;
|
[
"org.openmrs.annotation",
"org.openmrs.module"
] |
org.openmrs.annotation; org.openmrs.module;
| 568,285
|
public DatagramSenderAndReceiver withLocalPort(int localPort) {
Contract.checkState(state == STOPPED, "Sender/receiver has already been started");
Contract.checkArgument((localPort >= 0) && (localPort <= 65535), "Local port must be valid: {0}", localPort);
localEndPoint = new InetSocketAddress(localPort);
return this;
}
/**
* Sets the message consumer of this sender/receiver.
*
* @throws IllegalArgumentException if {@code messageConsumer} is {@code null}
|
DatagramSenderAndReceiver function(int localPort) { Contract.checkState(state == STOPPED, STR); Contract.checkArgument((localPort >= 0) && (localPort <= 65535), STR, localPort); localEndPoint = new InetSocketAddress(localPort); return this; } /** * Sets the message consumer of this sender/receiver. * * @throws IllegalArgumentException if {@code messageConsumer} is {@code null}
|
/**
* Sets the local port of this sender/receiver.
*
* @throws IllegalArgumentException if {@code localPort} is invalid
* @throws IllegalStateException if this sender/receiver has already been started
*
* @since 1.0
*/
|
Sets the local port of this sender/receiver
|
withLocalPort
|
{
"repo_name": "petrzelenka/sellcom-java",
"path": "src/main/java/org/sellcom/core/net/DatagramSenderAndReceiver.java",
"license": "apache-2.0",
"size": 24981
}
|
[
"java.net.InetSocketAddress",
"org.sellcom.core.Contract"
] |
import java.net.InetSocketAddress; import org.sellcom.core.Contract;
|
import java.net.*; import org.sellcom.core.*;
|
[
"java.net",
"org.sellcom.core"
] |
java.net; org.sellcom.core;
| 2,460,244
|
@Override
public void addPages( )
{
page0_ = new ScenarioPage0( selectionContainer_ );
addPage( page0_ );
if( selectionContainer_ != null ) {
Map< String, String > props = ProjectUtils
.getPropertiesForProject( selectionContainer_.getProject( ) );
if( props.get( "difficulties" ) != null ) //$NON-NLS-1$
{
page1_ = new ScenarioPage1( );
addPage( page1_ );
}
}
page2_ = new ScenarioPage2( );
addPage( page2_ );
super.addPages( );
}
|
void function( ) { page0_ = new ScenarioPage0( selectionContainer_ ); addPage( page0_ ); if( selectionContainer_ != null ) { Map< String, String > props = ProjectUtils .getPropertiesForProject( selectionContainer_.getProject( ) ); if( props.get( STR ) != null ) { page1_ = new ScenarioPage1( ); addPage( page1_ ); } } page2_ = new ScenarioPage2( ); addPage( page2_ ); super.addPages( ); }
|
/**
* Adding the page to the wizard.
*/
|
Adding the page to the wizard
|
addPages
|
{
"repo_name": "RushilPatel/BattleForWesnoth",
"path": "utils/umc_dev/org.wesnoth/src/org/wesnoth/wizards/scenario/ScenarioNewWizard.java",
"license": "gpl-2.0",
"size": 9106
}
|
[
"java.util.Map",
"org.wesnoth.projects.ProjectUtils"
] |
import java.util.Map; import org.wesnoth.projects.ProjectUtils;
|
import java.util.*; import org.wesnoth.projects.*;
|
[
"java.util",
"org.wesnoth.projects"
] |
java.util; org.wesnoth.projects;
| 849,604
|
private String escapeCommata(String arg) {
int level = 0;
StringBuilder result = new StringBuilder(arg.length());
try (Reader r = new StringReader(arg)) {
int c;
char lastChar = ANY_NORMAL_CHAR;
while ((c = r.read()) != -1) {
char nextChar = (char) c;
if (lastChar == '\\') {
lastChar = ANY_NORMAL_CHAR;
} else if (lastChar == '$' && nextChar == '{') {
level++;
lastChar = ANY_NORMAL_CHAR;
} else if (nextChar == '}') {
level--;
lastChar = ANY_NORMAL_CHAR;
} else if (nextChar == ',' && level == 0) {
result.append('\\');
lastChar = ANY_NORMAL_CHAR;
} else {
lastChar = nextChar;
}
result.append(nextChar);
}
} catch (IOException e) {
log.warn("Can't escape commata in input string: {}", arg, e);
return arg;
}
return result.toString();
}
|
String function(String arg) { int level = 0; StringBuilder result = new StringBuilder(arg.length()); try (Reader r = new StringReader(arg)) { int c; char lastChar = ANY_NORMAL_CHAR; while ((c = r.read()) != -1) { char nextChar = (char) c; if (lastChar == '\\') { lastChar = ANY_NORMAL_CHAR; } else if (lastChar == '$' && nextChar == '{') { level++; lastChar = ANY_NORMAL_CHAR; } else if (nextChar == '}') { level--; lastChar = ANY_NORMAL_CHAR; } else if (nextChar == ',' && level == 0) { result.append('\\'); lastChar = ANY_NORMAL_CHAR; } else { lastChar = nextChar; } result.append(nextChar); } } catch (IOException e) { log.warn(STR, arg, e); return arg; } return result.toString(); }
|
/**
* Escape commata that are in the argument but "outside" of variable replacement structures.
*
* @param arg string that should be escaped
* @return escaped string
*/
|
Escape commata that are in the argument but "outside" of variable replacement structures
|
escapeCommata
|
{
"repo_name": "apache/jmeter",
"path": "src/core/src/main/java/org/apache/jmeter/functions/gui/FunctionHelper.java",
"license": "apache-2.0",
"size": 14871
}
|
[
"java.io.IOException",
"java.io.Reader",
"java.io.StringReader"
] |
import java.io.IOException; import java.io.Reader; import java.io.StringReader;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 258,659
|
static @NonNull OutputResult generateOutput(@NonNull Map<String, ACI> registeredNumbers, @NonNull InputResult inputResult) {
Map<String, ACI> allNumbers = new HashMap<>(registeredNumbers);
Map<String, String> rewrites = new HashMap<>();
for (Map.Entry<String, String> entry : inputResult.getMapOfOriginalToVariant().entrySet()) {
String original = entry.getKey();
String variant = entry.getValue();
if (registeredNumbers.containsKey(original) && registeredNumbers.containsKey(variant)) {
for (FuzzyMatcher matcher: FUZZY_MATCHERS) {
if(matcher.matches(original)) {
if (matcher.isPreferredVariant(original)) {
allNumbers.remove(variant);
} else {
rewrites.put(original, variant);
allNumbers.remove(original);
}
}
}
} else if (registeredNumbers.containsKey(variant)) {
rewrites.put(original, variant);
allNumbers.remove(original);
}
}
return new OutputResult(allNumbers, rewrites);
}
|
static @NonNull OutputResult generateOutput(@NonNull Map<String, ACI> registeredNumbers, @NonNull InputResult inputResult) { Map<String, ACI> allNumbers = new HashMap<>(registeredNumbers); Map<String, String> rewrites = new HashMap<>(); for (Map.Entry<String, String> entry : inputResult.getMapOfOriginalToVariant().entrySet()) { String original = entry.getKey(); String variant = entry.getValue(); if (registeredNumbers.containsKey(original) && registeredNumbers.containsKey(variant)) { for (FuzzyMatcher matcher: FUZZY_MATCHERS) { if(matcher.matches(original)) { if (matcher.isPreferredVariant(original)) { allNumbers.remove(variant); } else { rewrites.put(original, variant); allNumbers.remove(original); } } } } else if (registeredNumbers.containsKey(variant)) { rewrites.put(original, variant); allNumbers.remove(original); } } return new OutputResult(allNumbers, rewrites); }
|
/**
* This should be run on the list of numbers we find out are registered with the server. Based on
* these results and our initial input set, we can decide if we need to rewrite which number we
* have stored locally.
*/
|
This should be run on the list of numbers we find out are registered with the server. Based on these results and our initial input set, we can decide if we need to rewrite which number we have stored locally
|
generateOutput
|
{
"repo_name": "AsamK/TextSecure",
"path": "app/src/main/java/org/thoughtcrime/securesms/contacts/sync/FuzzyPhoneNumberHelper.java",
"license": "gpl-3.0",
"size": 6387
}
|
[
"androidx.annotation.NonNull",
"java.util.HashMap",
"java.util.Map"
] |
import androidx.annotation.NonNull; import java.util.HashMap; import java.util.Map;
|
import androidx.annotation.*; import java.util.*;
|
[
"androidx.annotation",
"java.util"
] |
androidx.annotation; java.util;
| 1,811,151
|
private void initPropertiesPanel() {
// Create table and panel to hold it
propertiesTable = new TellervoPropertySheetTable();
propertiesPanel = new TellervoPropertySheetPanel(propertiesTable);
// Set various properties of the properties panel!
propertiesPanel.setRestoreToggleStates(true);
propertiesPanel.setToolBarVisible(false);
propertiesPanel.setDescriptionVisible(true);
propertiesPanel.setMode(PropertySheet.VIEW_AS_FLAT_LIST);
propertiesPanel.getTable().setRowHeight(24);
propertiesPanel.getTable().setRendererFactory(new TridasPropertyRendererFactory());
propertiesPanel.getTable().setEditorFactory(new TridasPropertyEditorFactory());
propertiesPanel.getTable().addPropertyChangeListener(this);
// Set up button bar
setupButtonBar();
}
|
void function() { propertiesTable = new TellervoPropertySheetTable(); propertiesPanel = new TellervoPropertySheetPanel(propertiesTable); propertiesPanel.setRestoreToggleStates(true); propertiesPanel.setToolBarVisible(false); propertiesPanel.setDescriptionVisible(true); propertiesPanel.setMode(PropertySheet.VIEW_AS_FLAT_LIST); propertiesPanel.getTable().setRowHeight(24); propertiesPanel.getTable().setRendererFactory(new TridasPropertyRendererFactory()); propertiesPanel.getTable().setEditorFactory(new TridasPropertyEditorFactory()); propertiesPanel.getTable().addPropertyChangeListener(this); setupButtonBar(); }
|
/**
* Set up the properties panel
*/
|
Set up the properties panel
|
initPropertiesPanel
|
{
"repo_name": "petebrew/tellervo",
"path": "src/main/java/org/tellervo/desktop/gui/dbbrowse/MetadataBrowser.java",
"license": "gpl-3.0",
"size": 20001
}
|
[
"com.l2fprod.common.propertysheet.PropertySheet",
"org.tellervo.desktop.tridasv2.ui.TellervoPropertySheetPanel",
"org.tellervo.desktop.tridasv2.ui.TellervoPropertySheetTable",
"org.tellervo.desktop.tridasv2.ui.TridasPropertyEditorFactory",
"org.tellervo.desktop.tridasv2.ui.TridasPropertyRendererFactory"
] |
import com.l2fprod.common.propertysheet.PropertySheet; import org.tellervo.desktop.tridasv2.ui.TellervoPropertySheetPanel; import org.tellervo.desktop.tridasv2.ui.TellervoPropertySheetTable; import org.tellervo.desktop.tridasv2.ui.TridasPropertyEditorFactory; import org.tellervo.desktop.tridasv2.ui.TridasPropertyRendererFactory;
|
import com.l2fprod.common.propertysheet.*; import org.tellervo.desktop.tridasv2.ui.*;
|
[
"com.l2fprod.common",
"org.tellervo.desktop"
] |
com.l2fprod.common; org.tellervo.desktop;
| 1,095,768
|
@Before
public void setup() {
reset(parkinglotLogRepository);
vehicle = new VehicleDataBuilder().withId(ID).withPlate(PLATE).withType(VEHICLE_TYPE).withCylinder(CYLINDER)
.build();
parkinglotLogEntity = new ParkinglotLogEntity(0L, vehicle.getPlate(), vehicle.getType(), LocalDateTime.now(),
null);
}
|
void function() { reset(parkinglotLogRepository); vehicle = new VehicleDataBuilder().withId(ID).withPlate(PLATE).withType(VEHICLE_TYPE).withCylinder(CYLINDER) .build(); parkinglotLogEntity = new ParkinglotLogEntity(0L, vehicle.getPlate(), vehicle.getType(), LocalDateTime.now(), null); }
|
/**
* Prepare data for each test.
*/
|
Prepare data for each test
|
setup
|
{
"repo_name": "cortizqgithub/csoftz-ceiba-java-learning",
"path": "app-code/server/parking-service/src/test/java/com/csoftz/ceiba/java/learn/parking/service/test/ParkinglotLogServiceTests.java",
"license": "apache-2.0",
"size": 4808
}
|
[
"com.csoftz.ceiba.java.learn.parking.service.entities.ParkinglotLogEntity",
"com.csoftz.ceiba.java.learn.parking.service.test.domain.data.builder.VehicleDataBuilder",
"java.time.LocalDateTime",
"org.mockito.Mockito"
] |
import com.csoftz.ceiba.java.learn.parking.service.entities.ParkinglotLogEntity; import com.csoftz.ceiba.java.learn.parking.service.test.domain.data.builder.VehicleDataBuilder; import java.time.LocalDateTime; import org.mockito.Mockito;
|
import com.csoftz.ceiba.java.learn.parking.service.entities.*; import com.csoftz.ceiba.java.learn.parking.service.test.domain.data.builder.*; import java.time.*; import org.mockito.*;
|
[
"com.csoftz.ceiba",
"java.time",
"org.mockito"
] |
com.csoftz.ceiba; java.time; org.mockito;
| 2,755,020
|
@Override
public void onItemClick(AdapterView<?> l, View v, int position, long id) {
File file = (File) mAdapter.getItem(position);
if (file != null) {
/// Click on a directory
if (file.isDirectory()) {
// just local updates
listFolder(file);
// notify the click to container Activity
mContainerActivity.onFolderClicked(file);
// save index and top position
saveIndexAndTopPosition(position);
} else { /// Click on a file
ImageView checkBoxV = (ImageView) v.findViewById(R.id.custom_checkbox);
if (checkBoxV != null) {
if (getListView().isItemChecked(position)) {
checkBoxV.setImageResource(R.drawable.ic_checkbox_marked);
} else {
checkBoxV.setImageResource(R.drawable.ic_checkbox_blank_outline);
}
}
// notify the change to the container Activity
mContainerActivity.onFileClicked(file);
}
} else {
Log_OC.w(TAG, "Null object in ListAdapter!!");
}
}
|
void function(AdapterView<?> l, View v, int position, long id) { File file = (File) mAdapter.getItem(position); if (file != null) { if (file.isDirectory()) { listFolder(file); mContainerActivity.onFolderClicked(file); saveIndexAndTopPosition(position); } else { ImageView checkBoxV = (ImageView) v.findViewById(R.id.custom_checkbox); if (checkBoxV != null) { if (getListView().isItemChecked(position)) { checkBoxV.setImageResource(R.drawable.ic_checkbox_marked); } else { checkBoxV.setImageResource(R.drawable.ic_checkbox_blank_outline); } } mContainerActivity.onFileClicked(file); } } else { Log_OC.w(TAG, STR); } }
|
/**
* Checks the file clicked over. Browses inside if it is a directory.
* Notifies the container activity in any case.
*/
|
Checks the file clicked over. Browses inside if it is a directory. Notifies the container activity in any case
|
onItemClick
|
{
"repo_name": "PauloSantos13/android",
"path": "src/com/owncloud/android/ui/fragment/LocalFileListFragment.java",
"license": "gpl-2.0",
"size": 9697
}
|
[
"android.view.View",
"android.widget.AdapterView",
"android.widget.ImageView",
"java.io.File"
] |
import android.view.View; import android.widget.AdapterView; import android.widget.ImageView; import java.io.File;
|
import android.view.*; import android.widget.*; import java.io.*;
|
[
"android.view",
"android.widget",
"java.io"
] |
android.view; android.widget; java.io;
| 1,916,109
|
public T setPageFooterStyle(ReportStyleBuilder style) {
if (style != null) {
getObject().getPageFooterBand().getList().setStyle(style.build());
}
else {
getObject().getPageFooterBand().getList().setStyle(null);
}
return (T) this;
}
|
T function(ReportStyleBuilder style) { if (style != null) { getObject().getPageFooterBand().getList().setStyle(style.build()); } else { getObject().getPageFooterBand().getList().setStyle(null); } return (T) this; }
|
/**
* Sets a page footer band style
*
* @param style the page footer band style
* @return a report builder
*/
|
Sets a page footer band style
|
setPageFooterStyle
|
{
"repo_name": "robcowell/dynamicreports",
"path": "dynamicreports-core/src/main/java/net/sf/dynamicreports/report/builder/ReportBuilder.java",
"license": "lgpl-3.0",
"size": 61004
}
|
[
"net.sf.dynamicreports.report.builder.style.ReportStyleBuilder"
] |
import net.sf.dynamicreports.report.builder.style.ReportStyleBuilder;
|
import net.sf.dynamicreports.report.builder.style.*;
|
[
"net.sf.dynamicreports"
] |
net.sf.dynamicreports;
| 2,876,399
|
@Message(id = 47, value = "Cannot restart server %s as it is not currently started; it is %s")
String cannotRestartServer(String serverName, ServerStatus status);
|
@Message(id = 47, value = STR) String cannotRestartServer(String serverName, ServerStatus status);
|
/**
* A message indicating the server, represented by the {@code serverName} parameter, cannot restart as it is not
* currently started.
*
* @param serverName the name of the server.
* @param status the status of the server.
*
* @return the message.
*/
|
A message indicating the server, represented by the serverName parameter, cannot restart as it is not currently started
|
cannotRestartServer
|
{
"repo_name": "aloubyansky/wildfly-core",
"path": "host-controller/src/main/java/org/jboss/as/host/controller/logging/HostControllerLogger.java",
"license": "lgpl-2.1",
"size": 65292
}
|
[
"org.jboss.as.controller.client.helpers.domain.ServerStatus",
"org.jboss.logging.annotations.Message"
] |
import org.jboss.as.controller.client.helpers.domain.ServerStatus; import org.jboss.logging.annotations.Message;
|
import org.jboss.as.controller.client.helpers.domain.*; import org.jboss.logging.annotations.*;
|
[
"org.jboss.as",
"org.jboss.logging"
] |
org.jboss.as; org.jboss.logging;
| 1,456,656
|
protected void configureShell(Shell newShell) {
super.configureShell(newShell);
if (newConfig) {
newShell.setText(TexlipsePlugin.getResourceString("preferenceViewerDialogAddTitle"));
} else {
newShell.setText(TexlipsePlugin.getResourceString("preferenceViewerDialogEditTitle"));
}
}
|
void function(Shell newShell) { super.configureShell(newShell); if (newConfig) { newShell.setText(TexlipsePlugin.getResourceString(STR)); } else { newShell.setText(TexlipsePlugin.getResourceString(STR)); } }
|
/**
* Set dialog title when the window is created.
*/
|
Set dialog title when the window is created
|
configureShell
|
{
"repo_name": "kolovos/texlipse",
"path": "net.sourceforge.texlipse/src/net/sourceforge/texlipse/viewer/ViewerConfigDialog.java",
"license": "epl-1.0",
"size": 23023
}
|
[
"net.sourceforge.texlipse.TexlipsePlugin",
"org.eclipse.swt.widgets.Shell"
] |
import net.sourceforge.texlipse.TexlipsePlugin; import org.eclipse.swt.widgets.Shell;
|
import net.sourceforge.texlipse.*; import org.eclipse.swt.widgets.*;
|
[
"net.sourceforge.texlipse",
"org.eclipse.swt"
] |
net.sourceforge.texlipse; org.eclipse.swt;
| 2,484,936
|
@Deprecated
public List<AdminEmailAttributes> getAllAdminEmails() {
List<AdminEmailAttributes> list = new LinkedList<AdminEmailAttributes>();
List<AdminEmail> entities = getAdminEmailEntities();
Iterator<AdminEmail> it = entities.iterator();
while (it.hasNext()) {
list.add(new AdminEmailAttributes(it.next()));
}
return list;
}
|
List<AdminEmailAttributes> function() { List<AdminEmailAttributes> list = new LinkedList<AdminEmailAttributes>(); List<AdminEmail> entities = getAdminEmailEntities(); Iterator<AdminEmail> it = entities.iterator(); while (it.hasNext()) { list.add(new AdminEmailAttributes(it.next())); } return list; }
|
/**
* This method is not scalable. Not to be used unless for admin features.
* @return the list of all adminEmails in the database.
*/
|
This method is not scalable. Not to be used unless for admin features
|
getAllAdminEmails
|
{
"repo_name": "belyabl9/teammates",
"path": "src/main/java/teammates/storage/api/AdminEmailsDb.java",
"license": "gpl-2.0",
"size": 11214
}
|
[
"java.util.Iterator",
"java.util.LinkedList",
"java.util.List"
] |
import java.util.Iterator; import java.util.LinkedList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,750,442
|
public Read<T> withCoder(Coder<T> coder) {
checkArgument(coder != null, "CassandraIO.read().withCoder(coder) called with null coder");
return builder().setCoder(coder).build();
}
|
Read<T> function(Coder<T> coder) { checkArgument(coder != null, STR); return builder().setCoder(coder).build(); }
|
/**
* Specify the {@link Coder} used to serialize the entity in the {@link PCollection}.
*/
|
Specify the <code>Coder</code> used to serialize the entity in the <code>PCollection</code>
|
withCoder
|
{
"repo_name": "eljefe6a/incubator-beam",
"path": "sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/CassandraIO.java",
"license": "apache-2.0",
"size": 18941
}
|
[
"com.google.common.base.Preconditions",
"org.apache.beam.sdk.coders.Coder"
] |
import com.google.common.base.Preconditions; import org.apache.beam.sdk.coders.Coder;
|
import com.google.common.base.*; import org.apache.beam.sdk.coders.*;
|
[
"com.google.common",
"org.apache.beam"
] |
com.google.common; org.apache.beam;
| 218,498
|
@Nonnull
IJSExpression eq (@Nonnull String sValue);
|
IJSExpression eq (@Nonnull String sValue);
|
/**
* Equals '=='.
*
* @param sValue
* value
* @return [this] == value
*/
|
Equals '=='
|
eq
|
{
"repo_name": "phax/ph-oton",
"path": "ph-oton-jscode/src/main/java/com/helger/html/jscode/IJSExpression.java",
"license": "apache-2.0",
"size": 26099
}
|
[
"javax.annotation.Nonnull"
] |
import javax.annotation.Nonnull;
|
import javax.annotation.*;
|
[
"javax.annotation"
] |
javax.annotation;
| 641,180
|
private void initDimensionChunkIndexes() {
for (int i = 0; i < dimColEvaluatorInfoList.size(); i++) {
// find the dimension in the current block dimensions list
CarbonDimension dimensionFromCurrentBlock = segmentProperties
.getDimensionFromCurrentBlock(dimColEvaluatorInfoList.get(i).getDimension());
if (null != dimensionFromCurrentBlock) {
dimColEvaluatorInfoList.get(i).setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
this.dimensionChunkIndex[i] =
dimColEvaluatorInfoList.get(i).getColumnIndexInMinMaxByteArray();
isDimensionPresentInCurrentBlock[i] = true;
}
}
}
|
void function() { for (int i = 0; i < dimColEvaluatorInfoList.size(); i++) { CarbonDimension dimensionFromCurrentBlock = segmentProperties .getDimensionFromCurrentBlock(dimColEvaluatorInfoList.get(i).getDimension()); if (null != dimensionFromCurrentBlock) { dimColEvaluatorInfoList.get(i).setColumnIndex(dimensionFromCurrentBlock.getOrdinal()); this.dimensionChunkIndex[i] = dimColEvaluatorInfoList.get(i).getColumnIndexInMinMaxByteArray(); isDimensionPresentInCurrentBlock[i] = true; } } }
|
/**
* This method will initialize the dimension info for the current block to be
* used for filtering the data
*/
|
This method will initialize the dimension info for the current block to be used for filtering the data
|
initDimensionChunkIndexes
|
{
"repo_name": "jatin9896/incubator-carbondata",
"path": "core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java",
"license": "apache-2.0",
"size": 28111
}
|
[
"org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension"
] |
import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
|
import org.apache.carbondata.core.metadata.schema.table.column.*;
|
[
"org.apache.carbondata"
] |
org.apache.carbondata;
| 676,271
|
static List<Contingency> getValidContingencies(List<Contingency> contingencies, Network network) {
Objects.requireNonNull(contingencies);
Objects.requireNonNull(network);
return contingencies.stream()
.filter(c -> c.isValid(network))
.collect(Collectors.toList());
}
|
static List<Contingency> getValidContingencies(List<Contingency> contingencies, Network network) { Objects.requireNonNull(contingencies); Objects.requireNonNull(network); return contingencies.stream() .filter(c -> c.isValid(network)) .collect(Collectors.toList()); }
|
/**
* Return only valid contingencies based on given list of contingencies and network
*/
|
Return only valid contingencies based on given list of contingencies and network
|
getValidContingencies
|
{
"repo_name": "powsybl/powsybl-core",
"path": "contingency/contingency-api/src/main/java/com/powsybl/contingency/ContingencyList.java",
"license": "mpl-2.0",
"size": 2689
}
|
[
"com.powsybl.iidm.network.Network",
"java.util.List",
"java.util.Objects",
"java.util.stream.Collectors"
] |
import com.powsybl.iidm.network.Network; import java.util.List; import java.util.Objects; import java.util.stream.Collectors;
|
import com.powsybl.iidm.network.*; import java.util.*; import java.util.stream.*;
|
[
"com.powsybl.iidm",
"java.util"
] |
com.powsybl.iidm; java.util;
| 2,783,097
|
interface WithPolicyType {
WithCreate withPolicyType(PolicyType policyType);
}
interface WithCreate extends Creatable<PolicySetDefinition>, DefinitionStages.WithDescription, DefinitionStages.WithDisplayName, DefinitionStages.WithMetadata, DefinitionStages.WithParameters, DefinitionStages.WithPolicyType {
}
}
interface Update extends Appliable<PolicySetDefinition>, UpdateStages.WithDescription, UpdateStages.WithDisplayName, UpdateStages.WithMetadata, UpdateStages.WithParameters, UpdateStages.WithPolicyType {
}
|
interface WithPolicyType { WithCreate withPolicyType(PolicyType policyType); } interface WithCreate extends Creatable<PolicySetDefinition>, DefinitionStages.WithDescription, DefinitionStages.WithDisplayName, DefinitionStages.WithMetadata, DefinitionStages.WithParameters, DefinitionStages.WithPolicyType { } } interface Update extends Appliable<PolicySetDefinition>, UpdateStages.WithDescription, UpdateStages.WithDisplayName, UpdateStages.WithMetadata, UpdateStages.WithParameters, UpdateStages.WithPolicyType { }
|
/**
* Specifies policyType.
* @param policyType The type of policy definition. Possible values are NotSpecified, BuiltIn, and Custom. Possible values include: 'NotSpecified', 'BuiltIn', 'Custom'
* @return the next definition stage
*/
|
Specifies policyType
|
withPolicyType
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/policy/mgmt-v2019_06_01/src/main/java/com/microsoft/azure/management/policy/v2019_06_01/PolicySetDefinition.java",
"license": "mit",
"size": 8068
}
|
[
"com.microsoft.azure.arm.model.Appliable",
"com.microsoft.azure.arm.model.Creatable"
] |
import com.microsoft.azure.arm.model.Appliable; import com.microsoft.azure.arm.model.Creatable;
|
import com.microsoft.azure.arm.model.*;
|
[
"com.microsoft.azure"
] |
com.microsoft.azure;
| 1,461,517
|
public Set<Entry<K, Collection<V>>> entrySet() {
return mMap.entrySet();
}
|
Set<Entry<K, Collection<V>>> function() { return mMap.entrySet(); }
|
/**
* Invokes <code>entrySet</code> on backing Map.
* @return the underlying entry set.
*/
|
Invokes <code>entrySet</code> on backing Map
|
entrySet
|
{
"repo_name": "RealTimeGenomics/rtg-tools",
"path": "src/com/rtg/util/MultiMap.java",
"license": "bsd-2-clause",
"size": 6819
}
|
[
"java.util.Collection",
"java.util.Map",
"java.util.Set"
] |
import java.util.Collection; import java.util.Map; import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,351,931
|
public void getPropertiesFromInputStream(InputStream in) {
BufferedReader br = new BufferedReader(new InputStreamReader(in));
String strLine;
//Read File Line By Line
try {
while ((strLine = br.readLine()) != null) {
if (strLine.contains(":")) {
String property = strLine.substring(0, strLine.indexOf(":"));
String value = strLine.substring(strLine.indexOf(":") + 1);
put(property, value);
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
|
void function(InputStream in) { BufferedReader br = new BufferedReader(new InputStreamReader(in)); String strLine; try { while ((strLine = br.readLine()) != null) { if (strLine.contains(":")) { String property = strLine.substring(0, strLine.indexOf(":")); String value = strLine.substring(strLine.indexOf(":") + 1); put(property, value); } } } catch (Exception ex) { ex.printStackTrace(); } }
|
/**
* Read bot properties from an input stream.
*
* @param in Input stream
*/
|
Read bot properties from an input stream
|
getPropertiesFromInputStream
|
{
"repo_name": "emixgg/sofia-java",
"path": "src/main/java/com/somospnt/sofiabot/ab/Properties.java",
"license": "lgpl-3.0",
"size": 3395
}
|
[
"java.io.BufferedReader",
"java.io.InputStream",
"java.io.InputStreamReader"
] |
import java.io.BufferedReader; import java.io.InputStream; import java.io.InputStreamReader;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,887,782
|
AiModule defaultAiFor(EntityType type);
|
AiModule defaultAiFor(EntityType type);
|
/**
* Returns the AI Module that is the default for a certain type of entity
*/
|
Returns the AI Module that is the default for a certain type of entity
|
defaultAiFor
|
{
"repo_name": "orangelynx/TridentSDK",
"path": "src/main/java/net/tridentsdk/entity/living/ai/AiHandler.java",
"license": "apache-2.0",
"size": 1703
}
|
[
"net.tridentsdk.entity.types.EntityType"
] |
import net.tridentsdk.entity.types.EntityType;
|
import net.tridentsdk.entity.types.*;
|
[
"net.tridentsdk.entity"
] |
net.tridentsdk.entity;
| 1,066,809
|
EDataType getID();
|
EDataType getID();
|
/**
* Returns the meta object for data type '{@link java.lang.String <em>ID</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for data type '<em>ID</em>'.
* @see java.lang.String
* @model instanceClass="java.lang.String"
* extendedMetaData="name='ID' baseType='NCName'"
* @generated
*/
|
Returns the meta object for data type '<code>java.lang.String ID</code>'.
|
getID
|
{
"repo_name": "LangleyStudios/eclipse-avro",
"path": "test/org.eclipse.emf.ecore/src/org/eclipse/emf/ecore/xml/type/XMLTypePackage.java",
"license": "epl-1.0",
"size": 81687
}
|
[
"org.eclipse.emf.ecore.EDataType"
] |
import org.eclipse.emf.ecore.EDataType;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 458,522
|
public void setLegalDetailsLocalService(
LegalDetailsLocalService legalDetailsLocalService) {
this.legalDetailsLocalService = legalDetailsLocalService;
}
|
void function( LegalDetailsLocalService legalDetailsLocalService) { this.legalDetailsLocalService = legalDetailsLocalService; }
|
/**
* Sets the legal details local service.
*
* @param legalDetailsLocalService the legal details local service
*/
|
Sets the legal details local service
|
setLegalDetailsLocalService
|
{
"repo_name": "fraunhoferfokus/govapps",
"path": "data-portlet/src/main/java/de/fraunhofer/fokus/movepla/service/base/MultiMediaServiceBaseImpl.java",
"license": "bsd-3-clause",
"size": 32769
}
|
[
"de.fraunhofer.fokus.movepla.service.LegalDetailsLocalService"
] |
import de.fraunhofer.fokus.movepla.service.LegalDetailsLocalService;
|
import de.fraunhofer.fokus.movepla.service.*;
|
[
"de.fraunhofer.fokus"
] |
de.fraunhofer.fokus;
| 119,625
|
@Override public void setModel(Model model)
{
super.setModel(model);
metric_.setModel(model);
}
|
@Override void function(Model model) { super.setModel(model); metric_.setModel(model); }
|
/** This tells the underlying metric about the model,
* even though the model will not be sending events directly
* to that metric (the events come through this wrapper).
*/
|
This tells the underlying metric about the model, even though the model will not be sending events directly to that metric (the events come through this wrapper)
|
setModel
|
{
"repo_name": "patrickfav/tuwien",
"path": "master/swt workspace/ModelJUnit 2.0 beta1/modeljunit/src/main/java/nz/ac/waikato/modeljunit/coverage/CoverageHistory.java",
"license": "apache-2.0",
"size": 6042
}
|
[
"nz.ac.waikato.modeljunit.Model"
] |
import nz.ac.waikato.modeljunit.Model;
|
import nz.ac.waikato.modeljunit.*;
|
[
"nz.ac.waikato"
] |
nz.ac.waikato;
| 1,040,340
|
@Test
public void TestEvictTopologyFromItself() {
INimbus iNimbus = new TestUtilsForResourceAwareScheduler.INimbusTest();
Map<String, Number> resourceMap = new HashMap<String, Number>();
resourceMap.put(Config.SUPERVISOR_CPU_CAPACITY, 100.0);
resourceMap.put(Config.SUPERVISOR_MEMORY_CAPACITY_MB, 1000.0);
Map<String, SupervisorDetails> supMap = TestUtilsForResourceAwareScheduler.genSupervisors(4, 4, resourceMap);
Config config = new Config();
config.putAll(Utils.readDefaultConfig());
config.put(DaemonConfig.RESOURCE_AWARE_SCHEDULER_EVICTION_STRATEGY, org.apache.storm.scheduler.resource.strategies.eviction.DefaultEvictionStrategy.class.getName());
config.put(DaemonConfig.RESOURCE_AWARE_SCHEDULER_PRIORITY_STRATEGY, org.apache.storm.scheduler.resource.strategies.priority.DefaultSchedulingPriorityStrategy.class.getName());
config.put(Config.TOPOLOGY_SCHEDULER_STRATEGY, org.apache.storm.scheduler.resource.strategies.scheduling.DefaultResourceAwareStrategy.class.getName());
config.put(Config.TOPOLOGY_COMPONENT_CPU_PCORE_PERCENT, 100.0);
config.put(Config.TOPOLOGY_COMPONENT_RESOURCES_OFFHEAP_MEMORY_MB, 500);
config.put(Config.TOPOLOGY_COMPONENT_RESOURCES_ONHEAP_MEMORY_MB, 500);
Map<String, Map<String, Number>> resourceUserPool = new HashMap<String, Map<String, Number>>();
resourceUserPool.put("jerry", new HashMap<String, Number>());
resourceUserPool.get("jerry").put("cpu", 200.0);
resourceUserPool.get("jerry").put("memory", 2000.0);
resourceUserPool.put("bobby", new HashMap<String, Number>());
resourceUserPool.get("bobby").put("cpu", 100.0);
resourceUserPool.get("bobby").put("memory", 1000.0);
resourceUserPool.put("derek", new HashMap<String, Number>());
resourceUserPool.get("derek").put("cpu", 100.0);
resourceUserPool.get("derek").put("memory", 1000.0);
config.put(DaemonConfig.RESOURCE_AWARE_SCHEDULER_USER_POOLS, resourceUserPool);
Cluster cluster = new Cluster(iNimbus, supMap, new HashMap<String, SchedulerAssignmentImpl>(), config);
config.put(Config.TOPOLOGY_SUBMITTER_USER, "jerry");
TopologyDetails topo1 = TestUtilsForResourceAwareScheduler.getTopology("topo-1", config, 1, 0, 1, 0, currentTime - 2, 20);
TopologyDetails topo2 = TestUtilsForResourceAwareScheduler.getTopology("topo-2", config, 1, 0, 1, 0, currentTime - 2, 20);
TopologyDetails topo3 = TestUtilsForResourceAwareScheduler.getTopology("topo-3", config, 1, 0, 1, 0, currentTime - 2, 29);
TopologyDetails topo4 = TestUtilsForResourceAwareScheduler.getTopology("topo-4", config, 1, 0, 1, 0, currentTime - 2, 10);
config.put(Config.TOPOLOGY_SUBMITTER_USER, "bobby");
TopologyDetails topo5 = TestUtilsForResourceAwareScheduler.getTopology("topo-5", config, 1, 0, 1, 0, currentTime - 2, 10);
config.put(Config.TOPOLOGY_SUBMITTER_USER, "derek");
TopologyDetails topo6 = TestUtilsForResourceAwareScheduler.getTopology("topo-6", config, 1, 0, 1, 0, currentTime - 2, 29);
Map<String, TopologyDetails> topoMap = new HashMap<String, TopologyDetails>();
topoMap.put(topo1.getId(), topo1);
topoMap.put(topo2.getId(), topo2);
topoMap.put(topo5.getId(), topo5);
topoMap.put(topo6.getId(), topo6);
Topologies topologies = new Topologies(topoMap);
ResourceAwareScheduler rs = new ResourceAwareScheduler();
rs.prepare(config);
rs.schedule(topologies, cluster);
for (TopologyDetails topo : rs.getUser("jerry").getTopologiesRunning()) {
Assert.assertTrue("Assert scheduling topology success", TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId())));
}
Assert.assertEquals("# of running topologies", 2, rs.getUser("jerry").getTopologiesRunning().size());
Assert.assertEquals("# of pending topologies", 0, rs.getUser("jerry").getTopologiesPending().size());
Assert.assertEquals("# of attempted topologies", 0, rs.getUser("jerry").getTopologiesAttempted().size());
Assert.assertEquals("# of invalid topologies", 0, rs.getUser("jerry").getTopologiesInvalid().size());
for (TopologyDetails topo : rs.getUser("derek").getTopologiesRunning()) {
Assert.assertTrue("Assert scheduling topology success", TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId())));
}
Assert.assertEquals("# of running topologies", 1, rs.getUser("derek").getTopologiesRunning().size());
Assert.assertEquals("# of pending topologies", 0, rs.getUser("derek").getTopologiesPending().size());
Assert.assertEquals("# of attempted topologies", 0, rs.getUser("derek").getTopologiesAttempted().size());
Assert.assertEquals("# of invalid topologies", 0, rs.getUser("derek").getTopologiesInvalid().size());
for (TopologyDetails topo : rs.getUser("bobby").getTopologiesRunning()) {
Assert.assertTrue("Assert scheduling topology success", TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId())));
}
Assert.assertEquals("# of running topologies", 1, rs.getUser("bobby").getTopologiesRunning().size());
Assert.assertEquals("# of pending topologies", 0, rs.getUser("bobby").getTopologiesPending().size());
Assert.assertEquals("# of invalid topologies", 0, rs.getUser("bobby").getTopologiesInvalid().size());
Assert.assertEquals("# of attempted topologies", 0, rs.getUser("bobby").getTopologiesAttempted().size());
//user jerry submits another topology into a full cluster
// topo3 should not be able to scheduled
topoMap.put(topo3.getId(), topo3);
topologies = new Topologies(topoMap);
rs.schedule(topologies, cluster);
for (TopologyDetails topo : rs.getUser("jerry").getTopologiesRunning()) {
Assert.assertTrue("Assert scheduling topology success", TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId())));
}
Assert.assertEquals("# of running topologies", 2, rs.getUser("jerry").getTopologiesRunning().size());
Assert.assertEquals("# of pending topologies", 0, rs.getUser("jerry").getTopologiesPending().size());
Assert.assertEquals("# of attempted topologies", 1, rs.getUser("jerry").getTopologiesAttempted().size());
Assert.assertEquals("# of invalid topologies", 0, rs.getUser("jerry").getTopologiesInvalid().size());
//make sure that topo-3 didn't get scheduled.
Assert.assertEquals("correct topology in attempted queue", rs.getUser("jerry").getTopologiesAttempted().iterator().next().getName(), "topo-3");
for (TopologyDetails topo : rs.getUser("derek").getTopologiesRunning()) {
Assert.assertTrue("Assert scheduling topology success", TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId())));
}
Assert.assertEquals("# of running topologies", 1, rs.getUser("derek").getTopologiesRunning().size());
Assert.assertEquals("# of pending topologies", 0, rs.getUser("derek").getTopologiesPending().size());
Assert.assertEquals("# of attempted topologies", 0, rs.getUser("derek").getTopologiesAttempted().size());
Assert.assertEquals("# of invalid topologies", 0, rs.getUser("derek").getTopologiesInvalid().size());
for (TopologyDetails topo : rs.getUser("bobby").getTopologiesRunning()) {
Assert.assertTrue("Assert scheduling topology success", TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId())));
}
Assert.assertEquals("# of running topologies", 1, rs.getUser("bobby").getTopologiesRunning().size());
Assert.assertEquals("# of pending topologies", 0, rs.getUser("bobby").getTopologiesPending().size());
Assert.assertEquals("# of attempted topologies", 0, rs.getUser("bobby").getTopologiesAttempted().size());
Assert.assertEquals("# of invalid topologies", 0, rs.getUser("bobby").getTopologiesInvalid().size());
//user jerry submits another topology but this one should be scheduled since it has higher priority than than the
//rest of jerry's running topologies
topoMap.put(topo4.getId(), topo4);
topologies = new Topologies(topoMap);
rs.schedule(topologies, cluster);
for (TopologyDetails topo : rs.getUser("jerry").getTopologiesRunning()) {
Assert.assertTrue("Assert scheduling topology success", TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId())));
}
Assert.assertEquals("# of running topologies", 2, rs.getUser("jerry").getTopologiesRunning().size());
Assert.assertEquals("# of pending topologies", 0, rs.getUser("jerry").getTopologiesPending().size());
Assert.assertEquals("# of attempted topologies", 2, rs.getUser("jerry").getTopologiesAttempted().size());
Assert.assertEquals("# of invalid topologies", 0, rs.getUser("jerry").getTopologiesInvalid().size());
Assert.assertTrue("correct topology in attempted queue", TestUtilsForResourceAwareScheduler.findTopologyInSetFromName("topo-3", rs.getUser("jerry").getTopologiesAttempted()) != null);
//Either topo-1 or topo-2 should have gotten evicted
Assert.assertTrue("correct topology in attempted queue", ((TestUtilsForResourceAwareScheduler.findTopologyInSetFromName("topo-1", rs.getUser("jerry").getTopologiesAttempted())) != null)
|| (TestUtilsForResourceAwareScheduler.findTopologyInSetFromName("topo-2", rs.getUser("jerry").getTopologiesAttempted()) != null));
//assert that topo-4 got scheduled
Assert.assertTrue("correct topology in running queue", TestUtilsForResourceAwareScheduler.findTopologyInSetFromName("topo-4", rs.getUser("jerry").getTopologiesRunning()) != null);
for (TopologyDetails topo : rs.getUser("derek").getTopologiesRunning()) {
Assert.assertTrue("Assert scheduling topology success", TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId())));
}
Assert.assertEquals("# of running topologies", 1, rs.getUser("derek").getTopologiesRunning().size());
Assert.assertEquals("# of pending topologies", 0, rs.getUser("derek").getTopologiesPending().size());
Assert.assertEquals("# of attempted topologies", 0, rs.getUser("derek").getTopologiesAttempted().size());
Assert.assertEquals("# of invalid topologies", 0, rs.getUser("derek").getTopologiesInvalid().size());
for (TopologyDetails topo : rs.getUser("bobby").getTopologiesRunning()) {
Assert.assertTrue("Assert scheduling topology success", TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId())));
}
Assert.assertEquals("# of running topologies", 1, rs.getUser("bobby").getTopologiesRunning().size());
Assert.assertEquals("# of pending topologies", 0, rs.getUser("bobby").getTopologiesPending().size());
Assert.assertEquals("# of attempted topologies", 0, rs.getUser("bobby").getTopologiesAttempted().size());
Assert.assertEquals("# of invalid topologies", 0, rs.getUser("bobby").getTopologiesInvalid().size());
}
|
void function() { INimbus iNimbus = new TestUtilsForResourceAwareScheduler.INimbusTest(); Map<String, Number> resourceMap = new HashMap<String, Number>(); resourceMap.put(Config.SUPERVISOR_CPU_CAPACITY, 100.0); resourceMap.put(Config.SUPERVISOR_MEMORY_CAPACITY_MB, 1000.0); Map<String, SupervisorDetails> supMap = TestUtilsForResourceAwareScheduler.genSupervisors(4, 4, resourceMap); Config config = new Config(); config.putAll(Utils.readDefaultConfig()); config.put(DaemonConfig.RESOURCE_AWARE_SCHEDULER_EVICTION_STRATEGY, org.apache.storm.scheduler.resource.strategies.eviction.DefaultEvictionStrategy.class.getName()); config.put(DaemonConfig.RESOURCE_AWARE_SCHEDULER_PRIORITY_STRATEGY, org.apache.storm.scheduler.resource.strategies.priority.DefaultSchedulingPriorityStrategy.class.getName()); config.put(Config.TOPOLOGY_SCHEDULER_STRATEGY, org.apache.storm.scheduler.resource.strategies.scheduling.DefaultResourceAwareStrategy.class.getName()); config.put(Config.TOPOLOGY_COMPONENT_CPU_PCORE_PERCENT, 100.0); config.put(Config.TOPOLOGY_COMPONENT_RESOURCES_OFFHEAP_MEMORY_MB, 500); config.put(Config.TOPOLOGY_COMPONENT_RESOURCES_ONHEAP_MEMORY_MB, 500); Map<String, Map<String, Number>> resourceUserPool = new HashMap<String, Map<String, Number>>(); resourceUserPool.put("jerry", new HashMap<String, Number>()); resourceUserPool.get("jerry").put("cpu", 200.0); resourceUserPool.get("jerry").put(STR, 2000.0); resourceUserPool.put("bobby", new HashMap<String, Number>()); resourceUserPool.get("bobby").put("cpu", 100.0); resourceUserPool.get("bobby").put(STR, 1000.0); resourceUserPool.put("derek", new HashMap<String, Number>()); resourceUserPool.get("derek").put("cpu", 100.0); resourceUserPool.get("derek").put(STR, 1000.0); config.put(DaemonConfig.RESOURCE_AWARE_SCHEDULER_USER_POOLS, resourceUserPool); Cluster cluster = new Cluster(iNimbus, supMap, new HashMap<String, SchedulerAssignmentImpl>(), config); config.put(Config.TOPOLOGY_SUBMITTER_USER, "jerry"); TopologyDetails topo1 = 
TestUtilsForResourceAwareScheduler.getTopology(STR, config, 1, 0, 1, 0, currentTime - 2, 20); TopologyDetails topo2 = TestUtilsForResourceAwareScheduler.getTopology(STR, config, 1, 0, 1, 0, currentTime - 2, 20); TopologyDetails topo3 = TestUtilsForResourceAwareScheduler.getTopology(STR, config, 1, 0, 1, 0, currentTime - 2, 29); TopologyDetails topo4 = TestUtilsForResourceAwareScheduler.getTopology(STR, config, 1, 0, 1, 0, currentTime - 2, 10); config.put(Config.TOPOLOGY_SUBMITTER_USER, "bobby"); TopologyDetails topo5 = TestUtilsForResourceAwareScheduler.getTopology(STR, config, 1, 0, 1, 0, currentTime - 2, 10); config.put(Config.TOPOLOGY_SUBMITTER_USER, "derek"); TopologyDetails topo6 = TestUtilsForResourceAwareScheduler.getTopology(STR, config, 1, 0, 1, 0, currentTime - 2, 29); Map<String, TopologyDetails> topoMap = new HashMap<String, TopologyDetails>(); topoMap.put(topo1.getId(), topo1); topoMap.put(topo2.getId(), topo2); topoMap.put(topo5.getId(), topo5); topoMap.put(topo6.getId(), topo6); Topologies topologies = new Topologies(topoMap); ResourceAwareScheduler rs = new ResourceAwareScheduler(); rs.prepare(config); rs.schedule(topologies, cluster); for (TopologyDetails topo : rs.getUser("jerry").getTopologiesRunning()) { Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId()))); } Assert.assertEquals(STR, 2, rs.getUser("jerry").getTopologiesRunning().size()); Assert.assertEquals(STR, 0, rs.getUser("jerry").getTopologiesPending().size()); Assert.assertEquals(STR, 0, rs.getUser("jerry").getTopologiesAttempted().size()); Assert.assertEquals(STR, 0, rs.getUser("jerry").getTopologiesInvalid().size()); for (TopologyDetails topo : rs.getUser("derek").getTopologiesRunning()) { Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId()))); } Assert.assertEquals(STR, 1, rs.getUser("derek").getTopologiesRunning().size()); Assert.assertEquals(STR, 0, 
rs.getUser("derek").getTopologiesPending().size()); Assert.assertEquals(STR, 0, rs.getUser("derek").getTopologiesAttempted().size()); Assert.assertEquals(STR, 0, rs.getUser("derek").getTopologiesInvalid().size()); for (TopologyDetails topo : rs.getUser("bobby").getTopologiesRunning()) { Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId()))); } Assert.assertEquals(STR, 1, rs.getUser("bobby").getTopologiesRunning().size()); Assert.assertEquals(STR, 0, rs.getUser("bobby").getTopologiesPending().size()); Assert.assertEquals(STR, 0, rs.getUser("bobby").getTopologiesInvalid().size()); Assert.assertEquals(STR, 0, rs.getUser("bobby").getTopologiesAttempted().size()); topoMap.put(topo3.getId(), topo3); topologies = new Topologies(topoMap); rs.schedule(topologies, cluster); for (TopologyDetails topo : rs.getUser("jerry").getTopologiesRunning()) { Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId()))); } Assert.assertEquals(STR, 2, rs.getUser("jerry").getTopologiesRunning().size()); Assert.assertEquals(STR, 0, rs.getUser("jerry").getTopologiesPending().size()); Assert.assertEquals(STR, 1, rs.getUser("jerry").getTopologiesAttempted().size()); Assert.assertEquals(STR, 0, rs.getUser("jerry").getTopologiesInvalid().size()); Assert.assertEquals(STR, rs.getUser("jerry").getTopologiesAttempted().iterator().next().getName(), STR); for (TopologyDetails topo : rs.getUser("derek").getTopologiesRunning()) { Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId()))); } Assert.assertEquals(STR, 1, rs.getUser("derek").getTopologiesRunning().size()); Assert.assertEquals(STR, 0, rs.getUser("derek").getTopologiesPending().size()); Assert.assertEquals(STR, 0, rs.getUser("derek").getTopologiesAttempted().size()); Assert.assertEquals(STR, 0, rs.getUser("derek").getTopologiesInvalid().size()); for 
(TopologyDetails topo : rs.getUser("bobby").getTopologiesRunning()) { Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId()))); } Assert.assertEquals(STR, 1, rs.getUser("bobby").getTopologiesRunning().size()); Assert.assertEquals(STR, 0, rs.getUser("bobby").getTopologiesPending().size()); Assert.assertEquals(STR, 0, rs.getUser("bobby").getTopologiesAttempted().size()); Assert.assertEquals(STR, 0, rs.getUser("bobby").getTopologiesInvalid().size()); topoMap.put(topo4.getId(), topo4); topologies = new Topologies(topoMap); rs.schedule(topologies, cluster); for (TopologyDetails topo : rs.getUser("jerry").getTopologiesRunning()) { Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId()))); } Assert.assertEquals(STR, 2, rs.getUser("jerry").getTopologiesRunning().size()); Assert.assertEquals(STR, 0, rs.getUser("jerry").getTopologiesPending().size()); Assert.assertEquals(STR, 2, rs.getUser("jerry").getTopologiesAttempted().size()); Assert.assertEquals(STR, 0, rs.getUser("jerry").getTopologiesInvalid().size()); Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.findTopologyInSetFromName(STR, rs.getUser("jerry").getTopologiesAttempted()) != null); Assert.assertTrue(STR, ((TestUtilsForResourceAwareScheduler.findTopologyInSetFromName(STR, rs.getUser("jerry").getTopologiesAttempted())) != null) (TestUtilsForResourceAwareScheduler.findTopologyInSetFromName(STR, rs.getUser("jerry").getTopologiesAttempted()) != null)); Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.findTopologyInSetFromName(STR, rs.getUser("jerry").getTopologiesRunning()) != null); for (TopologyDetails topo : rs.getUser("derek").getTopologiesRunning()) { Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId()))); } Assert.assertEquals(STR, 1, rs.getUser("derek").getTopologiesRunning().size()); 
Assert.assertEquals(STR, 0, rs.getUser("derek").getTopologiesPending().size()); Assert.assertEquals(STR, 0, rs.getUser("derek").getTopologiesAttempted().size()); Assert.assertEquals(STR, 0, rs.getUser("derek").getTopologiesInvalid().size()); for (TopologyDetails topo : rs.getUser("bobby").getTopologiesRunning()) { Assert.assertTrue(STR, TestUtilsForResourceAwareScheduler.assertStatusSuccess(cluster.getStatusMap().get(topo.getId()))); } Assert.assertEquals(STR, 1, rs.getUser("bobby").getTopologiesRunning().size()); Assert.assertEquals(STR, 0, rs.getUser("bobby").getTopologiesPending().size()); Assert.assertEquals(STR, 0, rs.getUser("bobby").getTopologiesAttempted().size()); Assert.assertEquals(STR, 0, rs.getUser("bobby").getTopologiesInvalid().size()); }
|
/**
* If topologies from other users cannot be evicted to make space
* check if there is a topology with lower priority that can be evicted from the current user
*/
|
If topologies from other users cannot be evicted to make space check if there is a topology with lower priority that can be evicted from the current user
|
TestEvictTopologyFromItself
|
{
"repo_name": "adityasharad/storm",
"path": "storm-server/src/test/java/org/apache/storm/scheduler/resource/strategies/eviction/TestDefaultEvictionStrategy.java",
"license": "apache-2.0",
"size": 53544
}
|
[
"java.util.HashMap",
"java.util.Map",
"org.apache.storm.Config",
"org.apache.storm.DaemonConfig",
"org.apache.storm.scheduler.Cluster",
"org.apache.storm.scheduler.INimbus",
"org.apache.storm.scheduler.SchedulerAssignmentImpl",
"org.apache.storm.scheduler.SupervisorDetails",
"org.apache.storm.scheduler.Topologies",
"org.apache.storm.scheduler.TopologyDetails",
"org.apache.storm.scheduler.resource.ResourceAwareScheduler",
"org.apache.storm.scheduler.resource.TestUtilsForResourceAwareScheduler",
"org.apache.storm.utils.Utils",
"org.junit.Assert"
] |
import java.util.HashMap; import java.util.Map; import org.apache.storm.Config; import org.apache.storm.DaemonConfig; import org.apache.storm.scheduler.Cluster; import org.apache.storm.scheduler.INimbus; import org.apache.storm.scheduler.SchedulerAssignmentImpl; import org.apache.storm.scheduler.SupervisorDetails; import org.apache.storm.scheduler.Topologies; import org.apache.storm.scheduler.TopologyDetails; import org.apache.storm.scheduler.resource.ResourceAwareScheduler; import org.apache.storm.scheduler.resource.TestUtilsForResourceAwareScheduler; import org.apache.storm.utils.Utils; import org.junit.Assert;
|
import java.util.*; import org.apache.storm.*; import org.apache.storm.scheduler.*; import org.apache.storm.scheduler.resource.*; import org.apache.storm.utils.*; import org.junit.*;
|
[
"java.util",
"org.apache.storm",
"org.junit"
] |
java.util; org.apache.storm; org.junit;
| 1,964,667
|
/**
 * Finds the key whose mapped display value equals {@code value}.
 * Used in constructors to derive the type from its string representation.
 *
 * @param value the display value to look up in {@code mapAll}
 * @return the key associated with {@code value}
 * @throws RuntimeException if {@code value} is not found in {@code mapAll}
 */
protected Integer calcType(String value) {
    // Return on the first match. The original kept scanning (and overwriting
    // the result) after a hit, which only mattered if mapAll contained
    // duplicate values — and map iteration order is unspecified anyway.
    for (Map.Entry<Integer, String> entry : mapAll.entrySet()) {
        if (value.equals(entry.getValue())) {
            return entry.getKey();
        }
    }
    throw new RuntimeException(className + " " + value + " not valid/found");
}
|
Integer function(String value) { Integer result = null; Boolean found = false; for (Map.Entry<Integer, String> entry : mapAll.entrySet()) { if (value.equals(entry.getValue())) { result = entry.getKey(); found = true; } } if (found == false) { throw new RuntimeException(className + " " + value + STR); } return result; }
|
/**
* Find the value and set type and value. Used in constructors.
*
* @param value value
* @return the Key.
*/
|
Find the value and set type and value. Used in constructors
|
calcType
|
{
"repo_name": "fishjd/HappyNewMoonWithReport",
"path": "src/main/java/happynewmoonwithreport/ValueBase.java",
"license": "apache-2.0",
"size": 2521
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,876,516
|
/**
 * Static factory for a new {@code StreamReducerSimple}.
 *
 * @param keyFunction   function extracting the grouping key from an input item
 * @param keyComparator comparator used to compare keys
 * @param reducer       reducer applied to the items of each key group
 * @return the newly created instance
 */
public static <K, I, O, A> StreamReducerSimple<K, I, O, A> create(Function<I, K> keyFunction,
                                                                  Comparator<K> keyComparator,
                                                                  Reducer<K, I, O, A> reducer) {
    return new StreamReducerSimple<>(keyFunction, keyComparator, reducer);
}
|
static <K, I, O, A> StreamReducerSimple<K, I, O, A> function(Function<I, K> keyFunction, Comparator<K> keyComparator, Reducer<K, I, O, A> reducer) { return new StreamReducerSimple<>(keyFunction, keyComparator, reducer); }
|
/**
* Creates a new instance of StreamReducerSimple
*
* @param keyComparator comparator for compare keys
* @param keyFunction function for counting key
*/
|
Creates a new instance of StreamReducerSimple
|
create
|
{
"repo_name": "softindex/datakernel",
"path": "core-datastream/src/main/java/io/datakernel/datastream/processor/StreamReducerSimple.java",
"license": "apache-2.0",
"size": 2355
}
|
[
"io.datakernel.datastream.processor.StreamReducers",
"java.util.Comparator",
"java.util.function.Function"
] |
import io.datakernel.datastream.processor.StreamReducers; import java.util.Comparator; import java.util.function.Function;
|
import io.datakernel.datastream.processor.*; import java.util.*; import java.util.function.*;
|
[
"io.datakernel.datastream",
"java.util"
] |
io.datakernel.datastream; java.util;
| 1,794,692
|
/** Returns the list of calls to aggregate functions. */
public List<AggregateCall> getAggCallList() {
    return this.aggCalls;
}
|
List<AggregateCall> function() { return aggCalls; }
|
/**
* Returns a list of calls to aggregate functions.
*
* @return list of calls to aggregate functions
*/
|
Returns a list of calls to aggregate functions
|
getAggCallList
|
{
"repo_name": "sudheeshkatkam/incubator-calcite",
"path": "core/src/main/java/org/apache/calcite/rel/core/Aggregate.java",
"license": "apache-2.0",
"size": 17901
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 300,726
|
/**
 * Delivers the fetched movies through the communication system so the
 * presenter (possibly in another module) can receive them.
 *
 * @param response wrapper containing the list of movies to deliver
 */
public void sendMoviesToPresenter (MoviesWrapper response);
|
void function (MoviesWrapper response);
|
/**
 * Sends the PopularMoviesApiResponse through the communication system
* to be received by the presenter in another module
*
* @param response the response containing a list with movies
*/
|
Sends the PopularMoviesApiResponse through the communication system to be received by the presenter in another module
|
sendMoviesToPresenter
|
{
"repo_name": "hanhailong/Material-Movies",
"path": "HackVG/domain/src/main/java/com/hackvg/domain/GetMoviesUsecase.java",
"license": "apache-2.0",
"size": 1227
}
|
[
"com.hackvg.model.entities.MoviesWrapper"
] |
import com.hackvg.model.entities.MoviesWrapper;
|
import com.hackvg.model.entities.*;
|
[
"com.hackvg.model"
] |
com.hackvg.model;
| 2,351,114
|
/**
 * Asynchronously gets createdOnBehalfOf from applications.
 *
 * @param applicationId key: id of application
 * @return a {@code Mono} emitting the createdOnBehalfOf directory object
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<MicrosoftGraphDirectoryObjectInner> getCreatedOnBehalfOfAsync(String applicationId);
|
@ServiceMethod(returns = ReturnType.SINGLE) Mono<MicrosoftGraphDirectoryObjectInner> getCreatedOnBehalfOfAsync(String applicationId);
|
/**
* Get createdOnBehalfOf from applications.
*
* @param applicationId key: id of application.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is
* rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return createdOnBehalfOf from applications.
*/
|
Get createdOnBehalfOf from applications
|
getCreatedOnBehalfOfAsync
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/fluent/ApplicationsClient.java",
"license": "mit",
"size": 113060
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphDirectoryObjectInner"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphDirectoryObjectInner;
|
import com.azure.core.annotation.*; import com.azure.resourcemanager.authorization.fluent.models.*;
|
[
"com.azure.core",
"com.azure.resourcemanager"
] |
com.azure.core; com.azure.resourcemanager;
| 2,756,513
|
/**
 * Posts an asynchronous request to tune the radio to {@code frequency}.
 * Any tune request still pending on the handler is discarded first, so only
 * the most recent frequency is acted upon.
 *
 * @param frequency the frequency to tune to
 */
public void tuneStationAsync(float frequency) {
    mFmServiceHandler.removeMessages(FmRadioListener.MSGID_TUNE_FINISHED);
    Bundle data = new Bundle(1);
    data.putFloat(FM_FREQUENCY, frequency);
    Message message = mFmServiceHandler.obtainMessage(FmRadioListener.MSGID_TUNE_FINISHED);
    message.setData(data);
    mFmServiceHandler.sendMessage(message);
}
|
void function(float frequency) { mFmServiceHandler.removeMessages(FmRadioListener.MSGID_TUNE_FINISHED); final int bundleSize = 1; Bundle bundle = new Bundle(bundleSize); bundle.putFloat(FM_FREQUENCY, frequency); Message msg = mFmServiceHandler.obtainMessage(FmRadioListener.MSGID_TUNE_FINISHED); msg.setData(bundle); mFmServiceHandler.sendMessage(msg); }
|
/**
* Tune to a station
*
* @param frequency The frequency to tune
*
 * The tuning result is delivered asynchronously; this method itself returns nothing.
*/
|
Tune to a station
|
tuneStationAsync
|
{
"repo_name": "darklord4822/android_device_smart_sprint4g",
"path": "mtk/FmRadio/src/com/mediatek/fmradio/FmRadioService.java",
"license": "gpl-2.0",
"size": 105643
}
|
[
"android.os.Bundle",
"android.os.Message"
] |
import android.os.Bundle; import android.os.Message;
|
import android.os.*;
|
[
"android.os"
] |
android.os;
| 221,987
|
/**
 * Set Document Date (generated model accessor).
 *
 * @param DateDoc the date of the document
 */
public void setDateDoc (Timestamp DateDoc)
{
	// Persists via the generic generated-model column store.
	set_Value (COLUMNNAME_DateDoc, DateDoc);
}
|
void function (Timestamp DateDoc) { set_Value (COLUMNNAME_DateDoc, DateDoc); }
|
/** Set Document Date.
@param DateDoc
Date of the Document
*/
|
Set Document Date
|
setDateDoc
|
{
"repo_name": "arthurmelo88/palmetalADP",
"path": "adempiere_360/base/src/org/compiere/model/X_GL_JournalBatch.java",
"license": "gpl-2.0",
"size": 15473
}
|
[
"java.sql.Timestamp"
] |
import java.sql.Timestamp;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 1,579,405
|
/**
 * Returns an {@code AttributedCharacterIterator} over the complete content
 * of this attributed string. Only attributes contained in {@code attributes}
 * are available from the iterator, and only if defined for this text.
 *
 * @param attributes the attributes the new iterator may expose
 * @return an iterator covering the whole text ({@code [0, text.length())})
 */
public AttributedCharacterIterator getIterator(
        AttributedCharacterIterator.Attribute[] attributes) {
    return new AttributedIterator(this, attributes, 0, text.length());
}
|
AttributedCharacterIterator function( AttributedCharacterIterator.Attribute[] attributes) { return new AttributedIterator(this, attributes, 0, text.length()); }
|
/**
* Returns an {@code AttributedCharacterIterator} that gives access to the
* complete content of this attributed string. Only attributes contained in
* {@code attributes} are available from this iterator if they are defined
* for this text.
*
* @param attributes
* the array containing attributes that will be in the new
* iterator if they are defined for this text.
* @return the newly created {@code AttributedCharacterIterator}.
*/
|
Returns an AttributedCharacterIterator that gives access to the complete content of this attributed string. Only attributes contained in attributes are available from this iterator if they are defined for this text
|
getIterator
|
{
"repo_name": "xdajog/samsung_sources_i927",
"path": "libcore/luni/src/main/java/java/text/AttributedString.java",
"license": "gpl-2.0",
"size": 28396
}
|
[
"java.text.AttributedCharacterIterator"
] |
import java.text.AttributedCharacterIterator;
|
import java.text.*;
|
[
"java.text"
] |
java.text;
| 2,756,107
|
/**
 * Long running post request: the service returns a 202 to the initial request
 * with a 'Location' header, then 204 with no response body after success.
 *
 * @param product Product to put
 * @return the observable tracking the long-running operation, including the
 *         typed response headers
 */
public Observable<ServiceResponseWithHeaders<Product, LROsPost202NoRetry204Headers>> post202NoRetry204WithServiceResponseAsync(Product product) {
    // Validate the request payload before issuing the call.
    Validator.validate(product);
    Observable<Response<ResponseBody>> observable = service.post202NoRetry204(product, this.client.acceptLanguage(), this.client.userAgent());
    // Poll via the Azure LRO helper until the POST completes, deserializing
    // both the Product body and the typed headers.
    return client.getAzureClient().getPostOrDeleteResultWithHeadersAsync(observable, new TypeToken<Product>() { }.getType(), LROsPost202NoRetry204Headers.class);
}
|
Observable<ServiceResponseWithHeaders<Product, LROsPost202NoRetry204Headers>> function(Product product) { Validator.validate(product); Observable<Response<ResponseBody>> observable = service.post202NoRetry204(product, this.client.acceptLanguage(), this.client.userAgent()); return client.getAzureClient().getPostOrDeleteResultWithHeadersAsync(observable, new TypeToken<Product>() { }.getType(), LROsPost202NoRetry204Headers.class); }
|
/**
* Long running post request, service returns a 202 to the initial request, with 'Location' header, 204 with noresponse body after success.
*
* @param product Product to put
* @return the observable for the request
*/
|
Long running post request, service returns a 202 to the initial request, with 'Location' header, 204 with noresponse body after success
|
post202NoRetry204WithServiceResponseAsync
|
{
"repo_name": "yugangw-msft/autorest",
"path": "src/generator/AutoRest.Java.Azure.Tests/src/main/java/fixtures/lro/implementation/LROsImpl.java",
"license": "mit",
"size": 358789
}
|
[
"com.google.common.reflect.TypeToken",
"com.microsoft.rest.ServiceResponseWithHeaders",
"com.microsoft.rest.Validator"
] |
import com.google.common.reflect.TypeToken; import com.microsoft.rest.ServiceResponseWithHeaders; import com.microsoft.rest.Validator;
|
import com.google.common.reflect.*; import com.microsoft.rest.*;
|
[
"com.google.common",
"com.microsoft.rest"
] |
com.google.common; com.microsoft.rest;
| 2,687,353
|
/**
 * Draws a single bar for the given series.
 *
 * @param canvas the canvas to paint to
 * @param xMin the X axis minimum
 * @param yMin the Y axis minimum
 * @param xMax the X axis maximum
 * @param yMax the Y axis maximum
 * @param halfDiffX half the width of a bar
 * @param seriesNr the total number of series
 * @param seriesIndex the index of the current series
 * @param paint the paint used for drawing
 */
protected void drawBar(Canvas canvas, float xMin, float yMin, float xMax, float yMax,
    float halfDiffX, int seriesNr, int seriesIndex, Paint paint) {
  int scale = mDataset.getSeriesAt(seriesIndex).getScaleNumber();
  if (mType != Type.STACKED) {
    // Side-by-side bars: offset each series within the category slot.
    float left = xMin - seriesNr * halfDiffX + seriesIndex * 2 * halfDiffX;
    drawBar(canvas, left, yMax, left + 2 * halfDiffX, yMin, scale, seriesIndex, paint);
  } else {
    // Stacked bars all share the full category width.
    drawBar(canvas, xMin - halfDiffX, yMax, xMax + halfDiffX, yMin, scale, seriesIndex, paint);
  }
}
|
void function(Canvas canvas, float xMin, float yMin, float xMax, float yMax, float halfDiffX, int seriesNr, int seriesIndex, Paint paint) { int scale = mDataset.getSeriesAt(seriesIndex).getScaleNumber(); if (mType == Type.STACKED) { drawBar(canvas, xMin - halfDiffX, yMax, xMax + halfDiffX, yMin, scale, seriesIndex, paint); } else { float startX = xMin - seriesNr * halfDiffX + seriesIndex * 2 * halfDiffX; drawBar(canvas, startX, yMax, startX + 2 * halfDiffX, yMin, scale, seriesIndex, paint); } }
|
/**
* Draws a bar.
*
* @param canvas the canvas
* @param xMin the X axis minimum
* @param yMin the Y axis minimum
* @param xMax the X axis maximum
* @param yMax the Y axis maximum
* @param halfDiffX half the size of a bar
* @param seriesNr the total number of series
* @param seriesIndex the current series index
* @param paint the paint
*/
|
Draws a bar
|
drawBar
|
{
"repo_name": "christianverdonk/sensorreadout",
"path": "src/org/achartengine/chart/BarChart.java",
"license": "apache-2.0",
"size": 12497
}
|
[
"android.graphics.Canvas",
"android.graphics.Paint"
] |
import android.graphics.Canvas; import android.graphics.Paint;
|
import android.graphics.*;
|
[
"android.graphics"
] |
android.graphics;
| 1,951,247
|
/**
 * Returns <code>true</code> if the current position is the start of a new block.
 *
 * @return <code>true</code> if the current position equals the next block position
 * @throws IOException if an I/O problem occurs while reading the position
 */
private boolean isNewBlock() throws IOException {
    return getRealPosition() == this.nextBlockPosition;
}
|
boolean function() throws IOException { return getRealPosition() == this.nextBlockPosition; }
|
/**
* Returns <code>true</code> if the current position is the start of a new block.
*
* @return <code>true</code> if the current position is the start of a new block.
* @throws IOException if an I/O problem occurs.
*/
|
Returns <code>true</code> if the current position is the start of a new block
|
isNewBlock
|
{
"repo_name": "blerer/horizondb",
"path": "src/main/java/io/horizondb/db/btree/AbstractBlockOrganizedByteReader.java",
"license": "apache-2.0",
"size": 8549
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,465,529
|
/**
 * Sets the value of property {@code AccountAuthentication} from an RDF2Go
 * {@linkplain Node}. All existing values are removed first, then this value
 * is added; cardinality constraints are not checked.
 * [Generated from RDFReactor template rule #set1dynamic]
 *
 * @param value the value to be set
 */
public void setAccountAuthentication( Node value ) {
	Base.set( this.model, this.getResource(), ACCOUNTAUTHENTICATION, value );
}
|
void function( Node value ) { Base.set( this.model, this.getResource(), ACCOUNTAUTHENTICATION, value ); }
|
/**
* Sets a value of property {@code AccountAuthentication} from an RDF2Go {@linkplain Node}.
* First, all existing values are removed, then this value is added. Cardinality constraints are
* not checked, but this method exists only for properties with no {@code minCardinality} or
* {@code minCardinality == 1}.
*
* @param value
* the value to be added
*
* [Generated from RDFReactor template rule #set1dynamic]
*/
|
Sets a value of property AccountAuthentication from an RDF2Go Node. First, all existing values are removed, then this value is added. Cardinality constraints are not checked, but this method exists only for properties with no minCardinality or minCardinality == 1
|
setAccountAuthentication
|
{
"repo_name": "m0ep/master-thesis",
"path": "source/apis/rdf2go/rdf2go-sioc-services-auth/src/main/java/de/m0ep/sioc/services/auth/UserAccount.java",
"license": "mit",
"size": 21163
}
|
[
"org.ontoware.rdf2go.model.node.Node",
"org.ontoware.rdfreactor.runtime.Base"
] |
import org.ontoware.rdf2go.model.node.Node; import org.ontoware.rdfreactor.runtime.Base;
|
import org.ontoware.rdf2go.model.node.*; import org.ontoware.rdfreactor.runtime.*;
|
[
"org.ontoware.rdf2go",
"org.ontoware.rdfreactor"
] |
org.ontoware.rdf2go; org.ontoware.rdfreactor;
| 331,116
|
/**
 * Gets the rolled-back list.
 * <p>
 * NOTE(review): returns the internal static mutable list directly, so
 * callers can modify it — acceptable for a test helper, but confirm before
 * wider use.
 *
 * @return the list of rolled-back upgrades
 */
public static List<String> getRolledBackList() {
    return rolledback;
}
|
static List<String> function() { return rolledback; }
|
/**
* Gets the rolled-back List.
*
* @return the upgrade order
*/
|
Gets the rolled-back List
|
getRolledBackList
|
{
"repo_name": "tdefilip/opennms",
"path": "core/upgrade/src/test/java/org/opennms/upgrade/support/UpgradeHelper.java",
"license": "agpl-3.0",
"size": 2348
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,307,853
|
/**
 * Finds the first time value of the given property (if any) and extracts an
 * integer year, provided the value has at least year precision.
 *
 * @param document the document to extract the data from
 * @param propertyId the string id of the property to look for
 * @return the year, or {@code Integer.MIN_VALUE} if none was found
 */
private int getYearIfAny(StatementDocument document, String propertyId) {
    TimeValue date = document.findStatementTimeValue(propertyId);
    boolean usableYear = date != null && date.getPrecision() >= TimeValue.PREC_YEAR;
    return usableYear ? (int) date.getYear() : Integer.MIN_VALUE;
}
|
int function(StatementDocument document, String propertyId) { TimeValue date = document.findStatementTimeValue(propertyId); if (date != null && date.getPrecision() >= TimeValue.PREC_YEAR) { return (int) date.getYear(); } else { return Integer.MIN_VALUE; } }
|
/**
* Helper method that finds the first value of a time-valued property (if
* any), and extracts an integer year. It checks if the value has sufficient
* precision to extract an exact year.
*
* @param document
* the document to extract the data from
 * @param propertyId
 *            the string id of the property to look for
 * @return the year, or Integer.MIN_VALUE if none was found
*/
|
Helper method that finds the first value of a time-valued property (if any), and extracts an integer year. It checks if the value has sufficient precision to extract an exact year
|
getYearIfAny
|
{
"repo_name": "notconfusing/Wikidata-Toolkit",
"path": "wdtk-examples/src/main/java/org/wikidata/wdtk/examples/LifeExpectancyProcessor.java",
"license": "apache-2.0",
"size": 5620
}
|
[
"org.wikidata.wdtk.datamodel.interfaces.StatementDocument",
"org.wikidata.wdtk.datamodel.interfaces.TimeValue"
] |
import org.wikidata.wdtk.datamodel.interfaces.StatementDocument; import org.wikidata.wdtk.datamodel.interfaces.TimeValue;
|
import org.wikidata.wdtk.datamodel.interfaces.*;
|
[
"org.wikidata.wdtk"
] |
org.wikidata.wdtk;
| 245,957
|
/**
 * Returns every candidate polynomial that can be used to generate the field.
 * Never used by the library itself; kept here for completeness.
 */
public static Integer [] allPossiblePolynomials() {
    List<Integer> valid = new ArrayList<Integer>();
    for (int candidate = 0; candidate < FIELD_SIZE; candidate++) {
        try {
            generateLogTable(candidate);
            valid.add(candidate);
        }
        catch (RuntimeException ignored) {
            // this candidate cannot generate the field
        }
    }
    return valid.toArray(new Integer[0]);
}
}
class InputByteOutputExpCodingLoop extends CodingLoopBase {
|
static Integer [] function() { List<Integer> result = new ArrayList<Integer>(); for (int i = 0; i < FIELD_SIZE; i++) { try { generateLogTable(i); result.add(i); } catch (RuntimeException e) { } } return result.toArray(new Integer [result.size()]); } } class InputByteOutputExpCodingLoop extends CodingLoopBase {
|
/**
* Returns a list of all polynomials that can be used to generate
* the field.
*
* This is never used in the code; it's just here for completeness.
*/
|
Returns a list of all polynomials that can be used to generate the field. This is never used in the code; it's just here for completeness
|
allPossiblePolynomials
|
{
"repo_name": "sridharan99/Cloud-Deduplication",
"path": "Chunklevel/Reed.java",
"license": "apache-2.0",
"size": 71674
}
|
[
"java.util.ArrayList",
"java.util.List"
] |
import java.util.ArrayList; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 947,439
|
/**
 * Creates a binding from the XML configuration supplied as a DOM tree.
 * The specified element must be an fb:context element; the resulting
 * binding is <b>not</b> cached.
 *
 * @param bindingElement the fb:context DOM element describing the binding
 * @return the created binding
 * @throws BindingException if the binding cannot be built
 */
Binding createBinding(Element bindingElement) throws BindingException;
|
Binding createBinding(Element bindingElement) throws BindingException;
|
/**
* Creates a binding from the XML config that is supplied as a DOM tree.
*
* <p>The specified element must be a fb:context element.</p>
*
* <p>The binding will <b>not</b> be cached.</p>
*/
|
Creates a binding from the XML config that is supplied as a DOM tree. The specified element must be a fb:context element. The binding will not be cached
|
createBinding
|
{
"repo_name": "apache/cocoon",
"path": "blocks/cocoon-forms/cocoon-forms-impl/src/main/java/org/apache/cocoon/forms/binding/BindingManager.java",
"license": "apache-2.0",
"size": 2034
}
|
[
"org.w3c.dom.Element"
] |
import org.w3c.dom.Element;
|
import org.w3c.dom.*;
|
[
"org.w3c.dom"
] |
org.w3c.dom;
| 543,079
|
/**
 * Test the <code>findByTeamAndType()</code> method: every (team, type) pair
 * should resolve to the matching token constant.
 */
@Test
public void findByTeamAndType()
{
    // White pieces.
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.BISHOP),
            is(HexChessToken.WHITE_BISHOP));
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.KING), is(HexChessToken.WHITE_KING));
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.KNIGHT),
            is(HexChessToken.WHITE_KNIGHT));
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.PAWN), is(HexChessToken.WHITE_PAWN));
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.QUEEN), is(HexChessToken.WHITE_QUEEN));
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.ROOK), is(HexChessToken.WHITE_ROOK));
    // Black pieces.
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.BISHOP),
            is(HexChessToken.BLACK_BISHOP));
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.KING), is(HexChessToken.BLACK_KING));
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.KNIGHT),
            is(HexChessToken.BLACK_KNIGHT));
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.PAWN), is(HexChessToken.BLACK_PAWN));
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.QUEEN), is(HexChessToken.BLACK_QUEEN));
    assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.ROOK), is(HexChessToken.BLACK_ROOK));
}
|
void function() { assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.BISHOP), is(HexChessToken.WHITE_BISHOP)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.KING), is(HexChessToken.WHITE_KING)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.KNIGHT), is(HexChessToken.WHITE_KNIGHT)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.PAWN), is(HexChessToken.WHITE_PAWN)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.QUEEN), is(HexChessToken.WHITE_QUEEN)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.WHITE, TokenType.ROOK), is(HexChessToken.WHITE_ROOK)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.BISHOP), is(HexChessToken.BLACK_BISHOP)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.KING), is(HexChessToken.BLACK_KING)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.KNIGHT), is(HexChessToken.BLACK_KNIGHT)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.PAWN), is(HexChessToken.BLACK_PAWN)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.QUEEN), is(HexChessToken.BLACK_QUEEN)); assertThat(HexChessToken.findByTeamAndType(HexChessTeam.BLACK, TokenType.ROOK), is(HexChessToken.BLACK_ROOK)); }
|
/**
* Test the <code>findByTeamAndType()</code> method.
*/
|
Test the <code>findByTeamAndType()</code> method
|
findByTeamAndType
|
{
"repo_name": "jmthompson2015/vizzini",
"path": "example/src/test/java/org/vizzini/example/boardgame/hexchess/HexChessTokenTest.java",
"license": "mit",
"size": 8003
}
|
[
"org.hamcrest.CoreMatchers",
"org.junit.Assert"
] |
import org.hamcrest.CoreMatchers; import org.junit.Assert;
|
import org.hamcrest.*; import org.junit.*;
|
[
"org.hamcrest",
"org.junit"
] |
org.hamcrest; org.junit;
| 1,989,631
|
public void setAdamicAdarScore(FloatValue adamicAdarScore) {
this.adamicAdarScore = adamicAdarScore;
}
|
void function(FloatValue adamicAdarScore) { this.adamicAdarScore = adamicAdarScore; }
|
/**
* Set the Adamic-Adar score, equal to the sum over common neighbors of
* the inverse logarithm of degree.
*
* @param adamicAdarScore the Adamic-Adar score
*/
|
Set the Adamic-Adar score, equal to the sum over common neighbors of the inverse logarithm of degree
|
setAdamicAdarScore
|
{
"repo_name": "sunjincheng121/flink",
"path": "flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/AdamicAdar.java",
"license": "apache-2.0",
"size": 15286
}
|
[
"org.apache.flink.types.FloatValue"
] |
import org.apache.flink.types.FloatValue;
|
import org.apache.flink.types.*;
|
[
"org.apache.flink"
] |
org.apache.flink;
| 269,929
|
public static Collection<InetAddress> toInetAddresses(ClusterNode node) throws IgniteCheckedException {
return toInetAddresses(node.addresses(), node.hostNames());
}
|
static Collection<InetAddress> function(ClusterNode node) throws IgniteCheckedException { return toInetAddresses(node.addresses(), node.hostNames()); }
|
/**
* Returns tha list of resolved inet addresses. First addresses are resolved by host names,
* if this attempt fails then the addresses are resolved by ip addresses.
*
* @param node Grid node.
* @return Inet addresses for given addresses and host names.
* @throws IgniteCheckedException If non of addresses can be resolved.
*/
|
Returns tha list of resolved inet addresses. First addresses are resolved by host names, if this attempt fails then the addresses are resolved by ip addresses
|
toInetAddresses
|
{
"repo_name": "murador/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java",
"license": "apache-2.0",
"size": 294985
}
|
[
"java.net.InetAddress",
"java.util.Collection",
"org.apache.ignite.IgniteCheckedException",
"org.apache.ignite.cluster.ClusterNode"
] |
import java.net.InetAddress; import java.util.Collection; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cluster.ClusterNode;
|
import java.net.*; import java.util.*; import org.apache.ignite.*; import org.apache.ignite.cluster.*;
|
[
"java.net",
"java.util",
"org.apache.ignite"
] |
java.net; java.util; org.apache.ignite;
| 1,945,205
|
public int getResponseCode() throws IOException {
if (conn == null) {
throw new IllegalArgumentException("No connection has been made");
}
return conn.getResponseCode();
}
|
int function() throws IOException { if (conn == null) { throw new IllegalArgumentException(STR); } return conn.getResponseCode(); }
|
/**
* Get the response code.
*
* @return the response code
* @throws java.io.IOException on I/O error
*/
|
Get the response code
|
getResponseCode
|
{
"repo_name": "TealCube/SquirrelID",
"path": "src/main/java/com/sk89q/squirrelid/util/HttpRequest.java",
"license": "lgpl-3.0",
"size": 15161
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 994,248
|
@Generated(hash = 713229351)
public void update() {
if (myDao == null) {
throw new DaoException("Entity is detached from DAO context");
}
myDao.update(this);
}
|
@Generated(hash = 713229351) void function() { if (myDao == null) { throw new DaoException(STR); } myDao.update(this); }
|
/**
* Convenient call for {@link org.greenrobot.greendao.AbstractDao#update(Object)}.
* Entity must attached to an entity context.
*/
|
Convenient call for <code>org.greenrobot.greendao.AbstractDao#update(Object)</code>. Entity must attached to an entity context
|
update
|
{
"repo_name": "schaemik/literacyapp-android",
"path": "app/src/main/java/org/literacyapp/model/content/Number.java",
"license": "apache-2.0",
"size": 5872
}
|
[
"org.greenrobot.greendao.DaoException",
"org.greenrobot.greendao.annotation.Generated"
] |
import org.greenrobot.greendao.DaoException; import org.greenrobot.greendao.annotation.Generated;
|
import org.greenrobot.greendao.*; import org.greenrobot.greendao.annotation.*;
|
[
"org.greenrobot.greendao"
] |
org.greenrobot.greendao;
| 2,682,417
|
public static List<ResourceRegion> toResourceRegions(List<HttpRange> ranges, Resource resource) {
if (CollectionUtils.isEmpty(ranges)) {
return Collections.emptyList();
}
List<ResourceRegion> regions = new ArrayList<ResourceRegion>(ranges.size());
for (HttpRange range : ranges) {
regions.add(range.toResourceRegion(resource));
}
return regions;
}
|
static List<ResourceRegion> function(List<HttpRange> ranges, Resource resource) { if (CollectionUtils.isEmpty(ranges)) { return Collections.emptyList(); } List<ResourceRegion> regions = new ArrayList<ResourceRegion>(ranges.size()); for (HttpRange range : ranges) { regions.add(range.toResourceRegion(resource)); } return regions; }
|
/**
* Convert each {@code HttpRange} into a {@code ResourceRegion}, selecting the
* appropriate segment of the given {@code Resource} using HTTP Range information.
* @param ranges the list of ranges
* @param resource the resource to select the regions from
* @return the list of regions for the given resource
* @since 4.3
*/
|
Convert each HttpRange into a ResourceRegion, selecting the appropriate segment of the given Resource using HTTP Range information
|
toResourceRegions
|
{
"repo_name": "Permafrost/Tundra.java",
"path": "src/main/java/permafrost/tundra/org/springframework/http/HttpRange.java",
"license": "mit",
"size": 12299
}
|
[
"java.util.ArrayList",
"java.util.Collections",
"java.util.List"
] |
import java.util.ArrayList; import java.util.Collections; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 220,152
|
public List<Long> getRetryMulticastIds() {
return retryMulticastIds;
}
|
List<Long> function() { return retryMulticastIds; }
|
/**
* Gets additional ids if more than one multicast message was sent.
*/
|
Gets additional ids if more than one multicast message was sent
|
getRetryMulticastIds
|
{
"repo_name": "ioksrom/KitAlumniApp-Server",
"path": "src/main/java/edu/kit/isco/KitAlumniApp/server/gcm/MulticastResult.java",
"license": "apache-2.0",
"size": 3892
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,806,082
|
// <editor-fold defaultstate="collapsed" desc=" Generated Code ">//GEN-BEGIN:initComponents
private void initComponents() {
java.awt.GridBagConstraints gridBagConstraints;
eyeDropper = new javax.swing.JButton();
magPanel = new MagnifyingPanel();
activeColor = new JXColorSelectionButton();
hexColor = new javax.swing.JTextField();
JTextArea jTextArea1 = new JTextArea();
jLabel1 = new javax.swing.JLabel();
rgbColor = new javax.swing.JTextField();
JLabel jLabel2 = new JLabel();
setLayout(new java.awt.GridBagLayout());
eyeDropper.setText("eye");
add(eyeDropper, new java.awt.GridBagConstraints());
magPanel.setLayout(new java.awt.BorderLayout());
magPanel.setBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(0, 0, 0)));
magPanel.setMinimumSize(new java.awt.Dimension(100, 100));
magPanel.setPreferredSize(new java.awt.Dimension(100, 100));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 1;
gridBagConstraints.gridheight = 3;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 12);
add(magPanel, gridBagConstraints);
activeColor.setEnabled(false);
activeColor.setPreferredSize(new java.awt.Dimension(40, 40));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 2;
gridBagConstraints.gridy = 3;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0);
add(activeColor, gridBagConstraints);
hexColor.setEditable(false);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 2;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0);
add(hexColor, gridBagConstraints);
jTextArea1.setColumns(20);
jTextArea1.setEditable(false);
jTextArea1.setLineWrap(true);
jTextArea1.setRows(5);
jTextArea1.setText("Drag the magnifying glass to select a color from the screen.");
jTextArea1.setWrapStyleWord(true);
jTextArea1.setOpaque(false);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridwidth = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 10.0;
gridBagConstraints.weighty = 10.0;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 7, 0);
add(jTextArea1, gridBagConstraints);
jLabel1.setText("#");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
gridBagConstraints.insets = new java.awt.Insets(0, 4, 0, 4);
add(jLabel1, gridBagConstraints);
rgbColor.setEditable(false);
rgbColor.setText("255,255,255");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 2;
gridBagConstraints.gridy = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0);
add(rgbColor, gridBagConstraints);
jLabel2.setText("RGB");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 2;
gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
gridBagConstraints.insets = new java.awt.Insets(0, 4, 0, 4);
add(jLabel2, gridBagConstraints);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton activeColor;
private javax.swing.JButton eyeDropper;
private javax.swing.JTextField hexColor;
private javax.swing.JLabel jLabel1;
private javax.swing.JPanel magPanel;
private javax.swing.JTextField rgbColor;
// End of variables declaration//GEN-END:variables
/**
* {@inheritDoc}
|
void function() { java.awt.GridBagConstraints gridBagConstraints; eyeDropper = new javax.swing.JButton(); magPanel = new MagnifyingPanel(); activeColor = new JXColorSelectionButton(); hexColor = new javax.swing.JTextField(); JTextArea jTextArea1 = new JTextArea(); jLabel1 = new javax.swing.JLabel(); rgbColor = new javax.swing.JTextField(); JLabel jLabel2 = new JLabel(); setLayout(new java.awt.GridBagLayout()); eyeDropper.setText("eye"); add(eyeDropper, new java.awt.GridBagConstraints()); magPanel.setLayout(new java.awt.BorderLayout()); magPanel.setBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(0, 0, 0))); magPanel.setMinimumSize(new java.awt.Dimension(100, 100)); magPanel.setPreferredSize(new java.awt.Dimension(100, 100)); gridBagConstraints = new java.awt.GridBagConstraints(); gridBagConstraints.gridx = 0; gridBagConstraints.gridy = 1; gridBagConstraints.gridheight = 3; gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 12); add(magPanel, gridBagConstraints); activeColor.setEnabled(false); activeColor.setPreferredSize(new java.awt.Dimension(40, 40)); gridBagConstraints = new java.awt.GridBagConstraints(); gridBagConstraints.gridx = 2; gridBagConstraints.gridy = 3; gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0); add(activeColor, gridBagConstraints); hexColor.setEditable(false); gridBagConstraints = new java.awt.GridBagConstraints(); gridBagConstraints.gridx = 2; gridBagConstraints.gridy = 1; gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0); add(hexColor, gridBagConstraints); jTextArea1.setColumns(20); jTextArea1.setEditable(false); jTextArea1.setLineWrap(true); jTextArea1.setRows(5); jTextArea1.setText(STR); jTextArea1.setWrapStyleWord(true); jTextArea1.setOpaque(false); gridBagConstraints = new java.awt.GridBagConstraints(); 
gridBagConstraints.gridwidth = 2; gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH; gridBagConstraints.weightx = 10.0; gridBagConstraints.weighty = 10.0; gridBagConstraints.insets = new java.awt.Insets(0, 0, 7, 0); add(jTextArea1, gridBagConstraints); jLabel1.setText("#"); gridBagConstraints = new java.awt.GridBagConstraints(); gridBagConstraints.gridx = 1; gridBagConstraints.gridy = 1; gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST; gridBagConstraints.insets = new java.awt.Insets(0, 4, 0, 4); add(jLabel1, gridBagConstraints); rgbColor.setEditable(false); rgbColor.setText(STR); gridBagConstraints = new java.awt.GridBagConstraints(); gridBagConstraints.gridx = 2; gridBagConstraints.gridy = 2; gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0); add(rgbColor, gridBagConstraints); jLabel2.setText("RGB"); gridBagConstraints = new java.awt.GridBagConstraints(); gridBagConstraints.gridx = 1; gridBagConstraints.gridy = 2; gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST; gridBagConstraints.insets = new java.awt.Insets(0, 4, 0, 4); add(jLabel2, gridBagConstraints); } private javax.swing.JButton activeColor; private javax.swing.JButton eyeDropper; private javax.swing.JTextField hexColor; private javax.swing.JLabel jLabel1; private javax.swing.JPanel magPanel; private javax.swing.JTextField rgbColor; /** * {@inheritDoc}
|
/** This method is called from within the constructor to
* initialize the form.
* WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
|
This method is called from within the constructor to initialize the form. always regenerated by the Form Editor
|
initComponents
|
{
"repo_name": "syncer/swingx",
"path": "swingx-core/src/main/java/org/jdesktop/swingx/color/EyeDropperColorChooserPanel.java",
"license": "lgpl-2.1",
"size": 11124
}
|
[
"java.awt.Color",
"javax.swing.JLabel",
"javax.swing.JPanel",
"javax.swing.JTextArea",
"org.jdesktop.swingx.JXColorSelectionButton"
] |
import java.awt.Color; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JTextArea; import org.jdesktop.swingx.JXColorSelectionButton;
|
import java.awt.*; import javax.swing.*; import org.jdesktop.swingx.*;
|
[
"java.awt",
"javax.swing",
"org.jdesktop.swingx"
] |
java.awt; javax.swing; org.jdesktop.swingx;
| 2,514,152
|
protected void addSupportedModesPropertyDescriptor(Object object) {
// begin-extension-code
itemPropertyDescriptors
.add(createItemPropertyDescriptor
// end-extension-code
(((ComposeableAdapterFactory) adapterFactory)
.getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_SystemConfiguration_supportedModes_feature"), //$NON-NLS-1$
getString(
"_UI_PropertyDescriptor_description", "_UI_SystemConfiguration_supportedModes_feature", "_UI_SystemConfiguration_type"), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
ConfigurationPackage.Literals.SYSTEM_CONFIGURATION__SUPPORTED_MODES,
true, false, true, null, null,
// begin-extension-code
null));
// end-extension-code
}
|
void function(Object object) { itemPropertyDescriptors .add(createItemPropertyDescriptor (((ComposeableAdapterFactory) adapterFactory) .getRootAdapterFactory(), getResourceLocator(), getString(STR), getString( STR, STR, STR), ConfigurationPackage.Literals.SYSTEM_CONFIGURATION__SUPPORTED_MODES, true, false, true, null, null, null)); }
|
/**
* This adds a property descriptor for the Supported Modes feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This adds a property descriptor for the Supported Modes feature.
|
addSupportedModesPropertyDescriptor
|
{
"repo_name": "smadelenat/CapellaModeAutomata",
"path": "Language/Configuration/com.thalesgroup.trt.mde.vp.configuration.model.edit/src/com/thalesgroup/trt/mde/vp/configuration/configuration/provider/SystemConfigurationItemProvider.java",
"license": "epl-1.0",
"size": 11232
}
|
[
"com.thalesgroup.trt.mde.vp.configuration.configuration.ConfigurationPackage",
"org.eclipse.emf.edit.provider.ComposeableAdapterFactory"
] |
import com.thalesgroup.trt.mde.vp.configuration.configuration.ConfigurationPackage; import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
|
import com.thalesgroup.trt.mde.vp.configuration.configuration.*; import org.eclipse.emf.edit.provider.*;
|
[
"com.thalesgroup.trt",
"org.eclipse.emf"
] |
com.thalesgroup.trt; org.eclipse.emf;
| 1,020,551
|
public void readNull()
throws IOException {
int tag = read();
switch (tag) {
case 'N':
return;
default:
throw expect("null", tag);
}
}
|
void function() throws IOException { int tag = read(); switch (tag) { case 'N': return; default: throw expect("null", tag); } }
|
/**
* Reads a null
* <p>
* <pre>
* N
* </pre>
*/
|
Reads a null <code> N </code>
|
readNull
|
{
"repo_name": "delavior/dubbo",
"path": "hessian-lite/src/main/java/com/alibaba/com/caucho/hessian/io/Hessian2Input.java",
"license": "apache-2.0",
"size": 85774
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,012,860
|
public static void main(String[] args) {
BasicConfigurator.configure(new ConsoleAppender(
new PatternLayout("%d [%-25t] %-5p: %m%n")));
CmdLineParser parser = new CmdLineParser();
CmdLineParser.Option help = parser.addBooleanOption('h', "help");
CmdLineParser.Option output = parser.addStringOption('o', "output");
CmdLineParser.Option iface = parser.addStringOption('i', "iface");
CmdLineParser.Option seedTime = parser.addIntegerOption('s', "seed");
CmdLineParser.Option maxUpload = parser.addDoubleOption('u', "max-upload");
CmdLineParser.Option maxDownload = parser.addDoubleOption('d', "max-download");
try {
parser.parse(args);
} catch (CmdLineParser.OptionException oe) {
System.err.println(oe.getMessage());
usage(System.err);
System.exit(1);
}
// Display help and exit if requested
if (Boolean.TRUE.equals((Boolean)parser.getOptionValue(help))) {
usage(System.out);
System.exit(0);
}
String outputValue = (String)parser.getOptionValue(output,
DEFAULT_OUTPUT_DIRECTORY);
String ifaceValue = (String)parser.getOptionValue(iface);
int seedTimeValue = (Integer)parser.getOptionValue(seedTime, -1);
double maxDownloadRate = (Double)parser.getOptionValue(maxDownload, 0.0);
double maxUploadRate = (Double)parser.getOptionValue(maxUpload, 0.0);
String[] otherArgs = parser.getRemainingArgs();
if (otherArgs.length != 1) {
usage(System.err);
System.exit(1);
}
try {
Client c = new Client(
getIPv4Address(ifaceValue),
SharedTorrent.fromFile(
new File(otherArgs[0]),
new File(outputValue)));
c.setMaxDownloadRate(maxDownloadRate);
c.setMaxUploadRate(maxUploadRate);
// Set a shutdown hook that will stop the sharing/seeding and send
// a STOPPED announce request.
Runtime.getRuntime().addShutdownHook(
new Thread(new Client.ClientShutdown(c, null)));
c.share(seedTimeValue);
if (Client.ClientState.ERROR.equals(c.getState())) {
System.exit(1);
}
} catch (Exception e) {
logger.error("Fatal error: {}", e.getMessage(), e);
System.exit(2);
}
}
|
static void function(String[] args) { BasicConfigurator.configure(new ConsoleAppender( new PatternLayout(STR))); CmdLineParser parser = new CmdLineParser(); CmdLineParser.Option help = parser.addBooleanOption('h', "help"); CmdLineParser.Option output = parser.addStringOption('o', STR); CmdLineParser.Option iface = parser.addStringOption('i', "iface"); CmdLineParser.Option seedTime = parser.addIntegerOption('s', "seed"); CmdLineParser.Option maxUpload = parser.addDoubleOption('u', STR); CmdLineParser.Option maxDownload = parser.addDoubleOption('d', STR); try { parser.parse(args); } catch (CmdLineParser.OptionException oe) { System.err.println(oe.getMessage()); usage(System.err); System.exit(1); } if (Boolean.TRUE.equals((Boolean)parser.getOptionValue(help))) { usage(System.out); System.exit(0); } String outputValue = (String)parser.getOptionValue(output, DEFAULT_OUTPUT_DIRECTORY); String ifaceValue = (String)parser.getOptionValue(iface); int seedTimeValue = (Integer)parser.getOptionValue(seedTime, -1); double maxDownloadRate = (Double)parser.getOptionValue(maxDownload, 0.0); double maxUploadRate = (Double)parser.getOptionValue(maxUpload, 0.0); String[] otherArgs = parser.getRemainingArgs(); if (otherArgs.length != 1) { usage(System.err); System.exit(1); } try { Client c = new Client( getIPv4Address(ifaceValue), SharedTorrent.fromFile( new File(otherArgs[0]), new File(outputValue))); c.setMaxDownloadRate(maxDownloadRate); c.setMaxUploadRate(maxUploadRate); Runtime.getRuntime().addShutdownHook( new Thread(new Client.ClientShutdown(c, null))); c.share(seedTimeValue); if (Client.ClientState.ERROR.equals(c.getState())) { System.exit(1); } } catch (Exception e) { logger.error(STR, e.getMessage(), e); System.exit(2); } }
|
/**
* Main client entry point for stand-alone operation.
*/
|
Main client entry point for stand-alone operation
|
main
|
{
"repo_name": "KaustubhDeshmukh/Peer-CDS",
"path": "src/main/java/com/turn/ttorrent/cli/ClientMain.java",
"license": "apache-2.0",
"size": 5786
}
|
[
"com.turn.ttorrent.client.Client",
"com.turn.ttorrent.client.SharedTorrent",
"java.io.File",
"org.apache.log4j.BasicConfigurator",
"org.apache.log4j.ConsoleAppender",
"org.apache.log4j.PatternLayout"
] |
import com.turn.ttorrent.client.Client; import com.turn.ttorrent.client.SharedTorrent; import java.io.File; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.PatternLayout;
|
import com.turn.ttorrent.client.*; import java.io.*; import org.apache.log4j.*;
|
[
"com.turn.ttorrent",
"java.io",
"org.apache.log4j"
] |
com.turn.ttorrent; java.io; org.apache.log4j;
| 1,907,758
|
public String[] getHeaders() throws IOException {
checkClosed();
if (headersHolder.Headers == null) {
return null;
} else {
// use clone here to prevent the outside code from
// setting values on the array directly, which would
// throw off the index lookup based on header name
String[] clone = new String[headersHolder.Length];
System.arraycopy(headersHolder.Headers, 0, clone, 0,
headersHolder.Length);
return clone;
}
}
|
String[] function() throws IOException { checkClosed(); if (headersHolder.Headers == null) { return null; } else { String[] clone = new String[headersHolder.Length]; System.arraycopy(headersHolder.Headers, 0, clone, 0, headersHolder.Length); return clone; } }
|
/**
* Returns the header values as a string array.
*
* @return The header values as a String array.
* @exception IOException
* Thrown if this object has already been closed.
*/
|
Returns the header values as a string array
|
getHeaders
|
{
"repo_name": "alrocar/POIProxy",
"path": "es.alrocar.poiproxy/src/test/java/es/alrocar/csv/CsvReader.java",
"license": "apache-2.0",
"size": 47568
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,837,887
|
public void setup(QAConfig sysConfig) throws Exception {
super.setup(sysConfig);
parse();
}
|
void function(QAConfig sysConfig) throws Exception { super.setup(sysConfig); parse(); }
|
/**
* Invoke parent's setup and parser
* @exception TestException will usually indicate an "unresolved"
* condition because at this point the test has not yet begun.
*/
|
Invoke parent's setup and parser
|
setup
|
{
"repo_name": "cdegroot/river",
"path": "qa/src/com/sun/jini/test/impl/mercury/AdminIFShutdownTest.java",
"license": "apache-2.0",
"size": 10729
}
|
[
"com.sun.jini.qa.harness.QAConfig"
] |
import com.sun.jini.qa.harness.QAConfig;
|
import com.sun.jini.qa.harness.*;
|
[
"com.sun.jini"
] |
com.sun.jini;
| 986,838
|
// [TARGET getDefaultAcl(Entity)]
public Acl getDefaultAcl() {
// [START getDefaultAcl]
Acl acl = bucket.getDefaultAcl(User.ofAllAuthenticatedUsers());
// [END getDefaultAcl]
return acl;
}
|
Acl function() { Acl acl = bucket.getDefaultAcl(User.ofAllAuthenticatedUsers()); return acl; }
|
/**
* Example of getting the default ACL entry for an entity.
*/
|
Example of getting the default ACL entry for an entity
|
getDefaultAcl
|
{
"repo_name": "jabubake/google-cloud-java",
"path": "google-cloud-examples/src/main/java/com/google/cloud/examples/storage/snippets/BucketSnippets.java",
"license": "apache-2.0",
"size": 10243
}
|
[
"com.google.cloud.storage.Acl"
] |
import com.google.cloud.storage.Acl;
|
import com.google.cloud.storage.*;
|
[
"com.google.cloud"
] |
com.google.cloud;
| 572,468
|
public static void writeFieldHeader(boolean first, Writer writer, String boundary, String name) throws IOException {
if ((name == null) || (name.length() == 0)) {
throw new IllegalArgumentException("Field name cannot be null or empty.");
}
if (!first) {
writer.write(NEWLINE);
}
writer.write(BOUNDARY_PREFIX);
writer.write(boundary);
writer.write(NEWLINE);
// write content header
writer.write("Content-Disposition: form-data; name=\"" + name + "\"");
writer.write(NEWLINE);
writer.write(NEWLINE);
}
|
static void function(boolean first, Writer writer, String boundary, String name) throws IOException { if ((name == null) (name.length() == 0)) { throw new IllegalArgumentException(STR); } if (!first) { writer.write(NEWLINE); } writer.write(BOUNDARY_PREFIX); writer.write(boundary); writer.write(NEWLINE); writer.write(STRSTR\""); writer.write(NEWLINE); writer.write(NEWLINE); }
|
/**
* Writes the header portion for a HTTP field other than a file upload or multipart field
*
* @param first true if this is the first item; false otherwise
* @param writer the writer
* @param name the field name
* @param boundary the content boundary
* @throws IOException on input/output errors
*/
|
Writes the header portion for a HTTP field other than a file upload or multipart field
|
writeFieldHeader
|
{
"repo_name": "appnativa/rare",
"path": "source/rare/core/com/appnativa/rare/net/FormHelper.java",
"license": "gpl-3.0",
"size": 19514
}
|
[
"java.io.IOException",
"java.io.Writer"
] |
import java.io.IOException; import java.io.Writer;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,096,894
|
DiscoveryNode getPreferredTargetNode();
|
DiscoveryNode getPreferredTargetNode();
|
/**
* Returns the preferred discovery node for this request. The remote cluster client will attempt to send
* this request directly to this node. Otherwise, it will send the request as a proxy action that will
* be routed by the remote cluster to this node.
*
* @return preferred discovery node
*/
|
Returns the preferred discovery node for this request. The remote cluster client will attempt to send this request directly to this node. Otherwise, it will send the request as a proxy action that will be routed by the remote cluster to this node
|
getPreferredTargetNode
|
{
"repo_name": "ern/elasticsearch",
"path": "server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareRequest.java",
"license": "apache-2.0",
"size": 872
}
|
[
"org.elasticsearch.cluster.node.DiscoveryNode"
] |
import org.elasticsearch.cluster.node.DiscoveryNode;
|
import org.elasticsearch.cluster.node.*;
|
[
"org.elasticsearch.cluster"
] |
org.elasticsearch.cluster;
| 768,007
|
protected void writeStatisticsReport(Collection<GLInterfaceBatchStatisticsReportDetailTableRow> statisticsReportRows) {
//now print the statistics report.....
long totalNumberOfGLEntries = 0;
long totalNumberOfExceptions = 0;
for (GLInterfaceBatchStatisticsReportDetailTableRow statisticsReportRow: statisticsReportRows) {
if (!statisticsHeaderWritten) {
//write the header line....
gLInterfaceBatchStatisticsReportsWriterService.writeStatisticLine("Document Type\t\tNumber of Gl Entries\t\tNumber of Exceptions");
gLInterfaceBatchStatisticsReportsWriterService.writeStatisticLine("-------------\t\t--------------------\t\t--------------------");
statisticsHeaderWritten = true;
}
totalNumberOfGLEntries += statisticsReportRow.getGlEntriesGenerated();
totalNumberOfExceptions += statisticsReportRow.getNumberOfExceptions();
gLInterfaceBatchStatisticsReportsWriterService.writeStatisticLine("%s\t\t\t\t%9d\t\t\t\t%9d", statisticsReportRow.getDocumentType(), statisticsReportRow.getGlEntriesGenerated(), statisticsReportRow.getNumberOfExceptions());
}
//writes the total line of the report....
gLInterfaceBatchStatisticsReportsWriterService.writeStatisticLine(" \t\t--------------------\t\t--------------------");
gLInterfaceBatchStatisticsReportsWriterService.writeStatisticLine("%s\t\t\t\t%9d\t\t\t\t%9d", "Total", totalNumberOfGLEntries, totalNumberOfExceptions);
}
|
void function(Collection<GLInterfaceBatchStatisticsReportDetailTableRow> statisticsReportRows) { long totalNumberOfGLEntries = 0; long totalNumberOfExceptions = 0; for (GLInterfaceBatchStatisticsReportDetailTableRow statisticsReportRow: statisticsReportRows) { if (!statisticsHeaderWritten) { gLInterfaceBatchStatisticsReportsWriterService.writeStatisticLine(STR); gLInterfaceBatchStatisticsReportsWriterService.writeStatisticLine(STR); statisticsHeaderWritten = true; } totalNumberOfGLEntries += statisticsReportRow.getGlEntriesGenerated(); totalNumberOfExceptions += statisticsReportRow.getNumberOfExceptions(); gLInterfaceBatchStatisticsReportsWriterService.writeStatisticLine(STR, statisticsReportRow.getDocumentType(), statisticsReportRow.getGlEntriesGenerated(), statisticsReportRow.getNumberOfExceptions()); } gLInterfaceBatchStatisticsReportsWriterService.writeStatisticLine(STR); gLInterfaceBatchStatisticsReportsWriterService.writeStatisticLine(STR, "Total", totalNumberOfGLEntries, totalNumberOfExceptions); }
|
/**
* method to write the statistics report....
* @param statisticsReportRows Collection of statistics detail rows
*/
|
method to write the statistics report...
|
writeStatisticsReport
|
{
"repo_name": "Ariah-Group/Finance",
"path": "af_webapp/src/main/java/org/kuali/kfs/module/endow/batch/service/impl/GeneralLedgerInterfaceBatchProcessServiceImpl.java",
"license": "apache-2.0",
"size": 58405
}
|
[
"java.util.Collection",
"org.kuali.kfs.module.endow.businessobject.GLInterfaceBatchStatisticsReportDetailTableRow"
] |
import java.util.Collection; import org.kuali.kfs.module.endow.businessobject.GLInterfaceBatchStatisticsReportDetailTableRow;
|
import java.util.*; import org.kuali.kfs.module.endow.businessobject.*;
|
[
"java.util",
"org.kuali.kfs"
] |
java.util; org.kuali.kfs;
| 388,752
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.