method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
/**
 * Returns the expression type after the implicit conversions recorded by
 * #computeConversion(...) have been applied. The result may differ from
 * the resolved type when a primitive promotion or a boxing conversion
 * has been recorded in {@code implicitConversion}.
 *
 * @param scope the scope used to look up the boxed counterpart of a primitive type
 * @return the post-conversion type of this expression
 */
public TypeBinding postConversionType(Scope scope) {
	// Extract the target runtime type id encoded in the conversion bits.
	int targetTypeId = (this.implicitConversion & TypeIds.IMPLICIT_CONVERSION_MASK) >> 4;
	TypeBinding result;
	switch (targetTypeId) {
		case T_boolean: result = TypeBinding.BOOLEAN; break;
		case T_byte:    result = TypeBinding.BYTE;    break;
		case T_short:   result = TypeBinding.SHORT;   break;
		case T_char:    result = TypeBinding.CHAR;    break;
		case T_int:     result = TypeBinding.INT;     break;
		case T_float:   result = TypeBinding.FLOAT;   break;
		case T_long:    result = TypeBinding.LONG;    break;
		case T_double:  result = TypeBinding.DOUBLE;  break;
		default:        result = this.resolvedType;   break;
	}
	if ((this.implicitConversion & TypeIds.BOXING) != 0) {
		result = scope.environment().computeBoxingType(result);
	}
	return result;
}
|
TypeBinding function(Scope scope) { TypeBinding convertedType = this.resolvedType; int runtimeType = (this.implicitConversion & TypeIds.IMPLICIT_CONVERSION_MASK) >> 4; switch (runtimeType) { case T_boolean: convertedType = TypeBinding.BOOLEAN; break; case T_byte: convertedType = TypeBinding.BYTE; break; case T_short: convertedType = TypeBinding.SHORT; break; case T_char: convertedType = TypeBinding.CHAR; break; case T_int: convertedType = TypeBinding.INT; break; case T_float: convertedType = TypeBinding.FLOAT; break; case T_long: convertedType = TypeBinding.LONG; break; case T_double: convertedType = TypeBinding.DOUBLE; break; default: } if ((this.implicitConversion & TypeIds.BOXING) != 0) { convertedType = scope.environment().computeBoxingType(convertedType); } return convertedType; }
|
/**
* Returns the type of the expression after required implicit conversions. When expression type gets promoted
* or inserted a generic cast, the converted type will differ from the resolved type (surface side-effects from
* #computeConversion(...)).
*
* @return the type after implicit conversion
*/
|
Returns the type of the expression after required implicit conversions. When expression type gets promoted or inserted a generic cast, the converted type will differ from the resolved type (surface side-effects from #computeConversion(...))
|
postConversionType
|
{
"repo_name": "riuvshin/che-plugins",
"path": "plugin-java/che-plugin-java-ext-java/src/main/java/org/eclipse/che/ide/ext/java/jdt/internal/compiler/ast/Expression.java",
"license": "epl-1.0",
"size": 52682
}
|
[
"org.eclipse.che.ide.ext.java.jdt.internal.compiler.lookup.Scope",
"org.eclipse.che.ide.ext.java.jdt.internal.compiler.lookup.TypeBinding",
"org.eclipse.che.ide.ext.java.jdt.internal.compiler.lookup.TypeIds"
] |
import org.eclipse.che.ide.ext.java.jdt.internal.compiler.lookup.Scope; import org.eclipse.che.ide.ext.java.jdt.internal.compiler.lookup.TypeBinding; import org.eclipse.che.ide.ext.java.jdt.internal.compiler.lookup.TypeIds;
|
import org.eclipse.che.ide.ext.java.jdt.internal.compiler.lookup.*;
|
[
"org.eclipse.che"
] |
org.eclipse.che;
| 1,577,732
|
/**
 * Renders the project side bar template with a project whose description
 * contains a script tag, and compares the rendered HTML against the golden
 * file. The description must be HTML encoded, so the injected script must
 * not survive rendering (XSS regression test).
 *
 * @throws Exception if rendering or the file comparison fails
 */
@Test
public void testProjectSideBarView()
    throws Exception {
  Project project = MockProject.getMockProject();
  // Deliberately inject a script tag; the template must HTML-encode it.
  project.setDescription("<script>window.alert(\"hacked\")</script>");

  VelocityContext context = VelocityContextTestUtil.getInstance();
  context.put("project", project);
  context.put("admins", "admin_name");
  context.put("userpermission", "admin_permission");

  String rendered = VelocityTemplateTestUtil.renderTemplate("projectsidebar", context);
  WebFileAssertion.assertStringEqualFileContent(
      "project-side-bar.html", FileAssertion.surroundWithHtmlTag(rendered));
}
|
void function() throws Exception { VelocityContext context = VelocityContextTestUtil.getInstance(); Project project = MockProject.getMockProject(); project.setDescription(STRhacked\STR); context.put(STR, project); context.put(STR, STR); context.put(STR, STR); String result = VelocityTemplateTestUtil.renderTemplate(STR, context); String actual = FileAssertion.surroundWithHtmlTag(result); WebFileAssertion.assertStringEqualFileContent(STR, actual); }
|
/**
* Test project side bar view.
*
* The project description should be HTML encoded to avoid cross site scripting issues.
*
* @throws Exception the exception
*/
|
Test project side bar view. The project description should be HTML encoded to avoid cross site scripting issues
|
testProjectSideBarView
|
{
"repo_name": "binhnv/azkaban",
"path": "azkaban-webserver/src/test/java/azkaban/webapp/servlet/ProjectSideBarViewTest.java",
"license": "apache-2.0",
"size": 1299
}
|
[
"org.apache.velocity.VelocityContext"
] |
import org.apache.velocity.VelocityContext;
|
import org.apache.velocity.*;
|
[
"org.apache.velocity"
] |
org.apache.velocity;
| 287,624
|
/**
 * Increments the highway counter of every Coord of every valid road, so
 * that afterwards each road junction has a count > 1. Roads that share an
 * id with the immediately preceding road are deferred to a second pass in
 * which only their end points and existing crossroads are counted, to
 * avoid inflating the counters on the duplicated interior points.
 */
private void setHighwayCounts(){
	log.info("Maintaining highway counters");
	long previousId = 0;
	List<Way> duplicatedWays = new ArrayList<>();
	for (ConvertedWay convertedWay : roads) {
		if (!convertedWay.isValid())
			continue;
		Way way = convertedWay.getWay();
		if (way.getId() == previousId) {
			// Same id as the preceding road: handle in the second pass.
			log.debug("Road with identical id:", way.getId());
			duplicatedWays.add(way);
			continue;
		}
		previousId = way.getId();
		for (Coord point : way.getPoints()) {
			point.incHighwayCount();
		}
	}
	// Second pass: a duplicated highway contributes to its two end points
	// and to any interior point that is already a crossroads.
	for (Way way : duplicatedWays) {
		List<Coord> points = way.getPoints();
		points.get(0).incHighwayCount();
		points.get(points.size() - 1).incHighwayCount();
		for (int i = 1; i < points.size() - 1; i++) {
			Coord point = points.get(i);
			if (point.getHighwayCount() > 1) {
				// Crossroads: mark that the duplicated way is part of it too.
				point.incHighwayCount();
			}
		}
	}
}
|
void function(){ log.info(STR); long lastId = 0; List<Way> dupIdHighways = new ArrayList<>(); for (ConvertedWay cw :roads){ if (!cw.isValid()) continue; Way way = cw.getWay(); if (way.getId() == lastId) { log.debug(STR, way.getId()); dupIdHighways.add(way); continue; } lastId = way.getId(); List<Coord> points = way.getPoints(); for (Coord p:points){ p.incHighwayCount(); } } for (Way way : dupIdHighways) { List<Coord> points = way.getPoints(); points.get(0).incHighwayCount(); points.get(points.size()-1).incHighwayCount(); for (int i = 1; i < points.size()-1; i++) { Coord p = points.get(i); if (p.getHighwayCount() > 1) { p.incHighwayCount(); } } } }
|
/**
* Increment the highway counter for each coord of each road.
* As a result, all road junctions have a count > 1.
*/
|
Increment the highway counter for each coord of each road. As a result, all road junctions have a count > 1
|
setHighwayCounts
|
{
"repo_name": "openstreetmap/mkgmap",
"path": "src/uk/me/parabola/mkgmap/osmstyle/StyledConverter.java",
"license": "gpl-2.0",
"size": 76098
}
|
[
"java.util.ArrayList",
"java.util.List",
"uk.me.parabola.imgfmt.app.Coord",
"uk.me.parabola.mkgmap.reader.osm.Way"
] |
import java.util.ArrayList; import java.util.List; import uk.me.parabola.imgfmt.app.Coord; import uk.me.parabola.mkgmap.reader.osm.Way;
|
import java.util.*; import uk.me.parabola.imgfmt.app.*; import uk.me.parabola.mkgmap.reader.osm.*;
|
[
"java.util",
"uk.me.parabola"
] |
java.util; uk.me.parabola;
| 2,752,423
|
/**
 * Reads one line of whitespace-separated integer coefficients from the scanner.
 *
 * @param scr Scanner to read from
 * @return the parsed coefficients; an empty array if the line is blank
 * @throws NumberFormatException if a token is not a valid integer
 * @throws java.util.NoSuchElementException if no line is available
 */
private int[] readArr(Scanner scr) {
	String line = scr.nextLine().trim();
	if (line.isEmpty()) {
		// A blank line carries no coefficients; avoid Integer.parseInt("").
		return new int[0];
	}
	// Split on runs of whitespace so consecutive spaces or tabs do not
	// produce empty tokens (plain split(" ") would throw on "1  2").
	String[] data = line.split("\\s+");
	int[] ret = new int[data.length];
	for (int i = 0; i < ret.length; i++) {
		ret[i] = Integer.parseInt(data[i]);
	}
	return ret;
}
|
int[] function(Scanner scr) { String line = scr.nextLine().trim(); String[] data = line.split(" "); int[] ret = new int[data.length]; for (int i = 0; i < ret.length; i++) { ret[i] = Integer.parseInt(data[i]); } return ret; }
|
/**
 * Read in an int array of coefficients
 *
 * @param scr - Scanner to read from
 * @return the parsed array of coefficients
 */
|
Read in an int array of coefficients
|
readArr
|
{
"repo_name": "midkiffj/knapsacks",
"path": "src/Problems/Fractional.java",
"license": "gpl-3.0",
"size": 8949
}
|
[
"java.util.Scanner"
] |
import java.util.Scanner;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,635,742
|
/**
 * Aggregates the outputs of multiple {@link JsonPipeline}s into a single
 * {@link JsonPipelineOutput}, ensuring that the max-age Cache-Control header
 * of the result is the minimum value of all aggregated responses.
 *
 * @param pipelines an observable that emits MULTIPLE {@link JsonPipeline}s
 * @param zipFunc a lambda that is given the list of all {@link JsonPipelineOutput}s when they have been retrieved
 * @return a new observable that emits the aggregated JsonPipelineOutput with the correct max-age
 */
public static Observable<JsonPipelineOutput> zipWithLowestMaxAge(
    Observable<JsonPipeline> pipelines,
    Func1<List<JsonPipelineOutput>, JsonPipelineOutput> zipFunc) {
  // Method reference instead of an explicit lambda; delegates all the work.
  return zipWithLowestMaxAgeInternal(pipelines.map(JsonPipeline::getOutput), zipFunc);
}
|
static Observable<JsonPipelineOutput> function( Observable<JsonPipeline> pipelines, Func1<List<JsonPipelineOutput>, JsonPipelineOutput> zipFunc) { Observable<Observable<JsonPipelineOutput>> outputs = pipelines.map(pipeline -> pipeline.getOutput()); return zipWithLowestMaxAgeInternal(outputs, zipFunc); }
|
/**
* Aggregates multiple resources fetched with different {@link JsonPipeline} instances, into a single
 * {@link JsonPipelineOutput} and ensures that the max-age Cache-Control-Header is set to the minimum value of
* all aggregated responses.
* @param pipelines an observable that emits MULTIPLE {@link JsonPipeline}s
* @param zipFunc a lambda that is given the list of all {@link JsonPipelineOutput}s when they have been retrieved
* @return a new observable that emits the aggregated JsonPipelineOutput with the correct max-age
*/
|
Aggregates multiple resources fetched with different <code>JsonPipeline</code> instances, into a single <code>JsonPipelineOutput</code> and ensures that the max-age Cache-Control-Header is set to the minimum value of all aggregated responses
|
zipWithLowestMaxAge
|
{
"repo_name": "wcm-io-caravan/caravan-pipeline",
"path": "api/src/main/java/io/wcm/caravan/pipeline/cache/CacheControlUtils.java",
"license": "apache-2.0",
"size": 5448
}
|
[
"io.wcm.caravan.pipeline.JsonPipeline",
"io.wcm.caravan.pipeline.JsonPipelineOutput",
"java.util.List"
] |
import io.wcm.caravan.pipeline.JsonPipeline; import io.wcm.caravan.pipeline.JsonPipelineOutput; import java.util.List;
|
import io.wcm.caravan.pipeline.*; import java.util.*;
|
[
"io.wcm.caravan",
"java.util"
] |
io.wcm.caravan; java.util;
| 736,176
|
/**
 * Creates a new buffer which wraps the specified {@code array} with the
 * specified {@code endianness}. A modification on the specified array's
 * content will be visible to the returned buffer.
 *
 * @throws NullPointerException if {@code endianness} is {@code null}
 */
public static ChannelBuffer wrappedBuffer(ByteOrder endianness, byte[] array) {
    if (endianness == null) {
        throw new NullPointerException("endianness");
    }
    // An empty array always maps to the shared empty buffer.
    if (array.length == 0) {
        return EMPTY_BUFFER;
    }
    // ByteOrder has exactly two instances, so a non-null value is one of them.
    return endianness == BIG_ENDIAN
            ? new BigEndianHeapChannelBuffer(array)
            : new LittleEndianHeapChannelBuffer(array);
}
|
static ChannelBuffer function(ByteOrder endianness, byte[] array) { if (endianness == BIG_ENDIAN) { if (array.length == 0) { return EMPTY_BUFFER; } return new BigEndianHeapChannelBuffer(array); } else if (endianness == LITTLE_ENDIAN) { if (array.length == 0) { return EMPTY_BUFFER; } return new LittleEndianHeapChannelBuffer(array); } else { throw new NullPointerException(STR); } }
|
/**
* Creates a new buffer which wraps the specified {@code array} with the
* specified {@code endianness}. A modification on the specified array's
* content will be visible to the returned buffer.
*/
|
Creates a new buffer which wraps the specified array with the specified endianness. A modification on the specified array's content will be visible to the returned buffer
|
wrappedBuffer
|
{
"repo_name": "nyankosama/simple-netty-source",
"path": "src/main/java/org/jboss/netty/buffer/impl/ChannelBuffers.java",
"license": "apache-2.0",
"size": 39250
}
|
[
"java.nio.ByteOrder",
"org.jboss.netty.buffer.ChannelBuffer"
] |
import java.nio.ByteOrder; import org.jboss.netty.buffer.ChannelBuffer;
|
import java.nio.*; import org.jboss.netty.buffer.*;
|
[
"java.nio",
"org.jboss.netty"
] |
java.nio; org.jboss.netty;
| 2,044,507
|
/**
 * Validates the input parameters and resolves them to a data approval workflow.
 * The data set identifier takes precedence when both identifiers are given.
 *
 * @param ds the data set identifier, may be null.
 * @param wf the data approval workflow identifier, may be null.
 * @return the resolved DataApprovalWorkflow.
 * @throws WebMessageException if neither identifier is given, if the referenced
 *         object does not exist, or if the data set has no approval workflow.
 */
private DataApprovalWorkflow getAndValidateWorkflow( String ds, String wf )
    throws WebMessageException
{
    if ( ds != null )
    {
        DataSet dataSet = dataSetService.getDataSet( ds );

        if ( dataSet == null )
        {
            throw new WebMessageException( WebMessageUtils.conflict( "Data set does not exist: " + ds ) );
        }

        DataApprovalWorkflow workflow = dataSet.getWorkflow();

        if ( workflow == null )
        {
            throw new WebMessageException( WebMessageUtils.conflict( "Data set does not have an approval workflow: " + ds ) );
        }

        return workflow;
    }

    if ( wf != null )
    {
        DataApprovalWorkflow workflow = dataApprovalService.getWorkflow( wf );

        if ( workflow == null )
        {
            throw new WebMessageException( WebMessageUtils.conflict( "Data approval workflow does not exist: " + wf ) );
        }

        return workflow;
    }

    throw new WebMessageException( WebMessageUtils.conflict( "Either data set or data approval workflow must be specified" ) );
}
|
DataApprovalWorkflow function( String ds, String wf ) throws WebMessageException { if ( ds != null ) { DataSet dataSet = dataSetService.getDataSet( ds ); if ( dataSet == null ) { throw new WebMessageException( WebMessageUtils.conflict( STR + ds ) ); } if ( dataSet.getWorkflow() == null ) { throw new WebMessageException( WebMessageUtils.conflict( STR + ds ) ); } return dataSet.getWorkflow(); } else if ( wf != null ) { DataApprovalWorkflow workflow = dataApprovalService.getWorkflow( wf ); if ( workflow == null ) { throw new WebMessageException( WebMessageUtils.conflict( STR + wf ) ); } return workflow; } else { throw new WebMessageException( WebMessageUtils.conflict( STR ) ); } }
|
/**
* Validates the input parameters and returns a data approval workflow.
*
* @param ds the data set identifier.
* @param wf the data approval workflow identifier.
* @return a DataApprovalWorkflow.
* @throws WebMessageException if object is not found.
*/
|
Validates the input parameters and returns a data approval workflow
|
getAndValidateWorkflow
|
{
"repo_name": "uonafya/jphes-core",
"path": "dhis-2/dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/DataApprovalController.java",
"license": "bsd-3-clause",
"size": 30535
}
|
[
"org.hisp.dhis.dataapproval.DataApprovalWorkflow",
"org.hisp.dhis.dataset.DataSet",
"org.hisp.dhis.dxf2.webmessage.WebMessageException",
"org.hisp.dhis.dxf2.webmessage.WebMessageUtils"
] |
import org.hisp.dhis.dataapproval.DataApprovalWorkflow; import org.hisp.dhis.dataset.DataSet; import org.hisp.dhis.dxf2.webmessage.WebMessageException; import org.hisp.dhis.dxf2.webmessage.WebMessageUtils;
|
import org.hisp.dhis.dataapproval.*; import org.hisp.dhis.dataset.*; import org.hisp.dhis.dxf2.webmessage.*;
|
[
"org.hisp.dhis"
] |
org.hisp.dhis;
| 325,348
|
/**
 * Registers a listener for the given OSC address.
 *
 * @param address the OSC address to dispatch on
 * @param listener the listener invoked for messages sent to that address
 */
public void addListener(String address, OSCListener listener)
{
	// Stores the mapping; a later registration for the same address
	// replaces the earlier one (Map.put semantics).
	addressToClassTable.put(address, listener);
}
|
void function(String address, OSCListener listener) { addressToClassTable.put(address, listener); }
|
/**
* Adds the listener.
*
* @param address
* the address
* @param listener
* the listener
*/
|
Adds the listener
|
addListener
|
{
"repo_name": "synergynet/synergynet3.1",
"path": "synergynet3.1-parent/multiplicity3-input/src/main/java/com/illposed/osc/utility/OSCPacketDispatcher.java",
"license": "bsd-3-clause",
"size": 4275
}
|
[
"com.illposed.osc.OSCListener"
] |
import com.illposed.osc.OSCListener;
|
import com.illposed.osc.*;
|
[
"com.illposed.osc"
] |
com.illposed.osc;
| 1,410,449
|
/**
 * Fills in the topic-subscription search form according to the given
 * criteria and submits the search.
 *
 * @param queueNamePattern string pattern of the topic name (* for all)
 * @param identifierPattern string pattern of the identifier (* for all)
 * @param ownNodeIdIndex index of the node Id in the dropdown the subscriptions belong to
 * @param isNameExactMatch whether the topic name must match exactly
 * @param isIdentifierExactMatch whether the identifier must match exactly
 */
public void searchTopicSubscriptions(String queueNamePattern, String identifierPattern, int
        ownNodeIdIndex, boolean isNameExactMatch, boolean isIdentifierExactMatch) {
    UIElementMapper mapper = UIElementMapper.getInstance();

    WebElement namePatternField =
            driver.findElement(By.name(mapper.getElement("mb.search.topic.name.pattern.tag.name")));
    namePatternField.clear();
    namePatternField.sendKeys(queueNamePattern);

    WebElement identifierField =
            driver.findElement(By.name(mapper.getElement("mb.search.topic.identifier.pattern.tag.name")));
    identifierField.clear();
    identifierField.sendKeys(identifierPattern);

    // Toggle each check box only when its current state differs from the requested one.
    WebElement nameExactMatchBox = driver.findElement(
            By.name(mapper.getElement("mb.search.topic.name.exactmatch.tag.name")));
    if (nameExactMatchBox.isSelected() != isNameExactMatch) {
        nameExactMatchBox.click();
    }

    WebElement identifierExactMatchBox = driver.findElement(
            By.name(mapper.getElement("mb.search.topic.identifier.exactmatch.tag.name")));
    if (identifierExactMatchBox.isSelected() != isIdentifierExactMatch) {
        identifierExactMatchBox.click();
    }

    new Select(driver.findElement(By.id(mapper.getElement("mb.search.topic.own.node.id.element.id"))))
            .selectByIndex(ownNodeIdIndex);

    driver.findElement(By.xpath(mapper.getElement("mb.search.topic.search.button.xpath"))).click();
}
|
void function(String queueNamePattern, String identifierPattern, int ownNodeIdIndex, boolean isNameExactMatch, boolean isIdentifierExactMatch) { WebElement queueNamePatternField = driver.findElement(By.name(UIElementMapper.getInstance() .getElement(STR))); queueNamePatternField.clear(); queueNamePatternField.sendKeys(queueNamePattern); WebElement queueIdentifierPatternField = driver.findElement(By.name(UIElementMapper.getInstance() .getElement(STR))); queueIdentifierPatternField.clear(); queueIdentifierPatternField.sendKeys(identifierPattern); WebElement topicNameExactMatchField = driver.findElement( By.name(UIElementMapper.getInstance().getElement(STR))); if (isNameExactMatch != topicNameExactMatchField.isSelected()) { topicNameExactMatchField.click(); } WebElement topicIdentifierExactMatchField = driver.findElement( By.name(UIElementMapper.getInstance().getElement(STR))); if (isIdentifierExactMatch != topicIdentifierExactMatchField.isSelected()) { topicIdentifierExactMatchField.click(); } Select ownNodeIdDropdown = new Select(driver.findElement(By.id(UIElementMapper.getInstance() .getElement(STR)))); ownNodeIdDropdown.selectByIndex(ownNodeIdIndex); driver.findElement(By.xpath(UIElementMapper.getInstance() .getElement(STR))).click(); }
|
/**
 * Search topic subscriptions according to the search criteria.
 *
 * @param queueNamePattern string pattern of the topic name (* for all)
 * @param identifierPattern string pattern of the identifier (* for all)
 * @param ownNodeIdIndex index of the node Id in the dropdown the subscriptions belong to
 * @param isNameExactMatch whether the topic name must match exactly
 * @param isIdentifierExactMatch whether the identifier must match exactly
 */
|
Search topic subscriptions according to the search criteria
|
searchTopicSubscriptions
|
{
"repo_name": "milindaperera/product-ei",
"path": "integration/broker-tests/tests-common/integration-tests-utils/src/main/java/org/wso2/mb/integration/common/utils/ui/pages/main/TopicSubscriptionsPage.java",
"license": "apache-2.0",
"size": 10517
}
|
[
"org.openqa.selenium.By",
"org.openqa.selenium.WebElement",
"org.openqa.selenium.support.ui.Select",
"org.wso2.mb.integration.common.utils.ui.UIElementMapper"
] |
import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.ui.Select; import org.wso2.mb.integration.common.utils.ui.UIElementMapper;
|
import org.openqa.selenium.*; import org.openqa.selenium.support.ui.*; import org.wso2.mb.integration.common.utils.ui.*;
|
[
"org.openqa.selenium",
"org.wso2.mb"
] |
org.openqa.selenium; org.wso2.mb;
| 2,502,802
|
/**
 * Returns the meta object for class
 * '{@link org.eclipse.n4js.ts.types.TStructMethod <em>TStruct Method</em>}'.
 *
 * @return the meta object for class '<em>TStruct Method</em>'.
 * @generated
 */
EClass getTStructMethod();
|
EClass getTStructMethod();
|
/**
* Returns the meta object for class '{@link org.eclipse.n4js.ts.types.TStructMethod <em>TStruct Method</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>TStruct Method</em>'.
* @see org.eclipse.n4js.ts.types.TStructMethod
* @generated
*/
|
Returns the meta object for class '<code>org.eclipse.n4js.ts.types.TStructMethod TStruct Method</code>'.
|
getTStructMethod
|
{
"repo_name": "lbeurerkellner/n4js",
"path": "plugins/org.eclipse.n4js.ts.model/emf-gen/org/eclipse/n4js/ts/types/TypesPackage.java",
"license": "epl-1.0",
"size": 538237
}
|
[
"org.eclipse.emf.ecore.EClass"
] |
import org.eclipse.emf.ecore.EClass;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,864,392
|
/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
 * methods: parses the request parameters into a user and registers that
 * user (with hashed password and email-derived activation code) unless an
 * identical user already exists.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 * @throws NoSuchAlgorithmException if the password-hashing algorithm is unavailable
 * @throws InvalidKeySpecException if the password-hashing key spec is invalid
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException, NoSuchAlgorithmException, InvalidKeySpecException {
    Map<String, String[]> parametros = request.getParameterMap();
    String messageToUser = null;
    RegistroServicios servicios = new RegistroServicios();
    User usuario = servicios.tratarParametro(parametros);
    if (!servicios.thisUserExist(usuario)) {
        // Never store the plain password: hash it, and derive the
        // activation code by hashing the email address.
        usuario.setPassword(PasswordHash.getInstance().createHash(usuario.getPassword()));
        usuario.setCodigo_activacion(PasswordHash.getInstance().createHash(usuario.getEmail()));
        servicios.insertUser(usuario);
    } else {
        messageToUser = Constantes.messageUserExist;
    }
    // TODO(review): messageToUser is computed but never written to the
    // response, so the client gets no feedback when the user already
    // exists. The original empty `if (messageToUser != null) {}` block and
    // commented-out mail-sending code were removed as dead code.
}
|
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, NoSuchAlgorithmException, InvalidKeySpecException { Map<String, String[]> parametros = request.getParameterMap(); String messageToUser = null; RegistroServicios servicios = new RegistroServicios(); User usuario = servicios.tratarParametro(parametros); if (!servicios.thisUserExist(usuario)) { usuario.setPassword( PasswordHash.getInstance().createHash(usuario.getPassword())); usuario.setCodigo_activacion(PasswordHash.getInstance().createHash(usuario.getEmail())); servicios.insertUser(usuario); }else{ messageToUser = Constantes.messageUserExist; } if (messageToUser != null) { } }
|
/**
* Processes requests for both HTTP <code>GET</code> and <code>POST</code>
* methods.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
|
Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods
|
processRequest
|
{
"repo_name": "ricardo7227/DAW",
"path": "Servidor/databaseJava/src/main/java/servlets/MailServlet.java",
"license": "gpl-3.0",
"size": 4093
}
|
[
"java.io.IOException",
"java.security.NoSuchAlgorithmException",
"java.security.spec.InvalidKeySpecException",
"java.util.Map",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] |
import java.io.IOException; import java.security.NoSuchAlgorithmException; import java.security.spec.InvalidKeySpecException; import java.util.Map; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse;
|
import java.io.*; import java.security.*; import java.security.spec.*; import java.util.*; import javax.servlet.*; import javax.servlet.http.*;
|
[
"java.io",
"java.security",
"java.util",
"javax.servlet"
] |
java.io; java.security; java.util; javax.servlet;
| 33,071
|
/**
 * Gets the list of POI categories.
 *
 * @param accessTokenPregenerated (required) access_token to user resources
 * @return the list of {@code POICategory}
 * @throws IOException Signals that an I/O exception has occurred.
 * @throws TopoosException the topoos exception
 */
public static List<POICategory> GetAll(AccessTokenOAuth accessTokenPregenerated)
		throws IOException, TopoosException {
	// Pure delegation: the Translator performs the actual API request.
	return Translator.GetAll(accessTokenPregenerated);
}
|
static List<POICategory> function( AccessTokenOAuth accessTokenPregenerated) throws IOException, TopoosException { return Translator.GetAll(accessTokenPregenerated); }
|
/**
* Gets the list of POI categories.
*
* @param accessTokenPregenerated (required) access_token to user resources
* @return List<POICategory>
* @throws IOException Signals that an I/O exception has occurred.
* @throws TopoosException the topoos exception
*/
|
Gets the list of POI categories
|
GetAll
|
{
"repo_name": "topoos/topoos_sdk_android",
"path": "src/topoos/POICategories/Operations.java",
"license": "apache-2.0",
"size": 6209
}
|
[
"java.io.IOException",
"java.util.List"
] |
import java.io.IOException; import java.util.List;
|
import java.io.*; import java.util.*;
|
[
"java.io",
"java.util"
] |
java.io; java.util;
| 1,992,944
|
/**
 * Retrieve the id-generator by name.
 *
 * @param name The generator name.
 * @return The generator, or null if none is registered under that name.
 */
public IdGenerator getGenerator(String name);
|
IdGenerator function(String name);
|
/**
* Retrieve the id-generator by name.
*
* @param name The generator name.
*
* @return The generator, or null.
*/
|
Retrieve the id-generator by name
|
getGenerator
|
{
"repo_name": "kevin-chen-hw/LDAE",
"path": "com.huawei.soa.ldae/src/main/java/org/hibernate/cfg/Mappings.java",
"license": "lgpl-2.1",
"size": 27333
}
|
[
"org.hibernate.mapping.IdGenerator"
] |
import org.hibernate.mapping.IdGenerator;
|
import org.hibernate.mapping.*;
|
[
"org.hibernate.mapping"
] |
org.hibernate.mapping;
| 819,938
|
/**
 * Determines whether one Location reading is better than the current
 * Location fix, using a combination of recency and accuracy.
 *
 * @param location The new Location that you want to evaluate
 * @param currentBestLocation The current Location fix, to which you want to compare the new one; may be null
 * @return true if the new reading should replace the current best fix
 */
protected boolean isBetterLocation(Location location, Location currentBestLocation) {
	if (currentBestLocation == null) {
		// Any fix beats no fix at all.
		return true;
	}

	final long timeDelta = location.getTime() - currentBestLocation.getTime();
	if (timeDelta > TWO_MINUTES) {
		// The user has likely moved since the old fix; prefer the new one.
		return true;
	}
	if (timeDelta < -TWO_MINUTES) {
		// A fix more than two minutes older must be worse.
		return false;
	}
	final boolean newer = timeDelta > 0;

	final int accuracyDelta = (int) (location.getAccuracy() - currentBestLocation.getAccuracy());
	if (accuracyDelta < 0) {
		// Strictly more accurate always wins within the two-minute window.
		return true;
	}
	if (newer && accuracyDelta <= 0) {
		// Newer and at least as accurate.
		return true;
	}
	// Newer, not significantly less accurate, and from the same provider.
	final boolean sameProvider = isSameProvider(location.getProvider(),
			currentBestLocation.getProvider());
	return newer && accuracyDelta <= 200 && sameProvider;
}
|
boolean function(Location location, Location currentBestLocation) { if (currentBestLocation == null) { return true; } long timeDelta = location.getTime() - currentBestLocation.getTime(); boolean isSignificantlyNewer = timeDelta > TWO_MINUTES; boolean isSignificantlyOlder = timeDelta < -TWO_MINUTES; boolean isNewer = timeDelta > 0; if (isSignificantlyNewer) { return true; } else if (isSignificantlyOlder) { return false; } int accuracyDelta = (int) (location.getAccuracy() - currentBestLocation.getAccuracy()); boolean isLessAccurate = accuracyDelta > 0; boolean isMoreAccurate = accuracyDelta < 0; boolean isSignificantlyLessAccurate = accuracyDelta > 200; boolean isFromSameProvider = isSameProvider(location.getProvider(), currentBestLocation.getProvider()); if (isMoreAccurate) { return true; } else if (isNewer && !isLessAccurate) { return true; } else if (isNewer && !isSignificantlyLessAccurate && isFromSameProvider) { return true; } return false; }
|
/** Determines whether one Location reading is better than the current Location fix
* @param location The new Location that you want to evaluate
* @param currentBestLocation The current Location fix, to which you want to compare the new one
*/
|
Determines whether one Location reading is better than the current Location fix
|
isBetterLocation
|
{
"repo_name": "tum-social-gaming/SocialGamingClient",
"path": "src/de/tum/socialcomp/android/sensor/LocationChangeListener.java",
"license": "apache-2.0",
"size": 4529
}
|
[
"android.location.Location"
] |
import android.location.Location;
|
import android.location.*;
|
[
"android.location"
] |
android.location;
| 2,514,262
|
/**
 * Registers a new scope context. If a context is already registered for the
 * same scope annotation, an ErrorContext wrapping an IllegalStateException
 * is stored instead, so the conflict surfaces lazily on use rather than
 * being thrown here (ioc/0p41 - CDI TCK).
 *
 * @param context the scope context to register
 */
public void addContext(Context context)
{
  Class<? extends Annotation> scopeType = context.getScope();

  if (_contextMap.get(scopeType) == null) {
    _contextMap.put(context.getScope(), context);
    return;
  }

  // ioc/0p41 - CDI TCK: duplicate scope registration is an error that is
  // reported via an ErrorContext stored in place of the new context.
  RuntimeException exn
    = new IllegalStateException(L.l("{0} is an invalid new context because @{1} is already registered as a scope",
                                    context, scopeType.getName()));

  _contextMap.put(context.getScope(), new ErrorContext(exn, context));
}
|
void function(Context context) { Class<? extends Annotation> scopeType = context.getScope(); Context oldContext = _contextMap.get(scopeType); if (oldContext == null) { _contextMap.put(context.getScope(), context); } else { RuntimeException exn = new IllegalStateException(L.l(STR, context, scopeType.getName())); _contextMap.put(context.getScope(), new ErrorContext(exn, context)); } }
|
/**
* Adds a new scope context
*/
|
Adds a new scope context
|
addContext
|
{
"repo_name": "CleverCloud/Quercus",
"path": "resin/src/main/java/com/caucho/config/inject/InjectManager.java",
"license": "gpl-2.0",
"size": 120951
}
|
[
"com.caucho.config.scope.ErrorContext",
"java.lang.annotation.Annotation",
"javax.enterprise.context.spi.Context"
] |
import com.caucho.config.scope.ErrorContext; import java.lang.annotation.Annotation; import javax.enterprise.context.spi.Context;
|
import com.caucho.config.scope.*; import java.lang.annotation.*; import javax.enterprise.context.spi.*;
|
[
"com.caucho.config",
"java.lang",
"javax.enterprise"
] |
com.caucho.config; java.lang; javax.enterprise;
| 425,948
|
/**
 * Verifies that isMixin() returns false when applied to a primary node type
 * and, when at least one mixin node type exists, true when applied to a
 * mixin node type. The mixin half of the check is skipped otherwise.
 *
 * @throws RepositoryException on repository access errors
 */
public void testIsMixin() throws RepositoryException {
    NodeTypeIterator primaryTypes = manager.getPrimaryNodeTypes();
    assertFalse("testIsMixin() must return false if applied on a primary node type",
            primaryTypes.nextNodeType().isMixin());

    // getSize() may consume the iterator, so the mixin iterator is
    // re-acquired before the actual assertion.
    if (getSize(manager.getMixinNodeTypes()) > 0) {
        NodeTypeIterator mixinTypes = manager.getMixinNodeTypes();
        assertTrue("testIsMixin() must return true if applied on a mixin node type",
                mixinTypes.nextNodeType().isMixin());
    }
}
|
void function() throws RepositoryException { NodeTypeIterator primaryTypes = manager.getPrimaryNodeTypes(); assertFalse(STR + STR, primaryTypes.nextNodeType().isMixin()); NodeTypeIterator mixinTypes = manager.getMixinNodeTypes(); if (getSize(mixinTypes) > 0) { mixinTypes = manager.getMixinNodeTypes(); assertTrue(STR + STR, mixinTypes.nextNodeType().isMixin()); } }
|
/**
* Test if isMixin() returns false if applied on a primary node type and true
* on a mixin node type.
*/
|
Test if isMixin() returns false if applied on a primary node type and true on a mixin node type
|
testIsMixin
|
{
"repo_name": "jalkanen/Priha",
"path": "tests/tck/org/apache/jackrabbit/test/api/nodetype/NodeTypeTest.java",
"license": "apache-2.0",
"size": 14796
}
|
[
"javax.jcr.RepositoryException",
"javax.jcr.nodetype.NodeTypeIterator"
] |
import javax.jcr.RepositoryException; import javax.jcr.nodetype.NodeTypeIterator;
|
import javax.jcr.*; import javax.jcr.nodetype.*;
|
[
"javax.jcr"
] |
javax.jcr;
| 2,183,505
|
/**
 * Encodes a NettyDataPack into a single ChannelBuffer: one pack header
 * (serial number and list size) followed by each data buffer prefixed
 * with its own length field.
 *
 * @param ctx the handler context
 * @param channel the channel being written to
 * @param msg NettyDataPack from NettyServerAvroHandler/NettyClientAvroHandler in the pipeline
 * @return the encoded ChannelBuffer
 * @throws Exception if encoding fails
 */
@Override
protected Object encode(ChannelHandlerContext ctx, Channel channel, Object msg)
    throws Exception {
  NettyDataPack pack = (NettyDataPack) msg;
  List<ByteBuffer> payloads = pack.getDatas();

  // One header slot plus two slots (length field + data) per payload buffer.
  List<ByteBuffer> framed = new ArrayList<ByteBuffer>(payloads.size() * 2 + 1);
  framed.add(getPackHeader(pack));
  for (ByteBuffer payload : payloads) {
    framed.add(getLengthHeader(payload));
    framed.add(payload);
  }
  return ChannelBuffers.wrappedBuffer(framed.toArray(new ByteBuffer[framed.size()]));
}
|
Object function(ChannelHandlerContext ctx, Channel channel, Object msg) throws Exception { NettyDataPack dataPack = (NettyDataPack)msg; List<ByteBuffer> origs = dataPack.getDatas(); List<ByteBuffer> bbs = new ArrayList<ByteBuffer>(origs.size() * 2 + 1); bbs.add(getPackHeader(dataPack)); for (ByteBuffer b : origs) { bbs.add(getLengthHeader(b)); bbs.add(b); } return ChannelBuffers .wrappedBuffer(bbs.toArray(new ByteBuffer[bbs.size()])); }
|
/**
* encode msg to ChannelBuffer
* @param msg NettyDataPack from
* NettyServerAvroHandler/NettyClientAvroHandler in the pipeline
* @return encoded ChannelBuffer
*/
|
encode msg to ChannelBuffer
|
encode
|
{
"repo_name": "cloudera/avro",
"path": "lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransportCodec.java",
"license": "apache-2.0",
"size": 5423
}
|
[
"java.nio.ByteBuffer",
"java.util.ArrayList",
"java.util.List",
"org.jboss.netty.buffer.ChannelBuffers",
"org.jboss.netty.channel.Channel",
"org.jboss.netty.channel.ChannelHandlerContext"
] |
import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelHandlerContext;
|
import java.nio.*; import java.util.*; import org.jboss.netty.buffer.*; import org.jboss.netty.channel.*;
|
[
"java.nio",
"java.util",
"org.jboss.netty"
] |
java.nio; java.util; org.jboss.netty;
| 136,986
|
EAttribute getEventStatistic_AvgDuration();
|
EAttribute getEventStatistic_AvgDuration();
|
/**
* Returns the meta object for the attribute '{@link ch.elexis.core.ui.usage.model.EventStatistic#getAvgDuration <em>Avg Duration</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Avg Duration</em>'.
* @see ch.elexis.core.ui.usage.model.EventStatistic#getAvgDuration()
* @see #getEventStatistic()
* @generated
*/
|
Returns the meta object for the attribute '<code>ch.elexis.core.ui.usage.model.EventStatistic#getAvgDuration Avg Duration</code>'.
|
getEventStatistic_AvgDuration
|
{
"repo_name": "elexis/elexis-3-core",
"path": "bundles/ch.elexis.core.ui.usage/src-gen/ch/elexis/core/ui/usage/model/ModelPackage.java",
"license": "epl-1.0",
"size": 19236
}
|
[
"org.eclipse.emf.ecore.EAttribute"
] |
import org.eclipse.emf.ecore.EAttribute;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 1,259,377
|
public int count() {
int count = 0;
for (List<ConsumerRecord<K, V>> recs : this.records.values())
count += recs.size();
return count;
}
private static class ConcatenatedIterable<K, V> implements Iterable<ConsumerRecord<K, V>> {
private final Iterable<? extends Iterable<ConsumerRecord<K, V>>> iterables;
public ConcatenatedIterable(Iterable<? extends Iterable<ConsumerRecord<K, V>>> iterables) {
this.iterables = iterables;
}
|
int function() { int count = 0; for (List<ConsumerRecord<K, V>> recs : this.records.values()) count += recs.size(); return count; } private static class ConcatenatedIterable<K, V> implements Iterable<ConsumerRecord<K, V>> { private final Iterable<? extends Iterable<ConsumerRecord<K, V>>> iterables; public ConcatenatedIterable(Iterable<? extends Iterable<ConsumerRecord<K, V>>> iterables) { this.iterables = iterables; }
|
/**
* The number of records for all topics
*/
|
The number of records for all topics
|
count
|
{
"repo_name": "lemonJun/TakinMQ",
"path": "takinmq-kclient/src/main/java/org/apache/kafka/clients/consumer/ConsumerRecords.java",
"license": "apache-2.0",
"size": 4699
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,976,937
|
@Test
public void testCorruptTwoOutOfThreeReplicas() throws Exception {
final short repl = 3;
final int corruptBlocReplicas = 2;
for (int i = 0; i < 2; i++) {
String fileName =
"/tmp/testClientReportBadBlock/CorruptTwoOutOfThreeReplicas"+ i;
Path filePath = new Path(fileName);
createAFileWithCorruptedBlockReplicas(filePath, repl, corruptBlocReplicas);
int replicaCount = 0;
while (replicaCount != repl - corruptBlocReplicas) {
if (i == 0) {
dfsClientReadFile(filePath);
} else {
dfsClientReadFileFromPosition(filePath);
}
LocatedBlocks blocks = dfs.dfs.getNamenode().
getBlockLocations(filePath.toString(), 0, Long.MAX_VALUE);
replicaCount = blocks.get(0).getLocations().length;
}
verifyFirstBlockCorrupted(filePath, false);
int expectedReplicaCount = repl-corruptBlocReplicas;
verifyCorruptedBlockCount(filePath, expectedReplicaCount);
verifyFsckHealth("Target Replicas is 3 but found 1 live replica");
testFsckListCorruptFilesBlocks(filePath, 0);
}
}
|
void function() throws Exception { final short repl = 3; final int corruptBlocReplicas = 2; for (int i = 0; i < 2; i++) { String fileName = STR+ i; Path filePath = new Path(fileName); createAFileWithCorruptedBlockReplicas(filePath, repl, corruptBlocReplicas); int replicaCount = 0; while (replicaCount != repl - corruptBlocReplicas) { if (i == 0) { dfsClientReadFile(filePath); } else { dfsClientReadFileFromPosition(filePath); } LocatedBlocks blocks = dfs.dfs.getNamenode(). getBlockLocations(filePath.toString(), 0, Long.MAX_VALUE); replicaCount = blocks.get(0).getLocations().length; } verifyFirstBlockCorrupted(filePath, false); int expectedReplicaCount = repl-corruptBlocReplicas; verifyCorruptedBlockCount(filePath, expectedReplicaCount); verifyFsckHealth(STR); testFsckListCorruptFilesBlocks(filePath, 0); } }
|
/**
* This test creates a file with three block replicas. Corrupt two of the
* replicas. Make dfs client read the file. The corrupted blocks with their
* owner data nodes should be reported to the name node.
*/
|
This test creates a file with three block replicas. Corrupt two of the replicas. Make dfs client read the file. The corrupted blocks with their owner data nodes should be reported to the name node
|
testCorruptTwoOutOfThreeReplicas
|
{
"repo_name": "mapr/hadoop-common",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestClientReportBadBlock.java",
"license": "apache-2.0",
"size": 13332
}
|
[
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hdfs.protocol.LocatedBlocks"
] |
import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
|
import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.protocol.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 1,142,739
|
public static <NodeType extends IGraphNode<NodeType> & ISelectableNode> void expandSelectionUp(
final ISelectableGraph<NodeType> graph) {
Preconditions.checkNotNull(graph, "Error: Graph argument can't be null");
final ArrayList<NodeType> toSelect = new ArrayList<NodeType>();
final INodeFilter<NodeType> deselectedFilter = StandardFilters.getDeselectedFilter();
|
static <NodeType extends IGraphNode<NodeType> & ISelectableNode> void function( final ISelectableGraph<NodeType> graph) { Preconditions.checkNotNull(graph, STR); final ArrayList<NodeType> toSelect = new ArrayList<NodeType>(); final INodeFilter<NodeType> deselectedFilter = StandardFilters.getDeselectedFilter();
|
/**
* Expands the current selection up. This means that all parents of the currently selected nodes
* are selected too.
*
* @param <NodeType> Type of the nodes in the graph.
*
* @param graph The graph in question.
*/
|
Expands the current selection up. This means that all parents of the currently selected nodes are selected too
|
expandSelectionUp
|
{
"repo_name": "guiquanz/binnavi",
"path": "src/main/java/com/google/security/zynamics/zylib/gui/zygraph/helpers/GraphHelpers.java",
"license": "apache-2.0",
"size": 18670
}
|
[
"com.google.common.base.Preconditions",
"com.google.security.zynamics.zylib.types.graphs.IGraphNode",
"java.util.ArrayList"
] |
import com.google.common.base.Preconditions; import com.google.security.zynamics.zylib.types.graphs.IGraphNode; import java.util.ArrayList;
|
import com.google.common.base.*; import com.google.security.zynamics.zylib.types.graphs.*; import java.util.*;
|
[
"com.google.common",
"com.google.security",
"java.util"
] |
com.google.common; com.google.security; java.util;
| 651,201
|
public static boolean isInClassPath(URL location) throws MalformedURLException {
String classPath = System.getProperty("java.class.path");
StringTokenizer st = new StringTokenizer(classPath, File.pathSeparator);
while (st.hasMoreTokens()) {
String path =st.nextToken();
if (location.equals(new File(path).toURI().toURL())) {
return true;
}
}
return false;
}
|
static boolean function(URL location) throws MalformedURLException { String classPath = System.getProperty(STR); StringTokenizer st = new StringTokenizer(classPath, File.pathSeparator); while (st.hasMoreTokens()) { String path =st.nextToken(); if (location.equals(new File(path).toURI().toURL())) { return true; } } return false; }
|
/**
* Returns true if the specified location is in the JVM classpath. This may
* ignore additions to the classpath that are not reflected by the value in
* <code>System.getProperty("java.class.path")</code>.
*
* @param location the directory or jar URL to test for
* @return true if location is in the JVM classpath
* @throws MalformedURLException
*/
|
Returns true if the specified location is in the JVM classpath. This may ignore additions to the classpath that are not reflected by the value in <code>System.getProperty("java.class.path")</code>
|
isInClassPath
|
{
"repo_name": "sshcherbakov/incubator-geode",
"path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/lang/SystemUtils.java",
"license": "apache-2.0",
"size": 9232
}
|
[
"java.io.File",
"java.net.MalformedURLException",
"java.util.StringTokenizer"
] |
import java.io.File; import java.net.MalformedURLException; import java.util.StringTokenizer;
|
import java.io.*; import java.net.*; import java.util.*;
|
[
"java.io",
"java.net",
"java.util"
] |
java.io; java.net; java.util;
| 1,428,562
|
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
public SyncPoller<PollResult<String>, String> beginStopPacketCapture(
String resourceGroupName, String gatewayName, VpnGatewayPacketCaptureStopParameters parameters) {
return beginStopPacketCaptureAsync(resourceGroupName, gatewayName, parameters).getSyncPoller();
}
|
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) SyncPoller<PollResult<String>, String> function( String resourceGroupName, String gatewayName, VpnGatewayPacketCaptureStopParameters parameters) { return beginStopPacketCaptureAsync(resourceGroupName, gatewayName, parameters).getSyncPoller(); }
|
/**
* Stops packet capture on vpn gateway in the specified resource group.
*
* @param resourceGroupName The resource group name of the VpnGateway.
* @param gatewayName The name of the gateway.
* @param parameters Vpn gateway packet capture parameters supplied to stop packet capture on vpn gateway.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of long-running operation.
*/
|
Stops packet capture on vpn gateway in the specified resource group
|
beginStopPacketCapture
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/implementation/VpnGatewaysClientImpl.java",
"license": "mit",
"size": 124002
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.management.polling.PollResult",
"com.azure.core.util.polling.SyncPoller",
"com.azure.resourcemanager.network.models.VpnGatewayPacketCaptureStopParameters"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.management.polling.PollResult; import com.azure.core.util.polling.SyncPoller; import com.azure.resourcemanager.network.models.VpnGatewayPacketCaptureStopParameters;
|
import com.azure.core.annotation.*; import com.azure.core.management.polling.*; import com.azure.core.util.polling.*; import com.azure.resourcemanager.network.models.*;
|
[
"com.azure.core",
"com.azure.resourcemanager"
] |
com.azure.core; com.azure.resourcemanager;
| 682,133
|
public void load(RulesDefinition.NewRepository repo, Reader reader) {
XMLInputFactory xmlFactory = XMLInputFactory.newInstance();
xmlFactory.setProperty(XMLInputFactory.IS_COALESCING, Boolean.TRUE);
xmlFactory.setProperty(XMLInputFactory.IS_NAMESPACE_AWARE, Boolean.FALSE);
// just so it won't try to load DTD in if there's DOCTYPE
xmlFactory.setProperty(XMLInputFactory.SUPPORT_DTD, Boolean.FALSE);
xmlFactory.setProperty(XMLInputFactory.IS_VALIDATING, Boolean.FALSE);
SMInputFactory inputFactory = new SMInputFactory(xmlFactory);
try {
SMHierarchicCursor rootC = inputFactory.rootElementCursor(reader);
rootC.advance(); // <rules>
SMInputCursor rulesC = rootC.childElementCursor("rule");
while (rulesC.getNext() != null) {
// <rule>
processRule(repo, rulesC);
}
} catch (XMLStreamException e) {
throw new IllegalStateException("XML is not valid", e);
}
}
|
void function(RulesDefinition.NewRepository repo, Reader reader) { XMLInputFactory xmlFactory = XMLInputFactory.newInstance(); xmlFactory.setProperty(XMLInputFactory.IS_COALESCING, Boolean.TRUE); xmlFactory.setProperty(XMLInputFactory.IS_NAMESPACE_AWARE, Boolean.FALSE); xmlFactory.setProperty(XMLInputFactory.SUPPORT_DTD, Boolean.FALSE); xmlFactory.setProperty(XMLInputFactory.IS_VALIDATING, Boolean.FALSE); SMInputFactory inputFactory = new SMInputFactory(xmlFactory); try { SMHierarchicCursor rootC = inputFactory.rootElementCursor(reader); rootC.advance(); SMInputCursor rulesC = rootC.childElementCursor("rule"); while (rulesC.getNext() != null) { processRule(repo, rulesC); } } catch (XMLStreamException e) { throw new IllegalStateException(STR, e); } }
|
/**
* Loads rules by reading the XML input stream. The reader is not closed by the method, so it
* should be handled by the caller.
* @since 4.3
*/
|
Loads rules by reading the XML input stream. The reader is not closed by the method, so it should be handled by the caller
|
load
|
{
"repo_name": "joansmith/sonarqube",
"path": "sonar-plugin-api/src/main/java/org/sonar/api/server/rule/RulesDefinitionXmlLoader.java",
"license": "lgpl-3.0",
"size": 16696
}
|
[
"java.io.Reader",
"javax.xml.stream.XMLInputFactory",
"javax.xml.stream.XMLStreamException",
"org.codehaus.staxmate.SMInputFactory",
"org.codehaus.staxmate.in.SMHierarchicCursor",
"org.codehaus.staxmate.in.SMInputCursor"
] |
import java.io.Reader; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import org.codehaus.staxmate.SMInputFactory; import org.codehaus.staxmate.in.SMHierarchicCursor; import org.codehaus.staxmate.in.SMInputCursor;
|
import java.io.*; import javax.xml.stream.*; import org.codehaus.staxmate.*; import org.codehaus.staxmate.in.*;
|
[
"java.io",
"javax.xml",
"org.codehaus.staxmate"
] |
java.io; javax.xml; org.codehaus.staxmate;
| 2,015,180
|
@Test
public void testMaintenanceStopTests()
{
ITestAction mockTest = createMock(ITestAction.class);
expect(mockTest.getActionType())
.andReturn("MockTest").times(2);
replay(mockTest);
this.rig.register(mockTest, ActionType.TEST);
verify(mockTest);
reset(mockTest);
mockTest.stopTest();
expectLastCall();
replay(mockTest);
assertTrue(this.rig.setMaintenance(true, "Test reason", false));
assertFalse(this.rig.isNotInMaintenance());
assertEquals("Test reason", this.rig.getMaintenanceReason());
reset(mockTest);
mockTest.startTest();
expectLastCall();
replay(mockTest);
assertTrue(this.rig.setMaintenance(false, null, true));
assertTrue(this.rig.isNotInMaintenance());
assertNull(this.rig.getMaintenanceReason());
verify(mockTest);
}
|
void function() { ITestAction mockTest = createMock(ITestAction.class); expect(mockTest.getActionType()) .andReturn(STR).times(2); replay(mockTest); this.rig.register(mockTest, ActionType.TEST); verify(mockTest); reset(mockTest); mockTest.stopTest(); expectLastCall(); replay(mockTest); assertTrue(this.rig.setMaintenance(true, STR, false)); assertFalse(this.rig.isNotInMaintenance()); assertEquals(STR, this.rig.getMaintenanceReason()); reset(mockTest); mockTest.startTest(); expectLastCall(); replay(mockTest); assertTrue(this.rig.setMaintenance(false, null, true)); assertTrue(this.rig.isNotInMaintenance()); assertNull(this.rig.getMaintenanceReason()); verify(mockTest); }
|
/**
* Tests the <code>AbstractRig.setMaintenance</code> method with
* a session active. This should terminate the session and revoke
* the user.
*/
|
Tests the <code>AbstractRig.setMaintenance</code> method with a session active. This should terminate the session and revoke the user
|
testMaintenanceStopTests
|
{
"repo_name": "sahara-labs/rig-client",
"path": "src/au/edu/uts/eng/remotelabs/rigclient/rig/tests/AbstractRigTester.java",
"license": "bsd-3-clause",
"size": 42602
}
|
[
"au.edu.uts.eng.remotelabs.rigclient.rig.AbstractRig",
"au.edu.uts.eng.remotelabs.rigclient.rig.ITestAction",
"org.easymock.EasyMock"
] |
import au.edu.uts.eng.remotelabs.rigclient.rig.AbstractRig; import au.edu.uts.eng.remotelabs.rigclient.rig.ITestAction; import org.easymock.EasyMock;
|
import au.edu.uts.eng.remotelabs.rigclient.rig.*; import org.easymock.*;
|
[
"au.edu.uts",
"org.easymock"
] |
au.edu.uts; org.easymock;
| 260,737
|
@Override
void setIndex(Transaction xa,
byte []block, int rowOffset,
long rowAddr, QueryContext context)
throws SQLException
{
BTree index = getIndex();
if (index == null)
return;
index.insert(block, rowOffset + _columnOffset, 2, rowAddr, false);
}
|
void setIndex(Transaction xa, byte []block, int rowOffset, long rowAddr, QueryContext context) throws SQLException { BTree index = getIndex(); if (index == null) return; index.insert(block, rowOffset + _columnOffset, 2, rowAddr, false); }
|
/**
* Sets any index for the column.
*
* @param block the block's buffer
* @param rowOffset the offset of the row in the block
* @param rowAddr the address of the row
*/
|
Sets any index for the column
|
setIndex
|
{
"repo_name": "christianchristensen/resin",
"path": "modules/resin/src/com/caucho/db/table/ShortColumn.java",
"license": "gpl-2.0",
"size": 7781
}
|
[
"com.caucho.db.index.BTree",
"com.caucho.db.sql.QueryContext",
"com.caucho.db.xa.Transaction",
"java.sql.SQLException"
] |
import com.caucho.db.index.BTree; import com.caucho.db.sql.QueryContext; import com.caucho.db.xa.Transaction; import java.sql.SQLException;
|
import com.caucho.db.index.*; import com.caucho.db.sql.*; import com.caucho.db.xa.*; import java.sql.*;
|
[
"com.caucho.db",
"java.sql"
] |
com.caucho.db; java.sql;
| 2,561,469
|
void save(Media media);
|
void save(Media media);
|
/**
* Save a media into DB.
*
* @param media the media
*/
|
Save a media into DB
|
save
|
{
"repo_name": "resourcepool/dashboard",
"path": "dashboard-back/src/main/java/io/resourcepool/dashboard/dao/MediaDao.java",
"license": "apache-2.0",
"size": 1389
}
|
[
"io.resourcepool.dashboard.model.Media"
] |
import io.resourcepool.dashboard.model.Media;
|
import io.resourcepool.dashboard.model.*;
|
[
"io.resourcepool.dashboard"
] |
io.resourcepool.dashboard;
| 549,803
|
@AfterAll
public static void afterTests()
{
Medias.setResourcesDirectory(null);
}
|
static void function() { Medias.setResourcesDirectory(null); }
|
/**
* Clean up test.
*/
|
Clean up test
|
afterTests
|
{
"repo_name": "b3dgs/lionengine",
"path": "lionengine-game/src/test/java/com/b3dgs/lionengine/game/feature/ActionConfigTest.java",
"license": "gpl-3.0",
"size": 4155
}
|
[
"com.b3dgs.lionengine.Medias"
] |
import com.b3dgs.lionengine.Medias;
|
import com.b3dgs.lionengine.*;
|
[
"com.b3dgs.lionengine"
] |
com.b3dgs.lionengine;
| 1,249,537
|
public List removeQueuedFilterProfileMsgs(InternalDistributedMember member) {
synchronized (this.filterProfileMsgQueue) {
if (this.filterProfileMsgQueue.containsKey(member)) {
return new LinkedList(this.filterProfileMsgQueue.remove(member));
}
}
return Collections.emptyList();
}
|
List function(InternalDistributedMember member) { synchronized (this.filterProfileMsgQueue) { if (this.filterProfileMsgQueue.containsKey(member)) { return new LinkedList(this.filterProfileMsgQueue.remove(member)); } } return Collections.emptyList(); }
|
/**
* Removes the filter profile messages from the queue that are received while the members cache
* profile exchange was in progress.
*
* @param member whose messages are returned.
* @return filter profile messages that are queued for the member.
*/
|
Removes the filter profile messages from the queue that are received while the members cache profile exchange was in progress
|
removeQueuedFilterProfileMsgs
|
{
"repo_name": "charliemblack/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/cache/FilterProfile.java",
"license": "apache-2.0",
"size": 78958
}
|
[
"java.util.Collections",
"java.util.LinkedList",
"java.util.List",
"org.apache.geode.distributed.internal.membership.InternalDistributedMember"
] |
import java.util.Collections; import java.util.LinkedList; import java.util.List; import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
|
import java.util.*; import org.apache.geode.distributed.internal.membership.*;
|
[
"java.util",
"org.apache.geode"
] |
java.util; org.apache.geode;
| 1,623,835
|
private Optional<String> getPreferredNode(final List<String> nodeNames) {
if (CollectionUtils.isNotEmpty(nodeNames)) {
for (final String nodeName : nodeNames) {
final String possibleRack = racksPerNode.get(nodeName);
if (possibleRack != null
&& freeNodesPerRack.get(possibleRack).containsKey(nodeName)) {
return Optional.of(nodeName);
}
}
}
return Optional.empty();
}
|
Optional<String> function(final List<String> nodeNames) { if (CollectionUtils.isNotEmpty(nodeNames)) { for (final String nodeName : nodeNames) { final String possibleRack = racksPerNode.get(nodeName); if (possibleRack != null && freeNodesPerRack.get(possibleRack).containsKey(nodeName)) { return Optional.of(nodeName); } } } return Optional.empty(); }
|
/**
* Returns the node name of the container to be allocated if it's available, selected from the list of preferred
* node names. If the list is empty, then an empty optional is returned
*
* @param nodeNames
* the list of preferred nodes
* @return the node name where to allocate the container
*/
|
Returns the node name of the container to be allocated if it's available, selected from the list of preferred node names. If the list is empty, then an empty optional is returned
|
getPreferredNode
|
{
"repo_name": "zerg-junior/incubator-reef",
"path": "lang/java/reef-runtime-local/src/main/java/org/apache/reef/runtime/local/driver/ContainerManager.java",
"license": "apache-2.0",
"size": 16089
}
|
[
"java.util.List",
"org.apache.reef.util.CollectionUtils",
"org.apache.reef.util.Optional"
] |
import java.util.List; import org.apache.reef.util.CollectionUtils; import org.apache.reef.util.Optional;
|
import java.util.*; import org.apache.reef.util.*;
|
[
"java.util",
"org.apache.reef"
] |
java.util; org.apache.reef;
| 593,747
|
public SDBResult replaceAttributes(String identifier, Map<String, Set<String>> attributes) throws SDBException {
Map<String, String> params = new HashMap<String, String>();
params.put("DomainName", domainName);
params.put("ItemName", identifier);
int i=1;
for (String key : attributes.keySet()) {
Set<String> vals = attributes.get(key);
if (vals != null && vals.size() > 0) {
Iterator<String> iter = vals.iterator();
while (iter.hasNext()) {
String val = iter.next();
params.put("Attribute."+i+".Name", key);
params.put("Attribute."+i+".Value", val);
params.put("Attribute."+i+".Replace", "true");
i++;
}
}
}
GetMethod method = new GetMethod();
try {
PutAttributesResponse response =
makeRequestInt(method, "PutAttributes", params, PutAttributesResponse.class);
if (cache != null) {
// create new item object
Item newItem = new ItemVO(identifier);
Map<String, Set<String>> attrs = newItem.getAttributes();
// throw attrs into it
attrs.putAll(attributes);
Item old = cache.getItem(identifier);
if (old != null) {
// merge cached attrs
attrs.putAll(old.getAttributes());
}
// place/replace item in cache
cache.putItem(newItem);
}
return new SDBResult(null,
response.getResponseMetadata().getRequestId(),
response.getResponseMetadata().getBoxUsage());
} finally {
method.releaseConnection();
}
}
|
SDBResult function(String identifier, Map<String, Set<String>> attributes) throws SDBException { Map<String, String> params = new HashMap<String, String>(); params.put(STR, domainName); params.put(STR, identifier); int i=1; for (String key : attributes.keySet()) { Set<String> vals = attributes.get(key); if (vals != null && vals.size() > 0) { Iterator<String> iter = vals.iterator(); while (iter.hasNext()) { String val = iter.next(); params.put(STR+i+".Name", key); params.put(STR+i+STR, val); params.put(STR+i+STR, "true"); i++; } } } GetMethod method = new GetMethod(); try { PutAttributesResponse response = makeRequestInt(method, STR, params, PutAttributesResponse.class); if (cache != null) { Item newItem = new ItemVO(identifier); Map<String, Set<String>> attrs = newItem.getAttributes(); attrs.putAll(attributes); Item old = cache.getItem(identifier); if (old != null) { attrs.putAll(old.getAttributes()); } cache.putItem(newItem); } return new SDBResult(null, response.getResponseMetadata().getRequestId(), response.getResponseMetadata().getBoxUsage()); } finally { method.releaseConnection(); } }
|
/**
* Replace attributes on an item. Using this call will force attribute values to be
* with the new ones supplied.
*
* @param identifier the name of the item to be added
* @param attributes the attributes to associate with this item
* @throws SDBException wraps checked exceptions
*/
|
Replace attributes on an item. Using this call will force attribute values to be with the new ones supplied
|
replaceAttributes
|
{
"repo_name": "jonnyzzz/maragogype",
"path": "branches/newsdb/java/com/xerox/amazonws/simpledb/Domain.java",
"license": "apache-2.0",
"size": 16530
}
|
[
"com.xerox.amazonws.typica.sdb.jaxb.PutAttributesResponse",
"java.util.HashMap",
"java.util.Iterator",
"java.util.Map",
"java.util.Set",
"org.apache.commons.httpclient.methods.GetMethod"
] |
import com.xerox.amazonws.typica.sdb.jaxb.PutAttributesResponse; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import org.apache.commons.httpclient.methods.GetMethod;
|
import com.xerox.amazonws.typica.sdb.jaxb.*; import java.util.*; import org.apache.commons.httpclient.methods.*;
|
[
"com.xerox.amazonws",
"java.util",
"org.apache.commons"
] |
com.xerox.amazonws; java.util; org.apache.commons;
| 1,243,585
|
public EntityIdentifier[] searchForGroups(String query, int method, Class leaftype) throws GroupsException;
|
EntityIdentifier[] function(String query, int method, Class leaftype) throws GroupsException;
|
/**
* Find EntityIdentifiers for groups whose name matches the query string
* according to the specified method and matches the provided leaf type
*/
|
Find EntityIdentifiers for groups whose name matches the query string according to the specified method and matches the provided leaf type
|
searchForGroups
|
{
"repo_name": "vbonamy/esup-uportal",
"path": "uportal-war/src/main/java/org/jasig/portal/groups/IGroupService.java",
"license": "apache-2.0",
"size": 4961
}
|
[
"org.jasig.portal.EntityIdentifier"
] |
import org.jasig.portal.EntityIdentifier;
|
import org.jasig.portal.*;
|
[
"org.jasig.portal"
] |
org.jasig.portal;
| 78,090
|
public GQuery keydown(int key) {
return trigger(Event.ONKEYDOWN, key);
}
|
GQuery function(int key) { return trigger(Event.ONKEYDOWN, key); }
|
/**
* Trigger a keydown event passing the key pushed.
*/
|
Trigger a keydown event passing the key pushed
|
keydown
|
{
"repo_name": "stori-es/stori_es",
"path": "dashboard/src/main/java/com/google/gwt/query/client/GQuery.java",
"license": "apache-2.0",
"size": 177285
}
|
[
"com.google.gwt.user.client.Event"
] |
import com.google.gwt.user.client.Event;
|
import com.google.gwt.user.client.*;
|
[
"com.google.gwt"
] |
com.google.gwt;
| 2,364,304
|
protected static synchronized void init(JHOVE2 jhove2)
throws JHOVE2Exception
{
if (spaces == null) {
spaces = new TreeSet<ColourSpace>();
Properties props = jhove2.getConfigInfo().getProperties("ColourSpaces");
if (props != null) {
Set<String> set = props.stringPropertyNames();
Iterator<String> iter = set.iterator();
while (iter.hasNext()) {
String sig = iter.next();
String spa = props.getProperty(sig);
ColourSpace space = new ColourSpace(sig, spa);
spaces.add(space);
}
}
}
}
|
static synchronized void function(JHOVE2 jhove2) throws JHOVE2Exception { if (spaces == null) { spaces = new TreeSet<ColourSpace>(); Properties props = jhove2.getConfigInfo().getProperties(STR); if (props != null) { Set<String> set = props.stringPropertyNames(); Iterator<String> iter = set.iterator(); while (iter.hasNext()) { String sig = iter.next(); String spa = props.getProperty(sig); ColourSpace space = new ColourSpace(sig, spa); spaces.add(space); } } } }
|
/** Initialize the colour spaces.
* @param jhove2 JHOVE2 framework
* @throws JHOVE2Exception
*/
|
Initialize the colour spaces
|
init
|
{
"repo_name": "opf-labs/jhove2",
"path": "src/main/java/org/jhove2/module/format/icc/field/ColourSpace.java",
"license": "bsd-2-clause",
"size": 5884
}
|
[
"java.util.Iterator",
"java.util.Properties",
"java.util.Set",
"java.util.TreeSet",
"org.jhove2.core.JHOVE2Exception"
] |
import java.util.Iterator; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import org.jhove2.core.JHOVE2Exception;
|
import java.util.*; import org.jhove2.core.*;
|
[
"java.util",
"org.jhove2.core"
] |
java.util; org.jhove2.core;
| 1,198,773
|
public static String gensalt(int log_rounds) {
return gensalt(log_rounds, new SecureRandom());
}
|
static String function(int log_rounds) { return gensalt(log_rounds, new SecureRandom()); }
|
/**
* Generate a salt for use with the BCrypt.hashpw() method
* @param log_rounds the log2 of the number of rounds of
* hashing to apply - the work factor therefore increases as
* 2**log_rounds.
* @return an encoded salt value
*/
|
Generate a salt for use with the BCrypt.hashpw() method
|
gensalt
|
{
"repo_name": "griffon/griffon-bcrypt-plugin",
"path": "src/main/griffon/plugins/bcrypt/BCrypt.java",
"license": "apache-2.0",
"size": 27235
}
|
[
"java.security.SecureRandom"
] |
import java.security.SecureRandom;
|
import java.security.*;
|
[
"java.security"
] |
java.security;
| 1,465,855
|
public void updateRMDelegationToken(
RMDelegationTokenIdentifier rmDTIdentifier, Long renewDate) {
handleStoreEvent(new RMStateStoreRMDTEvent(rmDTIdentifier, renewDate,
RMStateStoreEventType.UPDATE_DELEGATION_TOKEN));
}
|
void function( RMDelegationTokenIdentifier rmDTIdentifier, Long renewDate) { handleStoreEvent(new RMStateStoreRMDTEvent(rmDTIdentifier, renewDate, RMStateStoreEventType.UPDATE_DELEGATION_TOKEN)); }
|
/**
* RMDTSecretManager call this to update the state of a delegation token
* and sequence number
*/
|
RMDTSecretManager call this to update the state of a delegation token and sequence number
|
updateRMDelegationToken
|
{
"repo_name": "Microsoft-CISL/hadoop-prototype",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java",
"license": "apache-2.0",
"size": 43006
}
|
[
"org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier"
] |
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
|
import org.apache.hadoop.yarn.security.client.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 893,808
|
/**
 * Creates or updates the index entry mapping the given versionId to the
 * given schema.
 *
 * @param versionId identifier of the schema version to index; presumably
 *        non-null - TODO confirm validation happens in the implementation
 * @param schema the JSON schema to associate with that version
 */
void createOrUpdate(String versionId, JsonSchema schema);
|
void createOrUpdate(String versionId, JsonSchema schema);
|
/**
* Creates or updates the versionId to the given schema
* @param versionId
* @param schema
*/
|
Creates or updates the versionId to the given schema
|
createOrUpdate
|
{
"repo_name": "Sage-Bionetworks/Synapse-Repository-Services",
"path": "lib/jdomodels/src/main/java/org/sagebionetworks/repo/model/dbo/schema/ValidationJsonSchemaIndexDao.java",
"license": "apache-2.0",
"size": 740
}
|
[
"org.sagebionetworks.repo.model.schema.JsonSchema"
] |
import org.sagebionetworks.repo.model.schema.JsonSchema;
|
import org.sagebionetworks.repo.model.schema.*;
|
[
"org.sagebionetworks.repo"
] |
org.sagebionetworks.repo;
| 430,889
|
/**
 * Regression test: getAttribute used to return a plain Object, which was an
 * invalid operand for java's algebraic operators. Verifies +, -, *, / and %
 * all work against an attribute value.
 */
@Test
void testBinaryOperatorOnAttribute() throws PebbleException, IOException {
  PebbleEngine engine = new PebbleEngine.Builder()
      .loader(new StringLoader())
      .strictVariables(false)
      .build();
  String source = "{{ 1 + item.changeInt }} "
      + "{{ 1 - item.changeInt }} "
      + "{{ 2 * item.changeInt }} "
      + "{{ 11 / item.changeInt }} "
      + "{{ 4 % item.changeInt }}";
  PebbleTemplate template = engine.getTemplate(source);
  Map<String, Object> model = new HashMap<>();
  model.put("item", new Item());
  Writer output = new StringWriter();
  template.evaluate(output, model);
  assertEquals("4 -2 6 3 1", output.toString());
}
|
void testBinaryOperatorOnAttribute() throws PebbleException, IOException { PebbleEngine pebble = new PebbleEngine.Builder().loader(new StringLoader()) .strictVariables(false).build(); String source = STR + STR + STR + STR + STR; PebbleTemplate template = pebble.getTemplate(source); Map<String, Object> context = new HashMap<>(); context.put("item", new Item()); Writer writer = new StringWriter(); template.evaluate(writer, context); assertEquals(STR, writer.toString()); }
|
/**
* Problem existed where getAttribute would return an Object type which was an invalid operand for
* java's algebraic operators.
*/
|
Problem existed where getAttribute would return an Object type which was an invalid operand for java's algebraic operators
|
testBinaryOperatorOnAttribute
|
{
"repo_name": "mbosecke/pebble",
"path": "pebble/src/test/java/com/mitchellbosecke/pebble/LogicTest.java",
"license": "bsd-3-clause",
"size": 39092
}
|
[
"com.mitchellbosecke.pebble.error.PebbleException",
"com.mitchellbosecke.pebble.loader.StringLoader",
"com.mitchellbosecke.pebble.template.PebbleTemplate",
"java.io.IOException",
"java.io.StringWriter",
"java.io.Writer",
"java.util.HashMap",
"java.util.Map",
"org.junit.jupiter.api.Assertions"
] |
import com.mitchellbosecke.pebble.error.PebbleException; import com.mitchellbosecke.pebble.loader.StringLoader; import com.mitchellbosecke.pebble.template.PebbleTemplate; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.HashMap; import java.util.Map; import org.junit.jupiter.api.Assertions;
|
import com.mitchellbosecke.pebble.error.*; import com.mitchellbosecke.pebble.loader.*; import com.mitchellbosecke.pebble.template.*; import java.io.*; import java.util.*; import org.junit.jupiter.api.*;
|
[
"com.mitchellbosecke.pebble",
"java.io",
"java.util",
"org.junit.jupiter"
] |
com.mitchellbosecke.pebble; java.io; java.util; org.junit.jupiter;
| 1,850,059
|
/**
 * Deprecated layout callback for named components. This layout manager does
 * not track components by name, so the call is intentionally a no-op.
 */
@Deprecated
public void addLayoutComponent( String name, Component comp )
{
    // intentionally empty: named components are not used by this layout
}
|
void function( String name, Component comp ) { }
|
/**
* Deprecated layout function for named components. A no-op for us.
*/
|
Deprecated layout function for named components. A no-op for us
|
addLayoutComponent
|
{
"repo_name": "sirinath/kdgcommons",
"path": "src/main/java/net/sf/kdgcommons/swing/layout/CompactGridLayout.java",
"license": "apache-2.0",
"size": 12120
}
|
[
"java.awt.Component"
] |
import java.awt.Component;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 2,722,562
|
/**
 * Build a list of DitStructureRules read from the underlying storage for
 * the given schemas.
 *
 * @param schemas the schemas from which DitStructureRules are loaded
 * @return the entries holding the loaded DitStructureRules
 * @throws LdapException if there are failures accessing DitStructureRule information
 * @throws IOException if the underlying storage cannot be read
 */
List<Entry> loadDitStructureRules( Schema... schemas ) throws LdapException, IOException;
|
List<Entry> loadDitStructureRules( Schema... schemas ) throws LdapException, IOException;
|
/**
* Build a list of DitStructureRules read from the underlying storage for
* a list of specific schema.
*
* @param schemas the schemas from which DitStructureRules are loaded
* @throws Exception if there are failures accessing DitStructureRule information
*/
|
Build a list of DitStructureRules read from the underlying storage for a list of specific schema
|
loadDitStructureRules
|
{
"repo_name": "darranl/directory-shared",
"path": "ldap/model/src/main/java/org/apache/directory/api/ldap/model/schema/registries/SchemaLoader.java",
"license": "apache-2.0",
"size": 10278
}
|
[
"java.io.IOException",
"java.util.List",
"org.apache.directory.api.ldap.model.entry.Entry",
"org.apache.directory.api.ldap.model.exception.LdapException"
] |
import java.io.IOException; import java.util.List; import org.apache.directory.api.ldap.model.entry.Entry; import org.apache.directory.api.ldap.model.exception.LdapException;
|
import java.io.*; import java.util.*; import org.apache.directory.api.ldap.model.entry.*; import org.apache.directory.api.ldap.model.exception.*;
|
[
"java.io",
"java.util",
"org.apache.directory"
] |
java.io; java.util; org.apache.directory;
| 1,837,775
|
/**
 * Initializes the texture and OpenGL capability constants used when
 * rendering (multitexture units for the lightmap, texture combiners,
 * framebuffer objects, shaders and VBOs). Probes the driver's
 * ContextCapabilities once and appends a human-readable explanation of each
 * decision to logText.
 */
public static void initializeTextures()
{
    ContextCapabilities contextcapabilities = GLContext.getCapabilities();
    // Use the ARB extensions only when the equivalent core feature (GL 1.3)
    // is NOT available.
    arbMultitexture = contextcapabilities.GL_ARB_multitexture && !contextcapabilities.OpenGL13;
    arbTextureEnvCombine = contextcapabilities.GL_ARB_texture_env_combine && !contextcapabilities.OpenGL13;
    // NOTE(review): the ARB and core-GL-1.3 branches below assign identical
    // numeric constants (the ARB tokens share values with the core ones);
    // only the log text differs.
    if (arbMultitexture)
    {
        logText = logText + "Using ARB_multitexture.\n";
        defaultTexUnit = 33984;  // GL_TEXTURE0
        lightmapTexUnit = 33985; // GL_TEXTURE1
        GL_TEXTURE2 = 33986;
    }
    else
    {
        logText = logText + "Using GL 1.3 multitexturing.\n";
        defaultTexUnit = 33984;
        lightmapTexUnit = 33985;
        GL_TEXTURE2 = 33986;
    }
    if (arbTextureEnvCombine)
    {
        logText = logText + "Using ARB_texture_env_combine.\n";
        GL_COMBINE = 34160;
        GL_INTERPOLATE = 34165;
        GL_PRIMARY_COLOR = 34167;
        GL_CONSTANT = 34166;
        GL_PREVIOUS = 34168;
        GL_COMBINE_RGB = 34161;
        GL_SOURCE0_RGB = 34176;
        GL_SOURCE1_RGB = 34177;
        GL_SOURCE2_RGB = 34178;
        GL_OPERAND0_RGB = 34192;
        GL_OPERAND1_RGB = 34193;
        GL_OPERAND2_RGB = 34194;
        GL_COMBINE_ALPHA = 34162;
        GL_SOURCE0_ALPHA = 34184;
        GL_SOURCE1_ALPHA = 34185;
        GL_SOURCE2_ALPHA = 34186;
        GL_OPERAND0_ALPHA = 34200;
        GL_OPERAND1_ALPHA = 34201;
        GL_OPERAND2_ALPHA = 34202;
    }
    else
    {
        logText = logText + "Using GL 1.3 texture combiners.\n";
        GL_COMBINE = 34160;
        GL_INTERPOLATE = 34165;
        GL_PRIMARY_COLOR = 34167;
        GL_CONSTANT = 34166;
        GL_PREVIOUS = 34168;
        GL_COMBINE_RGB = 34161;
        GL_SOURCE0_RGB = 34176;
        GL_SOURCE1_RGB = 34177;
        GL_SOURCE2_RGB = 34178;
        GL_OPERAND0_RGB = 34192;
        GL_OPERAND1_RGB = 34193;
        GL_OPERAND2_RGB = 34194;
        GL_COMBINE_ALPHA = 34162;
        GL_SOURCE0_ALPHA = 34184;
        GL_SOURCE1_ALPHA = 34185;
        GL_SOURCE2_ALPHA = 34186;
        GL_OPERAND0_ALPHA = 34200;
        GL_OPERAND1_ALPHA = 34201;
        GL_OPERAND2_ALPHA = 34202;
    }
    // Separate blending (GL 1.4 or EXT_blend_func_separate) is a
    // prerequisite for using framebuffer objects below.
    extBlendFuncSeparate = contextcapabilities.GL_EXT_blend_func_separate && !contextcapabilities.OpenGL14;
    openGL14 = contextcapabilities.OpenGL14 || contextcapabilities.GL_EXT_blend_func_separate;
    framebufferSupported = openGL14 && (contextcapabilities.GL_ARB_framebuffer_object || contextcapabilities.GL_EXT_framebuffer_object || contextcapabilities.OpenGL30);
    if (framebufferSupported)
    {
        logText = logText + "Using framebuffer objects because ";
        // Preference order: core GL 3.0, then ARB, then EXT.
        if (contextcapabilities.OpenGL30)
        {
            logText = logText + "OpenGL 3.0 is supported and separate blending is supported.\n";
            framebufferType = OpenGlHelper.FboMode.BASE;
            GL_FRAMEBUFFER = 36160;
            GL_RENDERBUFFER = 36161;
            GL_COLOR_ATTACHMENT0 = 36064;
            GL_DEPTH_ATTACHMENT = 36096;
            GL_FRAMEBUFFER_COMPLETE = 36053;
            GL_FB_INCOMPLETE_ATTACHMENT = 36054;
            GL_FB_INCOMPLETE_MISS_ATTACH = 36055;
            GL_FB_INCOMPLETE_DRAW_BUFFER = 36059;
            GL_FB_INCOMPLETE_READ_BUFFER = 36060;
        }
        else if (contextcapabilities.GL_ARB_framebuffer_object)
        {
            logText = logText + "ARB_framebuffer_object is supported and separate blending is supported.\n";
            framebufferType = OpenGlHelper.FboMode.ARB;
            GL_FRAMEBUFFER = 36160;
            GL_RENDERBUFFER = 36161;
            GL_COLOR_ATTACHMENT0 = 36064;
            GL_DEPTH_ATTACHMENT = 36096;
            GL_FRAMEBUFFER_COMPLETE = 36053;
            GL_FB_INCOMPLETE_MISS_ATTACH = 36055;
            GL_FB_INCOMPLETE_ATTACHMENT = 36054;
            GL_FB_INCOMPLETE_DRAW_BUFFER = 36059;
            GL_FB_INCOMPLETE_READ_BUFFER = 36060;
        }
        else if (contextcapabilities.GL_EXT_framebuffer_object)
        {
            logText = logText + "EXT_framebuffer_object is supported.\n";
            framebufferType = OpenGlHelper.FboMode.EXT;
            GL_FRAMEBUFFER = 36160;
            GL_RENDERBUFFER = 36161;
            GL_COLOR_ATTACHMENT0 = 36064;
            GL_DEPTH_ATTACHMENT = 36096;
            GL_FRAMEBUFFER_COMPLETE = 36053;
            GL_FB_INCOMPLETE_MISS_ATTACH = 36055;
            GL_FB_INCOMPLETE_ATTACHMENT = 36054;
            GL_FB_INCOMPLETE_DRAW_BUFFER = 36059;
            GL_FB_INCOMPLETE_READ_BUFFER = 36060;
        }
    }
    else
    {
        // Explain exactly which prerequisite(s) were missing.
        logText = logText + "Not using framebuffer objects because ";
        logText = logText + "OpenGL 1.4 is " + (contextcapabilities.OpenGL14 ? "" : "not ") + "supported, ";
        logText = logText + "EXT_blend_func_separate is " + (contextcapabilities.GL_EXT_blend_func_separate ? "" : "not ") + "supported, ";
        logText = logText + "OpenGL 3.0 is " + (contextcapabilities.OpenGL30 ? "" : "not ") + "supported, ";
        logText = logText + "ARB_framebuffer_object is " + (contextcapabilities.GL_ARB_framebuffer_object ? "" : "not ") + "supported, and ";
        logText = logText + "EXT_framebuffer_object is " + (contextcapabilities.GL_EXT_framebuffer_object ? "" : "not ") + "supported.\n";
    }
    openGL21 = contextcapabilities.OpenGL21;
    // NOTE: && binds tighter than ||, so this reads as
    // openGL21 || (vertex_shader && fragment_shader && shader_objects).
    shadersAvailable = openGL21 || contextcapabilities.GL_ARB_vertex_shader && contextcapabilities.GL_ARB_fragment_shader && contextcapabilities.GL_ARB_shader_objects;
    logText = logText + "Shaders are " + (shadersAvailable ? "" : "not ") + "available because ";
    if (shadersAvailable)
    {
        if (contextcapabilities.OpenGL21)
        {
            logText = logText + "OpenGL 2.1 is supported.\n";
            arbShaders = false;
            GL_LINK_STATUS = 35714;
            GL_COMPILE_STATUS = 35713;
            GL_VERTEX_SHADER = 35633;
            GL_FRAGMENT_SHADER = 35632;
        }
        else
        {
            logText = logText + "ARB_shader_objects, ARB_vertex_shader, and ARB_fragment_shader are supported.\n";
            arbShaders = true;
            GL_LINK_STATUS = 35714;
            GL_COMPILE_STATUS = 35713;
            GL_VERTEX_SHADER = 35633;
            GL_FRAGMENT_SHADER = 35632;
        }
    }
    else
    {
        logText = logText + "OpenGL 2.1 is " + (contextcapabilities.OpenGL21 ? "" : "not ") + "supported, ";
        logText = logText + "ARB_shader_objects is " + (contextcapabilities.GL_ARB_shader_objects ? "" : "not ") + "supported, ";
        logText = logText + "ARB_vertex_shader is " + (contextcapabilities.GL_ARB_vertex_shader ? "" : "not ") + "supported, and ";
        logText = logText + "ARB_fragment_shader is " + (contextcapabilities.GL_ARB_fragment_shader ? "" : "not ") + "supported.\n";
    }
    // Shader support for rendering additionally requires FBO support.
    shadersSupported = framebufferSupported && shadersAvailable;
    String s = GL11.glGetString(GL11.GL_VENDOR).toLowerCase();
    nvidia = s.contains("nvidia");
    arbVbo = !contextcapabilities.OpenGL15 && contextcapabilities.GL_ARB_vertex_buffer_object;
    vboSupported = contextcapabilities.OpenGL15 || arbVbo;
    logText = logText + "VBOs are " + (vboSupported ? "" : "not ") + "available because ";
    if (vboSupported)
    {
        if (arbVbo)
        {
            logText = logText + "ARB_vertex_buffer_object is supported.\n";
            GL_STATIC_DRAW = 35044;
            GL_ARRAY_BUFFER = 34962;
        }
        else
        {
            logText = logText + "OpenGL 1.5 is supported.\n";
            GL_STATIC_DRAW = 35044;
            GL_ARRAY_BUFFER = 34962;
        }
    }
    ati = s.contains("ati");
    if (ati)
    {
        if (vboSupported)
        {
            vboSupportedAti = true;
        }
        else
        {
            // ATI driver without VBO support: cap render distance at 16.
            GameSettings.Options.RENDER_DISTANCE.setValueMax(16.0F);
        }
    }
    try
    {
        // Record a short CPU description (e.g. "4x Intel ...") via OSHI.
        Processor[] aprocessor = (new SystemInfo()).getHardware().getProcessors();
        cpu = String.format("%dx %s", new Object[] {Integer.valueOf(aprocessor.length), aprocessor[0]}).replaceAll("\\s+", " ");
    }
    catch (Throwable var3)
    {
        // best-effort: CPU info is optional, ignore any probing failure
        ;
    }
}
|
static void function() { ContextCapabilities contextcapabilities = GLContext.getCapabilities(); arbMultitexture = contextcapabilities.GL_ARB_multitexture && !contextcapabilities.OpenGL13; arbTextureEnvCombine = contextcapabilities.GL_ARB_texture_env_combine && !contextcapabilities.OpenGL13; if (arbMultitexture) { logText = logText + STR; defaultTexUnit = 33984; lightmapTexUnit = 33985; GL_TEXTURE2 = 33986; } else { logText = logText + STR; defaultTexUnit = 33984; lightmapTexUnit = 33985; GL_TEXTURE2 = 33986; } if (arbTextureEnvCombine) { logText = logText + STR; GL_COMBINE = 34160; GL_INTERPOLATE = 34165; GL_PRIMARY_COLOR = 34167; GL_CONSTANT = 34166; GL_PREVIOUS = 34168; GL_COMBINE_RGB = 34161; GL_SOURCE0_RGB = 34176; GL_SOURCE1_RGB = 34177; GL_SOURCE2_RGB = 34178; GL_OPERAND0_RGB = 34192; GL_OPERAND1_RGB = 34193; GL_OPERAND2_RGB = 34194; GL_COMBINE_ALPHA = 34162; GL_SOURCE0_ALPHA = 34184; GL_SOURCE1_ALPHA = 34185; GL_SOURCE2_ALPHA = 34186; GL_OPERAND0_ALPHA = 34200; GL_OPERAND1_ALPHA = 34201; GL_OPERAND2_ALPHA = 34202; } else { logText = logText + STR; GL_COMBINE = 34160; GL_INTERPOLATE = 34165; GL_PRIMARY_COLOR = 34167; GL_CONSTANT = 34166; GL_PREVIOUS = 34168; GL_COMBINE_RGB = 34161; GL_SOURCE0_RGB = 34176; GL_SOURCE1_RGB = 34177; GL_SOURCE2_RGB = 34178; GL_OPERAND0_RGB = 34192; GL_OPERAND1_RGB = 34193; GL_OPERAND2_RGB = 34194; GL_COMBINE_ALPHA = 34162; GL_SOURCE0_ALPHA = 34184; GL_SOURCE1_ALPHA = 34185; GL_SOURCE2_ALPHA = 34186; GL_OPERAND0_ALPHA = 34200; GL_OPERAND1_ALPHA = 34201; GL_OPERAND2_ALPHA = 34202; } extBlendFuncSeparate = contextcapabilities.GL_EXT_blend_func_separate && !contextcapabilities.OpenGL14; openGL14 = contextcapabilities.OpenGL14 contextcapabilities.GL_EXT_blend_func_separate; framebufferSupported = openGL14 && (contextcapabilities.GL_ARB_framebuffer_object contextcapabilities.GL_EXT_framebuffer_object contextcapabilities.OpenGL30); if (framebufferSupported) { logText = logText + STR; if (contextcapabilities.OpenGL30) { logText = logText 
+ STR; framebufferType = OpenGlHelper.FboMode.BASE; GL_FRAMEBUFFER = 36160; GL_RENDERBUFFER = 36161; GL_COLOR_ATTACHMENT0 = 36064; GL_DEPTH_ATTACHMENT = 36096; GL_FRAMEBUFFER_COMPLETE = 36053; GL_FB_INCOMPLETE_ATTACHMENT = 36054; GL_FB_INCOMPLETE_MISS_ATTACH = 36055; GL_FB_INCOMPLETE_DRAW_BUFFER = 36059; GL_FB_INCOMPLETE_READ_BUFFER = 36060; } else if (contextcapabilities.GL_ARB_framebuffer_object) { logText = logText + STR; framebufferType = OpenGlHelper.FboMode.ARB; GL_FRAMEBUFFER = 36160; GL_RENDERBUFFER = 36161; GL_COLOR_ATTACHMENT0 = 36064; GL_DEPTH_ATTACHMENT = 36096; GL_FRAMEBUFFER_COMPLETE = 36053; GL_FB_INCOMPLETE_MISS_ATTACH = 36055; GL_FB_INCOMPLETE_ATTACHMENT = 36054; GL_FB_INCOMPLETE_DRAW_BUFFER = 36059; GL_FB_INCOMPLETE_READ_BUFFER = 36060; } else if (contextcapabilities.GL_EXT_framebuffer_object) { logText = logText + STR; framebufferType = OpenGlHelper.FboMode.EXT; GL_FRAMEBUFFER = 36160; GL_RENDERBUFFER = 36161; GL_COLOR_ATTACHMENT0 = 36064; GL_DEPTH_ATTACHMENT = 36096; GL_FRAMEBUFFER_COMPLETE = 36053; GL_FB_INCOMPLETE_MISS_ATTACH = 36055; GL_FB_INCOMPLETE_ATTACHMENT = 36054; GL_FB_INCOMPLETE_DRAW_BUFFER = 36059; GL_FB_INCOMPLETE_READ_BUFFER = 36060; } } else { logText = logText + STR; logText = logText + STR + (contextcapabilities.OpenGL14 ? STRnot STRsupported, STREXT_blend_func_separate is " + (contextcapabilities.GL_EXT_blend_func_separate ? STRnot STRsupported, STROpenGL 3.0 is " + (contextcapabilities.OpenGL30 ? STRnot STRsupported, STRARB_framebuffer_object is " + (contextcapabilities.GL_ARB_framebuffer_object ? STRnot STRsupported, and STREXT_framebuffer_object is " + (contextcapabilities.GL_EXT_framebuffer_object ? STRnot STRsupported.\nSTRShaders are " + (shadersAvailable ? STRnot STRavailable because STROpenGL 2.1 is supported.\nSTRARB_shader_objects, ARB_vertex_shader, and ARB_fragment_shader are supported.\nSTROpenGL 2.1 is " + (contextcapabilities.OpenGL21 ? 
STRnot STRsupported, STRARB_shader_objects is " + (contextcapabilities.GL_ARB_shader_objects ? STRnot STRsupported, STRARB_vertex_shader is " + (contextcapabilities.GL_ARB_vertex_shader ? STRnot STRsupported, and STRARB_fragment_shader is " + (contextcapabilities.GL_ARB_fragment_shader ? STRnot STRsupported.\nSTRnvidiaSTRVBOs are " + (vboSupported ? STRnot STRavailable because STRARB_vertex_buffer_object is supported.\nSTROpenGL 1.5 is supported.\nSTRatiSTR%dx %sSTR\\s+STR "); } catch (Throwable var3) { ; } }
|
/**
* Initializes the texture constants to be used when rendering lightmap values
*/
|
Initializes the texture constants to be used when rendering lightmap values
|
initializeTextures
|
{
"repo_name": "danielyc/test-1.9.4",
"path": "build/tmp/recompileMc/sources/net/minecraft/client/renderer/OpenGlHelper.java",
"license": "gpl-3.0",
"size": 32008
}
|
[
"org.lwjgl.opengl.ContextCapabilities",
"org.lwjgl.opengl.GLContext"
] |
import org.lwjgl.opengl.ContextCapabilities; import org.lwjgl.opengl.GLContext;
|
import org.lwjgl.opengl.*;
|
[
"org.lwjgl.opengl"
] |
org.lwjgl.opengl;
| 1,913,448
|
/**
 * Adds the purely imaginary quaternion given by <code>im</code> to this,
 * accumulating im's components into x, y and z in place. The real part is
 * left untouched.
 */
public final void assignPlus( final Real3 im ) {
    this.x = this.x + im.x;
    this.y = this.y + im.y;
    this.z = this.z + im.z;
}
|
final void function( final Real3 im ) { x += im.x; y += im.y; z += im.z; }
|
/**
 * Adds the purely imaginary quaternion given by <code>im</code> to this.
*/
|
Adds the purely imaginary quaternion given by <code>im</code> to this
|
assignPlus
|
{
"repo_name": "jupsal/schmies-jTEM",
"path": "libUnzipped/de/jtem/mfc/field/Quaternion.java",
"license": "bsd-2-clause",
"size": 43415
}
|
[
"de.jtem.mfc.vector.Real3"
] |
import de.jtem.mfc.vector.Real3;
|
import de.jtem.mfc.vector.*;
|
[
"de.jtem.mfc"
] |
de.jtem.mfc;
| 2,243,675
|
/**
 * This adds Separators for editor additions to the tool bar, one for
 * settings and one for additions, in that order.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated NOT
 */
@Override
public void contributeToToolBar(IToolBarManager toolBarManager) {
	for (String groupId : new String[] { "monitoring-settings", "monitoring-additions" }) {
		toolBarManager.add(new Separator(groupId));
	}
}
|
void function(IToolBarManager toolBarManager) { toolBarManager.add(new Separator(STR)); toolBarManager.add(new Separator(STR)); }
|
/**
* This adds Separators for editor additions to the tool bar.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This adds Separators for editor additions to the tool bar.
|
contributeToToolBar
|
{
"repo_name": "occiware/OCCI-Studio",
"path": "plugins/org.eclipse.cmf.occi.monitoring.editor/src-gen/org/eclipse/cmf/occi/monitoring/presentation/MonitoringActionBarContributor.java",
"license": "epl-1.0",
"size": 14469
}
|
[
"org.eclipse.jface.action.IToolBarManager",
"org.eclipse.jface.action.Separator"
] |
import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.action.Separator;
|
import org.eclipse.jface.action.*;
|
[
"org.eclipse.jface"
] |
org.eclipse.jface;
| 2,230,432
|
if (array.rank() <= 2) {
array.subiRowVector(stats.getLower());
array.diviRowVector(stats.getRange());
}
// if feature Rank is 3 (time series) samplesxfeaturesxtimesteps
// if feature Rank is 4 (images) samplesxchannelsxrowsxcols
// both cases operations should be carried out in dimension 1
else {
Nd4j.getExecutioner().execAndReturn(new BroadcastSubOp(array, stats.getLower(), array, 1));
Nd4j.getExecutioner().execAndReturn(new BroadcastDivOp(array, stats.getRange(), array, 1));
}
// Scale by target range
array.muli(maxRange - minRange);
// Add target range minimum values
array.addi(minRange);
if (maskArray != null) {
DataSetUtil.setMaskedValuesToZero(array, maskArray);
}
}
|
if (array.rank() <= 2) { array.subiRowVector(stats.getLower()); array.diviRowVector(stats.getRange()); } else { Nd4j.getExecutioner().execAndReturn(new BroadcastSubOp(array, stats.getLower(), array, 1)); Nd4j.getExecutioner().execAndReturn(new BroadcastDivOp(array, stats.getRange(), array, 1)); } array.muli(maxRange - minRange); array.addi(minRange); if (maskArray != null) { DataSetUtil.setMaskedValuesToZero(array, maskArray); } }
|
/**
* Normalize a data array
*
* @param array the data to normalize
* @param stats statistics of the data population
*/
|
Normalize a data array
|
preProcess
|
{
"repo_name": "huitseeker/nd4j",
"path": "nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/MinMaxStrategy.java",
"license": "apache-2.0",
"size": 3734
}
|
[
"org.nd4j.linalg.api.ops.impl.broadcast.BroadcastDivOp",
"org.nd4j.linalg.api.ops.impl.broadcast.BroadcastSubOp",
"org.nd4j.linalg.dataset.api.DataSetUtil",
"org.nd4j.linalg.factory.Nd4j"
] |
import org.nd4j.linalg.api.ops.impl.broadcast.BroadcastDivOp; import org.nd4j.linalg.api.ops.impl.broadcast.BroadcastSubOp; import org.nd4j.linalg.dataset.api.DataSetUtil; import org.nd4j.linalg.factory.Nd4j;
|
import org.nd4j.linalg.api.ops.impl.broadcast.*; import org.nd4j.linalg.dataset.api.*; import org.nd4j.linalg.factory.*;
|
[
"org.nd4j.linalg"
] |
org.nd4j.linalg;
| 2,132,610
|
/**
 * Checks the fields specified for reserved words and quotes them in place.
 *
 * @param fields the list of fields to check
 * @return true if one or more values have a name that is a reserved word on this database type
 */
public boolean replaceReservedWords( RowMetaInterface fields ) {
  boolean replacedAny = false;
  for ( int index = 0; index < fields.size(); index++ ) {
    ValueMetaInterface valueMeta = fields.getValueMeta( index );
    String fieldName = valueMeta.getName();
    if ( isReservedWord( fieldName ) ) {
      // Quote the name in place so downstream SQL generation stays valid.
      replacedAny = true;
      valueMeta.setName( quoteField( fieldName ) );
    }
  }
  return replacedAny;
}
|
boolean function( RowMetaInterface fields ) { boolean hasReservedWords = false; for ( int i = 0; i < fields.size(); i++ ) { ValueMetaInterface v = fields.getValueMeta( i ); if ( isReservedWord( v.getName() ) ) { hasReservedWords = true; v.setName( quoteField( v.getName() ) ); } } return hasReservedWords; }
|
/**
* Checks the fields specified for reserved words and quotes them.
*
* @param fields
* the list of fields to check
* @return true if one or more values have a name that is a reserved word on this database type.
*/
|
Checks the fields specified for reserved words and quotes them
|
replaceReservedWords
|
{
"repo_name": "pavel-sakun/pentaho-kettle",
"path": "core/src/main/java/org/pentaho/di/core/database/DatabaseMeta.java",
"license": "apache-2.0",
"size": 93751
}
|
[
"org.pentaho.di.core.row.RowMetaInterface",
"org.pentaho.di.core.row.ValueMetaInterface"
] |
import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface;
|
import org.pentaho.di.core.row.*;
|
[
"org.pentaho.di"
] |
org.pentaho.di;
| 2,606,566
|
/**
 * Create an Icon pointing to a drawable resource.
 *
 * @param context The context for the application whose resources should be used to resolve the
 *                given resource ID.
 * @param resId ID of the drawable resource
 * @return a new resource-backed Icon
 * @throws IllegalArgumentException if {@code context} is null
 */
public static Icon createWithResource(Context context, @DrawableRes int resId) {
    if (context == null) {
        throw new IllegalArgumentException("Context must not be null.");
    }
    final Icon icon = new Icon(TYPE_RESOURCE);
    icon.mInt1 = resId;
    icon.mString1 = context.getPackageName();
    return icon;
}
|
static Icon function(Context context, @DrawableRes int resId) { if (context == null) { throw new IllegalArgumentException(STR); } final Icon rep = new Icon(TYPE_RESOURCE); rep.mInt1 = resId; rep.mString1 = context.getPackageName(); return rep; }
|
/**
* Create an Icon pointing to a drawable resource.
* @param context The context for the application whose resources should be used to resolve the
* given resource ID.
* @param resId ID of the drawable resource
*/
|
Create an Icon pointing to a drawable resource
|
createWithResource
|
{
"repo_name": "OmniEvo/android_frameworks_base",
"path": "graphics/java/android/graphics/drawable/Icon.java",
"license": "gpl-3.0",
"size": 27504
}
|
[
"android.annotation.DrawableRes",
"android.content.Context"
] |
import android.annotation.DrawableRes; import android.content.Context;
|
import android.annotation.*; import android.content.*;
|
[
"android.annotation",
"android.content"
] |
android.annotation; android.content;
| 161,601
|
/**
 * For formatted messages, first substitute the two arguments into the
 * pattern and then log the result (the formatter also extracts a trailing
 * Throwable, if any).
 *
 * @param level the level to log at; nothing happens if it is disabled
 * @param format the message pattern with {} anchors
 * @param arg1 first substitution argument
 * @param arg2 second substitution argument
 */
private void formatAndLog(int level, String format, Object arg1,
        Object arg2) {
    if (isLevelEnabled(level)) {
        FormattingTuple tuple = MessageFormatter.format(format, arg1, arg2);
        log(level, tuple.getMessage(), tuple.getThrowable());
    }
}
|
void function(int level, String format, Object arg1, Object arg2) { if (!isLevelEnabled(level)) { return; } FormattingTuple tp = MessageFormatter.format(format, arg1, arg2); log(level, tp.getMessage(), tp.getThrowable()); }
|
/**
* For formatted messages, first substitute arguments and then log.
*
* @param level
* @param format
* @param arg1
* @param arg2
*/
|
For formatted messages, first substitute arguments and then log
|
formatAndLog
|
{
"repo_name": "PRECISE/ROSLab",
"path": "lib/slf4j-1.7.10/slf4j-simple/src/main/java/org/slf4j/impl/SimpleLogger.java",
"license": "apache-2.0",
"size": 23793
}
|
[
"org.slf4j.helpers.FormattingTuple",
"org.slf4j.helpers.MessageFormatter"
] |
import org.slf4j.helpers.FormattingTuple; import org.slf4j.helpers.MessageFormatter;
|
import org.slf4j.helpers.*;
|
[
"org.slf4j.helpers"
] |
org.slf4j.helpers;
| 528,949
|
/**
 * Merge two sets of property definitions. The property key/value pairs
 * present in the given string arrays are combined into a single array. If a
 * property key is defined in both arrays, the value in <code>p2</code>
 * overrides the value in <code>p1</code>. The original ordering is not
 * retained.
 *
 * @param p1 an array of property key/value pairs (alternating key, value)
 * @param p2 an array of property key/value pairs (alternating key, value)
 * @return an array containing the combined properties key/value pairs
 */
String[] mergeProperties(String[] p1, String[] p2) {
    if (p1 == null && p2 == null) {
        return new String[0];
    }
    if (p1 == null || p2 == null) {
        // only one side present, nothing to merge
        return (p1 == null ? p2 : p1);
    }
    // insert p1 first so p2's values override duplicates
    HashMap<String, String> merged = new HashMap<String, String>();
    for (int i = 0; i < p1.length; i += 2) {
        merged.put(p1[i], p1[i + 1]);
    }
    for (int i = 0; i < p2.length; i += 2) {
        merged.put(p2[i], p2[i + 1]);
    }
    String[] ret = new String[merged.size() * 2];
    int i = 0;
    // entrySet iteration avoids a second lookup per key
    for (java.util.Map.Entry<String, String> entry : merged.entrySet()) {
        ret[i++] = entry.getKey();
        ret[i++] = entry.getValue();
    }
    return ret;
}
|
String[] mergeProperties(String[] p1, String[] p2) { if (p1 == null && p2 == null) { return new String[0]; } if (p1 == null p2 == null) { return (p1 == null ? p2 : p1); } HashMap map = new HashMap(); for (int i = 0; i < p1.length; i += 2) { map.put(p1[i], p1[i + 1]); } for (int i = 0; i < p2.length; i += 2) { map.put(p2[i], p2[i + 1]); } Set keys = map.keySet(); Iterator it = keys.iterator(); String[] ret = new String[keys.size() * 2]; int i = 0; while (it.hasNext()) { String key = (String) it.next(); String value = (String) map.get(key); ret[i++] = key; ret[i++] = value; } return ret; }
|
/**
* Merge two sets of property definitions. The property key/value
* pairs present in the given string arrays are combined into a
* single array. If a property key is defined in both arrays, the
* value in <code>p2</code> overrides the value in
* <code>p1</code>. The original ordering is not retained.
*
* @param p1 an array of property key/value pairs
* @param p2 an array of property key/value pairs
 * @return an array containing the combined properties key/value pairs
*/
|
Merge two sets of property definitions. The property key/value pairs present in the given string arrays are combined into a single array. If a property key is defined in both arrays, the value in <code>p2</code> overrides the value in <code>p1</code>. The original ordering is not retained
|
mergeProperties
|
{
"repo_name": "trasukg/river-qa-2.2",
"path": "qa/src/com/sun/jini/qa/harness/QAConfig.java",
"license": "apache-2.0",
"size": 104943
}
|
[
"java.util.HashMap",
"java.util.Iterator",
"java.util.Set"
] |
import java.util.HashMap; import java.util.Iterator; import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 79,864
|
/**
 * Resolves localized messages*.properties and application.properties files in
 * the application to allow for internationalization. The messages*.properties
 * files translate Roo generated messages which are part of the admin
 * interface, the application.properties resource bundle localizes all
 * application specific messages such as entity names and menu items.
 */
public @Bean ReloadableResourceBundleMessageSource messageSource () {
	final ReloadableResourceBundleMessageSource messageSource = new ReloadableResourceBundleMessageSource ();
	messageSource.setBasenames ("WEB-INF/i18n/messages");
	messageSource.setFallbackToSystemLocale (false);
	if (!hasDojoProduction) {
		// Development mode: re-read the bundles on every access.
		messageSource.setCacheSeconds (0);
	}
	return messageSource;
}
|
@Bean ReloadableResourceBundleMessageSource function () { final ReloadableResourceBundleMessageSource source = new ReloadableResourceBundleMessageSource (); if (!hasDojoProduction) { source.setCacheSeconds (0); } source.setBasenames (STR); source.setFallbackToSystemLocale (false); return source; }
|
/**
* Resolves localized messages*.properties and application.properties files in the application to allow for internationalization.
* The messages*.properties files translate Roo generated messages which are part of the admin interface, the application.properties
* resource bundle localizes all application specific messages such as entity names and menu items.
*/
|
Resolves localized messages*.properties and application.properties files in the application to allow for internationalization. The messages*.properties files translate Roo generated messages which are part of the admin interface, the application.properties resource bundle localizes all application specific messages such as entity names and menu items
|
messageSource
|
{
"repo_name": "CDS-VRN/InSpider",
"path": "admin/src/main/java/nl/ipo/cds/admin/config/AdminWebMvcConfig.java",
"license": "gpl-3.0",
"size": 9599
}
|
[
"org.springframework.context.annotation.Bean",
"org.springframework.context.support.ReloadableResourceBundleMessageSource"
] |
import org.springframework.context.annotation.Bean; import org.springframework.context.support.ReloadableResourceBundleMessageSource;
|
import org.springframework.context.annotation.*; import org.springframework.context.support.*;
|
[
"org.springframework.context"
] |
org.springframework.context;
| 2,553,771
|
/**
 * Get enum value 'red color' from enumeration of 'red color', 'green-color',
 * 'blue_color'.
 *
 * @return the Colors object wrapped in {@link ServiceResponse} if successful
 * @throws ErrorException exception thrown from REST call
 * @throws IOException exception thrown from serialization/deserialization
 */
ServiceResponse<Colors> getNotExpandable() throws ErrorException, IOException;
|
ServiceResponse<Colors> getNotExpandable() throws ErrorException, IOException;
|
/**
* Get enum value 'red color' from enumeration of 'red color', 'green-color', 'blue_color'.
*
* @throws ErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @return the Colors object wrapped in {@link ServiceResponse} if successful.
*/
|
Get enum value 'red color' from enumeration of 'red color', 'green-color', 'blue_color'
|
getNotExpandable
|
{
"repo_name": "haocs/autorest",
"path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/bodystring/Enums.java",
"license": "mit",
"size": 7927
}
|
[
"com.microsoft.rest.ServiceResponse",
"java.io.IOException"
] |
import com.microsoft.rest.ServiceResponse; import java.io.IOException;
|
import com.microsoft.rest.*; import java.io.*;
|
[
"com.microsoft.rest",
"java.io"
] |
com.microsoft.rest; java.io;
| 2,003,208
|
/**
 * Matches the current element position in the document tree with the
 * element position specified in the element() XPointer scheme, updating
 * the child-sequence bookkeeping as start/end/empty-element events arrive.
 *
 * @param element the qualified name of the current element
 * @param event the parse event (XPointerPart.EVENT_ELEMENT_START, _END or _EMPTY)
 * @return boolean - true if the current element position in the document
 *         tree matches the element position specified in the element
 *         XPointer scheme.
 * @throws XNIException on XNI processing failures
 */
protected boolean matchChildSequence(QName element, int event)
        throws XNIException {
    // need to resize fCurrentChildSequence
    if (fCurrentChildDepth >= fCurrentChildSequence.length) {
        int tmpCurrentChildSequence[] = new int[fCurrentChildSequence.length];
        System.arraycopy(fCurrentChildSequence, 0, tmpCurrentChildSequence,
                0, fCurrentChildSequence.length);
        // Increase the size by a factor of 2 (?)
        fCurrentChildSequence = new int[fCurrentChildDepth * 2];
        System.arraycopy(tmpCurrentChildSequence, 0, fCurrentChildSequence,
                0, tmpCurrentChildSequence.length);
    }
    // Only track positions while an element() scheme is being resolved.
    if (fIsResolveElement) {
        // start tag: descend one level, recording this element's position
        if (event == XPointerPart.EVENT_ELEMENT_START) {
            fCurrentChildSequence[fCurrentChildDepth] = fCurrentChildPosition;
            fCurrentChildDepth++;
            // reset the current child position
            fCurrentChildPosition = 1;
            //if (!fSchemeNameFound) {
            if ((fCurrentChildDepth <= fFoundDepth) || (fFoundDepth == 0)) {
                if (checkMatch()) {
                    fIsElementFound = true;
                    fFoundDepth = fCurrentChildDepth;
                } else {
                    fIsElementFound = false;
                    fFoundDepth = 0;
                }
            }
        } else if (event == XPointerPart.EVENT_ELEMENT_END) {
            if (fCurrentChildDepth == fFoundDepth) {
                fIsElementFound = true;
            } else if (((fCurrentChildDepth < fFoundDepth) && (fFoundDepth != 0))
                    || ((fCurrentChildDepth > fFoundDepth) // or empty element found
                            && (fFoundDepth == 0))) {
                fIsElementFound = false;
            }
            // reset array position of last child
            fCurrentChildSequence[fCurrentChildDepth] = 0;
            fCurrentChildDepth--;
            fCurrentChildPosition = fCurrentChildSequence[fCurrentChildDepth] + 1;
        } else if (event == XPointerPart.EVENT_ELEMENT_EMPTY) {
            fCurrentChildSequence[fCurrentChildDepth] = fCurrentChildPosition;
            fCurrentChildPosition++;
            // Do not check for empty elements if the empty element is
            // a child of a found parent element
            //if (!fIsElementFound) {
            if (checkMatch()) {
                fIsElementFound = true;
                fWasOnlyEmptyElementFound = true;
            } else {
                fIsElementFound = false;
            }
            //}
        }
    }
    return fIsElementFound;
}
|
boolean function(QName element, int event) throws XNIException { if (fCurrentChildDepth >= fCurrentChildSequence.length) { int tmpCurrentChildSequence[] = new int[fCurrentChildSequence.length]; System.arraycopy(fCurrentChildSequence, 0, tmpCurrentChildSequence, 0, fCurrentChildSequence.length); fCurrentChildSequence = new int[fCurrentChildDepth * 2]; System.arraycopy(tmpCurrentChildSequence, 0, fCurrentChildSequence, 0, tmpCurrentChildSequence.length); } if (fIsResolveElement) { if (event == XPointerPart.EVENT_ELEMENT_START) { fCurrentChildSequence[fCurrentChildDepth] = fCurrentChildPosition; fCurrentChildDepth++; fCurrentChildPosition = 1; if ((fCurrentChildDepth <= fFoundDepth) (fFoundDepth == 0)) { if (checkMatch()) { fIsElementFound = true; fFoundDepth = fCurrentChildDepth; } else { fIsElementFound = false; fFoundDepth = 0; } } } else if (event == XPointerPart.EVENT_ELEMENT_END) { if (fCurrentChildDepth == fFoundDepth) { fIsElementFound = true; } else if (((fCurrentChildDepth < fFoundDepth) && (fFoundDepth != 0)) ((fCurrentChildDepth > fFoundDepth) && (fFoundDepth == 0))) { fIsElementFound = false; } fCurrentChildSequence[fCurrentChildDepth] = 0; fCurrentChildDepth--; fCurrentChildPosition = fCurrentChildSequence[fCurrentChildDepth] + 1; } else if (event == XPointerPart.EVENT_ELEMENT_EMPTY) { fCurrentChildSequence[fCurrentChildDepth] = fCurrentChildPosition; fCurrentChildPosition++; if (checkMatch()) { fIsElementFound = true; fWasOnlyEmptyElementFound = true; } else { fIsElementFound = false; } } } return fIsElementFound; }
|
/**
* Matches the current element position in the document tree with the
* element position specified in the element XPointer scheme.
*
* @param event
* @return boolean - true if the current element position in the document
* tree matches theelement position specified in the element XPointer
* scheme.
*/
|
Matches the current element position in the document tree with the element position specified in the element XPointer scheme
|
matchChildSequence
|
{
"repo_name": "BIORIMP/biorimp",
"path": "BIO-RIMP/test_data/code/xerces/src/org/apache/xerces/xpointer/ElementSchemePointer.java",
"license": "gpl-2.0",
"size": 30582
}
|
[
"org.apache.xerces.xni.QName",
"org.apache.xerces.xni.XNIException"
] |
import org.apache.xerces.xni.QName; import org.apache.xerces.xni.XNIException;
|
import org.apache.xerces.xni.*;
|
[
"org.apache.xerces"
] |
org.apache.xerces;
| 3,303
|
public synchronized void restartNameNode(int nnIndex, boolean waitActive,
String... args) throws IOException {
NameNodeInfo info = getNN(nnIndex);
StartupOption startOpt = info.startOpt;
shutdownNameNode(nnIndex);
if (args.length != 0) {
startOpt = null;
} else {
args = createArgs(startOpt);
}
NameNode nn = NameNode.createNameNode(args, info.conf);
info.nameNode = nn;
info.setStartOpt(startOpt);
if (waitActive) {
waitClusterUp();
LOG.info("Restarted the namenode");
waitActive();
}
}
|
synchronized void function(int nnIndex, boolean waitActive, String... args) throws IOException { NameNodeInfo info = getNN(nnIndex); StartupOption startOpt = info.startOpt; shutdownNameNode(nnIndex); if (args.length != 0) { startOpt = null; } else { args = createArgs(startOpt); } NameNode nn = NameNode.createNameNode(args, info.conf); info.nameNode = nn; info.setStartOpt(startOpt); if (waitActive) { waitClusterUp(); LOG.info(STR); waitActive(); } }
|
/**
* Restart the namenode at a given index. Optionally wait for the cluster
* to become active.
*/
|
Restart the namenode at a given index. Optionally wait for the cluster to become active
|
restartNameNode
|
{
"repo_name": "szegedim/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java",
"license": "apache-2.0",
"size": 117291
}
|
[
"java.io.IOException",
"org.apache.hadoop.hdfs.server.common.HdfsServerConstants",
"org.apache.hadoop.hdfs.server.namenode.NameNode"
] |
import java.io.IOException; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.namenode.NameNode;
|
import java.io.*; import org.apache.hadoop.hdfs.server.common.*; import org.apache.hadoop.hdfs.server.namenode.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 1,070,603
|
static public void createInputFile(MiniCluster miniCluster, String fileName,
String[] inputData)
throws IOException {
FileSystem fs = miniCluster.getFileSystem();
createInputFile(fs, fileName, inputData);
}
|
static void function(MiniCluster miniCluster, String fileName, String[] inputData) throws IOException { FileSystem fs = miniCluster.getFileSystem(); createInputFile(fs, fileName, inputData); }
|
/**
* Helper to create a dfs file on the Minicluster DFS with given
* input data for use in test cases.
*
* @param miniCluster reference to the Minicluster where the file should be created
* @param fileName pathname of the file to be created
* @param inputData input for test cases, each string in inputData[] is written
* on one line
* @throws IOException
*/
|
Helper to create a dfs file on the Minicluster DFS with given input data for use in test cases
|
createInputFile
|
{
"repo_name": "bsmedberg/pig",
"path": "test/org/apache/pig/test/Util.java",
"license": "apache-2.0",
"size": 44312
}
|
[
"java.io.IOException",
"org.apache.hadoop.fs.FileSystem"
] |
import java.io.IOException; import org.apache.hadoop.fs.FileSystem;
|
import java.io.*; import org.apache.hadoop.fs.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 2,165,876
|
@Override
final void write (RecordOutputStream s) throws IOException {
s.startRecord (ModuleSerializationTags.EXPRESSION_RECORD_CASE, serializationSchema);
s.writeUTF (baseRecordPatternVarName);
conditionExpr.write (s);
resultExpr.write (s);
s.writeIntCompressed (fieldBindingVarMap.size());
for (final Map.Entry<FieldName, String> entry : fieldBindingVarMap.entrySet()) {
FieldName fn = entry.getKey();
String binding = entry.getValue();
FieldNameIO.writeFieldName(fn, s);
s.writeUTF (binding);
}
s.endRecord ();
}
|
final void write (RecordOutputStream s) throws IOException { s.startRecord (ModuleSerializationTags.EXPRESSION_RECORD_CASE, serializationSchema); s.writeUTF (baseRecordPatternVarName); conditionExpr.write (s); resultExpr.write (s); s.writeIntCompressed (fieldBindingVarMap.size()); for (final Map.Entry<FieldName, String> entry : fieldBindingVarMap.entrySet()) { FieldName fn = entry.getKey(); String binding = entry.getValue(); FieldNameIO.writeFieldName(fn, s); s.writeUTF (binding); } s.endRecord (); }
|
/**
* Write this instance of RecordCase to the RecordOutputStream.
* @param s
* @throws IOException
*/
|
Write this instance of RecordCase to the RecordOutputStream
|
write
|
{
"repo_name": "levans/Open-Quark",
"path": "src/CAL_Platform/src/org/openquark/cal/compiler/Expression.java",
"license": "bsd-3-clause",
"size": 130740
}
|
[
"java.io.IOException",
"java.util.Map",
"org.openquark.cal.internal.serialization.ModuleSerializationTags",
"org.openquark.cal.internal.serialization.RecordOutputStream"
] |
import java.io.IOException; import java.util.Map; import org.openquark.cal.internal.serialization.ModuleSerializationTags; import org.openquark.cal.internal.serialization.RecordOutputStream;
|
import java.io.*; import java.util.*; import org.openquark.cal.internal.serialization.*;
|
[
"java.io",
"java.util",
"org.openquark.cal"
] |
java.io; java.util; org.openquark.cal;
| 1,966,360
|
public static void printVector(Vector V) {
printVector(V, 4);
}
|
static void function(Vector V) { printVector(V, 4); }
|
/**
* Print a row vector.
*
* @param V a dense or sparse vector
*/
|
Print a row vector
|
printVector
|
{
"repo_name": "MingjieQian/LAML",
"path": "src/ml/utils/Printer.java",
"license": "apache-2.0",
"size": 19651
}
|
[
"la.vector.Vector"
] |
import la.vector.Vector;
|
import la.vector.*;
|
[
"la.vector"
] |
la.vector;
| 160,257
|
@Nullable ContainerNode extractAttributes(@NonNull MapEntryNode route);
|
@Nullable ContainerNode extractAttributes(@NonNull MapEntryNode route);
|
/**
* Extract attributes from an route entry.
*
* @param route Route entry
* @return Associated attributes, potentially null
* @throws NullPointerException if route is null
*/
|
Extract attributes from an route entry
|
extractAttributes
|
{
"repo_name": "opendaylight/bgpcep",
"path": "bgp/rib-spi/src/main/java/org/opendaylight/protocol/bgp/rib/spi/RIBSupport.java",
"license": "epl-1.0",
"size": 13909
}
|
[
"org.eclipse.jdt.annotation.NonNull",
"org.eclipse.jdt.annotation.Nullable",
"org.opendaylight.yangtools.yang.data.api.schema.ContainerNode",
"org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode"
] |
import org.eclipse.jdt.annotation.NonNull; import org.eclipse.jdt.annotation.Nullable; import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode; import org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode;
|
import org.eclipse.jdt.annotation.*; import org.opendaylight.yangtools.yang.data.api.schema.*;
|
[
"org.eclipse.jdt",
"org.opendaylight.yangtools"
] |
org.eclipse.jdt; org.opendaylight.yangtools;
| 553,635
|
public void addPropertyChangeListener(PropertyChangeListener l)
{
pcs.addPropertyChangeListener(l);
}
|
void function(PropertyChangeListener l) { pcs.addPropertyChangeListener(l); }
|
/**
* Adds property change listener.
*
* @param l new listener.
*/
|
Adds property change listener
|
addPropertyChangeListener
|
{
"repo_name": "tcolar/fantomidemodule",
"path": "src/net/colar/netbeans/fan/debugger/FanDebugPathProvider.java",
"license": "artistic-2.0",
"size": 26938
}
|
[
"java.beans.PropertyChangeListener"
] |
import java.beans.PropertyChangeListener;
|
import java.beans.*;
|
[
"java.beans"
] |
java.beans;
| 1,398,279
|
public static void copy(InputStream in, OutputStream out) throws IOException {
out = new BufferedOutputStream(out, 0x1000);
in = new BufferedInputStream(in, 0x1000);
// Copy the contents from the input stream to the output stream.
while (true) {
int b = in.read();
if (b == -1) {
break;
}
out.write(b);
}
out.flush();
}
|
static void function(InputStream in, OutputStream out) throws IOException { out = new BufferedOutputStream(out, 0x1000); in = new BufferedInputStream(in, 0x1000); while (true) { int b = in.read(); if (b == -1) { break; } out.write(b); } out.flush(); }
|
/**
* Copy the contents of the input stream {@code in} to the output stream {@code out}.
*
* @param in the stream to read
* @param out the stream to write
* @throws IOException when the stream(s) cannot be accessed
*/
|
Copy the contents of the input stream in to the output stream out
|
copy
|
{
"repo_name": "jisqyv/appinventor-sources",
"path": "appinventor/components/src/com/google/appinventor/components/runtime/util/FileUtil.java",
"license": "apache-2.0",
"size": 54685
}
|
[
"java.io.BufferedInputStream",
"java.io.BufferedOutputStream",
"java.io.IOException",
"java.io.InputStream",
"java.io.OutputStream"
] |
import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 209,743
|
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<WebhookInner>> updateWithResponseAsync(
String resourceGroupName,
String automationAccountName,
String webhookName,
WebhookUpdateParameters parameters) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (automationAccountName == null) {
return Mono
.error(new IllegalArgumentException("Parameter automationAccountName is required and cannot be null."));
}
if (webhookName == null) {
return Mono.error(new IllegalArgumentException("Parameter webhookName is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (parameters == null) {
return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
} else {
parameters.validate();
}
final String apiVersion = "2015-10-31";
final String accept = "application/json";
return FluxUtil
.withContext(
context ->
service
.update(
this.client.getEndpoint(),
resourceGroupName,
automationAccountName,
webhookName,
this.client.getSubscriptionId(),
apiVersion,
parameters,
accept,
context))
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
|
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<WebhookInner>> function( String resourceGroupName, String automationAccountName, String webhookName, WebhookUpdateParameters parameters) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (automationAccountName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (webhookName == null) { return Mono.error(new IllegalArgumentException(STR)); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (parameters == null) { return Mono.error(new IllegalArgumentException(STR)); } else { parameters.validate(); } final String apiVersion = STR; final String accept = STR; return FluxUtil .withContext( context -> service .update( this.client.getEndpoint(), resourceGroupName, automationAccountName, webhookName, this.client.getSubscriptionId(), apiVersion, parameters, accept, context)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); }
|
/**
* Update the webhook identified by webhook name.
*
* @param resourceGroupName Name of an Azure Resource group.
* @param automationAccountName The name of the automation account.
* @param webhookName The webhook name.
* @param parameters The update parameters for webhook.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return definition of the webhook type.
*/
|
Update the webhook identified by webhook name
|
updateWithResponseAsync
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/automation/azure-resourcemanager-automation/src/main/java/com/azure/resourcemanager/automation/implementation/WebhooksClientImpl.java",
"license": "mit",
"size": 63264
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"com.azure.core.util.FluxUtil",
"com.azure.resourcemanager.automation.fluent.models.WebhookInner",
"com.azure.resourcemanager.automation.models.WebhookUpdateParameters"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.core.util.FluxUtil; import com.azure.resourcemanager.automation.fluent.models.WebhookInner; import com.azure.resourcemanager.automation.models.WebhookUpdateParameters;
|
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.automation.fluent.models.*; import com.azure.resourcemanager.automation.models.*;
|
[
"com.azure.core",
"com.azure.resourcemanager"
] |
com.azure.core; com.azure.resourcemanager;
| 1,244,784
|
@Override
public Adapter createTeacherAdapter() {
if (teacherItemProvider == null) {
teacherItemProvider = new TeacherItemProvider(this);
}
return teacherItemProvider;
}
protected YearItemProvider yearItemProvider;
|
Adapter function() { if (teacherItemProvider == null) { teacherItemProvider = new TeacherItemProvider(this); } return teacherItemProvider; } protected YearItemProvider yearItemProvider;
|
/**
* This creates an adapter for a {@link school.Teacher}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This creates an adapter for a <code>school.Teacher</code>.
|
createTeacherAdapter
|
{
"repo_name": "tht-krisztian/EMF-IncQuery-Examples",
"path": "school/school.edit/src/school/provider/SchoolItemProviderAdapterFactory.java",
"license": "epl-1.0",
"size": 10305
}
|
[
"org.eclipse.emf.common.notify.Adapter"
] |
import org.eclipse.emf.common.notify.Adapter;
|
import org.eclipse.emf.common.notify.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 2,341,376
|
@Override
public PublicKey byteArrayToPublicKey(byte[] key) {
try {
KeyFactory keyFactory = KeyFactory.getInstance("RSA", CRYPTO_PROVIDER);
return keyFactory.generatePublic(new X509EncodedKeySpec(key));
} catch (InvalidKeySpecException e) {
return null; // Key in byte array uses invalid format
} catch (NoSuchAlgorithmException | NoSuchProviderException e) {
ErrorLoggingSingleton log = ErrorLoggingSingleton.getInstance();
log.storeError(ErrorLoggingSingleton.getExceptionStackTraceAsFormattedString(e));
// Bouncy Castle is included and all algorithms/paddings are supported in Bouncy Castle
throw new RuntimeException(e);
}
}
|
PublicKey function(byte[] key) { try { KeyFactory keyFactory = KeyFactory.getInstance("RSA", CRYPTO_PROVIDER); return keyFactory.generatePublic(new X509EncodedKeySpec(key)); } catch (InvalidKeySpecException e) { return null; } catch (NoSuchAlgorithmException NoSuchProviderException e) { ErrorLoggingSingleton log = ErrorLoggingSingleton.getInstance(); log.storeError(ErrorLoggingSingleton.getExceptionStackTraceAsFormattedString(e)); throw new RuntimeException(e); } }
|
/**
* Creates a PublicKey object from the raw key bytes obtained through publicKeyToByteArray.
*
* @param key The key bytes.
* @return A PublicKey that encapsulates the raw key or null if the key's format is invalid.
*/
|
Creates a PublicKey object from the raw key bytes obtained through publicKeyToByteArray
|
byteArrayToPublicKey
|
{
"repo_name": "timberdoodle/TimberdoodleApp",
"path": "project/app/src/main/java/de/tu_darmstadt/timberdoodle/friendcipher/FriendCipher.java",
"license": "gpl-2.0",
"size": 15055
}
|
[
"de.tu_darmstadt.adtn.errorlogger.ErrorLoggingSingleton",
"java.security.KeyFactory",
"java.security.NoSuchAlgorithmException",
"java.security.NoSuchProviderException",
"java.security.PublicKey",
"java.security.spec.InvalidKeySpecException",
"java.security.spec.X509EncodedKeySpec"
] |
import de.tu_darmstadt.adtn.errorlogger.ErrorLoggingSingleton; import java.security.KeyFactory; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.security.PublicKey; import java.security.spec.InvalidKeySpecException; import java.security.spec.X509EncodedKeySpec;
|
import de.tu_darmstadt.adtn.errorlogger.*; import java.security.*; import java.security.spec.*;
|
[
"de.tu_darmstadt.adtn",
"java.security"
] |
de.tu_darmstadt.adtn; java.security;
| 2,133,879
|
public static List<Transaction> getTransactionsForUser(String userId, String password)
{
if(!validateUser(userId, password))
return null;
Session session = HibernateCore.getSession();
session.beginTransaction();
String hql = "from Transaction t where t.userId=? order by date desc";
Query hQuery = session.createQuery(hql).setString(0, userId);
List<Transaction> results = hQuery.list();
session.getTransaction().commit();
return (results);
}
|
static List<Transaction> function(String userId, String password) { if(!validateUser(userId, password)) return null; Session session = HibernateCore.getSession(); session.beginTransaction(); String hql = STR; Query hQuery = session.createQuery(hql).setString(0, userId); List<Transaction> results = hQuery.list(); session.getTransaction().commit(); return (results); }
|
/**
* method to get all transactions by a user
* @param userId the user's id
* @return the list of transactions
* */
|
method to get all transactions by a user
|
getTransactionsForUser
|
{
"repo_name": "animesks/projects",
"path": "Non_Academic/TarangStockExchange_2011/src/slambook/slamXchange/persistence/UserDAO.java",
"license": "gpl-2.0",
"size": 8699
}
|
[
"java.util.List",
"org.hibernate.Query",
"org.hibernate.Session"
] |
import java.util.List; import org.hibernate.Query; import org.hibernate.Session;
|
import java.util.*; import org.hibernate.*;
|
[
"java.util",
"org.hibernate"
] |
java.util; org.hibernate;
| 297,079
|
@Override public void enterProtectedRule(@NotNull PJParser.ProtectedRuleContext ctx) { }
|
@Override public void enterProtectedRule(@NotNull PJParser.ProtectedRuleContext ctx) { }
|
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
|
The default implementation does nothing
|
exitAnnotationTypeBody
|
{
"repo_name": "Diolor/PJ",
"path": "src/main/java/com/lorentzos/pj/PJBaseListener.java",
"license": "mit",
"size": 73292
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 782,528
|
public File getCurrentDir() {
return new File(root, STORAGE_DIR_CURRENT);
}
|
File function() { return new File(root, STORAGE_DIR_CURRENT); }
|
/**
* Directory {@code current} contains latest files defining
* the file system meta-data.
*
* @return the directory path
*/
|
Directory current contains latest files defining the file system meta-data
|
getCurrentDir
|
{
"repo_name": "songweijia/fffs",
"path": "sources/hadoop-2.4.1-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java",
"license": "apache-2.0",
"size": 35481
}
|
[
"java.io.File"
] |
import java.io.File;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,333,893
|
public Map<String, String> getReportsLevel2(final Map<String, String> replacements) {
return getReports(filterLevel2, replacements);
}
|
Map<String, String> function(final Map<String, String> replacements) { return getReports(filterLevel2, replacements); }
|
/**
* DOCUMENT ME!
*
* @param replacements DOCUMENT ME!
*
* @return DOCUMENT ME!
*/
|
DOCUMENT ME
|
getReportsLevel2
|
{
"repo_name": "cismet/report-generator",
"path": "src/main/java/de/cismet/custom/wrrl/reportgenerator/WRRLReportProvider.java",
"license": "lgpl-3.0",
"size": 10393
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,365,632
|
public void start() throws ConfigurationException {
if (connectionFactory == null) {
throw new ConfigurationException("can not start without client socket factory");
}
try {
server = new ServerSocket(port);
} catch (IOException e) {
System.out.println(new LogEntry(e.getMessage(), e));
throw new ConfigurationException("Cannot start server at port " + port, e);
}
System.out.println(new LogEntry("starting socket server at port " + port));
serverThread = new Thread(this);
serverThread.start();
}
|
void function() throws ConfigurationException { if (connectionFactory == null) { throw new ConfigurationException(STR); } try { server = new ServerSocket(port); } catch (IOException e) { System.out.println(new LogEntry(e.getMessage(), e)); throw new ConfigurationException(STR + port, e); } System.out.println(new LogEntry(STR + port)); serverThread = new Thread(this); serverThread.start(); }
|
/**
* Starts the service. Instantiates a server socket and starts a thread that
* handles incoming client connections.
*/
|
Starts the service. Instantiates a server socket and starts a thread that handles incoming client connections
|
start
|
{
"repo_name": "Boncode/Iglu-Common",
"path": "src/main/java/org/ijsberg/iglu/server/connection/socket/module/StandardSocketServer.java",
"license": "lgpl-3.0",
"size": 6825
}
|
[
"java.io.IOException",
"java.net.ServerSocket",
"org.ijsberg.iglu.configuration.ConfigurationException",
"org.ijsberg.iglu.logging.LogEntry"
] |
import java.io.IOException; import java.net.ServerSocket; import org.ijsberg.iglu.configuration.ConfigurationException; import org.ijsberg.iglu.logging.LogEntry;
|
import java.io.*; import java.net.*; import org.ijsberg.iglu.configuration.*; import org.ijsberg.iglu.logging.*;
|
[
"java.io",
"java.net",
"org.ijsberg.iglu"
] |
java.io; java.net; org.ijsberg.iglu;
| 1,840,128
|
@SuppressWarnings("unchecked")
private <T> T invokeFunction(String name, Object... args) {
try {
return (T)this.jsEngine.invokeFunction(name, args);
}
catch (ScriptException se) {
throw new IllegalStateException(se);
}
catch (NoSuchMethodException nsme) {
throw new IllegalStateException(nsme);
}
}
|
@SuppressWarnings(STR) <T> T function(String name, Object... args) { try { return (T)this.jsEngine.invokeFunction(name, args); } catch (ScriptException se) { throw new IllegalStateException(se); } catch (NoSuchMethodException nsme) { throw new IllegalStateException(nsme); } }
|
/**
* Invoke the function with the given name and arguments and return the
* result.
*
* @param <T> The type of the result.
* @param name The name of the function.
* @param args The arguments.
* @return The result.
*/
|
Invoke the function with the given name and arguments and return the result
|
invokeFunction
|
{
"repo_name": "kjots/json-toolkit",
"path": "json-object.js/src/test/java/org/kjots/json/object/js/impl/JsJsonObjectMapImplTest.java",
"license": "apache-2.0",
"size": 4447
}
|
[
"javax.script.ScriptException"
] |
import javax.script.ScriptException;
|
import javax.script.*;
|
[
"javax.script"
] |
javax.script;
| 2,108,796
|
protected void addHeating_setpointPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_House_heating_setpoint_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_House_heating_setpoint_feature", "_UI_House_type"),
VisGridPackage.eINSTANCE.getHouse_Heating_setpoint(),
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
|
void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), VisGridPackage.eINSTANCE.getHouse_Heating_setpoint(), true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); }
|
/**
* This adds a property descriptor for the Heating setpoint feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
|
This adds a property descriptor for the Heating setpoint feature.
|
addHeating_setpointPropertyDescriptor
|
{
"repo_name": "mikesligo/visGrid",
"path": "ie.tcd.gmf.visGrid.edit/src/visGrid/provider/HouseItemProvider.java",
"license": "gpl-3.0",
"size": 120584
}
|
[
"org.eclipse.emf.edit.provider.ComposeableAdapterFactory",
"org.eclipse.emf.edit.provider.ItemPropertyDescriptor"
] |
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
|
import org.eclipse.emf.edit.provider.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 2,019,269
|
public void setAllowPopupsEnabled(boolean allow) {
setContentSettingEnabled(ContentSettingsType.CONTENT_SETTINGS_TYPE_POPUPS, allow);
}
|
void function(boolean allow) { setContentSettingEnabled(ContentSettingsType.CONTENT_SETTINGS_TYPE_POPUPS, allow); }
|
/**
* Sets the preferences on whether to enable/disable popups
*
* @param allow attribute to enable/disable popups
*/
|
Sets the preferences on whether to enable/disable popups
|
setAllowPopupsEnabled
|
{
"repo_name": "mogoweb/365browser",
"path": "app/src/main/java/org/chromium/chrome/browser/preferences/PrefServiceBridge.java",
"license": "apache-2.0",
"size": 38376
}
|
[
"org.chromium.chrome.browser.ContentSettingsType"
] |
import org.chromium.chrome.browser.ContentSettingsType;
|
import org.chromium.chrome.browser.*;
|
[
"org.chromium.chrome"
] |
org.chromium.chrome;
| 372,995
|
//set the look and feel to be windows if on a windows computer
if (System.getProperty("os.name").toLowerCase().contains("windows"))
try {
UIManager.setLookAndFeel("com.sun.java.swing.plaf.windows.WindowsLookAndFeel");
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException e) {
logger.log(Level.INFO,"You are not using windows, reverting to default look and feel");
}
Logger.getGlobal().setLevel(Level.INFO);
//set properties for tool-tips (used as in program help)
ToolTipManager.sharedInstance().setInitialDelay(500);
new MainFrame().setVisible(true);
}
|
if (System.getProperty(STR).toLowerCase().contains(STR)) try { UIManager.setLookAndFeel(STR); } catch (ClassNotFoundException InstantiationException IllegalAccessException UnsupportedLookAndFeelException e) { logger.log(Level.INFO,STR); } Logger.getGlobal().setLevel(Level.INFO); ToolTipManager.sharedInstance().setInitialDelay(500); new MainFrame().setVisible(true); }
|
/**
* The entry point for the entire program
* @param args system arguments
*/
|
The entry point for the entire program
|
main
|
{
"repo_name": "qhadron/Personality_Survey",
"path": "src/Launcher.java",
"license": "gpl-2.0",
"size": 1509
}
|
[
"java.util.logging.Level",
"java.util.logging.Logger",
"javax.swing.ToolTipManager",
"javax.swing.UIManager",
"javax.swing.UnsupportedLookAndFeelException"
] |
import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.ToolTipManager; import javax.swing.UIManager; import javax.swing.UnsupportedLookAndFeelException;
|
import java.util.logging.*; import javax.swing.*;
|
[
"java.util",
"javax.swing"
] |
java.util; javax.swing;
| 154,921
|
void processOutdatedItem(CompileContext context, String url, @Nullable ValidityState state);
|
void processOutdatedItem(CompileContext context, String url, @Nullable ValidityState state);
|
/**
* Called when the compiler detects that an item in the output directory is outdated
* and will be recompiled. Note that this method will be called before, and independently from,
* subsequent calls to {@link #process}.
*
* @param context the current compile context.
* @param url the URL of a file in the output directory which will be recompiled.
* @param state the validity state of the file specified by {@code url}.
*/
|
Called when the compiler detects that an item in the output directory is outdated and will be recompiled. Note that this method will be called before, and independently from, subsequent calls to <code>#process</code>
|
processOutdatedItem
|
{
"repo_name": "goodwinnk/intellij-community",
"path": "java/compiler/openapi/src/com/intellij/openapi/compiler/PackagingCompiler.java",
"license": "apache-2.0",
"size": 2075
}
|
[
"org.jetbrains.annotations.Nullable"
] |
import org.jetbrains.annotations.Nullable;
|
import org.jetbrains.annotations.*;
|
[
"org.jetbrains.annotations"
] |
org.jetbrains.annotations;
| 1,331,641
|
String getHtmlFragment(JRHtmlExporterContext exporterContext, JRGenericPrintElement element);
|
String getHtmlFragment(JRHtmlExporterContext exporterContext, JRGenericPrintElement element);
|
/**
* Returns an HTML fragment that is to be inserted in the export output.
*
* @param element the generic print element
* @return the HTML fragment that represents the exported element
*/
|
Returns an HTML fragment that is to be inserted in the export output
|
getHtmlFragment
|
{
"repo_name": "sikachu/jasperreports",
"path": "src/net/sf/jasperreports/engine/export/GenericElementHtmlHandler.java",
"license": "lgpl-3.0",
"size": 1674
}
|
[
"net.sf.jasperreports.engine.JRGenericPrintElement"
] |
import net.sf.jasperreports.engine.JRGenericPrintElement;
|
import net.sf.jasperreports.engine.*;
|
[
"net.sf.jasperreports"
] |
net.sf.jasperreports;
| 864,221
|
protected boolean isMatched(GenericFile<T> file, boolean isDirectory, List<T> files) {
String name = file.getFileNameOnly();
// folders/names starting with dot is always skipped (eg. ".", ".camel",
// ".camelLock")
if (name.startsWith(".")) {
return false;
}
// lock files should be skipped
if (name.endsWith(FileComponent.DEFAULT_LOCK_FILE_POSTFIX)) {
return false;
}
if (endpoint.getFilter() != null) {
if (!endpoint.getFilter().accept(file)) {
return false;
}
}
if (endpoint.getAntFilter() != null) {
if (!endpoint.getAntFilter().accept(file)) {
return false;
}
}
if (isDirectory && endpoint.getFilterDirectory() != null) {
// create a dummy exchange as Exchange is needed for expression
// evaluation
Exchange dummy = endpoint.createExchange(file);
boolean matches = endpoint.getFilterDirectory().matches(dummy);
if (!matches) {
return false;
}
}
// directories are regarded as matched if filter accepted them
if (isDirectory) {
return true;
}
// exclude take precedence over include
if (excludePattern != null) {
if (excludePattern.matcher(name).matches()) {
return false;
}
}
if (includePattern != null) {
if (!includePattern.matcher(name).matches()) {
return false;
}
}
// use file expression for a simple dynamic file filter
if (endpoint.getFileName() != null) {
fileExpressionResult = evaluateFileExpression();
if (fileExpressionResult != null) {
if (!name.equals(fileExpressionResult)) {
return false;
}
}
}
if (endpoint.getFilterFile() != null) {
// create a dummy exchange as Exchange is needed for expression
// evaluation
Exchange dummy = endpoint.createExchange(file);
boolean matches = endpoint.getFilterFile().matches(dummy);
if (!matches) {
return false;
}
}
// if done file name is enabled, then the file is only valid if a done
// file exists
if (endpoint.getDoneFileName() != null) {
// done file must be in same path as the file
String doneFileName = endpoint.createDoneFileName(file.getAbsoluteFilePath());
StringHelper.notEmpty(doneFileName, "doneFileName", endpoint);
// is it a done file name?
if (endpoint.isDoneFile(file.getFileNameOnly())) {
LOG.trace("Skipping done file: {}", file);
return false;
}
if (!isMatched(file, doneFileName, files)) {
return false;
}
}
return true;
}
|
boolean function(GenericFile<T> file, boolean isDirectory, List<T> files) { String name = file.getFileNameOnly(); if (name.startsWith(".")) { return false; } if (name.endsWith(FileComponent.DEFAULT_LOCK_FILE_POSTFIX)) { return false; } if (endpoint.getFilter() != null) { if (!endpoint.getFilter().accept(file)) { return false; } } if (endpoint.getAntFilter() != null) { if (!endpoint.getAntFilter().accept(file)) { return false; } } if (isDirectory && endpoint.getFilterDirectory() != null) { Exchange dummy = endpoint.createExchange(file); boolean matches = endpoint.getFilterDirectory().matches(dummy); if (!matches) { return false; } } if (isDirectory) { return true; } if (excludePattern != null) { if (excludePattern.matcher(name).matches()) { return false; } } if (includePattern != null) { if (!includePattern.matcher(name).matches()) { return false; } } if (endpoint.getFileName() != null) { fileExpressionResult = evaluateFileExpression(); if (fileExpressionResult != null) { if (!name.equals(fileExpressionResult)) { return false; } } } if (endpoint.getFilterFile() != null) { Exchange dummy = endpoint.createExchange(file); boolean matches = endpoint.getFilterFile().matches(dummy); if (!matches) { return false; } } if (endpoint.getDoneFileName() != null) { String doneFileName = endpoint.createDoneFileName(file.getAbsoluteFilePath()); StringHelper.notEmpty(doneFileName, STR, endpoint); if (endpoint.isDoneFile(file.getFileNameOnly())) { LOG.trace(STR, file); return false; } if (!isMatched(file, doneFileName, files)) { return false; } } return true; }
|
/**
* Strategy to perform file matching based on endpoint configuration.
* <p/>
* Will always return <tt>false</tt> for certain files/folders:
* <ul>
* <li>Starting with a dot</li>
* <li>lock files</li>
* </ul>
* And then <tt>true</tt> for directories.
*
* @param file the file
* @param isDirectory whether the file is a directory or a file
* @param files files in the directory
* @return <tt>true</tt> if the file is matched, <tt>false</tt> if not
*/
|
Strategy to perform file matching based on endpoint configuration. Will always return false for certain files/folders: Starting with a dot lock files And then true for directories
|
isMatched
|
{
"repo_name": "DariusX/camel",
"path": "components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileConsumer.java",
"license": "apache-2.0",
"size": 29770
}
|
[
"java.util.List",
"org.apache.camel.Exchange",
"org.apache.camel.util.StringHelper"
] |
import java.util.List; import org.apache.camel.Exchange; import org.apache.camel.util.StringHelper;
|
import java.util.*; import org.apache.camel.*; import org.apache.camel.util.*;
|
[
"java.util",
"org.apache.camel"
] |
java.util; org.apache.camel;
| 2,336,289
|
public File[] getAllBlockFiles(ExtendedBlock block) {
if (dataNodes.size() == 0) return new File[0];
ArrayList<File> list = new ArrayList<File>();
for (int i=0; i < dataNodes.size(); i++) {
File blockFile = getBlockFile(i, block);
if (blockFile != null) {
list.add(blockFile);
}
}
return list.toArray(new File[list.size()]);
}
|
File[] function(ExtendedBlock block) { if (dataNodes.size() == 0) return new File[0]; ArrayList<File> list = new ArrayList<File>(); for (int i=0; i < dataNodes.size(); i++) { File blockFile = getBlockFile(i, block); if (blockFile != null) { list.add(blockFile); } } return list.toArray(new File[list.size()]); }
|
/**
* Get all files related to a block from all the datanodes
* @param block block for which corresponding files are needed
*/
|
Get all files related to a block from all the datanodes
|
getAllBlockFiles
|
{
"repo_name": "apurtell/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java",
"license": "apache-2.0",
"size": 126970
}
|
[
"java.io.File",
"java.util.ArrayList",
"org.apache.hadoop.hdfs.protocol.ExtendedBlock"
] |
import java.io.File; import java.util.ArrayList; import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
|
import java.io.*; import java.util.*; import org.apache.hadoop.hdfs.protocol.*;
|
[
"java.io",
"java.util",
"org.apache.hadoop"
] |
java.io; java.util; org.apache.hadoop;
| 1,985,745
|
public PathIterator getPathIterator(AffineTransform at) {
return path.getPathIterator(at);
}
|
PathIterator function(AffineTransform at) { return path.getPathIterator(at); }
|
/**
* Delegates to the enclosed <code>GeneralPath</code>.
*/
|
Delegates to the enclosed <code>GeneralPath</code>
|
getPathIterator
|
{
"repo_name": "jensnerche/plantuml",
"path": "src/net/sourceforge/plantuml/ugraphic/arc/ExtendedGeneralPath.java",
"license": "gpl-2.0",
"size": 21378
}
|
[
"java.awt.geom.AffineTransform",
"java.awt.geom.PathIterator"
] |
import java.awt.geom.AffineTransform; import java.awt.geom.PathIterator;
|
import java.awt.geom.*;
|
[
"java.awt"
] |
java.awt;
| 2,189,469
|
@PUT
@Path("{connInstanceKey}")
@Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
void update(@NotNull @PathParam("connInstanceKey") Long connInstanceKey, @NotNull ConnInstanceTO connInstanceTO);
|
@Path(STR) @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON }) void update(@NotNull @PathParam(STR) Long connInstanceKey, @NotNull ConnInstanceTO connInstanceTO);
|
/**
* Updates the connector instance matching the provided id.
*
* @param connInstanceKey connector instance id to be updated
* @param connInstanceTO connector instance to be stored
*/
|
Updates the connector instance matching the provided id
|
update
|
{
"repo_name": "massx1/syncope",
"path": "common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/ConnectorService.java",
"license": "apache-2.0",
"size": 8193
}
|
[
"javax.validation.constraints.NotNull",
"javax.ws.rs.Consumes",
"javax.ws.rs.Path",
"javax.ws.rs.PathParam",
"javax.ws.rs.core.MediaType",
"org.apache.syncope.common.lib.to.ConnInstanceTO"
] |
import javax.validation.constraints.NotNull; import javax.ws.rs.Consumes; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.core.MediaType; import org.apache.syncope.common.lib.to.ConnInstanceTO;
|
import javax.validation.constraints.*; import javax.ws.rs.*; import javax.ws.rs.core.*; import org.apache.syncope.common.lib.to.*;
|
[
"javax.validation",
"javax.ws",
"org.apache.syncope"
] |
javax.validation; javax.ws; org.apache.syncope;
| 1,441,190
|
// ============================================================================================
// SETTERS
// ============================================================================================
public void setElements(ArrayList<VibrationElement> _elements) {
length = findLength(_elements);
elements = _elements.toArray(new VibrationElement[_elements.size()]);
}
// ============================================================================================
// CONSTRUCTOR
// ============================================================================================
public UserVibration(int _id, String _name, int _length, VibrationElement[] _elements) {
super(_id, Vibration.TYPE_LONG, _name);
length = _length;
elements = _elements;
}
public UserVibration(int _id) {
super(_id, Vibration.TYPE_LONG, "");
}
|
void function(ArrayList<VibrationElement> _elements) { length = findLength(_elements); elements = _elements.toArray(new VibrationElement[_elements.size()]); } public UserVibration(int _id, String _name, int _length, VibrationElement[] _elements) { super(_id, Vibration.TYPE_LONG, _name); length = _length; elements = _elements; } public UserVibration(int _id) { super(_id, Vibration.TYPE_LONG, ""); }
|
/**
* Set array of vibration elements
*/
|
Set array of vibration elements
|
setElements
|
{
"repo_name": "IvanPosohov/customize-vibrancy",
"path": "src/ru/ivanp/vibro/vibrations/UserVibration.java",
"license": "apache-2.0",
"size": 3043
}
|
[
"java.util.ArrayList"
] |
import java.util.ArrayList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 689,568
|
public void setRootViewId(int id) {
mRootView = (ViewGroup) mInflater.inflate(id, null);
mTrack = (ViewGroup) mRootView.findViewById(R.id.tracks);
mArrowDown = (ImageView) mRootView.findViewById(R.id.arrow_down);
mArrowUp = (ImageView) mRootView.findViewById(R.id.arrow_up);
mScroller = (ScrollView) mRootView.findViewById(R.id.scroller);
//This was previously defined on show() method, moved here to prevent force close that occured
//when tapping fastly on a view to show quickaction dialog.
//Thanx to zammbi (github.com/zammbi)
mRootView.setLayoutParams(new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
setContentView(mRootView);
}
|
void function(int id) { mRootView = (ViewGroup) mInflater.inflate(id, null); mTrack = (ViewGroup) mRootView.findViewById(R.id.tracks); mArrowDown = (ImageView) mRootView.findViewById(R.id.arrow_down); mArrowUp = (ImageView) mRootView.findViewById(R.id.arrow_up); mScroller = (ScrollView) mRootView.findViewById(R.id.scroller); mRootView.setLayoutParams(new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT)); setContentView(mRootView); }
|
/**
* Set root view.
*
* @param id Layout resource id
*/
|
Set root view
|
setRootViewId
|
{
"repo_name": "ememo87/Library-SlidingMenu",
"path": "src/com/slidingmenu/lib/tools/QuickAction.java",
"license": "apache-2.0",
"size": 11198
}
|
[
"android.view.ViewGroup",
"android.widget.ImageView",
"android.widget.ScrollView"
] |
import android.view.ViewGroup; import android.widget.ImageView; import android.widget.ScrollView;
|
import android.view.*; import android.widget.*;
|
[
"android.view",
"android.widget"
] |
android.view; android.widget;
| 2,697,724
|
public static Object addCollection(Object list, Collection<?> collection)
{
Iterator<?> i=collection.iterator();
while(i.hasNext())
list=LazyList.add(list,i.next());
return list;
}
|
static Object function(Object list, Collection<?> collection) { Iterator<?> i=collection.iterator(); while(i.hasNext()) list=LazyList.add(list,i.next()); return list; }
|
/** Add the contents of a Collection to a LazyList
* @param list The list to add to or null if none yet created.
* @param collection The Collection whose contents should be added.
* @return The lazylist created or added to.
*/
|
Add the contents of a Collection to a LazyList
|
addCollection
|
{
"repo_name": "adhish20/XMPP-IoT-Client-with-Cordova",
"path": "iea/plugins/cordova-plugin-websocket/src/android/org/eclipse/jetty/util/LazyList.java",
"license": "mit",
"size": 14948
}
|
[
"java.util.Collection",
"java.util.Iterator"
] |
import java.util.Collection; import java.util.Iterator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,539,772
|
private void loadFile(String fileName, String prefix) throws IOException
{
Properties props = new Properties();
packagerListener.packagerMsg("Loading " + fileName,
PackagerListener.MSG_VERBOSE);
FileInputStream fis = new FileInputStream("");
try
{
props.load(fis);
}
finally
{
fis.close();
}
addProperties(props, prefix);
}
|
void function(String fileName, String prefix) throws IOException { Properties props = new Properties(); packagerListener.packagerMsg(STR + fileName, PackagerListener.MSG_VERBOSE); FileInputStream fis = new FileInputStream(""); try { props.load(fis); } finally { fis.close(); } addProperties(props, prefix); }
|
/**
* load properties from a file
*
* @param fileName name of the file to load
* @param prefix prefix to to be automatically added to the property name, can be null
*/
|
load properties from a file
|
loadFile
|
{
"repo_name": "maichler/izpack",
"path": "izpack-compiler/src/main/java/com/izforge/izpack/compiler/data/PropertyManager.java",
"license": "apache-2.0",
"size": 10759
}
|
[
"com.izforge.izpack.compiler.listener.PackagerListener",
"java.io.FileInputStream",
"java.io.IOException",
"java.util.Properties"
] |
import com.izforge.izpack.compiler.listener.PackagerListener; import java.io.FileInputStream; import java.io.IOException; import java.util.Properties;
|
import com.izforge.izpack.compiler.listener.*; import java.io.*; import java.util.*;
|
[
"com.izforge.izpack",
"java.io",
"java.util"
] |
com.izforge.izpack; java.io; java.util;
| 1,982,207
|
protected void logResources() throws SystemException {
if (tc.isEntryEnabled())
Tr.entry(tc, "logResources", _resourcesLogged);
if (!_resourcesLogged) {
for (int i = 0; i < _resourceObjects.size(); i++) {
final JTAResource resource = _resourceObjects.get(i);
if (resource.getResourceStatus() == StatefulResource.PREPARED) {
recordLog(resource);
}
}
_resourcesLogged = true;
}
if (tc.isEntryEnabled())
Tr.exit(tc, "logResources");
}
|
void function() throws SystemException { if (tc.isEntryEnabled()) Tr.entry(tc, STR, _resourcesLogged); if (!_resourcesLogged) { for (int i = 0; i < _resourceObjects.size(); i++) { final JTAResource resource = _resourceObjects.get(i); if (resource.getResourceStatus() == StatefulResource.PREPARED) { recordLog(resource); } } _resourcesLogged = true; } if (tc.isEntryEnabled()) Tr.exit(tc, STR); }
|
/**
* Log any prepared resources
*/
|
Log any prepared resources
|
logResources
|
{
"repo_name": "OpenLiberty/open-liberty",
"path": "dev/com.ibm.tx.core/src/com/ibm/tx/jta/impl/RegisteredResources.java",
"license": "epl-1.0",
"size": 124704
}
|
[
"com.ibm.websphere.ras.Tr",
"com.ibm.ws.Transaction",
"javax.transaction.SystemException"
] |
import com.ibm.websphere.ras.Tr; import com.ibm.ws.Transaction; import javax.transaction.SystemException;
|
import com.ibm.websphere.ras.*; import com.ibm.ws.*; import javax.transaction.*;
|
[
"com.ibm.websphere",
"com.ibm.ws",
"javax.transaction"
] |
com.ibm.websphere; com.ibm.ws; javax.transaction;
| 930,000
|
public void testConstructor_badconverterObject_DateTimeZone() throws Throwable {
try {
ConverterManager.getInstance().addInstantConverter(MockZeroNullIntegerConverter.INSTANCE);
MutableDateTime test = new MutableDateTime(new Integer(0), GregorianChronology.getInstance());
assertEquals(ISOChronology.getInstance(), test.getChronology());
assertEquals(0L, test.getMillis());
} finally {
ConverterManager.getInstance().removeInstantConverter(MockZeroNullIntegerConverter.INSTANCE);
}
}
|
void function() throws Throwable { try { ConverterManager.getInstance().addInstantConverter(MockZeroNullIntegerConverter.INSTANCE); MutableDateTime test = new MutableDateTime(new Integer(0), GregorianChronology.getInstance()); assertEquals(ISOChronology.getInstance(), test.getChronology()); assertEquals(0L, test.getMillis()); } finally { ConverterManager.getInstance().removeInstantConverter(MockZeroNullIntegerConverter.INSTANCE); } }
|
/**
* Test constructor (Object, DateTimeZone)
*/
|
Test constructor (Object, DateTimeZone)
|
testConstructor_badconverterObject_DateTimeZone
|
{
"repo_name": "likecool21/joda-time-2.3-Testing",
"path": "src/test/java/org/joda/time/TestMutableDateTime_Constructors.java",
"license": "apache-2.0",
"size": 23628
}
|
[
"org.joda.time.chrono.GregorianChronology",
"org.joda.time.chrono.ISOChronology",
"org.joda.time.convert.ConverterManager",
"org.joda.time.convert.MockZeroNullIntegerConverter"
] |
import org.joda.time.chrono.GregorianChronology; import org.joda.time.chrono.ISOChronology; import org.joda.time.convert.ConverterManager; import org.joda.time.convert.MockZeroNullIntegerConverter;
|
import org.joda.time.chrono.*; import org.joda.time.convert.*;
|
[
"org.joda.time"
] |
org.joda.time;
| 2,344,460
|
private static boolean isInterested(String group, String name, String type, Map<String, String> tags) {
if (group.equals(KAFKA_SERVER)) {
return (INTERESTED_TOPIC_METRIC_NAMES.contains(name) && BROKER_TOPIC_METRICS_GROUP.equals(type)) || (
INTERESTED_SERVER_METRIC_NAMES.contains(name) && REQUEST_KAFKA_HANDLER_POOL_GROUP.equals(type));
} else if (group.equals(KAFKA_NETWORK) && INTERESTED_NETWORK_METRIC_NAMES.contains(name)) {
return REQUEST_CHANNEL_GROUP.equals(type)
|| (REQUEST_METRICS_GROUP.equals(type) && INTERESTED_REQUEST_TYPE.contains(tags.get(REQUEST_TYPE_KEY)));
} else if (group.equals(KAFKA_LOG) && INTERESTED_LOG_METRIC_NAMES.contains(name)) {
return LOG_GROUP.equals(type) || LOG_FLUSH_STATS_GROUP.equals(type);
}
return false;
}
|
static boolean function(String group, String name, String type, Map<String, String> tags) { if (group.equals(KAFKA_SERVER)) { return (INTERESTED_TOPIC_METRIC_NAMES.contains(name) && BROKER_TOPIC_METRICS_GROUP.equals(type)) ( INTERESTED_SERVER_METRIC_NAMES.contains(name) && REQUEST_KAFKA_HANDLER_POOL_GROUP.equals(type)); } else if (group.equals(KAFKA_NETWORK) && INTERESTED_NETWORK_METRIC_NAMES.contains(name)) { return REQUEST_CHANNEL_GROUP.equals(type) (REQUEST_METRICS_GROUP.equals(type) && INTERESTED_REQUEST_TYPE.contains(tags.get(REQUEST_TYPE_KEY))); } else if (group.equals(KAFKA_LOG) && INTERESTED_LOG_METRIC_NAMES.contains(name)) { return LOG_GROUP.equals(type) LOG_FLUSH_STATS_GROUP.equals(type); } return false; }
|
/**
* Check if a metric is an interested metric.
*/
|
Check if a metric is an interested metric
|
isInterested
|
{
"repo_name": "becketqin/cruise-control",
"path": "cruise-control-metrics-reporter/src/main/java/com/linkedin/kafka/cruisecontrol/metricsreporter/metric/MetricsUtils.java",
"license": "bsd-2-clause",
"size": 18589
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 934,226
|
@JsonProperty("features")
public List<F> getFeatures() {
if (features == null) {
features = new ArrayList<>();
}
return features;
}
|
@JsonProperty(STR) List<F> function() { if (features == null) { features = new ArrayList<>(); } return features; }
|
/**
* Return the features of this collection.
*
* @return the features of this collection
*/
|
Return the features of this collection
|
getFeatures
|
{
"repo_name": "bremersee/geojson",
"path": "geojson/src/main/java/org/bremersee/geojson/AbstractGeoJsonFeatureCollection.java",
"license": "apache-2.0",
"size": 3608
}
|
[
"com.fasterxml.jackson.annotation.JsonProperty",
"java.util.ArrayList",
"java.util.List"
] |
import com.fasterxml.jackson.annotation.JsonProperty; import java.util.ArrayList; import java.util.List;
|
import com.fasterxml.jackson.annotation.*; import java.util.*;
|
[
"com.fasterxml.jackson",
"java.util"
] |
com.fasterxml.jackson; java.util;
| 1,191,635
|
private void checkIfNotMixingRhels(VDS vds, VDSGroup vdsGroup) {
if (vds.getHostOs() == null) {
return;
}
String[] hostOsInfo = vds.getHostOs().split("-");
if (hostOsInfo.length != 3) {
return;
}
String newOsName = hostOsInfo[0].trim();
String newRelease = hostOsInfo[2].trim();
// both the CentOS and RHEL has osName RHEL
if (newOsName.equals("RHEL") || newOsName.equals("oVirt Node") || newOsName.equals("RHEV Hypervisor")) {
VDS beforeRhel = vdsDao.getFirstUpRhelForVdsGroup(vdsGroup.getId());
boolean firstHostInCluster = beforeRhel == null;
if (firstHostInCluster) {
// no need to do any checks
return;
}
// if not first host in cluster, need to check if the version is the same
if (beforeRhel.getHostOs() == null) {
return;
}
String[] prevOsInfo = beforeRhel.getHostOs().split("-");
if (prevOsInfo.length != 3) {
return;
}
String prevRelease = prevOsInfo[2].trim();
boolean addingRhel6toRhel7 = newRelease.contains("el6") && prevRelease.contains("el7");
boolean addingRhel7toRhel6 = newRelease.contains("el7") && prevRelease.contains("el6");
if (addingRhel7toRhel6 || addingRhel6toRhel7) {
Map<String, String> customLogValues = new HashMap<>();
customLogValues.put("previousRhel", beforeRhel.getHostOs());
customLogValues.put("addingRhel", vds.getHostOs());
vdsNonOperational(vds, NonOperationalReason.MIXING_RHEL_VERSIONS_IN_CLUSTER, customLogValues);
vds.setStatus(VDSStatus.NonOperational);
}
}
}
|
void function(VDS vds, VDSGroup vdsGroup) { if (vds.getHostOs() == null) { return; } String[] hostOsInfo = vds.getHostOs().split("-"); if (hostOsInfo.length != 3) { return; } String newOsName = hostOsInfo[0].trim(); String newRelease = hostOsInfo[2].trim(); if (newOsName.equals("RHEL") newOsName.equals(STR) newOsName.equals(STR)) { VDS beforeRhel = vdsDao.getFirstUpRhelForVdsGroup(vdsGroup.getId()); boolean firstHostInCluster = beforeRhel == null; if (firstHostInCluster) { return; } if (beforeRhel.getHostOs() == null) { return; } String[] prevOsInfo = beforeRhel.getHostOs().split("-"); if (prevOsInfo.length != 3) { return; } String prevRelease = prevOsInfo[2].trim(); boolean addingRhel6toRhel7 = newRelease.contains("el6") && prevRelease.contains("el7"); boolean addingRhel7toRhel6 = newRelease.contains("el7") && prevRelease.contains("el6"); if (addingRhel7toRhel6 addingRhel6toRhel7) { Map<String, String> customLogValues = new HashMap<>(); customLogValues.put(STR, beforeRhel.getHostOs()); customLogValues.put(STR, vds.getHostOs()); vdsNonOperational(vds, NonOperationalReason.MIXING_RHEL_VERSIONS_IN_CLUSTER, customLogValues); vds.setStatus(VDSStatus.NonOperational); } } }
|
/**
* Sets the new host to non-operational if adding a RHEL6 machine to a cluster with RHEL7s or RHEL7 to cluster with RHEL6s
*
* It tries to be as non-invasive as possible and only if the above is the case, turns the host into non-operational.
*/
|
Sets the new host to non-operational if adding a RHEL6 machine to a cluster with RHEL7s or RHEL7 to cluster with RHEL6s It tries to be as non-invasive as possible and only if the above is the case, turns the host into non-operational
|
checkIfNotMixingRhels
|
{
"repo_name": "jtux270/translate",
"path": "ovirt/backend/manager/modules/vdsbroker/src/main/java/org/ovirt/engine/core/vdsbroker/VirtMonitoringStrategy.java",
"license": "gpl-3.0",
"size": 8301
}
|
[
"java.util.HashMap",
"java.util.Map",
"org.ovirt.engine.core.common.businessentities.NonOperationalReason",
"org.ovirt.engine.core.common.businessentities.VDSGroup",
"org.ovirt.engine.core.common.businessentities.VDSStatus"
] |
import java.util.HashMap; import java.util.Map; import org.ovirt.engine.core.common.businessentities.NonOperationalReason; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.VDSStatus;
|
import java.util.*; import org.ovirt.engine.core.common.businessentities.*;
|
[
"java.util",
"org.ovirt.engine"
] |
java.util; org.ovirt.engine;
| 1,676,157
|
private static void checkForPresenceOrAbsenceOfEventInList(
String eventLabel, Combo combo, boolean checkForPresence,
IWizard wizard)
{
// check that the event of the given label is (or isn't,
// according to the given switch) one of the available choices
int index = combo.indexOf(eventLabel);
TestCase.assertTrue("Event " + eventLabel + " is "
+ (checkForPresence ? "not " : "") + "available for selection.",
checkForPresence ? index != -1 : index == -1);
// if absence is being checked for
if (!checkForPresence) {
// cancel the dialog, since we know there will be nothing to select
wizard.performCancel();
}
// otherwise
else {
// select the event of the given label in the combo, in case the
// caller is going to finish the wizard
combo.select(index);
}
}
|
static void function( String eventLabel, Combo combo, boolean checkForPresence, IWizard wizard) { int index = combo.indexOf(eventLabel); TestCase.assertTrue(STR + eventLabel + STR + (checkForPresence ? STR : STRavailable for selection.", checkForPresence ? index != -1 : index == -1); if (!checkForPresence) { wizard.performCancel(); } else { combo.select(index); } }
|
/**
* Asserts that the event of the given label is present (or absent,
* depending on the given parameter) in the given combo being shown in the
* given wizard. If absence is checked for, the dialog is cancelled.
*/
|
Asserts that the event of the given label is present (or absent, depending on the given parameter) in the given combo being shown in the given wizard. If absence is checked for, the dialog is cancelled
|
checkForPresenceOrAbsenceOfEventInList
|
{
"repo_name": "HebaKhaled/bposs",
"path": "src/com.mentor.nucleus.bp.core.test/src/com/mentor/nucleus/bp/core/test/util/EventSelectionUtil.java",
"license": "apache-2.0",
"size": 9670
}
|
[
"junit.framework.TestCase",
"org.eclipse.jface.wizard.IWizard",
"org.eclipse.swt.widgets.Combo"
] |
import junit.framework.TestCase; import org.eclipse.jface.wizard.IWizard; import org.eclipse.swt.widgets.Combo;
|
import junit.framework.*; import org.eclipse.jface.wizard.*; import org.eclipse.swt.widgets.*;
|
[
"junit.framework",
"org.eclipse.jface",
"org.eclipse.swt"
] |
junit.framework; org.eclipse.jface; org.eclipse.swt;
| 659,980
|
public static boolean isAccessible(final Member m) {
return m != null && Modifier.isPublic(m.getModifiers()) && !m.isSynthetic();
}
|
static boolean function(final Member m) { return m != null && Modifier.isPublic(m.getModifiers()) && !m.isSynthetic(); }
|
/**
* This method check if the {@code Member} is accessible.
* @param m - the member.
* @return - Returns (true) if the member is accessible, otherwise return (false).
*/
|
This method check if the Member is accessible
|
isAccessible
|
{
"repo_name": "bearsoft5/Flamingo",
"path": "src/flamingo/reflect/MemberUtils.java",
"license": "apache-2.0",
"size": 3129
}
|
[
"java.lang.reflect.Member",
"java.lang.reflect.Modifier"
] |
import java.lang.reflect.Member; import java.lang.reflect.Modifier;
|
import java.lang.reflect.*;
|
[
"java.lang"
] |
java.lang;
| 1,482,258
|
public Message prepareMessage() throws MessagingException, IOException {
Properties props = new Properties();
String protocol = getProtocol();
// set properties using JAF
props.setProperty("mail." + protocol + ".host", smtpServer);
props.setProperty("mail." + protocol + ".port", getPort());
props.setProperty("mail." + protocol + ".auth", Boolean.toString(useAuthentication));
// set timeout
props.setProperty("mail." + protocol + ".timeout", getTimeout());
props.setProperty("mail." + protocol + ".connectiontimeout", getConnectionTimeout());
if (useStartTLS || useSSL) {
try {
String allProtocols = StringUtils.join(
SSLContext.getDefault().getSupportedSSLParameters().getProtocols(), " ");
logger.info("Use ssl/tls protocols for mail: " + allProtocols);
props.setProperty("mail." + protocol + ".ssl.protocols", allProtocols);
} catch (Exception e) {
logger.error("Problem setting ssl/tls protocols for mail", e);
}
}
if (enableDebug) {
props.setProperty("mail.debug","true");
}
if (useStartTLS) {
props.setProperty("mail.smtp.starttls.enable", "true");
if (enforceStartTLS){
// Requires JavaMail 1.4.2+
props.setProperty("mail.smtp.starttls.require", "true");
}
}
if (trustAllCerts) {
if (useSSL) {
props.setProperty("mail.smtps.ssl.socketFactory.class", TRUST_ALL_SOCKET_FACTORY);
props.setProperty("mail.smtps.ssl.socketFactory.fallback", "false");
} else if (useStartTLS) {
props.setProperty("mail.smtp.ssl.socketFactory.class", TRUST_ALL_SOCKET_FACTORY);
props.setProperty("mail.smtp.ssl.socketFactory.fallback", "false");
}
} else if (useLocalTrustStore){
File truststore = new File(trustStoreToUse);
logger.info("load local truststore - try to load truststore from: "+truststore.getAbsolutePath());
if(!truststore.exists()){
logger.info("load local truststore -Failed to load truststore from: "+truststore.getAbsolutePath());
truststore = new File(FileServer.getFileServer().getBaseDir(), trustStoreToUse);
logger.info("load local truststore -Attempting to read truststore from: "+truststore.getAbsolutePath());
if(!truststore.exists()){
logger.info("load local truststore -Failed to load truststore from: "+truststore.getAbsolutePath() + ". Local truststore not available, aborting execution.");
throw new IOException("Local truststore file not found. Also not available under : " + truststore.getAbsolutePath());
}
}
if (useSSL) {
// Requires JavaMail 1.4.2+
props.put("mail.smtps.ssl.socketFactory", new LocalTrustStoreSSLSocketFactory(truststore));
props.put("mail.smtps.ssl.socketFactory.fallback", "false");
} else if (useStartTLS) {
// Requires JavaMail 1.4.2+
props.put("mail.smtp.ssl.socketFactory", new LocalTrustStoreSSLSocketFactory(truststore));
props.put("mail.smtp.ssl.socketFactory.fallback", "false");
}
}
session = Session.getInstance(props, null);
Message message;
if (sendEmlMessage) {
message = new MimeMessage(session, new BufferedInputStream(new FileInputStream(emlMessage)));
} else {
message = new MimeMessage(session);
// handle body and attachments
Multipart multipart = new MimeMultipart();
final int attachmentCount = attachments.size();
if (plainBody &&
(attachmentCount == 0 || (mailBody.length() == 0 && attachmentCount == 1))) {
if (attachmentCount == 1) { // i.e. mailBody is empty
File first = attachments.get(0);
InputStream is = null;
try {
is = new BufferedInputStream(new FileInputStream(first));
message.setText(IOUtils.toString(is));
} finally {
IOUtils.closeQuietly(is);
}
} else {
message.setText(mailBody);
}
} else {
BodyPart body = new MimeBodyPart();
body.setText(mailBody);
multipart.addBodyPart(body);
for (File f : attachments) {
BodyPart attach = new MimeBodyPart();
attach.setFileName(f.getName());
attach.setDataHandler(new DataHandler(new FileDataSource(f.getAbsolutePath())));
multipart.addBodyPart(attach);
}
message.setContent(multipart);
}
}
// set from field and subject
if (null != sender) {
message.setFrom(new InternetAddress(sender));
}
if (null != replyTo) {
InternetAddress[] to = new InternetAddress[replyTo.size()];
message.setReplyTo(replyTo.toArray(to));
}
if(null != subject) {
message.setSubject(subject);
}
if (receiverTo != null) {
InternetAddress[] to = new InternetAddress[receiverTo.size()];
receiverTo.toArray(to);
message.setRecipients(Message.RecipientType.TO, to);
}
if (receiverCC != null) {
InternetAddress[] cc = new InternetAddress[receiverCC.size()];
receiverCC.toArray(cc);
message.setRecipients(Message.RecipientType.CC, cc);
}
if (receiverBCC != null) {
InternetAddress[] bcc = new InternetAddress[receiverBCC.size()];
receiverBCC.toArray(bcc);
message.setRecipients(Message.RecipientType.BCC, bcc);
}
for (int i = 0; i < headerFields.size(); i++) {
Argument argument = (Argument)((TestElementProperty)headerFields.get(i)).getObjectValue();
message.setHeader(argument.getName(), argument.getValue());
}
message.saveChanges();
return message;
}
|
Message function() throws MessagingException, IOException { Properties props = new Properties(); String protocol = getProtocol(); props.setProperty("mail." + protocol + ".host", smtpServer); props.setProperty("mail." + protocol + ".port", getPort()); props.setProperty("mail." + protocol + ".auth", Boolean.toString(useAuthentication)); props.setProperty("mail." + protocol + STR, getTimeout()); props.setProperty("mail." + protocol + STR, getConnectionTimeout()); if (useStartTLS useSSL) { try { String allProtocols = StringUtils.join( SSLContext.getDefault().getSupportedSSLParameters().getProtocols(), " "); logger.info(STR + allProtocols); props.setProperty("mail." + protocol + STR, allProtocols); } catch (Exception e) { logger.error(STR, e); } } if (enableDebug) { props.setProperty(STR,"true"); } if (useStartTLS) { props.setProperty(STR, "true"); if (enforceStartTLS){ props.setProperty(STR, "true"); } } if (trustAllCerts) { if (useSSL) { props.setProperty(STR, TRUST_ALL_SOCKET_FACTORY); props.setProperty(STR, "false"); } else if (useStartTLS) { props.setProperty(STR, TRUST_ALL_SOCKET_FACTORY); props.setProperty(STR, "false"); } } else if (useLocalTrustStore){ File truststore = new File(trustStoreToUse); logger.info(STR+truststore.getAbsolutePath()); if(!truststore.exists()){ logger.info(STR+truststore.getAbsolutePath()); truststore = new File(FileServer.getFileServer().getBaseDir(), trustStoreToUse); logger.info(STR+truststore.getAbsolutePath()); if(!truststore.exists()){ logger.info(STR+truststore.getAbsolutePath() + STR); throw new IOException(STR + truststore.getAbsolutePath()); } } if (useSSL) { props.put(STR, new LocalTrustStoreSSLSocketFactory(truststore)); props.put(STR, "false"); } else if (useStartTLS) { props.put(STR, new LocalTrustStoreSSLSocketFactory(truststore)); props.put(STR, "false"); } } session = Session.getInstance(props, null); Message message; if (sendEmlMessage) { message = new MimeMessage(session, new BufferedInputStream(new 
FileInputStream(emlMessage))); } else { message = new MimeMessage(session); Multipart multipart = new MimeMultipart(); final int attachmentCount = attachments.size(); if (plainBody && (attachmentCount == 0 (mailBody.length() == 0 && attachmentCount == 1))) { if (attachmentCount == 1) { File first = attachments.get(0); InputStream is = null; try { is = new BufferedInputStream(new FileInputStream(first)); message.setText(IOUtils.toString(is)); } finally { IOUtils.closeQuietly(is); } } else { message.setText(mailBody); } } else { BodyPart body = new MimeBodyPart(); body.setText(mailBody); multipart.addBodyPart(body); for (File f : attachments) { BodyPart attach = new MimeBodyPart(); attach.setFileName(f.getName()); attach.setDataHandler(new DataHandler(new FileDataSource(f.getAbsolutePath()))); multipart.addBodyPart(attach); } message.setContent(multipart); } } if (null != sender) { message.setFrom(new InternetAddress(sender)); } if (null != replyTo) { InternetAddress[] to = new InternetAddress[replyTo.size()]; message.setReplyTo(replyTo.toArray(to)); } if(null != subject) { message.setSubject(subject); } if (receiverTo != null) { InternetAddress[] to = new InternetAddress[receiverTo.size()]; receiverTo.toArray(to); message.setRecipients(Message.RecipientType.TO, to); } if (receiverCC != null) { InternetAddress[] cc = new InternetAddress[receiverCC.size()]; receiverCC.toArray(cc); message.setRecipients(Message.RecipientType.CC, cc); } if (receiverBCC != null) { InternetAddress[] bcc = new InternetAddress[receiverBCC.size()]; receiverBCC.toArray(bcc); message.setRecipients(Message.RecipientType.BCC, bcc); } for (int i = 0; i < headerFields.size(); i++) { Argument argument = (Argument)((TestElementProperty)headerFields.get(i)).getObjectValue(); message.setHeader(argument.getName(), argument.getValue()); } message.saveChanges(); return message; }
|
/**
* Prepares message prior to be sent via execute()-method, i.e. sets
* properties such as protocol, authentication, etc.
*
* @return Message-object to be sent to execute()-method
* @throws MessagingException
* when problems constructing or sending the mail occur
* @throws IOException
* when the mail content can not be read or truststore problems
* are detected
*/
|
Prepares message prior to be sent via execute()-method, i.e. sets properties such as protocol, authentication, etc
|
prepareMessage
|
{
"repo_name": "hizhangqi/jmeter-1",
"path": "src/protocol/mail/org/apache/jmeter/protocol/smtp/sampler/protocol/SendMailCommand.java",
"license": "apache-2.0",
"size": 26974
}
|
[
"java.io.BufferedInputStream",
"java.io.File",
"java.io.FileInputStream",
"java.io.IOException",
"java.io.InputStream",
"java.util.Properties",
"javax.activation.DataHandler",
"javax.activation.FileDataSource",
"javax.mail.BodyPart",
"javax.mail.Message",
"javax.mail.MessagingException",
"javax.mail.Multipart",
"javax.mail.Session",
"javax.mail.internet.InternetAddress",
"javax.mail.internet.MimeBodyPart",
"javax.mail.internet.MimeMessage",
"javax.mail.internet.MimeMultipart",
"javax.net.ssl.SSLContext",
"org.apache.commons.io.IOUtils",
"org.apache.commons.lang3.StringUtils",
"org.apache.jmeter.config.Argument",
"org.apache.jmeter.services.FileServer",
"org.apache.jmeter.testelement.property.TestElementProperty"
] |
import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Properties; import javax.activation.DataHandler; import javax.activation.FileDataSource; import javax.mail.BodyPart; import javax.mail.Message; import javax.mail.MessagingException; import javax.mail.Multipart; import javax.mail.Session; import javax.mail.internet.InternetAddress; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; import javax.net.ssl.SSLContext; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.jmeter.config.Argument; import org.apache.jmeter.services.FileServer; import org.apache.jmeter.testelement.property.TestElementProperty;
|
import java.io.*; import java.util.*; import javax.activation.*; import javax.mail.*; import javax.mail.internet.*; import javax.net.ssl.*; import org.apache.commons.io.*; import org.apache.commons.lang3.*; import org.apache.jmeter.config.*; import org.apache.jmeter.services.*; import org.apache.jmeter.testelement.property.*;
|
[
"java.io",
"java.util",
"javax.activation",
"javax.mail",
"javax.net",
"org.apache.commons",
"org.apache.jmeter"
] |
java.io; java.util; javax.activation; javax.mail; javax.net; org.apache.commons; org.apache.jmeter;
| 1,178,120
|
/**
 * Collects all ActivityInstances across every fragment instance of the
 * case that are currently in the given state.
 *
 * @param state the state to filter by
 * @return list of ActivityInstances whose state equals {@code state}
 */
public List<AbstractActivityInstance> getActivitiesWithState(State state) {
	List<AbstractActivityInstance> matching = new ArrayList<>();
	for (FragmentInstance fragment : caze.getFragmentInstances().values()) {
		for (ControlNodeInstance node : fragment.getControlNodeInstanceIdToInstance().values()) {
			// Only activity instances are of interest; skip other node kinds.
			if (!(node instanceof AbstractActivityInstance)) {
				continue;
			}
			if (node.getState() == state) {
				matching.add((AbstractActivityInstance) node);
			}
		}
	}
	return matching;
}
|
List<AbstractActivityInstance> function(State state) { List<AbstractActivityInstance> activityInstances = new ArrayList<>(); for (FragmentInstance fragmentInstance : caze.getFragmentInstances().values()) { for (ControlNodeInstance nodeInstance : fragmentInstance.getControlNodeInstanceIdToInstance().values()) { if (nodeInstance instanceof AbstractActivityInstance && nodeInstance.getState() == state) { activityInstances.add((AbstractActivityInstance) nodeInstance); } } } return activityInstances; }
|
/**
* Get all ActivityInstances in all Fragment Instances that are in a
* specific State.
*
* @param state
* @return Collection of ActivityInstances
*/
|
Get all ActivityInstances in all Fragment Instances that are in a specific State
|
getActivitiesWithState
|
{
"repo_name": "bptlab/JEngine",
"path": "src/main/java/de/hpi/bpt/chimera/execution/CaseExecutioner.java",
"license": "mit",
"size": 22339
}
|
[
"de.hpi.bpt.chimera.execution.controlnodes.ControlNodeInstance",
"de.hpi.bpt.chimera.execution.controlnodes.State",
"de.hpi.bpt.chimera.execution.controlnodes.activity.AbstractActivityInstance",
"java.util.ArrayList",
"java.util.List"
] |
import de.hpi.bpt.chimera.execution.controlnodes.ControlNodeInstance; import de.hpi.bpt.chimera.execution.controlnodes.State; import de.hpi.bpt.chimera.execution.controlnodes.activity.AbstractActivityInstance; import java.util.ArrayList; import java.util.List;
|
import de.hpi.bpt.chimera.execution.controlnodes.*; import de.hpi.bpt.chimera.execution.controlnodes.activity.*; import java.util.*;
|
[
"de.hpi.bpt",
"java.util"
] |
de.hpi.bpt; java.util;
| 2,131,381
|
/**
 * Get the directories where the name node at the given index stores its
 * edits, as resolved from that node's configuration.
 *
 * @param nnIndex index of the name node within this mini cluster
 * @return the collection of edit-log directory URIs
 * @throws IOException if the directories cannot be determined from the
 *         configuration
 */
public Collection<URI> getNameEditsDirs(int nnIndex) throws IOException {
    return FSNamesystem.getNamespaceEditsDirs(nameNodes[nnIndex].conf);
}
|
Collection<URI> function(int nnIndex) throws IOException { return FSNamesystem.getNamespaceEditsDirs(nameNodes[nnIndex].conf); }
|
/**
* Get the directories where the namenode stores its edits.
*/
|
Get the directories where the namenode stores its edits
|
getNameEditsDirs
|
{
"repo_name": "ZhangXFeng/hadoop",
"path": "src/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java",
"license": "apache-2.0",
"size": 100357
}
|
[
"java.io.IOException",
"java.util.Collection",
"org.apache.hadoop.hdfs.server.namenode.FSNamesystem"
] |
import java.io.IOException; import java.util.Collection; import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
|
import java.io.*; import java.util.*; import org.apache.hadoop.hdfs.server.namenode.*;
|
[
"java.io",
"java.util",
"org.apache.hadoop"
] |
java.io; java.util; org.apache.hadoop;
| 1,029,218
|
public Collection<OriginEntryGroup> getRecentGroups(Date day);
|
Collection<OriginEntryGroup> function(Date day);
|
/**
* Fetches groups created on or after the given date
*
* @param day the date origin entry groups to return must have been created on or after
* @return a Collection of origin entry groups created on or after that day
*/
|
Fetches groups created on or after the given date
|
getRecentGroups
|
{
"repo_name": "quikkian-ua-devops/will-financials",
"path": "kfs-core/src/main/java/org/kuali/kfs/gl/dataaccess/OriginEntryGroupDao.java",
"license": "agpl-3.0",
"size": 3854
}
|
[
"java.sql.Date",
"java.util.Collection",
"org.kuali.kfs.gl.businessobject.OriginEntryGroup"
] |
import java.sql.Date; import java.util.Collection; import org.kuali.kfs.gl.businessobject.OriginEntryGroup;
|
import java.sql.*; import java.util.*; import org.kuali.kfs.gl.businessobject.*;
|
[
"java.sql",
"java.util",
"org.kuali.kfs"
] |
java.sql; java.util; org.kuali.kfs;
| 2,125,315
|
/**
 * Create the ims.medical.domain.objects.NeuExamMotor list from the value
 * object collection.
 *
 * @param domainFactory used to create existing (persistent) domain objects
 * @param voCollection  the collection of value objects
 * @return the extracted list of NeuExamMotor domain objects
 */
public static java.util.List extractNeuExamMotorList(ims.domain.ILightweightDomainFactory domainFactory, ims.generalmedical.vo.NeuroMotorFindingsShortVoCollection voCollection)
{
	// Delegate with no pre-existing instance and a fresh map of
	// already-assembled domain objects.
	HashMap assembledObjects = new HashMap();
	return extractNeuExamMotorList(domainFactory, voCollection, null, assembledObjects);
}
|
static java.util.List function(ims.domain.ILightweightDomainFactory domainFactory, ims.generalmedical.vo.NeuroMotorFindingsShortVoCollection voCollection) { return extractNeuExamMotorList(domainFactory, voCollection, null, new HashMap()); }
|
/**
* Create the ims.medical.domain.objects.NeuExamMotor list from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/
|
Create the ims.medical.domain.objects.NeuExamMotor list from the value object collection
|
extractNeuExamMotorList
|
{
"repo_name": "FreudianNM/openMAXIMS",
"path": "Source Library/openmaxims_workspace/ValueObjects/src/ims/generalmedical/vo/domain/NeuroMotorFindingsShortVoAssembler.java",
"license": "agpl-3.0",
"size": 19222
}
|
[
"java.util.HashMap"
] |
import java.util.HashMap;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,207,947
|
/**
 * Validates the chain items of a ComboBox and marks the box according to
 * the chosen item.
 *
 * <p>Side effects: updates the border colour of {@code box}, toggles the
 * compatible flag on every list item (forcing a cell refresh), and pushes
 * status messages to the status log controller.
 *
 * @param box
 *     Item to validate
 * @return True if chosen item is valid, else false
 */
private boolean validateChainContainer(ComboBox box) {
    String format = controller.dataBean.getAttributeValue(OUTPUTFORMAT);
    if (format == null) {
        // No output format selected yet: flag the box, but keep going so
        // the items below still get marked incompatible.
        box.setStyle(FX_BORDER_COLOR_RED);
        statusLogController.setStatusTextUI(I18n.format(GUI_PROCESS_NO_FORMAT));
    }
    MIMETypes mtypes = Config.getInstance().getMimeTypes();
    MIMEType mtype = mtypes.findByName(format);
    ProcessingStepConfiguration cfg =
        (ProcessingStepConfiguration) box.getValue();
    ObservableList<ProcessingStepConfiguration> items =
        (ObservableList<ProcessingStepConfiguration>) box.getItems();
    if (format != null && mtype == null) {
        // Format is set but unknown to the MIME-type registry: nothing can
        // be compatible, so mark every item incompatible and bail out.
        box.setStyle(FX_BORDER_COLOR_RED);
        for (ProcessingStepConfiguration cfgI : items) {
            cfgI.setCompatible(false);
            //Workaround to force cell update
            items.set(items.indexOf(cfgI), cfgI);
        }
        statusLogController.setStatusTextUI(I18n.format(GUI_PROCESS_FORMAT_NOT_FOUND));
        return false;
    }
    //Mark items that are incompatible
    for (ProcessingStepConfiguration cfgI : items) {
        if (format != null) {
            // mtype is non-null here: the (format != null && mtype == null)
            // case already returned above.
            cfgI.setCompatible(
                cfgI.isCompatibleWithFormat(mtype.getType()));
        } else {
            cfgI.setCompatible(false);
        }
        items.set(items.indexOf(cfgI), cfgI);
    }
    if (format == null) {
        return false;
    }
    if (cfg == null) {
        // Nothing selected: clear any error border and treat as valid.
        box.setStyle(FX_BORDER_COLOR_NULL);
        return true;
    }
    if (cfg.isCompatible()) {
        box.setStyle(FX_BORDER_COLOR_NULL);
    } else {
        box.setStyle(FX_BORDER_COLOR_RED);
        statusLogController.setStatusTextUI(I18n.format(GUI_PROCESS_NOT_COMPATIBLE,
            box.getValue()));
    }
    return cfg.isCompatible();
}
|
boolean function(ComboBox box) { String format = controller.dataBean.getAttributeValue(OUTPUTFORMAT); if (format == null) { box.setStyle(FX_BORDER_COLOR_RED); statusLogController.setStatusTextUI(I18n.format(GUI_PROCESS_NO_FORMAT)); } MIMETypes mtypes = Config.getInstance().getMimeTypes(); MIMEType mtype = mtypes.findByName(format); ProcessingStepConfiguration cfg = (ProcessingStepConfiguration) box.getValue(); ObservableList<ProcessingStepConfiguration> items = (ObservableList<ProcessingStepConfiguration>) box.getItems(); if (format != null && mtype == null) { box.setStyle(FX_BORDER_COLOR_RED); for (ProcessingStepConfiguration cfgI : items) { cfgI.setCompatible(false); items.set(items.indexOf(cfgI), cfgI); } statusLogController.setStatusTextUI(I18n.format(GUI_PROCESS_FORMAT_NOT_FOUND)); return false; } for (ProcessingStepConfiguration cfgI : items) { if (format != null) { cfgI.setCompatible( cfgI.isCompatibleWithFormat(mtype.getType())); } else { cfgI.setCompatible(false); } items.set(items.indexOf(cfgI), cfgI); } if (format == null) { return false; } if (cfg == null) { box.setStyle(FX_BORDER_COLOR_NULL); return true; } if (cfg.isCompatible()) { box.setStyle(FX_BORDER_COLOR_NULL); } else { box.setStyle(FX_BORDER_COLOR_RED); statusLogController.setStatusTextUI(I18n.format(GUI_PROCESS_NOT_COMPATIBLE, box.getValue())); } return cfg.isCompatible(); }
|
/**
* Validates the chain items of a ComboBox
* and marks the box according to the chosen item.
*
* @param box
* Item to validate
* @return True if chosen item is valid, else false
*/
|
Validates the chain items of a ComboBox and marks the box according to the chosen item
|
validateChainContainer
|
{
"repo_name": "gdi-by/downloadclient",
"path": "src/main/java/de/bayern/gdi/gui/controller/ProcessingChainController.java",
"license": "apache-2.0",
"size": 8310
}
|
[
"de.bayern.gdi.config.Config",
"de.bayern.gdi.model.MIMEType",
"de.bayern.gdi.model.MIMETypes",
"de.bayern.gdi.model.ProcessingStepConfiguration",
"de.bayern.gdi.utils.I18n"
] |
import de.bayern.gdi.config.Config; import de.bayern.gdi.model.MIMEType; import de.bayern.gdi.model.MIMETypes; import de.bayern.gdi.model.ProcessingStepConfiguration; import de.bayern.gdi.utils.I18n;
|
import de.bayern.gdi.config.*; import de.bayern.gdi.model.*; import de.bayern.gdi.utils.*;
|
[
"de.bayern.gdi"
] |
de.bayern.gdi;
| 2,130,166
|
// ==========================================================================
// ==========================================================================
/**
 * Delete a list of properties for the specified class from the database.
 * Delegates directly to the database DAO.
 *
 * @param name       Class name.
 * @param properties Property DbDatum objects to delete.
 * @throws DevFailed in case of database access failed
 */
public void delete_class_property(String name, DbDatum[] properties) throws DevFailed {
    databaseDAO.delete_class_property(this, name, properties);
}
|
void function(String name, DbDatum[] properties) throws DevFailed { databaseDAO.delete_class_property(this, name, properties); }
|
/**
* Delete a list of properties for the specified object.
*
* @param name Class name.
* @param properties Property DbDatum objects.
* @throws DevFailed in case of database access failed
*/
|
Delete a list of properties for the specified object
|
delete_class_property
|
{
"repo_name": "tango-controls/JTango",
"path": "common/src/main/java/fr/esrf/TangoApi/Database.java",
"license": "lgpl-3.0",
"size": 86099
}
|
[
"fr.esrf.Tango"
] |
import fr.esrf.Tango;
|
import fr.esrf.*;
|
[
"fr.esrf"
] |
fr.esrf;
| 163,871
|
/**
 * Return the Y value of the point at the given index in the first inner
 * polygon.
 *
 * @param index position of the point within the polygon's point list
 * @return the Y coordinate of that point
 */
@Override
public double getY( int index ) {
    return m_List.get( index ).getY();
}
|
double function( int index ) { return m_List.get( index ).getY(); }
|
/**
* Return the Y value of the point at the index in the first inner polygon
*/
|
Return the Y value of the point at the index in the first inner polygon
|
getY
|
{
"repo_name": "rhilker/ReadXplorer",
"path": "readxplorer-tools-gasv/src/main/java/gasv/geom/PolySimple.java",
"license": "gpl-3.0",
"size": 11909
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,020,871
|
/**
 * Blocks until the native library is fully loaded and initialized.
 * Safe to call from multiple threads; only the first caller does the work.
 *
 * @param context context used to extract native libraries from the APK
 *        when the package manager did not (workaround for
 *        http://b/13216167); may be null if the caller does not need the
 *        workaround or does not know its context.
 * @param shouldDeleteOldWorkaroundLibraries whether previously extracted
 *        workaround libraries should be deleted.
 * @throws ProcessInitException if loading or initialization fails
 */
public static void ensureInitialized(
        Context context, boolean shouldDeleteOldWorkaroundLibraries)
        throws ProcessInitException {
    synchronized (sLock) {
        if (!sInitialized) {
            // First caller: load the native code, then run one-time
            // initialization with any Java command-line switches.
            loadAlreadyLocked(context, shouldDeleteOldWorkaroundLibraries);
            initializeAlreadyLocked(CommandLine.getJavaSwitchesOrNull());
        }
    }
}
|
static void function( Context context, boolean shouldDeleteOldWorkaroundLibraries) throws ProcessInitException { synchronized (sLock) { if (sInitialized) { return; } loadAlreadyLocked(context, shouldDeleteOldWorkaroundLibraries); initializeAlreadyLocked(CommandLine.getJavaSwitchesOrNull()); } }
|
/**
* This method blocks until the library is fully loaded and initialized.
*
* @param context The context in which the method is called, the caller
* may pass in a null context if it doesn't know in which context it
* is running, or it doesn't need to work around the issue
* http://b/13216167.
*
* When the context is not null and native library was not extracted
* by Android package manager, the LibraryLoader class
* will extract the native libraries from APK. This is a hack used to
* work around some Sony devices with the following platform bug:
* http://b/13216167.
*
* @param shouldDeleteOldWorkaroundLibraries The flag tells whether the method
* should delete the old workaround libraries or not.
*/
|
This method blocks until the library is fully loaded and initialized
|
ensureInitialized
|
{
"repo_name": "TeamEOS/external_chromium_org",
"path": "base/android/java/src/org/chromium/base/library_loader/LibraryLoader.java",
"license": "bsd-3-clause",
"size": 10583
}
|
[
"android.content.Context",
"org.chromium.base.CommandLine"
] |
import android.content.Context; import org.chromium.base.CommandLine;
|
import android.content.*; import org.chromium.base.*;
|
[
"android.content",
"org.chromium.base"
] |
android.content; org.chromium.base;
| 36,794
|
/**
 * Runs the callback in a separate thread, delivering the messages to the
 * returned {@link me.tatarka.ipromise.Promise}. If the promise is
 * canceled, the thread is interrupted. This is equivalent to:
 * {@code Tasks.of(deferredBuilder, callback).start()}.
 *
 * @param deferredBuilder the deferred builder used to construct the promise
 * @param callback        the work to execute
 * @param <T>             the message type
 * @return the promise that will receive the messages
 */
public static <T> Promise<T> run(Deferred.Builder deferredBuilder, Task.Do<T> callback) {
    return Tasks.of(deferredBuilder, callback).start();
}
|
static <T> Promise<T> function(Deferred.Builder deferredBuilder, Task.Do<T> callback) { return Tasks.of(deferredBuilder, callback).start(); }
|
/**
* Runs the callback in a separate thread, delivering the messages to the returned {@link
* me.tatarka.ipromise.Promise}. If the promise is canceled, the thread is interrupted. This is
* equivalent to: {@code Task.of(deferredBuilder, callback).start()}.
*
* @param deferredBuilder the deferred builder
* @param callback teh callback
* @param <T> the message type
* @return the promise that will receive the messages
*/
|
Runs the callback in a separate thread, delivering the messages to the returned <code>me.tatarka.ipromise.Promise</code>. If the promise is canceled, the thread is interrupted. This is equivalent to: Task.of(deferredBuilder, callback).start()
|
run
|
{
"repo_name": "evant/ipromise",
"path": "ipromise/src/main/java/me/tatarka/ipromise/task/Tasks.java",
"license": "apache-2.0",
"size": 5150
}
|
[
"me.tatarka.ipromise.Deferred",
"me.tatarka.ipromise.Promise"
] |
import me.tatarka.ipromise.Deferred; import me.tatarka.ipromise.Promise;
|
import me.tatarka.ipromise.*;
|
[
"me.tatarka.ipromise"
] |
me.tatarka.ipromise;
| 1,146,608
|
/**
 * Serializes each object and stores it via
 * {@link #putInternal(ObjectId, byte[])}, reporting every insert (or
 * already-present hit) to the listener. Subclasses may override if a more
 * efficient bulk path exists.
 *
 * @param objects  the objects to store
 * @param listener receives an inserted/found callback per object
 */
@Override
public void putAll(Iterator<? extends RevObject> objects, final BulkOpListener listener) {
    checkState(isOpen(), "db is closed");
    // One buffer reused for every object to avoid per-object allocation.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    while (objects.hasNext()) {
        RevObject object = objects.next();
        buffer.reset();
        writeObject(object, buffer);
        byte[] raw = buffer.toByteArray();
        ObjectId id = object.getId();
        if (putInternal(id, raw)) {
            listener.inserted(object.getId(), raw.length);
        } else {
            listener.found(object.getId(), null);
        }
    }
}
|
void function(Iterator<? extends RevObject> objects, final BulkOpListener listener) { checkState(isOpen(), STR); ByteArrayOutputStream rawOut = new ByteArrayOutputStream(); while (objects.hasNext()) { RevObject object = objects.next(); rawOut.reset(); writeObject(object, rawOut); final byte[] rawData = rawOut.toByteArray(); final ObjectId id = object.getId(); final boolean added = putInternal(id, rawData); if (added) { listener.inserted(object.getId(), rawData.length); } else { listener.found(object.getId(), null); } } }
|
/**
* This default implementation calls {@link #putInternal(ObjectId, byte[])} for each object;
* subclasses may override if appropriate.
*/
|
This default implementation calls <code>#putInternal(ObjectId, byte[])</code> for each object; subclasses may override if appropriate
|
putAll
|
{
"repo_name": "smesdaghi/geogig",
"path": "src/core/src/main/java/org/locationtech/geogig/storage/AbstractObjectDatabase.java",
"license": "bsd-3-clause",
"size": 9541
}
|
[
"com.google.common.base.Preconditions",
"java.io.ByteArrayOutputStream",
"java.util.Iterator",
"org.locationtech.geogig.api.ObjectId",
"org.locationtech.geogig.api.RevObject"
] |
import com.google.common.base.Preconditions; import java.io.ByteArrayOutputStream; import java.util.Iterator; import org.locationtech.geogig.api.ObjectId; import org.locationtech.geogig.api.RevObject;
|
import com.google.common.base.*; import java.io.*; import java.util.*; import org.locationtech.geogig.api.*;
|
[
"com.google.common",
"java.io",
"java.util",
"org.locationtech.geogig"
] |
com.google.common; java.io; java.util; org.locationtech.geogig;
| 164,856
|
/**
 * Returns the text field that displays the currently selected file.
 *
 * @return the {@code JTextField} used to display the selected file
 */
public JTextField getField() {
    return field;
}
|
JTextField function() { return this.field; }
|
/**
* Returns the {@code JTextField} used to display the selected file.
*
* @return the {@code JTextField} used to display the selected file
*/
|
Returns the JTextField used to display the selected file
|
getField
|
{
"repo_name": "sing-group/aibench-project",
"path": "aibench-workbench/src/main/java/es/uvigo/ei/aibench/workbench/inputgui/FileParamProvider.java",
"license": "lgpl-3.0",
"size": 11249
}
|
[
"javax.swing.JTextField"
] |
import javax.swing.JTextField;
|
import javax.swing.*;
|
[
"javax.swing"
] |
javax.swing;
| 831,405
|
/**
 * Finds the name of the first table, which is editable. The name is the
 * first word of the table list, terminated by either a space or a comma.
 *
 * @param tables a list of tables
 * @return the first table listed
 * @throws IllegalStateException if {@code tables} is null or empty
 */
public static String findEditTable(String tables) {
    if (TextUtils.isEmpty(tables)) {
        throw new IllegalStateException("Invalid tables");
    }
    // Cut at whichever of ' ' or ',' occurs first; a terminator at
    // index 0 is deliberately ignored, matching the historical behavior.
    int spaceIndex = tables.indexOf(' ');
    int commaIndex = tables.indexOf(',');
    if (spaceIndex > 0 && (spaceIndex < commaIndex || commaIndex < 0)) {
        return tables.substring(0, spaceIndex);
    }
    if (commaIndex > 0 && (commaIndex < spaceIndex || spaceIndex < 0)) {
        return tables.substring(0, commaIndex);
    }
    return tables;
}
/**
* Compiles an SQL statement into a reusable pre-compiled statement object.
* The parameters are identical to {@link #execSQL(String)}. You may put ?s in the
* statement and fill in those values with {@link SQLiteProgram#bindString}
|
static String function(String tables) { if (!TextUtils.isEmpty(tables)) { int spacepos = tables.indexOf(' '); int commapos = tables.indexOf(','); if (spacepos > 0 && (spacepos < commapos commapos < 0)) { return tables.substring(0, spacepos); } else if (commapos > 0 && (commapos < spacepos spacepos < 0) ) { return tables.substring(0, commapos); } return tables; } else { throw new IllegalStateException(STR); } } /** * Compiles an SQL statement into a reusable pre-compiled statement object. * The parameters are identical to {@link #execSQL(String)}. You may put ?s in the * statement and fill in those values with {@link SQLiteProgram#bindString}
|
/**
* Finds the name of the first table, which is editable.
*
* @param tables a list of tables
* @return the first table listed
*/
|
Finds the name of the first table, which is editable
|
findEditTable
|
{
"repo_name": "szpaddy/android-4.1.2_r2-core",
"path": "java/android/database/sqlite/SQLiteDatabase.java",
"license": "apache-2.0",
"size": 94045
}
|
[
"android.text.TextUtils"
] |
import android.text.TextUtils;
|
import android.text.*;
|
[
"android.text"
] |
android.text;
| 2,449,936
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.