| method (string, lengths 13-441k) | clean_method (string, lengths 7-313k) | doc (string, lengths 17-17.3k) | comment (string, lengths 3-1.42k) | method_name (string, lengths 1-273) | extra (dict) | imports (list) | imports_info (string, lengths 19-34.8k) | cluster_imports_info (string, lengths 15-3.66k) | libraries (list) | libraries_info (string, lengths 6-661) | id (int64, 0-2.92M) |
|---|---|---|---|---|---|---|---|---|---|---|---|
void setRelatedAssociation(DEAssociation value);
|
void setRelatedAssociation(DEAssociation value);
|
/**
* Sets the value of the '{@link fr.unice.spinefm.ConfigurationModel.Link#getRelatedAssociation <em>Related Association</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Related Association</em>' reference.
* @see #getRelatedAssociation()
* @generated
*/
|
Sets the value of the '<code>fr.unice.spinefm.ConfigurationModel.Link#getRelatedAssociation Related Association</code>' reference.
|
setRelatedAssociation
|
{
"repo_name": "surli/spinefm",
"path": "spinefm-eclipseplugins-root/spinefm-core/src-gen/fr/unice/spinefm/ConfigurationModel/Link.java",
"license": "mit",
"size": 4702
}
|
[
"fr.unice.spinefm.MSPLModel"
] |
import fr.unice.spinefm.MSPLModel;
|
import fr.unice.spinefm.*;
|
[
"fr.unice.spinefm"
] |
fr.unice.spinefm;
| 1,446,857
|
public ActionForward saveDaiaForm(final ActionMapping mp, final ActionForm fm, final HttpServletRequest rq,
final HttpServletResponse rp) {
final Auth auth = new Auth();
// make sure the user is logged in
if (!auth.isLogin(rq)) {
return mp.findForward(Result.ERROR_TIMEOUT.getValue());
}
// check access rights
if (!auth.isBibliothekar(rq) && !auth.isAdmin(rq)) {
return mp.findForward(Result.ERROR_MISSING_RIGHTS.getValue());
}
String forward = Result.FAILURE.getValue();
final DaiaParam dp = (DaiaParam) fm;
final UserInfo ui = (UserInfo) rq.getSession().getAttribute("userinfo");
final Text cn = new Text();
try {
final Message message = new Message();
// make sure base url is valid
final Check ck = new Check();
if (ck.isUrl(dp.getBaseurl())) {
forward = Result.SUCCESS.getValue();
// delete DAIA-Param for account
dp.delete(ui.getKonto(), cn.getConnection());
// save new DAIA-Param
final Long newDaiaParamID = dp.save(ui.getKonto(), cn.getConnection());
// do we need to update USE_DID in bestellform_param?
final BestellFormParam bf = new BestellFormParam();
final List<BestellFormParam> bfps = bf.getAllBestellFormParam(ui.getKonto(), cn.getConnection());
for (final BestellFormParam bfp : bfps) {
if (bfp.getUse_did() != null && !bfp.getUse_did().equals(Long.valueOf("0"))) {
// set new DAIA-Param ID
bfp.setUse_did(newDaiaParamID);
bf.update(bfp, cn.getConnection());
}
}
// set success message
message.setMessage("message.modifyuser");
message.setLink("externalform.do?method=prepDaiaForm");
rq.setAttribute("message", message);
} else {
forward = "missingvalues";
message.setMessage("error.url");
rq.setAttribute("message", message);
rq.setAttribute("daiaparam", dp);
}
} finally {
cn.close();
}
return mp.findForward(forward);
}
|
ActionForward function(final ActionMapping mp, final ActionForm fm, final HttpServletRequest rq, final HttpServletResponse rp) { final Auth auth = new Auth(); if (!auth.isLogin(rq)) { return mp.findForward(Result.ERROR_TIMEOUT.getValue()); } if (!auth.isBibliothekar(rq) && !auth.isAdmin(rq)) { return mp.findForward(Result.ERROR_MISSING_RIGHTS.getValue()); } String forward = Result.FAILURE.getValue(); final DaiaParam dp = (DaiaParam) fm; final UserInfo ui = (UserInfo) rq.getSession().getAttribute(STR); final Text cn = new Text(); try { final Message message = new Message(); final Check ck = new Check(); if (ck.isUrl(dp.getBaseurl())) { forward = Result.SUCCESS.getValue(); dp.delete(ui.getKonto(), cn.getConnection()); final Long newDaiaParamID = dp.save(ui.getKonto(), cn.getConnection()); final BestellFormParam bf = new BestellFormParam(); final List<BestellFormParam> bfps = bf.getAllBestellFormParam(ui.getKonto(), cn.getConnection()); for (final BestellFormParam bfp : bfps) { if (bfp.getUse_did() != null && !bfp.getUse_did().equals(Long.valueOf("0"))) { bfp.setUse_did(newDaiaParamID); bf.update(bfp, cn.getConnection()); } } message.setMessage(STR); message.setLink(STR); rq.setAttribute(STR, message); } else { forward = STR; message.setMessage(STR); rq.setAttribute(STR, message); rq.setAttribute(STR, dp); } } finally { cn.close(); } return mp.findForward(forward); }
|
/**
* Save an external order form.
*/
|
Save an external order form
|
saveDaiaForm
|
{
"repo_name": "gbv/doctor-doc",
"path": "source/ch/dbs/actions/bestellung/BestellformAction.java",
"license": "gpl-2.0",
"size": 84477
}
|
[
"ch.dbs.entity.BestellFormParam",
"ch.dbs.entity.DaiaParam",
"ch.dbs.entity.Text",
"ch.dbs.form.Message",
"ch.dbs.form.UserInfo",
"java.util.List",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"org.apache.struts.action.ActionForm",
"org.apache.struts.action.ActionForward",
"org.apache.struts.action.ActionMapping"
] |
import ch.dbs.entity.BestellFormParam; import ch.dbs.entity.DaiaParam; import ch.dbs.entity.Text; import ch.dbs.form.Message; import ch.dbs.form.UserInfo; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping;
|
import ch.dbs.entity.*; import ch.dbs.form.*; import java.util.*; import javax.servlet.http.*; import org.apache.struts.action.*;
|
[
"ch.dbs.entity",
"ch.dbs.form",
"java.util",
"javax.servlet",
"org.apache.struts"
] |
ch.dbs.entity; ch.dbs.form; java.util; javax.servlet; org.apache.struts;
| 2,098,906
|
static Type extractBoundForTypeVariable(TypeVariable typeVariable) {
Type[] bounds = typeVariable.getBounds();
if (bounds.length == 0) {
return Object.class;
}
Type bound = bounds[0];
if (bound instanceof TypeVariable) {
bound = extractBoundForTypeVariable((TypeVariable) bound);
}
return bound;
}
|
static Type extractBoundForTypeVariable(TypeVariable typeVariable) { Type[] bounds = typeVariable.getBounds(); if (bounds.length == 0) { return Object.class; } Type bound = bounds[0]; if (bound instanceof TypeVariable) { bound = extractBoundForTypeVariable((TypeVariable) bound); } return bound; }
|
/**
* Extracts the bound <code>Type</code> for a given {@link TypeVariable}.
*/
|
Extracts the bound <code>Type</code> for a given <code>TypeVariable</code>
|
extractBoundForTypeVariable
|
{
"repo_name": "cbeams-archive/spring-framework-2.5.x",
"path": "src/org/springframework/core/GenericTypeResolver.java",
"license": "apache-2.0",
"size": 9688
}
|
[
"java.lang.reflect.Type",
"java.lang.reflect.TypeVariable"
] |
import java.lang.reflect.Type; import java.lang.reflect.TypeVariable;
|
import java.lang.reflect.*;
|
[
"java.lang"
] |
java.lang;
| 469,214
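A minimal standalone sketch (not part of the dataset) of the recursive bound lookup shown in the `extractBoundForTypeVariable` sample above; the `BoundDemo` class and its `pick` method are hypothetical, and only `java.lang.reflect` is used:

```java
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;

// Hypothetical demo class: a generic method whose type variable T is bounded by Number.
class BoundDemo {
    static <T extends Number> T pick(T value) { return value; }

    public static void main(String[] args) throws Exception {
        Method m = BoundDemo.class.getDeclaredMethod("pick", Number.class);
        TypeVariable<?> typeVariable = m.getTypeParameters()[0];
        Type[] bounds = typeVariable.getBounds();
        // Same fallback as the sample: no declared bound means Object.class.
        Type bound = bounds.length == 0 ? Object.class : bounds[0];
        System.out.println(bound); // class java.lang.Number
    }
}
```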
|
public static int getApisPerPageInPublisher() {
String paginationLimit = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService()
.getAPIManagerConfiguration().getFirstProperty(APIConstants.API_PUBLISHER_APIS_PER_PAGE);
if (paginationLimit != null) {
return Integer.parseInt(paginationLimit);
}
return 0;
}
|
static int function() { String paginationLimit = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration().getFirstProperty(APIConstants.API_PUBLISHER_APIS_PER_PAGE); if (paginationLimit != null) { return Integer.parseInt(paginationLimit); } return 0; }
|
/**
* Used to get the custom pagination limit for publisher
*
* @return returns the publisher pagination value from api-manager.xml
*/
|
Used to get the custom pagination limit for publisher
|
getApisPerPageInPublisher
|
{
"repo_name": "tharikaGitHub/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/utils/APIUtil.java",
"license": "apache-2.0",
"size": 563590
}
|
[
"org.wso2.carbon.apimgt.impl.APIConstants",
"org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder"
] |
import org.wso2.carbon.apimgt.impl.APIConstants; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
|
import org.wso2.carbon.apimgt.impl.*; import org.wso2.carbon.apimgt.impl.internal.*;
|
[
"org.wso2.carbon"
] |
org.wso2.carbon;
| 1,323,843
|
private void shutdownTaskProcess(ForkingTaskRunnerWorkItem taskInfo)
{
if (taskInfo.processHolder != null) {
// Will trigger normal failure mechanisms due to process exit
LOGGER.info("Closing output stream to task[%s].", taskInfo.getTask().getId());
try {
taskInfo.processHolder.process.getOutputStream().close();
}
catch (Exception e) {
LOGGER.warn(e, "Failed to close stdout to task[%s]. Destroying task.", taskInfo.getTask().getId());
taskInfo.processHolder.process.destroy();
}
}
}
protected static class ForkingTaskRunnerWorkItem extends TaskRunnerWorkItem
{
private final Task task;
private volatile boolean shutdown = false;
private volatile ProcessHolder processHolder = null;
private ForkingTaskRunnerWorkItem(
Task task,
ListenableFuture<TaskStatus> statusFuture
)
{
super(task.getId(), statusFuture);
this.task = task;
}
|
void function(ForkingTaskRunnerWorkItem taskInfo) { if (taskInfo.processHolder != null) { LOGGER.info(STR, taskInfo.getTask().getId()); try { taskInfo.processHolder.process.getOutputStream().close(); } catch (Exception e) { LOGGER.warn(e, STR, taskInfo.getTask().getId()); taskInfo.processHolder.process.destroy(); } } } protected static class ForkingTaskRunnerWorkItem extends TaskRunnerWorkItem { private final Task task; private volatile boolean shutdown = false; private volatile ProcessHolder processHolder = null; private ForkingTaskRunnerWorkItem( Task task, ListenableFuture<TaskStatus> statusFuture ) { super(task.getId(), statusFuture); this.task = task; }
|
/**
* Close task output stream (input stream of process) sending EOF telling process to terminate, destroying the process
* if an exception is encountered.
*/
|
Close task output stream (input stream of process) sending EOF telling process to terminate, destroying the process if an exception is encountered
|
shutdownTaskProcess
|
{
"repo_name": "himanshug/druid",
"path": "indexing-service/src/main/java/org/apache/druid/indexing/overlord/ForkingTaskRunner.java",
"license": "apache-2.0",
"size": 27573
}
|
[
"com.google.common.util.concurrent.ListenableFuture",
"org.apache.druid.indexer.TaskStatus",
"org.apache.druid.indexing.common.task.Task"
] |
import com.google.common.util.concurrent.ListenableFuture; import org.apache.druid.indexer.TaskStatus; import org.apache.druid.indexing.common.task.Task;
|
import com.google.common.util.concurrent.*; import org.apache.druid.indexer.*; import org.apache.druid.indexing.common.task.*;
|
[
"com.google.common",
"org.apache.druid"
] |
com.google.common; org.apache.druid;
| 2,558,493
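A hedged sketch (not part of the dataset) of the shutdown idea in the Druid sample above: closing the child's stdin sends EOF so a cooperative process can exit on its own, with `destroy()` as the fallback. It assumes a Unix-like system with `cat` on the PATH:

```java
public class EofShutdownDemo {
    public static void main(String[] args) throws Exception {
        // Assumption: `cat` is available; it exits once its stdin reaches EOF.
        Process child = new ProcessBuilder("cat").start();
        try {
            child.getOutputStream().close(); // EOF on the child's stdin
        } catch (Exception e) {
            child.destroy();                 // forcible fallback, as in the sample
        }
        System.out.println("exit code: " + child.waitFor());
    }
}
```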
|
@XmlElement(name="well")
@XmlElementWrapper(name="wells")
public void setWells(List<SimpleWellXMLInteger> wells) {
this.wells = wells;
}
|
@XmlElement(name="well") @XmlElementWrapper(name="wells") void function(List<SimpleWellXMLInteger> wells) { this.wells = wells; }
|
/**
* Sets the list of set wells.
* @param List<SimpleWellXMLInteger> the list of set wells
*/
|
Sets the list of set wells
|
setWells
|
{
"repo_name": "jessemull/MicroFlex",
"path": "src/main/java/com/github/jessemull/microflex/integerflex/io/WellSetXMLInteger.java",
"license": "apache-2.0",
"size": 4578
}
|
[
"java.util.List",
"javax.xml.bind.annotation.XmlElement",
"javax.xml.bind.annotation.XmlElementWrapper"
] |
import java.util.List; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElementWrapper;
|
import java.util.*; import javax.xml.bind.annotation.*;
|
[
"java.util",
"javax.xml"
] |
java.util; javax.xml;
| 2,673,243
|
public void setLongNull( int nIndex )
{
try
{
_statement.setNull( nIndex, Types.BIGINT );
logParameter( nIndex, "null" );
}
catch( SQLException e )
{
free( );
throw new AppException( getErrorMessage( e ), e );
}
}
|
void function( int nIndex ) { try { _statement.setNull( nIndex, Types.BIGINT ); logParameter( nIndex, "null" ); } catch( SQLException e ) { free( ); throw new AppException( getErrorMessage( e ), e ); } }
|
/**
* Sets null value for a "long" column
*
* @param nIndex
* the index
*/
|
Sets null value for a "long" column
|
setLongNull
|
{
"repo_name": "rzara/lutece-core",
"path": "src/java/fr/paris/lutece/util/sql/DAOUtil.java",
"license": "bsd-3-clause",
"size": 83663
}
|
[
"fr.paris.lutece.portal.service.util.AppException",
"java.sql.SQLException",
"java.sql.Types"
] |
import fr.paris.lutece.portal.service.util.AppException; import java.sql.SQLException; import java.sql.Types;
|
import fr.paris.lutece.portal.service.util.*; import java.sql.*;
|
[
"fr.paris.lutece",
"java.sql"
] |
fr.paris.lutece; java.sql;
| 860,027
|
public byte[][] getRegionSplitStartKeys(byte[] startKey, byte[] endKey, int numRegions){
assertTrue(numRegions>3);
byte [][] tmpSplitKeys = Bytes.split(startKey, endKey, numRegions - 3);
byte [][] result = new byte[tmpSplitKeys.length+1][];
System.arraycopy(tmpSplitKeys, 0, result, 1, tmpSplitKeys.length);
result[0] = HConstants.EMPTY_BYTE_ARRAY;
return result;
}
|
byte[][] function(byte[] startKey, byte[] endKey, int numRegions){ assertTrue(numRegions>3); byte [][] tmpSplitKeys = Bytes.split(startKey, endKey, numRegions - 3); byte [][] result = new byte[tmpSplitKeys.length+1][]; System.arraycopy(tmpSplitKeys, 0, result, 1, tmpSplitKeys.length); result[0] = HConstants.EMPTY_BYTE_ARRAY; return result; }
|
/**
* Create region split keys between startkey and endKey
*
* @param startKey
* @param endKey
* @param numRegions the number of regions to be created. it has to be greater than 3.
* @return resulting split keys
*/
|
Create region split keys between startkey and endKey
|
getRegionSplitStartKeys
|
{
"repo_name": "francisliu/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java",
"license": "apache-2.0",
"size": 169730
}
|
[
"org.apache.hadoop.hbase.util.Bytes",
"org.junit.Assert"
] |
import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert;
|
import org.apache.hadoop.hbase.util.*; import org.junit.*;
|
[
"org.apache.hadoop",
"org.junit"
] |
org.apache.hadoop; org.junit;
| 1,283,472
|
boolean checkStructuralEquivalenceHelper(
ObjectType otherObject, EquivalenceMethod eqMethod, EqCache eqCache) {
if (this.isTemplatizedType() && this.toMaybeTemplatizedType().wrapsSameRawType(otherObject)) {
return this.getTemplateTypeMap().checkEquivalenceHelper(
otherObject.getTemplateTypeMap(), eqMethod, eqCache, SubtypingMode.NORMAL);
}
MatchStatus result = eqCache.checkCache(this, otherObject);
if (result != null) {
return result.subtypeValue();
}
Set<String> keySet = getPropertyNames();
Set<String> otherKeySet = otherObject.getPropertyNames();
if (!otherKeySet.equals(keySet)) {
eqCache.updateCache(this, otherObject, MatchStatus.NOT_MATCH);
return false;
}
for (String key : keySet) {
if (!otherObject.getPropertyType(key).checkEquivalenceHelper(
getPropertyType(key), eqMethod, eqCache)) {
eqCache.updateCache(this, otherObject, MatchStatus.NOT_MATCH);
return false;
}
}
eqCache.updateCache(this, otherObject, MatchStatus.MATCH);
return true;
}
|
boolean checkStructuralEquivalenceHelper( ObjectType otherObject, EquivalenceMethod eqMethod, EqCache eqCache) { if (this.isTemplatizedType() && this.toMaybeTemplatizedType().wrapsSameRawType(otherObject)) { return this.getTemplateTypeMap().checkEquivalenceHelper( otherObject.getTemplateTypeMap(), eqMethod, eqCache, SubtypingMode.NORMAL); } MatchStatus result = eqCache.checkCache(this, otherObject); if (result != null) { return result.subtypeValue(); } Set<String> keySet = getPropertyNames(); Set<String> otherKeySet = otherObject.getPropertyNames(); if (!otherKeySet.equals(keySet)) { eqCache.updateCache(this, otherObject, MatchStatus.NOT_MATCH); return false; } for (String key : keySet) { if (!otherObject.getPropertyType(key).checkEquivalenceHelper( getPropertyType(key), eqMethod, eqCache)) { eqCache.updateCache(this, otherObject, MatchStatus.NOT_MATCH); return false; } } eqCache.updateCache(this, otherObject, MatchStatus.MATCH); return true; }
|
/**
* Check for structural equivalence with {@code that}.
* (e.g. two @record types with the same prototype properties)
*/
|
Check for structural equivalence with that. (e.g. two @record types with the same prototype properties)
|
checkStructuralEquivalenceHelper
|
{
"repo_name": "Pimm/closure-compiler",
"path": "src/com/google/javascript/rhino/jstype/ObjectType.java",
"license": "apache-2.0",
"size": 26312
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,314,599
|
private float[] getMapWorldBounds() {
float[] nswe = getMapWorldBoundsE6();
float n = nswe[0] / LibraryConstants.E6;
float s = nswe[1] / LibraryConstants.E6;
float w = nswe[2] / LibraryConstants.E6;
float e = nswe[3] / LibraryConstants.E6;
return new float[]{n, s, w, e};
}
|
float[] function() { float[] nswe = getMapWorldBoundsE6(); float n = nswe[0] / LibraryConstants.E6; float s = nswe[1] / LibraryConstants.E6; float w = nswe[2] / LibraryConstants.E6; float e = nswe[3] / LibraryConstants.E6; return new float[]{n, s, w, e}; }
|
/**
* Retrieves the map world bounds in degrees.
*
* @return the [n,s,w,e] in degrees.
*/
|
Retrieves the map world bounds in degrees
|
getMapWorldBounds
|
{
"repo_name": "integracaomt/MTISIG4",
"path": "geopaparazzi.app/src/eu/hydrologis/geopaparazzi/maps/MapsActivity.java",
"license": "gpl-3.0",
"size": 68324
}
|
[
"eu.geopaparazzi.library.util.LibraryConstants"
] |
import eu.geopaparazzi.library.util.LibraryConstants;
|
import eu.geopaparazzi.library.util.*;
|
[
"eu.geopaparazzi.library"
] |
eu.geopaparazzi.library;
| 2,677,472
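A small sketch (not part of the dataset) of the E6 convention behind `getMapWorldBounds`: coordinates stored as integer microdegrees are divided by 1e6 to recover degrees. Treating `LibraryConstants.E6` as 1,000,000 is an assumption, and the bounds below are made up:

```java
public class E6Demo {
    // Assumption: the E6 scale factor is 1,000,000 (microdegrees per degree).
    static final float E6 = 1_000_000f;

    public static void main(String[] args) {
        int[] nsweE6 = {47_366_500, 47_350_000, 8_540_000, 8_560_000}; // hypothetical [n,s,w,e]
        float[] degrees = new float[nsweE6.length];
        for (int i = 0; i < nsweE6.length; i++) {
            degrees[i] = nsweE6[i] / E6; // back to plain degrees
        }
        System.out.printf("n=%.4f s=%.4f w=%.4f e=%.4f%n",
                degrees[0], degrees[1], degrees[2], degrees[3]);
    }
}
```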
|
@Test
public void testReportGauge_withJSONHavingIntValueAtLocationAndDynamicPrefix() throws Exception {
final StatsDClient client = Mockito.mock(StatsDClient.class);
StatsdExtractedMetricsReporterConfiguration cfg = new StatsdExtractedMetricsReporterConfiguration("host", 8125, "prefix");
cfg.addMetricConfig(new StatsdMetricConfig("path.to.field", new JsonContentReference(new String[]{"prefix"}, JsonContentType.STRING), StatsdMetricType.COUNTER, new JsonContentReference(new String[]{"path","field"}, JsonContentType.INTEGER), false));
StatsdExtractedMetricsReporter reporter = new StatsdExtractedMetricsReporter(cfg);
reporter.setStatsdClient(client);
reporter.reportGauge(new StatsdMetricConfig("reportValue", new JsonContentReference(new String[]{"prefix"}, JsonContentType.STRING), StatsdMetricType.GAUGE, new JsonContentReference(new String[]{"key"}, JsonContentType.INTEGER), false), new JSONObject("{\"prefix\":\"pref\",\"key\":123}"));
Mockito.verify(client).gauge("pref.reportValue", Long.valueOf(123));
}
|
void function() throws Exception { final StatsDClient client = Mockito.mock(StatsDClient.class); StatsdExtractedMetricsReporterConfiguration cfg = new StatsdExtractedMetricsReporterConfiguration("host", 8125, STR); cfg.addMetricConfig(new StatsdMetricConfig(STR, new JsonContentReference(new String[]{STR}, JsonContentType.STRING), StatsdMetricType.COUNTER, new JsonContentReference(new String[]{"path","field"}, JsonContentType.INTEGER), false)); StatsdExtractedMetricsReporter reporter = new StatsdExtractedMetricsReporter(cfg); reporter.setStatsdClient(client); reporter.reportGauge(new StatsdMetricConfig(STR, new JsonContentReference(new String[]{STR}, JsonContentType.STRING), StatsdMetricType.GAUGE, new JsonContentReference(new String[]{"key"}, JsonContentType.INTEGER), false), new JSONObject("{\"prefix\":\"pref\",\"key\":123}")); Mockito.verify(client).gauge(STR, Long.valueOf(123)); }
|
/**
* Test case for {@link StatsdExtractedMetricsReporter#reportGauge(StatsdMetricConfig, JSONObject)} being provided a json which has a value at the referenced location and
* a dynamic prefix is requested
*/
|
Test case for <code>StatsdExtractedMetricsReporter#reportGauge(StatsdMetricConfig, JSONObject)</code> being provided a json which has a value at the referenced location and a dynamic prefix is requested
|
testReportGauge_withJSONHavingIntValueAtLocationAndDynamicPrefix
|
{
"repo_name": "ottogroup/flink-operator-library",
"path": "src/test/java/com/ottogroup/bi/streaming/operator/json/statsd/StatsdExtractedMetricsReporterTest.java",
"license": "apache-2.0",
"size": 58212
}
|
[
"com.ottogroup.bi.streaming.operator.json.JsonContentReference",
"com.ottogroup.bi.streaming.operator.json.JsonContentType",
"com.timgroup.statsd.StatsDClient",
"org.apache.sling.commons.json.JSONObject",
"org.mockito.Mockito"
] |
import com.ottogroup.bi.streaming.operator.json.JsonContentReference; import com.ottogroup.bi.streaming.operator.json.JsonContentType; import com.timgroup.statsd.StatsDClient; import org.apache.sling.commons.json.JSONObject; import org.mockito.Mockito;
|
import com.ottogroup.bi.streaming.operator.json.*; import com.timgroup.statsd.*; import org.apache.sling.commons.json.*; import org.mockito.*;
|
[
"com.ottogroup.bi",
"com.timgroup.statsd",
"org.apache.sling",
"org.mockito"
] |
com.ottogroup.bi; com.timgroup.statsd; org.apache.sling; org.mockito;
| 2,243,257
|
public void setInternationalCurrencySymbol(String value) {
if (value == null) {
currency = null;
intlCurrencySymbol = null;
return;
}
if (value.equals(intlCurrencySymbol)) {
return;
}
try {
currency = Currency.getInstance(value);
currencySymbol = currency.getSymbol(locale);
} catch (IllegalArgumentException e) {
currency = null;
}
intlCurrencySymbol = value;
}
|
void function(String value) { if (value == null) { currency = null; intlCurrencySymbol = null; return; } if (value.equals(intlCurrencySymbol)) { return; } try { currency = Currency.getInstance(value); currencySymbol = currency.getSymbol(locale); } catch (IllegalArgumentException e) { currency = null; } intlCurrencySymbol = value; }
|
/**
* Sets the international currency symbol.
* <p>
* The currency and currency symbol are also updated if {@code value} is a
* valid ISO4217 currency code.
* <p>
* The min and max number of fraction digits stay the same.
*
* @param value
* the currency code.
*/
|
Sets the international currency symbol. The currency and currency symbol are also updated if value is a valid ISO4217 currency code. The min and max number of fraction digits stay the same
|
setInternationalCurrencySymbol
|
{
"repo_name": "freeVM/freeVM",
"path": "enhanced/java/classlib/modules/text/src/main/java/java/text/DecimalFormatSymbols.java",
"license": "apache-2.0",
"size": 18965
}
|
[
"java.util.Currency"
] |
import java.util.Currency;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,210,585
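A short usage sketch (not part of the dataset) against the standard `java.text.DecimalFormatSymbols` API that the Harmony sample above implements: a valid ISO 4217 code updates both the currency and its display symbol, while an invalid code only stores the string:

```java
import java.text.DecimalFormatSymbols;
import java.util.Locale;

public class CurrencySymbolDemo {
    public static void main(String[] args) {
        DecimalFormatSymbols symbols = new DecimalFormatSymbols(Locale.US);
        symbols.setInternationalCurrencySymbol("EUR");
        // The display symbol becomes whatever the locale uses for the euro.
        System.out.println(symbols.getCurrencySymbol());
        System.out.println(symbols.getInternationalCurrencySymbol()); // EUR
    }
}
```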
|
private NameInformation getEnclosingFunctionDependencyScope(NodeTraversal t) {
Node function = t.getEnclosingFunction();
if (function == null) {
return null;
}
NameInformation ref = scopes.get(function);
if (ref != null) {
return ref;
}
// Function expression. try to get a name from the parent var
// declaration or assignment.
Node parent = function.getParent();
if (parent != null) {
// Account for functions defined in the form:
// var a = cond ? function a() {} : function b() {};
while (parent.getType() == Token.HOOK) {
parent = parent.getParent();
}
if (parent.getType() == Token.NAME) {
return scopes.get(parent);
}
if (parent.getType() == Token.ASSIGN) {
return scopes.get(parent);
}
}
return null;
}
|
NameInformation function(NodeTraversal t) { Node function = t.getEnclosingFunction(); if (function == null) { return null; } NameInformation ref = scopes.get(function); if (ref != null) { return ref; } Node parent = function.getParent(); if (parent != null) { while (parent.getType() == Token.HOOK) { parent = parent.getParent(); } if (parent.getType() == Token.NAME) { return scopes.get(parent); } if (parent.getType() == Token.ASSIGN) { return scopes.get(parent); } } return null; }
|
/**
* Get dependency scope defined by the enclosing function, or null.
* If enclosing function is a function expression, determine scope based on
* its parent if the parent node is a variable declaration or
* assignment.
*/
|
Get dependency scope defined by the enclosing function, or null. If enclosing function is a function expression, determine scope based on its parent if the parent node is a variable declaration or assignment
|
getEnclosingFunctionDependencyScope
|
{
"repo_name": "Dandandan/wikiprogramming",
"path": "jsrepl/tools/closure-compiler/trunk/src/com/google/javascript/jscomp/NameAnalyzer.java",
"license": "mit",
"size": 60058
}
|
[
"com.google.javascript.rhino.Node",
"com.google.javascript.rhino.Token"
] |
import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token;
|
import com.google.javascript.rhino.*;
|
[
"com.google.javascript"
] |
com.google.javascript;
| 332,661
|
@Test
public void testReloadFeed() throws Exception {
BookMarkLoadListener listener = null;
try {
final IFeed feed = DynamicDAO.save(new Feed(new URI("http://www.rssowl.org/rssowl2dg/tests/manager/rss_2_0.xml"))); //$NON-NLS-1$
IBookMark bookmark = createBookMark(feed);
final AtomicBoolean bool1 = new AtomicBoolean(false);
final AtomicBoolean bool2 = new AtomicBoolean(false);
assertTrue(bookmark.getNewsRefs().isEmpty());
assertTrue(bookmark.getNewsRefs(INews.State.getVisible()).isEmpty());
assertTrue(bookmark.getNews().isEmpty());
assertTrue(bookmark.getNews(INews.State.getVisible()).isEmpty());
assertEquals(0, bookmark.getNewsCount(INews.State.getVisible()));
|
void function() throws Exception { BookMarkLoadListener listener = null; try { final IFeed feed = DynamicDAO.save(new Feed(new URI("http: IBookMark bookmark = createBookMark(feed); final AtomicBoolean bool1 = new AtomicBoolean(false); final AtomicBoolean bool2 = new AtomicBoolean(false); assertTrue(bookmark.getNewsRefs().isEmpty()); assertTrue(bookmark.getNewsRefs(INews.State.getVisible()).isEmpty()); assertTrue(bookmark.getNews().isEmpty()); assertTrue(bookmark.getNews(INews.State.getVisible()).isEmpty()); assertEquals(0, bookmark.getNewsCount(INews.State.getVisible()));
|
/**
* Reload a Feed.
*
* @throws Exception
*/
|
Reload a Feed
|
testReloadFeed
|
{
"repo_name": "YixingCheng/RSSOwl",
"path": "org.rssowl.core.tests/src/org/rssowl/core/tests/controller/ControllerTestNetwork.java",
"license": "epl-1.0",
"size": 11380
}
|
[
"java.util.concurrent.atomic.AtomicBoolean",
"org.junit.Assert",
"org.rssowl.core.internal.persist.Feed",
"org.rssowl.core.persist.IBookMark",
"org.rssowl.core.persist.IFeed",
"org.rssowl.core.persist.INews",
"org.rssowl.core.persist.dao.DynamicDAO",
"org.rssowl.ui.internal.Controller"
] |
import java.util.concurrent.atomic.AtomicBoolean; import org.junit.Assert; import org.rssowl.core.internal.persist.Feed; import org.rssowl.core.persist.IBookMark; import org.rssowl.core.persist.IFeed; import org.rssowl.core.persist.INews; import org.rssowl.core.persist.dao.DynamicDAO; import org.rssowl.ui.internal.Controller;
|
import java.util.concurrent.atomic.*; import org.junit.*; import org.rssowl.core.internal.persist.*; import org.rssowl.core.persist.*; import org.rssowl.core.persist.dao.*; import org.rssowl.ui.internal.*;
|
[
"java.util",
"org.junit",
"org.rssowl.core",
"org.rssowl.ui"
] |
java.util; org.junit; org.rssowl.core; org.rssowl.ui;
| 54,616
|
@Test
public void noAdditionalMutationRequired() throws Exception
{
Map<Replica, Mutation> repairs = new HashMap<>();
repairs.put(replica1, mutation(cell2));
repairs.put(replica2, mutation(cell1));
InstrumentedReadRepairHandler handler = createRepairHandler(repairs, 2);
handler.sendInitialRepairs();
handler.ack(target1);
handler.ack(target2);
// both replicas have acked, we shouldn't send anything else out
handler.mutationsSent.clear();
handler.maybeSendAdditionalWrites(0, TimeUnit.NANOSECONDS);
Assert.assertTrue(handler.mutationsSent.isEmpty());
}
|
void function() throws Exception { Map<Replica, Mutation> repairs = new HashMap<>(); repairs.put(replica1, mutation(cell2)); repairs.put(replica2, mutation(cell1)); InstrumentedReadRepairHandler handler = createRepairHandler(repairs, 2); handler.sendInitialRepairs(); handler.ack(target1); handler.ack(target2); handler.mutationsSent.clear(); handler.maybeSendAdditionalWrites(0, TimeUnit.NANOSECONDS); Assert.assertTrue(handler.mutationsSent.isEmpty()); }
|
/**
* If we've received enough acks, we shouldn't send any additional mutations
*/
|
If we've received enough acks, we shouldn't send any additional mutations
|
noAdditionalMutationRequired
|
{
"repo_name": "spodkowinski/cassandra",
"path": "test/unit/org/apache/cassandra/service/reads/repair/BlockingReadRepairTest.java",
"license": "apache-2.0",
"size": 11637
}
|
[
"java.util.HashMap",
"java.util.Map",
"java.util.concurrent.TimeUnit",
"org.apache.cassandra.db.Mutation",
"org.apache.cassandra.locator.Replica",
"org.junit.Assert"
] |
import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; import org.apache.cassandra.db.Mutation; import org.apache.cassandra.locator.Replica; import org.junit.Assert;
|
import java.util.*; import java.util.concurrent.*; import org.apache.cassandra.db.*; import org.apache.cassandra.locator.*; import org.junit.*;
|
[
"java.util",
"org.apache.cassandra",
"org.junit"
] |
java.util; org.apache.cassandra; org.junit;
| 859,676
|
protected void decorate() {
setBorderPainted(false);
setOpaque(true);
setContentAreaFilled(false);
setMargin(new Insets(1, 1, 1, 1));
|
void function() { setBorderPainted(false); setOpaque(true); setContentAreaFilled(false); setMargin(new Insets(1, 1, 1, 1));
|
/**
* Decorates the button with the approriate UI configurations.
*/
|
Decorates the button with the approriate UI configurations
|
decorate
|
{
"repo_name": "vipinraj/Spark",
"path": "core/src/main/java/org/jivesoftware/spark/component/RolloverButton.java",
"license": "apache-2.0",
"size": 2619
}
|
[
"java.awt.Insets"
] |
import java.awt.Insets;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 1,291,806
|
public byte[] getPolicyQualifier()
{
if (qualifier == null)
{
return null;
}
return (byte[])qualifier.clone();
}
/**
* Return a printable representation of this
* <code>PolicyQualifierInfo</code>.<br />
* <br />
* Uses {@link org.bouncycastle.asn1.ASN1InputStream ASN1InputStream},
* {@link org.bouncycastle.asn1.ASN1Object ASN1Object}
|
byte[] function() { if (qualifier == null) { return null; } return (byte[])qualifier.clone(); } /** * Return a printable representation of this * <code>PolicyQualifierInfo</code>.<br /> * <br /> * Uses {@link org.bouncycastle.asn1.ASN1InputStream ASN1InputStream}, * {@link org.bouncycastle.asn1.ASN1Object ASN1Object}
|
/**
* Returns the ASN.1 DER encoded form of the <code>qualifier</code> field
* of this <code>PolicyQualifierInfo</code>.
*
* @return the ASN.1 DER encoded bytes of the <code>qualifier</code>
* field. Note that a copy is returned, so the data is cloned each
* time this method is called.
*/
|
Returns the ASN.1 DER encoded form of the <code>qualifier</code> field of this <code>PolicyQualifierInfo</code>
|
getPolicyQualifier
|
{
"repo_name": "GaloisInc/hacrypto",
"path": "src/Java/BouncyCastle/BouncyCastle-1.50/core/src/main/jdk1.1/java/security/cert/PolicyQualifierInfo.java",
"license": "bsd-3-clause",
"size": 7476
}
|
[
"org.bouncycastle.asn1.ASN1InputStream",
"org.bouncycastle.asn1.ASN1Object"
] |
import org.bouncycastle.asn1.ASN1InputStream; import org.bouncycastle.asn1.ASN1Object;
|
import org.bouncycastle.asn1.*;
|
[
"org.bouncycastle.asn1"
] |
org.bouncycastle.asn1;
| 2,733,198
|
@Test(timeout = 60000)
public void testListEncryptionZonesAsNonSuperUser() throws Exception {
final UserGroupInformation user = UserGroupInformation.
createUserForTesting("user", new String[] { "mygroup" });
final Path testRoot = new Path("/tmp/TestEncryptionZones");
final Path superPath = new Path(testRoot, "superuseronly");
final Path allPath = new Path(testRoot, "accessall");
fsWrapper.mkdir(superPath, new FsPermission((short) 0700), true);
dfsAdmin.createEncryptionZone(superPath, TEST_KEY);
fsWrapper.mkdir(allPath, new FsPermission((short) 0707), true);
dfsAdmin.createEncryptionZone(allPath, TEST_KEY);
|
@Test(timeout = 60000) void function() throws Exception { final UserGroupInformation user = UserGroupInformation. createUserForTesting("user", new String[] { STR }); final Path testRoot = new Path(STR); final Path superPath = new Path(testRoot, STR); final Path allPath = new Path(testRoot, STR); fsWrapper.mkdir(superPath, new FsPermission((short) 0700), true); dfsAdmin.createEncryptionZone(superPath, TEST_KEY); fsWrapper.mkdir(allPath, new FsPermission((short) 0707), true); dfsAdmin.createEncryptionZone(allPath, TEST_KEY);
|
/**
* Test listing encryption zones as a non super user.
*/
|
Test listing encryption zones as a non super user
|
testListEncryptionZonesAsNonSuperUser
|
{
"repo_name": "korrelate/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java",
"license": "apache-2.0",
"size": 52301
}
|
[
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.fs.permission.FsPermission",
"org.apache.hadoop.security.UserGroupInformation",
"org.junit.Test"
] |
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.security.UserGroupInformation; import org.junit.Test;
|
import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.permission.*; import org.apache.hadoop.security.*; import org.junit.*;
|
[
"org.apache.hadoop",
"org.junit"
] |
org.apache.hadoop; org.junit;
| 173,784
|
ServiceCall<Void> getBooleanTrueAsync(final ServiceCallback<Void> serviceCallback);
|
ServiceCall<Void> getBooleanTrueAsync(final ServiceCallback<Void> serviceCallback);
|
/**
* Get true Boolean value on path.
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @return the {@link ServiceCall} object
*/
|
Get true Boolean value on path
|
getBooleanTrueAsync
|
{
"repo_name": "yugangw-msft/autorest",
"path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/url/Queries.java",
"license": "mit",
"size": 53223
}
|
[
"com.microsoft.rest.ServiceCall",
"com.microsoft.rest.ServiceCallback"
] |
import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback;
|
import com.microsoft.rest.*;
|
[
"com.microsoft.rest"
] |
com.microsoft.rest;
| 1,955,277
|
public List<Exception> getWarnings() {
return this.warnings == null ? Collections.<Exception>emptyList() : this.warnings;
}
|
List<Exception> function() { return this.warnings == null ? Collections.<Exception>emptyList() : this.warnings; }
|
/**
* Gets the complete list of exceptions returned from all flush operations invoked upon this loader.
*
* @return errors that occurred during flushes
*/
|
Gets the complete list of exceptions returned from all flush operations invoked upon this loader
|
getWarnings
|
{
"repo_name": "amir20/jpile",
"path": "src/main/java/com/opower/persistence/jpile/loader/InfileObjectLoader.java",
"license": "mit",
"size": 6115
}
|
[
"java.util.Collections",
"java.util.List"
] |
import java.util.Collections; import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 192,285
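A compact sketch (not part of the dataset) of the null-safe accessor pattern used by `getWarnings`: callers always receive a list, possibly empty, so they can iterate without a null check. The `WarningsDemo` class is hypothetical:

```java
import java.util.Collections;
import java.util.List;

public class WarningsDemo {
    private List<Exception> warnings; // may legitimately stay null

    public List<Exception> getWarnings() {
        return this.warnings == null ? Collections.<Exception>emptyList() : this.warnings;
    }

    public static void main(String[] args) {
        // Safe even when nothing was ever recorded.
        for (Exception e : new WarningsDemo().getWarnings()) {
            System.out.println(e.getMessage());
        }
        System.out.println("no warnings");
    }
}
```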
|
public GenericUDTF getGenericUDTF() {
// GenericUDTF is stateful too, copy
if (genericUDTF == null) {
return null;
}
return FunctionRegistry.cloneGenericUDTF(genericUDTF);
}
|
GenericUDTF function() { if (genericUDTF == null) { return null; } return FunctionRegistry.cloneGenericUDTF(genericUDTF); }
|
/**
* Get a new GenericUDTF object for the function.
*/
|
Get a new GenericUDTF object for the function
|
getGenericUDTF
|
{
"repo_name": "vergilchiu/hive",
"path": "ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java",
"license": "apache-2.0",
"size": 8331
}
|
[
"org.apache.hadoop.hive.ql.udf.generic.GenericUDTF"
] |
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
|
import org.apache.hadoop.hive.ql.udf.generic.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 836,132
|
@NotNull
Optional<TableHandle> getTableHandle(QualifiedTableName tableName);
|
Optional<TableHandle> getTableHandle(QualifiedTableName tableName);
|
/**
* Returns a table handle for the specified table name.
*/
|
Returns a table handle for the specified table name
|
getTableHandle
|
{
"repo_name": "ddd332/presto-0.54",
"path": "presto-main/src/main/java/com/facebook/presto/metadata/Metadata.java",
"license": "apache-2.0",
"size": 3947
}
|
[
"com.facebook.presto.spi.TableHandle",
"com.google.common.base.Optional"
] |
import com.facebook.presto.spi.TableHandle; import com.google.common.base.Optional;
|
import com.facebook.presto.spi.*; import com.google.common.base.*;
|
[
"com.facebook.presto",
"com.google.common"
] |
com.facebook.presto; com.google.common;
| 1,503,766
|
public final double[ ][ ][ ] forward( double[ ][ ][ ] spaceTime ) {
double[ ][ ][ ] spaceHilb = null;
try {
spaceHilb = _basicTransform.forward( spaceTime );
} catch( JWaveException e ) {
e.showMessage( );
e.printStackTrace( );
} // try
return spaceHilb;
} // forward
|
final double[ ][ ][ ] function( double[ ][ ][ ] spaceTime ) { double[ ][ ][ ] spaceHilb = null; try { spaceHilb = _basicTransform.forward( spaceTime ); } catch( JWaveException e ) { e.showMessage( ); e.printStackTrace( ); } return spaceHilb; }
|
/**
* Performs the 3-D forward transform of the specified BasicWave object.
*
* @date 10.07.2010 18:15:22
* @author Christian Scheiblich (cscheiblich@gmail.com)
* @param matrixTime
* coefficients of 2-D time domain; internal M(i),N(j),O(k)
* @return coefficients of 2-D frequency or Hilbert domain
*/
|
Performs the 3-D forward transform of the specified BasicWave object
|
forward
|
{
"repo_name": "RaineForest/ECG-Viewer",
"path": "JWave/src/math/jwave/Transform.java",
"license": "gpl-2.0",
"size": 16283
}
|
[
"math.jwave.exceptions.JWaveException"
] |
import math.jwave.exceptions.JWaveException;
|
import math.jwave.exceptions.*;
|
[
"math.jwave.exceptions"
] |
math.jwave.exceptions;
| 1,433,989
|
public WorldMap getWorldMap(int id) {
WorldMap map = worldMaps.get(id);
if (map == null) {
throw new WorldMapNotExistException("Map: " + id + " not exist!");
}
return map;
}
|
WorldMap function(int id) { WorldMap map = worldMaps.get(id); if (map == null) { throw new WorldMapNotExistException(STR + id + STR); } return map; }
|
/**
* Return World Map by id
*
* @param id - id of world map.
* @return World map.
*/
|
Return World Map by id
|
getWorldMap
|
{
"repo_name": "GiGatR00n/Aion-Core-v4.7.5",
"path": "AC-Game/src/com/aionemu/gameserver/world/World.java",
"license": "gpl-2.0",
"size": 17978
}
|
[
"com.aionemu.gameserver.world.exceptions.WorldMapNotExistException"
] |
import com.aionemu.gameserver.world.exceptions.WorldMapNotExistException;
|
import com.aionemu.gameserver.world.exceptions.*;
|
[
"com.aionemu.gameserver"
] |
com.aionemu.gameserver;
| 1,753,707
|
public void debugLegalMoves(Object o, List<Move> moves)
{
String sender = o.getClass().getCanonicalName();
if(debug || isWhiteListed(sender))
{
System.out.print(sender + ": ");
System.out.println("Legal moves: ");
for(int i=0; i<moves.size(); i++)
{
System.out.print("card -> " + moves.get(i).getCard() + ", ");
System.out.print("positions -> ");
for(int j=0; j<moves.get(i).getPositions().length; j+= 2){
System.out.print("s: " + moves.get(i).getPositions()[j] + ", ");
System.out.print("t: " + moves.get(i).getPositions()[j + 1]);
if(j != moves.get(i).getPositions().length - 2){
System.out.print(" / ");
}
}
System.out.println();
}
}
}
|
void function(Object o, List<Move> moves) { String sender = o.getClass().getCanonicalName(); if(debug isWhiteListed(sender)) { System.out.print(sender + STR); System.out.println(STR); for(int i=0; i<moves.size(); i++) { System.out.print(STR + moves.get(i).getCard() + STR); System.out.print(STR); for(int j=0; j<moves.get(i).getPositions().length; j+= 2){ System.out.print(STR + moves.get(i).getPositions()[j] + STR); System.out.print(STR + moves.get(i).getPositions()[j + 1]); if(j != moves.get(i).getPositions().length - 2){ System.out.print(STR); } } System.out.println(); } } }
|
/**
* prints the moves in a human readable form
* @param o the object which sent the message
* @param moves to be shown
*/
|
prints the moves in a human readable form
|
debugLegalMoves
|
{
"repo_name": "retoo/bodesuri",
"path": "src/intelliDOG/ai/utils/DebugMsg.java",
"license": "gpl-2.0",
"size": 7188
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,067,717
|
@Generated
@Selector("setMinFreeDiskSpaceLimit:")
public native void setMinFreeDiskSpaceLimit(long value);
|
@Selector(STR) native void function(long value);
|
/**
* [@property] minFreeDiskSpaceLimit
* <p>
* Specifies the minimum amount of free space, in bytes, required for recording to continue on a given volume.
* <p>
* This property specifies a hard lower limit on the amount of free space that must remain on a target volume for recording to continue. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error.
*/
|
[@property] minFreeDiskSpaceLimit Specifies the minimum amount of free space, in bytes, required for recording to continue on a given volume.
|
setMinFreeDiskSpaceLimit
|
{
"repo_name": "multi-os-engine/moe-core",
"path": "moe.apple/moe.platform.ios/src/main/java/apple/avfoundation/AVCaptureFileOutput.java",
"license": "apache-2.0",
"size": 15277
}
|
[
"org.moe.natj.objc.ann.Selector"
] |
import org.moe.natj.objc.ann.Selector;
|
import org.moe.natj.objc.ann.*;
|
[
"org.moe.natj"
] |
org.moe.natj;
| 960,372
|
public static LinkProperties loadPropertiesIntoOMGraphic(DataInput dis, OMGraphic omg,
LinkProperties propertiesBuffer)
throws IOException {
LinkProperties readProperties = (LinkProperties) read(dis, propertiesBuffer).clone();
readProperties.setProperties(omg); // load them into OMGraphic..
return readProperties;
}
|
static LinkProperties function(DataInput dis, OMGraphic omg, LinkProperties propertiesBuffer) throws IOException { LinkProperties readProperties = (LinkProperties) read(dis, propertiesBuffer).clone(); readProperties.setProperties(omg); return readProperties; }
|
/**
* New, static method for more efficient property handling and loading the
* properties into the OMGraphic.
*
* @param dis
* @param omg
* @return if there are no properties, the EMPTY_PROPERTIES object is
* returned. If there are properties and props == null, then a new
* LinkProperties object is allocated and returned, otherwise, props
* is returned. The OMGraphic appObject is set with the read
* properties.
*/
|
New, static method for more efficient property handling and loading the properties into the OMGraphic
|
loadPropertiesIntoOMGraphic
|
{
"repo_name": "d2fn/passage",
"path": "src/main/java/com/bbn/openmap/layer/link/LinkProperties.java",
"license": "mit",
"size": 15581
}
|
[
"com.bbn.openmap.omGraphics.OMGraphic",
"java.io.DataInput",
"java.io.IOException"
] |
import com.bbn.openmap.omGraphics.OMGraphic; import java.io.DataInput; import java.io.IOException;
|
import com.bbn.openmap.*; import java.io.*;
|
[
"com.bbn.openmap",
"java.io"
] |
com.bbn.openmap; java.io;
| 1,707,949
|
public void processFile(File file) {
if (configurationFile == null) {
throw new BuildException("configurationFile not set");
}
try {
Model model = getDirectDataLoader().getIntegrationWriter().getModel();
DelimitedFileConfiguration dfc;
try {
dfc = new DelimitedFileConfiguration(model, new FileInputStream(configurationFile));
} catch (Exception e) {
throw new BuildException("unable to read configuration for "
+ this.getClass().getName(), e);
}
executeInternal(dfc, file);
} catch (ObjectStoreException e) {
throw new BuildException("ObjectStore problem while processing: " + file);
}
}
|
void function(File file) { if (configurationFile == null) { throw new BuildException(STR); } try { Model model = getDirectDataLoader().getIntegrationWriter().getModel(); DelimitedFileConfiguration dfc; try { dfc = new DelimitedFileConfiguration(model, new FileInputStream(configurationFile)); } catch (Exception e) { throw new BuildException(STR + this.getClass().getName(), e); } executeInternal(dfc, file); } catch (ObjectStoreException e) { throw new BuildException(STR + file); } }
|
/**
* Query all objects of the class given by the className specified in the configurationFile.
* Set fields in the objects by using the tab separated files as input.
* @param file the File to process
* @throws BuildException if an ObjectStore method fails
*/
|
Query all objects of the class given by the className specified in the configurationFile. Set fields in the objects by using the tab separated files as input
|
processFile
|
{
"repo_name": "joshkh/intermine",
"path": "intermine/integrate/main/src/org/intermine/task/TSVFileReaderTask.java",
"license": "lgpl-2.1",
"size": 5175
}
|
[
"java.io.File",
"java.io.FileInputStream",
"org.apache.tools.ant.BuildException",
"org.intermine.metadata.Model",
"org.intermine.objectstore.ObjectStoreException"
] |
import java.io.File; import java.io.FileInputStream; import org.apache.tools.ant.BuildException; import org.intermine.metadata.Model; import org.intermine.objectstore.ObjectStoreException;
|
import java.io.*; import org.apache.tools.ant.*; import org.intermine.metadata.*; import org.intermine.objectstore.*;
|
[
"java.io",
"org.apache.tools",
"org.intermine.metadata",
"org.intermine.objectstore"
] |
java.io; org.apache.tools; org.intermine.metadata; org.intermine.objectstore;
| 2,751,895
|
public Collection getPropertyNames()
{
return Collections.unmodifiableCollection(this.properties.keySet());
}
|
Collection function() { return Collections.unmodifiableCollection(this.properties.keySet()); }
|
/**
* Get the names of all properties defined for this BlockState
*/
|
Get the names of all properties defined for this BlockState
|
getPropertyNames
|
{
"repo_name": "kelthalorn/ConquestCraft",
"path": "build/tmp/recompSrc/net/minecraft/block/state/BlockState.java",
"license": "lgpl-2.1",
"size": 9735
}
|
[
"java.util.Collection",
"java.util.Collections"
] |
import java.util.Collection; import java.util.Collections;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 340,869
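A standalone sketch (not part of the dataset) of the read-only view returned by `getPropertyNames` above: `Collections.unmodifiableCollection` exposes the backing key set without allowing mutation. The property map below is invented:

```java
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

public class UnmodifiableViewDemo {
    public static void main(String[] args) {
        Map<String, Object> properties = new LinkedHashMap<>();
        properties.put("facing", "north"); // hypothetical block property
        Collection<String> names = Collections.unmodifiableCollection(properties.keySet());
        System.out.println(names); // [facing]
        try {
            names.remove("facing");
        } catch (UnsupportedOperationException expected) {
            System.out.println("view is read-only");
        }
    }
}
```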
|
MessageT compose(Context context, String source, Object... inputs);
|
MessageT compose(Context context, String source, Object... inputs);
|
/**
* Composes a message.
*
* @param context The compose context.
* @param source The source message.
* @param inputs The message parameters.
*
* @return the composed message.
*/
|
Composes a message
|
compose
|
{
"repo_name": "CubeEngine/Dirigent",
"path": "src/main/java/org/cubeengine/dirigent/Dirigent.java",
"license": "mit",
"size": 3074
}
|
[
"org.cubeengine.dirigent.context.Context"
] |
import org.cubeengine.dirigent.context.Context;
|
import org.cubeengine.dirigent.context.*;
|
[
"org.cubeengine.dirigent"
] |
org.cubeengine.dirigent;
| 81,998
|
public ColorStateList getColorStateList(int id) throws NotFoundException {
TypedValue value;
synchronized (mAccessLock) {
value = mTmpValue;
if (value == null) {
value = new TypedValue();
} else {
mTmpValue = null;
}
getValue(id, value, true);
}
ColorStateList res = loadColorStateList(value, id);
synchronized (mAccessLock) {
if (mTmpValue == null) {
mTmpValue = value;
}
}
return res;
}
|
ColorStateList function(int id) throws NotFoundException { TypedValue value; synchronized (mAccessLock) { value = mTmpValue; if (value == null) { value = new TypedValue(); } else { mTmpValue = null; } getValue(id, value, true); } ColorStateList res = loadColorStateList(value, id); synchronized (mAccessLock) { if (mTmpValue == null) { mTmpValue = value; } } return res; }
|
/**
* Return a color state list associated with a particular resource ID. The
* resource may contain either a single raw color value, or a complex
* {@link android.content.res.ColorStateList} holding multiple possible colors.
*
* @param id The desired resource identifier of a {@link ColorStateList},
* as generated by the aapt tool. This integer encodes the package, type, and resource
* entry. The value 0 is an invalid identifier.
*
* @throws NotFoundException Throws NotFoundException if the given ID does not exist.
*
* @return Returns a ColorStateList object containing either a single
* solid color or multiple colors that can be selected based on a state.
*/
|
Return a color state list associated with a particular resource ID. The resource may contain either a single raw color value, or a complex <code>android.content.res.ColorStateList</code> holding multiple possible colors
|
getColorStateList
|
{
"repo_name": "indashnet/InDashNet.Open.UN2000",
"path": "android/frameworks/base/core/java/android/content/res/Resources.java",
"license": "apache-2.0",
"size": 101025
}
|
[
"android.util.TypedValue"
] |
import android.util.TypedValue;
|
import android.util.*;
|
[
"android.util"
] |
android.util;
| 1,335,166
|
public boolean isCovered(List<TestCase> tests) {
for (TestCase test : tests) {
if (isCovered(test))
return true;
}
return false;
}
|
boolean function(List<TestCase> tests) { for (TestCase test : tests) { if (isCovered(test)) return true; } return false; }
|
/**
* Determine if there is an existing test case covering this goal
*
* @param tests
* a {@link java.util.List} object.
* @return a boolean.
*/
|
Determine if there is an existing test case covering this goal
|
isCovered
|
{
"repo_name": "claudejin/evosuite",
"path": "client/src/main/java/org/evosuite/testcase/TestFitnessFunction.java",
"license": "lgpl-3.0",
"size": 5622
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,879,137
|
public synchronized void unregisterIndex(
String index,
String hostName,
int port,
AsyncMethodCallback<Void> resultHandler) {
final String hostAndPortKey = hostName + port;
StreamingClientServiceClient client =
clientsByHostAndPort.get(hostAndPortKey);
if (client != null) {
Set<String> indices = indicesByClient.get(client);
indices.remove(index);
if (indices.isEmpty()) {
clientsByHostAndPort.remove(hostAndPortKey);
client = null;
}
}
resultHandler.onComplete(null);
}
|
synchronized void function( String index, String hostName, int port, AsyncMethodCallback<Void> resultHandler) { final String hostAndPortKey = hostName + port; StreamingClientServiceClient client = clientsByHostAndPort.get(hostAndPortKey); if (client != null) { Set<String> indices = indicesByClient.get(client); indices.remove(index); if (indices.isEmpty()) { clientsByHostAndPort.remove(hostAndPortKey); client = null; } } resultHandler.onComplete(null); }
|
/**
* Unregisters a client from receiving a statistics stream
*
* @param index
* Index of the statistics of the stream
* @param hostName
* Hostname of the client
* @param port
* Port number at which the StreamingServer at the client side is
* listening to
* @param resultHandler
* Callback to be called as soon as the client is unregistered
*/
|
Unregisters a client from receiving a statistics stream
|
unregisterIndex
|
{
"repo_name": "Completionary/completionProxy",
"path": "src/main/java/de/completionary/proxy/analytics/StatisticsStreamDispatcher.java",
"license": "gpl-2.0",
"size": 10924
}
|
[
"de.completionary.proxy.thrift.clients.StreamingClientServiceClient",
"java.util.Set",
"org.apache.thrift.async.AsyncMethodCallback"
] |
import de.completionary.proxy.thrift.clients.StreamingClientServiceClient; import java.util.Set; import org.apache.thrift.async.AsyncMethodCallback;
|
import de.completionary.proxy.thrift.clients.*; import java.util.*; import org.apache.thrift.async.*;
|
[
"de.completionary.proxy",
"java.util",
"org.apache.thrift"
] |
de.completionary.proxy; java.util; org.apache.thrift;
| 1,100,483
|
private static double getZeroXOfControlPoligon(final NubSpline nubSpline) {
final Iterator<PolarCP> iter = nubSpline.controlPoints.iterator();
// the CPs we are working with.
PolarCP firstCP, secondCP;
// Determines if one of the CPs is bigger than zero.
boolean firstIsPositive, secondIsPositive;
// Determines if one of the CPs is smaller than zero.
boolean firstIsNegative, secondIsNegative;
// the x values of the CPs we are working with.
double firstX = 0.0, secondX;
// initialize
secondCP = iter.next();
secondX = secondCP.getCp().x;
secondIsPositive = secondX > EPSILON;
secondIsNegative = secondX < -EPSILON;
while (iter.hasNext()) {
// Proceed to next line between CPs:
// Update first CP.
firstCP = secondCP;
firstX = secondX;
firstIsPositive = secondIsPositive;
firstIsNegative = secondIsNegative;
// Update second CP.
secondCP = iter.next();
secondX = secondCP.getCp().x;
secondIsPositive = secondX > EPSILON;
secondIsNegative = secondX < -EPSILON;
// Check if there is a zero at the second CP.
if (!(secondIsPositive || secondIsNegative)) {
return getTFromPolar(secondCP.getPolarCoordinate());
}
// check for zero in the line between the two CPs
if (firstIsPositive && secondIsNegative || firstIsNegative && secondIsPositive) {
double factor;
factor = firstX / (firstX - secondX);
final double t1 = getTFromPolar(firstCP.getPolarCoordinate());
final double t2 = getTFromPolar(secondCP.getPolarCoordinate());
return factor * t1 + (1 - factor) * t2;
}
}
return 0.0;
}
// ########################################################################################
// Bezier
|
static double function(final NubSpline nubSpline) { final Iterator<PolarCP> iter = nubSpline.controlPoints.iterator(); PolarCP firstCP, secondCP; boolean firstIsPositive, secondIsPositive; boolean firstIsNegative, secondIsNegative; double firstX = 0.0, secondX; secondCP = iter.next(); secondX = secondCP.getCp().x; secondIsPositive = secondX > EPSILON; secondIsNegative = secondX < -EPSILON; while (iter.hasNext()) { firstCP = secondCP; firstX = secondX; firstIsPositive = secondIsPositive; firstIsNegative = secondIsNegative; secondCP = iter.next(); secondX = secondCP.getCp().x; secondIsPositive = secondX > EPSILON; secondIsNegative = secondX < -EPSILON; if (!(secondIsPositive secondIsNegative)) { return getTFromPolar(secondCP.getPolarCoordinate()); } if (firstIsPositive && secondIsNegative firstIsNegative && secondIsPositive) { double factor; factor = firstX / (firstX - secondX); final double t1 = getTFromPolar(firstCP.getPolarCoordinate()); final double t2 = getTFromPolar(secondCP.getPolarCoordinate()); return factor * t1 + (1 - factor) * t2; } } return 0.0; }
|
/**
* Returns a progression value (t) of the first zero (X-value) in the control polygon. If the polygon
* starts at zero, this zero is ignored and the next zero is returned. If there is no zero found,
* 0.0 is returned.
*
* @param nubSpline The NubSpline those zero of control polygon to calculate.
* @return The progression value t. min[knotVector] < t <= max[knotVector]
*/
|
Returns a progression value (t) of the first zero (X-value) in the control polygon. If the polygon starts at zero, this zero is ignored and the next zero is returned. If there is no zero found, 0.0 is returned
|
getZeroXOfControlPoligon
|
{
"repo_name": "ExplorViz/ExplorViz",
"path": "src-external/de/cau/cs/kieler/klay/layered/p5edges/splines/NubSpline.java",
"license": "apache-2.0",
"size": 43267
}
|
[
"java.util.Iterator"
] |
import java.util.Iterator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 474,288
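A tiny numeric sketch (not part of the dataset) of the sign-change blend used by `getZeroXOfControlPoligon` above; the x values and progression values are made up, and the blend simply mirrors the sample's `factor * t1 + (1 - factor) * t2` formula:

```java
public class ZeroCrossingDemo {
    public static void main(String[] args) {
        double firstX = 2.0, secondX = -6.0; // hypothetical x values on either side of zero
        double t1 = 0.25, t2 = 0.75;         // hypothetical progression values of the two CPs
        // Blend factor from the sample: proportion of the span before the sign change.
        double factor = firstX / (firstX - secondX); // 2.0 / 8.0 = 0.25
        double t = factor * t1 + (1 - factor) * t2;  // the sample's blend of the two t values
        System.out.println(t); // 0.625
    }
}
```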
|
@CheckForNull
public DistributionRequestType getRequestType() {
return get(PROPERTY_REQUEST_TYPE, DistributionRequestType.class);
}
|
DistributionRequestType function() { return get(PROPERTY_REQUEST_TYPE, DistributionRequestType.class); }
|
/**
* get the request type associated to the package holding this info
*
* @return the request type
*/
|
get the request type associated to the package holding this info
|
getRequestType
|
{
"repo_name": "plutext/sling",
"path": "contrib/extensions/distribution/core/src/main/java/org/apache/sling/distribution/packaging/DistributionPackageInfo.java",
"license": "apache-2.0",
"size": 4091
}
|
[
"org.apache.sling.distribution.DistributionRequestType"
] |
import org.apache.sling.distribution.DistributionRequestType;
|
import org.apache.sling.distribution.*;
|
[
"org.apache.sling"
] |
org.apache.sling;
| 620,834
|
@MethodStats
public AuthenticationContext authenticate(SignedJWTInfo signedJWTInfo, MessageContext synCtx)
throws APISecurityException {
String apiContext = (String) synCtx.getProperty(RESTConstants.REST_API_CONTEXT);
String apiVersion = (String) synCtx.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION);
org.apache.axis2.context.MessageContext axis2MsgContext =
((Axis2MessageContext) synCtx).getAxis2MessageContext();
String httpMethod = (String) axis2MsgContext.getProperty(Constants.Configuration.HTTP_METHOD);
String matchingResource = (String) synCtx.getProperty(APIConstants.API_ELECTED_RESOURCE);
String jwtTokenIdentifier = getJWTTokenIdentifier(signedJWTInfo);
String jwtHeader = signedJWTInfo.getSignedJWT().getHeader().toString();
try {
X509Certificate clientCertificate = Utils.getClientCertificate(axis2MsgContext);
signedJWTInfo.setX509ClientCertificate(clientCertificate);
} catch (APIManagementException e) {
log.error("Error while obtaining client certificate. " + GatewayUtils.getMaskedToken(jwtHeader));
}
if (StringUtils.isNotEmpty(jwtTokenIdentifier)) {
if (RevokedJWTDataHolder.isJWTTokenSignatureExistsInRevokedMap(jwtTokenIdentifier)) {
if (log.isDebugEnabled()) {
log.debug("Token retrieved from the revoked jwt token map. Token: " + GatewayUtils.
getMaskedToken(jwtHeader));
}
log.error("Invalid JWT token. " + GatewayUtils.getMaskedToken(jwtHeader));
throw new APISecurityException(APISecurityConstants.API_AUTH_INVALID_CREDENTIALS,
"Invalid JWT token");
}
}
JWTValidationInfo jwtValidationInfo = getJwtValidationInfo(signedJWTInfo, jwtTokenIdentifier);
if (jwtValidationInfo != null) {
if (jwtValidationInfo.isValid()) {
// Validate subscriptions
APIKeyValidationInfoDTO apiKeyValidationInfoDTO;
log.debug("Begin subscription validation via Key Manager: " + jwtValidationInfo.getKeyManager());
apiKeyValidationInfoDTO = validateSubscriptionUsingKeyManager(synCtx, jwtValidationInfo);
if (log.isDebugEnabled()) {
log.debug("Subscription validation via Key Manager. Status: "
+ apiKeyValidationInfoDTO.isAuthorized());
}
if (!apiKeyValidationInfoDTO.isAuthorized()){
log.debug(
"User is NOT authorized to access the Resource. API Subscription validation failed.");
throw new APISecurityException(apiKeyValidationInfoDTO.getValidationStatus(),
"User is NOT authorized to access the Resource. API Subscription validation failed.");
}
// Validate scopes
validateScopes(apiContext, apiVersion, matchingResource, httpMethod, jwtValidationInfo, signedJWTInfo);
synCtx.setProperty(APIMgtGatewayConstants.SCOPES, jwtValidationInfo.getScopes().toString());
if (apiKeyValidationInfoDTO.isAuthorized()) {
synCtx.setProperty(APIMgtGatewayConstants.API_PUBLISHER, apiKeyValidationInfoDTO.getApiPublisher());
synCtx.setProperty("API_NAME", apiKeyValidationInfoDTO.getApiName());
if (APIConstants.GRAPHQL_API.equals(synCtx.getProperty(APIConstants.API_TYPE))) {
synCtx.setProperty(APIConstants.MAXIMUM_QUERY_DEPTH,
apiKeyValidationInfoDTO.getGraphQLMaxDepth());
synCtx.setProperty(APIConstants.MAXIMUM_QUERY_COMPLEXITY,
apiKeyValidationInfoDTO.getGraphQLMaxComplexity());
}
log.debug("JWT authentication successful.");
}
log.debug("JWT authentication successful.");
String endUserToken = null;
if (jwtGenerationEnabled) {
JWTInfoDto jwtInfoDto = GatewayUtils
.generateJWTInfoDto(null, jwtValidationInfo, apiKeyValidationInfoDTO, synCtx);
endUserToken = generateAndRetrieveJWTToken(jwtTokenIdentifier, jwtInfoDto);
}
return GatewayUtils.generateAuthenticationContext(jwtTokenIdentifier, jwtValidationInfo, apiKeyValidationInfoDTO,
endUserToken, true);
} else {
throw new APISecurityException(jwtValidationInfo.getValidationCode(),
APISecurityConstants.getAuthenticationFailureMessage(jwtValidationInfo.getValidationCode()));
}
} else {
throw new APISecurityException(APISecurityConstants.API_AUTH_GENERAL_ERROR,
APISecurityConstants.API_AUTH_GENERAL_ERROR_MESSAGE);
}
}
|
AuthenticationContext function(SignedJWTInfo signedJWTInfo, MessageContext synCtx) throws APISecurityException { String apiContext = (String) synCtx.getProperty(RESTConstants.REST_API_CONTEXT); String apiVersion = (String) synCtx.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION); org.apache.axis2.context.MessageContext axis2MsgContext = ((Axis2MessageContext) synCtx).getAxis2MessageContext(); String httpMethod = (String) axis2MsgContext.getProperty(Constants.Configuration.HTTP_METHOD); String matchingResource = (String) synCtx.getProperty(APIConstants.API_ELECTED_RESOURCE); String jwtTokenIdentifier = getJWTTokenIdentifier(signedJWTInfo); String jwtHeader = signedJWTInfo.getSignedJWT().getHeader().toString(); try { X509Certificate clientCertificate = Utils.getClientCertificate(axis2MsgContext); signedJWTInfo.setX509ClientCertificate(clientCertificate); } catch (APIManagementException e) { log.error(STR + GatewayUtils.getMaskedToken(jwtHeader)); } if (StringUtils.isNotEmpty(jwtTokenIdentifier)) { if (RevokedJWTDataHolder.isJWTTokenSignatureExistsInRevokedMap(jwtTokenIdentifier)) { if (log.isDebugEnabled()) { log.debug(STR + GatewayUtils. getMaskedToken(jwtHeader)); } log.error(STR + GatewayUtils.getMaskedToken(jwtHeader)); throw new APISecurityException(APISecurityConstants.API_AUTH_INVALID_CREDENTIALS, STR); } } JWTValidationInfo jwtValidationInfo = getJwtValidationInfo(signedJWTInfo, jwtTokenIdentifier); if (jwtValidationInfo != null) { if (jwtValidationInfo.isValid()) { APIKeyValidationInfoDTO apiKeyValidationInfoDTO; log.debug(STR + jwtValidationInfo.getKeyManager()); apiKeyValidationInfoDTO = validateSubscriptionUsingKeyManager(synCtx, jwtValidationInfo); if (log.isDebugEnabled()) { log.debug(STR + apiKeyValidationInfoDTO.isAuthorized()); } if (!apiKeyValidationInfoDTO.isAuthorized()){ log.debug( STR); throw new APISecurityException(apiKeyValidationInfoDTO.getValidationStatus(), STR); } validateScopes(apiContext, apiVersion, matchingResource, httpMethod, jwtValidationInfo, signedJWTInfo); synCtx.setProperty(APIMgtGatewayConstants.SCOPES, jwtValidationInfo.getScopes().toString()); if (apiKeyValidationInfoDTO.isAuthorized()) { synCtx.setProperty(APIMgtGatewayConstants.API_PUBLISHER, apiKeyValidationInfoDTO.getApiPublisher()); synCtx.setProperty(STR, apiKeyValidationInfoDTO.getApiName()); if (APIConstants.GRAPHQL_API.equals(synCtx.getProperty(APIConstants.API_TYPE))) { synCtx.setProperty(APIConstants.MAXIMUM_QUERY_DEPTH, apiKeyValidationInfoDTO.getGraphQLMaxDepth()); synCtx.setProperty(APIConstants.MAXIMUM_QUERY_COMPLEXITY, apiKeyValidationInfoDTO.getGraphQLMaxComplexity()); } log.debug(STR); } log.debug(STR); String endUserToken = null; if (jwtGenerationEnabled) { JWTInfoDto jwtInfoDto = GatewayUtils .generateJWTInfoDto(null, jwtValidationInfo, apiKeyValidationInfoDTO, synCtx); endUserToken = generateAndRetrieveJWTToken(jwtTokenIdentifier, jwtInfoDto); } return GatewayUtils.generateAuthenticationContext(jwtTokenIdentifier, jwtValidationInfo, apiKeyValidationInfoDTO, endUserToken, true); } else { throw new APISecurityException(jwtValidationInfo.getValidationCode(), APISecurityConstants.getAuthenticationFailureMessage(jwtValidationInfo.getValidationCode())); } } else { throw new APISecurityException(APISecurityConstants.API_AUTH_GENERAL_ERROR, APISecurityConstants.API_AUTH_GENERAL_ERROR_MESSAGE); } }
|
/**
* Authenticates the given request with a JWT token to see if an API consumer is allowed to access
* a particular API or not.
*
* @param signedJWTInfo The JWT token sent with the API request
* @param synCtx The message to be authenticated
* @return an AuthenticationContext object which contains the authentication information
* @throws APISecurityException in case of authentication failure
*/
|
Authenticates the given request with a JWT token to see if an API consumer is allowed to access a particular API or not
|
authenticate
|
{
"repo_name": "fazlan-nazeem/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.gateway/src/main/java/org/wso2/carbon/apimgt/gateway/handlers/security/jwt/JWTValidator.java",
"license": "apache-2.0",
"size": 32132
}
|
[
"javax.security.cert.X509Certificate",
"org.apache.axis2.Constants",
"org.apache.commons.lang.StringUtils",
"org.apache.synapse.MessageContext",
"org.apache.synapse.core.axis2.Axis2MessageContext",
"org.apache.synapse.rest.RESTConstants",
"org.wso2.carbon.apimgt.api.APIManagementException",
"org.wso2.carbon.apimgt.common.gateway.dto.JWTInfoDto",
"org.wso2.carbon.apimgt.common.gateway.dto.JWTValidationInfo",
"org.wso2.carbon.apimgt.gateway.APIMgtGatewayConstants",
"org.wso2.carbon.apimgt.gateway.handlers.Utils",
"org.wso2.carbon.apimgt.gateway.handlers.security.APISecurityConstants",
"org.wso2.carbon.apimgt.gateway.handlers.security.APISecurityException",
"org.wso2.carbon.apimgt.gateway.handlers.security.AuthenticationContext",
"org.wso2.carbon.apimgt.gateway.jwt.RevokedJWTDataHolder",
"org.wso2.carbon.apimgt.gateway.utils.GatewayUtils",
"org.wso2.carbon.apimgt.impl.APIConstants",
"org.wso2.carbon.apimgt.impl.dto.APIKeyValidationInfoDTO",
"org.wso2.carbon.apimgt.impl.jwt.SignedJWTInfo"
] |
import javax.security.cert.X509Certificate; import org.apache.axis2.Constants; import org.apache.commons.lang.StringUtils; import org.apache.synapse.MessageContext; import org.apache.synapse.core.axis2.Axis2MessageContext; import org.apache.synapse.rest.RESTConstants; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.common.gateway.dto.JWTInfoDto; import org.wso2.carbon.apimgt.common.gateway.dto.JWTValidationInfo; import org.wso2.carbon.apimgt.gateway.APIMgtGatewayConstants; import org.wso2.carbon.apimgt.gateway.handlers.Utils; import org.wso2.carbon.apimgt.gateway.handlers.security.APISecurityConstants; import org.wso2.carbon.apimgt.gateway.handlers.security.APISecurityException; import org.wso2.carbon.apimgt.gateway.handlers.security.AuthenticationContext; import org.wso2.carbon.apimgt.gateway.jwt.RevokedJWTDataHolder; import org.wso2.carbon.apimgt.gateway.utils.GatewayUtils; import org.wso2.carbon.apimgt.impl.APIConstants; import org.wso2.carbon.apimgt.impl.dto.APIKeyValidationInfoDTO; import org.wso2.carbon.apimgt.impl.jwt.SignedJWTInfo;
|
import javax.security.cert.*; import org.apache.axis2.*; import org.apache.commons.lang.*; import org.apache.synapse.*; import org.apache.synapse.core.axis2.*; import org.apache.synapse.rest.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.common.gateway.dto.*; import org.wso2.carbon.apimgt.gateway.*; import org.wso2.carbon.apimgt.gateway.handlers.*; import org.wso2.carbon.apimgt.gateway.handlers.security.*; import org.wso2.carbon.apimgt.gateway.jwt.*; import org.wso2.carbon.apimgt.gateway.utils.*; import org.wso2.carbon.apimgt.impl.*; import org.wso2.carbon.apimgt.impl.dto.*; import org.wso2.carbon.apimgt.impl.jwt.*;
|
[
"javax.security",
"org.apache.axis2",
"org.apache.commons",
"org.apache.synapse",
"org.wso2.carbon"
] |
javax.security; org.apache.axis2; org.apache.commons; org.apache.synapse; org.wso2.carbon;
| 1,860,669
|
@Test
public void testDeleteKeys() throws Exception {
Key key = makeKey("bird", "finch").build();
DeleteKeyFn deleteKeyFn = new DeleteKeyFn();
Mutation exceptedMutation = makeDelete(key).build();
assertEquals(deleteKeyFn.apply(key), exceptedMutation);
}
|
void function() throws Exception { Key key = makeKey("bird", "finch").build(); DeleteKeyFn deleteKeyFn = new DeleteKeyFn(); Mutation exceptedMutation = makeDelete(key).build(); assertEquals(deleteKeyFn.apply(key), exceptedMutation); }
|
/**
* Test that valid keys are transformed to delete mutations.
*/
|
Test that valid keys are transformed to delete mutations
|
testDeleteKeys
|
{
"repo_name": "amitsela/incubator-beam",
"path": "sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java",
"license": "apache-2.0",
"size": 40238
}
|
[
"com.google.datastore.v1.Key",
"com.google.datastore.v1.Mutation",
"com.google.datastore.v1.client.DatastoreHelper",
"org.apache.beam.sdk.io.gcp.datastore.DatastoreV1",
"org.junit.Assert"
] |
import com.google.datastore.v1.Key; import com.google.datastore.v1.Mutation; import com.google.datastore.v1.client.DatastoreHelper; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1; import org.junit.Assert;
|
import com.google.datastore.v1.*; import com.google.datastore.v1.client.*; import org.apache.beam.sdk.io.gcp.datastore.*; import org.junit.*;
|
[
"com.google.datastore",
"org.apache.beam",
"org.junit"
] |
com.google.datastore; org.apache.beam; org.junit;
| 634,633
|
@Test
public void testInsertRepoFileAndFetchItAgain() {
RepoFileMetaData newRepoFileMap = getNewIsoRepoFile();
repoFileMetaDataDao.addRepoFileMap(newRepoFileMap);
List<RepoFileMetaData> listOfRepoFiles = repoFileMetaDataDao
.getRepoListForStorageDomain(FixturesTool.STORAGE_DOAMIN_NFS_ISO,
FileTypeExtension.ISO);
assertNotNull(listOfRepoFiles);
assertSame(true, !listOfRepoFiles.isEmpty());
assertSame(
true,
listOfRepoFiles.get(0).getRepoFileName()
.equals(newRepoFileMap.getRepoFileName()));
assertSame(true,
listOfRepoFiles.get(0).getLastRefreshed() == newRepoFileMap
.getLastRefreshed());
assertSame(true,
listOfRepoFiles.get(0).getSize() == newRepoFileMap.getSize());
assertSame(
true,
listOfRepoFiles.get(0).getRepoDomainId()
.equals(newRepoFileMap.getRepoDomainId()));
}
|
void function() { RepoFileMetaData newRepoFileMap = getNewIsoRepoFile(); repoFileMetaDataDao.addRepoFileMap(newRepoFileMap); List<RepoFileMetaData> listOfRepoFiles = repoFileMetaDataDao .getRepoListForStorageDomain(FixturesTool.STORAGE_DOAMIN_NFS_ISO, FileTypeExtension.ISO); assertNotNull(listOfRepoFiles); assertSame(true, !listOfRepoFiles.isEmpty()); assertSame( true, listOfRepoFiles.get(0).getRepoFileName() .equals(newRepoFileMap.getRepoFileName())); assertSame(true, listOfRepoFiles.get(0).getLastRefreshed() == newRepoFileMap .getLastRefreshed()); assertSame(true, listOfRepoFiles.get(0).getSize() == newRepoFileMap.getSize()); assertSame( true, listOfRepoFiles.get(0).getRepoDomainId() .equals(newRepoFileMap.getRepoDomainId())); }
|
/**
* Test inserting a row and fetching it later.
*/
|
Test inserting a row and fetching it later
|
testInsertRepoFileAndFetchItAgain
|
{
"repo_name": "derekhiggins/ovirt-engine",
"path": "backend/manager/modules/dal/src/test/java/org/ovirt/engine/core/dao/RepoFileMetaDataDAOTest.java",
"license": "apache-2.0",
"size": 13014
}
|
[
"java.util.List",
"org.junit.Assert",
"org.ovirt.engine.core.common.businessentities.FileTypeExtension",
"org.ovirt.engine.core.common.businessentities.RepoFileMetaData"
] |
import java.util.List; import org.junit.Assert; import org.ovirt.engine.core.common.businessentities.FileTypeExtension; import org.ovirt.engine.core.common.businessentities.RepoFileMetaData;
|
import java.util.*; import org.junit.*; import org.ovirt.engine.core.common.businessentities.*;
|
[
"java.util",
"org.junit",
"org.ovirt.engine"
] |
java.util; org.junit; org.ovirt.engine;
| 86,176
|
public void killTask(TaskAttemptID taskId) throws IOException {
ensureState(JobState.RUNNING);
info.killTask(org.apache.hadoop.mapred.TaskAttemptID.downgrade(taskId),
false);
}
|
void function(TaskAttemptID taskId) throws IOException { ensureState(JobState.RUNNING); info.killTask(org.apache.hadoop.mapred.TaskAttemptID.downgrade(taskId), false); }
|
/**
* Kill indicated task attempt.
*
* @param taskId the id of the task to be terminated.
* @throws IOException
*/
|
Kill indicated task attempt
|
killTask
|
{
"repo_name": "YuMatsuzawa/HadoopEclipseProject",
"path": "src/mapred/org/apache/hadoop/mapreduce/Job.java",
"license": "apache-2.0",
"size": 18269
}
|
[
"java.io.IOException",
"org.apache.hadoop.mapreduce.TaskAttemptID"
] |
import java.io.IOException; import org.apache.hadoop.mapreduce.TaskAttemptID;
|
import java.io.*; import org.apache.hadoop.mapreduce.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 1,188,122
|
@Override
public Response getAPIDocumentContentByDocumentId(String apiId, String documentId,
String ifNoneMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
DocumentationContent docContent = apiProvider.getDocumentationContent(apiId, documentId, tenantDomain);
if (docContent == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
return null;
}
// gets the content depending on the type of the document
if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.FILE)) {
String contentType = docContent.getResourceFile().getContentType();
contentType = contentType == null ? RestApiConstants.APPLICATION_OCTET_STREAM : contentType;
String name = docContent.getResourceFile().getName();
return Response.ok(docContent.getResourceFile().getContent())
.header(RestApiConstants.HEADER_CONTENT_TYPE, contentType)
.header(RestApiConstants.HEADER_CONTENT_DISPOSITION, "attachment; filename=\"" + name + "\"")
.build();
} else if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.INLINE)
|| docContent.getSourceType().equals(DocumentationContent.ContentSourceType.MARKDOWN)) {
String content = docContent.getTextContent();
return Response.ok(content)
.header(RestApiConstants.HEADER_CONTENT_TYPE, APIConstants.DOCUMENTATION_INLINE_CONTENT_TYPE)
.build();
} else if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.URL)) {
String sourceUrl = docContent.getTextContent();
return Response.seeOther(new URI(sourceUrl)).build();
}
} catch (APIManagementException e) {
// Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the
// existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving document : " + documentId + " of API " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving document " + documentId + " of the API " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving source URI location of " + documentId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
|
Response function(String apiId, String documentId, String ifNoneMatch, MessageContext messageContext) { try { APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider(); String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain(); DocumentationContent docContent = apiProvider.getDocumentationContent(apiId, documentId, tenantDomain); if (docContent == null) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log); return null; } if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.FILE)) { String contentType = docContent.getResourceFile().getContentType(); contentType = contentType == null ? RestApiConstants.APPLICATION_OCTET_STREAM : contentType; String name = docContent.getResourceFile().getName(); return Response.ok(docContent.getResourceFile().getContent()) .header(RestApiConstants.HEADER_CONTENT_TYPE, contentType) .header(RestApiConstants.HEADER_CONTENT_DISPOSITION, STR + name + "\"") .build(); } else if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.INLINE) || docContent.getSourceType().equals(DocumentationContent.ContentSourceType.MARKDOWN)) { String content = docContent.getTextContent(); return Response.ok(content) .header(RestApiConstants.HEADER_CONTENT_TYPE, APIConstants.DOCUMENTATION_INLINE_CONTENT_TYPE) .build(); } else if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.URL)) { String sourceUrl = docContent.getTextContent(); return Response.seeOther(new URI(sourceUrl)).build(); } } catch (APIManagementException e) { if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) { RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log); } else if (isAuthorizationFailure(e)) { RestApiUtil.handleAuthorizationFailure(STR + documentId + STR + apiId, e, log); } else { String errorMessage = STR + documentId + STR + apiId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } } catch (URISyntaxException e) { String errorMessage = STR + documentId; RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; }
|
/**
* Retrieves the content of a document
*
* @param apiId API identifier
* @param documentId document identifier
* @param ifNoneMatch If-None-Match header value
* @return Content of the document/ either inline/file or source url as a redirection
*/
|
Retrieves the content of a document
|
getAPIDocumentContentByDocumentId
|
{
"repo_name": "Rajith90/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/impl/ApisApiServiceImpl.java",
"license": "apache-2.0",
"size": 253126
}
|
[
"javax.ws.rs.core.Response",
"org.apache.cxf.jaxrs.ext.MessageContext",
"org.wso2.carbon.apimgt.api.APIProvider",
"org.wso2.carbon.apimgt.api.model.DocumentationContent",
"org.wso2.carbon.apimgt.rest.api.common.RestApiCommonUtil",
"org.wso2.carbon.apimgt.rest.api.common.RestApiConstants",
"org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil"
] |
import javax.ws.rs.core.Response; import org.apache.cxf.jaxrs.ext.MessageContext; import org.wso2.carbon.apimgt.api.APIProvider; import org.wso2.carbon.apimgt.api.model.DocumentationContent; import org.wso2.carbon.apimgt.rest.api.common.RestApiCommonUtil; import org.wso2.carbon.apimgt.rest.api.common.RestApiConstants; import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil;
|
import javax.ws.rs.core.*; import org.apache.cxf.jaxrs.ext.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.api.model.*; import org.wso2.carbon.apimgt.rest.api.common.*; import org.wso2.carbon.apimgt.rest.api.util.utils.*;
|
[
"javax.ws",
"org.apache.cxf",
"org.wso2.carbon"
] |
javax.ws; org.apache.cxf; org.wso2.carbon;
| 1,753,098
|
public Transition inflateTransition(int resource) {
XmlResourceParser parser = mContext.getResources().getXml(resource);
try {
return createTransitionFromXml(parser, Xml.asAttributeSet(parser), null);
} catch (XmlPullParserException e) {
throw new InflateException(e.getMessage(), e);
} catch (IOException e) {
throw new InflateException(
parser.getPositionDescription() + ": " + e.getMessage(), e);
} finally {
parser.close();
}
}
|
Transition function(int resource) { XmlResourceParser parser = mContext.getResources().getXml(resource); try { return createTransitionFromXml(parser, Xml.asAttributeSet(parser), null); } catch (XmlPullParserException e) { throw new InflateException(e.getMessage(), e); } catch (IOException e) { throw new InflateException( parser.getPositionDescription() + STR + e.getMessage(), e); } finally { parser.close(); } }
|
/**
* Loads a {@link Transition} object from a resource
*
* @param resource The resource id of the transition to load
* @return The loaded Transition object
* @throws android.content.res.Resources.NotFoundException when the
* transition cannot be loaded
*/
|
Loads a <code>Transition</code> object from a resource
|
inflateTransition
|
{
"repo_name": "AndroidX/androidx",
"path": "transition/transition/src/main/java/androidx/transition/TransitionInflater.java",
"license": "apache-2.0",
"size": 14661
}
|
[
"android.content.res.XmlResourceParser",
"android.util.Xml",
"android.view.InflateException",
"java.io.IOException",
"org.xmlpull.v1.XmlPullParserException"
] |
import android.content.res.XmlResourceParser; import android.util.Xml; import android.view.InflateException; import java.io.IOException; import org.xmlpull.v1.XmlPullParserException;
|
import android.content.res.*; import android.util.*; import android.view.*; import java.io.*; import org.xmlpull.v1.*;
|
[
"android.content",
"android.util",
"android.view",
"java.io",
"org.xmlpull.v1"
] |
android.content; android.util; android.view; java.io; org.xmlpull.v1;
| 1,678,806
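A typical call site for inflateTransition looks like the sketch below. TransitionInflater.from and TransitionManager.beginDelayedTransition are standard androidx.transition APIs; the transition resource id passed in is a placeholder, and the surrounding view mutation is only indicated by a comment.

import android.content.Context;
import android.view.ViewGroup;
import androidx.transition.Transition;
import androidx.transition.TransitionInflater;
import androidx.transition.TransitionManager;

public final class TransitionInflateDemo {

    // Inflates a Transition from XML and applies it to the next layout change of 'container'.
    static void animateNextChange(Context context, ViewGroup container, int transitionResId) {
        Transition transition = TransitionInflater.from(context)
                .inflateTransition(transitionResId); // e.g. R.transition.fade (placeholder id)
        TransitionManager.beginDelayedTransition(container, transition);
        // ...mutate the children of 'container' here; the framework animates the difference...
    }
}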
|
public void exitResetMode() throws IOException {
inReset = false;
if (clearMarkFlag ) {
// If a flag was set to clear mark, do the reinit now.
// See clearMark()
reinitialize();
return;
}
if (!fileCache.isActive) {
memCache.reinitialize(false);
}
}
|
void function() throws IOException { inReset = false; if (clearMarkFlag ) { reinitialize(); return; } if (!fileCache.isActive) { memCache.reinitialize(false); } }
|
/**
* This function is called by the ValuesIterator when a mark is called
* outside of a reset zone.
*/
|
This function is called by the ValuesIterator when a mark is called outside of a reset zone
|
exitResetMode
|
{
"repo_name": "dennishuo/hadoop",
"path": "hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/BackupStore.java",
"license": "apache-2.0",
"size": 18966
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,285,960
|
public ArrayList<Block> createBlocks(int textStart, int textEnd, int minNode, int maxNode) {
ArrayList<Block> blocks = new ArrayList<Block>();
int startNode = Math.max(getBlockStart(textStart), minNode);
int endNode = Math.min(getBlockEnd(textEnd), maxNode);
if (DEBUG) {
debug("Creating block: " +
"text pos: " + textStart + "-" + textEnd + "\n" +
"node pos: " + startNode + "-" + endNode + "\n" +
plainText.substring(textStart, textEnd));
}
// Split up the block [start, end) into one or more blocks that
// are well-formed, and begins at a "line" boundary.
int blockStart = -1;
for (int n = startNode; n < endNode;) {
// The node n spans [nBegin, nEnd]
int nBegin = begins.get(n);
int nEnd = ends.get(n);
if (blockStart == -1) {
// Check if this is a valid start node
if (nBegin >= n && nEnd <= endNode &&
canBeginBlockAt(n)) {
blockStart = n;
n = nEnd + 1;
} else {
n++;
}
continue;
}
// If the node [nBegin, nEnd) lies completely within
// the region then proceed to the (nEnd + 1).
if (nBegin >= blockStart && nEnd < endNode) {
n = nEnd + 1;
continue;
}
// If we got here, we have to break up the region into one
// or more blocks because the current node cannot be included
// in the region.
if (DEBUG) {
debug("Forcing new block: " + n + " (" + nBegin + " " + nEnd +
") exceeds (" + blockStart + " " + endNode + ")");
}
Block b = new Block();
b.start_node = blockStart;
b.end_node = n;
blocks.add(b);
blockStart = -1;
n++;
}
// Last block
if (blockStart != -1) {
Block b = new Block();
b.start_node = blockStart;
b.end_node = endNode;
blocks.add(b);
}
if (DEBUG) {
for (int i = 0; i < blocks.size(); i++) {
Block b = blocks.get(i);
debug("Block " + i + "/" + blocks.size() + ": " +
b.start_node + "-" + b.end_node + " " +
getPlainText(b.start_node, b.end_node));
}
}
return blocks;
}
|
ArrayList<Block> function(int textStart, int textEnd, int minNode, int maxNode) { ArrayList<Block> blocks = new ArrayList<Block>(); int startNode = Math.max(getBlockStart(textStart), minNode); int endNode = Math.min(getBlockEnd(textEnd), maxNode); if (DEBUG) { debug(STR + STR + textStart + "-" + textEnd + "\n" + STR + startNode + "-" + endNode + "\n" + plainText.substring(textStart, textEnd)); } int blockStart = -1; for (int n = startNode; n < endNode;) { int nBegin = begins.get(n); int nEnd = ends.get(n); if (blockStart == -1) { if (nBegin >= n && nEnd <= endNode && canBeginBlockAt(n)) { blockStart = n; n = nEnd + 1; } else { n++; } continue; } if (nBegin >= blockStart && nEnd < endNode) { n = nEnd + 1; continue; } if (DEBUG) { debug(STR + n + STR + nBegin + " " + nEnd + STR + blockStart + " " + endNode + ")"); } Block b = new Block(); b.start_node = blockStart; b.end_node = n; blocks.add(b); blockStart = -1; n++; } if (blockStart != -1) { Block b = new Block(); b.start_node = blockStart; b.end_node = endNode; blocks.add(b); } if (DEBUG) { for (int i = 0; i < blocks.size(); i++) { Block b = blocks.get(i); debug(STR + i + "/" + blocks.size() + STR + b.start_node + "-" + b.end_node + " " + getPlainText(b.start_node, b.end_node)); } } return blocks; }
|
/**
* Creates a list of Blocks, given a text-range.
* We may create multiple blocks if one single well-formed Block cannot be
* created.
*
* @param textStart beginning plain-text offset
* @param textEnd ending plain-text offset
* @param minNode the smallest node number
* @param maxNode the largest node number
* @return a list of 0 or more Block objects, never null
*/
|
Creates a list of Blocks, given a text-range. We may create multiple blocks if one single well-formed Block cannot be created
|
createBlocks
|
{
"repo_name": "s20121035/rk3288_android5.1_repo",
"path": "packages/apps/UnifiedEmail/src/com/google/android/mail/common/html/parser/HtmlTree.java",
"license": "gpl-3.0",
"size": 33618
}
|
[
"java.util.ArrayList"
] |
import java.util.ArrayList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 595,614
|
private static String getPatternForCategory( String category )
{
String pattern;
if ( DesignChoiceConstants.STRING_FORMAT_TYPE_UPPERCASE.equals( category ) )
{
pattern = ">"; //$NON-NLS-1$
}
else if ( DesignChoiceConstants.STRING_FORMAT_TYPE_LOWERCASE.equals( category ) )
{
pattern = "<"; //$NON-NLS-1$
}
else if ( DesignChoiceConstants.STRING_FORMAT_TYPE_ZIP_CODE.equals( category ) )
{
pattern = Messages.getString( "FormatStringPage.simpleTexZipCodeFormat" ); //$NON-NLS-1$
// pattern = "@@@@@"; //$NON-NLS-1$
}
else if ( DesignChoiceConstants.STRING_FORMAT_TYPE_ZIP_CODE_4.equals( category ) )
{
pattern = Messages.getString( "FormatStringPage.simpleTexZipCode4Format" ); //$NON-NLS-1$
// pattern = "@@@@@-@@@@"; //$NON-NLS-1$
}
else if ( DesignChoiceConstants.STRING_FORMAT_TYPE_PHONE_NUMBER.equals( category ) )
{
pattern = Messages.getString( "FormatStringPage.phoneNumberFormat" ); //$NON-NLS-1$
// pattern = "(@@@)@@@-@@@@"; //$NON-NLS-1$
}
else if ( DesignChoiceConstants.STRING_FORMAT_TYPE_SOCIAL_SECURITY_NUMBER.equals( category ) )
{
pattern = Messages.getString( "FormatStringPage.securityNumberFormat" ); //$NON-NLS-1$
// pattern = "@@@-@@-@@@@"; //$NON-NLS-1$
}
else if ( category.equals( "^" ) ) //$NON-NLS-1$
{
pattern = category;
}
else
{
pattern = ""; //$NON-NLS-1$
}
return pattern;
}
|
static String function( String category ) { String pattern; if ( DesignChoiceConstants.STRING_FORMAT_TYPE_UPPERCASE.equals( category ) ) { pattern = ">"; } else if ( DesignChoiceConstants.STRING_FORMAT_TYPE_LOWERCASE.equals( category ) ) { pattern = "<"; } else if ( DesignChoiceConstants.STRING_FORMAT_TYPE_ZIP_CODE.equals( category ) ) { pattern = Messages.getString( STR ); } else if ( DesignChoiceConstants.STRING_FORMAT_TYPE_ZIP_CODE_4.equals( category ) ) { pattern = Messages.getString( STR ); } else if ( DesignChoiceConstants.STRING_FORMAT_TYPE_PHONE_NUMBER.equals( category ) ) { pattern = Messages.getString( STR ); } else if ( DesignChoiceConstants.STRING_FORMAT_TYPE_SOCIAL_SECURITY_NUMBER.equals( category ) ) { pattern = Messages.getString( STR ); } else if ( category.equals( "^" ) ) { pattern = category; } else { pattern = ""; } return pattern; }
|
/**
* Retrieves the format pattern for the given format type category.
*
* @param category
* Given format type category.
* @return The corresponding format pattern string.
*/
|
Retrieves the format pattern for the given format type category
|
getPatternForCategory
|
{
"repo_name": "sguan-actuate/birt",
"path": "UI/org.eclipse.birt.report.designer.core/src/org/eclipse/birt/report/designer/util/FormatStringPattern.java",
"license": "epl-1.0",
"size": 3979
}
|
[
"org.eclipse.birt.report.designer.nls.Messages",
"org.eclipse.birt.report.model.api.elements.DesignChoiceConstants"
] |
import org.eclipse.birt.report.designer.nls.Messages; import org.eclipse.birt.report.model.api.elements.DesignChoiceConstants;
|
import org.eclipse.birt.report.designer.nls.*; import org.eclipse.birt.report.model.api.elements.*;
|
[
"org.eclipse.birt"
] |
org.eclipse.birt;
| 1,546,100
|
public void processingInstruction(String target, String data)
throws SAXException
{
if (m_firstTagNotEmitted)
{
flush();
}
m_handler.processingInstruction(target, data);
}
|
void function(String target, String data) throws SAXException { if (m_firstTagNotEmitted) { flush(); } m_handler.processingInstruction(target, data); }
|
/**
* Pass the call on to the underlying handler
* @see org.xml.sax.ContentHandler#processingInstruction(String, String)
*/
|
Pass the call on to the underlying handler
|
processingInstruction
|
{
"repo_name": "mirkosertic/Bytecoder",
"path": "classlib/java.xml/src/main/resources/META-INF/modules/java.xml/classes/com/sun/org/apache/xml/internal/serializer/ToUnknownStream.java",
"license": "apache-2.0",
"size": 38096
}
|
[
"org.xml.sax.SAXException"
] |
import org.xml.sax.SAXException;
|
import org.xml.sax.*;
|
[
"org.xml.sax"
] |
org.xml.sax;
| 584,594
|
interface UpdateStages {
interface WithTags {
Update withTags(Map<String, String> tags);
}
}
|
interface UpdateStages { interface WithTags { Update withTags(Map<String, String> tags); } }
|
/**
* Specifies the tags property: the resource tags.
*
* @param tags The resource tags.
* @return the next definition stage.
*/
|
Specifies the tags property: The resource tags.
|
withTags
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/delegatednetwork/azure-resourcemanager-delegatednetwork/src/main/java/com/azure/resourcemanager/delegatednetwork/models/DelegatedController.java",
"license": "mit",
"size": 6920
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,731,214
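In the generated azure-resourcemanager fluent model, the WithTags stage above is normally reached through update() and committed with apply(); the sketch below assumes that usual pattern and an already-retrieved DelegatedController, so treat the surrounding calls as assumptions rather than part of the record.

import com.azure.resourcemanager.delegatednetwork.models.DelegatedController;
import java.util.Map;

public final class TagUpdateSketch {

    // Replaces the controller's resource tags via the assumed update()/withTags()/apply() flow.
    static void retag(DelegatedController controller) {
        controller.update()
                .withTags(Map.of("env", "dev", "owner", "team-a")) // illustrative tag values
                .apply();
    }
}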
|
public void testOneDest(Connection conn, Session sess, Destination cons_dest, String prod_broker_url, String cons_broker_url, int num_msg) throws Exception {
int echo_id;
EchoService echo_svc;
String echo_queue_name;
Destination prod_dest;
MessageProducer msg_prod;
synchronized (this) {
echo_id = this.nextEchoId;
this.nextEchoId++;
}
echo_queue_name = "echo.queue." + echo_id;
LOG.trace("destroying the echo queue in case an old one exists");
removeQueue(conn, echo_queue_name);
echo_svc = new EchoService(echo_queue_name, prod_broker_url);
echo_svc.start();
LOG.trace("Creating echo queue and producer");
prod_dest = sess.createQueue(echo_queue_name);
msg_prod = sess.createProducer(prod_dest);
testMessages(sess, msg_prod, cons_dest, num_msg);
echo_svc.shutdown();
msg_prod.close();
}
|
void function(Connection conn, Session sess, Destination cons_dest, String prod_broker_url, String cons_broker_url, int num_msg) throws Exception { int echo_id; EchoService echo_svc; String echo_queue_name; Destination prod_dest; MessageProducer msg_prod; synchronized (this) { echo_id = this.nextEchoId; this.nextEchoId++; } echo_queue_name = STR + echo_id; LOG.trace(STR); removeQueue(conn, echo_queue_name); echo_svc = new EchoService(echo_queue_name, prod_broker_url); echo_svc.start(); LOG.trace(STR); prod_dest = sess.createQueue(echo_queue_name); msg_prod = sess.createProducer(prod_dest); testMessages(sess, msg_prod, cons_dest, num_msg); echo_svc.shutdown(); msg_prod.close(); }
|
/**
* Test one destination between the given "producer broker" and
* "consumer broker" specified.
*/
|
Test one destination between the given "producer broker" and "consumer broker" specified
|
testOneDest
|
{
"repo_name": "ryanemerson/activemq-artemis",
"path": "tests/activemq5-unit-tests/src/test/java/org/apache/activemq/bugs/AMQ3274Test.java",
"license": "apache-2.0",
"size": 23419
}
|
[
"javax.jms.Connection",
"javax.jms.Destination",
"javax.jms.MessageProducer",
"javax.jms.Session"
] |
import javax.jms.Connection; import javax.jms.Destination; import javax.jms.MessageProducer; import javax.jms.Session;
|
import javax.jms.*;
|
[
"javax.jms"
] |
javax.jms;
| 149,292
|
public static <V, T> MutableMap<V, BigDecimal> sumByBigDecimal(Iterable<T> iterable, Function<T, V> groupBy, Function<? super T, BigDecimal> function)
{
if (iterable instanceof List)
{
return ListIterate.sumByBigDecimal((List<T>) iterable, groupBy, function);
}
if (iterable != null)
{
return IterableIterate.sumByBigDecimal(iterable, groupBy, function);
}
throw new IllegalArgumentException("Cannot perform an sumByBigDecimal on null");
}
|
static <V, T> MutableMap<V, BigDecimal> function(Iterable<T> iterable, Function<T, V> groupBy, Function<? super T, BigDecimal> function) { if (iterable instanceof List) { return ListIterate.sumByBigDecimal((List<T>) iterable, groupBy, function); } if (iterable != null) { return IterableIterate.sumByBigDecimal(iterable, groupBy, function); } throw new IllegalArgumentException(STR); }
|
/**
* Groups and sums the values of the iterable using the two specified functions.
*
* @since 6.0
*/
|
Groups and sums the values of the iterable using the two specified functions
|
sumByBigDecimal
|
{
"repo_name": "bhav0904/eclipse-collections",
"path": "eclipse-collections/src/main/java/org/eclipse/collections/impl/utility/Iterate.java",
"license": "bsd-3-clause",
"size": 139941
}
|
[
"java.math.BigDecimal",
"java.util.List",
"org.eclipse.collections.api.block.function.Function",
"org.eclipse.collections.api.map.MutableMap",
"org.eclipse.collections.impl.utility.internal.IterableIterate"
] |
import java.math.BigDecimal; import java.util.List; import org.eclipse.collections.api.block.function.Function; import org.eclipse.collections.api.map.MutableMap; import org.eclipse.collections.impl.utility.internal.IterableIterate;
|
import java.math.*; import java.util.*; import org.eclipse.collections.api.block.function.*; import org.eclipse.collections.api.map.*; import org.eclipse.collections.impl.utility.internal.*;
|
[
"java.math",
"java.util",
"org.eclipse.collections"
] |
java.math; java.util; org.eclipse.collections;
| 18,035
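A small usage sketch for Iterate.sumByBigDecimal: the Order record is invented for the illustration, while the Iterate call and the MutableMap result type follow the signature shown in the record above.

import java.math.BigDecimal;
import java.util.List;
import org.eclipse.collections.api.map.MutableMap;
import org.eclipse.collections.impl.utility.Iterate;

public final class SumByBigDecimalDemo {

    // Hypothetical value type used only for this example.
    record Order(String customer, BigDecimal amount) {}

    public static void main(String[] args) {
        List<Order> orders = List.of(
                new Order("alice", new BigDecimal("10.50")),
                new Order("alice", new BigDecimal("4.50")),
                new Order("bob", new BigDecimal("7.00")));

        // Groups the orders by customer and sums the amounts within each group.
        MutableMap<String, BigDecimal> totals =
                Iterate.sumByBigDecimal(orders, Order::customer, Order::amount);

        System.out.println(totals.get("alice")); // 15.00
        System.out.println(totals.get("bob"));   // 7.00
    }
}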
|
public Set<PlayerClientInfo> getPlayerList() {
return playerList;
}
|
Set<PlayerClientInfo> function() { return playerList; }
|
/**
* Gets the list of players from the current server.
*
* @return List of players
*/
|
Gets the list of players from the current server
|
getPlayerList
|
{
"repo_name": "benruijl/WalledIn",
"path": "src/main/java/walledin/game/network/client/Client.java",
"license": "gpl-3.0",
"size": 18543
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 331,435
|
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Void> deleteAsync(String resourceGroupName, String serviceName, String opid, String ifMatch) {
return deleteWithResponseAsync(resourceGroupName, serviceName, opid, ifMatch)
.flatMap((Response<Void> res) -> Mono.empty());
}
|
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> function(String resourceGroupName, String serviceName, String opid, String ifMatch) { return deleteWithResponseAsync(resourceGroupName, serviceName, opid, ifMatch) .flatMap((Response<Void> res) -> Mono.empty()); }
|
/**
* Deletes specific OpenID Connect Provider of the API Management service instance.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param opid Identifier of the OpenID Connect Provider.
* @param ifMatch ETag of the Entity. ETag should match the current entity state from the header response of the GET
* request or it should be * for unconditional update.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
|
Deletes specific OpenID Connect Provider of the API Management service instance
|
deleteAsync
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/apimanagement/azure-resourcemanager-apimanagement/src/main/java/com/azure/resourcemanager/apimanagement/implementation/OpenIdConnectProvidersClientImpl.java",
"license": "mit",
"size": 80530
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response;
|
import com.azure.core.annotation.*; import com.azure.core.http.rest.*;
|
[
"com.azure.core"
] |
com.azure.core;
| 2,761,483
|
public static boolean equals(JRPropertiesMap p1, JRPropertiesMap p2)
{
// treating null and empty as the same
if (p1 == null || !p1.hasProperties())
{
return p2 == null || !p2.hasProperties();
}
if (p2 == null || !p2.hasProperties())
{
return false;
}
String[] names1 = p1.getPropertyNames();
String[] names2 = p2.getPropertyNames();
if (names1.length != names2.length)
{
return false;
}
for (int i = 0; i < names1.length; i++)
{
String name1 = names1[i];
String name2 = names2[i];
if (!equals(name1, name2))
{
return false;
}
String value1 = p1.getProperty(name1);
String value2 = p1.getProperty(name1);
if (!equals(value1, value2))
{
return false;
}
}
return true;
}
|
static boolean function(JRPropertiesMap p1, JRPropertiesMap p2) { if (p1 == null || !p1.hasProperties()) { return p2 == null || !p2.hasProperties(); } if (p2 == null || !p2.hasProperties()) { return false; } String[] names1 = p1.getPropertyNames(); String[] names2 = p2.getPropertyNames(); if (names1.length != names2.length) { return false; } for (int i = 0; i < names1.length; i++) { String name1 = names1[i]; String name2 = names2[i]; if (!equals(name1, name2)) { return false; } String value1 = p1.getProperty(name1); String value2 = p1.getProperty(name1); if (!equals(value1, value2)) { return false; } } return true; }
|
/**
* Determines whether two property sets are identical.
*
* @param p1
* @param p2
* @return whether the two property sets are identical
*/
|
Determines whether two property sets are identical
|
equals
|
{
"repo_name": "aleatorio12/ProVentasConnector",
"path": "jasperreports-6.2.1-project/jasperreports-6.2.1/src/net/sf/jasperreports/engine/util/ObjectUtils.java",
"license": "gpl-3.0",
"size": 8191
}
|
[
"net.sf.jasperreports.engine.JRPropertiesMap"
] |
import net.sf.jasperreports.engine.JRPropertiesMap;
|
import net.sf.jasperreports.engine.*;
|
[
"net.sf.jasperreports"
] |
net.sf.jasperreports;
| 107,726
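A brief usage sketch for the property-map comparison above; the property keys are illustrative strings, and JRPropertiesMap's no-argument constructor and setProperty(String, String) are the standard JasperReports API.

import net.sf.jasperreports.engine.JRPropertiesMap;
import net.sf.jasperreports.engine.util.ObjectUtils;

public final class PropertiesEqualsDemo {

    public static void main(String[] args) {
        JRPropertiesMap a = new JRPropertiesMap();
        a.setProperty("com.example.report.flag", "true"); // illustrative property

        JRPropertiesMap b = new JRPropertiesMap();
        b.setProperty("com.example.report.flag", "true");

        JRPropertiesMap c = new JRPropertiesMap();
        c.setProperty("com.example.report.other", "true");

        // Null and empty maps are treated as identical by the method above;
        // otherwise the property names are compared pairwise in declaration order.
        System.out.println(ObjectUtils.equals(a, b)); // true
        System.out.println(ObjectUtils.equals(a, c)); // false: property names differ
    }
}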
|
public static Bitmap rotateBitmapByDegree(Bitmap bm, int degree) {
if(degree == 0 || degree == -1){
return bm;
}
Bitmap returnBm = null;
// generate the matrix
Matrix matrix = new Matrix();
matrix.postRotate(degree);
try {
returnBm = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
} catch (OutOfMemoryError e) {
e.printStackTrace();
}
if (returnBm == null) {
returnBm = bm;
}
if (bm != returnBm) {
bm.recycle();
}
return returnBm;
}
|
static Bitmap function(Bitmap bm, int degree) { if(degree == 0 || degree == -1){ return bm; } Bitmap returnBm = null; Matrix matrix = new Matrix(); matrix.postRotate(degree); try { returnBm = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true); } catch (OutOfMemoryError e) { e.printStackTrace(); } if (returnBm == null) { returnBm = bm; } if (bm != returnBm) { bm.recycle(); } return returnBm; }
|
/**
 * Rotates a bitmap by the given degree.
 *
 * @param bm the source bitmap
 * @param degree the rotation angle in degrees
 * @return a new bitmap rotated from bm
 */
|
Rotates a bitmap by the given degree
|
rotateBitmapByDegree
|
{
"repo_name": "cowthan/AyoWeibo",
"path": "ayoimageloader/src/main/java/org/ayo/imageloader/VanGogh.java",
"license": "apache-2.0",
"size": 5964
}
|
[
"android.graphics.Bitmap",
"android.graphics.Matrix"
] |
import android.graphics.Bitmap; import android.graphics.Matrix;
|
import android.graphics.*;
|
[
"android.graphics"
] |
android.graphics;
| 1,671,368
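One common caller of rotateBitmapByDegree is EXIF-aware decoding; the sketch below maps the orientation tag to degrees in the conventional way and then delegates to the method from the record. The VanGogh import is inferred from the record's path, and the EXIF handling itself is an assumption, not part of that class.

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.ExifInterface;
import java.io.IOException;
import org.ayo.imageloader.VanGogh; // package inferred from the record's source path

public final class UprightDecodeDemo {

    // Decodes a JPEG file and rotates it upright according to its EXIF orientation tag.
    static Bitmap decodeUpright(String path) throws IOException {
        Bitmap raw = BitmapFactory.decodeFile(path);
        int orientation = new ExifInterface(path)
                .getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
        int degree;
        switch (orientation) {
            case ExifInterface.ORIENTATION_ROTATE_90:  degree = 90;  break;
            case ExifInterface.ORIENTATION_ROTATE_180: degree = 180; break;
            case ExifInterface.ORIENTATION_ROTATE_270: degree = 270; break;
            default:                                   degree = 0;   break;
        }
        return VanGogh.rotateBitmapByDegree(raw, degree);
    }
}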
|
@Bean
@DependsOn("jobRepository")
public JobLauncherWithAdditionalRestartCapabilities jobLauncher() {
this.jobLauncher = new JobLauncherWithAdditionalRestartCapabilities();
try {
this.jobLauncher.setJobRepository(this.jobRepository.getObject());
} catch (Exception e) {
throw new BeanCreationException("Could not create BatchJobOperator", e);
}
return this.jobLauncher;
}
|
@DependsOn(STR) JobLauncherWithAdditionalRestartCapabilities function() { this.jobLauncher = new JobLauncherWithAdditionalRestartCapabilities(); try { this.jobLauncher.setJobRepository(this.jobRepository.getObject()); } catch (Exception e) { throw new BeanCreationException(STR, e); } return this.jobLauncher; }
|
/**
* This method creates the jobLauncher bean.
*
* @return SimpleJobLauncher
*/
|
This method creates the jobLauncher bean
|
jobLauncher
|
{
"repo_name": "SimonHuber/oasp4j",
"path": "samples/core/src/main/java/io/oasp/gastronomy/restaurant/general/configuration/BeansBatchConfig.java",
"license": "apache-2.0",
"size": 5711
}
|
[
"io.oasp.module.batch.common.impl.JobLauncherWithAdditionalRestartCapabilities",
"org.springframework.beans.factory.BeanCreationException",
"org.springframework.context.annotation.DependsOn"
] |
import io.oasp.module.batch.common.impl.JobLauncherWithAdditionalRestartCapabilities; import org.springframework.beans.factory.BeanCreationException; import org.springframework.context.annotation.DependsOn;
|
import io.oasp.module.batch.common.impl.*; import org.springframework.beans.factory.*; import org.springframework.context.annotation.*;
|
[
"io.oasp.module",
"org.springframework.beans",
"org.springframework.context"
] |
io.oasp.module; org.springframework.beans; org.springframework.context;
| 404,489
|
public void testGetTreatingPhysicianFullName() throws Exception{
studySubject.setOtherTreatingPhysician("Steven Chang");
assertEquals("Wrong other treating physician","Steven Chang",studySubject.getTreatingPhysicianFullName());
Investigator investigator = registerMockFor(Investigator.class);
HealthcareSiteInvestigator healthcareSiteInvestigator = registerMockFor(HealthcareSiteInvestigator.class);
healthcareSiteInvestigator.setInvestigator(investigator);
StudyInvestigator treatingPhysician = registerMockFor(StudyInvestigator.class);
treatingPhysician.setHealthcareSiteInvestigator(healthcareSiteInvestigator);
studySubject.setTreatingPhysician(treatingPhysician);
EasyMock.expect(treatingPhysician.getHealthcareSiteInvestigator()).andReturn(healthcareSiteInvestigator);
EasyMock.expect(healthcareSiteInvestigator.getInvestigator()).andReturn(investigator);
EasyMock.expect(investigator.getFullName()).andReturn("Dr. Richard Baker");
replayMocks();
assertEquals("Wrong treating physician","Dr. Richard Baker",studySubject.getTreatingPhysicianFullName());
}
|
void function() throws Exception{ studySubject.setOtherTreatingPhysician(STR); assertEquals(STR,STR,studySubject.getTreatingPhysicianFullName()); Investigator investigator = registerMockFor(Investigator.class); HealthcareSiteInvestigator healthcareSiteInvestigator = registerMockFor(HealthcareSiteInvestigator.class); healthcareSiteInvestigator.setInvestigator(investigator); StudyInvestigator treatingPhysician = registerMockFor(StudyInvestigator.class); treatingPhysician.setHealthcareSiteInvestigator(healthcareSiteInvestigator); studySubject.setTreatingPhysician(treatingPhysician); EasyMock.expect(treatingPhysician.getHealthcareSiteInvestigator()).andReturn(healthcareSiteInvestigator); EasyMock.expect(healthcareSiteInvestigator.getInvestigator()).andReturn(investigator); EasyMock.expect(investigator.getFullName()).andReturn(STR); replayMocks(); assertEquals(STR,STR,studySubject.getTreatingPhysicianFullName()); }
|
/**
* Test get treating physician full name.
*
* @throws Exception the exception
*/
|
Test get treating physician full name
|
testGetTreatingPhysicianFullName
|
{
"repo_name": "NCIP/c3pr",
"path": "codebase/projects/core/test/src/java/edu/duke/cabig/c3pr/domain/StudySubjectTest.java",
"license": "bsd-3-clause",
"size": 170087
}
|
[
"org.easymock.classextension.EasyMock"
] |
import org.easymock.classextension.EasyMock;
|
import org.easymock.classextension.*;
|
[
"org.easymock.classextension"
] |
org.easymock.classextension;
| 2,022,585
|
protected void invokeCustomInitMethod(
String beanName, Object bean, String initMethodName, boolean enforceInitMethod) throws Throwable {
Method initMethod = BeanUtils.findMethod(bean.getClass(), initMethodName, null);
if (initMethod == null) {
if (enforceInitMethod) {
throw new NoSuchMethodException("Couldn't find an init method named '" + initMethodName +
"' on bean with name '" + beanName + "'");
}
else {
if (logger.isDebugEnabled()) {
logger.debug("No default init method named '" + initMethodName +
"' found on bean with name '" + beanName + "'");
}
// Ignore non-existent default lifecycle methods.
return;
}
}
if (logger.isDebugEnabled()) {
logger.debug("Invoking init method '" + initMethodName + "' on bean with name '" + beanName + "'");
}
ReflectionUtils.makeAccessible(initMethod);
try {
initMethod.invoke(bean, (Object[]) null);
}
catch (InvocationTargetException ex) {
throw ex.getTargetException();
}
}
|
void function( String beanName, Object bean, String initMethodName, boolean enforceInitMethod) throws Throwable { Method initMethod = BeanUtils.findMethod(bean.getClass(), initMethodName, null); if (initMethod == null) { if (enforceInitMethod) { throw new NoSuchMethodException(STR + initMethodName + STR + beanName + "'"); } else { if (logger.isDebugEnabled()) { logger.debug(STR + initMethodName + STR + beanName + "'"); } return; } } if (logger.isDebugEnabled()) { logger.debug(STR + initMethodName + STR + beanName + "'"); } ReflectionUtils.makeAccessible(initMethod); try { initMethod.invoke(bean, (Object[]) null); } catch (InvocationTargetException ex) { throw ex.getTargetException(); } }
|
/**
* Invoke the specified custom init method on the given bean.
* Called by invokeInitMethods.
* <p>Can be overridden in subclasses for custom resolution of init
* methods with arguments.
* @param beanName the bean name in the factory (for debugging purposes)
* @param bean the new bean instance we may need to initialize
* @param initMethodName the name of the custom init method
* @param enforceInitMethod indicates whether the defined init method needs to exist
* @see #invokeInitMethods
*/
|
Invoke the specified custom init method on the given bean. Called by invokeInitMethods. Can be overridden in subclasses for custom resolution of init methods with arguments
|
invokeCustomInitMethod
|
{
"repo_name": "cbeams-archive/spring-framework-2.5.x",
"path": "src/org/springframework/beans/factory/support/AbstractAutowireCapableBeanFactory.java",
"license": "apache-2.0",
"size": 59184
}
|
[
"java.lang.reflect.InvocationTargetException",
"java.lang.reflect.Method",
"org.springframework.beans.BeanUtils",
"org.springframework.util.ReflectionUtils"
] |
import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import org.springframework.beans.BeanUtils; import org.springframework.util.ReflectionUtils;
|
import java.lang.reflect.*; import org.springframework.beans.*; import org.springframework.util.*;
|
[
"java.lang",
"org.springframework.beans",
"org.springframework.util"
] |
java.lang; org.springframework.beans; org.springframework.util;
| 647,886
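Stripped of the Spring plumbing (BeanUtils.findMethod, ReflectionUtils.makeAccessible, the enforcement flag), the core of invokeCustomInitMethod is plain reflection. The sketch below shows that core with an invented CacheBean, so everything here is illustrative rather than Spring's actual implementation.

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

public final class InitMethodSketch {

    // Stand-in bean with a no-argument init method, used only for this illustration.
    static class CacheBean {
        boolean warmedUp;
        public void warmUp() { warmedUp = true; }
    }

    // Looks the init method up by name, invokes it, and unwraps the real cause on failure.
    static void invokeInit(Object bean, String initMethodName) throws Throwable {
        Method initMethod = bean.getClass().getMethod(initMethodName);
        try {
            initMethod.invoke(bean);
        } catch (InvocationTargetException ex) {
            throw ex.getTargetException(); // surface the init method's own exception
        }
    }

    public static void main(String[] args) throws Throwable {
        CacheBean bean = new CacheBean();
        invokeInit(bean, "warmUp");
        System.out.println(bean.warmedUp); // true
    }
}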
|
public java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.ScalarProductHLAPI> getSubterm_multisets_ScalarProductHLAPI(){
java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.ScalarProductHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.multisets.hlapi.ScalarProductHLAPI>();
for (Term elemnt : getSubterm()) {
if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.multisets.impl.ScalarProductImpl.class)){
retour.add(new fr.lip6.move.pnml.hlpn.multisets.hlapi.ScalarProductHLAPI(
(fr.lip6.move.pnml.hlpn.multisets.ScalarProduct)elemnt
));
}
}
return retour;
}
|
java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.ScalarProductHLAPI> function(){ java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.ScalarProductHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.multisets.hlapi.ScalarProductHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.multisets.impl.ScalarProductImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.multisets.hlapi.ScalarProductHLAPI( (fr.lip6.move.pnml.hlpn.multisets.ScalarProduct)elemnt )); } } return retour; }
|
/**
* This accessor returns a list of encapsulated subelements, only of ScalarProductHLAPI kind.
* WARNING: this method can create a lot of new objects in memory.
*/
|
This accessor returns a list of encapsulated subelements, only of ScalarProductHLAPI kind. WARNING: this method can create a lot of new objects in memory
|
getSubterm_multisets_ScalarProductHLAPI
|
{
"repo_name": "lhillah/pnmlframework",
"path": "pnmlFw-HLPN/src/fr/lip6/move/pnml/hlpn/lists/hlapi/SublistHLAPI.java",
"license": "epl-1.0",
"size": 111755
}
|
[
"fr.lip6.move.pnml.hlpn.terms.Term",
"java.util.ArrayList",
"java.util.List"
] |
import fr.lip6.move.pnml.hlpn.terms.Term; import java.util.ArrayList; import java.util.List;
|
import fr.lip6.move.pnml.hlpn.terms.*; import java.util.*;
|
[
"fr.lip6.move",
"java.util"
] |
fr.lip6.move; java.util;
| 283,809
|
private void makeGithubSearchQuery() {
String githubQuery = mSearchBoxEditText.getText().toString();
// TODO (17) If no search was entered, indicate that there isn't anything to search for and return
if (githubQuery.isEmpty() || githubQuery == null) {
return;
}
URL githubSearchUrl = NetworkUtils.buildUrl(githubQuery);
mUrlDisplayTextView.setText(githubSearchUrl.toString());
// TODO (18) Remove the call to execute the AsyncTask
// TODO (19) Create a bundle called queryBundle
// TODO (20) Use putString with SEARCH_QUERY_URL_EXTRA as the key and the String value of the URL as the value
Bundle queryBundle = new Bundle();
queryBundle.putString(SEARCH_QUERY_URL_EXTRA, githubQuery.toString());
// TODO (21) Call getSupportLoaderManager and store it in a LoaderManager variable
// TODO (22) Get our Loader by calling getLoader and passing the ID we specified
// TODO (23) If the Loader was null, initialize it. Else, restart it.
LoaderManager loaderManager = getSupportLoaderManager();
Loader loader = loaderManager.getLoader(GITHUB_SEARCH_LOADER);
if (loader == null) {
loaderManager.initLoader(GITHUB_SEARCH_LOADER, queryBundle, this);
} else {
loaderManager.restartLoader(GITHUB_SEARCH_LOADER, queryBundle, this);
}
}
|
void function() { String githubQuery = mSearchBoxEditText.getText().toString(); if (githubQuery.isEmpty() || githubQuery == null) { return; } URL githubSearchUrl = NetworkUtils.buildUrl(githubQuery); mUrlDisplayTextView.setText(githubSearchUrl.toString()); Bundle queryBundle = new Bundle(); queryBundle.putString(SEARCH_QUERY_URL_EXTRA, githubQuery.toString()); LoaderManager loaderManager = getSupportLoaderManager(); Loader loader = loaderManager.getLoader(GITHUB_SEARCH_LOADER); if (loader == null) { loaderManager.initLoader(GITHUB_SEARCH_LOADER, queryBundle, this); } else { loaderManager.restartLoader(GITHUB_SEARCH_LOADER, queryBundle, this); } }
|
/**
* This method retrieves the search text from the EditText, constructs the
* URL (using {@link NetworkUtils}) for the github repository you'd like to find, displays
* that URL in a TextView, and finally requests that an AsyncTaskLoader perform the GET request.
*/
|
This method retrieves the search text from the EditText, constructs the URL (using <code>NetworkUtils</code>) for the github repository you'd like to find, displays that URL in a TextView, and finally requests that an AsyncTaskLoader perform the GET request
|
makeGithubSearchQuery
|
{
"repo_name": "jerrykuo7727/ud851-Exercises",
"path": "Lesson05b-Smarter-GitHub-Repo-Search/T05b.02-Exercise-AddAsyncTaskLoader/app/src/main/java/com/example/android/asynctaskloader/MainActivity.java",
"license": "apache-2.0",
"size": 9309
}
|
[
"android.os.Bundle",
"android.support.v4.app.LoaderManager",
"android.support.v4.content.Loader",
"com.example.android.asynctaskloader.utilities.NetworkUtils"
] |
import android.os.Bundle; import android.support.v4.app.LoaderManager; import android.support.v4.content.Loader; import com.example.android.asynctaskloader.utilities.NetworkUtils;
|
import android.os.*; import android.support.v4.app.*; import android.support.v4.content.*; import com.example.android.asynctaskloader.utilities.*;
|
[
"android.os",
"android.support",
"com.example.android"
] |
android.os; android.support; com.example.android;
| 2,907,809
|
UserAccount createFor(String username, String password, Person person,
List<SimpleGrantedAuthority> authorities, boolean encrypted);
/**
* Searches for a user account
*
* @param username the user name
* @return the account or an empty {@link Optional}
|
UserAccount createFor(String username, String password, Person person, List<SimpleGrantedAuthority> authorities, boolean encrypted); /** * Searches for a user account * * @param username the user name * @return the account or an empty {@link Optional}
|
/**
* Creates a new user account for the given person
*
* @param username the person's username
* @param password the person's password
* @param person the person the account is created for
* @param authorities the authorities the account should possess
* @param encrypted whether the password is already encrypted
* @return the user account
*/
|
Creates a new user account for the given person
|
createFor
|
{
"repo_name": "portux/NAJU-Adebar",
"path": "src/main/java/de/naju/adebar/app/security/user/UserAccountManager.java",
"license": "gpl-3.0",
"size": 3454
}
|
[
"de.naju.adebar.model.human.Person",
"java.util.List",
"java.util.Optional",
"org.springframework.security.core.authority.SimpleGrantedAuthority"
] |
import de.naju.adebar.model.human.Person; import java.util.List; import java.util.Optional; import org.springframework.security.core.authority.SimpleGrantedAuthority;
|
import de.naju.adebar.model.human.*; import java.util.*; import org.springframework.security.core.authority.*;
|
[
"de.naju.adebar",
"java.util",
"org.springframework.security"
] |
de.naju.adebar; java.util; org.springframework.security;
| 126,455
|
public boolean onKeyUp(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK && mSlidingMenu.isMenuShowing()) {
showContent();
return true;
}
return false;
}
|
boolean function(int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_BACK && mSlidingMenu.isMenuShowing()) { showContent(); return true; } return false; }
|
/**
* On key up.
*
* @param keyCode the key code
* @param event the event
* @return true, if successful
*/
|
On key up
|
onKeyUp
|
{
"repo_name": "liangdas/showapp",
"path": "SlidingMenuLib/src/com/slidingmenu/lib/app/SlidingActivityHelper.java",
"license": "apache-2.0",
"size": 6373
}
|
[
"android.view.KeyEvent"
] |
import android.view.KeyEvent;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 2,353,162
|
public Optional<OperatorBackPressureStats> getOperatorBackPressureStats(ExecutionJobVertex vertex) {
synchronized (lock) {
final OperatorBackPressureStats stats = operatorStatsCache.getIfPresent(vertex);
if (stats == null || backPressureStatsRefreshInterval <= System.currentTimeMillis() - stats.getEndTimestamp()) {
triggerBackPressureRequestInternal(vertex);
}
return Optional.ofNullable(stats);
}
}
|
Optional<OperatorBackPressureStats> function(ExecutionJobVertex vertex) { synchronized (lock) { final OperatorBackPressureStats stats = operatorStatsCache.getIfPresent(vertex); if (stats == null || backPressureStatsRefreshInterval <= System.currentTimeMillis() - stats.getEndTimestamp()) { triggerBackPressureRequestInternal(vertex); } return Optional.ofNullable(stats); } }
|
/**
 * Returns back pressure statistics for an operator. Automatically triggers task back pressure
* sampling if statistics are not available or outdated.
*
* @param vertex Operator to get the stats for.
* @return Back pressure statistics for an operator
*/
|
Returns back pressure statistics for an operator. Automatically triggers task back pressure sampling if statistics are not available or outdated
|
getOperatorBackPressureStats
|
{
"repo_name": "tzulitai/flink",
"path": "flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/legacy/backpressure/BackPressureStatsTrackerImpl.java",
"license": "apache-2.0",
"size": 9120
}
|
[
"java.util.Optional",
"org.apache.flink.runtime.executiongraph.ExecutionJobVertex"
] |
import java.util.Optional; import org.apache.flink.runtime.executiongraph.ExecutionJobVertex;
|
import java.util.*; import org.apache.flink.runtime.executiongraph.*;
|
[
"java.util",
"org.apache.flink"
] |
java.util; org.apache.flink;
| 760,710
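A minimal, self-contained sketch of the stale-while-revalidate pattern the method above uses: the cached stats are returned immediately, and a refresh is only triggered when the entry is missing or older than the refresh interval. The StatsCache class, its key/value types, and the refresh hook below are hypothetical stand-ins, not part of the Flink API.

import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

class StatsCache<K, V> {
    // value plus the time it was produced
    private static final class Timed<V> {
        final V value; final long createdAt;
        Timed(V value, long createdAt) { this.value = value; this.createdAt = createdAt; }
    }

    private final Map<K, Timed<V>> cache = new ConcurrentHashMap<>();
    private final long refreshIntervalMs;

    StatsCache(long refreshIntervalMs) { this.refreshIntervalMs = refreshIntervalMs; }

    // Return whatever is cached (possibly empty) and kick off a refresh if missing or stale.
    Optional<V> get(K key, Runnable triggerRefresh) {
        Timed<V> entry = cache.get(key);
        if (entry == null || System.currentTimeMillis() - entry.createdAt >= refreshIntervalMs) {
            triggerRefresh.run();  // asynchronous in the real tracker
        }
        return Optional.ofNullable(entry == null ? null : entry.value);
    }

    void put(K key, V value) { cache.put(key, new Timed<>(value, System.currentTimeMillis())); }

    public static void main(String[] args) {
        StatsCache<String, Integer> cache = new StatsCache<>(60_000);
        System.out.println(cache.get("vertex-1", () -> System.out.println("refresh triggered")));
        cache.put("vertex-1", 42);
        System.out.println(cache.get("vertex-1", () -> System.out.println("refresh triggered")));
    }
}

The caller never blocks on the refresh; it always gets the last known value (or an empty Optional) straight away, which matches the contract documented above.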
|
public List<Number> getDataValues() {
if (dataColumn.getDataType() == DataTypes.Double) {
return ((List<Number>) data);
} else {
List<Number> values = new ArrayList<>();
switch (dataColumn.getDataType()) {
case Integer:
for (int v : (List<Integer>) data) {
values.add(v);
}
break;
case Float:
for (Object v : (List<Object>) data) {
if (v == null){
values.add(Float.NaN);
} else {
values.add((float)v);
}
}
break;
case String:
for (String v : (List<String>)data){
if (v.isEmpty())
values.add(Double.NaN);
else
values.add(Double.parseDouble(v));
}
break;
case Date:
for (Date v : (List<Date>)data){
values.add(DateUtil.toOADate(v));
}
break;
}
return values;
}
}
|
List<Number> function() { if (dataColumn.getDataType() == DataTypes.Double) { return ((List<Number>) data); } else { List<Number> values = new ArrayList<>(); switch (dataColumn.getDataType()) { case Integer: for (int v : (List<Integer>) data) { values.add(v); } break; case Float: for (Object v : (List<Object>) data) { if (v == null){ values.add(Float.NaN); } else { values.add((float)v); } } break; case String: for (String v : (List<String>)data){ if (v.isEmpty()) values.add(Double.NaN); else values.add(Double.parseDouble(v)); } break; case Date: for (Date v : (List<Date>)data){ values.add(DateUtil.toOADate(v)); } break; } return values; } }
|
/**
* Get number data list
*
* @return Number data list
*/
|
Get number data list
|
getDataValues
|
{
"repo_name": "meteoinfo/meteoinfolib",
"path": "src/org/meteoinfo/table/ColumnData.java",
"license": "lgpl-3.0",
"size": 44765
}
|
[
"java.util.ArrayList",
"java.util.Date",
"java.util.List",
"org.meteoinfo.data.DataTypes",
"org.meteoinfo.global.util.DateUtil"
] |
import java.util.ArrayList; import java.util.Date; import java.util.List; import org.meteoinfo.data.DataTypes; import org.meteoinfo.global.util.DateUtil;
|
import java.util.*; import org.meteoinfo.data.*; import org.meteoinfo.global.util.*;
|
[
"java.util",
"org.meteoinfo.data",
"org.meteoinfo.global"
] |
java.util; org.meteoinfo.data; org.meteoinfo.global;
| 659,109
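A short, standalone illustration of the conversion rule described above for string columns (empty cells become NaN, everything else is parsed as a double). It uses plain JDK types only; the toNumbers helper is a hypothetical name, not part of the MeteoInfo API.

import java.util.ArrayList;
import java.util.List;

public class ColumnConversionDemo {
    // Convert a column of raw string cells to numbers, with NaN marking missing values.
    static List<Double> toNumbers(List<String> cells) {
        List<Double> out = new ArrayList<>();
        for (String cell : cells) {
            out.add(cell == null || cell.isEmpty() ? Double.NaN : Double.parseDouble(cell));
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(toNumbers(List.of("1.5", "", "42")));  // [1.5, NaN, 42.0]
    }
}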
|
ApplicationDto application = createApplication();
LogSchemaDto logSchema = createLogSchema(application.getId());
FileData library = client.generateRecordStructureLibrary(application.getId(), logSchema.getMajorVersion());
Assert.assertNotNull(library);
Assert.assertFalse(strIsEmpty(library.getFileName()));
Assert.assertNotNull(library.getData());
}
|
ApplicationDto application = createApplication(); LogSchemaDto logSchema = createLogSchema(application.getId()); FileData library = client.generateRecordStructureLibrary(application.getId(), logSchema.getMajorVersion()); Assert.assertNotNull(library); Assert.assertFalse(strIsEmpty(library.getFileName())); Assert.assertNotNull(library.getData()); }
|
/**
* Test generate record library.
*
* @throws TException the t exception
* @throws IOException Signals that an I/O exception has occurred.
*/
|
Test generate record library
|
testGenerateRecordLibrary
|
{
"repo_name": "kallelzied/kaa",
"path": "server/control/src/test/java/org/kaaproject/kaa/server/control/ControlServerRecordLibraryIT.java",
"license": "apache-2.0",
"size": 3086
}
|
[
"org.junit.Assert",
"org.kaaproject.kaa.common.dto.ApplicationDto",
"org.kaaproject.kaa.common.dto.logs.LogSchemaDto",
"org.kaaproject.kaa.server.common.thrift.gen.control.FileData"
] |
import org.junit.Assert; import org.kaaproject.kaa.common.dto.ApplicationDto; import org.kaaproject.kaa.common.dto.logs.LogSchemaDto; import org.kaaproject.kaa.server.common.thrift.gen.control.FileData;
|
import org.junit.*; import org.kaaproject.kaa.common.dto.*; import org.kaaproject.kaa.common.dto.logs.*; import org.kaaproject.kaa.server.common.thrift.gen.control.*;
|
[
"org.junit",
"org.kaaproject.kaa"
] |
org.junit; org.kaaproject.kaa;
| 465,097
|
@NonNull
Set<String> getAvailableCameraIds();
|
Set<String> getAvailableCameraIds();
|
/**
* Gets the ids of all available cameras.
*
* @return the list of available cameras
*/
|
Gets the ids of all available cameras
|
getAvailableCameraIds
|
{
"repo_name": "AndroidX/androidx",
"path": "camera/camera-core/src/main/java/androidx/camera/core/impl/CameraFactory.java",
"license": "apache-2.0",
"size": 3147
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,415,598
|
public static ImageDescriptor getImageDescriptor(String path)
{
return imageDescriptorFromPlugin(PLUGIN_ID, path);
}
public ScriptingUIPlugin()
{
}
|
static ImageDescriptor function(String path) { return imageDescriptorFromPlugin(PLUGIN_ID, path); } public ScriptingUIPlugin() { }
|
/**
* Returns an image descriptor for the image file at the given plug-in relative path
*
* @param path
* the path
* @return the image descriptor
*/
|
Returns an image descriptor for the image file at the given plug-in relative path
|
getImageDescriptor
|
{
"repo_name": "shakaran/studio3",
"path": "plugins/com.aptana.scripting.ui/src/com/aptana/scripting/ui/ScriptingUIPlugin.java",
"license": "gpl-3.0",
"size": 3376
}
|
[
"org.eclipse.jface.resource.ImageDescriptor"
] |
import org.eclipse.jface.resource.ImageDescriptor;
|
import org.eclipse.jface.resource.*;
|
[
"org.eclipse.jface"
] |
org.eclipse.jface;
| 57,493
|
@VisibleForTesting
static void configureBlockSize(
Table table, Configuration conf) throws IOException {
StringBuilder blockSizeConfigValue = new StringBuilder();
HTableDescriptor tableDescriptor = table.getTableDescriptor();
if (tableDescriptor == null) {
// could happen with mock table instance
return;
}
Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
int i = 0;
for (HColumnDescriptor familyDescriptor : families) {
if (i++ > 0) {
blockSizeConfigValue.append('&');
}
blockSizeConfigValue.append(URLEncoder.encode(
familyDescriptor.getNameAsString(), "UTF-8"));
blockSizeConfigValue.append('=');
blockSizeConfigValue.append(URLEncoder.encode(
String.valueOf(familyDescriptor.getBlocksize()), "UTF-8"));
}
// Get rid of the last ampersand
conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, blockSizeConfigValue.toString());
}
|
static void configureBlockSize( Table table, Configuration conf) throws IOException { StringBuilder blockSizeConfigValue = new StringBuilder(); HTableDescriptor tableDescriptor = table.getTableDescriptor(); if (tableDescriptor == null) { return; } Collection<HColumnDescriptor> families = tableDescriptor.getFamilies(); int i = 0; for (HColumnDescriptor familyDescriptor : families) { if (i++ > 0) { blockSizeConfigValue.append('&'); } blockSizeConfigValue.append(URLEncoder.encode( familyDescriptor.getNameAsString(), "UTF-8")); blockSizeConfigValue.append('='); blockSizeConfigValue.append(URLEncoder.encode( String.valueOf(familyDescriptor.getBlocksize()), "UTF-8")); } conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, blockSizeConfigValue.toString()); }
|
/**
* Serialize column family to block size map to configuration.
* Invoked while configuring the MR job for incremental load.
*
* @param table to read the properties from
* @param conf to persist serialized values into
* @throws IOException
* on failure to read column family descriptors
*/
|
Serialize column family to block size map to configuration. Invoked while configuring the MR job for incremental load
|
configureBlockSize
|
{
"repo_name": "baishuo/hbase-1.0.0-cdh5.4.7_baishuo",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java",
"license": "apache-2.0",
"size": 29732
}
|
[
"java.io.IOException",
"java.net.URLEncoder",
"java.util.Collection",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hbase.HColumnDescriptor",
"org.apache.hadoop.hbase.HTableDescriptor",
"org.apache.hadoop.hbase.client.Table"
] |
import java.io.IOException; import java.net.URLEncoder; import java.util.Collection; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.client.Table;
|
import java.io.*; import java.net.*; import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.*;
|
[
"java.io",
"java.net",
"java.util",
"org.apache.hadoop"
] |
java.io; java.net; java.util; org.apache.hadoop;
| 1,596,595
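Since the method above serialises the per-family block sizes as URL-encoded family=size pairs joined by '&', a decoder for that format looks roughly like the sketch below. This is an illustrative, standalone snippet rather than HBase's own decoder; only java.net.URLDecoder from the JDK is used.

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;

public class BlockSizeConfDemo {
    // Parse "family1=65536&family2=131072" back into a map of family -> block size.
    static Map<String, Integer> parse(String conf) throws UnsupportedEncodingException {
        Map<String, Integer> sizes = new HashMap<>();
        if (conf == null || conf.isEmpty()) {
            return sizes;
        }
        for (String pair : conf.split("&")) {
            String[] kv = pair.split("=", 2);
            sizes.put(URLDecoder.decode(kv[0], "UTF-8"),
                      Integer.parseInt(URLDecoder.decode(kv[1], "UTF-8")));
        }
        return sizes;
    }

    public static void main(String[] args) throws Exception {
        System.out.println(parse("cf1=65536&cf2=131072"));  // prints the two family -> size entries
    }
}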
|
@NonNull
public LiveData<PreviewView.StreamState> getPreviewStreamState() {
return mPreviewView.getPreviewStreamState();
}
|
LiveData<PreviewView.StreamState> function() { return mPreviewView.getPreviewStreamState(); }
|
/**
* Gets the {@link LiveData} of the underlying {@link PreviewView}'s
* {@link PreviewView.StreamState}.
*
* @return A {@link LiveData} containing the {@link PreviewView.StreamState}. Apps can either
 * get current value by {@link LiveData#getValue()} or register an observer by
* {@link LiveData#observe}.
* @see PreviewView#getPreviewStreamState()
*/
|
Gets the <code>LiveData</code> of the underlying <code>PreviewView</code>'s <code>PreviewView.StreamState</code>
|
getPreviewStreamState
|
{
"repo_name": "WhisperSystems/TextSecure",
"path": "app/src/main/java/androidx/camera/view/SignalCameraView.java",
"license": "gpl-3.0",
"size": 27646
}
|
[
"androidx.lifecycle.LiveData"
] |
import androidx.lifecycle.LiveData;
|
import androidx.lifecycle.*;
|
[
"androidx.lifecycle"
] |
androidx.lifecycle;
| 2,089,131
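For context, consuming the returned LiveData usually means registering an observer tied to a lifecycle owner, along the lines of the fragment-side sketch below. This is a hedged usage sketch, not code from the Signal repository; cameraView and the log tag are assumed names.

// Inside a Fragment that hosts the camera view:
cameraView.getPreviewStreamState().observe(getViewLifecycleOwner(), state -> {
    // state is PreviewView.StreamState.IDLE or STREAMING
    Log.d("CameraDemo", "Preview stream state changed to " + state);
});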
|
public java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.CardinalityHLAPI> getSubterm_multisets_CardinalityHLAPI(){
java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.CardinalityHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.multisets.hlapi.CardinalityHLAPI>();
for (Term elemnt : getSubterm()) {
if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.multisets.impl.CardinalityImpl.class)){
retour.add(new fr.lip6.move.pnml.hlpn.multisets.hlapi.CardinalityHLAPI(
(fr.lip6.move.pnml.hlpn.multisets.Cardinality)elemnt
));
}
}
return retour;
}
|
java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.CardinalityHLAPI> function(){ java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.CardinalityHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.multisets.hlapi.CardinalityHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.multisets.impl.CardinalityImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.multisets.hlapi.CardinalityHLAPI( (fr.lip6.move.pnml.hlpn.multisets.Cardinality)elemnt )); } } return retour; }
|
/**
 * This accessor returns a list of encapsulated subelements, only of CardinalityHLAPI kind.
 * WARNING: this method can create a lot of new objects in memory.
*/
|
This accessor returns a list of encapsulated subelements, only of CardinalityHLAPI kind. WARNING: this method can create a lot of new objects in memory
|
getSubterm_multisets_CardinalityHLAPI
|
{
"repo_name": "lhillah/pnmlframework",
"path": "pnmlFw-HLPN/src/fr/lip6/move/pnml/hlpn/finiteIntRanges/hlapi/LessThanOrEqualHLAPI.java",
"license": "epl-1.0",
"size": 108879
}
|
[
"fr.lip6.move.pnml.hlpn.terms.Term",
"java.util.ArrayList",
"java.util.List"
] |
import fr.lip6.move.pnml.hlpn.terms.Term; import java.util.ArrayList; import java.util.List;
|
import fr.lip6.move.pnml.hlpn.terms.*; import java.util.*;
|
[
"fr.lip6.move",
"java.util"
] |
fr.lip6.move; java.util;
| 914,902
|
public Path[] getSelectedRowUUIDs() {
Set<T> selectedRows = selectionModel.getSelectedSet();
// Compatibility with existing API
if ( selectedRows.size() == 0 ) {
return null;
}
// Create the array of Paths
Path[] uuids = new PathImpl[selectedRows.size()];
int rowCount = 0;
for (T row : selectedRows) {
uuids[rowCount++] = row.getPath();
}
return uuids;
}
|
Path[] function() { Set<T> selectedRows = selectionModel.getSelectedSet(); if ( selectedRows.size() == 0 ) { return null; } Path[] uuids = new PathImpl[selectedRows.size()]; int rowCount = 0; for (T row : selectedRows) { uuids[rowCount++] = row.getPath(); } return uuids; }
|
/**
* Return an array of selected Paths. API is maintained for backwards
* compatibility of legacy code with AssetItemGrid's implementation
*
* @return
*/
|
Return an array of selected Paths. API is maintained for backwards compatibility of legacy code with AssetItemGrid's implementation
|
getSelectedRowUUIDs
|
{
"repo_name": "psiroky/guvnor",
"path": "guvnor-webapp-core/src/main/java/org/drools/guvnor/client/widgets/tables/AbstractAssetPagedTable.java",
"license": "apache-2.0",
"size": 10018
}
|
[
"java.util.Set",
"org.drools.guvnor.client.rpc.Path",
"org.drools.guvnor.client.rpc.PathImpl"
] |
import java.util.Set; import org.drools.guvnor.client.rpc.Path; import org.drools.guvnor.client.rpc.PathImpl;
|
import java.util.*; import org.drools.guvnor.client.rpc.*;
|
[
"java.util",
"org.drools.guvnor"
] |
java.util; org.drools.guvnor;
| 2,728,288
|
private static void setMethodAccessible(Method method) {
try {
//
// XXX Default access superclass workaround
//
// When a public class has a default access superclass
// with public methods, these methods are accessible.
// Calling them from compiled code works fine.
//
// Unfortunately, using reflection to invoke these methods
            // seems (wrongly) to prevent access even when the method
            // modifier is public.
            //
            // The following workaround solves the problem but will only
            // work from sufficiently privileged code.
            //
            // Better workarounds would be gratefully accepted.
//
method.setAccessible(true);
} catch (SecurityException se) {
// log but continue just in case the method.invoke works anyway
if (!loggedAccessibleWarning) {
boolean vulnerableJVM = false;
try {
String specVersion = System.getProperty("java.specification.version");
if (specVersion.charAt(0) == '1' &&
(specVersion.charAt(2) == '0' ||
specVersion.charAt(2) == '1' ||
specVersion.charAt(2) == '2' ||
specVersion.charAt(2) == '3')) {
vulnerableJVM = true;
}
} catch (SecurityException e) {
// don't know - so display warning
vulnerableJVM = true;
}
loggedAccessibleWarning = true;
}
}
}
|
static void function(Method method) { try { method.setAccessible(true); } catch (SecurityException se) { if (!loggedAccessibleWarning) { boolean vulnerableJVM = false; try { String specVersion = System.getProperty(STR); if (specVersion.charAt(0) == '1' && (specVersion.charAt(2) == '0' || specVersion.charAt(2) == '1' || specVersion.charAt(2) == '2' || specVersion.charAt(2) == '3')) { vulnerableJVM = true; } } catch (SecurityException e) { vulnerableJVM = true; } loggedAccessibleWarning = true; } } }
|
/**
* Try to make the method accessible
* @param method The source arguments
*/
|
Try to make the method accessible
|
setMethodAccessible
|
{
"repo_name": "OpenSoftwareSolutions/PDFReporter",
"path": "pdfreporter-extensions/src/org/oss/pdfreporter/uses/org/apache/commons/beanutils/MethodUtils.java",
"license": "lgpl-3.0",
"size": 50544
}
|
[
"java.lang.reflect.Method"
] |
import java.lang.reflect.Method;
|
import java.lang.reflect.*;
|
[
"java.lang"
] |
java.lang;
| 1,731,807
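On Java 9 and later, the same "try to open the method, but tolerate failure" intent can be expressed without relying on the exception path, because AccessibleObject.trySetAccessible() reports failure as a boolean. The standalone sketch below is an alternative idiom, not part of the beanutils code above.

import java.lang.reflect.Method;

public class AccessibleDemo {
    // Attempt to make a method accessible; report failure instead of throwing
    // when the module or security configuration forbids it.
    static boolean tryOpen(Method method) {
        try {
            return method.trySetAccessible();  // Java 9+: returns false rather than throwing
        } catch (SecurityException se) {
            return false;  // a SecurityManager may still veto the attempt
        }
    }

    public static void main(String[] args) throws Exception {
        Method m = String.class.getMethod("length");
        System.out.println("accessible: " + tryOpen(m));
    }
}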
|
public LifecycleCallbackType<InterceptorType<T>> createPostConstruct()
{
return new LifecycleCallbackTypeImpl<InterceptorType<T>>(this, "post-construct", childNode);
}
|
LifecycleCallbackType<InterceptorType<T>> function() { return new LifecycleCallbackTypeImpl<InterceptorType<T>>(this, STR, childNode); }
|
/**
* Creates a new <code>post-construct</code> element
* @return the new created instance of <code>LifecycleCallbackType<InterceptorType<T>></code>
*/
|
Creates a new <code>post-construct</code> element
|
createPostConstruct
|
{
"repo_name": "forge/javaee-descriptors",
"path": "impl/src/main/java/org/jboss/shrinkwrap/descriptor/impl/ejbjar32/InterceptorTypeImpl.java",
"license": "epl-1.0",
"size": 60039
}
|
[
"org.jboss.shrinkwrap.descriptor.api.ejbjar32.InterceptorType",
"org.jboss.shrinkwrap.descriptor.api.javaee7.LifecycleCallbackType",
"org.jboss.shrinkwrap.descriptor.impl.javaee7.LifecycleCallbackTypeImpl"
] |
import org.jboss.shrinkwrap.descriptor.api.ejbjar32.InterceptorType; import org.jboss.shrinkwrap.descriptor.api.javaee7.LifecycleCallbackType; import org.jboss.shrinkwrap.descriptor.impl.javaee7.LifecycleCallbackTypeImpl;
|
import org.jboss.shrinkwrap.descriptor.api.ejbjar32.*; import org.jboss.shrinkwrap.descriptor.api.javaee7.*; import org.jboss.shrinkwrap.descriptor.impl.javaee7.*;
|
[
"org.jboss.shrinkwrap"
] |
org.jboss.shrinkwrap;
| 2,413,894
|
public interface OnClose extends Callback<ShardLock> {
OnClose EMPTY = new OnClose() {
@Override
public void handle(ShardLock Lock) {
}
};
}
private static class StoreStatsCache extends SingleObjectCache<StoreStats> {
private final Directory directory;
public StoreStatsCache(TimeValue refreshInterval, Directory directory) throws IOException {
super(refreshInterval, new StoreStats(estimateSize(directory)));
this.directory = directory;
}
|
interface OnClose extends Callback<ShardLock> { OnClose EMPTY = new OnClose() { public void function(ShardLock Lock) { } }; } private static class StoreStatsCache extends SingleObjectCache<StoreStats> { private final Directory directory; public StoreStatsCache(TimeValue refreshInterval, Directory directory) throws IOException { super(refreshInterval, new StoreStats(estimateSize(directory))); this.directory = directory; }
|
/**
* This method is called while the provided {@link org.elasticsearch.env.ShardLock} is held.
* This method is only called once after all resources for a store are released.
*/
|
This method is called while the provided <code>org.elasticsearch.env.ShardLock</code> is held. This method is only called once after all resources for a store are released
|
handle
|
{
"repo_name": "yanjunh/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/index/store/Store.java",
"license": "apache-2.0",
"size": 64125
}
|
[
"java.io.IOException",
"org.apache.lucene.store.Directory",
"org.apache.lucene.store.Lock",
"org.elasticsearch.common.unit.TimeValue",
"org.elasticsearch.common.util.Callback",
"org.elasticsearch.common.util.SingleObjectCache",
"org.elasticsearch.env.ShardLock"
] |
import java.io.IOException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.Lock; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.SingleObjectCache; import org.elasticsearch.env.ShardLock;
|
import java.io.*; import org.apache.lucene.store.*; import org.elasticsearch.common.unit.*; import org.elasticsearch.common.util.*; import org.elasticsearch.env.*;
|
[
"java.io",
"org.apache.lucene",
"org.elasticsearch.common",
"org.elasticsearch.env"
] |
java.io; org.apache.lucene; org.elasticsearch.common; org.elasticsearch.env;
| 2,364,023
|
public void updateItem(long identifier) {
if (mDrawer != null && mAdapter != null && mItemAdapter.getAdapterItems() != null && identifier != -1) {
IDrawerItem drawerItem = DrawerUtils.getDrawerItem(getDrawerItems(), identifier);
for (int i = 0; i < mItemAdapter.getAdapterItems().size(); i++) {
if (mItemAdapter.getAdapterItems().get(i).getIdentifier() == drawerItem.getIdentifier()) {
IDrawerItem miniDrawerItem = generateMiniDrawerItem(drawerItem);
if (miniDrawerItem != null) {
mItemAdapter.set(i, miniDrawerItem);
}
}
}
}
}
|
void function(long identifier) { if (mDrawer != null && mAdapter != null && mItemAdapter.getAdapterItems() != null && identifier != -1) { IDrawerItem drawerItem = DrawerUtils.getDrawerItem(getDrawerItems(), identifier); for (int i = 0; i < mItemAdapter.getAdapterItems().size(); i++) { if (mItemAdapter.getAdapterItems().get(i).getIdentifier() == drawerItem.getIdentifier()) { IDrawerItem miniDrawerItem = generateMiniDrawerItem(drawerItem); if (miniDrawerItem != null) { mItemAdapter.set(i, miniDrawerItem); } } } } }
|
/**
* update a MiniDrawerItem (after updating the main Drawer) via its identifier
*
* @param identifier the identifier of the item which was updated
*/
|
update a MiniDrawerItem (after updating the main Drawer) via its identifier
|
updateItem
|
{
"repo_name": "natodemon/Lunary-Ethereum-Wallet",
"path": "materialdrawer/src/main/java/com/mikepenz/materialdrawer/MiniDrawer.java",
"license": "gpl-3.0",
"size": 19246
}
|
[
"com.mikepenz.materialdrawer.model.interfaces.IDrawerItem"
] |
import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem;
|
import com.mikepenz.materialdrawer.model.interfaces.*;
|
[
"com.mikepenz.materialdrawer"
] |
com.mikepenz.materialdrawer;
| 2,279,253
|
public void setForeColor(Color c) {
LayoutNorthArrow.this.setForeColor(c);
}
|
void function(Color c) { LayoutNorthArrow.this.setForeColor(c); }
|
/**
* Set foreground color
*
* @param c Foreground color
*/
|
Set foreground color
|
setForeColor
|
{
"repo_name": "meteoinfo/meteoinfolib",
"path": "src/org/meteoinfo/layout/LayoutNorthArrow.java",
"license": "lgpl-3.0",
"size": 12864
}
|
[
"java.awt.Color"
] |
import java.awt.Color;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 624,226
|
EAttribute getBeforeCode__Code_1();
|
EAttribute getBeforeCode__Code_1();
|
/**
* Returns the meta object for the attribute '{@link cruise.umple.umple.BeforeCode_#getCode_1 <em>Code 1</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Code 1</em>'.
* @see cruise.umple.umple.BeforeCode_#getCode_1()
* @see #getBeforeCode_()
* @generated
*/
|
Returns the meta object for the attribute '<code>cruise.umple.umple.BeforeCode_#getCode_1 Code 1</code>'.
|
getBeforeCode__Code_1
|
{
"repo_name": "ahmedvc/umple",
"path": "cruise.umple.xtext/src-gen/cruise/umple/umple/UmplePackage.java",
"license": "mit",
"size": 485842
}
|
[
"org.eclipse.emf.ecore.EAttribute"
] |
import org.eclipse.emf.ecore.EAttribute;
|
import org.eclipse.emf.ecore.*;
|
[
"org.eclipse.emf"
] |
org.eclipse.emf;
| 392,213
|
public static TransactionContent<String, Vault<BookableSection>, Set<Seat>> getSeatPrebookCancelContent(final HashSet<Seat> seats) {
TransactionContent<String, Vault<BookableSection>, Set<Seat>> content
= new TransactionContent<String, Vault<BookableSection>, Set<Seat>>() {
/**/
private static final long serialVersionUID = -5015419185790185115L;
|
static TransactionContent<String, Vault<BookableSection>, Set<Seat>> function(final HashSet<Seat> seats) { TransactionContent<String, Vault<BookableSection>, Set<Seat>> content = new TransactionContent<String, Vault<BookableSection>, Set<Seat>>() { /**/ private static final long serialVersionUID = -5015419185790185115L;
|
/**
* Get the {@link TransactionContent} for cancelling prebookings
 * @param seats The seats we wish to cancel
* @return The generated transaction content
*/
|
Get the <code>TransactionContent</code> for cancelling prebookings
|
getSeatPrebookCancelContent
|
{
"repo_name": "balazspete/multi-hop-train-journey-booking",
"path": "src/node/company/TransactionContentGenerator.java",
"license": "mit",
"size": 7143
}
|
[
"java.util.HashSet",
"java.util.Set"
] |
import java.util.HashSet; import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,279,759
|
public T caseAnnotation(Annotation object) {
return null;
}
|
T function(Annotation object) { return null; }
|
/**
* Returns the result of interpreting the object as an instance of '<em>Annotation</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
*
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Annotation</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
|
Returns the result of interpreting the object as an instance of 'Annotation'. This implementation returns null; returning a non-null result will terminate the switch.
|
caseAnnotation
|
{
"repo_name": "sourcepit/common-modeling",
"path": "gen/main/emf/org/sourcepit/common/modeling/util/CommonModelingSwitch.java",
"license": "apache-2.0",
"size": 10248
}
|
[
"org.sourcepit.common.modeling.Annotation"
] |
import org.sourcepit.common.modeling.Annotation;
|
import org.sourcepit.common.modeling.*;
|
[
"org.sourcepit.common"
] |
org.sourcepit.common;
| 2,176,317
|
String getAvailablekey() throws UnexplainableDatabaseServiceException;
|
String getAvailablekey() throws UnexplainableDatabaseServiceException;
|
/**
* Get a brand new random available key.
* @return
*/
|
Get a brand new random available key
|
getAvailablekey
|
{
"repo_name": "clement-elbaz/somedamnmusic",
"path": "src/main/java/com/somedamnmusic/apis/DatabaseService.java",
"license": "apache-2.0",
"size": 1038
}
|
[
"com.somedamnmusic.database.UnexplainableDatabaseServiceException"
] |
import com.somedamnmusic.database.UnexplainableDatabaseServiceException;
|
import com.somedamnmusic.database.*;
|
[
"com.somedamnmusic.database"
] |
com.somedamnmusic.database;
| 1,652,274
|
public Map<String, Double> toNormalizedMap() {
Map<String, Double> ret = this.normalizedResources.toNormalizedMap();
ret.put(Constants.COMMON_OFFHEAP_MEMORY_RESOURCE_NAME, offHeap);
ret.put(Constants.COMMON_ONHEAP_MEMORY_RESOURCE_NAME, onHeap);
return ret;
}
|
Map<String, Double> function() { Map<String, Double> ret = this.normalizedResources.toNormalizedMap(); ret.put(Constants.COMMON_OFFHEAP_MEMORY_RESOURCE_NAME, offHeap); ret.put(Constants.COMMON_ONHEAP_MEMORY_RESOURCE_NAME, onHeap); return ret; }
|
/**
* Convert to a map that is used by configuration and the UI.
* @return a map with the key as the resource name and the value the resource amount.
*/
|
Convert to a map that is used by configuration and the UI
|
toNormalizedMap
|
{
"repo_name": "hmcl/storm-apache",
"path": "storm-server/src/main/java/org/apache/storm/scheduler/resource/normalization/NormalizedResourceRequest.java",
"license": "apache-2.0",
"size": 11203
}
|
[
"java.util.Map",
"org.apache.storm.Constants"
] |
import java.util.Map; import org.apache.storm.Constants;
|
import java.util.*; import org.apache.storm.*;
|
[
"java.util",
"org.apache.storm"
] |
java.util; org.apache.storm;
| 2,074,477
|
@PublicEvolving
public RestartStrategies.RestartStrategyConfiguration getRestartStrategy() {
return config.getRestartStrategy();
}
|
RestartStrategies.RestartStrategyConfiguration function() { return config.getRestartStrategy(); }
|
/**
* Returns the specified restart strategy configuration.
*
* @return The restart strategy configuration to be used
*/
|
Returns the specified restart strategy configuration
|
getRestartStrategy
|
{
"repo_name": "bowenli86/flink",
"path": "flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java",
"license": "apache-2.0",
"size": 60087
}
|
[
"org.apache.flink.api.common.restartstrategy.RestartStrategies"
] |
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
|
import org.apache.flink.api.common.restartstrategy.*;
|
[
"org.apache.flink"
] |
org.apache.flink;
| 1,116,149
|
public static Field[] getClassFields(Class<?> cls) {
List<Field> fields = new ArrayList<Field>();
for (Class<?> c = cls; c != null; c = c.getSuperclass()) {
fields.addAll(Arrays.asList(c.getDeclaredFields()));
}
return fields.toArray(new Field[fields.size()]);
}
|
static Field[] function(Class<?> cls) { List<Field> fields = new ArrayList<Field>(); for (Class<?> c = cls; c != null; c = c.getSuperclass()) { fields.addAll(Arrays.asList(c.getDeclaredFields())); } return fields.toArray(new Field[fields.size()]); }
|
/**
* Returns all fields declared in the specified class (including inherited).
*
* @param cls
* the class which fields need to be returned.
* @return the declared fields.
*/
|
Returns all fields declared in the specified class (including inherited)
|
getClassFields
|
{
"repo_name": "SHAF-WORK/shaf",
"path": "core/src/main/java/org/shaf/core/util/ClassUtils.java",
"license": "apache-2.0",
"size": 21199
}
|
[
"java.lang.reflect.Field",
"java.util.ArrayList",
"java.util.Arrays",
"java.util.List"
] |
import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.List;
|
import java.lang.reflect.*; import java.util.*;
|
[
"java.lang",
"java.util"
] |
java.lang; java.util;
| 2,062,375
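A quick, self-contained usage sketch showing that the superclass walk above picks up inherited fields as well as the ones declared on the class itself. The Base/Child classes are made up for the demo; the traversal mirrors the method shown above.

import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ClassFieldsDemo {
    static class Base { protected int id; }
    static class Child extends Base { private String name; }

    // Same traversal as the getClassFields method above: walk up the superclass chain.
    static Field[] getClassFields(Class<?> cls) {
        List<Field> fields = new ArrayList<>();
        for (Class<?> c = cls; c != null; c = c.getSuperclass()) {
            fields.addAll(Arrays.asList(c.getDeclaredFields()));
        }
        return fields.toArray(new Field[0]);
    }

    public static void main(String[] args) {
        for (Field f : getClassFields(Child.class)) {
            System.out.println(f.getDeclaringClass().getSimpleName() + "." + f.getName());
        }
        // Expected to include Child.name and Base.id (Object declares no fields).
    }
}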
|
public List<RoleName> getRoles() {
List<RoleName> roleNames = new ArrayList<RoleName>();
List<RuleTemplateAttributeBo> templateAttributes = getRuleTemplateAttributes();
for (RuleTemplateAttributeBo templateAttribute : templateAttributes) {
if (!templateAttribute.isWorkflowAttribute())
{
continue;
}
roleNames.addAll(KEWServiceLocator.getWorkflowRuleAttributeMediator().getRoleNames(templateAttribute));
}
return roleNames;
}
|
List<RoleName> function() { List<RoleName> roleNames = new ArrayList<RoleName>(); List<RuleTemplateAttributeBo> templateAttributes = getRuleTemplateAttributes(); for (RuleTemplateAttributeBo templateAttribute : templateAttributes) { if (!templateAttribute.isWorkflowAttribute()) { continue; } roleNames.addAll(KEWServiceLocator.getWorkflowRuleAttributeMediator().getRoleNames(templateAttribute)); } return roleNames; }
|
/**
* Returns a List of Roles from all RoleAttributes attached to this template.
*
* @return list of roles
*/
|
Returns a List of Roles from all RoleAttributes attached to this template
|
getRoles
|
{
"repo_name": "bhutchinson/rice",
"path": "rice-middleware/impl/src/main/java/org/kuali/rice/kew/rule/bo/RuleTemplateBo.java",
"license": "apache-2.0",
"size": 13080
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.kuali.rice.kew.api.rule.RoleName",
"org.kuali.rice.kew.service.KEWServiceLocator"
] |
import java.util.ArrayList; import java.util.List; import org.kuali.rice.kew.api.rule.RoleName; import org.kuali.rice.kew.service.KEWServiceLocator;
|
import java.util.*; import org.kuali.rice.kew.api.rule.*; import org.kuali.rice.kew.service.*;
|
[
"java.util",
"org.kuali.rice"
] |
java.util; org.kuali.rice;
| 1,046,243
|
public MailText getMessage(Locale locale) {
MailText texts = message.get(locale);
if(texts==null) {
texts = new MailText();
message.put(locale,texts);
}
return texts;
}
|
MailText function(Locale locale) { MailText texts = message.get(locale); if(texts==null) { texts = new MailText(); message.put(locale,texts); } return texts; }
|
/**
* Return message in specific language
* (empty message if not present)
*
* @param locale language
* @return the message
*/
|
Return message in specific language (empty message if not present)
|
getMessage
|
{
"repo_name": "zlamalp/perun",
"path": "perun-base/src/main/java/cz/metacentrum/perun/registrar/model/ApplicationMail.java",
"license": "bsd-2-clause",
"size": 6440
}
|
[
"java.util.Locale"
] |
import java.util.Locale;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 943,709
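The get-or-create-on-miss pattern above is exactly what java.util.Map.computeIfAbsent expresses in one call. The standalone sketch below shows the equivalent behaviour, with MailText replaced by a plain StringBuilder so the snippet compiles on its own; it is an illustration of the idiom, not Perun code.

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

public class LazyMessageDemo {
    private final Map<Locale, StringBuilder> message = new HashMap<>();

    // Return the text for the locale, creating an empty one on first access.
    StringBuilder getMessage(Locale locale) {
        return message.computeIfAbsent(locale, l -> new StringBuilder());
    }

    public static void main(String[] args) {
        LazyMessageDemo demo = new LazyMessageDemo();
        demo.getMessage(Locale.ENGLISH).append("Hello");
        System.out.println(demo.getMessage(Locale.ENGLISH));  // Hello (same instance reused)
    }
}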
|
public static void writeFile(Pointer image, String fileName) throws NIVisionException {
Pointer p = new Pointer(fileName.length() + 1);
p.setString(0, fileName);
setWriteFileAllowed(true);
try {
assertCleanStatus(imaqWriteFileFn.call3(image, p, 0)); //zero is unused color table
} finally {
p.free();
}
}
|
static void function(Pointer image, String fileName) throws NIVisionException { Pointer p = new Pointer(fileName.length() + 1); p.setString(0, fileName); setWriteFileAllowed(true); try { assertCleanStatus(imaqWriteFileFn.call3(image, p, 0)); } finally { p.free(); } }
|
/**
* Write an image to the given file.
*
* Supported extensions:
* .aipd or .apd AIPD
* .bmp BMP
* .jpg or .jpeg JPEG
* .jp2 JPEG2000
* .png PNG
* .tif or .tiff TIFF
*
* @param image The image to write to a file.
* @param fileName The name of the destination file.
*/
|
Write an image to the given file. Supported extensions: .aipd or .apd AIPD .bmp BMP .jpg or .jpeg JPEG .jp2 JPEG2000 .png PNG .tif or .tiff TIFF
|
writeFile
|
{
"repo_name": "1684Chimeras/2014Robot",
"path": "2014CompetitionRobot/src/org/chimeras1684/year2014/vision/ARVision.java",
"license": "mit",
"size": 92379
}
|
[
"com.sun.cldc.jna.Pointer",
"edu.wpi.first.wpilibj.image.NIVisionException"
] |
import com.sun.cldc.jna.Pointer; import edu.wpi.first.wpilibj.image.NIVisionException;
|
import com.sun.cldc.jna.*; import edu.wpi.first.wpilibj.image.*;
|
[
"com.sun.cldc",
"edu.wpi.first"
] |
com.sun.cldc; edu.wpi.first;
| 1,315,267
|
public Object authenticate( Request request ) {
for( AuthenticationHandler h : authenticationHandlers ) {
if( h.supports( null, request ) ) {
Object o = h.authenticate( null, request );
if( o == null ) {
log.warn( "authentication failed by AuthenticationHandler:" + h.getClass() );
}
return o;
}
}
if( request.getAuthorization() == null ) {
// note that this is completely normal, so just TRACE
if( log.isTraceEnabled() ) {
log.trace( "No AuthenticationHandler supports this request - no authorisation given in request" );
}
} else {
// authorisation was present in the request, but no handlers accepted it - probably a config problem
if( log.isWarnEnabled() ) {
log.warn( "No AuthenticationHandler supports this request with scheme:" + request.getAuthorization().getScheme() );
}
}
return null;
}
|
Object function( Request request ) { for( AuthenticationHandler h : authenticationHandlers ) { if( h.supports( null, request ) ) { Object o = h.authenticate( null, request ); if( o == null ) { log.warn( STR + h.getClass() ); } return o; } } if( request.getAuthorization() == null ) { if( log.isTraceEnabled() ) { log.trace( STR ); } } else { if( log.isWarnEnabled() ) { log.warn( STR + request.getAuthorization().getScheme() ); } } return null; }
|
/**
* Looks for an AuthenticationHandler which supports the given resource and
* authorization header, and then returns the result of that handler's
* authenticate method.
*
* Returns null if no handlers support the request
*
* @param request
* @return
*/
|
Looks for an AuthenticationHandler which supports the given resource and authorization header, and then returns the result of that handler's authenticate method. Returns null if no handlers support the request
|
authenticate
|
{
"repo_name": "skoulouzis/lobcder",
"path": "milton2/milton-server-ce/src/main/java/io/milton/http/http11/auth/PreAuthenticationFilter.java",
"license": "apache-2.0",
"size": 5899
}
|
[
"io.milton.http.AuthenticationHandler",
"io.milton.http.Request"
] |
import io.milton.http.AuthenticationHandler; import io.milton.http.Request;
|
import io.milton.http.*;
|
[
"io.milton.http"
] |
io.milton.http;
| 1,236,394
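The control flow above is a first-match chain: every handler is asked whether it supports the request, and only the first one that does gets to authenticate. Below is a stripped-down, standalone model of that flow; the Handler interface and the String "request" are placeholders for Milton's real types, not its API.

import java.util.List;

public class HandlerChainDemo {
    interface Handler {
        boolean supports(String request);
        Object authenticate(String request);
    }

    // The first handler that supports the request decides the outcome; later ones are never asked.
    static Object authenticate(List<Handler> handlers, String request) {
        for (Handler h : handlers) {
            if (h.supports(request)) {
                return h.authenticate(request);  // may be null, meaning "supported but failed"
            }
        }
        return null;  // no handler supported the request at all
    }

    public static void main(String[] args) {
        Handler basic = new Handler() {
            public boolean supports(String r) { return r.startsWith("Basic "); }
            public Object authenticate(String r) { return "basic-user"; }
        };
        System.out.println(authenticate(List.of(basic), "Basic abc"));   // basic-user
        System.out.println(authenticate(List.of(basic), "Bearer abc"));  // null
    }
}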
|
public GeneratedValue<T> strategy(String strategy)
{
childNode.attribute("strategy", strategy);
return this;
}
|
GeneratedValue<T> function(String strategy) { childNode.attribute(STR, strategy); return this; }
|
/**
* Sets the <code>strategy</code> attribute
* @param strategy the value for the attribute <code>strategy</code>
* @return the current instance of <code>GeneratedValue<T></code>
*/
|
Sets the <code>strategy</code> attribute
|
strategy
|
{
"repo_name": "forge/javaee-descriptors",
"path": "impl/src/main/java/org/jboss/shrinkwrap/descriptor/impl/orm10/GeneratedValueImpl.java",
"license": "epl-1.0",
"size": 4454
}
|
[
"org.jboss.shrinkwrap.descriptor.api.orm10.GeneratedValue"
] |
import org.jboss.shrinkwrap.descriptor.api.orm10.GeneratedValue;
|
import org.jboss.shrinkwrap.descriptor.api.orm10.*;
|
[
"org.jboss.shrinkwrap"
] |
org.jboss.shrinkwrap;
| 1,800,650
|
private void hideNativeScrollbars() {
m_nativeScrollbarWidth = AbstractNativeScrollbar.getNativeScrollbarWidth();
getScrollableElement().getStyle().setMarginRight(-(m_nativeScrollbarWidth + 10), Unit.PX);
}
|
void function() { m_nativeScrollbarWidth = AbstractNativeScrollbar.getNativeScrollbarWidth(); getScrollableElement().getStyle().setMarginRight(-(m_nativeScrollbarWidth + 10), Unit.PX); }
|
/**
* Hide the native scrollbars. We call this after attaching to ensure that we
* inherit the direction (rtl or ltr).
*/
|
Hide the native scrollbars. We call this after attaching to ensure that we inherit the direction (rtl or ltr)
|
hideNativeScrollbars
|
{
"repo_name": "it-tavis/opencms-core",
"path": "src-gwt/org/opencms/gwt/client/ui/CmsScrollPanelImpl.java",
"license": "lgpl-2.1",
"size": 15710
}
|
[
"com.google.gwt.dom.client.Style",
"com.google.gwt.user.client.ui.AbstractNativeScrollbar"
] |
import com.google.gwt.dom.client.Style; import com.google.gwt.user.client.ui.AbstractNativeScrollbar;
|
import com.google.gwt.dom.client.*; import com.google.gwt.user.client.ui.*;
|
[
"com.google.gwt"
] |
com.google.gwt;
| 1,375,588
|
private NativeObject onLabelPointStyleCallback(NativeObject item) {
// gets callback
TooltipLabelCallback callback = getLabelCallback();
// gets the items
TooltipItem tooltipItem = TooltipItem.FACTORY.create(item);
// checks if callback is consistent
if (callback != null) {
// invokes callback
TooltipLabelPointStyle result = callback.onLabelPointStyle(getChart(), tooltipItem);
// checks if result is consistent
if (result != null) {
return result.getObject();
}
}
// instances to return
TooltipLabelPointStyle defaultPointSTyle = Defaults.get().invokeTooltipsCallbackOnLabelPointStyle(getChart(), tooltipItem);
// checks if instance is consistent
if (defaultPointSTyle != null) {
return defaultPointSTyle.getObject();
}
// default result
return DEFAULT_LABEL_POINT_STYLE.getObject();
}
|
NativeObject function(NativeObject item) { TooltipLabelCallback callback = getLabelCallback(); TooltipItem tooltipItem = TooltipItem.FACTORY.create(item); if (callback != null) { TooltipLabelPointStyle result = callback.onLabelPointStyle(getChart(), tooltipItem); if (result != null) { return result.getObject(); } } TooltipLabelPointStyle defaultPointSTyle = Defaults.get().invokeTooltipsCallbackOnLabelPointStyle(getChart(), tooltipItem); if (defaultPointSTyle != null) { return defaultPointSTyle.getObject(); } return DEFAULT_LABEL_POINT_STYLE.getObject(); }
|
/**
* Manage the LABEL callback invocation
*
* @param item tooltip item
* @return label color object to apply to tooltip item
*/
|
Manage the LABEL callback invocation
|
onLabelPointStyleCallback
|
{
"repo_name": "pepstock-org/Charba",
"path": "src/org/pepstock/charba/client/configuration/TooltipsCallbacks.java",
"license": "apache-2.0",
"size": 22247
}
|
[
"org.pepstock.charba.client.Defaults",
"org.pepstock.charba.client.callbacks.TooltipLabelCallback",
"org.pepstock.charba.client.commons.NativeObject",
"org.pepstock.charba.client.items.TooltipItem",
"org.pepstock.charba.client.items.TooltipLabelPointStyle"
] |
import org.pepstock.charba.client.Defaults; import org.pepstock.charba.client.callbacks.TooltipLabelCallback; import org.pepstock.charba.client.commons.NativeObject; import org.pepstock.charba.client.items.TooltipItem; import org.pepstock.charba.client.items.TooltipLabelPointStyle;
|
import org.pepstock.charba.client.*; import org.pepstock.charba.client.callbacks.*; import org.pepstock.charba.client.commons.*; import org.pepstock.charba.client.items.*;
|
[
"org.pepstock.charba"
] |
org.pepstock.charba;
| 2,271,189
|
protected void update() {
synchronized (taskTrackerManager) {
synchronized (this) {
ClusterStatus clusterStatus = taskTrackerManager.getClusterStatus();
// Recompute locality delay from JobTracker heartbeat interval if enabled.
// This will also lock the JT, so do it outside of a fair scheduler lock.
if (autoComputeLocalityDelay) {
JobTracker jobTracker = (JobTracker) taskTrackerManager;
localityDelay = Math.min(MAX_AUTOCOMPUTED_LOCALITY_DELAY,
(long) (1.5 * jobTracker.getNextHeartbeatInterval()));
}
// Reload allocations file if it hasn't been loaded in a while
poolMgr.reloadAllocsIfNecessary();
// Remove any jobs that have stopped running
List<JobInProgress> toRemove = new ArrayList<JobInProgress>();
for (JobInProgress job: infos.keySet()) {
int runState = job.getStatus().getRunState();
if (runState == JobStatus.SUCCEEDED || runState == JobStatus.FAILED
|| runState == JobStatus.KILLED) {
toRemove.add(job);
}
}
for (JobInProgress job: toRemove) {
infos.remove(job);
poolMgr.removeJob(job);
}
updateRunnability(); // Set job runnability based on user/pool limits
// Update demands of jobs and pools
for (Pool pool: poolMgr.getPools()) {
pool.getMapSchedulable().updateDemand();
pool.getReduceSchedulable().updateDemand();
}
// Compute fair shares based on updated demands
List<PoolSchedulable> mapScheds = getPoolSchedulables(TaskType.MAP);
List<PoolSchedulable> reduceScheds = getPoolSchedulables(TaskType.REDUCE);
SchedulingAlgorithms.computeFairShares(
mapScheds, clusterStatus.getMaxMapTasks());
SchedulingAlgorithms.computeFairShares(
reduceScheds, clusterStatus.getMaxReduceTasks());
// Use the computed shares to assign shares within each pool
for (Pool pool: poolMgr.getPools()) {
pool.getMapSchedulable().redistributeShare();
pool.getReduceSchedulable().redistributeShare();
}
if (preemptionEnabled)
updatePreemptionVariables();
}
}
}
|
void function() { synchronized (taskTrackerManager) { synchronized (this) { ClusterStatus clusterStatus = taskTrackerManager.getClusterStatus(); if (autoComputeLocalityDelay) { JobTracker jobTracker = (JobTracker) taskTrackerManager; localityDelay = Math.min(MAX_AUTOCOMPUTED_LOCALITY_DELAY, (long) (1.5 * jobTracker.getNextHeartbeatInterval())); } poolMgr.reloadAllocsIfNecessary(); List<JobInProgress> toRemove = new ArrayList<JobInProgress>(); for (JobInProgress job: infos.keySet()) { int runState = job.getStatus().getRunState(); if (runState == JobStatus.SUCCEEDED || runState == JobStatus.FAILED || runState == JobStatus.KILLED) { toRemove.add(job); } } for (JobInProgress job: toRemove) { infos.remove(job); poolMgr.removeJob(job); } updateRunnability(); for (Pool pool: poolMgr.getPools()) { pool.getMapSchedulable().updateDemand(); pool.getReduceSchedulable().updateDemand(); } List<PoolSchedulable> mapScheds = getPoolSchedulables(TaskType.MAP); List<PoolSchedulable> reduceScheds = getPoolSchedulables(TaskType.REDUCE); SchedulingAlgorithms.computeFairShares( mapScheds, clusterStatus.getMaxMapTasks()); SchedulingAlgorithms.computeFairShares( reduceScheds, clusterStatus.getMaxReduceTasks()); for (Pool pool: poolMgr.getPools()) { pool.getMapSchedulable().redistributeShare(); pool.getReduceSchedulable().redistributeShare(); } if (preemptionEnabled) updatePreemptionVariables(); } } }
|
/**
* Recompute the internal variables used by the scheduler - per-job weights,
* fair shares, deficits, minimum slot allocations, and numbers of running
* and needed tasks of each type.
*/
|
Recompute the internal variables used by the scheduler - per-job weights, fair shares, deficits, minimum slot allocations, and numbers of running and needed tasks of each type
|
update
|
{
"repo_name": "InMobi/hadoop",
"path": "src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java",
"license": "apache-2.0",
"size": 40286
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.apache.hadoop.mapreduce.TaskType"
] |
import java.util.ArrayList; import java.util.List; import org.apache.hadoop.mapreduce.TaskType;
|
import java.util.*; import org.apache.hadoop.mapreduce.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 2,625,323
|
public void flush() throws IOException {
}
|
void function() throws IOException { }
|
/**
* Flush blob; dummy to satisfy OutputStream class.
*/
|
Flush blob; dummy to satisfy OutputStream class
|
flush
|
{
"repo_name": "hyc/BerkeleyDB",
"path": "lang/sql/jdbc/SQLite/Blob.java",
"license": "agpl-3.0",
"size": 5973
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,599,840
|
public void stopMusicPlayer() {
if (player == null)
return;
player.stop();
player.release();
player = null;
Log.w(TAG, "stopMusicPlayer");
}
|
void function() { if (player == null) return; player.stop(); player.release(); player = null; Log.w(TAG, STR); }
|
/**
* Cleans resources from Android's native MediaPlayer.
*
* @note According to the MediaPlayer guide, you should release
* the MediaPlayer as often as possible.
* For example, when losing Audio Focus for an extended
* period of time.
*/
|
Cleans resources from Android's native MediaPlayer
|
stopMusicPlayer
|
{
"repo_name": "alexdantas/kure-music-player",
"path": "app/src/main/java/com/kure/musicplayer/services/ServicePlayMusic.java",
"license": "gpl-3.0",
"size": 34886
}
|
[
"android.util.Log"
] |
import android.util.Log;
|
import android.util.*;
|
[
"android.util"
] |
android.util;
| 2,878,791
|
private static void autodetectProcessor( CSVColumnConf csvConf, Class<?> type, String prefix )
{
if( String.class.isAssignableFrom(type) ) return;
final CSVFieldProcessorConf processor = new CSVFieldProcessorConf();
final CSVFieldConverterConf converter = new CSVFieldConverterConf();
if( Enum.class.isAssignableFrom(type) )
{
converter.setType( prefix + "Enum" );
converter.getParams().put( "enum-type", type.getName() );
}
else
{
converter.setType( prefix + type.getSimpleName() );
}
processor.setConverter( converter );
csvConf.setProcessor( processor );
}
|
static void function( CSVColumnConf csvConf, Class<?> type, String prefix ) { if( String.class.isAssignableFrom(type) ) return; final CSVFieldProcessorConf processor = new CSVFieldProcessorConf(); final CSVFieldConverterConf converter = new CSVFieldConverterConf(); if( Enum.class.isAssignableFrom(type) ) { converter.setType( prefix + "Enum" ); converter.getParams().put( STR, type.getName() ); } else { converter.setType( prefix + type.getSimpleName() ); } processor.setConverter( converter ); csvConf.setProcessor( processor ); }
|
/**
* Tries to detect the processor that fits the given property type.
* If the given type is an instance of one of the registered types
* it creates the related processor.
*
* @param csvConf configuration to populate.
* @param type type to use for auto detection.
* @param prefix the prefix to use, can be 'parse' or 'format'.
*/
|
Tries to detect the processor that fits the given property type. If the given type is an instance of one of the registered types it creates the related processor
|
autodetectProcessor
|
{
"repo_name": "nerd4j/nerd4j-csv",
"path": "src/main/java/org/nerd4j/csv/conf/mapping/ann/AnnotatedConfigurationFactory.java",
"license": "lgpl-3.0",
"size": 24697
}
|
[
"org.nerd4j.csv.conf.mapping.CSVColumnConf",
"org.nerd4j.csv.conf.mapping.CSVFieldConverterConf",
"org.nerd4j.csv.conf.mapping.CSVFieldProcessorConf"
] |
import org.nerd4j.csv.conf.mapping.CSVColumnConf; import org.nerd4j.csv.conf.mapping.CSVFieldConverterConf; import org.nerd4j.csv.conf.mapping.CSVFieldProcessorConf;
|
import org.nerd4j.csv.conf.mapping.*;
|
[
"org.nerd4j.csv"
] |
org.nerd4j.csv;
| 2,501,922
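To make the naming convention concrete: with prefix "parse", an Integer property resolves to converter type "parseInteger", while any enum resolves to "parseEnum" plus an "enum-type" parameter carrying the enum's class name, and String properties need no converter at all. The tiny standalone sketch below models only that resolution step; the CSV configuration classes are replaced by a plain String result.

public class ConverterNameDemo {
    enum Color { RED, GREEN }

    // Mirror of the auto-detection naming rule: prefix + "Enum" for enums,
    // prefix + simple type name for everything else; Strings need no converter.
    static String converterTypeFor(Class<?> type, String prefix) {
        if (String.class.isAssignableFrom(type)) return null;
        if (Enum.class.isAssignableFrom(type)) return prefix + "Enum";
        return prefix + type.getSimpleName();
    }

    public static void main(String[] args) {
        System.out.println(converterTypeFor(Integer.class, "parse"));  // parseInteger
        System.out.println(converterTypeFor(Color.class, "parse"));    // parseEnum
        System.out.println(converterTypeFor(String.class, "format"));  // null (no converter)
    }
}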
|
@Test
public void whenInvokeDoFilterThenInvokeGetSessionAndChainDoFilter() throws IOException, ServletException {
AuthFilter filter = new AuthFilter();
HttpServletRequest req = mock(HttpServletRequest.class);
HttpServletResponse resp = mock(HttpServletResponse.class);
FilterChain chain = mock(FilterChain.class);
HttpSession session = mock(HttpSession.class);
String login = "login";
when(req.getRequestURI()).thenReturn("");
when(req.getSession()).thenReturn(session);
when(session.getAttribute(login)).thenReturn(login);
when(req.getContextPath()).thenReturn("path");
filter.doFilter(req, resp, chain);
verify(req).getSession();
verify(resp, never()).sendRedirect(String.format("%s/signIn", req.getContextPath()));
verify(chain).doFilter(req, resp);
}
|
void function() throws IOException, ServletException { AuthFilter filter = new AuthFilter(); HttpServletRequest req = mock(HttpServletRequest.class); HttpServletResponse resp = mock(HttpServletResponse.class); FilterChain chain = mock(FilterChain.class); HttpSession session = mock(HttpSession.class); String login = "login"; when(req.getRequestURI()).thenReturn(STR); when(req.getSession()).thenReturn(session); when(session.getAttribute(login)).thenReturn(login); when(req.getContextPath()).thenReturn("path"); filter.doFilter(req, resp, chain); verify(req).getSession(); verify(resp, never()).sendRedirect(String.format("%s/signIn", req.getContextPath())); verify(chain).doFilter(req, resp); }
|
/**
* Test doFilter(ServletRequest, ServletResponse, FilterChain) method.
* @throws IOException - IOException.
* @throws ServletException - ServletException.
*/
|
Test doFilter(ServletRequest, ServletResponse, FilterChain) method
|
whenInvokeDoFilterThenInvokeGetSessionAndChainDoFilter
|
{
"repo_name": "MironovVadim/vmironov",
"path": "chapter_009/src/test/java/ru/job4j/servlets/AuthFilterTest.java",
"license": "apache-2.0",
"size": 3533
}
|
[
"java.io.IOException",
"javax.servlet.FilterChain",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"javax.servlet.http.HttpSession",
"org.mockito.Mockito"
] |
import java.io.IOException; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.mockito.Mockito;
|
import java.io.*; import javax.servlet.*; import javax.servlet.http.*; import org.mockito.*;
|
[
"java.io",
"javax.servlet",
"org.mockito"
] |
java.io; javax.servlet; org.mockito;
| 590,387
|
private TradingDayPrices[] createIncreasingPrices() {
final LocalDate[] dates = { LocalDate.of(2017, 9, 18), LocalDate.of(2017, 9, 19), LocalDate.of(2017, 9, 20),
LocalDate.of(2017, 9, 21), LocalDate.of(2017, 9, 22), LocalDate.of(2017, 9, 25),
LocalDate.of(2017, 9, 26), LocalDate.of(2017, 9, 27), LocalDate.of(2017, 9, 28),
LocalDate.of(2017, 9, 29) };
final double[] close = { 8, 8, 8, 8, 9, 10, 11, 12, 13, 14 };
return createPrices(dates, close);
}
|
TradingDayPrices[] function() { final LocalDate[] dates = { LocalDate.of(2017, 9, 18), LocalDate.of(2017, 9, 19), LocalDate.of(2017, 9, 20), LocalDate.of(2017, 9, 21), LocalDate.of(2017, 9, 22), LocalDate.of(2017, 9, 25), LocalDate.of(2017, 9, 26), LocalDate.of(2017, 9, 27), LocalDate.of(2017, 9, 28), LocalDate.of(2017, 9, 29) }; final double[] close = { 8, 8, 8, 8, 9, 10, 11, 12, 13, 14 }; return createPrices(dates, close); }
|
/**
	 * Ten closing prices that start increasing after the fourth entry, starting at
* LocalDate.of(2017, 9, 18).
*/
|
Ten closing prices that start increasing after the fourth entry, starting at LocalDate.of(2017, 9, 18)
|
createIncreasingPrices
|
{
"repo_name": "CjHare/systematic-trading",
"path": "systematic-trading-maths/src/test/java/com/systematic/trading/maths/indicator/rs/ClosingPriceRelativeStrengthCalculatorTest.java",
"license": "mit",
"size": 13772
}
|
[
"com.systematic.trading.model.price.TradingDayPrices",
"java.time.LocalDate"
] |
import com.systematic.trading.model.price.TradingDayPrices; import java.time.LocalDate;
|
import com.systematic.trading.model.price.*; import java.time.*;
|
[
"com.systematic.trading",
"java.time"
] |
com.systematic.trading; java.time;
| 336,057
|
public static CamelExecutionException wrapCamelExecutionException(Exchange exchange, Throwable e) {
if (e instanceof CamelExecutionException) {
// don't double wrap
return (CamelExecutionException)e;
} else {
return new CamelExecutionException("Exception occurred during execution", exchange, e);
}
}
|
static CamelExecutionException function(Exchange exchange, Throwable e) { if (e instanceof CamelExecutionException) { return (CamelExecutionException)e; } else { return new CamelExecutionException(STR, exchange, e); } }
|
/**
 * Wraps the caused exception in a {@link CamelExecutionException} if it's not
* already such an exception.
*
* @param e the caused exception
* @return the wrapper exception
*/
|
Wraps the caused exception in a <code>CamelExecutionException</code> if it's not already such an exception
|
wrapCamelExecutionException
|
{
"repo_name": "jonmcewen/camel",
"path": "camel-core/src/main/java/org/apache/camel/util/ObjectHelper.java",
"license": "apache-2.0",
"size": 79778
}
|
[
"org.apache.camel.CamelExecutionException",
"org.apache.camel.Exchange"
] |
import org.apache.camel.CamelExecutionException; import org.apache.camel.Exchange;
|
import org.apache.camel.*;
|
[
"org.apache.camel"
] |
org.apache.camel;
| 1,080,822
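The guard above is the usual "wrap once" idiom: if the throwable is already of the target type it is returned as-is, otherwise it becomes the cause of a new wrapper. Below is a generic, standalone version of the same idiom; WrapperException is a made-up exception type, not a Camel class.

public class WrapOnceDemo {
    static class WrapperException extends RuntimeException {
        WrapperException(String message, Throwable cause) { super(message, cause); }
    }

    // Avoid nesting WrapperException inside WrapperException.
    static WrapperException wrap(Throwable e) {
        if (e instanceof WrapperException) {
            return (WrapperException) e;        // already wrapped: pass through unchanged
        }
        return new WrapperException("Exception occurred during execution", e);
    }

    public static void main(String[] args) {
        WrapperException once = wrap(new IllegalStateException("boom"));
        System.out.println(wrap(once) == once);  // true: no double wrapping
    }
}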
|
public static void reduceArrivalRateListNoise(List<ArrivalRateTuple> arrivalRates,
double period) {
ExtractionDataContainer reductionContainer = new ExtractionDataContainer(arrivalRates, period, 0, null, null, null);
reduceArrivalRateListNoise(reductionContainer, true);
}
|
static void function(List<ArrivalRateTuple> arrivalRates, double period) { ExtractionDataContainer reductionContainer = new ExtractionDataContainer(arrivalRates, period, 0, null, null, null); reduceArrivalRateListNoise(reductionContainer, true); }
|
/**
* Reduces noise of a given arrival rate list with an expected seasonal period.
* @param arrivalRates The arrival rates for which to reduce noise.
* @param period Seasonal period within the arrival rate list.
*/
|
Reduces noise of a given arrival rate list with an expected seasonal period
|
reduceArrivalRateListNoise
|
{
"repo_name": "joakimkistowski/LIMBO",
"path": "dlim.generator/src/tools/descartes/dlim/extractor/ModelExtractor.java",
"license": "epl-1.0",
"size": 25846
}
|
[
"java.util.List",
"tools.descartes.dlim.extractor.utils.ExtractionDataContainer",
"tools.descartes.dlim.generator.ArrivalRateTuple"
] |
import java.util.List; import tools.descartes.dlim.extractor.utils.ExtractionDataContainer; import tools.descartes.dlim.generator.ArrivalRateTuple;
|
import java.util.*; import tools.descartes.dlim.extractor.utils.*; import tools.descartes.dlim.generator.*;
|
[
"java.util",
"tools.descartes.dlim"
] |
java.util; tools.descartes.dlim;
| 1,882,516
|
public Participant getParticipant ( String root )
{
return service_.getParticipant ( root );
}
|
Participant function ( String root ) { return service_.getParticipant ( root ); }
|
/**
* Get the participant for the given root. Needed for recovery of JCA
* inbound transactions.
*
* @param root
* @return The participant.
*/
|
Get the participant for the given root. Needed for recovery of JCA inbound transactions
|
getParticipant
|
{
"repo_name": "hmalphettes/atomikos-essentials-3.5.8-osgified-sandbox",
"path": "com.atomikos.transactions/src/com/atomikos/icatch/imp/BaseTransactionManager.java",
"license": "apache-2.0",
"size": 15492
}
|
[
"com.atomikos.icatch.Participant"
] |
import com.atomikos.icatch.Participant;
|
import com.atomikos.icatch.*;
|
[
"com.atomikos.icatch"
] |
com.atomikos.icatch;
| 1,689,250
|
protected void removeServices(final OperationContext context, final ServiceName parentService, final ModelNode parentModel) throws OperationFailedException {
context.removeService(parentService);
}
|
void function(final OperationContext context, final ServiceName parentService, final ModelNode parentModel) throws OperationFailedException { context.removeService(parentService); }
|
/**
* Removes services. This default implementation simply
* {@link OperationContext#removeService(ServiceController) instructs the context to remove the parentService}.
* Subclasses could use the provided {@code parentModel} to identify and remove other services.
*
* @param context the operation context
* @param parentService the name of the parent service
* @param parentModel the model associated with the parent resource, including nodes for any child resources
*
* @throws OperationFailedException if there is a problem removing the services
*/
|
Removes services. This default implementation simply <code>OperationContext#removeService(ServiceController) instructs the context to remove the parentService</code>. Subclasses could use the provided parentModel to identify and remove other services
|
removeServices
|
{
"repo_name": "jamezp/wildfly-core",
"path": "controller/src/main/java/org/jboss/as/controller/RestartParentResourceHandlerBase.java",
"license": "lgpl-2.1",
"size": 9803
}
|
[
"org.jboss.dmr.ModelNode",
"org.jboss.msc.service.ServiceName"
] |
import org.jboss.dmr.ModelNode; import org.jboss.msc.service.ServiceName;
|
import org.jboss.dmr.*; import org.jboss.msc.service.*;
|
[
"org.jboss.dmr",
"org.jboss.msc"
] |
org.jboss.dmr; org.jboss.msc;
| 1,362,320
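A minimal sketch of the kind of subclass override the Javadoc hints at (other methods a RestartParentResourceHandlerBase subclass must implement are omitted). The "cache" child resource node and service suffix are purely illustrative assumptions, not part of the WildFly API.

// Sketch of an override that removes a hypothetical child service as well.
@Override
protected void removeServices(final OperationContext context, final ServiceName parentService,
        final ModelNode parentModel) throws OperationFailedException {
    // Keep the default behavior: remove the parent service itself.
    super.removeServices(context, parentService, parentModel);
    // Use the parent model to decide whether an additional service needs removal.
    if (parentModel.hasDefined("cache")) {                     // hypothetical child resource node
        context.removeService(parentService.append("cache"));  // hypothetical child service name
    }
}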
|
private int
getEndIndentations(Region prevLine, Region line, Region nextLine) {
int compareToPrevLineEnd = compareLinePositions(prevLine, line, true);
int compareToNextLineEnd = compareLinePositions(line, nextLine, true);
int comparePrevToNextLineEnd =
compareLinePositions(prevLine, nextLine, true);
if (compareToPrevLineEnd == 0 && compareToNextLineEnd == 1) {
return 1;
} else if (compareToPrevLineEnd == 1 && compareToNextLineEnd == -1
&& comparePrevToNextLineEnd == 0) {
return 2;
} else if (compareToPrevLineEnd == 1 && compareToNextLineEnd == 0) {
return 3;
} else if (compareToPrevLineEnd == -1 && compareToNextLineEnd == 0) {
return 4;
} else if (compareToPrevLineEnd == -1 && compareToNextLineEnd == 1) {
return 5;
} else if (compareToPrevLineEnd == 0 && compareToNextLineEnd == -1) {
return 6;
} else {
return 0;
}
}
|
int function(Region prevLine, Region line, Region nextLine) { int compareToPrevLineEnd = compareLinePositions(prevLine, line, true); int compareToNextLineEnd = compareLinePositions(line, nextLine, true); int comparePrevToNextLineEnd = compareLinePositions(prevLine, nextLine, true); if (compareToPrevLineEnd == 0 && compareToNextLineEnd == 1) { return 1; } else if (compareToPrevLineEnd == 1 && compareToNextLineEnd == -1 && comparePrevToNextLineEnd == 0) { return 2; } else if (compareToPrevLineEnd == 1 && compareToNextLineEnd == 0) { return 3; } else if (compareToPrevLineEnd == -1 && compareToNextLineEnd == 0) { return 4; } else if (compareToPrevLineEnd == -1 && compareToNextLineEnd == 1) { return 5; } else if (compareToPrevLineEnd == 0 && compareToNextLineEnd == -1) { return 6; } else { return 0; } }
|
/**
 * Compares the end indentations of three lines. The return value has to be
 * interpreted as a binary value: if the return value is 6 = 110, the previous
 * and the current line are indented (but not the next line).
*
* @param prevLine
* the previous line.
* @param line
* the line to analyze.
* @param nextLine
* the next line.
 * @return an integer identifying the end indentations of the three lines.
*/
|
Compares the end indentations of three lines. The return value has to be interpreted as a binary value: if the return value is 6 = 110, the previous and the current line are indented (but not the next line)
|
getEndIndentations
|
{
"repo_name": "ckorzen/icecite",
"path": "pdf-machine/src/main/java/de/freiburg/iif/extraction/references/ReferencesMetadataMatcher.java",
"license": "apache-2.0",
"size": 55410
}
|
[
"de.freiburg.iif.model.Region"
] |
import de.freiburg.iif.model.Region;
|
import de.freiburg.iif.model.*;
|
[
"de.freiburg.iif"
] |
de.freiburg.iif;
| 2,900,781
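A short worked illustration of the binary reading described in the Javadoc above. The bit-to-line mapping (previous, current, next from most to least significant bit) follows the "6 = 110" example and is an interpretation, not code from the original project.

// Illustrative decoding of the end-indentation code per the "6 = 110" reading.
public class EndIndentationDecodeSketch {
    public static void main(String[] args) {
        int code = 6; // e.g. a value returned by getEndIndentations(prevLine, line, nextLine)
        boolean prevIndented = (code & 0b100) != 0; // most significant bit: previous line
        boolean currIndented = (code & 0b010) != 0; // middle bit: current line
        boolean nextIndented = (code & 0b001) != 0; // least significant bit: next line
        System.out.printf("prev=%b, curr=%b, next=%b%n", prevIndented, currIndented, nextIndented);
        // For code == 6 this prints prev=true, curr=true, next=false.
    }
}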
|
public final synchronized void requestCompleted(final HttpContext context) {
if (this.completed) {
return;
}
this.completed = true;
try {
this.result = buildResult(context);
} catch (final Exception ex) {
this.ex = ex;
} finally {
releaseResources();
}
}
|
final synchronized void function(final HttpContext context) { if (this.completed) { return; } this.completed = true; try { this.result = buildResult(context); } catch (final Exception ex) { this.ex = ex; } finally { releaseResources(); } }
|
/**
* Use {@link #buildResult(HttpContext)} instead.
*/
|
Use <code>#buildResult(HttpContext)</code> instead
|
requestCompleted
|
{
"repo_name": "vuzzan/openclinic",
"path": "src/org/apache/http/nio/protocol/AbstractAsyncRequestConsumer.java",
"license": "apache-2.0",
"size": 6599
}
|
[
"org.apache.http.protocol.HttpContext"
] |
import org.apache.http.protocol.HttpContext;
|
import org.apache.http.protocol.*;
|
[
"org.apache.http"
] |
org.apache.http;
| 1,130,819
|
void invoke(ClientType clientType) {
try {
LOGGER.info("Starting service.");
ExecutorService service = ClientServiceFactory.startClientService(_system, clientType, _jobCounter);
LOGGER.info("Service started.");
Thread currentThread = Thread.currentThread();
while (!currentThread.isInterrupted()) {
try {
Thread.sleep(500);
} catch (InterruptedException ex) {
currentThread.interrupt();
break;
}
}
LOGGER.info("Stopping service.");
service.shutdownNow();
try {
if (!service.awaitTermination(60000, TimeUnit.MILLISECONDS)) {
LOGGER.warn("Shutdown timed out after 60 seconds. Exiting.");
}
} catch (InterruptedException iex) {
LOGGER.warn("Forcing shutdown.");
}
LOGGER.info("Service stopped.");
} catch (Exception ex) {
throw new SystemException("There was a problem invoking the committer.", ex);
} finally {
if (_system != null) {
_system.stop();
}
LOGGER.info("Finished");
} // end try-catch-finally
}
}
|
void invoke(ClientType clientType) { try { LOGGER.info(STR); ExecutorService service = ClientServiceFactory.startClientService(_system, clientType, _jobCounter); LOGGER.info(STR); Thread currentThread = Thread.currentThread(); while (!currentThread.isInterrupted()) { try { Thread.sleep(500); } catch (InterruptedException ex) { currentThread.interrupt(); break; } } LOGGER.info(STR); service.shutdownNow(); try { if (!service.awaitTermination(60000, TimeUnit.MILLISECONDS)) { LOGGER.warn(STR); } } catch (InterruptedException iex) { LOGGER.warn(STR); } LOGGER.info(STR); } catch (Exception ex) { throw new SystemException(STR, ex); } finally { if (_system != null) { _system.stop(); } LOGGER.info(STR); } } }
|
/**
 * This method creates multiple threads to dequeue messages from the message queue and push them into TSDB. One thread is created for every type. Depending on
 * the message queue connection count, multiple threads may be created for the Metric type.
*
* @param clientType The type of client to invoke.
*
* @throws SystemException if an error occurs
*/
|
This method creates multiple threads to dequeue messages from the message queue and push them into TSDB. One thread is created for every type. Depending on the message queue connection count, multiple threads may be created for the Metric type
|
invoke
|
{
"repo_name": "SalesforceEng/Argus",
"path": "ArgusClient/src/main/java/com/salesforce/dva/argus/client/Main.java",
"license": "bsd-3-clause",
"size": 10947
}
|
[
"com.salesforce.dva.argus.system.SystemException",
"java.util.concurrent.ExecutorService",
"java.util.concurrent.TimeUnit"
] |
import com.salesforce.dva.argus.system.SystemException; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit;
|
import com.salesforce.dva.argus.system.*; import java.util.concurrent.*;
|
[
"com.salesforce.dva",
"java.util"
] |
com.salesforce.dva; java.util;
| 660,580
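A self-contained sketch of the executor shutdown idiom the method above relies on (shutdownNow followed by a bounded awaitTermination). The pool size, worker body, and class name are invented for illustration and are not part of the Argus codebase.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownIdiomSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService service = Executors.newFixedThreadPool(4); // illustrative pool size
        for (int i = 0; i < 4; i++) {
            service.submit(() -> {
                // Long-running worker that respects interruption, analogous to a dequeue loop.
                while (!Thread.currentThread().isInterrupted()) {
                    try {
                        Thread.sleep(500);
                    } catch (InterruptedException ex) {
                        Thread.currentThread().interrupt();
                    }
                }
            });
        }
        // Request an immediate stop, then wait a bounded time for in-flight work to finish.
        service.shutdownNow();
        if (!service.awaitTermination(60000, TimeUnit.MILLISECONDS)) {
            System.err.println("Shutdown timed out after 60 seconds.");
        }
    }
}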
|