method stringlengths 13 441k | clean_method stringlengths 7 313k | doc stringlengths 17 17.3k | comment stringlengths 3 1.42k | method_name stringlengths 1 273 | extra dict | imports list | imports_info stringlengths 19 34.8k | cluster_imports_info stringlengths 15 3.66k | libraries list | libraries_info stringlengths 6 661 | id int64 0 2.92M |
|---|---|---|---|---|---|---|---|---|---|---|---|
private static int quantityCompareTo(IQuantity q1, IQuantity q2) {
try {
if (!q1.unit().equals(q2.unit())) {
org.matheclipse.core.tensor.qty.UnitConvert unitConvert =
org.matheclipse.core.tensor.qty.UnitConvert.SI();
q2 = (IQuantity) unitConvert.to(q1.unit()).apply(q2);
}
if (q1.unit().equals(q2.unit())) {
return q1.value().compareTo(q2.value());
}
} catch (RuntimeException rex) {
//
}
return Integer.MIN_VALUE;
} | static int function(IQuantity q1, IQuantity q2) { try { if (!q1.unit().equals(q2.unit())) { org.matheclipse.core.tensor.qty.UnitConvert unitConvert = org.matheclipse.core.tensor.qty.UnitConvert.SI(); q2 = (IQuantity) unitConvert.to(q1.unit()).apply(q2); } if (q1.unit().equals(q2.unit())) { return q1.value().compareTo(q2.value()); } } catch (RuntimeException rex) { return Integer.MIN_VALUE; } | /**
* If the <code>IQuantity#compareTo()</code> method could be executed because the same unit types
* could be used for comparison, return the result <code>-1, 0 or 1</code> otherwise return <code>
* Integer.MIN_VALUE</code>
*
* @param q1
* @param q2
* @return <code>Integer.MIN_VALUE</code> if the <code>compareTo()</code> method could not be
* executed, because of different unit types
*/ | If the <code>IQuantity#compareTo()</code> method could be executed because the same unit types could be used for comparison, return the result <code>-1, 0 or 1</code> otherwise return <code> Integer.MIN_VALUE</code> | quantityCompareTo | {
"repo_name": "axkr/symja_android_library",
"path": "symja_android_library/matheclipse-core/src/main/java/org/matheclipse/core/builtin/BooleanFunctions.java",
"license": "gpl-3.0",
"size": 131754
} | [
"org.matheclipse.core.tensor.qty.IQuantity"
] | import org.matheclipse.core.tensor.qty.IQuantity; | import org.matheclipse.core.tensor.qty.*; | [
"org.matheclipse.core"
] | org.matheclipse.core; | 1,419,970 |
@Nullable
@SuppressWarnings("unchecked")
public T get(SkyFunction.Environment env) throws InterruptedException {
PrecomputedValue value = (PrecomputedValue) env.getValue(key);
if (value == null) {
return null;
}
return (T) value.get();
} | @SuppressWarnings(STR) T function(SkyFunction.Environment env) throws InterruptedException { PrecomputedValue value = (PrecomputedValue) env.getValue(key); if (value == null) { return null; } return (T) value.get(); } | /**
* Retrieves the value of this variable from Skyframe.
*
* <p>If the value was not set, an exception will be raised.
*/ | Retrieves the value of this variable from Skyframe. If the value was not set, an exception will be raised | get | {
"repo_name": "spxtr/bazel",
"path": "src/main/java/com/google/devtools/build/lib/skyframe/PrecomputedValue.java",
"license": "apache-2.0",
"size": 6599
} | [
"com.google.devtools.build.skyframe.SkyFunction"
] | import com.google.devtools.build.skyframe.SkyFunction; | import com.google.devtools.build.skyframe.*; | [
"com.google.devtools"
] | com.google.devtools; | 2,879,264 |
@Test
public void testExportPartPartial() throws Exception {
int[][] rows1 = {{1, 2, 1, 1}, {3, 4, 2, 2}, {5, 6, 1, 2}, {7, 8, 2, 2}};
runStatementOnDriver("drop table if exists T");
runStatementOnDriver("drop table if exists TImport ");
runStatementOnDriver("create table TImport (a int, b int) partitioned by (p int, q int) " +
"stored as ORC TBLPROPERTIES ('transactional'='false')");
runStatementOnDriver("create table T (a int, b int) partitioned by (p int, q int) stored as " +
"ORC");
runStatementOnDriver("insert into T partition(p,q)" + makeValuesClause(rows1));
runStatementOnDriver("export table T partition(p=1) to '" + getTestDataDir() + "/export'");
runStatementOnDriver("import table TImport from '" + getTestDataDir() + "/export'");
List<String> rs1 = runStatementOnDriver("select * from TImport order by a, b");
int[][] res = {{1, 2, 1, 1}, {5, 6, 1, 2}};
Assert.assertEquals("Content didn't match rs", stringifyValues(res), rs1);
} | void function() throws Exception { int[][] rows1 = {{1, 2, 1, 1}, {3, 4, 2, 2}, {5, 6, 1, 2}, {7, 8, 2, 2}}; runStatementOnDriver(STR); runStatementOnDriver(STR); runStatementOnDriver(STR + STR); runStatementOnDriver(STR + "ORC"); runStatementOnDriver(STR + makeValuesClause(rows1)); runStatementOnDriver(STR + getTestDataDir() + STR); runStatementOnDriver(STR + getTestDataDir() + STR); List<String> rs1 = runStatementOnDriver(STR); int[][] res = {{1, 2, 1, 1}, {5, 6, 1, 2}}; Assert.assertEquals(STR, stringifyValues(res), rs1); } | /**
* Export partitioned table with partial partition spec.
*/ | Export partitioned table with partial partition spec | testExportPartPartial | {
"repo_name": "b-slim/hive",
"path": "ql/src/test/org/apache/hadoop/hive/ql/TestTxnExIm.java",
"license": "apache-2.0",
"size": 26242
} | [
"java.util.List",
"org.junit.Assert"
] | import java.util.List; import org.junit.Assert; | import java.util.*; import org.junit.*; | [
"java.util",
"org.junit"
] | java.util; org.junit; | 2,623,360 |
Logger logger = Logger.getLogger();
logger.log(Log.DEBUG, "ReceiveTransitionsIntentService - onHandleIntent");
Intent broadcastIntent = new Intent(GeofenceTransitionIntent);
notifier = new GeoNotificationNotifier(
(NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE),
this
);
// First check for errors
GeofencingEvent geofencingEvent = GeofencingEvent.fromIntent(intent);
if (geofencingEvent.hasError()) {
// Get the error code with a static method
int errorCode = geofencingEvent.getErrorCode();
String error = "Location Services error: " + Integer.toString(errorCode);
// Log the error
logger.log(Log.ERROR, error);
broadcastIntent.putExtra("error", error);
} else {
// Get the type of transition (entry or exit)
int transitionType = geofencingEvent.getGeofenceTransition();
if ((transitionType == Geofence.GEOFENCE_TRANSITION_ENTER)
|| (transitionType == Geofence.GEOFENCE_TRANSITION_EXIT)) {
logger.log(Log.DEBUG, "Geofence transition detected");
List<Geofence> triggerList = geofencingEvent.getTriggeringGeofences();
List<GeoNotification> geoNotifications = new ArrayList<GeoNotification>();
for (Geofence fence : triggerList) {
String fenceId = fence.getRequestId();
GeoNotification geoNotification = store
.getGeoNotification(fenceId);
if (geoNotification != null) {
if (geoNotification.notification != null) {
if(geoNotification.isScheduled() && geoNotification.isFrequencyOk()){
geoNotification.notification.setLastTriggered();
store.setGeoNotification(geoNotification);
notifier.notify(geoNotification.notification);
}
}
geoNotification.transitionType = transitionType;
geoNotifications.add(geoNotification);
}
}
if (geoNotifications.size() > 0) {
broadcastIntent.putExtra("transitionData", Gson.get().toJson(geoNotifications));
GeofencePlugin.onTransitionReceived(geoNotifications);
}
} else {
String error = "Geofence transition error: " + transitionType;
logger.log(Log.ERROR, error);
broadcastIntent.putExtra("error", error);
}
}
sendBroadcast(broadcastIntent);
} | Logger logger = Logger.getLogger(); logger.log(Log.DEBUG, STR); Intent broadcastIntent = new Intent(GeofenceTransitionIntent); notifier = new GeoNotificationNotifier( (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE), this ); GeofencingEvent geofencingEvent = GeofencingEvent.fromIntent(intent); if (geofencingEvent.hasError()) { int errorCode = geofencingEvent.getErrorCode(); String error = STR + Integer.toString(errorCode); logger.log(Log.ERROR, error); broadcastIntent.putExtra("error", error); } else { int transitionType = geofencingEvent.getGeofenceTransition(); if ((transitionType == Geofence.GEOFENCE_TRANSITION_ENTER) (transitionType == Geofence.GEOFENCE_TRANSITION_EXIT)) { logger.log(Log.DEBUG, STR); List<Geofence> triggerList = geofencingEvent.getTriggeringGeofences(); List<GeoNotification> geoNotifications = new ArrayList<GeoNotification>(); for (Geofence fence : triggerList) { String fenceId = fence.getRequestId(); GeoNotification geoNotification = store .getGeoNotification(fenceId); if (geoNotification != null) { if (geoNotification.notification != null) { if(geoNotification.isScheduled() && geoNotification.isFrequencyOk()){ geoNotification.notification.setLastTriggered(); store.setGeoNotification(geoNotification); notifier.notify(geoNotification.notification); } } geoNotification.transitionType = transitionType; geoNotifications.add(geoNotification); } } if (geoNotifications.size() > 0) { broadcastIntent.putExtra(STR, Gson.get().toJson(geoNotifications)); GeofencePlugin.onTransitionReceived(geoNotifications); } } else { String error = STR + transitionType; logger.log(Log.ERROR, error); broadcastIntent.putExtra("error", error); } } sendBroadcast(broadcastIntent); } | /**
* Handles incoming intents
*
* @param intent
* The Intent sent by Location Services. This Intent is provided
* to Location Services (inside a PendingIntent) when you call
* addGeofences()
*/ | Handles incoming intents | onHandleIntent | {
"repo_name": "QuantumRand/cordova-plugin-geofence",
"path": "src/android/ReceiveTransitionsIntentService.java",
"license": "apache-2.0",
"size": 4057
} | [
"android.app.NotificationManager",
"android.content.Context",
"android.content.Intent",
"android.util.Log",
"com.google.android.gms.location.Geofence",
"com.google.android.gms.location.GeofencingEvent",
"java.util.ArrayList",
"java.util.List"
] | import android.app.NotificationManager; import android.content.Context; import android.content.Intent; import android.util.Log; import com.google.android.gms.location.Geofence; import com.google.android.gms.location.GeofencingEvent; import java.util.ArrayList; import java.util.List; | import android.app.*; import android.content.*; import android.util.*; import com.google.android.gms.location.*; import java.util.*; | [
"android.app",
"android.content",
"android.util",
"com.google.android",
"java.util"
] | android.app; android.content; android.util; com.google.android; java.util; | 237,108 |
@VisibleForTesting
void printModuleGraphJsonTo(Appendable out) throws IOException {
out.append(compiler.getDegenerateModuleGraph().toJson().toString());
} | void printModuleGraphJsonTo(Appendable out) throws IOException { out.append(compiler.getDegenerateModuleGraph().toJson().toString()); } | /**
* Prints the current module graph as JSON.
*/ | Prints the current module graph as JSON | printModuleGraphJsonTo | {
"repo_name": "GerHobbelt/closure-compiler",
"path": "src/com/google/javascript/jscomp/AbstractCommandLineRunner.java",
"license": "apache-2.0",
"size": 97801
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 166,465 |
@SuppressWarnings("TypeMayBeWeakened")
void writeHashMap(HashMap<?, ?> map, long loadFactorFieldOff, boolean set) throws IOException {
int size = map.size();
writeInt(size);
writeFloat(getFloat(map, loadFactorFieldOff));
for (Map.Entry<?, ?> e : map.entrySet()) {
writeObject0(e.getKey());
if (!set)
writeObject0(e.getValue());
}
} | @SuppressWarnings(STR) void writeHashMap(HashMap<?, ?> map, long loadFactorFieldOff, boolean set) throws IOException { int size = map.size(); writeInt(size); writeFloat(getFloat(map, loadFactorFieldOff)); for (Map.Entry<?, ?> e : map.entrySet()) { writeObject0(e.getKey()); if (!set) writeObject0(e.getValue()); } } | /**
* Writes {@link HashMap}.
*
* @param map Map.
* @param loadFactorFieldOff Load factor field offset.
* @param set Whether writing underlying map from {@link HashSet}.
* @throws IOException In case of error.
*/ | Writes <code>HashMap</code> | writeHashMap | {
"repo_name": "NSAmelchev/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/marshaller/optimized/OptimizedObjectOutputStream.java",
"license": "apache-2.0",
"size": 25535
} | [
"java.io.IOException",
"java.util.HashMap",
"java.util.Map"
] | import java.io.IOException; import java.util.HashMap; import java.util.Map; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 2,397,000 |
public void setCurrentImage(BufferedImage value) {
m_CurrentFile = null;
m_PaintPanel.setCurrentImage(value);
updateImageProperties();
} | void function(BufferedImage value) { m_CurrentFile = null; m_PaintPanel.setCurrentImage(value); updateImageProperties(); } | /**
* Sets the underlying image. Removes the filename.
*
* @param value the image to display
*/ | Sets the underlying image. Removes the filename | setCurrentImage | {
"repo_name": "automenta/adams-core",
"path": "src/main/java/adams/gui/visualization/image/ImagePanel.java",
"license": "gpl-3.0",
"size": 45877
} | [
"java.awt.image.BufferedImage"
] | import java.awt.image.BufferedImage; | import java.awt.image.*; | [
"java.awt"
] | java.awt; | 985,997 |
Collection<String> getActionPlugins() throws Exception; | Collection<String> getActionPlugins() throws Exception; | /**
* Get all list of plugins configured on the system.
*
* @return List of plugins configured on the definitions service
* @throws Exception on an problem
*/ | Get all list of plugins configured on the system | getActionPlugins | {
"repo_name": "jsanda/hawkular-alerts",
"path": "hawkular-alerts-api/src/main/java/org/hawkular/alerts/api/services/DefinitionsService.java",
"license": "apache-2.0",
"size": 32807
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 2,146,118 |
Quota verify(T key, AbstractQuotaCountingOffset countingOffset) throws QuotaException; | Quota verify(T key, AbstractQuotaCountingOffset countingOffset) throws QuotaException; | /**
* Verifies if the quota is full or not enough from a given quota key and adding a counting
* offset.
* If full then a quota full exception is throwed.
* If not enough then a quota not enough exception is throwed.
* @param key
* @param countingOffset
* @return the quota used by the verify treatment
* @throws QuotaException
*/ | Verifies if the quota is full or not enough from a given quota key and adding a counting offset. If full then a quota full exception is throwed. If not enough then a quota not enough exception is throwed | verify | {
"repo_name": "ebonnet/Silverpeas-Core",
"path": "core-library/src/main/java/org/silverpeas/core/admin/quota/service/QuotaService.java",
"license": "agpl-3.0",
"size": 3538
} | [
"org.silverpeas.core.admin.quota.exception.QuotaException",
"org.silverpeas.core.admin.quota.model.Quota",
"org.silverpeas.core.admin.quota.offset.AbstractQuotaCountingOffset"
] | import org.silverpeas.core.admin.quota.exception.QuotaException; import org.silverpeas.core.admin.quota.model.Quota; import org.silverpeas.core.admin.quota.offset.AbstractQuotaCountingOffset; | import org.silverpeas.core.admin.quota.exception.*; import org.silverpeas.core.admin.quota.model.*; import org.silverpeas.core.admin.quota.offset.*; | [
"org.silverpeas.core"
] | org.silverpeas.core; | 462,512 |
private UpdateTMWithXLIFFResult updateTMWithXliff(
String xliffContent,
TMTextUnitVariant.Status importStatus,
AbstractImportTranslationsStep abstractImportTranslationsStep) throws OkapiBadFilterInputException {
logger.debug("Configuring pipeline for localized XLIFF processing");
IPipelineDriver driver = new PipelineDriver();
driver.addStep(new RawDocumentToFilterEventsStep(new XLIFFFilter()));
driver.addStep(getConfiguredQualityStep());
IntegrityCheckStep integrityCheckStep = new IntegrityCheckStep();
driver.addStep(integrityCheckStep);
abstractImportTranslationsStep.setImportWithStatus(importStatus);
driver.addStep(abstractImportTranslationsStep);
//TODO(P1) It sounds like it's not possible to the XLIFFFilter for the output
// because the note is readonly mode and we need to override it to provide more information
logger.debug("Prepare FilterEventsWriterStep to use an XLIFFWriter with outputstream (allows only one doc to be processed)");
FilterEventsWriterStep filterEventsWriterStep = new FilterEventsWriterStep(new XLIFFWriter());
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
filterEventsWriterStep.setOutputStream(byteArrayOutputStream);
filterEventsWriterStep.setOutputEncoding(StandardCharsets.UTF_8.toString());
driver.addStep(filterEventsWriterStep);
// We need to read first the target language, because if we wait for okapi to read
// it from the file it is too late to write the output with the XLIFFWriter
// (missing target language)
String targetLanguage = xliffUtils.getTargetLanguage(xliffContent);
LocaleId targetLocaleId = targetLanguage != null ? LocaleId.fromBCP47(targetLanguage) : LocaleId.EMPTY;
RawDocument rawDocument = new RawDocument(xliffContent, LocaleId.ENGLISH, targetLocaleId);
driver.addBatchItem(rawDocument, RawDocument.getFakeOutputURIForStream(), null);
logger.debug("Start processing batch");
driver.processBatch();
logger.debug("Get the Import report");
ImportTranslationsStepAnnotation importTranslationsStepAnnotation = rawDocument.getAnnotation(ImportTranslationsStepAnnotation.class);
UpdateTMWithXLIFFResult updateReport = new UpdateTMWithXLIFFResult();
updateReport.setXliffContent(StreamUtil.getUTF8OutputStreamAsString(byteArrayOutputStream));
updateReport.setComment(importTranslationsStepAnnotation.getComment());
return updateReport;
} | UpdateTMWithXLIFFResult function( String xliffContent, TMTextUnitVariant.Status importStatus, AbstractImportTranslationsStep abstractImportTranslationsStep) throws OkapiBadFilterInputException { logger.debug(STR); IPipelineDriver driver = new PipelineDriver(); driver.addStep(new RawDocumentToFilterEventsStep(new XLIFFFilter())); driver.addStep(getConfiguredQualityStep()); IntegrityCheckStep integrityCheckStep = new IntegrityCheckStep(); driver.addStep(integrityCheckStep); abstractImportTranslationsStep.setImportWithStatus(importStatus); driver.addStep(abstractImportTranslationsStep); logger.debug(STR); FilterEventsWriterStep filterEventsWriterStep = new FilterEventsWriterStep(new XLIFFWriter()); ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); filterEventsWriterStep.setOutputStream(byteArrayOutputStream); filterEventsWriterStep.setOutputEncoding(StandardCharsets.UTF_8.toString()); driver.addStep(filterEventsWriterStep); String targetLanguage = xliffUtils.getTargetLanguage(xliffContent); LocaleId targetLocaleId = targetLanguage != null ? LocaleId.fromBCP47(targetLanguage) : LocaleId.EMPTY; RawDocument rawDocument = new RawDocument(xliffContent, LocaleId.ENGLISH, targetLocaleId); driver.addBatchItem(rawDocument, RawDocument.getFakeOutputURIForStream(), null); logger.debug(STR); driver.processBatch(); logger.debug(STR); ImportTranslationsStepAnnotation importTranslationsStepAnnotation = rawDocument.getAnnotation(ImportTranslationsStepAnnotation.class); UpdateTMWithXLIFFResult updateReport = new UpdateTMWithXLIFFResult(); updateReport.setXliffContent(StreamUtil.getUTF8OutputStreamAsString(byteArrayOutputStream)); updateReport.setComment(importTranslationsStepAnnotation.getComment()); return updateReport; } | /**
* Update TM with XLIFF.
*
* @param xliffContent The content of the localized XLIFF TODO(P1) Use BCP47
* tag instead of Locale object?
* @param importStatus specific status to use when importing translation
* @param abstractImportTranslationsStep defines which import logic to apply
* @return the imported XLIFF with information for each text unit about the
* import process
* @throws OkapiBadFilterInputException
*/ | Update TM with XLIFF | updateTMWithXliff | {
"repo_name": "box/mojito",
"path": "webapp/src/main/java/com/box/l10n/mojito/service/tm/TMService.java",
"license": "apache-2.0",
"size": 53813
} | [
"com.box.l10n.mojito.common.StreamUtil",
"com.box.l10n.mojito.entity.TMTextUnitVariant",
"com.box.l10n.mojito.okapi.AbstractImportTranslationsStep",
"com.box.l10n.mojito.okapi.ImportTranslationsStepAnnotation",
"com.box.l10n.mojito.okapi.RawDocument",
"com.box.l10n.mojito.okapi.Status",
"com.box.l10n.mo... | import com.box.l10n.mojito.common.StreamUtil; import com.box.l10n.mojito.entity.TMTextUnitVariant; import com.box.l10n.mojito.okapi.AbstractImportTranslationsStep; import com.box.l10n.mojito.okapi.ImportTranslationsStepAnnotation; import com.box.l10n.mojito.okapi.RawDocument; import com.box.l10n.mojito.okapi.Status; import com.box.l10n.mojito.okapi.XLIFFWriter; import com.box.l10n.mojito.service.assetintegritychecker.integritychecker.IntegrityCheckStep; import java.io.ByteArrayOutputStream; import java.nio.charset.StandardCharsets; import net.sf.okapi.common.LocaleId; import net.sf.okapi.common.exceptions.OkapiBadFilterInputException; import net.sf.okapi.common.pipelinedriver.IPipelineDriver; import net.sf.okapi.common.pipelinedriver.PipelineDriver; import net.sf.okapi.filters.xliff.XLIFFFilter; import net.sf.okapi.steps.common.FilterEventsWriterStep; import net.sf.okapi.steps.common.RawDocumentToFilterEventsStep; | import com.box.l10n.mojito.common.*; import com.box.l10n.mojito.entity.*; import com.box.l10n.mojito.okapi.*; import com.box.l10n.mojito.service.assetintegritychecker.integritychecker.*; import java.io.*; import java.nio.charset.*; import net.sf.okapi.common.*; import net.sf.okapi.common.exceptions.*; import net.sf.okapi.common.pipelinedriver.*; import net.sf.okapi.filters.xliff.*; import net.sf.okapi.steps.common.*; | [
"com.box.l10n",
"java.io",
"java.nio",
"net.sf.okapi"
] | com.box.l10n; java.io; java.nio; net.sf.okapi; | 2,816,147 |
private boolean showTable(Graph graph, Table table) {
if (table != null && !tableNodes.containsKey(table)) {
Node n = graph.addNode();
n.setString("label", table.getName());
String tooltip = tableRenderer.getToolTip(table);
n.setString("tooltip", tooltip);
tableNodes.put(table, n);
return true;
}
return false;
}
| boolean function(Graph graph, Table table) { if (table != null && !tableNodes.containsKey(table)) { Node n = graph.addNode(); n.setString("label", table.getName()); String tooltip = tableRenderer.getToolTip(table); n.setString(STR, tooltip); tableNodes.put(table, n); return true; } return false; } | /**
* Creates visible node for given table.
*
* @param graph the graph
* @param table the table to show
*/ | Creates visible node for given table | showTable | {
"repo_name": "Recombine/jailer",
"path": "src/main/net/sf/jailer/ui/graphical_view/GraphicalDataModelView.java",
"license": "apache-2.0",
"size": 62732
} | [
"net.sf.jailer.datamodel.Table"
] | import net.sf.jailer.datamodel.Table; | import net.sf.jailer.datamodel.*; | [
"net.sf.jailer"
] | net.sf.jailer; | 1,985,910 |
public static void addAuthenticationRequestToCache(String key, AuthenticationRequestCacheEntry authReqEntry) {
AuthenticationRequestCacheKey cacheKey = new AuthenticationRequestCacheKey(key);
AuthenticationRequestCache.getInstance().addToCache(cacheKey, authReqEntry);
} | static void function(String key, AuthenticationRequestCacheEntry authReqEntry) { AuthenticationRequestCacheKey cacheKey = new AuthenticationRequestCacheKey(key); AuthenticationRequestCache.getInstance().addToCache(cacheKey, authReqEntry); } | /**
* To add authentication request cache entry to cache
*
* @param key cache entry key
* @param authReqEntry AuthenticationReqCache Entry.
*/ | To add authentication request cache entry to cache | addAuthenticationRequestToCache | {
"repo_name": "godwinamila/carbon-identity",
"path": "components/authentication-framework/org.wso2.carbon.identity.application.authentication.framework/src/main/java/org/wso2/carbon/identity/application/authentication/framework/util/FrameworkUtils.java",
"license": "apache-2.0",
"size": 44327
} | [
"org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCache",
"org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCacheEntry",
"org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCacheKey"
] | import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCache; import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCacheEntry; import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCacheKey; | import org.wso2.carbon.identity.application.authentication.framework.cache.*; | [
"org.wso2.carbon"
] | org.wso2.carbon; | 1,234,264 |
public void setRequestProperty(String key, String value) throws IOException {
if (conn == null) {
throw new IOException("Cannot open output stream on non opened connection");
}
conn.setRequestProperty(key, value);
} | void function(String key, String value) throws IOException { if (conn == null) { throw new IOException(STR); } conn.setRequestProperty(key, value); } | /**
* Sets the general request property. If a property with the key already exists,
* overwrite its value with the new value.
*
* NOTE: HTTP requires all request properties which can legally have multiple instances
* with the same key to use a comma-seperated list syntax which enables multiple
* properties to be appended into a single property.
*
* @param key the keyword by which the request is known (e.g., "accept").
* @param value the value associated with it.
*/ | Sets the general request property. If a property with the key already exists, overwrite its value with the new value. with the same key to use a comma-seperated list syntax which enables multiple properties to be appended into a single property | setRequestProperty | {
"repo_name": "zhangdakun/funasyn",
"path": "externals/java-sdk/common/src/main/java-se-se/com/funambol/platform/HttpConnectionAdapter.java",
"license": "agpl-3.0",
"size": 17520
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,178,913 |
List<URI> getContexts() throws OntoDriverException; | List<URI> getContexts() throws OntoDriverException; | /**
* Gets a set of currently available contexts in the underlying repository.
* <p>
* Note that the default context is not included in the result.
*
* @return List of context URIs
* @throws OntoDriverException If an ontology access error occurs
* @throws IllegalStateException If called on a closed connection
*/ | Gets a set of currently available contexts in the underlying repository. Note that the default context is not included in the result | getContexts | {
"repo_name": "kbss-cvut/jopa",
"path": "ontodriver-api/src/main/java/cz/cvut/kbss/ontodriver/Connection.java",
"license": "lgpl-3.0",
"size": 7812
} | [
"cz.cvut.kbss.ontodriver.exception.OntoDriverException",
"java.util.List"
] | import cz.cvut.kbss.ontodriver.exception.OntoDriverException; import java.util.List; | import cz.cvut.kbss.ontodriver.exception.*; import java.util.*; | [
"cz.cvut.kbss",
"java.util"
] | cz.cvut.kbss; java.util; | 163,783 |
Object instance = invocation.getParameters().toArray()[0];
UnitOfWork uow = xrService.getORSession().acquireUnitOfWork();
uow.registerNewObject(instance);
uow.commit();
return null;
}
| Object instance = invocation.getParameters().toArray()[0]; UnitOfWork uow = xrService.getORSession().acquireUnitOfWork(); uow.registerNewObject(instance); uow.commit(); return null; } | /**
* Execute <tt>INSERT</tt> operation on the database
* @param xrService parent <code>XRService</code> that owns this <code>Operation</code>
* @param invocation contains runtime argument values to be bound to the list of
* {@link Parameter}'s.
* @return result - <code>null</code> as the underlying <tt>INSERT</tt> operation on
* the database does not return a value.
*
* @see {@link Operation}
*/ | Execute INSERT operation on the database | invoke | {
"repo_name": "bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs",
"path": "dbws/org.eclipse.persistence.dbws/src/org/eclipse/persistence/internal/xr/InsertOperation.java",
"license": "epl-1.0",
"size": 1996
} | [
"org.eclipse.persistence.sessions.UnitOfWork"
] | import org.eclipse.persistence.sessions.UnitOfWork; | import org.eclipse.persistence.sessions.*; | [
"org.eclipse.persistence"
] | org.eclipse.persistence; | 1,180,772 |
public synchronized void fit(@NonNull DataSetIterator source) {
if (zoo == null) {
zoo = new Trainer[workers];
for (int cnt = 0; cnt < workers; cnt++) {
zoo[cnt] = new Trainer(cnt, model);
zoo[cnt].start();
}
}
source.reset();
DataSetIterator iterator;
if (prefetchSize > 0 && source.asyncSupported()) {
iterator = new AsyncDataSetIterator(source, prefetchSize);
} else iterator = source;
AtomicInteger locker = new AtomicInteger(0);
while (iterator.hasNext()) {
DataSet dataSet = iterator.next();
int pos = locker.getAndIncrement();
zoo[pos].feedDataSet(dataSet);
if (pos + 1 == workers || !iterator.hasNext()) {
iterationsCounter.incrementAndGet();
for (int cnt = 0; cnt < workers && cnt < locker.get(); cnt ++) {
try {
zoo[cnt].waitTillRunning();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
if (iterationsCounter.get() % averagingFrequency == 0 && pos + 1 == workers) {
double score = 0.0;
if (!legacyAveraging || Nd4j.getAffinityManager().getNumberOfDevices() == 1) {
List<INDArray> params = new ArrayList<>();
for (int cnt = 0; cnt < workers && cnt < locker.get(); cnt++) {
params.add(zoo[cnt].getModel().params());
score += zoo[cnt].getModel().score();
}
Nd4j.averageAndPropagate(model.params(), params);
} else {
INDArray params = Nd4j.zeros(model.params().shape());
int cnt = 0;
for (; cnt < workers && cnt < locker.get(); cnt++) {
params.addi(zoo[cnt].getModel().params());
score += zoo[cnt].getModel().score();
}
params.divi(cnt);
model.setParams(params);
}
score /= Math.min(workers, locker.get());
// TODO: improve this
if (reportScore)
logger.info("Averaged score: " + score);
// averaging updaters state
if (model instanceof MultiLayerNetwork) {
if (averageUpdaters) {
Updater updater = ((MultiLayerNetwork) model).getUpdater();
if (updater != null && updater.getStateViewArray() != null) {
if (!legacyAveraging || Nd4j.getAffinityManager().getNumberOfDevices() == 1) {
List<INDArray> updaters = new ArrayList<>();
for (int cnt = 0; cnt < workers && cnt < locker.get(); cnt++) {
updaters.add(((MultiLayerNetwork) zoo[cnt].getModel()).getUpdater().getStateViewArray());
}
Nd4j.averageAndPropagate(updater.getStateViewArray(), updaters);
} else {
INDArray state = Nd4j.zeros(updater.getStateViewArray().shape());
int cnt = 0;
for (; cnt < workers && cnt < locker.get(); cnt++) {
state.addi(((MultiLayerNetwork) zoo[cnt].getModel()).getUpdater().getStateViewArray().dup());
}
state.divi(cnt);
updater.setStateViewArray((MultiLayerNetwork) model, state, false);
}
}
}
((MultiLayerNetwork) model).setScore(score);
} else if (model instanceof ComputationGraph) {
if (averageUpdaters) {
ComputationGraphUpdater updater = ((ComputationGraph) model).getUpdater();
if (updater != null && updater.getStateViewArray() != null) {
if (!legacyAveraging || Nd4j.getAffinityManager().getNumberOfDevices() == 1) {
List<INDArray> updaters = new ArrayList<>();
for (int cnt = 0; cnt < workers && cnt < locker.get(); cnt++) {
updaters.add(((ComputationGraph) zoo[cnt].getModel()).getUpdater().getStateViewArray());
}
Nd4j.averageAndPropagate(updater.getStateViewArray(), updaters);
} else {
INDArray state = Nd4j.zeros(updater.getStateViewArray().shape());
int cnt = 0;
for (; cnt < workers && cnt < locker.get(); cnt++) {
state.addi(((ComputationGraph) zoo[cnt].getModel()).getUpdater().getStateViewArray());
}
state.divi(cnt);
updater.setStateViewArray(state);
}
}
}
((ComputationGraph) model).setScore(score);
}
if (legacyAveraging && Nd4j.getAffinityManager().getNumberOfDevices() > 1) {
for (int cnt = 0; cnt < workers; cnt++) {
zoo[cnt].updateModel(model);
}
}
}
locker.set(0);
}
}
logger.debug("Iterations passed: {}", iterationsCounter.get());
iterationsCounter.set(0);
}
public static class Builder {
private Model model;
private int workers = 2;
private int prefetchSize = 16;
private int averagingFrequency = 1;
private boolean reportScore = false;
private boolean averageUpdaters = true;
private boolean legacyAveraging = true;
public Builder(@NonNull MultiLayerNetwork mln) {
model = mln;
}
public Builder(@NonNull ComputationGraph graph) {
model = graph;
} | synchronized void function(@NonNull DataSetIterator source) { if (zoo == null) { zoo = new Trainer[workers]; for (int cnt = 0; cnt < workers; cnt++) { zoo[cnt] = new Trainer(cnt, model); zoo[cnt].start(); } } source.reset(); DataSetIterator iterator; if (prefetchSize > 0 && source.asyncSupported()) { iterator = new AsyncDataSetIterator(source, prefetchSize); } else iterator = source; AtomicInteger locker = new AtomicInteger(0); while (iterator.hasNext()) { DataSet dataSet = iterator.next(); int pos = locker.getAndIncrement(); zoo[pos].feedDataSet(dataSet); if (pos + 1 == workers !iterator.hasNext()) { iterationsCounter.incrementAndGet(); for (int cnt = 0; cnt < workers && cnt < locker.get(); cnt ++) { try { zoo[cnt].waitTillRunning(); } catch (Exception e) { throw new RuntimeException(e); } } if (iterationsCounter.get() % averagingFrequency == 0 && pos + 1 == workers) { double score = 0.0; if (!legacyAveraging Nd4j.getAffinityManager().getNumberOfDevices() == 1) { List<INDArray> params = new ArrayList<>(); for (int cnt = 0; cnt < workers && cnt < locker.get(); cnt++) { params.add(zoo[cnt].getModel().params()); score += zoo[cnt].getModel().score(); } Nd4j.averageAndPropagate(model.params(), params); } else { INDArray params = Nd4j.zeros(model.params().shape()); int cnt = 0; for (; cnt < workers && cnt < locker.get(); cnt++) { params.addi(zoo[cnt].getModel().params()); score += zoo[cnt].getModel().score(); } params.divi(cnt); model.setParams(params); } score /= Math.min(workers, locker.get()); if (reportScore) logger.info(STR + score); if (model instanceof MultiLayerNetwork) { if (averageUpdaters) { Updater updater = ((MultiLayerNetwork) model).getUpdater(); if (updater != null && updater.getStateViewArray() != null) { if (!legacyAveraging Nd4j.getAffinityManager().getNumberOfDevices() == 1) { List<INDArray> updaters = new ArrayList<>(); for (int cnt = 0; cnt < workers && cnt < locker.get(); cnt++) { updaters.add(((MultiLayerNetwork) 
zoo[cnt].getModel()).getUpdater().getStateViewArray()); } Nd4j.averageAndPropagate(updater.getStateViewArray(), updaters); } else { INDArray state = Nd4j.zeros(updater.getStateViewArray().shape()); int cnt = 0; for (; cnt < workers && cnt < locker.get(); cnt++) { state.addi(((MultiLayerNetwork) zoo[cnt].getModel()).getUpdater().getStateViewArray().dup()); } state.divi(cnt); updater.setStateViewArray((MultiLayerNetwork) model, state, false); } } } ((MultiLayerNetwork) model).setScore(score); } else if (model instanceof ComputationGraph) { if (averageUpdaters) { ComputationGraphUpdater updater = ((ComputationGraph) model).getUpdater(); if (updater != null && updater.getStateViewArray() != null) { if (!legacyAveraging Nd4j.getAffinityManager().getNumberOfDevices() == 1) { List<INDArray> updaters = new ArrayList<>(); for (int cnt = 0; cnt < workers && cnt < locker.get(); cnt++) { updaters.add(((ComputationGraph) zoo[cnt].getModel()).getUpdater().getStateViewArray()); } Nd4j.averageAndPropagate(updater.getStateViewArray(), updaters); } else { INDArray state = Nd4j.zeros(updater.getStateViewArray().shape()); int cnt = 0; for (; cnt < workers && cnt < locker.get(); cnt++) { state.addi(((ComputationGraph) zoo[cnt].getModel()).getUpdater().getStateViewArray()); } state.divi(cnt); updater.setStateViewArray(state); } } } ((ComputationGraph) model).setScore(score); } if (legacyAveraging && Nd4j.getAffinityManager().getNumberOfDevices() > 1) { for (int cnt = 0; cnt < workers; cnt++) { zoo[cnt].updateModel(model); } } } locker.set(0); } } logger.debug(STR, iterationsCounter.get()); iterationsCounter.set(0); } public static class Builder { private Model model; private int workers = 2; private int prefetchSize = 16; private int averagingFrequency = 1; private boolean reportScore = false; private boolean averageUpdaters = true; private boolean legacyAveraging = true; public Builder(@NonNull MultiLayerNetwork mln) { model = mln; } public Builder(@NonNull ComputationGraph graph) { 
model = graph; } | /**
* This method takes DataSetIterator, and starts training over it by scheduling DataSets to different executors
*
* @param source
*/ | This method takes DataSetIterator, and starts training over it by scheduling DataSets to different executors | fit | {
"repo_name": "xuzhongxing/deeplearning4j",
"path": "deeplearning4j-core/src/main/java/org/deeplearning4j/parallelism/ParallelWrapper.java",
"license": "apache-2.0",
"size": 25921
} | [
"java.util.ArrayList",
"java.util.List",
"java.util.concurrent.atomic.AtomicInteger",
"org.deeplearning4j.datasets.iterator.AsyncDataSetIterator",
"org.deeplearning4j.nn.api.Model",
"org.deeplearning4j.nn.api.Updater",
"org.deeplearning4j.nn.graph.ComputationGraph",
"org.deeplearning4j.nn.multilayer.M... | import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import org.deeplearning4j.datasets.iterator.AsyncDataSetIterator; import org.deeplearning4j.nn.api.Model; import org.deeplearning4j.nn.api.Updater; import org.deeplearning4j.nn.graph.ComputationGraph; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.deeplearning4j.nn.updater.graph.ComputationGraphUpdater; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.dataset.api.DataSet; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import org.nd4j.linalg.factory.Nd4j; | import java.util.*; import java.util.concurrent.atomic.*; import org.deeplearning4j.datasets.iterator.*; import org.deeplearning4j.nn.api.*; import org.deeplearning4j.nn.graph.*; import org.deeplearning4j.nn.multilayer.*; import org.deeplearning4j.nn.updater.graph.*; import org.nd4j.linalg.api.ndarray.*; import org.nd4j.linalg.dataset.api.*; import org.nd4j.linalg.dataset.api.iterator.*; import org.nd4j.linalg.factory.*; | [
"java.util",
"org.deeplearning4j.datasets",
"org.deeplearning4j.nn",
"org.nd4j.linalg"
] | java.util; org.deeplearning4j.datasets; org.deeplearning4j.nn; org.nd4j.linalg; | 1,574,129 |
public static byte[] scryptDigest(byte[] input) {
try {
return SCrypt.scrypt(input, input, 1024, 1, 1, 32);
} catch (Exception e) {
return null;
}
} | static byte[] function(byte[] input) { try { return SCrypt.scrypt(input, input, 1024, 1, 1, 32); } catch (Exception e) { return null; } } | /**
* Calculates the Scrypt hash of the given bytes. This is
* standard procedure in Mintcoin for hashing blocks.
* The resulting hash is in big endian form.
*/ | Calculates the Scrypt hash of the given bytes. This is standard procedure in Mintcoin for hashing blocks. The resulting hash is in big endian form | scryptDigest | {
"repo_name": "keremhd/mintcoinj",
"path": "core/src/main/java/com/google/bitcoin/core/Utils.java",
"license": "apache-2.0",
"size": 23894
} | [
"com.lambdaworks.crypto.SCrypt"
] | import com.lambdaworks.crypto.SCrypt; | import com.lambdaworks.crypto.*; | [
"com.lambdaworks.crypto"
] | com.lambdaworks.crypto; | 595,046 |
PendingResult<GetCapabilityResult> getCapability(GoogleApiClient client, String capability, @NodeFilterType int nodeFilter); | PendingResult<GetCapabilityResult> getCapability(GoogleApiClient client, String capability, @NodeFilterType int nodeFilter); | /**
* Returns information about a capabilities, including the nodes that declare this capability.
* The filter parameter controls whether all nodes are returned, {@link #FILTER_ALL}, or only
* those that are currently reachable by this node, {@link #FILTER_REACHABLE}.
* <p/>
* The local node will never be returned in the set of nodes.
*/ | Returns information about a capabilities, including the nodes that declare this capability. The filter parameter controls whether all nodes are returned, <code>#FILTER_ALL</code>, or only those that are currently reachable by this node, <code>#FILTER_REACHABLE</code>. The local node will never be returned in the set of nodes | getCapability | {
"repo_name": "microg/android_packages_apps_GmsCore",
"path": "play-services-wearable/src/main/java/com/google/android/gms/wearable/CapabilityApi.java",
"license": "apache-2.0",
"size": 8380
} | [
"com.google.android.gms.common.api.GoogleApiClient",
"com.google.android.gms.common.api.PendingResult"
] | import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.common.api.PendingResult; | import com.google.android.gms.common.api.*; | [
"com.google.android"
] | com.google.android; | 2,502,339 |
public void marshal(OutputStream os) throws IOException {
// PrintStream headerStream = new PrintStream(os, false, "US-ASCII");
// this.marshalStartLine(headerStream);
this.marshalStartLine(os);
if (this.encoding != null) {
ByteArrayOutputStream byteOs = new ByteArrayOutputStream();
OutputStream encodedStream = this.encoding.getEncoder(byteOs);
encodedStream.write(this.content);
encodedStream.close();
byte[] encodedContent = byteOs.toByteArray();
this.setHeader(HttpConstants.Headers.CONTENT_LENGTH, String.valueOf(encodedContent.length));
this.marshalHeaders(os);
os.write(encodedContent);
} else {
this.marshalHeaders(os);
this.marshalContent(os);
}
os.flush();
}
| void function(OutputStream os) throws IOException { this.marshalStartLine(os); if (this.encoding != null) { ByteArrayOutputStream byteOs = new ByteArrayOutputStream(); OutputStream encodedStream = this.encoding.getEncoder(byteOs); encodedStream.write(this.content); encodedStream.close(); byte[] encodedContent = byteOs.toByteArray(); this.setHeader(HttpConstants.Headers.CONTENT_LENGTH, String.valueOf(encodedContent.length)); this.marshalHeaders(os); os.write(encodedContent); } else { this.marshalHeaders(os); this.marshalContent(os); } os.flush(); } | /**
* Writes the HTTP message represented by this instance to
* a given {@link java.io.OutputStream}.
* @param os
* The stream to write to.
* @throws IOException
* If an error occurs writing to the stream or if
* any required character set cannot be loaded.
*/ | Writes the HTTP message represented by this instance to a given <code>java.io.OutputStream</code> | marshal | {
"repo_name": "lukehutch/gribbit-rox",
"path": "attic/src/com/flat502/rox/http/HttpMessage.java",
"license": "bsd-3-clause",
"size": 11158
} | [
"java.io.ByteArrayOutputStream",
"java.io.IOException",
"java.io.OutputStream"
] | import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; | import java.io.*; | [
"java.io"
] | java.io; | 257,964 |
public static List<String> getChildrenUri(PersistenceManagerService persistenceManagerService, SearchService searchService, NodeRef node, List<String> paths){
FileInfo nodeInfo = persistenceManagerService.getFileInfo(node);
if (nodeInfo.isFolder()) {
List<FileInfo> children = persistenceManagerService.list(node);
for (FileInfo child : children) {
getChildrenUri(persistenceManagerService, searchService, child.getNodeRef(), paths);
}
} else {
List<FileInfo> pathParts = null;
try {
pathParts = persistenceManagerService.getNamePath(persistenceManagerService.getCompanyHomeNodeRef(), node);
} catch (FileNotFoundException e) {
logger.error("ERROR: ", e);
}
String nodePath = "";
for (FileInfo pathPart : pathParts) {
nodePath = nodePath + "/" + pathPart.getName();
}
paths.add(nodePath);
}
return paths;
} | static List<String> function(PersistenceManagerService persistenceManagerService, SearchService searchService, NodeRef node, List<String> paths){ FileInfo nodeInfo = persistenceManagerService.getFileInfo(node); if (nodeInfo.isFolder()) { List<FileInfo> children = persistenceManagerService.list(node); for (FileInfo child : children) { getChildrenUri(persistenceManagerService, searchService, child.getNodeRef(), paths); } } else { List<FileInfo> pathParts = null; try { pathParts = persistenceManagerService.getNamePath(persistenceManagerService.getCompanyHomeNodeRef(), node); } catch (FileNotFoundException e) { logger.error(STR, e); } String nodePath = STR/" + pathPart.getName(); } paths.add(nodePath); } return paths; } | /**
* Return non-folder paths
*/ | Return non-folder paths | getChildrenUri | {
"repo_name": "hlim/studio",
"path": "alfresco-svcs/src/main/java/org/craftercms/cstudio/alfresco/dm/util/DmUtils.java",
"license": "gpl-3.0",
"size": 17113
} | [
"java.util.List",
"org.alfresco.service.cmr.model.FileInfo",
"org.alfresco.service.cmr.model.FileNotFoundException",
"org.alfresco.service.cmr.repository.NodeRef",
"org.craftercms.cstudio.alfresco.service.api.PersistenceManagerService",
"org.craftercms.cstudio.alfresco.service.api.SearchService"
] | import java.util.List; import org.alfresco.service.cmr.model.FileInfo; import org.alfresco.service.cmr.model.FileNotFoundException; import org.alfresco.service.cmr.repository.NodeRef; import org.craftercms.cstudio.alfresco.service.api.PersistenceManagerService; import org.craftercms.cstudio.alfresco.service.api.SearchService; | import java.util.*; import org.alfresco.service.cmr.model.*; import org.alfresco.service.cmr.repository.*; import org.craftercms.cstudio.alfresco.service.api.*; | [
"java.util",
"org.alfresco.service",
"org.craftercms.cstudio"
] | java.util; org.alfresco.service; org.craftercms.cstudio; | 831,283 |
protected PermissionStatus getUpgradePermission() {
return defaultPermission;
} | PermissionStatus function() { return defaultPermission; } | /**
* Return the default path permission when upgrading from releases with no
* permissions (<=0.15) to releases with permissions (>=0.16)
*/ | Return the default path permission when upgrading from releases with no permissions (=0.16) | getUpgradePermission | {
"repo_name": "andy8788/hadoop-hdfs",
"path": "src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java",
"license": "apache-2.0",
"size": 214042
} | [
"org.apache.hadoop.fs.permission.PermissionStatus"
] | import org.apache.hadoop.fs.permission.PermissionStatus; | import org.apache.hadoop.fs.permission.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 1,530,242 |
@Override
public Lock getDistributedLock(Object key) throws IllegalStateException {
checkReadiness();
checkForLimitedOrNoAccess();
Scope theScope = getAttributes().getScope();
Assert.assertTrue(theScope == Scope.LOCAL);
throw new IllegalStateException(
"Only supported for GLOBAL scope, not LOCAL");
} | Lock function(Object key) throws IllegalStateException { checkReadiness(); checkForLimitedOrNoAccess(); Scope theScope = getAttributes().getScope(); Assert.assertTrue(theScope == Scope.LOCAL); throw new IllegalStateException( STR); } | /**
* This implementation only checks readiness and scope
*/ | This implementation only checks readiness and scope | getDistributedLock | {
"repo_name": "PurelyApplied/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/cache/LocalRegion.java",
"license": "apache-2.0",
"size": 391137
} | [
"java.util.concurrent.locks.Lock",
"org.apache.geode.cache.Scope",
"org.apache.geode.internal.Assert"
] | import java.util.concurrent.locks.Lock; import org.apache.geode.cache.Scope; import org.apache.geode.internal.Assert; | import java.util.concurrent.locks.*; import org.apache.geode.cache.*; import org.apache.geode.internal.*; | [
"java.util",
"org.apache.geode"
] | java.util; org.apache.geode; | 1,585,765 |
public interface DynamicNumeric extends DynamicData {
public default Number getDataNumeric()
{
return (Number)getDataObject();
} | interface DynamicNumeric extends DynamicData { public default Number function() { return (Number)getDataObject(); } | /**
* Gets the data as a Number
*
* @return the data as a number
*/ | Gets the data as a Number | getDataNumeric | {
"repo_name": "OSEHRA/ISAAC",
"path": "core/api/src/main/java/sh/isaac/api/component/semantic/version/dynamic/types/DynamicNumeric.java",
"license": "apache-2.0",
"size": 1967
} | [
"sh.isaac.api.component.semantic.version.dynamic.DynamicData"
] | import sh.isaac.api.component.semantic.version.dynamic.DynamicData; | import sh.isaac.api.component.semantic.version.dynamic.*; | [
"sh.isaac.api"
] | sh.isaac.api; | 2,069,671 |
@Override
protected void msaProcedure() {
Stopwatch procTimer = new Stopwatch();
setStatus(ProcedureStatus.INITIALIZATION);
setInitialized(false);
int poolSize = getParameter(MSAGlobalParameters.POOL_SIZE);
double initialProp = getParameter(MSAGlobalParameters.POOL_INITIAL_PROPORTION);
int sampledReqCount = getParameter(MSAGlobalParameters.SAMPLED_REQUEST_COUNT);
ScenarioGeneratorParam params = new ScenarioGeneratorParam((int) (poolSize * initialProp),
getParameter(MSAGlobalParameters.GEN_MAX_SCEN_OPT_TIME), new RequestSamplerParam(
sampledReqCount));
procTimer.start();
MSALogging.getProcedureLogger().info("Scenario pool initialization started: %s", params);
mComponentManager.generateScenarios(params);
setStatus(ProcedureStatus.INITIALIZED);
setInitialized(true);
procTimer.stop();
MSALogging.getProcedureLogger().info(
"Scenario pool initialization terminated in %ss (%s scenarios in the pool)",
procTimer.readTimeS(), mPool.size());
MSALogging.getProcedureLogger().debug("Current state: %ss", this);
mEventFactory.raiseOptimizeEvent();
setStatus(ProcedureStatus.RUNNING);
while (isRunning()) {
// TODO add periodic cleaning of the pool
MSALogging.getProcedureLogger().lowDebug("Processing of the next event");
try {
waitUntilUnpaused();
} catch (InterruptedException e) {
// The waiting has been interrupted
// - Log the error
MSALogging
.getProcedureLogger()
.exception(
"MSASequential.msaProcedure while waiting for the next event, aborting the MSA procedure",
e);
// - Terminate the MSA procedure
setStatus(ProcedureStatus.EXCEPTION);
stop();
}
// Get the next event
setCurrentEvent(null);
try {
setCurrentEvent(this.takeNextEvent());
} catch (InterruptedException e) {
MSALogging
.getProcedureLogger()
.exception(
"MSASequential.msaProcedure while retrieving the next event, aborting the MSA procedure",
e);
// - Terminate the MSA procedure
setStatus(ProcedureStatus.EXCEPTION);
stop();
}
// Abort if the the procedure is no longer running
if (!checkRunningState()) {
break;
}
// If the event is null, then this iteration should be
// aborted
if (getCurrentEvent() == null) {
break;
}
MSALogging.getProcedureLogger().lowDebug("Next event successfully retreived: %s",
getCurrentEvent());
MSALogging.getProcedureLogger().lowDebug("Event queue: %s)",
Arrays.toString(mEventQueue.getPendingEvents()));
// Get the associated event handler
IEventHandler<MSAEvent> handler = mEventHandlerManager
.getEventHandler(getCurrentEvent());
// Check if the handler exists
if (handler == null) {
// Log the error
MSALogging
.getProcedureLogger()
.error("MSA procedure was not able to handle the event : there is no hanlder associated with this event (%s)",
getCurrentEvent());
} else {
MSALogging.getProcedureLogger().lowDebug(
"Event handler successfully retreived (%s)", handler);
// Abort if the the procedure is no longer running
if (!checkRunningState()) {
break;
}
// Execute callbacks
callbacks(MSA_NEW_EVENT, getCurrentEvent(), handler);
// Abort if the the procedure is no longer running
if (!checkRunningState()) {
break;
}
// Check if the handler can handle the event
if (handler.canHandleEvent(getCurrentEvent())) {
try {
waitUntilUnpaused();
} catch (InterruptedException e) {
// The waiting has been interrupted
// - Log the error
MSALogging
.getProcedureLogger()
.exception(
"MSASequential.msaProcedure while waiting for the next event, aborting the MSA procedure",
e);
// - Terminate the MSA procedure
stop();
}
// Handle the event
// Add parameters when relevant
if (getCurrentEvent() instanceof GenerateEvent) {
((GenerateEvent) getCurrentEvent()).setParameters(getGenerateParameters());
} else if (getCurrentEvent() instanceof OptimizeEvent) {
((OptimizeEvent) getCurrentEvent())
.setParameters(getOptimizeParameters(getCurrentEvent()));
}
MSALogging.getProcedureLogger().lowDebug(
"Handling of the event %s by handler %s", getCurrentEvent(), handler);
// Execute callbacks
callbacks(MSA_EVENT_HANDLING_START, getCurrentEvent(), handler);
// Abort if the the procedure is no longer running
if (!checkRunningState()) {
break;
}
// Handle the event
try {
handler.handleEvent(getCurrentEvent());
} catch (EventHandlingException e) {
// Log the exception
MSALogging.getProcedureLogger().error(
"MSA procedure was not able to handle the event %s",
getCurrentEvent(), e);
}
} else {
// The handler cannot handle the event: log the error
MSALogging
.getProcedureLogger()
.warn("MSA procedure was not able to handle the next event: the associated handler %s cannot handle the event %s, current state: %s",
handler, getCurrentEvent(), this);
}
}
MSALogging.getProcedureLogger().lowDebug("Event handling finished");
// Abort if the the procedure is no longer running
if (!checkRunningState()) {
break;
}
// Execute the callbacks
callbacks(MSA_EVENT_HANDLING_END, getCurrentEvent(), handler);
}
setStatus(ProcedureStatus.TERMINATED);
}
| void function() { Stopwatch procTimer = new Stopwatch(); setStatus(ProcedureStatus.INITIALIZATION); setInitialized(false); int poolSize = getParameter(MSAGlobalParameters.POOL_SIZE); double initialProp = getParameter(MSAGlobalParameters.POOL_INITIAL_PROPORTION); int sampledReqCount = getParameter(MSAGlobalParameters.SAMPLED_REQUEST_COUNT); ScenarioGeneratorParam params = new ScenarioGeneratorParam((int) (poolSize * initialProp), getParameter(MSAGlobalParameters.GEN_MAX_SCEN_OPT_TIME), new RequestSamplerParam( sampledReqCount)); procTimer.start(); MSALogging.getProcedureLogger().info(STR, params); mComponentManager.generateScenarios(params); setStatus(ProcedureStatus.INITIALIZED); setInitialized(true); procTimer.stop(); MSALogging.getProcedureLogger().info( STR, procTimer.readTimeS(), mPool.size()); MSALogging.getProcedureLogger().debug(STR, this); mEventFactory.raiseOptimizeEvent(); setStatus(ProcedureStatus.RUNNING); while (isRunning()) { MSALogging.getProcedureLogger().lowDebug(STR); try { waitUntilUnpaused(); } catch (InterruptedException e) { MSALogging .getProcedureLogger() .exception( STR, e); setStatus(ProcedureStatus.EXCEPTION); stop(); } setCurrentEvent(null); try { setCurrentEvent(this.takeNextEvent()); } catch (InterruptedException e) { MSALogging .getProcedureLogger() .exception( STR, e); setStatus(ProcedureStatus.EXCEPTION); stop(); } if (!checkRunningState()) { break; } if (getCurrentEvent() == null) { break; } MSALogging.getProcedureLogger().lowDebug(STR, getCurrentEvent()); MSALogging.getProcedureLogger().lowDebug(STR, Arrays.toString(mEventQueue.getPendingEvents())); IEventHandler<MSAEvent> handler = mEventHandlerManager .getEventHandler(getCurrentEvent()); if (handler == null) { MSALogging .getProcedureLogger() .error(STR, getCurrentEvent()); } else { MSALogging.getProcedureLogger().lowDebug( STR, handler); if (!checkRunningState()) { break; } callbacks(MSA_NEW_EVENT, getCurrentEvent(), handler); if (!checkRunningState()) { break; } if 
(handler.canHandleEvent(getCurrentEvent())) { try { waitUntilUnpaused(); } catch (InterruptedException e) { MSALogging .getProcedureLogger() .exception( STR, e); stop(); } if (getCurrentEvent() instanceof GenerateEvent) { ((GenerateEvent) getCurrentEvent()).setParameters(getGenerateParameters()); } else if (getCurrentEvent() instanceof OptimizeEvent) { ((OptimizeEvent) getCurrentEvent()) .setParameters(getOptimizeParameters(getCurrentEvent())); } MSALogging.getProcedureLogger().lowDebug( STR, getCurrentEvent(), handler); callbacks(MSA_EVENT_HANDLING_START, getCurrentEvent(), handler); if (!checkRunningState()) { break; } try { handler.handleEvent(getCurrentEvent()); } catch (EventHandlingException e) { MSALogging.getProcedureLogger().error( STR, getCurrentEvent(), e); } } else { MSALogging .getProcedureLogger() .warn(STR, handler, getCurrentEvent(), this); } } MSALogging.getProcedureLogger().lowDebug(STR); if (!checkRunningState()) { break; } callbacks(MSA_EVENT_HANDLING_END, getCurrentEvent(), handler); } setStatus(ProcedureStatus.TERMINATED); } | /**
* The main method for the MSA procedure
*/ | The main method for the MSA procedure | msaProcedure | {
"repo_name": "vpillac/vroom",
"path": "jMSA/src/vroom/optimization/online/jmsa/MSASequential.java",
"license": "gpl-3.0",
"size": 12228
} | [
"java.util.Arrays"
] | import java.util.Arrays; | import java.util.*; | [
"java.util"
] | java.util; | 2,173,807 |
public static Path getMobFamilyPath(Path regionPath, String familyName) {
return new Path(regionPath, familyName);
} | static Path function(Path regionPath, String familyName) { return new Path(regionPath, familyName); } | /**
* Gets the family dir of the mob files.
* It's {HBASE_DIR}/mobdir/{namespace}/{tableName}/{regionEncodedName}/{columnFamilyName}.
* @param regionPath The path of mob region which is a dummy one.
* @param familyName The current family name.
* @return The family dir of the mob files.
*/ | Gets the family dir of the mob files. It's {HBASE_DIR}/mobdir/{namespace}/{tableName}/{regionEncodedName}/{columnFamilyName} | getMobFamilyPath | {
"repo_name": "gustavoanatoly/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java",
"license": "apache-2.0",
"size": 39255
} | [
"org.apache.hadoop.fs.Path"
] | import org.apache.hadoop.fs.Path; | import org.apache.hadoop.fs.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 1,947,813 |
public final StyleableProperty<Font> createStyleableFontProperty(
S styleable,
String propertyName,
String cssProperty,
Function<S, StyleableProperty<Font>> function,
Font initialValue) {
return createStyleableFontProperty(styleable, propertyName, cssProperty, function, initialValue, true);
}
/**
* Create a StyleableProperty<Font>. The initial value defaults to {@link javafx.scene.text.Font#getDefault()} | final StyleableProperty<Font> function( S styleable, String propertyName, String cssProperty, Function<S, StyleableProperty<Font>> function, Font initialValue) { return createStyleableFontProperty(styleable, propertyName, cssProperty, function, initialValue, true); } /** * Create a StyleableProperty<Font>. The initial value defaults to {@link javafx.scene.text.Font#getDefault()} | /**
* Create a StyleableProperty<Font> with initial value. The inherit flag defaults to true.
* @param styleable The <code>this</code> reference of the returned property. This is also the property bean.
* @param propertyName The field name of the StyleableProperty<Font>
* @param cssProperty The CSS property name
* @param function A function that returns the StyleableProperty<Font> that was created by this method call.
* @param initialValue The initial value of the property. CSS may reset the property to this value.
*/ | Create a StyleableProperty<Font> with initial value. The inherit flag defaults to true | createStyleableFontProperty | {
"repo_name": "teamfx/openjfx-8u-dev-rt",
"path": "modules/graphics/src/main/java/javafx/css/StyleablePropertyFactory.java",
"license": "gpl-2.0",
"size": 105926
} | [
"java.util.function.Function"
] | import java.util.function.Function; | import java.util.function.*; | [
"java.util"
] | java.util; | 1,702,272 |
if (minLength == null) {
return new BigInteger("0");
} else {
return minLength;
}
}
/**
* Sets the value of the minLength property.
*
* @param value
* allowed object is
* {@link BigInteger } | if (minLength == null) { return new BigInteger("0"); } else { return minLength; } } /** * Sets the value of the minLength property. * * @param value * allowed object is * {@link BigInteger } | /**
* Gets the value of the minLength property.
*
* @return
* possible object is
* {@link BigInteger }
*
*/ | Gets the value of the minLength property | getMinLength | {
"repo_name": "rob3rtb/atdl4j",
"path": "src/main/java/org/atdl4j/fixatdl/core/StringT.java",
"license": "mit",
"size": 3251
} | [
"java.math.BigInteger"
] | import java.math.BigInteger; | import java.math.*; | [
"java.math"
] | java.math; | 235,646 |
public Object loadInstrument(SecurityContext ctx, long instrumentID)
throws DSOutOfServiceException, DSAccessException;
| Object function(SecurityContext ctx, long instrumentID) throws DSOutOfServiceException, DSAccessException; | /**
* Loads the instrument.
*
* @param ctx The security context.
* @param instrumentID The id of the instrument.
* @return See above.
* @throws DSOutOfServiceException If the connection is broken, or logged
* in.
* @throws DSAccessException If an error occurred while trying to
* retrieve data from OMEDS service.
*/ | Loads the instrument | loadInstrument | {
"repo_name": "jballanc/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/env/data/OmeroMetadataService.java",
"license": "gpl-2.0",
"size": 30267
} | [
"org.openmicroscopy.shoola.env.data.util.SecurityContext"
] | import org.openmicroscopy.shoola.env.data.util.SecurityContext; | import org.openmicroscopy.shoola.env.data.util.*; | [
"org.openmicroscopy.shoola"
] | org.openmicroscopy.shoola; | 1,572,824 |
@Test
public final void testReadDataCompleteReadIOException() throws IOException {
// Setup the resources for the test.
TestAbstractSerialPort port = Mockito.spy(new TestAbstractSerialPort("COM1", 9600));
byte[] data = new byte[20];
int offset = 0;
int length = data.length;
Mockito.doThrow(new IOException("IO exception: Read")).when(mockInputStream).read(data, offset, length);
exception.expect(IOException.class);
exception.expectMessage(is(equalTo("IO exception: Read")));
// Call the method under test.
port.readData(data, offset, length);
}
| final void function() throws IOException { TestAbstractSerialPort port = Mockito.spy(new TestAbstractSerialPort("COM1", 9600)); byte[] data = new byte[20]; int offset = 0; int length = data.length; Mockito.doThrow(new IOException(STR)).when(mockInputStream).read(data, offset, length); exception.expect(IOException.class); exception.expectMessage(is(equalTo(STR))); port.readData(data, offset, length); } | /**
* Test method for {@link com.digi.xbee.api.connection.serial.AbstractSerialPort#readData(byte[], int, int)}.
*
* @throws IOException
*/ | Test method for <code>com.digi.xbee.api.connection.serial.AbstractSerialPort#readData(byte[], int, int)</code> | testReadDataCompleteReadIOException | {
"repo_name": "digidotcom/XBeeJavaLibrary",
"path": "library/src/test/java/com/digi/xbee/api/connection/serial/AbstractSerialPortTest.java",
"license": "mpl-2.0",
"size": 65354
} | [
"java.io.IOException",
"org.mockito.Mockito"
] | import java.io.IOException; import org.mockito.Mockito; | import java.io.*; import org.mockito.*; | [
"java.io",
"org.mockito"
] | java.io; org.mockito; | 489,705 |
public float readCumulative() {
SharedPreferences sharedPref = getPreferences(Context.MODE_PRIVATE);
return sharedPref.getFloat(Constants.CUMULATIVE_PREFS_KEY, 0);
} | float function() { SharedPreferences sharedPref = getPreferences(Context.MODE_PRIVATE); return sharedPref.getFloat(Constants.CUMULATIVE_PREFS_KEY, 0); } | /**
* Read cumulative value from shared preferences
*
* @return float of cumulative value from stored shared prefs
*/ | Read cumulative value from shared preferences | readCumulative | {
"repo_name": "szeidner/movement-gauge",
"path": "app/src/main/java/com/stevezeidner/movementgauge/MainActivity.java",
"license": "apache-2.0",
"size": 10214
} | [
"android.content.Context",
"android.content.SharedPreferences",
"com.stevezeidner.movementgauge.core.Constants"
] | import android.content.Context; import android.content.SharedPreferences; import com.stevezeidner.movementgauge.core.Constants; | import android.content.*; import com.stevezeidner.movementgauge.core.*; | [
"android.content",
"com.stevezeidner.movementgauge"
] | android.content; com.stevezeidner.movementgauge; | 2,223,821 |
public OperationInfo setCreatedDateTime(OffsetDateTime createdDateTime) {
this.createdDateTime = createdDateTime;
return this;
} | OperationInfo function(OffsetDateTime createdDateTime) { this.createdDateTime = createdDateTime; return this; } | /**
* Set the createdDateTime property: Date and time (UTC) when the operation was created.
*
* @param createdDateTime the createdDateTime value to set.
* @return the OperationInfo object itself.
*/ | Set the createdDateTime property: Date and time (UTC) when the operation was created | setCreatedDateTime | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/formrecognizer/azure-ai-formrecognizer/src/main/java/com/azure/ai/formrecognizer/implementation/models/OperationInfo.java",
"license": "mit",
"size": 6787
} | [
"java.time.OffsetDateTime"
] | import java.time.OffsetDateTime; | import java.time.*; | [
"java.time"
] | java.time; | 61,684 |
public void setStatus(NarrativeStatusEnum theValue) {
getStatus().setValueAsEnum(theValue);
} | void function(NarrativeStatusEnum theValue) { getStatus().setValueAsEnum(theValue); } | /**
* Sets the value(s) for <b>status</b> (generated | extensions | additional)
*
* <p>
* <b>Definition:</b>
* The status of the narrative - whether it's entirely generated (from just the defined data or the extensions too), or whether a human authored it and it may contain additional data
* </p>
*/ | Sets the value(s) for status (generated | extensions | additional) Definition: The status of the narrative - whether it's entirely generated (from just the defined data or the extensions too), or whether a human authored it and it may contain additional data | setStatus | {
"repo_name": "eug48/hapi-fhir",
"path": "hapi-fhir-structures-dstu/src/main/java/ca/uhn/fhir/model/dstu/composite/NarrativeDt.java",
"license": "apache-2.0",
"size": 4485
} | [
"ca.uhn.fhir.model.dstu.valueset.NarrativeStatusEnum"
] | import ca.uhn.fhir.model.dstu.valueset.NarrativeStatusEnum; | import ca.uhn.fhir.model.dstu.valueset.*; | [
"ca.uhn.fhir"
] | ca.uhn.fhir; | 660,370 |
public List<WSDLElement> getElementsOfLocalSimpleType(LocalSimpleType localSimpleType, String targetNamespace) {
List<WSDLElement> wsdlElements = new ArrayList<>();
if (localSimpleType.getRestriction() != null) {
org.ow2.easywsdl.schema.org.w3._2001.xmlschema.Restriction restrictionType = localSimpleType
.getRestriction();
WSDLElement wsdlElm = new WSDLElement(Constants.RESTRICTION_BASE_TYPE, getTypeWithNamespace
(targetNamespace, restrictionType.getBase()));
wsdlElements.add(wsdlElm);
} else if (localSimpleType.getList() != null) {
org.ow2.easywsdl.schema.org.w3._2001.xmlschema.List list = localSimpleType.getList();
if (list.getItemType() != null) {
WSDLElement wsdlElm = new WSDLElement("list_type", getTypeWithNamespace(targetNamespace, list
.getItemType()));
wsdlElements.add(wsdlElm);
} else if (list.getSimpleType() != null) {
wsdlElements = getElementsOfLocalSimpleType(list.getSimpleType(), targetNamespace);
}
} else if (localSimpleType.getUnion() != null) {
Union union = localSimpleType.getUnion();
List<QName> simpleTypeNames = union.getMemberTypes();
for (QName simpleTypeName : simpleTypeNames) {
WSDLElement wsdlElement = new WSDLElement("union_param", getTypeWithNamespace(targetNamespace,
simpleTypeName));
wsdlElements.add(wsdlElement);
}
}
return wsdlElements;
} | List<WSDLElement> function(LocalSimpleType localSimpleType, String targetNamespace) { List<WSDLElement> wsdlElements = new ArrayList<>(); if (localSimpleType.getRestriction() != null) { org.ow2.easywsdl.schema.org.w3._2001.xmlschema.Restriction restrictionType = localSimpleType .getRestriction(); WSDLElement wsdlElm = new WSDLElement(Constants.RESTRICTION_BASE_TYPE, getTypeWithNamespace (targetNamespace, restrictionType.getBase())); wsdlElements.add(wsdlElm); } else if (localSimpleType.getList() != null) { org.ow2.easywsdl.schema.org.w3._2001.xmlschema.List list = localSimpleType.getList(); if (list.getItemType() != null) { WSDLElement wsdlElm = new WSDLElement(STR, getTypeWithNamespace(targetNamespace, list .getItemType())); wsdlElements.add(wsdlElm); } else if (list.getSimpleType() != null) { wsdlElements = getElementsOfLocalSimpleType(list.getSimpleType(), targetNamespace); } } else if (localSimpleType.getUnion() != null) { Union union = localSimpleType.getUnion(); List<QName> simpleTypeNames = union.getMemberTypes(); for (QName simpleTypeName : simpleTypeNames) { WSDLElement wsdlElement = new WSDLElement(STR, getTypeWithNamespace(targetNamespace, simpleTypeName)); wsdlElements.add(wsdlElement); } } return wsdlElements; } | /**
* This method returns a list of sub elements when a local simple type is given
*
* @param localSimpleType LocalSimpleType element
* @param targetNamespace TargetNamespace of the element
* @return A list of sub elements
*/ | This method returns a list of sub elements when a local simple type is given | getElementsOfLocalSimpleType | {
"repo_name": "wso2/carbon-governance-extensions",
"path": "components/governance-extensions/org.wso2.carbon.governance.soap.viewer/src/main/java/org/wso2/carbon/governance/soap/viewer/WSDLVisualizer.java",
"license": "apache-2.0",
"size": 66401
} | [
"java.util.ArrayList",
"java.util.List",
"javax.xml.namespace.QName",
"org.ow2.easywsdl.schema.api.Restriction",
"org.ow2.easywsdl.schema.org.w3._2001.xmlschema.LocalSimpleType",
"org.ow2.easywsdl.schema.org.w3._2001.xmlschema.Union"
] | import java.util.ArrayList; import java.util.List; import javax.xml.namespace.QName; import org.ow2.easywsdl.schema.api.Restriction; import org.ow2.easywsdl.schema.org.w3._2001.xmlschema.LocalSimpleType; import org.ow2.easywsdl.schema.org.w3._2001.xmlschema.Union; | import java.util.*; import javax.xml.namespace.*; import org.ow2.easywsdl.schema.api.*; import org.ow2.easywsdl.schema.org.w3.*; | [
"java.util",
"javax.xml",
"org.ow2.easywsdl"
] | java.util; javax.xml; org.ow2.easywsdl; | 871,086 |
@Override
public boolean start() throws ContainerException {
ContainerConfig.Configuration cfg = ContainerConfig.getConfiguration(name, configFile);
String dispatcherName = ContainerConfig.getPropertyValue(cfg, "dispatcher-name", "JavaMailDispatcher");
String delegatorName = ContainerConfig.getPropertyValue(cfg, "delegator-name", "default");
this.deleteMail = "true".equals(ContainerConfig.getPropertyValue(cfg, "delete-mail", "false"));
this.delegator = DelegatorFactory.getDelegator(delegatorName);
this.dispatcher = ServiceContainer.getLocalDispatcher(dispatcherName, delegator);
this.timerDelay = ContainerConfig.getPropertyValue(cfg, "poll-delay", 300000);
this.maxSize = ContainerConfig.getPropertyValue(cfg, "maxSize", 1000000); // maximum size in bytes
// load the userLogin object
String runAsUser = ContainerConfig.getPropertyValue(cfg, "run-as-user", "system");
try {
this.userLogin = EntityQuery.use(delegator).from("UserLogin").where("userLoginId", runAsUser).queryOne();
} catch (GenericEntityException e) {
Debug.logError(e, "Unable to load run-as-user UserLogin; cannot start container", module);
return false;
}
// load the MCA configuration
ServiceMcaUtil.readConfig();
// load the listeners
List<ContainerConfig.Configuration.Property> configs = cfg.getPropertiesWithValue("store-listener");
for (ContainerConfig.Configuration.Property prop: configs) {
Session session = this.makeSession(prop);
Store store = this.getStore(session);
if (store != null) {
stores.put(store, session);
store.addStoreListener(new LoggingStoreListener());
}
}
// start the polling timer
if (stores != null) {
pollTimer.scheduleAtFixedRate(new PollerTask(dispatcher, userLogin), timerDelay, timerDelay, TimeUnit.MILLISECONDS);
} else {
Debug.logWarning("No JavaMail Store(s) configured; poller disabled.", module);
}
return true;
} | boolean function() throws ContainerException { ContainerConfig.Configuration cfg = ContainerConfig.getConfiguration(name, configFile); String dispatcherName = ContainerConfig.getPropertyValue(cfg, STR, STR); String delegatorName = ContainerConfig.getPropertyValue(cfg, STR, STR); this.deleteMail = "true".equals(ContainerConfig.getPropertyValue(cfg, STR, "false")); this.delegator = DelegatorFactory.getDelegator(delegatorName); this.dispatcher = ServiceContainer.getLocalDispatcher(dispatcherName, delegator); this.timerDelay = ContainerConfig.getPropertyValue(cfg, STR, 300000); this.maxSize = ContainerConfig.getPropertyValue(cfg, STR, 1000000); String runAsUser = ContainerConfig.getPropertyValue(cfg, STR, STR); try { this.userLogin = EntityQuery.use(delegator).from(STR).where(STR, runAsUser).queryOne(); } catch (GenericEntityException e) { Debug.logError(e, STR, module); return false; } ServiceMcaUtil.readConfig(); List<ContainerConfig.Configuration.Property> configs = cfg.getPropertiesWithValue(STR); for (ContainerConfig.Configuration.Property prop: configs) { Session session = this.makeSession(prop); Store store = this.getStore(session); if (store != null) { stores.put(store, session); store.addStoreListener(new LoggingStoreListener()); } } if (stores != null) { pollTimer.scheduleAtFixedRate(new PollerTask(dispatcher, userLogin), timerDelay, timerDelay, TimeUnit.MILLISECONDS); } else { Debug.logWarning(STR, module); } return true; } | /**
* Start the container
*
* @return true if server started
* @throws org.apache.ofbiz.base.container.ContainerException
*
*/ | Start the container | start | {
"repo_name": "rohankarthik/Ofbiz",
"path": "framework/service/src/main/java/org/apache/ofbiz/service/mail/JavaMailContainer.java",
"license": "apache-2.0",
"size": 15663
} | [
"java.util.List",
"java.util.concurrent.TimeUnit",
"javax.mail.Session",
"javax.mail.Store",
"org.apache.ofbiz.base.container.ContainerConfig",
"org.apache.ofbiz.base.container.ContainerException",
"org.apache.ofbiz.base.util.Debug",
"org.apache.ofbiz.entity.DelegatorFactory",
"org.apache.ofbiz.enti... | import java.util.List; import java.util.concurrent.TimeUnit; import javax.mail.Session; import javax.mail.Store; import org.apache.ofbiz.base.container.ContainerConfig; import org.apache.ofbiz.base.container.ContainerException; import org.apache.ofbiz.base.util.Debug; import org.apache.ofbiz.entity.DelegatorFactory; import org.apache.ofbiz.entity.GenericEntityException; import org.apache.ofbiz.entity.util.EntityQuery; import org.apache.ofbiz.service.ServiceContainer; | import java.util.*; import java.util.concurrent.*; import javax.mail.*; import org.apache.ofbiz.base.container.*; import org.apache.ofbiz.base.util.*; import org.apache.ofbiz.entity.*; import org.apache.ofbiz.entity.util.*; import org.apache.ofbiz.service.*; | [
"java.util",
"javax.mail",
"org.apache.ofbiz"
] | java.util; javax.mail; org.apache.ofbiz; | 415,010 |
public ActionForward performReportDump(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
ReportRunnerForm reportRunnerForm = (ReportRunnerForm) form;
String principalName = GlobalVariables.getUserSession().getPerson().getPrincipalId();
int selectIndex = this.getSelectedLine(request);
String reportModeName = reportRunnerForm.getBudgetConstructionDocumentReportModes().get(selectIndex).getReportModeName();
Collection reportSet = new ArrayList();
String jasperFileName = null;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
switch (selectIndex) {
case 0: {
jasperFileName = "BudgetAccountObjectDetail";
SpringContext.getBean(BudgetConstructionDocumentAccountObjectDetailReportService.class).updateDocumentAccountObjectDetailReportTable(principalName, reportRunnerForm.getDocumentNumber(), reportRunnerForm.getUniversityFiscalYear(), reportRunnerForm.getChartOfAccountsCode(), reportRunnerForm.getAccountNumber(), reportRunnerForm.getSubAccountNumber());
reportSet = SpringContext.getBean(BudgetConstructionDocumentAccountObjectDetailReportService.class).buildReports(principalName);
break;
}
case 1: {
jasperFileName = "BudgetAccountSalaryDetail";
reportSet = SpringContext.getBean(BudgetConstructionAccountSalaryDetailReportService.class).buildReports(reportRunnerForm.getUniversityFiscalYear(), reportRunnerForm.getChartOfAccountsCode(), reportRunnerForm.getAccountNumber(), reportRunnerForm.getSubAccountNumber());
break;
}
case 2: {
jasperFileName = "BudgetAccountMonthlyDetail";
reportSet = SpringContext.getBean(BudgetConstructionAccountMonthlyDetailReportService.class).buildReports(reportRunnerForm.getDocumentNumber(), reportRunnerForm.getUniversityFiscalYear(), reportRunnerForm.getChartOfAccountsCode(), reportRunnerForm.getAccountNumber(), reportRunnerForm.getSubAccountNumber());
break;
}
case 3: {
return new ActionForward(buildReportExportForwardURL(reportRunnerForm, mapping, BudgetConstructionReportMode.ACCOUNT_EXPORT.reportModeName), true);
}
case 4: {
return new ActionForward(buildReportExportForwardURL(reportRunnerForm, mapping, BudgetConstructionReportMode.FUNDING_EXPORT.reportModeName), true);
}
case 5: {
return new ActionForward(buildReportExportForwardURL(reportRunnerForm, mapping, BudgetConstructionReportMode.MONTHLY_EXPORT.reportModeName), true);
}
}
if (reportSet.isEmpty()) {
List<String> messageList = new ArrayList<String>();
messageList.add(BCConstants.Report.MSG_REPORT_NO_DATA);
SpringContext.getBean(BudgetConstructionReportsServiceHelper.class).generatePdf(messageList, baos);
WebUtils.saveMimeOutputStreamAsFile(response, ReportGeneration.PDF_MIME_TYPE, baos, jasperFileName + ReportGeneration.PDF_FILE_EXTENSION);
}
else {
ResourceBundle resourceBundle = ResourceBundle.getBundle(BCConstants.Report.REPORT_MESSAGES_CLASSPATH, Locale.getDefault());
Map<String, Object> reportData = new HashMap<String, Object>();
reportData.put(JRParameter.REPORT_RESOURCE_BUNDLE, resourceBundle);
SpringContext.getBean(ReportGenerationService.class).generateReportToOutputStream(reportData, reportSet, BCConstants.Report.REPORT_TEMPLATE_CLASSPATH + jasperFileName, baos);
WebUtils.saveMimeOutputStreamAsFile(response, ReportGeneration.PDF_MIME_TYPE, baos, jasperFileName + ReportGeneration.PDF_FILE_EXTENSION);
}
return null;
} | ActionForward function(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ReportRunnerForm reportRunnerForm = (ReportRunnerForm) form; String principalName = GlobalVariables.getUserSession().getPerson().getPrincipalId(); int selectIndex = this.getSelectedLine(request); String reportModeName = reportRunnerForm.getBudgetConstructionDocumentReportModes().get(selectIndex).getReportModeName(); Collection reportSet = new ArrayList(); String jasperFileName = null; ByteArrayOutputStream baos = new ByteArrayOutputStream(); switch (selectIndex) { case 0: { jasperFileName = STR; SpringContext.getBean(BudgetConstructionDocumentAccountObjectDetailReportService.class).updateDocumentAccountObjectDetailReportTable(principalName, reportRunnerForm.getDocumentNumber(), reportRunnerForm.getUniversityFiscalYear(), reportRunnerForm.getChartOfAccountsCode(), reportRunnerForm.getAccountNumber(), reportRunnerForm.getSubAccountNumber()); reportSet = SpringContext.getBean(BudgetConstructionDocumentAccountObjectDetailReportService.class).buildReports(principalName); break; } case 1: { jasperFileName = STR; reportSet = SpringContext.getBean(BudgetConstructionAccountSalaryDetailReportService.class).buildReports(reportRunnerForm.getUniversityFiscalYear(), reportRunnerForm.getChartOfAccountsCode(), reportRunnerForm.getAccountNumber(), reportRunnerForm.getSubAccountNumber()); break; } case 2: { jasperFileName = STR; reportSet = SpringContext.getBean(BudgetConstructionAccountMonthlyDetailReportService.class).buildReports(reportRunnerForm.getDocumentNumber(), reportRunnerForm.getUniversityFiscalYear(), reportRunnerForm.getChartOfAccountsCode(), reportRunnerForm.getAccountNumber(), reportRunnerForm.getSubAccountNumber()); break; } case 3: { return new ActionForward(buildReportExportForwardURL(reportRunnerForm, mapping, BudgetConstructionReportMode.ACCOUNT_EXPORT.reportModeName), true); } case 4: { return new 
ActionForward(buildReportExportForwardURL(reportRunnerForm, mapping, BudgetConstructionReportMode.FUNDING_EXPORT.reportModeName), true); } case 5: { return new ActionForward(buildReportExportForwardURL(reportRunnerForm, mapping, BudgetConstructionReportMode.MONTHLY_EXPORT.reportModeName), true); } } if (reportSet.isEmpty()) { List<String> messageList = new ArrayList<String>(); messageList.add(BCConstants.Report.MSG_REPORT_NO_DATA); SpringContext.getBean(BudgetConstructionReportsServiceHelper.class).generatePdf(messageList, baos); WebUtils.saveMimeOutputStreamAsFile(response, ReportGeneration.PDF_MIME_TYPE, baos, jasperFileName + ReportGeneration.PDF_FILE_EXTENSION); } else { ResourceBundle resourceBundle = ResourceBundle.getBundle(BCConstants.Report.REPORT_MESSAGES_CLASSPATH, Locale.getDefault()); Map<String, Object> reportData = new HashMap<String, Object>(); reportData.put(JRParameter.REPORT_RESOURCE_BUNDLE, resourceBundle); SpringContext.getBean(ReportGenerationService.class).generateReportToOutputStream(reportData, reportSet, BCConstants.Report.REPORT_TEMPLATE_CLASSPATH + jasperFileName, baos); WebUtils.saveMimeOutputStreamAsFile(response, ReportGeneration.PDF_MIME_TYPE, baos, jasperFileName + ReportGeneration.PDF_FILE_EXTENSION); } return null; } | /**
* Runs the reports or dump selected by the user using the BudgetConstructionDocumentReportMode to help determine the various
* objects needed to actually build the report data and render the report.
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/ | Runs the reports or dump selected by the user using the BudgetConstructionDocumentReportMode to help determine the various objects needed to actually build the report data and render the report | performReportDump | {
"repo_name": "Ariah-Group/Finance",
"path": "af_webapp/src/main/java/org/kuali/kfs/module/bc/document/web/struts/ReportRunnerAction.java",
"license": "apache-2.0",
"size": 7885
} | [
"java.io.ByteArrayOutputStream",
"java.util.ArrayList",
"java.util.Collection",
"java.util.HashMap",
"java.util.List",
"java.util.Locale",
"java.util.Map",
"java.util.ResourceBundle",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"net.sf.jasperreports.engine.... | import java.io.ByteArrayOutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.ResourceBundle; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import net.sf.jasperreports.engine.JRParameter; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.kuali.kfs.module.bc.BCConstants; import org.kuali.kfs.module.bc.BudgetConstructionReportMode; import org.kuali.kfs.module.bc.document.service.BudgetConstructionAccountMonthlyDetailReportService; import org.kuali.kfs.module.bc.document.service.BudgetConstructionAccountSalaryDetailReportService; import org.kuali.kfs.module.bc.document.service.BudgetConstructionDocumentAccountObjectDetailReportService; import org.kuali.kfs.module.bc.document.service.BudgetConstructionReportsServiceHelper; import org.kuali.kfs.sys.KFSConstants; import org.kuali.kfs.sys.context.SpringContext; import org.kuali.kfs.sys.service.ReportGenerationService; import org.kuali.rice.kns.util.WebUtils; import org.kuali.rice.krad.util.GlobalVariables; | import java.io.*; import java.util.*; import javax.servlet.http.*; import net.sf.jasperreports.engine.*; import org.apache.struts.action.*; import org.kuali.kfs.module.bc.*; import org.kuali.kfs.module.bc.document.service.*; import org.kuali.kfs.sys.*; import org.kuali.kfs.sys.context.*; import org.kuali.kfs.sys.service.*; import org.kuali.rice.kns.util.*; import org.kuali.rice.krad.util.*; | [
"java.io",
"java.util",
"javax.servlet",
"net.sf.jasperreports",
"org.apache.struts",
"org.kuali.kfs",
"org.kuali.rice"
] | java.io; java.util; javax.servlet; net.sf.jasperreports; org.apache.struts; org.kuali.kfs; org.kuali.rice; | 1,976,283 |
void filter (List<RangeParam> rangeParams)
{
List<Point> points = this.getPointsFromData ();
List<Point> newPoints = new ArrayList<Point>(points.size ());
for (Point point : points)
{
if (point.isSetSpectralAxis () &&
point.getSpectralAxis ().isSetValue () &&
point.getSpectralAxis ().getValue ().isSetValue ())
{
Double value = (Double)point.getSpectralAxis ().getValue ().getCastValue ();
String unit = point.getSpectralAxis ().getValue ().getUnit ();
for (RangeParam rangeParam : rangeParams)
{
// verify the units are the same
if ((point.getSpectralAxis ().getValue ().isSetUnit () &&
rangeParam.isSetUnit () &&
!unit.equals (rangeParam.getUnit ())) ||
(point.getSpectralAxis ().getValue ().isSetUnit () ^
rangeParam.isSetUnit ()))
continue;
if (value <= rangeParam.getMax ())
{
if (value >= rangeParam.getMin ())
newPoints.add(point);
else
break; // point not included
}
}
}
}
if (this.isSetData ())
{
ArrayOfPoint pointData = (ArrayOfPoint)this.data;
pointData.setPoint (newPoints);
}
} | void filter (List<RangeParam> rangeParams) { List<Point> points = this.getPointsFromData (); List<Point> newPoints = new ArrayList<Point>(points.size ()); for (Point point : points) { if (point.isSetSpectralAxis () && point.getSpectralAxis ().isSetValue () && point.getSpectralAxis ().getValue ().isSetValue ()) { Double value = (Double)point.getSpectralAxis ().getValue ().getCastValue (); String unit = point.getSpectralAxis ().getValue ().getUnit (); for (RangeParam rangeParam : rangeParams) { if ((point.getSpectralAxis ().getValue ().isSetUnit () && rangeParam.isSetUnit () && !unit.equals (rangeParam.getUnit ())) (point.getSpectralAxis ().getValue ().isSetUnit () ^ rangeParam.isSetUnit ())) continue; if (value <= rangeParam.getMax ()) { if (value >= rangeParam.getMin ()) newPoints.add(point); else break; } } } } if (this.isSetData ()) { ArrayOfPoint pointData = (ArrayOfPoint)this.data; pointData.setPoint (newPoints); } } | /**
* Filter the segment based on the spectral axis value and the
* specificed list of sorted ranges . Spectral values which fall
* outside of the specified ranges will be removed. The input
* range list is expected to be sorted.
*
*/ | Filter the segment based on the spectral axis value and the specificed list of sorted ranges . Spectral values which fall outside of the specified ranges will be removed. The input range list is expected to be sorted | filter | {
"repo_name": "ChandraCXC/sedlib",
"path": "src/main/java/cfa/vo/sedlib/Segment.java",
"license": "apache-2.0",
"size": 65462
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,033,469 |
KeyHolder executeAndReturnKeyHolder(SqlParameterSource parameterSource); | KeyHolder executeAndReturnKeyHolder(SqlParameterSource parameterSource); | /**
* Execute the insert using the values passed in and return the generated keys.
* <p>This requires that the name of the columns with auto generated keys have been specified.
* This method will always return a KeyHolder but the caller must verify that it actually
* contains the generated keys.
* @param parameterSource the SqlParameterSource containing values to use for insert
* @return the KeyHolder containing all generated keys
*/ | Execute the insert using the values passed in and return the generated keys. This requires that the name of the columns with auto generated keys have been specified. This method will always return a KeyHolder but the caller must verify that it actually contains the generated keys | executeAndReturnKeyHolder | {
"repo_name": "spring-projects/spring-framework",
"path": "spring-jdbc/src/main/java/org/springframework/jdbc/core/simple/SimpleJdbcInsertOperations.java",
"license": "apache-2.0",
"size": 6010
} | [
"org.springframework.jdbc.core.namedparam.SqlParameterSource",
"org.springframework.jdbc.support.KeyHolder"
] | import org.springframework.jdbc.core.namedparam.SqlParameterSource; import org.springframework.jdbc.support.KeyHolder; | import org.springframework.jdbc.core.namedparam.*; import org.springframework.jdbc.support.*; | [
"org.springframework.jdbc"
] | org.springframework.jdbc; | 1,668,669 |
String validatePreferredEmailChange(PerunSession sess, User user, String token) throws PrivilegeException, UserNotExistsException, WrongAttributeAssignmentException, AttributeNotExistsException, WrongReferenceAttributeValueException, WrongAttributeValueException; | String validatePreferredEmailChange(PerunSession sess, User user, String token) throws PrivilegeException, UserNotExistsException, WrongAttributeAssignmentException, AttributeNotExistsException, WrongReferenceAttributeValueException, WrongAttributeValueException; | /**
* Validate change of user's preferred email address.
* New email address is set as value of
* urn:perun:user:attribute-def:def:preferredMail attribute.
*
* @param sess PerunSession
* @param user User to validate email address for
* @param token token for the email change request to validate
*
* @throws InternalErrorException
* @throws PrivilegeException
* @throws UserNotExistsException
* @throws WrongAttributeAssignmentException
* @throws AttributeNotExistsException
* @throws WrongReferenceAttributeValueException
* @throws WrongAttributeValueException
*
* @return String return new preferred email
*/ | Validate change of user's preferred email address. New email address is set as value of urn:perun:user:attribute-def:def:preferredMail attribute | validatePreferredEmailChange | {
"repo_name": "mvocu/perun",
"path": "perun-core/src/main/java/cz/metacentrum/perun/core/api/UsersManager.java",
"license": "bsd-2-clause",
"size": 59026
} | [
"cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException",
"cz.metacentrum.perun.core.api.exceptions.PrivilegeException",
"cz.metacentrum.perun.core.api.exceptions.UserNotExistsException",
"cz.metacentrum.perun.core.api.exceptions.WrongAttributeAssignmentException",
"cz.metacentrum.perun.core.a... | import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException; import cz.metacentrum.perun.core.api.exceptions.PrivilegeException; import cz.metacentrum.perun.core.api.exceptions.UserNotExistsException; import cz.metacentrum.perun.core.api.exceptions.WrongAttributeAssignmentException; import cz.metacentrum.perun.core.api.exceptions.WrongAttributeValueException; import cz.metacentrum.perun.core.api.exceptions.WrongReferenceAttributeValueException; | import cz.metacentrum.perun.core.api.exceptions.*; | [
"cz.metacentrum.perun"
] | cz.metacentrum.perun; | 415,227 |
public String getAvailableServiceOperationsToExport(JavaType serviceClass); | String function(JavaType serviceClass); | /**
* <p>
* List methods available to export as web service operations.
* </p>
* <p>
* <code>serviceClass</code> must be annotated with @GvNIXWebService.
* </p>
* <p>
* Retrieves method names which aren't annotated with @GvNIXWebMethod.
* </p>
*
* @param serviceClass class to search available methods to export as web
* service operations.
*/ | List methods available to export as web service operations. <code>serviceClass</code> must be annotated with @GvNIXWebService. Retrieves method names which aren't annotated with @GvNIXWebMethod. | getAvailableServiceOperationsToExport | {
"repo_name": "osroca/gvnix",
"path": "addon-service/addon/src/main/java/org/gvnix/service/roo/addon/addon/ws/export/WSExportOperations.java",
"license": "gpl-3.0",
"size": 4450
} | [
"org.springframework.roo.model.JavaType"
] | import org.springframework.roo.model.JavaType; | import org.springframework.roo.model.*; | [
"org.springframework.roo"
] | org.springframework.roo; | 1,189,844 |
public static final void startCreditScreen(Context context, int wavRefId,
int txtRefId, int finalImageRef, String finalScoreText) {
// create the new title screen intent
Intent creditsIntent = new Intent(context, CreditsScreen.class);
creditsIntent.addFlags(Intent.FLAG_ACTIVITY_NO_HISTORY);
creditsIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
if (wavRefId != -1) {
creditsIntent
.putExtra(CreditsScreen.wavReferenceIDString, wavRefId);
}
if (txtRefId != -1) {
creditsIntent
.putExtra(CreditsScreen.txtReferenceIDString, txtRefId);
}
if (null != finalScoreText) {
creditsIntent.putExtra(CreditsScreen.finalScoreString,
finalScoreText);
}
if (finalImageRef != -1) {
creditsIntent.putExtra(CreditsScreen.finalImageIDString,
finalImageRef);
}
// start title screen.
context.startActivity(creditsIntent);
} | static final void function(Context context, int wavRefId, int txtRefId, int finalImageRef, String finalScoreText) { Intent creditsIntent = new Intent(context, CreditsScreen.class); creditsIntent.addFlags(Intent.FLAG_ACTIVITY_NO_HISTORY); creditsIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); if (wavRefId != -1) { creditsIntent .putExtra(CreditsScreen.wavReferenceIDString, wavRefId); } if (txtRefId != -1) { creditsIntent .putExtra(CreditsScreen.txtReferenceIDString, txtRefId); } if (null != finalScoreText) { creditsIntent.putExtra(CreditsScreen.finalScoreString, finalScoreText); } if (finalImageRef != -1) { creditsIntent.putExtra(CreditsScreen.finalImageIDString, finalImageRef); } context.startActivity(creditsIntent); } | /**
* Start credit screen.
*
* @param context
* the context
* @param wavRefId
* the wav ref id
* @param txtRefId
* the txt ref id
* @param finalScoreText
* the final score text
*/ | Start credit screen | startCreditScreen | {
"repo_name": "huntergdavis/AndroidEasyGameUtils",
"path": "src/com/hunterdavis/gameutils/credits/CreditsScreen.java",
"license": "bsd-3-clause",
"size": 4546
} | [
"android.content.Context",
"android.content.Intent"
] | import android.content.Context; import android.content.Intent; | import android.content.*; | [
"android.content"
] | android.content; | 2,623,512 |
boolean isDataSetOutdated(DataSetDef def); | boolean isDataSetOutdated(DataSetDef def); | /**
* Check if the specified data set definition is outdated. This means that the data set might have been updated at
* origin. When this happens, any data hold by the provider can be considered stale and a refresh is needed.
*
* @param def The data set definition to check for
* @return true if the data set has become stale. false otherwise.
*/ | Check if the specified data set definition is outdated. This means that the data set might have been updated at origin. When this happens, any data hold by the provider can be considered stale and a refresh is needed | isDataSetOutdated | {
"repo_name": "porcelli-forks/dashbuilder",
"path": "dashbuilder-shared/dashbuilder-dataset-api/src/main/java/org/dashbuilder/dataprovider/DataSetProvider.java",
"license": "apache-2.0",
"size": 3409
} | [
"org.dashbuilder.dataset.def.DataSetDef"
] | import org.dashbuilder.dataset.def.DataSetDef; | import org.dashbuilder.dataset.def.*; | [
"org.dashbuilder.dataset"
] | org.dashbuilder.dataset; | 1,050,046 |
public void addCustId(String CustId,long ValidFrom,long ValidTo,int BalanceGroup)
{
CustInfo tmpCustInfo;
// See if we already have ID for this customer
if (!CustIDCache.containsKey(CustId))
{
// Check validity dates
if (ValidTo <= ValidFrom)
{
// Otherwise write an error and ignore it
OpenRate.getOpenRateFrameworkLog().error("Customer ID <" + CustId + "> valid from <" + ValidFrom + "> is after valid to <" + ValidTo + ">. Add failed.");
return;
}
// Create the new entry for the customer ID
tmpCustInfo = new CustInfo();
tmpCustInfo.CPI = new ArrayList<>();
tmpCustInfo.ERAList = new ConcurrentHashMap<>(10);
tmpCustInfo.UTCValidFrom = ValidFrom;
tmpCustInfo.UTCValidTo = ValidTo;
tmpCustInfo.BalanceGroup = BalanceGroup;
CustIDCache.put(CustId,tmpCustInfo);
}
else
{
// Otherwise write an error and ignore it
OpenRate.getOpenRateFrameworkLog().error("Customer ID <" + CustId + "> already exists. Add failed.");
}
} | void function(String CustId,long ValidFrom,long ValidTo,int BalanceGroup) { CustInfo tmpCustInfo; if (!CustIDCache.containsKey(CustId)) { if (ValidTo <= ValidFrom) { OpenRate.getOpenRateFrameworkLog().error(STR + CustId + STR + ValidFrom + STR + ValidTo + STR); return; } tmpCustInfo = new CustInfo(); tmpCustInfo.CPI = new ArrayList<>(); tmpCustInfo.ERAList = new ConcurrentHashMap<>(10); tmpCustInfo.UTCValidFrom = ValidFrom; tmpCustInfo.UTCValidTo = ValidTo; tmpCustInfo.BalanceGroup = BalanceGroup; CustIDCache.put(CustId,tmpCustInfo); } else { OpenRate.getOpenRateFrameworkLog().error(STR + CustId + STR); } } | /**
* Add a Customer object into the CustomerCache.
*
* @param CustId The customer identifier
* @param ValidFrom Valid from date of the customer relationship
* @param ValidTo Valid to date of the customer relationship
* @param BalanceGroup The ID of the counter balance group
*/ | Add a Customer object into the CustomerCache | addCustId | {
"repo_name": "skymania/OpenRate",
"path": "src/main/java/OpenRate/cache/CustomerCache.java",
"license": "gpl-2.0",
"size": 39943
} | [
"java.util.ArrayList",
"java.util.concurrent.ConcurrentHashMap"
] | import java.util.ArrayList; import java.util.concurrent.ConcurrentHashMap; | import java.util.*; import java.util.concurrent.*; | [
"java.util"
] | java.util; | 26,992 |
ServiceCall<Void> getMethodLocalNullAsync(final ServiceCallback<Void> serviceCallback);
ServiceResponse<Void> getMethodLocalNull(String apiVersion) throws ErrorException, IOException; | ServiceCall<Void> getMethodLocalNullAsync(final ServiceCallback<Void> serviceCallback); ServiceResponse<Void> getMethodLocalNull(String apiVersion) throws ErrorException, IOException; | /**
* Get method with api-version modeled in the method. pass in api-version = null to succeed.
*
* @param apiVersion This should appear as a method parameter, use value null, this should result in no serialized parameter
* @throws ErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @return the {@link ServiceResponse} object if successful.
*/ | Get method with api-version modeled in the method. pass in api-version = null to succeed | getMethodLocalNull | {
"repo_name": "haocs/autorest",
"path": "src/generator/AutoRest.Java.Azure.Tests/src/main/java/fixtures/azurespecials/ApiVersionLocals.java",
"license": "mit",
"size": 6039
} | [
"com.microsoft.rest.ServiceCall",
"com.microsoft.rest.ServiceCallback",
"com.microsoft.rest.ServiceResponse",
"java.io.IOException"
] | import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceResponse; import java.io.IOException; | import com.microsoft.rest.*; import java.io.*; | [
"com.microsoft.rest",
"java.io"
] | com.microsoft.rest; java.io; | 854,038 |
public interface TreeRowClosure<L, T extends L> {
public boolean apply(BPlusTree<L, T> tree, BPlusIO<L> io, long pageAddr, int idx)
throws IgniteCheckedException;
} | interface TreeRowClosure<L, T extends L> { public boolean function(BPlusTree<L, T> tree, BPlusIO<L> io, long pageAddr, int idx) throws IgniteCheckedException; } | /**
* Performs inspection or operation on a specified row and returns true if this row is
* required or matches or /operation successful (depending on the context).
*
* @param tree The tree.
* @param io Th tree IO object.
* @param pageAddr The page address.
* @param idx The item index.
* @return {@code True} if the item passes the predicate.
* @throws IgniteCheckedException If failed.
*/ | Performs inspection or operation on a specified row and returns true if this row is required or matches or /operation successful (depending on the context) | apply | {
"repo_name": "sk0x50/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/tree/BPlusTree.java",
"license": "apache-2.0",
"size": 160683
} | [
"org.apache.ignite.IgniteCheckedException",
"org.apache.ignite.internal.processors.cache.persistence.tree.io.BPlusIO"
] | import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.cache.persistence.tree.io.BPlusIO; | import org.apache.ignite.*; import org.apache.ignite.internal.processors.cache.persistence.tree.io.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 1,947,692 |
public ClassFactoryCollector getClassFactoryCollector() {
return cc;
} | ClassFactoryCollector function() { return cc; } | /**
* Returns the object factory.
*
* @return The object factory.
*/ | Returns the object factory | getClassFactoryCollector | {
"repo_name": "EgorZhuk/pentaho-reporting",
"path": "engine/core/src/main/java/org/pentaho/reporting/engine/classic/core/modules/parser/extwriter/ObjectWriter.java",
"license": "lgpl-2.1",
"size": 11047
} | [
"org.pentaho.reporting.engine.classic.core.modules.parser.ext.factory.base.ClassFactoryCollector"
] | import org.pentaho.reporting.engine.classic.core.modules.parser.ext.factory.base.ClassFactoryCollector; | import org.pentaho.reporting.engine.classic.core.modules.parser.ext.factory.base.*; | [
"org.pentaho.reporting"
] | org.pentaho.reporting; | 1,868,792 |
public void updateTab(int titleRes, Class<?> fragmentClass, Bundle args, int position) {
if (position >= 0 && position < tabs.size()) {
// update tab info
tabs.set(position, new TabInfo(position, fragmentClass, args, titleRes));
// find current fragment of tab
Fragment oldFragment = fragmentManager
.findFragmentByTag(makeFragmentName(viewPager.getId(), getItemId(position)));
// remove it
FragmentTransaction transaction = fragmentManager.beginTransaction();
transaction.remove(oldFragment);
transaction.commit();
fragmentManager.executePendingTransactions();
}
} | void function(int titleRes, Class<?> fragmentClass, Bundle args, int position) { if (position >= 0 && position < tabs.size()) { tabs.set(position, new TabInfo(position, fragmentClass, args, titleRes)); Fragment oldFragment = fragmentManager .findFragmentByTag(makeFragmentName(viewPager.getId(), getItemId(position))); FragmentTransaction transaction = fragmentManager.beginTransaction(); transaction.remove(oldFragment); transaction.commit(); fragmentManager.executePendingTransactions(); } } | /**
* Update an existing tab. Make sure to call {@link #notifyTabsChanged} afterwards.
*/ | Update an existing tab. Make sure to call <code>#notifyTabsChanged</code> afterwards | updateTab | {
"repo_name": "graham22/SkyeTracker",
"path": "code/Android/app/src/main/java/com/skye/skyetracker/TabStripAdapter.java",
"license": "apache-2.0",
"size": 6185
} | [
"android.app.Fragment",
"android.app.FragmentTransaction",
"android.os.Bundle"
] | import android.app.Fragment; import android.app.FragmentTransaction; import android.os.Bundle; | import android.app.*; import android.os.*; | [
"android.app",
"android.os"
] | android.app; android.os; | 423,875 |
public boolean validateReferenceRange_validateTypeCode(ReferenceRange referenceRange, DiagnosticChain diagnostics,
Map<Object, Object> context) {
return referenceRange.validateTypeCode(diagnostics, context);
}
| boolean function(ReferenceRange referenceRange, DiagnosticChain diagnostics, Map<Object, Object> context) { return referenceRange.validateTypeCode(diagnostics, context); } | /**
* Validates the validateTypeCode constraint of '<em>Reference Range</em>'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | Validates the validateTypeCode constraint of 'Reference Range'. | validateReferenceRange_validateTypeCode | {
"repo_name": "drbgfc/mdht",
"path": "cda/plugins/org.openhealthtools.mdht.uml.cda/src/org/openhealthtools/mdht/uml/cda/util/CDAValidator.java",
"license": "epl-1.0",
"size": 206993
} | [
"java.util.Map",
"org.eclipse.emf.common.util.DiagnosticChain",
"org.openhealthtools.mdht.uml.cda.ReferenceRange"
] | import java.util.Map; import org.eclipse.emf.common.util.DiagnosticChain; import org.openhealthtools.mdht.uml.cda.ReferenceRange; | import java.util.*; import org.eclipse.emf.common.util.*; import org.openhealthtools.mdht.uml.cda.*; | [
"java.util",
"org.eclipse.emf",
"org.openhealthtools.mdht"
] | java.util; org.eclipse.emf; org.openhealthtools.mdht; | 1,728,521 |
public TypeInfo getSchemaTypeInfo(){
if(needsSyncData()) {
synchronizeData();
}
return this;
}
//
// Public methods
// | TypeInfo function(){ if(needsSyncData()) { synchronizeData(); } return this; } // | /**
* Method getSchemaTypeInfo.
* @return TypeInfo
*/ | Method getSchemaTypeInfo | getSchemaTypeInfo | {
"repo_name": "haikuowuya/android_system_code",
"path": "src/com/sun/org/apache/xerces/internal/dom/ElementImpl.java",
"license": "apache-2.0",
"size": 42956
} | [
"org.w3c.dom.TypeInfo"
] | import org.w3c.dom.TypeInfo; | import org.w3c.dom.*; | [
"org.w3c.dom"
] | org.w3c.dom; | 1,110,897 |
//EntityTeleportEvent event = new EntityTeleportEvent(entity, entity.getLocation(), to);
//Bukkit.getPluginManager().callEvent(event);
//if (event.isCancelled()) return;
if (entity.isInsideVehicle()) {
Entity vehicle = entity.getVehicle();
vehicle.eject();
vehicle.teleport(to);
entity.teleport(to);
vehicle.setPassenger(entity);
} else if (entity.getPassenger() != null) {
Entity passenger = entity.getPassenger();
entity.eject();
entity.teleport(to);
passenger.teleport(to);
entity.setPassenger(passenger);
} else entity.teleport(to);
} | if (entity.isInsideVehicle()) { Entity vehicle = entity.getVehicle(); vehicle.eject(); vehicle.teleport(to); entity.teleport(to); vehicle.setPassenger(entity); } else if (entity.getPassenger() != null) { Entity passenger = entity.getPassenger(); entity.eject(); entity.teleport(to); passenger.teleport(to); entity.setPassenger(passenger); } else entity.teleport(to); } | /**
* Teleport an entity and the vehicle it is inside of, or vice versa.
*
* @param entity The entity/vehicle to be teleported.
* @param to The destination.
*/ | Teleport an entity and the vehicle it is inside of, or vice versa | teleport | {
"repo_name": "DemigodsRPG/Demigods3",
"path": "Demigods-Engine/src/main/java/com/demigodsrpg/demigods/engine/util/Vehicles.java",
"license": "mit",
"size": 1113
} | [
"org.bukkit.entity.Entity"
] | import org.bukkit.entity.Entity; | import org.bukkit.entity.*; | [
"org.bukkit.entity"
] | org.bukkit.entity; | 1,435,900 |
public static boolean is(final Statement statement, final Class<? extends Statement> stmtClass) {
return as(statement, stmtClass) != null;
}
| static boolean function(final Statement statement, final Class<? extends Statement> stmtClass) { return as(statement, stmtClass) != null; } | /**
* Returns whether the provided statement has the provided type.
*
* @param statement the statement to test
* @param stmtClass the type to test the statement against
* @return {@code true} if the provided statement has the provided type,
* {@code false} otherwise
*/ | Returns whether the provided statement has the provided type | is | {
"repo_name": "rpau/AutoRefactor",
"path": "plugin/src/main/java/org/autorefactor/jdt/internal/corext/dom/ASTNodes.java",
"license": "epl-1.0",
"size": 104241
} | [
"org.eclipse.jdt.core.dom.Statement"
] | import org.eclipse.jdt.core.dom.Statement; | import org.eclipse.jdt.core.dom.*; | [
"org.eclipse.jdt"
] | org.eclipse.jdt; | 2,516,253 |
public Target getTarget(String targetName) throws NoSuchTargetException {
Target target = targets.get(targetName);
if (target != null) {
return target;
}
// No such target.
// If there's a file on the disk that's not mentioned in the BUILD file,
// produce a more informative error. NOTE! this code path is only executed
// on failure, which is (relatively) very rare. In the common case no
// stat(2) is executed.
Path filename = getPackageDirectory().getRelative(targetName);
String suffix;
if (!new PathFragment(targetName).isNormalized()) {
// Don't check for file existence in this case because the error message
// would be confusing and wrong. If the targetName is "foo/bar/.", and
// there is a directory "foo/bar", it doesn't mean that "//pkg:foo/bar/."
// is a valid label.
suffix = "";
} else if (filename.isDirectory()) {
suffix = "; however, a source directory of this name exists. (Perhaps add "
+ "'exports_files([\"" + targetName + "\"])' to " + name + "/BUILD, or define a "
+ "filegroup?)";
} else if (filename.exists()) {
suffix = "; however, a source file of this name exists. (Perhaps add "
+ "'exports_files([\"" + targetName + "\"])' to " + name + "/BUILD?)";
} else {
String suggestion = SpellChecker.suggest(targetName, targets.keySet());
if (suggestion != null) {
suffix = " (did you mean '" + suggestion + "'?)";
} else {
suffix = "";
}
}
throw makeNoSuchTargetException(targetName, suffix);
} | Target function(String targetName) throws NoSuchTargetException { Target target = targets.get(targetName); if (target != null) { return target; } Path filename = getPackageDirectory().getRelative(targetName); String suffix; if (!new PathFragment(targetName).isNormalized()) { suffix = STR; however, a source directory of this name exists. (Perhaps add STR'exports_files([\STR\STR + name + "/BUILD, or define a STRfilegroup?)"; } else if (filename.exists()) { suffix = "; however, a source file of this name exists. (Perhaps add STR'exports_files([\STR\STR + name + STR; } else { String suggestion = SpellChecker.suggest(targetName, targets.keySet()); if (suggestion != null) { suffix = STR + suggestion + "'?)"; } else { suffix = ""; } } throw makeNoSuchTargetException(targetName, suffix); } | /**
* Returns the target (a member of this package) whose name is "targetName".
* First rules are searched, then output files, then input files. The target
* name must be valid, as defined by {@code LabelValidator#validateTargetName}.
*
* @throws NoSuchTargetException if the specified target was not found.
*/ | Returns the target (a member of this package) whose name is "targetName". First rules are searched, then output files, then input files. The target name must be valid, as defined by LabelValidator#validateTargetName | getTarget | {
"repo_name": "UrbanCompass/bazel",
"path": "src/main/java/com/google/devtools/build/lib/packages/Package.java",
"license": "apache-2.0",
"size": 52858
} | [
"com.google.devtools.build.lib.util.SpellChecker",
"com.google.devtools.build.lib.vfs.Path",
"com.google.devtools.build.lib.vfs.PathFragment"
] | import com.google.devtools.build.lib.util.SpellChecker; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; | import com.google.devtools.build.lib.util.*; import com.google.devtools.build.lib.vfs.*; | [
"com.google.devtools"
] | com.google.devtools; | 2,437,490 |
public TopHitsAggregationBuilder scriptField(String name, Script script) {
if (name == null) {
throw new IllegalArgumentException("scriptField [name] must not be null: [" + name + "]");
}
if (script == null) {
throw new IllegalArgumentException("scriptField [script] must not be null: [" + name + "]");
}
scriptField(name, script, false);
return this;
} | TopHitsAggregationBuilder function(String name, Script script) { if (name == null) { throw new IllegalArgumentException(STR + name + "]"); } if (script == null) { throw new IllegalArgumentException(STR + name + "]"); } scriptField(name, script, false); return this; } | /**
* Adds a script field under the given name with the provided script.
*
* @param name
* The name of the field
* @param script
* The script
*/ | Adds a script field under the given name with the provided script | scriptField | {
"repo_name": "dpursehouse/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java",
"license": "apache-2.0",
"size": 31738
} | [
"org.elasticsearch.script.Script"
] | import org.elasticsearch.script.Script; | import org.elasticsearch.script.*; | [
"org.elasticsearch.script"
] | org.elasticsearch.script; | 1,830,592 |
File tmpFile = File.createTempFile("ode", null, tempRoot);
Files.write(data, tmpFile);
return tmpFile;
} | File tmpFile = File.createTempFile("ode", null, tempRoot); Files.write(data, tmpFile); return tmpFile; } | /**
* Creates a temporary file for the given data.
*
* @param data data to write into the temporary file
* @return file descriptor for temporary file
* @throws IOException
*/ | Creates a temporary file for the given data | createTempFile | {
"repo_name": "tvomf/appinventor-mapps",
"path": "appinventor/appengine/src/com/google/appinventor/server/project/utils/TempFiles.java",
"license": "apache-2.0",
"size": 1192
} | [
"com.google.common.io.Files",
"java.io.File"
] | import com.google.common.io.Files; import java.io.File; | import com.google.common.io.*; import java.io.*; | [
"com.google.common",
"java.io"
] | com.google.common; java.io; | 851,579 |
public ApiCall<CoinbaseResponse<Transaction>> showTransaction(@NonNull String accountId,
@NonNull String transactionId,
@Nullable ExpandField... expandOptions) {
return transactionsApi.showTransaction(accountId, transactionId, toValueSet(expandOptions));
}
/**
* <p>
* Send funds to a bitcoin address, bitcoin cash address, litecoin address, ethereum address, or email address.
* No transaction fees are required for off blockchain bitcoin transactions.
* <p>
* It’s recommended to always supply a unique {@link SendMoneyRequest#setIdem(String) idem} field for each transaction.
* This prevents you from sending the same transaction twice if there has been an unexpected network outage or other issue.
* <p>
* When used with OAuth2 authentication, this endpoint requires two factor authentication
* unless used with {@link com.coinbase.Scope.Wallet.Transactions#SEND_BYPASS_2FA wallet:transactions:send:bypass-2fa} scope.
* <p>
* In other case, call it with {@code twoFactorAuthToken == null}, server will respond with
* status code {@link com.coinbase.errors.Error 400} and {@link com.coinbase.errors.Error#getMessage() message}
* indicating two factor authentication is required. After this, you can request {@code twoFactorAuthToken} | ApiCall<CoinbaseResponse<Transaction>> function(@NonNull String accountId, @NonNull String transactionId, @Nullable ExpandField... expandOptions) { return transactionsApi.showTransaction(accountId, transactionId, toValueSet(expandOptions)); } /** * <p> * Send funds to a bitcoin address, bitcoin cash address, litecoin address, ethereum address, or email address. * No transaction fees are required for off blockchain bitcoin transactions. * <p> * It’s recommended to always supply a unique {@link SendMoneyRequest#setIdem(String) idem} field for each transaction. * This prevents you from sending the same transaction twice if there has been an unexpected network outage or other issue. * <p> * When used with OAuth2 authentication, this endpoint requires two factor authentication * unless used with {@link com.coinbase.Scope.Wallet.Transactions#SEND_BYPASS_2FA wallet:transactions:send:bypass-2fa} scope. * <p> * In other case, call it with {@code twoFactorAuthToken == null}, server will respond with * status code {@link com.coinbase.errors.Error 400} and {@link com.coinbase.errors.Error#getMessage() message} * indicating two factor authentication is required. After this, you can request {@code twoFactorAuthToken} | /**
* Show an individual transaction for an account. See {@link Transaction transaction} resource for more information.
* <p>
* SCOPES<br/>
* <ul>
* <li>{@link com.coinbase.Scope.Wallet.Transactions#READ wallet:transactions:read}</li>
* </ul>
*
* @param accountId account id.
* @param transactionId transaction id.
* @param expandOptions {@link ExpandField expand fields} list.
* @return {@link ApiCall call} for getting transaction information.
* @see <a href="https://developers.coinbase.com/api/v2#show-a-transaction">online docs: Show Transaction</a>.
*/ | Show an individual transaction for an account. See <code>Transaction transaction</code> resource for more information. SCOPES <code>com.coinbase.Scope.Wallet.Transactions#READ wallet:transactions:read</code> | showTransaction | {
"repo_name": "coinbase/coinbase-java",
"path": "coinbase-java/src/main/java/com/coinbase/resources/transactions/TransactionsResource.java",
"license": "apache-2.0",
"size": 17299
} | [
"androidx.annotation.NonNull",
"androidx.annotation.Nullable",
"com.coinbase.CoinbaseResponse",
"com.coinbase.network.ApiCall",
"com.coinbase.resources.ExpandUtils",
"com.coinbase.resources.transactions.Transaction"
] | import androidx.annotation.NonNull; import androidx.annotation.Nullable; import com.coinbase.CoinbaseResponse; import com.coinbase.network.ApiCall; import com.coinbase.resources.ExpandUtils; import com.coinbase.resources.transactions.Transaction; | import androidx.annotation.*; import com.coinbase.*; import com.coinbase.network.*; import com.coinbase.resources.*; import com.coinbase.resources.transactions.*; | [
"androidx.annotation",
"com.coinbase",
"com.coinbase.network",
"com.coinbase.resources"
] | androidx.annotation; com.coinbase; com.coinbase.network; com.coinbase.resources; | 1,359,123 |
protected String deliverChat (SpeakService speakSvc, String message, byte mode)
{
// run the message through our mogrification process
message = mogrifyChat(message, mode, true, mode != ChatCodes.EMOTE_MODE);
// mogrification may result in something being turned into a slash command, in which case
// we have to run everything through again from the start
if (message.startsWith("/")) {
return requestChat(speakSvc, message, false);
}
// make sure this client is not restricted from performing this chat message for some
// reason or other
String errmsg = checkCanChat(speakSvc, message, mode);
if (errmsg != null) {
return errmsg;
}
// speak on the specified service
requestSpeak(speakSvc, message, mode);
return ChatCodes.SUCCESS;
} | String function (SpeakService speakSvc, String message, byte mode) { message = mogrifyChat(message, mode, true, mode != ChatCodes.EMOTE_MODE); if (message.startsWith("/")) { return requestChat(speakSvc, message, false); } String errmsg = checkCanChat(speakSvc, message, mode); if (errmsg != null) { return errmsg; } requestSpeak(speakSvc, message, mode); return ChatCodes.SUCCESS; } | /**
* Delivers a plain chat message (not a slash command) on the specified speak service in the
* specified mode. The message will be mogrified and filtered prior to delivery.
*
* @return {@link ChatCodes#SUCCESS} if the message was delivered or a string indicating why it
* failed.
*/ | Delivers a plain chat message (not a slash command) on the specified speak service in the specified mode. The message will be mogrified and filtered prior to delivery | deliverChat | {
"repo_name": "threerings/narya",
"path": "core/src/main/java/com/threerings/crowd/chat/client/ChatDirector.java",
"license": "lgpl-2.1",
"size": 53168
} | [
"com.threerings.crowd.chat.data.ChatCodes"
] | import com.threerings.crowd.chat.data.ChatCodes; | import com.threerings.crowd.chat.data.*; | [
"com.threerings.crowd"
] | com.threerings.crowd; | 1,030,034 |
public DataCenter getDataCenter(String providerDataCenterId) throws InternalException, CloudException; | DataCenter function(String providerDataCenterId) throws InternalException, CloudException; | /**
* Provides access to the full data center information for the specified data center.
* @param providerDataCenterId the provider-specific identifier that the provider uses to identify the data center
* @return the current state of the desired data center
* @throws InternalException an error occurred locally in processing the request
* @throws CloudException an error occurred within the cloud provider or the cloud provider did not approve of the request
*/ | Provides access to the full data center information for the specified data center | getDataCenter | {
"repo_name": "OSS-TheWeatherCompany/dasein-cloud-core",
"path": "src/main/java/org/dasein/cloud/dc/DataCenterServices.java",
"license": "apache-2.0",
"size": 8677
} | [
"org.dasein.cloud.CloudException",
"org.dasein.cloud.InternalException"
] | import org.dasein.cloud.CloudException; import org.dasein.cloud.InternalException; | import org.dasein.cloud.*; | [
"org.dasein.cloud"
] | org.dasein.cloud; | 2,888,564 |
public static Collection<?> largerBetween(Collection<?> a, Collection<?> b) {
return a.size() == b.size() || b.size() > a.size() ? b : a;
} | static Collection<?> function(Collection<?> a, Collection<?> b) { return a.size() == b.size() b.size() > a.size() ? b : a; } | /**
* Return the collection, {@code a} or {@code b}, that has the most
* elements. If both collections have the same number of elements, then the
* second collection, {@code b} is returned.
*
* @param a
* @param b
* @return the collection with the most elements
*/ | Return the collection, a or b, that has the most elements. If both collections have the same number of elements, then the second collection, b is returned | largerBetween | {
"repo_name": "dubex/concourse",
"path": "concourse-server/src/main/java/com/cinchapi/concourse/util/TCollections.java",
"license": "apache-2.0",
"size": 2743
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 1,612,477 |
public static <KEYTYPE, DATATYPE extends Comparable <? super DATATYPE>, ITEMTYPE extends ITreeItemWithID <KEYTYPE, DATATYPE, ITEMTYPE>> void sortByValue (@Nonnull final IBasicTree <DATATYPE, ITEMTYPE> aTree)
{
final ComparatorTreeItemDataComparable <DATATYPE, ITEMTYPE> aItemComp = new ComparatorTreeItemDataComparable <DATATYPE, ITEMTYPE> ();
_sort (aTree, aItemComp);
} | static <KEYTYPE, DATATYPE extends Comparable <? super DATATYPE>, ITEMTYPE extends ITreeItemWithID <KEYTYPE, DATATYPE, ITEMTYPE>> void function (@Nonnull final IBasicTree <DATATYPE, ITEMTYPE> aTree) { final ComparatorTreeItemDataComparable <DATATYPE, ITEMTYPE> aItemComp = new ComparatorTreeItemDataComparable <DATATYPE, ITEMTYPE> (); _sort (aTree, aItemComp); } | /**
* Sort each level of the passed tree on the value with the specified
* comparator. This method assumes that the values in the tree item implement
* the {@link Comparable} interface.
*
* @param aTree
* The tree to be sorted.
*/ | Sort each level of the passed tree on the value with the specified comparator. This method assumes that the values in the tree item implement the <code>Comparable</code> interface | sortByValue | {
"repo_name": "lsimons/phloc-schematron-standalone",
"path": "phloc-commons/src/main/java/com/phloc/commons/tree/utils/sort/TreeWithIDSorter.java",
"license": "apache-2.0",
"size": 5345
} | [
"com.phloc.commons.tree.IBasicTree",
"com.phloc.commons.tree.withid.ITreeItemWithID",
"javax.annotation.Nonnull"
] | import com.phloc.commons.tree.IBasicTree; import com.phloc.commons.tree.withid.ITreeItemWithID; import javax.annotation.Nonnull; | import com.phloc.commons.tree.*; import com.phloc.commons.tree.withid.*; import javax.annotation.*; | [
"com.phloc.commons",
"javax.annotation"
] | com.phloc.commons; javax.annotation; | 1,243,677 |
boolean unifyWithSubtype(ObjectType other, List<String> typeParameters,
Multimap<String, JSType> typeMultimap, SubtypeCache subSuperMap) {
if (fn != null) {
if (other.fn == null ||
!fn.unifyWithSubtype(other.fn, typeParameters, typeMultimap, subSuperMap)) {
return false;
}
}
NominalType thisNt = this.nominalType;
NominalType otherNt = other.nominalType;
if (thisNt != null && otherNt != null) {
if (thisNt.unifyWithSubtype(
otherNt, typeParameters, typeMultimap, subSuperMap)) {
return true;
}
if (thisNt.isClass()) {
return false;
}
if (thisNt.isStructuralInterface()) {
if (thisNt.equals(subSuperMap.get(otherNt))) {
return true;
}
subSuperMap = subSuperMap.with(otherNt, thisNt);
}
}
if (thisNt != null && !thisNt.isStructuralInterface() && otherNt == null) {
return false;
}
Set<String> thisProps = thisNt != null && thisNt.isStructuralInterface()
? thisNt.getAllPropsOfInterface() : this.props.keySet();
return unifyPropsWithSubtype(
other, thisProps, typeParameters, typeMultimap, subSuperMap);
} | boolean unifyWithSubtype(ObjectType other, List<String> typeParameters, Multimap<String, JSType> typeMultimap, SubtypeCache subSuperMap) { if (fn != null) { if (other.fn == null !fn.unifyWithSubtype(other.fn, typeParameters, typeMultimap, subSuperMap)) { return false; } } NominalType thisNt = this.nominalType; NominalType otherNt = other.nominalType; if (thisNt != null && otherNt != null) { if (thisNt.unifyWithSubtype( otherNt, typeParameters, typeMultimap, subSuperMap)) { return true; } if (thisNt.isClass()) { return false; } if (thisNt.isStructuralInterface()) { if (thisNt.equals(subSuperMap.get(otherNt))) { return true; } subSuperMap = subSuperMap.with(otherNt, thisNt); } } if (thisNt != null && !thisNt.isStructuralInterface() && otherNt == null) { return false; } Set<String> thisProps = thisNt != null && thisNt.isStructuralInterface() ? thisNt.getAllPropsOfInterface() : this.props.keySet(); return unifyPropsWithSubtype( other, thisProps, typeParameters, typeMultimap, subSuperMap); } | /**
* Unify {@code this}, which may contain free type variables,
* with {@code other}, a concrete type, modifying the supplied
* {@code typeMultimap} to add any new template varaible type bindings.
* @return Whether unification succeeded
*/ | Unify this, which may contain free type variables, with other, a concrete type, modifying the supplied typeMultimap to add any new template varaible type bindings | unifyWithSubtype | {
"repo_name": "superkonduktr/closure-compiler",
"path": "src/com/google/javascript/jscomp/newtypes/ObjectType.java",
"license": "apache-2.0",
"size": 46944
} | [
"com.google.common.collect.Multimap",
"java.util.List",
"java.util.Set"
] | import com.google.common.collect.Multimap; import java.util.List; import java.util.Set; | import com.google.common.collect.*; import java.util.*; | [
"com.google.common",
"java.util"
] | com.google.common; java.util; | 527,916 |
private void fail(boolean clearSession, HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
if (clearSession) {
HttpSession ssn = req.getSession(false);
if (ssn != null) ssn.removeAttribute("jcifs.http.principal");
}
resp.addHeader("WWW-Authenticate", "Negotiate");
resp.addHeader("WWW-Authenticate", "NTLM");
resp.setHeader("Connection", "close");
resp.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
resp.flushBuffer();
} | void function(boolean clearSession, HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { if (clearSession) { HttpSession ssn = req.getSession(false); if (ssn != null) ssn.removeAttribute(STR); } resp.addHeader(STR, STR); resp.addHeader(STR, "NTLM"); resp.setHeader(STR, "close"); resp.setStatus(HttpServletResponse.SC_UNAUTHORIZED); resp.flushBuffer(); } | /**
* Action realisee en cas d'echec de l'authentification
*
* @param clearSession indique s'il faut vider la session ou non
* @param req requete
* @param resp reponse
* @throws ServletException
* @throws IOException
*/ | Action realisee en cas d'echec de l'authentification | fail | {
"repo_name": "bedrin/kerb4j",
"path": "kerb4j-server/kerb4j-server-tomcat/src/main/java/com/kerb4j/server/tomcat/SpnegoAuthenticator.java",
"license": "lgpl-2.1",
"size": 11190
} | [
"java.io.IOException",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"javax.servlet.http.HttpSession"
] | import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; | import java.io.*; import javax.servlet.*; import javax.servlet.http.*; | [
"java.io",
"javax.servlet"
] | java.io; javax.servlet; | 46,529 |
@Override
public void expand() {
expand(TimePeriod.SHORT);
} | void function() { expand(TimePeriod.SHORT); } | /**
* See {@link TreeItem}.
*/ | See <code>TreeItem</code> | expand | {
"repo_name": "djelinek/reddeer",
"path": "plugins/org.eclipse.reddeer.swt/src/org/eclipse/reddeer/swt/impl/tree/AbstractTreeItem.java",
"license": "epl-1.0",
"size": 8606
} | [
"org.eclipse.reddeer.common.wait.TimePeriod"
] | import org.eclipse.reddeer.common.wait.TimePeriod; | import org.eclipse.reddeer.common.wait.*; | [
"org.eclipse.reddeer"
] | org.eclipse.reddeer; | 1,506,834 |
protected void fill() throws IOException {
checkClosed();
// BEGIN android-only
if (nativeEndBufSize > 0) {
ZipFile.RAFStream is = (ZipFile.RAFStream) in;
synchronized (is.mSharedRaf) {
long len = is.mLength - is.mOffset;
if (len > nativeEndBufSize)
len = nativeEndBufSize;
int cnt = inf.setFileInput(is.mSharedRaf.getFD(), is.mOffset,
(int) nativeEndBufSize);
is.skip(cnt);
}
} else {
if ((len = in.read(buf)) > 0) {
inf.setInput(buf, 0, len);
}
}
// END android-only
} | void function() throws IOException { checkClosed(); if (nativeEndBufSize > 0) { ZipFile.RAFStream is = (ZipFile.RAFStream) in; synchronized (is.mSharedRaf) { long len = is.mLength - is.mOffset; if (len > nativeEndBufSize) len = nativeEndBufSize; int cnt = inf.setFileInput(is.mSharedRaf.getFD(), is.mOffset, (int) nativeEndBufSize); is.skip(cnt); } } else { if ((len = in.read(buf)) > 0) { inf.setInput(buf, 0, len); } } } | /**
* Fills the input buffer with data to be decompressed.
*
* @throws IOException
* if an {@code IOException} occurs.
*/ | Fills the input buffer with data to be decompressed | fill | {
"repo_name": "webos21/xi",
"path": "java/jcl/src/java/java/util/zip/InflaterInputStream.java",
"license": "apache-2.0",
"size": 8799
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,861,049 |
@Test
public void testRequestWithSubstrings1EmptyInitial()
{
Dsmlv2Parser parser = null;
try
{
parser = newParser();
parser.setInput( SearchRequestTest.class
.getResource( "filters/request_with_substrings_1_empty_initial.xml" ).openStream(), "UTF-8" );
parser.parse();
}
catch ( Exception e )
{
fail( e.getMessage() );
}
SearchRequest searchRequest = ( SearchRequest ) parser.getBatchRequest().getCurrentRequest();
ExprNode filter = searchRequest.getFilter();
assertTrue( filter instanceof SubstringNode );
SubstringNode substringFilter = ( SubstringNode ) filter;
assertNull( substringFilter.getInitial() );
} | void function() { Dsmlv2Parser parser = null; try { parser = newParser(); parser.setInput( SearchRequestTest.class .getResource( STR ).openStream(), "UTF-8" ); parser.parse(); } catch ( Exception e ) { fail( e.getMessage() ); } SearchRequest searchRequest = ( SearchRequest ) parser.getBatchRequest().getCurrentRequest(); ExprNode filter = searchRequest.getFilter(); assertTrue( filter instanceof SubstringNode ); SubstringNode substringFilter = ( SubstringNode ) filter; assertNull( substringFilter.getInitial() ); } | /**
* Test parsing of a request with a Substrings Filter with 1 emptyInitial element
*/ | Test parsing of a request with a Substrings Filter with 1 emptyInitial element | testRequestWithSubstrings1EmptyInitial | {
"repo_name": "darranl/directory-shared",
"path": "dsml/parser/src/test/java/org/apache/directory/api/dsmlv2/searchRequest/SearchRequestTest.java",
"license": "apache-2.0",
"size": 67826
} | [
"org.apache.directory.api.dsmlv2.Dsmlv2Parser",
"org.apache.directory.api.ldap.model.filter.ExprNode",
"org.apache.directory.api.ldap.model.filter.SubstringNode",
"org.apache.directory.api.ldap.model.message.SearchRequest",
"org.junit.Assert"
] | import org.apache.directory.api.dsmlv2.Dsmlv2Parser; import org.apache.directory.api.ldap.model.filter.ExprNode; import org.apache.directory.api.ldap.model.filter.SubstringNode; import org.apache.directory.api.ldap.model.message.SearchRequest; import org.junit.Assert; | import org.apache.directory.api.dsmlv2.*; import org.apache.directory.api.ldap.model.filter.*; import org.apache.directory.api.ldap.model.message.*; import org.junit.*; | [
"org.apache.directory",
"org.junit"
] | org.apache.directory; org.junit; | 1,657,013 |
public static <T> T waitForProxy(Class<T> protocol,
long clientVersion,
InetSocketAddress addr, Configuration conf,
int rpcTimeout,
long timeout) throws IOException {
return waitForProtocolProxy(protocol, clientVersion, addr,
conf, rpcTimeout, null, timeout).getProxy();
} | static <T> T function(Class<T> protocol, long clientVersion, InetSocketAddress addr, Configuration conf, int rpcTimeout, long timeout) throws IOException { return waitForProtocolProxy(protocol, clientVersion, addr, conf, rpcTimeout, null, timeout).getProxy(); } | /**
* Get a proxy connection to a remote server
*
* @param protocol protocol class
* @param clientVersion client version
* @param addr remote address
* @param conf configuration to use
* @param rpcTimeout timeout for each RPC
* @param timeout time in milliseconds before giving up
* @return the proxy
* @throws IOException if the far end through a RemoteException
*/ | Get a proxy connection to a remote server | waitForProxy | {
"repo_name": "jaypatil/hadoop",
"path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java",
"license": "gpl-3.0",
"size": 35801
} | [
"java.io.IOException",
"java.net.InetSocketAddress",
"org.apache.hadoop.conf.Configuration"
] | import java.io.IOException; import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; | import java.io.*; import java.net.*; import org.apache.hadoop.conf.*; | [
"java.io",
"java.net",
"org.apache.hadoop"
] | java.io; java.net; org.apache.hadoop; | 2,594,792 |
protected Layer parseLayers( Element layerElem, Layer parent )
throws XMLParsingException, UnknownCRSException {
boolean queryable = XMLTools.getNodeAsBoolean( layerElem, "./@queryable", nsContext, false );
int cascaded = XMLTools.getNodeAsInt( layerElem, "./@cascaded", nsContext, 0 );
boolean opaque = XMLTools.getNodeAsBoolean( layerElem, "./@opaque", nsContext, false );
boolean noSubsets = XMLTools.getNodeAsBoolean( layerElem, "./@noSubsets", nsContext, false );
int fixedWidth = XMLTools.getNodeAsInt( layerElem, "./@fixedWidth", nsContext, 0 );
int fixedHeight = XMLTools.getNodeAsInt( layerElem, "./@fixedHeight", nsContext, 0 );
String name = XMLTools.getNodeAsString( layerElem, "./Name", nsContext, null );
String title = XMLTools.getRequiredNodeAsString( layerElem, "./Title", nsContext );
String layerAbstract = XMLTools.getNodeAsString( layerElem, "./Abstract", nsContext, null );
String[] keywords = XMLTools.getNodesAsStrings( layerElem, "./KeywordList/Keyword",
nsContext );
String[] srs = XMLTools.getNodesAsStrings( layerElem, "./SRS", nsContext );
List<Node> nl = XMLTools.getNodes( layerElem, "./BoundingBox", nsContext );
// TODO replace with Envelope
LayerBoundingBox[] bboxes = null;
if ( nl.size() == 0 && parent != null ) {
// inherit BoundingBoxes from parent layer
bboxes = parent.getBoundingBoxes();
} else {
bboxes = parseLayerBoundingBoxes( nl );
}
Element llBox = (Element) XMLTools.getNode( layerElem, "./LatLonBoundingBox", nsContext );
Envelope llBoundingBox = null;
if ( llBox == null && parent != null ) {
// inherit LatLonBoundingBox parent layer
llBoundingBox = parent.getLatLonBoundingBox();
} else if ( llBox != null ) {
llBoundingBox = parseLatLonBoundingBox( llBox );
} else {
CoordinateSystem crs = CRSFactory.create( "EPSG:4326" );
llBoundingBox = GeometryFactory.createEnvelope( -180, -90, 180, 90, crs );
}
Dimension[] dimensions = parseDimensions( layerElem );
Extent[] extents = parseExtents( layerElem );
Attribution attribution = parseAttribution( layerElem );
AuthorityURL[] authorityURLs = parseAuthorityURLs( layerElem );
MetadataURL[] metadataURLs = parseMetadataURLs( layerElem );
DataURL[] dataURLs = parseDataURL( layerElem );
Identifier[] identifiers = parseIdentifiers( layerElem );
FeatureListURL[] featureListURLs = parseFeatureListURL( layerElem );
Style[] styles = parseStyles( layerElem );
ScaleHint scaleHint = parseScaleHint( layerElem );
Layer layer = new Layer( queryable, cascaded, opaque, noSubsets, fixedWidth, fixedHeight,
name, title, layerAbstract, llBoundingBox, attribution, scaleHint,
keywords, srs, bboxes, dimensions, extents, authorityURLs,
identifiers, metadataURLs, dataURLs, featureListURLs, styles,
null, null, parent );
// get Child layers
nl = XMLTools.getNodes( layerElem, "./Layer", nsContext );
Layer[] layers = new Layer[nl.size()];
for ( int i = 0; i < layers.length; i++ ) {
layers[i] = parseLayers( (Element) nl.get( i ), layer );
}
// set child layers
layer.setLayer( layers );
return layer;
}
| Layer function( Element layerElem, Layer parent ) throws XMLParsingException, UnknownCRSException { boolean queryable = XMLTools.getNodeAsBoolean( layerElem, STR, nsContext, false ); int cascaded = XMLTools.getNodeAsInt( layerElem, STR, nsContext, 0 ); boolean opaque = XMLTools.getNodeAsBoolean( layerElem, STR, nsContext, false ); boolean noSubsets = XMLTools.getNodeAsBoolean( layerElem, STR, nsContext, false ); int fixedWidth = XMLTools.getNodeAsInt( layerElem, STR, nsContext, 0 ); int fixedHeight = XMLTools.getNodeAsInt( layerElem, STR, nsContext, 0 ); String name = XMLTools.getNodeAsString( layerElem, STR, nsContext, null ); String title = XMLTools.getRequiredNodeAsString( layerElem, STR, nsContext ); String layerAbstract = XMLTools.getNodeAsString( layerElem, STR, nsContext, null ); String[] keywords = XMLTools.getNodesAsStrings( layerElem, STR, nsContext ); String[] srs = XMLTools.getNodesAsStrings( layerElem, "./SRS", nsContext ); List<Node> nl = XMLTools.getNodes( layerElem, STR, nsContext ); LayerBoundingBox[] bboxes = null; if ( nl.size() == 0 && parent != null ) { bboxes = parent.getBoundingBoxes(); } else { bboxes = parseLayerBoundingBoxes( nl ); } Element llBox = (Element) XMLTools.getNode( layerElem, STR, nsContext ); Envelope llBoundingBox = null; if ( llBox == null && parent != null ) { llBoundingBox = parent.getLatLonBoundingBox(); } else if ( llBox != null ) { llBoundingBox = parseLatLonBoundingBox( llBox ); } else { CoordinateSystem crs = CRSFactory.create( STR ); llBoundingBox = GeometryFactory.createEnvelope( -180, -90, 180, 90, crs ); } Dimension[] dimensions = parseDimensions( layerElem ); Extent[] extents = parseExtents( layerElem ); Attribution attribution = parseAttribution( layerElem ); AuthorityURL[] authorityURLs = parseAuthorityURLs( layerElem ); MetadataURL[] metadataURLs = parseMetadataURLs( layerElem ); DataURL[] dataURLs = parseDataURL( layerElem ); Identifier[] identifiers = parseIdentifiers( layerElem ); FeatureListURL[] 
featureListURLs = parseFeatureListURL( layerElem ); Style[] styles = parseStyles( layerElem ); ScaleHint scaleHint = parseScaleHint( layerElem ); Layer layer = new Layer( queryable, cascaded, opaque, noSubsets, fixedWidth, fixedHeight, name, title, layerAbstract, llBoundingBox, attribution, scaleHint, keywords, srs, bboxes, dimensions, extents, authorityURLs, identifiers, metadataURLs, dataURLs, featureListURLs, styles, null, null, parent ); nl = XMLTools.getNodes( layerElem, STR, nsContext ); Layer[] layers = new Layer[nl.size()]; for ( int i = 0; i < layers.length; i++ ) { layers[i] = parseLayers( (Element) nl.get( i ), layer ); } layer.setLayer( layers ); return layer; } | /**
* returns the layers offered by the WMPS
*
* @param layerElem
* @param parent
* @return Layer
* @throws XMLParsingException
* @throws UnknownCRSException
*/ | returns the layers offered by the WMPS | parseLayers | {
"repo_name": "lat-lon/deegree2-base",
"path": "deegree2-core/src/main/java/org/deegree/ogcwebservices/wmps/capabilities/WMPSCapabilitiesDocument.java",
"license": "lgpl-2.1",
"size": 42151
} | [
"java.util.List",
"org.deegree.framework.xml.XMLParsingException",
"org.deegree.framework.xml.XMLTools",
"org.deegree.model.crs.CRSFactory",
"org.deegree.model.crs.CoordinateSystem",
"org.deegree.model.crs.UnknownCRSException",
"org.deegree.model.spatialschema.Envelope",
"org.deegree.model.spatialsche... | import java.util.List; import org.deegree.framework.xml.XMLParsingException; import org.deegree.framework.xml.XMLTools; import org.deegree.model.crs.CRSFactory; import org.deegree.model.crs.CoordinateSystem; import org.deegree.model.crs.UnknownCRSException; import org.deegree.model.spatialschema.Envelope; import org.deegree.model.spatialschema.GeometryFactory; import org.deegree.ogcwebservices.getcapabilities.MetadataURL; import org.deegree.ogcwebservices.wms.capabilities.Attribution; import org.deegree.ogcwebservices.wms.capabilities.AuthorityURL; import org.deegree.ogcwebservices.wms.capabilities.DataURL; import org.deegree.ogcwebservices.wms.capabilities.Dimension; import org.deegree.ogcwebservices.wms.capabilities.Extent; import org.deegree.ogcwebservices.wms.capabilities.FeatureListURL; import org.deegree.ogcwebservices.wms.capabilities.Identifier; import org.deegree.ogcwebservices.wms.capabilities.Layer; import org.deegree.ogcwebservices.wms.capabilities.LayerBoundingBox; import org.deegree.ogcwebservices.wms.capabilities.ScaleHint; import org.deegree.ogcwebservices.wms.capabilities.Style; import org.w3c.dom.Element; import org.w3c.dom.Node; | import java.util.*; import org.deegree.framework.xml.*; import org.deegree.model.crs.*; import org.deegree.model.spatialschema.*; import org.deegree.ogcwebservices.getcapabilities.*; import org.deegree.ogcwebservices.wms.capabilities.*; import org.w3c.dom.*; | [
"java.util",
"org.deegree.framework",
"org.deegree.model",
"org.deegree.ogcwebservices",
"org.w3c.dom"
] | java.util; org.deegree.framework; org.deegree.model; org.deegree.ogcwebservices; org.w3c.dom; | 2,169,652 |
Preconditions.checkNotNull(format);
Preconditions.checkArgument(format.getEncoding() == RowKeyEncoding.HASH_PREFIX);
final byte[] hash = hashKijiRowKey(format, kijiRowKey);
final int hashSize = format.getHashSize();
// Prepend a subset of the hash to the Kiji row key:
final byte[] hbaseRowKey = new byte[hashSize + kijiRowKey.length];
System.arraycopy(hash, 0, hbaseRowKey, 0, hashSize);
System.arraycopy(kijiRowKey, 0, hbaseRowKey, hashSize, kijiRowKey.length);
return new HashPrefixedEntityId(kijiRowKey, hbaseRowKey, format);
} | Preconditions.checkNotNull(format); Preconditions.checkArgument(format.getEncoding() == RowKeyEncoding.HASH_PREFIX); final byte[] hash = hashKijiRowKey(format, kijiRowKey); final int hashSize = format.getHashSize(); final byte[] hbaseRowKey = new byte[hashSize + kijiRowKey.length]; System.arraycopy(hash, 0, hbaseRowKey, 0, hashSize); System.arraycopy(kijiRowKey, 0, hbaseRowKey, hashSize, kijiRowKey.length); return new HashPrefixedEntityId(kijiRowKey, hbaseRowKey, format); } | /**
* Creates a HashPrefixedEntityId from the specified Kiji row key.
*
* @param kijiRowKey Kiji row key.
* @param format Row key hashing specification.
* @return a new HashPrefixedEntityId with the specified Kiji row key.
*/ | Creates a HashPrefixedEntityId from the specified Kiji row key | getEntityId | {
"repo_name": "zenoss/kiji-schema",
"path": "kiji-schema/src/main/java/org/kiji/schema/HashPrefixedEntityId.java",
"license": "apache-2.0",
"size": 5281
} | [
"com.google.common.base.Preconditions",
"org.kiji.schema.avro.RowKeyEncoding"
] | import com.google.common.base.Preconditions; import org.kiji.schema.avro.RowKeyEncoding; | import com.google.common.base.*; import org.kiji.schema.avro.*; | [
"com.google.common",
"org.kiji.schema"
] | com.google.common; org.kiji.schema; | 915,344 |
private void assertExists(String name,
Object value,
String beanOut) throws Exception {
Writer output = new StringWriter();
serializer.serialize(value, output);
String nameTag = "<name>" + name + "</name>";
String valueTag = output.toString();
String msg = "Cannot find property with tag [" + nameTag + "]" +
" in bean [" + beanOut + "]";
assertTrue(msg, beanOut.indexOf(nameTag) > -1);
msg = "Cannot find property value with Value-> [" + valueTag + "]" +
" in bean [" + beanOut + "]";
assertTrue(msg, beanOut.indexOf(valueTag) > -1);
} | void function(String name, Object value, String beanOut) throws Exception { Writer output = new StringWriter(); serializer.serialize(value, output); String nameTag = STR + name + STR; String valueTag = output.toString(); String msg = STR + nameTag + "]" + STR + beanOut + "]"; assertTrue(msg, beanOut.indexOf(nameTag) > -1); msg = STR + valueTag + "]" + STR + beanOut + "]"; assertTrue(msg, beanOut.indexOf(valueTag) > -1); } | /**
* Quick method to assert a property is property constructed bean
* @param map the map obtained by mapifying a bean
* @param name the key name
* @param value the value.
*/ | Quick method to assert a property is property constructed bean | assertExists | {
"repo_name": "colloquium/spacewalk",
"path": "java/code/src/com/redhat/rhn/frontend/xmlrpc/serializer/test/ManagedServerGroupSerializerTest.java",
"license": "gpl-2.0",
"size": 3584
} | [
"java.io.StringWriter",
"java.io.Writer"
] | import java.io.StringWriter; import java.io.Writer; | import java.io.*; | [
"java.io"
] | java.io; | 2,061,507 |
void assertAwaitTimesOut(ConditionObject c, AwaitMethod awaitMethod) {
long timeoutMillis = timeoutMillis();
long startTime;
try {
switch (awaitMethod) {
case awaitTimed:
startTime = System.nanoTime();
assertFalse(c.await(timeoutMillis, MILLISECONDS));
assertTrue(millisElapsedSince(startTime) >= timeoutMillis);
break;
case awaitNanos:
startTime = System.nanoTime();
long nanosTimeout = MILLISECONDS.toNanos(timeoutMillis);
long nanosRemaining = c.awaitNanos(nanosTimeout);
assertTrue(nanosRemaining <= 0);
assertTrue(nanosRemaining > -MILLISECONDS.toNanos(LONG_DELAY_MS));
assertTrue(millisElapsedSince(startTime) >= timeoutMillis);
break;
case awaitUntil:
// We shouldn't assume that nanoTime and currentTimeMillis
// use the same time source, so don't use nanoTime here.
java.util.Date delayedDate = delayedDate(timeoutMillis());
assertFalse(c.awaitUntil(delayedDate(timeoutMillis)));
assertTrue(new java.util.Date().getTime() >= delayedDate.getTime());
break;
default:
throw new UnsupportedOperationException();
}
} catch (InterruptedException ie) { threadUnexpectedException(ie); }
} | void assertAwaitTimesOut(ConditionObject c, AwaitMethod awaitMethod) { long timeoutMillis = timeoutMillis(); long startTime; try { switch (awaitMethod) { case awaitTimed: startTime = System.nanoTime(); assertFalse(c.await(timeoutMillis, MILLISECONDS)); assertTrue(millisElapsedSince(startTime) >= timeoutMillis); break; case awaitNanos: startTime = System.nanoTime(); long nanosTimeout = MILLISECONDS.toNanos(timeoutMillis); long nanosRemaining = c.awaitNanos(nanosTimeout); assertTrue(nanosRemaining <= 0); assertTrue(nanosRemaining > -MILLISECONDS.toNanos(LONG_DELAY_MS)); assertTrue(millisElapsedSince(startTime) >= timeoutMillis); break; case awaitUntil: java.util.Date delayedDate = delayedDate(timeoutMillis()); assertFalse(c.awaitUntil(delayedDate(timeoutMillis))); assertTrue(new java.util.Date().getTime() >= delayedDate.getTime()); break; default: throw new UnsupportedOperationException(); } } catch (InterruptedException ie) { threadUnexpectedException(ie); } } | /**
* Checks that awaiting the given condition times out (using the
* default timeout duration).
*/ | Checks that awaiting the given condition times out (using the default timeout duration) | assertAwaitTimesOut | {
"repo_name": "FauxFaux/jdk9-jdk",
"path": "test/java/util/concurrent/tck/AbstractQueuedSynchronizerTest.java",
"license": "gpl-2.0",
"size": 46541
} | [
"java.util.concurrent.TimeUnit",
"java.util.concurrent.locks.AbstractQueuedSynchronizer"
] | import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.AbstractQueuedSynchronizer; | import java.util.concurrent.*; import java.util.concurrent.locks.*; | [
"java.util"
] | java.util; | 650,393 |
@Override
public Adapter createEStringToEIntegerObjectAdapter() {
if (eStringToEIntegerObjectItemProvider == null) {
eStringToEIntegerObjectItemProvider = new EStringToEIntegerObjectItemProvider(this);
}
return eStringToEIntegerObjectItemProvider;
}
protected EStringToEBooleanObjectItemProvider eStringToEBooleanObjectItemProvider; | Adapter function() { if (eStringToEIntegerObjectItemProvider == null) { eStringToEIntegerObjectItemProvider = new EStringToEIntegerObjectItemProvider(this); } return eStringToEIntegerObjectItemProvider; } protected EStringToEBooleanObjectItemProvider eStringToEBooleanObjectItemProvider; | /**
* This creates an adapter for a {@link java.util.Map.Entry}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This creates an adapter for a <code>java.util.Map.Entry</code>. | createEStringToEIntegerObjectAdapter | {
"repo_name": "EPiCS/soundgates",
"path": "software/editor/Soundgates.edit/src/soundgates/provider/SoundgatesItemProviderAdapterFactory.java",
"license": "mit",
"size": 12039
} | [
"org.eclipse.emf.common.notify.Adapter"
] | import org.eclipse.emf.common.notify.Adapter; | import org.eclipse.emf.common.notify.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 2,154,034 |
void handleSignalStrengthChange(@Nullable SignalStrength oldSignalStrength, SignalStrength newSignalStrength); | void handleSignalStrengthChange(@Nullable SignalStrength oldSignalStrength, SignalStrength newSignalStrength); | /**
* Called when the signal strength property changes.
*
* @param oldSignalStrength the old signal strength value
* @param newSignalStrength the new signal strength value
*/ | Called when the signal strength property changes | handleSignalStrengthChange | {
"repo_name": "paulianttila/openhab2",
"path": "bundles/org.openhab.binding.lifx/src/main/java/org/openhab/binding/lifx/internal/listener/LifxLightStateListener.java",
"license": "epl-1.0",
"size": 2421
} | [
"org.eclipse.jdt.annotation.Nullable",
"org.openhab.binding.lifx.internal.dto.SignalStrength"
] | import org.eclipse.jdt.annotation.Nullable; import org.openhab.binding.lifx.internal.dto.SignalStrength; | import org.eclipse.jdt.annotation.*; import org.openhab.binding.lifx.internal.dto.*; | [
"org.eclipse.jdt",
"org.openhab.binding"
] | org.eclipse.jdt; org.openhab.binding; | 1,631,299 |
public X509Certificate getSigningCertificate() {
return this.signCert;
} | X509Certificate function() { return this.signCert; } | /**
* Get the X.509 certificate actually used to sign the digest.
* @return the X.509 certificate actually used to sign the digest
*/ | Get the X.509 certificate actually used to sign the digest | getSigningCertificate | {
"repo_name": "venanciolm/afirma-ui-miniapplet_x_x",
"path": "afirma_ui_miniapplet/src/main/java/com/lowagie/text/pdf/PdfPKCS7.java",
"license": "mit",
"size": 60628
} | [
"java.security.cert.X509Certificate"
] | import java.security.cert.X509Certificate; | import java.security.cert.*; | [
"java.security"
] | java.security; | 1,201,920 |
public ResponseMatcher noFault() {
return ResponseMatchers.noFault();
} | ResponseMatcher function() { return ResponseMatchers.noFault(); } | /**
* Expects the response <strong>not</strong> to contain a SOAP fault.
*
* @return the response matcher
*/ | Expects the response not to contain a SOAP fault | noFault | {
"repo_name": "lukas-krecan/smock",
"path": "common/src/main/java/net/javacrumbs/smock/common/server/AbstractCommonWebServiceServerTest.java",
"license": "apache-2.0",
"size": 7310
} | [
"org.springframework.ws.test.server.ResponseMatcher",
"org.springframework.ws.test.server.ResponseMatchers"
] | import org.springframework.ws.test.server.ResponseMatcher; import org.springframework.ws.test.server.ResponseMatchers; | import org.springframework.ws.test.server.*; | [
"org.springframework.ws"
] | org.springframework.ws; | 1,536,329 |
@ServiceMethod(returns = ReturnType.SINGLE)
DomainInner transferOut(String resourceGroupName, String domainName); | @ServiceMethod(returns = ReturnType.SINGLE) DomainInner transferOut(String resourceGroupName, String domainName); | /**
* Transfer out domain to another registrar.
*
* @param resourceGroupName Name of the resource group to which the resource belongs.
* @param domainName Name of domain.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return information about a domain.
*/ | Transfer out domain to another registrar | transferOut | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-appservice/src/main/java/com/azure/resourcemanager/appservice/fluent/DomainsClient.java",
"license": "mit",
"size": 55229
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.resourcemanager.appservice.fluent.models.DomainInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.appservice.fluent.models.DomainInner; | import com.azure.core.annotation.*; import com.azure.resourcemanager.appservice.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 143,056 |
private NodesToAllocate buildNodesToAllocate(RoutingAllocation allocation,
List<NodeGatewayStartedShards> nodeShardStates,
ShardRouting shardRouting,
boolean forceAllocate) {
List<DecidedNode> yesNodeShards = new ArrayList<>();
List<DecidedNode> throttledNodeShards = new ArrayList<>();
List<DecidedNode> noNodeShards = new ArrayList<>();
for (NodeGatewayStartedShards nodeShardState : nodeShardStates) {
RoutingNode node = allocation.routingNodes().node(nodeShardState.getNode().getId());
if (node == null) {
continue;
}
Decision decision = forceAllocate ? allocation.deciders().canForceAllocatePrimary(shardRouting, node, allocation) :
allocation.deciders().canAllocate(shardRouting, node, allocation);
DecidedNode decidedNode = new DecidedNode(nodeShardState, decision);
if (decision.type() == Type.THROTTLE) {
throttledNodeShards.add(decidedNode);
} else if (decision.type() == Type.NO) {
noNodeShards.add(decidedNode);
} else {
yesNodeShards.add(decidedNode);
}
}
return new NodesToAllocate(Collections.unmodifiableList(yesNodeShards), Collections.unmodifiableList(throttledNodeShards),
Collections.unmodifiableList(noNodeShards));
} | NodesToAllocate function(RoutingAllocation allocation, List<NodeGatewayStartedShards> nodeShardStates, ShardRouting shardRouting, boolean forceAllocate) { List<DecidedNode> yesNodeShards = new ArrayList<>(); List<DecidedNode> throttledNodeShards = new ArrayList<>(); List<DecidedNode> noNodeShards = new ArrayList<>(); for (NodeGatewayStartedShards nodeShardState : nodeShardStates) { RoutingNode node = allocation.routingNodes().node(nodeShardState.getNode().getId()); if (node == null) { continue; } Decision decision = forceAllocate ? allocation.deciders().canForceAllocatePrimary(shardRouting, node, allocation) : allocation.deciders().canAllocate(shardRouting, node, allocation); DecidedNode decidedNode = new DecidedNode(nodeShardState, decision); if (decision.type() == Type.THROTTLE) { throttledNodeShards.add(decidedNode); } else if (decision.type() == Type.NO) { noNodeShards.add(decidedNode); } else { yesNodeShards.add(decidedNode); } } return new NodesToAllocate(Collections.unmodifiableList(yesNodeShards), Collections.unmodifiableList(throttledNodeShards), Collections.unmodifiableList(noNodeShards)); } | /**
* Split the list of node shard states into groups yes/no/throttle based on allocation deciders
*/ | Split the list of node shard states into groups yes/no/throttle based on allocation deciders | buildNodesToAllocate | {
"repo_name": "strapdata/elassandra5-rc",
"path": "core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java",
"license": "apache-2.0",
"size": 32324
} | [
"java.util.ArrayList",
"java.util.Collections",
"java.util.List",
"org.elasticsearch.cluster.routing.RoutingNode",
"org.elasticsearch.cluster.routing.ShardRouting",
"org.elasticsearch.cluster.routing.allocation.RoutingAllocation",
"org.elasticsearch.cluster.routing.allocation.decider.Decision",
"org.e... | import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards; | import java.util.*; import org.elasticsearch.cluster.routing.*; import org.elasticsearch.cluster.routing.allocation.*; import org.elasticsearch.cluster.routing.allocation.decider.*; import org.elasticsearch.gateway.*; | [
"java.util",
"org.elasticsearch.cluster",
"org.elasticsearch.gateway"
] | java.util; org.elasticsearch.cluster; org.elasticsearch.gateway; | 479,014 |
private Map<String, SortedSet<?>> getRegistry43(
WebRequest request,
IPerson user,
PortletCategory rootCategory,
boolean includeUncategorized) {
Set<IPortletDefinition> portletsNotYetCategorized =
includeUncategorized
? new HashSet<IPortletDefinition>(
portletDefinitionRegistry.getAllPortletDefinitions())
: new HashSet<
IPortletDefinition>(); // Not necessary to fetch them if we're not tracking them
// construct a new channel registry
Map<String, SortedSet<?>> rslt = new TreeMap<String, SortedSet<?>>();
SortedSet<PortletCategoryBean> categories = new TreeSet<PortletCategoryBean>();
// add the root category and all its children to the registry
final Locale locale = getUserLocale(user);
categories.add(
preparePortletCategoryBean(
request, rootCategory, portletsNotYetCategorized, user, locale));
if (includeUncategorized) {
EntityIdentifier ei = user.getEntityIdentifier();
IAuthorizationPrincipal ap =
AuthorizationService.instance().newPrincipal(ei.getKey(), ei.getType());
Set<PortletDefinitionBean> marketplacePortlets = new HashSet<>();
for (IPortletDefinition portlet : portletsNotYetCategorized) {
if (authorizationService.canPrincipalBrowse(ap, portlet)) {
PortletDefinitionBean pdb =
preparePortletDefinitionBean(request, portlet, locale);
marketplacePortlets.add(pdb);
}
}
// construct a new channel category bean for this category
final String uncName = messageSource.getMessage(UNCATEGORIZED, new Object[] {}, locale);
final String uncDescription =
messageSource.getMessage(UNCATEGORIZED_DESC, new Object[] {}, locale);
PortletCategory pc =
new PortletCategory(
uncName); // Use of this String for Id matches earlier version of API
pc.setName(uncName);
pc.setDescription(uncDescription);
PortletCategoryBean unc =
PortletCategoryBean.fromPortletCategory(pc, null, marketplacePortlets);
// Add even if no portlets in category
categories.add(unc);
}
rslt.put("categories", categories);
return rslt;
} | Map<String, SortedSet<?>> function( WebRequest request, IPerson user, PortletCategory rootCategory, boolean includeUncategorized) { Set<IPortletDefinition> portletsNotYetCategorized = includeUncategorized ? new HashSet<IPortletDefinition>( portletDefinitionRegistry.getAllPortletDefinitions()) : new HashSet< IPortletDefinition>(); Map<String, SortedSet<?>> rslt = new TreeMap<String, SortedSet<?>>(); SortedSet<PortletCategoryBean> categories = new TreeSet<PortletCategoryBean>(); final Locale locale = getUserLocale(user); categories.add( preparePortletCategoryBean( request, rootCategory, portletsNotYetCategorized, user, locale)); if (includeUncategorized) { EntityIdentifier ei = user.getEntityIdentifier(); IAuthorizationPrincipal ap = AuthorizationService.instance().newPrincipal(ei.getKey(), ei.getType()); Set<PortletDefinitionBean> marketplacePortlets = new HashSet<>(); for (IPortletDefinition portlet : portletsNotYetCategorized) { if (authorizationService.canPrincipalBrowse(ap, portlet)) { PortletDefinitionBean pdb = preparePortletDefinitionBean(request, portlet, locale); marketplacePortlets.add(pdb); } } final String uncName = messageSource.getMessage(UNCATEGORIZED, new Object[] {}, locale); final String uncDescription = messageSource.getMessage(UNCATEGORIZED_DESC, new Object[] {}, locale); PortletCategory pc = new PortletCategory( uncName); pc.setName(uncName); pc.setDescription(uncDescription); PortletCategoryBean unc = PortletCategoryBean.fromPortletCategory(pc, null, marketplacePortlets); categories.add(unc); } rslt.put(STR, categories); return rslt; } | /**
* Gathers and organizes the response based on the specified rootCategory and the permissions of
* the specified user.
*/ | Gathers and organizes the response based on the specified rootCategory and the permissions of the specified user | getRegistry43 | {
"repo_name": "stalele/uPortal",
"path": "uPortal-api/uPortal-api-rest/src/main/java/org/apereo/portal/layout/dlm/remoting/ChannelListController.java",
"license": "apache-2.0",
"size": 22587
} | [
"java.util.HashSet",
"java.util.Locale",
"java.util.Map",
"java.util.Set",
"java.util.SortedSet",
"java.util.TreeMap",
"java.util.TreeSet",
"org.apereo.portal.EntityIdentifier",
"org.apereo.portal.layout.dlm.remoting.registry.v43.PortletCategoryBean",
"org.apereo.portal.layout.dlm.remoting.registr... | import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import org.apereo.portal.EntityIdentifier; import org.apereo.portal.layout.dlm.remoting.registry.v43.PortletCategoryBean; import org.apereo.portal.layout.dlm.remoting.registry.v43.PortletDefinitionBean; import org.apereo.portal.portlet.om.IPortletDefinition; import org.apereo.portal.portlet.om.PortletCategory; import org.apereo.portal.security.IAuthorizationPrincipal; import org.apereo.portal.security.IPerson; import org.apereo.portal.services.AuthorizationService; import org.springframework.web.context.request.WebRequest; | import java.util.*; import org.apereo.portal.*; import org.apereo.portal.layout.dlm.remoting.registry.v43.*; import org.apereo.portal.portlet.om.*; import org.apereo.portal.security.*; import org.apereo.portal.services.*; import org.springframework.web.context.request.*; | [
"java.util",
"org.apereo.portal",
"org.springframework.web"
] | java.util; org.apereo.portal; org.springframework.web; | 237,647 |
public static boolean hasPermission(Object object, Permission permission) throws IOException, ServletException {
if (permission == null)
return true;
if (object instanceof AccessControlled)
return ((AccessControlled)object).hasPermission(permission);
else {
List<Ancestor> ancs = Stapler.getCurrentRequest().getAncestors();
for(Ancestor anc : Iterators.reverse(ancs)) {
Object o = anc.getObject();
if (o instanceof AccessControlled) {
return ((AccessControlled)o).hasPermission(permission);
}
}
return Jenkins.getInstance().hasPermission(permission);
}
} | static boolean function(Object object, Permission permission) throws IOException, ServletException { if (permission == null) return true; if (object instanceof AccessControlled) return ((AccessControlled)object).hasPermission(permission); else { List<Ancestor> ancs = Stapler.getCurrentRequest().getAncestors(); for(Ancestor anc : Iterators.reverse(ancs)) { Object o = anc.getObject(); if (o instanceof AccessControlled) { return ((AccessControlled)o).hasPermission(permission); } } return Jenkins.getInstance().hasPermission(permission); } } | /**
* This version is so that the 'hasPermission' can degrade gracefully
* if "it" is not an {@link AccessControlled} object.
*/ | This version is so that the 'hasPermission' can degrade gracefully if "it" is not an <code>AccessControlled</code> object | hasPermission | {
"repo_name": "lilyJi/jenkins",
"path": "core/src/main/java/hudson/Functions.java",
"license": "mit",
"size": 74776
} | [
"hudson.security.AccessControlled",
"hudson.security.Permission",
"hudson.util.Iterators",
"java.io.IOException",
"java.util.List",
"javax.servlet.ServletException",
"org.kohsuke.stapler.Ancestor",
"org.kohsuke.stapler.Stapler"
] | import hudson.security.AccessControlled; import hudson.security.Permission; import hudson.util.Iterators; import java.io.IOException; import java.util.List; import javax.servlet.ServletException; import org.kohsuke.stapler.Ancestor; import org.kohsuke.stapler.Stapler; | import hudson.security.*; import hudson.util.*; import java.io.*; import java.util.*; import javax.servlet.*; import org.kohsuke.stapler.*; | [
"hudson.security",
"hudson.util",
"java.io",
"java.util",
"javax.servlet",
"org.kohsuke.stapler"
] | hudson.security; hudson.util; java.io; java.util; javax.servlet; org.kohsuke.stapler; | 2,678,356 |
@Generated
@Selector("varianceEstimationRadius")
@NUInt
public native long varianceEstimationRadius(); | @Selector(STR) native long function(); | /**
* The radius of the spatial filter used when not enough frames have been accumulated to
* compute variance from accumulated luminance moments. Defaults to 3 resulting in a 7x7 filter.
*/ | The radius of the spatial filter used when not enough frames have been accumulated to compute variance from accumulated luminance moments. Defaults to 3 resulting in a 7x7 filter | varianceEstimationRadius | {
"repo_name": "multi-os-engine/moe-core",
"path": "moe.apple/moe.platform.ios/src/main/java/apple/metalperformanceshaders/MPSSVGF.java",
"license": "apache-2.0",
"size": 51099
} | [
"org.moe.natj.objc.ann.Selector"
] | import org.moe.natj.objc.ann.Selector; | import org.moe.natj.objc.ann.*; | [
"org.moe.natj"
] | org.moe.natj; | 2,364,529 |
TimeValue newDelay(long remainingDelay, float newRequestsPerSecond) {
if (remainingDelay < 0 || newRequestsPerSecond == 0) {
return timeValueNanos(0);
}
return timeValueNanos(round(remainingDelay * requestsPerSecond / newRequestsPerSecond));
}
} | TimeValue newDelay(long remainingDelay, float newRequestsPerSecond) { if (remainingDelay < 0 newRequestsPerSecond == 0) { return timeValueNanos(0); } return timeValueNanos(round(remainingDelay * requestsPerSecond / newRequestsPerSecond)); } } | /**
* Scale back remaining delay to fit the new delay.
*/ | Scale back remaining delay to fit the new delay | newDelay | {
"repo_name": "nomoa/elasticsearch",
"path": "modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java",
"license": "apache-2.0",
"size": 18751
} | [
"org.elasticsearch.common.unit.TimeValue"
] | import org.elasticsearch.common.unit.TimeValue; | import org.elasticsearch.common.unit.*; | [
"org.elasticsearch.common"
] | org.elasticsearch.common; | 1,912,723 |
public void deleteRoleMapping(DeleteRoleMappingRequest request, ActionListener<Boolean> listener) {
modifyMapping(request.getName(), this::innerDeleteMapping, request, listener);
} | void function(DeleteRoleMappingRequest request, ActionListener<Boolean> listener) { modifyMapping(request.getName(), this::innerDeleteMapping, request, listener); } | /**
* Deletes a named mapping from the index
*/ | Deletes a named mapping from the index | deleteRoleMapping | {
"repo_name": "ern/elasticsearch",
"path": "x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java",
"license": "apache-2.0",
"size": 18536
} | [
"org.elasticsearch.action.ActionListener",
"org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest"
] | import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; | import org.elasticsearch.action.*; import org.elasticsearch.xpack.core.security.action.rolemapping.*; | [
"org.elasticsearch.action",
"org.elasticsearch.xpack"
] | org.elasticsearch.action; org.elasticsearch.xpack; | 1,485,128 |
Catalog c = job.getCatalog();
BindUtils.addFn(c, "pass",
batchSize(), batchTimeout(),
"onyxplatform.test.PassFn", MapFns.emptyMap());
} | Catalog c = job.getCatalog(); BindUtils.addFn(c, "pass", batchSize(), batchTimeout(), STR, MapFns.emptyMap()); } | /**
* Adds an Object instance of the test function to the Job catalog
*/ | Adds an Object instance of the test function to the Job catalog | configureCatalog | {
"repo_name": "onyx-platform/onyx-java",
"path": "test/java/onyxplatform/test/SingleJavaTest.java",
"license": "epl-1.0",
"size": 1215
} | [
"org.onyxplatform.api.java.Catalog",
"org.onyxplatform.api.java.instance.BindUtils",
"org.onyxplatform.api.java.utils.MapFns"
] | import org.onyxplatform.api.java.Catalog; import org.onyxplatform.api.java.instance.BindUtils; import org.onyxplatform.api.java.utils.MapFns; | import org.onyxplatform.api.java.*; import org.onyxplatform.api.java.instance.*; import org.onyxplatform.api.java.utils.*; | [
"org.onyxplatform.api"
] | org.onyxplatform.api; | 1,180,794 |
public AccessibleRole getAccessibleRole() {
return AccessibleRole.MENU;
}
} // class AccessibleAWTMenu | AccessibleRole function() { return AccessibleRole.MENU; } } | /**
* Get the role of this object.
*
* @return an instance of AccessibleRole describing the role of the
* object
*/ | Get the role of this object | getAccessibleRole | {
"repo_name": "mirkosertic/Bytecoder",
"path": "classlib/java.desktop/src/main/resources/META-INF/modules/java.desktop/classes/java/awt/Menu.java",
"license": "apache-2.0",
"size": 20715
} | [
"javax.accessibility.AccessibleRole"
] | import javax.accessibility.AccessibleRole; | import javax.accessibility.*; | [
"javax.accessibility"
] | javax.accessibility; | 748,045 |
public ListView getWrappedList() {
return mList;
} | ListView function() { return mList; } | /**
* Use the method with extreme caution!! Changing any values on the
* underlying ListView might break everything.
*
* @return the ListView backing this view.
*/ | Use the method with extreme caution!! Changing any values on the underlying ListView might break everything | getWrappedList | {
"repo_name": "stone83/ping_tools",
"path": "ccmtlibrary/src/main/java/com/ccmt/library/stickylistheaders/StickyListHeadersListView.java",
"license": "apache-2.0",
"size": 40740
} | [
"android.widget.ListView"
] | import android.widget.ListView; | import android.widget.*; | [
"android.widget"
] | android.widget; | 2,738,429 |
public static IHEAuditor getAuditor()
{
AuditorModuleContext ctx = AuditorModuleContext.getContext();
return ctx.getAuditor(IHEAuditor.class);
}
protected IHEAuditor()
{
this(null, null);
}
protected IHEAuditor(AuditorModuleContext contextToUse)
{
this(contextToUse,contextToUse.getConfig());
}
protected IHEAuditor(AuditorModuleContext contextToUse, AuditorModuleConfig configToUse)
{
context = contextToUse;
config = configToUse;
}
| static IHEAuditor function() { AuditorModuleContext ctx = AuditorModuleContext.getContext(); return ctx.getAuditor(IHEAuditor.class); } protected IHEAuditor() { this(null, null); } protected IHEAuditor(AuditorModuleContext contextToUse) { this(contextToUse,contextToUse.getConfig()); } protected IHEAuditor(AuditorModuleContext contextToUse, AuditorModuleConfig configToUse) { context = contextToUse; config = configToUse; } | /**
* Get an instance of the XDS Document Consumer Auditor from the
* global context
*
* @return XDS Document Consumer Auditor instance
*/ | Get an instance of the XDS Document Consumer Auditor from the global context | getAuditor | {
"repo_name": "oehf/ipf-oht-atna",
"path": "auditor/src/main/java/org/openhealthtools/ihe/atna/auditor/IHEAuditor.java",
"license": "epl-1.0",
"size": 24587
} | [
"org.openhealthtools.ihe.atna.auditor.context.AuditorModuleConfig",
"org.openhealthtools.ihe.atna.auditor.context.AuditorModuleContext"
] | import org.openhealthtools.ihe.atna.auditor.context.AuditorModuleConfig; import org.openhealthtools.ihe.atna.auditor.context.AuditorModuleContext; | import org.openhealthtools.ihe.atna.auditor.context.*; | [
"org.openhealthtools.ihe"
] | org.openhealthtools.ihe; | 266,121 |
public Type script(Expression expression) {
ScriptDefinition answer = new ScriptDefinition(expression);
addOutput(answer);
return asType();
} | Type function(Expression expression) { ScriptDefinition answer = new ScriptDefinition(expression); addOutput(answer); return asType(); } | /**
* Executes a script (do not change the message body).
*
* @param expression the expression used as the script.
* @return the builder
*/ | Executes a script (do not change the message body) | script | {
"repo_name": "davidkarlsen/camel",
"path": "core/camel-core/src/main/java/org/apache/camel/model/ProcessorDefinition.java",
"license": "apache-2.0",
"size": 149529
} | [
"org.apache.camel.Expression"
] | import org.apache.camel.Expression; | import org.apache.camel.*; | [
"org.apache.camel"
] | org.apache.camel; | 1,640,820 |
EReference getMemberPropertyType_Tuple(); | EReference getMemberPropertyType_Tuple(); | /**
* Returns the meta object for the containment reference '{@link net.opengis.wfs20.MemberPropertyType#getTuple <em>Tuple</em>}'. <!--
* begin-user-doc --> <!-- end-user-doc -->
*
* @return the meta object for the containment reference '<em>Tuple</em>'.
* @see net.opengis.wfs20.MemberPropertyType#getTuple()
* @see #getMemberPropertyType()
* @generated
*/ | Returns the meta object for the containment reference '<code>net.opengis.wfs20.MemberPropertyType#getTuple Tuple</code>'. | getMemberPropertyType_Tuple | {
"repo_name": "geotools/geotools",
"path": "modules/ogc/net.opengis.wfs/src/net/opengis/wfs20/Wfs20Package.java",
"license": "lgpl-2.1",
"size": 404067
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,181,539 |
@Nullable
Artifact getDefParser() {
if (!ruleContext.isAttrDefined("$def_parser", LABEL)) {
return null;
}
return ruleContext.getPrerequisiteArtifact("$def_parser", Mode.HOST);
} | Artifact getDefParser() { if (!ruleContext.isAttrDefined(STR, LABEL)) { return null; } return ruleContext.getPrerequisiteArtifact(STR, Mode.HOST); } | /**
* Returns the parser & Windows DEF file generator specified in $def_parser attribute of the rule.
*/ | Returns the parser & Windows DEF file generator specified in $def_parser attribute of the rule | getDefParser | {
"repo_name": "aehlig/bazel",
"path": "src/main/java/com/google/devtools/build/lib/rules/cpp/CcCommon.java",
"license": "apache-2.0",
"size": 46129
} | [
"com.google.devtools.build.lib.actions.Artifact",
"com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget"
] | import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget; | import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.analysis.configuredtargets.*; | [
"com.google.devtools"
] | com.google.devtools; | 992,697 |
public Calendar computeSunsetCalendar(Twilight twilight, Calendar date) {
return getLocalTimeAsCalendar(
computeSolarEventTime(twilight, date, false), date);
} | Calendar function(Twilight twilight, Calendar date) { return getLocalTimeAsCalendar( computeSolarEventTime(twilight, date, false), date); } | /**
* Computes the sunset time for the given {@link Twilight} at the given
* date.
*
* @param twilight
* The {@code Twilight} corresponding to the type of sunset to
* compute.
* @param date
* The {@code Calendar} object representing the date to compute
* the sunset for.
* @return the sunset time as a calendar or null for no sunset
*/ | Computes the sunset time for the given <code>Twilight</code> at the given date | computeSunsetCalendar | {
"repo_name": "rahulnadella/SunriseSunset",
"path": "src/main/java/SunriseSunsetCalculator.java",
"license": "mit",
"size": 17597
} | [
"java.util.Calendar"
] | import java.util.Calendar; | import java.util.*; | [
"java.util"
] | java.util; | 2,593,469 |
public void setDebugDraw(DebugDraw debugDraw) {
m_debugDraw = debugDraw;
}
| void function(DebugDraw debugDraw) { m_debugDraw = debugDraw; } | /**
* Register a routine for debug drawing. The debug draw functions are called
* inside with World.DrawDebugData method. The debug draw object is owned
* by you and must remain in scope.
*
* @param debugDraw
*/ | Register a routine for debug drawing. The debug draw functions are called inside with World.DrawDebugData method. The debug draw object is owned by you and must remain in scope | setDebugDraw | {
"repo_name": "KoriSamui/PlayN",
"path": "gwtbox2d/src/org/jbox2d/dynamics/World.java",
"license": "apache-2.0",
"size": 37716
} | [
"org.jbox2d.callbacks.DebugDraw"
] | import org.jbox2d.callbacks.DebugDraw; | import org.jbox2d.callbacks.*; | [
"org.jbox2d.callbacks"
] | org.jbox2d.callbacks; | 787,293 |
private void readOneStripe(CorruptedBlocks corruptedBlocks)
throws IOException {
resetCurStripeBuffer(true);
// compute stripe range based on pos
final long offsetInBlockGroup = getOffsetInBlockGroup();
final long stripeLen = cellSize * dataBlkNum;
final int stripeIndex = (int) (offsetInBlockGroup / stripeLen);
final int stripeBufOffset = (int) (offsetInBlockGroup % stripeLen);
final int stripeLimit = (int) Math.min(currentLocatedBlock.getBlockSize()
- (stripeIndex * stripeLen), stripeLen);
StripeRange stripeRange =
new StripeRange(offsetInBlockGroup, stripeLimit - stripeBufOffset);
LocatedStripedBlock blockGroup = (LocatedStripedBlock) currentLocatedBlock;
AlignedStripe[] stripes = StripedBlockUtil.divideOneStripe(ecPolicy,
cellSize, blockGroup, offsetInBlockGroup,
offsetInBlockGroup + stripeRange.getLength() - 1, curStripeBuf);
final LocatedBlock[] blks = StripedBlockUtil.parseStripedBlockGroup(
blockGroup, cellSize, dataBlkNum, parityBlkNum);
// read the whole stripe
for (AlignedStripe stripe : stripes) {
// Parse group to get chosen DN location
StripeReader sreader = new StatefulStripeReader(stripe, ecPolicy, blks,
blockReaders, corruptedBlocks, decoder, this);
sreader.readStripe();
}
curStripeBuf.position(stripeBufOffset);
curStripeBuf.limit(stripeLimit);
curStripeRange = stripeRange;
} | void function(CorruptedBlocks corruptedBlocks) throws IOException { resetCurStripeBuffer(true); final long offsetInBlockGroup = getOffsetInBlockGroup(); final long stripeLen = cellSize * dataBlkNum; final int stripeIndex = (int) (offsetInBlockGroup / stripeLen); final int stripeBufOffset = (int) (offsetInBlockGroup % stripeLen); final int stripeLimit = (int) Math.min(currentLocatedBlock.getBlockSize() - (stripeIndex * stripeLen), stripeLen); StripeRange stripeRange = new StripeRange(offsetInBlockGroup, stripeLimit - stripeBufOffset); LocatedStripedBlock blockGroup = (LocatedStripedBlock) currentLocatedBlock; AlignedStripe[] stripes = StripedBlockUtil.divideOneStripe(ecPolicy, cellSize, blockGroup, offsetInBlockGroup, offsetInBlockGroup + stripeRange.getLength() - 1, curStripeBuf); final LocatedBlock[] blks = StripedBlockUtil.parseStripedBlockGroup( blockGroup, cellSize, dataBlkNum, parityBlkNum); for (AlignedStripe stripe : stripes) { StripeReader sreader = new StatefulStripeReader(stripe, ecPolicy, blks, blockReaders, corruptedBlocks, decoder, this); sreader.readStripe(); } curStripeBuf.position(stripeBufOffset); curStripeBuf.limit(stripeLimit); curStripeRange = stripeRange; } | /**
* Read a new stripe covering the current position, and store the data in the
* {@link #curStripeBuf}.
*/ | Read a new stripe covering the current position, and store the data in the <code>#curStripeBuf</code> | readOneStripe | {
"repo_name": "steveloughran/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java",
"license": "apache-2.0",
"size": 19568
} | [
"java.io.IOException",
"org.apache.hadoop.hdfs.DFSUtilClient",
"org.apache.hadoop.hdfs.protocol.LocatedBlock",
"org.apache.hadoop.hdfs.protocol.LocatedStripedBlock",
"org.apache.hadoop.hdfs.util.StripedBlockUtil"
] | import java.io.IOException; import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedStripedBlock; import org.apache.hadoop.hdfs.util.StripedBlockUtil; | import java.io.*; import org.apache.hadoop.hdfs.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.util.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 2,008,136 |
protected ConnectRequestBuilder CONNECT(String pathOrUrl, boolean isFullUrl) {
return CONNECT(pathOrUrl, isFullUrl, true);
} | ConnectRequestBuilder function(String pathOrUrl, boolean isFullUrl) { return CONNECT(pathOrUrl, isFullUrl, true); } | /**
* Starts an Https Client builder for a CONNECT method.
*
* A cookie store is automatically added.
*
* @param pathOrUrl a relative path or a full URL.
*
* @param isFullUrl if the 'pathOrUrl' parameter a full URL? If
* so, it will be used as is. Otherwise it will be appended to the
* base test URL.
*/ | Starts an Https Client builder for a CONNECT method. A cookie store is automatically added | CONNECT | {
"repo_name": "spincast/spincast-framework",
"path": "spincast-testing/spincast-testing-core/src/main/java/org/spincast/testing/core/AppBasedTestingBase.java",
"license": "apache-2.0",
"size": 35096
} | [
"org.spincast.plugins.httpclient.builders.ConnectRequestBuilder"
] | import org.spincast.plugins.httpclient.builders.ConnectRequestBuilder; | import org.spincast.plugins.httpclient.builders.*; | [
"org.spincast.plugins"
] | org.spincast.plugins; | 1,929,767 |
@After
public final void tearDownPersistenceTestCase()
{
if (handleTransaction)
DatabaseResources.getEntityManager().getTransaction().rollback();
DatabaseResources.close();
} | final void function() { if (handleTransaction) DatabaseResources.getEntityManager().getTransaction().rollback(); DatabaseResources.close(); } | /**
* Rollback the database transaction.
*/ | Rollback the database transaction | tearDownPersistenceTestCase | {
"repo_name": "Paolo-Maffei/freebus-fts",
"path": "freebus-fts-client/src/test/java/org/freebus/fts/test_utils/PersistenceTestCase.java",
"license": "gpl-3.0",
"size": 2197
} | [
"org.freebus.fts.persistence.db.DatabaseResources"
] | import org.freebus.fts.persistence.db.DatabaseResources; | import org.freebus.fts.persistence.db.*; | [
"org.freebus.fts"
] | org.freebus.fts; | 1,916,120 |
public static Resource findSubject(Model model,
Property property,
String object )
throws RDFException
{
ResIterator i = model.listSubjectsWithProperty(property);
while (i.hasNext()) {
Resource subject = i.nextResource();
Statement stmt = model.getProperty(subject, property);
Object _object = stmt.getObject();
if (_object instanceof Literal) {
Literal l = (Literal) _object;
if (l.getString().equals(object)) {
return(subject);
}
} else if (_object instanceof Resource) {
Resource r = (Resource) _object;
if (r.getURI().equals(object)) {
return(subject);
}
}
}
throw(new RDFException(RDFException.STATEMENTNOTPRESENT,
"Subject for Property "
+ "'" + property.getURI()
+ "' and Object "
+ "'" + object.toString()
+ "' not found!"));
}
// ************************************************************************ | static Resource function(Model model, Property property, String object ) throws RDFException { ResIterator i = model.listSubjectsWithProperty(property); while (i.hasNext()) { Resource subject = i.nextResource(); Statement stmt = model.getProperty(subject, property); Object _object = stmt.getObject(); if (_object instanceof Literal) { Literal l = (Literal) _object; if (l.getString().equals(object)) { return(subject); } } else if (_object instanceof Resource) { Resource r = (Resource) _object; if (r.getURI().equals(object)) { return(subject); } } } throw(new RDFException(RDFException.STATEMENTNOTPRESENT, STR + "'" + property.getURI() + STR + "'" + object.toString() + STR)); } | /**
* Return the <code>Subject</code> of a <code>Statement</code>
* that has the given <code>Property</code> and <code>Object</code>.
*
* @param model a jena toolkit rdf model
* @param property property of the statement
* @param object object of the statement
* @return the resource representing the subject of the found statement
* @exception RDFException if an error occurs
* @see com.hp.hpl.jena.rdf.model.Model#listSubjectsWithProperty(Property, RDFNode);
* @see #findSubject(Model, Property, RDFNode)
*/ | Return the <code>Subject</code> of a <code>Statement</code> that has the given <code>Property</code> and <code>Object</code> | findSubject | {
"repo_name": "elitak/peertrust",
"path": "src/net/jxta/edutella/util/RdfUtilities.java",
"license": "gpl-2.0",
"size": 16656
} | [
"com.hp.hpl.jena.rdf.model.Literal",
"com.hp.hpl.jena.rdf.model.Model",
"com.hp.hpl.jena.rdf.model.Property",
"com.hp.hpl.jena.rdf.model.RDFException",
"com.hp.hpl.jena.rdf.model.ResIterator",
"com.hp.hpl.jena.rdf.model.Resource",
"com.hp.hpl.jena.rdf.model.Statement"
] | import com.hp.hpl.jena.rdf.model.Literal; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.Property; import com.hp.hpl.jena.rdf.model.RDFException; import com.hp.hpl.jena.rdf.model.ResIterator; import com.hp.hpl.jena.rdf.model.Resource; import com.hp.hpl.jena.rdf.model.Statement; | import com.hp.hpl.jena.rdf.model.*; | [
"com.hp.hpl"
] | com.hp.hpl; | 1,223,290 |
public void setLocale(Locale locale) throws CSSException {
parser.setLocale(locale);
} | void function(Locale locale) throws CSSException { parser.setLocale(locale); } | /**
* <b>SAC</b>: Implements {@link org.w3c.css.sac.Parser#setLocale(Locale)}.
*/ | SAC: Implements <code>org.w3c.css.sac.Parser#setLocale(Locale)</code> | setLocale | {
"repo_name": "Uni-Sol/batik",
"path": "sources/org/apache/batik/css/parser/ExtendedParserWrapper.java",
"license": "apache-2.0",
"size": 8987
} | [
"java.util.Locale",
"org.w3c.css.sac.CSSException"
] | import java.util.Locale; import org.w3c.css.sac.CSSException; | import java.util.*; import org.w3c.css.sac.*; | [
"java.util",
"org.w3c.css"
] | java.util; org.w3c.css; | 1,536,169 |
void generateTrunk()
{
BlockPos blockpos = this.basePos;
BlockPos blockpos1 = this.basePos.up(this.height);
Block block = Blocks.log;
this.func_175937_a(blockpos, blockpos1, block);
if (this.trunkSize == 2)
{
this.func_175937_a(blockpos.east(), blockpos1.east(), block);
this.func_175937_a(blockpos.east().south(), blockpos1.east().south(), block);
this.func_175937_a(blockpos.south(), blockpos1.south(), block);
}
} | void generateTrunk() { BlockPos blockpos = this.basePos; BlockPos blockpos1 = this.basePos.up(this.height); Block block = Blocks.log; this.func_175937_a(blockpos, blockpos1, block); if (this.trunkSize == 2) { this.func_175937_a(blockpos.east(), blockpos1.east(), block); this.func_175937_a(blockpos.east().south(), blockpos1.east().south(), block); this.func_175937_a(blockpos.south(), blockpos1.south(), block); } } | /**
* Places the trunk for the big tree that is being generated. Able to generate double-sized trunks by changing a
* field that is always 1 to 2.
*/ | Places the trunk for the big tree that is being generated. Able to generate double-sized trunks by changing a field that is always 1 to 2 | generateTrunk | {
"repo_name": "TorchPowered/CraftBloom",
"path": "src/net/minecraft/world/gen/feature/WorldGenBigTree.java",
"license": "mit",
"size": 12333
} | [
"net.minecraft.block.Block",
"net.minecraft.init.Blocks",
"net.minecraft.util.BlockPos"
] | import net.minecraft.block.Block; import net.minecraft.init.Blocks; import net.minecraft.util.BlockPos; | import net.minecraft.block.*; import net.minecraft.init.*; import net.minecraft.util.*; | [
"net.minecraft.block",
"net.minecraft.init",
"net.minecraft.util"
] | net.minecraft.block; net.minecraft.init; net.minecraft.util; | 1,558,845 |
public static final CButton createCancelButton(final boolean withText)
{
return buildCancelButton()
.useTextFromActionName(withText)
.buildAndGetCButton();
} | static final CButton function(final boolean withText) { return buildCancelButton() .useTextFromActionName(withText) .buildAndGetCButton(); } | /**
* Create Cancel Button with Standard text
*
* @param withText with text
* @return Button
*/ | Create Cancel Button with Standard text | createCancelButton | {
"repo_name": "klst-com/metasfresh",
"path": "de.metas.adempiere.adempiere/client/src/main/java-legacy/org/compiere/apps/ConfirmPanel.java",
"license": "gpl-2.0",
"size": 24441
} | [
"org.compiere.swing.CButton"
] | import org.compiere.swing.CButton; | import org.compiere.swing.*; | [
"org.compiere.swing"
] | org.compiere.swing; | 1,727,964 |
@Override
public void parseArguments(String[] args) {
Getopt g = new Getopt("gate.util.reporting.PRTimeReporter", args,
"i:m:z:s:o:l:h");
int choice;
String argSuppressZeroTimeEntries = null;
while ((choice = g.getopt()) != -1) {
switch (choice) {
// -i inputFile
case 'i':
String argInPath = g.getOptarg();
if (argInPath != null) {
setBenchmarkFile(new File(argInPath));
}
break;
// -m printMedia
case 'm':
String argPrintMedia = g.getOptarg();
if (argPrintMedia != null) {
setPrintMedia(argPrintMedia);
} else {
setPrintMedia(printMedia);
}
break;
// -z suppressZeroTimeEntries
case 'z':
argSuppressZeroTimeEntries = g.getOptarg();
if (argSuppressZeroTimeEntries == null) {
setSuppressZeroTimeEntries(suppressZeroTimeEntries);
}
break;
// -s sortOrder
case 's':
String argSortOrder = g.getOptarg();
if (argSortOrder != null) {
setSortOrder(argSortOrder);
} else {
setSortOrder(sortOrder);
}
break;
// -o ReportFile
case 'o':
String argOutPath = g.getOptarg();
if (argOutPath != null) {
setReportFile(new File(argOutPath));
}
break;
// -l logical start
case 'l':
String argLogicalStart = g.getOptarg();
if (argLogicalStart != null) {
setLogicalStart(argLogicalStart);
}
break;
// -h
case 'h':
case '?':
usage();
System.exit(STATUS_NORMAL);
break;
default:
usage();
System.exit(STATUS_ERROR);
break;
} // getopt switch
}
if (argSuppressZeroTimeEntries != null) {
if (argSuppressZeroTimeEntries.trim().equalsIgnoreCase("true")) {
setSuppressZeroTimeEntries(true);
} else if (argSuppressZeroTimeEntries.trim().equalsIgnoreCase("false")) {
setSuppressZeroTimeEntries(false);
} else {
System.err.println("Suppress Zero Time Entries: parameter value" + NL +
" passed is invalid. Please provide true or false as value.");
usage();
System.exit(STATUS_ERROR);
}
}
}
| void function(String[] args) { Getopt g = new Getopt(STR, args, STR); int choice; String argSuppressZeroTimeEntries = null; while ((choice = g.getopt()) != -1) { switch (choice) { case 'i': String argInPath = g.getOptarg(); if (argInPath != null) { setBenchmarkFile(new File(argInPath)); } break; case 'm': String argPrintMedia = g.getOptarg(); if (argPrintMedia != null) { setPrintMedia(argPrintMedia); } else { setPrintMedia(printMedia); } break; case 'z': argSuppressZeroTimeEntries = g.getOptarg(); if (argSuppressZeroTimeEntries == null) { setSuppressZeroTimeEntries(suppressZeroTimeEntries); } break; case 's': String argSortOrder = g.getOptarg(); if (argSortOrder != null) { setSortOrder(argSortOrder); } else { setSortOrder(sortOrder); } break; case 'o': String argOutPath = g.getOptarg(); if (argOutPath != null) { setReportFile(new File(argOutPath)); } break; case 'l': String argLogicalStart = g.getOptarg(); if (argLogicalStart != null) { setLogicalStart(argLogicalStart); } break; case 'h': case '?': usage(); System.exit(STATUS_NORMAL); break; default: usage(); System.exit(STATUS_ERROR); break; } } if (argSuppressZeroTimeEntries != null) { if (argSuppressZeroTimeEntries.trim().equalsIgnoreCase("true")) { setSuppressZeroTimeEntries(true); } else if (argSuppressZeroTimeEntries.trim().equalsIgnoreCase("false")) { setSuppressZeroTimeEntries(false); } else { System.err.println(STR + NL + STR); usage(); System.exit(STATUS_ERROR); } } } | /**
* Parses the report arguments.
*
* @param args
* A string array containing the command line arguments.
*/ | Parses the report arguments | parseArguments | {
"repo_name": "GateNLP/gate-core",
"path": "src/main/java/gate/util/reporting/PRTimeReporter.java",
"license": "lgpl-3.0",
"size": 45823
} | [
"gnu.getopt.Getopt",
"java.io.File"
] | import gnu.getopt.Getopt; import java.io.File; | import gnu.getopt.*; import java.io.*; | [
"gnu.getopt",
"java.io"
] | gnu.getopt; java.io; | 1,184,429 |
/**
 * Recursively applies every default (mandatory) aspect of a class definition
 * to the AVM node at the given path, accumulating each aspect's default
 * property values into the supplied map.
 *
 * @param classDef   the class definition whose default aspects are applied
 * @param path       the path to the AVM node
 * @param properties in/out map that collects the default properties of every
 *                   aspect added
 */
private void addDefaultAspects(ClassDefinition classDef, String path,
    Map<QName, Serializable> properties)
{
    NodeRef avmRef = AVMNodeConverter.ToNodeRef(-1, path);
    for (AspectDefinition aspectDef : classDef.getDefaultAspects())
    {
        // Attach the aspect itself; no aspect-specific properties are set here.
        addAspect(avmRef, aspectDef.getName(), Collections.<QName, Serializable>emptyMap());
        // Let the caller accumulate this aspect's default property values.
        properties.putAll(getDefaultProperties(aspectDef));
        // An aspect may declare default aspects of its own — apply those too.
        addDefaultAspects(aspectDef, path, properties);
    }
}
| void function(ClassDefinition classDef, String path, Map<QName, Serializable> properties) { NodeRef nodeRef = AVMNodeConverter.ToNodeRef(-1, path); List<AspectDefinition> defaultAspectDefs = classDef.getDefaultAspects(); for (AspectDefinition def : defaultAspectDefs) { addAspect(nodeRef, def.getName(), Collections.<QName, Serializable>emptyMap()); properties.putAll(getDefaultProperties(def)); addDefaultAspects(def, path, properties); } } | /**
* Add any aspects that are mandatory for the ClassDefinition.
* @param classDef The ClassDefinition.
* @param path The path to the AVMNode.
* @param properties The in/out map of accumulated properties.
*/ | Add any aspects that are mandatory for the ClassDefinition | addDefaultAspects | {
"repo_name": "loftuxab/community-edition-old",
"path": "projects/repository/source/java/org/alfresco/repo/avm/AVMNodeService.java",
"license": "lgpl-3.0",
"size": 86011
} | [
"java.io.Serializable",
"java.util.Collections",
"java.util.List",
"java.util.Map",
"org.alfresco.service.cmr.dictionary.AspectDefinition",
"org.alfresco.service.cmr.dictionary.ClassDefinition",
"org.alfresco.service.cmr.repository.NodeRef",
"org.alfresco.service.namespace.QName"
] | import java.io.Serializable; import java.util.Collections; import java.util.List; import java.util.Map; import org.alfresco.service.cmr.dictionary.AspectDefinition; import org.alfresco.service.cmr.dictionary.ClassDefinition; import org.alfresco.service.cmr.repository.NodeRef; import org.alfresco.service.namespace.QName; | import java.io.*; import java.util.*; import org.alfresco.service.cmr.dictionary.*; import org.alfresco.service.cmr.repository.*; import org.alfresco.service.namespace.*; | [
"java.io",
"java.util",
"org.alfresco.service"
] | java.io; java.util; org.alfresco.service; | 359,952 |
@Override
public void prepareForSave(KualiDocumentEvent event) {
super.prepareForSave(event);
if (StringUtils.isNotBlank(this.getCapitalAssetSystemTypeCode())) {
if (this.getCapitalAssetSystemTypeCode().equals(PurapConstants.CapitalAssetSystemTypes.ONE_SYSTEM) || this.getCapitalAssetSystemTypeCode().equals(PurapConstants.CapitalAssetSystemTypes.MULTIPLE)) {
//If the system state is ONE or MULT, we have to remove all the systems on the items because it's not applicable.
for (PurchasingCapitalAssetItem camsItem : this.getPurchasingCapitalAssetItems()) {
camsItem.setPurchasingCapitalAssetSystem(null);
}
}
}
if (event instanceof RouteDocumentEvent || event instanceof ApproveDocumentEvent) {
boolean defaultUseTaxIndicatorValue = SpringContext.getBean(PurchasingService.class).getDefaultUseTaxIndicatorValue(this);
SpringContext.getBean(PurapService.class).updateUseTaxIndicator(this, defaultUseTaxIndicatorValue);
}
} | void function(KualiDocumentEvent event) { super.prepareForSave(event); if (StringUtils.isNotBlank(this.getCapitalAssetSystemTypeCode())) { if (this.getCapitalAssetSystemTypeCode().equals(PurapConstants.CapitalAssetSystemTypes.ONE_SYSTEM) this.getCapitalAssetSystemTypeCode().equals(PurapConstants.CapitalAssetSystemTypes.MULTIPLE)) { for (PurchasingCapitalAssetItem camsItem : this.getPurchasingCapitalAssetItems()) { camsItem.setPurchasingCapitalAssetSystem(null); } } } if (event instanceof RouteDocumentEvent event instanceof ApproveDocumentEvent) { boolean defaultUseTaxIndicatorValue = SpringContext.getBean(PurchasingService.class).getDefaultUseTaxIndicatorValue(this); SpringContext.getBean(PurapService.class).updateUseTaxIndicator(this, defaultUseTaxIndicatorValue); } } | /**
* Overrides the method in PurchasingAccountsPayableDocumentBase to remove the
* purchasingCapitalAssetSystem when the system type is either ONE or MULT.
*
* @see org.kuali.kfs.module.purap.document.PurchasingAccountsPayableDocumentBase#prepareForSave(org.kuali.rice.krad.rule.event.KualiDocumentEvent)
*/ | Overrides the method in PurchasingAccountsPayableDocumentBase to remove the purchasingCapitalAssetSystem when the system type is either ONE or MULT | prepareForSave | {
"repo_name": "quikkian-ua-devops/will-financials",
"path": "kfs-purap/src/main/java/org/kuali/kfs/module/purap/document/PurchasingDocumentBase.java",
"license": "agpl-3.0",
"size": 54402
} | [
"org.apache.commons.lang.StringUtils",
"org.kuali.kfs.krad.rules.rule.event.ApproveDocumentEvent",
"org.kuali.kfs.krad.rules.rule.event.KualiDocumentEvent",
"org.kuali.kfs.krad.rules.rule.event.RouteDocumentEvent",
"org.kuali.kfs.module.purap.PurapConstants",
"org.kuali.kfs.module.purap.businessobject.Pur... | import org.apache.commons.lang.StringUtils; import org.kuali.kfs.krad.rules.rule.event.ApproveDocumentEvent; import org.kuali.kfs.krad.rules.rule.event.KualiDocumentEvent; import org.kuali.kfs.krad.rules.rule.event.RouteDocumentEvent; import org.kuali.kfs.module.purap.PurapConstants; import org.kuali.kfs.module.purap.businessobject.PurchasingCapitalAssetItem; import org.kuali.kfs.module.purap.document.service.PurapService; import org.kuali.kfs.module.purap.document.service.PurchasingService; import org.kuali.kfs.sys.context.SpringContext; | import org.apache.commons.lang.*; import org.kuali.kfs.krad.rules.rule.event.*; import org.kuali.kfs.module.purap.*; import org.kuali.kfs.module.purap.businessobject.*; import org.kuali.kfs.module.purap.document.service.*; import org.kuali.kfs.sys.context.*; | [
"org.apache.commons",
"org.kuali.kfs"
] | org.apache.commons; org.kuali.kfs; | 148,222 |
protected void openXmlPng(BasicGraphEditor editor, File file) throws IOException {
Map<String, String> text = mxPngTextDecoder.decodeCompressedText(new FileInputStream(
file));
if (text != null) {
String value = text.get("mxGraphModel");
if (value != null) {
Document document = mxXmlUtils.parseXml(URLDecoder.decode(value, "UTF-8"));
mxCodec codec = new mxCodec(document);
codec.decode(document.getDocumentElement(), editor.getGraphComponent()
.getGraph().getModel());
editor.setCurrentFile(file);
resetEditor(editor);
return;
}
}
JOptionPane.showMessageDialog(editor, mxResources.get("imageContainsNoDiagramData"));
} | void function(BasicGraphEditor editor, File file) throws IOException { Map<String, String> text = mxPngTextDecoder.decodeCompressedText(new FileInputStream( file)); if (text != null) { String value = text.get(STR); if (value != null) { Document document = mxXmlUtils.parseXml(URLDecoder.decode(value, "UTF-8")); mxCodec codec = new mxCodec(document); codec.decode(document.getDocumentElement(), editor.getGraphComponent() .getGraph().getModel()); editor.setCurrentFile(file); resetEditor(editor); return; } } JOptionPane.showMessageDialog(editor, mxResources.get(STR)); } | /**
* Reads XML+PNG format.
*/ | Reads XML+PNG format | openXmlPng | {
"repo_name": "dwslab/RoCA",
"path": "src/main/java/de/dwslab/risk/gui/jgraphx/EditorActions.java",
"license": "mit",
"size": 59357
} | [
"com.mxgraph.io.mxCodec",
"java.io.File",
"java.io.FileInputStream",
"java.io.IOException",
"java.net.URLDecoder",
"java.util.Map",
"javax.swing.JOptionPane",
"org.w3c.dom.Document"
] | import com.mxgraph.io.mxCodec; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.net.URLDecoder; import java.util.Map; import javax.swing.JOptionPane; import org.w3c.dom.Document; | import com.mxgraph.io.*; import java.io.*; import java.net.*; import java.util.*; import javax.swing.*; import org.w3c.dom.*; | [
"com.mxgraph.io",
"java.io",
"java.net",
"java.util",
"javax.swing",
"org.w3c.dom"
] | com.mxgraph.io; java.io; java.net; java.util; javax.swing; org.w3c.dom; | 303,699 |
/**
 * Get a FedoraResource for an existing resource.
 *
 * @param transaction the transaction associated with this request, or null
 *        when not operating inside a transaction
 * @param fedoraID the identifier of the resource to retrieve
 * @return the resource for the given identifier
 * @throws PathNotFoundException if the identifier cannot be found
 */
public FedoraResource getResource(final Transaction transaction, final FedoraId fedoraID)
    throws PathNotFoundException;
* Get a FedoraResource for existing resource
*
* @param transaction The transaction associated with this request or null if not in a transaction.
* @param fedoraID The identifier for the resource.
* @return The resource.
* @throws PathNotFoundException If the identifier cannot be found.
*/ | Get a FedoraResource for existing resource | getResource | {
"repo_name": "whikloj/fcrepo4",
"path": "fcrepo-kernel-api/src/main/java/org/fcrepo/kernel/api/models/ResourceFactory.java",
"license": "apache-2.0",
"size": 4186
} | [
"org.fcrepo.kernel.api.Transaction",
"org.fcrepo.kernel.api.exception.PathNotFoundException",
"org.fcrepo.kernel.api.identifiers.FedoraId"
] | import org.fcrepo.kernel.api.Transaction; import org.fcrepo.kernel.api.exception.PathNotFoundException; import org.fcrepo.kernel.api.identifiers.FedoraId; | import org.fcrepo.kernel.api.*; import org.fcrepo.kernel.api.exception.*; import org.fcrepo.kernel.api.identifiers.*; | [
"org.fcrepo.kernel"
] | org.fcrepo.kernel; | 599,637 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.