| method (string, 13-441k chars) | clean_method (string, 7-313k chars) | doc (string, 17-17.3k chars) | comment (string, 3-1.42k chars) | method_name (string, 1-273 chars) | extra (dict) | imports (list) | imports_info (string, 19-34.8k chars) | cluster_imports_info (string, 15-3.66k chars) | libraries (list) | libraries_info (string, 6-661 chars) | id (int64, 0-2.92M) |
|---|---|---|---|---|---|---|---|---|---|---|---|
private void encodeAndDecodeWithAssertions(File source)
throws FileNotFoundException, IOException {
// encode the file to a String and test that it is valid Base64
String encodedString = readClearFileCreateEncodedString(source);
assertStringContainsValidBase64(encodedString);
// read the encoded String and encode it to the target file.
File target = createTempFile();
writeClearFileFromEncodedString(target, encodedString);
// test the files.
assertFileContentsAreEqual(source, target);
}
|
void function(File source) throws FileNotFoundException, IOException { String encodedString = readClearFileCreateEncodedString(source); assertStringContainsValidBase64(encodedString); File target = createTempFile(); writeClearFileFromEncodedString(target, encodedString); assertFileContentsAreEqual(source, target); }
|
/**
* Encode the source file to a String, then decode back to a file. Check
* that the String is valid Base64, and that the target file has the same
* contents as the source file.
*/
|
Encode the source file to a String, then decode back to a file. Check that the String is valid Base64, and that the target file has the same contents as the source file
|
encodeAndDecodeWithAssertions
|
{
"repo_name": "hbarnard/fcrepo-phaidra",
"path": "fcrepo-server/src/test/java/org/fcrepo/server/test/JournalBase64Test.java",
"license": "apache-2.0",
"size": 10353
}
|
[
"java.io.File",
"java.io.FileNotFoundException",
"java.io.IOException"
] |
import java.io.File; import java.io.FileNotFoundException; import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 922,783
|
private static PathStrategy getPathStrategy(Context context, String authority) {
PathStrategy strat;
synchronized (sCache) {
strat = sCache.get(authority);
if (strat == null) {
try {
strat = parsePathStrategy(context, authority);
} catch (IOException e) {
throw new IllegalArgumentException(
"Failed to parse " + META_DATA_FILE_PROVIDER_PATHS + " meta-data", e);
} catch (XmlPullParserException e) {
throw new IllegalArgumentException(
"Failed to parse " + META_DATA_FILE_PROVIDER_PATHS + " meta-data", e);
}
sCache.put(authority, strat);
}
}
return strat;
}
|
static PathStrategy function(Context context, String authority) { PathStrategy strat; synchronized (sCache) { strat = sCache.get(authority); if (strat == null) { try { strat = parsePathStrategy(context, authority); } catch (IOException e) { throw new IllegalArgumentException( STR + META_DATA_FILE_PROVIDER_PATHS + STR, e); } catch (XmlPullParserException e) { throw new IllegalArgumentException( STR + META_DATA_FILE_PROVIDER_PATHS + STR, e); } sCache.put(authority, strat); } } return strat; }
|
/**
* Return {@link PathStrategy} for given authority, either by parsing or
* returning from cache.
*/
|
Return <code>PathStrategy</code> for given authority, either by parsing or returning from cache
|
getPathStrategy
|
{
"repo_name": "forgodsake/TowerPlus",
"path": "Android/src/com/fuav/android/data/provider/FileProvider.java",
"license": "gpl-3.0",
"size": 33666
}
|
[
"android.content.Context",
"java.io.IOException",
"org.xmlpull.v1.XmlPullParserException"
] |
import android.content.Context; import java.io.IOException; import org.xmlpull.v1.XmlPullParserException;
|
import android.content.*; import java.io.*; import org.xmlpull.v1.*;
|
[
"android.content",
"java.io",
"org.xmlpull.v1"
] |
android.content; java.io; org.xmlpull.v1;
| 1,288,299
|
public static Object getRes(Model m, Object pro, Object obj)
throws Exception {
List al = getResList(m, pro, obj);
if (al.size() == 1) {
return al.get(0);
} else if (al.size() == 0) {
return null;
}
return al;
}
|
static Object function(Model m, Object pro, Object obj) throws Exception { List al = getResList(m, pro, obj); if (al.size() == 1) { return al.get(0); } else if (al.size() == 0) { return null; } return al; }
|
/**
* Gets the res.
*
* @param m the m
* @param pro the pro
* @param obj the obj
* @return the res
* @throws Exception the exception
*/
|
Gets the res
|
getRes
|
{
"repo_name": "pipseq/semantic",
"path": "src/main/org/pipseq/rdf/jena/model/Triple.java",
"license": "mit",
"size": 15265
}
|
[
"com.hp.hpl.jena.rdf.model.Model",
"java.util.List"
] |
import com.hp.hpl.jena.rdf.model.Model; import java.util.List;
|
import com.hp.hpl.jena.rdf.model.*; import java.util.*;
|
[
"com.hp.hpl",
"java.util"
] |
com.hp.hpl; java.util;
| 1,178,309
|
private JButtonPadrao getBtnNovo() {
if (btnNovo == null) {
btnNovo = new JButtonPadrao("NOVO", "btnNovo", "button");
btnNovo.setFocusPainted(false);
btnNovo.setContentAreaFilled(false);
btnNovo.setBorderPainted(false);
btnNovo.setBackground((Color) null);
btnNovo.setBounds(100, 93, 90, 90);
btnNovo.addActionListener(this);
}
return btnNovo;
}
|
JButtonPadrao function() { if (btnNovo == null) { btnNovo = new JButtonPadrao("NOVO", STR, STR); btnNovo.setFocusPainted(false); btnNovo.setContentAreaFilled(false); btnNovo.setBorderPainted(false); btnNovo.setBackground((Color) null); btnNovo.setBounds(100, 93, 90, 90); btnNovo.addActionListener(this); } return btnNovo; }
|
/**
* This method initializes btnNovo
*
* @return javax.swing.JButton
*/
|
This method initializes btnNovo
|
getBtnNovo
|
{
"repo_name": "DiegoEveling/RHFacil",
"path": "src/telas/TelaCadastroSetor.java",
"license": "gpl-3.0",
"size": 12688
}
|
[
"java.awt.Color"
] |
import java.awt.Color;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 2,874,167
|
DropRequestDTO getFlowFileDropRequest(String connectionId, String dropRequestId);
|
DropRequestDTO getFlowFileDropRequest(String connectionId, String dropRequestId);
|
/**
* Gets the specified flow file drop request.
*
* @param connectionId The ID of the connection
* @param dropRequestId The flow file drop request
* @return The DropRequest
*/
|
Gets the specified flow file drop request
|
getFlowFileDropRequest
|
{
"repo_name": "mattyb149/nifi",
"path": "nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/NiFiServiceFacade.java",
"license": "apache-2.0",
"size": 87066
}
|
[
"org.apache.nifi.web.api.dto.DropRequestDTO"
] |
import org.apache.nifi.web.api.dto.DropRequestDTO;
|
import org.apache.nifi.web.api.dto.*;
|
[
"org.apache.nifi"
] |
org.apache.nifi;
| 2,231,456
|
public static void close(Connection connection, PreparedStatement stmt,
ResultSet results) {
close(results);
close(stmt);
close(connection);
}
|
static void function(Connection connection, PreparedStatement stmt, ResultSet results) { close(results); close(stmt); close(connection); }
|
/**
* Close all resources
*/
|
Close all resources
|
close
|
{
"repo_name": "XClouded/t4f-core",
"path": "java/aop/src/main/java/io/aos/aop/proxy/p1/DaoUtil.java",
"license": "apache-2.0",
"size": 4384
}
|
[
"java.sql.Connection",
"java.sql.PreparedStatement",
"java.sql.ResultSet"
] |
import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 302,119
|
private void stopScrolling() {
mScroller.forceFinished(true);
if (Build.VERSION.SDK_INT >= 11) {
mAutoCenterAnimator.cancel();
}
onScrollFinished();
}
|
void function() { mScroller.forceFinished(true); if (Build.VERSION.SDK_INT >= 11) { mAutoCenterAnimator.cancel(); } onScrollFinished(); }
|
/**
* Force a stop to all pie motion. Called when the user taps during a fling.
*/
|
Force a stop to all pie motion. Called when the user taps during a fling
|
stopScrolling
|
{
"repo_name": "xiongjin2015/myself",
"path": "myself/src/main/java/com/haha/myself/widget/PieChart.java",
"license": "apache-2.0",
"size": 33626
}
|
[
"android.os.Build"
] |
import android.os.Build;
|
import android.os.*;
|
[
"android.os"
] |
android.os;
| 1,557,952
|
public Set<String> getOutputOptions();
|
Set<String> function();
|
/**
* Method to get all keys to the information stored in this object.
*
* @return a <code>Set</code> with all keys held in this instance of the object
*/
|
Method to get all keys to the information stored in this object
|
getOutputOptions
|
{
"repo_name": "paolopavan/biojava",
"path": "biojava-ws/src/main/java/org/biojava/nbio/ws/alignment/RemotePairwiseAlignmentOutputProperties.java",
"license": "lgpl-2.1",
"size": 1966
}
|
[
"java.util.Set"
] |
import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 339,658
|
public long append(HRegionInfo info, byte [] tableName, WALEdit edits,
UUID clusterId, final long now, HTableDescriptor htd)
throws IOException {
return append(info, tableName, edits, clusterId, now, htd, true);
}
class LogSyncer extends Thread {
private final long optionalFlushInterval;
// List of pending writes to the HLog. There corresponds to transactions
// that have not yet returned to the client. We keep them cached here
// instead of writing them to HDFS piecemeal, because the HDFS write
// method is pretty heavyweight as far as locking is concerned. The
// goal is to increase the batchsize for writing-to-hdfs as well as
// sync-to-hdfs, so that we can get better system throughput.
private List<Entry> pendingWrites = new LinkedList<Entry>();
LogSyncer(long optionalFlushInterval) {
this.optionalFlushInterval = optionalFlushInterval;
}
|
long function(HRegionInfo info, byte [] tableName, WALEdit edits, UUID clusterId, final long now, HTableDescriptor htd) throws IOException { return append(info, tableName, edits, clusterId, now, htd, true); } class LogSyncer extends Thread { private final long optionalFlushInterval; private List<Entry> pendingWrites = new LinkedList<Entry>(); LogSyncer(long optionalFlushInterval) { this.optionalFlushInterval = optionalFlushInterval; }
|
/**
* Append a set of edits to the log. Log edits are keyed by (encoded)
* regionName, rowname, and log-sequence-id. The HLog is flushed
* after this transaction is written to the log.
*
* @param info
* @param tableName
* @param edits
* @param clusterId The originating clusterId for this edit (for replication)
* @param now
* @return txid of this transaction
* @throws IOException
*/
|
Append a set of edits to the log. Log edits are keyed by (encoded) regionName, rowname, and log-sequence-id. The HLog is flushed after this transaction is written to the log
|
append
|
{
"repo_name": "bcopeland/hbase-thrift",
"path": "src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java",
"license": "apache-2.0",
"size": 64035
}
|
[
"java.io.IOException",
"java.util.LinkedList",
"java.util.List",
"org.apache.hadoop.hbase.HRegionInfo",
"org.apache.hadoop.hbase.HTableDescriptor"
] |
import java.io.IOException; import java.util.LinkedList; import java.util.List; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor;
|
import java.io.*; import java.util.*; import org.apache.hadoop.hbase.*;
|
[
"java.io",
"java.util",
"org.apache.hadoop"
] |
java.io; java.util; org.apache.hadoop;
| 831,467
|
public AuthenticationMethodType getAuthenticationMethodTypeForIdentifier(int id) {
for (AuthenticationMethodType t : getAuthenticationMethodTypes())
if (t.getUniqueIdentifier() == id) return t;
return null;
}
|
AuthenticationMethodType function(int id) { for (AuthenticationMethodType t : getAuthenticationMethodTypes()) if (t.getUniqueIdentifier() == id) return t; return null; }
|
/**
* Gets the authentication method type for a given identifier.
*
* @param id the id
* @return the authentication method type for identifier
*/
|
Gets the authentication method type for a given identifier
|
getAuthenticationMethodTypeForIdentifier
|
{
"repo_name": "meitar/zaproxy",
"path": "zap/src/main/java/org/zaproxy/zap/extension/authentication/ExtensionAuthentication.java",
"license": "apache-2.0",
"size": 14591
}
|
[
"org.zaproxy.zap.authentication.AuthenticationMethodType"
] |
import org.zaproxy.zap.authentication.AuthenticationMethodType;
|
import org.zaproxy.zap.authentication.*;
|
[
"org.zaproxy.zap"
] |
org.zaproxy.zap;
| 799,293
|
public void store(GridCacheContext cctx, GridQueryTypeDescriptor type, CacheDataRow row)
throws IgniteCheckedException;
|
void function(GridCacheContext cctx, GridQueryTypeDescriptor type, CacheDataRow row) throws IgniteCheckedException;
|
/**
* Updates index. Note that key is unique for cache, so if cache contains multiple indexes
* the key should be removed from indexes other than one being updated.
*
* @param cctx Cache context.
* @param type Type descriptor.
* @param row New row.
* @throws IgniteCheckedException If failed.
*/
|
Updates index. Note that key is unique for cache, so if cache contains multiple indexes the key should be removed from indexes other than one being updated
|
store
|
{
"repo_name": "ntikhonov/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/query/GridQueryIndexing.java",
"license": "apache-2.0",
"size": 11676
}
|
[
"org.apache.ignite.IgniteCheckedException",
"org.apache.ignite.internal.processors.cache.GridCacheContext",
"org.apache.ignite.internal.processors.cache.persistence.CacheDataRow"
] |
import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.persistence.CacheDataRow;
|
import org.apache.ignite.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.processors.cache.persistence.*;
|
[
"org.apache.ignite"
] |
org.apache.ignite;
| 732,062
|
@Test
public void testClearPropertySynchronized() {
config.clearProperty(PROP);
sync.verify(Methods.BEGIN_WRITE, Methods.END_WRITE);
}
|
void function() { config.clearProperty(PROP); sync.verify(Methods.BEGIN_WRITE, Methods.END_WRITE); }
|
/**
* Tests the correct synchronization of clearProperty().
*/
|
Tests the correct synchronization of clearProperty()
|
testClearPropertySynchronized
|
{
"repo_name": "apache/commons-configuration",
"path": "src/test/java/org/apache/commons/configuration2/TestAbstractConfigurationSynchronization.java",
"license": "apache-2.0",
"size": 7556
}
|
[
"org.apache.commons.configuration2.SynchronizerTestImpl"
] |
import org.apache.commons.configuration2.SynchronizerTestImpl;
|
import org.apache.commons.configuration2.*;
|
[
"org.apache.commons"
] |
org.apache.commons;
| 2,910,876
|
private void openRegistryBrowser() {
hide();
try {
IRegistryConnection[] registryConnections = CAppEnvironment.getRegistryHandler()
.getRegistryConnections();
if (registryConnections.length == 0) {
RegistryConnection registryConnection = new RegistryConnection();
try {
registryConnection.setURL(new URL(DEFAULT_REGISTRY_URL));
} catch (MalformedURLException e) {
log.error(ERROR_REGISTRY_URL, e);
IStatus editorStatus = new Status(IStatus.ERROR, Activator.PLUGIN_ID, REASON_REGISTRY_URL);
ErrorDialog.openError(Display.getCurrent().getActiveShell(), ERROR_MSG_HEADER,
ERROR_REGISTRY_URL, editorStatus);
}
registryConnection.setPath(C_REG_PATH_PREFIX);
}
IRegistryData selectedPathData = CAppEnvironment.getRegistryHandler()
.selectRegistryPath(registryConnections, REGISTRY_BROWSER,
SELECT_REGISTRY_RESOURCE, IRegistryHandler.SELECTED_REGISTRY_RESOURCE);
DataMapperSchemaEditorUtil schemaEditorUtil = new DataMapperSchemaEditorUtil(inputFile);
String schemaFilePath = schemaEditorUtil.createDiagram(selectedPathData, schemaType);
if (!schemaFilePath.isEmpty()) {
setSelectedPath(schemaFilePath);
if (Messages.LoadInputSchemaAction_SchemaTypeInput.equals(schemaType)) {
InputEditPart iep = (InputEditPart) selectedEP;
iep.resetInputTreeFromFile(schemaFilePath);
} else if (Messages.LoadOutputSchemaAction_SchemaTypeOutput.equals(schemaType)) {
OutputEditPart iep = (OutputEditPart) selectedEP;
iep.resetOutputTreeFromFile(schemaFilePath);
}
}
} catch (Exception e) {
log.error(ERROR_REGISTRY_BROWSER, e);
IStatus editorStatus = new Status(IStatus.ERROR, Activator.PLUGIN_ID, REASON_REGISTRY_BROWSER);
ErrorDialog.openError(Display.getCurrent().getActiveShell(), ERROR_MSG_HEADER,
ERROR_REGISTRY_BROWSER, editorStatus);
} finally {
show();
}
}
|
void function() { hide(); try { IRegistryConnection[] registryConnections = CAppEnvironment.getRegistryHandler() .getRegistryConnections(); if (registryConnections.length == 0) { RegistryConnection registryConnection = new RegistryConnection(); try { registryConnection.setURL(new URL(DEFAULT_REGISTRY_URL)); } catch (MalformedURLException e) { log.error(ERROR_REGISTRY_URL, e); IStatus editorStatus = new Status(IStatus.ERROR, Activator.PLUGIN_ID, REASON_REGISTRY_URL); ErrorDialog.openError(Display.getCurrent().getActiveShell(), ERROR_MSG_HEADER, ERROR_REGISTRY_URL, editorStatus); } registryConnection.setPath(C_REG_PATH_PREFIX); } IRegistryData selectedPathData = CAppEnvironment.getRegistryHandler() .selectRegistryPath(registryConnections, REGISTRY_BROWSER, SELECT_REGISTRY_RESOURCE, IRegistryHandler.SELECTED_REGISTRY_RESOURCE); DataMapperSchemaEditorUtil schemaEditorUtil = new DataMapperSchemaEditorUtil(inputFile); String schemaFilePath = schemaEditorUtil.createDiagram(selectedPathData, schemaType); if (!schemaFilePath.isEmpty()) { setSelectedPath(schemaFilePath); if (Messages.LoadInputSchemaAction_SchemaTypeInput.equals(schemaType)) { InputEditPart iep = (InputEditPart) selectedEP; iep.resetInputTreeFromFile(schemaFilePath); } else if (Messages.LoadOutputSchemaAction_SchemaTypeOutput.equals(schemaType)) { OutputEditPart iep = (OutputEditPart) selectedEP; iep.resetOutputTreeFromFile(schemaFilePath); } } } catch (Exception e) { log.error(ERROR_REGISTRY_BROWSER, e); IStatus editorStatus = new Status(IStatus.ERROR, Activator.PLUGIN_ID, REASON_REGISTRY_BROWSER); ErrorDialog.openError(Display.getCurrent().getActiveShell(), ERROR_MSG_HEADER, ERROR_REGISTRY_BROWSER, editorStatus); } finally { show(); } }
|
/**
* Open Registry browser
*/
|
Open Registry browser
|
openRegistryBrowser
|
{
"repo_name": "splinter/developer-studio",
"path": "data-mapper/org.wso2.developerstudio.visualdatamapper.diagram/src/dataMapper/diagram/custom/util/SchemaKeyEditorDialog.java",
"license": "apache-2.0",
"size": 15499
}
|
[
"java.net.MalformedURLException",
"org.eclipse.core.runtime.IStatus",
"org.eclipse.core.runtime.Status",
"org.eclipse.jface.dialogs.ErrorDialog",
"org.eclipse.swt.widgets.Display",
"org.wso2.developerstudio.eclipse.capp.core.artifacts.manager.CAppEnvironment",
"org.wso2.developerstudio.eclipse.capp.core.model.RegistryConnection",
"org.wso2.developerstudio.eclipse.greg.core.interfaces.IRegistryConnection",
"org.wso2.developerstudio.eclipse.greg.core.interfaces.IRegistryData",
"org.wso2.developerstudio.eclipse.greg.core.interfaces.IRegistryHandler",
"org.wso2.developerstudio.visualdatamapper.diagram.Activator"
] |
import java.net.MalformedURLException; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Status; import org.eclipse.jface.dialogs.ErrorDialog; import org.eclipse.swt.widgets.Display; import org.wso2.developerstudio.eclipse.capp.core.artifacts.manager.CAppEnvironment; import org.wso2.developerstudio.eclipse.capp.core.model.RegistryConnection; import org.wso2.developerstudio.eclipse.greg.core.interfaces.IRegistryConnection; import org.wso2.developerstudio.eclipse.greg.core.interfaces.IRegistryData; import org.wso2.developerstudio.eclipse.greg.core.interfaces.IRegistryHandler; import org.wso2.developerstudio.visualdatamapper.diagram.Activator;
|
import java.net.*; import org.eclipse.core.runtime.*; import org.eclipse.jface.dialogs.*; import org.eclipse.swt.widgets.*; import org.wso2.developerstudio.eclipse.capp.core.artifacts.manager.*; import org.wso2.developerstudio.eclipse.capp.core.model.*; import org.wso2.developerstudio.eclipse.greg.core.interfaces.*; import org.wso2.developerstudio.visualdatamapper.diagram.*;
|
[
"java.net",
"org.eclipse.core",
"org.eclipse.jface",
"org.eclipse.swt",
"org.wso2.developerstudio"
] |
java.net; org.eclipse.core; org.eclipse.jface; org.eclipse.swt; org.wso2.developerstudio;
| 136,230
|
protected void populateAttributeDefinition(BaseAttributeDefinition definition) {
definition.setDependencyOnly(isDependencyOnly());
if (getDisplayNames() != null) {
definition.getDisplayNames().putAll(getDisplayNames());
}
if (getDisplayDescriptions() != null) {
definition.getDisplayDescriptions().putAll(getDisplayDescriptions());
}
if (getDependencyIds() != null) {
definition.getDependencyIds().addAll(getDependencyIds());
}
if (getAttributeEncoders() != null) {
definition.getAttributeEncoders().addAll(getAttributeEncoders());
}
definition.setId(getPluginId());
definition.setSourceAttributeID(getSourceAttributeId());
}
|
void function(BaseAttributeDefinition definition) { definition.setDependencyOnly(isDependencyOnly()); if (getDisplayNames() != null) { definition.getDisplayNames().putAll(getDisplayNames()); } if (getDisplayDescriptions() != null) { definition.getDisplayDescriptions().putAll(getDisplayDescriptions()); } if (getDependencyIds() != null) { definition.getDependencyIds().addAll(getDependencyIds()); } if (getAttributeEncoders() != null) { definition.getAttributeEncoders().addAll(getAttributeEncoders()); } definition.setId(getPluginId()); definition.setSourceAttributeID(getSourceAttributeId()); }
|
/**
* Populates the attribute definition with information from this factory.
*
* @param definition attribute definition to populate
*/
|
Populates the attribute definition with information from this factory
|
populateAttributeDefinition
|
{
"repo_name": "brainysmith/shibboleth-common",
"path": "src/main/java/edu/internet2/middleware/shibboleth/common/config/attribute/resolver/attributeDefinition/BaseAttributeDefinitionFactoryBean.java",
"license": "apache-2.0",
"size": 5806
}
|
[
"edu.internet2.middleware.shibboleth.common.attribute.resolver.provider.attributeDefinition.BaseAttributeDefinition"
] |
import edu.internet2.middleware.shibboleth.common.attribute.resolver.provider.attributeDefinition.BaseAttributeDefinition;
|
import edu.internet2.middleware.shibboleth.common.attribute.resolver.provider.*;
|
[
"edu.internet2.middleware"
] |
edu.internet2.middleware;
| 533,157
|
@Override
public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException {
debugCodeCall("supportsMixedCaseQuotedIdentifiers");
String m = getMode();
if (m.equals("MySQL")) {
return false;
}
return true;
}
|
boolean function() throws SQLException { debugCodeCall(STR); String m = getMode(); if (m.equals("MySQL")) { return false; } return true; }
|
/**
* Checks if a table created with CREATE TABLE "Test"(ID INT) is a different
* table than a table created with CREATE TABLE TEST(ID INT).
*
* @return true usually, and false in MySQL mode
*/
|
Checks if a table created with CREATE TABLE "Test"(ID INT) is a different table than a table created with CREATE TABLE TEST(ID INT)
|
supportsMixedCaseQuotedIdentifiers
|
{
"repo_name": "paulnguyen/data",
"path": "sqldbs/h2java/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java",
"license": "apache-2.0",
"size": 102972
}
|
[
"java.sql.SQLException"
] |
import java.sql.SQLException;
|
import java.sql.*;
|
[
"java.sql"
] |
java.sql;
| 1,604,782
|
protected void setField(Object o, String name, int index, Object value) {
Field f;
try {
f = o.getClass().getField(name);
Array.set(f.get(o), index, value);
}
catch (Exception e) {
e.printStackTrace();
}
}
|
void function(Object o, String name, int index, Object value) { Field f; try { f = o.getClass().getField(name); Array.set(f.get(o), index, value); } catch (Exception e) { e.printStackTrace(); } }
|
/**
* sets the specified field in an array.
*
* @param o the object to set the field for
* @param name the name of the field
* @param index the index in the array
* @param value the new value of the field
*/
|
sets the specified field in an array
|
setField
|
{
"repo_name": "brsaran/ACPP",
"path": "wekafiles/packages/LibSVM/src/main/java/weka/classifiers/functions/LibSVM.java",
"license": "gpl-2.0",
"size": 50093
}
|
[
"java.lang.reflect.Array",
"java.lang.reflect.Field"
] |
import java.lang.reflect.Array; import java.lang.reflect.Field;
|
import java.lang.reflect.*;
|
[
"java.lang"
] |
java.lang;
| 2,865,830
|
public ImageIcon getBiggestIcon();
|
ImageIcon function();
|
/**
* Gets the plug-in's icon, should the biggest available raster image
*
* @return The plug-in's icon, <code>null</code> if raster image is missing
*/
|
Gets the plug-in's icon, should the biggest available raster image
|
getBiggestIcon
|
{
"repo_name": "maandree/paradis",
"path": "src/org/nongnu/paradis/plugin/PluginV1.java",
"license": "gpl-3.0",
"size": 2999
}
|
[
"javax.swing.ImageIcon"
] |
import javax.swing.ImageIcon;
|
import javax.swing.*;
|
[
"javax.swing"
] |
javax.swing;
| 1,261,980
|
@SideOnly(Side.CLIENT)
public void applyOrientationToEntity(Entity entityToUpdate)
{
}
|
@SideOnly(Side.CLIENT) void function(Entity entityToUpdate) { }
|
/**
* Applies this entity's orientation (pitch/yaw) to another entity. Used to update passenger orientation.
*/
|
Applies this entity's orientation (pitch/yaw) to another entity. Used to update passenger orientation
|
applyOrientationToEntity
|
{
"repo_name": "aebert1/BigTransport",
"path": "build/tmp/recompileMc/sources/net/minecraft/entity/Entity.java",
"license": "gpl-3.0",
"size": 111191
}
|
[
"net.minecraftforge.fml.relauncher.Side",
"net.minecraftforge.fml.relauncher.SideOnly"
] |
import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly;
|
import net.minecraftforge.fml.relauncher.*;
|
[
"net.minecraftforge.fml"
] |
net.minecraftforge.fml;
| 1,137,872
|
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<RouteTableInner> listByResourceGroupAsync(String resourceGroupName) {
return new PagedFlux<>(
() -> listByResourceGroupSinglePageAsync(resourceGroupName), nextLink -> listNextSinglePageAsync(nextLink));
}
|
@ServiceMethod(returns = ReturnType.COLLECTION) PagedFlux<RouteTableInner> function(String resourceGroupName) { return new PagedFlux<>( () -> listByResourceGroupSinglePageAsync(resourceGroupName), nextLink -> listNextSinglePageAsync(nextLink)); }
|
/**
* Gets all route tables in a resource group.
*
* @param resourceGroupName The name of the resource group.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all route tables in a resource group.
*/
|
Gets all route tables in a resource group
|
listByResourceGroupAsync
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/implementation/RouteTablesClientImpl.java",
"license": "mit",
"size": 67090
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedFlux",
"com.azure.resourcemanager.network.fluent.models.RouteTableInner"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedFlux; import com.azure.resourcemanager.network.fluent.models.RouteTableInner;
|
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.network.fluent.models.*;
|
[
"com.azure.core",
"com.azure.resourcemanager"
] |
com.azure.core; com.azure.resourcemanager;
| 2,828,956
|
@NonNull
public static String getUniversalDateFormatted(@NonNull final DateItem date) {
Calendar calendar = getCalendar(date);
SimpleDateFormat sdf = new SimpleDateFormat("MM-dd-yyyy", Locale.US);
return sdf.format(calendar.getTime());
}
|
static String function(@NonNull final DateItem date) { Calendar calendar = getCalendar(date); SimpleDateFormat sdf = new SimpleDateFormat(STR, Locale.US); return sdf.format(calendar.getTime()); }
|
/**
* Date formatted to US locale in export format
*
* @param date Date Item
* @return Formatted date
*/
|
Date formatted to US locale in export format
|
getUniversalDateFormatted
|
{
"repo_name": "morris-james/appbaselibrary",
"path": "src/main/java/com/jamesmorrisstudios/appbaselibrary/time/UtilsTime.java",
"license": "apache-2.0",
"size": 47649
}
|
[
"android.support.annotation.NonNull",
"java.text.SimpleDateFormat",
"java.util.Calendar",
"java.util.Locale"
] |
import android.support.annotation.NonNull; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Locale;
|
import android.support.annotation.*; import java.text.*; import java.util.*;
|
[
"android.support",
"java.text",
"java.util"
] |
android.support; java.text; java.util;
| 2,458,520
|
@Override public void enterPpol(@NotNull PoCoParser.PpolContext ctx) { }
|
@Override public void enterPpol(@NotNull PoCoParser.PpolContext ctx) { }
|
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
|
The default implementation does nothing
|
exitSrecomp
|
{
"repo_name": "Corjuh/PoCo-Compiler",
"path": "Parser/gen/PoCoParserBaseListener.java",
"license": "lgpl-2.1",
"size": 18482
}
|
[
"org.antlr.v4.runtime.misc.NotNull"
] |
import org.antlr.v4.runtime.misc.NotNull;
|
import org.antlr.v4.runtime.misc.*;
|
[
"org.antlr.v4"
] |
org.antlr.v4;
| 706,443
|
private void handleSkRemoveButtonSelected() {
IStructuredSelection sel = (IStructuredSelection) smartKeyTable.getSelection();
smartKeyTable.getControl().setRedraw(false);
for (Iterator i = sel.iterator(); i.hasNext(); ) {
SmartKeyVariable var = (SmartKeyVariable) i.next();
smartKeyTable.remove(var);
smartKeys.remove(var.getName());
}
smartKeyTable.getControl().setRedraw(true);
}
|
void function() { IStructuredSelection sel = (IStructuredSelection) smartKeyTable.getSelection(); smartKeyTable.getControl().setRedraw(false); for (Iterator i = sel.iterator(); i.hasNext(); ) { SmartKeyVariable var = (SmartKeyVariable) i.next(); smartKeyTable.remove(var); smartKeys.remove(var.getName()); } smartKeyTable.getControl().setRedraw(true); }
|
/**
* Handle remove-button presses.
*/
|
Handle remove-button presses
|
handleSkRemoveButtonSelected
|
{
"repo_name": "kolovos/texlipse",
"path": "net.sourceforge.texlipse/src/net/sourceforge/texlipse/properties/editor/StyleListFieldEditor.java",
"license": "epl-1.0",
"size": 28156
}
|
[
"java.util.Iterator",
"org.eclipse.jface.viewers.IStructuredSelection"
] |
import java.util.Iterator; import org.eclipse.jface.viewers.IStructuredSelection;
|
import java.util.*; import org.eclipse.jface.viewers.*;
|
[
"java.util",
"org.eclipse.jface"
] |
java.util; org.eclipse.jface;
| 1,762,356
|
public VersionNumber getRemotingMinimumVersion() {
return RemotingVersionInfo.getMinimumSupportedVersion();
}
|
VersionNumber function() { return RemotingVersionInfo.getMinimumSupportedVersion(); }
|
/**
* Gets Remoting minimum supported version to prevent unsupported agents from connecting
* @since 2.171
*/
|
Gets Remoting minimum supported version to prevent unsupported agents from connecting
|
getRemotingMinimumVersion
|
{
"repo_name": "batmat/jenkins",
"path": "core/src/main/java/hudson/TcpSlaveAgentListener.java",
"license": "mit",
"size": 23887
}
|
[
"hudson.util.VersionNumber"
] |
import hudson.util.VersionNumber;
|
import hudson.util.*;
|
[
"hudson.util"
] |
hudson.util;
| 1,855,130
|
public static ExtensionDescription getDefaultDescription(boolean required,
boolean repeatable) {
ExtensionDescription desc =
ExtensionDescription.getDefaultDescription(GwoAbPageVariationId.class);
desc.setRequired(required);
desc.setRepeatable(repeatable);
return desc;
}
|
static ExtensionDescription function(boolean required, boolean repeatable) { ExtensionDescription desc = ExtensionDescription.getDefaultDescription(GwoAbPageVariationId.class); desc.setRequired(required); desc.setRepeatable(repeatable); return desc; }
|
/**
* Returns the extension description, specifying whether it is required, and
* whether it is repeatable.
*
* @param required whether it is required
* @param repeatable whether it is repeatable
* @return extension description
*/
|
Returns the extension description, specifying whether it is required, and whether it is repeatable
|
getDefaultDescription
|
{
"repo_name": "simonrrr/gdata-java-client",
"path": "java/src/com/google/gdata/data/analytics/GwoAbPageVariationId.java",
"license": "apache-2.0",
"size": 2132
}
|
[
"com.google.gdata.data.ExtensionDescription"
] |
import com.google.gdata.data.ExtensionDescription;
|
import com.google.gdata.data.*;
|
[
"com.google.gdata"
] |
com.google.gdata;
| 2,630,602
|
public Class<? extends IToken> getTokenClass() {
return tokenClass;
}
|
Class<? extends IToken> function() { return tokenClass; }
|
/**
* Get required token class
* @return Required Token class
*/
|
Get required token class
|
getTokenClass
|
{
"repo_name": "JonathanxD/TextLexer",
"path": "src/main/java/com/github/jonathanxd/textlexer/ext/parser/processor/rule/requeriments/Requirement.java",
"license": "agpl-3.0",
"size": 2926
}
|
[
"com.github.jonathanxd.textlexer.lexer.token.IToken"
] |
import com.github.jonathanxd.textlexer.lexer.token.IToken;
|
import com.github.jonathanxd.textlexer.lexer.token.*;
|
[
"com.github.jonathanxd"
] |
com.github.jonathanxd;
| 2,446,391
|
private long checkFile(FileIO fileIO, File cfgFile) throws IOException {
ByteBuffer hdr = ByteBuffer.allocate(headerSize()).order(ByteOrder.nativeOrder());
fileIO.readFully(hdr);
hdr.rewind();
long signature = hdr.getLong();
String prefix = "Failed to verify, file=" + cfgFile.getAbsolutePath() + "\" ";
if (SIGNATURE != signature)
throw new IOException(prefix + "(invalid file signature)" +
" [expectedSignature=" + U.hexLong(SIGNATURE) +
", actualSignature=" + U.hexLong(signature) + ']');
int ver = hdr.getInt();
if (version() != ver)
throw new IOException(prefix + "(invalid file version)" +
" [expectedVersion=" + version() +
", fileVersion=" + ver + "]");
byte type = hdr.get();
if (this.type != type)
throw new IOException(prefix + "(invalid file type)" +
" [expectedFileType=" + this.type +
", actualFileType=" + type + "]");
int pageSize = hdr.getInt();
if (dbCfg.getPageSize() != pageSize)
throw new IOException(prefix + "(invalid page size)" +
" [expectedPageSize=" + dbCfg.getPageSize() +
", filePageSize=" + pageSize + "]");
long fileSize = cfgFile.length();
if (fileSize == headerSize()) // Every file has a special meta page.
fileSize = pageSize + headerSize();
if (fileSize % pageSize != 0) // In the case of compressed pages we can miss the tail of the page.
fileSize = (fileSize / pageSize + 1) * pageSize;
return fileSize;
}
|
long function(FileIO fileIO, File cfgFile) throws IOException { ByteBuffer hdr = ByteBuffer.allocate(headerSize()).order(ByteOrder.nativeOrder()); fileIO.readFully(hdr); hdr.rewind(); long signature = hdr.getLong(); String prefix = STR + cfgFile.getAbsolutePath() + "\" STR(invalid file signature)STR [expectedSignature=STR, actualSignature=STR(invalid file version)STR [expectedVersion=STR, fileVersion=STR]STR(invalid file type)STR [expectedFileType=STR, actualFileType=STR]STR(invalid page size)STR [expectedPageSize=STR, filePageSize=STR]"); long fileSize = cfgFile.length(); if (fileSize == headerSize()) fileSize = pageSize + headerSize(); if (fileSize % pageSize != 0) fileSize = (fileSize / pageSize + 1) * pageSize; return fileSize; }
|
/**
* Checks that file store has correct header and size.
*
* @return Next available position in the file to store a data.
* @throws IOException If check has failed.
*/
|
Checks that file store has correct header and size
|
checkFile
|
{
"repo_name": "SomeFire/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/file/FilePageStore.java",
"license": "apache-2.0",
"size": 26722
}
|
[
"java.io.File",
"java.io.IOException",
"java.nio.ByteBuffer",
"java.nio.ByteOrder"
] |
import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder;
|
import java.io.*; import java.nio.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 88,072
|
private JPanel getOptionPanel() {
if (optionPanel == null) {
labelForStrike = new JLabel();
labelForStrike.setHorizontalAlignment(SwingConstants.TRAILING);
labelForStrike.setText("strike");
labelForValue = new JLabel();
labelForValue.setText("value");
labelForValue.setHorizontalAlignment(SwingConstants.TRAILING);
labelForMaturity = new JLabel();
labelForMaturity.setText("maturity");
labelForMaturity.setHorizontalAlignment(SwingConstants.TRAILING);
optionPanel = new JPanel();
optionPanel.setLayout(new BoxLayout(getOptionPanel(), BoxLayout.Y_AXIS));
optionPanel.setBorder(BorderFactory.createTitledBorder(null, "Option Specification", TitledBorder.DEFAULT_JUSTIFICATION, TitledBorder.DEFAULT_POSITION, null, null));
optionPanel.add(labelForStrike, null);
optionPanel.add(getOptionStrike(), null);
optionPanel.add(labelForMaturity, null);
optionPanel.add(getOptionMaturity(), null);
optionPanel.add(labelForValue, null);
optionPanel.add(getOptionValue(), null);
}
return optionPanel;
}
|
JPanel function() { if (optionPanel == null) { labelForStrike = new JLabel(); labelForStrike.setHorizontalAlignment(SwingConstants.TRAILING); labelForStrike.setText(STR); labelForValue = new JLabel(); labelForValue.setText("value"); labelForValue.setHorizontalAlignment(SwingConstants.TRAILING); labelForMaturity = new JLabel(); labelForMaturity.setText(STR); labelForMaturity.setHorizontalAlignment(SwingConstants.TRAILING); optionPanel = new JPanel(); optionPanel.setLayout(new BoxLayout(getOptionPanel(), BoxLayout.Y_AXIS)); optionPanel.setBorder(BorderFactory.createTitledBorder(null, STR, TitledBorder.DEFAULT_JUSTIFICATION, TitledBorder.DEFAULT_POSITION, null, null)); optionPanel.add(labelForStrike, null); optionPanel.add(getOptionStrike(), null); optionPanel.add(labelForMaturity, null); optionPanel.add(getOptionMaturity(), null); optionPanel.add(labelForValue, null); optionPanel.add(getOptionValue(), null); } return optionPanel; }
|
/**
* This method initializes optionPanel
*
* @return javax.swing.JPanel
*/
|
This method initializes optionPanel
|
getOptionPanel
|
{
"repo_name": "finmath/finmath-experiments",
"path": "src/main/java/net/finmath/experiments/blackScholes/BlackScholesOptionCalculator.java",
"license": "apache-2.0",
"size": 9040
}
|
[
"javax.swing.BorderFactory",
"javax.swing.BoxLayout",
"javax.swing.JLabel",
"javax.swing.JPanel",
"javax.swing.SwingConstants",
"javax.swing.border.TitledBorder"
] |
import javax.swing.BorderFactory; import javax.swing.BoxLayout; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.SwingConstants; import javax.swing.border.TitledBorder;
|
import javax.swing.*; import javax.swing.border.*;
|
[
"javax.swing"
] |
javax.swing;
| 1,786,131
|
public void testCRLine(){
PartialLineBuffer partialLineBuffer = new PartialLineBuffer();
char[] data = "Test1\r".toCharArray();
partialLineBuffer.addData(data, 0, data.length);
data = "".toCharArray();
partialLineBuffer.addData(data, 0, data.length);
List<String> lines = partialLineBuffer.getLines();
assertEquals(0, lines.size());
String partialLine = partialLineBuffer.getPartialLine(true);
assertEquals("Test1",partialLine);
}
|
void function(){ PartialLineBuffer partialLineBuffer = new PartialLineBuffer(); char[] data = STR.toCharArray(); partialLineBuffer.addData(data, 0, data.length); data = STRTest1",partialLine); }
|
/**
* Test input ending with CR, then empty string
*/
|
Test input ending with CR, then empty string
|
testCRLine
|
{
"repo_name": "variacode/rundeck",
"path": "core/src/test/java/com/dtolabs/rundeck/core/utils/TestPartialLineBuffer.java",
"license": "apache-2.0",
"size": 14906
}
|
[
"com.dtolabs.rundeck.core.utils.PartialLineBuffer"
] |
import com.dtolabs.rundeck.core.utils.PartialLineBuffer;
|
import com.dtolabs.rundeck.core.utils.*;
|
[
"com.dtolabs.rundeck"
] |
com.dtolabs.rundeck;
| 1,735,819
|
public RunInner withPlatform(PlatformProperties platform) {
if (this.innerProperties() == null) {
this.innerProperties = new RunProperties();
}
this.innerProperties().withPlatform(platform);
return this;
}
|
RunInner function(PlatformProperties platform) { if (this.innerProperties() == null) { this.innerProperties = new RunProperties(); } this.innerProperties().withPlatform(platform); return this; }
|
/**
* Set the platform property: The platform properties against which the run will happen.
*
* @param platform the platform value to set.
* @return the RunInner object itself.
*/
|
Set the platform property: The platform properties against which the run will happen
|
withPlatform
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-containerregistry/src/main/java/com/azure/resourcemanager/containerregistry/fluent/models/RunInner.java",
"license": "mit",
"size": 18694
}
|
[
"com.azure.resourcemanager.containerregistry.models.PlatformProperties"
] |
import com.azure.resourcemanager.containerregistry.models.PlatformProperties;
|
import com.azure.resourcemanager.containerregistry.models.*;
|
[
"com.azure.resourcemanager"
] |
com.azure.resourcemanager;
| 1,608,540
|
private void visitCommonJSIfStatement(Node n) {
Node p = n.getParent();
if (p != null) {
// pull out then-branch
replaceIfStatementWithBranch(n, n.getSecondChild());
}
}
|
void function(Node n) { Node p = n.getParent(); if (p != null) { replaceIfStatementWithBranch(n, n.getSecondChild()); } }
|
/**
* Rewrites CommonJS part of UMD pattern by removing the if-condition and the
* else-branch and adds the then-branch directly to the current parent node.
*/
|
Rewrites CommonJS part of UMD pattern by removing the if-condition and the else-branch and adds the then-branch directly to the current parent node
|
visitCommonJSIfStatement
|
{
"repo_name": "Medium/closure-compiler",
"path": "src/com/google/javascript/jscomp/ProcessCommonJSModules.java",
"license": "apache-2.0",
"size": 26062
}
|
[
"com.google.javascript.rhino.Node"
] |
import com.google.javascript.rhino.Node;
|
import com.google.javascript.rhino.*;
|
[
"com.google.javascript"
] |
com.google.javascript;
| 1,995,844
|
public MultipleCurrencyCurveSensitivityMarket multipliedBy(final double factor) {
final TreeMap<Currency, CurveSensitivityMarket> map = new TreeMap<Currency, CurveSensitivityMarket>();
for (final Currency loopccy : _sensitivity.keySet()) {
map.put(loopccy, _sensitivity.get(loopccy).multipliedBy(factor));
}
return new MultipleCurrencyCurveSensitivityMarket(map);
}
|
MultipleCurrencyCurveSensitivityMarket function(final double factor) { final TreeMap<Currency, CurveSensitivityMarket> map = new TreeMap<Currency, CurveSensitivityMarket>(); for (final Currency loopccy : _sensitivity.keySet()) { map.put(loopccy, _sensitivity.get(loopccy).multipliedBy(factor)); } return new MultipleCurrencyCurveSensitivityMarket(map); }
|
/**
* Create a new multiple currency sensitivity by multiplying all the sensitivities in a multiple currency sensitivity by a common factor.
* @param factor The multiplicative factor.
* @return The new multiple currency sensitivity.
*/
|
Create a new multiple currency sensitivity by multiplying all the sensitivities in a multiple currency sensitivity by a common factor
|
multipliedBy
|
{
"repo_name": "charles-cooper/idylfin",
"path": "src/com/opengamma/analytics/financial/interestrate/market/description/MultipleCurrencyCurveSensitivityMarket.java",
"license": "apache-2.0",
"size": 7869
}
|
[
"com.opengamma.util.money.Currency",
"java.util.TreeMap"
] |
import com.opengamma.util.money.Currency; import java.util.TreeMap;
|
import com.opengamma.util.money.*; import java.util.*;
|
[
"com.opengamma.util",
"java.util"
] |
com.opengamma.util; java.util;
| 307,508
|
public static ArrayList<String> get(String urlString) throws IOException{
// TODO handle 503 response
// TODO add timeout functionality
HttpURLConnection connection;
BufferedReader rd = null;
ArrayList<String> strings = new ArrayList<String>();
try {
URL url = new URL(urlString);
connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod(GET);
rd = new BufferedReader(new InputStreamReader(connection.getInputStream()));
String line = "";
while ((line = rd.readLine()) != null) {
strings.add(line);
}
} finally {
if (rd != null) {
rd.close();
}
}
return strings;
}
|
static ArrayList<String> function(String urlString) throws IOException{ HttpURLConnection connection; BufferedReader rd = null; ArrayList<String> strings = new ArrayList<String>(); try { URL url = new URL(urlString); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod(GET); rd = new BufferedReader(new InputStreamReader(connection.getInputStream())); String line = ""; while ((line = rd.readLine()) != null) { strings.add(line); } } finally { if (rd != null) { rd.close(); } } return strings; }
|
/**
* Retrieves content on provided url.
* Recreates connection on each call.
* @param urlString
* @return BufferedReader
* @throws IOException
*/
|
Retrieves content on provided url. Recreates connection on each call
|
get
|
{
"repo_name": "PerrigoGames/CardStockGDX",
"path": "randomorg/java/com/mishadoff/randomorg/util/HTTPUtils.java",
"license": "apache-2.0",
"size": 1270
}
|
[
"java.io.BufferedReader",
"java.io.IOException",
"java.io.InputStreamReader",
"java.net.HttpURLConnection",
"java.util.ArrayList"
] |
import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.util.ArrayList;
|
import java.io.*; import java.net.*; import java.util.*;
|
[
"java.io",
"java.net",
"java.util"
] |
java.io; java.net; java.util;
| 583,258
|
@Override
public void contextDestroyed(ServletContextEvent sce) {
ServletContext servletContext = sce.getServletContext();
deleteFile(servletContext.getInitParameter(Constants.OCELOT));
deleteFile(servletContext.getInitParameter(Constants.OCELOT_MIN));
}
|
void function(ServletContextEvent sce) { ServletContext servletContext = sce.getServletContext(); deleteFile(servletContext.getInitParameter(Constants.OCELOT)); deleteFile(servletContext.getInitParameter(Constants.OCELOT_MIN)); }
|
/**
* Event context destroyed
*
* @param sce
*/
|
Event context destroyed
|
contextDestroyed
|
{
"repo_name": "antoinesd/ocelot",
"path": "ocelot-web/src/main/java/org/ocelotds/web/ContextListener.java",
"license": "mpl-2.0",
"size": 7435
}
|
[
"javax.servlet.ServletContext",
"javax.servlet.ServletContextEvent",
"org.ocelotds.Constants"
] |
import javax.servlet.ServletContext; import javax.servlet.ServletContextEvent; import org.ocelotds.Constants;
|
import javax.servlet.*; import org.ocelotds.*;
|
[
"javax.servlet",
"org.ocelotds"
] |
javax.servlet; org.ocelotds;
| 1,846,921
|
@Override
protected void onListItemClick(ListView l, View v, int position, long id) {
file = (File) l.getItemAtPosition(position);
if (SDCARD.equals(file.getName())){
up.setVisibility(View.GONE);
} else {
up.setVisibility(View.VISIBLE);
}
int c;
if (multipleMode){
if (selectionFiles.contains(file)){
selectionFiles.remove(file);
c = Color.BLUE;
} else {
selectionFiles.add(file);
c = Color.DKGRAY;
}
v.setBackgroundColor(c);
l.getChildAt(position).refreshDrawableState();
} else {
if (file != null){
if (file.isDirectory()) {
setListAdapter(null);
if (file.getParent() != null){
parent = new File(file.getParent());
} else {
parent = null;
}
initialize(file.getName(), file);
} else {
ActionUtils.openDocument(this, file);
}
}
}
}
|
void function(ListView l, View v, int position, long id) { file = (File) l.getItemAtPosition(position); if (SDCARD.equals(file.getName())){ up.setVisibility(View.GONE); } else { up.setVisibility(View.VISIBLE); } int c; if (multipleMode){ if (selectionFiles.contains(file)){ selectionFiles.remove(file); c = Color.BLUE; } else { selectionFiles.add(file); c = Color.DKGRAY; } v.setBackgroundColor(c); l.getChildAt(position).refreshDrawableState(); } else { if (file != null){ if (file.isDirectory()) { setListAdapter(null); if (file.getParent() != null){ parent = new File(file.getParent()); } else { parent = null; } initialize(file.getName(), file); } else { ActionUtils.openDocument(this, file); } } } }
|
/**
* Stores the path of clicked file in the intent and exits.
*/
|
Stores the path of clicked file in the intent and exits
|
onListItemClick
|
{
"repo_name": "aegif/nemakiware-android-client",
"path": "app/src/main/java/jp/aegif/android/cmis/FileChooserActivity.java",
"license": "apache-2.0",
"size": 19079
}
|
[
"android.graphics.Color",
"android.view.View",
"android.widget.ListView",
"java.io.File",
"jp.aegif.android.cmis.utils.ActionUtils"
] |
import android.graphics.Color; import android.view.View; import android.widget.ListView; import java.io.File; import jp.aegif.android.cmis.utils.ActionUtils;
|
import android.graphics.*; import android.view.*; import android.widget.*; import java.io.*; import jp.aegif.android.cmis.utils.*;
|
[
"android.graphics",
"android.view",
"android.widget",
"java.io",
"jp.aegif.android"
] |
android.graphics; android.view; android.widget; java.io; jp.aegif.android;
| 1,126,342
|
public static boolean hasTopic(Model model,
org.ontoware.rdf2go.model.node.Resource instanceResource) {
return Base.has(model, instanceResource, TITLE);
}
|
static boolean function(Model model, org.ontoware.rdf2go.model.node.Resource instanceResource) { return Base.has(model, instanceResource, TITLE); }
|
/**
* Check if org.ontoware.rdfreactor.generator.java.JProperty@334d545c has at
* least one value set
*
* @param model
* an RDF2Go model
* @param resource
* an RDF2Go resource
* @return true if this property has at least one value
*
* [Generated from RDFReactor template rule #get0has-static]
*/
|
Check if org.ontoware.rdfreactor.generator.java.JProperty@334d545c has at least one value set
|
hasTopic
|
{
"repo_name": "m0ep/master-thesis",
"path": "source/apis/rdf2go/rdf2go-sioc/src/main/java/org/rdfs/sioc/Thing.java",
"license": "mit",
"size": 317844
}
|
[
"org.ontoware.rdf2go.model.Model",
"org.ontoware.rdfreactor.runtime.Base"
] |
import org.ontoware.rdf2go.model.Model; import org.ontoware.rdfreactor.runtime.Base;
|
import org.ontoware.rdf2go.model.*; import org.ontoware.rdfreactor.runtime.*;
|
[
"org.ontoware.rdf2go",
"org.ontoware.rdfreactor"
] |
org.ontoware.rdf2go; org.ontoware.rdfreactor;
| 1,084,081
|
private Pair<Boolean, String> authenticateByAuthorizations(Context context, boolean addApiRequiredAuthorizations) throws AccountManagementException {
IUserAccount userAccount = getSecurityService().getCurrentUser();
if (userAccount == null) {
return Pair.of(false, "No valid user session");
}
// If the default permission is null this is not normal
if (StringUtils.isBlank(getDefaultPermission())) {
String message = "Unauthorized API call : no default permission set, please contact your administrator";
ApiLog.log.error(message);
return Pair.of(false, message);
}
String[] roles = new String[] { getDefaultPermission() };
if (addApiRequiredAuthorizations && configuration.permissions() != null && configuration.permissions().length != 0) {
roles = ArrayUtils.add(configuration.permissions(), getDefaultPermission());
if (ApiLog.log.isDebugEnabled()) {
ApiLog.log.debug("Adding permissions to default : " + Arrays.toString(roles));
}
}
if (userAccount == null || !(getSecurityService().restrict(roles))) {
ApiLog.log.error("Unauthorized API call : permissions of user " + userAccount.getUid() + " are not sufficient expecting " + Arrays.toString(roles));
return Pair.of(false, "Unauthorized API call for API browser access : insufficient permissions for the currently logged user");
}
logCall(context.request().method().toUpperCase(), context.request().uri(), userAccount.getUid());
return Pair.of(true, null);
}
|
Pair<Boolean, String> function(Context context, boolean addApiRequiredAuthorizations) throws AccountManagementException { IUserAccount userAccount = getSecurityService().getCurrentUser(); if (userAccount == null) { return Pair.of(false, STR); } if (StringUtils.isBlank(getDefaultPermission())) { String message = STR; ApiLog.log.error(message); return Pair.of(false, message); } String[] roles = new String[] { getDefaultPermission() }; if (addApiRequiredAuthorizations && configuration.permissions() != null && configuration.permissions().length != 0) { roles = ArrayUtils.add(configuration.permissions(), getDefaultPermission()); if (ApiLog.log.isDebugEnabled()) { ApiLog.log.debug(STR + Arrays.toString(roles)); } } if (userAccount == null !(getSecurityService().restrict(roles))) { ApiLog.log.error(STR + userAccount.getUid() + STR + Arrays.toString(roles)); return Pair.of(false, STR); } logCall(context.request().method().toUpperCase(), context.request().uri(), userAccount.getUid()); return Pair.of(true, null); }
|
/**
* Authenticate the current API using the currently logged user (as well as
* its authorizations)
*
* @param context
* @param addApiRequiredAuthorizations
* if true the authorizations associated to the API (using the
* {@link ApiAuthentication} annotations are required (and
* tested)
* @return true if everything went well otherwise a badRequest must be
* issued
* @throws AccountManagementException
*/
|
Authenticate the current API using the currently logged user (as well as its authorizations)
|
authenticateByAuthorizations
|
{
"repo_name": "theAgileFactory/app-framework",
"path": "app/framework/services/api/server/ApiAuthenticationAction.java",
"license": "gpl-2.0",
"size": 19244
}
|
[
"java.util.Arrays",
"org.apache.commons.lang3.ArrayUtils",
"org.apache.commons.lang3.StringUtils",
"org.apache.commons.lang3.tuple.Pair"
] |
import java.util.Arrays; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair;
|
import java.util.*; import org.apache.commons.lang3.*; import org.apache.commons.lang3.tuple.*;
|
[
"java.util",
"org.apache.commons"
] |
java.util; org.apache.commons;
| 1,220,631
|
public static void writeTo(DataOutput out, ByteBuffer buffer, int length) throws IOException {
if (buffer.hasArray()) {
out.write(buffer.array(), buffer.position() + buffer.arrayOffset(), length);
} else {
int pos = buffer.position();
for (int i = pos; i < length + pos; i++)
out.writeByte(buffer.get(i));
}
}
|
static void function(DataOutput out, ByteBuffer buffer, int length) throws IOException { if (buffer.hasArray()) { out.write(buffer.array(), buffer.position() + buffer.arrayOffset(), length); } else { int pos = buffer.position(); for (int i = pos; i < length + pos; i++) out.writeByte(buffer.get(i)); } }
|
/**
* Write the contents of a buffer to an output stream. The bytes are copied from the current position
* in the buffer.
* @param out The output to write to
* @param buffer The buffer to write from
* @param length The number of bytes to write
* @throws IOException For any errors writing to the output
*/
|
Write the contents of a buffer to an output stream. The bytes are copied from the current position in the buffer
|
writeTo
|
{
"repo_name": "themarkypantz/kafka",
"path": "clients/src/main/java/org/apache/kafka/common/utils/Utils.java",
"license": "apache-2.0",
"size": 32010
}
|
[
"java.io.DataOutput",
"java.io.IOException",
"java.nio.ByteBuffer"
] |
import java.io.DataOutput; import java.io.IOException; import java.nio.ByteBuffer;
|
import java.io.*; import java.nio.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 2,738,755
|
@Override
public void finer(final String message) {
logWrapper.logIfEnabled(loggerName, Level.TRACE, null, message, (Throwable) null);
}
|
void function(final String message) { logWrapper.logIfEnabled(loggerName, Level.TRACE, null, message, (Throwable) null); }
|
/**
* Logs a message object with the {@code Level.TRACE} level.
*
* @param message the message object to log.
*/
|
Logs a message object with the Level.TRACE level
|
finer
|
{
"repo_name": "PurelyApplied/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/logging/log4j/LogWriterLogger.java",
"license": "apache-2.0",
"size": 56907
}
|
[
"org.apache.logging.log4j.Level"
] |
import org.apache.logging.log4j.Level;
|
import org.apache.logging.log4j.*;
|
[
"org.apache.logging"
] |
org.apache.logging;
| 2,215,504
|
public static BufferedImage convertToAWT(final ImageData data) {
ColorModel colorModel = null;
final PaletteData palette = data.palette;
if (palette.isDirect) {
colorModel = new DirectColorModel(data.depth, palette.redMask, palette.greenMask, palette.blueMask);
final BufferedImage bufferedImage = new BufferedImage(
colorModel,
colorModel.createCompatibleWritableRaster(data.width, data.height),
false,
null);
final WritableRaster raster = bufferedImage.getRaster();
final int[] pixelArray = new int[3];
for (int y = 0; y < data.height; y++) {
for (int x = 0; x < data.width; x++) {
final int pixel = data.getPixel(x, y);
final RGB rgb = palette.getRGB(pixel);
pixelArray[0] = rgb.red;
pixelArray[1] = rgb.green;
pixelArray[2] = rgb.blue;
raster.setPixels(x, y, 1, 1, pixelArray);
}
}
return bufferedImage;
}
final RGB[] rgbs = palette.getRGBs();
final byte[] red = new byte[rgbs.length];
final byte[] green = new byte[rgbs.length];
final byte[] blue = new byte[rgbs.length];
for (int i = 0; i < rgbs.length; i++) {
final RGB rgb = rgbs[i];
red[i] = (byte) rgb.red;
green[i] = (byte) rgb.green;
blue[i] = (byte) rgb.blue;
}
if (data.transparentPixel != -1) {
colorModel = new IndexColorModel(data.depth, rgbs.length, red, green, blue, data.transparentPixel);
} else {
colorModel = new IndexColorModel(data.depth, rgbs.length, red, green, blue);
}
final BufferedImage bufferedImage = new BufferedImage(colorModel, colorModel.createCompatibleWritableRaster(
data.width,
data.height), false, null);
final WritableRaster raster = bufferedImage.getRaster();
final int[] pixelArray = new int[1];
for (int y = 0; y < data.height; y++) {
for (int x = 0; x < data.width; x++) {
final int pixel = data.getPixel(x, y);
pixelArray[0] = pixel;
raster.setPixel(x, y, pixelArray);
}
}
return bufferedImage;
}
|
static BufferedImage function(final ImageData data) { ColorModel colorModel = null; final PaletteData palette = data.palette; if (palette.isDirect) { colorModel = new DirectColorModel(data.depth, palette.redMask, palette.greenMask, palette.blueMask); final BufferedImage bufferedImage = new BufferedImage( colorModel, colorModel.createCompatibleWritableRaster(data.width, data.height), false, null); final WritableRaster raster = bufferedImage.getRaster(); final int[] pixelArray = new int[3]; for (int y = 0; y < data.height; y++) { for (int x = 0; x < data.width; x++) { final int pixel = data.getPixel(x, y); final RGB rgb = palette.getRGB(pixel); pixelArray[0] = rgb.red; pixelArray[1] = rgb.green; pixelArray[2] = rgb.blue; raster.setPixels(x, y, 1, 1, pixelArray); } } return bufferedImage; } final RGB[] rgbs = palette.getRGBs(); final byte[] red = new byte[rgbs.length]; final byte[] green = new byte[rgbs.length]; final byte[] blue = new byte[rgbs.length]; for (int i = 0; i < rgbs.length; i++) { final RGB rgb = rgbs[i]; red[i] = (byte) rgb.red; green[i] = (byte) rgb.green; blue[i] = (byte) rgb.blue; } if (data.transparentPixel != -1) { colorModel = new IndexColorModel(data.depth, rgbs.length, red, green, blue, data.transparentPixel); } else { colorModel = new IndexColorModel(data.depth, rgbs.length, red, green, blue); } final BufferedImage bufferedImage = new BufferedImage(colorModel, colorModel.createCompatibleWritableRaster( data.width, data.height), false, null); final WritableRaster raster = bufferedImage.getRaster(); final int[] pixelArray = new int[1]; for (int y = 0; y < data.height; y++) { for (int x = 0; x < data.width; x++) { final int pixel = data.getPixel(x, y); pixelArray[0] = pixel; raster.setPixel(x, y, pixelArray); } } return bufferedImage; }
|
/**
* This method converts an SWT image to an AWT image
*
* @param data
* @return BufferedImage
*/
|
This method converts an SWT image to an AWT image
|
convertToAWT
|
{
"repo_name": "rhchen/aftrace",
"path": "libs/plugins/net.tourbook.common/src/net/tourbook/common/util/SWT2Dutil.java",
"license": "epl-1.0",
"size": 10880
}
|
[
"java.awt.image.BufferedImage",
"java.awt.image.ColorModel",
"java.awt.image.DirectColorModel",
"java.awt.image.IndexColorModel",
"java.awt.image.WritableRaster",
"org.eclipse.swt.graphics.ImageData",
"org.eclipse.swt.graphics.PaletteData"
] |
import java.awt.image.BufferedImage; import java.awt.image.ColorModel; import java.awt.image.DirectColorModel; import java.awt.image.IndexColorModel; import java.awt.image.WritableRaster; import org.eclipse.swt.graphics.ImageData; import org.eclipse.swt.graphics.PaletteData;
|
import java.awt.image.*; import org.eclipse.swt.graphics.*;
|
[
"java.awt",
"org.eclipse.swt"
] |
java.awt; org.eclipse.swt;
| 700,683
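A usage sketch for the converter above, assuming it is exposed as the static SWT2Dutil.convertToAWT named in the record's path; the input and output file names are placeholders. An SWT ImageLoader reads the pixel data, and ImageIO writes the converted BufferedImage.
import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.ImageIO;
import net.tourbook.common.util.SWT2Dutil;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.ImageLoader;

public class SwtToAwtDemo {
    public static void main(String[] args) throws Exception {
        // Load the first frame of the image with SWT, then hand it to the converter.
        ImageData data = new ImageLoader().load("input.png")[0];
        BufferedImage awt = SWT2Dutil.convertToAWT(data);
        ImageIO.write(awt, "png", new File("output.png"));
    }
}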
|
public void addCardsToVector(Vector v) {
v.add(saddleCard);
if (canterFirst) {
v.addAll(canterCards);
v.addAll(lengthCards);
v.addAll(heightCards);
} else {
v.addAll(lengthCards);
v.addAll(heightCards);
v.addAll(canterCards);
}
}
|
void function(Vector v) { v.add(saddleCard); if (canterFirst) { v.addAll(canterCards); v.addAll(lengthCards); v.addAll(heightCards); } else { v.addAll(lengthCards); v.addAll(heightCards); v.addAll(canterCards); } }
|
/**
* Add all of the cards in this jump to the given vector
*/
|
Add all of the cards in this jump to the given vector
|
addCardsToVector
|
{
"repo_name": "lsilvestre/Jogre",
"path": "games/grandPrixJumping/src/org/jogre/grandPrixJumping/common/JumpingJump.java",
"license": "gpl-2.0",
"size": 17040
}
|
[
"java.util.Vector"
] |
import java.util.Vector;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,024,892
|
protected synchronized TaskInProgress findSpeculativeTask(
Collection<TaskInProgress> list, TaskTrackerStatus ttStatus,
double avgProgress, long currentTime, boolean shouldRemove) {
Iterator<TaskInProgress> iter = list.iterator();
while (iter.hasNext()) {
TaskInProgress tip = iter.next();
// should never be true! (since we delete completed/failed tasks)
if (!tip.isRunning() || !tip.isRunnable()) {
iter.remove();
continue;
}
if (tip.hasSpeculativeTask(currentTime, avgProgress)) {
// Check if this tip can be removed from the list.
// If the list is shared then we should not remove.
if(shouldRemove){
iter.remove();
}
if (!tip.hasRunOnMachine(ttStatus.getHost(),
ttStatus.getTrackerName())) {
return tip;
}
} else {
if (shouldRemove && tip.hasRunOnMachine(ttStatus.getHost(),
ttStatus.getTrackerName())) {
iter.remove();
}
}
}
return null;
}
|
synchronized TaskInProgress function( Collection<TaskInProgress> list, TaskTrackerStatus ttStatus, double avgProgress, long currentTime, boolean shouldRemove) { Iterator<TaskInProgress> iter = list.iterator(); while (iter.hasNext()) { TaskInProgress tip = iter.next(); if (!tip.isRunning() || !tip.isRunnable()) { iter.remove(); continue; } if (tip.hasSpeculativeTask(currentTime, avgProgress)) { if(shouldRemove){ iter.remove(); } if (!tip.hasRunOnMachine(ttStatus.getHost(), ttStatus.getTrackerName())) { return tip; } } else { if (shouldRemove && tip.hasRunOnMachine(ttStatus.getHost(), ttStatus.getTrackerName())) { iter.remove(); } } } return null; }
|
/**
* Find a speculative task
* @param list a list of tips
* @param ttStatus status of the tracker that has requested a tip
* @param avgProgress the average progress for speculation
* @param currentTime current time in milliseconds
* @param shouldRemove whether to remove the tips
* @return a tip that can be speculated on the tracker
*/
|
Find a speculative task
|
findSpeculativeTask
|
{
"repo_name": "awylie/hadoop",
"path": "src/mapred/org/apache/hadoop/mapred/JobInProgress.java",
"license": "apache-2.0",
"size": 130712
}
|
[
"java.util.Collection",
"java.util.Iterator"
] |
import java.util.Collection; import java.util.Iterator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,434,668
|
public EntityAvailabilityStatus entityAvailabilityStatus() {
return this.entityAvailabilityStatus;
}
|
EntityAvailabilityStatus function() { return this.entityAvailabilityStatus; }
|
/**
* Get the entityAvailabilityStatus property: Entity availability status for the topic.
*
* @return the entityAvailabilityStatus value.
*/
|
Get the entityAvailabilityStatus property: Entity availability status for the topic
|
entityAvailabilityStatus
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-servicebus/src/main/java/com/azure/resourcemanager/servicebus/fluent/models/SubscriptionResourceInner.java",
"license": "mit",
"size": 14692
}
|
[
"com.azure.resourcemanager.servicebus.models.EntityAvailabilityStatus"
] |
import com.azure.resourcemanager.servicebus.models.EntityAvailabilityStatus;
|
import com.azure.resourcemanager.servicebus.models.*;
|
[
"com.azure.resourcemanager"
] |
com.azure.resourcemanager;
| 2,803,041
|
private void sendInstallNotificationAndNextStep(
OMAInfo omaInfo, DownloadInfo downloadInfo, long downloadId, String statusMessage) {
if (!sendNotification(omaInfo, downloadInfo, downloadId, statusMessage)) {
showNextUrlDialog(omaInfo);
}
}
|
void function( OMAInfo omaInfo, DownloadInfo downloadInfo, long downloadId, String statusMessage) { if (!sendNotification(omaInfo, downloadInfo, downloadId, statusMessage)) { showNextUrlDialog(omaInfo); } }
|
/**
* Sends the install notification and then opens the nextURL if they are provided.
* If the install notification is sent, nextURL will be opened after the server
* response is received.
*
* @param omaInfo Information about the OMA content.
* @param downloadInfo Information about the download.
* @param downloadId Id of the download in Android DownloadManager.
* @param statusMessage The message to send to the notification server.
*/
|
Sends the install notification and then opens the nextURL if they are provided. If the install notification is sent, nextURL will be opened after the server response is received
|
sendInstallNotificationAndNextStep
|
{
"repo_name": "axinging/chromium-crosswalk",
"path": "chrome/android/java/src/org/chromium/chrome/browser/download/OMADownloadHandler.java",
"license": "bsd-3-clause",
"size": 32248
}
|
[
"org.chromium.content.browser.DownloadInfo"
] |
import org.chromium.content.browser.DownloadInfo;
|
import org.chromium.content.browser.*;
|
[
"org.chromium.content"
] |
org.chromium.content;
| 655,837
|
private Path getPathToAllLibsDir() {
return getBinPath();
}
|
Path function() { return getBinPath(); }
|
/**
* Returns the path that is the immediate parent of {@link #getPathToNativeLibsAssetsDir()} and
* {@link #getPathToNativeLibsDir()}.
*/
|
Returns the path that is the immediate parent of <code>#getPathToNativeLibsAssetsDir()</code> and <code>#getPathToNativeLibsDir()</code>
|
getPathToAllLibsDir
|
{
"repo_name": "rmaz/buck",
"path": "src/com/facebook/buck/android/CopyNativeLibraries.java",
"license": "apache-2.0",
"size": 16639
}
|
[
"java.nio.file.Path"
] |
import java.nio.file.Path;
|
import java.nio.file.*;
|
[
"java.nio"
] |
java.nio;
| 799,481
|
public static Expression tokenizeXMLExpression(String tagName, String inheritNamespaceTagName) {
StringHelper.notEmpty(tagName, "tagName");
return new TokenXMLExpressionIterator(tagName, inheritNamespaceTagName);
}
|
static Expression function(String tagName, String inheritNamespaceTagName) { StringHelper.notEmpty(tagName, STR); return new TokenXMLExpressionIterator(tagName, inheritNamespaceTagName); }
|
/**
 * Returns a {@link TokenXMLExpressionIterator} expression
*/
|
Returns a <code>TokenXMLExpressionIterator</code> expression
|
tokenizeXMLExpression
|
{
"repo_name": "nicolaferraro/camel",
"path": "core/camel-support/src/main/java/org/apache/camel/support/builder/ExpressionBuilder.java",
"license": "apache-2.0",
"size": 63316
}
|
[
"org.apache.camel.Expression",
"org.apache.camel.util.StringHelper"
] |
import org.apache.camel.Expression; import org.apache.camel.util.StringHelper;
|
import org.apache.camel.*; import org.apache.camel.util.*;
|
[
"org.apache.camel"
] |
org.apache.camel;
| 1,312,546
|
return ThreadCategory.getInstance(QoSDrxAlarmEventReceiverEventHandlerImpl2.class);
}
// ************************
// Spring DAO setters
// ************************
public static int SPECIFY_OUTSTATION=1;
public static int USE_TYPE_INSTANCE=2;
private Integer almUpdateBehaviour= null;
private String alarmUpdateBehaviour=null;
|
return ThreadCategory.getInstance(QoSDrxAlarmEventReceiverEventHandlerImpl2.class); } public static int SPECIFY_OUTSTATION=1; public static int USE_TYPE_INSTANCE=2; private Integer almUpdateBehaviour= null; private String alarmUpdateBehaviour=null;
|
/**
* Method to get the QoSDrx's logger from OpenNMS
*/
|
Method to get the QoSDrx's logger from OpenNMS
|
getLog
|
{
"repo_name": "vishwaAbhinav/OpenNMS",
"path": "opennms-qosdaemon/src/main/java/org/openoss/opennms/spring/qosdrx/QoSDrxAlarmEventReceiverEventHandlerImpl2.java",
"license": "gpl-2.0",
"size": 29398
}
|
[
"org.opennms.core.utils.ThreadCategory"
] |
import org.opennms.core.utils.ThreadCategory;
|
import org.opennms.core.utils.*;
|
[
"org.opennms.core"
] |
org.opennms.core;
| 345,199
|
public ModifyableColumnFamilyDescriptor setEncryptionKey(byte[] keyBytes) {
return setValue(ENCRYPTION_KEY_BYTES, new Bytes(keyBytes));
}
|
ModifyableColumnFamilyDescriptor function(byte[] keyBytes) { return setValue(ENCRYPTION_KEY_BYTES, new Bytes(keyBytes)); }
|
/**
* Set the raw crypto key attribute for the family
*
* @param keyBytes
* @return this (for chained invocation)
*/
|
Set the raw crypto key attribute for the family
|
setEncryptionKey
|
{
"repo_name": "francisliu/hbase",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java",
"license": "apache-2.0",
"size": 50047
}
|
[
"org.apache.hadoop.hbase.util.Bytes"
] |
import org.apache.hadoop.hbase.util.Bytes;
|
import org.apache.hadoop.hbase.util.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 1,952,734
|
@Test
public void mkdirs() throws IOException {
// make sure the underfs address dir exists already
mUfs.mkdirs(mUnderfsAddress, true);
// empty lsr should be empty
Assert.assertEquals(0, mUfs.listRecursive(mUnderfsAddress).length);
String testDirTop = PathUtils.concatPath(mUnderfsAddress, "testDirTop");
String testDir1 = PathUtils.concatPath(mUnderfsAddress, "1");
String testDir2 = PathUtils.concatPath(testDir1, "2");
String testDir3 = PathUtils.concatPath(testDir2, "3");
String testDirDeep = PathUtils.concatPath(testDir3, "testDirDeep");
mUfs.mkdirs(testDirTop, false);
Assert.assertTrue(mUfs.exists(testDirTop));
mUfs.mkdirs(testDirDeep, true);
Assert.assertTrue(mUfs.exists(testDir1));
Assert.assertTrue(mUfs.exists(testDir2));
Assert.assertTrue(mUfs.exists(testDir3));
Assert.assertTrue(mUfs.exists(testDirDeep));
}
|
void function() throws IOException { mUfs.mkdirs(mUnderfsAddress, true); Assert.assertEquals(0, mUfs.listRecursive(mUnderfsAddress).length); String testDirTop = PathUtils.concatPath(mUnderfsAddress, STR); String testDir1 = PathUtils.concatPath(mUnderfsAddress, "1"); String testDir2 = PathUtils.concatPath(testDir1, "2"); String testDir3 = PathUtils.concatPath(testDir2, "3"); String testDirDeep = PathUtils.concatPath(testDir3, STR); mUfs.mkdirs(testDirTop, false); Assert.assertTrue(mUfs.exists(testDirTop)); mUfs.mkdirs(testDirDeep, true); Assert.assertTrue(mUfs.exists(testDir1)); Assert.assertTrue(mUfs.exists(testDir2)); Assert.assertTrue(mUfs.exists(testDir3)); Assert.assertTrue(mUfs.exists(testDirDeep)); }
|
/**
* Tests {@link UnderFileSystem#mkdirs(String, boolean)} correctly creates a directory.
* Tests {@link UnderFileSystem#mkdirs(String, boolean)} correctly makes parent directories if
* createParent is specified.
*/
|
Tests <code>UnderFileSystem#mkdirs(String, boolean)</code> correctly creates a directory. Tests <code>UnderFileSystem#mkdirs(String, boolean)</code> correctly makes parent directories if createParent is specified
|
mkdirs
|
{
"repo_name": "bit-zyl/Alluxio-Nvdimm",
"path": "tests/src/test/java/alluxio/underfs/UnderStorageSystemInterfaceIntegrationTest.java",
"license": "apache-2.0",
"size": 20086
}
|
[
"java.io.IOException",
"org.junit.Assert"
] |
import java.io.IOException; import org.junit.Assert;
|
import java.io.*; import org.junit.*;
|
[
"java.io",
"org.junit"
] |
java.io; org.junit;
| 757,461
|
public Dimension getMaximumSize(JComponent c)
{
// Sun's implementation returns Integer.MAX_VALUE here, so do we.
return new Dimension(Integer.MAX_VALUE, Integer.MAX_VALUE);
}
|
Dimension function(JComponent c) { return new Dimension(Integer.MAX_VALUE, Integer.MAX_VALUE); }
|
/**
* Returns the maximum size for text components that use this UI.
*
* This returns (Integer.MAX_VALUE, Integer.MAX_VALUE).
*
* @param c not used here
*
* @return the maximum size for text components that use this UI
*/
|
Returns the maximum size for text components that use this UI. This returns (Integer.MAX_VALUE, Integer.MAX_VALUE)
|
getMaximumSize
|
{
"repo_name": "shaotuanchen/sunflower_exp",
"path": "tools/source/gcc-4.2.4/libjava/classpath/javax/swing/plaf/basic/BasicTextUI.java",
"license": "bsd-3-clause",
"size": 41609
}
|
[
"java.awt.Dimension",
"javax.swing.JComponent"
] |
import java.awt.Dimension; import javax.swing.JComponent;
|
import java.awt.*; import javax.swing.*;
|
[
"java.awt",
"javax.swing"
] |
java.awt; javax.swing;
| 482,296
|
private void dtdelm()
throws SAXException, IOException
{
// This is stub implementation which skips an element
// declaration.
wsskip();
name(mIsNSAware);
char ch;
while (true) {
ch = next();
switch (ch) {
case '>':
back();
return;
case EOS:
panic(FAULT);
default:
break;
}
}
}
|
void function() throws SAXException, IOException { wsskip(); name(mIsNSAware); char ch; while (true) { ch = next(); switch (ch) { case '>': back(); return; case EOS: panic(FAULT); default: break; } } }
|
/**
* Parses an element declaration.
*
* This method parses the declaration up to the closing angle
* bracket.
*
* @exception SAXException
* @exception IOException
*/
|
Parses an element declaration. This method parses the declaration up to the closing angle bracket
|
dtdelm
|
{
"repo_name": "tommythorn/yari",
"path": "shared/cacao-related/phoneme_feature/jsr172/src/ri/jaxp/com/sun/ukit/jaxp/Parser.java",
"license": "gpl-2.0",
"size": 75393
}
|
[
"java.io.IOException",
"org.xml.sax.SAXException"
] |
import java.io.IOException; import org.xml.sax.SAXException;
|
import java.io.*; import org.xml.sax.*;
|
[
"java.io",
"org.xml.sax"
] |
java.io; org.xml.sax;
| 326,647
|
int insertSelective(DBQuotaUsages record);
|
int insertSelective(DBQuotaUsages record);
|
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table quota_usages
*
* @mbggenerated Tue May 26 15:53:09 CST 2015
*/
|
This method was generated by MyBatis Generator. This method corresponds to the database table quota_usages
|
insertSelective
|
{
"repo_name": "wolabs/womano",
"path": "main/java/com/culabs/unicomportal/dao/DBQuotaUsagesMapper.java",
"license": "apache-2.0",
"size": 1587
}
|
[
"com.culabs.unicomportal.model.db.DBQuotaUsages"
] |
import com.culabs.unicomportal.model.db.DBQuotaUsages;
|
import com.culabs.unicomportal.model.db.*;
|
[
"com.culabs.unicomportal"
] |
com.culabs.unicomportal;
| 1,458,698
|
protected void explicitlyCastToFloat(Type targetType) {
Preconditions.checkState(targetType.isFloatingPointType());
type_ = targetType;
explicitlyCast_ = true;
}
|
void function(Type targetType) { Preconditions.checkState(targetType.isFloatingPointType()); type_ = targetType; explicitlyCast_ = true; }
|
/**
* Explicitly cast this literal to 'targetType'. The targetType must be a
 * floating point type.
*/
|
Explicitly cast this literal to 'targetType'. The targetType must be a floating point type
|
explicitlyCastToFloat
|
{
"repo_name": "924060929/impala-frontend",
"path": "fe/src/main/java/org/apache/impala/analysis/NumericLiteral.java",
"license": "apache-2.0",
"size": 12727
}
|
[
"com.google.common.base.Preconditions",
"org.apache.impala.catalog.Type"
] |
import com.google.common.base.Preconditions; import org.apache.impala.catalog.Type;
|
import com.google.common.base.*; import org.apache.impala.catalog.*;
|
[
"com.google.common",
"org.apache.impala"
] |
com.google.common; org.apache.impala;
| 2,881,302
|
public default <S2, E2> Traversal.Admin<S2, E2> removeStep(final Step<?, ?> step) throws IllegalStateException {
return this.removeStep(TraversalHelper.stepIndex(step, this));
}
|
default <S2, E2> Traversal.Admin<S2, E2> function(final Step<?, ?> step) throws IllegalStateException { return this.removeStep(TraversalHelper.stepIndex(step, this)); }
|
/**
* Remove a {@link Step} from the traversal.
*
* @param step the step to remove
* @param <S2> the new start type of the traversal (if the removed step was a start step)
* @param <E2> the new end type of the traversal (if the removed step was an end step)
* @return the newly modulated traversal
* @throws IllegalStateException if the {@link TraversalStrategies} have already been applied
*/
|
Remove a <code>Step</code> from the traversal
|
removeStep
|
{
"repo_name": "dalaro/incubator-tinkerpop",
"path": "gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/Traversal.java",
"license": "apache-2.0",
"size": 19839
}
|
[
"org.apache.tinkerpop.gremlin.process.traversal.util.TraversalHelper"
] |
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalHelper;
|
import org.apache.tinkerpop.gremlin.process.traversal.util.*;
|
[
"org.apache.tinkerpop"
] |
org.apache.tinkerpop;
| 2,175,055
|
private void updateTabsVisibility(StackTab[] sortedPriorityArray) {
mVisibilityArray.clear();
for (int i = 0; i < sortedPriorityArray.length; i++) {
mVisibilityArray.add(sortedPriorityArray[i].getId());
}
updateCacheVisibleIds(mVisibilityArray);
}
|
void function(StackTab[] sortedPriorityArray) { mVisibilityArray.clear(); for (int i = 0; i < sortedPriorityArray.length; i++) { mVisibilityArray.add(sortedPriorityArray[i].getId()); } updateCacheVisibleIds(mVisibilityArray); }
|
/**
 * Updates the list of visible tab Ids that the tab content manager is supposed to serve. The list
* is ordered by priority. The first ones must be in the manager, then the remaining ones should
* have at least approximations if possible.
*
* @param sortedPriorityArray The array of all the {@link StackTab} sorted by priority.
*/
|
Updates the list of visible tab Ids that the tab content manager is supposed to serve. The list is ordered by priority. The first ones must be in the manager, then the remaining ones should have at least approximations if possible
|
updateTabsVisibility
|
{
"repo_name": "mogoweb/365browser",
"path": "app/src/main/java/org/chromium/chrome/browser/compositor/layouts/phone/StackLayout.java",
"license": "apache-2.0",
"size": 51520
}
|
[
"org.chromium.chrome.browser.compositor.layouts.phone.stack.StackTab"
] |
import org.chromium.chrome.browser.compositor.layouts.phone.stack.StackTab;
|
import org.chromium.chrome.browser.compositor.layouts.phone.stack.*;
|
[
"org.chromium.chrome"
] |
org.chromium.chrome;
| 641,402
|
protected ApplicationContext getContext(PageContext pageContext) {
ServletContext servletContext = pageContext.getServletContext();
return SecurityWebApplicationContextUtils.findRequiredWebApplicationContext(servletContext);
}
|
ApplicationContext function(PageContext pageContext) { ServletContext servletContext = pageContext.getServletContext(); return SecurityWebApplicationContextUtils.findRequiredWebApplicationContext(servletContext); }
|
/**
 * Allows test cases to override where the application context is obtained from.
* @param pageContext so the <code>ServletContext</code> can be accessed as required
* by Spring's <code>WebApplicationContextUtils</code>
* @return the Spring application context (never <code>null</code>)
*/
|
Allows test cases to override where the application context is obtained from
|
getContext
|
{
"repo_name": "fhanik/spring-security",
"path": "taglibs/src/main/java/org/springframework/security/taglibs/authz/AccessControlListTag.java",
"license": "apache-2.0",
"size": 6013
}
|
[
"javax.servlet.ServletContext",
"javax.servlet.jsp.PageContext",
"org.springframework.context.ApplicationContext",
"org.springframework.security.web.context.support.SecurityWebApplicationContextUtils"
] |
import javax.servlet.ServletContext; import javax.servlet.jsp.PageContext; import org.springframework.context.ApplicationContext; import org.springframework.security.web.context.support.SecurityWebApplicationContextUtils;
|
import javax.servlet.*; import javax.servlet.jsp.*; import org.springframework.context.*; import org.springframework.security.web.context.support.*;
|
[
"javax.servlet",
"org.springframework.context",
"org.springframework.security"
] |
javax.servlet; org.springframework.context; org.springframework.security;
| 1,432,985
|
private Map<String, Accumulator<?, ?>> deserializeAccumulators(TaskExecutionState state) {
AccumulatorSnapshot serializedAccumulators = state.getAccumulators();
if (serializedAccumulators != null) {
try {
return serializedAccumulators.deserializeUserAccumulators(userClassLoader);
}
catch (Throwable t) {
// we catch Throwable here to include all form of linking errors that may
// occur if user classes are missing in the classpath
LOG.error("Failed to deserialize final accumulator results.", t);
}
}
return null;
}
|
Map<String, Accumulator<?, ?>> function(TaskExecutionState state) { AccumulatorSnapshot serializedAccumulators = state.getAccumulators(); if (serializedAccumulators != null) { try { return serializedAccumulators.deserializeUserAccumulators(userClassLoader); } catch (Throwable t) { LOG.error(STR, t); } } return null; }
|
/**
* Deserializes accumulators from a task state update.
*
* <p>This method never throws an exception!
*
* @param state The task execution state from which to deserialize the accumulators.
 * @return The deserialized accumulators, or null, if there are no accumulators or an error occurred.
*/
|
Deserializes accumulators from a task state update. This method never throws an exception
|
deserializeAccumulators
|
{
"repo_name": "Xpray/flink",
"path": "flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionGraph.java",
"license": "apache-2.0",
"size": 60342
}
|
[
"java.util.Map",
"org.apache.flink.api.common.accumulators.Accumulator",
"org.apache.flink.runtime.accumulators.AccumulatorSnapshot",
"org.apache.flink.runtime.taskmanager.TaskExecutionState"
] |
import java.util.Map; import org.apache.flink.api.common.accumulators.Accumulator; import org.apache.flink.runtime.accumulators.AccumulatorSnapshot; import org.apache.flink.runtime.taskmanager.TaskExecutionState;
|
import java.util.*; import org.apache.flink.api.common.accumulators.*; import org.apache.flink.runtime.accumulators.*; import org.apache.flink.runtime.taskmanager.*;
|
[
"java.util",
"org.apache.flink"
] |
java.util; org.apache.flink;
| 1,745,604
|
public List<String> getCustomFaultSequences(APIIdentifier apiIdentifier) throws APIManagementException {
List<String> sequenceList = new ArrayList<String>();
boolean isTenantFlowStarted = false;
try {
String tenantDomain = null;
if (apiIdentifier.getProviderName().contains("-AT-")) {
String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
tenantDomain = MultitenantUtils.getTenantDomain(provider);
}
PrivilegedCarbonContext.startTenantFlow();
isTenantFlowStarted = true;
if (!StringUtils.isEmpty(tenantDomain)) {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
} else {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
}
UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
.getGovernanceSystemRegistry(tenantId);
if (registry.resourceExists(APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION)) {
org.wso2.carbon.registry.api.Collection faultSeqCollection =
(org.wso2.carbon.registry.api.Collection) registry.get(
APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION);
if (faultSeqCollection != null) {
String[] faultSeqChildPaths = faultSeqCollection.getChildren();
Arrays.sort(faultSeqChildPaths);
for (String faultSeqChildPath : faultSeqChildPaths) {
Resource outSequence = registry.get(faultSeqChildPath);
try {
OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
sequenceList.add(seqElment.getAttributeValue(new QName("name")));
} catch (OMException e) {
log.info("Error occurred when reading the sequence '" + faultSeqChildPath
+ "' from the registry.", e);
}
}
}
}
String customOutSeqFileLocation = APIUtil.getSequencePath(apiIdentifier,
APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT);
if (registry.resourceExists(customOutSeqFileLocation)) {
org.wso2.carbon.registry.api.Collection faultSeqCollection =
(org.wso2.carbon.registry.api.Collection) registry.get(customOutSeqFileLocation);
if (faultSeqCollection != null) {
String[] faultSeqChildPaths = faultSeqCollection.getChildren();
Arrays.sort(faultSeqChildPaths);
for (String faultSeqChildPath : faultSeqChildPaths) {
Resource faultSequence = registry.get(faultSeqChildPath);
try {
OMElement seqElment = APIUtil.buildOMElement(faultSequence.getContentStream());
sequenceList.add(seqElment.getAttributeValue(new QName("name")));
} catch (OMException e) {
log.info("Error occurred when reading the sequence '" + faultSeqChildPath
+ "' from the registry.", e);
}
}
}
}
} catch (RegistryException e) {
String msg = "Error while retrieving registry for tenant " + tenantId;
log.error(msg);
throw new APIManagementException(msg, e);
} catch (org.wso2.carbon.registry.api.RegistryException e) {
String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT
+ " sequences of " + apiIdentifier + " in the registry";
log.error(msg);
throw new APIManagementException(msg, e);
} catch (Exception e) {
log.error(e.getMessage());
throw new APIManagementException(e.getMessage(), e);
} finally {
if (isTenantFlowStarted) {
PrivilegedCarbonContext.endTenantFlow();
}
}
return sequenceList;
}
|
List<String> function(APIIdentifier apiIdentifier) throws APIManagementException { List<String> sequenceList = new ArrayList<String>(); boolean isTenantFlowStarted = false; try { String tenantDomain = null; if (apiIdentifier.getProviderName().contains("-AT-")) { String provider = apiIdentifier.getProviderName().replace("-AT-", "@"); tenantDomain = MultitenantUtils.getTenantDomain(provider); } PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; if (!StringUtils.isEmpty(tenantDomain)) { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); if (registry.resourceExists(APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION)) { org.wso2.carbon.registry.api.Collection faultSeqCollection = (org.wso2.carbon.registry.api.Collection) registry.get( APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION); if (faultSeqCollection != null) { String[] faultSeqChildPaths = faultSeqCollection.getChildren(); Arrays.sort(faultSeqChildPaths); for (String faultSeqChildPath : faultSeqChildPaths) { Resource outSequence = registry.get(faultSeqChildPath); try { OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream()); sequenceList.add(seqElment.getAttributeValue(new QName("name"))); } catch (OMException e) { log.info(STR + faultSeqChildPath + STR, e); } } } } String customOutSeqFileLocation = APIUtil.getSequencePath(apiIdentifier, APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT); if (registry.resourceExists(customOutSeqFileLocation)) { org.wso2.carbon.registry.api.Collection faultSeqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(customOutSeqFileLocation); if (faultSeqCollection != null) { String[] faultSeqChildPaths = faultSeqCollection.getChildren(); Arrays.sort(faultSeqChildPaths); for (String faultSeqChildPath : faultSeqChildPaths) { Resource faultSequence = registry.get(faultSeqChildPath); try { OMElement seqElment = APIUtil.buildOMElement(faultSequence.getContentStream()); sequenceList.add(seqElment.getAttributeValue(new QName("name"))); } catch (OMException e) { log.info(STR + faultSeqChildPath + STR, e); } } } } } catch (RegistryException e) { String msg = STR + tenantId; log.error(msg); throw new APIManagementException(msg, e); } catch (org.wso2.carbon.registry.api.RegistryException e) { String msg = STR + APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT + STR + apiIdentifier + STR; log.error(msg); throw new APIManagementException(msg, e); } catch (Exception e) { log.error(e.getMessage()); throw new APIManagementException(e.getMessage(), e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return sequenceList; }
|
/**
* Get stored custom fault sequences from governanceSystem registry
*
* @throws APIManagementException
*/
|
Get stored custom fault sequences from governanceSystem registry
|
getCustomFaultSequences
|
{
"repo_name": "tharikaGitHub/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIProviderImpl.java",
"license": "apache-2.0",
"size": 497958
}
|
[
"java.util.ArrayList",
"java.util.Arrays",
"java.util.Collection",
"java.util.List",
"javax.xml.namespace.QName",
"org.apache.axiom.om.OMElement",
"org.apache.axiom.om.OMException",
"org.apache.commons.lang3.StringUtils",
"org.wso2.carbon.apimgt.api.APIManagementException",
"org.wso2.carbon.apimgt.api.model.APIIdentifier",
"org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder",
"org.wso2.carbon.apimgt.impl.utils.APIUtil",
"org.wso2.carbon.context.PrivilegedCarbonContext",
"org.wso2.carbon.registry.core.Resource",
"org.wso2.carbon.registry.core.exceptions.RegistryException",
"org.wso2.carbon.registry.core.session.UserRegistry",
"org.wso2.carbon.utils.multitenancy.MultitenantConstants",
"org.wso2.carbon.utils.multitenancy.MultitenantUtils"
] |
import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import javax.xml.namespace.QName; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMException; import org.apache.commons.lang3.StringUtils; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
|
import java.util.*; import javax.xml.namespace.*; import org.apache.axiom.om.*; import org.apache.commons.lang3.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.api.model.*; import org.wso2.carbon.apimgt.impl.internal.*; import org.wso2.carbon.apimgt.impl.utils.*; import org.wso2.carbon.context.*; import org.wso2.carbon.registry.core.*; import org.wso2.carbon.registry.core.exceptions.*; import org.wso2.carbon.registry.core.session.*; import org.wso2.carbon.utils.multitenancy.*;
|
[
"java.util",
"javax.xml",
"org.apache.axiom",
"org.apache.commons",
"org.wso2.carbon"
] |
java.util; javax.xml; org.apache.axiom; org.apache.commons; org.wso2.carbon;
| 2,403,384
|
public List<T> getBufferedEvents() {
return eventCircularFifoBuffer.toList();
}
|
List<T> function() { return eventCircularFifoBuffer.toList(); }
|
/**
* Returns a list containing all of the buffered events.
*
* @return a list containing all of the buffered events.
*/
|
Returns a list containing all of the buffered events
|
getBufferedEvents
|
{
"repo_name": "javaslang/javaslang-circuitbreaker",
"path": "resilience4j-consumer/src/main/java/io/github/resilience4j/consumer/CircularEventConsumer.java",
"license": "apache-2.0",
"size": 1854
}
|
[
"io.vavr.collection.List"
] |
import io.vavr.collection.List;
|
import io.vavr.collection.*;
|
[
"io.vavr.collection"
] |
io.vavr.collection;
| 246,938
|
@ApiModelProperty(example = "null", value = "X Coordinate when 'Position' is set to 'Absolute' (in Pts, 1/72 of an inch)")
public String getX() {
return x;
}
|
@ApiModelProperty(example = "null", value = STR) String function() { return x; }
|
/**
* X Coordinate when 'Position' is set to 'Absolute' (in Pts, 1/72 of an inch)
* @return x
**/
|
X Coordinate when 'Position' is set to 'Absolute' (in Pts, 1/72 of an inch)
|
getX
|
{
"repo_name": "Muhimbi/PDF-Converter-Services-Online",
"path": "clients/v1/java/client/src/main/java/com/muhimbi/online/client/model/LinearBarcodeWatermarkData.java",
"license": "apache-2.0",
"size": 31230
}
|
[
"io.swagger.annotations.ApiModelProperty"
] |
import io.swagger.annotations.ApiModelProperty;
|
import io.swagger.annotations.*;
|
[
"io.swagger.annotations"
] |
io.swagger.annotations;
| 110,623
|
public void add( String name, byte[] bytes ) {
byte[] val = bytes;
if ( _rounds > 0 ) {
MessageDigest md = null;
try {
md = MessageDigest.getInstance( MD5 );
} catch ( NoSuchAlgorithmException e ) {}
if ( md != null ) {
// make sure all the credentials are hashed even if added after digest
for ( int x = 0; x < _rounds; x++ ) {
val = md.digest( val );
}
}
}
// add the data
_credentials.put( name, val );
}
public CredentialSet( String name, String value ) {
try {
add( name, value.getBytes( UTF8 ) );
} catch ( UnsupportedEncodingException e ) {}
}
public CredentialSet( String name, String value, int rounds ) {
if ( rounds >= 0 ) {
this._rounds = rounds;
} else {
// negative numbers will default to 1
_rounds = 1;
}
try {
add( name, value.getBytes( UTF8 ) );
} catch ( UnsupportedEncodingException e ) {}
}
|
void function( String name, byte[] bytes ) { byte[] val = bytes; if ( _rounds > 0 ) { MessageDigest md = null; try { md = MessageDigest.getInstance( MD5 ); } catch ( NoSuchAlgorithmException e ) {} if ( md != null ) { for ( int x = 0; x < _rounds; x++ ) { val = md.digest( val ); } } } _credentials.put( name, val ); } CredentialSet( String name, String value ) { try { function( name, value.getBytes( UTF8 ) ); } catch ( UnsupportedEncodingException e ) {} } CredentialSet( String name, String value, int rounds ) { if ( rounds >= 0 ) { this._rounds = rounds; } else { _rounds = 1; } try { function( name, value.getBytes( UTF8 ) ); } catch ( UnsupportedEncodingException e ) {} }
|
/**
* Add the named data to these credentials
*
* <p>If rounds are set to 1 or more, the credential data will be stored as
* an MD5 digest.
*
* @param name The name of the credential
* @param bytes the bytes representing the credential
*/
|
Add the named data to these credentials If rounds are set to 1 or more, the credential data will be stored as an MD5 digest
|
add
|
{
"repo_name": "sdcote/commons",
"path": "src/main/java/coyote/commons/security/CredentialSet.java",
"license": "mit",
"size": 8307
}
|
[
"java.io.UnsupportedEncodingException",
"java.security.MessageDigest",
"java.security.NoSuchAlgorithmException"
] |
import java.io.UnsupportedEncodingException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException;
|
import java.io.*; import java.security.*;
|
[
"java.io",
"java.security"
] |
java.io; java.security;
| 594,269
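A brief usage sketch for the constructors above, assuming the class is coyote.commons.security.CredentialSet from the record; the credential names and values are placeholders.
import coyote.commons.security.CredentialSet;

public class CredentialDemo {
    public static void main(String[] args) {
        // One hashing round: the value is stored as an MD5 digest, per the doc above.
        CredentialSet hashed = new CredentialSet("password", "s3cret", 1);
        // Two-argument form: no rounds are requested here, so storage follows the class default.
        CredentialSet raw = new CredentialSet("token", "abc123");
        System.out.println(hashed + " " + raw);
    }
}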
|
private String convertToMessage(ByteBuffer buffer) {
byte[] data = buffer.array();
try {
int offset = buffer.arrayOffset();
return new String(data, offset, buffer.limit() - offset);
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
|
String function(ByteBuffer buffer) { byte[] data = buffer.array(); try { int offset = buffer.arrayOffset(); return new String(data, offset, buffer.limit() - offset); } catch (Exception e) { e.printStackTrace(); return null; } }
|
/**
* Convert byte message to string
*
* @param buffer
* @return
*/
|
Convert byte message to string
|
convertToMessage
|
{
"repo_name": "rex-xxx/mt6572_x201",
"path": "packages/apps/Nfc/src/com/mediatek/nfc/handover/FilePushClient.java",
"license": "gpl-2.0",
"size": 18688
}
|
[
"java.nio.ByteBuffer"
] |
import java.nio.ByteBuffer;
|
import java.nio.*;
|
[
"java.nio"
] |
java.nio;
| 2,187,325
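A standalone sketch of the same array-backed decoding idea used above; the helper name is reused for clarity, and the charset is pinned to UTF-8 here, which is an assumption since the original relies on the platform default.
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class BufferToStringDemo {
    // Decode the readable region of an array-backed buffer into text.
    static String convertToMessage(ByteBuffer buffer) {
        byte[] data = buffer.array();
        int offset = buffer.arrayOffset();
        return new String(data, offset, buffer.limit() - offset, StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.wrap("file transfer done".getBytes(StandardCharsets.UTF_8));
        System.out.println(convertToMessage(buf));
    }
}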
|
boolean canRemoveAllChildren( Name primaryTypeNameOfParent,
Collection<Name> mixinTypeNamesOfParent,
Name childName,
boolean skipProtected ) {
// First look in the primary type ...
JcrNodeType primaryType = getNodeType(primaryTypeNameOfParent);
if (primaryType != null) {
for (JcrNodeDefinition definition : primaryType.allChildNodeDefinitions(childName)) {
// Skip protected definitions ...
if (skipProtected && definition.isProtected()) continue;
// If this definition is not mandatory, then we have found that we CAN remove all children ...
return !definition.isMandatory();
}
}
// Then, look in the mixin types ...
if (mixinTypeNamesOfParent != null) {
for (Name mixinTypeName : mixinTypeNamesOfParent) {
JcrNodeType mixinType = getNodeType(mixinTypeName);
if (mixinType == null) continue;
for (JcrNodeDefinition definition : mixinType.allChildNodeDefinitions(childName)) {
// Skip protected definitions ...
if (skipProtected && definition.isProtected()) continue;
// If this definition is not mandatory, then we have found that we CAN remove all children ...
return !definition.isMandatory();
}
}
}
// Nothing was found, so look for residual node definitions ...
if (!childName.equals(JcrNodeType.RESIDUAL_NAME)) return canRemoveAllChildren(primaryTypeNameOfParent,
mixinTypeNamesOfParent,
JcrNodeType.RESIDUAL_NAME, skipProtected);
return false;
}
|
boolean canRemoveAllChildren( Name primaryTypeNameOfParent, Collection<Name> mixinTypeNamesOfParent, Name childName, boolean skipProtected ) { JcrNodeType primaryType = getNodeType(primaryTypeNameOfParent); if (primaryType != null) { for (JcrNodeDefinition definition : primaryType.allChildNodeDefinitions(childName)) { if (skipProtected && definition.isProtected()) continue; return !definition.isMandatory(); } } if (mixinTypeNamesOfParent != null) { for (Name mixinTypeName : mixinTypeNamesOfParent) { JcrNodeType mixinType = getNodeType(mixinTypeName); if (mixinType == null) continue; for (JcrNodeDefinition definition : mixinType.allChildNodeDefinitions(childName)) { if (skipProtected && definition.isProtected()) continue; return !definition.isMandatory(); } } } if (!childName.equals(JcrNodeType.RESIDUAL_NAME)) return canRemoveAllChildren(primaryTypeNameOfParent, mixinTypeNamesOfParent, JcrNodeType.RESIDUAL_NAME, skipProtected); return false; }
|
/**
* Determine if the child node definitions of the supplied primary type and mixin types of a parent node allow all of the
* children with the supplied name to be removed.
*
* @param primaryTypeNameOfParent the name of the primary type for the parent node; may not be null
* @param mixinTypeNamesOfParent the names of the mixin types for the parent node; may be null or empty if there are no mixins
* to include in the search
* @param childName the name of the child to be added to the parent; may not be null
* @param skipProtected true if this operation is being done from within the public JCR node and property API, or false if
* this operation is being done from within internal implementations
* @return true if at least one child node definition does not require children with the supplied name to exist, or false
* otherwise
*/
|
Determine if the child node definitions of the supplied primary type and mixin types of a parent node allow all of the children with the supplied name to be removed
|
canRemoveAllChildren
|
{
"repo_name": "stemig62/modeshape",
"path": "modeshape-jcr/src/main/java/org/modeshape/jcr/NodeTypes.java",
"license": "apache-2.0",
"size": 166534
}
|
[
"java.util.Collection",
"org.modeshape.jcr.value.Name"
] |
import java.util.Collection; import org.modeshape.jcr.value.Name;
|
import java.util.*; import org.modeshape.jcr.value.*;
|
[
"java.util",
"org.modeshape.jcr"
] |
java.util; org.modeshape.jcr;
| 1,277,004
|
public void onCollideWithPlayer(EntityPlayer par1EntityPlayer)
{
if (!this.worldObj.isRemote)
{
if (this.field_70532_c == 0 && par1EntityPlayer.xpCooldown == 0)
{
par1EntityPlayer.xpCooldown = 2;
this.playSound("random.orb", 0.1F, 0.5F * ((this.rand.nextFloat() - this.rand.nextFloat()) * 0.7F + 1.8F));
par1EntityPlayer.onItemPickup(this, 1);
par1EntityPlayer.addExperience(CraftEventFactory.callPlayerExpChangeEvent(par1EntityPlayer, this.xpValue).getAmount()); // CraftBukkit - this.value to event.getAmount()
this.setDead();
}
}
}
|
void function(EntityPlayer par1EntityPlayer) { if (!this.worldObj.isRemote) { if (this.field_70532_c == 0 && par1EntityPlayer.xpCooldown == 0) { par1EntityPlayer.xpCooldown = 2; this.playSound(STR, 0.1F, 0.5F * ((this.rand.nextFloat() - this.rand.nextFloat()) * 0.7F + 1.8F)); par1EntityPlayer.onItemPickup(this, 1); par1EntityPlayer.addExperience(CraftEventFactory.callPlayerExpChangeEvent(par1EntityPlayer, this.xpValue).getAmount()); this.setDead(); } } }
|
/**
* Called by a player entity when they collide with an entity
*/
|
Called by a player entity when they collide with an entity
|
onCollideWithPlayer
|
{
"repo_name": "wildex999/stjerncraft_mcpc",
"path": "src/minecraft/net/minecraft/entity/item/EntityXPOrb.java",
"license": "gpl-3.0",
"size": 10003
}
|
[
"net.minecraft.entity.player.EntityPlayer",
"org.bukkit.craftbukkit.event.CraftEventFactory"
] |
import net.minecraft.entity.player.EntityPlayer; import org.bukkit.craftbukkit.event.CraftEventFactory;
|
import net.minecraft.entity.player.*; import org.bukkit.craftbukkit.event.*;
|
[
"net.minecraft.entity",
"org.bukkit.craftbukkit"
] |
net.minecraft.entity; org.bukkit.craftbukkit;
| 642,341
|
public BigDecimal getPrice() {
return price;
}
|
BigDecimal function() { return price; }
|
/**
* Order price.
* @return price
**/
|
Order price
|
getPrice
|
{
"repo_name": "coinapi/coinapi-sdk",
"path": "oeml-sdk/java/src/main/java/org/openapitools/client/model/OrderExecutionReport.java",
"license": "mit",
"size": 22612
}
|
[
"java.math.BigDecimal"
] |
import java.math.BigDecimal;
|
import java.math.*;
|
[
"java.math"
] |
java.math;
| 1,891,736
|
public void destroy() {
if (cli_controller != null) {
if (!cli_controller.isOneMulticast()) {
saveCompleteConfig();
}
} else {
saveCompleteConfig();
}
// Disable all Multicasts
for (Entry<MulticastData, MulticastThreadSuper> m : getMcMap()
.entrySet()) {
m.getValue().setActive(false);
if(m.getKey() instanceof MMRPData){
if(currentModus.equals(Modus.SENDER)) {
mmrp_controller.deleteMulticastSenderMmrp((MMRPData)m.getKey());
} else {
mmrp_controller.deleteMulticastReceiverMmrp((MMRPData)m.getKey());
}
}
}
getMcDataVectorSender().removeAllElements();
getMcDataVectorReceiver().removeAllElements();
getMcMap().clear();
}
|
void function() { if (cli_controller != null) { if (!cli_controller.isOneMulticast()) { saveCompleteConfig(); } } else { saveCompleteConfig(); } for (Entry<MulticastData, MulticastThreadSuper> m : getMcMap() .entrySet()) { m.getValue().setActive(false); if(m.getKey() instanceof MMRPData){ if(currentModus.equals(Modus.SENDER)) { mmrp_controller.deleteMulticastSenderMmrp((MMRPData)m.getKey()); } else { mmrp_controller.deleteMulticastReceiverMmrp((MMRPData)m.getKey()); } } } getMcDataVectorSender().removeAllElements(); getMcDataVectorReceiver().removeAllElements(); getMcMap().clear(); }
|
/**
 * Stops all multicast threads and removes them from the corresponding vectors.
*/
|
Stops all multicast threads and removes them from the corresponding vectors
|
destroy
|
{
"repo_name": "autarchprinceps/MultiCastor",
"path": "Vorgängerversionen/V2.1/Sourcecode/src/dhbw/multicastor/program/controller/MulticastController.java",
"license": "gpl-3.0",
"size": 23997
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,703,550
|
public java.sql.Time getTime(int columnIndex, Calendar cal)
throws SQLException {
return getTimeInternal(columnIndex, cal, cal.getTimeZone(), true);
}
|
java.sql.Time function(int columnIndex, Calendar cal) throws SQLException { return getTimeInternal(columnIndex, cal, cal.getTimeZone(), true); }
|
/**
* Get the value of a column in the current row as a java.sql.Time object.
* Use the calendar to construct an appropriate millisecond value for the
* Time, if the underlying database doesn't store timezone information.
*
* @param columnIndex
* the first column is 1, the second is 2, ...
* @param cal
* the calendar to use in constructing the time
*
* @return the column value; if the value is SQL NULL, the result is null
*
* @exception SQLException
* if a database-access error occurs.
*/
|
Get the value of a column in the current row as a java.sql.Time object. Use the calendar to construct an appropriate millisecond value for the Time, if the underlying database doesn't store timezone information
|
getTime
|
{
"repo_name": "shubhanshu-gupta/Apache-Solr",
"path": "example/solr/collection1/lib/mysql-connector-java-5.1.32/src/com/mysql/jdbc/ResultSetImpl.java",
"license": "apache-2.0",
"size": 247329
}
|
[
"java.sql.SQLException",
"java.sql.Time",
"java.util.Calendar"
] |
import java.sql.SQLException; import java.sql.Time; import java.util.Calendar;
|
import java.sql.*; import java.util.*;
|
[
"java.sql",
"java.util"
] |
java.sql; java.util;
| 545,749
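A usage sketch for the calendar-aware getter above, using the standard JDBC ResultSet.getTime(int, Calendar) API; the connection URL, credentials, table, and column are placeholders.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.Time;
import java.util.Calendar;
import java.util.TimeZone;

public class GetTimeDemo {
    public static void main(String[] args) throws Exception {
        Calendar utc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        try (Connection con = DriverManager.getConnection("jdbc:mysql://localhost/test", "user", "pass");
             Statement st = con.createStatement();
             ResultSet rs = st.executeQuery("SELECT start_time FROM events")) {
            while (rs.next()) {
                // The millisecond value is built relative to the supplied calendar's time zone.
                Time t = rs.getTime(1, utc);
                System.out.println(t);
            }
        }
    }
}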
|
public File processImage(File img, URI uri) throws DjatokaException {
String imgPath = img.getAbsolutePath();
String fmt = formatMap.get(imgPath.substring(imgPath.lastIndexOf('.') + 1).toLowerCase());
try {
if (fmt == null || !ImageProcessingUtils.isJp2Type(fmt)) {
ICompress jp2 = new KduCompressExe();
File jp2Local = File.createTempFile("cache" + uri.hashCode() + "-", ".jp2");
jp2Local.delete();
jp2.compressImage(img.getAbsolutePath(), jp2Local.getAbsolutePath(), new DjatokaEncodeParam());
img.delete();
img = jp2Local;
} else {
try {
IExtract ex = new KduExtractExe();
ex.getMetadata(new ImageRecord(uri.toString(), img.getAbsolutePath()));
} catch (DjatokaException e) {
throw new DjatokaException("Unknown JP2/JPX file format");
}
}
} catch (Exception e) {
throw new DjatokaException(e);
}
return img;
}
|
File function(File img, URI uri) throws DjatokaException { String imgPath = img.getAbsolutePath(); String fmt = formatMap.get(imgPath.substring(imgPath.lastIndexOf('.') + 1).toLowerCase()); try { if (fmt == null || !ImageProcessingUtils.isJp2Type(fmt)) { ICompress jp2 = new KduCompressExe(); File jp2Local = File.createTempFile("cache" + uri.hashCode() + "-", ".jp2"); jp2Local.delete(); jp2.compressImage(img.getAbsolutePath(), jp2Local.getAbsolutePath(), new DjatokaEncodeParam()); img.delete(); img = jp2Local; } else { try { IExtract ex = new KduExtractExe(); ex.getMetadata(new ImageRecord(uri.toString(), img.getAbsolutePath())); } catch (DjatokaException e) { throw new DjatokaException(STR); } } } catch (Exception e) { throw new DjatokaException(e); } return img; }
|
/**
 * Returns a delete-on-exit File object for a provided URI
* @param img File object on local image to be compressed
* @param uri the URI of an image to be compressed as JP2
* @return File object of JP2 compressed image
* @throws DjatokaException
*/
|
Returns a delete-on-exit File object for a provided URI
|
processImage
|
{
"repo_name": "cbeer/adore-djatoka-mirror",
"path": "src/gov/lanl/adore/djatoka/openurl/DjatokaImageMigrator.java",
"license": "lgpl-2.1",
"size": 7430
}
|
[
"gov.lanl.adore.djatoka.DjatokaEncodeParam",
"gov.lanl.adore.djatoka.DjatokaException",
"gov.lanl.adore.djatoka.ICompress",
"gov.lanl.adore.djatoka.IExtract",
"gov.lanl.adore.djatoka.kdu.KduCompressExe",
"gov.lanl.adore.djatoka.kdu.KduExtractExe",
"gov.lanl.adore.djatoka.util.ImageProcessingUtils",
"gov.lanl.adore.djatoka.util.ImageRecord",
"java.io.File"
] |
import gov.lanl.adore.djatoka.DjatokaEncodeParam; import gov.lanl.adore.djatoka.DjatokaException; import gov.lanl.adore.djatoka.ICompress; import gov.lanl.adore.djatoka.IExtract; import gov.lanl.adore.djatoka.kdu.KduCompressExe; import gov.lanl.adore.djatoka.kdu.KduExtractExe; import gov.lanl.adore.djatoka.util.ImageProcessingUtils; import gov.lanl.adore.djatoka.util.ImageRecord; import java.io.File;
|
import gov.lanl.adore.djatoka.*; import gov.lanl.adore.djatoka.kdu.*; import gov.lanl.adore.djatoka.util.*; import java.io.*;
|
[
"gov.lanl.adore",
"java.io"
] |
gov.lanl.adore; java.io;
| 1,155,622
|
@Test
public void testInvalidSecureRandomImplementation() throws Exception {
try (SslChannelBuilder channelBuilder = newClientChannelBuilder()) {
sslClientConfigs.put(SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG, "invalid");
channelBuilder.configure(sslClientConfigs);
fail("SSL channel configured with invalid SecureRandom implementation");
} catch (KafkaException e) {
// Expected exception
}
}
|
void function() throws Exception { try (SslChannelBuilder channelBuilder = newClientChannelBuilder()) { sslClientConfigs.put(SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG, STR); channelBuilder.configure(sslClientConfigs); fail(STR); } catch (KafkaException e) { } }
|
/**
* Tests that an invalid SecureRandom implementation cannot be configured
*/
|
Tests that an invalid SecureRandom implementation cannot be configured
|
testInvalidSecureRandomImplementation
|
{
"repo_name": "sslavic/kafka",
"path": "clients/src/test/java/org/apache/kafka/common/network/SslTransportLayerTest.java",
"license": "apache-2.0",
"size": 66205
}
|
[
"org.apache.kafka.common.KafkaException",
"org.apache.kafka.common.config.SslConfigs",
"org.junit.Assert"
] |
import org.apache.kafka.common.KafkaException; import org.apache.kafka.common.config.SslConfigs; import org.junit.Assert;
|
import org.apache.kafka.common.*; import org.apache.kafka.common.config.*; import org.junit.*;
|
[
"org.apache.kafka",
"org.junit"
] |
org.apache.kafka; org.junit;
| 2,473,592
|
public static ChannelData getDataFromPackage(XByteBuffer xbuf) {
ChannelData data = new ChannelData(false);
int offset = 0;
data.setOptions(XByteBuffer.toInt(xbuf.getBytesDirect(),offset));
offset += 4; //options
data.setTimestamp(XByteBuffer.toLong(xbuf.getBytesDirect(),offset));
offset += 8; //timestamp
data.uniqueId = new byte[XByteBuffer.toInt(xbuf.getBytesDirect(),offset)];
offset += 4; //uniqueId length
System.arraycopy(xbuf.getBytesDirect(),offset,data.uniqueId,0,data.uniqueId.length);
offset += data.uniqueId.length; //uniqueId data
//byte[] addr = new byte[XByteBuffer.toInt(xbuf.getBytesDirect(),offset)];
int addrlen = XByteBuffer.toInt(xbuf.getBytesDirect(),offset);
offset += 4; //addr length
//System.arraycopy(xbuf.getBytesDirect(),offset,addr,0,addr.length);
data.setAddress(MemberImpl.getMember(xbuf.getBytesDirect(),offset,addrlen));
//offset += addr.length; //addr data
offset += addrlen;
int xsize = XByteBuffer.toInt(xbuf.getBytesDirect(),offset);
offset += 4; //xsize length
System.arraycopy(xbuf.getBytesDirect(),offset,xbuf.getBytesDirect(),0,xsize);
xbuf.setLength(xsize);
data.message = xbuf;
return data;
}
|
static ChannelData function(XByteBuffer xbuf) { ChannelData data = new ChannelData(false); int offset = 0; data.setOptions(XByteBuffer.toInt(xbuf.getBytesDirect(),offset)); offset += 4; data.setTimestamp(XByteBuffer.toLong(xbuf.getBytesDirect(),offset)); offset += 8; data.uniqueId = new byte[XByteBuffer.toInt(xbuf.getBytesDirect(),offset)]; offset += 4; System.arraycopy(xbuf.getBytesDirect(),offset,data.uniqueId,0,data.uniqueId.length); offset += data.uniqueId.length; int addrlen = XByteBuffer.toInt(xbuf.getBytesDirect(),offset); offset += 4; data.setAddress(MemberImpl.getMember(xbuf.getBytesDirect(),offset,addrlen)); offset += addrlen; int xsize = XByteBuffer.toInt(xbuf.getBytesDirect(),offset); offset += 4; System.arraycopy(xbuf.getBytesDirect(),offset,xbuf.getBytesDirect(),0,xsize); xbuf.setLength(xsize); data.message = xbuf; return data; }
|
/**
* Deserializes a ChannelData object from a byte array
*/
|
Deserializes a ChannelData object from a byte array
|
getDataFromPackage
|
{
"repo_name": "plumer/codana",
"path": "tomcat_files/6.0.43/ChannelData.java",
"license": "mit",
"size": 12360
}
|
[
"org.apache.catalina.tribes.membership.MemberImpl"
] |
import org.apache.catalina.tribes.membership.MemberImpl;
|
import org.apache.catalina.tribes.membership.*;
|
[
"org.apache.catalina"
] |
org.apache.catalina;
| 2,721,742
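A self-contained sketch of the frame layout the deserializer above walks: options (4 bytes), timestamp (8 bytes), uniqueId length plus bytes, address length plus bytes, then payload length plus bytes. Plain ByteBuffer calls stand in for the tribes classes, and the address bytes are a string placeholder for the serialized member.
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class ChannelDataLayoutDemo {
    public static void main(String[] args) {
        byte[] uniqueId = {1, 2, 3, 4};
        byte[] addr = "10.0.0.1:4000".getBytes(StandardCharsets.UTF_8);
        byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);
        ByteBuffer frame = ByteBuffer.allocate(4 + 8 + 4 + uniqueId.length + 4 + addr.length + 4 + payload.length);
        frame.putInt(2).putLong(System.currentTimeMillis())
             .putInt(uniqueId.length).put(uniqueId)
             .putInt(addr.length).put(addr)
             .putInt(payload.length).put(payload);
        frame.flip();
        // Read the fields back in the same order the deserializer uses.
        int options = frame.getInt();
        long timestamp = frame.getLong();
        byte[] id = new byte[frame.getInt()];
        frame.get(id);
        byte[] member = new byte[frame.getInt()];
        frame.get(member);
        byte[] message = new byte[frame.getInt()];
        frame.get(message);
        System.out.println(options + " " + timestamp + " " + new String(message, StandardCharsets.UTF_8));
    }
}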
|
@Nullable
static SkylarkImportResult fetchImportsFromBuildFile(
Path buildFilePath,
PackageIdentifier packageId,
BuildFileAST buildFileAST,
Environment env,
SkylarkImportLookupFunction skylarkImportLookupFunctionForInlining)
throws PackageFunctionException, InterruptedException {
Preconditions.checkArgument(!packageId.getRepository().isDefault());
ImmutableList<SkylarkImport> imports = buildFileAST.getImports();
Map<String, Extension> importMap = Maps.newHashMapWithExpectedSize(imports.size());
ImmutableList.Builder<SkylarkFileDependency> fileDependencies = ImmutableList.builder();
ImmutableMap<String, Label> importPathMap;
// Find the labels corresponding to the load statements.
Label labelForCurrBuildFile;
try {
labelForCurrBuildFile = Label.create(packageId, "BUILD");
} catch (LabelSyntaxException e) {
// Shouldn't happen; the Label is well-formed by construction.
throw new IllegalStateException(e);
}
try {
importPathMap = SkylarkImportLookupFunction.findLabelsForLoadStatements(
imports, labelForCurrBuildFile, env);
if (importPathMap == null) {
return null;
}
} catch (SkylarkImportFailedException e) {
throw new PackageFunctionException(
new BuildFileContainsErrorsException(packageId, e.getMessage()), Transience.PERSISTENT);
}
// Look up and load the imports.
ImmutableCollection<Label> importLabels = importPathMap.values();
List<SkyKey> importLookupKeys = Lists.newArrayListWithExpectedSize(importLabels.size());
boolean inWorkspace = buildFilePath.getBaseName().endsWith("WORKSPACE");
for (Label importLabel : importLabels) {
importLookupKeys.add(SkylarkImportLookupValue.key(importLabel, inWorkspace));
}
Map<SkyKey, SkyValue> skylarkImportMap = Maps.newHashMapWithExpectedSize(importPathMap.size());
boolean valuesMissing = false;
try {
if (skylarkImportLookupFunctionForInlining == null) {
// Not inlining
Map<SkyKey,
ValueOrException2<
SkylarkImportFailedException,
InconsistentFilesystemException>> skylarkLookupResults = env.getValuesOrThrow(
importLookupKeys,
SkylarkImportFailedException.class,
InconsistentFilesystemException.class);
valuesMissing = env.valuesMissing();
for (Map.Entry<
SkyKey,
ValueOrException2<
SkylarkImportFailedException,
InconsistentFilesystemException>> entry : skylarkLookupResults.entrySet()) {
// Fetching the value will raise any deferred exceptions
skylarkImportMap.put(entry.getKey(), entry.getValue().get());
}
} else {
// Inlining calls to SkylarkImportLookupFunction
LinkedHashMap<Label, SkylarkImportLookupValue> alreadyVisitedImports =
Maps.newLinkedHashMapWithExpectedSize(importLookupKeys.size());
for (SkyKey importLookupKey : importLookupKeys) {
SkyValue skyValue =
skylarkImportLookupFunctionForInlining.computeWithInlineCalls(
importLookupKey, env, alreadyVisitedImports);
if (skyValue == null) {
Preconditions.checkState(
env.valuesMissing(), "no skylark import value for %s", importLookupKey);
// We continue making inline calls even if some requested values are missing, to
// maximize the number of dependent (non-inlined) SkyFunctions that are requested, thus
// avoiding a quadratic number of restarts.
valuesMissing = true;
} else {
skylarkImportMap.put(importLookupKey, skyValue);
}
}
}
} catch (SkylarkImportFailedException e) {
throw new PackageFunctionException(
new BuildFileContainsErrorsException(packageId, e.getMessage()), Transience.PERSISTENT);
} catch (InconsistentFilesystemException e) {
throw new PackageFunctionException(
new NoSuchPackageException(packageId, e.getMessage(), e), Transience.PERSISTENT);
}
if (valuesMissing) {
// Some imports are unavailable.
return null;
}
// Process the loaded imports.
for (Entry<String, Label> importEntry : importPathMap.entrySet()) {
String importString = importEntry.getKey();
Label importLabel = importEntry.getValue();
SkyKey keyForLabel = SkylarkImportLookupValue.key(importLabel, inWorkspace);
SkylarkImportLookupValue importLookupValue =
(SkylarkImportLookupValue) skylarkImportMap.get(keyForLabel);
importMap.put(importString, importLookupValue.getEnvironmentExtension());
fileDependencies.add(importLookupValue.getDependency());
}
return new SkylarkImportResult(importMap, transitiveClosureOfLabels(fileDependencies.build()));
}
|
static SkylarkImportResult fetchImportsFromBuildFile( Path buildFilePath, PackageIdentifier packageId, BuildFileAST buildFileAST, Environment env, SkylarkImportLookupFunction skylarkImportLookupFunctionForInlining) throws PackageFunctionException, InterruptedException { Preconditions.checkArgument(!packageId.getRepository().isDefault()); ImmutableList<SkylarkImport> imports = buildFileAST.getImports(); Map<String, Extension> importMap = Maps.newHashMapWithExpectedSize(imports.size()); ImmutableList.Builder<SkylarkFileDependency> fileDependencies = ImmutableList.builder(); ImmutableMap<String, Label> importPathMap; Label labelForCurrBuildFile; try { labelForCurrBuildFile = Label.create(packageId, "BUILD"); } catch (LabelSyntaxException e) { throw new IllegalStateException(e); } try { importPathMap = SkylarkImportLookupFunction.findLabelsForLoadStatements( imports, labelForCurrBuildFile, env); if (importPathMap == null) { return null; } } catch (SkylarkImportFailedException e) { throw new PackageFunctionException( new BuildFileContainsErrorsException(packageId, e.getMessage()), Transience.PERSISTENT); } ImmutableCollection<Label> importLabels = importPathMap.values(); List<SkyKey> importLookupKeys = Lists.newArrayListWithExpectedSize(importLabels.size()); boolean inWorkspace = buildFilePath.getBaseName().endsWith(STR); for (Label importLabel : importLabels) { importLookupKeys.add(SkylarkImportLookupValue.key(importLabel, inWorkspace)); } Map<SkyKey, SkyValue> skylarkImportMap = Maps.newHashMapWithExpectedSize(importPathMap.size()); boolean valuesMissing = false; try { if (skylarkImportLookupFunctionForInlining == null) { Map<SkyKey, ValueOrException2< SkylarkImportFailedException, InconsistentFilesystemException>> skylarkLookupResults = env.getValuesOrThrow( importLookupKeys, SkylarkImportFailedException.class, InconsistentFilesystemException.class); valuesMissing = env.valuesMissing(); for (Map.Entry< SkyKey, ValueOrException2< SkylarkImportFailedException, InconsistentFilesystemException>> entry : skylarkLookupResults.entrySet()) { skylarkImportMap.put(entry.getKey(), entry.getValue().get()); } } else { LinkedHashMap<Label, SkylarkImportLookupValue> alreadyVisitedImports = Maps.newLinkedHashMapWithExpectedSize(importLookupKeys.size()); for (SkyKey importLookupKey : importLookupKeys) { SkyValue skyValue = skylarkImportLookupFunctionForInlining.computeWithInlineCalls( importLookupKey, env, alreadyVisitedImports); if (skyValue == null) { Preconditions.checkState( env.valuesMissing(), STR, importLookupKey); valuesMissing = true; } else { skylarkImportMap.put(importLookupKey, skyValue); } } } } catch (SkylarkImportFailedException e) { throw new PackageFunctionException( new BuildFileContainsErrorsException(packageId, e.getMessage()), Transience.PERSISTENT); } catch (InconsistentFilesystemException e) { throw new PackageFunctionException( new NoSuchPackageException(packageId, e.getMessage(), e), Transience.PERSISTENT); } if (valuesMissing) { return null; } for (Entry<String, Label> importEntry : importPathMap.entrySet()) { String importString = importEntry.getKey(); Label importLabel = importEntry.getValue(); SkyKey keyForLabel = SkylarkImportLookupValue.key(importLabel, inWorkspace); SkylarkImportLookupValue importLookupValue = (SkylarkImportLookupValue) skylarkImportMap.get(keyForLabel); importMap.put(importString, importLookupValue.getEnvironmentExtension()); fileDependencies.add(importLookupValue.getDependency()); } return new SkylarkImportResult(importMap, 
transitiveClosureOfLabels(fileDependencies.build())); }
|
/**
* Fetch the skylark loads for this BUILD file. If any of them haven't been computed yet,
* returns null.
*/
|
Fetch the skylark loads for this BUILD file. If any of them haven't been computed yet, returns null
|
fetchImportsFromBuildFile
|
{
"repo_name": "hermione521/bazel",
"path": "src/main/java/com/google/devtools/build/lib/skyframe/PackageFunction.java",
"license": "apache-2.0",
"size": 59390
}
|
[
"com.google.common.collect.ImmutableCollection",
"com.google.common.collect.ImmutableList",
"com.google.common.collect.ImmutableMap",
"com.google.common.collect.Lists",
"com.google.common.collect.Maps",
"com.google.devtools.build.lib.cmdline.Label",
"com.google.devtools.build.lib.cmdline.LabelSyntaxException",
"com.google.devtools.build.lib.cmdline.PackageIdentifier",
"com.google.devtools.build.lib.packages.BuildFileContainsErrorsException",
"com.google.devtools.build.lib.packages.NoSuchPackageException",
"com.google.devtools.build.lib.skyframe.SkylarkImportLookupFunction",
"com.google.devtools.build.lib.syntax.BuildFileAST",
"com.google.devtools.build.lib.syntax.Environment",
"com.google.devtools.build.lib.syntax.SkylarkImport",
"com.google.devtools.build.lib.util.Preconditions",
"com.google.devtools.build.lib.vfs.Path",
"com.google.devtools.build.skyframe.SkyFunctionException",
"com.google.devtools.build.skyframe.SkyKey",
"com.google.devtools.build.skyframe.SkyValue",
"com.google.devtools.build.skyframe.ValueOrException2",
"java.util.LinkedHashMap",
"java.util.List",
"java.util.Map"
] |
import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.LabelSyntaxException; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import com.google.devtools.build.lib.packages.BuildFileContainsErrorsException; import com.google.devtools.build.lib.packages.NoSuchPackageException; import com.google.devtools.build.lib.skyframe.SkylarkImportLookupFunction; import com.google.devtools.build.lib.syntax.BuildFileAST; import com.google.devtools.build.lib.syntax.Environment; import com.google.devtools.build.lib.syntax.SkylarkImport; import com.google.devtools.build.lib.util.Preconditions; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import com.google.devtools.build.skyframe.ValueOrException2; import java.util.LinkedHashMap; import java.util.List; import java.util.Map;
|
import com.google.common.collect.*; import com.google.devtools.build.lib.cmdline.*; import com.google.devtools.build.lib.packages.*; import com.google.devtools.build.lib.skyframe.*; import com.google.devtools.build.lib.syntax.*; import com.google.devtools.build.lib.util.*; import com.google.devtools.build.lib.vfs.*; import com.google.devtools.build.skyframe.*; import java.util.*;
|
[
"com.google.common",
"com.google.devtools",
"java.util"
] |
com.google.common; com.google.devtools; java.util;
| 2,535,157
|
private List<Indicator> resolveIndicatorExpressions( DataQueryParams params )
{
List<Indicator> indicators = asTypedList( params.getIndicators() );
for ( Indicator indicator : indicators )
{
for ( ExpressionResolver resolver : resolvers.getExpressionResolvers() )
{
indicator.setNumerator( resolver.resolve( indicator.getNumerator() ) );
indicator.setDenominator( resolver.resolve( indicator.getDenominator() ) );
}
}
return indicators;
}
|
List<Indicator> function( DataQueryParams params ) { List<Indicator> indicators = asTypedList( params.getIndicators() ); for ( Indicator indicator : indicators ) { for ( ExpressionResolver resolver : resolvers.getExpressionResolvers() ) { indicator.setNumerator( resolver.resolve( indicator.getNumerator() ) ); indicator.setDenominator( resolver.resolve( indicator.getDenominator() ) ); } } return indicators; }
|
/**
* Resolves the numerator and denominator expressions of indicators in the
* data query.
*
* @param params the {@link DataQueryParams}.
* @return the resolved list of indicators.
*/
|
Resolves the numerator and denominator expressions of indicators in the data query
|
resolveIndicatorExpressions
|
{
"repo_name": "msf-oca-his/dhis2-core",
"path": "dhis-2/dhis-services/dhis-service-analytics/src/main/java/org/hisp/dhis/analytics/data/handler/DataHandler.java",
"license": "bsd-3-clause",
"size": 50048
}
|
[
"java.util.List",
"org.hisp.dhis.analytics.DataQueryParams",
"org.hisp.dhis.analytics.resolver.ExpressionResolver",
"org.hisp.dhis.common.DimensionalObjectUtils",
"org.hisp.dhis.indicator.Indicator"
] |
import java.util.List; import org.hisp.dhis.analytics.DataQueryParams; import org.hisp.dhis.analytics.resolver.ExpressionResolver; import org.hisp.dhis.common.DimensionalObjectUtils; import org.hisp.dhis.indicator.Indicator;
|
import java.util.*; import org.hisp.dhis.analytics.*; import org.hisp.dhis.analytics.resolver.*; import org.hisp.dhis.common.*; import org.hisp.dhis.indicator.*;
|
[
"java.util",
"org.hisp.dhis"
] |
java.util; org.hisp.dhis;
| 2,210,892
|
public Set<NewGTFSError> setParameter (PreparedStatement preparedStatement, int oneBasedIndex, LocalDate localDate) {
try {
if (localDate == null) setNull(preparedStatement, oneBasedIndex);
else preparedStatement.setString(oneBasedIndex, localDate.format(GTFS_DATE_FORMATTER));
return Collections.EMPTY_SET;
} catch (Exception e) {
throw new StorageException(e);
}
}
|
Set<NewGTFSError> function (PreparedStatement preparedStatement, int oneBasedIndex, LocalDate localDate) { try { if (localDate == null) setNull(preparedStatement, oneBasedIndex); else preparedStatement.setString(oneBasedIndex, localDate.format(GTFS_DATE_FORMATTER)); return Collections.EMPTY_SET; } catch (Exception e) { throw new StorageException(e); } }
|
/**
* DateField specific method to set a statement parameter from a {@link LocalDate}.
*/
|
DateField specific method to set a statement parameter from a <code>LocalDate</code>
|
setParameter
|
{
"repo_name": "conveyal/gtfs-lib",
"path": "src/main/java/com/conveyal/gtfs/loader/DateField.java",
"license": "bsd-2-clause",
"size": 3292
}
|
[
"com.conveyal.gtfs.error.NewGTFSError",
"com.conveyal.gtfs.storage.StorageException",
"java.sql.PreparedStatement",
"java.time.LocalDate",
"java.util.Collections",
"java.util.Set"
] |
import com.conveyal.gtfs.error.NewGTFSError; import com.conveyal.gtfs.storage.StorageException; import java.sql.PreparedStatement; import java.time.LocalDate; import java.util.Collections; import java.util.Set;
|
import com.conveyal.gtfs.error.*; import com.conveyal.gtfs.storage.*; import java.sql.*; import java.time.*; import java.util.*;
|
[
"com.conveyal.gtfs",
"java.sql",
"java.time",
"java.util"
] |
com.conveyal.gtfs; java.sql; java.time; java.util;
| 678,642
|
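The DateField.setParameter entry above binds a LocalDate as a formatted string and falls back to SQL NULL when the value is absent. A minimal sketch of the same idea follows; the yyyyMMdd pattern stands in for GTFS_DATE_FORMATTER, the table and column names are placeholders, and an H2 driver on the classpath is assumed:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

public class GtfsDateBindingSketch {
    // GTFS service dates are plain YYYYMMDD strings; this formatter is assumed to be
    // equivalent to the one DateField uses internally.
    private static final DateTimeFormatter GTFS_DATE = DateTimeFormatter.ofPattern("yyyyMMdd");

    // Mirror of the setParameter logic: null becomes SQL NULL, otherwise a formatted string.
    private static void bindDate(PreparedStatement ps, int index, LocalDate date) throws SQLException {
        if (date == null) {
            ps.setNull(index, Types.VARCHAR);
        } else {
            ps.setString(index, date.format(GTFS_DATE)); // e.g. "20240315"
        }
    }

    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:gtfs")) {
            conn.createStatement().execute(
                    "CREATE TABLE calendar_dates (service_id VARCHAR, service_date VARCHAR)");
            try (PreparedStatement ps = conn.prepareStatement(
                    "INSERT INTO calendar_dates (service_id, service_date) VALUES (?, ?)")) {
                ps.setString(1, "weekday");
                bindDate(ps, 2, LocalDate.of(2024, 3, 15));
                ps.executeUpdate();
            }
        }
    }
}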
public void close () {
final Iterator iterator = ((View) this).base.getModules().iterator();
while ( iterator.hasNext() ) {
Modul modul = (Modul) iterator.next();
modul.close();
}
if ( !((View) this).base.getSetting().isSaved() && JOptionPane.showConfirmDialog( ((View) this), "The settings are changed and unsaved. Save it now?", "Save settings?", JOptionPane.DEFAULT_OPTION, JOptionPane.QUESTION_MESSAGE ) == JOptionPane.OK_OPTION ) {
try {
((View) this).base.getSetting().save( true );
} catch ( SettingException exception ) {
JOptionPane.showMessageDialog( ((View) this), exception.getMessage(), "Coudn't save the changes of the profile.", JOptionPane.ERROR_MESSAGE );
}
}
// frame
setVisible( false );
dispose();
}
|
void function () { final Iterator iterator = ((View) this).base.getModules().iterator(); while ( iterator.hasNext() ) { Modul modul = (Modul) iterator.next(); modul.close(); } if ( !((View) this).base.getSetting().isSaved() && JOptionPane.showConfirmDialog( ((View) this), STR, STR, JOptionPane.DEFAULT_OPTION, JOptionPane.QUESTION_MESSAGE ) == JOptionPane.OK_OPTION ) { try { ((View) this).base.getSetting().save( true ); } catch ( SettingException exception ) { JOptionPane.showMessageDialog( ((View) this), exception.getMessage(), STR, JOptionPane.ERROR_MESSAGE ); } } setVisible( false ); dispose(); }
|
/**
* Check for unsaved settings (confirm dialog) and close the gui.
*/
|
Check for unsaved settings (confirm dialog) and close the gui
|
close
|
{
"repo_name": "SergiyKolesnikov/fuji",
"path": "examples/Devolution/build/Everything/View.java",
"license": "lgpl-3.0",
"size": 11449
}
|
[
"java.util.Iterator",
"javax.swing.JOptionPane"
] |
import java.util.Iterator; import javax.swing.JOptionPane;
|
import java.util.*; import javax.swing.*;
|
[
"java.util",
"javax.swing"
] |
java.util; javax.swing;
| 5,916
|
private static Details getDetailsForLambda(DetailAST ast) {
final DetailAST lcurly = ast.findFirstToken(TokenTypes.SLIST);
boolean shouldCheckLastRcurly = false;
DetailAST nextToken = getNextToken(ast);
if (nextToken.getType() != TokenTypes.RPAREN
&& nextToken.getType() != TokenTypes.COMMA) {
shouldCheckLastRcurly = true;
nextToken = getNextToken(nextToken);
}
DetailAST rcurly = null;
if (lcurly != null) {
rcurly = lcurly.getLastChild();
}
return new Details(lcurly, rcurly, nextToken, shouldCheckLastRcurly);
}
|
static Details function(DetailAST ast) { final DetailAST lcurly = ast.findFirstToken(TokenTypes.SLIST); boolean shouldCheckLastRcurly = false; DetailAST nextToken = getNextToken(ast); if (nextToken.getType() != TokenTypes.RPAREN && nextToken.getType() != TokenTypes.COMMA) { shouldCheckLastRcurly = true; nextToken = getNextToken(nextToken); } DetailAST rcurly = null; if (lcurly != null) { rcurly = lcurly.getLastChild(); } return new Details(lcurly, rcurly, nextToken, shouldCheckLastRcurly); }
|
/**
* Collects validation details for Lambdas.
* @param ast a {@code DetailAST} value
* @return an object containing all details to make a validation
*/
|
Collects validation details for Lambdas
|
getDetailsForLambda
|
{
"repo_name": "sharang108/checkstyle",
"path": "src/main/java/com/puppycrawl/tools/checkstyle/checks/blocks/RightCurlyCheck.java",
"license": "lgpl-2.1",
"size": 22636
}
|
[
"com.puppycrawl.tools.checkstyle.api.DetailAST",
"com.puppycrawl.tools.checkstyle.api.TokenTypes"
] |
import com.puppycrawl.tools.checkstyle.api.DetailAST; import com.puppycrawl.tools.checkstyle.api.TokenTypes;
|
import com.puppycrawl.tools.checkstyle.api.*;
|
[
"com.puppycrawl.tools"
] |
com.puppycrawl.tools;
| 918,573
|
@Override
public Path getContainerPath()
throws IOException {
Preconditions.checkState(metadataLocations.size() > 0);
int index = currentIndex % metadataLocations.size();
return Paths.get(metadataLocations.get(index).getNormalizedUri());
}
|
Path function() throws IOException { Preconditions.checkState(metadataLocations.size() > 0); int index = currentIndex % metadataLocations.size(); return Paths.get(metadataLocations.get(index).getNormalizedUri()); }
|
/**
* Returns the path where the container should be placed from a set of
* metadataLocations.
*
* @return A path where we should place this container and metadata.
* @throws IOException
*/
|
Returns the path where the container should be placed from a set of metadataLocations
|
getContainerPath
|
{
"repo_name": "ChetnaChaudhari/hadoop",
"path": "hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerLocationManagerImpl.java",
"license": "apache-2.0",
"size": 5352
}
|
[
"com.google.common.base.Preconditions",
"java.io.IOException",
"java.nio.file.Path",
"java.nio.file.Paths"
] |
import com.google.common.base.Preconditions; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths;
|
import com.google.common.base.*; import java.io.*; import java.nio.file.*;
|
[
"com.google.common",
"java.io",
"java.nio"
] |
com.google.common; java.io; java.nio;
| 528,114
|
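getContainerPath above picks a location with currentIndex % metadataLocations.size(), a simple round-robin over the configured paths. A small stand-alone sketch of that wrap-around selection is below; the directory names are invented and the Preconditions check on a non-empty list is left out for brevity:

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;

public class RoundRobinPathSketch {
    public static void main(String[] args) {
        List<String> locations = List.of("/data/disk1/meta", "/data/disk2/meta", "/data/disk3/meta");
        for (int currentIndex = 0; currentIndex < 7; currentIndex++) {
            int index = currentIndex % locations.size(); // wraps back to 0 after the last entry
            Path containerPath = Paths.get(locations.get(index));
            System.out.println(currentIndex + " -> " + containerPath);
        }
    }
}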
public boolean validateExpenses(ActualExpense expense, TravelDocument document) {
boolean success = (validateGeneralRules(expense, document) &&
validateExpenseDetail(expense) &&
validateAirfareRules(expense, document) &&
validateRentalCarRules(expense, document) &&
validateLodgingRules(expense, document) &&
validateLodgingAllowanceRules(expense, document) &&
validatePerDiemRules(expense, document) &&
validateMaximumAmountRules(expense, document));
return success;
}
|
boolean function(ActualExpense expense, TravelDocument document) { boolean success = (validateGeneralRules(expense, document) && validateExpenseDetail(expense) && validateAirfareRules(expense, document) && validateRentalCarRules(expense, document) && validateLodgingRules(expense, document) && validateLodgingAllowanceRules(expense, document) && validatePerDiemRules(expense, document) && validateMaximumAmountRules(expense, document)); return success; }
|
/**
* Validate the expense
*
* @param expense
* @param document
* @return
*/
|
Validate the expense
|
validateExpenses
|
{
"repo_name": "bhutchinson/kfs",
"path": "kfs-tem/src/main/java/org/kuali/kfs/module/tem/document/validation/impl/TravelDocumentActualExpenseLineValidation.java",
"license": "agpl-3.0",
"size": 17076
}
|
[
"org.kuali.kfs.module.tem.businessobject.ActualExpense",
"org.kuali.kfs.module.tem.document.TravelDocument"
] |
import org.kuali.kfs.module.tem.businessobject.ActualExpense; import org.kuali.kfs.module.tem.document.TravelDocument;
|
import org.kuali.kfs.module.tem.businessobject.*; import org.kuali.kfs.module.tem.document.*;
|
[
"org.kuali.kfs"
] |
org.kuali.kfs;
| 1,686,002
|
// fails frequently, disabled for now, see HBASE-6406
//@Test
public void testClientSessionExpired() throws Exception {
Configuration c = new Configuration(TEST_UTIL.getConfiguration());
// We don't want to share the connection as we will check its state
c.set(HConstants.HBASE_CLIENT_INSTANCE_ID, "1111");
Connection connection = ConnectionFactory.createConnection(c);
ZooKeeperWatcher connectionZK = getZooKeeperWatcher(connection);
LOG.info("ZooKeeperWatcher= 0x"+ Integer.toHexString(
connectionZK.hashCode()));
LOG.info("getRecoverableZooKeeper= 0x"+ Integer.toHexString(
connectionZK.getRecoverableZooKeeper().hashCode()));
LOG.info("session="+Long.toHexString(
connectionZK.getRecoverableZooKeeper().getSessionId()));
TEST_UTIL.expireSession(connectionZK);
LOG.info("Before using zkw state=" +
connectionZK.getRecoverableZooKeeper().getState());
// provoke session expiration by doing something with ZK
try {
connectionZK.getRecoverableZooKeeper().getZooKeeper().exists(
"/1/1", false);
} catch (KeeperException ignored) {
}
// Check that the old ZK connection is closed, means we did expire
States state = connectionZK.getRecoverableZooKeeper().getState();
LOG.info("After using zkw state=" + state);
LOG.info("session="+Long.toHexString(
connectionZK.getRecoverableZooKeeper().getSessionId()));
// It's asynchronous, so we may have to wait a little...
final long limit1 = System.currentTimeMillis() + 3000;
while (System.currentTimeMillis() < limit1 && state != States.CLOSED){
state = connectionZK.getRecoverableZooKeeper().getState();
}
LOG.info("After using zkw loop=" + state);
LOG.info("ZooKeeper should have timed out");
LOG.info("session="+Long.toHexString(
connectionZK.getRecoverableZooKeeper().getSessionId()));
// It's surprising but sometimes we can still be in connected state.
// As it's known (even if not understood) we don't make the the test fail
// for this reason.)
// Assert.assertTrue("state=" + state, state == States.CLOSED);
// Check that the client recovered
ZooKeeperWatcher newConnectionZK = getZooKeeperWatcher(connection);
States state2 = newConnectionZK.getRecoverableZooKeeper().getState();
LOG.info("After new get state=" +state2);
// As it's an asynchronous event we may got the same ZKW, if it's not
// yet invalidated. Hence this loop.
final long limit2 = System.currentTimeMillis() + 3000;
while (System.currentTimeMillis() < limit2 &&
state2 != States.CONNECTED && state2 != States.CONNECTING) {
newConnectionZK = getZooKeeperWatcher(connection);
state2 = newConnectionZK.getRecoverableZooKeeper().getState();
}
LOG.info("After new get state loop=" + state2);
Assert.assertTrue(
state2 == States.CONNECTED || state2 == States.CONNECTING);
connection.close();
}
|
void function() throws Exception { Configuration c = new Configuration(TEST_UTIL.getConfiguration()); c.set(HConstants.HBASE_CLIENT_INSTANCE_ID, "1111"); Connection connection = ConnectionFactory.createConnection(c); ZooKeeperWatcher connectionZK = getZooKeeperWatcher(connection); LOG.info(STR+ Integer.toHexString( connectionZK.hashCode())); LOG.info(STR+ Integer.toHexString( connectionZK.getRecoverableZooKeeper().hashCode())); LOG.info(STR+Long.toHexString( connectionZK.getRecoverableZooKeeper().getSessionId())); TEST_UTIL.expireSession(connectionZK); LOG.info(STR + connectionZK.getRecoverableZooKeeper().getState()); try { connectionZK.getRecoverableZooKeeper().getZooKeeper().exists( "/1/1", false); } catch (KeeperException ignored) { } States state = connectionZK.getRecoverableZooKeeper().getState(); LOG.info(STR + state); LOG.info(STR+Long.toHexString( connectionZK.getRecoverableZooKeeper().getSessionId())); final long limit1 = System.currentTimeMillis() + 3000; while (System.currentTimeMillis() < limit1 && state != States.CLOSED){ state = connectionZK.getRecoverableZooKeeper().getState(); } LOG.info(STR + state); LOG.info(STR); LOG.info(STR+Long.toHexString( connectionZK.getRecoverableZooKeeper().getSessionId())); ZooKeeperWatcher newConnectionZK = getZooKeeperWatcher(connection); States state2 = newConnectionZK.getRecoverableZooKeeper().getState(); LOG.info(STR +state2); final long limit2 = System.currentTimeMillis() + 3000; while (System.currentTimeMillis() < limit2 && state2 != States.CONNECTED && state2 != States.CONNECTING) { newConnectionZK = getZooKeeperWatcher(connection); state2 = newConnectionZK.getRecoverableZooKeeper().getState(); } LOG.info(STR + state2); Assert.assertTrue( state2 == States.CONNECTED || state2 == States.CONNECTING); connection.close(); }
|
/**
* See HBASE-1232 and http://hbase.apache.org/book.html#trouble.zookeeper.
* @throws IOException
* @throws InterruptedException
*/
|
See HBASE-1232 and HREF
|
testClientSessionExpired
|
{
"repo_name": "JingchengDu/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java",
"license": "apache-2.0",
"size": 21739
}
|
[
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hbase.client.Connection",
"org.apache.hadoop.hbase.client.ConnectionFactory",
"org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher",
"org.apache.zookeeper.KeeperException",
"org.apache.zookeeper.ZooKeeper",
"org.junit.Assert"
] |
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZooKeeper; import org.junit.Assert;
|
import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.zookeeper.*; import org.apache.zookeeper.*; import org.junit.*;
|
[
"org.apache.hadoop",
"org.apache.zookeeper",
"org.junit"
] |
org.apache.hadoop; org.apache.zookeeper; org.junit;
| 825,651
|
public static Container createErrorContainer(String errCode) {
Container err = new Container();
err.setSuperTag(JERROR);
err.addE(new Element(ERR_KEY, errCode));
return err;
}
|
static Container function(String errCode) { Container err = new Container(); err.setSuperTag(JERROR); err.addE(new Element(ERR_KEY, errCode)); return err; }
|
/**
* Creates an errorcontainer with the given errkey.
*
* @param errCode The needed errorcode.
* @return an errorcontainer with the given errkey.
*/
|
Creates an errorcontainer with the given errkey
|
createErrorContainer
|
{
"repo_name": "Jattyv/jcapi",
"path": "src/main/java/de/jattyv/jcapi/util/factory/JattyvFactory.java",
"license": "gpl-3.0",
"size": 5466
}
|
[
"de.jattyv.jcapi.data.jobject.Container",
"de.jattyv.jcapi.data.jobject.Element"
] |
import de.jattyv.jcapi.data.jobject.Container; import de.jattyv.jcapi.data.jobject.Element;
|
import de.jattyv.jcapi.data.jobject.*;
|
[
"de.jattyv.jcapi"
] |
de.jattyv.jcapi;
| 2,193,925
|
@SuppressWarnings("unchecked")
public <T extends Tag> Set<T> getOfType(final Class<T> type) {
final long stamp = lock.readLock();
try {
final Set<T> tagsOfType = (Set<T>) tags.get(type);
return tagsOfType != null ? Collections.unmodifiableSet(tagsOfType) : Collections.emptySet();
} finally {
lock.unlockRead(stamp);
}
}
|
@SuppressWarnings(STR) <T extends Tag> Set<T> function(final Class<T> type) { final long stamp = lock.readLock(); try { final Set<T> tagsOfType = (Set<T>) tags.get(type); return tagsOfType != null ? Collections.unmodifiableSet(tagsOfType) : Collections.emptySet(); } finally { lock.unlockRead(stamp); } }
|
/**
* Gets all of the tags matching the specified type.
*
* @param type
* Type of tag.
* @return All tags of that type or an empty set if none are found.
*/
|
Gets all of the tags matching the specified type
|
getOfType
|
{
"repo_name": "thopit/JALSE",
"path": "src/main/java/jalse/tags/TagTypeSet.java",
"license": "apache-2.0",
"size": 4084
}
|
[
"java.util.Collections",
"java.util.Set"
] |
import java.util.Collections; import java.util.Set;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,033,250
|
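The TagTypeSet.getOfType entry above acquires a stamp-based read lock and releases it with the same stamp in a finally block. That is the standard java.util.concurrent.locks.StampedLock idiom; a minimal, self-contained version of the pattern is sketched below, with a plain string set instead of JALSE's tag types and a defensive copy instead of a live unmodifiable view:

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.locks.StampedLock;

public class StampedReadSketch {
    private final StampedLock lock = new StampedLock();
    private final Set<String> tags = new HashSet<>();

    public Set<String> snapshot() {
        final long stamp = lock.readLock();   // blocks writers until unlockRead
        try {
            return Collections.unmodifiableSet(new HashSet<>(tags));
        } finally {
            lock.unlockRead(stamp);           // always release with the same stamp
        }
    }

    public void add(String tag) {
        final long stamp = lock.writeLock();
        try {
            tags.add(tag);
        } finally {
            lock.unlockWrite(stamp);
        }
    }

    public static void main(String[] args) {
        StampedReadSketch s = new StampedReadSketch();
        s.add("persistent");
        System.out.println(s.snapshot()); // [persistent]
    }
}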
@Generated
@Selector("nodeWithSource:")
public static native MPSCNNSoftMaxNode nodeWithSource(MPSNNImageNode sourceNode);
|
@Selector(STR) static native MPSCNNSoftMaxNode function(MPSNNImageNode sourceNode);
|
/**
 * Init a node representing an autoreleased MPSCNNSoftMax kernel
*
* @param sourceNode The MPSNNImageNode representing the source MPSImage for the filter
* @return A new MPSNNFilter node for a MPSCNNSoftMax kernel.
*/
|
Init a node representing an autoreleased MPSCNNSoftMax kernel
|
nodeWithSource
|
{
"repo_name": "multi-os-engine/moe-core",
"path": "moe.apple/moe.platform.ios/src/main/java/apple/metalperformanceshaders/MPSCNNSoftMaxNode.java",
"license": "apache-2.0",
"size": 5054
}
|
[
"org.moe.natj.objc.ann.Selector"
] |
import org.moe.natj.objc.ann.Selector;
|
import org.moe.natj.objc.ann.*;
|
[
"org.moe.natj"
] |
org.moe.natj;
| 42,136
|
protected void runSQL(String sql) throws SystemException {
try {
DataSource dataSource = advancedContentPersistence.getDataSource();
SqlUpdate sqlUpdate = SqlUpdateFactoryUtil.getSqlUpdate(dataSource,
sql, new int[0]);
sqlUpdate.update();
}
catch (Exception e) {
throw new SystemException(e);
}
}
@BeanReference(type = org.kisti.edison.content.service.AdvancedContentLocalService.class)
protected org.kisti.edison.content.service.AdvancedContentLocalService advancedContentLocalService;
@BeanReference(type = org.kisti.edison.content.service.AdvancedContentService.class)
protected org.kisti.edison.content.service.AdvancedContentService advancedContentService;
@BeanReference(type = AdvancedContentPersistence.class)
protected AdvancedContentPersistence advancedContentPersistence;
@BeanReference(type = org.kisti.edison.content.service.GeneralContentLocalService.class)
protected org.kisti.edison.content.service.GeneralContentLocalService generalContentLocalService;
@BeanReference(type = org.kisti.edison.content.service.GeneralContentService.class)
protected org.kisti.edison.content.service.GeneralContentService generalContentService;
@BeanReference(type = GeneralContentPersistence.class)
protected GeneralContentPersistence generalContentPersistence;
@BeanReference(type = GeneralContentFinder.class)
protected GeneralContentFinder generalContentFinder;
@BeanReference(type = org.kisti.edison.content.service.OrgImgContentLocalService.class)
protected org.kisti.edison.content.service.OrgImgContentLocalService orgImgContentLocalService;
@BeanReference(type = org.kisti.edison.content.service.OrgImgContentService.class)
protected org.kisti.edison.content.service.OrgImgContentService orgImgContentService;
@BeanReference(type = OrgImgContentPersistence.class)
protected OrgImgContentPersistence orgImgContentPersistence;
@BeanReference(type = OrgImgContentFinder.class)
protected OrgImgContentFinder orgImgContentFinder;
@BeanReference(type = com.liferay.counter.service.CounterLocalService.class)
protected com.liferay.counter.service.CounterLocalService counterLocalService;
@BeanReference(type = com.liferay.portal.service.ResourceLocalService.class)
protected com.liferay.portal.service.ResourceLocalService resourceLocalService;
@BeanReference(type = com.liferay.portal.service.UserLocalService.class)
protected com.liferay.portal.service.UserLocalService userLocalService;
@BeanReference(type = com.liferay.portal.service.UserService.class)
protected com.liferay.portal.service.UserService userService;
@BeanReference(type = UserPersistence.class)
protected UserPersistence userPersistence;
private String _beanIdentifier;
private ClassLoader _classLoader;
private AdvancedContentServiceClpInvoker _clpInvoker = new AdvancedContentServiceClpInvoker();
|
void function(String sql) throws SystemException { try { DataSource dataSource = advancedContentPersistence.getDataSource(); SqlUpdate sqlUpdate = SqlUpdateFactoryUtil.getSqlUpdate(dataSource, sql, new int[0]); sqlUpdate.update(); } catch (Exception e) { throw new SystemException(e); } } @BeanReference(type = org.kisti.edison.content.service.AdvancedContentLocalService.class) protected org.kisti.edison.content.service.AdvancedContentLocalService advancedContentLocalService; @BeanReference(type = org.kisti.edison.content.service.AdvancedContentService.class) protected org.kisti.edison.content.service.AdvancedContentService advancedContentService; @BeanReference(type = AdvancedContentPersistence.class) protected AdvancedContentPersistence advancedContentPersistence; @BeanReference(type = org.kisti.edison.content.service.GeneralContentLocalService.class) protected org.kisti.edison.content.service.GeneralContentLocalService generalContentLocalService; @BeanReference(type = org.kisti.edison.content.service.GeneralContentService.class) protected org.kisti.edison.content.service.GeneralContentService generalContentService; @BeanReference(type = GeneralContentPersistence.class) protected GeneralContentPersistence generalContentPersistence; @BeanReference(type = GeneralContentFinder.class) protected GeneralContentFinder generalContentFinder; @BeanReference(type = org.kisti.edison.content.service.OrgImgContentLocalService.class) protected org.kisti.edison.content.service.OrgImgContentLocalService orgImgContentLocalService; @BeanReference(type = org.kisti.edison.content.service.OrgImgContentService.class) protected org.kisti.edison.content.service.OrgImgContentService orgImgContentService; @BeanReference(type = OrgImgContentPersistence.class) protected OrgImgContentPersistence orgImgContentPersistence; @BeanReference(type = OrgImgContentFinder.class) protected OrgImgContentFinder orgImgContentFinder; @BeanReference(type = com.liferay.counter.service.CounterLocalService.class) protected com.liferay.counter.service.CounterLocalService counterLocalService; @BeanReference(type = com.liferay.portal.service.ResourceLocalService.class) protected com.liferay.portal.service.ResourceLocalService resourceLocalService; @BeanReference(type = com.liferay.portal.service.UserLocalService.class) protected com.liferay.portal.service.UserLocalService userLocalService; @BeanReference(type = com.liferay.portal.service.UserService.class) protected com.liferay.portal.service.UserService userService; @BeanReference(type = UserPersistence.class) protected UserPersistence userPersistence; private String _beanIdentifier; private ClassLoader _classLoader; private AdvancedContentServiceClpInvoker _clpInvoker = new AdvancedContentServiceClpInvoker();
|
/**
* Performs an SQL query.
*
* @param sql the sql query
*/
|
Performs an SQL query
|
runSQL
|
{
"repo_name": "queza85/edison",
"path": "edison-portal-framework/edison-content-2016-portlet/docroot/WEB-INF/src/org/kisti/edison/content/service/base/AdvancedContentServiceBaseImpl.java",
"license": "gpl-3.0",
"size": 14526
}
|
[
"com.liferay.portal.kernel.bean.BeanReference",
"com.liferay.portal.kernel.dao.jdbc.SqlUpdate",
"com.liferay.portal.kernel.dao.jdbc.SqlUpdateFactoryUtil",
"com.liferay.portal.kernel.exception.SystemException",
"com.liferay.portal.service.persistence.UserPersistence",
"javax.sql.DataSource",
"org.kisti.edison.content.service.AdvancedContentService",
"org.kisti.edison.content.service.persistence.AdvancedContentPersistence",
"org.kisti.edison.content.service.persistence.GeneralContentFinder",
"org.kisti.edison.content.service.persistence.GeneralContentPersistence",
"org.kisti.edison.content.service.persistence.OrgImgContentFinder",
"org.kisti.edison.content.service.persistence.OrgImgContentPersistence"
] |
import com.liferay.portal.kernel.bean.BeanReference; import com.liferay.portal.kernel.dao.jdbc.SqlUpdate; import com.liferay.portal.kernel.dao.jdbc.SqlUpdateFactoryUtil; import com.liferay.portal.kernel.exception.SystemException; import com.liferay.portal.service.persistence.UserPersistence; import javax.sql.DataSource; import org.kisti.edison.content.service.AdvancedContentService; import org.kisti.edison.content.service.persistence.AdvancedContentPersistence; import org.kisti.edison.content.service.persistence.GeneralContentFinder; import org.kisti.edison.content.service.persistence.GeneralContentPersistence; import org.kisti.edison.content.service.persistence.OrgImgContentFinder; import org.kisti.edison.content.service.persistence.OrgImgContentPersistence;
|
import com.liferay.portal.kernel.bean.*; import com.liferay.portal.kernel.dao.jdbc.*; import com.liferay.portal.kernel.exception.*; import com.liferay.portal.service.persistence.*; import javax.sql.*; import org.kisti.edison.content.service.*; import org.kisti.edison.content.service.persistence.*;
|
[
"com.liferay.portal",
"javax.sql",
"org.kisti.edison"
] |
com.liferay.portal; javax.sql; org.kisti.edison;
| 2,205,389
|
private void addToWrapper(EntityMetadata entityMetadata, AttributeWrapper wrapper, Object embeddedObject,
Attribute attrib, Attribute embeddedAttrib)
{
byte[] value = PropertyAccessorHelper.get(embeddedObject, (Field) attrib.getJavaMember());
byte[] name;
if (value != null)
{
if (embeddedAttrib == null)
{
name = getEncodedBytes(((AbstractAttribute) attrib).getJPAColumnName());
}
else
{
name = getEncodedBytes(getHashKey(embeddedAttrib.getName(),
((AbstractAttribute) attrib).getJPAColumnName()));
}
// add column name as key and value as value
wrapper.addColumn(name, value);
// // {tablename:columnname,hashcode} for value
// selective indexing.
if (entityMetadata.getIndexProperties().containsKey(((AbstractAttribute) attrib).getJPAColumnName()))
{
String valueAsStr = PropertyAccessorHelper.getString(embeddedObject, (Field) attrib.getJavaMember());
wrapper.addIndex(
getHashKey(entityMetadata.getTableName(), ((AbstractAttribute) attrib).getJPAColumnName()),
getDouble(valueAsStr));
wrapper.addIndex(
getHashKey(entityMetadata.getTableName(),
getHashKey(((AbstractAttribute) attrib).getJPAColumnName(), valueAsStr)),
getDouble(valueAsStr));
}
}
}
|
void function(EntityMetadata entityMetadata, AttributeWrapper wrapper, Object embeddedObject, Attribute attrib, Attribute embeddedAttrib) { byte[] value = PropertyAccessorHelper.get(embeddedObject, (Field) attrib.getJavaMember()); byte[] name; if (value != null) { if (embeddedAttrib == null) { name = getEncodedBytes(((AbstractAttribute) attrib).getJPAColumnName()); } else { name = getEncodedBytes(getHashKey(embeddedAttrib.getName(), ((AbstractAttribute) attrib).getJPAColumnName())); } wrapper.addColumn(name, value); if (entityMetadata.getIndexProperties().containsKey(((AbstractAttribute) attrib).getJPAColumnName())) { String valueAsStr = PropertyAccessorHelper.getString(embeddedObject, (Field) attrib.getJavaMember()); wrapper.addIndex( getHashKey(entityMetadata.getTableName(), ((AbstractAttribute) attrib).getJPAColumnName()), getDouble(valueAsStr)); wrapper.addIndex( getHashKey(entityMetadata.getTableName(), getHashKey(((AbstractAttribute) attrib).getJPAColumnName(), valueAsStr)), getDouble(valueAsStr)); } } }
|
/**
* Wraps entity attributes into redis format byte[].
*
* @param entityMetadata
* the entity metadata
* @param wrapper
* the wrapper
* @param embeddedObject
* the embedded object
* @param attrib
* the attrib
* @param embeddedAttrib
* the embedded attrib
*/
|
Wraps entity attributes into redis format byte[]
|
addToWrapper
|
{
"repo_name": "ravisund/Kundera",
"path": "src/kundera-redis/src/main/java/com/impetus/client/redis/RedisClient.java",
"license": "apache-2.0",
"size": 75957
}
|
[
"com.impetus.kundera.metadata.model.EntityMetadata",
"com.impetus.kundera.metadata.model.attributes.AbstractAttribute",
"com.impetus.kundera.property.PropertyAccessorHelper",
"java.lang.reflect.Field",
"javax.persistence.metamodel.Attribute"
] |
import com.impetus.kundera.metadata.model.EntityMetadata; import com.impetus.kundera.metadata.model.attributes.AbstractAttribute; import com.impetus.kundera.property.PropertyAccessorHelper; import java.lang.reflect.Field; import javax.persistence.metamodel.Attribute;
|
import com.impetus.kundera.metadata.model.*; import com.impetus.kundera.metadata.model.attributes.*; import com.impetus.kundera.property.*; import java.lang.reflect.*; import javax.persistence.metamodel.*;
|
[
"com.impetus.kundera",
"java.lang",
"javax.persistence"
] |
com.impetus.kundera; java.lang; javax.persistence;
| 339,754
|
@Nonnull
static CommandData fromData(@Nonnull DataObject object)
{
Checks.notNull(object, "DataObject");
String name = object.getString("name");
Command.Type commandType = Command.Type.fromId(object.getInt("type", 1));
if (commandType != Command.Type.SLASH)
return new CommandDataImpl(commandType, name);
return SlashCommandData.fromData(object);
}
|
static CommandData fromData(@Nonnull DataObject object) { Checks.notNull(object, STR); String name = object.getString("name"); Command.Type commandType = Command.Type.fromId(object.getInt("type", 1)); if (commandType != Command.Type.SLASH) return new CommandDataImpl(commandType, name); return SlashCommandData.fromData(object); }
|
/**
 * Parses the provided serialization back into a CommandData instance.
* <br>This is the reverse function for {@link CommandData#toData()}.
*
* @param object
* The serialized {@link DataObject} representing the command
*
* @throws net.dv8tion.jda.api.exceptions.ParsingException
* If the serialized object is missing required fields
* @throws IllegalArgumentException
* If any of the values are failing the respective checks such as length
*
* @return The parsed CommandData instance, which can be further configured through setters
*
* @see SlashCommandData#fromData(DataObject)
* @see Commands#fromList(Collection)
*/
|
Parses the provided serialization back into a CommandData instance. This is the reverse function for <code>CommandData#toData()</code>
|
fromData
|
{
"repo_name": "DV8FromTheWorld/JDA",
"path": "src/main/java/net/dv8tion/jda/api/interactions/commands/build/CommandData.java",
"license": "apache-2.0",
"size": 5009
}
|
[
"javax.annotation.Nonnull",
"net.dv8tion.jda.api.interactions.commands.Command",
"net.dv8tion.jda.api.utils.data.DataObject",
"net.dv8tion.jda.internal.interactions.CommandDataImpl",
"net.dv8tion.jda.internal.utils.Checks"
] |
import javax.annotation.Nonnull; import net.dv8tion.jda.api.interactions.commands.Command; import net.dv8tion.jda.api.utils.data.DataObject; import net.dv8tion.jda.internal.interactions.CommandDataImpl; import net.dv8tion.jda.internal.utils.Checks;
|
import javax.annotation.*; import net.dv8tion.jda.api.interactions.commands.*; import net.dv8tion.jda.api.utils.data.*; import net.dv8tion.jda.internal.interactions.*; import net.dv8tion.jda.internal.utils.*;
|
[
"javax.annotation",
"net.dv8tion.jda"
] |
javax.annotation; net.dv8tion.jda;
| 1,278,489
|
public int readLabel() throws IOException {
if (labels == null) {
throw new IllegalStateException("labels file not initialized.");
}
return labels.readLabel();
}
|
int function() throws IOException { if (labels == null) { throw new IllegalStateException(STR); } return labels.readLabel(); }
|
/**
* Reads the current label.
*
* @return int
* @throws IOException
*/
|
Reads the current label
|
readLabel
|
{
"repo_name": "kinbod/deeplearning4j",
"path": "deeplearning4j-core/src/main/java/org/deeplearning4j/datasets/mnist/MnistManager.java",
"license": "apache-2.0",
"size": 5680
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 60,999
|
private static Tensor readRemoveUpdateTensor(TokenBuffer buffer, TensorType sparseType, TensorType originalType) {
Tensor.Builder builder = null;
expectObjectStart(buffer.currentToken());
int initNesting = buffer.nesting();
for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) {
if (TENSOR_ADDRESSES.equals(buffer.currentName())) {
expectArrayStart(buffer.currentToken());
int nesting = buffer.nesting();
for (buffer.next(); buffer.nesting() >= nesting; buffer.next()) {
if (builder == null) {
var typeAndAddress = readFirstTensorAddress(buffer, sparseType, originalType);
builder = Tensor.Builder.of(typeAndAddress.getFirst());
builder.cell(typeAndAddress.getSecond(), 1.0);
} else {
builder.cell(readTensorAddress(buffer, builder.type(), originalType), 1.0);
}
}
expectCompositeEnd(buffer.currentToken());
}
}
expectObjectEnd(buffer.currentToken());
return (builder != null) ? builder.build() : Tensor.Builder.of(sparseType).build();
}
|
static Tensor function(TokenBuffer buffer, TensorType sparseType, TensorType originalType) { Tensor.Builder builder = null; expectObjectStart(buffer.currentToken()); int initNesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= initNesting; buffer.next()) { if (TENSOR_ADDRESSES.equals(buffer.currentName())) { expectArrayStart(buffer.currentToken()); int nesting = buffer.nesting(); for (buffer.next(); buffer.nesting() >= nesting; buffer.next()) { if (builder == null) { var typeAndAddress = readFirstTensorAddress(buffer, sparseType, originalType); builder = Tensor.Builder.of(typeAndAddress.getFirst()); builder.cell(typeAndAddress.getSecond(), 1.0); } else { builder.cell(readTensorAddress(buffer, builder.type(), originalType), 1.0); } } expectCompositeEnd(buffer.currentToken()); } } expectObjectEnd(buffer.currentToken()); return (builder != null) ? builder.build() : Tensor.Builder.of(sparseType).build(); }
|
/**
* Reads all addresses in buffer and returns a tensor where addresses have cell value 1.0
*/
|
Reads all addresses in buffer and returns a tensor where addresses have cell value 1.0
|
readRemoveUpdateTensor
|
{
"repo_name": "vespa-engine/vespa",
"path": "document/src/main/java/com/yahoo/document/json/readers/TensorRemoveUpdateReader.java",
"license": "apache-2.0",
"size": 6531
}
|
[
"com.yahoo.document.json.TokenBuffer",
"com.yahoo.document.json.readers.JsonParserHelpers",
"com.yahoo.tensor.Tensor",
"com.yahoo.tensor.TensorType"
] |
import com.yahoo.document.json.TokenBuffer; import com.yahoo.document.json.readers.JsonParserHelpers; import com.yahoo.tensor.Tensor; import com.yahoo.tensor.TensorType;
|
import com.yahoo.document.json.*; import com.yahoo.document.json.readers.*; import com.yahoo.tensor.*;
|
[
"com.yahoo.document",
"com.yahoo.tensor"
] |
com.yahoo.document; com.yahoo.tensor;
| 1,567,421
|
public Cancellable deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener<DeleteJobResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::deleteJob,
options,
DeleteJobResponse::fromXContent,
listener,
Collections.emptySet());
}
|
Cancellable function(DeleteJobRequest request, RequestOptions options, ActionListener<DeleteJobResponse> listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, MLRequestConverters::deleteJob, options, DeleteJobResponse::fromXContent, listener, Collections.emptySet()); }
|
/**
* Deletes the given Machine Learning Job asynchronously and notifies the listener on completion
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html">ML Delete Job documentation</a>
*
* @param request The request to delete the job
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
|
Deletes the given Machine Learning Job asynchronously and notifies the listener on completion. For additional info, see the ML Delete Job documentation
|
deleteJobAsync
|
{
"repo_name": "HonzaKral/elasticsearch",
"path": "client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java",
"license": "apache-2.0",
"size": 130306
}
|
[
"java.util.Collections",
"org.elasticsearch.action.ActionListener",
"org.elasticsearch.client.ml.DeleteJobRequest",
"org.elasticsearch.client.ml.DeleteJobResponse"
] |
import java.util.Collections; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.ml.DeleteJobRequest; import org.elasticsearch.client.ml.DeleteJobResponse;
|
import java.util.*; import org.elasticsearch.action.*; import org.elasticsearch.client.ml.*;
|
[
"java.util",
"org.elasticsearch.action",
"org.elasticsearch.client"
] |
java.util; org.elasticsearch.action; org.elasticsearch.client;
| 1,418,679
|
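A possible call site for the deleteJobAsync entry above is sketched below. The localhost:9200 endpoint and the "total-requests" job id are placeholders, the anonymous ActionListener receives either the parsed DeleteJobResponse or the failure, and while the sketch compiles against the client classes listed in the entry, running it needs a live Elasticsearch cluster:

import org.apache.http.HttpHost;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;

public class DeleteJobAsyncSketch {
    public static void main(String[] args) {
        RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")));

        DeleteJobRequest request = new DeleteJobRequest("total-requests"); // placeholder job id

        client.machineLearning().deleteJobAsync(request, RequestOptions.DEFAULT,
                new ActionListener<DeleteJobResponse>() {
                    @Override
                    public void onResponse(DeleteJobResponse response) {
                        System.out.println("delete job request completed");
                    }

                    @Override
                    public void onFailure(Exception e) {
                        e.printStackTrace();
                    }
                });
        // The client would normally be closed once all async callbacks have completed.
    }
}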
protected synchronized void commit(boolean optimize) throws IOException {
if (indexReader != null) {
log.debug("committing IndexReader.");
indexReader.flush();
}
if (indexWriter != null) {
log.debug("committing IndexWriter.");
indexWriter.commit();
}
// optimize if requested
if (optimize) {
IndexWriter writer = getIndexWriter();
writer.forceMerge(1, true);
writer.close();
indexWriter = null;
}
}
|
synchronized void function(boolean optimize) throws IOException { if (indexReader != null) { log.debug(STR); indexReader.flush(); } if (indexWriter != null) { log.debug(STR); indexWriter.commit(); } if (optimize) { IndexWriter writer = getIndexWriter(); writer.forceMerge(1, true); writer.close(); indexWriter = null; } }
|
/**
* Commits all pending changes to the underlying <code>Directory</code>.
*
* @param optimize if <code>true</code> the index is optimized after the
* commit.
* @throws IOException if an error occurs while commiting changes.
*/
|
Commits all pending changes to the underlying <code>Directory</code>
|
commit
|
{
"repo_name": "sdmcraft/jackrabbit",
"path": "jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/AbstractIndex.java",
"license": "apache-2.0",
"size": 23245
}
|
[
"java.io.IOException",
"org.apache.lucene.index.IndexWriter"
] |
import java.io.IOException; import org.apache.lucene.index.IndexWriter;
|
import java.io.*; import org.apache.lucene.index.*;
|
[
"java.io",
"org.apache.lucene"
] |
java.io; org.apache.lucene;
| 9,171
|
public GeocentricCRS createGeocentricCRS(final String code)
throws NoSuchAuthorityCodeException, FactoryException {
final CoordinateReferenceSystem crs = createCoordinateReferenceSystem(code);
try {
return (GeocentricCRS) crs;
} catch (ClassCastException exception) {
throw noSuchAuthorityCode(GeocentricCRS.class, code, exception);
}
}
|
GeocentricCRS function(final String code) throws NoSuchAuthorityCodeException, FactoryException { final CoordinateReferenceSystem crs = createCoordinateReferenceSystem(code); try { return (GeocentricCRS) crs; } catch (ClassCastException exception) { throw noSuchAuthorityCode(GeocentricCRS.class, code, exception); } }
|
/**
* Returns a {@linkplain GeocentricCRS geocentric coordinate reference system} from a code.
*
* @param code Value allocated by authority.
* @return The coordinate reference system for the given code.
* @throws NoSuchAuthorityCodeException if the specified {@code code} was not found.
* @throws FactoryException if the object creation failed.
* @see #createGeodeticDatum
*/
|
Returns a GeocentricCRS geocentric coordinate reference system from a code
|
createGeocentricCRS
|
{
"repo_name": "geotools/geotools",
"path": "modules/library/referencing/src/main/java/org/geotools/referencing/factory/AbstractAuthorityFactory.java",
"license": "lgpl-2.1",
"size": 41575
}
|
[
"org.opengis.referencing.FactoryException",
"org.opengis.referencing.NoSuchAuthorityCodeException",
"org.opengis.referencing.crs.CoordinateReferenceSystem",
"org.opengis.referencing.crs.GeocentricCRS"
] |
import org.opengis.referencing.FactoryException; import org.opengis.referencing.NoSuchAuthorityCodeException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.crs.GeocentricCRS;
|
import org.opengis.referencing.*; import org.opengis.referencing.crs.*;
|
[
"org.opengis.referencing"
] |
org.opengis.referencing;
| 2,550,510
|
List<DivertConfiguration> getDivertConfigurations();
|
List<DivertConfiguration> getDivertConfigurations();
|
/**
* Returns the diverts configured for this server.
*/
|
Returns the diverts configured for this server
|
getDivertConfigurations
|
{
"repo_name": "gaohoward/activemq-artemis",
"path": "artemis-server/src/main/java/org/apache/activemq/artemis/core/config/Configuration.java",
"license": "apache-2.0",
"size": 41576
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,085,716
|
public static <R> RunnableScheduledFuture<?> schedule(CoreAction<R> action, long delay, TimeUnit timeUnit)
{
return component.core().schedule(action, delay, timeUnit);
}
|
static <R> RunnableScheduledFuture<?> function(CoreAction<R> action, long delay, TimeUnit timeUnit) { return component.core().schedule(action, delay, timeUnit); }
|
/**
* Schedule an action on a certain delay from now.
* @param action the action to execute.
* @param delay the delay after which the action should be executed.
* @param timeUnit time unit in which the delay is specified.
* @return returns RunnableScheduleFuture object for keeping track of the result.
*/
|
Schedule an action on a certain delay from now
|
schedule
|
{
"repo_name": "tieniber/FileDocumentLink",
"path": "test/javasource/com/mendix/core/Core.java",
"license": "apache-2.0",
"size": 77610
}
|
[
"com.mendix.core.actionmanagement.CoreAction",
"java.util.concurrent.RunnableScheduledFuture",
"java.util.concurrent.TimeUnit"
] |
import com.mendix.core.actionmanagement.CoreAction; import java.util.concurrent.RunnableScheduledFuture; import java.util.concurrent.TimeUnit;
|
import com.mendix.core.actionmanagement.*; import java.util.concurrent.*;
|
[
"com.mendix.core",
"java.util"
] |
com.mendix.core; java.util;
| 2,264,422
|
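Core.schedule above defers an action by a delay expressed in a TimeUnit and hands back a future for the result. The JDK's ScheduledExecutorService follows the same shape, so a tiny sketch of that pattern (not the Mendix API itself) looks like this:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

public class DelayedActionSketch {
    public static void main(String[] args) throws Exception {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        // Schedule a value-returning task 2 seconds from now, analogous to
        // Core.schedule(action, delay, timeUnit) returning a future for the result.
        ScheduledFuture<String> future = scheduler.schedule(
                () -> "action result", 2, TimeUnit.SECONDS);
        System.out.println(future.get());  // blocks until the delayed task completes
        scheduler.shutdown();
    }
}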
Comparator<String> getPatternComparator(String path);
|
Comparator<String> getPatternComparator(String path);
|
/**
* Given a full path, returns a {@link Comparator} suitable for sorting
* patterns in order of explicitness for that path.
* <p>
* The full algorithm used depends on the underlying implementation, but
* generally, the returned {@code Comparator} will
* {@linkplain java.util.Collections#sort(java.util.List, java.util.Comparator)
* sort} a list so that more specific patterns come before generic patterns.
*
* @param path
* the full path to use for comparison
* @return a comparator capable of sorting patterns in order of explicitness
*/
|
Given a full path, returns a <code>Comparator</code> suitable for sorting patterns in order of explicitness for that path. The full algorithm used depends on the underlying implementation, but generally, the returned Comparator will sort a list (as with java.util.Collections#sort(java.util.List, java.util.Comparator)) so that more specific patterns come before generic patterns
|
getPatternComparator
|
{
"repo_name": "HotswapProjects/HotswapAgent",
"path": "hotswap-agent-core/src/main/java/org/hotswap/agent/util/spring/path/PathMatcher.java",
"license": "gpl-2.0",
"size": 6287
}
|
[
"java.util.Comparator"
] |
import java.util.Comparator;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,402,120
|
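getPatternComparator above promises an ordering where more specific patterns sort before generic ones. The real AntPathMatcher-style algorithm weighs wildcards, path variables and length in more detail; the toy comparator below only counts wildcard characters and is meant purely to make the "explicitness first" idea concrete, not to reproduce the library's ordering:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class PatternExplicitnessSketch {
    public static void main(String[] args) {
        List<String> patterns = new ArrayList<>(List.of("/**", "/hotels/*", "/hotels/new", "/hotels/{id}"));
        // Toy stand-in for getPatternComparator("/hotels/new"): fewer wildcard characters
        // first, longer pattern as a tie-breaker, so explicit patterns lead the list.
        Comparator<String> byExplicitness = Comparator
                .comparingLong((String p) -> p.chars().filter(c -> c == '*' || c == '{').count())
                .thenComparing(Comparator.comparingInt(String::length).reversed());
        patterns.sort(byExplicitness);
        System.out.println(patterns); // [/hotels/new, /hotels/{id}, /hotels/*, /**]
    }
}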
public boolean duplicateEntry(int nr) {
// first of all, we duplicate all authors and keywords frequencies from the existing entry.
// therefore, we first retrieve all author-index-numbers from that entry
changeFrequencies(nr, 1);
// retrieve entry that should be duplicated
Element oldzettel = retrieveElement(zknFile, nr);
// create new zettel
Element zettel = new Element(ELEMENT_ZETTEL);
// check whether we have any empty elements in between where we can insert the new entry
int emptypos = retrieveFirstEmptyEntry();
// if we have any empty elements...
if (emptypos != -1 && settings.getInsertNewEntryAtEmpty()) {
// retrieve empty element
zettel = retrieveElement(zknFile, emptypos);
// and remove former content, so we can add new content
zettel.removeContent();
}
try {
setZettelID(zettel);
//
// add title
//
// create child element with title information
Element t = new Element(ELEMENT_TITLE);
// and add it to the zettel-element
zettel.addContent(t);
// set value of the child element
t.setText(oldzettel.getChild(ELEMENT_TITLE).getText());
//
// add content
//
// create child element with content information
Element c = new Element(ELEMENT_CONTENT);
// and add it to the zettel-element
zettel.addContent(c);
// set value of the content element
c.setText(oldzettel.getChild(ELEMENT_CONTENT).getText());
//
// add author
//
// create child element with author information
Element a = new Element(ELEMENT_AUTHOR);
// and add it to the zettel-element
zettel.addContent(a);
// set value of author element
a.setText(oldzettel.getChild(ELEMENT_AUTHOR).getText());
//
// add keywords
//
// create child element with keyword information
Element k = new Element(ELEMENT_KEYWORD);
// and add it to the zettel-element
zettel.addContent(k);
// store keyword index numbers
k.setText(oldzettel.getChild(ELEMENT_KEYWORD).getText());
//
// now comes the manual links to other entries
//
Element m = new Element(ELEMENT_MANLINKS);
zettel.addContent(m);
m.setText("");
//
// add hyperlinks
//
// create child element with link information
Element h = new Element(ELEMENT_ATTACHMENTS);
// and add it to the zettel-element
zettel.addContent(h);
// add each hyperlink. therefor, iterate the array
List<Element> links = oldzettel.getChild(ELEMENT_ATTACHMENTS).getChildren();
Iterator<Element> i = links.iterator();
while (i.hasNext()) {
// create a new subchuld-element
Element sublink = new Element(ELEMENT_ATTCHILD);
Element le = i.next();
// and add the link-string from the array
sublink.setText(le.getText());
h.addContent(sublink);
}
//
// add remarks
//
// create child element with content information
Element r = new Element(ELEMENT_REMARKS);
// and add it to the zettel-element
zettel.addContent(r);
// set value of the content element
r.setText(oldzettel.getChild(ELEMENT_REMARKS).getText());
//
// add timestamp
//
// set creation timestamp, but set no text for edit timestamp
// since the entry is not edited
setTimestamp(zettel, Tools.getTimeStamp(), "");
//
// now comes the luhmann number
//
Element l = new Element(ELEMENT_TRAILS);
zettel.addContent(l);
l.setText(oldzettel.getChild(ELEMENT_TRAILS).getText());
//
// complete datafile
//
// if we have any empty elements, go on here
if (emptypos != -1 && settings.getInsertNewEntryAtEmpty()) {
// return the empty-position, which is now filled with the new author-value
zettelPos = emptypos;
} else {
// finally, add the whole element to the data file
zknFile.getRootElement().addContent(zettel);
// set the zettel-position to the new entry
zettelPos = getCount(ZKNCOUNT);
}
// titles have to be updated.
setTitlelistUpToDate(false);
// set modified state
setModified(true);
} catch (IllegalAddException | IllegalDataException ex) {
Constants.zknlogger.log(Level.SEVERE, ex.getLocalizedMessage());
return false;
}
return true;
}
/**
* This function retrieves an element of a xml document at a given position.
* used for other methods like getAuthor or getKeyword.<br><br>
* <b>Caution!</b> The position {@code pos} is a value from <b>1</b> to
* {@link #getCount(int) getCount()} - in contrary to usual array handling
* where the range is from 0 to (size-1).
*
* @param doc the xml document where to look for elements. use following
* parameters:<br> - {@link #authorFile authorFile}<br> -
* {@link #keywordFile keywordFile}<br> - {@link #zknFile zknFile}
* @param pos the position of the element. must be a value from <b>1</b> to
* {@link #getCount(int) getCount()}.
* @return the element if a match was found, otherwise {@code null}
|
boolean function(int nr) { changeFrequencies(nr, 1); Element oldzettel = retrieveElement(zknFile, nr); Element zettel = new Element(ELEMENT_ZETTEL); int emptypos = retrieveFirstEmptyEntry(); if (emptypos != -1 && settings.getInsertNewEntryAtEmpty()) { zettel = retrieveElement(zknFile, emptypos); zettel.removeContent(); } try { setZettelID(zettel); Element t = new Element(ELEMENT_TITLE); zettel.addContent(t); t.setText(oldzettel.getChild(ELEMENT_TITLE).getText()); Element c = new Element(ELEMENT_CONTENT); zettel.addContent(c); c.setText(oldzettel.getChild(ELEMENT_CONTENT).getText()); Element a = new Element(ELEMENT_AUTHOR); zettel.addContent(a); a.setText(oldzettel.getChild(ELEMENT_AUTHOR).getText()); Element k = new Element(ELEMENT_KEYWORD); zettel.addContent(k); k.setText(oldzettel.getChild(ELEMENT_KEYWORD).getText()); zettel.addContent(m); m.setText(STR"); zettel.addContent(l); l.setText(oldzettel.getChild(ELEMENT_TRAILS).getText()); if (emptypos != -1 && settings.getInsertNewEntryAtEmpty()) { zettelPos = emptypos; } else { zknFile.getRootElement().addContent(zettel); zettelPos = getCount(ZKNCOUNT); } setTitlelistUpToDate(false); setModified(true); } catch (IllegalAddException IllegalDataException ex) { Constants.zknlogger.log(Level.SEVERE, ex.getLocalizedMessage()); return false; } return true; } /** * This function retrieves an element of a xml document at a given position. * used for other methods like getAuthor or getKeyword.<br><br> * <b>Caution!</b> The position {@code pos} is a value from <b>1</b> to * {@link #getCount(int) getCount()} - in contrary to usual array handling * where the range is from 0 to (size-1). * * @param doc the xml document where to look for elements. use following * parameters:<br> - {@link #authorFile authorFile}<br> - * {@link #keywordFile keywordFile}<br> - {@link #zknFile zknFile} * @param pos the position of the element. must be a value from <b>1</b> to * {@link #getCount(int) getCount()}. * @return the element if a match was found, otherwise {@code null}
|
/**
* This method duplicates an entry and inserts it at the end or the next
* empty place in the data file
*
* @param nr the number of the entry that should be duplicated
* @return
*/
|
This method duplicates an entry and inserts it at the end or the next empty place in the data file
|
duplicateEntry
|
{
"repo_name": "sjPlot/Zettelkasten",
"path": "src/main/java/de/danielluedecke/zettelkasten/database/Daten.java",
"license": "gpl-3.0",
"size": 336724
}
|
[
"de.danielluedecke.zettelkasten.util.Constants",
"java.util.logging.Level",
"org.jdom2.Element",
"org.jdom2.IllegalAddException",
"org.jdom2.IllegalDataException"
] |
import de.danielluedecke.zettelkasten.util.Constants; import java.util.logging.Level; import org.jdom2.Element; import org.jdom2.IllegalAddException; import org.jdom2.IllegalDataException;
|
import de.danielluedecke.zettelkasten.util.*; import java.util.logging.*; import org.jdom2.*;
|
[
"de.danielluedecke.zettelkasten",
"java.util",
"org.jdom2"
] |
de.danielluedecke.zettelkasten; java.util; org.jdom2;
| 181,370
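A condensed sketch of the JDOM copy pattern this method relies on: each text-bearing child of the old entry is recreated on the duplicate. The element names below are illustrative stand-ins for the ELEMENT_* constants used in the record.

import org.jdom2.Element;

public class ZettelCopySketch {
    static Element duplicate(Element oldZettel) {
        Element copy = new Element("zettel");
        for (String name : new String[] {"title", "content", "author", "keywords", "luhmann"}) {
            Element child = new Element(name);
            Element source = oldZettel.getChild(name);
            // copy the plain-text payload of each child, tolerating missing children
            child.setText(source != null ? source.getText() : "");
            copy.addContent(child);
        }
        return copy;
    }
}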
|
public List<MPDOutput> getOutputs() throws IOException, MPDException {
final List<MPDOutput> result = new LinkedList<>();
final List<String> response = mConnection.sendCommand(MPDCommand.MPD_CMD_OUTPUTS);
final LinkedList<String> lineCache = new LinkedList<>();
for (final String line : response) {
if (line.startsWith(MPDOutput.CMD_ID)) {
if (!lineCache.isEmpty()) {
result.add(MPDOutput.build(lineCache));
lineCache.clear();
}
}
lineCache.add(line);
}
if (!lineCache.isEmpty()) {
result.add(MPDOutput.build(lineCache));
}
return result;
}
|
List<MPDOutput> function() throws IOException, MPDException { final List<MPDOutput> result = new LinkedList<>(); final List<String> response = mConnection.sendCommand(MPDCommand.MPD_CMD_OUTPUTS); final LinkedList<String> lineCache = new LinkedList<>(); for (final String line : response) { if (line.startsWith(MPDOutput.CMD_ID)) { if (!lineCache.isEmpty()) { result.add(MPDOutput.build(lineCache)); lineCache.clear(); } } lineCache.add(line); } if (!lineCache.isEmpty()) { result.add(MPDOutput.build(lineCache)); } return result; }
|
/**
* Returns the available outputs
*
* @return List of available outputs
* @throws IOException Thrown upon a communication error with the server.
* @throws MPDException Thrown if an error occurs as a result of command execution.
*/
|
Returns the available outputs
|
getOutputs
|
{
"repo_name": "abarisain/dmix",
"path": "JMPDComm/src/main/java/org/a0z/mpd/MPD.java",
"license": "apache-2.0",
"size": 70703
}
|
[
"java.io.IOException",
"java.util.LinkedList",
"java.util.List",
"org.a0z.mpd.exception.MPDException"
] |
import java.io.IOException; import java.util.LinkedList; import java.util.List; import org.a0z.mpd.exception.MPDException;
|
import java.io.*; import java.util.*; import org.a0z.mpd.exception.*;
|
[
"java.io",
"java.util",
"org.a0z.mpd"
] |
java.io; java.util; org.a0z.mpd;
| 1,281,897
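A standalone sketch of the grouping logic in getOutputs: lines are buffered until the next block marker, then flushed as one block. The "outputid" prefix used by MPDOutput.CMD_ID is assumed; the grouping itself uses only the JDK.

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

public class LineGroupingSketch {
    static List<List<String>> group(List<String> response, String idPrefix) {
        List<List<String>> blocks = new ArrayList<>();
        LinkedList<String> cache = new LinkedList<>();
        for (String line : response) {
            if (line.startsWith(idPrefix) && !cache.isEmpty()) {
                blocks.add(new ArrayList<>(cache));   // flush the previous block
                cache.clear();
            }
            cache.add(line);
        }
        if (!cache.isEmpty()) {
            blocks.add(new ArrayList<>(cache));       // flush the trailing block
        }
        return blocks;
    }
}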
|
public String getValue() {
return m_value;
}
}
public static final String CONFIG_FOLDER_PROPERTY = "opencms.config";
public static final String FILE_PERSISTENCE = "classes"
+ File.separatorChar
+ "META-INF"
+ File.separatorChar
+ "persistence.xml";
public static final String FILE_PROPERTIES = "opencms.properties";
public static final String FILE_TLD = "opencms.tld";
public static final String FOLDER_CONFIG_DEFAULT = "config" + File.separatorChar;
public static final String FOLDER_MODULES = "modules" + File.separatorChar;
public static final String FOLDER_PACKAGES = "packages" + File.separatorChar;
public static final String FOLDER_WEBINF = "WEB-INF" + File.separatorChar;
public static final String WORKPLACE_PATH = "/workplace";
private static final String DEFAULT_ENCODING = CmsEncoder.ENCODING_UTF_8;
private static final String DEFAULT_VERSION_ID = "Static";
private static final String DEFAULT_VERSION_NUMBER = "9.x.y";
private Map<String, BuildInfoItem> m_buildInfo;
private String m_configurationFileRfsPath;
private String m_defaultEncoding;
private I_CmsJspDeviceSelector m_deviceSelector;
private boolean m_historyEnabled;
private int m_historyVersions;
private int m_historyVersionsAfterDeletion;
private CmsHttpAuthenticationSettings m_httpAuthenticationSettings;
private CmsMailSettings m_mailSettings;
private String m_notificationProject;
private int m_notificationTime;
private String m_packagesRfsPath;
private String m_persistenceFileRfsPath;
private boolean m_restrictDetailContents;
private String m_serverName;
private CmsServletContainerSettings m_servletContainerSettings;
private long m_startupTime;
private String m_staticResourcePathFragment;
private String m_version;
private String m_versionId;
private String m_versionNumber;
public CmsSystemInfo() {
// set startup time
m_startupTime = System.currentTimeMillis();
// init version information
initVersion();
// set default encoding (will be changed again later when properties have been read)
m_defaultEncoding = DEFAULT_ENCODING.intern();
// this may look odd, but initMembers in OpenCms core has to initialize this (e.g. for setup to avoid NPE)
m_servletContainerSettings = new CmsServletContainerSettings(null);
}
|
String function() { return m_value; } } public static final String CONFIG_FOLDER_PROPERTY = STR; public static final String FILE_PERSISTENCE = STR + File.separatorChar + STR + File.separatorChar + STR; public static final String FILE_PROPERTIES = STR; public static final String FILE_TLD = STR; public static final String FOLDER_CONFIG_DEFAULT = STR + File.separatorChar; public static final String FOLDER_MODULES = STR + File.separatorChar; public static final String FOLDER_PACKAGES = STR + File.separatorChar; public static final String FOLDER_WEBINF = STR + File.separatorChar; public static final String WORKPLACE_PATH = STR; private static final String DEFAULT_ENCODING = CmsEncoder.ENCODING_UTF_8; private static final String DEFAULT_VERSION_ID = STR; private static final String DEFAULT_VERSION_NUMBER = "9.x.y"; private Map<String, BuildInfoItem> m_buildInfo; private String m_configurationFileRfsPath; private String m_defaultEncoding; private I_CmsJspDeviceSelector m_deviceSelector; private boolean m_historyEnabled; private int m_historyVersions; private int m_historyVersionsAfterDeletion; private CmsHttpAuthenticationSettings m_httpAuthenticationSettings; private CmsMailSettings m_mailSettings; private String m_notificationProject; private int m_notificationTime; private String m_packagesRfsPath; private String m_persistenceFileRfsPath; private boolean m_restrictDetailContents; private String m_serverName; private CmsServletContainerSettings m_servletContainerSettings; private long m_startupTime; private String m_staticResourcePathFragment; private String m_version; private String m_versionId; private String m_versionNumber; public CmsSystemInfo() { m_startupTime = System.currentTimeMillis(); initVersion(); m_defaultEncoding = DEFAULT_ENCODING.intern(); m_servletContainerSettings = new CmsServletContainerSettings(null); }
|
/**
* Gets the value for this build info item.<p>
*
* @return the value
*/
|
Gets the value for this build info item
|
getValue
|
{
"repo_name": "ggiudetti/opencms-core",
"path": "src/org/opencms/main/CmsSystemInfo.java",
"license": "lgpl-2.1",
"size": 29210
}
|
[
"java.io.File",
"java.util.Map",
"org.opencms.i18n.CmsEncoder",
"org.opencms.mail.CmsMailSettings"
] |
import java.io.File; import java.util.Map; import org.opencms.i18n.CmsEncoder; import org.opencms.mail.CmsMailSettings;
|
import java.io.*; import java.util.*; import org.opencms.i18n.*; import org.opencms.mail.*;
|
[
"java.io",
"java.util",
"org.opencms.i18n",
"org.opencms.mail"
] |
java.io; java.util; org.opencms.i18n; org.opencms.mail;
| 131,818
|
public void decrement(View view) {
quantity = quantity-1;
display(quantity);
}
|
void function(View view) { quantity = quantity-1; display(quantity); }
|
/**
 * This method decrements the quantity
*/
|
This method decrements the quantity
|
decrement
|
{
"repo_name": "codeword-ignite/anoop-android",
"path": "demos/JustJava/app/src/main/java/com/example/anoopp/justjava/MainActivity.java",
"license": "mit",
"size": 1728
}
|
[
"android.view.View"
] |
import android.view.View;
|
import android.view.*;
|
[
"android.view"
] |
android.view;
| 2,284,432
|
public static List<String> getFieldNames(List<FieldSchema> fieldSchemas) {
List<String> names = new ArrayList<>(fieldSchemas.size());
for (FieldSchema fs : fieldSchemas) {
names.add(fs.getName());
}
return names;
}
|
static List<String> function(List<FieldSchema> fieldSchemas) { List<String> names = new ArrayList<>(fieldSchemas.size()); for (FieldSchema fs : fieldSchemas) { names.add(fs.getName()); } return names; }
|
/**
* Get field names from field schemas.
*/
|
Get field names from field schemas
|
getFieldNames
|
{
"repo_name": "tzulitai/flink",
"path": "flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/catalog/hive/HiveCatalog.java",
"license": "apache-2.0",
"size": 60905
}
|
[
"java.util.ArrayList",
"java.util.List",
"org.apache.hadoop.hive.metastore.api.FieldSchema"
] |
import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hive.metastore.api.FieldSchema;
|
import java.util.*; import org.apache.hadoop.hive.metastore.api.*;
|
[
"java.util",
"org.apache.hadoop"
] |
java.util; org.apache.hadoop;
| 2,208,335
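An equivalent stream-based sketch of the helper above; the FieldSchema (name, type, comment) constructor is assumed from the Hive metastore API.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.hadoop.hive.metastore.api.FieldSchema;

public class FieldNamesSketch {
    public static void main(String[] args) {
        List<FieldSchema> cols = Arrays.asList(
                new FieldSchema("id", "bigint", null),
                new FieldSchema("name", "string", null));
        List<String> names = cols.stream()
                .map(FieldSchema::getName)       // same projection as the helper above
                .collect(Collectors.toList());
        System.out.println(names);               // [id, name]
    }
}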
|
HRegionLocation getRegionLocation(TableName tableName, byte [] row,
boolean reload)
throws IOException;
|
HRegionLocation getRegionLocation(TableName tableName, byte [] row, boolean reload) throws IOException;
|
/**
* Find region location hosting passed row
* @param tableName table name
* @param row Row to find.
* @param reload If true do not use cache, otherwise bypass.
* @return Location of row.
* @throws IOException if a remote or network exception occurs
*/
|
Find region location hosting passed row
|
getRegionLocation
|
{
"repo_name": "intel-hadoop/hbase-rhino",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java",
"license": "apache-2.0",
"size": 7832
}
|
[
"java.io.IOException",
"org.apache.hadoop.hbase.HRegionLocation",
"org.apache.hadoop.hbase.TableName"
] |
import java.io.IOException; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName;
|
import java.io.*; import org.apache.hadoop.hbase.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 86,505
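A minimal usage sketch for the interface method above; how the ClusterConnection is obtained is deployment-specific and elided, and the table and row values are placeholders.

import java.io.IOException;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionLookupSketch {
    static void printHostingServer(ClusterConnection conn) throws IOException {
        byte[] row = Bytes.toBytes("row-key");
        // reload=false lets the connection answer from its region location cache
        HRegionLocation location = conn.getRegionLocation(TableName.valueOf("my_table"), row, false);
        System.out.println(location.getServerName());
    }
}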
|
public EnumSet<ParseOption> getDefaultParseOptions() {
if (_parseAllStyles && _parseTZDBNames) {
return EnumSet.of(ParseOption.ALL_STYLES, ParseOption.TZ_DATABASE_ABBREVIATIONS);
} else if (_parseAllStyles) {
return EnumSet.of(ParseOption.ALL_STYLES);
} else if (_parseTZDBNames) {
return EnumSet.of(ParseOption.TZ_DATABASE_ABBREVIATIONS);
}
return EnumSet.noneOf(ParseOption.class);
}
|
EnumSet<ParseOption> function() { if (_parseAllStyles && _parseTZDBNames) { return EnumSet.of(ParseOption.ALL_STYLES, ParseOption.TZ_DATABASE_ABBREVIATIONS); } else if (_parseAllStyles) { return EnumSet.of(ParseOption.ALL_STYLES); } else if (_parseTZDBNames) { return EnumSet.of(ParseOption.TZ_DATABASE_ABBREVIATIONS); } return EnumSet.noneOf(ParseOption.class); }
|
/**
* Returns the default parse options used by this <code>TimeZoneFormat</code> instance.
* @return the default parse options.
* @see ParseOption
* @stable ICU 49
*/
|
Returns the default parse options used by this <code>TimeZoneFormat</code> instance
|
getDefaultParseOptions
|
{
"repo_name": "abhijitvalluri/fitnotifications",
"path": "icu4j/src/main/java/com/ibm/icu/text/TimeZoneFormat.java",
"license": "apache-2.0",
"size": 126382
}
|
[
"java.util.EnumSet"
] |
import java.util.EnumSet;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,295,400
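A usage sketch with ICU4J; TimeZoneFormat.getInstance and the nested ParseOption enum are the public ICU API referenced in the record.

import java.util.EnumSet;
import com.ibm.icu.text.TimeZoneFormat;
import com.ibm.icu.text.TimeZoneFormat.ParseOption;
import com.ibm.icu.util.ULocale;

public class ParseOptionsSketch {
    public static void main(String[] args) {
        TimeZoneFormat tzf = TimeZoneFormat.getInstance(ULocale.US);
        EnumSet<ParseOption> options = tzf.getDefaultParseOptions();
        // empty unless ALL_STYLES and/or TZ_DATABASE_ABBREVIATIONS were configured
        System.out.println(options.contains(ParseOption.ALL_STYLES));
    }
}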
|
void lock(Account... accounts) throws LockingException;
|
void lock(Account... accounts) throws LockingException;
|
/**
* Obtains pessimistic locks for the given accounts
*/
|
Obtains pessimistic locks for the given accounts
|
lock
|
{
"repo_name": "robertoandrade/cyclos",
"path": "src/nl/strohalm/cyclos/utils/lock/LockHandler.java",
"license": "gpl-2.0",
"size": 1530
}
|
[
"nl.strohalm.cyclos.entities.accounts.Account",
"nl.strohalm.cyclos.entities.exceptions.LockingException"
] |
import nl.strohalm.cyclos.entities.accounts.Account; import nl.strohalm.cyclos.entities.exceptions.LockingException;
|
import nl.strohalm.cyclos.entities.accounts.*; import nl.strohalm.cyclos.entities.exceptions.*;
|
[
"nl.strohalm.cyclos"
] |
nl.strohalm.cyclos;
| 41,041
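A usage sketch for the lock method above; how the LockHandler and Account instances are obtained is specific to the Cyclos service layer and elided here, so the method shown is purely illustrative.

import nl.strohalm.cyclos.entities.accounts.Account;
import nl.strohalm.cyclos.entities.exceptions.LockingException;

public class LockSketch {
    static void debitAndCredit(LockHandler locks, Account from, Account to) {
        try {
            // take pessimistic locks on both accounts before touching balances
            locks.lock(from, to);
            // ... perform the balance updates here ...
        } catch (LockingException e) {
            // a lock could not be obtained; callers typically retry or abort the transfer
            throw e;
        }
    }
}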
|
private void cmd_export()
{
ValueNamePair pp = (ValueNamePair)fPaymentRule.getSelectedItem();
if (pp == null)
return;
String PaymentRule = pp.getValue();
log.info(PaymentRule);
if (!getChecks(PaymentRule))
return;
// Get File Info
JFileChooser fc = new JFileChooser();
fc.setDialogTitle(Msg.getMsg(Env.getCtx(), "Export"));
fc.setFileSelectionMode(JFileChooser.FILES_ONLY);
fc.setMultiSelectionEnabled(false);
fc.setSelectedFile(new java.io.File("paymentExport.txt"));
if (fc.showSaveDialog(panel) != JFileChooser.APPROVE_OPTION)
return;
// Create File
int no = 0;
StringBuffer err = new StringBuffer("");
if (m_PaymentExportClass == null || m_PaymentExportClass.trim().length() == 0) {
m_PaymentExportClass = "org.compiere.util.GenericPaymentExport";
}
// Get Payment Export Class
PaymentExport custom = null;
try
{
Class<?> clazz = Class.forName(m_PaymentExportClass);
custom = (PaymentExport)clazz.newInstance();
no = custom.exportToFile(m_checks, fc.getSelectedFile(), err);
}
catch (ClassNotFoundException e)
{
no = -1;
err.append("No custom PaymentExport class " + m_PaymentExportClass + " - " + e.toString());
log.log(Level.SEVERE, err.toString(), e);
}
catch (Exception e)
{
no = -1;
err.append("Error in " + m_PaymentExportClass + " check log, " + e.toString());
log.log(Level.SEVERE, err.toString(), e);
}
if (no >= 0) {
ADialog.info(m_WindowNo, panel, "Saved",
fc.getSelectedFile().getAbsolutePath() + "\n"
+ Msg.getMsg(Env.getCtx(), "NoOfLines") + "=" + no);
if (ADialog.ask(m_WindowNo, panel, "VPayPrintSuccess?"))
{
// int lastDocumentNo =
MPaySelectionCheck.confirmPrint (m_checks, m_batch);
// document No not updated
}
} else {
ADialog.error(m_WindowNo, panel, "Error", err.toString());
}
dispose();
} // cmd_export
|
void function() { ValueNamePair pp = (ValueNamePair)fPaymentRule.getSelectedItem(); if (pp == null) return; String PaymentRule = pp.getValue(); log.info(PaymentRule); if (!getChecks(PaymentRule)) return; JFileChooser fc = new JFileChooser(); fc.setDialogTitle(Msg.getMsg(Env.getCtx(), STR)); fc.setFileSelectionMode(JFileChooser.FILES_ONLY); fc.setMultiSelectionEnabled(false); fc.setSelectedFile(new java.io.File(STR)); if (fc.showSaveDialog(panel) != JFileChooser.APPROVE_OPTION) return; int no = 0; StringBuffer err = new StringBuffer(STRorg.compiere.util.GenericPaymentExportSTRNo custom PaymentExport class STR - STRError in STR check log, STRSavedSTR\nSTRNoOfLinesSTR=STRVPayPrintSuccess?STRError", err.toString()); } dispose(); }
|
/**************************************************************************
* Export payments to file
*/
|
Export payments to file
|
cmd_export
|
{
"repo_name": "braully/adempiere",
"path": "client/src/org/compiere/apps/form/VPayPrint.java",
"license": "gpl-2.0",
"size": 17137
}
|
[
"javax.swing.JFileChooser",
"org.compiere.util.Env",
"org.compiere.util.Msg",
"org.compiere.util.PaymentExport",
"org.compiere.util.ValueNamePair"
] |
import javax.swing.JFileChooser; import org.compiere.util.Env; import org.compiere.util.Msg; import org.compiere.util.PaymentExport; import org.compiere.util.ValueNamePair;
|
import javax.swing.*; import org.compiere.util.*;
|
[
"javax.swing",
"org.compiere.util"
] |
javax.swing; org.compiere.util;
| 637,240
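A sketch of the reflective exporter loading used in cmd_export; the default class name is taken from the record, and the error handling is simplified to a single reflective exception.

import org.compiere.util.PaymentExport;

public class PaymentExportLoaderSketch {
    static PaymentExport loadExporter(String className) throws ReflectiveOperationException {
        String name = (className == null || className.trim().isEmpty())
                ? "org.compiere.util.GenericPaymentExport"   // default named in the source
                : className;
        Class<?> clazz = Class.forName(name);
        // the exporter is expected to expose a public no-arg constructor
        return (PaymentExport) clazz.getDeclaredConstructor().newInstance();
    }
}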
|