method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
list
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
list
libraries_info
stringlengths
6
661
id
int64
0
2.92M
public void addAppliance (Appliance appliance) { appliances.add(appliance); }
void function (Appliance appliance) { appliances.add(appliance); }
/** * Adds an appliance to the installation. * * @param appliance the appliance to be added */
Adds an appliance to the installation
addAppliance
{ "repo_name": "cassandra-project/cassandra-stand-alone", "path": "src/eu/cassandra/sim/entities/installations/Installation.java", "license": "apache-2.0", "size": 10941 }
[ "eu.cassandra.sim.entities.appliances.Appliance" ]
import eu.cassandra.sim.entities.appliances.Appliance;
import eu.cassandra.sim.entities.appliances.*;
[ "eu.cassandra.sim" ]
eu.cassandra.sim;
1,323,523
void setTerrain(TerrainType terrain);
void setTerrain(TerrainType terrain);
/** * Set the terrain associated with this tile. * @param terrain type of terrain to set */
Set the terrain associated with this tile
setTerrain
{ "repo_name": "MountainRange/MULE", "path": "src/main/java/io/github/mountainrange/mule/gameplay/Tile.java", "license": "gpl-3.0", "size": 2175 }
[ "io.github.mountainrange.mule.enums.TerrainType" ]
import io.github.mountainrange.mule.enums.TerrainType;
import io.github.mountainrange.mule.enums.*;
[ "io.github.mountainrange" ]
io.github.mountainrange;
1,443,843
public Field getField();
Field function();
/** * The field value * @return */
The field value
getField
{ "repo_name": "mdunker/usergrid", "path": "stack/corepersistence/collection/src/main/java/org/apache/usergrid/persistence/collection/serialization/UniqueValue.java", "license": "apache-2.0", "size": 1456 }
[ "org.apache.usergrid.persistence.model.field.Field" ]
import org.apache.usergrid.persistence.model.field.Field;
import org.apache.usergrid.persistence.model.field.*;
[ "org.apache.usergrid" ]
org.apache.usergrid;
2,502,742
private void populateDbMappings() throws OutputEventAdapterException { String dbName; dbTypeMappings = new HashMap<String, String>(); Connection con; try { CarbonDataSource carbonDataSource = RDBMSEventAdapterServiceValueHolder.getDataSourceService() .getDataSource( eventAdapterConfiguration.getStaticProperties().get(RDBMSEventAdapterConstants .ADAPTER_GENERIC_RDBMS_DATASOURCE_NAME)); con = ((DataSource) carbonDataSource.getDSObject()).getConnection(); DatabaseMetaData databaseMetaData = con.getMetaData(); dbName = databaseMetaData.getDatabaseProductName(); dbName = dbName.toLowerCase(); } catch (DataSourceException e) { log.error( "There is no any data-source found called : " + eventAdapterConfiguration.getStaticProperties().get( RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_DATASOURCE_NAME), e); throw new ConnectionUnavailableException(e.getMessage(), e); } catch (SQLException e) { throw new ConnectionUnavailableException(e); } // Map<String, String> defaultMappings = new HashMap<String, String>(); String[] staticAttributes = { RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_STRING, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_DOUBLE, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_INTEGER, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_LONG, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_FLOAT, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_BOOLEAN, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_CREATE_TABLE, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_INSERT_DATA, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_TABLE_EXIST, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_UPDATE_TABLE, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_PROPERTY_DATA_TYPE_IN_TABLE, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_PROPERTY_SELECT_FROM_TABLE, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_COMMA, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_QUESTION_MARK, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_EQUAL, 
RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_AND }; Boolean staticAttributeExist; String attribute = null; Map<String, String> defaultMappings = new HashMap<String, String>(); for(int i=0 ; i<staticAttributes.length ; i++){ staticAttributeExist = false; for (Map.Entry<String, String> entry : globalProperties.entrySet()) { attribute = staticAttributes[i]; if(staticAttributes[i].equals(entry.getKey())){ staticAttributeExist = true; defaultMappings.put(entry.getKey(), entry.getValue()); break; } } if(!staticAttributeExist){ throw new OutputEventAdapterRuntimeException("A mandatory attribute " + attribute + " does not exist"); } } Boolean valueExist; for (Map.Entry<String, String> defaultMap : defaultMappings.entrySet()) { valueExist = false; for (Map.Entry<String, String> entry : globalProperties.entrySet()) { if (entry.getKey().contains(dbName)) { if (entry.getKey().contains(defaultMap.getKey())) { dbTypeMappings.put(defaultMap.getKey(), entry.getValue()); valueExist = true; break; } } } if(!valueExist){ dbTypeMappings.put(defaultMap.getKey(), defaultMap.getValue()); } } }
void function() throws OutputEventAdapterException { String dbName; dbTypeMappings = new HashMap<String, String>(); Connection con; try { CarbonDataSource carbonDataSource = RDBMSEventAdapterServiceValueHolder.getDataSourceService() .getDataSource( eventAdapterConfiguration.getStaticProperties().get(RDBMSEventAdapterConstants .ADAPTER_GENERIC_RDBMS_DATASOURCE_NAME)); con = ((DataSource) carbonDataSource.getDSObject()).getConnection(); DatabaseMetaData databaseMetaData = con.getMetaData(); dbName = databaseMetaData.getDatabaseProductName(); dbName = dbName.toLowerCase(); } catch (DataSourceException e) { log.error( STR + eventAdapterConfiguration.getStaticProperties().get( RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_DATASOURCE_NAME), e); throw new ConnectionUnavailableException(e.getMessage(), e); } catch (SQLException e) { throw new ConnectionUnavailableException(e); } String[] staticAttributes = { RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_STRING, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_DOUBLE, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_INTEGER, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_LONG, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_FLOAT, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_BOOLEAN, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_CREATE_TABLE, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_INSERT_DATA, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_TABLE_EXIST, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_UPDATE_TABLE, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_PROPERTY_DATA_TYPE_IN_TABLE, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_PROPERTY_SELECT_FROM_TABLE, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_COMMA, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_QUESTION_MARK, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_EQUAL, RDBMSEventAdapterConstants.ADAPTER_GENERIC_RDBMS_AND }; Boolean staticAttributeExist; String attribute = null; Map<String, String> defaultMappings = new 
HashMap<String, String>(); for(int i=0 ; i<staticAttributes.length ; i++){ staticAttributeExist = false; for (Map.Entry<String, String> entry : globalProperties.entrySet()) { attribute = staticAttributes[i]; if(staticAttributes[i].equals(entry.getKey())){ staticAttributeExist = true; defaultMappings.put(entry.getKey(), entry.getValue()); break; } } if(!staticAttributeExist){ throw new OutputEventAdapterRuntimeException(STR + attribute + STR); } } Boolean valueExist; for (Map.Entry<String, String> defaultMap : defaultMappings.entrySet()) { valueExist = false; for (Map.Entry<String, String> entry : globalProperties.entrySet()) { if (entry.getKey().contains(dbName)) { if (entry.getKey().contains(defaultMap.getKey())) { dbTypeMappings.put(defaultMap.getKey(), entry.getValue()); valueExist = true; break; } } } if(!valueExist){ dbTypeMappings.put(defaultMap.getKey(), defaultMap.getValue()); } } }
/** * Populate specific db Mappings */
Populate specific db Mappings
populateDbMappings
{ "repo_name": "kasungayan/carbon-analytics-common", "path": "components/event-publisher/event-output-adapters/org.wso2.carbon.event.output.adapter.rdbms/src/main/java/org/wso2/carbon/event/output/adapter/rdbms/RDBMSEventAdapter.java", "license": "apache-2.0", "size": 26593 }
[ "java.sql.Connection", "java.sql.DatabaseMetaData", "java.sql.SQLException", "java.util.HashMap", "java.util.Map", "javax.sql.DataSource", "org.wso2.carbon.event.output.adapter.core.exception.ConnectionUnavailableException", "org.wso2.carbon.event.output.adapter.core.exception.OutputEventAdapterException", "org.wso2.carbon.event.output.adapter.core.exception.OutputEventAdapterRuntimeException", "org.wso2.carbon.event.output.adapter.rdbms.internal.ds.RDBMSEventAdapterServiceValueHolder", "org.wso2.carbon.event.output.adapter.rdbms.internal.util.RDBMSEventAdapterConstants", "org.wso2.carbon.ndatasource.common.DataSourceException", "org.wso2.carbon.ndatasource.core.CarbonDataSource" ]
import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.SQLException; import java.util.HashMap; import java.util.Map; import javax.sql.DataSource; import org.wso2.carbon.event.output.adapter.core.exception.ConnectionUnavailableException; import org.wso2.carbon.event.output.adapter.core.exception.OutputEventAdapterException; import org.wso2.carbon.event.output.adapter.core.exception.OutputEventAdapterRuntimeException; import org.wso2.carbon.event.output.adapter.rdbms.internal.ds.RDBMSEventAdapterServiceValueHolder; import org.wso2.carbon.event.output.adapter.rdbms.internal.util.RDBMSEventAdapterConstants; import org.wso2.carbon.ndatasource.common.DataSourceException; import org.wso2.carbon.ndatasource.core.CarbonDataSource;
import java.sql.*; import java.util.*; import javax.sql.*; import org.wso2.carbon.event.output.adapter.core.exception.*; import org.wso2.carbon.event.output.adapter.rdbms.internal.ds.*; import org.wso2.carbon.event.output.adapter.rdbms.internal.util.*; import org.wso2.carbon.ndatasource.common.*; import org.wso2.carbon.ndatasource.core.*;
[ "java.sql", "java.util", "javax.sql", "org.wso2.carbon" ]
java.sql; java.util; javax.sql; org.wso2.carbon;
2,020,673
public static SelectionDialog createTypeDialog(Shell parent, IRunnableContext context, IJavaSearchScope scope, int style, boolean multipleSelection) throws JavaModelException { return createTypeDialog(parent, context, scope, style, multipleSelection, "");//$NON-NLS-1$ }
static SelectionDialog function(Shell parent, IRunnableContext context, IJavaSearchScope scope, int style, boolean multipleSelection) throws JavaModelException { return createTypeDialog(parent, context, scope, style, multipleSelection, ""); }
/** * Creates a selection dialog that lists all types in the given scope. * The caller is responsible for opening the dialog with <code>Window.open</code>, * and subsequently extracting the selected type(s) (of type * <code>IType</code>) via <code>SelectionDialog.getResult</code>. * * @param parent the parent shell of the dialog to be created * @param context the runnable context used to show progress when the dialog * is being populated * @param scope the scope that limits which types are included * @param style flags defining the style of the dialog; the only valid values are * {@link IJavaElementSearchConstants#CONSIDER_CLASSES}, * {@link IJavaElementSearchConstants#CONSIDER_INTERFACES}, * {@link IJavaElementSearchConstants#CONSIDER_ANNOTATION_TYPES}, * {@link IJavaElementSearchConstants#CONSIDER_ENUMS}, * {@link IJavaElementSearchConstants#CONSIDER_ALL_TYPES}, * {@link IJavaElementSearchConstants#CONSIDER_CLASSES_AND_INTERFACES}, * {@link IJavaElementSearchConstants#CONSIDER_CLASSES_AND_ENUMS}, and * {@link IJavaElementSearchConstants#CONSIDER_INTERFACES_AND_ANNOTATIONS}. Please note that * the bitwise OR combination of the elementary constants is not supported. * @param multipleSelection <code>true</code> if multiple selection is allowed * * @return a new selection dialog * * @exception JavaModelException if the selection dialog could not be opened */
Creates a selection dialog that lists all types in the given scope. The caller is responsible for opening the dialog with <code>Window.open</code>, and subsequently extracting the selected type(s) (of type <code>IType</code>) via <code>SelectionDialog.getResult</code>
createTypeDialog
{ "repo_name": "trylimits/Eclipse-Postfix-Code-Completion", "path": "luna/org.eclipse.jdt.ui/ui/org/eclipse/jdt/ui/JavaUI.java", "license": "epl-1.0", "size": 45169 }
[ "org.eclipse.jdt.core.JavaModelException", "org.eclipse.jdt.core.search.IJavaSearchScope", "org.eclipse.jface.operation.IRunnableContext", "org.eclipse.swt.widgets.Shell", "org.eclipse.ui.dialogs.SelectionDialog" ]
import org.eclipse.jdt.core.JavaModelException; import org.eclipse.jdt.core.search.IJavaSearchScope; import org.eclipse.jface.operation.IRunnableContext; import org.eclipse.swt.widgets.Shell; import org.eclipse.ui.dialogs.SelectionDialog;
import org.eclipse.jdt.core.*; import org.eclipse.jdt.core.search.*; import org.eclipse.jface.operation.*; import org.eclipse.swt.widgets.*; import org.eclipse.ui.dialogs.*;
[ "org.eclipse.jdt", "org.eclipse.jface", "org.eclipse.swt", "org.eclipse.ui" ]
org.eclipse.jdt; org.eclipse.jface; org.eclipse.swt; org.eclipse.ui;
1,614,871
public static void setInfoStream(PrintStream infoStream) { SegmentInfos.infoStream = infoStream; }
static void function(PrintStream infoStream) { SegmentInfos.infoStream = infoStream; }
/** If non-null, information about retries when loading * the segments file will be printed to this. */
If non-null, information about retries when loading the segments file will be printed to this
setInfoStream
{ "repo_name": "PATRIC3/p3_solr", "path": "lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java", "license": "apache-2.0", "size": 39975 }
[ "java.io.PrintStream" ]
import java.io.PrintStream;
import java.io.*;
[ "java.io" ]
java.io;
1,241,636
public ByteBuffer encode() { // maximum length of the header if all fields are used int maxHeaderLength = 14; encoded = ByteBuffer.allocate(maxHeaderLength + data.limit()); byte flags = 0b1000; // fin without extensions byte opcode = (byte) this.opcode.getOpCodeNumber(); encoded.put((byte) (opcode | (flags << 4))); byte mask = 0; int payloadLen = data.remaining(); int firstLen = payloadLen; // first length field byte[] extendedLen = new byte[0]; if (payloadLen > 65536) { // use 64 bit field for really large payloads // TODO: implement firstLen = 127; extendedLen = new byte[8]; throw new UnsupportedOperationException("Not implemented yet"); } else if (payloadLen > 125) { // 16 bit field firstLen = 126; extendedLen = new byte[2]; extendedLen[0] = (byte) ((payloadLen >> 8) & 0xFF); extendedLen[1] = (byte) (payloadLen & 0xFF); } // include 2-byte basic header encoded.put((byte) ((mask << 7) | (firstLen))); // put extended len in case we are sending larger message encoded.put(extendedLen); // if this is a control frame, include a status code if (statusCode != null) { int sC = statusCode.getStatusCodeNumber(); encoded.putShort((short) (sC & 0xFFFF)); } encoded.put(data); encoded.flip(); // prepare buffer for reading! return encoded; }
ByteBuffer function() { int maxHeaderLength = 14; encoded = ByteBuffer.allocate(maxHeaderLength + data.limit()); byte flags = 0b1000; byte opcode = (byte) this.opcode.getOpCodeNumber(); encoded.put((byte) (opcode (flags << 4))); byte mask = 0; int payloadLen = data.remaining(); int firstLen = payloadLen; byte[] extendedLen = new byte[0]; if (payloadLen > 65536) { firstLen = 127; extendedLen = new byte[8]; throw new UnsupportedOperationException(STR); } else if (payloadLen > 125) { firstLen = 126; extendedLen = new byte[2]; extendedLen[0] = (byte) ((payloadLen >> 8) & 0xFF); extendedLen[1] = (byte) (payloadLen & 0xFF); } encoded.put((byte) ((mask << 7) (firstLen))); encoded.put(extendedLen); if (statusCode != null) { int sC = statusCode.getStatusCodeNumber(); encoded.putShort((short) (sC & 0xFFFF)); } encoded.put(data); encoded.flip(); return encoded; }
/** * Encodes contents of this frame into bytes stored in ByteBuffer * * @return buffer with encoded data, ready for reading (flipped) */
Encodes contents of this frame into bytes stored in ByteBuffer
encode
{ "repo_name": "praus/Aiolos", "path": "Aiolos/src/edu/baylor/aiolos/websocket/WebSocketFrame.java", "license": "mit", "size": 11658 }
[ "java.nio.ByteBuffer" ]
import java.nio.ByteBuffer;
import java.nio.*;
[ "java.nio" ]
java.nio;
369,592
private void createVirtualChildren() { // The virtual portion of the tree is one level deep. Note // that implementations can use any way of representing and // drawing virtual view. VirtualView firstChild = new VirtualView(0, new Rect(0, 0, 150, 150), Color.RED, "Virtual view 1"); mChildren.add(firstChild); VirtualView secondChild = new VirtualView(1, new Rect(0, 0, 150, 150), Color.GREEN, "Virtual view 2"); mChildren.add(secondChild); VirtualView thirdChild = new VirtualView(2, new Rect(0, 0, 150, 150), Color.BLUE, "Virtual view 3"); mChildren.add(thirdChild); }
void function() { VirtualView firstChild = new VirtualView(0, new Rect(0, 0, 150, 150), Color.RED, STR); mChildren.add(firstChild); VirtualView secondChild = new VirtualView(1, new Rect(0, 0, 150, 150), Color.GREEN, STR); mChildren.add(secondChild); VirtualView thirdChild = new VirtualView(2, new Rect(0, 0, 150, 150), Color.BLUE, STR); mChildren.add(thirdChild); }
/** * Creates the virtual children of this View. */
Creates the virtual children of this View
createVirtualChildren
{ "repo_name": "CJstar/android-maven-plugin", "path": "src/test/projects/apidemos-android-16/apidemos-application/src/main/java/com/example/android/apis/accessibility/AccessibilityNodeProviderActivity.java", "license": "apache-2.0", "size": 21564 }
[ "android.graphics.Color", "android.graphics.Rect" ]
import android.graphics.Color; import android.graphics.Rect;
import android.graphics.*;
[ "android.graphics" ]
android.graphics;
499,897
@Test public void shouldMarshalWithSpecificHeaders() throws Exception { template.sendBody("direct:header", Arrays.asList( asMap("A", "1", "B", "2", "C", "3"), asMap("A", "one", "B", "two", "C", "three"))); result.expectedMessageCount(1); result.assertIsSatisfied(); String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody()); assertEquals(join("1,3", "one,three"), body); }
void function() throws Exception { template.sendBody(STR, Arrays.asList( asMap("A", "1", "B", "2", "C", "3"), asMap("A", "one", "B", "two", "C", "three"))); result.expectedMessageCount(1); result.assertIsSatisfied(); String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody()); assertEquals(join("1,3", STR), body); }
/** * Tests that we can marshal CSV with specific headers */
Tests that we can marshal CSV with specific headers
shouldMarshalWithSpecificHeaders
{ "repo_name": "nikhilvibhav/camel", "path": "components/camel-univocity-parsers/src/test/java/org/apache/camel/dataformat/univocity/UniVocityCsvDataFormatMarshalSpringTest.java", "license": "apache-2.0", "size": 4879 }
[ "java.util.Arrays", "org.apache.camel.dataformat.univocity.UniVocityTestHelper", "org.apache.camel.test.junit5.TestSupport", "org.junit.jupiter.api.Assertions" ]
import java.util.Arrays; import org.apache.camel.dataformat.univocity.UniVocityTestHelper; import org.apache.camel.test.junit5.TestSupport; import org.junit.jupiter.api.Assertions;
import java.util.*; import org.apache.camel.dataformat.univocity.*; import org.apache.camel.test.junit5.*; import org.junit.jupiter.api.*;
[ "java.util", "org.apache.camel", "org.junit.jupiter" ]
java.util; org.apache.camel; org.junit.jupiter;
1,556,136
public static boolean isStripDevice(ThingTypeUID thingTypeUID) { return SUPPORTED_THING_TYPES_LIST.stream().filter(t -> t.is(thingTypeUID)) .anyMatch(t -> t.type == DeviceType.STRIP); }
static boolean function(ThingTypeUID thingTypeUID) { return SUPPORTED_THING_TYPES_LIST.stream().filter(t -> t.is(thingTypeUID)) .anyMatch(t -> t.type == DeviceType.STRIP); }
/** * Returns true if the given {@link ThingTypeUID} matches a device that supports the power strip communication * protocol. * * @param thingTypeUID if the check * @return true if it's a power strip supporting device */
Returns true if the given <code>ThingTypeUID</code> matches a device that supports the power strip communication protocol
isStripDevice
{ "repo_name": "theoweiss/openhab2", "path": "bundles/org.openhab.binding.tplinksmarthome/src/main/java/org/openhab/binding/tplinksmarthome/internal/TPLinkSmartHomeThingType.java", "license": "epl-1.0", "size": 5568 }
[ "org.eclipse.smarthome.core.thing.ThingTypeUID" ]
import org.eclipse.smarthome.core.thing.ThingTypeUID;
import org.eclipse.smarthome.core.thing.*;
[ "org.eclipse.smarthome" ]
org.eclipse.smarthome;
2,036,303
@Test public void values() { final RuneList<AngloSaxonRune> values = AngloSaxonRune.values(); assertNotNull(values); assertThat(values.size(), is(33)); assertThat(values.get(0), is(AngloSaxonRune.FEOH)); assertThat(values.get(32), is(AngloSaxonRune.NEWLINE)); }
void function() { final RuneList<AngloSaxonRune> values = AngloSaxonRune.values(); assertNotNull(values); assertThat(values.size(), is(33)); assertThat(values.get(0), is(AngloSaxonRune.FEOH)); assertThat(values.get(32), is(AngloSaxonRune.NEWLINE)); }
/** * Test the <code>values()</code> method. */
Test the <code>values()</code> method
values
{ "repo_name": "jmthompson2015/runetranscriber", "path": "core/src/test/java/org/runetranscriber/core/viking/AngloSaxonRuneTest.java", "license": "mit", "size": 2026 }
[ "org.hamcrest.CoreMatchers", "org.junit.Assert", "org.runetranscriber.core.RuneList" ]
import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.runetranscriber.core.RuneList;
import org.hamcrest.*; import org.junit.*; import org.runetranscriber.core.*;
[ "org.hamcrest", "org.junit", "org.runetranscriber.core" ]
org.hamcrest; org.junit; org.runetranscriber.core;
397,673
public boolean isSubscribed() throws CloudException, InternalException; /** * Indicates whether the highest precedence comes from low numbers. If true, 0 is the highest precedence a rule * can have. If false, 0 is the lowest precedence. * @return true if 0 is the highest precedence for a rule * @throws InternalException an error occurred locally independent of any events in the cloud * @throws CloudException an error occurred with the cloud provider while performing the operation * @deprecated use {@link NetworkFirewallCapabilities#isZeroPrecedenceHighest()}
boolean function() throws CloudException, InternalException; /** * Indicates whether the highest precedence comes from low numbers. If true, 0 is the highest precedence a rule * can have. If false, 0 is the lowest precedence. * @return true if 0 is the highest precedence for a rule * @throws InternalException an error occurred locally independent of any events in the cloud * @throws CloudException an error occurred with the cloud provider while performing the operation * @deprecated use {@link NetworkFirewallCapabilities#isZeroPrecedenceHighest()}
/** * Identifies whether or not the current account is subscribed to network firewall services in the current region. * @return true if the current account is subscribed to network firewall services for the current region * @throws CloudException an error occurred with the cloud provider while determining subscription status * @throws InternalException an error occurred in the Dasein Cloud implementation while determining subscription status */
Identifies whether or not the current account is subscribed to network firewall services in the current region
isSubscribed
{ "repo_name": "greese/dasein-cloud-core", "path": "src/main/java/org/dasein/cloud/network/NetworkFirewallSupport.java", "license": "apache-2.0", "size": 21275 }
[ "org.dasein.cloud.CloudException", "org.dasein.cloud.InternalException" ]
import org.dasein.cloud.CloudException; import org.dasein.cloud.InternalException;
import org.dasein.cloud.*;
[ "org.dasein.cloud" ]
org.dasein.cloud;
2,569,659
public synchronized MapperService createIndexMapperService(IndexMetadata indexMetadata) throws IOException { final IndexSettings idxSettings = new IndexSettings(indexMetadata, this.settings, indexScopedSettings); final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry, getEngineFactory(idxSettings), directoryFactories); pluginsService.onIndexModule(indexModule); return indexModule.newIndexMapperService(xContentRegistry, mapperRegistry); }
synchronized MapperService function(IndexMetadata indexMetadata) throws IOException { final IndexSettings idxSettings = new IndexSettings(indexMetadata, this.settings, indexScopedSettings); final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry, getEngineFactory(idxSettings), directoryFactories); pluginsService.onIndexModule(indexModule); return indexModule.newIndexMapperService(xContentRegistry, mapperRegistry); }
/** * creates a new mapper service for the given index, in order to do administrative work like mapping updates. * This *should not* be used for document parsing. Doing so will result in an exception. * * Note: the returned {@link MapperService} should be closed when unneeded. */
creates a new mapper service for the given index, in order to do administrative work like mapping updates. This *should not* be used for document parsing. Doing so will result in an exception. Note: the returned <code>MapperService</code> should be closed when unneeded
createIndexMapperService
{ "repo_name": "crate/crate", "path": "server/src/main/java/org/elasticsearch/indices/IndicesService.java", "license": "apache-2.0", "size": 52343 }
[ "java.io.IOException", "org.elasticsearch.cluster.metadata.IndexMetadata", "org.elasticsearch.index.IndexModule", "org.elasticsearch.index.IndexSettings", "org.elasticsearch.index.mapper.MapperService" ]
import java.io.IOException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService;
import java.io.*; import org.elasticsearch.cluster.metadata.*; import org.elasticsearch.index.*; import org.elasticsearch.index.mapper.*;
[ "java.io", "org.elasticsearch.cluster", "org.elasticsearch.index" ]
java.io; org.elasticsearch.cluster; org.elasticsearch.index;
1,007,326
private void divideByFittestSpecies(final List<Species> speciesCollection, final double totalSpeciesScore) { final Species bestSpecies = findBestSpecies(); // loop over all species and calculate its share final Object[] speciesArray = speciesCollection.toArray(); for (final Object element : speciesArray) { final Species species = (Species) element; // calculate the species share based on the percent of the total // species score int share = (int) Math .round((species.getOffspringShare() / totalSpeciesScore) * this.owner.getPopulation().getPopulationSize()); // do not give the best species a zero-share if ((species == bestSpecies) && (share == 0)) { share = 1; } // if the share is zero, then remove the species if ((species.getMembers().isEmpty()) || (share == 0)) { removeSpecies(species); } // if the species has not improved over the specified number of // generations, then remove it. else if ((species.getGensNoImprovement() > this.numGensAllowedNoImprovement) && (species != bestSpecies)) { removeSpecies(species); } else { // otherwise assign a share and sort the members. species.setOffspringCount(share); Collections.sort(species.getMembers(), this.sortGenomes); } } }
void function(final List<Species> speciesCollection, final double totalSpeciesScore) { final Species bestSpecies = findBestSpecies(); final Object[] speciesArray = speciesCollection.toArray(); for (final Object element : speciesArray) { final Species species = (Species) element; int share = (int) Math .round((species.getOffspringShare() / totalSpeciesScore) * this.owner.getPopulation().getPopulationSize()); if ((species == bestSpecies) && (share == 0)) { share = 1; } if ((species.getMembers().isEmpty()) (share == 0)) { removeSpecies(species); } else if ((species.getGensNoImprovement() > this.numGensAllowedNoImprovement) && (species != bestSpecies)) { removeSpecies(species); } else { species.setOffspringCount(share); Collections.sort(species.getMembers(), this.sortGenomes); } } }
/** * Divide up the potential offspring by the most fit species. To do this we * look at the total species score, vs each individual species percent * contribution to that score. * * @param speciesCollection The current species list. * @param totalSpeciesScore The total score over all species. */
Divide up the potential offspring by the most fit species. To do this we look at the total species score, vs each individual species percent contribution to that score
divideByFittestSpecies
{ "repo_name": "automenta/java_dann", "path": "src/syncleus/dann/evolve/species/ThresholdSpeciation.java", "license": "agpl-3.0", "size": 15626 }
[ "java.util.Collections", "java.util.List" ]
import java.util.Collections; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,162,980
private void testSplit(InputSplit split, byte[]... columns) throws Exception { HLogRecordReader reader = new HLogRecordReader(); reader.initialize(split, MapReduceTestUtil.createDummyMapTaskAttemptContext(conf)); for (byte[] column : columns) { assertTrue(reader.nextKeyValue()); KeyValue kv = reader.getCurrentValue().getKeyValues().get(0); if (!Bytes.equals(column, kv.getQualifier())) { assertTrue("expected [" + Bytes.toString(column) + "], actual [" + Bytes.toString(kv.getQualifier()) + "]", false); } } assertFalse(reader.nextKeyValue()); reader.close(); }
void function(InputSplit split, byte[]... columns) throws Exception { HLogRecordReader reader = new HLogRecordReader(); reader.initialize(split, MapReduceTestUtil.createDummyMapTaskAttemptContext(conf)); for (byte[] column : columns) { assertTrue(reader.nextKeyValue()); KeyValue kv = reader.getCurrentValue().getKeyValues().get(0); if (!Bytes.equals(column, kv.getQualifier())) { assertTrue(STR + Bytes.toString(column) + STR + Bytes.toString(kv.getQualifier()) + "]", false); } } assertFalse(reader.nextKeyValue()); reader.close(); }
/** * Create a new reader from the split, and match the edits against the passed columns. */
Create a new reader from the split, and match the edits against the passed columns
testSplit
{ "repo_name": "tobegit3hub/hbase", "path": "hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java", "license": "apache-2.0", "size": 9347 }
[ "org.apache.hadoop.hbase.KeyValue", "org.apache.hadoop.hbase.mapreduce.HLogInputFormat", "org.apache.hadoop.hbase.util.Bytes", "org.apache.hadoop.mapreduce.InputSplit", "org.apache.hadoop.mapreduce.MapReduceTestUtil", "org.junit.Assert" ]
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.mapreduce.HLogInputFormat; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.junit.Assert;
import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.mapreduce.*; import org.apache.hadoop.hbase.util.*; import org.apache.hadoop.mapreduce.*; import org.junit.*;
[ "org.apache.hadoop", "org.junit" ]
org.apache.hadoop; org.junit;
1,952,681
public static boolean clonePermissions(@NotNull String source, @NotNull String target) { try { return ourMediator.clonePermissions(source, target, false); } catch (Exception e) { LOG.warn(e); return false; } }
static boolean function(@NotNull String source, @NotNull String target) { try { return ourMediator.clonePermissions(source, target, false); } catch (Exception e) { LOG.warn(e); return false; } }
/** * Gives the second file permissions of the first one if possible; returns true if succeed. * Will do nothing on Windows. */
Gives the second file permissions of the first one if possible; returns true if succeed. Will do nothing on Windows
clonePermissions
{ "repo_name": "youdonghai/intellij-community", "path": "platform/util/src/com/intellij/openapi/util/io/FileSystemUtil.java", "license": "apache-2.0", "size": 23463 }
[ "org.jetbrains.annotations.NotNull" ]
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.*;
[ "org.jetbrains.annotations" ]
org.jetbrains.annotations;
1,694,277
public void testGetters() { ValueInteger vs1 = new ValueInteger(-4); ValueInteger vs2 = new ValueInteger(0); ValueInteger vs3 = new ValueInteger(3); assertEquals(true, vs1.getBoolean()); assertEquals(false, vs2.getBoolean()); assertEquals(true, vs3.getBoolean()); assertEquals("-4", vs1.getString()); assertEquals("0", vs2.getString()); assertEquals("3", vs3.getString()); assertEquals(-4.0D, vs1.getNumber(), 0.001D); assertEquals(0.0D, vs2.getNumber(), 0.001D); assertEquals(3.0D, vs3.getNumber(), 0.001D); assertEquals(-4L, vs1.getInteger()); assertEquals(0L, vs2.getInteger()); assertEquals(3L, vs3.getInteger()); assertEquals(new BigDecimal(-4L), vs1.getBigNumber()); assertEquals(new BigDecimal(0L), vs2.getBigNumber()); assertEquals(new BigDecimal(3L), vs3.getBigNumber()); assertEquals(-4L, vs1.getDate().getTime()); assertEquals(0L, vs2.getDate().getTime()); assertEquals(3L, vs3.getDate().getTime()); assertEquals(new Long(-4L), vs1.getSerializable()); assertEquals(new Long(0L), vs2.getSerializable()); assertEquals(new Long(3L), vs3.getSerializable()); }
void function() { ValueInteger vs1 = new ValueInteger(-4); ValueInteger vs2 = new ValueInteger(0); ValueInteger vs3 = new ValueInteger(3); assertEquals(true, vs1.getBoolean()); assertEquals(false, vs2.getBoolean()); assertEquals(true, vs3.getBoolean()); assertEquals("-4", vs1.getString()); assertEquals("0", vs2.getString()); assertEquals("3", vs3.getString()); assertEquals(-4.0D, vs1.getNumber(), 0.001D); assertEquals(0.0D, vs2.getNumber(), 0.001D); assertEquals(3.0D, vs3.getNumber(), 0.001D); assertEquals(-4L, vs1.getInteger()); assertEquals(0L, vs2.getInteger()); assertEquals(3L, vs3.getInteger()); assertEquals(new BigDecimal(-4L), vs1.getBigNumber()); assertEquals(new BigDecimal(0L), vs2.getBigNumber()); assertEquals(new BigDecimal(3L), vs3.getBigNumber()); assertEquals(-4L, vs1.getDate().getTime()); assertEquals(0L, vs2.getDate().getTime()); assertEquals(3L, vs3.getDate().getTime()); assertEquals(new Long(-4L), vs1.getSerializable()); assertEquals(new Long(0L), vs2.getSerializable()); assertEquals(new Long(3L), vs3.getSerializable()); }
/** * Test the getters of ValueInteger */
Test the getters of ValueInteger
testGetters
{ "repo_name": "icholy/geokettle-2.0", "path": "test/org/pentaho/di/compatibility/ValueIntegerTest.java", "license": "lgpl-2.1", "size": 4677 }
[ "java.math.BigDecimal" ]
import java.math.BigDecimal;
import java.math.*;
[ "java.math" ]
java.math;
701,106
T randomChange(T part); }
T randomChange(T part); }
/** * Makes random modifications to the part */
Makes random modifications to the part
randomChange
{ "repo_name": "Collaborne/elasticsearch", "path": "core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java", "license": "apache-2.0", "size": 31451 }
[ "org.elasticsearch.cluster.routing.RandomShardRoutingMutator" ]
import org.elasticsearch.cluster.routing.RandomShardRoutingMutator;
import org.elasticsearch.cluster.routing.*;
[ "org.elasticsearch.cluster" ]
org.elasticsearch.cluster;
28,860
EReference getDocumentRoot_InputSet();
EReference getDocumentRoot_InputSet();
/** * Returns the meta object for the containment reference '{@link org.eclipse.bpmn2.DocumentRoot#getInputSet <em>Input Set</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the containment reference '<em>Input Set</em>'. * @see org.eclipse.bpmn2.DocumentRoot#getInputSet() * @see #getDocumentRoot() * @generated */
Returns the meta object for the containment reference '<code>org.eclipse.bpmn2.DocumentRoot#getInputSet Input Set</code>'.
getDocumentRoot_InputSet
{ "repo_name": "lqjack/fixflow", "path": "modules/fixflow-core/src/main/java/org/eclipse/bpmn2/Bpmn2Package.java", "license": "apache-2.0", "size": 1014933 }
[ "org.eclipse.emf.ecore.EReference" ]
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,407,873
public static PTable createFromProto(PTableProtos.PTable table) { PName tenantId = null; if(table.hasTenantId()){ tenantId = PNameFactory.newName(table.getTenantId().toByteArray()); } PName schemaName = PNameFactory.newName(table.getSchemaNameBytes().toByteArray()); PName tableName = PNameFactory.newName(table.getTableNameBytes().toByteArray()); PTableType tableType = PTableType.values()[table.getTableType().ordinal()]; PIndexState indexState = null; if (table.hasIndexState()) { indexState = PIndexState.fromSerializedValue(table.getIndexState()); } Short viewIndexId = null; if(table.hasViewIndexId()){ viewIndexId = (short)table.getViewIndexId(); } IndexType indexType = IndexType.getDefault(); if(table.hasIndexType()){ indexType = IndexType.fromSerializedValue(table.getIndexType().toByteArray()[0]); } long sequenceNumber = table.getSequenceNumber(); long timeStamp = table.getTimeStamp(); long indexDisableTimestamp = table.getIndexDisableTimestamp(); PName pkName = null; if (table.hasPkNameBytes()) { pkName = PNameFactory.newName(table.getPkNameBytes().toByteArray()); } int bucketNum = table.getBucketNum(); List<PColumn> columns = Lists.newArrayListWithExpectedSize(table.getColumnsCount()); for (PTableProtos.PColumn curPColumnProto : table.getColumnsList()) { columns.add(PColumnImpl.createFromProto(curPColumnProto)); } List<PTable> indexes = Lists.newArrayListWithExpectedSize(table.getIndexesCount()); for (PTableProtos.PTable curPTableProto : table.getIndexesList()) { indexes.add(createFromProto(curPTableProto)); } boolean isImmutableRows = table.getIsImmutableRows(); SortedMap<byte[], GuidePostsInfo> tableGuidePosts = new TreeMap<byte[], GuidePostsInfo>(Bytes.BYTES_COMPARATOR); for (PTableProtos.PTableStats pTableStatsProto : table.getGuidePostsList()) { PGuidePosts pGuidePosts = pTableStatsProto.getPGuidePosts(); int maxLength = pGuidePosts.getMaxLength(); int guidePostsCount = pGuidePosts.getEncodedGuidePostsCount(); GuidePostsInfo info = new 
GuidePostsInfo(pGuidePosts.getByteCountsList(), new ImmutableBytesWritable(pGuidePosts.getEncodedGuidePosts().toByteArray()), pGuidePosts.getRowCountsList(), maxLength, guidePostsCount); tableGuidePosts.put(pTableStatsProto.getKey().toByteArray(), info); } PTableStats stats = new PTableStatsImpl(tableGuidePosts, table.getStatsTimeStamp()); PName dataTableName = null; if (table.hasDataTableNameBytes()) { dataTableName = PNameFactory.newName(table.getDataTableNameBytes().toByteArray()); } PName defaultFamilyName = null; if (table.hasDefaultFamilyName()) { defaultFamilyName = PNameFactory.newName(table.getDefaultFamilyName().toByteArray()); } boolean disableWAL = table.getDisableWAL(); boolean multiTenant = table.getMultiTenant(); boolean storeNulls = table.getStoreNulls(); boolean isTransactional = table.getTransactional(); ViewType viewType = null; String viewStatement = null; List<PName> physicalNames = Collections.emptyList(); if (tableType == PTableType.VIEW) { viewType = ViewType.fromSerializedValue(table.getViewType().toByteArray()[0]); if(table.hasViewStatement()){ viewStatement = (String) PVarchar.INSTANCE.toObject(table.getViewStatement().toByteArray()); } } if (tableType == PTableType.VIEW || viewIndexId != null) { physicalNames = Lists.newArrayListWithExpectedSize(table.getPhysicalNamesCount()); for(int i = 0; i < table.getPhysicalNamesCount(); i++){ physicalNames.add(PNameFactory.newName(table.getPhysicalNames(i).toByteArray())); } } int baseColumnCount = -1; if (table.hasBaseColumnCount()) { baseColumnCount = table.getBaseColumnCount(); } boolean rowKeyOrderOptimizable = false; if (table.hasRowKeyOrderOptimizable()) { rowKeyOrderOptimizable = table.getRowKeyOrderOptimizable(); } long updateCacheFrequency = 0; if (table.hasUpdateCacheFrequency()) { updateCacheFrequency = table.getUpdateCacheFrequency(); } boolean isNamespaceMapped=false; if (table.hasIsNamespaceMapped()) { isNamespaceMapped = table.getIsNamespaceMapped(); } String autoParititonSeqName = 
null; if (table.hasAutoParititonSeqName()) { autoParititonSeqName = table.getAutoParititonSeqName(); } boolean isAppendOnlySchema = false; if (table.hasIsAppendOnlySchema()) { isAppendOnlySchema = table.getIsAppendOnlySchema(); } try { PTableImpl result = new PTableImpl(); result.init(tenantId, schemaName, tableName, tableType, indexState, timeStamp, sequenceNumber, pkName, (bucketNum == NO_SALTING) ? null : bucketNum, columns, stats, schemaName,dataTableName, indexes, isImmutableRows, physicalNames, defaultFamilyName, viewStatement, disableWAL, multiTenant, storeNulls, viewType, viewIndexId, indexType, baseColumnCount, rowKeyOrderOptimizable, isTransactional, updateCacheFrequency, indexDisableTimestamp, isNamespaceMapped, autoParititonSeqName, isAppendOnlySchema); return result; } catch (SQLException e) { throw new RuntimeException(e); // Impossible } }
static PTable function(PTableProtos.PTable table) { PName tenantId = null; if(table.hasTenantId()){ tenantId = PNameFactory.newName(table.getTenantId().toByteArray()); } PName schemaName = PNameFactory.newName(table.getSchemaNameBytes().toByteArray()); PName tableName = PNameFactory.newName(table.getTableNameBytes().toByteArray()); PTableType tableType = PTableType.values()[table.getTableType().ordinal()]; PIndexState indexState = null; if (table.hasIndexState()) { indexState = PIndexState.fromSerializedValue(table.getIndexState()); } Short viewIndexId = null; if(table.hasViewIndexId()){ viewIndexId = (short)table.getViewIndexId(); } IndexType indexType = IndexType.getDefault(); if(table.hasIndexType()){ indexType = IndexType.fromSerializedValue(table.getIndexType().toByteArray()[0]); } long sequenceNumber = table.getSequenceNumber(); long timeStamp = table.getTimeStamp(); long indexDisableTimestamp = table.getIndexDisableTimestamp(); PName pkName = null; if (table.hasPkNameBytes()) { pkName = PNameFactory.newName(table.getPkNameBytes().toByteArray()); } int bucketNum = table.getBucketNum(); List<PColumn> columns = Lists.newArrayListWithExpectedSize(table.getColumnsCount()); for (PTableProtos.PColumn curPColumnProto : table.getColumnsList()) { columns.add(PColumnImpl.createFromProto(curPColumnProto)); } List<PTable> indexes = Lists.newArrayListWithExpectedSize(table.getIndexesCount()); for (PTableProtos.PTable curPTableProto : table.getIndexesList()) { indexes.add(createFromProto(curPTableProto)); } boolean isImmutableRows = table.getIsImmutableRows(); SortedMap<byte[], GuidePostsInfo> tableGuidePosts = new TreeMap<byte[], GuidePostsInfo>(Bytes.BYTES_COMPARATOR); for (PTableProtos.PTableStats pTableStatsProto : table.getGuidePostsList()) { PGuidePosts pGuidePosts = pTableStatsProto.getPGuidePosts(); int maxLength = pGuidePosts.getMaxLength(); int guidePostsCount = pGuidePosts.getEncodedGuidePostsCount(); GuidePostsInfo info = new 
GuidePostsInfo(pGuidePosts.getByteCountsList(), new ImmutableBytesWritable(pGuidePosts.getEncodedGuidePosts().toByteArray()), pGuidePosts.getRowCountsList(), maxLength, guidePostsCount); tableGuidePosts.put(pTableStatsProto.getKey().toByteArray(), info); } PTableStats stats = new PTableStatsImpl(tableGuidePosts, table.getStatsTimeStamp()); PName dataTableName = null; if (table.hasDataTableNameBytes()) { dataTableName = PNameFactory.newName(table.getDataTableNameBytes().toByteArray()); } PName defaultFamilyName = null; if (table.hasDefaultFamilyName()) { defaultFamilyName = PNameFactory.newName(table.getDefaultFamilyName().toByteArray()); } boolean disableWAL = table.getDisableWAL(); boolean multiTenant = table.getMultiTenant(); boolean storeNulls = table.getStoreNulls(); boolean isTransactional = table.getTransactional(); ViewType viewType = null; String viewStatement = null; List<PName> physicalNames = Collections.emptyList(); if (tableType == PTableType.VIEW) { viewType = ViewType.fromSerializedValue(table.getViewType().toByteArray()[0]); if(table.hasViewStatement()){ viewStatement = (String) PVarchar.INSTANCE.toObject(table.getViewStatement().toByteArray()); } } if (tableType == PTableType.VIEW viewIndexId != null) { physicalNames = Lists.newArrayListWithExpectedSize(table.getPhysicalNamesCount()); for(int i = 0; i < table.getPhysicalNamesCount(); i++){ physicalNames.add(PNameFactory.newName(table.getPhysicalNames(i).toByteArray())); } } int baseColumnCount = -1; if (table.hasBaseColumnCount()) { baseColumnCount = table.getBaseColumnCount(); } boolean rowKeyOrderOptimizable = false; if (table.hasRowKeyOrderOptimizable()) { rowKeyOrderOptimizable = table.getRowKeyOrderOptimizable(); } long updateCacheFrequency = 0; if (table.hasUpdateCacheFrequency()) { updateCacheFrequency = table.getUpdateCacheFrequency(); } boolean isNamespaceMapped=false; if (table.hasIsNamespaceMapped()) { isNamespaceMapped = table.getIsNamespaceMapped(); } String autoParititonSeqName = 
null; if (table.hasAutoParititonSeqName()) { autoParititonSeqName = table.getAutoParititonSeqName(); } boolean isAppendOnlySchema = false; if (table.hasIsAppendOnlySchema()) { isAppendOnlySchema = table.getIsAppendOnlySchema(); } try { PTableImpl result = new PTableImpl(); result.init(tenantId, schemaName, tableName, tableType, indexState, timeStamp, sequenceNumber, pkName, (bucketNum == NO_SALTING) ? null : bucketNum, columns, stats, schemaName,dataTableName, indexes, isImmutableRows, physicalNames, defaultFamilyName, viewStatement, disableWAL, multiTenant, storeNulls, viewType, viewIndexId, indexType, baseColumnCount, rowKeyOrderOptimizable, isTransactional, updateCacheFrequency, indexDisableTimestamp, isNamespaceMapped, autoParititonSeqName, isAppendOnlySchema); return result; } catch (SQLException e) { throw new RuntimeException(e); } }
/** * Construct a PTable instance from ProtoBuffered PTable instance * @param table */
Construct a PTable instance from ProtoBuffered PTable instance
createFromProto
{ "repo_name": "7shurik/phoenix", "path": "phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java", "license": "apache-2.0", "size": 61968 }
[ "com.google.common.collect.Lists", "java.sql.SQLException", "java.util.Collections", "java.util.List", "java.util.SortedMap", "java.util.TreeMap", "org.apache.hadoop.hbase.io.ImmutableBytesWritable", "org.apache.hadoop.hbase.util.Bytes", "org.apache.phoenix.coprocessor.generated.PGuidePostsProtos", "org.apache.phoenix.coprocessor.generated.PTableProtos", "org.apache.phoenix.schema.stats.GuidePostsInfo", "org.apache.phoenix.schema.stats.PTableStats", "org.apache.phoenix.schema.stats.PTableStatsImpl", "org.apache.phoenix.schema.types.PVarchar" ]
import com.google.common.collect.Lists; import java.sql.SQLException; import java.util.Collections; import java.util.List; import java.util.SortedMap; import java.util.TreeMap; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.coprocessor.generated.PGuidePostsProtos; import org.apache.phoenix.coprocessor.generated.PTableProtos; import org.apache.phoenix.schema.stats.GuidePostsInfo; import org.apache.phoenix.schema.stats.PTableStats; import org.apache.phoenix.schema.stats.PTableStatsImpl; import org.apache.phoenix.schema.types.PVarchar;
import com.google.common.collect.*; import java.sql.*; import java.util.*; import org.apache.hadoop.hbase.io.*; import org.apache.hadoop.hbase.util.*; import org.apache.phoenix.coprocessor.generated.*; import org.apache.phoenix.schema.stats.*; import org.apache.phoenix.schema.types.*;
[ "com.google.common", "java.sql", "java.util", "org.apache.hadoop", "org.apache.phoenix" ]
com.google.common; java.sql; java.util; org.apache.hadoop; org.apache.phoenix;
1,434,349
public static refWeekNumberType fromPerUnaligned(byte[] encodedBytes) { refWeekNumberType result = new refWeekNumberType(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; }
static refWeekNumberType function(byte[] encodedBytes) { refWeekNumberType result = new refWeekNumberType(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; }
/** * Creates a new refWeekNumberType from encoded stream. */
Creates a new refWeekNumberType from encoded stream
fromPerUnaligned
{ "repo_name": "google/supl-client", "path": "src/main/java/com/google/location/suplclient/asn1/supl2/ulp_components/CdmaCellInformation.java", "license": "apache-2.0", "size": 34169 }
[ "com.google.location.suplclient.asn1.base.BitStreamReader" ]
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.*;
[ "com.google.location" ]
com.google.location;
322,682
PomEquippedMavenImporter loadPomFromFile(File pomFile) throws IllegalArgumentException, InvalidConfigurationFileException;
PomEquippedMavenImporter loadPomFromFile(File pomFile) throws IllegalArgumentException, InvalidConfigurationFileException;
/** * Configures the Maven Importer from Project Object Model contained in the specified POM {@link File}. * * @param pomFile A POM {@link File} the maven Importer should be configured from. * @return The configured Maven Importer from the given Project Object Model. * @throws IllegalArgumentException If no file was specified, if the file does not exist or points to a directory * @throws InvalidConfigurationFileException If the configuration file contents are not in appropriate format */
Configures the Maven Importer from Project Object Model contained in the specified POM <code>File</code>
loadPomFromFile
{ "repo_name": "shrinkwrap/resolver", "path": "maven/api-maven-archive/src/main/java/org/jboss/shrinkwrap/resolver/api/maven/archive/importer/PomlessMavenImporter.java", "license": "apache-2.0", "size": 7921 }
[ "java.io.File", "org.jboss.shrinkwrap.resolver.api.InvalidConfigurationFileException" ]
import java.io.File; import org.jboss.shrinkwrap.resolver.api.InvalidConfigurationFileException;
import java.io.*; import org.jboss.shrinkwrap.resolver.api.*;
[ "java.io", "org.jboss.shrinkwrap" ]
java.io; org.jboss.shrinkwrap;
1,088,313
private List<Member> memberPath(Position position, Member member) { List<Member> posMembers = position.getMembers(); int index = posMembers.indexOf(member); if (index < 0) { return Collections.emptyList(); } return posMembers.subList(0, index + 1); }
List<Member> function(Position position, Member member) { List<Member> posMembers = position.getMembers(); int index = posMembers.indexOf(member); if (index < 0) { return Collections.emptyList(); } return posMembers.subList(0, index + 1); }
/** * determine path to member * * @param position * @param member * @return path to Member */
determine path to member
memberPath
{ "repo_name": "seddikouiss/pivo4j", "path": "pivot4j-core/src/main/java/org/pivot4j/transform/impl/DrillExpandPositionImpl.java", "license": "epl-1.0", "size": 3069 }
[ "java.util.Collections", "java.util.List", "org.olap4j.Position", "org.olap4j.metadata.Member" ]
import java.util.Collections; import java.util.List; import org.olap4j.Position; import org.olap4j.metadata.Member;
import java.util.*; import org.olap4j.*; import org.olap4j.metadata.*;
[ "java.util", "org.olap4j", "org.olap4j.metadata" ]
java.util; org.olap4j; org.olap4j.metadata;
1,149,556
public java.sql.ResultSet getResultSet() throws SQLException { synchronized (checkClosed().getConnectionMutex()) { return ((this.results != null) && this.results.reallyResult()) ? (java.sql.ResultSet) this.results : null; } }
java.sql.ResultSet function() throws SQLException { synchronized (checkClosed().getConnectionMutex()) { return ((this.results != null) && this.results.reallyResult()) ? (java.sql.ResultSet) this.results : null; } }
/** * getResultSet returns the current result as a ResultSet. It should only be * called once per result. * * @return the current result set; null if there are no more * * @exception SQLException * if a database access error occurs (why?) */
getResultSet returns the current result as a ResultSet. It should only be called once per result
getResultSet
{ "repo_name": "namdp06/mysql-connector-java-1", "path": "src/com/mysql/jdbc/StatementImpl.java", "license": "gpl-2.0", "size": 89676 }
[ "java.sql.ResultSet", "java.sql.SQLException" ]
import java.sql.ResultSet; import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
2,284,151
@Test public void testGreedySolutionNode() { System.out.println("StateSpaceStrategy: Test solution node from Greedy Best First Search."); final CodedProblem codedProblem = Tools.generateCodedProblem(domainFile, problemFile); stateSpaceStrategy = new GreedyBestFirstSearch(TIMEOUT * 1000, HEURISTIC_TYPE, HEURISTIC_WEIGHT); final Node solutionNode = stateSpaceStrategy.searchSolutionNode(codedProblem); Assert.assertTrue(solutionNode.getCost() == GREEDY_SOLUTION_COST); Assert.assertTrue(stateSpaceStrategy.extractPlan(solutionNode, codedProblem).size() == GREEDY_SOLUTION_SIZE); }
void function() { System.out.println(STR); final CodedProblem codedProblem = Tools.generateCodedProblem(domainFile, problemFile); stateSpaceStrategy = new GreedyBestFirstSearch(TIMEOUT * 1000, HEURISTIC_TYPE, HEURISTIC_WEIGHT); final Node solutionNode = stateSpaceStrategy.searchSolutionNode(codedProblem); Assert.assertTrue(solutionNode.getCost() == GREEDY_SOLUTION_COST); Assert.assertTrue(stateSpaceStrategy.extractPlan(solutionNode, codedProblem).size() == GREEDY_SOLUTION_SIZE); }
/** * Method that tests solution node (cost and size) for Greedy Best First Search search strategy. */
Method that tests solution node (cost and size) for Greedy Best First Search search strategy
testGreedySolutionNode
{ "repo_name": "pellierd/pddl4j", "path": "src/test/java/fr/uga/pddl4j/test/planners/statespace/search/strategy/StateSpaceStrategyTest.java", "license": "lgpl-3.0", "size": 16938 }
[ "fr.uga.pddl4j.encoding.CodedProblem", "fr.uga.pddl4j.planners.statespace.search.strategy.GreedyBestFirstSearch", "fr.uga.pddl4j.planners.statespace.search.strategy.Node", "fr.uga.pddl4j.test.Tools", "org.junit.Assert" ]
import fr.uga.pddl4j.encoding.CodedProblem; import fr.uga.pddl4j.planners.statespace.search.strategy.GreedyBestFirstSearch; import fr.uga.pddl4j.planners.statespace.search.strategy.Node; import fr.uga.pddl4j.test.Tools; import org.junit.Assert;
import fr.uga.pddl4j.encoding.*; import fr.uga.pddl4j.planners.statespace.search.strategy.*; import fr.uga.pddl4j.test.*; import org.junit.*;
[ "fr.uga.pddl4j", "org.junit" ]
fr.uga.pddl4j; org.junit;
2,885,366
@Override public Cursor query(@NonNull Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { Cursor cursor; switch (sUriMatcher.match(uri)) { case CODE_WEATHER_WITH_DATE: { String normalizedUtcDateString = uri.getLastPathSegment(); String[] selectionArguments = new String[]{normalizedUtcDateString}; cursor = mOpenHelper.getReadableDatabase().query( WeatherContract.WeatherEntry.TABLE_NAME, projection, WeatherContract.WeatherEntry.COLUMN_DATE + " = ? ", selectionArguments, null, null, sortOrder); break; } case CODE_WEATHER: { cursor = mOpenHelper.getReadableDatabase().query( WeatherContract.WeatherEntry.TABLE_NAME, projection, selection, selectionArgs, null, null, sortOrder); break; } default: throw new UnsupportedOperationException("Unknown uri: " + uri); } cursor.setNotificationUri(getContext().getContentResolver(), uri); return cursor; }
Cursor function(@NonNull Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { Cursor cursor; switch (sUriMatcher.match(uri)) { case CODE_WEATHER_WITH_DATE: { String normalizedUtcDateString = uri.getLastPathSegment(); String[] selectionArguments = new String[]{normalizedUtcDateString}; cursor = mOpenHelper.getReadableDatabase().query( WeatherContract.WeatherEntry.TABLE_NAME, projection, WeatherContract.WeatherEntry.COLUMN_DATE + STR, selectionArguments, null, null, sortOrder); break; } case CODE_WEATHER: { cursor = mOpenHelper.getReadableDatabase().query( WeatherContract.WeatherEntry.TABLE_NAME, projection, selection, selectionArgs, null, null, sortOrder); break; } default: throw new UnsupportedOperationException(STR + uri); } cursor.setNotificationUri(getContext().getContentResolver(), uri); return cursor; }
/** * Handles query requests from clients. We will use this method in Sunshine to query for all * of our weather data as well as to query for the weather on a particular day. * * @param uri The URI to query * @param projection The list of columns to put into the cursor. If null, all columns are * included. * @param selection A selection criteria to apply when filtering rows. If null, then all * rows are included. * @param selectionArgs You may include ?s in selection, which will be replaced by * the values from selectionArgs, in order that they appear in the * selection. * @param sortOrder How the rows in the cursor should be sorted. * @return A Cursor containing the results of the query. In our implementation, */
Handles query requests from clients. We will use this method in Sunshine to query for all of our weather data as well as to query for the weather on a particular day
query
{ "repo_name": "3Heads6Arms/Sunshine", "path": "S09.02-Exercise-ContentProviderBulkInsert/app/src/main/java/com/example/android/sunshine/data/WeatherProvider.java", "license": "apache-2.0", "size": 15668 }
[ "android.database.Cursor", "android.net.Uri", "android.support.annotation.NonNull" ]
import android.database.Cursor; import android.net.Uri; import android.support.annotation.NonNull;
import android.database.*; import android.net.*; import android.support.annotation.*;
[ "android.database", "android.net", "android.support" ]
android.database; android.net; android.support;
1,343,247
public void processTick() { List<ContainerStatus> latestEvents = getLatestEvents(); LOG.debug("CL :: Event count: " + latestEvents.size()); // Go through all events and update active and update lists checkEventContainerStatuses(latestEvents); // Checkpoint if (checkpointEnabled) { createCheckpoint(); } LOG.debug("CL :: Update list size: " + updateContainers.size()); LOG.debug("CL :: Active list size: " + activeContainers.size()); // Update Containers logs table and tick counter updateContainersLogs(true); } private class TickThread implements Runnable {
void function() { List<ContainerStatus> latestEvents = getLatestEvents(); LOG.debug(STR + latestEvents.size()); checkEventContainerStatuses(latestEvents); if (checkpointEnabled) { createCheckpoint(); } LOG.debug(STR + updateContainers.size()); LOG.debug(STR + activeContainers.size()); updateContainersLogs(true); } private class TickThread implements Runnable {
/** * Retrieve latest events from the queue; * Update active and update lists with latest events * Perform checkpoint if necessary * Update containers logs table */
Retrieve latest events from the queue; Update active and update lists with latest events Perform checkpoint if necessary Update containers logs table
processTick
{ "repo_name": "gigaroby/hops", "path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/quota/ContainersLogsService.java", "license": "apache-2.0", "size": 17944 }
[ "io.hops.metadata.yarn.entity.ContainerStatus", "java.util.List" ]
import io.hops.metadata.yarn.entity.ContainerStatus; import java.util.List;
import io.hops.metadata.yarn.entity.*; import java.util.*;
[ "io.hops.metadata", "java.util" ]
io.hops.metadata; java.util;
919,906
public void keyPressed(KeyEvent e) { if (e.getKeyCode() == KeyEvent.VK_ENTER) { handleEnterKeyPressed(e.getSource()); } } }; if (list != null) list.addKeyListener(ka); if (list == null) { JTable t = (JTable) UIUtilities .findComponent(chooser, JTable.class); if (t != null) t.addKeyListener(ka); } try { File f = UIUtilities.getDefaultFolder(); if (f != null) chooser.setCurrentDirectory(f); } catch (Exception e) { // Ignore: could not set the default container } chooser.addPropertyChangeListener(this); chooser.setMultiSelectionEnabled(true); chooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES); chooser.setControlButtonsAreShown(false); chooser.setApproveButtonText(TEXT_IMPORT); chooser.setApproveButtonToolTipText(TOOLTIP_IMPORT); bioFormatsFileFilters = new ArrayList<FileFilter>(); if (filters != null) { chooser.setAcceptAllFileFilterUsed(false); for (FileFilter fileFilter : filters) { if (fileFilter instanceof ComboFileFilter) { bioFormatsFileFiltersCombined = fileFilter; ComboFileFilter comboFilter = (ComboFileFilter) fileFilter; FileFilter[] extensionFilters = comboFilter.getFilters(); for (FileFilter combinedFilter : extensionFilters) { bioFormatsFileFilters.add(combinedFilter); } break; } } chooser.addChoosableFileFilter(bioFormatsFileFiltersCombined); for (FileFilter fileFilter : bioFormatsFileFilters) { chooser.addChoosableFileFilter(fileFilter); } chooser.setFileFilter(bioFormatsFileFiltersCombined); } else { chooser.setAcceptAllFileFilterUsed(true); } closeButton = new JButton(TEXT_CLOSE); closeButton.setToolTipText(TOOLTIP_CLOSE); closeButton.setActionCommand("" + CMD_CLOSE); closeButton.addActionListener(this); cancelImportButton = new JButton(importerAction); importerAction.setEnabled(false); importButton = new JButton(TEXT_IMPORT); importButton.setToolTipText(TOOLTIP_IMPORT); importButton.setActionCommand("" + CMD_IMPORT); importButton.addActionListener(this); importButton.setEnabled(false); pixelsSize = new ArrayList<NumericalTextField>(); 
NumericalTextField field; for (int i = 0; i < 3; i++) { field = new NumericalTextField(); field.setNumberType(Double.class); field.setColumns(2); pixelsSize.add(field); } List<Component> boxes = UIUtilities.findComponents(chooser, JComboBox.class); if (boxes != null) { JComboBox box; JComboBox filterBox = null; Iterator<Component> i = boxes.iterator(); while (i.hasNext()) { box = (JComboBox) i.next(); Object o = box.getItemAt(0); if (o instanceof FileFilter) { filterBox = box; break; } } if (filterBox != null) { filterBox.addKeyListener(new KeyAdapter() {
void function(KeyEvent e) { if (e.getKeyCode() == KeyEvent.VK_ENTER) { handleEnterKeyPressed(e.getSource()); } } }; if (list != null) list.addKeyListener(ka); if (list == null) { JTable t = (JTable) UIUtilities .findComponent(chooser, JTable.class); if (t != null) t.addKeyListener(ka); } try { File f = UIUtilities.getDefaultFolder(); if (f != null) chooser.setCurrentDirectory(f); } catch (Exception e) { } chooser.addPropertyChangeListener(this); chooser.setMultiSelectionEnabled(true); chooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES); chooser.setControlButtonsAreShown(false); chooser.setApproveButtonText(TEXT_IMPORT); chooser.setApproveButtonToolTipText(TOOLTIP_IMPORT); bioFormatsFileFilters = new ArrayList<FileFilter>(); if (filters != null) { chooser.setAcceptAllFileFilterUsed(false); for (FileFilter fileFilter : filters) { if (fileFilter instanceof ComboFileFilter) { bioFormatsFileFiltersCombined = fileFilter; ComboFileFilter comboFilter = (ComboFileFilter) fileFilter; FileFilter[] extensionFilters = comboFilter.getFilters(); for (FileFilter combinedFilter : extensionFilters) { bioFormatsFileFilters.add(combinedFilter); } break; } } chooser.addChoosableFileFilter(bioFormatsFileFiltersCombined); for (FileFilter fileFilter : bioFormatsFileFilters) { chooser.addChoosableFileFilter(fileFilter); } chooser.setFileFilter(bioFormatsFileFiltersCombined); } else { chooser.setAcceptAllFileFilterUsed(true); } closeButton = new JButton(TEXT_CLOSE); closeButton.setToolTipText(TOOLTIP_CLOSE); closeButton.setActionCommand(STR" + CMD_IMPORT); importButton.addActionListener(this); importButton.setEnabled(false); pixelsSize = new ArrayList<NumericalTextField>(); NumericalTextField field; for (int i = 0; i < 3; i++) { field = new NumericalTextField(); field.setNumberType(Double.class); field.setColumns(2); pixelsSize.add(field); } List<Component> boxes = UIUtilities.findComponents(chooser, JComboBox.class); if (boxes != null) { JComboBox box; JComboBox filterBox = 
null; Iterator<Component> i = boxes.iterator(); while (i.hasNext()) { box = (JComboBox) i.next(); Object o = box.getItemAt(0); if (o instanceof FileFilter) { filterBox = box; break; } } if (filterBox != null) { filterBox.addKeyListener(new KeyAdapter() {
/** * Adds the files to the import queue. * * @see KeyListener#keyPressed(KeyEvent) */
Adds the files to the import queue
keyPressed
{ "repo_name": "dpwrussell/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/agents/fsimporter/chooser/ImportDialog.java", "license": "gpl-2.0", "size": 52010 }
[ "java.awt.Component", "java.awt.event.KeyAdapter", "java.awt.event.KeyEvent", "java.io.File", "java.util.ArrayList", "java.util.Iterator", "java.util.List", "javax.swing.JButton", "javax.swing.JComboBox", "javax.swing.JFileChooser", "javax.swing.JTable", "javax.swing.filechooser.FileFilter", "org.openmicroscopy.shoola.util.ui.NumericalTextField", "org.openmicroscopy.shoola.util.ui.UIUtilities" ]
import java.awt.Component; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.io.File; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JFileChooser; import javax.swing.JTable; import javax.swing.filechooser.FileFilter; import org.openmicroscopy.shoola.util.ui.NumericalTextField; import org.openmicroscopy.shoola.util.ui.UIUtilities;
import java.awt.*; import java.awt.event.*; import java.io.*; import java.util.*; import javax.swing.*; import javax.swing.filechooser.*; import org.openmicroscopy.shoola.util.ui.*;
[ "java.awt", "java.io", "java.util", "javax.swing", "org.openmicroscopy.shoola" ]
java.awt; java.io; java.util; javax.swing; org.openmicroscopy.shoola;
887,836
@JsonProperty("unit") public String getUnit() { return unit; }
@JsonProperty("unit") String function() { return unit; }
/** * avu unit part **/
avu unit part
getUnit
{ "repo_name": "DFC-Incubator/base-service", "path": "base-service-impl/src/main/java/org/irods/jargon/rest/base/model/Avu.java", "license": "bsd-2-clause", "size": 2136 }
[ "com.fasterxml.jackson.annotation.JsonProperty" ]
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.*;
[ "com.fasterxml.jackson" ]
com.fasterxml.jackson;
650,170
public void testAttributeUndoableEdit() { Element elem = new PlainDocument().getDefaultRootElement(); AttributeSet attrs = new SimpleAttributeSet(); undoEdit = new AttributeUndoableEdit(elem, attrs, true); assertSame(elem, undoEdit.element); assertSame(attrs, undoEdit.newAttributes); assertTrue(undoEdit.isReplacing); assertSame(SimpleAttributeSet.EMPTY, undoEdit.copy); }
void function() { Element elem = new PlainDocument().getDefaultRootElement(); AttributeSet attrs = new SimpleAttributeSet(); undoEdit = new AttributeUndoableEdit(elem, attrs, true); assertSame(elem, undoEdit.element); assertSame(attrs, undoEdit.newAttributes); assertTrue(undoEdit.isReplacing); assertSame(SimpleAttributeSet.EMPTY, undoEdit.copy); }
/** * Tests the constructor of the class. The element has no attributes * in this test. */
Tests the constructor of the class. The element has no attributes in this test
testAttributeUndoableEdit
{ "repo_name": "skyHALud/codenameone", "path": "Ports/iOSPort/xmlvm/apache-harmony-6.0-src-r991881/classlib/modules/swing/src/test/api/java.injected/javax/swing/text/DefaultStyledDocument_AttributeUndoableEditTest.java", "license": "gpl-2.0", "size": 8118 }
[ "javax.swing.text.DefaultStyledDocument" ]
import javax.swing.text.DefaultStyledDocument;
import javax.swing.text.*;
[ "javax.swing" ]
javax.swing;
24,589
public IncrementTypeEnum getIncrementTypeValue();
IncrementTypeEnum function();
/** * Returns the increment type. This specifies dataset values increment step. * @return one of the increment constants in {@link IncrementTypeEnum}. */
Returns the increment type. This specifies dataset values increment step
getIncrementTypeValue
{ "repo_name": "OpenSoftwareSolutions/PDFReporter", "path": "pdfreporter-core/src/org/oss/pdfreporter/engine/JRElementDataset.java", "license": "lgpl-3.0", "size": 3016 }
[ "org.oss.pdfreporter.engine.type.IncrementTypeEnum" ]
import org.oss.pdfreporter.engine.type.IncrementTypeEnum;
import org.oss.pdfreporter.engine.type.*;
[ "org.oss.pdfreporter" ]
org.oss.pdfreporter;
2,003,739
private static Map<String, Double> calculateSpatialEntropyWeights( Map<String, Double> entropies){ double[] termSpatialEntropyValues = entropies .values().stream().mapToDouble(d -> d).toArray(); NormalDistribution gd = new NormalDistribution( // Gaussian function for re-weighting new Mean().evaluate(termSpatialEntropyValues), new StandardDeviation().evaluate(termSpatialEntropyValues)); Double gdMax = 0.0; Map<String, Double> weights = new HashMap<String, Double>(); for(Entry<String, Double> p:entropies.entrySet()){ double weight = gd.density(p.getValue()); weights.put(p.getKey(), weight); if(gdMax < weight){ gdMax = weight; } } for(Entry<String, Double> term:weights.entrySet()){ term.setValue(term.getValue()/gdMax); } return Utils.sortByValues(weights); }
static Map<String, Double> function( Map<String, Double> entropies){ double[] termSpatialEntropyValues = entropies .values().stream().mapToDouble(d -> d).toArray(); NormalDistribution gd = new NormalDistribution( new Mean().evaluate(termSpatialEntropyValues), new StandardDeviation().evaluate(termSpatialEntropyValues)); Double gdMax = 0.0; Map<String, Double> weights = new HashMap<String, Double>(); for(Entry<String, Double> p:entropies.entrySet()){ double weight = gd.density(p.getValue()); weights.put(p.getKey(), weight); if(gdMax < weight){ gdMax = weight; } } for(Entry<String, Double> term:weights.entrySet()){ term.setValue(term.getValue()/gdMax); } return Utils.sortByValues(weights); }
/** * Calculate the max probability value applying the Gaussian functionon the * probability distribution * @param entropies : spatial entropy values of the terms * @return max weight */
Calculate the max probability value applying the Gaussian functionon the probability distribution
calculateSpatialEntropyWeights
{ "repo_name": "socialsensor/multimedia-geotagging", "path": "src/main/java/gr/iti/mklab/metrics/Entropy.java", "license": "apache-2.0", "size": 3829 }
[ "gr.iti.mklab.util.Utils", "java.util.HashMap", "java.util.Map", "org.apache.commons.math3.distribution.NormalDistribution", "org.apache.commons.math3.stat.descriptive.moment.Mean", "org.apache.commons.math3.stat.descriptive.moment.StandardDeviation" ]
import gr.iti.mklab.util.Utils; import java.util.HashMap; import java.util.Map; import org.apache.commons.math3.distribution.NormalDistribution; import org.apache.commons.math3.stat.descriptive.moment.Mean; import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;
import gr.iti.mklab.util.*; import java.util.*; import org.apache.commons.math3.distribution.*; import org.apache.commons.math3.stat.descriptive.moment.*;
[ "gr.iti.mklab", "java.util", "org.apache.commons" ]
gr.iti.mklab; java.util; org.apache.commons;
29,004
private static Process action(final String fileName, String parameters, boolean waitForTermination) throws IOException { Process process = null; if(parameters.trim().length() > 0) { parameters = " " + parameters.trim(); } else { parameters = ""; } if(acroread != null) { process = Runtime.getRuntime().exec( acroread + parameters + " \"" + fileName + "\""); } else if(isWindows()) { if(isWindows9X()) { process = Runtime.getRuntime().exec( "command.com /C start acrord32" + parameters + " \"" + fileName + "\""); } else { process = Runtime.getRuntime().exec( "cmd /c start acrord32" + parameters + " \"" + fileName + "\""); } } else if(isMac()) { if(parameters.trim().length() == 0) { process = Runtime.getRuntime().exec( new String[]{"/usr/bin/open", fileName}); } else { process = Runtime.getRuntime().exec( new String[]{"/usr/bin/open", parameters.trim(), fileName}); } } try { if(process != null && waitForTermination) process.waitFor(); } catch(InterruptedException ie) { } return process; }
static Process function(final String fileName, String parameters, boolean waitForTermination) throws IOException { Process process = null; if(parameters.trim().length() > 0) { parameters = " " + parameters.trim(); } else { parameters = STR \STR\STRcommand.com /C start acrord32STR \STR\STRcmd /c start acrord32STR \STR\STR/usr/bin/openSTR/usr/bin/open", parameters.trim(), fileName}); } } try { if(process != null && waitForTermination) process.waitFor(); } catch(InterruptedException ie) { } return process; }
/** * Performs an action on a PDF document. * * @param fileName * @param parameters * @param waitForTermination * @return a process * @throws IOException */
Performs an action on a PDF document
action
{ "repo_name": "SafetyCulture/DroidText", "path": "app/src/main/java/com/lowagie/tools/Executable.java", "license": "lgpl-3.0", "size": 9688 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,337,042
@Override public FragmentsStats getFragmentsStats() throws Exception { throw new UnsupportedOperationException("ANALYZE for HiveRc and HiveText plugins is not supported"); }
FragmentsStats function() throws Exception { throw new UnsupportedOperationException(STR); }
/** * Returns statistics for Hive table. Currently it's not implemented. */
Returns statistics for Hive table. Currently it's not implemented
getFragmentsStats
{ "repo_name": "cwelton/incubator-hawq", "path": "pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveInputFormatFragmenter.java", "license": "apache-2.0", "size": 12207 }
[ "org.apache.hawq.pxf.api.FragmentsStats" ]
import org.apache.hawq.pxf.api.FragmentsStats;
import org.apache.hawq.pxf.api.*;
[ "org.apache.hawq" ]
org.apache.hawq;
2,303,181
public void rescheduleRecordingsCheck(int recorderId, int findId, String title, String subtitle, String description, String programId, RecordingStatus recStatus, String reason) throws IOException, CommandException;
void function(int recorderId, int findId, String title, String subtitle, String description, String programId, RecordingStatus recStatus, String reason) throws IOException, CommandException;
/** * Request the scheduler to run due to the change in status of a particular * recording. * * @param recorderId * the ID of the recorder for which the scheduler should run or * <code>0</code> to exclude this restriction (only used if * findId is non-zero) * @param findId * TODO * @param title * the program title for which the scheduler should run or * <code>null</code> to exclude this restriction * @param subtitle * the program subtitle for which the scheduler should run or * <code>null</code> to exclude this restriction * @param description * the program description for which the scheduler should run or * <code>null</code> to exclude this restriction * @param programId * the program ID for which the scheduler should run or * <code>null</code> to exclude this restriction * @param recStatus * the new recording status of the changed recording * (informational only) * @param reason * the reason for this reschedule request (informational only) * @throws IOException * if there is a communication or protocol error * @throws CommandException * if the backend fails to process the reschedule request * * @since 73 */
Request the scheduler to run due to the change in status of a particular recording
rescheduleRecordingsCheck
{ "repo_name": "syphr42/libmythtv-java", "path": "protocol/src/main/java/org/syphr/mythtv/protocol/Protocol.java", "license": "apache-2.0", "size": 50355 }
[ "java.io.IOException", "org.syphr.mythtv.commons.exception.CommandException", "org.syphr.mythtv.types.RecordingStatus" ]
import java.io.IOException; import org.syphr.mythtv.commons.exception.CommandException; import org.syphr.mythtv.types.RecordingStatus;
import java.io.*; import org.syphr.mythtv.commons.exception.*; import org.syphr.mythtv.types.*;
[ "java.io", "org.syphr.mythtv" ]
java.io; org.syphr.mythtv;
112,714
EReference getMConnectionSwitchCase_Connector();
EReference getMConnectionSwitchCase_Connector();
/** * Returns the meta object for the reference '{@link es.uah.aut.srg.micobs.mclev.mclevmcad.MConnectionSwitchCase#getConnector <em>Connector</em>}'. * @return the meta object for the reference '<em>Connector</em>'. * @see es.uah.aut.srg.micobs.mclev.mclevmcad.MConnectionSwitchCase#getConnector() * @see #getMConnectionSwitchCase() * @generated */
Returns the meta object for the reference '<code>es.uah.aut.srg.micobs.mclev.mclevmcad.MConnectionSwitchCase#getConnector Connector</code>'
getMConnectionSwitchCase_Connector
{ "repo_name": "parraman/micobs", "path": "mclev/es.uah.aut.srg.micobs.mclev/src/es/uah/aut/srg/micobs/mclev/mclevmcad/mclevmcadPackage.java", "license": "epl-1.0", "size": 59510 }
[ "org.eclipse.emf.ecore.EReference" ]
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
116,661
public void setTabHTML(int index, String html) { assert (index >= 0) && (index < getTabCount()) : "Tab index out of bounds"; ClickDelegatePanel delPanel = (ClickDelegatePanel) panel.getWidget(index + 1); SimplePanel focusablePanel = delPanel.getFocusablePanel(); focusablePanel.setWidget(new HTML(html, false)); }
void function(int index, String html) { assert (index >= 0) && (index < getTabCount()) : STR; ClickDelegatePanel delPanel = (ClickDelegatePanel) panel.getWidget(index + 1); SimplePanel focusablePanel = delPanel.getFocusablePanel(); focusablePanel.setWidget(new HTML(html, false)); }
/** * Sets a tab's contents via HTML. * * Use care when setting an object's HTML; it is an easy way to expose * script-based security problems. Consider using * * @param index * the index of the tab whose HTML is to be set * @param html * the tab new HTML {@link #setTabText(int, String)} whenever * possible. */
Sets a tab's contents via HTML. Use care when setting an object's HTML; it is an easy way to expose script-based security problems. Consider using
setTabHTML
{ "repo_name": "JaLandry/MeasureAuthoringTool_LatestSprint", "path": "mat/src/mat/client/shared/ui/MATTabBar.java", "license": "apache-2.0", "size": 24416 }
[ "com.google.gwt.user.client.ui.SimplePanel" ]
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.*;
[ "com.google.gwt" ]
com.google.gwt;
822,222
@SuppressWarnings("unchecked") private void declareOp(BOperator op) throws Exception { JsonObject json = op._complete(); if (!verifyOp(json)) return; String opClassName = jstring(json, KIND_CLASS); if (opClassName == null) { opClassName = requireNonNull(jstring(kind2Class, jstring(json, OpProperties.KIND))); } Class<? extends Operator> opClass = (Class<? extends Operator>) Class.forName(opClassName); OperatorInvocation<? extends Operator> opDecl = graphDecl.addOperator(opClass); if (json.has("parameters")) { JsonObject params = json.getAsJsonObject("parameters"); for (Entry<String, JsonElement> param : params.entrySet()) setOpParameter(opDecl, param.getKey(), param.getValue().getAsJsonObject()); } declareOutputs(opDecl, json.getAsJsonArray("outputs")); declareInputs(opDecl, json.getAsJsonArray("inputs")); }
@SuppressWarnings(STR) void function(BOperator op) throws Exception { JsonObject json = op._complete(); if (!verifyOp(json)) return; String opClassName = jstring(json, KIND_CLASS); if (opClassName == null) { opClassName = requireNonNull(jstring(kind2Class, jstring(json, OpProperties.KIND))); } Class<? extends Operator> opClass = (Class<? extends Operator>) Class.forName(opClassName); OperatorInvocation<? extends Operator> opDecl = graphDecl.addOperator(opClass); if (json.has(STR)) { JsonObject params = json.getAsJsonObject(STR); for (Entry<String, JsonElement> param : params.entrySet()) setOpParameter(opDecl, param.getKey(), param.getValue().getAsJsonObject()); } declareOutputs(opDecl, json.getAsJsonArray(STR)); declareInputs(opDecl, json.getAsJsonArray(STR)); }
/** * Creates the complete operator declaration * from the JSON representation. * @param op * @throws Exception */
Creates the complete operator declaration from the JSON representation
declareOp
{ "repo_name": "wmarshall484/streamsx.topology", "path": "java/src/com/ibm/streamsx/topology/internal/embedded/EmbeddedGraph.java", "license": "apache-2.0", "size": 18799 }
[ "com.google.gson.JsonElement", "com.google.gson.JsonObject", "com.ibm.streams.flow.declare.OperatorInvocation", "com.ibm.streams.operator.Operator", "com.ibm.streamsx.topology.builder.BOperator", "com.ibm.streamsx.topology.generator.operator.OpProperties", "com.ibm.streamsx.topology.internal.gson.GsonUtilities", "java.util.Map", "java.util.Objects" ]
import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.ibm.streams.flow.declare.OperatorInvocation; import com.ibm.streams.operator.Operator; import com.ibm.streamsx.topology.builder.BOperator; import com.ibm.streamsx.topology.generator.operator.OpProperties; import com.ibm.streamsx.topology.internal.gson.GsonUtilities; import java.util.Map; import java.util.Objects;
import com.google.gson.*; import com.ibm.streams.flow.declare.*; import com.ibm.streams.operator.*; import com.ibm.streamsx.topology.builder.*; import com.ibm.streamsx.topology.generator.operator.*; import com.ibm.streamsx.topology.internal.gson.*; import java.util.*;
[ "com.google.gson", "com.ibm.streams", "com.ibm.streamsx", "java.util" ]
com.google.gson; com.ibm.streams; com.ibm.streamsx; java.util;
2,458,129
return perform(invocation -> invocation.path(ME_ENDPOINT) .queryParam(TOKEN_PARAM, token) .request() .get(MeModel.class)); }
return perform(invocation -> invocation.path(ME_ENDPOINT) .queryParam(TOKEN_PARAM, token) .request() .get(MeModel.class)); }
/** * The /me resource allows you to get information about the authenticated user and easily access * his or her related subresources like tracks, followings, followers, groups and so on. * * @param token * A valid OAuth token * @return the me response */
The /me resource allows you to get information about the authenticated user and easily access his or her related subresources like tracks, followings, followers, groups and so on
getMe
{ "repo_name": "MoodCat/MoodCat.me-Core", "path": "src/main/java/me/moodcat/soundcloud/SoundCloudIdentifier.java", "license": "mit", "size": 862 }
[ "me.moodcat.soundcloud.models.MeModel" ]
import me.moodcat.soundcloud.models.MeModel;
import me.moodcat.soundcloud.models.*;
[ "me.moodcat.soundcloud" ]
me.moodcat.soundcloud;
497,923
AdminView getAdminView();
AdminView getAdminView();
/** * Gets the admin view. * * @return the admin view */
Gets the admin view
getAdminView
{ "repo_name": "cristcost/springsec", "path": "src/main/java/net/cristcost/study/gwt/client/ClientFactory.java", "license": "apache-2.0", "size": 2033 }
[ "net.cristcost.study.gwt.client.admin.AdminView" ]
import net.cristcost.study.gwt.client.admin.AdminView;
import net.cristcost.study.gwt.client.admin.*;
[ "net.cristcost.study" ]
net.cristcost.study;
1,979,059
public void sendDumpB2NRegionForBucket(int bucketId) { getRegionAdvisor().dumpProfiles("dumpB2NForBucket"); try { PartitionResponse response = DumpB2NRegion.send(this.getRegionAdvisor().adviseAllPRNodes(), this, bucketId, false); response.waitForRepliesUninterruptibly(); this.dumpB2NForBucket(bucketId); } catch (ReplyException re) { if (logger.isDebugEnabled()) { logger.debug("sendDumpB2NRegionForBucket got ReplyException", re); } } catch (CancelException e) { if (logger.isDebugEnabled()) { logger.debug("sendDumpB2NRegionForBucket got CacheClosedException", e); } } catch (RegionDestroyedException e) { if (logger.isDebugEnabled()) { logger.debug("sendDumpB2RegionForBucket got RegionDestroyedException", e); } } }
void function(int bucketId) { getRegionAdvisor().dumpProfiles(STR); try { PartitionResponse response = DumpB2NRegion.send(this.getRegionAdvisor().adviseAllPRNodes(), this, bucketId, false); response.waitForRepliesUninterruptibly(); this.dumpB2NForBucket(bucketId); } catch (ReplyException re) { if (logger.isDebugEnabled()) { logger.debug(STR, re); } } catch (CancelException e) { if (logger.isDebugEnabled()) { logger.debug(STR, e); } } catch (RegionDestroyedException e) { if (logger.isDebugEnabled()) { logger.debug(STR, e); } } }
/** * Sends a message to all the {@code PartitionedRegion} participants, telling each member of the * PartitionedRegion to dump the nodelist in bucket2node metadata for specified bucketId. */
Sends a message to all the PartitionedRegion participants, telling each member of the PartitionedRegion to dump the nodelist in bucket2node metadata for specified bucketId
sendDumpB2NRegionForBucket
{ "repo_name": "PurelyApplied/geode", "path": "geode-core/src/main/java/org/apache/geode/internal/cache/PartitionedRegion.java", "license": "apache-2.0", "size": 379321 }
[ "org.apache.geode.CancelException", "org.apache.geode.cache.RegionDestroyedException", "org.apache.geode.distributed.internal.ReplyException", "org.apache.geode.internal.cache.partitioned.DumpB2NRegion", "org.apache.geode.internal.cache.partitioned.PartitionMessage" ]
import org.apache.geode.CancelException; import org.apache.geode.cache.RegionDestroyedException; import org.apache.geode.distributed.internal.ReplyException; import org.apache.geode.internal.cache.partitioned.DumpB2NRegion; import org.apache.geode.internal.cache.partitioned.PartitionMessage;
import org.apache.geode.*; import org.apache.geode.cache.*; import org.apache.geode.distributed.internal.*; import org.apache.geode.internal.cache.partitioned.*;
[ "org.apache.geode" ]
org.apache.geode;
762,278
void deleteComment(String commentId) throws QiitaException;
void deleteComment(String commentId) throws QiitaException;
/** * Deletes the comment. * Needs access token. * * @param commentId the target comment identifier * @throws QiitaException if arguments are incorrect or Qiita API is unavailable */
Deletes the comment. Needs access token
deleteComment
{ "repo_name": "Yuiki/Qiita4Jv2", "path": "src/main/java/jp/yuiki/dev/qiita4jv2/resources/CommentsResources.java", "license": "mit", "size": 2202 }
[ "jp.yuiki.dev.qiita4jv2.QiitaException" ]
import jp.yuiki.dev.qiita4jv2.QiitaException;
import jp.yuiki.dev.qiita4jv2.*;
[ "jp.yuiki.dev" ]
jp.yuiki.dev;
2,911,617
private static void checkForIncompatibleDateTimeOperands(RexCall call) { RelDataType op1 = call.getOperands().get(0).getType(); RelDataType op2 = call.getOperands().get(1).getType(); if ((SqlTypeFamily.DATETIME.contains(op1) && !SqlTypeFamily.DATETIME.contains(op2)) || (SqlTypeFamily.DATETIME.contains(op2) && !SqlTypeFamily.DATETIME.contains(op1)) || (SqlTypeFamily.DATE.contains(op1) && !SqlTypeFamily.DATE.contains(op2)) || (SqlTypeFamily.DATE.contains(op2) && !SqlTypeFamily.DATE.contains(op1)) || (SqlTypeFamily.TIMESTAMP.contains(op1) && !SqlTypeFamily.TIMESTAMP.contains(op2)) || (SqlTypeFamily.TIMESTAMP.contains(op2) && !SqlTypeFamily.TIMESTAMP.contains(op1)) || (SqlTypeFamily.TIME.contains(op1) && !SqlTypeFamily.TIME.contains(op2)) || (SqlTypeFamily.TIME.contains(op2) && !SqlTypeFamily.TIME.contains(op1))) { throw new PredicateAnalyzerException("Cannot handle " + call.getKind() + " expression for _id field, " + call); } }
static void function(RexCall call) { RelDataType op1 = call.getOperands().get(0).getType(); RelDataType op2 = call.getOperands().get(1).getType(); if ((SqlTypeFamily.DATETIME.contains(op1) && !SqlTypeFamily.DATETIME.contains(op2)) (SqlTypeFamily.DATETIME.contains(op2) && !SqlTypeFamily.DATETIME.contains(op1)) (SqlTypeFamily.DATE.contains(op1) && !SqlTypeFamily.DATE.contains(op2)) (SqlTypeFamily.DATE.contains(op2) && !SqlTypeFamily.DATE.contains(op1)) (SqlTypeFamily.TIMESTAMP.contains(op1) && !SqlTypeFamily.TIMESTAMP.contains(op2)) (SqlTypeFamily.TIMESTAMP.contains(op2) && !SqlTypeFamily.TIMESTAMP.contains(op1)) (SqlTypeFamily.TIME.contains(op1) && !SqlTypeFamily.TIME.contains(op2)) (SqlTypeFamily.TIME.contains(op2) && !SqlTypeFamily.TIME.contains(op1))) { throw new PredicateAnalyzerException(STR + call.getKind() + STR + call); } }
/** * If one operand in a binary operator is a DateTime type, but the other isn't, * we should not push down the predicate. * * @param call Current node being evaluated */
If one operand in a binary operator is a DateTime type, but the other isn't, we should not push down the predicate
checkForIncompatibleDateTimeOperands
{ "repo_name": "datametica/calcite", "path": "elasticsearch/src/main/java/org/apache/calcite/adapter/elasticsearch/PredicateAnalyzer.java", "license": "apache-2.0", "size": 33127 }
[ "org.apache.calcite.rel.type.RelDataType", "org.apache.calcite.rex.RexCall", "org.apache.calcite.sql.type.SqlTypeFamily" ]
import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexCall; import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.rel.type.*; import org.apache.calcite.rex.*; import org.apache.calcite.sql.type.*;
[ "org.apache.calcite" ]
org.apache.calcite;
1,720,311
protected Response sendUnsignedRequest( final OAuthRequest request ) { if ( request == null ) { throw new IllegalArgumentException(); } if ( LOGGER.isTraceEnabled() ) { LOGGER.trace( "[" + getClass().getSimpleName() + "] sendUnsignedRequest() : request=" + request ); } return request.send(); }
Response function( final OAuthRequest request ) { if ( request == null ) { throw new IllegalArgumentException(); } if ( LOGGER.isTraceEnabled() ) { LOGGER.trace( "[" + getClass().getSimpleName() + STR + request ); } return request.send(); }
/** * Send a unsigned request. * * @param request the request * @return the response */
Send a unsigned request
sendUnsignedRequest
{ "repo_name": "fabienvauchelles/superpipes", "path": "superpipes/src/main/java/com/vaushell/superpipes/tools/scribe/OAuthClient.java", "license": "lgpl-3.0", "size": 8769 }
[ "org.scribe.model.OAuthRequest", "org.scribe.model.Response" ]
import org.scribe.model.OAuthRequest; import org.scribe.model.Response;
import org.scribe.model.*;
[ "org.scribe.model" ]
org.scribe.model;
986,116
@Test public void testFiles() throws Exception { final String m = "testFiles"; final String fileName = "file"; final String fileExt = ".log"; final File file = new File(dir, fileName + fileExt); final File notANumber = new File(dir, fileName + fileExt + ".notANumber");
void function() throws Exception { final String m = STR; final String fileName = "file"; final String fileExt = ".log"; final File file = new File(dir, fileName + fileExt); final File notANumber = new File(dir, fileName + fileExt + STR);
/** * Test method for {@link com.ibm.ws.ras.internal.LogStreamUtils#findNextFiles(int, java.lang.String, java.lang.String)} . */
Test method for <code>com.ibm.ws.ras.internal.LogStreamUtils#findNextFiles(int, java.lang.String, java.lang.String)</code>
testFiles
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.ws.logging_test/test/com/ibm/ws/logging/internal/impl/LoggingFileUtilsTest.java", "license": "epl-1.0", "size": 11496 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
1,177,482
public Image getLogo () { return logo; }
Image function () { return logo; }
/** * Used to get an Image variable of the modpack's logo * @return - the modpacks logo */
Used to get an Image variable of the modpack's logo
getLogo
{ "repo_name": "juju790/FTNTLauncher_1.7Fix", "path": "src/main/java/net/ftb/data/ModPack.java", "license": "apache-2.0", "size": 17524 }
[ "java.awt.Image" ]
import java.awt.Image;
import java.awt.*;
[ "java.awt" ]
java.awt;
269,727
CloudServiceOperationStatusResponse delete(String cloudServiceName) throws InterruptedException, ExecutionException, ServiceException, IOException;
CloudServiceOperationStatusResponse delete(String cloudServiceName) throws InterruptedException, ExecutionException, ServiceException, IOException;
/** * Delete a cloud service. * * @param cloudServiceName Required. The cloud service name. * @throws InterruptedException Thrown when a thread is waiting, sleeping, * or otherwise occupied, and the thread is interrupted, either before or * during the activity. Occasionally a method may wish to test whether the * current thread has been interrupted, and if so, to immediately throw * this exception. The following code can be used to achieve this effect: * @throws ExecutionException Thrown when attempting to retrieve the result * of a task that aborted by throwing an exception. This exception can be * inspected using the Throwable.getCause() method. * @throws ServiceException Thrown if the server returned an error for the * request. * @throws IOException Thrown if there was an error setting up tracing for * the request. * @return The response body contains the status of the specified * asynchronous operation, indicating whether it has succeeded, is * inprogress, or has failed. Note that this status is distinct from the * HTTP status code returned for the Get Operation Status operation itself. * If the asynchronous operation succeeded, the response body includes the * HTTP status code for the successful request. If the asynchronous * operation failed, the response body includes the HTTP status code for * the failed request, and also includes error information regarding the * failure. */
Delete a cloud service
delete
{ "repo_name": "southworkscom/azure-sdk-for-java", "path": "service-management/azure-svc-mgmt-scheduler/src/main/java/com/microsoft/windowsazure/management/scheduler/CloudServiceOperations.java", "license": "apache-2.0", "size": 11387 }
[ "com.microsoft.windowsazure.exception.ServiceException", "com.microsoft.windowsazure.management.scheduler.models.CloudServiceOperationStatusResponse", "java.io.IOException", "java.util.concurrent.ExecutionException" ]
import com.microsoft.windowsazure.exception.ServiceException; import com.microsoft.windowsazure.management.scheduler.models.CloudServiceOperationStatusResponse; import java.io.IOException; import java.util.concurrent.ExecutionException;
import com.microsoft.windowsazure.exception.*; import com.microsoft.windowsazure.management.scheduler.models.*; import java.io.*; import java.util.concurrent.*;
[ "com.microsoft.windowsazure", "java.io", "java.util" ]
com.microsoft.windowsazure; java.io; java.util;
307,520
@Override public void close() throws IOException { // Mark this writer as closed. Note that we only need to synchronize // this part, as no other synchronized methods should get invoked // once close() has been initiated (see related checkState calls). synchronized (this) { if (closed) { return; } closed = true; } // If nothing was written to this file, then we're already done. if (!archive.isCreated()) { return; } // Complete the tar file by adding the graph, the index and the // trailing two zero blocks. This code is synchronized on the closeMonitor // to ensure that no concurrent thread is still flushing // the file when we close the file handle. synchronized (closeMonitor) { writeBinaryReferences(); writeGraph(); archive.close(); } }
void function() throws IOException { synchronized (this) { if (closed) { return; } closed = true; } if (!archive.isCreated()) { return; } synchronized (closeMonitor) { writeBinaryReferences(); writeGraph(); archive.close(); } }
/** * Closes this tar file. * * @throws IOException if the tar file could not be closed */
Closes this tar file
close
{ "repo_name": "trekawek/jackrabbit-oak", "path": "oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/TarWriter.java", "license": "apache-2.0", "size": 11122 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
819,070
Iterator getCurrentDataEntries();
Iterator getCurrentDataEntries();
/** * Get all data configuration entries from the current locator. * If the current locator is root will be returned only own data entries. * * @return an <code>Iterator</code> structure with all data entries (whithout comments) from the current locator */
Get all data configuration entries from the current locator. If the current locator is root will be returned only own data entries
getCurrentDataEntries
{ "repo_name": "stefandmn/AREasy", "path": "src/java/org/areasy/common/support/configuration/ConfigurationLocator.java", "license": "lgpl-3.0", "size": 10518 }
[ "java.util.Iterator" ]
import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
446,432
public ListMultimap<String, DrillFuncHolder> getAllFunctionsWithHolders() { return getAllFunctionsWithHolders(null); }
ListMultimap<String, DrillFuncHolder> function() { return getAllFunctionsWithHolders(null); }
/** * Returns list of functions with list of function holders for each functions without version number. * This is read operation, so several users can perform this operation at the same time. * * @return all functions which their holders */
Returns list of functions with list of function holders for each functions without version number. This is read operation, so several users can perform this operation at the same time
getAllFunctionsWithHolders
{ "repo_name": "superbstreak/drill", "path": "exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolder.java", "license": "apache-2.0", "size": 17312 }
[ "org.apache.drill.exec.expr.fn.DrillFuncHolder", "org.apache.drill.shaded.guava.com.google.common.collect.ListMultimap" ]
import org.apache.drill.exec.expr.fn.DrillFuncHolder; import org.apache.drill.shaded.guava.com.google.common.collect.ListMultimap;
import org.apache.drill.exec.expr.fn.*; import org.apache.drill.shaded.guava.com.google.common.collect.*;
[ "org.apache.drill" ]
org.apache.drill;
2,045,997
protected void addUsersRealName(Map<String, Object> model, String userName, String propertyPrefix) { NodeRef user = this.personService.getPerson(userName); if (user != null) { String firstName = (String) getNodeService().getProperty(user, ContentModel.PROP_FIRSTNAME); if (firstName != null) { model.put(propertyPrefix + "FirstName", firstName); } String lastName = (String) getNodeService().getProperty(user, ContentModel.PROP_LASTNAME); if (lastName != null) { model.put(propertyPrefix + "LastName", lastName); } } }
void function(Map<String, Object> model, String userName, String propertyPrefix) { NodeRef user = this.personService.getPerson(userName); if (user != null) { String firstName = (String) getNodeService().getProperty(user, ContentModel.PROP_FIRSTNAME); if (firstName != null) { model.put(propertyPrefix + STR, firstName); } String lastName = (String) getNodeService().getProperty(user, ContentModel.PROP_LASTNAME); if (lastName != null) { model.put(propertyPrefix + STR, lastName); } } }
/** * Adds the given username's first and last name to the given model. * * @param model The model to add the first and last name to * @param userName The username of the user to lookup * @param propertyPrefix The prefix of the property name to use when adding to the model */
Adds the given username's first and last name to the given model
addUsersRealName
{ "repo_name": "dnacreative/records-management", "path": "rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/script/DispositionLifecycleGet.java", "license": "lgpl-3.0", "size": 7338 }
[ "java.util.Map", "org.alfresco.model.ContentModel", "org.alfresco.service.cmr.repository.NodeRef" ]
import java.util.Map; import org.alfresco.model.ContentModel; import org.alfresco.service.cmr.repository.NodeRef;
import java.util.*; import org.alfresco.model.*; import org.alfresco.service.cmr.repository.*;
[ "java.util", "org.alfresco.model", "org.alfresco.service" ]
java.util; org.alfresco.model; org.alfresco.service;
388,573
@Override public synchronized void seek(long targetPos) throws IOException { if (targetPos > getFileLength()) { throw new EOFException("Cannot seek after EOF"); } if (targetPos < 0) { throw new EOFException("Cannot seek to negative offset"); } if (closed.get()) { throw new IOException("Stream is closed!"); } boolean done = false; if (pos <= targetPos && targetPos <= blockEnd) { // // If this seek is to a positive position in the current // block, and this piece of data might already be lying in // the TCP buffer, then just eat up the intervening data. // int diff = (int)(targetPos - pos); if (diff <= blockReader.available()) { try { pos += blockReader.skip(diff); if (pos == targetPos) { done = true; } else { // The range was already checked. If the block reader returns // something unexpected instead of throwing an exception, it is // most likely a bug. String errMsg = "BlockReader failed to seek to " + targetPos + ". Instead, it seeked to " + pos + "."; DFSClient.LOG.warn(errMsg); throw new IOException(errMsg); } } catch (IOException e) {//make following read to retry if(DFSClient.LOG.isDebugEnabled()) { DFSClient.LOG.debug("Exception while seek to " + targetPos + " from " + getCurrentBlock() + " of " + src + " from " + currentNode, e); } } } } if (!done) { pos = targetPos; blockEnd = -1; } }
synchronized void function(long targetPos) throws IOException { if (targetPos > getFileLength()) { throw new EOFException(STR); } if (targetPos < 0) { throw new EOFException(STR); } if (closed.get()) { throw new IOException(STR); } boolean done = false; if (pos <= targetPos && targetPos <= blockEnd) { if (diff <= blockReader.available()) { try { pos += blockReader.skip(diff); if (pos == targetPos) { done = true; } else { String errMsg = STR + targetPos + STR + pos + "."; DFSClient.LOG.warn(errMsg); throw new IOException(errMsg); } } catch (IOException e) { if(DFSClient.LOG.isDebugEnabled()) { DFSClient.LOG.debug(STR + targetPos + STR + getCurrentBlock() + STR + src + STR + currentNode, e); } } } } if (!done) { pos = targetPos; blockEnd = -1; } }
/** * Seek to a new arbitrary location */
Seek to a new arbitrary location
seek
{ "repo_name": "wankunde/cloudera_hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java", "license": "apache-2.0", "size": 66520 }
[ "java.io.EOFException", "java.io.IOException" ]
import java.io.EOFException; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
53,675
public List<ProvidedBrowserUI> getProvidedBrowserUIList() { return Collections.unmodifiableList(providedBrowserUIList); }
List<ProvidedBrowserUI> function() { return Collections.unmodifiableList(providedBrowserUIList); }
/** * Gets the (unmodifiable) list of {@code ProvidedBrowserUI} objects for all {@code ProvidedBrowser}s installed. * * @return an unmodifiable list with all browsers installed * @since 1.1.0 * @see #createProvidedBrowsersComboBoxModel() */
Gets the (unmodifiable) list of ProvidedBrowserUI objects for all ProvidedBrowsers installed
getProvidedBrowserUIList
{ "repo_name": "rnehra01/zap-extensions", "path": "src/org/zaproxy/zap/extension/selenium/ExtensionSelenium.java", "license": "apache-2.0", "size": 34384 }
[ "java.util.Collections", "java.util.List" ]
import java.util.Collections; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,113,806
return rejectReason; } /** * Sets the value of the rejectReason property. * * @param value * allowed object is * {@link RejectReasonRef }
return rejectReason; } /** * Sets the value of the rejectReason property. * * @param value * allowed object is * {@link RejectReasonRef }
/** * Gets the value of the rejectReason property. * * @return * possible object is * {@link RejectReasonRef } * */
Gets the value of the rejectReason property
getRejectReason
{ "repo_name": "simokhov/schemas44", "path": "src/main/java/ru/gov/zakupki/oos/pprf615types/_1/AppRejectedReasonType.java", "license": "mit", "size": 2699 }
[ "ru.gov.zakupki.oos.base._1.RejectReasonRef" ]
import ru.gov.zakupki.oos.base._1.RejectReasonRef;
import ru.gov.zakupki.oos.base.*;
[ "ru.gov.zakupki" ]
ru.gov.zakupki;
1,156,981
@Test public void testPoolAndWriter() throws Exception { final String regionName = this.getName(); final Host host = Host.getHost(0); VM server = host.getVM(0); VM client = host.getVM(1);
void function() throws Exception { final String regionName = this.getName(); final Host host = Host.getHost(0); VM server = host.getVM(0); VM client = host.getVM(1);
/** * Test the we can have both a connection pool and a cache writer. * * The expected order of operations for put is: local writer put on server */
Test the we can have both a connection pool and a cache writer. The expected order of operations for put is: local writer put on server
testPoolAndWriter
{ "repo_name": "deepakddixit/incubator-geode", "path": "geode-core/src/distributedTest/java/org/apache/geode/cache/ConnectionPoolAndLoaderDUnitTest.java", "license": "apache-2.0", "size": 15592 }
[ "org.apache.geode.test.dunit.Host" ]
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.*;
[ "org.apache.geode" ]
org.apache.geode;
467,708
protected void testImpl(String xml) throws Exception { WebConversation conversation = new WebConversation(); String jobId = createJob(conversation, xml); // POST request to the phase resource. String resourceUrl = serviceUrl + "/" + jobId + "/phase"; WebRequest postRequest = new PostMethodWebRequest(resourceUrl); postRequest.setParameter("PHASE", "RUN"); WebResponse response = post(conversation, postRequest); // Get the redirect. String location = response.getHeaderField("Location"); log.debug("Location: " + location); assertNotNull(" POST response to " + resourceUrl + " location header not set", location); // Follow the redirect. response = get(conversation, location); // Validate the XML against the schema. log.debug("XML:\r\n" + response.getText()); buildDocument(response.getText(), true); // Job resource for this jobId. resourceUrl = serviceUrl + "/" + jobId; // Loop until the phase is either COMPLETED, ERROR or ABORTED. Element root = null; List list = null; Namespace namespace = null; boolean done = false; Long start = System.currentTimeMillis(); while (!done) { // Wait for 1 second. Thread.sleep(1000); // GET the resource. response = get(conversation, resourceUrl); // Create DOM document from XML. log.debug("XML:\r\n" + response.getText()); Document document = buildDocument(response.getText(), false); // Root element of the document. root = document.getRootElement(); assertNotNull(" no XML returned from GET of " + resourceUrl, root); // Get the phase element. list = root.getChildren("phase", namespace); assertEquals( " phase element should only have a single element in XML returned from GET of " + resourceUrl, 1, list.size()); Element phase = (Element) list.get(0); String phaseText = phase.getText(); // Check if request timeout exceeded. if ((System.currentTimeMillis() - start) > (REQUEST_TIMEOUT * 1000)) fail(" request timeout exceeded in GET of " + resourceUrl); // COMPLETED phase, continue with test. 
if (phaseText.equals("COMPLETED")) break; // Fail if phase is ERROR or ABORTED. else if (phaseText.equals("ERROR") || phaseText.equals("ABORTED")) fail(" phase should not be " + phaseText + ", in XML returned from GET of " + resourceUrl); // Check phase, if still PENDING or QUEUED after x seconds, fail. else if (phaseText.equals("PENDING") || phaseText.equals("QUEUED") || phaseText.equals("EXECUTING")) continue; } // Get the results element. list = root.getChildren("results", namespace); assertEquals( " uws:results element should only have a single element in XML returned from GET of " + resourceUrl, 1, list.size()); // Get the list of result elements. Element results = (Element) list.get(0); list = results.getChildren("result", namespace); // Get a List of URL's for the result href attribute. List<URL> resultUrls = new ArrayList<URL>(); for (Iterator it = list.iterator(); it.hasNext();) { Element element = (Element) it.next(); List attributes = element.getAttributes(); for (Iterator itt = attributes.iterator(); itt.hasNext();) { Attribute attribute = (Attribute) itt.next(); if (attribute.getNamespacePrefix().equals("xlink") && attribute.getName().equals("href")) { try { // Try and create an URL from the href and add to list. URL url = new URL(attribute.getValue()); resultUrls.add(url); } catch (MalformedURLException mue) { log.error(mue); fail(mue.getMessage()); } } } } // Do a HEAD request on each result url. for (URL url : resultUrls) { head(conversation, url.toString()); } deleteJob(conversation, jobId); }
void function(String xml) throws Exception { WebConversation conversation = new WebConversation(); String jobId = createJob(conversation, xml); String resourceUrl = serviceUrl + "/" + jobId + STR; WebRequest postRequest = new PostMethodWebRequest(resourceUrl); postRequest.setParameter("PHASE", "RUN"); WebResponse response = post(conversation, postRequest); String location = response.getHeaderField(STR); log.debug(STR + location); assertNotNull(STR + resourceUrl + STR, location); response = get(conversation, location); log.debug(STR + response.getText()); buildDocument(response.getText(), true); resourceUrl = serviceUrl + "/" + jobId; Element root = null; List list = null; Namespace namespace = null; boolean done = false; Long start = System.currentTimeMillis(); while (!done) { Thread.sleep(1000); response = get(conversation, resourceUrl); log.debug(STR + response.getText()); Document document = buildDocument(response.getText(), false); root = document.getRootElement(); assertNotNull(STR + resourceUrl, root); list = root.getChildren("phase", namespace); assertEquals( STR + resourceUrl, 1, list.size()); Element phase = (Element) list.get(0); String phaseText = phase.getText(); if ((System.currentTimeMillis() - start) > (REQUEST_TIMEOUT * 1000)) fail(STR + resourceUrl); if (phaseText.equals(STR)) break; else if (phaseText.equals("ERROR") phaseText.equals(STR)) fail(STR + phaseText + STR + resourceUrl); else if (phaseText.equals(STR) phaseText.equals(STR) phaseText.equals(STR)) continue; } list = root.getChildren(STR, namespace); assertEquals( STR + resourceUrl, 1, list.size()); Element results = (Element) list.get(0); list = results.getChildren(STR, namespace); List<URL> resultUrls = new ArrayList<URL>(); for (Iterator it = list.iterator(); it.hasNext();) { Element element = (Element) it.next(); List attributes = element.getAttributes(); for (Iterator itt = attributes.iterator(); itt.hasNext();) { Attribute attribute = (Attribute) itt.next(); if 
(attribute.getNamespacePrefix().equals("xlink") && attribute.getName().equals("href")) { try { URL url = new URL(attribute.getValue()); resultUrls.add(url); } catch (MalformedURLException mue) { log.error(mue); fail(mue.getMessage()); } } } } for (URL url : resultUrls) { head(conversation, url.toString()); } deleteJob(conversation, jobId); }
/** * This is the actual test implentation. * * @param xml: XML string to be posted to the SampleUWS Server. * */
This is the actual test implentation
testImpl
{ "repo_name": "opencadc/uws", "path": "cadc-test-uws/src/main/java/ca/nrc/cadc/conformance/uws/ResultsXmlTest.java", "license": "agpl-3.0", "size": 10020 }
[ "com.meterware.httpunit.PostMethodWebRequest", "com.meterware.httpunit.WebConversation", "com.meterware.httpunit.WebRequest", "com.meterware.httpunit.WebResponse", "java.net.MalformedURLException", "java.util.ArrayList", "java.util.Iterator", "java.util.List", "org.jdom2.Attribute", "org.jdom2.Document", "org.jdom2.Element", "org.jdom2.Namespace", "org.junit.Assert" ]
import com.meterware.httpunit.PostMethodWebRequest; import com.meterware.httpunit.WebConversation; import com.meterware.httpunit.WebRequest; import com.meterware.httpunit.WebResponse; import java.net.MalformedURLException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.jdom2.Attribute; import org.jdom2.Document; import org.jdom2.Element; import org.jdom2.Namespace; import org.junit.Assert;
import com.meterware.httpunit.*; import java.net.*; import java.util.*; import org.jdom2.*; import org.junit.*;
[ "com.meterware.httpunit", "java.net", "java.util", "org.jdom2", "org.junit" ]
com.meterware.httpunit; java.net; java.util; org.jdom2; org.junit;
1,580,729
public boolean isAttributeSupported(int attributeId) { synchronized (supportedAttributes) { return supportedAttributes.contains(attributeId); } } /** * Discovers the list of attributes supported by the cluster on the remote device. * <p> * If the discovery has already been completed, and rediscover is false, then the future will complete immediately * and the user can use existing results. Normally there should not be a need to set rediscover to true. * <p> * This method returns a future to a boolean. Upon success the caller should call {@link #getSupportedAttributes()} * to get the list of supported attributes or {@link #isAttributeSupported(int)} to test if a single attribute is * supported. * * @param rediscover true to perform a discovery even if it was previously completed * @return {@link Future} returning a {@link Boolean}
boolean function(int attributeId) { synchronized (supportedAttributes) { return supportedAttributes.contains(attributeId); } } /** * Discovers the list of attributes supported by the cluster on the remote device. * <p> * If the discovery has already been completed, and rediscover is false, then the future will complete immediately * and the user can use existing results. Normally there should not be a need to set rediscover to true. * <p> * This method returns a future to a boolean. Upon success the caller should call {@link #getSupportedAttributes()} * to get the list of supported attributes or {@link #isAttributeSupported(int)} to test if a single attribute is * supported. * * @param rediscover true to perform a discovery even if it was previously completed * @return {@link Future} returning a {@link Boolean}
/** * Checks if the cluster supports a specified attribute ID. * Note that if {@link #discoverAttributes(boolean)} has not been called, this method will return false. * * @param attributeId the attribute to check * @return true if the attribute is known to be supported, otherwise false */
Checks if the cluster supports a specified attribute ID. Note that if <code>#discoverAttributes(boolean)</code> has not been called, this method will return false
isAttributeSupported
{ "repo_name": "zsmartsystems/com.zsmartsystems.zigbee", "path": "com.zsmartsystems.zigbee/src/main/java/com/zsmartsystems/zigbee/zcl/ZclCluster.java", "license": "epl-1.0", "size": 85440 }
[ "java.util.concurrent.Future" ]
import java.util.concurrent.Future;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
854,169
@NotNull public Builder withTimeUnit(@NotNull TimeUnit unit) { PARAM.unit = unit; return this; }
Builder function(@NotNull TimeUnit unit) { PARAM.unit = unit; return this; }
/** * Set the {@link #unit} instance. * @param unit {@link TimeUnit} instance. * @return {@link Builder} instance. */
Set the <code>#unit</code> instance
withTimeUnit
{ "repo_name": "protoman92/JavaUtilities", "path": "src/main/java/org/swiften/javautilities/rx/RxParam.java", "license": "apache-2.0", "size": 3429 }
[ "java.util.concurrent.TimeUnit", "org.jetbrains.annotations.NotNull" ]
import java.util.concurrent.TimeUnit; import org.jetbrains.annotations.NotNull;
import java.util.concurrent.*; import org.jetbrains.annotations.*;
[ "java.util", "org.jetbrains.annotations" ]
java.util; org.jetbrains.annotations;
389,217
protected final void init(Session session) { // flag the Session-dependent cached tables Table t; for (int i = 0; i < sysTables.length; i++) { t = sysTables[i] = generateTable(session, null, i); if (t != null) { t.setDataReadOnly(true); } } GranteeManager gm = database.getGranteeManager(); Right right = new Right(); right.set(GrantConstants.SELECT, null); for (int i = 0; i < sysTableHsqlNames.length; i++) { if (sysTables[i] != null) { gm.grantSystemToPublic(sysTables[i], right); } } right = Right.fullRights; gm.grantSystemToPublic(Charset.SQL_CHARACTER, right); gm.grantSystemToPublic(Charset.SQL_IDENTIFIER_CHARSET, right); gm.grantSystemToPublic(Charset.SQL_TEXT, right); gm.grantSystemToPublic(TypeInvariants.SQL_IDENTIFIER, right); gm.grantSystemToPublic(TypeInvariants.YES_OR_NO, right); gm.grantSystemToPublic(TypeInvariants.TIME_STAMP, right); gm.grantSystemToPublic(TypeInvariants.CARDINAL_NUMBER, right); gm.grantSystemToPublic(TypeInvariants.CHARACTER_DATA, right); }
final void function(Session session) { Table t; for (int i = 0; i < sysTables.length; i++) { t = sysTables[i] = generateTable(session, null, i); if (t != null) { t.setDataReadOnly(true); } } GranteeManager gm = database.getGranteeManager(); Right right = new Right(); right.set(GrantConstants.SELECT, null); for (int i = 0; i < sysTableHsqlNames.length; i++) { if (sysTables[i] != null) { gm.grantSystemToPublic(sysTables[i], right); } } right = Right.fullRights; gm.grantSystemToPublic(Charset.SQL_CHARACTER, right); gm.grantSystemToPublic(Charset.SQL_IDENTIFIER_CHARSET, right); gm.grantSystemToPublic(Charset.SQL_TEXT, right); gm.grantSystemToPublic(TypeInvariants.SQL_IDENTIFIER, right); gm.grantSystemToPublic(TypeInvariants.YES_OR_NO, right); gm.grantSystemToPublic(TypeInvariants.TIME_STAMP, right); gm.grantSystemToPublic(TypeInvariants.CARDINAL_NUMBER, right); gm.grantSystemToPublic(TypeInvariants.CHARACTER_DATA, right); }
/** * One time initialisation of instance attributes * at construction time. <p> * */
One time initialisation of instance attributes at construction time.
init
{ "repo_name": "ferquies/2dam", "path": "AD/Tema 2/hsqldb-2.3.1/hsqldb/src/org/hsqldb/dbinfo/DatabaseInformationMain.java", "license": "gpl-3.0", "size": 147237 }
[ "org.hsqldb.Session", "org.hsqldb.Table", "org.hsqldb.TypeInvariants", "org.hsqldb.rights.GrantConstants", "org.hsqldb.rights.GranteeManager", "org.hsqldb.rights.Right", "org.hsqldb.types.Charset" ]
import org.hsqldb.Session; import org.hsqldb.Table; import org.hsqldb.TypeInvariants; import org.hsqldb.rights.GrantConstants; import org.hsqldb.rights.GranteeManager; import org.hsqldb.rights.Right; import org.hsqldb.types.Charset;
import org.hsqldb.*; import org.hsqldb.rights.*; import org.hsqldb.types.*;
[ "org.hsqldb", "org.hsqldb.rights", "org.hsqldb.types" ]
org.hsqldb; org.hsqldb.rights; org.hsqldb.types;
2,395,388
private static File createCacheDir() { final File cachedir; try { cachedir = File.createTempFile("felix.example.extenderbased", null); cachedir.delete(); createShutdownHook(cachedir); return cachedir; } catch (IOException e) { // temp dir creation failed, return null return null; } }
static File function() { final File cachedir; try { cachedir = File.createTempFile(STR, null); cachedir.delete(); createShutdownHook(cachedir); return cachedir; } catch (IOException e) { return null; } }
/** * Tries to create a temporay cache dir. If creation of the cache dir is successful, * it will be returned. If creation fails, null will be returned. * * @return a {@code File} object representing the cache dir */
Tries to create a temporay cache dir. If creation of the cache dir is successful, it will be returned. If creation fails, null will be returned
createCacheDir
{ "repo_name": "boneman1231/org.apache.felix", "path": "trunk/examples/servicebased.host/src/main/java/org/apache/felix/example/servicebased/host/launch/ConfigUtil.java", "license": "apache-2.0", "size": 3638 }
[ "java.io.File", "java.io.IOException" ]
import java.io.File; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,133,787
@Transactional public void onPasswordChange(@Observes UserPasswordChangeEvent event) { auditProcessor.process(null, event.getOperationId(), event); }
void function(@Observes UserPasswordChangeEvent event) { auditProcessor.process(null, event.getOperationId(), event); }
/** * Observes user password changes. * * @param event * - the password change event */
Observes user password changes
onPasswordChange
{ "repo_name": "SirmaITT/conservation-space-1.7.0", "path": "docker/sirma-platform/platform/seip-parent/platform/seip-audit/seip-audit-impl/src/main/java/com/sirma/itt/emf/audit/observer/SecurityAuditObserver.java", "license": "lgpl-3.0", "size": 2725 }
[ "com.sirma.itt.seip.resources.event.UserPasswordChangeEvent", "javax.enterprise.event.Observes" ]
import com.sirma.itt.seip.resources.event.UserPasswordChangeEvent; import javax.enterprise.event.Observes;
import com.sirma.itt.seip.resources.event.*; import javax.enterprise.event.*;
[ "com.sirma.itt", "javax.enterprise" ]
com.sirma.itt; javax.enterprise;
197,505
////////////////////////////////////////////////////////////////////////////// public void testGetAllUserNames() throws FtpException { String[] usernames = userManager.getAllUserNames(); assertNotNull(usernames); assertTrue(usernames.length == 3); HashSet<String> userSet = new HashSet<String>(); Collections.addAll(userSet, usernames[0], usernames[1], usernames[2]); assertTrue(userSet.contains("demo")); assertTrue(userSet.contains("demo2")); assertTrue(userSet.contains("admin")); } // testGetAllUserNames
void function() throws FtpException { String[] usernames = userManager.getAllUserNames(); assertNotNull(usernames); assertTrue(usernames.length == 3); HashSet<String> userSet = new HashSet<String>(); Collections.addAll(userSet, usernames[0], usernames[1], usernames[2]); assertTrue(userSet.contains("demo")); assertTrue(userSet.contains("demo2")); assertTrue(userSet.contains("admin")); }
/** Test method for {@link cat.calidos.maven.ftpserver.users.SimpleUserManager#getAllUserNames()}. * @throws FtpException */
Test method for <code>cat.calidos.maven.ftpserver.users.SimpleUserManager#getAllUserNames()</code>
testGetAllUserNames
{ "repo_name": "danigiri/ftpserver-maven-plugin", "path": "src/test/java/cat/calidos/maven/ftpserver/SimpleUserManagerTest.java", "license": "apache-2.0", "size": 8196 }
[ "java.util.Collections", "java.util.HashSet", "org.apache.ftpserver.ftplet.FtpException" ]
import java.util.Collections; import java.util.HashSet; import org.apache.ftpserver.ftplet.FtpException;
import java.util.*; import org.apache.ftpserver.ftplet.*;
[ "java.util", "org.apache.ftpserver" ]
java.util; org.apache.ftpserver;
472,361
public static String getNewSaveFileLocation(boolean isTemporary) { File dst; File savesDir = FMLClientHandler.instance().getSavesDir(); do { // We used to create filenames based on the current date/time, but this can cause problems when // multiple clients might be writing to the same save location. Instead, use a GUID: String s = UUID.randomUUID().toString(); // Add our port number, to help with file management: s = AddressHelper.getMissionControlPort() + "_" + s; // If this is a temp file, mark it as such: if (isTemporary) { s = tempMark + s; } dst = new File(savesDir, s); } while (dst.exists()); return dst.getName(); }
static String function(boolean isTemporary) { File dst; File savesDir = FMLClientHandler.instance().getSavesDir(); do { String s = UUID.randomUUID().toString(); s = AddressHelper.getMissionControlPort() + "_" + s; if (isTemporary) { s = tempMark + s; } dst = new File(savesDir, s); } while (dst.exists()); return dst.getName(); }
/** Get a filename to use for creating a new Minecraft save map.<br> * Ensure no duplicates. * @param isTemporary mark the filename such that the file management code knows to delete this later * @return a unique filename (relative to the saves folder) */
Get a filename to use for creating a new Minecraft save map. Ensure no duplicates
getNewSaveFileLocation
{ "repo_name": "Yarichi/Proyecto-DASI", "path": "Malmo/Minecraft/src/main/java/com/microsoft/Malmo/Utils/MapFileHelper.java", "license": "gpl-2.0", "size": 5852 }
[ "java.io.File", "java.util.UUID", "net.minecraftforge.fml.client.FMLClientHandler" ]
import java.io.File; import java.util.UUID; import net.minecraftforge.fml.client.FMLClientHandler;
import java.io.*; import java.util.*; import net.minecraftforge.fml.client.*;
[ "java.io", "java.util", "net.minecraftforge.fml" ]
java.io; java.util; net.minecraftforge.fml;
754,255
@JsonIgnore public void setTabAccessMap(Map<String, List<String>> tabAccessMap) { this.tabAccessMap = tabAccessMap; }
public void setTabAccessMap(Map<String, List<String>> tabAccessMap) { this.tabAccessMap = tabAccessMap; }
/** * Returns the map of available tabs for specified permissions for this module. * @return a map, where the keys are tabs names and values are lists of permissions for which the tab is available */
Returns the map of available tabs for specified permissions for this module
getTabAccessMap
{ "repo_name": "justin-hayes/motech", "path": "platform/osgi-web-util/src/main/java/org/motechproject/osgi/web/ModuleRegistrationData.java", "license": "bsd-3-clause", "size": 15011 }
[ "java.util.List", "java.util.Map" ]
import java.util.List; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,839,217
public void register(AsyncContext asyncContext) { contextQueue.add(asyncContext); }
void function(AsyncContext asyncContext) { contextQueue.add(asyncContext); }
/** * Register a new context. * @param asyncContext the context */
Register a new context
register
{ "repo_name": "peterbanda/coel", "path": "source/Web/src/java/edu/banda/coel/web/AsyncMessageManager.java", "license": "apache-2.0", "size": 2753 }
[ "javax.servlet.AsyncContext" ]
import javax.servlet.AsyncContext;
import javax.servlet.*;
[ "javax.servlet" ]
javax.servlet;
497,089
@Test public void testRelativePathLabel() throws Exception { scratch.file("sub/BUILD", "exports_files(['dir2/dir2'])"); scratch.file("sub/dir/BUILD", "exports_files(['dir2'])"); scratch.file("sub/dir/dir/BUILD", "exports_files(['dir'])"); // sub/dir/dir is a package assertEquals("//sub/dir/dir:dir", parseIndividualTarget("sub/dir/dir").toString()); // sub/dir is a package but not sub/dir/dir2 assertEquals("//sub/dir:dir2", parseIndividualTarget("sub/dir/dir2").toString()); // sub is a package but not sub/dir2 assertEquals("//sub:dir2/dir2", parseIndividualTarget("sub/dir2/dir2").toString()); }
void function() throws Exception { scratch.file(STR, STR); scratch.file(STR, STR); scratch.file(STR, STR); assertEquals(STR assertEquals(" }
/** * Test that the relative path label parsing behaves as stated in the target-syntax documentation. */
Test that the relative path label parsing behaves as stated in the target-syntax documentation
testRelativePathLabel
{ "repo_name": "anupcshan/bazel", "path": "src/test/java/com/google/devtools/build/lib/pkgcache/TargetPatternEvaluatorTest.java", "license": "apache-2.0", "size": 45264 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
2,206,213
public void doIndex(final StaplerRequest request, final StaplerResponse response) throws IOException { AbstractBuild<?, ?> build = getLastFinishedBuild(); if (build != null) { response.sendRedirect2(String.format("../%d/%s", build.getNumber(), resultUrl)); } } @Deprecated public AbstractProjectAction(final AbstractProject<?, ?> project, final Class<? extends T> resultActionType, final PluginDescriptor plugin) { this(project, resultActionType, null, null, plugin.getPluginName(), plugin.getIconUrl(), plugin.getPluginResultUrlName()); }
void function(final StaplerRequest request, final StaplerResponse response) throws IOException { AbstractBuild<?, ?> build = getLastFinishedBuild(); if (build != null) { response.sendRedirect2(String.format(STR, build.getNumber(), resultUrl)); } } public AbstractProjectAction(final AbstractProject<?, ?> project, final Class<? extends T> resultActionType, final PluginDescriptor plugin) { this(project, resultActionType, null, null, plugin.getPluginName(), plugin.getIconUrl(), plugin.getPluginResultUrlName()); }
/** * Redirects the index page to the last result. * * @param request * Stapler request * @param response * Stapler response * @throws IOException * in case of an error */
Redirects the index page to the last result
doIndex
{ "repo_name": "recena/analysis-core-plugin", "path": "src/main/java/hudson/plugins/analysis/core/AbstractProjectAction.java", "license": "mit", "size": 15161 }
[ "hudson.model.AbstractBuild", "hudson.model.AbstractProject", "java.io.IOException", "org.kohsuke.stapler.StaplerRequest", "org.kohsuke.stapler.StaplerResponse" ]
import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import java.io.IOException; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse;
import hudson.model.*; import java.io.*; import org.kohsuke.stapler.*;
[ "hudson.model", "java.io", "org.kohsuke.stapler" ]
hudson.model; java.io; org.kohsuke.stapler;
1,630,457
EList<ExecuteCommandType> getExecuteCommand();
EList<ExecuteCommandType> getExecuteCommand();
/** * Returns the value of the '<em><b>Execute Command</b></em>' containment reference list. * The list contents are of type {@link org.liquibase.xml.ns.dbchangelog.ExecuteCommandType}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the value of the '<em>Execute Command</em>' containment reference list. * @see org.liquibase.xml.ns.dbchangelog.DbchangelogPackage#getRollbackType_ExecuteCommand() * @model containment="true" transient="true" volatile="true" derived="true" * extendedMetaData="kind='element' name='executeCommand' namespace='##targetNamespace' group='#ChangeSetChildren:1'" * @generated */
Returns the value of the 'Execute Command' containment reference list. The list contents are of type <code>org.liquibase.xml.ns.dbchangelog.ExecuteCommandType</code>.
getExecuteCommand
{ "repo_name": "Treehopper/EclipseAugments", "path": "liquibase-editor/eu.hohenegger.xsd.liquibase/src-gen/org/liquibase/xml/ns/dbchangelog/RollbackType.java", "license": "epl-1.0", "size": 44628 }
[ "org.eclipse.emf.common.util.EList" ]
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
733,309
public List<StepMetaDataCombi> getSteps() { return steps; }
List<StepMetaDataCombi> function() { return steps; }
/** * Gets a list of steps in the transformation. * * @return a list of the steps in the transformation */
Gets a list of steps in the transformation
getSteps
{ "repo_name": "EcoleKeine/pentaho-kettle", "path": "engine/src/org/pentaho/di/trans/Trans.java", "license": "apache-2.0", "size": 194675 }
[ "java.util.List", "org.pentaho.di.trans.step.StepMetaDataCombi" ]
import java.util.List; import org.pentaho.di.trans.step.StepMetaDataCombi;
import java.util.*; import org.pentaho.di.trans.step.*;
[ "java.util", "org.pentaho.di" ]
java.util; org.pentaho.di;
2,891,866
public void addToCache(K key, V entry) { try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext .getThreadLocalCarbonContext(); carbonContext.setTenantId(MultitenantConstants.SUPER_TENANT_ID); carbonContext.setTenantDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); // Element already in the cache. Remove it first Cache<K, V> cache = getBaseCache(); if (cache != null) { cache.put(key, entry); } } finally { PrivilegedCarbonContext.endTenantFlow(); } }
void function(K key, V entry) { try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext .getThreadLocalCarbonContext(); carbonContext.setTenantId(MultitenantConstants.SUPER_TENANT_ID); carbonContext.setTenantDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); Cache<K, V> cache = getBaseCache(); if (cache != null) { cache.put(key, entry); } } finally { PrivilegedCarbonContext.endTenantFlow(); } }
/** * Add a cache entry. * * @param key Key which cache entry is indexed. * @param entry Actual object where cache entry is placed. */
Add a cache entry
addToCache
{ "repo_name": "kasungayan/carbon-identity", "path": "components/application-mgt/org.wso2.carbon.identity.application.common/src/main/java/org/wso2/carbon/identity/application/common/cache/BaseCache.java", "license": "apache-2.0", "size": 7209 }
[ "javax.cache.Cache", "org.wso2.carbon.context.PrivilegedCarbonContext", "org.wso2.carbon.utils.multitenancy.MultitenantConstants" ]
import javax.cache.Cache; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import javax.cache.*; import org.wso2.carbon.context.*; import org.wso2.carbon.utils.multitenancy.*;
[ "javax.cache", "org.wso2.carbon" ]
javax.cache; org.wso2.carbon;
2,220,657
public void registerVirtualKeyboard(VirtualKeyboardInterface vkb){ virtualKeyboards.put(vkb.getVirtualKeyboardName(), vkb); }
void function(VirtualKeyboardInterface vkb){ virtualKeyboards.put(vkb.getVirtualKeyboardName(), vkb); }
/** * Register a virtual keyboard * @param vkb * @deprecated this method is only used in feature phones and has no modern equivalent */
Register a virtual keyboard
registerVirtualKeyboard
{ "repo_name": "sannysanoff/CodenameOne", "path": "CodenameOne/src/com/codename1/ui/Display.java", "license": "gpl-2.0", "size": 148611 }
[ "com.codename1.impl.VirtualKeyboardInterface" ]
import com.codename1.impl.VirtualKeyboardInterface;
import com.codename1.impl.*;
[ "com.codename1.impl" ]
com.codename1.impl;
2,508,333
private final Instance lookupInstance(String name) { return renderObjects.lookupInstance(name); }
final Instance function(String name) { return renderObjects.lookupInstance(name); }
/** * Retrieve an instance object by its name, or <code>null</code> if no * instance was found, or if the specified object is not an instance. * * @param name instance name * @return the instance object associated with that name */
Retrieve an instance object by its name, or <code>null</code> if no instance was found, or if the specified object is not an instance
lookupInstance
{ "repo_name": "Acbentle/terrain474", "path": "src/org/sunflow/SunflowAPI.java", "license": "mit", "size": 32384 }
[ "org.sunflow.core.Instance" ]
import org.sunflow.core.Instance;
import org.sunflow.core.*;
[ "org.sunflow.core" ]
org.sunflow.core;
2,758,331
public void addObject(final Object object) { assert object != null; if (!SwingUtilities.isEventDispatchThread()) { throw new IllegalStateException("This method must be called from Swing thread"); } // Create new node final DefaultMutableTreeNode newNode = new DefaultMutableTreeNode(object); treeObjects.put(object, newNode); if (object instanceof RawDataFile) { int childCount = getChildCount(rootNode); insertNodeInto(newNode, rootNode, childCount); RawDataFile dataFile = (RawDataFile) object; int scanNumbers[] = dataFile.getScanNumbers(); for (int i = 0; i < scanNumbers.length; i++) { Scan scan = dataFile.getScan(scanNumbers[i]); DefaultMutableTreeNode scanNode = new DefaultMutableTreeNode(scan); treeObjects.put(scan, scanNode); insertNodeInto(scanNode, newNode, i); MassList massLists[] = scan.getMassLists(); for (int j = 0; j < massLists.length; j++) { DefaultMutableTreeNode mlNode = new DefaultMutableTreeNode(massLists[j]); treeObjects.put(massLists[j], mlNode); insertNodeInto(mlNode, scanNode, j); } } } if (object instanceof MassList) { Scan scan = ((MassList) object).getScan(); final DefaultMutableTreeNode scNode = treeObjects.get(scan); assert scNode != null; int index = scNode.getChildCount(); insertNodeInto(newNode, scNode, index); } }
void function(final Object object) { assert object != null; if (!SwingUtilities.isEventDispatchThread()) { throw new IllegalStateException(STR); } final DefaultMutableTreeNode newNode = new DefaultMutableTreeNode(object); treeObjects.put(object, newNode); if (object instanceof RawDataFile) { int childCount = getChildCount(rootNode); insertNodeInto(newNode, rootNode, childCount); RawDataFile dataFile = (RawDataFile) object; int scanNumbers[] = dataFile.getScanNumbers(); for (int i = 0; i < scanNumbers.length; i++) { Scan scan = dataFile.getScan(scanNumbers[i]); DefaultMutableTreeNode scanNode = new DefaultMutableTreeNode(scan); treeObjects.put(scan, scanNode); insertNodeInto(scanNode, newNode, i); MassList massLists[] = scan.getMassLists(); for (int j = 0; j < massLists.length; j++) { DefaultMutableTreeNode mlNode = new DefaultMutableTreeNode(massLists[j]); treeObjects.put(massLists[j], mlNode); insertNodeInto(mlNode, scanNode, j); } } } if (object instanceof MassList) { Scan scan = ((MassList) object).getScan(); final DefaultMutableTreeNode scNode = treeObjects.get(scan); assert scNode != null; int index = scNode.getChildCount(); insertNodeInto(newNode, scNode, index); } }
/** * This method must be called from Swing thread */
This method must be called from Swing thread
addObject
{ "repo_name": "du-lab/mzmine2", "path": "src/main/java/net/sf/mzmine/desktop/impl/projecttree/RawDataTreeModel.java", "license": "gpl-2.0", "size": 6280 }
[ "javax.swing.SwingUtilities", "javax.swing.tree.DefaultMutableTreeNode", "net.sf.mzmine.datamodel.MassList", "net.sf.mzmine.datamodel.RawDataFile", "net.sf.mzmine.datamodel.Scan" ]
import javax.swing.SwingUtilities; import javax.swing.tree.DefaultMutableTreeNode; import net.sf.mzmine.datamodel.MassList; import net.sf.mzmine.datamodel.RawDataFile; import net.sf.mzmine.datamodel.Scan;
import javax.swing.*; import javax.swing.tree.*; import net.sf.mzmine.datamodel.*;
[ "javax.swing", "net.sf.mzmine" ]
javax.swing; net.sf.mzmine;
2,186,091
private ItemPanel createItemPanel(Class<? extends IEntity> itemClass, SpriteStore store, String id, String image) { ItemPanel panel = new ItemPanel(id, store.getSprite(image)); slotPanels.put(id, panel); panel.setAcceptedTypes(itemClass); return panel; }
ItemPanel function(Class<? extends IEntity> itemClass, SpriteStore store, String id, String image) { ItemPanel panel = new ItemPanel(id, store.getSprite(image)); slotPanels.put(id, panel); panel.setAcceptedTypes(itemClass); return panel; }
/** * Create an item panel to be placed to the character window. * * @param itemClass acceptable drops to the slot * @param store sprite store * @param id slot identifier * @param image empty slot image * * @return item panel */
Create an item panel to be placed to the character window
createItemPanel
{ "repo_name": "acsid/stendhal", "path": "src/games/stendhal/client/gui/Character.java", "license": "gpl-2.0", "size": 9138 }
[ "games.stendhal.client.entity.IEntity", "games.stendhal.client.sprite.SpriteStore" ]
import games.stendhal.client.entity.IEntity; import games.stendhal.client.sprite.SpriteStore;
import games.stendhal.client.entity.*; import games.stendhal.client.sprite.*;
[ "games.stendhal.client" ]
games.stendhal.client;
2,881,944
public static long getMinGzipSize(ContentResolver resolver) { return DEFAULT_SYNC_MIN_GZIP_BYTES; // For now, this is just a constant. } private static class LoggingConfiguration { private final String tag; private final int level; private LoggingConfiguration(String tag, int level) { this.tag = tag; this.level = level; }
static long function(ContentResolver resolver) { return DEFAULT_SYNC_MIN_GZIP_BYTES; } private static class LoggingConfiguration { private final String tag; private final int level; private LoggingConfiguration(String tag, int level) { this.tag = tag; this.level = level; }
/** * Retrieves the minimum size for compressing data. * Shorter data will not be compressed. */
Retrieves the minimum size for compressing data. Shorter data will not be compressed
getMinGzipSize
{ "repo_name": "haikuowuya/android_system_code", "path": "src/android/net/http/AndroidHttpClient.java", "license": "apache-2.0", "size": 19372 }
[ "android.content.ContentResolver" ]
import android.content.ContentResolver;
import android.content.*;
[ "android.content" ]
android.content;
2,415,370
public VirtualNetworkUsageName name() { return this.name; }
VirtualNetworkUsageName function() { return this.name; }
/** * Get the name value. * * @return the name value */
Get the name value
name
{ "repo_name": "martinsawicki/azure-sdk-for-java", "path": "azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/implementation/VirtualNetworkUsageInner.java", "license": "mit", "size": 2084 }
[ "com.microsoft.azure.management.network.VirtualNetworkUsageName" ]
import com.microsoft.azure.management.network.VirtualNetworkUsageName;
import com.microsoft.azure.management.network.*;
[ "com.microsoft.azure" ]
com.microsoft.azure;
1,068,804
private NodeRef getPersonContainer(String remoteSystem, boolean lazyCreate) { // Get the person node NodeRef person = repositoryHelper.getPerson(); if (person == null) { // Something's rather broken, the service security ought to prevent this throw new IllegalStateException("Person details required but none found! Running as " + AuthenticationUtil.getRunAsUser()); } // If we're in edit mode, ensure the correct aspect is applied if (lazyCreate) { ensureCredentialsSystemContainer(person); } // Find the container return findRemoteSystemContainer(person, remoteSystem, lazyCreate); }
NodeRef function(String remoteSystem, boolean lazyCreate) { NodeRef person = repositoryHelper.getPerson(); if (person == null) { throw new IllegalStateException(STR + AuthenticationUtil.getRunAsUser()); } if (lazyCreate) { ensureCredentialsSystemContainer(person); } return findRemoteSystemContainer(person, remoteSystem, lazyCreate); }
/** * Gets, creating as needed, the person credentials container for the given system */
Gets, creating as needed, the person credentials container for the given system
getPersonContainer
{ "repo_name": "Alfresco/alfresco-repository", "path": "src/main/java/org/alfresco/repo/remotecredentials/RemoteCredentialsServiceImpl.java", "license": "lgpl-3.0", "size": 29919 }
[ "org.alfresco.repo.security.authentication.AuthenticationUtil", "org.alfresco.service.cmr.repository.NodeRef" ]
import org.alfresco.repo.security.authentication.AuthenticationUtil; import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.repo.security.authentication.*; import org.alfresco.service.cmr.repository.*;
[ "org.alfresco.repo", "org.alfresco.service" ]
org.alfresco.repo; org.alfresco.service;
1,327,260
public List<LangString> getShortName() { return shortName; } @ContextField(name = "name", parentName = "dataSetInformation", isMultiple = true, type = Type.MultiLangText) private List<LangString> name = new ArrayList<LangString>();
List<LangString> function() { return shortName; } @ContextField(name = "name", parentName = STR, isMultiple = true, type = Type.MultiLangText) private List<LangString> name = new ArrayList<LangString>();
/** * Short name for the contact, that is used for display e.g. of links to * this data set (especially in case the full name of the contact is rather * long, e.g. "FAO" for "Food and Agriculture Organization"). * * @Element shortName * @DataType String */
Short name for the contact, that is used for display e.g. of links to this data set (especially in case the full name of the contact is rather long, e.g. "FAO" for "Food and Agriculture Organization")
getShortName
{ "repo_name": "GreenDelta/olca-converter", "path": "src/main/java/org/openlca/olcatdb/ilcd/ILCDContactDescription.java", "license": "mpl-2.0", "size": 8368 }
[ "java.util.ArrayList", "java.util.List", "org.openlca.olcatdb.datatypes.LangString", "org.openlca.olcatdb.parsing.ContextField" ]
import java.util.ArrayList; import java.util.List; import org.openlca.olcatdb.datatypes.LangString; import org.openlca.olcatdb.parsing.ContextField;
import java.util.*; import org.openlca.olcatdb.datatypes.*; import org.openlca.olcatdb.parsing.*;
[ "java.util", "org.openlca.olcatdb" ]
java.util; org.openlca.olcatdb;
2,529,540
public InetSocketAddress getRemoteAddress() { Socket s = ((SocketChannel) key.channel()).socket(); return (InetSocketAddress) s.getRemoteSocketAddress(); } //
InetSocketAddress function() { Socket s = ((SocketChannel) key.channel()).socket(); return (InetSocketAddress) s.getRemoteSocketAddress(); } //
/** * Returns the remove socket address (host+port) */
Returns the remove socket address (host+port)
getRemoteAddress
{ "repo_name": "rubenlagus/TelegramApi", "path": "src/main/java/jawnae/pyronet/PyroClient.java", "license": "mit", "size": 13822 }
[ "java.net.InetSocketAddress", "java.net.Socket", "java.nio.channels.SocketChannel" ]
import java.net.InetSocketAddress; import java.net.Socket; import java.nio.channels.SocketChannel;
import java.net.*; import java.nio.channels.*;
[ "java.net", "java.nio" ]
java.net; java.nio;
2,694,262
protected Class findStringClass(Schema schema) { String name = schema.getProp(GenericData.STRING_PROP); if (name == null) return CharSequence.class; switch (GenericData.StringType.valueOf(name)) { case String: return String.class; default: return CharSequence.class; } } private Map<Schema,Class> stringClassCache = new IdentityHashMap<Schema,Class>();
Class function(Schema schema) { String name = schema.getProp(GenericData.STRING_PROP); if (name == null) return CharSequence.class; switch (GenericData.StringType.valueOf(name)) { case String: return String.class; default: return CharSequence.class; } } private Map<Schema,Class> stringClassCache = new IdentityHashMap<Schema,Class>();
/** Determines the class to used to represent a string Schema. By default * uses {@link GenericData#STRING_PROP} to determine whether {@link Utf8} or * {@link String} is used. Subclasses may override for alternate * representations. */
Determines the class to used to represent a string Schema. By default uses <code>GenericData#STRING_PROP</code> to determine whether <code>Utf8</code> or <code>String</code> is used. Subclasses may override for alternate representations
findStringClass
{ "repo_name": "wojtkiewicz/avro", "path": "lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java", "license": "apache-2.0", "size": 21343 }
[ "java.util.IdentityHashMap", "java.util.Map", "org.apache.avro.Schema" ]
import java.util.IdentityHashMap; import java.util.Map; import org.apache.avro.Schema;
import java.util.*; import org.apache.avro.*;
[ "java.util", "org.apache.avro" ]
java.util; org.apache.avro;
311,871
final protected boolean doOnClickEvent() { return _onClickEvent.length() == 0 || getScriptableDelegate().doEvent( _onClickEvent ); } protected void doButtonAction() throws IOException, SAXException {}
final boolean function() { return _onClickEvent.length() == 0 getScriptableDelegate().doEvent( _onClickEvent ); } protected void doButtonAction() throws IOException, SAXException {}
/** * Does the 'onClick' event defined for this button. * @return true if subsequent actions should be performed. */
Does the 'onClick' event defined for this button
doOnClickEvent
{ "repo_name": "simeshev/parabuild-ci", "path": "3rdparty/httpunit154/src/com/meterware/httpunit/Button.java", "license": "lgpl-3.0", "size": 4308 }
[ "java.io.IOException", "org.xml.sax.SAXException" ]
import java.io.IOException; import org.xml.sax.SAXException;
import java.io.*; import org.xml.sax.*;
[ "java.io", "org.xml.sax" ]
java.io; org.xml.sax;
2,796,144
EClass getExecutionExit();
EClass getExecutionExit();
/** * Returns the meta object for class '{@link org.yakindu.sct.model.sexec.ExecutionExit <em>Execution Exit</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>Execution Exit</em>'. * @see org.yakindu.sct.model.sexec.ExecutionExit * @generated */
Returns the meta object for class '<code>org.yakindu.sct.model.sexec.ExecutionExit Execution Exit</code>'.
getExecutionExit
{ "repo_name": "Yakindu/statecharts", "path": "plugins/org.yakindu.sct.model.sexec/src/org/yakindu/sct/model/sexec/SexecPackage.java", "license": "epl-1.0", "size": 168724 }
[ "org.eclipse.emf.ecore.EClass" ]
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
456,161
public static boolean isValid(String dateString) { boolean result = true; try { DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT); df.setLenient(false); df.parse(dateString); } catch (Exception e) { result = false; } return result; }
static boolean function(String dateString) { boolean result = true; try { DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT); df.setLenient(false); df.parse(dateString); } catch (Exception e) { result = false; } return result; }
/** * Validar una fecha. */
Validar una fecha
isValid
{ "repo_name": "rranz/meccano4j_vaadin", "path": "javalego/javalego_util/src/main/java/com/javalego/util/DateUtils.java", "license": "gpl-3.0", "size": 47778 }
[ "java.text.DateFormat" ]
import java.text.DateFormat;
import java.text.*;
[ "java.text" ]
java.text;
2,546,816
void finalizeStarting(InternalJob job, InternalTask task, Node node, TaskLauncher launcher) { tlogger.info(task.getId(), "started on " + node.getNodeInformation().getVMInformation().getHostName() + "(node: " + node.getNodeInformation().getName() + ")"); schedulingService.taskStarted(job, task, launcher); }
void finalizeStarting(InternalJob job, InternalTask task, Node node, TaskLauncher launcher) { tlogger.info(task.getId(), STR + node.getNodeInformation().getVMInformation().getHostName() + STR + node.getNodeInformation().getName() + ")"); schedulingService.taskStarted(job, task, launcher); }
/** * Finalize the start of the task by mark it as started. Also mark the job if it is not already started. * * @param job the job that owns the task to be started * @param task the task to be started * @param node the node on which the task will be started * @param launcher the taskLauncher that has just been launched */
Finalize the start of the task by mark it as started. Also mark the job if it is not already started
finalizeStarting
{ "repo_name": "yinan-liu/scheduling", "path": "scheduler/scheduler-server/src/main/java/org/ow2/proactive/scheduler/core/SchedulingMethodImpl.java", "license": "agpl-3.0", "size": 31817 }
[ "org.objectweb.proactive.core.node.Node", "org.ow2.proactive.scheduler.job.InternalJob", "org.ow2.proactive.scheduler.task.TaskLauncher", "org.ow2.proactive.scheduler.task.internal.InternalTask" ]
import org.objectweb.proactive.core.node.Node; import org.ow2.proactive.scheduler.job.InternalJob; import org.ow2.proactive.scheduler.task.TaskLauncher; import org.ow2.proactive.scheduler.task.internal.InternalTask;
import org.objectweb.proactive.core.node.*; import org.ow2.proactive.scheduler.job.*; import org.ow2.proactive.scheduler.task.*; import org.ow2.proactive.scheduler.task.internal.*;
[ "org.objectweb.proactive", "org.ow2.proactive" ]
org.objectweb.proactive; org.ow2.proactive;
809,860
public static SearchFragment newInstance(Bundle intentExtras) { SearchFragment fragment = new SearchFragment(); fragment.setArguments(intentExtras); return fragment; }
static SearchFragment function(Bundle intentExtras) { SearchFragment fragment = new SearchFragment(); fragment.setArguments(intentExtras); return fragment; }
/** * Returns a new {@link SearchFragment}. * * @params intentExtras the extras passed with the search intent for which this fragment should * show results */
Returns a new <code>SearchFragment</code>
newInstance
{ "repo_name": "charmasaur/alps-insects", "path": "src/main/java/com/github/charmasaur/alpsinsects/ui/fragments/SearchFragment.java", "license": "apache-2.0", "size": 4859 }
[ "android.os.Bundle" ]
import android.os.Bundle;
import android.os.*;
[ "android.os" ]
android.os;
1,810,847
public void zoomRangeAxes(double lowerPercent, double upperPercent, PlotRenderingInfo state, Point2D source) { this.rangeAxis.zoomRange(lowerPercent, upperPercent); }
void function(double lowerPercent, double upperPercent, PlotRenderingInfo state, Point2D source) { this.rangeAxis.zoomRange(lowerPercent, upperPercent); }
/** * Zooms the range axes. * * @param lowerPercent the lower percent. * @param upperPercent the upper percent. * @param state the plot state. * @param source the source point. */
Zooms the range axes
zoomRangeAxes
{ "repo_name": "nologic/nabs", "path": "client/trunk/shared/libraries/jfreechart-1.0.5/source/org/jfree/chart/plot/ThermometerPlot.java", "license": "gpl-2.0", "size": 48435 }
[ "java.awt.geom.Point2D" ]
import java.awt.geom.Point2D;
import java.awt.geom.*;
[ "java.awt" ]
java.awt;
2,699,190
private Iterator<SSLSession> sessionIterator() { synchronized (sessions) { SSLSession[] array = sessions.values().toArray( new SSLSession[sessions.size()]); return Arrays.asList(array).iterator(); } }
Iterator<SSLSession> function() { synchronized (sessions) { SSLSession[] array = sessions.values().toArray( new SSLSession[sessions.size()]); return Arrays.asList(array).iterator(); } }
/** * Returns the collection of sessions ordered from oldest to newest */
Returns the collection of sessions ordered from oldest to newest
sessionIterator
{ "repo_name": "xdajog/samsung_sources_i927", "path": "libcore/luni/src/main/java/org/apache/harmony/xnet/provider/jsse/AbstractSessionContext.java", "license": "gpl-2.0", "size": 8965 }
[ "java.util.Arrays", "java.util.Iterator", "javax.net.ssl.SSLSession" ]
import java.util.Arrays; import java.util.Iterator; import javax.net.ssl.SSLSession;
import java.util.*; import javax.net.ssl.*;
[ "java.util", "javax.net" ]
java.util; javax.net;
2,375,449
private boolean sameFile(final FileStatus inputStat, final FileStatus outputStat) { // Not matching length if (inputStat.getLen() != outputStat.getLen()) return false; // Mark files as equals, since user asked for no checksum verification if (!verifyChecksum) return true; // If checksums are not available, files are not the same. FileChecksum inChecksum = getFileChecksum(inputFs, inputStat.getPath()); if (inChecksum == null) return false; FileChecksum outChecksum = getFileChecksum(outputFs, outputStat.getPath()); if (outChecksum == null) return false; return inChecksum.equals(outChecksum); } } // ========================================================================== // Input Format // ==========================================================================
boolean function(final FileStatus inputStat, final FileStatus outputStat) { if (inputStat.getLen() != outputStat.getLen()) return false; if (!verifyChecksum) return true; FileChecksum inChecksum = getFileChecksum(inputFs, inputStat.getPath()); if (inChecksum == null) return false; FileChecksum outChecksum = getFileChecksum(outputFs, outputStat.getPath()); if (outChecksum == null) return false; return inChecksum.equals(outChecksum); } }
/** * Check if the two files are equal by looking at the file length, * and at the checksum (if user has specified the verifyChecksum flag). */
Check if the two files are equal by looking at the file length, and at the checksum (if user has specified the verifyChecksum flag)
sameFile
{ "repo_name": "vincentpoon/hbase", "path": "hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java", "license": "apache-2.0", "size": 45712 }
[ "org.apache.hadoop.fs.FileChecksum", "org.apache.hadoop.fs.FileStatus" ]
import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
213,618
public void setAccessCache(SimpleCache<Serializable, AccessStatus> accessCache) { this.accessCache = accessCache; }
void function(SimpleCache<Serializable, AccessStatus> accessCache) { this.accessCache = accessCache; }
/** * Set the permissions access cache. * * @param accessCache * a transactionally safe cache */
Set the permissions access cache
setAccessCache
{ "repo_name": "loftuxab/alfresco-community-loftux", "path": "projects/repository/source/java/org/alfresco/repo/security/permissions/impl/PermissionServiceImpl.java", "license": "lgpl-3.0", "size": 108452 }
[ "java.io.Serializable", "org.alfresco.repo.cache.SimpleCache", "org.alfresco.service.cmr.security.AccessStatus" ]
import java.io.Serializable; import org.alfresco.repo.cache.SimpleCache; import org.alfresco.service.cmr.security.AccessStatus;
import java.io.*; import org.alfresco.repo.cache.*; import org.alfresco.service.cmr.security.*;
[ "java.io", "org.alfresco.repo", "org.alfresco.service" ]
java.io; org.alfresco.repo; org.alfresco.service;
157,486
Objects.requireNonNull(minecraftDir); Objects.requireNonNull(version); if (doesVersionExist(minecraftDir, version)) { try { return getVersionParser().parseVersion(resolveVersionHierarchy(version, minecraftDir), PlatformDescription.current()); } catch (JSONException e) { throw new IOException("Couldn't parse version json: " + version, e); } } else { return null; } }
Objects.requireNonNull(minecraftDir); Objects.requireNonNull(version); if (doesVersionExist(minecraftDir, version)) { try { return getVersionParser().parseVersion(resolveVersionHierarchy(version, minecraftDir), PlatformDescription.current()); } catch (JSONException e) { throw new IOException(STR + version, e); } } else { return null; } }
/** * Resolves the version. * * @param minecraftDir the minecraft directory * @param version the version name * @return the version object, or null if the version does not exist * @throws IOException if an I/O error has occurred during resolving version * @throws NullPointerException if * <code>minecraftDir==null || version==null</code> */
Resolves the version
resolveVersion
{ "repo_name": "yushijinhun/JMCCC", "path": "jmccc/src/main/java/org/to2mbn/jmccc/version/parsing/Versions.java", "license": "mit", "size": 4495 }
[ "java.io.IOException", "java.util.Objects", "org.to2mbn.jmccc.internal.org.json.JSONException" ]
import java.io.IOException; import java.util.Objects; import org.to2mbn.jmccc.internal.org.json.JSONException;
import java.io.*; import java.util.*; import org.to2mbn.jmccc.internal.org.json.*;
[ "java.io", "java.util", "org.to2mbn.jmccc" ]
java.io; java.util; org.to2mbn.jmccc;
1,966,967
@SimpleProperty( category = PropertyCategory.BEHAVIOR) public String DataType() { return dataType; }
@SimpleProperty( category = PropertyCategory.BEHAVIOR) String function() { return dataType; }
/** * Returns the MIME type to pass to the activity. */
Returns the MIME type to pass to the activity
DataType
{ "repo_name": "youprofit/appinventor-sources", "path": "appinventor/components/src/com/google/appinventor/components/runtime/ActivityStarter.java", "license": "apache-2.0", "size": 19022 }
[ "com.google.appinventor.components.annotations.PropertyCategory", "com.google.appinventor.components.annotations.SimpleProperty" ]
import com.google.appinventor.components.annotations.PropertyCategory; import com.google.appinventor.components.annotations.SimpleProperty;
import com.google.appinventor.components.annotations.*;
[ "com.google.appinventor" ]
com.google.appinventor;
2,535,790
/**
 * Populates the list of advanced search drop-down values for the given field.
 * The result is cached per {@code language + '_' + field} key in
 * {@code advancedSearchSelectItems}, so the list is only generated once per session.
 *
 * @param field The index field for which to get drop-down values.
 * @param language Translation language for the values (used only as part of the cache key here).
 * @param hierarchical If true, the menu items are listed in their corresponding hierarchy (e.g. DC),
 *        indented with "- " per level.
 * @return the (possibly cached) list of value/label pairs; never null
 * @throws PresentationException if any.
 * @throws IndexUnreachableException if any.
 * @throws DAOException if any.
 */
public List<StringPair> getAdvancedSearchSelectItems(String field, String language, boolean hierarchical)
        throws PresentationException, IndexUnreachableException, DAOException {
    // logger.trace("getAdvancedSearchSelectItems: {}", field);
    if (field == null) {
        throw new IllegalArgumentException("field may not be null.");
    }
    if (language == null) {
        throw new IllegalArgumentException("language may not be null.");
    }
    String key = new StringBuilder(language).append('_').append(field).toString();
    // Return the cached list for this language/field combination if present.
    List<StringPair> ret = advancedSearchSelectItems.get(key);
    if (ret != null) {
        return ret;
    }
    ret = new ArrayList<>();
    logger.trace("Generating drop-down values for {}", field);
    if (SolrConstants.BOOKMARKS.equals(field)) {
        if (userBean != null && userBean.isLoggedIn()) {
            // User bookshelves
            List<BookmarkList> bookmarkLists = DataManager.getInstance().getDao().getBookmarkLists(userBean.getUser());
            if (!bookmarkLists.isEmpty()) {
                for (BookmarkList bookmarkList : bookmarkLists) {
                    if (!bookmarkList.getItems().isEmpty()) {
                        ret.add(new StringPair(bookmarkList.getName(), bookmarkList.getName()));
                    }
                }
            }
        } else {
            // Session bookmark list
            Optional<BookmarkList> bookmarkList =
                    DataManager.getInstance().getBookmarkManager().getBookmarkList(BeanUtils.getRequest().getSession());
            if (bookmarkList.isPresent() && !bookmarkList.get().getItems().isEmpty()) {
                ret.add(new StringPair(bookmarkList.get().getName(), ViewerResourceBundle.getTranslation("bookmarkList_session", null)));
            }
        }
        // public bookmark lists
        List<BookmarkList> publicBookmarkLists = DataManager.getInstance().getDao().getPublicBookmarkLists();
        if (!publicBookmarkLists.isEmpty()) {
            for (BookmarkList bookmarkList : publicBookmarkLists) {
                StringPair pair = new StringPair(bookmarkList.getName(), bookmarkList.getName());
                if (!bookmarkList.getItems().isEmpty() && !ret.contains(pair)) {
                    ret.add(pair);
                }
            }
        }
    } else if (hierarchical) {
        BrowseBean browseBean = BeanUtils.getBrowseBean();
        if (browseBean == null) {
            browseBean = new BrowseBean();
        }
        // Make sure displayDepth is configured to the desired depth for this field (or -1 for complete depth)
        int displayDepth = DataManager.getInstance().getConfiguration().getCollectionDisplayDepthForSearch(field);
        List<BrowseDcElement> elementList = browseBean.getList(field, displayDepth);
        StringBuilder sbItemLabel = new StringBuilder();
        for (BrowseDcElement dc : elementList) {
            // Skip reversed values that MD_* and MD2_* fields will return
            if (StringUtils.isEmpty(dc.getName()) || dc.getName().charAt(0) == 1) {
                continue;
            }
            // Indent the label with "- " once per hierarchy level.
            for (int i = 0; i < dc.getLevel(); ++i) {
                sbItemLabel.append("- ");
            }
            sbItemLabel.append(ViewerResourceBundle.getTranslation(dc.getName(), null));
            ret.add(new StringPair(dc.getName(), sbItemLabel.toString()));
            sbItemLabel.setLength(0);
        }
        advancedSearchSelectItems.put(key, ret);
    } else {
        // Flat list: collect facet values for the field from the index and sort them.
        String suffix = SearchHelper.getAllSuffixes();
        List<String> values = SearchHelper.getFacetValues(field + ":[* TO *]" + suffix, field, 1);
        for (String value : values) {
            ret.add(new StringPair(value, ViewerResourceBundle.getTranslation(value, null)));
        }
        Collections.sort(ret);
        advancedSearchSelectItems.put(key, ret);
    }
    logger.trace("Generated {} values", ret.size());
    return ret;
}
List<StringPair> function(String field, String language, boolean hierarchical) throws PresentationException, IndexUnreachableException, DAOException { if (field == null) { throw new IllegalArgumentException(STR); } if (language == null) { throw new IllegalArgumentException(STR); } String key = new StringBuilder(language).append('_').append(field).toString(); List<StringPair> ret = advancedSearchSelectItems.get(key); if (ret != null) { return ret; } ret = new ArrayList<>(); logger.trace(STR, field); if (SolrConstants.BOOKMARKS.equals(field)) { if (userBean != null && userBean.isLoggedIn()) { List<BookmarkList> bookmarkLists = DataManager.getInstance().getDao().getBookmarkLists(userBean.getUser()); if (!bookmarkLists.isEmpty()) { for (BookmarkList bookmarkList : bookmarkLists) { if (!bookmarkList.getItems().isEmpty()) { ret.add(new StringPair(bookmarkList.getName(), bookmarkList.getName())); } } } } else { Optional<BookmarkList> bookmarkList = DataManager.getInstance().getBookmarkManager().getBookmarkList(BeanUtils.getRequest().getSession()); if (bookmarkList.isPresent() && !bookmarkList.get().getItems().isEmpty()) { ret.add(new StringPair(bookmarkList.get().getName(), ViewerResourceBundle.getTranslation(STR, null))); } } List<BookmarkList> publicBookmarkLists = DataManager.getInstance().getDao().getPublicBookmarkLists(); if (!publicBookmarkLists.isEmpty()) { for (BookmarkList bookmarkList : publicBookmarkLists) { StringPair pair = new StringPair(bookmarkList.getName(), bookmarkList.getName()); if (!bookmarkList.getItems().isEmpty() && !ret.contains(pair)) { ret.add(pair); } } } } else if (hierarchical) { BrowseBean browseBean = BeanUtils.getBrowseBean(); if (browseBean == null) { browseBean = new BrowseBean(); } int displayDepth = DataManager.getInstance().getConfiguration().getCollectionDisplayDepthForSearch(field); List<BrowseDcElement> elementList = browseBean.getList(field, displayDepth); StringBuilder sbItemLabel = new StringBuilder(); for (BrowseDcElement dc : 
elementList) { if (StringUtils.isEmpty(dc.getName()) dc.getName().charAt(0) == 1) { continue; } for (int i = 0; i < dc.getLevel(); ++i) { sbItemLabel.append(STR); } sbItemLabel.append(ViewerResourceBundle.getTranslation(dc.getName(), null)); ret.add(new StringPair(dc.getName(), sbItemLabel.toString())); sbItemLabel.setLength(0); } advancedSearchSelectItems.put(key, ret); } else { String suffix = SearchHelper.getAllSuffixes(); List<String> values = SearchHelper.getFacetValues(field + STR + suffix, field, 1); for (String value : values) { ret.add(new StringPair(value, ViewerResourceBundle.getTranslation(value, null))); } Collections.sort(ret); advancedSearchSelectItems.put(key, ret); } logger.trace(STR, ret.size()); return ret; }
/** * Populates the list of advanced search drop-down values for the given field. List is only generated once per user session. * * @param field The index field for which to get drop-down values. * @param language Translation language for the values. * @param hierarchical If true, the menu items will be listed in their corresponding hierarchy (e.g. DC) * @return a {@link java.util.List} object. * @throws io.goobi.viewer.exceptions.PresentationException if any. * @throws io.goobi.viewer.exceptions.IndexUnreachableException if any. * @throws io.goobi.viewer.exceptions.DAOException if any. * @throws IllegalRequestException */
Populates the list of advanced search drop-down values for the given field. List is only generated once per user session
getAdvancedSearchSelectItems
{ "repo_name": "intranda/goobi-viewer-core", "path": "goobi-viewer-core/src/main/java/io/goobi/viewer/managedbeans/SearchBean.java", "license": "gpl-2.0", "size": 118095 }
[ "io.goobi.viewer.controller.DataManager", "io.goobi.viewer.exceptions.DAOException", "io.goobi.viewer.exceptions.IndexUnreachableException", "io.goobi.viewer.exceptions.PresentationException", "io.goobi.viewer.managedbeans.utils.BeanUtils", "io.goobi.viewer.messages.ViewerResourceBundle", "io.goobi.viewer.model.bookmark.BookmarkList", "io.goobi.viewer.model.search.SearchHelper", "io.goobi.viewer.model.viewer.BrowseDcElement", "io.goobi.viewer.model.viewer.StringPair", "io.goobi.viewer.solr.SolrConstants", "java.util.ArrayList", "java.util.Collections", "java.util.List", "java.util.Optional", "org.apache.commons.lang3.StringUtils" ]
import io.goobi.viewer.controller.DataManager; import io.goobi.viewer.exceptions.DAOException; import io.goobi.viewer.exceptions.IndexUnreachableException; import io.goobi.viewer.exceptions.PresentationException; import io.goobi.viewer.managedbeans.utils.BeanUtils; import io.goobi.viewer.messages.ViewerResourceBundle; import io.goobi.viewer.model.bookmark.BookmarkList; import io.goobi.viewer.model.search.SearchHelper; import io.goobi.viewer.model.viewer.BrowseDcElement; import io.goobi.viewer.model.viewer.StringPair; import io.goobi.viewer.solr.SolrConstants; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; import org.apache.commons.lang3.StringUtils;
import io.goobi.viewer.controller.*; import io.goobi.viewer.exceptions.*; import io.goobi.viewer.managedbeans.utils.*; import io.goobi.viewer.messages.*; import io.goobi.viewer.model.bookmark.*; import io.goobi.viewer.model.search.*; import io.goobi.viewer.model.viewer.*; import io.goobi.viewer.solr.*; import java.util.*; import org.apache.commons.lang3.*;
[ "io.goobi.viewer", "java.util", "org.apache.commons" ]
io.goobi.viewer; java.util; org.apache.commons;
1,406,347
/**
 * Returns the meta object for class '{@link bento.sirius.tests.boxes.Arrow <em>Arrow</em>}'.
 *
 * @return the meta object for class '<em>Arrow</em>'.
 * @see bento.sirius.tests.boxes.Arrow
 */
EClass getArrow();
EClass getArrow();
/** * Returns the meta object for class '{@link bento.sirius.tests.boxes.Arrow <em>Arrow</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>Arrow</em>'. * @see bento.sirius.tests.boxes.Arrow * @generated */
Returns the meta object for class '<code>bento.sirius.tests.boxes.Arrow</code>' (<em>Arrow</em>).
getArrow
{ "repo_name": "jesusc/bento", "path": "tests/bento.sirius.tests/src-gen/bento/sirius/tests/boxes/BoxesPackage.java", "license": "epl-1.0", "size": 10649 }
[ "org.eclipse.emf.ecore.EClass" ]
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
2,419,927
@Override public void write(byte[] theBytes, int off, int len) throws IOException { // Encoding suspended? if (suspendEncoding) { this.out.write(theBytes, off, len); return; } for (int i = 0; i < len; i++) { write(theBytes[off + i]); } }
void function(byte[] theBytes, int off, int len) throws IOException { if (suspendEncoding) { this.out.write(theBytes, off, len); return; } for (int i = 0; i < len; i++) { write(theBytes[off + i]); } }
/** * Calls {@link #write(int)} repeatedly until <var>len</var> bytes are * written. * * @param theBytes * array from which to read bytes * @param off * offset for array * @param len * max number of bytes to read into array * @since 1.3 */
Calls <code>#write(int)</code> repeatedly until len bytes are written
write
{ "repo_name": "mrpdaemon/encfs-java", "path": "src/main/java/org/mrpdaemon/sec/encfs/EncFSBase64.java", "license": "lgpl-3.0", "size": 48662 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
926,739
/**
 * Returns <code>true</code> if the file is a non-empty directory,
 * <code>false</code> otherwise (plain files, unreadable listings and
 * empty directories all yield <code>false</code>).
 *
 * @return See above.
 */
boolean isDirectory() {
    // A plain file can never be a directory.
    if (file.isFile()) {
        return false;
    }
    // listFiles() returns null when the path is not a directory or cannot be read.
    File[] children = file.listFiles();
    return children != null && children.length > 0;
}
boolean isDirectory() { if (file.isFile()) return false; File[] list = file.listFiles(); if (list == null list.length == 0) return false; return true; }
/** * Returns <code>true</code> if the file is a directory, * <code>false</code> otherwise. * * @return See above. */
Returns <code>true</code> if the file is a directory, <code>false</code> otherwise
isDirectory
{ "repo_name": "rleigh-dundee/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/agents/fsimporter/chooser/FileElement.java", "license": "gpl-2.0", "size": 5552 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
2,779,535
/**
 * Get the resource adapter.
 *
 * @return The {@link ResourceAdapter} handle associated with this factory
 */
public ResourceAdapter getResourceAdapter() {
    log.finest("getResourceAdapter()");
    return ra;
}
ResourceAdapter function() { log.finest(STR); return ra; }
/** * Get the resource adapter * * @return The handle */
Get the resource adapter
getResourceAdapter
{ "repo_name": "ironjacamar/ironjacamar", "path": "deployers/tests/src/test/java/org/jboss/jca/deployers/test/rars/inout/SimpleManagedConnectionFactoryWOHashCode.java", "license": "lgpl-2.1", "size": 7325 }
[ "javax.resource.spi.ResourceAdapter" ]
import javax.resource.spi.ResourceAdapter;
import javax.resource.spi.*;
[ "javax.resource" ]
javax.resource;
416,847
/**
 * Produces the set difference of two sequences by using the specified
 * {@code EqualityComparer<T>} to compare values.
 *
 * @param source the queryable whose elements form the base sequence
 * @param enumerable the elements to subtract from {@code source}
 * @param comparer equality comparer used to decide whether two elements match
 * @return a queryable containing the set difference
 */
Queryable<T> except(Queryable<T> source, Enumerable<T> enumerable, EqualityComparer<T> comparer);
Queryable<T> except(Queryable<T> source, Enumerable<T> enumerable, EqualityComparer<T> comparer);
/** * Produces the set difference of two sequences by * using the specified {@code EqualityComparer<T>} to compare * values. */
Produces the set difference of two sequences by using the specified EqualityComparer to compare values
except
{ "repo_name": "minji-kim/calcite", "path": "linq4j/src/main/java/org/apache/calcite/linq4j/QueryableFactory.java", "license": "apache-2.0", "size": 27824 }
[ "org.apache.calcite.linq4j.function.EqualityComparer" ]
import org.apache.calcite.linq4j.function.EqualityComparer;
import org.apache.calcite.linq4j.function.*;
[ "org.apache.calcite" ]
org.apache.calcite;
254,223
@Nullable public ImmutableList<TransitiveInfoCollection> getFiles() { return files; } } private static final String HOST_CONFIGURATION_PROGRESS_TAG = "for host"; private final Rule rule; private final ImmutableList<Aspect> aspects; private final ImmutableList<AspectDescriptor> aspectDescriptors; private final ListMultimap<String, ConfiguredTargetAndData> targetMap; private final ListMultimap<String, ConfiguredFilesetEntry> filesetEntryMap; private final ImmutableMap<Label, ConfigMatchingProvider> configConditions; private final AspectAwareAttributeMapper attributes; private final ImmutableSet<String> enabledFeatures; private final ImmutableSet<String> disabledFeatures; private final String ruleClassNameForLogging; private final BuildConfiguration hostConfiguration; private final ConfigurationFragmentPolicy configurationFragmentPolicy; private final FragmentClassSet universalFragments; private final RuleErrorConsumer reporter; @Nullable private final ToolchainCollection<ResolvedToolchainContext> toolchainContexts; private final ExecGroupCollection execGroupCollection; private final ConstraintSemantics<RuleContext> constraintSemantics; private final ImmutableSet<String> requiredConfigFragments; private final List<Expander> makeVariableExpanders = new ArrayList<>(); private final Map<String, ActionOwner> actionOwners = new HashMap<>(); private final SymbolGenerator<ActionLookupKey> actionOwnerSymbolGenerator; private transient ConfigurationMakeVariableContext configurationMakeVariableContext = null; private final StarlarkSemantics starlarkSemantics; private final StarlarkThread starlarkThread; @Nullable private StarlarkRuleContext starlarkRuleContext; private RuleContext( Builder builder, AttributeMap attributes, ListMultimap<String, ConfiguredTargetAndData> targetMap, ListMultimap<String, ConfiguredFilesetEntry> filesetEntryMap, ImmutableMap<Label, ConfigMatchingProvider> configConditions, FragmentClassSet universalFragments, String ruleClassNameForLogging, 
ActionLookupKey actionLookupKey, ImmutableMap<String, Attribute> aspectAttributes, @Nullable ToolchainCollection<ResolvedToolchainContext> toolchainContexts, ExecGroupCollection execGroupCollection, ConstraintSemantics<RuleContext> constraintSemantics, ImmutableSet<String> requiredConfigFragments, String toolsRepository, StarlarkSemantics starlarkSemantics, Mutability mutability) throws InvalidExecGroupException { super( builder.env, builder.target.getAssociatedRule(), builder.configuration, builder.prerequisiteMap.get(null), builder.visibility); this.rule = builder.target.getAssociatedRule(); this.aspects = builder.aspects; this.aspectDescriptors = builder .aspects .stream() .map(a -> a.getDescriptor()) .collect(ImmutableList.toImmutableList()); this.configurationFragmentPolicy = builder.configurationFragmentPolicy; this.universalFragments = universalFragments; this.targetMap = targetMap; this.filesetEntryMap = filesetEntryMap; this.configConditions = configConditions; this.attributes = new AspectAwareAttributeMapper(attributes, aspectAttributes); Set<String> allEnabledFeatures = new HashSet<>(); Set<String> allDisabledFeatures = new HashSet<>(); getAllFeatures(allEnabledFeatures, allDisabledFeatures); this.enabledFeatures = ImmutableSortedSet.copyOf(allEnabledFeatures); this.disabledFeatures = ImmutableSortedSet.copyOf(allDisabledFeatures); this.ruleClassNameForLogging = ruleClassNameForLogging; this.hostConfiguration = builder.hostConfiguration; this.actionOwnerSymbolGenerator = new SymbolGenerator<>(actionLookupKey); reporter = builder.reporter; this.toolchainContexts = toolchainContexts; this.execGroupCollection = execGroupCollection; this.constraintSemantics = constraintSemantics; this.requiredConfigFragments = requiredConfigFragments; this.starlarkSemantics = starlarkSemantics; this.starlarkThread = createStarlarkThread(toolsRepository, mutability); // uses above state }
ImmutableList<TransitiveInfoCollection> function() { return files; } } private static final String HOST_CONFIGURATION_PROGRESS_TAG = STR; private final Rule rule; private final ImmutableList<Aspect> aspects; private final ImmutableList<AspectDescriptor> aspectDescriptors; private final ListMultimap<String, ConfiguredTargetAndData> targetMap; private final ListMultimap<String, ConfiguredFilesetEntry> filesetEntryMap; private final ImmutableMap<Label, ConfigMatchingProvider> configConditions; private final AspectAwareAttributeMapper attributes; private final ImmutableSet<String> enabledFeatures; private final ImmutableSet<String> disabledFeatures; private final String ruleClassNameForLogging; private final BuildConfiguration hostConfiguration; private final ConfigurationFragmentPolicy configurationFragmentPolicy; private final FragmentClassSet universalFragments; private final RuleErrorConsumer reporter; @Nullable private final ToolchainCollection<ResolvedToolchainContext> toolchainContexts; private final ExecGroupCollection execGroupCollection; private final ConstraintSemantics<RuleContext> constraintSemantics; private final ImmutableSet<String> requiredConfigFragments; private final List<Expander> makeVariableExpanders = new ArrayList<>(); private final Map<String, ActionOwner> actionOwners = new HashMap<>(); private final SymbolGenerator<ActionLookupKey> actionOwnerSymbolGenerator; private transient ConfigurationMakeVariableContext configurationMakeVariableContext = null; private final StarlarkSemantics starlarkSemantics; private final StarlarkThread starlarkThread; @Nullable private StarlarkRuleContext starlarkRuleContext; private RuleContext( Builder builder, AttributeMap attributes, ListMultimap<String, ConfiguredTargetAndData> targetMap, ListMultimap<String, ConfiguredFilesetEntry> filesetEntryMap, ImmutableMap<Label, ConfigMatchingProvider> configConditions, FragmentClassSet universalFragments, String ruleClassNameForLogging, ActionLookupKey actionLookupKey, 
ImmutableMap<String, Attribute> aspectAttributes, @Nullable ToolchainCollection<ResolvedToolchainContext> toolchainContexts, ExecGroupCollection execGroupCollection, ConstraintSemantics<RuleContext> constraintSemantics, ImmutableSet<String> requiredConfigFragments, String toolsRepository, StarlarkSemantics starlarkSemantics, Mutability mutability) throws InvalidExecGroupException { super( builder.env, builder.target.getAssociatedRule(), builder.configuration, builder.prerequisiteMap.get(null), builder.visibility); this.rule = builder.target.getAssociatedRule(); this.aspects = builder.aspects; this.aspectDescriptors = builder .aspects .stream() .map(a -> a.getDescriptor()) .collect(ImmutableList.toImmutableList()); this.configurationFragmentPolicy = builder.configurationFragmentPolicy; this.universalFragments = universalFragments; this.targetMap = targetMap; this.filesetEntryMap = filesetEntryMap; this.configConditions = configConditions; this.attributes = new AspectAwareAttributeMapper(attributes, aspectAttributes); Set<String> allEnabledFeatures = new HashSet<>(); Set<String> allDisabledFeatures = new HashSet<>(); getAllFeatures(allEnabledFeatures, allDisabledFeatures); this.enabledFeatures = ImmutableSortedSet.copyOf(allEnabledFeatures); this.disabledFeatures = ImmutableSortedSet.copyOf(allDisabledFeatures); this.ruleClassNameForLogging = ruleClassNameForLogging; this.hostConfiguration = builder.hostConfiguration; this.actionOwnerSymbolGenerator = new SymbolGenerator<>(actionLookupKey); reporter = builder.reporter; this.toolchainContexts = toolchainContexts; this.execGroupCollection = execGroupCollection; this.constraintSemantics = constraintSemantics; this.requiredConfigFragments = requiredConfigFragments; this.starlarkSemantics = starlarkSemantics; this.starlarkThread = createStarlarkThread(toolsRepository, mutability); }
/** * Targets from FilesetEntry.files, or null if the user omitted it. */
Targets from FilesetEntry.files, or null if the user omitted it
getFiles
{ "repo_name": "meteorcloudy/bazel", "path": "src/main/java/com/google/devtools/build/lib/analysis/RuleContext.java", "license": "apache-2.0", "size": 99413 }
[ "com.google.common.collect.ImmutableList", "com.google.common.collect.ImmutableMap", "com.google.common.collect.ImmutableSet", "com.google.common.collect.ImmutableSortedSet", "com.google.common.collect.ListMultimap", "com.google.devtools.build.lib.actions.ActionLookupKey", "com.google.devtools.build.lib.actions.ActionOwner", "com.google.devtools.build.lib.analysis.ExecGroupCollection", "com.google.devtools.build.lib.analysis.config.BuildConfiguration", "com.google.devtools.build.lib.analysis.config.ConfigMatchingProvider", "com.google.devtools.build.lib.analysis.config.FragmentClassSet", "com.google.devtools.build.lib.analysis.constraints.ConstraintSemantics", "com.google.devtools.build.lib.analysis.starlark.StarlarkRuleContext", "com.google.devtools.build.lib.cmdline.Label", "com.google.devtools.build.lib.packages.Aspect", "com.google.devtools.build.lib.packages.AspectDescriptor", "com.google.devtools.build.lib.packages.Attribute", "com.google.devtools.build.lib.packages.AttributeMap", "com.google.devtools.build.lib.packages.ConfigurationFragmentPolicy", "com.google.devtools.build.lib.packages.Rule", "com.google.devtools.build.lib.packages.SymbolGenerator", "com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData", "java.util.ArrayList", "java.util.HashMap", "java.util.HashSet", "java.util.List", "java.util.Map", "java.util.Set", "javax.annotation.Nullable", "net.starlark.java.eval.Mutability", "net.starlark.java.eval.StarlarkSemantics", "net.starlark.java.eval.StarlarkThread" ]
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.ListMultimap; import com.google.devtools.build.lib.actions.ActionLookupKey; import com.google.devtools.build.lib.actions.ActionOwner; import com.google.devtools.build.lib.analysis.ExecGroupCollection; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.config.ConfigMatchingProvider; import com.google.devtools.build.lib.analysis.config.FragmentClassSet; import com.google.devtools.build.lib.analysis.constraints.ConstraintSemantics; import com.google.devtools.build.lib.analysis.starlark.StarlarkRuleContext; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.packages.Aspect; import com.google.devtools.build.lib.packages.AspectDescriptor; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.AttributeMap; import com.google.devtools.build.lib.packages.ConfigurationFragmentPolicy; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.packages.SymbolGenerator; import com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Nullable; import net.starlark.java.eval.Mutability; import net.starlark.java.eval.StarlarkSemantics; import net.starlark.java.eval.StarlarkThread;
import com.google.common.collect.*; import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.analysis.*; import com.google.devtools.build.lib.analysis.config.*; import com.google.devtools.build.lib.analysis.constraints.*; import com.google.devtools.build.lib.analysis.starlark.*; import com.google.devtools.build.lib.cmdline.*; import com.google.devtools.build.lib.packages.*; import com.google.devtools.build.lib.skyframe.*; import java.util.*; import javax.annotation.*; import net.starlark.java.eval.*;
[ "com.google.common", "com.google.devtools", "java.util", "javax.annotation", "net.starlark.java" ]
com.google.common; com.google.devtools; java.util; javax.annotation; net.starlark.java;
2,888,001
/**
 * Lists the global operations.
 *
 * @param options options shaping/filtering the listing, as defined by {@code OperationListOption}
 * @return a page of global operations
 * @throws ComputeException upon failure
 */
Page<Operation> listGlobalOperations(OperationListOption... options);
Page<Operation> listGlobalOperations(OperationListOption... options);
/** * Lists the global operations. * * @throws ComputeException upon failure */
Lists the global operations
listGlobalOperations
{ "repo_name": "jabubake/google-cloud-java", "path": "google-cloud-compute/src/main/java/com/google/cloud/compute/Compute.java", "license": "apache-2.0", "size": 93984 }
[ "com.google.cloud.Page" ]
import com.google.cloud.Page;
import com.google.cloud.*;
[ "com.google.cloud" ]
com.google.cloud;
2,569,220
/**
 * Returns the number of related applicationses where companyId = ?.
 *
 * @param companyId the company ID
 * @return the number of matching related applicationses
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
public static int countByc(long companyId)
    throws com.liferay.portal.kernel.exception.SystemException {
    return getPersistence().countByc(companyId);
}
static int function(long companyId) throws com.liferay.portal.kernel.exception.SystemException { return getPersistence().countByc(companyId); }
/** * Returns the number of related applicationses where companyId = &#63;. * * @param companyId the company ID * @return the number of matching related applicationses * @throws SystemException if a system exception occurred */
Returns the number of related applicationses where companyId = ?
countByc
{ "repo_name": "fraunhoferfokus/govapps", "path": "data-portlet/src/main/java/de/fraunhofer/fokus/movepla/service/persistence/RelatedApplicationsUtil.java", "license": "bsd-3-clause", "size": 58392 }
[ "com.liferay.portal.kernel.exception.SystemException" ]
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.exception.*;
[ "com.liferay.portal" ]
com.liferay.portal;
2,852,797