method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
list
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
list
libraries_info
stringlengths
6
661
id
int64
0
2.92M
public List<GraphNode> getLeaves();
List<GraphNode> function();
/** * Returns the leaf nodes of the Graph. * * @return a list containing <code>GraphNode</code> corressponding to the leaf nodes. */
Returns the leaf nodes of the Graph
getLeaves
{ "repo_name": "pegasus-isi/pegasus", "path": "src/edu/isi/pegasus/planner/partitioner/graph/Graph.java", "license": "apache-2.0", "size": 4972 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,350,017
public String toJson(JsonElement jsonElement) { StringWriter writer = new StringWriter(); toJson(jsonElement, writer); return writer.toString(); }
String function(JsonElement jsonElement) { StringWriter writer = new StringWriter(); toJson(jsonElement, writer); return writer.toString(); }
/** * Converts a tree of {@link JsonElement}s into its equivalent JSON representation. * * @param jsonElement root of a tree of {@link JsonElement}s * @return JSON String representation of the tree * @since 1.4 */
Converts a tree of <code>JsonElement</code>s into its equivalent JSON representation
toJson
{ "repo_name": "MarkehMe/FactionsAlias", "path": "src/com/markehme/factionsalias/libs/gson/Gson.java", "license": "agpl-3.0", "size": 40456 }
[ "java.io.StringWriter" ]
import java.io.StringWriter;
import java.io.*;
[ "java.io" ]
java.io;
1,470,311
public static long getOldestVisibleTimestamp(Map<byte[], Long> ttlByFamily, Transaction tx, boolean readNonTxnData) { if (readNonTxnData) { long maxTTL = getMaxTTL(ttlByFamily); return maxTTL < Long.MAX_VALUE ? System.currentTimeMillis() - maxTTL : 0; } return getOldestVisibleTimestamp(ttlByFamily, tx); }
static long function(Map<byte[], Long> ttlByFamily, Transaction tx, boolean readNonTxnData) { if (readNonTxnData) { long maxTTL = getMaxTTL(ttlByFamily); return maxTTL < Long.MAX_VALUE ? System.currentTimeMillis() - maxTTL : 0; } return getOldestVisibleTimestamp(ttlByFamily, tx); }
/** * Returns the oldest visible timestamp for the given transaction, based on the TTLs configured for each column * family. If no TTL is set on any column family, the oldest visible timestamp will be {@code 0}. * @param ttlByFamily A map of column family name to TTL value (in milliseconds) * @param tx The current transaction * @param readNonTxnData indicates that the timestamp returned should allow reading non-transactional data * @return The oldest timestamp that will be visible for the given transaction and TTL configuration */
Returns the oldest visible timestamp for the given transaction, based on the TTLs configured for each column family. If no TTL is set on any column family, the oldest visible timestamp will be 0
getOldestVisibleTimestamp
{ "repo_name": "poornachandra/incubator-tephra", "path": "tephra-core/src/main/java/org/apache/tephra/util/TxUtils.java", "license": "apache-2.0", "size": 9214 }
[ "java.util.Map", "org.apache.tephra.Transaction" ]
import java.util.Map; import org.apache.tephra.Transaction;
import java.util.*; import org.apache.tephra.*;
[ "java.util", "org.apache.tephra" ]
java.util; org.apache.tephra;
1,664,878
synchronized public boolean setStartAndEndListViewDates( String startingDateStr, String endingDateStr, StringBuilder errorMessage) { Date startDate, endDate; LIST_VIEW_DATE_FORMAT.setTimeZone(TimeService.getLocalTimeZone()); try { startDate = LIST_VIEW_DATE_FORMAT.parse(startingDateStr); endDate = LIST_VIEW_DATE_FORMAT.parse(endingDateStr); } catch (ParseException e) { errorMessage.append(rb.getString("java.alert.invalid")); return false; } // Do a sanity check if (startDate.after(endDate)) { errorMessage.append(rb.getString("java.alert.start")); return false; } // Use Gregorian calendars to pick out the year, month, and day. GregorianCalendar calStart = new GregorianCalendar(TimeService.getLocalTimeZone()); GregorianCalendar calEnd = new GregorianCalendar(TimeService.getLocalTimeZone()); calStart.setTime(startDate); calEnd.setTime(endDate); startingListViewDate = TimeService.newTimeLocal( calStart.get(GregorianCalendar.YEAR), calStart.get(GregorianCalendar.MONTH) + 1, calStart.get(GregorianCalendar.DAY_OF_MONTH), 0, 0, 0, 0); endingListViewDate = TimeService.newTimeLocal( calEnd.get(GregorianCalendar.YEAR), calEnd.get(GregorianCalendar.MONTH) + 1, calEnd.get(GregorianCalendar.DAY_OF_MONTH), 23, 59, 59, 99); // Set a flag to indicate that we've modified the defaults // and to switch our viewing mode. listViewDatesCustomized = true; listViewFilterMode = SHOW_CUSTOM_RANGE; return true; }
synchronized boolean function( String startingDateStr, String endingDateStr, StringBuilder errorMessage) { Date startDate, endDate; LIST_VIEW_DATE_FORMAT.setTimeZone(TimeService.getLocalTimeZone()); try { startDate = LIST_VIEW_DATE_FORMAT.parse(startingDateStr); endDate = LIST_VIEW_DATE_FORMAT.parse(endingDateStr); } catch (ParseException e) { errorMessage.append(rb.getString(STR)); return false; } if (startDate.after(endDate)) { errorMessage.append(rb.getString(STR)); return false; } GregorianCalendar calStart = new GregorianCalendar(TimeService.getLocalTimeZone()); GregorianCalendar calEnd = new GregorianCalendar(TimeService.getLocalTimeZone()); calStart.setTime(startDate); calEnd.setTime(endDate); startingListViewDate = TimeService.newTimeLocal( calStart.get(GregorianCalendar.YEAR), calStart.get(GregorianCalendar.MONTH) + 1, calStart.get(GregorianCalendar.DAY_OF_MONTH), 0, 0, 0, 0); endingListViewDate = TimeService.newTimeLocal( calEnd.get(GregorianCalendar.YEAR), calEnd.get(GregorianCalendar.MONTH) + 1, calEnd.get(GregorianCalendar.DAY_OF_MONTH), 23, 59, 59, 99); listViewDatesCustomized = true; listViewFilterMode = SHOW_CUSTOM_RANGE; return true; }
/** * Set the start/end dates from strings. Format an error message and return false if there is a problem. */
Set the start/end dates from strings. Format an error message and return false if there is a problem
setStartAndEndListViewDates
{ "repo_name": "harfalm/Sakai-10.1", "path": "calendar/calendar-tool/tool/src/java/org/sakaiproject/calendar/tool/CalendarFilter.java", "license": "apache-2.0", "size": 12433 }
[ "java.text.ParseException", "java.util.Date", "java.util.GregorianCalendar", "org.sakaiproject.time.cover.TimeService" ]
import java.text.ParseException; import java.util.Date; import java.util.GregorianCalendar; import org.sakaiproject.time.cover.TimeService;
import java.text.*; import java.util.*; import org.sakaiproject.time.cover.*;
[ "java.text", "java.util", "org.sakaiproject.time" ]
java.text; java.util; org.sakaiproject.time;
1,135,087
public static Value oci_field_name(Env env, @NotNull OracleStatement stmt, @NotNull int fieldNumber) { try { if (stmt == null) { return BooleanValue.FALSE; } JdbcResultResource resource = stmt.getResultSet(); return resource.getFieldName(env, fieldNumber); } catch (Exception e) { env.warning(e); return BooleanValue.FALSE; } }
static Value function(Env env, @NotNull OracleStatement stmt, @NotNull int fieldNumber) { try { if (stmt == null) { return BooleanValue.FALSE; } JdbcResultResource resource = stmt.getResultSet(); return resource.getFieldName(env, fieldNumber); } catch (Exception e) { env.warning(e); return BooleanValue.FALSE; } }
/** * Returns the name of a field from the statement */
Returns the name of a field from the statement
oci_field_name
{ "repo_name": "smba/oak", "path": "quercus/src/main/java/com/caucho/quercus/lib/db/OracleModule.java", "license": "lgpl-3.0", "size": 62182 }
[ "com.caucho.quercus.annotation.NotNull", "com.caucho.quercus.env.BooleanValue", "com.caucho.quercus.env.Env", "com.caucho.quercus.env.Value" ]
import com.caucho.quercus.annotation.NotNull; import com.caucho.quercus.env.BooleanValue; import com.caucho.quercus.env.Env; import com.caucho.quercus.env.Value;
import com.caucho.quercus.annotation.*; import com.caucho.quercus.env.*;
[ "com.caucho.quercus" ]
com.caucho.quercus;
1,972,719
public int getNumberOfReplicas() { return settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, null); }
public int getNumberOfReplicas() { return settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, null); }
/** * Returns the number of shards this index has. */
Returns the number of shards this index has
getNumberOfShards
{ "repo_name": "fuchao01/elasticsearch", "path": "core/src/main/java/org/elasticsearch/index/IndexSettings.java", "license": "apache-2.0", "size": 30573 }
[ "org.elasticsearch.cluster.metadata.IndexMetaData" ]
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.*;
[ "org.elasticsearch.cluster" ]
org.elasticsearch.cluster;
1,744,683
private void init() {// called from ctor, so must not be overridable setLayout(new BorderLayout(0, 5)); setBorder(makeBorder()); add(makeTitlePanel(), BorderLayout.NORTH); JPanel classnameRequestPanel = new JPanel(new BorderLayout(0, 5)); classnameRequestPanel.add(createClassnamePanel(), BorderLayout.NORTH); classnameRequestPanel.add(createParameterPanel(), BorderLayout.CENTER); add(classnameRequestPanel, BorderLayout.CENTER); }
void function() { setLayout(new BorderLayout(0, 5)); setBorder(makeBorder()); add(makeTitlePanel(), BorderLayout.NORTH); JPanel classnameRequestPanel = new JPanel(new BorderLayout(0, 5)); classnameRequestPanel.add(createClassnamePanel(), BorderLayout.NORTH); classnameRequestPanel.add(createParameterPanel(), BorderLayout.CENTER); add(classnameRequestPanel, BorderLayout.CENTER); }
/** * Initialize the GUI components and layout. */
Initialize the GUI components and layout
init
{ "repo_name": "KemingChen/JMeter-2.13", "path": "src/components/org/apache/jmeter/visualizers/backend/BackendListenerGui.java", "license": "apache-2.0", "size": 10590 }
[ "java.awt.BorderLayout", "javax.swing.JPanel" ]
import java.awt.BorderLayout; import javax.swing.JPanel;
import java.awt.*; import javax.swing.*;
[ "java.awt", "javax.swing" ]
java.awt; javax.swing;
1,213,207
@Before public void setUp() { customerFilter = new CustomerFilter(); customers = new ArrayList<>(); customers.add(customerOne); customers.add(customerTwo); customers.add(customerThree); customers.add(customerFour); customers.add(customerFive); when(customerOne.getCustomerId()).thenReturn(1); when(customerTwo.getCustomerId()).thenReturn(2); when(customerThree.getCustomerId()).thenReturn(3); when(customerFour.getCustomerId()).thenReturn(4); when(customerFive.getCustomerId()).thenReturn(5); }
void function() { customerFilter = new CustomerFilter(); customers = new ArrayList<>(); customers.add(customerOne); customers.add(customerTwo); customers.add(customerThree); customers.add(customerFour); customers.add(customerFive); when(customerOne.getCustomerId()).thenReturn(1); when(customerTwo.getCustomerId()).thenReturn(2); when(customerThree.getCustomerId()).thenReturn(3); when(customerFour.getCustomerId()).thenReturn(4); when(customerFive.getCustomerId()).thenReturn(5); }
/** * This method is used for initializing the test, and called before tests are executed. */
This method is used for initializing the test, and called before tests are executed
setUp
{ "repo_name": "Limmen/chinook", "path": "java_backend/chinook_rest/src/test/java/limmen/business/services/filters/CustomerFilterTest.java", "license": "mit", "size": 2632 }
[ "java.util.ArrayList", "org.mockito.Mockito" ]
import java.util.ArrayList; import org.mockito.Mockito;
import java.util.*; import org.mockito.*;
[ "java.util", "org.mockito" ]
java.util; org.mockito;
1,535,616
void exitAnnotationTypeElementDeclaration(@NotNull Java8Parser.AnnotationTypeElementDeclarationContext ctx);
void exitAnnotationTypeElementDeclaration(@NotNull Java8Parser.AnnotationTypeElementDeclarationContext ctx);
/** * Exit a parse tree produced by {@link Java8Parser#annotationTypeElementDeclaration}. * * @param ctx the parse tree */
Exit a parse tree produced by <code>Java8Parser#annotationTypeElementDeclaration</code>
exitAnnotationTypeElementDeclaration
{ "repo_name": "BigDaddy-Germany/WHOAMI", "path": "WHOAMI/src/de/aima13/whoami/modules/syntaxcheck/languages/antlrgen/Java8Listener.java", "license": "mit", "size": 97945 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
100,335
public void validateNewEntityInput(@NotNull UpdateCheckEntity inputEntity) throws Exception { validateInput(inputEntity); }
void function(@NotNull UpdateCheckEntity inputEntity) throws Exception { validateInput(inputEntity); }
/** * Given an update check entity, validate all fields and throw an exception if the validation failed. * * @param inputEntity Input data for creating a new update check entity * @throws Exception Throws an exception if the validation fails. */
Given an update check entity, validate all fields and throw an exception if the validation failed
validateNewEntityInput
{ "repo_name": "botorabi/Meet4Eat", "path": "src/main/java/net/m4e/update/business/UpdateCheckValidator.java", "license": "mit", "size": 3485 }
[ "org.jetbrains.annotations.NotNull" ]
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.*;
[ "org.jetbrains.annotations" ]
org.jetbrains.annotations;
2,467,683
protected void build() throws SVGException { StyleAttribute sty = new StyleAttribute(); if (getPres(sty.setName("id"))) { String newId = sty.getStringValue(); if (!newId.equals(id)) { diagram.removeElement(id); id = newId; diagram.setElement(this.id, this); } } if (getPres(sty.setName("class"))) { cssClass = sty.getStringValue(); } if (getPres(sty.setName("xml:base"))) { xmlBase = sty.getURIValue(); } //Build children for (int i = 0; i < children.size(); ++i) { SVGElement ele = (SVGElement) children.get(i); ele.build(); } }
void function() throws SVGException { StyleAttribute sty = new StyleAttribute(); if (getPres(sty.setName("id"))) { String newId = sty.getStringValue(); if (!newId.equals(id)) { diagram.removeElement(id); id = newId; diagram.setElement(this.id, this); } } if (getPres(sty.setName("class"))) { cssClass = sty.getStringValue(); } if (getPres(sty.setName(STR))) { xmlBase = sty.getURIValue(); } for (int i = 0; i < children.size(); ++i) { SVGElement ele = (SVGElement) children.get(i); ele.build(); } }
/** * Called by internal processes to rebuild the geometry of this node from * it's presentation attributes, style attributes and animated tracks. */
Called by internal processes to rebuild the geometry of this node from it's presentation attributes, style attributes and animated tracks
build
{ "repo_name": "CURocketry/Ground_Station_GUI", "path": "src/com/kitfox/svg/SVGElement.java", "license": "gpl-3.0", "size": 26311 }
[ "com.kitfox.svg.xml.StyleAttribute" ]
import com.kitfox.svg.xml.StyleAttribute;
import com.kitfox.svg.xml.*;
[ "com.kitfox.svg" ]
com.kitfox.svg;
1,220,083
public static void validate (@Nonnull final Schema aSchema, @Nonnull final Source aXML, @Nonnull final ErrorList aErrorList, @Nullable final Locale aLocale) { ValueEnforcer.notNull (aSchema, "Schema"); ValueEnforcer.notNull (aXML, "XML"); ValueEnforcer.notNull (aErrorList, "ErrorList"); // Build the validator final Validator aValidator = aSchema.newValidator (); if (aLocale != null) EXMLParserProperty.GENERAL_LOCALE.applyTo (aValidator, aLocale); aValidator.setErrorHandler (new WrappedCollectingSAXErrorHandler (aErrorList)); try { aValidator.validate (aXML, null); } catch (final Exception ex) { // Most likely the input XML document is invalid throw new IllegalArgumentException ("Failed to validate the XML " + aXML + " against " + aSchema, ex); } }
static void function (@Nonnull final Schema aSchema, @Nonnull final Source aXML, @Nonnull final ErrorList aErrorList, @Nullable final Locale aLocale) { ValueEnforcer.notNull (aSchema, STR); ValueEnforcer.notNull (aXML, "XML"); ValueEnforcer.notNull (aErrorList, STR); final Validator aValidator = aSchema.newValidator (); if (aLocale != null) EXMLParserProperty.GENERAL_LOCALE.applyTo (aValidator, aLocale); aValidator.setErrorHandler (new WrappedCollectingSAXErrorHandler (aErrorList)); try { aValidator.validate (aXML, null); } catch (final Exception ex) { throw new IllegalArgumentException (STR + aXML + STR + aSchema, ex); } }
/** * Validate the passed XML against the passed XSD and put all errors in the * passed error list. * * @param aSchema * The source XSD. May not be <code>null</code>. * @param aXML * The XML to be validated. May not be <code>null</code>. * @param aErrorList * The error list to be filled. May not be <code>null</code>. * @param aLocale * The locale to use for error messages. May be <code>null</code> to * use the system default locale. * @throws IllegalArgumentException * If XSD validation failed with an exception * @since 9.0.1 */
Validate the passed XML against the passed XSD and put all errors in the passed error list
validate
{ "repo_name": "phax/ph-commons", "path": "ph-xml/src/main/java/com/helger/xml/schema/XMLSchemaValidationHelper.java", "license": "apache-2.0", "size": 5348 }
[ "com.helger.commons.ValueEnforcer", "com.helger.commons.error.list.ErrorList", "com.helger.xml.EXMLParserProperty", "com.helger.xml.sax.WrappedCollectingSAXErrorHandler", "java.util.Locale", "javax.annotation.Nonnull", "javax.annotation.Nullable", "javax.xml.transform.Source", "javax.xml.validation.Schema", "javax.xml.validation.Validator" ]
import com.helger.commons.ValueEnforcer; import com.helger.commons.error.list.ErrorList; import com.helger.xml.EXMLParserProperty; import com.helger.xml.sax.WrappedCollectingSAXErrorHandler; import java.util.Locale; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.xml.transform.Source; import javax.xml.validation.Schema; import javax.xml.validation.Validator;
import com.helger.commons.*; import com.helger.commons.error.list.*; import com.helger.xml.*; import com.helger.xml.sax.*; import java.util.*; import javax.annotation.*; import javax.xml.transform.*; import javax.xml.validation.*;
[ "com.helger.commons", "com.helger.xml", "java.util", "javax.annotation", "javax.xml" ]
com.helger.commons; com.helger.xml; java.util; javax.annotation; javax.xml;
2,589,594
if (ma.length != mb.length) return false; for (int i = 0; i < ma.length; i++) { double[] _ma = ma[i]; double[] _mb = mb[i]; if (!Arrays.equals(_ma, _mb)) return false; } return true; }
if (ma.length != mb.length) return false; for (int i = 0; i < ma.length; i++) { double[] _ma = ma[i]; double[] _mb = mb[i]; if (!Arrays.equals(_ma, _mb)) return false; } return true; }
/** * Tests two matrices for equality. * * @param ma The first 2D matrix to check. * @param mb The second 2D matrix to check. * @return True iff the first and second matrices are equal. */
Tests two matrices for equality
equals
{ "repo_name": "jmogarrio/tetrad", "path": "tetrad-lib/src/main/java/edu/cmu/tetrad/util/MatrixUtils.java", "license": "gpl-2.0", "size": 31535 }
[ "java.util.Arrays" ]
import java.util.Arrays;
import java.util.*;
[ "java.util" ]
java.util;
80,274
@Test public void testRegistrationCluster() throws Exception { TestChangeListener listener = new TestChangeListener(EnumSet.of(VtnUpdateType.CREATED)); Logger logger = listener.getLogger(); InstanceIdentifier<VtnPort> path = listener.getWildcardPath(); LogicalDatastoreType store = LogicalDatastoreType.OPERATIONAL; DataTreeIdentifier<VtnPort> ident = new DataTreeIdentifier<>(store, path); DataTreeChangeService service = mock(DataTreeChangeService.class); @SuppressWarnings("unchecked") ListenerRegistration<ClusteredDataTreeChangeListener> reg = mock(ListenerRegistration.class); when(service.registerDataTreeChangeListener( eq(ident), isA(ClusteredDataTreeChangeListener.class))). thenReturn(reg); listener.registerListener(service, store, true); ArgumentCaptor<ClusteredDataTreeChangeListener> captor = ArgumentCaptor.forClass(ClusteredDataTreeChangeListener.class); verify(service). registerDataTreeChangeListener(eq(ident), captor.capture()); List<ClusteredDataTreeChangeListener> wrappers = captor.getAllValues(); assertEquals(1, wrappers.size()); ClusteredDataTreeChangeListener cdcl = wrappers.get(0); assertEquals(listener, getFieldValue(cdcl, DataTreeChangeListener.class, "theListener")); verifyZeroInteractions(reg, logger); // ClusteredListener should toss received events to the actual // listener. Object ctx = new Object(); List<NotifiedEvent<VtnPort>> created = Collections.singletonList(newCreationEvent(1L, 1L)); Collection<DataTreeModification<VtnPort>> changes = createEvent(store, created); listener.setDataChangeEvent(changes, ctx); assertSame(changes, listener.getEvent()); assertSame(ctx, listener.getContext()); assertEquals(Collections.<NotifiedEvent<VtnPort>>emptyList(), listener.getCreationEvents()); cdcl.onDataTreeChanged(changes); assertSame(null, listener.getEvent()); assertSame(null, listener.getContext()); assertEquals(created, listener.getCreationEvents()); // Unregister a listener. // Registration should be closed only one time. 
for (int i = 0; i < 10; i++) { listener.close(); if (i == 0) { verify(reg).close(); } verifyNoMoreInteractions(service, logger, reg); } }
void function() throws Exception { TestChangeListener listener = new TestChangeListener(EnumSet.of(VtnUpdateType.CREATED)); Logger logger = listener.getLogger(); InstanceIdentifier<VtnPort> path = listener.getWildcardPath(); LogicalDatastoreType store = LogicalDatastoreType.OPERATIONAL; DataTreeIdentifier<VtnPort> ident = new DataTreeIdentifier<>(store, path); DataTreeChangeService service = mock(DataTreeChangeService.class); @SuppressWarnings(STR) ListenerRegistration<ClusteredDataTreeChangeListener> reg = mock(ListenerRegistration.class); when(service.registerDataTreeChangeListener( eq(ident), isA(ClusteredDataTreeChangeListener.class))). thenReturn(reg); listener.registerListener(service, store, true); ArgumentCaptor<ClusteredDataTreeChangeListener> captor = ArgumentCaptor.forClass(ClusteredDataTreeChangeListener.class); verify(service). registerDataTreeChangeListener(eq(ident), captor.capture()); List<ClusteredDataTreeChangeListener> wrappers = captor.getAllValues(); assertEquals(1, wrappers.size()); ClusteredDataTreeChangeListener cdcl = wrappers.get(0); assertEquals(listener, getFieldValue(cdcl, DataTreeChangeListener.class, STR)); verifyZeroInteractions(reg, logger); Object ctx = new Object(); List<NotifiedEvent<VtnPort>> created = Collections.singletonList(newCreationEvent(1L, 1L)); Collection<DataTreeModification<VtnPort>> changes = createEvent(store, created); listener.setDataChangeEvent(changes, ctx); assertSame(changes, listener.getEvent()); assertSame(ctx, listener.getContext()); assertEquals(Collections.<NotifiedEvent<VtnPort>>emptyList(), listener.getCreationEvents()); cdcl.onDataTreeChanged(changes); assertSame(null, listener.getEvent()); assertSame(null, listener.getContext()); assertEquals(created, listener.getCreationEvents()); for (int i = 0; i < 10; i++) { listener.close(); if (i == 0) { verify(reg).close(); } verifyNoMoreInteractions(service, logger, reg); } }
/** * Test case for registration and unregistration. * * <p> * Register clustered listener. * </p> * * <ul> * <li>{@link DataStoreListener#registerListener(DataTreeChangeService, LogicalDatastoreType, boolean)}</li> * <li>{@link DataStoreListener#close()}</li> * </ul> * * @throws Exception An error occurred. */
Test case for registration and unregistration. Register clustered listener. <code>DataStoreListener#registerListener(DataTreeChangeService, LogicalDatastoreType, boolean)</code> <code>DataStoreListener#close()</code>
testRegistrationCluster
{ "repo_name": "opendaylight/vtn", "path": "manager/implementation/src/test/java/org/opendaylight/vtn/manager/internal/util/DataStoreListenerTest.java", "license": "epl-1.0", "size": 42964 }
[ "java.util.Collection", "java.util.Collections", "java.util.EnumSet", "java.util.List", "org.mockito.ArgumentCaptor", "org.mockito.Mockito", "org.opendaylight.controller.md.sal.binding.api.ClusteredDataTreeChangeListener", "org.opendaylight.controller.md.sal.binding.api.DataTreeChangeListener", "org.opendaylight.controller.md.sal.binding.api.DataTreeChangeService", "org.opendaylight.controller.md.sal.binding.api.DataTreeIdentifier", "org.opendaylight.controller.md.sal.binding.api.DataTreeModification", "org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType", "org.opendaylight.yang.gen.v1.urn.opendaylight.vtn.impl.inventory.rev150209.vtn.node.info.VtnPort", "org.opendaylight.yang.gen.v1.urn.opendaylight.vtn.types.rev150209.VtnUpdateType", "org.opendaylight.yangtools.concepts.ListenerRegistration", "org.opendaylight.yangtools.yang.binding.InstanceIdentifier", "org.slf4j.Logger" ]
import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.List; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.opendaylight.controller.md.sal.binding.api.ClusteredDataTreeChangeListener; import org.opendaylight.controller.md.sal.binding.api.DataTreeChangeListener; import org.opendaylight.controller.md.sal.binding.api.DataTreeChangeService; import org.opendaylight.controller.md.sal.binding.api.DataTreeIdentifier; import org.opendaylight.controller.md.sal.binding.api.DataTreeModification; import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType; import org.opendaylight.yang.gen.v1.urn.opendaylight.vtn.impl.inventory.rev150209.vtn.node.info.VtnPort; import org.opendaylight.yang.gen.v1.urn.opendaylight.vtn.types.rev150209.VtnUpdateType; import org.opendaylight.yangtools.concepts.ListenerRegistration; import org.opendaylight.yangtools.yang.binding.InstanceIdentifier; import org.slf4j.Logger;
import java.util.*; import org.mockito.*; import org.opendaylight.controller.md.sal.binding.api.*; import org.opendaylight.controller.md.sal.common.api.data.*; import org.opendaylight.yang.gen.v1.urn.opendaylight.vtn.impl.inventory.rev150209.vtn.node.info.*; import org.opendaylight.yang.gen.v1.urn.opendaylight.vtn.types.rev150209.*; import org.opendaylight.yangtools.concepts.*; import org.opendaylight.yangtools.yang.binding.*; import org.slf4j.*;
[ "java.util", "org.mockito", "org.opendaylight.controller", "org.opendaylight.yang", "org.opendaylight.yangtools", "org.slf4j" ]
java.util; org.mockito; org.opendaylight.controller; org.opendaylight.yang; org.opendaylight.yangtools; org.slf4j;
128,394
public void postStopClientArchive() throws Exception { final String method = "postStopClientArchive"; Log.entering(c, method); try { checkLogsForErrorsAndWarnings(); } finally { ignorePatterns.clear(); Log.info(c, method, "Moving logs to the output folder"); SimpleDateFormat sdf = new SimpleDateFormat("dd-MM-yyyy-HH-mm-ss"); Date d = new Date(System.currentTimeMillis()); this.logStamp = sdf.format(d); String logDirectoryName = pathToAutoFVTOutputClientsFolder + "/" + clientToUse + "-" + logStamp; LocalFile logFolder = new LocalFile(logDirectoryName); RemoteFile clientFolder = new RemoteFile(machine, clientRoot); runJextract(clientFolder); // Copy the log files: try to move them instead if we can recursivelyCopyDirectory(clientFolder, logFolder, false, true, true); deleteClientMarkerFile(); Log.exiting(c, method); } }
void function() throws Exception { final String method = STR; Log.entering(c, method); try { checkLogsForErrorsAndWarnings(); } finally { ignorePatterns.clear(); Log.info(c, method, STR); SimpleDateFormat sdf = new SimpleDateFormat(STR); Date d = new Date(System.currentTimeMillis()); this.logStamp = sdf.format(d); String logDirectoryName = pathToAutoFVTOutputClientsFolder + "/" + clientToUse + "-" + logStamp; LocalFile logFolder = new LocalFile(logDirectoryName); RemoteFile clientFolder = new RemoteFile(machine, clientRoot); runJextract(clientFolder); recursivelyCopyDirectory(clientFolder, logFolder, false, true, true); deleteClientMarkerFile(); Log.exiting(c, method); } }
/** * This method is used to archive client logs after the client completes. * Also, this will prevent the client log contents from being lost (over written) in a restart case. */
This method is used to archive client logs after the client completes. Also, this will prevent the client log contents from being lost (over written) in a restart case
postStopClientArchive
{ "repo_name": "kgibm/open-liberty", "path": "dev/fattest.simplicity/src/componenttest/topology/impl/LibertyClient.java", "license": "epl-1.0", "size": 176215 }
[ "com.ibm.websphere.simplicity.LocalFile", "com.ibm.websphere.simplicity.RemoteFile", "com.ibm.websphere.simplicity.log.Log", "java.text.SimpleDateFormat", "java.util.Date" ]
import com.ibm.websphere.simplicity.LocalFile; import com.ibm.websphere.simplicity.RemoteFile; import com.ibm.websphere.simplicity.log.Log; import java.text.SimpleDateFormat; import java.util.Date;
import com.ibm.websphere.simplicity.*; import com.ibm.websphere.simplicity.log.*; import java.text.*; import java.util.*;
[ "com.ibm.websphere", "java.text", "java.util" ]
com.ibm.websphere; java.text; java.util;
277,355
public Renderer<?> getRenderer() { return (Renderer<?>) getState().rendererConnector; }
Renderer<?> function() { return (Renderer<?>) getState().rendererConnector; }
/** * Returns the renderer instance used by this column. * * @return the renderer */
Returns the renderer instance used by this column
getRenderer
{ "repo_name": "mstahv/framework", "path": "compatibility-server/src/main/java/com/vaadin/v7/ui/Grid.java", "license": "apache-2.0", "size": 273176 }
[ "com.vaadin.v7.ui.renderers.Renderer" ]
import com.vaadin.v7.ui.renderers.Renderer;
import com.vaadin.v7.ui.renderers.*;
[ "com.vaadin.v7" ]
com.vaadin.v7;
19,841
VirtualPort getPort(FixedIp fixedIP);
VirtualPort getPort(FixedIp fixedIP);
/** * Returns the virtualPort associated with the fixedIP. * * @param fixedIP the fixedIP identifier * @return virtualPort. */
Returns the virtualPort associated with the fixedIP
getPort
{ "repo_name": "sdnwiselab/onos", "path": "apps/vtn/vtnrsc/src/main/java/org/onosproject/vtnrsc/virtualport/VirtualPortService.java", "license": "apache-2.0", "size": 4175 }
[ "org.onosproject.vtnrsc.FixedIp", "org.onosproject.vtnrsc.VirtualPort" ]
import org.onosproject.vtnrsc.FixedIp; import org.onosproject.vtnrsc.VirtualPort;
import org.onosproject.vtnrsc.*;
[ "org.onosproject.vtnrsc" ]
org.onosproject.vtnrsc;
519,768
public void setSpecialDateString(String specialDateString) { this.specialDate = Universal.getCalenderDate(specialDateString); }
void function(String specialDateString) { this.specialDate = Universal.getCalenderDate(specialDateString); }
/** * Sets the specialDate using a String representation * * @param specialDateString */
Sets the specialDate using a String representation
setSpecialDateString
{ "repo_name": "davidlad123/spine", "path": "spine/build/spine-0.9-src/src/com/zphinx/spine/members/AccountSecrets.java", "license": "gpl-3.0", "size": 9025 }
[ "com.zphinx.spine.Universal" ]
import com.zphinx.spine.Universal;
import com.zphinx.spine.*;
[ "com.zphinx.spine" ]
com.zphinx.spine;
314,347
public static DataBuffer readUnknown(DataInputStream s, long length) { DataBuffer buffer = Nd4j.createBuffer(length); buffer.read(s); // if buffer is uncompressed, it'll be valid buffer, so we'll just return it if (buffer.dataType() != Type.COMPRESSED) return buffer; else { try { // if buffer is compressed one, we''ll restore it here String compressionAlgorithm = s.readUTF(); long compressedLength = s.readLong(); long originalLength = s.readLong(); long numberOfElements = s.readLong(); byte[] temp = new byte[(int) compressedLength]; for (int i = 0; i < compressedLength; i++) { temp[i] = s.readByte(); } Pointer pointer = new BytePointer(temp); CompressionDescriptor descriptor = new CompressionDescriptor(); descriptor.setCompressedLength(compressedLength); descriptor.setCompressionAlgorithm(compressionAlgorithm); descriptor.setOriginalLength(originalLength); descriptor.setNumberOfElements(numberOfElements); return new CompressedDataBuffer(pointer, descriptor); } catch (Exception e) { throw new RuntimeException(e); } } }
static DataBuffer function(DataInputStream s, long length) { DataBuffer buffer = Nd4j.createBuffer(length); buffer.read(s); if (buffer.dataType() != Type.COMPRESSED) return buffer; else { try { String compressionAlgorithm = s.readUTF(); long compressedLength = s.readLong(); long originalLength = s.readLong(); long numberOfElements = s.readLong(); byte[] temp = new byte[(int) compressedLength]; for (int i = 0; i < compressedLength; i++) { temp[i] = s.readByte(); } Pointer pointer = new BytePointer(temp); CompressionDescriptor descriptor = new CompressionDescriptor(); descriptor.setCompressedLength(compressedLength); descriptor.setCompressionAlgorithm(compressionAlgorithm); descriptor.setOriginalLength(originalLength); descriptor.setNumberOfElements(numberOfElements); return new CompressedDataBuffer(pointer, descriptor); } catch (Exception e) { throw new RuntimeException(e); } } }
/** * Drop-in replacement wrapper for BaseDataBuffer.read() method, aware of CompressedDataBuffer * @param s * @return */
Drop-in replacement wrapper for BaseDataBuffer.read() method, aware of CompressedDataBuffer
readUnknown
{ "repo_name": "deeplearning4j/nd4j", "path": "nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/compression/CompressedDataBuffer.java", "license": "apache-2.0", "size": 6828 }
[ "java.io.DataInputStream", "org.bytedeco.javacpp.BytePointer", "org.bytedeco.javacpp.Pointer", "org.nd4j.linalg.api.buffer.DataBuffer", "org.nd4j.linalg.factory.Nd4j" ]
import java.io.DataInputStream; import org.bytedeco.javacpp.BytePointer; import org.bytedeco.javacpp.Pointer; import org.nd4j.linalg.api.buffer.DataBuffer; import org.nd4j.linalg.factory.Nd4j;
import java.io.*; import org.bytedeco.javacpp.*; import org.nd4j.linalg.api.buffer.*; import org.nd4j.linalg.factory.*;
[ "java.io", "org.bytedeco.javacpp", "org.nd4j.linalg" ]
java.io; org.bytedeco.javacpp; org.nd4j.linalg;
1,648,933
public ServiceFuture<ThroughputSettingsGetResultsInner> updateMongoDBDatabaseThroughputAsync(String resourceGroupName, String accountName, String databaseName, ThroughputSettingsUpdateParameters updateThroughputParameters, final ServiceCallback<ThroughputSettingsGetResultsInner> serviceCallback) { return ServiceFuture.fromResponse(updateMongoDBDatabaseThroughputWithServiceResponseAsync(resourceGroupName, accountName, databaseName, updateThroughputParameters), serviceCallback); }
ServiceFuture<ThroughputSettingsGetResultsInner> function(String resourceGroupName, String accountName, String databaseName, ThroughputSettingsUpdateParameters updateThroughputParameters, final ServiceCallback<ThroughputSettingsGetResultsInner> serviceCallback) { return ServiceFuture.fromResponse(updateMongoDBDatabaseThroughputWithServiceResponseAsync(resourceGroupName, accountName, databaseName, updateThroughputParameters), serviceCallback); }
/** * Update RUs per second of the an Azure Cosmos DB MongoDB database. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName Cosmos DB database account name. * @param databaseName Cosmos DB database name. * @param updateThroughputParameters The RUs per second of the parameters to provide for the current MongoDB database. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */
Update RUs per second of the an Azure Cosmos DB MongoDB database
updateMongoDBDatabaseThroughputAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/cosmos/mgmt-v2020_06_01_preview/src/main/java/com/microsoft/azure/management/cosmosdb/v2020_06_01_preview/implementation/MongoDBResourcesInner.java", "license": "mit", "size": 126045 }
[ "com.microsoft.azure.management.cosmosdb.v2020_06_01_preview.ThroughputSettingsUpdateParameters", "com.microsoft.rest.ServiceCallback", "com.microsoft.rest.ServiceFuture" ]
import com.microsoft.azure.management.cosmosdb.v2020_06_01_preview.ThroughputSettingsUpdateParameters; import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture;
import com.microsoft.azure.management.cosmosdb.v2020_06_01_preview.*; import com.microsoft.rest.*;
[ "com.microsoft.azure", "com.microsoft.rest" ]
com.microsoft.azure; com.microsoft.rest;
1,864,010
synchronized void add(StateMachine sm, Message msg, String messageInfo, IState state, IState orgState, IState transToState) { mCount += 1; if (mLogRecVector.size() < mMaxSize) { mLogRecVector.add(new LogRec(sm, msg, messageInfo, state, orgState, transToState)); } else { LogRec pmi = mLogRecVector.get(mOldestIndex); mOldestIndex += 1; if (mOldestIndex >= mMaxSize) { mOldestIndex = 0; } pmi.update(sm, msg, messageInfo, state, orgState, transToState); } } } private static class SmHandler extends Handler { private boolean mHasQuit = false; private boolean mDbg = false; private static final Object mSmHandlerObj = new Object(); private Message mMsg; private LogRecords mLogRecords = new LogRecords(); private boolean mIsConstructionCompleted; private StateInfo mStateStack[]; private int mStateStackTopIndex = -1; private StateInfo mTempStateStack[]; private int mTempStateStackCount; private HaltingState mHaltingState = new HaltingState(); private QuittingState mQuittingState = new QuittingState(); private StateMachine mSm; private class StateInfo { State state; StateInfo parentStateInfo; boolean active;
synchronized void add(StateMachine sm, Message msg, String messageInfo, IState state, IState orgState, IState transToState) { mCount += 1; if (mLogRecVector.size() < mMaxSize) { mLogRecVector.add(new LogRec(sm, msg, messageInfo, state, orgState, transToState)); } else { LogRec pmi = mLogRecVector.get(mOldestIndex); mOldestIndex += 1; if (mOldestIndex >= mMaxSize) { mOldestIndex = 0; } pmi.update(sm, msg, messageInfo, state, orgState, transToState); } } } private static class SmHandler extends Handler { private boolean mHasQuit = false; private boolean mDbg = false; private static final Object mSmHandlerObj = new Object(); private Message mMsg; private LogRecords mLogRecords = new LogRecords(); private boolean mIsConstructionCompleted; private StateInfo mStateStack[]; private int mStateStackTopIndex = -1; private StateInfo mTempStateStack[]; private int mTempStateStackCount; private HaltingState mHaltingState = new HaltingState(); private QuittingState mQuittingState = new QuittingState(); private StateMachine mSm; private class StateInfo { State state; StateInfo parentStateInfo; boolean active;
/** * Add a processed message. * * @param msg * @param messageInfo to be stored * @param state that handled the message * @param orgState is the first state the received the message but * did not processes the message. * @param transToState is the state that was transitioned to after the message was * processed. * */
Add a processed message
add
{ "repo_name": "OmniEvo/android_frameworks_base", "path": "core/java/com/android/internal/util/StateMachine.java", "license": "gpl-3.0", "size": 69732 }
[ "android.os.Handler", "android.os.Message" ]
import android.os.Handler; import android.os.Message;
import android.os.*;
[ "android.os" ]
android.os;
370,047
public Observable<ServiceResponse<Void>> beginResetAADProfileWithServiceResponseAsync(String resourceGroupName, String resourceName, ManagedClusterAADProfile parameters) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (resourceName == null) { throw new IllegalArgumentException("Parameter resourceName is required and cannot be null."); } if (parameters == null) { throw new IllegalArgumentException("Parameter parameters is required and cannot be null."); }
Observable<ServiceResponse<Void>> function(String resourceGroupName, String resourceName, ManagedClusterAADProfile parameters) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (resourceName == null) { throw new IllegalArgumentException(STR); } if (parameters == null) { throw new IllegalArgumentException(STR); }
/** * Reset AAD Profile of a managed cluster. * Update the AAD Profile for a managed cluster. * * @param resourceGroupName The name of the resource group. * @param resourceName The name of the managed cluster resource. * @param parameters Parameters supplied to the Reset AAD Profile operation for a Managed Cluster. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceResponse} object if successful. */
Reset AAD Profile of a managed cluster. Update the AAD Profile for a managed cluster
beginResetAADProfileWithServiceResponseAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/containerservice/mgmt-v2020_07_01/src/main/java/com/microsoft/azure/management/containerservice/v2020_07_01/implementation/ManagedClustersInner.java", "license": "mit", "size": 155942 }
[ "com.microsoft.azure.management.containerservice.v2020_07_01.ManagedClusterAADProfile", "com.microsoft.rest.ServiceResponse" ]
import com.microsoft.azure.management.containerservice.v2020_07_01.ManagedClusterAADProfile; import com.microsoft.rest.ServiceResponse;
import com.microsoft.azure.management.containerservice.v2020_07_01.*; import com.microsoft.rest.*;
[ "com.microsoft.azure", "com.microsoft.rest" ]
com.microsoft.azure; com.microsoft.rest;
1,050,530
public void setDragView(View dragView) { if (mDragView != null) { mDragView.setOnClickListener(null); }
void function(View dragView) { if (mDragView != null) { mDragView.setOnClickListener(null); }
/** * Set the draggable view portion. Use to null, to allow the whole panel to be draggable * * @param dragView A view that will be used to drag the panel. */
Set the draggable view portion. Use to null, to allow the whole panel to be draggable
setDragView
{ "repo_name": "mikes222/BsnsTemplate", "path": "app/src/main/java/com/mschwartz/bsnstemplate/ui/slidinguppanel/SlidingUpPanelLayout.java", "license": "apache-2.0", "size": 50828 }
[ "android.view.View" ]
import android.view.View;
import android.view.*;
[ "android.view" ]
android.view;
164,030
@ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<SearchIndexerSkillset>> getWithResponseAsync( String skillsetName, RequestOptions requestOptions, Context context) { final String accept = "application/json; odata.metadata=minimal"; UUID xMsClientRequestIdInternal = null; if (requestOptions != null) { xMsClientRequestIdInternal = requestOptions.getXMsClientRequestId(); } UUID xMsClientRequestId = xMsClientRequestIdInternal; return service.get( this.client.getEndpoint(), skillsetName, xMsClientRequestId, this.client.getApiVersion(), accept, context); }
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<SearchIndexerSkillset>> function( String skillsetName, RequestOptions requestOptions, Context context) { final String accept = STR; UUID xMsClientRequestIdInternal = null; if (requestOptions != null) { xMsClientRequestIdInternal = requestOptions.getXMsClientRequestId(); } UUID xMsClientRequestId = xMsClientRequestIdInternal; return service.get( this.client.getEndpoint(), skillsetName, xMsClientRequestId, this.client.getApiVersion(), accept, context); }
/** * Retrieves a skillset in a search service. * * @param skillsetName The name of the skillset to retrieve. * @param requestOptions Parameter group. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws SearchErrorException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of skills along with {@link Response} on successful completion of {@link Mono}. */
Retrieves a skillset in a search service
getWithResponseAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/search/azure-search-documents/src/main/java/com/azure/search/documents/indexes/implementation/SkillsetsImpl.java", "license": "mit", "size": 16473 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.Response", "com.azure.core.util.Context", "com.azure.search.documents.indexes.implementation.models.RequestOptions", "com.azure.search.documents.indexes.models.SearchIndexerSkillset" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.search.documents.indexes.implementation.models.RequestOptions; import com.azure.search.documents.indexes.models.SearchIndexerSkillset;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.search.documents.indexes.implementation.models.*; import com.azure.search.documents.indexes.models.*;
[ "com.azure.core", "com.azure.search" ]
com.azure.core; com.azure.search;
927,626
@Test public void testSerialize() throws Exception { byte[] bytes = new byte[6]; for (int i = 0; i < 100; i++) { random.nextBytes(bytes); long addr = EtherAddress.toLong(bytes); String hex = ByteUtils.toHexString(bytes); MacAddress mac = new MacAddress(hex); EtherAddress ea = new EtherAddress(addr); EtherAddress ea1 = serializeTest(ea, EtherAddress.class); assertEquals(addr, ea1.getAddress()); assertArrayEquals(bytes, ea1.getBytes()); assertEquals(hex, ea1.getText()); assertEquals(mac, ea1.getMacAddress()); EtherAddress ea2 = serializeTest(ea1, EtherAddress.class); assertEquals(addr, ea2.getAddress()); assertArrayEquals(bytes, ea2.getBytes()); assertEquals(hex, ea2.getText()); assertEquals(mac, ea2.getMacAddress()); } }
void function() throws Exception { byte[] bytes = new byte[6]; for (int i = 0; i < 100; i++) { random.nextBytes(bytes); long addr = EtherAddress.toLong(bytes); String hex = ByteUtils.toHexString(bytes); MacAddress mac = new MacAddress(hex); EtherAddress ea = new EtherAddress(addr); EtherAddress ea1 = serializeTest(ea, EtherAddress.class); assertEquals(addr, ea1.getAddress()); assertArrayEquals(bytes, ea1.getBytes()); assertEquals(hex, ea1.getText()); assertEquals(mac, ea1.getMacAddress()); EtherAddress ea2 = serializeTest(ea1, EtherAddress.class); assertEquals(addr, ea2.getAddress()); assertArrayEquals(bytes, ea2.getBytes()); assertEquals(hex, ea2.getText()); assertEquals(mac, ea2.getMacAddress()); } }
/** * Ensure that {@link EtherAddress} is serializable. * * @throws Exception An error occurred. */
Ensure that <code>EtherAddress</code> is serializable
testSerialize
{ "repo_name": "opendaylight/vtn", "path": "manager/api/src/test/java/org/opendaylight/vtn/manager/util/EtherAddressTest.java", "license": "epl-1.0", "size": 25559 }
[ "org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev130715.MacAddress" ]
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev130715.MacAddress;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev130715.*;
[ "org.opendaylight.yang" ]
org.opendaylight.yang;
598,887
void submit(String applicationName, String jsonConfig) throws Exception; /** * Register a jar file containing applications with * the application service. * * <p>See {@link ApplicationService#registerJar(String, String) ApplicationService.registerJar()}
void submit(String applicationName, String jsonConfig) throws Exception; /** * Register a jar file containing applications with * the application service. * * <p>See {@link ApplicationService#registerJar(String, String) ApplicationService.registerJar()}
/** * Submit an application registered with the application service. * * @param applicationName Name of the application. * @param jsonConfig JsonObject configuration serialized as a JSON String. * Null or an empty String is equivalent to an empty JSON object. * * @throws Exception Error submitting application. * @see ApplicationService */
Submit an application registered with the application service
submit
{ "repo_name": "dlaboss/incubator-quarks", "path": "api/topology/src/main/java/org/apache/edgent/topology/mbeans/ApplicationServiceMXBean.java", "license": "apache-2.0", "size": 2236 }
[ "org.apache.edgent.topology.services.ApplicationService" ]
import org.apache.edgent.topology.services.ApplicationService;
import org.apache.edgent.topology.services.*;
[ "org.apache.edgent" ]
org.apache.edgent;
620,915
public static void main(String[] args) throws Exception { Configuration startCfg = new Configuration(true); SliveTest runner = new SliveTest(startCfg); int ec = ToolRunner.run(runner, args); System.exit(ec); }
static void function(String[] args) throws Exception { Configuration startCfg = new Configuration(true); SliveTest runner = new SliveTest(startCfg); int ec = ToolRunner.run(runner, args); System.exit(ec); }
/** * The main program entry point. Sets up and parses the command line options, * then merges those options and then dumps those options and the runs the * corresponding map/reduce job that those operations represent and then * writes the report for the output of the run that occurred. * * @param args * command line options */
The main program entry point. Sets up and parses the command line options, then merges those options and then dumps those options and the runs the corresponding map/reduce job that those operations represent and then writes the report for the output of the run that occurred
main
{ "repo_name": "ulmon/hadoop1.2.1", "path": "src/test/org/apache/hadoop/fs/slive/SliveTest.java", "license": "apache-2.0", "size": 10824 }
[ "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.util.ToolRunner" ]
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.conf.*; import org.apache.hadoop.util.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
541,311
public static String joinStrings(List<String> strings, boolean fixCase, char withChar) { if (strings == null || strings.size() == 0) { return ""; } StringBuilder result = null; for (String s : strings) { if (fixCase) { s = fixCase(s); } if (result == null) { result = new StringBuilder(s); } else { result.append(withChar); result.append(s); } } return result.toString(); }
static String function(List<String> strings, boolean fixCase, char withChar) { if (strings == null strings.size() == 0) { return ""; } StringBuilder result = null; for (String s : strings) { if (fixCase) { s = fixCase(s); } if (result == null) { result = new StringBuilder(s); } else { result.append(withChar); result.append(s); } } return result.toString(); }
/** * Generic string joining function. * * @param strings Strings to be joined * @param fixCase does it need to fix word case * @param withChar char to join strings with. * @return joined-string */
Generic string joining function
joinStrings
{ "repo_name": "masgari/cliche", "path": "src/main/java/asg/cliche/util/Strings.java", "license": "mit", "size": 3602 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,624,548
public Timestamp getCreated(); public static final String COLUMNNAME_CreatedBy = "CreatedBy";
Timestamp function(); public static final String COLUMNNAME_CreatedBy = STR;
/** Get Created. * Date this record was created */
Get Created. Date this record was created
getCreated
{ "repo_name": "geneos/adempiere", "path": "base/src/org/compiere/model/I_AD_Sequence.java", "license": "gpl-2.0", "size": 8756 }
[ "java.sql.Timestamp" ]
import java.sql.Timestamp;
import java.sql.*;
[ "java.sql" ]
java.sql;
258,565
@Test(expectedExceptions = { TaskException.class }) public void testCreateFromEntryWithoutAttributes() throws Exception { new ExecTask(new Entry( "dn: ds-task-id=missing-path,cn=Scheduled Tasks,cn=tasks", "objectClass: top", "objectClass: ds-task", "objectClass: ds-task-exec", "ds-task-id: missing-path", "ds-task-class-name: com.unboundid.directory.server.tasks.ExecTask")); }
@Test(expectedExceptions = { TaskException.class }) void function() throws Exception { new ExecTask(new Entry( STR, STR, STR, STR, STR, STR)); }
/** * Tests the behavior when trying to create an exec task from an entry without * any attributes. * * @throws Exception If an unexpected problem occurs. */
Tests the behavior when trying to create an exec task from an entry without any attributes
testCreateFromEntryWithoutAttributes
{ "repo_name": "UnboundID/ldapsdk", "path": "tests/unit/src/com/unboundid/ldap/sdk/unboundidds/tasks/ExecTaskTestCase.java", "license": "gpl-2.0", "size": 9759 }
[ "com.unboundid.ldap.sdk.Entry", "org.testng.annotations.Test" ]
import com.unboundid.ldap.sdk.Entry; import org.testng.annotations.Test;
import com.unboundid.ldap.sdk.*; import org.testng.annotations.*;
[ "com.unboundid.ldap", "org.testng.annotations" ]
com.unboundid.ldap; org.testng.annotations;
971,925
void getRepository(@Nonnull String user, @Nonnull String repository, @Nonnull AsyncRequestCallback<GitHubRepository> callback);
void getRepository(@Nonnull String user, @Nonnull String repository, @Nonnull AsyncRequestCallback<GitHubRepository> callback);
/** * Get given repository information. * * @param user * the owner of the repository. * @param repository * the repository name. * @param callback * callback called when operation is done. */
Get given repository information
getRepository
{ "repo_name": "Panthro/che-plugins", "path": "plugin-github/che-plugin-github-ext-github/src/main/java/org/eclipse/che/ide/ext/github/client/GitHubClientService.java", "license": "epl-1.0", "size": 7967 }
[ "javax.annotation.Nonnull", "org.eclipse.che.ide.ext.github.shared.GitHubRepository", "org.eclipse.che.ide.rest.AsyncRequestCallback" ]
import javax.annotation.Nonnull; import org.eclipse.che.ide.ext.github.shared.GitHubRepository; import org.eclipse.che.ide.rest.AsyncRequestCallback;
import javax.annotation.*; import org.eclipse.che.ide.ext.github.shared.*; import org.eclipse.che.ide.rest.*;
[ "javax.annotation", "org.eclipse.che" ]
javax.annotation; org.eclipse.che;
586,459
public void chooseCancelOnNextAlert() { Alert alert = driver.switchTo().alert(); alert.dismiss(); }
void function() { Alert alert = driver.switchTo().alert(); alert.dismiss(); }
/** * Chooses Cancel/No or any other "dismiss" button for the upcoming alert * window. If no alert is displayed an exception will be thrown. */
Chooses Cancel/No or any other "dismiss" button for the upcoming alert window. If no alert is displayed an exception will be thrown
chooseCancelOnNextAlert
{ "repo_name": "ludovicianul/selenium-on-steroids", "path": "src/main/java/com/insidecoding/sos/webdriver/WebDriverHelper.java", "license": "apache-2.0", "size": 38225 }
[ "org.openqa.selenium.Alert" ]
import org.openqa.selenium.Alert;
import org.openqa.selenium.*;
[ "org.openqa.selenium" ]
org.openqa.selenium;
2,238,756
public static <T, K> KeyedStream<T, K> reinterpretAsKeyedStream( DataStream<T> stream, KeySelector<T, K> keySelector, TypeInformation<K> typeInfo) { PartitionTransformation<T> partitionTransformation = new PartitionTransformation<>( stream.getTransformation(), new ForwardPartitioner<>()); return new KeyedStream<>( stream, partitionTransformation, keySelector, typeInfo); } // ------------------------------------------------------------------------ private DataStreamUtils() {} // ------------------------------------------------------------------------ public static final class ClientAndIterator<E> { public final JobClient client; public final Iterator<E> iterator; ClientAndIterator(JobClient client, Iterator<E> iterator) { this.client = checkNotNull(client); this.iterator = checkNotNull(iterator); } }
static <T, K> KeyedStream<T, K> function( DataStream<T> stream, KeySelector<T, K> keySelector, TypeInformation<K> typeInfo) { PartitionTransformation<T> partitionTransformation = new PartitionTransformation<>( stream.getTransformation(), new ForwardPartitioner<>()); return new KeyedStream<>( stream, partitionTransformation, keySelector, typeInfo); } private DataStreamUtils() {} public static final class ClientAndIterator<E> { public final JobClient client; public final Iterator<E> iterator; ClientAndIterator(JobClient client, Iterator<E> iterator) { this.client = checkNotNull(client); this.iterator = checkNotNull(iterator); } }
/** * Reinterprets the given {@link DataStream} as a {@link KeyedStream}, which extracts keys with the given * {@link KeySelector}. * * <p>IMPORTANT: For every partition of the base stream, the keys of events in the base stream must be * partitioned exactly in the same way as if it was created through a {@link DataStream#keyBy(KeySelector)}. * * @param stream The data stream to reinterpret. For every partition, this stream must be partitioned exactly * in the same way as if it was created through a {@link DataStream#keyBy(KeySelector)}. * @param keySelector Function that defines how keys are extracted from the data stream. * @param typeInfo Explicit type information about the key type. * @param <T> Type of events in the data stream. * @param <K> Type of the extracted keys. * @return The reinterpretation of the {@link DataStream} as a {@link KeyedStream}. */
Reinterprets the given <code>DataStream</code> as a <code>KeyedStream</code>, which extracts keys with the given <code>KeySelector</code>. partitioned exactly in the same way as if it was created through a <code>DataStream#keyBy(KeySelector)</code>
reinterpretAsKeyedStream
{ "repo_name": "darionyaphet/flink", "path": "flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/DataStreamUtils.java", "license": "apache-2.0", "size": 10155 }
[ "java.util.Iterator", "org.apache.flink.api.common.typeinfo.TypeInformation", "org.apache.flink.api.java.functions.KeySelector", "org.apache.flink.core.execution.JobClient", "org.apache.flink.streaming.api.transformations.PartitionTransformation", "org.apache.flink.streaming.runtime.partitioner.ForwardPartitioner", "org.apache.flink.util.Preconditions" ]
import java.util.Iterator; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.functions.KeySelector; import org.apache.flink.core.execution.JobClient; import org.apache.flink.streaming.api.transformations.PartitionTransformation; import org.apache.flink.streaming.runtime.partitioner.ForwardPartitioner; import org.apache.flink.util.Preconditions;
import java.util.*; import org.apache.flink.api.common.typeinfo.*; import org.apache.flink.api.java.functions.*; import org.apache.flink.core.execution.*; import org.apache.flink.streaming.api.transformations.*; import org.apache.flink.streaming.runtime.partitioner.*; import org.apache.flink.util.*;
[ "java.util", "org.apache.flink" ]
java.util; org.apache.flink;
2,861,197
@Test @Ignore public void testGetConfigsForCluster() throws GenieException { }
void function() throws GenieException { }
/** * Test get configurations for cluster. * * @throws GenieException For any problem */
Test get configurations for cluster
testGetConfigsForCluster
{ "repo_name": "sensaid/genie", "path": "genie-core/src/test/java/com/netflix/genie/core/services/impl/jpa/TestClusterConfigServiceJPAImpl.java", "license": "apache-2.0", "size": 17701 }
[ "com.netflix.genie.common.exceptions.GenieException" ]
import com.netflix.genie.common.exceptions.GenieException;
import com.netflix.genie.common.exceptions.*;
[ "com.netflix.genie" ]
com.netflix.genie;
1,978,773
public static void printTile(Graphics2D g2, int destx, int desty, int w, int h, int t1, int t2, int t3, int t4) { long a = hash(t1, t2, t3, t4); Integer ti = map.get(a); if (ti == null) { System.out.printf("Not found %d %d %d %d\n", t1, t2, t3, t4); return; } if (ti == 4) // spice up water { int r = new Random().nextInt(20)-17; if (r > 0) ti = WATER_CODES[r]; } int tix = ti % 16; int tiy = ti / 16; g2.drawImage(atlas, destx, desty, destx+w, desty+h, TW*tix, TH*tiy, TW*tix+TW, TH*tiy+TW, null); } private static int[] _map = new int[]{ 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 1, 1, 1, 0, 6, 1, 1, 1, 4, 7, 1, 1, 1, 2, 8, 1, 1, 0, 1, 9, 1, 1, 0, 0, 10, 1, 1, 0, 4, 11, 1, 1, 0, 2, 12, 1, 1, 4, 1, 13, 1, 1, 4, 0, 14, 1, 1, 4, 4, 15, 1, 1, 4, 2, 16, 1, 1, 2, 1, 17, 1, 1, 2, 0, 18, 1, 1, 2, 4, 19, 1, 1, 2, 2, 20, 1, 0, 1, 1, 21, 1, 0, 1, 0, 22, 1, 0, 1, 4, 23, 1, 0, 1, 2, 24, 1, 0, 0, 1, 25, 1, 0, 0, 0, 26, 1, 0, 0, 4, 27, 1, 0, 0, 2, 28, 1, 0, 4, 1, 29, 1, 0, 4, 0, 30, 1, 0, 4, 4, 31, 1, 0, 4, 2, 32, 1, 0, 2, 1, 33, 1, 0, 2, 0, 34, 1, 0, 2, 4, 35, 1, 0, 2, 2, 36, 1, 4, 1, 1, 37, 1, 4, 1, 0, 38, 1, 4, 1, 4, 39, 1, 4, 1, 2, 40, 1, 4, 0, 1, 41, 1, 4, 0, 0, 42, 1, 4, 0, 4, 43, 1, 4, 0, 2, 44, 1, 4, 4, 1, 45, 1, 4, 4, 0, 46, 1, 4, 4, 4, 47, 1, 4, 4, 2, 48, 1, 4, 2, 1, 49, 1, 4, 2, 0, 50, 1, 4, 2, 4, 51, 1, 4, 2, 2, 52, 1, 2, 1, 1, 53, 1, 2, 1, 0, 54, 1, 2, 1, 4, 55, 1, 2, 1, 2, 56, 1, 2, 0, 1, 57, 1, 2, 0, 0, 58, 1, 2, 0, 4, 59, 1, 2, 0, 2, 60, 1, 2, 4, 1, 61, 1, 2, 4, 0, 62, 1, 2, 4, 4, 63, 1, 2, 4, 2, 64, 1, 2, 2, 1, 65, 1, 2, 2, 0, 66, 1, 2, 2, 4, 67, 1, 2, 2, 2, 68, 0, 1, 1, 1, 69, 0, 1, 1, 0, 70, 0, 1, 1, 4, 71, 0, 1, 1, 2, 72, 0, 1, 0, 1, 73, 0, 1, 0, 0, 74, 0, 1, 0, 4, 75, 0, 1, 0, 2, 76, 0, 1, 4, 1, 77, 0, 1, 4, 0, 78, 0, 1, 4, 4, 79, 0, 1, 4, 2, 80, 0, 1, 2, 1, 81, 0, 1, 2, 0, 82, 0, 1, 2, 4, 83, 0, 1, 2, 2, 84, 0, 0, 1, 1, 85, 0, 0, 1, 0, 86, 0, 0, 1, 4, 87, 0, 0, 1, 2, 88, 0, 0, 0, 1, 89, 0, 0, 0, 4, 90, 0, 0, 0, 2, 91, 0, 0, 4, 
1, 92, 0, 0, 4, 0, 93, 0, 0, 4, 4, 94, 0, 0, 4, 2, 95, 0, 0, 2, 1, 96, 0, 0, 2, 0, 97, 0, 0, 2, 4, 98, 0, 0, 2, 2, 99, 0, 4, 1, 1, 100, 0, 4, 1, 0, 101, 0, 4, 1, 4, 102, 0, 4, 1, 2, 103, 0, 4, 0, 1, 104, 0, 4, 0, 0, 105, 0, 4, 0, 4, 106, 0, 4, 0, 2, 107, 0, 4, 4, 1, 108, 0, 4, 4, 0, 109, 0, 4, 4, 4, 110, 0, 4, 4, 2, 111, 0, 4, 2, 1, 112, 0, 4, 2, 0, 113, 0, 4, 2, 4, 114, 0, 4, 2, 2, 115, 0, 2, 1, 1, 116, 0, 2, 1, 0, 117, 0, 2, 1, 4, 118, 0, 2, 1, 2, 119, 0, 2, 0, 1, 120, 0, 2, 0, 0, 121, 0, 2, 0, 4, 122, 0, 2, 0, 2, 123, 0, 2, 4, 1, 124, 0, 2, 4, 0, 125, 0, 2, 4, 4, 126, 0, 2, 4, 2, 127, 0, 2, 2, 1, 128, 0, 2, 2, 0, 129, 0, 2, 2, 4, 130, 0, 2, 2, 2, 131, 4, 1, 1, 1, 132, 4, 1, 1, 0, 133, 4, 1, 1, 4, 134, 4, 1, 1, 2, 135, 4, 1, 0, 1, 136, 4, 1, 0, 0, 137, 4, 1, 0, 4, 138, 4, 1, 0, 2, 139, 4, 1, 4, 1, 140, 4, 1, 4, 0, 141, 4, 1, 4, 4, 142, 4, 1, 4, 2, 143, 4, 1, 2, 1, 144, 4, 1, 2, 0, 145, 4, 1, 2, 4, 146, 4, 1, 2, 2, 147, 4, 0, 1, 1, 148, 4, 0, 1, 0, 149, 4, 0, 1, 4, 150, 4, 0, 1, 2, 151, 4, 0, 0, 1, 152, 4, 0, 0, 0, 153, 4, 0, 0, 4, 154, 4, 0, 0, 2, 155, 4, 0, 4, 1, 156, 4, 0, 4, 0, 157, 4, 0, 4, 4, 158, 4, 0, 4, 2, 159, 4, 0, 2, 1, 160, 4, 0, 2, 0, 161, 4, 0, 2, 4, 162, 4, 0, 2, 2, 163, 4, 4, 1, 1, 164, 4, 4, 1, 0, 165, 4, 4, 1, 4, 166, 4, 4, 1, 2, 167, 4, 4, 0, 1, 168, 4, 4, 0, 0, 169, 4, 4, 0, 4, 170, 4, 4, 0, 2, 171, 4, 4, 4, 1, 172, 4, 4, 4, 0, 173, 4, 4, 4, 2, 174, 4, 4, 2, 1, 175, 4, 4, 2, 0, 176, 4, 4, 2, 4, 177, 4, 4, 2, 2, 178, 4, 2, 1, 1, 179, 4, 2, 1, 0, 180, 4, 2, 1, 4, 181, 4, 2, 1, 2, 182, 4, 2, 0, 1, 183, 4, 2, 0, 0, 184, 4, 2, 0, 4, 185, 4, 2, 0, 2, 186, 4, 2, 4, 1, 187, 4, 2, 4, 0, 188, 4, 2, 4, 4, 189, 4, 2, 4, 2, 190, 4, 2, 2, 1, 191, 4, 2, 2, 0, 192, 4, 2, 2, 4, 193, 4, 2, 2, 2, 194, 2, 1, 1, 1, 195, 2, 1, 1, 0, 196, 2, 1, 1, 4, 197, 2, 1, 1, 2, 198, 2, 1, 0, 1, 199, 2, 1, 0, 0, 200, 2, 1, 0, 4, 201, 2, 1, 0, 2, 202, 2, 1, 4, 1, 203, 2, 1, 4, 0, 204, 2, 1, 4, 4, 205, 2, 1, 4, 2, 206, 2, 1, 2, 1, 207, 2, 1, 2, 0, 208, 2, 1, 2, 4, 209, 2, 1, 2, 
2, 210, 2, 0, 1, 1, 211, 2, 0, 1, 0, 212, 2, 0, 1, 4, 213, 2, 0, 1, 2, 214, 2, 0, 0, 1, 215, 2, 0, 0, 0, 216, 2, 0, 0, 4, 217, 2, 0, 0, 2, 218, 2, 0, 4, 1, 219, 2, 0, 4, 0, 220, 2, 0, 4, 4, 221, 2, 0, 4, 2, 222, 2, 0, 2, 1, 223, 2, 0, 2, 0, 224, 2, 0, 2, 4, 225, 2, 0, 2, 2, 226, 2, 4, 1, 1, 227, 2, 4, 1, 0, 228, 2, 4, 1, 4, 229, 2, 4, 1, 2, 230, 2, 4, 0, 1, 231, 2, 4, 0, 0, 232, 2, 4, 0, 4, 233, 2, 4, 0, 2, 234, 2, 4, 4, 1, 235, 2, 4, 4, 0, 236, 2, 4, 4, 4, 237, 2, 4, 4, 2, 238, 2, 4, 2, 1, 239, 2, 4, 2, 0, 240, 2, 4, 2, 4, 241, 2, 4, 2, 2, 242, 2, 2, 1, 1, 243, 2, 2, 1, 0, 244, 2, 2, 1, 4, 245, 2, 2, 1, 2, 246, 2, 2, 0, 1, 247, 2, 2, 0, 0, 248, 2, 2, 0, 4, 249, 2, 2, 0, 2, 250, 2, 2, 4, 1, 251, 2, 2, 4, 0, 252, 2, 2, 4, 4, 253, 2, 2, 4, 2, 254, 2, 2, 2, 1, 255, 2, 2, 2, 0, 256, 2, 2, 2, 4, 257, 0, 0, 0, 3, 258, 0, 0, 2, 3, 259, 0, 0, 3, 0, 260, 0, 0, 3, 2, 261, 0, 0, 3, 3, 262, 0, 2, 0, 3, 263, 0, 2, 2, 3, 264, 0, 2, 3, 0, 265, 0, 2, 3, 2, 266, 0, 2, 3, 3, 267, 0, 3, 0, 0, 268, 0, 3, 0, 2, 269, 0, 3, 0, 3, 270, 0, 3, 2, 0, 271, 0, 3, 2, 2, 272, 0, 3, 2, 3, 273, 0, 3, 3, 0, 274, 0, 3, 3, 2, 275, 0, 3, 3, 3, 276, 2, 0, 0, 3, 277, 2, 0, 2, 3, 278, 2, 0, 3, 0, 279, 2, 0, 3, 2, 280, 2, 0, 3, 3, 281, 2, 2, 0, 3, 282, 2, 2, 2, 3, 283, 2, 2, 3, 0, 284, 2, 2, 3, 2, 285, 2, 2, 3, 3, 286, 2, 3, 0, 0, 287, 2, 3, 0, 2, 288, 2, 3, 0, 3, 289, 2, 3, 2, 0, 290, 2, 3, 2, 2, 291, 2, 3, 2, 3, 292, 2, 3, 3, 0, 293, 2, 3, 3, 2, 294, 2, 3, 3, 3, 295, 3, 0, 0, 0, 296, 3, 0, 0, 2, 297, 3, 0, 0, 3, 298, 3, 0, 2, 0, 299, 3, 0, 2, 2, 300, 3, 0, 2, 3, 301, 3, 0, 3, 0, 302, 3, 0, 3, 2, 303, 3, 0, 3, 3, 304, 3, 2, 0, 0, 305, 3, 2, 0, 2, 306, 3, 2, 0, 3, 307, 3, 2, 2, 0, 308, 3, 2, 2, 2, 309, 3, 2, 2, 3, 310, 3, 2, 3, 0, 311, 3, 2, 3, 2, 312, 3, 2, 3, 3, 313, 3, 3, 0, 0, 314, 3, 3, 0, 2, 315, 3, 3, 0, 3, 316, 3, 3, 2, 0, 317, 3, 3, 2, 2, 318, 3, 3, 2, 3, 319, 3, 3, 3, 0, 320, 3, 3, 3, 2, 321, 5, 5, 5, 5, 322, 5, 5, 5, 3, 323, 5, 5, 3, 5, 324, 5, 5, 3, 3, 325, 5, 3, 5, 5, 326, 5, 3, 5, 3, 327, 5, 
3, 3, 5, 328, 5, 3, 3, 3, 329, 3, 5, 5, 5, 330, 3, 5, 5, 3, 331, 3, 5, 3, 5, 332, 3, 5, 3, 3, 333, 3, 3, 5, 5, 334, 3, 3, 5, 3, 335, 3, 3, 3, 5, 336, 6, 6, 6, 6, 337, 7, 7, 7, 7, 338, 8, 8, 8, 8, 339, 7, 7, 7, 2, 340, 7, 7, 2, 7, 341, 7, 7, 2, 2, 342, 7, 2, 7, 7, 343, 7, 2, 7, 2, 344, 7, 2, 2, 7, 345, 7, 2, 2, 2, 346, 2, 7, 7, 7, 347, 2, 7, 7, 2, 348, 2, 7, 2, 7, 349, 2, 7, 2, 2, 350, 2, 2, 7, 7, 351, 2, 2, 7, 2, 352, 2, 2, 2, 7, 353, 1, 1, 1, 6, 354, 1, 1, 6, 1, 355, 1, 1, 6, 6, 356, 1, 1, 6, 2, 357, 1, 1, 2, 6, 358, 1, 6, 1, 1, 359, 1, 6, 1, 6, 360, 1, 6, 1, 2, 361, 1, 6, 6, 1, 362, 1, 6, 6, 6, 363, 1, 6, 6, 2, 364, 1, 6, 2, 1, 365, 1, 6, 2, 6, 366, 1, 6, 2, 2, 367, 1, 2, 1, 6, 368, 1, 2, 6, 1, 369, 1, 2, 6, 6, 370, 1, 2, 6, 2, 371, 1, 2, 2, 6, 372, 6, 1, 1, 1, 373, 6, 1, 1, 6, 374, 6, 1, 1, 2, 375, 6, 1, 6, 1, 376, 6, 1, 6, 6, 377, 6, 1, 6, 2, 378, 6, 1, 2, 1, 379, 6, 1, 2, 6, 380, 6, 1, 2, 2, 381, 6, 6, 1, 1, 382, 6, 6, 1, 6, 383, 6, 6, 1, 2, 384, 6, 6, 6, 1, 385, 6, 6, 6, 2, 386, 6, 6, 2, 1, 387, 6, 6, 2, 6, 388, 6, 6, 2, 2, 389, 6, 2, 1, 1, 390, 6, 2, 1, 6, 391, 6, 2, 1, 2, 392, 6, 2, 6, 1, 393, 6, 2, 6, 6, 394, 6, 2, 6, 2, 395, 6, 2, 2, 1, 396, 6, 2, 2, 6, 397, 6, 2, 2, 2, 398, 2, 1, 1, 6, 399, 2, 1, 6, 1, 400, 2, 1, 6, 6, 401, 2, 1, 6, 2, 402, 2, 1, 2, 6, 403, 2, 6, 1, 1, 404, 2, 6, 1, 6, 405, 2, 6, 1, 2, 406, 2, 6, 6, 1, 407, 2, 6, 6, 6, 408, 2, 6, 6, 2, 409, 2, 6, 2, 1, 410, 2, 6, 2, 6, 411, 2, 6, 2, 2, 412, 2, 2, 1, 6, 413, 2, 2, 6, 1, 414, 2, 2, 6, 6, 415, 2, 2, 6, 2, 416, 2, 2, 2, 6, 417, 8, 8, 8, 4, 418, 8, 8, 4, 8, 419, 8, 8, 4, 4, 420, 8, 4, 8, 8, 421, 8, 4, 8, 4, 422, 8, 4, 4, 8, 423, 8, 4, 4, 4, 424, 4, 8, 8, 8, 425, 4, 8, 8, 4, 426, 4, 8, 4, 8, 427, 4, 8, 4, 4, 428, 4, 4, 8, 8, 429, 4, 4, 8, 4, 430, 4, 4, 4, 8, // 431, 0, 0, 0, 0, // 432, 0, 0, 0, 0, // 433, 0, 0, 0, 0, // 434, 1, 1, 1, 1, // 435, 1, 1, 1, 1, // 436, 2, 2, 2, 2, // 437, 2, 2, 2, 2, // 438, 2, 2, 2, 2, // 439, 3, 3, 3, 3, // 440, 3, 3, 3, 3, // 441, 3, 3, 3, 3, // 442, 4, 4, 4, 4, 
// 443, 4, 4, 4, 4, // 444, 4, 4, 4, 4, // 445, 7, 7, 7, 7, // 446, 7, 7, 7, 7, // 447, 1, 1, 1, 1, // 448, 8, 8, 8, 8, // 449, 8, 8, 8, 8, // 450, 8, 8, 8, 8, // 451, 7, 7, 7, 7, // 452, 5, 5, 5, 5, // 453, 5, 5, 5, 5, // 454, 5, 5, 5, 5, // 455, 3, 3, 3, 3, // 456, 3, 3, 3, 3, // 457, 4, 4, 4, 4, // 458, 4, 4, 4, 4, }; static { init(); }
static void function(Graphics2D g2, int destx, int desty, int w, int h, int t1, int t2, int t3, int t4) { long a = hash(t1, t2, t3, t4); Integer ti = map.get(a); if (ti == null) { System.out.printf(STR, t1, t2, t3, t4); return; } if (ti == 4) { int r = new Random().nextInt(20)-17; if (r > 0) ti = WATER_CODES[r]; } int tix = ti % 16; int tiy = ti / 16; g2.drawImage(atlas, destx, desty, destx+w, desty+h, TW*tix, TH*tiy, TW*tix+TW, TH*tiy+TW, null); } private static int[] _map = new int[]{ 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 1, 1, 1, 0, 6, 1, 1, 1, 4, 7, 1, 1, 1, 2, 8, 1, 1, 0, 1, 9, 1, 1, 0, 0, 10, 1, 1, 0, 4, 11, 1, 1, 0, 2, 12, 1, 1, 4, 1, 13, 1, 1, 4, 0, 14, 1, 1, 4, 4, 15, 1, 1, 4, 2, 16, 1, 1, 2, 1, 17, 1, 1, 2, 0, 18, 1, 1, 2, 4, 19, 1, 1, 2, 2, 20, 1, 0, 1, 1, 21, 1, 0, 1, 0, 22, 1, 0, 1, 4, 23, 1, 0, 1, 2, 24, 1, 0, 0, 1, 25, 1, 0, 0, 0, 26, 1, 0, 0, 4, 27, 1, 0, 0, 2, 28, 1, 0, 4, 1, 29, 1, 0, 4, 0, 30, 1, 0, 4, 4, 31, 1, 0, 4, 2, 32, 1, 0, 2, 1, 33, 1, 0, 2, 0, 34, 1, 0, 2, 4, 35, 1, 0, 2, 2, 36, 1, 4, 1, 1, 37, 1, 4, 1, 0, 38, 1, 4, 1, 4, 39, 1, 4, 1, 2, 40, 1, 4, 0, 1, 41, 1, 4, 0, 0, 42, 1, 4, 0, 4, 43, 1, 4, 0, 2, 44, 1, 4, 4, 1, 45, 1, 4, 4, 0, 46, 1, 4, 4, 4, 47, 1, 4, 4, 2, 48, 1, 4, 2, 1, 49, 1, 4, 2, 0, 50, 1, 4, 2, 4, 51, 1, 4, 2, 2, 52, 1, 2, 1, 1, 53, 1, 2, 1, 0, 54, 1, 2, 1, 4, 55, 1, 2, 1, 2, 56, 1, 2, 0, 1, 57, 1, 2, 0, 0, 58, 1, 2, 0, 4, 59, 1, 2, 0, 2, 60, 1, 2, 4, 1, 61, 1, 2, 4, 0, 62, 1, 2, 4, 4, 63, 1, 2, 4, 2, 64, 1, 2, 2, 1, 65, 1, 2, 2, 0, 66, 1, 2, 2, 4, 67, 1, 2, 2, 2, 68, 0, 1, 1, 1, 69, 0, 1, 1, 0, 70, 0, 1, 1, 4, 71, 0, 1, 1, 2, 72, 0, 1, 0, 1, 73, 0, 1, 0, 0, 74, 0, 1, 0, 4, 75, 0, 1, 0, 2, 76, 0, 1, 4, 1, 77, 0, 1, 4, 0, 78, 0, 1, 4, 4, 79, 0, 1, 4, 2, 80, 0, 1, 2, 1, 81, 0, 1, 2, 0, 82, 0, 1, 2, 4, 83, 0, 1, 2, 2, 84, 0, 0, 1, 1, 85, 0, 0, 1, 0, 86, 0, 0, 1, 4, 87, 0, 0, 1, 2, 88, 0, 0, 0, 1, 89, 0, 0, 0, 4, 90, 0, 0, 0, 2, 91, 0, 0, 4, 1, 92, 0, 0, 4, 0, 93, 0, 0, 4, 4, 94, 0, 0, 4, 
2, 95, 0, 0, 2, 1, 96, 0, 0, 2, 0, 97, 0, 0, 2, 4, 98, 0, 0, 2, 2, 99, 0, 4, 1, 1, 100, 0, 4, 1, 0, 101, 0, 4, 1, 4, 102, 0, 4, 1, 2, 103, 0, 4, 0, 1, 104, 0, 4, 0, 0, 105, 0, 4, 0, 4, 106, 0, 4, 0, 2, 107, 0, 4, 4, 1, 108, 0, 4, 4, 0, 109, 0, 4, 4, 4, 110, 0, 4, 4, 2, 111, 0, 4, 2, 1, 112, 0, 4, 2, 0, 113, 0, 4, 2, 4, 114, 0, 4, 2, 2, 115, 0, 2, 1, 1, 116, 0, 2, 1, 0, 117, 0, 2, 1, 4, 118, 0, 2, 1, 2, 119, 0, 2, 0, 1, 120, 0, 2, 0, 0, 121, 0, 2, 0, 4, 122, 0, 2, 0, 2, 123, 0, 2, 4, 1, 124, 0, 2, 4, 0, 125, 0, 2, 4, 4, 126, 0, 2, 4, 2, 127, 0, 2, 2, 1, 128, 0, 2, 2, 0, 129, 0, 2, 2, 4, 130, 0, 2, 2, 2, 131, 4, 1, 1, 1, 132, 4, 1, 1, 0, 133, 4, 1, 1, 4, 134, 4, 1, 1, 2, 135, 4, 1, 0, 1, 136, 4, 1, 0, 0, 137, 4, 1, 0, 4, 138, 4, 1, 0, 2, 139, 4, 1, 4, 1, 140, 4, 1, 4, 0, 141, 4, 1, 4, 4, 142, 4, 1, 4, 2, 143, 4, 1, 2, 1, 144, 4, 1, 2, 0, 145, 4, 1, 2, 4, 146, 4, 1, 2, 2, 147, 4, 0, 1, 1, 148, 4, 0, 1, 0, 149, 4, 0, 1, 4, 150, 4, 0, 1, 2, 151, 4, 0, 0, 1, 152, 4, 0, 0, 0, 153, 4, 0, 0, 4, 154, 4, 0, 0, 2, 155, 4, 0, 4, 1, 156, 4, 0, 4, 0, 157, 4, 0, 4, 4, 158, 4, 0, 4, 2, 159, 4, 0, 2, 1, 160, 4, 0, 2, 0, 161, 4, 0, 2, 4, 162, 4, 0, 2, 2, 163, 4, 4, 1, 1, 164, 4, 4, 1, 0, 165, 4, 4, 1, 4, 166, 4, 4, 1, 2, 167, 4, 4, 0, 1, 168, 4, 4, 0, 0, 169, 4, 4, 0, 4, 170, 4, 4, 0, 2, 171, 4, 4, 4, 1, 172, 4, 4, 4, 0, 173, 4, 4, 4, 2, 174, 4, 4, 2, 1, 175, 4, 4, 2, 0, 176, 4, 4, 2, 4, 177, 4, 4, 2, 2, 178, 4, 2, 1, 1, 179, 4, 2, 1, 0, 180, 4, 2, 1, 4, 181, 4, 2, 1, 2, 182, 4, 2, 0, 1, 183, 4, 2, 0, 0, 184, 4, 2, 0, 4, 185, 4, 2, 0, 2, 186, 4, 2, 4, 1, 187, 4, 2, 4, 0, 188, 4, 2, 4, 4, 189, 4, 2, 4, 2, 190, 4, 2, 2, 1, 191, 4, 2, 2, 0, 192, 4, 2, 2, 4, 193, 4, 2, 2, 2, 194, 2, 1, 1, 1, 195, 2, 1, 1, 0, 196, 2, 1, 1, 4, 197, 2, 1, 1, 2, 198, 2, 1, 0, 1, 199, 2, 1, 0, 0, 200, 2, 1, 0, 4, 201, 2, 1, 0, 2, 202, 2, 1, 4, 1, 203, 2, 1, 4, 0, 204, 2, 1, 4, 4, 205, 2, 1, 4, 2, 206, 2, 1, 2, 1, 207, 2, 1, 2, 0, 208, 2, 1, 2, 4, 209, 2, 1, 2, 2, 210, 2, 0, 1, 1, 211, 2, 0, 1, 0, 212, 2, 0, 
1, 4, 213, 2, 0, 1, 2, 214, 2, 0, 0, 1, 215, 2, 0, 0, 0, 216, 2, 0, 0, 4, 217, 2, 0, 0, 2, 218, 2, 0, 4, 1, 219, 2, 0, 4, 0, 220, 2, 0, 4, 4, 221, 2, 0, 4, 2, 222, 2, 0, 2, 1, 223, 2, 0, 2, 0, 224, 2, 0, 2, 4, 225, 2, 0, 2, 2, 226, 2, 4, 1, 1, 227, 2, 4, 1, 0, 228, 2, 4, 1, 4, 229, 2, 4, 1, 2, 230, 2, 4, 0, 1, 231, 2, 4, 0, 0, 232, 2, 4, 0, 4, 233, 2, 4, 0, 2, 234, 2, 4, 4, 1, 235, 2, 4, 4, 0, 236, 2, 4, 4, 4, 237, 2, 4, 4, 2, 238, 2, 4, 2, 1, 239, 2, 4, 2, 0, 240, 2, 4, 2, 4, 241, 2, 4, 2, 2, 242, 2, 2, 1, 1, 243, 2, 2, 1, 0, 244, 2, 2, 1, 4, 245, 2, 2, 1, 2, 246, 2, 2, 0, 1, 247, 2, 2, 0, 0, 248, 2, 2, 0, 4, 249, 2, 2, 0, 2, 250, 2, 2, 4, 1, 251, 2, 2, 4, 0, 252, 2, 2, 4, 4, 253, 2, 2, 4, 2, 254, 2, 2, 2, 1, 255, 2, 2, 2, 0, 256, 2, 2, 2, 4, 257, 0, 0, 0, 3, 258, 0, 0, 2, 3, 259, 0, 0, 3, 0, 260, 0, 0, 3, 2, 261, 0, 0, 3, 3, 262, 0, 2, 0, 3, 263, 0, 2, 2, 3, 264, 0, 2, 3, 0, 265, 0, 2, 3, 2, 266, 0, 2, 3, 3, 267, 0, 3, 0, 0, 268, 0, 3, 0, 2, 269, 0, 3, 0, 3, 270, 0, 3, 2, 0, 271, 0, 3, 2, 2, 272, 0, 3, 2, 3, 273, 0, 3, 3, 0, 274, 0, 3, 3, 2, 275, 0, 3, 3, 3, 276, 2, 0, 0, 3, 277, 2, 0, 2, 3, 278, 2, 0, 3, 0, 279, 2, 0, 3, 2, 280, 2, 0, 3, 3, 281, 2, 2, 0, 3, 282, 2, 2, 2, 3, 283, 2, 2, 3, 0, 284, 2, 2, 3, 2, 285, 2, 2, 3, 3, 286, 2, 3, 0, 0, 287, 2, 3, 0, 2, 288, 2, 3, 0, 3, 289, 2, 3, 2, 0, 290, 2, 3, 2, 2, 291, 2, 3, 2, 3, 292, 2, 3, 3, 0, 293, 2, 3, 3, 2, 294, 2, 3, 3, 3, 295, 3, 0, 0, 0, 296, 3, 0, 0, 2, 297, 3, 0, 0, 3, 298, 3, 0, 2, 0, 299, 3, 0, 2, 2, 300, 3, 0, 2, 3, 301, 3, 0, 3, 0, 302, 3, 0, 3, 2, 303, 3, 0, 3, 3, 304, 3, 2, 0, 0, 305, 3, 2, 0, 2, 306, 3, 2, 0, 3, 307, 3, 2, 2, 0, 308, 3, 2, 2, 2, 309, 3, 2, 2, 3, 310, 3, 2, 3, 0, 311, 3, 2, 3, 2, 312, 3, 2, 3, 3, 313, 3, 3, 0, 0, 314, 3, 3, 0, 2, 315, 3, 3, 0, 3, 316, 3, 3, 2, 0, 317, 3, 3, 2, 2, 318, 3, 3, 2, 3, 319, 3, 3, 3, 0, 320, 3, 3, 3, 2, 321, 5, 5, 5, 5, 322, 5, 5, 5, 3, 323, 5, 5, 3, 5, 324, 5, 5, 3, 3, 325, 5, 3, 5, 5, 326, 5, 3, 5, 3, 327, 5, 3, 3, 5, 328, 5, 3, 3, 3, 329, 3, 5, 5, 5, 330, 
3, 5, 5, 3, 331, 3, 5, 3, 5, 332, 3, 5, 3, 3, 333, 3, 3, 5, 5, 334, 3, 3, 5, 3, 335, 3, 3, 3, 5, 336, 6, 6, 6, 6, 337, 7, 7, 7, 7, 338, 8, 8, 8, 8, 339, 7, 7, 7, 2, 340, 7, 7, 2, 7, 341, 7, 7, 2, 2, 342, 7, 2, 7, 7, 343, 7, 2, 7, 2, 344, 7, 2, 2, 7, 345, 7, 2, 2, 2, 346, 2, 7, 7, 7, 347, 2, 7, 7, 2, 348, 2, 7, 2, 7, 349, 2, 7, 2, 2, 350, 2, 2, 7, 7, 351, 2, 2, 7, 2, 352, 2, 2, 2, 7, 353, 1, 1, 1, 6, 354, 1, 1, 6, 1, 355, 1, 1, 6, 6, 356, 1, 1, 6, 2, 357, 1, 1, 2, 6, 358, 1, 6, 1, 1, 359, 1, 6, 1, 6, 360, 1, 6, 1, 2, 361, 1, 6, 6, 1, 362, 1, 6, 6, 6, 363, 1, 6, 6, 2, 364, 1, 6, 2, 1, 365, 1, 6, 2, 6, 366, 1, 6, 2, 2, 367, 1, 2, 1, 6, 368, 1, 2, 6, 1, 369, 1, 2, 6, 6, 370, 1, 2, 6, 2, 371, 1, 2, 2, 6, 372, 6, 1, 1, 1, 373, 6, 1, 1, 6, 374, 6, 1, 1, 2, 375, 6, 1, 6, 1, 376, 6, 1, 6, 6, 377, 6, 1, 6, 2, 378, 6, 1, 2, 1, 379, 6, 1, 2, 6, 380, 6, 1, 2, 2, 381, 6, 6, 1, 1, 382, 6, 6, 1, 6, 383, 6, 6, 1, 2, 384, 6, 6, 6, 1, 385, 6, 6, 6, 2, 386, 6, 6, 2, 1, 387, 6, 6, 2, 6, 388, 6, 6, 2, 2, 389, 6, 2, 1, 1, 390, 6, 2, 1, 6, 391, 6, 2, 1, 2, 392, 6, 2, 6, 1, 393, 6, 2, 6, 6, 394, 6, 2, 6, 2, 395, 6, 2, 2, 1, 396, 6, 2, 2, 6, 397, 6, 2, 2, 2, 398, 2, 1, 1, 6, 399, 2, 1, 6, 1, 400, 2, 1, 6, 6, 401, 2, 1, 6, 2, 402, 2, 1, 2, 6, 403, 2, 6, 1, 1, 404, 2, 6, 1, 6, 405, 2, 6, 1, 2, 406, 2, 6, 6, 1, 407, 2, 6, 6, 6, 408, 2, 6, 6, 2, 409, 2, 6, 2, 1, 410, 2, 6, 2, 6, 411, 2, 6, 2, 2, 412, 2, 2, 1, 6, 413, 2, 2, 6, 1, 414, 2, 2, 6, 6, 415, 2, 2, 6, 2, 416, 2, 2, 2, 6, 417, 8, 8, 8, 4, 418, 8, 8, 4, 8, 419, 8, 8, 4, 4, 420, 8, 4, 8, 8, 421, 8, 4, 8, 4, 422, 8, 4, 4, 8, 423, 8, 4, 4, 4, 424, 4, 8, 8, 8, 425, 4, 8, 8, 4, 426, 4, 8, 4, 8, 427, 4, 8, 4, 4, 428, 4, 4, 8, 8, 429, 4, 4, 8, 4, 430, 4, 4, 4, 8, }; static { init(); }
/** * Given corner codes, find the correct offset of the tile in the bigger atlas. * And copy the tile onto the destination. * @param g2 * @param destx * @param desty * @param w * @param h * @param t1 * @param t2 * @param t3 * @param t4 */
Given corner codes, find the correct offset of the tile in the bigger atlas. And copy the tile onto the destination
printTile
{ "repo_name": "dattasid/PirateMap", "path": "src/piratemap/utils/TerrainMap.java", "license": "mit", "size": 12297 }
[ "java.awt.Graphics2D", "java.util.Random" ]
import java.awt.Graphics2D; import java.util.Random;
import java.awt.*; import java.util.*;
[ "java.awt", "java.util" ]
java.awt; java.util;
2,698,958
public GLGraphics withSaveRestore(IGLRenderer renderer, float w, float h, GLElement parent) { save(); renderer.render(this, w, h, parent); restore(); return this; }
GLGraphics function(IGLRenderer renderer, float w, float h, GLElement parent) { save(); renderer.render(this, w, h, parent); restore(); return this; }
/** * runs the given procedure, in a save-restore environment */
runs the given procedure, in a save-restore environment
withSaveRestore
{ "repo_name": "Caleydo/caleydo", "path": "org.caleydo.ui/src/org/caleydo/core/view/opengl/layout2/GLGraphics.java", "license": "bsd-3-clause", "size": 22476 }
[ "org.caleydo.core.view.opengl.layout2.renderer.IGLRenderer" ]
import org.caleydo.core.view.opengl.layout2.renderer.IGLRenderer;
import org.caleydo.core.view.opengl.layout2.renderer.*;
[ "org.caleydo.core" ]
org.caleydo.core;
307,755
List<DiskImage> getAllForDiskProfile(Guid diskProfileId);
List<DiskImage> getAllForDiskProfile(Guid diskProfileId);
/** * Return all images that attached to disk profile. * * @param diskProfileId * The Disk Profile Id attached to disks. * @return List of DiskImages */
Return all images that attached to disk profile
getAllForDiskProfile
{ "repo_name": "OpenUniversity/ovirt-engine", "path": "backend/manager/modules/dal/src/main/java/org/ovirt/engine/core/dao/DiskImageDao.java", "license": "apache-2.0", "size": 4296 }
[ "java.util.List", "org.ovirt.engine.core.common.businessentities.storage.DiskImage", "org.ovirt.engine.core.compat.Guid" ]
import java.util.List; import org.ovirt.engine.core.common.businessentities.storage.DiskImage; import org.ovirt.engine.core.compat.Guid;
import java.util.*; import org.ovirt.engine.core.common.businessentities.storage.*; import org.ovirt.engine.core.compat.*;
[ "java.util", "org.ovirt.engine" ]
java.util; org.ovirt.engine;
2,423,619
public Chart getChartOfAccounts();
Chart function();
/** * Gets the chartOfAccounts attribute. * * @return Returns the chartOfAccounts */
Gets the chartOfAccounts attribute
getChartOfAccounts
{ "repo_name": "bhutchinson/kfs", "path": "kfs-core/src/main/java/org/kuali/kfs/integration/cg/ContractsAndGrantsAccountAwardInformation.java", "license": "agpl-3.0", "size": 2311 }
[ "org.kuali.kfs.coa.businessobject.Chart" ]
import org.kuali.kfs.coa.businessobject.Chart;
import org.kuali.kfs.coa.businessobject.*;
[ "org.kuali.kfs" ]
org.kuali.kfs;
785,444
public void setKraAuthorizationService(KraAuthorizationService kraAuthorizationService) { this.kraAuthorizationService = kraAuthorizationService; }
void function(KraAuthorizationService kraAuthorizationService) { this.kraAuthorizationService = kraAuthorizationService; }
/** * Set the Kra Authorization Service. Usually injected by the Spring Framework. * @param kraAuthorizationService */
Set the Kra Authorization Service. Usually injected by the Spring Framework
setKraAuthorizationService
{ "repo_name": "vivantech/kc_fixes", "path": "src/main/java/org/kuali/kra/common/committee/document/authorizer/CommitteeAuthorizerBase.java", "license": "apache-2.0", "size": 2624 }
[ "org.kuali.kra.service.KraAuthorizationService" ]
import org.kuali.kra.service.KraAuthorizationService;
import org.kuali.kra.service.*;
[ "org.kuali.kra" ]
org.kuali.kra;
2,239,189
public int read(byte[] oBuffer) throws IOException { assert(_stream != null); return _stream.read(oBuffer); }
int function(byte[] oBuffer) throws IOException { assert(_stream != null); return _stream.read(oBuffer); }
/** * Read to output buffer. * @param oBuffer buffer to read to. Cannot be null * @return number of bytes read. @see java.nio.ByteBuffer#read(byte[]) * @throws IOException If there are any problems reading */
Read to output buffer
read
{ "repo_name": "skardach/ro-str-viewer", "path": "src/com/skardach/ro/common/LittleEndianInputStreamAdapter.java", "license": "lgpl-3.0", "size": 1770 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,383,986
public static @ContentSettingValues @Nullable Integer getDefaultEnabledValue(int contentType) { return getResourceItem(contentType).getDefaultEnabledValue(); }
static @ContentSettingValues @Nullable Integer function(int contentType) { return getResourceItem(contentType).getDefaultEnabledValue(); }
/** * Returns which ContentSetting the global default is set to, when enabled. * Either Ask/Allow. Not required unless this entry describes a settings * that appears on the Site Settings page and has a global toggle. */
Returns which ContentSetting the global default is set to, when enabled. Either Ask/Allow. Not required unless this entry describes a settings that appears on the Site Settings page and has a global toggle
getDefaultEnabledValue
{ "repo_name": "chromium/chromium", "path": "components/browser_ui/site_settings/android/java/src/org/chromium/components/browser_ui/site_settings/ContentSettingsResources.java", "license": "bsd-3-clause", "size": 25536 }
[ "androidx.annotation.Nullable", "org.chromium.components.content_settings.ContentSettingValues" ]
import androidx.annotation.Nullable; import org.chromium.components.content_settings.ContentSettingValues;
import androidx.annotation.*; import org.chromium.components.content_settings.*;
[ "androidx.annotation", "org.chromium.components" ]
androidx.annotation; org.chromium.components;
52,631
public static FunctionScoreQueryBuilder functionScoreQuery(ScoreFunctionBuilder function) { return new FunctionScoreQueryBuilder(function); }
static FunctionScoreQueryBuilder function(ScoreFunctionBuilder function) { return new FunctionScoreQueryBuilder(function); }
/** * A query that allows to define a custom scoring function. * * @param function The function builder used to custom score */
A query that allows to define a custom scoring function
functionScoreQuery
{ "repo_name": "zuoyebushiwo/elasticsearch-1.5.0", "path": "src/main/java/org/elasticsearch/index/query/QueryBuilders.java", "license": "apache-2.0", "size": 29381 }
[ "org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder", "org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder" ]
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.*;
[ "org.elasticsearch.index" ]
org.elasticsearch.index;
2,034,071
protected static String getInputTableName(JobConf job) { return InputConfigurator.getInputTableName(CLASS, job); }
static String function(JobConf job) { return InputConfigurator.getInputTableName(CLASS, job); }
/** * Gets the table name from the configuration. * * @param job * the Hadoop context for the configured job * @return the table name * @since 1.5.0 * @see #setInputTableName(JobConf, String) */
Gets the table name from the configuration
getInputTableName
{ "repo_name": "adamjshook/accumulo", "path": "core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java", "license": "apache-2.0", "size": 15558 }
[ "org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator", "org.apache.hadoop.mapred.JobConf" ]
import org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator; import org.apache.hadoop.mapred.JobConf;
import org.apache.accumulo.core.client.mapreduce.lib.impl.*; import org.apache.hadoop.mapred.*;
[ "org.apache.accumulo", "org.apache.hadoop" ]
org.apache.accumulo; org.apache.hadoop;
1,281,109
SearchFilterConfig getFilterConfiguration(String placeHolder);
SearchFilterConfig getFilterConfiguration(String placeHolder);
/** * Gets the list of supported filters for the given placeholder. * * @param placeHolder * the place holder * @return the filters */
Gets the list of supported filters for the given placeholder
getFilterConfiguration
{ "repo_name": "SirmaITT/conservation-space-1.7.0", "path": "docker/sirma-platform/platform/seip-parent/platform/domain-model/instance-search/src/main/java/com/sirma/itt/seip/search/SearchServiceFilterExtension.java", "license": "lgpl-3.0", "size": 1838 }
[ "com.sirma.itt.seip.domain.search.SearchFilterConfig" ]
import com.sirma.itt.seip.domain.search.SearchFilterConfig;
import com.sirma.itt.seip.domain.search.*;
[ "com.sirma.itt" ]
com.sirma.itt;
128,769
private void resolveExtends(CompassMapping compassMapping, AliasMapping aliasMapping, AliasMapping copyFromAliasMapping) throws MappingException { if (copyFromAliasMapping.getExtendedAliases() != null) { for (int i = 0; i < copyFromAliasMapping.getExtendedAliases().length; i++) { String extendedAlias = copyFromAliasMapping.getExtendedAliases()[i]; AliasMapping extendedAliasMapping = compassMapping.getAliasMapping(extendedAlias); if (extendedAliasMapping == null) { throw new MappingException("Failed to find alias [" + extendedAlias + "] in alias [" + aliasMapping.getAlias() + "] extends section"); } // recursivly call in order to resolve extends. Note, we copy the extended alias mapping // since we do not share mappings resolveExtends(compassMapping, aliasMapping, (AliasMapping) extendedAliasMapping.copy()); } } for (Iterator aliasMappingIt = copyFromAliasMapping.mappingsIt(); aliasMappingIt.hasNext();) { aliasMapping.addMapping((Mapping) aliasMappingIt.next()); } }
void function(CompassMapping compassMapping, AliasMapping aliasMapping, AliasMapping copyFromAliasMapping) throws MappingException { if (copyFromAliasMapping.getExtendedAliases() != null) { for (int i = 0; i < copyFromAliasMapping.getExtendedAliases().length; i++) { String extendedAlias = copyFromAliasMapping.getExtendedAliases()[i]; AliasMapping extendedAliasMapping = compassMapping.getAliasMapping(extendedAlias); if (extendedAliasMapping == null) { throw new MappingException(STR + extendedAlias + STR + aliasMapping.getAlias() + STR); } resolveExtends(compassMapping, aliasMapping, (AliasMapping) extendedAliasMapping.copy()); } } for (Iterator aliasMappingIt = copyFromAliasMapping.mappingsIt(); aliasMappingIt.hasNext();) { aliasMapping.addMapping((Mapping) aliasMappingIt.next()); } }
/** * Resolves (recursivly) all the extended aliases and addes their mappings (copy) into the alias mapping. */
Resolves (recursivly) all the extended aliases and addes their mappings (copy) into the alias mapping
resolveExtends
{ "repo_name": "vthriller/opensymphony-compass-backup", "path": "src/main/src/org/compass/core/config/process/ResolveExtendsMappingProcessor.java", "license": "apache-2.0", "size": 5135 }
[ "java.util.Iterator", "org.compass.core.mapping.AliasMapping", "org.compass.core.mapping.CompassMapping", "org.compass.core.mapping.Mapping", "org.compass.core.mapping.MappingException" ]
import java.util.Iterator; import org.compass.core.mapping.AliasMapping; import org.compass.core.mapping.CompassMapping; import org.compass.core.mapping.Mapping; import org.compass.core.mapping.MappingException;
import java.util.*; import org.compass.core.mapping.*;
[ "java.util", "org.compass.core" ]
java.util; org.compass.core;
2,653,462
protected Object readResolve() throws ObjectStreamException { EnumSyntax[] theTable = getEnumValueTable(); if (theTable == null) { throw new InvalidObjectException( "Null enumeration value table for class " + getClass()); } int theOffset = getOffset(); int theIndex = value - theOffset; if (0 > theIndex || theIndex >= theTable.length) { throw new InvalidObjectException ("Integer value = " + value + " not in valid range " + theOffset + ".." + (theOffset + theTable.length - 1) + "for class " + getClass()); } EnumSyntax result = theTable[theIndex]; if (result == null) { throw new InvalidObjectException ("No enumeration value for integer value = " + value + "for class " + getClass()); } return result; } // Hidden operations to be implemented in a subclass.
Object function() throws ObjectStreamException { EnumSyntax[] theTable = getEnumValueTable(); if (theTable == null) { throw new InvalidObjectException( STR + getClass()); } int theOffset = getOffset(); int theIndex = value - theOffset; if (0 > theIndex theIndex >= theTable.length) { throw new InvalidObjectException (STR + value + STR + theOffset + ".." + (theOffset + theTable.length - 1) + STR + getClass()); } EnumSyntax result = theTable[theIndex]; if (result == null) { throw new InvalidObjectException (STR + value + STR + getClass()); } return result; }
/** * During object input, convert this deserialized enumeration instance to * the proper enumeration value defined in the enumeration attribute class. * * @return The enumeration singleton value stored at index * <I>i</I>-<I>L</I> in the enumeration value table returned by * {@link #getEnumValueTable() <CODE>getEnumValueTable()</CODE>}, * where <I>i</I> is this enumeration value's integer value and * <I>L</I> is the value returned by {@link #getOffset() * <CODE>getOffset()</CODE>}. * * @throws ObjectStreamException if the stream can't be deserialised * @throws InvalidObjectException * Thrown if the enumeration value table is null, this enumeration * value's integer value does not correspond to an element in the * enumeration value table, or the corresponding element in the * enumeration value table is null. (Note: {@link * java.io.InvalidObjectException InvalidObjectException} is a subclass * of {@link java.io.ObjectStreamException ObjectStreamException}, which * <CODE>readResolve()</CODE> is declared to throw.) */
During object input, convert this deserialized enumeration instance to the proper enumeration value defined in the enumeration attribute class
readResolve
{ "repo_name": "rokn/Count_Words_2015", "path": "testing/openjdk/jdk/src/share/classes/javax/print/attribute/EnumSyntax.java", "license": "mit", "size": 11563 }
[ "java.io.InvalidObjectException", "java.io.ObjectStreamException" ]
import java.io.InvalidObjectException; import java.io.ObjectStreamException;
import java.io.*;
[ "java.io" ]
java.io;
1,145,261
@Test public void testMerge() throws Exception { LOG.info("Starting " + name.getMethodName()); final TableName tableName = TableName.valueOf(name.getMethodName()); final Admin admin = TEST_UTIL.getAdmin(); try { // Create table and load data. Table table = createTableAndLoadData(MASTER, tableName); AssignmentManager am = MASTER.getAssignmentManager(); List<RegionInfo> regions = am.getRegionStates().getRegionsOfTable(tableName); // Fake offline one region RegionInfo a = regions.get(0); RegionInfo b = regions.get(1); am.unassign(b); am.offlineRegion(b); try { // Merge offline region. Region a is offline here FutureUtils.get( admin.mergeRegionsAsync(a.getEncodedNameAsBytes(), b.getEncodedNameAsBytes(), false)); fail("Offline regions should not be able to merge"); } catch (DoNotRetryRegionException ie) { System.out.println(ie); assertTrue(ie instanceof MergeRegionException); } try { // Merge the same region: b and b. FutureUtils .get(admin.mergeRegionsAsync(b.getEncodedNameAsBytes(), b.getEncodedNameAsBytes(), true)); fail("A region should not be able to merge with itself, even forcfully"); } catch (IOException ie) { assertTrue("Exception should mention regions not online", StringUtils.stringifyException(ie).contains("region to itself") && ie instanceof MergeRegionException); } try { // Merge unknown regions FutureUtils.get(admin.mergeRegionsAsync(Bytes.toBytes("-f1"), Bytes.toBytes("-f2"), true)); fail("Unknown region could not be merged"); } catch (IOException ie) { assertTrue("UnknownRegionException should be thrown", ie instanceof UnknownRegionException); } table.close(); } finally { TEST_UTIL.deleteTable(tableName); } }
void function() throws Exception { LOG.info(STR + name.getMethodName()); final TableName tableName = TableName.valueOf(name.getMethodName()); final Admin admin = TEST_UTIL.getAdmin(); try { Table table = createTableAndLoadData(MASTER, tableName); AssignmentManager am = MASTER.getAssignmentManager(); List<RegionInfo> regions = am.getRegionStates().getRegionsOfTable(tableName); RegionInfo a = regions.get(0); RegionInfo b = regions.get(1); am.unassign(b); am.offlineRegion(b); try { FutureUtils.get( admin.mergeRegionsAsync(a.getEncodedNameAsBytes(), b.getEncodedNameAsBytes(), false)); fail(STR); } catch (DoNotRetryRegionException ie) { System.out.println(ie); assertTrue(ie instanceof MergeRegionException); } try { FutureUtils .get(admin.mergeRegionsAsync(b.getEncodedNameAsBytes(), b.getEncodedNameAsBytes(), true)); fail(STR); } catch (IOException ie) { assertTrue(STR, StringUtils.stringifyException(ie).contains(STR) && ie instanceof MergeRegionException); } try { FutureUtils.get(admin.mergeRegionsAsync(Bytes.toBytes("-f1"), Bytes.toBytes("-f2"), true)); fail(STR); } catch (IOException ie) { assertTrue(STR, ie instanceof UnknownRegionException); } table.close(); } finally { TEST_UTIL.deleteTable(tableName); } }
/** * This test tests 1, merging region not online; * 2, merging same two regions; 3, merging unknown regions. * They are in one test case so that we don't have to create * many tables, and these tests are simple. */
This test tests 1, merging region not online; 2, merging same two regions; 3, merging unknown regions. They are in one test case so that we don't have to create many tables, and these tests are simple
testMerge
{ "repo_name": "ChinmaySKulkarni/hbase", "path": "hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java", "license": "apache-2.0", "size": 22625 }
[ "java.io.IOException", "java.util.List", "org.apache.hadoop.hbase.TableName", "org.apache.hadoop.hbase.UnknownRegionException", "org.apache.hadoop.hbase.client.Admin", "org.apache.hadoop.hbase.client.DoNotRetryRegionException", "org.apache.hadoop.hbase.client.RegionInfo", "org.apache.hadoop.hbase.client.Table", "org.apache.hadoop.hbase.exceptions.MergeRegionException", "org.apache.hadoop.hbase.master.assignment.AssignmentManager", "org.apache.hadoop.hbase.util.Bytes", "org.apache.hadoop.hbase.util.FutureUtils", "org.apache.hadoop.util.StringUtils", "org.junit.Assert" ]
import java.io.IOException; import java.util.List; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.UnknownRegionException; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.DoNotRetryRegionException; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.exceptions.MergeRegionException; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FutureUtils; import org.apache.hadoop.util.StringUtils; import org.junit.Assert;
import java.io.*; import java.util.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.exceptions.*; import org.apache.hadoop.hbase.master.assignment.*; import org.apache.hadoop.hbase.util.*; import org.apache.hadoop.util.*; import org.junit.*;
[ "java.io", "java.util", "org.apache.hadoop", "org.junit" ]
java.io; java.util; org.apache.hadoop; org.junit;
1,543,397
protected void getOracleTablespaces(Map<String, String> dbPoolData) { String dataTablespace = "users"; String indexTablespace = "users"; CmsSetupDb setupDb = new CmsSetupDb(null); try { setupDb.setConnection( dbPoolData.get("driver"), dbPoolData.get("url"), dbPoolData.get("params"), dbPoolData.get("user"), dbPoolData.get("pwd")); // read tablespace for data CmsSetupDBWrapper db = null; try { db = setupDb.executeSqlStatement("SELECT DISTINCT tablespace_name FROM user_tables", null); if (db.getResultSet().next()) { dataTablespace = db.getResultSet().getString(1).toLowerCase(); } } finally { if (db != null) { db.close(); } } // read tablespace for indexes try { db = setupDb.executeSqlStatement("SELECT DISTINCT tablespace_name FROM user_indexes", null); if (db.getResultSet().next()) { indexTablespace = db.getResultSet().getString(1).toLowerCase(); } } finally { if (db != null) { db.close(); } } } catch (SQLException e) { e.printStackTrace(); } finally { setupDb.closeConnection(); } dbPoolData.put("indexTablespace", indexTablespace); System.out.println("Index Tablespace: " + indexTablespace); dbPoolData.put("dataTablespace", dataTablespace); System.out.println("Data Tablespace: " + dataTablespace); }
void function(Map<String, String> dbPoolData) { String dataTablespace = "users"; String indexTablespace = "users"; CmsSetupDb setupDb = new CmsSetupDb(null); try { setupDb.setConnection( dbPoolData.get(STR), dbPoolData.get("url"), dbPoolData.get(STR), dbPoolData.get("user"), dbPoolData.get("pwd")); CmsSetupDBWrapper db = null; try { db = setupDb.executeSqlStatement(STR, null); if (db.getResultSet().next()) { dataTablespace = db.getResultSet().getString(1).toLowerCase(); } } finally { if (db != null) { db.close(); } } try { db = setupDb.executeSqlStatement(STR, null); if (db.getResultSet().next()) { indexTablespace = db.getResultSet().getString(1).toLowerCase(); } } finally { if (db != null) { db.close(); } } } catch (SQLException e) { e.printStackTrace(); } finally { setupDb.closeConnection(); } dbPoolData.put(STR, indexTablespace); System.out.println(STR + indexTablespace); dbPoolData.put(STR, dataTablespace); System.out.println(STR + dataTablespace); }
/** * Retrieves the oracle tablespace names.<p> * * @param dbPoolData the database pool data */
Retrieves the oracle tablespace names
getOracleTablespaces
{ "repo_name": "it-tavis/opencms-core", "path": "src-setup/org/opencms/setup/db/CmsUpdateDBManager.java", "license": "lgpl-2.1", "size": 17330 }
[ "java.sql.SQLException", "java.util.Map", "org.opencms.setup.CmsSetupDBWrapper", "org.opencms.setup.CmsSetupDb" ]
import java.sql.SQLException; import java.util.Map; import org.opencms.setup.CmsSetupDBWrapper; import org.opencms.setup.CmsSetupDb;
import java.sql.*; import java.util.*; import org.opencms.setup.*;
[ "java.sql", "java.util", "org.opencms.setup" ]
java.sql; java.util; org.opencms.setup;
153,759
private void explodeConceptSetHelper(Concept concept, Collection<Concept> ret, Collection<Integer> alreadySeen) { if (alreadySeen.contains(concept.getConceptId())) { return; } alreadySeen.add(concept.getConceptId()); List<ConceptSet> cs = getConceptSetsByConcept(concept); for (ConceptSet set : cs) { Concept c = set.getConcept(); if (c.isSet()) { ret.add(c); explodeConceptSetHelper(c, ret, alreadySeen); } else { ret.add(c); } } }
void function(Concept concept, Collection<Concept> ret, Collection<Integer> alreadySeen) { if (alreadySeen.contains(concept.getConceptId())) { return; } alreadySeen.add(concept.getConceptId()); List<ConceptSet> cs = getConceptSetsByConcept(concept); for (ConceptSet set : cs) { Concept c = set.getConcept(); if (c.isSet()) { ret.add(c); explodeConceptSetHelper(c, ret, alreadySeen); } else { ret.add(c); } } }
/** * Utility method used by getConceptsInSet(Concept concept) * * @param concept * @param ret * @param alreadySeen */
Utility method used by getConceptsInSet(Concept concept)
explodeConceptSetHelper
{ "repo_name": "chethandeshpande/openmrs-core", "path": "api/src/main/java/org/openmrs/api/impl/ConceptServiceImpl.java", "license": "mpl-2.0", "size": 60489 }
[ "java.util.Collection", "java.util.List", "org.apache.commons.lang.StringUtils", "org.openmrs.Concept", "org.openmrs.ConceptSet" ]
import java.util.Collection; import java.util.List; import org.apache.commons.lang.StringUtils; import org.openmrs.Concept; import org.openmrs.ConceptSet;
import java.util.*; import org.apache.commons.lang.*; import org.openmrs.*;
[ "java.util", "org.apache.commons", "org.openmrs" ]
java.util; org.apache.commons; org.openmrs;
2,466,295
if (offset > 0 || value.length != intLen) return Arrays.copyOfRange(value, offset, offset + intLen); return value; }
if (offset > 0 value.length != intLen) return Arrays.copyOfRange(value, offset, offset + intLen); return value; }
/** * Internal helper method to return the magnitude array. The caller is not * supposed to modify the returned array. */
Internal helper method to return the magnitude array. The caller is not supposed to modify the returned array
getMagnitudeArray
{ "repo_name": "andreagenso/java2scala", "path": "test/J2s/java/openjdk-6-src-b27/jdk/src/share/classes/java/math/MutableBigInteger.java", "license": "apache-2.0", "size": 46139 }
[ "java.util.Arrays" ]
import java.util.Arrays;
import java.util.*;
[ "java.util" ]
java.util;
1,296,309
private void decodeRaster(InputStream in) throws IOException { int initialCodeSize = in.read(); compressedData = readData( in ); dataBlockIndex = 0; int rasterIndex = 0; // Index into the raster int clearCode = (1 << initialCodeSize); // 256 usually int endCode = clearCode + 1; // The stop code. raster = new byte[ width * height ]; int codeSize = initialCodeSize + 1; int code = getBits( codeSize ); // = clear int nextCode = endCode + 1; short[][] dictionary = new short[ 4096 ][ 4 ]; for(short i = 0; i < nColors; i ++ ) { dictionary[i][0] = i; // color index dictionary[i][1] = -1; // parent dictionary[i][2] = i; // first dictionary[i][3] = 1; // depth } code = getBits( codeSize ); // get second code raster[ rasterIndex++ ] = (byte)dictionary[code][0]; int old = code; code = getBits( codeSize ); // start at the third code int c; do { if( code == clearCode ) { codeSize = initialCodeSize + 1; nextCode = endCode + 1; // get and output second code code = getBits( codeSize ); raster[ rasterIndex++ ] = (byte)dictionary[code][0]; old = code; } else { dictionary[nextCode][1] = (short)old; // parent = old dictionary[nextCode][2] = dictionary[old][2]; // first pixel dictionary[nextCode][3] = (short)(dictionary[old][3] + 1); // depth // appended pixel = first pixel of c if( code < nextCode ) { dictionary[nextCode][0] = dictionary[code][2]; old = code; } else // first of old { dictionary[nextCode][0] = dictionary[old][2]; old = nextCode; } c = old; // output the code c int depth = dictionary[c][3]; for( int i = depth - 1; i >= 0; i-- ) { raster[ rasterIndex + i ] = (byte)dictionary[c][0]; c = dictionary[c][1]; // go to parent. } rasterIndex += depth; nextCode ++; if( codeSize < 12 && nextCode >= (1 << codeSize) ) codeSize++; } code = getBits( codeSize ); } while( code != endCode && dataBlockIndex < compressedData.length ); compressedData = null; // throw away compressed data. }
void function(InputStream in) throws IOException { int initialCodeSize = in.read(); compressedData = readData( in ); dataBlockIndex = 0; int rasterIndex = 0; int clearCode = (1 << initialCodeSize); int endCode = clearCode + 1; raster = new byte[ width * height ]; int codeSize = initialCodeSize + 1; int code = getBits( codeSize ); int nextCode = endCode + 1; short[][] dictionary = new short[ 4096 ][ 4 ]; for(short i = 0; i < nColors; i ++ ) { dictionary[i][0] = i; dictionary[i][1] = -1; dictionary[i][2] = i; dictionary[i][3] = 1; } code = getBits( codeSize ); raster[ rasterIndex++ ] = (byte)dictionary[code][0]; int old = code; code = getBits( codeSize ); int c; do { if( code == clearCode ) { codeSize = initialCodeSize + 1; nextCode = endCode + 1; code = getBits( codeSize ); raster[ rasterIndex++ ] = (byte)dictionary[code][0]; old = code; } else { dictionary[nextCode][1] = (short)old; dictionary[nextCode][2] = dictionary[old][2]; dictionary[nextCode][3] = (short)(dictionary[old][3] + 1); if( code < nextCode ) { dictionary[nextCode][0] = dictionary[code][2]; old = code; } else { dictionary[nextCode][0] = dictionary[old][2]; old = nextCode; } c = old; int depth = dictionary[c][3]; for( int i = depth - 1; i >= 0; i-- ) { raster[ rasterIndex + i ] = (byte)dictionary[c][0]; c = dictionary[c][1]; } rasterIndex += depth; nextCode ++; if( codeSize < 12 && nextCode >= (1 << codeSize) ) codeSize++; } code = getBits( codeSize ); } while( code != endCode && dataBlockIndex < compressedData.length ); compressedData = null; }
/** * Loads a compressed image block and decompresses it. */
Loads a compressed image block and decompresses it
decodeRaster
{ "repo_name": "shaotuanchen/sunflower_exp", "path": "tools/source/gcc-4.2.4/libjava/classpath/gnu/javax/imageio/gif/GIFFile.java", "license": "bsd-3-clause", "size": 17622 }
[ "java.io.IOException", "java.io.InputStream" ]
import java.io.IOException; import java.io.InputStream;
import java.io.*;
[ "java.io" ]
java.io;
1,156,903
public ZoneOffset getOffsetBefore() { return offsetBefore; }
ZoneOffset function() { return offsetBefore; }
/** * Gets the offset before the transition. * * @return the offset before, not null */
Gets the offset before the transition
getOffsetBefore
{ "repo_name": "frohoff/jdk8u-jdk", "path": "src/share/classes/java/time/zone/ZoneOffsetTransitionRule.java", "license": "gpl-2.0", "size": 26368 }
[ "java.time.ZoneOffset" ]
import java.time.ZoneOffset;
import java.time.*;
[ "java.time" ]
java.time;
501,977
public boolean containsConfiguration(String dn) { return persistenceEntryManager.contains(dn, GluuConfiguration.class); }
boolean function(String dn) { return persistenceEntryManager.contains(dn, GluuConfiguration.class); }
/** * Check if LDAP server contains configuration with specified attributes * * @return True if configuration with specified attributes exist */
Check if LDAP server contains configuration with specified attributes
containsConfiguration
{ "repo_name": "GluuFederation/oxTrust", "path": "service/src/main/java/org/gluu/oxtrust/service/ConfigurationService.java", "license": "mit", "size": 9792 }
[ "org.gluu.oxtrust.model.GluuConfiguration" ]
import org.gluu.oxtrust.model.GluuConfiguration;
import org.gluu.oxtrust.model.*;
[ "org.gluu.oxtrust" ]
org.gluu.oxtrust;
2,733,393
private Set<String> generateQuestions(FormulaGenerationInfo fgInfo) { Set<String> res = new LinkedHashSet<String>(); //hack to identify cvt things (I don't trust the CVT markings on freebase schema) if(fgInfo.bInfo.toReverseString().contains("lambda")) handleCvtBinary(fgInfo, res); else handleNonCvtBinary(fgInfo, res); if(opts.genFromLex) { if(formulaToLexemsMap.containsKey(fgInfo.bInfo.formula)) { handleLexiconBinary(fgInfo, res); } } res = postProcess(res,fgInfo); if(fgInfo.isInject) res = handleInjection(res,fgInfo); return res; }
Set<String> function(FormulaGenerationInfo fgInfo) { Set<String> res = new LinkedHashSet<String>(); if(fgInfo.bInfo.toReverseString().contains(STR)) handleCvtBinary(fgInfo, res); else handleNonCvtBinary(fgInfo, res); if(opts.genFromLex) { if(formulaToLexemsMap.containsKey(fgInfo.bInfo.formula)) { handleLexiconBinary(fgInfo, res); } } res = postProcess(res,fgInfo); if(fgInfo.isInject) res = handleInjection(res,fgInfo); return res; }
/** * Get questions for a binary formula - main interface with this class - use both FB descriptions and lexicon * @param bInfo * @return */
Get questions for a binary formula - main interface with this class - use both FB descriptions and lexicon
generateQuestions
{ "repo_name": "laotao/sempre", "path": "src/edu/stanford/nlp/sempre/paraphrase/QuestionGenerator.java", "license": "gpl-2.0", "size": 26240 }
[ "edu.stanford.nlp.sempre.FormulaGenerationInfo", "java.util.LinkedHashSet", "java.util.Set" ]
import edu.stanford.nlp.sempre.FormulaGenerationInfo; import java.util.LinkedHashSet; import java.util.Set;
import edu.stanford.nlp.sempre.*; import java.util.*;
[ "edu.stanford.nlp", "java.util" ]
edu.stanford.nlp; java.util;
2,474,449
public static String tcompose (String key, Object... args) { return MessageUtil.tcompose(key, args); }
static String function (String key, Object... args) { return MessageUtil.tcompose(key, args); }
/** * A convenience method for calling {@link #compose(String,Object[])} with an array of * arguments that will be automatically tainted (see {@link #taint}). */
A convenience method for calling <code>#compose(String,Object[])</code> with an array of arguments that will be automatically tainted (see <code>#taint</code>)
tcompose
{ "repo_name": "threerings/ooo-util", "path": "src/main/java/com/threerings/util/MessageBundle.java", "license": "bsd-3-clause", "size": 15167 }
[ "com.samskivert.text.MessageUtil" ]
import com.samskivert.text.MessageUtil;
import com.samskivert.text.*;
[ "com.samskivert.text" ]
com.samskivert.text;
450,395
protected JestResult getResultsById(final String id) { // DONT use _all for type when searching by id. return getResultsById(id, index, null); }
JestResult function(final String id) { return getResultsById(id, index, null); }
/** * When given just the id. Uses default index and no types for this dao. * * @param id * @return */
When given just the id. Uses default index and no types for this dao
getResultsById
{ "repo_name": "codeaudit/graphene", "path": "graphene-parent/graphene-dao-es/src/main/java/graphene/dao/es/BasicESDAO.java", "license": "apache-2.0", "size": 29453 }
[ "io.searchbox.client.JestResult" ]
import io.searchbox.client.JestResult;
import io.searchbox.client.*;
[ "io.searchbox.client" ]
io.searchbox.client;
150,995
private class ConnectionMultiplexerSessionListener implements ConnectionCloseListener { public void onConnectionClose(Object handback) { ConnectionMultiplexerSession session = (ConnectionMultiplexerSession)handback; // Remove all the hostnames that were registered for this server session String domain = session.getAddress().getDomain(); localSessionManager.getConnnectionManagerSessions().remove(session.getAddress().toString()); // Remove track of the cluster node hosting the CM connection multiplexerSessionsCache.remove(session.getAddress().toString()); if (getConnectionMultiplexerSessions(domain).isEmpty()) { // Terminate ClientSessions originated from this connection manager // that are still active since the connection manager has gone down ConnectionMultiplexerManager.getInstance().multiplexerUnavailable(domain); } } }
class ConnectionMultiplexerSessionListener implements ConnectionCloseListener { public void function(Object handback) { ConnectionMultiplexerSession session = (ConnectionMultiplexerSession)handback; String domain = session.getAddress().getDomain(); localSessionManager.getConnnectionManagerSessions().remove(session.getAddress().toString()); multiplexerSessionsCache.remove(session.getAddress().toString()); if (getConnectionMultiplexerSessions(domain).isEmpty()) { ConnectionMultiplexerManager.getInstance().multiplexerUnavailable(domain); } } }
/** * Handle a session that just closed. * * @param handback The session that just closed */
Handle a session that just closed
onConnectionClose
{ "repo_name": "surevine/openfire-bespoke", "path": "src/java/org/jivesoftware/openfire/SessionManager.java", "license": "gpl-2.0", "size": 68316 }
[ "org.jivesoftware.openfire.multiplex.ConnectionMultiplexerManager", "org.jivesoftware.openfire.session.ConnectionMultiplexerSession" ]
import org.jivesoftware.openfire.multiplex.ConnectionMultiplexerManager; import org.jivesoftware.openfire.session.ConnectionMultiplexerSession;
import org.jivesoftware.openfire.multiplex.*; import org.jivesoftware.openfire.session.*;
[ "org.jivesoftware.openfire" ]
org.jivesoftware.openfire;
2,378,258
protected void moveElements(HeaderIndexFile<Data> source, RangeHashFunction targetHashfunction, String workingDir) throws IOException, FileLockException { ByteBuffer elem = ByteBuffer.allocate(source.getElementSize()); HeaderIndexFile<Data> tmp = null; newBucketIds = new IntArrayList(); long offset = 0; byte[] key = new byte[gp.getKeySize()]; int oldBucket = -1, newBucket; while (offset < source.getFilledUpFromContentStart()) { source.read(offset, elem); elem.rewind(); elem.get(key); newBucket = targetHashfunction.getBucketId(key); if (newBucket != oldBucket) { this.newBucketIds.add(newBucket); if (tmp != null) { tmp.close(); } String fileName = workingDir + "/" + targetHashfunction.getFilename(newBucket); tmp = new HeaderIndexFile<Data>(fileName, AccessMode.READ_WRITE, 100, gp); oldBucket = newBucket; } tmp.append(elem); offset += elem.limit(); } if (tmp != null) tmp.close(); }
void function(HeaderIndexFile<Data> source, RangeHashFunction targetHashfunction, String workingDir) throws IOException, FileLockException { ByteBuffer elem = ByteBuffer.allocate(source.getElementSize()); HeaderIndexFile<Data> tmp = null; newBucketIds = new IntArrayList(); long offset = 0; byte[] key = new byte[gp.getKeySize()]; int oldBucket = -1, newBucket; while (offset < source.getFilledUpFromContentStart()) { source.read(offset, elem); elem.rewind(); elem.get(key); newBucket = targetHashfunction.getBucketId(key); if (newBucket != oldBucket) { this.newBucketIds.add(newBucket); if (tmp != null) { tmp.close(); } String fileName = workingDir + "/" + targetHashfunction.getFilename(newBucket); tmp = new HeaderIndexFile<Data>(fileName, AccessMode.READ_WRITE, 100, gp); oldBucket = newBucket; } tmp.append(elem); offset += elem.limit(); } if (tmp != null) tmp.close(); }
/** * moves elements from the source file to new smaller files. The filenames are generated automatically * * @param source * @param targetHashfunction * @param workingDir * @throws IOException * @throws FileLockException */
moves elements from the source file to new smaller files. The filenames are generated automatically
moveElements
{ "repo_name": "mgledi/DRUMS", "path": "src/main/java/com/unister/semweb/drums/bucket/BucketSplitter.java", "license": "gpl-2.0", "size": 7062 }
[ "com.carrotsearch.hppc.IntArrayList", "com.unister.semweb.drums.bucket.hashfunction.RangeHashFunction", "com.unister.semweb.drums.file.AbstractHeaderFile", "com.unister.semweb.drums.file.FileLockException", "com.unister.semweb.drums.file.HeaderIndexFile", "java.io.IOException", "java.nio.ByteBuffer" ]
import com.carrotsearch.hppc.IntArrayList; import com.unister.semweb.drums.bucket.hashfunction.RangeHashFunction; import com.unister.semweb.drums.file.AbstractHeaderFile; import com.unister.semweb.drums.file.FileLockException; import com.unister.semweb.drums.file.HeaderIndexFile; import java.io.IOException; import java.nio.ByteBuffer;
import com.carrotsearch.hppc.*; import com.unister.semweb.drums.bucket.hashfunction.*; import com.unister.semweb.drums.file.*; import java.io.*; import java.nio.*;
[ "com.carrotsearch.hppc", "com.unister.semweb", "java.io", "java.nio" ]
com.carrotsearch.hppc; com.unister.semweb; java.io; java.nio;
921,407
PartitionedTopicMetadata getPartitionedTopicMetadata(String destination) throws PulsarAdminException;
PartitionedTopicMetadata getPartitionedTopicMetadata(String destination) throws PulsarAdminException;
/** * Get metadata of a partitioned topic. * <p> * Get metadata of a partitioned topic. * <p> * * @param destination * Destination name * @return Partitioned topic metadata * @throws PulsarAdminException */
Get metadata of a partitioned topic. Get metadata of a partitioned topic.
getPartitionedTopicMetadata
{ "repo_name": "tkb77/pulsar", "path": "pulsar-client-admin/src/main/java/org/apache/pulsar/client/admin/PersistentTopics.java", "license": "apache-2.0", "size": 31213 }
[ "org.apache.pulsar.common.partition.PartitionedTopicMetadata" ]
import org.apache.pulsar.common.partition.PartitionedTopicMetadata;
import org.apache.pulsar.common.partition.*;
[ "org.apache.pulsar" ]
org.apache.pulsar;
1,997,833
public void actionPerformed(ActionEvent e) { int index = Integer.parseInt(e.getActionCommand()); switch (index) { case CANCEL: close(); break; case APPLY: runScript(); break; case DOWNLOAD: firePropertyChange(DOWNLOAD_SELECTED_SCRIPT_PROPERTY, null, script); break; case VIEW: firePropertyChange(VIEW_SELECTED_SCRIPT_PROPERTY, null, script); } }
void function(ActionEvent e) { int index = Integer.parseInt(e.getActionCommand()); switch (index) { case CANCEL: close(); break; case APPLY: runScript(); break; case DOWNLOAD: firePropertyChange(DOWNLOAD_SELECTED_SCRIPT_PROPERTY, null, script); break; case VIEW: firePropertyChange(VIEW_SELECTED_SCRIPT_PROPERTY, null, script); } }
/** * Closes or runs the scripts. * @see ActionListener#actionPerformed(ActionEvent) */
Closes or runs the scripts
actionPerformed
{ "repo_name": "stelfrich/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/agents/util/ui/ScriptingDialog.java", "license": "gpl-2.0", "size": 32492 }
[ "java.awt.event.ActionEvent" ]
import java.awt.event.ActionEvent;
import java.awt.event.*;
[ "java.awt" ]
java.awt;
1,780,732
private HeaderParseStatus parseHeader() throws IOException { // // Check for blank line // byte chr = 0; while (headerParsePos == HeaderParsePosition.HEADER_START) { // Read new bytes if needed if (pos >= lastValid) { if (!fill(true,false)) {//parse header headerParsePos = HeaderParsePosition.HEADER_START; return HeaderParseStatus.NEED_MORE_DATA; } } chr = buf[pos]; if (chr == Constants.CR) { // Skip } else if (chr == Constants.LF) { pos++; return HeaderParseStatus.DONE; } else { break; } pos++; } if ( headerParsePos == HeaderParsePosition.HEADER_START ) { // Mark the current buffer position headerData.start = pos; headerParsePos = HeaderParsePosition.HEADER_NAME; } // // Reading the header name // Header name is always US-ASCII // while (headerParsePos == HeaderParsePosition.HEADER_NAME) { // Read new bytes if needed if (pos >= lastValid) { if (!fill(true,false)) { //parse header return HeaderParseStatus.NEED_MORE_DATA; } } chr = buf[pos]; if (chr == Constants.COLON) { headerParsePos = HeaderParsePosition.HEADER_VALUE_START; headerData.headerValue = headers.addValue(buf, headerData.start, pos - headerData.start); pos++; // Mark the current buffer position headerData.start = pos; headerData.realPos = pos; headerData.lastSignificantChar = pos; break; } else if (!HTTP_TOKEN_CHAR[chr]) { // If a non-token header is detected, skip the line and // ignore the header headerData.lastSignificantChar = pos; return skipLine(); } // chr is next byte of header name. Convert to lowercase. 
if ((chr >= Constants.A) && (chr <= Constants.Z)) { buf[pos] = (byte) (chr - Constants.LC_OFFSET); } pos++; } // Skip the line and ignore the header if (headerParsePos == HeaderParsePosition.HEADER_SKIPLINE) { return skipLine(); } // // Reading the header value (which can be spanned over multiple lines) // while (headerParsePos == HeaderParsePosition.HEADER_VALUE_START || headerParsePos == HeaderParsePosition.HEADER_VALUE || headerParsePos == HeaderParsePosition.HEADER_MULTI_LINE) { if ( headerParsePos == HeaderParsePosition.HEADER_VALUE_START ) { // Skipping spaces while (true) { // Read new bytes if needed if (pos >= lastValid) { if (!fill(true,false)) {//parse header //HEADER_VALUE_START return HeaderParseStatus.NEED_MORE_DATA; } } chr = buf[pos]; if (chr == Constants.SP || chr == Constants.HT) { pos++; } else { headerParsePos = HeaderParsePosition.HEADER_VALUE; break; } } } if ( headerParsePos == HeaderParsePosition.HEADER_VALUE ) { // Reading bytes until the end of the line boolean eol = false; while (!eol) { // Read new bytes if needed if (pos >= lastValid) { if (!fill(true,false)) {//parse header //HEADER_VALUE return HeaderParseStatus.NEED_MORE_DATA; } } chr = buf[pos]; if (chr == Constants.CR) { // Skip } else if (chr == Constants.LF) { eol = true; } else if (chr == Constants.SP || chr == Constants.HT) { buf[headerData.realPos] = chr; headerData.realPos++; } else { buf[headerData.realPos] = chr; headerData.realPos++; headerData.lastSignificantChar = headerData.realPos; } pos++; } // Ignore whitespaces at the end of the line headerData.realPos = headerData.lastSignificantChar; // Checking the first character of the new line. 
If the character // is a LWS, then it's a multiline header headerParsePos = HeaderParsePosition.HEADER_MULTI_LINE; } // Read new bytes if needed if (pos >= lastValid) { if (!fill(true,false)) {//parse header //HEADER_MULTI_LINE return HeaderParseStatus.NEED_MORE_DATA; } } chr = buf[pos]; if ( headerParsePos == HeaderParsePosition.HEADER_MULTI_LINE ) { if ( (chr != Constants.SP) && (chr != Constants.HT)) { headerParsePos = HeaderParsePosition.HEADER_START; break; } else { // Copying one extra space in the buffer (since there must // be at least one space inserted between the lines) buf[headerData.realPos] = chr; headerData.realPos++; headerParsePos = HeaderParsePosition.HEADER_VALUE_START; } } } // Set the header value headerData.headerValue.setBytes(buf, headerData.start, headerData.lastSignificantChar - headerData.start); headerData.recycle(); return HeaderParseStatus.HAVE_MORE_HEADERS; }
HeaderParseStatus function() throws IOException { byte chr = 0; while (headerParsePos == HeaderParsePosition.HEADER_START) { if (pos >= lastValid) { if (!fill(true,false)) { headerParsePos = HeaderParsePosition.HEADER_START; return HeaderParseStatus.NEED_MORE_DATA; } } chr = buf[pos]; if (chr == Constants.CR) { } else if (chr == Constants.LF) { pos++; return HeaderParseStatus.DONE; } else { break; } pos++; } if ( headerParsePos == HeaderParsePosition.HEADER_START ) { headerData.start = pos; headerParsePos = HeaderParsePosition.HEADER_NAME; } while (headerParsePos == HeaderParsePosition.HEADER_NAME) { if (pos >= lastValid) { if (!fill(true,false)) { return HeaderParseStatus.NEED_MORE_DATA; } } chr = buf[pos]; if (chr == Constants.COLON) { headerParsePos = HeaderParsePosition.HEADER_VALUE_START; headerData.headerValue = headers.addValue(buf, headerData.start, pos - headerData.start); pos++; headerData.start = pos; headerData.realPos = pos; headerData.lastSignificantChar = pos; break; } else if (!HTTP_TOKEN_CHAR[chr]) { headerData.lastSignificantChar = pos; return skipLine(); } if ((chr >= Constants.A) && (chr <= Constants.Z)) { buf[pos] = (byte) (chr - Constants.LC_OFFSET); } pos++; } if (headerParsePos == HeaderParsePosition.HEADER_SKIPLINE) { return skipLine(); } while (headerParsePos == HeaderParsePosition.HEADER_VALUE_START headerParsePos == HeaderParsePosition.HEADER_VALUE headerParsePos == HeaderParsePosition.HEADER_MULTI_LINE) { if ( headerParsePos == HeaderParsePosition.HEADER_VALUE_START ) { while (true) { if (pos >= lastValid) { if (!fill(true,false)) { return HeaderParseStatus.NEED_MORE_DATA; } } chr = buf[pos]; if (chr == Constants.SP chr == Constants.HT) { pos++; } else { headerParsePos = HeaderParsePosition.HEADER_VALUE; break; } } } if ( headerParsePos == HeaderParsePosition.HEADER_VALUE ) { boolean eol = false; while (!eol) { if (pos >= lastValid) { if (!fill(true,false)) { return HeaderParseStatus.NEED_MORE_DATA; } } chr = buf[pos]; if (chr == 
Constants.CR) { } else if (chr == Constants.LF) { eol = true; } else if (chr == Constants.SP chr == Constants.HT) { buf[headerData.realPos] = chr; headerData.realPos++; } else { buf[headerData.realPos] = chr; headerData.realPos++; headerData.lastSignificantChar = headerData.realPos; } pos++; } headerData.realPos = headerData.lastSignificantChar; headerParsePos = HeaderParsePosition.HEADER_MULTI_LINE; } if (pos >= lastValid) { if (!fill(true,false)) { return HeaderParseStatus.NEED_MORE_DATA; } } chr = buf[pos]; if ( headerParsePos == HeaderParsePosition.HEADER_MULTI_LINE ) { if ( (chr != Constants.SP) && (chr != Constants.HT)) { headerParsePos = HeaderParsePosition.HEADER_START; break; } else { buf[headerData.realPos] = chr; headerData.realPos++; headerParsePos = HeaderParsePosition.HEADER_VALUE_START; } } } headerData.headerValue.setBytes(buf, headerData.start, headerData.lastSignificantChar - headerData.start); headerData.recycle(); return HeaderParseStatus.HAVE_MORE_HEADERS; }
/** * Parse an HTTP header. * * @return false after reading a blank line (which indicates that the * HTTP header parsing is done */
Parse an HTTP header
parseHeader
{ "repo_name": "sdw2330976/apache-tomcat-7.0.57", "path": "target/classes/org/apache/coyote/http11/InternalNioInputBuffer.java", "license": "apache-2.0", "size": 29658 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
327,762
public SpoutPlayer getPlayer() { return player; }
SpoutPlayer function() { return player; }
/** * Gets the player involved in this permission event * @return player */
Gets the player involved in this permission event
getPlayer
{ "repo_name": "Sleaker/SpoutPluginAPI", "path": "src/org/getspout/spoutapi/event/permission/PlayerPermissionEvent.java", "license": "lgpl-3.0", "size": 1761 }
[ "org.getspout.spoutapi.player.SpoutPlayer" ]
import org.getspout.spoutapi.player.SpoutPlayer;
import org.getspout.spoutapi.player.*;
[ "org.getspout.spoutapi" ]
org.getspout.spoutapi;
334,512
public boolean getAntiAlias() { Object o = this.renderingHints.get(RenderingHints.KEY_ANTIALIASING); if (o == null) { return false; } return (o.equals(RenderingHints.VALUE_ANTIALIAS_ON)); }
boolean function() { Object o = this.renderingHints.get(RenderingHints.KEY_ANTIALIASING); if (o == null) { return false; } return (o.equals(RenderingHints.VALUE_ANTIALIAS_ON)); }
/** * Returns a flag that indicates whether or not anti-aliasing is used when * the chart is drawn. * * @return The flag. */
Returns a flag that indicates whether or not anti-aliasing is used when the chart is drawn
getAntiAlias
{ "repo_name": "raedle/univis", "path": "lib/jfreechart-1.0.1/src/org/jfree/chart/JFreeChart.java", "license": "lgpl-2.1", "size": 60091 }
[ "java.awt.RenderingHints" ]
import java.awt.RenderingHints;
import java.awt.*;
[ "java.awt" ]
java.awt;
2,568,463
public String deleteByExample(TBUserExample example) { BEGIN(); DELETE_FROM("users"); applyWhere(example, false); return SQL(); }
String function(TBUserExample example) { BEGIN(); DELETE_FROM("users"); applyWhere(example, false); return SQL(); }
/** * This method was generated by MyBatis Generator. * This method corresponds to the database table users * * @mbggenerated */
This method was generated by MyBatis Generator. This method corresponds to the database table users
deleteByExample
{ "repo_name": "upup1000/spring", "path": "spring-08/src/main/java/com/leader/spring08/test01/mapping/TBUserSqlProvider.java", "license": "apache-2.0", "size": 9923 }
[ "com.leader.spring08.test01.domain.TBUserExample" ]
import com.leader.spring08.test01.domain.TBUserExample;
import com.leader.spring08.test01.domain.*;
[ "com.leader.spring08" ]
com.leader.spring08;
2,539,218
CompletableFuture<TransientBlobKey> requestFileUploadByType( FileType fileType, @RpcTimeout Time timeout);
CompletableFuture<TransientBlobKey> requestFileUploadByType( FileType fileType, @RpcTimeout Time timeout);
/** * Requests the file upload of the specified type to the cluster's {@link BlobServer}. * * @param fileType to upload * @param timeout for the asynchronous operation * @return Future which is completed with the {@link TransientBlobKey} of the uploaded file. */
Requests the file upload of the specified type to the cluster's <code>BlobServer</code>
requestFileUploadByType
{ "repo_name": "kl0u/flink", "path": "flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/TaskExecutorGateway.java", "license": "apache-2.0", "size": 11452 }
[ "java.util.concurrent.CompletableFuture", "org.apache.flink.api.common.time.Time", "org.apache.flink.runtime.blob.TransientBlobKey", "org.apache.flink.runtime.rpc.RpcTimeout" ]
import java.util.concurrent.CompletableFuture; import org.apache.flink.api.common.time.Time; import org.apache.flink.runtime.blob.TransientBlobKey; import org.apache.flink.runtime.rpc.RpcTimeout;
import java.util.concurrent.*; import org.apache.flink.api.common.time.*; import org.apache.flink.runtime.blob.*; import org.apache.flink.runtime.rpc.*;
[ "java.util", "org.apache.flink" ]
java.util; org.apache.flink;
2,893,657
public static CommandElement location(Text key, Game game) { return new LocationCommandElement(key, game); }
static CommandElement function(Text key, Game game) { return new LocationCommandElement(key, game); }
/** * Expect an argument to represent a {@link Location}. * * @param key The key to store under * @param game The game to find worlds from * @return the argument */
Expect an argument to represent a <code>Location</code>
location
{ "repo_name": "joshgarde/SpongeAPI", "path": "src/main/java/org/spongepowered/api/util/command/args/GenericArguments.java", "license": "mit", "size": 47750 }
[ "org.spongepowered.api.Game", "org.spongepowered.api.text.Text" ]
import org.spongepowered.api.Game; import org.spongepowered.api.text.Text;
import org.spongepowered.api.*; import org.spongepowered.api.text.*;
[ "org.spongepowered.api" ]
org.spongepowered.api;
1,678,391
public @Nonnull RelFieldCollation.NullDirection defaultNullDirection( RelFieldCollation.Direction direction) { switch (direction) { case ASCENDING: case STRICTLY_ASCENDING: return getNullCollation().last(false) ? RelFieldCollation.NullDirection.LAST : RelFieldCollation.NullDirection.FIRST; case DESCENDING: case STRICTLY_DESCENDING: return getNullCollation().last(true) ? RelFieldCollation.NullDirection.LAST : RelFieldCollation.NullDirection.FIRST; default: return RelFieldCollation.NullDirection.UNSPECIFIED; } }
@Nonnull RelFieldCollation.NullDirection function( RelFieldCollation.Direction direction) { switch (direction) { case ASCENDING: case STRICTLY_ASCENDING: return getNullCollation().last(false) ? RelFieldCollation.NullDirection.LAST : RelFieldCollation.NullDirection.FIRST; case DESCENDING: case STRICTLY_DESCENDING: return getNullCollation().last(true) ? RelFieldCollation.NullDirection.LAST : RelFieldCollation.NullDirection.FIRST; default: return RelFieldCollation.NullDirection.UNSPECIFIED; } }
/** Returns whether NULL values are sorted first or last, in this dialect, * in an ORDER BY item of a given direction. */
Returns whether NULL values are sorted first or last, in this dialect
defaultNullDirection
{ "repo_name": "xhoong/incubator-calcite", "path": "core/src/main/java/org/apache/calcite/sql/SqlDialect.java", "license": "apache-2.0", "size": 56598 }
[ "javax.annotation.Nonnull", "org.apache.calcite.rel.RelFieldCollation" ]
import javax.annotation.Nonnull; import org.apache.calcite.rel.RelFieldCollation;
import javax.annotation.*; import org.apache.calcite.rel.*;
[ "javax.annotation", "org.apache.calcite" ]
javax.annotation; org.apache.calcite;
237,118
public static WaveletId waveletIdFromPathSegment(String pathSegment) { Pair<String, String> segments = decodePathSegmentPair(pathSegment); return WaveletId.of(segments.first, segments.second); }
static WaveletId function(String pathSegment) { Pair<String, String> segments = decodePathSegmentPair(pathSegment); return WaveletId.of(segments.first, segments.second); }
/** * Converts a path segment created using waveIdToPathSegment back to a wave id. * * @param pathSegment * @return the decoded waveletId * @throws IllegalArgumentException the encoding on the path segment is invalid */
Converts a path segment created using waveIdToPathSegment back to a wave id
waveletIdFromPathSegment
{ "repo_name": "wisebaldone/incubator-wave", "path": "wave/src/main/java/org/waveprotocol/box/server/persistence/file/FileUtils.java", "license": "apache-2.0", "size": 11649 }
[ "org.waveprotocol.wave.model.id.WaveletId", "org.waveprotocol.wave.model.util.Pair" ]
import org.waveprotocol.wave.model.id.WaveletId; import org.waveprotocol.wave.model.util.Pair;
import org.waveprotocol.wave.model.id.*; import org.waveprotocol.wave.model.util.*;
[ "org.waveprotocol.wave" ]
org.waveprotocol.wave;
517,727
public void testClassLoadOrdering() throws SQLException, MalformedURLException { Statement s = createStatement(); s.executeUpdate("CREATE SCHEMA OT"); // Functions to get the class loader of a specific class. // Thre variants that are loaded out of each installed jar // file to ensure that loading is delegated from one jar // to another correctly. // We use the added feature that the toString() of the // ClassLoader for installed jars returns the jar name // first. The RETURNS VARCHAR(10) trims the string to // the correct length for our compare purposes, ie. the // length of "OT"."OT{1,2,3}" s.execute("create function OT.WHICH_LOADER1(classname VARCHAR(256)) " + "RETURNS VARCHAR(10) " + "NO SQL " + "external name " + "'org.apache.derbyTesting.databaseclassloader.ot.OrderTest1.whichLoader' " + "language java parameter style java"); s.execute("create function OT.WHICH_LOADER2(classname VARCHAR(256)) " + "RETURNS VARCHAR(10) " + "NO SQL " + "external name " + "'org.apache.derbyTesting.databaseclassloader.ot.OrderTest2.whichLoader' " + "language java parameter style java"); s.execute("create function OT.WHICH_LOADER3(classname VARCHAR(256)) " + "RETURNS VARCHAR(10) " + "NO SQL " + "external name " + "'org.apache.derbyTesting.databaseclassloader.ot.OrderTest3.whichLoader' " + "language java parameter style java"); installJar("dcl_ot1.jar", "OT.OT1"); installJar("dcl_ot2.jar", "OT.OT2"); installJar("dcl_ot3.jar", "OT.OT3"); checkLoading("123"); checkLoading("132"); checkLoading("213"); checkLoading("231"); checkLoading("321"); checkLoading("312"); s.close(); }
void function() throws SQLException, MalformedURLException { Statement s = createStatement(); s.executeUpdate(STR); s.execute(STR + STR + STR + STR + STR + STR); s.execute(STR + STR + STR + STR + STR + STR); s.execute(STR + STR + STR + STR + STR + STR); installJar(STR, STR); installJar(STR, STR); installJar(STR, STR); checkLoading("123"); checkLoading("132"); checkLoading("213"); checkLoading("231"); checkLoading("321"); checkLoading("312"); s.close(); }
/** * Test ordering of class loading. * @throws MalformedURLException */
Test ordering of class loading
testClassLoadOrdering
{ "repo_name": "kavin256/Derby", "path": "java/testing/org/apache/derbyTesting/functionTests/tests/lang/DatabaseClassLoadingTest.java", "license": "apache-2.0", "size": 46217 }
[ "java.net.MalformedURLException", "java.sql.SQLException", "java.sql.Statement" ]
import java.net.MalformedURLException; import java.sql.SQLException; import java.sql.Statement;
import java.net.*; import java.sql.*;
[ "java.net", "java.sql" ]
java.net; java.sql;
1,382,047
public void validate() throws MalformedDataMapCommandException { List<CarbonColumn> indexColumns = carbonTable.getIndexedColumns(dataMapSchema); Set<String> unique = new HashSet<>(); for (CarbonColumn indexColumn : indexColumns) { unique.add(indexColumn.getColName()); } if (unique.size() != indexColumns.size()) { throw new MalformedDataMapCommandException(INDEX_COLUMNS + " has duplicate column"); } }
void function() throws MalformedDataMapCommandException { List<CarbonColumn> indexColumns = carbonTable.getIndexedColumns(dataMapSchema); Set<String> unique = new HashSet<>(); for (CarbonColumn indexColumn : indexColumns) { unique.add(indexColumn.getColName()); } if (unique.size() != indexColumns.size()) { throw new MalformedDataMapCommandException(INDEX_COLUMNS + STR); } }
/** * Validate INDEX_COLUMNS property and return a array containing index column name * Following will be validated * 1. require INDEX_COLUMNS property * 2. INDEX_COLUMNS can't contains illegal argument(empty, blank) * 3. INDEX_COLUMNS can't contains duplicate same columns * 4. INDEX_COLUMNS should be exists in table columns */
Validate INDEX_COLUMNS property and return a array containing index column name Following will be validated 1. require INDEX_COLUMNS property 2. INDEX_COLUMNS can't contains illegal argument(empty, blank) 3. INDEX_COLUMNS can't contains duplicate same columns 4. INDEX_COLUMNS should be exists in table columns
validate
{ "repo_name": "jackylk/incubator-carbondata", "path": "core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapFactory.java", "license": "apache-2.0", "size": 7481 }
[ "java.util.HashSet", "java.util.List", "java.util.Set", "org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException", "org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn" ]
import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException; import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
import java.util.*; import org.apache.carbondata.common.exceptions.sql.*; import org.apache.carbondata.core.metadata.schema.table.column.*;
[ "java.util", "org.apache.carbondata" ]
java.util; org.apache.carbondata;
680,678
public String[] addAttribute(String key, String value) { if (key.startsWith("com.ibm.wsspi.security") || key.startsWith("com.ibm.websphere.security")) { java.lang.SecurityManager sm = System.getSecurityManager(); if (sm != null) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "Performing Java 2 Security Permission Check ..."); Tr.debug(tc, "Expecting : " + UPDATE_TOKEN.toString()); } sm.checkPermission(UPDATE_TOKEN); } } if (!isReadOnly && token != null) { // change_counter is used by PropagationToken to determine uniqueness change_counter++; if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "Setting attribute with key: " + key + ", value: " + value); return token.addAttribute(key, value); } else return null; }
String[] function(String key, String value) { if (key.startsWith(STR) key.startsWith(STR)) { java.lang.SecurityManager sm = System.getSecurityManager(); if (sm != null) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, STR); Tr.debug(tc, STR + UPDATE_TOKEN.toString()); } sm.checkPermission(UPDATE_TOKEN); } } if (!isReadOnly && token != null) { change_counter++; if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, STR + key + STR + value); return token.addAttribute(key, value); } else return null; }
/** * Sets the attribute name/value pair. Returns the previous value set for key, * or null if not previously set. * * @param String key * @param String value * @returns String[]; */
Sets the attribute name/value pair. Returns the previous value set for key, or null if not previously set
addAttribute
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.ws.security.token/src/com/ibm/ws/security/token/internal/AbstractTokenImpl.java", "license": "epl-1.0", "size": 9288 }
[ "com.ibm.websphere.ras.Tr", "com.ibm.websphere.ras.TraceComponent" ]
import com.ibm.websphere.ras.Tr; import com.ibm.websphere.ras.TraceComponent;
import com.ibm.websphere.ras.*;
[ "com.ibm.websphere" ]
com.ibm.websphere;
233,001
Iterable<IRegionCategory<F>> categorize(ILanguageImpl langImpl, P result);
Iterable<IRegionCategory<F>> categorize(ILanguageImpl langImpl, P result);
/** * Returns a categorization of given parse result. * * @param langImpl * Language implementation that contains the categorization logic. * @param parseResult * Parse result to categorize. * @return Iterable over categories assigned to regions of the source text. Regions do not overlap and are iterated * over in ascending order. */
Returns a categorization of given parse result
categorize
{ "repo_name": "metaborg/spoofax", "path": "org.metaborg.core/src/main/java/org/metaborg/core/style/ICategorizerService.java", "license": "apache-2.0", "size": 1487 }
[ "org.metaborg.core.language.ILanguageImpl" ]
import org.metaborg.core.language.ILanguageImpl;
import org.metaborg.core.language.*;
[ "org.metaborg.core" ]
org.metaborg.core;
320,269
public int compareTo(SelectedNode b) { int aDepth = _depth; int bDepth = b._depth; Node aPtr = getNode(); Node bPtr = b.getNode(); if (aDepth == bDepth && aPtr.getParentNode() == bPtr.getParentNode()) return _level - b._level; return compareTo(aPtr, aDepth, bPtr, bDepth); }
int function(SelectedNode b) { int aDepth = _depth; int bDepth = b._depth; Node aPtr = getNode(); Node bPtr = b.getNode(); if (aDepth == bDepth && aPtr.getParentNode() == bPtr.getParentNode()) return _level - b._level; return compareTo(aPtr, aDepth, bPtr, bDepth); }
/** * Returns the node's index */
Returns the node's index
compareTo
{ "repo_name": "CleverCloud/Quercus", "path": "resin/src/main/java/com/caucho/xpath/pattern/SelectedNode.java", "license": "gpl-2.0", "size": 2750 }
[ "org.w3c.dom.Node" ]
import org.w3c.dom.Node;
import org.w3c.dom.*;
[ "org.w3c.dom" ]
org.w3c.dom;
2,732,572
@Deprecated final Method getter() { return this.getter; }
final Method getter() { return this.getter; }
/** * Gets the getter method. */
Gets the getter method
getter
{ "repo_name": "aws/aws-sdk-java", "path": "aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/datamodeling/ConvertibleType.java", "license": "apache-2.0", "size": 7207 }
[ "java.lang.reflect.Method" ]
import java.lang.reflect.Method;
import java.lang.reflect.*;
[ "java.lang" ]
java.lang;
1,607,226
public void execute() { checkParams(); AWSOpsWorksClient client = getOrCreateClient(AWSOpsWorksClient.class); CreateStackRequest createStackRequest = new CreateStackRequest() .withName(name).withRegion(region) .withServiceRoleArn(serviceRoleArn) .withUseOpsworksSecurityGroups(useOpsworksSecurityGroups) .withUseCustomCookbooks(useCustomCookbooks).withVpcId(vpcId) .withDefaultAvailabilityZone(defaultAvailabilityZone) .withDefaultOs(defaultOs) .withDefaultRootDeviceType(defaultRootDeviceType) .withDefaultInstanceProfileArn(defaultInstanceProfileArn) .withDefaultSshKeyName(defaultSshKeyName) .withHostnameTheme(hostnameTheme).withCustomJson(customJson); if (attributes.size() > 0) { createStackRequest.setAttributes(attributes); } if (chefVersion != null) { createStackRequest .setConfigurationManager(new StackConfigurationManager() .withName(CHEF).withVersion(chefVersion)); } if (useCustomCookbooks) { if (repoType != null && repoUrl != null) { Source customCookBookSource = new Source().withType(repoType) .withUrl(repoUrl).withSshKey(repoSshKey) .withRevision(repoRevision).withPassword(repoPassword) .withUsername(repoUsername); createStackRequest .setCustomCookbooksSource(customCookBookSource); } } if (manageBerkshelf) { ChefConfiguration chefConfiguration = new ChefConfiguration() .withManageBerkshelf(manageBerkshelf); if (berkshelfVersion != null) { chefConfiguration.setBerkshelfVersion(berkshelfVersion); } createStackRequest.setChefConfiguration(chefConfiguration); } String stackId; try { stackId = client.createStack(createStackRequest).getStackId(); System.out.println("Created stack with stackId " + stackId); } catch (Exception e) { throw new BuildException("Could not create stack: " + e.getMessage(), e); } if (startOnCreate) { client.startStack(new StartStackRequest().withStackId(stackId)); System.out.println("Started stack."); } if (stackId != null) { if (propertyNameForStackId.equals(Constants.STACK_ID_PROPERTY) && 
getProject().getProperty(Constants.STACK_ID_PROPERTY) != null) { getProject().addReference(Constants.STACK_ID_REFERENCE, true); } else { getProject().addReference(Constants.STACK_ID_REFERENCE, false); getProject().setNewProperty(propertyNameForStackId, stackId); } } } public static class StackAttribute extends KeyValueNestedElement { }
void function() { checkParams(); AWSOpsWorksClient client = getOrCreateClient(AWSOpsWorksClient.class); CreateStackRequest createStackRequest = new CreateStackRequest() .withName(name).withRegion(region) .withServiceRoleArn(serviceRoleArn) .withUseOpsworksSecurityGroups(useOpsworksSecurityGroups) .withUseCustomCookbooks(useCustomCookbooks).withVpcId(vpcId) .withDefaultAvailabilityZone(defaultAvailabilityZone) .withDefaultOs(defaultOs) .withDefaultRootDeviceType(defaultRootDeviceType) .withDefaultInstanceProfileArn(defaultInstanceProfileArn) .withDefaultSshKeyName(defaultSshKeyName) .withHostnameTheme(hostnameTheme).withCustomJson(customJson); if (attributes.size() > 0) { createStackRequest.setAttributes(attributes); } if (chefVersion != null) { createStackRequest .setConfigurationManager(new StackConfigurationManager() .withName(CHEF).withVersion(chefVersion)); } if (useCustomCookbooks) { if (repoType != null && repoUrl != null) { Source customCookBookSource = new Source().withType(repoType) .withUrl(repoUrl).withSshKey(repoSshKey) .withRevision(repoRevision).withPassword(repoPassword) .withUsername(repoUsername); createStackRequest .setCustomCookbooksSource(customCookBookSource); } } if (manageBerkshelf) { ChefConfiguration chefConfiguration = new ChefConfiguration() .withManageBerkshelf(manageBerkshelf); if (berkshelfVersion != null) { chefConfiguration.setBerkshelfVersion(berkshelfVersion); } createStackRequest.setChefConfiguration(chefConfiguration); } String stackId; try { stackId = client.createStack(createStackRequest).getStackId(); System.out.println(STR + stackId); } catch (Exception e) { throw new BuildException(STR + e.getMessage(), e); } if (startOnCreate) { client.startStack(new StartStackRequest().withStackId(stackId)); System.out.println(STR); } if (stackId != null) { if (propertyNameForStackId.equals(Constants.STACK_ID_PROPERTY) && getProject().getProperty(Constants.STACK_ID_PROPERTY) != null) { 
getProject().addReference(Constants.STACK_ID_REFERENCE, true); } else { getProject().addReference(Constants.STACK_ID_REFERENCE, false); getProject().setNewProperty(propertyNameForStackId, stackId); } } } public static class StackAttribute extends KeyValueNestedElement { }
/** * Creates a stack according to the set parameters. Also sets the stackId * property to the created stack's ID. The ID is also printed for you to set * to your own property for later use. */
Creates a stack according to the set parameters. Also sets the stackId property to the created stack's ID. The ID is also printed for you to set to your own property for later use
execute
{ "repo_name": "centic9/aws-ant-tasks", "path": "src/main/java/com/amazonaws/ant/opsworks/CreateStackTask.java", "license": "apache-2.0", "size": 15992 }
[ "com.amazonaws.ant.KeyValueNestedElement", "com.amazonaws.services.opsworks.AWSOpsWorksClient", "com.amazonaws.services.opsworks.model.ChefConfiguration", "com.amazonaws.services.opsworks.model.CreateStackRequest", "com.amazonaws.services.opsworks.model.Source", "com.amazonaws.services.opsworks.model.StackConfigurationManager", "com.amazonaws.services.opsworks.model.StartStackRequest", "org.apache.tools.ant.BuildException" ]
import com.amazonaws.ant.KeyValueNestedElement; import com.amazonaws.services.opsworks.AWSOpsWorksClient; import com.amazonaws.services.opsworks.model.ChefConfiguration; import com.amazonaws.services.opsworks.model.CreateStackRequest; import com.amazonaws.services.opsworks.model.Source; import com.amazonaws.services.opsworks.model.StackConfigurationManager; import com.amazonaws.services.opsworks.model.StartStackRequest; import org.apache.tools.ant.BuildException;
import com.amazonaws.ant.*; import com.amazonaws.services.opsworks.*; import com.amazonaws.services.opsworks.model.*; import org.apache.tools.ant.*;
[ "com.amazonaws.ant", "com.amazonaws.services", "org.apache.tools" ]
com.amazonaws.ant; com.amazonaws.services; org.apache.tools;
1,028,971
public void removeSubmission(AssignmentSubmissionEdit submission) throws PermissionException;
void function(AssignmentSubmissionEdit submission) throws PermissionException;
/** * Removes an AssignmentSubmission and all references to it * * @param submission - * the AssignmentSubmission to remove. * @throws PermissionException * if current User does not have permission to do this. */
Removes an AssignmentSubmission and all references to it
removeSubmission
{ "repo_name": "rodriguezdevera/sakai", "path": "assignment/assignment-api/api/src/java/org/sakaiproject/assignment/api/AssignmentService.java", "license": "apache-2.0", "size": 35815 }
[ "org.sakaiproject.exception.PermissionException" ]
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.exception.*;
[ "org.sakaiproject.exception" ]
org.sakaiproject.exception;
1,296,797
DeterministicKey hd = DeterministicKey.deserializeB58(SideConstants.KEY_B58,Constants.getNetwork()); // DeterministicKey hd = DeterministicKey.deserializeB58(null,KEY_B58); // DeterministicKey hd = HDKeyDerivation.createMasterPrivateKey(KEY.getBytes()); DeterministicHierarchy hi = new DeterministicHierarchy(hd); List<ChildNumber> childList = new ArrayList<ChildNumber>(); ChildNumber childNumber = new ChildNumber(number, true); childList.add(childNumber); DeterministicKey key = hi.get(childList, true, true); return key; }
DeterministicKey hd = DeterministicKey.deserializeB58(SideConstants.KEY_B58,Constants.getNetwork()); DeterministicHierarchy hi = new DeterministicHierarchy(hd); List<ChildNumber> childList = new ArrayList<ChildNumber>(); ChildNumber childNumber = new ChildNumber(number, true); childList.add(childNumber); DeterministicKey key = hi.get(childList, true, true); return key; }
/** * * Call to get the MasterKey for a new Channel. * * @param number Query the Database to get the latest unused number * @return DeterministicKey for the new Channel */
Call to get the MasterKey for a new Channel
getMasterKey
{ "repo_name": "repos-bitcoin/thundernetwork", "path": "thunder-server/src/main/java/network/thunder/server/etc/KeyDerivation.java", "license": "agpl-3.0", "size": 5359 }
[ "java.util.ArrayList", "java.util.List", "org.bitcoinj.crypto.ChildNumber", "org.bitcoinj.crypto.DeterministicHierarchy", "org.bitcoinj.crypto.DeterministicKey" ]
import java.util.ArrayList; import java.util.List; import org.bitcoinj.crypto.ChildNumber; import org.bitcoinj.crypto.DeterministicHierarchy; import org.bitcoinj.crypto.DeterministicKey;
import java.util.*; import org.bitcoinj.crypto.*;
[ "java.util", "org.bitcoinj.crypto" ]
java.util; org.bitcoinj.crypto;
1,158,591
public Leaf fork() throws IndexOperationException;
Leaf function() throws IndexOperationException;
/** * Creates a new leaf context for this page in READ mode (e.g. shared * latch). The corresponding page handle will therefore be fixed and latched * again. */
Creates a new leaf context for this page in READ mode (e.g. shared latch). The corresponding page handle will therefore be fixed and latched again
fork
{ "repo_name": "x-clone/brackit.brackitdb", "path": "brackitdb-server/src/main/java/org/brackit/server/store/index/bracket/page/Leaf.java", "license": "bsd-3-clause", "size": 12254 }
[ "org.brackit.server.store.index.bracket.IndexOperationException" ]
import org.brackit.server.store.index.bracket.IndexOperationException;
import org.brackit.server.store.index.bracket.*;
[ "org.brackit.server" ]
org.brackit.server;
2,229,271
public void deleteDatabaseMeta( String databaseName ) throws KettleException { repository.getSecurityProvider().validateAction( RepositoryOperation.DELETE_DATABASE ); try { ObjectId id_database = getDatabaseID( databaseName ); delDatabase( id_database ); } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "KettleDatabaseRepository.Exception.ErrorDeletingConnection.Message", databaseName ), dbe ); } }
void function( String databaseName ) throws KettleException { repository.getSecurityProvider().validateAction( RepositoryOperation.DELETE_DATABASE ); try { ObjectId id_database = getDatabaseID( databaseName ); delDatabase( id_database ); } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, STR, databaseName ), dbe ); } }
/** * Remove a database connection from the repository * * @param databaseName * The name of the connection to remove * @throws KettleException * In case something went wrong: database error, insufficient permissions, depending objects, etc. */
Remove a database connection from the repository
deleteDatabaseMeta
{ "repo_name": "codek/pentaho-kettle", "path": "engine/src/org/pentaho/di/repository/kdr/delegates/KettleDatabaseRepositoryDatabaseDelegate.java", "license": "apache-2.0", "size": 23137 }
[ "org.pentaho.di.core.exception.KettleException", "org.pentaho.di.i18n.BaseMessages", "org.pentaho.di.repository.ObjectId", "org.pentaho.di.repository.RepositoryOperation" ]
import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.RepositoryOperation;
import org.pentaho.di.core.exception.*; import org.pentaho.di.i18n.*; import org.pentaho.di.repository.*;
[ "org.pentaho.di" ]
org.pentaho.di;
1,528,459
public String getLocalizedName() { return I18n.translateToLocal((this.getUnlocalizedName() + ".name").replaceAll("tile", "item")); }
String function() { return I18n.translateToLocal((this.getUnlocalizedName() + ".name").replaceAll("tile", "item")); }
/** * Gets the localized name of this block. Used for the statistics page. */
Gets the localized name of this block. Used for the statistics page
getLocalizedName
{ "repo_name": "MartyParty21/AwakenDreamsClient", "path": "mcp/src/minecraft/net/minecraft/block/BlockDoor.java", "license": "gpl-3.0", "size": 16889 }
[ "net.minecraft.util.text.translation.I18n" ]
import net.minecraft.util.text.translation.I18n;
import net.minecraft.util.text.translation.*;
[ "net.minecraft.util" ]
net.minecraft.util;
2,168,682
ScheduledFuture<?> scheduleRunnable(Runnable runnable, long delay, TimeUnit unit);
ScheduledFuture<?> scheduleRunnable(Runnable runnable, long delay, TimeUnit unit);
/** * Execute the runnable in the execution context of this RPC Service, as returned by {@link * #getScheduledExecutor()} ()}, after a scheduled delay. * * @param runnable Runnable to be executed * @param delay The delay after which the runnable will be executed */
Execute the runnable in the execution context of this RPC Service, as returned by <code>#getScheduledExecutor()</code> ()}, after a scheduled delay
scheduleRunnable
{ "repo_name": "apache/flink", "path": "flink-rpc/flink-rpc-core/src/main/java/org/apache/flink/runtime/rpc/RpcService.java", "license": "apache-2.0", "size": 7670 }
[ "java.util.concurrent.ScheduledFuture", "java.util.concurrent.TimeUnit" ]
import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
1,679,111
public static <T extends IdentifiableObject> List<String> getUids( Collection<T> objects ) { return objects != null ? objects.stream().map( IdentifiableObject::getUid ).collect( Collectors.toList() ) : null; }
static <T extends IdentifiableObject> List<String> function( Collection<T> objects ) { return objects != null ? objects.stream().map( IdentifiableObject::getUid ).collect( Collectors.toList() ) : null; }
/** * Returns a list of uids for the given collection of IdentifiableObjects. * * @param objects the list of IdentifiableObjects. * @return a list of uids. */
Returns a list of uids for the given collection of IdentifiableObjects
getUids
{ "repo_name": "uonafya/jphes-core", "path": "dhis-2/dhis-api/src/main/java/org/hisp/dhis/common/IdentifiableObjectUtils.java", "license": "bsd-3-clause", "size": 12881 }
[ "java.util.Collection", "java.util.List", "java.util.stream.Collectors" ]
import java.util.Collection; import java.util.List; import java.util.stream.Collectors;
import java.util.*; import java.util.stream.*;
[ "java.util" ]
java.util;
2,008,701
public void testConstructors() { System.out.println( "Constructors" ); NegativeLogLikelihood<Double> instance = new NegativeLogLikelihood<Double>(); assertNull( instance.getCostParameters() ); UnivariateGaussian g = new UnivariateGaussian.PDF(); Collection<Double> samples = g.sample(RANDOM, 1000); instance = new NegativeLogLikelihood<Double>( samples ); assertSame( samples, instance.getCostParameters() ); }
void function() { System.out.println( STR ); NegativeLogLikelihood<Double> instance = new NegativeLogLikelihood<Double>(); assertNull( instance.getCostParameters() ); UnivariateGaussian g = new UnivariateGaussian.PDF(); Collection<Double> samples = g.sample(RANDOM, 1000); instance = new NegativeLogLikelihood<Double>( samples ); assertSame( samples, instance.getCostParameters() ); }
/** * Tests the constructors of class NegativeLogLikelihoodTest. */
Tests the constructors of class NegativeLogLikelihoodTest
testConstructors
{ "repo_name": "codeaudit/Foundry", "path": "Components/LearningCore/Test/gov/sandia/cognition/learning/function/cost/NegativeLogLikelihoodTest.java", "license": "bsd-3-clause", "size": 3308 }
[ "gov.sandia.cognition.statistics.distribution.UnivariateGaussian", "java.util.Collection" ]
import gov.sandia.cognition.statistics.distribution.UnivariateGaussian; import java.util.Collection;
import gov.sandia.cognition.statistics.distribution.*; import java.util.*;
[ "gov.sandia.cognition", "java.util" ]
gov.sandia.cognition; java.util;
2,401,213
void enterTypeImportOnDemandDeclaration(@NotNull Java8Parser.TypeImportOnDemandDeclarationContext ctx);
void enterTypeImportOnDemandDeclaration(@NotNull Java8Parser.TypeImportOnDemandDeclarationContext ctx);
/** * Enter a parse tree produced by {@link Java8Parser#typeImportOnDemandDeclaration}. * * @param ctx the parse tree */
Enter a parse tree produced by <code>Java8Parser#typeImportOnDemandDeclaration</code>
enterTypeImportOnDemandDeclaration
{ "repo_name": "BigDaddy-Germany/WHOAMI", "path": "WHOAMI/src/de/aima13/whoami/modules/syntaxcheck/languages/antlrgen/Java8Listener.java", "license": "mit", "size": 97945 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
100,536
public static Element documentContents( final SDocumentContents s) { NullCheck.notNull(s, "Document contents"); return new Element("s:document-contents", SXML.XML_URI.toString()); }
static Element function( final SDocumentContents s) { NullCheck.notNull(s, STR); return new Element(STR, SXML.XML_URI.toString()); }
/** * Serialize the given element to XML. * * @param s * The element * @return An XML element */
Serialize the given element to XML
documentContents
{ "repo_name": "io7m/jstructural", "path": "io7m-jstructural-xom/src/main/java/com/io7m/jstructural/xom/SDocumentSerializer.java", "license": "isc", "size": 36816 }
[ "com.io7m.jnull.NullCheck", "com.io7m.jstructural.core.SDocumentContents", "nu.xom.Element" ]
import com.io7m.jnull.NullCheck; import com.io7m.jstructural.core.SDocumentContents; import nu.xom.Element;
import com.io7m.jnull.*; import com.io7m.jstructural.core.*; import nu.xom.*;
[ "com.io7m.jnull", "com.io7m.jstructural", "nu.xom" ]
com.io7m.jnull; com.io7m.jstructural; nu.xom;
710,523
protected void paint(Graphics graphics, IFigure fig, Insets insets, Color[] tl, Color[] br) { graphics.setLineWidth(1); graphics.setLineStyle(Graphics.LINE_SOLID); graphics.setXORMode(false); Rectangle rect = getPaintRectangle(fig, insets); int top = rect.y; int left = rect.x; int bottom = rect.bottom() - 1; int right = rect.right() - 1; Color color; for (int i = 0; i < br.length; i++) { color = br[i]; graphics.setForegroundColor(color); graphics.drawLine(right - i, bottom - i, right - i, top + i); graphics.drawLine(right - i, bottom - i, left + i, bottom - i); } right--; bottom--; for (int i = 0; i < tl.length; i++) { color = tl[i]; graphics.setForegroundColor(color); graphics.drawLine(left + i, top + i, right - i, top + i); graphics.drawLine(left + i, top + i, left + i, bottom - i); } }
void function(Graphics graphics, IFigure fig, Insets insets, Color[] tl, Color[] br) { graphics.setLineWidth(1); graphics.setLineStyle(Graphics.LINE_SOLID); graphics.setXORMode(false); Rectangle rect = getPaintRectangle(fig, insets); int top = rect.y; int left = rect.x; int bottom = rect.bottom() - 1; int right = rect.right() - 1; Color color; for (int i = 0; i < br.length; i++) { color = br[i]; graphics.setForegroundColor(color); graphics.drawLine(right - i, bottom - i, right - i, top + i); graphics.drawLine(right - i, bottom - i, left + i, bottom - i); } right--; bottom--; for (int i = 0; i < tl.length; i++) { color = tl[i]; graphics.setForegroundColor(color); graphics.drawLine(left + i, top + i, right - i, top + i); graphics.drawLine(left + i, top + i, left + i, bottom - i); } }
/** * Paints the border using the information in the set Scheme and the inputs * given. Side widths are determined by the number of colors in the Scheme * for each side. * * @param graphics * the graphics object * @param fig * the figure this border belongs to * @param insets * the insets * @param tl * the highlight (top/left) colors * @param br * the shadow (bottom/right) colors */
Paints the border using the information in the set Scheme and the inputs given. Side widths are determined by the number of colors in the Scheme for each side
paint
{ "repo_name": "archimatetool/archi", "path": "org.eclipse.draw2d/src/org/eclipse/draw2d/SchemeBorder.java", "license": "mit", "size": 10248 }
[ "org.eclipse.draw2d.geometry.Insets", "org.eclipse.draw2d.geometry.Rectangle", "org.eclipse.swt.graphics.Color" ]
import org.eclipse.draw2d.geometry.Insets; import org.eclipse.draw2d.geometry.Rectangle; import org.eclipse.swt.graphics.Color;
import org.eclipse.draw2d.geometry.*; import org.eclipse.swt.graphics.*;
[ "org.eclipse.draw2d", "org.eclipse.swt" ]
org.eclipse.draw2d; org.eclipse.swt;
350,859
public static Date stringToDate(String dateString) { Date date; if (!dateString.equalsIgnoreCase("")) { try { date = new SimpleDateFormat("dd MMM yyyy") .parse(dateString); return date; } catch (ParseException e) { } } return null; }
static Date function(String dateString) { Date date; if (!dateString.equalsIgnoreCase(STRdd MMM yyyy") .parse(dateString); return date; } catch (ParseException e) { } } return null; }
/** * String to date. * * @param dateString * the date string * @return the date */
String to date
stringToDate
{ "repo_name": "gudipatiharitha/medicmobile", "path": "modules/medicmobile/trunk/src/main/java/org/medicmobile/util/UtilService.java", "license": "bsd-3-clause", "size": 4765 }
[ "java.text.ParseException", "java.util.Date" ]
import java.text.ParseException; import java.util.Date;
import java.text.*; import java.util.*;
[ "java.text", "java.util" ]
java.text; java.util;
1,321,167
protected Set<java.sql.Date> getDaysWithMeals(List<PerDiemExpense> perDiemExpenses, List<ActualExpense> actualExpenses) { Set<java.sql.Date> days = new HashSet<java.sql.Date>(); for (PerDiemExpense perDiemExpense : perDiemExpenses) { if (perDiemExpense.getBreakfastValue().isGreaterThan(KualiDecimal.ZERO) || perDiemExpense.getLunchValue().isGreaterThan(KualiDecimal.ZERO) || perDiemExpense.getDinnerValue().isGreaterThan(KualiDecimal.ZERO)) { java.sql.Date day = new java.sql.Date(perDiemExpense.getMileageDate().getTime()); days.add(day); } } for (ActualExpense expense : actualExpenses) { if (expense.isBreakfast() || expense.isLunch() || expense.isDinner()) { days.add(expense.getExpenseDate()); } } return days; }
Set<java.sql.Date> function(List<PerDiemExpense> perDiemExpenses, List<ActualExpense> actualExpenses) { Set<java.sql.Date> days = new HashSet<java.sql.Date>(); for (PerDiemExpense perDiemExpense : perDiemExpenses) { if (perDiemExpense.getBreakfastValue().isGreaterThan(KualiDecimal.ZERO) perDiemExpense.getLunchValue().isGreaterThan(KualiDecimal.ZERO) perDiemExpense.getDinnerValue().isGreaterThan(KualiDecimal.ZERO)) { java.sql.Date day = new java.sql.Date(perDiemExpense.getMileageDate().getTime()); days.add(day); } } for (ActualExpense expense : actualExpenses) { if (expense.isBreakfast() expense.isLunch() expense.isDinner()) { days.add(expense.getExpenseDate()); } } return days; }
/** * Finds all of the days within per diem expenses and actual expenses which have meals * @param perDiemExpenses the per diem expenses to check * @param actualExpenses the actual expenses to check * @return a Set of days to check */
Finds all of the days within per diem expenses and actual expenses which have meals
getDaysWithMeals
{ "repo_name": "kkronenb/kfs", "path": "kfs-tem/src/main/java/org/kuali/kfs/module/tem/document/TravelDocumentBase.java", "license": "agpl-3.0", "size": 88298 }
[ "java.util.HashSet", "java.util.List", "java.util.Set", "org.kuali.kfs.module.tem.businessobject.ActualExpense", "org.kuali.kfs.module.tem.businessobject.PerDiemExpense", "org.kuali.rice.core.api.util.type.KualiDecimal" ]
import java.util.HashSet; import java.util.List; import java.util.Set; import org.kuali.kfs.module.tem.businessobject.ActualExpense; import org.kuali.kfs.module.tem.businessobject.PerDiemExpense; import org.kuali.rice.core.api.util.type.KualiDecimal;
import java.util.*; import org.kuali.kfs.module.tem.businessobject.*; import org.kuali.rice.core.api.util.type.*;
[ "java.util", "org.kuali.kfs", "org.kuali.rice" ]
java.util; org.kuali.kfs; org.kuali.rice;
1,807,339
/**
 * Verifies that combining Statement.setMaxRows with LIMIT handling does not
 * throw. Cleans up the table and resets maxRows in a finally block so later
 * tests are unaffected. NOTE(review): the trailing block comment below is an
 * unterminated commented-out test (testBug9595) — confirm against the full file.
 */
public void testLimitAndMaxRows() throws Exception { try { this.stmt.executeUpdate("DROP TABLE IF EXISTS testMaxRowsAndLimit"); this.stmt .executeUpdate("CREATE TABLE testMaxRowsAndLimit(limitField INT)"); for (int i = 0; i < 500; i++) { this.stmt .executeUpdate("INSERT INTO testMaxRowsAndLimit VALUES (" + i + ")"); } this.stmt.setMaxRows(250); this.stmt .executeQuery("SELECT limitField FROM testMaxRowsAndLimit"); } finally { this.stmt.setMaxRows(0); this.stmt.executeUpdate("DROP TABLE IF EXISTS testMaxRowsAndLimit"); } } /* * public void testBug9595() throws Exception { double[] vals = new double[] * {52.21, 52.22, 52.23, 52.24}; * * createTable("testBug9595", "(field1 DECIMAL(10,2), sortField INT)"); * * this.pstmt = this.conn.prepareStatement("INSERT INTO testBug9595 VALUES * (?, ?)"); // Try setting as doubles for (int i = 0; i < vals.length; i++) { * this.pstmt.setDouble(1, vals[i]); this.pstmt.setInt(2, i); * this.pstmt.executeUpdate(); } * * this.pstmt = this.conn.prepareStatement("SELECT field1 FROM testBug9595 * ORDER BY sortField"); this.rs = this.pstmt.executeQuery(); * * int i = 0; * * while (this.rs.next()) { double valToTest = vals[i++]; * * assertEquals(this.rs.getDouble(1), valToTest, 0.001); * assertEquals(this.rs.getBigDecimal(1).doubleValue(), valToTest, 0.001); } * * this.pstmt = this.conn.prepareStatement("INSERT INTO testBug9595 VALUES * (?, ?)"); * * this.stmt.executeUpdate("TRUNCATE TABLE testBug9595"); // Now, as * BigDecimals for (i = 0; i < vals.length; i++) { BigDecimal foo = new * BigDecimal(vals[i]); * * this.pstmt.setObject(1, foo, Types.DECIMAL, 2); this.pstmt.setInt(2, i); * this.pstmt.executeUpdate(); } * * this.pstmt = this.conn.prepareStatement("SELECT field1 FROM testBug9595 * ORDER BY sortField"); this.rs = this.pstmt.executeQuery(); * * i = 0; * * while (this.rs.next()) { double valToTest = vals[i++]; * System.out.println(this.rs.getString(1)); * assertEquals(this.rs.getDouble(1), valToTest, 0.001); * 
assertEquals(this.rs.getBigDecimal(1).doubleValue(), valToTest, 0.001); } }
void function() throws Exception { try { this.stmt.executeUpdate(STR); this.stmt .executeUpdate(STR); for (int i = 0; i < 500; i++) { this.stmt .executeUpdate(STR + i + ")"); } this.stmt.setMaxRows(250); this.stmt .executeQuery(STR); } finally { this.stmt.setMaxRows(0); this.stmt.executeUpdate(STR); } } /* * public void testBug9595() throws Exception { double[] vals = new double[] * {52.21, 52.22, 52.23, 52.24}; * * createTable(STR, STR); * * this.pstmt = this.conn.prepareStatement(STR); * this.pstmt.setDouble(1, vals[i]); this.pstmt.setInt(2, i); * this.pstmt.executeUpdate(); } * * this.pstmt = this.conn.prepareStatement(STR); this.rs = this.pstmt.executeQuery(); * * int i = 0; * * while (this.rs.next()) { double valToTest = vals[i++]; * * assertEquals(this.rs.getDouble(1), valToTest, 0.001); * assertEquals(this.rs.getBigDecimal(1).doubleValue(), valToTest, 0.001); } * * this.pstmt = this.conn.prepareStatement(STR); * * this.stmt.executeUpdate(STR); * BigDecimals for (i = 0; i < vals.length; i++) { BigDecimal foo = new * BigDecimal(vals[i]); * * this.pstmt.setObject(1, foo, Types.DECIMAL, 2); this.pstmt.setInt(2, i); * this.pstmt.executeUpdate(); } * * this.pstmt = this.conn.prepareStatement(STR); this.rs = this.pstmt.executeQuery(); * * i = 0; * * while (this.rs.next()) { double valToTest = vals[i++]; * System.out.println(this.rs.getString(1)); * assertEquals(this.rs.getDouble(1), valToTest, 0.001); * assertEquals(this.rs.getBigDecimal(1).doubleValue(), valToTest, 0.001); } }
/** * Tests that max_rows and 'limit' don't cause exceptions to be thrown. * * @throws Exception * if the test fails. */
Tests that max_rows and 'limit' don't cause exceptions to be thrown
testLimitAndMaxRows
{ "repo_name": "hwroitzsch/BikersLifeSaver", "path": "src/AccidentSpotsFinder/lib/mysql-connector-java-5.0.8/src/testsuite/regression/StatementRegressionTest.java", "license": "mit", "size": 120742 }
[ "java.math.BigDecimal", "java.sql.Types" ]
import java.math.BigDecimal; import java.sql.Types;
import java.math.*; import java.sql.*;
[ "java.math", "java.sql" ]
java.math; java.sql;
1,045,793
int lookForSelectablePositionAfter(int current, int position, boolean lookDown) { final ListAdapter adapter = mAdapter; if (adapter == null || isInTouchMode()) { return INVALID_POSITION; } // First check after the starting position in the specified direction. final int after = lookForSelectablePosition(position, lookDown); if (after != INVALID_POSITION) { return after; } // Then check between the starting position and the current position. final int count = adapter.getCount(); current = MathUtils.constrain(current, -1, count - 1); if (lookDown) { position = Math.min(position - 1, count - 1); while ((position > current) && !adapter.isEnabled(position)) { position--; } if (position <= current) { return INVALID_POSITION; } } else { position = Math.max(0, position + 1); while ((position < current) && !adapter.isEnabled(position)) { position++; } if (position >= current) { return INVALID_POSITION; } } return position; }
int lookForSelectablePositionAfter(int current, int position, boolean lookDown) { final ListAdapter adapter = mAdapter; if (adapter == null isInTouchMode()) { return INVALID_POSITION; } final int after = lookForSelectablePosition(position, lookDown); if (after != INVALID_POSITION) { return after; } final int count = adapter.getCount(); current = MathUtils.constrain(current, -1, count - 1); if (lookDown) { position = Math.min(position - 1, count - 1); while ((position > current) && !adapter.isEnabled(position)) { position--; } if (position <= current) { return INVALID_POSITION; } } else { position = Math.max(0, position + 1); while ((position < current) && !adapter.isEnabled(position)) { position++; } if (position >= current) { return INVALID_POSITION; } } return position; }
/** * Find a position that can be selected (i.e., is not a separator). If there * are no selectable positions in the specified direction from the starting * position, searches in the opposite direction from the starting position * to the current position. * * @param current the current position * @param position the starting position * @param lookDown whether to look down for other positions * @return the next selectable position, or {@link #INVALID_POSITION} if * nothing can be found */
Find a position that can be selected (i.e., is not a separator). If there are no selectable positions in the specified direction from the starting position, searches in the opposite direction from the starting position to the current position
lookForSelectablePositionAfter
{ "repo_name": "Ant-Droid/android_frameworks_base_OLD", "path": "core/java/android/widget/ListView.java", "license": "apache-2.0", "size": 155482 }
[ "android.util.MathUtils" ]
import android.util.MathUtils;
import android.util.*;
[ "android.util" ]
android.util;
2,232,102
// Generated EMF boilerplate: idempotently creates, registers, initializes,
// and freezes the package meta-data, returning the singleton instance.
// Do not hand-edit substantive logic here — it is regenerated by EMF.
public static Org_sl_planet_bgfSimplifiedPackage init() {
    // Fast path: the package was already initialized and registered.
    if (isInited) return (Org_sl_planet_bgfSimplifiedPackage)EPackage.Registry.INSTANCE.getEPackage(Org_sl_planet_bgfSimplifiedPackage.eNS_URI);

    // Obtain or create and register package
    Org_sl_planet_bgfSimplifiedPackageImpl theOrg_sl_planet_bgfSimplifiedPackage = (Org_sl_planet_bgfSimplifiedPackageImpl)(EPackage.Registry.INSTANCE.get(eNS_URI) instanceof Org_sl_planet_bgfSimplifiedPackageImpl ? EPackage.Registry.INSTANCE.get(eNS_URI) : new Org_sl_planet_bgfSimplifiedPackageImpl());

    // Guard against re-entrant initialization while contents are being built.
    isInited = true;

    // Create package meta-data objects
    theOrg_sl_planet_bgfSimplifiedPackage.createPackageContents();

    // Initialize created meta-data
    theOrg_sl_planet_bgfSimplifiedPackage.initializePackageContents();

    // Mark meta-data to indicate it can't be changed
    theOrg_sl_planet_bgfSimplifiedPackage.freeze();

    // Update the registry and return the package
    EPackage.Registry.INSTANCE.put(Org_sl_planet_bgfSimplifiedPackage.eNS_URI, theOrg_sl_planet_bgfSimplifiedPackage);
    return theOrg_sl_planet_bgfSimplifiedPackage;
}
static Org_sl_planet_bgfSimplifiedPackage function() { if (isInited) return (Org_sl_planet_bgfSimplifiedPackage)EPackage.Registry.INSTANCE.getEPackage(Org_sl_planet_bgfSimplifiedPackage.eNS_URI); Org_sl_planet_bgfSimplifiedPackageImpl theOrg_sl_planet_bgfSimplifiedPackage = (Org_sl_planet_bgfSimplifiedPackageImpl)(EPackage.Registry.INSTANCE.get(eNS_URI) instanceof Org_sl_planet_bgfSimplifiedPackageImpl ? EPackage.Registry.INSTANCE.get(eNS_URI) : new Org_sl_planet_bgfSimplifiedPackageImpl()); isInited = true; theOrg_sl_planet_bgfSimplifiedPackage.createPackageContents(); theOrg_sl_planet_bgfSimplifiedPackage.initializePackageContents(); theOrg_sl_planet_bgfSimplifiedPackage.freeze(); EPackage.Registry.INSTANCE.put(Org_sl_planet_bgfSimplifiedPackage.eNS_URI, theOrg_sl_planet_bgfSimplifiedPackage); return theOrg_sl_planet_bgfSimplifiedPackage; }
/** * Creates, registers, and initializes the <b>Package</b> for this model, and for any others upon which it depends. * * <p>This method is used to initialize {@link Org_sl_planet_bgfSimplifiedPackage#eINSTANCE} when that field is accessed. * Clients should not invoke it directly. Instead, they should simply access that field to obtain the package. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #eNS_URI * @see #createPackageContents() * @see #initializePackageContents() * @generated */
Creates, registers, and initializes the Package for this model, and for any others upon which it depends. This method is used to initialize <code>Org_sl_planet_bgfSimplifiedPackage#eINSTANCE</code> when that field is accessed. Clients should not invoke it directly. Instead, they should simply access that field to obtain the package.
init
{ "repo_name": "patrickneubauer/XMLIntellEdit", "path": "xmlintelledit/xmltext/src/main/java/org_sl_planet_bgfSimplified/impl/Org_sl_planet_bgfSimplifiedPackageImpl.java", "license": "mit", "size": 30665 }
[ "org.eclipse.emf.ecore.EPackage" ]
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
633,015
/**
 * Sets the region of the index, rejecting null.
 *
 * @param region the region to associate with the index, not null
 * @return this builder, for method chaining, not null
 */
public Builder region(Country region) { JodaBeanUtils.notNull(region, "region"); this.region = region; return this; }
Builder function(Country region) { JodaBeanUtils.notNull(region, STR); this.region = region; return this; }
/** * Sets the region of the index. * @param region the new value, not null * @return this, for chaining, not null */
Sets the region of the index
region
{ "repo_name": "ChinaQuants/Strata", "path": "modules/basics/src/main/java/com/opengamma/strata/basics/index/ImmutablePriceIndex.java", "license": "apache-2.0", "size": 17411 }
[ "com.opengamma.strata.basics.location.Country", "org.joda.beans.JodaBeanUtils" ]
import com.opengamma.strata.basics.location.Country; import org.joda.beans.JodaBeanUtils;
import com.opengamma.strata.basics.location.*; import org.joda.beans.*;
[ "com.opengamma.strata", "org.joda.beans" ]
com.opengamma.strata; org.joda.beans;
1,358,574
/**
 * Runs file-based syntactic analysis on the given file using the ASN.1
 * analyzer.
 *
 * @param file the file to be parsed
 * @return the temporal data structure needed to insert the parsed module
 *         in the list of modules during the post-analysis step
 */
private TemporalParseData fileBasedASN1Analysis(final IFile file) {
    final ASN1Analyzer analyzer = new ASN1Analyzer();
    return fileBasedGeneralAnalysis(file, analyzer);
}
TemporalParseData function(final IFile file) { return fileBasedGeneralAnalysis(file, new ASN1Analyzer()); }
/** * Parses the provided file. * * @param file * the file to be parsed * * @return the temporal data structure needed to insert the parsed * module in the list of modules, in the post-analyzes step. * */
Parses the provided file
fileBasedASN1Analysis
{ "repo_name": "eroslevi/titan.EclipsePlug-ins", "path": "org.eclipse.titan.designer/src/org/eclipse/titan/designer/parsers/ProjectSourceSyntacticAnalyzer.java", "license": "epl-1.0", "size": 33730 }
[ "org.eclipse.core.resources.IFile", "org.eclipse.titan.designer.parsers.asn1parser.ASN1Analyzer" ]
import org.eclipse.core.resources.IFile; import org.eclipse.titan.designer.parsers.asn1parser.ASN1Analyzer;
import org.eclipse.core.resources.*; import org.eclipse.titan.designer.parsers.asn1parser.*;
[ "org.eclipse.core", "org.eclipse.titan" ]
org.eclipse.core; org.eclipse.titan;
752,839
/**
 * Exercises a request whose batch query-params map contains an illegal
 * (non-String) element and expects an {@code InvocationException}.
 */
@Test
public final void testQueryParamsBatchElementFail() {
    Robolectric.getFakeHttpLayer().interceptHttpRequests(false);

    final String subpath = "/queryparamsbatchelementfail";

    // Build a params map whose value type is illegal for batch query params.
    final Map<String, User> illegalParams = new HashMap<String, User>();
    illegalParams.put("subject", new User(1, "Kurt", "Wagner", 32, false));

    stubFor(get(urlMatching(subpath)).willReturn(aResponse().withStatus(200)));

    expectedException.expect(Is.isA(InvocationException.class));
    requestEndpoint.queryParamsBatchElementFail(illegalParams);
}
final void function() { Robolectric.getFakeHttpLayer().interceptHttpRequests(false); String subpath = STR; Map<String, User> params = new HashMap<String, User>(); params.put(STR, new User(1, "Kurt", STR , 32, false)); stubFor(get(urlMatching(subpath)) .willReturn(aResponse() .withStatus(200))); expectedException.expect(Is.isA(InvocationException.class)); requestEndpoint.queryParamsBatchElementFail(params); }
/** * <p>Test for a {@link Request} having illegal batch {@link QueryParams} elements.</p> * * @since 1.3.0 */
Test for a <code>Request</code> having illegal batch <code>QueryParams</code> elements
testQueryParamsBatchElementFail
{ "repo_name": "sahan/RoboZombie", "path": "robozombie/src/test/java/com/lonepulse/robozombie/processor/RequestParamEndpointTest.java", "license": "apache-2.0", "size": 21257 }
[ "com.github.tomakehurst.wiremock.client.WireMock", "com.lonepulse.robozombie.model.User", "com.lonepulse.robozombie.proxy.InvocationException", "java.util.HashMap", "java.util.Map", "org.hamcrest.core.Is", "org.robolectric.Robolectric" ]
import com.github.tomakehurst.wiremock.client.WireMock; import com.lonepulse.robozombie.model.User; import com.lonepulse.robozombie.proxy.InvocationException; import java.util.HashMap; import java.util.Map; import org.hamcrest.core.Is; import org.robolectric.Robolectric;
import com.github.tomakehurst.wiremock.client.*; import com.lonepulse.robozombie.model.*; import com.lonepulse.robozombie.proxy.*; import java.util.*; import org.hamcrest.core.*; import org.robolectric.*;
[ "com.github.tomakehurst", "com.lonepulse.robozombie", "java.util", "org.hamcrest.core", "org.robolectric" ]
com.github.tomakehurst; com.lonepulse.robozombie; java.util; org.hamcrest.core; org.robolectric;
334,639
public void gotElement(String contents) throws SAXException { if (container instanceof PGMLHandler) { Object o = getPGMLStackParser().getDiagram(); if (o instanceof IItemUID) { ItemUID uid = getItemUID(contents); if (uid != null) { ((IItemUID) o).setItemUID(uid); } } // No other uses of string in PGMLHandler return; } if (container instanceof FigGroupHandler) { Object o = ((FigGroupHandler) container).getFigGroup(); if (o instanceof IItemUID) { ItemUID uid = getItemUID(contents); if (uid != null) { ((IItemUID) o).setItemUID(uid); } } } if (container instanceof FigEdgeHandler) { Object o = ((FigEdgeHandler) container).getFigEdge(); if (o instanceof IItemUID) { ItemUID uid = getItemUID(contents); if (uid != null) { ((IItemUID) o).setItemUID(uid); } } } // Handle other uses of <private> contents super.gotElement(contents); }
void function(String contents) throws SAXException { if (container instanceof PGMLHandler) { Object o = getPGMLStackParser().getDiagram(); if (o instanceof IItemUID) { ItemUID uid = getItemUID(contents); if (uid != null) { ((IItemUID) o).setItemUID(uid); } } return; } if (container instanceof FigGroupHandler) { Object o = ((FigGroupHandler) container).getFigGroup(); if (o instanceof IItemUID) { ItemUID uid = getItemUID(contents); if (uid != null) { ((IItemUID) o).setItemUID(uid); } } } if (container instanceof FigEdgeHandler) { Object o = ((FigEdgeHandler) container).getFigEdge(); if (o instanceof IItemUID) { ItemUID uid = getItemUID(contents); if (uid != null) { ((IItemUID) o).setItemUID(uid); } } } super.gotElement(contents); }
/** * If the containing object is a type for which the private element * might contain an ItemUID, extract the ItemUID if it exists and assign it * to the object. * * @param contents * @exception SAXException */
If the containing object is a type for which the private element might contain an ItemUID, extract the ItemUID if it exists and assign it to the object
gotElement
{ "repo_name": "ckaestne/LEADT", "path": "workspace/argouml_diagrams/argouml-app/src/org/argouml/persistence/PrivateHandler.java", "license": "gpl-3.0", "size": 17786 }
[ "org.argouml.util.IItemUID", "org.argouml.util.ItemUID", "org.tigris.gef.persistence.pgml.FigEdgeHandler", "org.tigris.gef.persistence.pgml.FigGroupHandler", "org.tigris.gef.persistence.pgml.PGMLHandler", "org.xml.sax.SAXException" ]
import org.argouml.util.IItemUID; import org.argouml.util.ItemUID; import org.tigris.gef.persistence.pgml.FigEdgeHandler; import org.tigris.gef.persistence.pgml.FigGroupHandler; import org.tigris.gef.persistence.pgml.PGMLHandler; import org.xml.sax.SAXException;
import org.argouml.util.*; import org.tigris.gef.persistence.pgml.*; import org.xml.sax.*;
[ "org.argouml.util", "org.tigris.gef", "org.xml.sax" ]
org.argouml.util; org.tigris.gef; org.xml.sax;
55,643
/**
 * Sets the data dictionary service used by this rule class.
 *
 * @param ddService the DataDictionaryService to set
 */
public final void setDdService(DataDictionaryService ddService) { this.ddService = ddService; }
final void function(DataDictionaryService ddService) { this.ddService = ddService; }
/** * Sets the ddService attribute value. * * @param ddService The ddService to set. */
Sets the ddService attribute value
setDdService
{ "repo_name": "jruchcolo/rice-cd", "path": "rice-middleware/kns/src/main/java/org/kuali/rice/kns/maintenance/rules/MaintenanceDocumentRuleBase.java", "license": "apache-2.0", "size": 69713 }
[ "org.kuali.rice.krad.service.DataDictionaryService" ]
import org.kuali.rice.krad.service.DataDictionaryService;
import org.kuali.rice.krad.service.*;
[ "org.kuali.rice" ]
org.kuali.rice;
2,709,779
/**
 * Returns the meta object for the containment reference
 * {@link org.xtext.example.delphi.delphi.programBlock#getUses <em>Uses</em>}.
 *
 * @return the meta object for the containment reference '<em>Uses</em>'
 * @see org.xtext.example.delphi.delphi.programBlock#getUses()
 */
EReference getprogramBlock_Uses();
EReference getprogramBlock_Uses();
/** * Returns the meta object for the containment reference '{@link org.xtext.example.delphi.delphi.programBlock#getUses <em>Uses</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the containment reference '<em>Uses</em>'. * @see org.xtext.example.delphi.delphi.programBlock#getUses() * @see #getprogramBlock() * @generated */
Returns the meta object for the containment reference '<code>org.xtext.example.delphi.delphi.programBlock#getUses Uses</code>'.
getprogramBlock_Uses
{ "repo_name": "adolfosbh/cs2as", "path": "org.xtext.example.delphi/src-gen/org/xtext/example/delphi/delphi/DelphiPackage.java", "license": "epl-1.0", "size": 434880 }
[ "org.eclipse.emf.ecore.EReference" ]
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
416,096
public void processNestedTag(PutListTag nestedTag) throws JspException { // Get real value and check role // If role is set, add it in attribute definition if any. // If no attribute definition, create untyped one, and set role. Object attributeValue = nestedTag.getList(); if (nestedTag.getRole() != null) { AttributeDefinition def = new UntypedAttribute(attributeValue); def.setRole(nestedTag.getRole()); attributeValue = def; } // now add attribute to enclosing parent (i.e. : this object) addElement(attributeValue); }
void function(PutListTag nestedTag) throws JspException { Object attributeValue = nestedTag.getList(); if (nestedTag.getRole() != null) { AttributeDefinition def = new UntypedAttribute(attributeValue); def.setRole(nestedTag.getRole()); attributeValue = def; } addElement(attributeValue); }
/** * Process nested &lt;putList&gt; tag. * Called by nested &lt;putList&gt; tags. * Nested list is added to current list. * If role is defined, nested attribute is wrapped into an untyped definition * containing attribute value and role. */
Process nested &lt;putList&gt; tag. Called by nested &lt;putList&gt; tags. Nested list is added to current list. If role is defined, nested attribute is wrapped into an untyped definition containing attribute value and role
processNestedTag
{ "repo_name": "shuliangtao/struts-1.3.10", "path": "src/tiles/src/main/java/org/apache/struts/tiles/taglib/PutListTag.java", "license": "apache-2.0", "size": 5863 }
[ "javax.servlet.jsp.JspException", "org.apache.struts.tiles.AttributeDefinition", "org.apache.struts.tiles.UntypedAttribute" ]
import javax.servlet.jsp.JspException; import org.apache.struts.tiles.AttributeDefinition; import org.apache.struts.tiles.UntypedAttribute;
import javax.servlet.jsp.*; import org.apache.struts.tiles.*;
[ "javax.servlet", "org.apache.struts" ]
javax.servlet; org.apache.struts;
934,830
/**
 * Determines whether there is room to add a sale item represented by the
 * provided item stack in the amount specified.
 *
 * @param sellerId the id of the seller trying to add the item
 * @param itemStack the item stack representing the sale item
 * @param qty the amount that needs to fit
 */
boolean canAdd(UUID sellerId, ItemStack itemStack, int qty);
boolean canAdd(UUID sellerId, ItemStack itemStack, int qty);
/** * Determine if there is room to add a sale item represented by the * provided item stack in the amount specified. * * @param sellerId The id of the seller trying to add the item. * @param itemStack The item stack * @param qty The amount that needs to fit */
Determine if there is room to add a sale item represented by the provided item stack in the amount specified
canAdd
{ "repo_name": "JCThePants/Storefront", "path": "src/com/jcwhatever/storefront/stores/IStore.java", "license": "mit", "size": 7998 }
[ "org.bukkit.inventory.ItemStack" ]
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.*;
[ "org.bukkit.inventory" ]
org.bukkit.inventory;
733,094
/**
 * Handles the HTTP POST method by delegating to processRequest.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
@Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); }
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); }
/** * Handles the HTTP <code>POST</code> method. * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */
Handles the HTTP <code>POST</code> method
doPost
{ "repo_name": "DevsignStudio/e-tourism-servlet", "path": "src/java/user/UpdateAccount.java", "license": "apache-2.0", "size": 6065 }
[ "java.io.IOException", "javax.servlet.ServletException", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse" ]
import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse;
import java.io.*; import javax.servlet.*; import javax.servlet.http.*;
[ "java.io", "javax.servlet" ]
java.io; javax.servlet;
291,337