method: string (lengths 13 to 441k)
clean_method: string (lengths 7 to 313k)
doc: string (lengths 17 to 17.3k)
comment: string (lengths 3 to 1.42k)
method_name: string (lengths 1 to 273)
extra: dict
imports: list
imports_info: string (lengths 19 to 34.8k)
cluster_imports_info: string (lengths 15 to 3.66k)
libraries: list
libraries_info: string (lengths 6 to 661)
id: int64 (0 to 2.92M)
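To make the schema above concrete, the sketch below models one row as a plain Java class. It is an illustration only: the class name, field types, and comments are assumptions inferred from the listed columns and the sample rows that follow (for example, clean_method appears to be the method source with the declared name replaced by "function", most string literals replaced by the placeholder STR, and comments removed); none of this is part of the dataset itself.

import java.util.List;
import java.util.Map;

// Hypothetical container for one row of the dataset described above.
// Types are guessed from the declared column types: string columns -> String,
// list -> List<String>, dict -> Map<String, Object>, int64 -> long.
public class MethodRecord {
    public String method;              // full Java method source
    public String cleanMethod;         // method with name replaced by "function", most literals by STR, comments stripped
    public String doc;                 // original Javadoc block
    public String comment;             // one-line summary derived from the Javadoc
    public String methodName;          // declared method name
    public Map<String, Object> extra;  // repo_name, path, license, size
    public List<String> imports;       // fully qualified imports used by the method
    public String importsInfo;         // the same imports rendered as source text
    public String clusterImportsInfo;  // package-level (wildcard) imports
    public List<String> libraries;     // top-level package prefixes
    public String librariesInfo;       // package prefixes rendered as text
    public long id;                    // row identifier
}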
@Nonnull public WorkbookFunctionsReplaceRequest expand(@Nonnull final String value) { addExpandOption(value); return this; }
WorkbookFunctionsReplaceRequest function(@Nonnull final String value) { addExpandOption(value); return this; }
/** * Sets the expand clause for the request * * @param value the expand clause * @return the updated request */
Sets the expand clause for the request
expand
{ "repo_name": "microsoftgraph/msgraph-sdk-java", "path": "src/main/java/com/microsoft/graph/requests/WorkbookFunctionsReplaceRequest.java", "license": "mit", "size": 2983 }
[ "com.microsoft.graph.requests.WorkbookFunctionsReplaceRequest", "javax.annotation.Nonnull" ]
import com.microsoft.graph.requests.WorkbookFunctionsReplaceRequest; import javax.annotation.Nonnull;
import com.microsoft.graph.requests.*; import javax.annotation.*;
[ "com.microsoft.graph", "javax.annotation" ]
com.microsoft.graph; javax.annotation;
423,071
public List<SourceTriggerEvent> sourceTriggerEvents() { return this.sourceTriggerEvents; }
List<SourceTriggerEvent> function() { return this.sourceTriggerEvents; }
/** * Get the source event corresponding to the trigger. * * @return the sourceTriggerEvents value */
Get the source event corresponding to the trigger
sourceTriggerEvents
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/containerregistry/mgmt-v2019_06_01_preview/src/main/java/com/microsoft/azure/management/containerregistry/v2019_06_01_preview/SourceTriggerUpdateParameters.java", "license": "mit", "size": 3461 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,477,957
public Group createGroup(List<HueObject> lights) throws IOException, ApiException { requireAuthentication(); String body = gson.toJson(new SetAttributesRequest(lights)); Result result = http.post(getRelativeURL("groups"), body); handleErrors(result); List<SuccessResponse> entries = safeFromJson(result.getBody(), SuccessResponse.GSON_TYPE); SuccessResponse response = entries.get(0); Group group = new Group(); group.setName("Group"); group.setId(Util.quickMatch("^/groups/([0-9]+)$", (String) response.success.values().toArray()[0])); return group; }
Group function(List<HueObject> lights) throws IOException, ApiException { requireAuthentication(); String body = gson.toJson(new SetAttributesRequest(lights)); Result result = http.post(getRelativeURL(STR), body); handleErrors(result); List<SuccessResponse> entries = safeFromJson(result.getBody(), SuccessResponse.GSON_TYPE); SuccessResponse response = entries.get(0); Group group = new Group(); group.setName("Group"); group.setId(Util.quickMatch(STR, (String) response.success.values().toArray()[0])); return group; }
/** * Creates a new group and returns it. * Due to API limitations, the name of the returned object * will simply be "Group". The bridge will append a number to this * name if it's a duplicate. To get the final name, call getGroup * with the returned object. * * @param lights lights in group * @return object representing new group * @throws UnauthorizedException thrown if the user no longer exists * @throws GroupTableFullException thrown if the group limit has been reached */
Creates a new group and returns it. Due to API limitations, the name of the returned object will simply be "Group". The bridge will append a number to this name if it's a duplicate. To get the final name, call getGroup with the returned object
createGroup
{ "repo_name": "theoweiss/openhab2", "path": "bundles/org.openhab.binding.hue/src/main/java/org/openhab/binding/hue/internal/HueBridge.java", "license": "epl-1.0", "size": 37886 }
[ "java.io.IOException", "java.util.List", "org.openhab.binding.hue.internal.HttpClient", "org.openhab.binding.hue.internal.exceptions.ApiException" ]
import java.io.IOException; import java.util.List; import org.openhab.binding.hue.internal.HttpClient; import org.openhab.binding.hue.internal.exceptions.ApiException;
import java.io.*; import java.util.*; import org.openhab.binding.hue.internal.*; import org.openhab.binding.hue.internal.exceptions.*;
[ "java.io", "java.util", "org.openhab.binding" ]
java.io; java.util; org.openhab.binding;
912,245
private static final String getMonospacedFontFamily() { String family = RTextArea.getDefaultFont().getFamily(); if ("Monospaced".equals(family)) { family = "Courier"; } return family; }
static final String function() { String family = RTextArea.getDefaultFont().getFamily(); if (STR.equals(family)) { family = STR; } return family; }
/** * Returns a good "default" monospaced font to use when Java's logical * font "Monospaced" is found. * * @return The monospaced font family to use. */
Returns a good "default" monospaced font to use when Java's logical font "Monospaced" is found
getMonospacedFontFamily
{ "repo_name": "curiosag/ftc", "path": "RSyntaxTextArea/src/main/java/org/fife/ui/rsyntaxtextarea/RtfGenerator.java", "license": "gpl-3.0", "size": 13729 }
[ "org.fife.ui.rtextarea.RTextArea" ]
import org.fife.ui.rtextarea.RTextArea;
import org.fife.ui.rtextarea.*;
[ "org.fife.ui" ]
org.fife.ui;
514,186
@Override public int getRunLimit(final Attribute attribute) { ArgumentChecks.ensureNonNull("attribute", attribute); update(attribute, null); return limit; }
int function(final Attribute attribute) { ArgumentChecks.ensureNonNull(STR, attribute); update(attribute, null); return limit; }
/** * Returns the index of the first character following the run having the same "value" for * the given attribute than the current character. See this class javadoc for a note about * which attribute "values" are considered equal. */
Returns the index of the first character following the run having the same "value" for the given attribute than the current character. See this class javadoc for a note about which attribute "values" are considered equal
getRunLimit
{ "repo_name": "desruisseaux/sis", "path": "core/sis-utility/src/main/java/org/apache/sis/measure/FormattedCharacterIterator.java", "license": "apache-2.0", "size": 17519 }
[ "org.apache.sis.util.ArgumentChecks" ]
import org.apache.sis.util.ArgumentChecks;
import org.apache.sis.util.*;
[ "org.apache.sis" ]
org.apache.sis;
1,741,379
public FileServicePropertiesInner withShareDeleteRetentionPolicy(DeleteRetentionPolicy shareDeleteRetentionPolicy) { this.shareDeleteRetentionPolicy = shareDeleteRetentionPolicy; return this; }
FileServicePropertiesInner function(DeleteRetentionPolicy shareDeleteRetentionPolicy) { this.shareDeleteRetentionPolicy = shareDeleteRetentionPolicy; return this; }
/** * Set the file service properties for share soft delete. * * @param shareDeleteRetentionPolicy the shareDeleteRetentionPolicy value to set * @return the FileServicePropertiesInner object itself. */
Set the file service properties for share soft delete
withShareDeleteRetentionPolicy
{ "repo_name": "navalev/azure-sdk-for-java", "path": "sdk/storage/mgmt-v2019_06_01/src/main/java/com/microsoft/azure/management/storage/v2019_06_01/implementation/FileServicePropertiesInner.java", "license": "mit", "size": 3168 }
[ "com.microsoft.azure.management.storage.v2019_06_01.DeleteRetentionPolicy" ]
import com.microsoft.azure.management.storage.v2019_06_01.DeleteRetentionPolicy;
import com.microsoft.azure.management.storage.v2019_06_01.*;
[ "com.microsoft.azure" ]
com.microsoft.azure;
2,324,050
public Collection<XSAttributeUse> getAttributeUses() { // TODO: this is fairly inefficient List<XSAttributeUse> v = new ArrayList<XSAttributeUse>(); v.addAll(attributes.values()); for( XSAttGroupDecl agd : getAttGroups() ) v.addAll(agd.getAttributeUses()); return v; }
Collection<XSAttributeUse> function() { List<XSAttributeUse> v = new ArrayList<XSAttributeUse>(); v.addAll(attributes.values()); for( XSAttGroupDecl agd : getAttGroups() ) v.addAll(agd.getAttributeUses()); return v; }
/** * Returns the attribute uses by looking at attribute groups and etc. * Searching for the base type is done in {@link ComplexTypeImpl}. */
Returns the attribute uses by looking at attribute groups and etc. Searching for the base type is done in <code>ComplexTypeImpl</code>
getAttributeUses
{ "repo_name": "rokn/Count_Words_2015", "path": "testing/openjdk2/jaxws/src/share/jaxws_classes/com/sun/xml/internal/xsom/impl/AttributesHolder.java", "license": "mit", "size": 4795 }
[ "com.sun.xml.internal.xsom.XSAttGroupDecl", "com.sun.xml.internal.xsom.XSAttributeUse", "java.util.ArrayList", "java.util.Collection", "java.util.List" ]
import com.sun.xml.internal.xsom.XSAttGroupDecl; import com.sun.xml.internal.xsom.XSAttributeUse; import java.util.ArrayList; import java.util.Collection; import java.util.List;
import com.sun.xml.internal.xsom.*; import java.util.*;
[ "com.sun.xml", "java.util" ]
com.sun.xml; java.util;
1,215,567
private static JobVertexBackPressureInfo.VertexBackPressureLevel getBackPressureLevel( double backPressureRatio) { if (backPressureRatio <= 0.10) { return JobVertexBackPressureInfo.VertexBackPressureLevel.OK; } else if (backPressureRatio <= 0.5) { return JobVertexBackPressureInfo.VertexBackPressureLevel.LOW; } else { return JobVertexBackPressureInfo.VertexBackPressureLevel.HIGH; } }
static JobVertexBackPressureInfo.VertexBackPressureLevel function( double backPressureRatio) { if (backPressureRatio <= 0.10) { return JobVertexBackPressureInfo.VertexBackPressureLevel.OK; } else if (backPressureRatio <= 0.5) { return JobVertexBackPressureInfo.VertexBackPressureLevel.LOW; } else { return JobVertexBackPressureInfo.VertexBackPressureLevel.HIGH; } }
/** * Returns the back pressure level as a String. * * @param backPressureRatio Ratio of back pressures samples to total number of samples. * @return Back pressure level ('ok', 'low', or 'high') */
Returns the back pressure level as a String
getBackPressureLevel
{ "repo_name": "rmetzger/flink", "path": "flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobVertexBackPressureHandler.java", "license": "apache-2.0", "size": 7528 }
[ "org.apache.flink.runtime.rest.messages.JobVertexBackPressureInfo" ]
import org.apache.flink.runtime.rest.messages.JobVertexBackPressureInfo;
import org.apache.flink.runtime.rest.messages.*;
[ "org.apache.flink" ]
org.apache.flink;
2,353,089
@Test public void testParseNoSemicolon() { String testString = "foo=bar,Max-Age=2,Domain=baz.qux"; CookieModel expected = new CookieModel("foo", "bar,Max-Age=2,Domain=baz.qux"); CookieShim actual = CookieShim.parse(testString, expected.creationTime); assertAsExpected(testString, expected, actual); }
void function() { String testString = STR; CookieModel expected = new CookieModel("foo", STR); CookieShim actual = CookieShim.parse(testString, expected.creationTime); assertAsExpected(testString, expected, actual); }
/** * Tests parsing of a cookie without ';'s. */
Tests parsing of a cookie without ';'s
testParseNoSemicolon
{ "repo_name": "teamfx/openjfx-10-dev-rt", "path": "modules/javafx.web/src/test/java/test/com/sun/webkit/network/CookieTest.java", "license": "gpl-2.0", "size": 24899 }
[ "com.sun.webkit.network.CookieShim" ]
import com.sun.webkit.network.CookieShim;
import com.sun.webkit.network.*;
[ "com.sun.webkit" ]
com.sun.webkit;
407,536
public void setLowerBound(double min) { if (this.range.getUpperBound() > min) { setRange(new Range(min, this.range.getUpperBound())); } else { setRange(new Range(min, min + 1.0)); } }
void function(double min) { if (this.range.getUpperBound() > min) { setRange(new Range(min, this.range.getUpperBound())); } else { setRange(new Range(min, min + 1.0)); } }
/** * Sets the lower bound for the axis range. An {@link AxisChangeEvent} is * sent to all registered listeners. * * @param min the new minimum. * * @see #getLowerBound() */
Sets the lower bound for the axis range. An <code>AxisChangeEvent</code> is sent to all registered listeners
setLowerBound
{ "repo_name": "jfree/jfreechart", "path": "src/main/java/org/jfree/chart/axis/ValueAxis.java", "license": "lgpl-2.1", "size": 57099 }
[ "org.jfree.data.Range" ]
import org.jfree.data.Range;
import org.jfree.data.*;
[ "org.jfree.data" ]
org.jfree.data;
1,110,796
protected void putFieldError(String propertyName, String errorConstant, String[] parameters) { if (!errorAlreadyExists(MAINTAINABLE_ERROR_PREFIX + propertyName, errorConstant)) { GlobalVariables.getMessageMap() .putErrorWithoutFullErrorPath(MAINTAINABLE_ERROR_PREFIX + propertyName, errorConstant, parameters); } }
void function(String propertyName, String errorConstant, String[] parameters) { if (!errorAlreadyExists(MAINTAINABLE_ERROR_PREFIX + propertyName, errorConstant)) { GlobalVariables.getMessageMap() .putErrorWithoutFullErrorPath(MAINTAINABLE_ERROR_PREFIX + propertyName, errorConstant, parameters); } }
/** * This method is a convenience method to add a property-specific error to the global errors list. This method makes * sure that * the correct prefix is added to the property name so that it will display correctly on maintenance documents. * * @param propertyName - Property name of the element that is associated with the error. Used to mark the field as * errored in * the UI. * @param errorConstant - Error Constant that can be mapped to a resource for the actual text message. * @param parameters - Array of strings holding values that can be used in the message so that you can display * specific values * to the user. */
This method is a convenience method to add a property-specific error to the global errors list. This method makes sure that the correct prefix is added to the property name so that it will display correctly on maintenance documents
putFieldError
{ "repo_name": "jruchcolo/rice-cd", "path": "rice-middleware/kns/src/main/java/org/kuali/rice/kns/maintenance/rules/MaintenanceDocumentRuleBase.java", "license": "apache-2.0", "size": 69713 }
[ "org.kuali.rice.krad.util.GlobalVariables" ]
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.*;
[ "org.kuali.rice" ]
org.kuali.rice;
2,709,753
private void onAccessTokenResponse(Map<String, String> fragmentParameters) { if (fragmentParameters == null) throw new AssertionError(); OAuthSuccessfulResponse response; try { response = OAuthSuccessfulResponse.createFromFragment(fragmentParameters); } catch (LiveAuthException e) { this.onException(e); return; } this.onResponse(response); }
void function(Map<String, String> fragmentParameters) { if (fragmentParameters == null) throw new AssertionError(); OAuthSuccessfulResponse response; try { response = OAuthSuccessfulResponse.createFromFragment(fragmentParameters); } catch (LiveAuthException e) { this.onException(e); return; } this.onResponse(response); }
/** * Called when the response uri contains an access_token in the fragment. * * This method reads the response and calls back the LiveOAuthListener on the UI/main thread, * and then dismisses the dialog window. * * See <a href="http://tools.ietf.org/html/draft-ietf-oauth-v2-22#section-1.3.1">Section * 1.3.1</a> of the OAuth 2.0 spec. * * @param fragmentParameters in the uri */
Called when the response uri contains an access_token in the fragment. This method reads the response and calls back the LiveOAuthListener on the UI/main thread, and then dismisses the dialog window. See Section 1.3.1 of the OAuth 2.0 spec
onAccessTokenResponse
{ "repo_name": "MSOpenTech/msa-auth-for-android", "path": "src/main/java/com/microsoft/services/msa/AuthorizationRequest.java", "license": "mit", "size": 22226 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
1,569,027
private boolean isRequiredDependency(String dependencyLine) { boolean result = (StringUtils.countMatches(dependencyLine, ":") > 3) && !dependencyLine.endsWith("test") && !dependencyLine.endsWith("system"); return result; }
boolean function(String dependencyLine) { boolean result = (StringUtils.countMatches(dependencyLine, ":") > 3) && !dependencyLine.endsWith("test") && !dependencyLine.endsWith(STR); return result; }
/** * Checks whether dependency is correct and required. Required dependencies are: compile, runtime, provided Also * dependency should be fully described. It should contain gav, classifier and scope (totally 5 fields, thus 4 ":" * separators between fields) * * @param dependencyLine line from dependencies.txt file describing component dependencies * @return true, if dependency is required; false - otherwise */
Checks whether dependency is correct and required. Required dependencies are: compile, runtime, provided Also dependency should be fully described. It should contain gav, classifier and scope (totally 5 fields, thus 4 ":" separators between fields)
isRequiredDependency
{ "repo_name": "Talend/components", "path": "core/components-api/src/main/java/org/talend/components/api/component/runtime/DependenciesReader.java", "license": "apache-2.0", "size": 12484 }
[ "org.apache.commons.lang3.StringUtils" ]
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.*;
[ "org.apache.commons" ]
org.apache.commons;
1,198,708
public static ArrayList<File> getOlderFolders(long millisecondsSinceModification) { long currentTime = System.currentTimeMillis(); File s2CacheFolder = getSentinel2CacheFolder(); if (s2CacheFolder == null || !s2CacheFolder.isDirectory()) return null; File[] levelFolders = s2CacheFolder.listFiles(); if (levelFolders == null) return null; ArrayList<File> oldestFiles = new ArrayList<>(); for (File levelFolder : levelFolders) { if (levelFolder == null || !levelFolder.isDirectory()) continue; File[] versions = levelFolder.listFiles(); if (versions == null) continue; for (File version : versions) { if (version == null || !version.isDirectory()) continue; File[] products = version.listFiles(); if (products == null) continue; for (File product : products) { if (product == null) continue; if ((currentTime - getModificationTime(product)) > millisecondsSinceModification) { oldestFiles.add(product); } } } } return oldestFiles; }
static ArrayList<File> function(long millisecondsSinceModification) { long currentTime = System.currentTimeMillis(); File s2CacheFolder = getSentinel2CacheFolder(); if (s2CacheFolder == null || !s2CacheFolder.isDirectory()) return null; File[] levelFolders = s2CacheFolder.listFiles(); if (levelFolders == null) return null; ArrayList<File> oldestFiles = new ArrayList<>(); for (File levelFolder : levelFolders) { if (levelFolder == null || !levelFolder.isDirectory()) continue; File[] versions = levelFolder.listFiles(); if (versions == null) continue; for (File version : versions) { if (version == null || !version.isDirectory()) continue; File[] products = version.listFiles(); if (products == null) continue; for (File product : products) { if (product == null) continue; if ((currentTime - getModificationTime(product)) > millisecondsSinceModification) { oldestFiles.add(product); } } } } return oldestFiles; }
/** * Method for obtaining the products in cache that were cached more than x milliseconds ago. * * @param millisecondsSinceModification: the minimum time passed since the product folder was cached * @return null if cache is empty or an error occurs, in other case an arraylist containing the oldest products */
Method for obtaining the products in cache that were cached more than x milliseconds ago
getOlderFolders
{ "repo_name": "oscarpicas/s2tbx", "path": "s2tbx-cache/src/main/java/org/esa/s2tbx/dataio/cache/S2CacheUtils.java", "license": "gpl-3.0", "size": 10675 }
[ "java.io.File", "java.util.ArrayList" ]
import java.io.File; import java.util.ArrayList;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,836,218
public java.sql.Date toSqlDate() { String date = getDate(DMN_DATA); String ano = date.substring(6, date.length()); String mes = date.substring(3, 5); String dia = date.substring(0, 2); date = ano + "-" + mes + "-" + dia; return java.sql.Date.valueOf(date); }
java.sql.Date function() { String date = getDate(DMN_DATA); String ano = date.substring(6, date.length()); String mes = date.substring(3, 5); String dia = date.substring(0, 2); date = ano + "-" + mes + "-" + dia; return java.sql.Date.valueOf(date); }
/** * Retorna este objeto como sql.Date * @return este objeto como sql.Date */
Retorna este objeto como sql.Date
toSqlDate
{ "repo_name": "israeldb27/blg", "path": "src/main/java/com/busqueumlugar/util/DateUtil.java", "license": "apache-2.0", "size": 26855 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
2,398,076
int updateByPrimaryKeySelective(Borrower record);
int updateByPrimaryKeySelective(Borrower record);
/** * This method was generated by MyBatis Generator. * This method corresponds to the database table BORROWER * * @mbg.generated Mon May 01 16:23:46 CST 2017 */
This method was generated by MyBatis Generator. This method corresponds to the database table BORROWER
updateByPrimaryKeySelective
{ "repo_name": "zackwoo/Librarian", "path": "dal/src/main/java/com/github/librarian/model/mapper/BorrowerMapper.java", "license": "mit", "size": 3000 }
[ "com.github.librarian.model.entity.Borrower" ]
import com.github.librarian.model.entity.Borrower;
import com.github.librarian.model.entity.*;
[ "com.github.librarian" ]
com.github.librarian;
1,707,994
public boolean validate(final Object obj, final DAO dao, final String operation,ShipmentRequestUIObject uiObject) throws BizLogicException { boolean isValid = true; boolean isSpecimenVirtual = false; //StringBuffer errorMsg=new StringBuffer(); if (obj instanceof ShipmentRequest == false) { //throw new DAOException(ApplicationProperties.getValue("errors.invalid.object.passed")); logger.debug( "Invalid object passed." ); throw new BizLogicException( ErrorKey.getErrorKey( "errors.invalid.object.passed" ) , null , "object is not the instance of ShipMent request class" ); } final ShipmentRequest shipmentRequest = (ShipmentRequest) obj; if (shipmentRequest == null) { //throw new DAOException(ApplicationProperties.getValue("errors.domain.object.null")); logger.debug( "Shipment object is null." ); throw new BizLogicException( ErrorKey.getErrorKey( "errors.domain.object.null" ) , null , "shipment object is null" ); } try { if (!uiObject.isRequestProcessed() && ( shipmentRequest.getActivityStatus() != null && !shipmentRequest .getActivityStatus().equals( Constants.ACTIVITY_STATUS_REJECTED ) )) { boolean isEmptyContColl = false; if (shipmentRequest.getContainerCollection() != null && shipmentRequest.getContainerCollection().size() > 0) { final Iterator < StorageContainer > containerIterator = shipmentRequest .getContainerCollection().iterator(); while (containerIterator.hasNext()) { StorageContainer container = containerIterator.next(); if (container != null) { String containerName = ""; if (container.getName() != null && !container.getName().trim().equals( "" )) { containerName = container.getName(); container = this.getContainerByNameOrBarcode( container.getName(), dao, Constants.CONTAINER_PROPERTY_NAME ); } else if (container.getBarcode() != null && !container.getBarcode().trim().equals( "" )) { containerName = container.getBarcode(); container = this.getContainerByNameOrBarcode( container .getBarcode(), dao, Constants.CONTAINER_PROPERTY_BARCODE ); } if (container == null) { //No container with such name or barcode exists throw this.getBizLogicException( null, "shipment.NoContainerExists", containerName ); } else { isValid = this.containerBelongsToSite( container, shipmentRequest .getSenderSite().getId() ); if (isValid) { throw this.getBizLogicException( null, "shipment.container.RequestingSite", containerName ); } else if (container.getName().contains( Constants.IN_TRANSIT_CONTAINER_NAME_PREFIX )) { throw this.getBizLogicException( null, "shipment.containerInShipment", containerName ); } } } } } else { isEmptyContColl = true; } boolean isEmptySpecColl = false; boolean isInTransit = false; if (shipmentRequest.getSpecimenCollection() != null && shipmentRequest.getSpecimenCollection().size() > 0) { final Iterator < Specimen > specimenIterator = shipmentRequest .getSpecimenCollection().iterator(); while (specimenIterator.hasNext()) { Specimen specimen = specimenIterator.next(); if (specimen != null) { String specimenName = ""; if (specimen.getLabel() != null && !specimen.getLabel().trim().equals( "" )) { specimenName = specimen.getLabel(); specimen = this.getSpecimenByLabelOrBarcode( specimenName, dao, Constants.SPECIMEN_PROPERTY_LABEL ); } else if (specimen.getBarcode() != null && !specimen.getBarcode().trim().equals( "" )) { specimenName = specimen.getBarcode(); specimen = this.getSpecimenByLabelOrBarcode( specimenName, dao, Constants.SPECIMEN_PROPERTY_BARCODE ); } if (specimen == null) { //No container with such name or barcode exists throw this.getBizLogicException( null, 
"shipment.NoSpecimenExists", specimenName ); } else { isValid = this.specimenBelongsToSite( specimen, shipmentRequest .getSenderSite().getId() ); isInTransit = this.specimenBelongsToInTransitSite( specimen ); isSpecimenVirtual = this.specimenBelongsToVirtualSite( specimen ); if (isValid && !isInTransit) { throw this.getBizLogicException( null, "shipment.specimenInRequestingSite", specimenName ); } else if (isInTransit) { throw this.getBizLogicException( null, "shipment.specimenInShipment", specimenName ); } else if (isSpecimenVirtual) { throw this.getBizLogicException( null, "shipment.virtual.specimen", specimenName ); } } } } } else { isEmptySpecColl = true; } if (isEmptyContColl && isEmptySpecColl) { throw this.getBizLogicException( null, "shipment.noSpecimenInRequest", null ); } } } catch (final DAOException ex) { ShipmentRequestBizLogic.logger.error( ex.getMessage(), ex ); //ex.printStackTrace(); //throw new BizLogicException(ErrorKey.getErrorKey("dao.error"),ex,errorMsg.toString()); throw this.getBizLogicException( ex, ex.getErrorKeyName(), ex.getMsgValues() ); } return true; }
boolean function(final Object obj, final DAO dao, final String operation,ShipmentRequestUIObject uiObject) throws BizLogicException { boolean isValid = true; boolean isSpecimenVirtual = false; if (obj instanceof ShipmentRequest == false) { logger.debug( STR ); throw new BizLogicException( ErrorKey.getErrorKey( STR ) , null , STR ); } final ShipmentRequest shipmentRequest = (ShipmentRequest) obj; if (shipmentRequest == null) { logger.debug( STR ); throw new BizLogicException( ErrorKey.getErrorKey( STR ) , null , STR ); } try { if (!uiObject.isRequestProcessed() && ( shipmentRequest.getActivityStatus() != null && !shipmentRequest .getActivityStatus().equals( Constants.ACTIVITY_STATUS_REJECTED ) )) { boolean isEmptyContColl = false; if (shipmentRequest.getContainerCollection() != null && shipmentRequest.getContainerCollection().size() > 0) { final Iterator < StorageContainer > containerIterator = shipmentRequest .getContainerCollection().iterator(); while (containerIterator.hasNext()) { StorageContainer container = containerIterator.next(); if (container != null) { String containerName = STR; if (container.getName() != null && !container.getName().trim().equals( STR )) { containerName = container.getName(); container = this.getContainerByNameOrBarcode( container.getName(), dao, Constants.CONTAINER_PROPERTY_NAME ); } else if (container.getBarcode() != null && !container.getBarcode().trim().equals( STR )) { containerName = container.getBarcode(); container = this.getContainerByNameOrBarcode( container .getBarcode(), dao, Constants.CONTAINER_PROPERTY_BARCODE ); } if (container == null) { throw this.getBizLogicException( null, STR, containerName ); } else { isValid = this.containerBelongsToSite( container, shipmentRequest .getSenderSite().getId() ); if (isValid) { throw this.getBizLogicException( null, STR, containerName ); } else if (container.getName().contains( Constants.IN_TRANSIT_CONTAINER_NAME_PREFIX )) { throw this.getBizLogicException( null, STR, containerName ); } } } } } else { isEmptyContColl = true; } boolean isEmptySpecColl = false; boolean isInTransit = false; if (shipmentRequest.getSpecimenCollection() != null && shipmentRequest.getSpecimenCollection().size() > 0) { final Iterator < Specimen > specimenIterator = shipmentRequest .getSpecimenCollection().iterator(); while (specimenIterator.hasNext()) { Specimen specimen = specimenIterator.next(); if (specimen != null) { String specimenName = STR; if (specimen.getLabel() != null && !specimen.getLabel().trim().equals( STR )) { specimenName = specimen.getLabel(); specimen = this.getSpecimenByLabelOrBarcode( specimenName, dao, Constants.SPECIMEN_PROPERTY_LABEL ); } else if (specimen.getBarcode() != null && !specimen.getBarcode().trim().equals( STR )) { specimenName = specimen.getBarcode(); specimen = this.getSpecimenByLabelOrBarcode( specimenName, dao, Constants.SPECIMEN_PROPERTY_BARCODE ); } if (specimen == null) { throw this.getBizLogicException( null, STR, specimenName ); } else { isValid = this.specimenBelongsToSite( specimen, shipmentRequest .getSenderSite().getId() ); isInTransit = this.specimenBelongsToInTransitSite( specimen ); isSpecimenVirtual = this.specimenBelongsToVirtualSite( specimen ); if (isValid && !isInTransit) { throw this.getBizLogicException( null, STR, specimenName ); } else if (isInTransit) { throw this.getBizLogicException( null, STR, specimenName ); } else if (isSpecimenVirtual) { throw this.getBizLogicException( null, STR, specimenName ); } } } } } else { isEmptySpecColl = true; } if (isEmptyContColl && isEmptySpecColl) { throw this.getBizLogicException( null, STR, null ); } } } catch (final DAOException ex) { ShipmentRequestBizLogic.logger.error( ex.getMessage(), ex ); throw this.getBizLogicException( ex, ex.getErrorKeyName(), ex.getMsgValues() ); } return true; }
/** * Overriding the parent class's method to validate the enumerated attribute values. * @param obj object to be validated. * @param dao object of DAO class. * @param operation string containing operation of request. * @return true or false based on validation results. * @throws BizLogicException if database operation fails. */
Overriding the parent class's method to validate the enumerated attribute values
validate
{ "repo_name": "NCIP/catissue-core", "path": "software/caTissue/modules/core/src/main/java/edu/wustl/catissuecore/bizlogic/shippingtracking/ShipmentRequestBizLogic.java", "license": "bsd-3-clause", "size": 36771 }
[ "edu.wustl.catissuecore.domain.StorageContainer", "edu.wustl.catissuecore.domain.shippingtracking.ShipmentRequest", "edu.wustl.catissuecore.uiobject.ShipmentRequestUIObject", "edu.wustl.catissuecore.util.shippingtracking.Constants", "edu.wustl.common.exception.BizLogicException", "edu.wustl.common.exception.ErrorKey", "edu.wustl.dao.exception.DAOException", "java.util.Iterator" ]
import edu.wustl.catissuecore.domain.StorageContainer; import edu.wustl.catissuecore.domain.shippingtracking.ShipmentRequest; import edu.wustl.catissuecore.uiobject.ShipmentRequestUIObject; import edu.wustl.catissuecore.util.shippingtracking.Constants; import edu.wustl.common.exception.BizLogicException; import edu.wustl.common.exception.ErrorKey; import edu.wustl.dao.exception.DAOException; import java.util.Iterator;
import edu.wustl.catissuecore.domain.*; import edu.wustl.catissuecore.domain.shippingtracking.*; import edu.wustl.catissuecore.uiobject.*; import edu.wustl.catissuecore.util.shippingtracking.*; import edu.wustl.common.exception.*; import edu.wustl.dao.exception.*; import java.util.*;
[ "edu.wustl.catissuecore", "edu.wustl.common", "edu.wustl.dao", "java.util" ]
edu.wustl.catissuecore; edu.wustl.common; edu.wustl.dao; java.util;
253,528
protected void timeoutLinks() { List<Link> eraseList = new ArrayList<Link>(); Long curTime = System.currentTimeMillis(); boolean linkChanged = false; // reentrant required here because deleteLink also write locks lock.writeLock().lock(); try { Iterator<Entry<Link, LinkInfo>> it = this.links.entrySet() .iterator(); while (it.hasNext()) { Entry<Link, LinkInfo> entry = it.next(); Link lt = entry.getKey(); LinkInfo info = entry.getValue(); // Timeout the unicast and multicast LLDP valid times // independently. if ((info.getUnicastValidTime() != null) && (info.getUnicastValidTime() + (this.LINK_TIMEOUT * 1000) < curTime)) { info.setUnicastValidTime(null); linkChanged = true; } if ((info.getMulticastValidTime() != null) && (info.getMulticastValidTime() + (this.LINK_TIMEOUT * 1000) < curTime)) { info.setMulticastValidTime(null); linkChanged = true; } // Add to the erase list only if the unicast // time is null. if (info.getUnicastValidTime() == null && info.getMulticastValidTime() == null) { eraseList.add(entry.getKey()); } else if (linkChanged) { updates.add(new LDUpdate(lt.getSrc(), lt.getSrcPort(), lt.getDst(), lt.getDstPort(), getLinkType(lt, info), UpdateOperation.LINK_UPDATED)); } } // if any link was deleted or any link was changed. if ((eraseList.size() > 0) || linkChanged) { deleteLinks(eraseList, "LLDP timeout"); } } finally { lock.writeLock().unlock(); } }
void function() { List<Link> eraseList = new ArrayList<Link>(); Long curTime = System.currentTimeMillis(); boolean linkChanged = false; lock.writeLock().lock(); try { Iterator<Entry<Link, LinkInfo>> it = this.links.entrySet() .iterator(); while (it.hasNext()) { Entry<Link, LinkInfo> entry = it.next(); Link lt = entry.getKey(); LinkInfo info = entry.getValue(); if ((info.getUnicastValidTime() != null) && (info.getUnicastValidTime() + (this.LINK_TIMEOUT * 1000) < curTime)) { info.setUnicastValidTime(null); linkChanged = true; } if ((info.getMulticastValidTime() != null) && (info.getMulticastValidTime() + (this.LINK_TIMEOUT * 1000) < curTime)) { info.setMulticastValidTime(null); linkChanged = true; } if (info.getUnicastValidTime() == null && info.getMulticastValidTime() == null) { eraseList.add(entry.getKey()); } else if (linkChanged) { updates.add(new LDUpdate(lt.getSrc(), lt.getSrcPort(), lt.getDst(), lt.getDstPort(), getLinkType(lt, info), UpdateOperation.LINK_UPDATED)); } } if ((eraseList.size() > 0) || linkChanged) { deleteLinks(eraseList, STR); } } finally { lock.writeLock().unlock(); } }
/** * Iterates through the list of links and deletes if the last discovery * message reception time exceeds timeout values. */
Iterates through the list of links and deletes if the last discovery message reception time exceeds timeout values
timeoutLinks
{ "repo_name": "drinkwithwater/floodlightplus", "path": "src/main/java/net/floodlightcontroller/linkdiscovery/internal/LinkDiscoveryManager.java", "license": "apache-2.0", "size": 89759 }
[ "java.util.ArrayList", "java.util.Iterator", "java.util.List", "java.util.Map", "net.floodlightcontroller.linkdiscovery.ILinkDiscovery", "net.floodlightcontroller.linkdiscovery.LinkInfo", "net.floodlightcontroller.routing.Link" ]
import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import net.floodlightcontroller.linkdiscovery.ILinkDiscovery; import net.floodlightcontroller.linkdiscovery.LinkInfo; import net.floodlightcontroller.routing.Link;
import java.util.*; import net.floodlightcontroller.linkdiscovery.*; import net.floodlightcontroller.routing.*;
[ "java.util", "net.floodlightcontroller.linkdiscovery", "net.floodlightcontroller.routing" ]
java.util; net.floodlightcontroller.linkdiscovery; net.floodlightcontroller.routing;
1,491,735
return (Integer) parameters.get( Names.choiceID ); }
return (Integer) parameters.get( Names.choiceID ); }
/** * Gets the application-scoped identifier that uniquely identifies this choice. * @return choiceID Min: 0 Max: 65535 */
Gets the application-scoped identifier that uniquely identifies this choice
getChoiceID
{ "repo_name": "Luxoft/SDLP2", "path": "SDL_Android/SmartDeviceLinkProxyAndroid/src/com/smartdevicelink/proxy/rpc/PerformInteractionResponse.java", "license": "lgpl-2.1", "size": 2627 }
[ "com.smartdevicelink.proxy.constants.Names" ]
import com.smartdevicelink.proxy.constants.Names;
import com.smartdevicelink.proxy.constants.*;
[ "com.smartdevicelink.proxy" ]
com.smartdevicelink.proxy;
175,824
public static long findFirstSyncPosition(RcFileDataSource dataSource, long offset, long length, long syncFirst, long syncSecond) throws IOException { requireNonNull(dataSource, "dataSource is null"); checkArgument(offset >= 0, "offset is negative"); checkArgument(length >= 1, "length must be at least 1"); checkArgument(offset + length <= dataSource.getSize(), "offset plus length is greater than data size"); // The full sync sequence is "0xFFFFFFFF syncFirst syncSecond". If // this sequence begins the file range, the start position is returned // even if the sequence finishes after length. // NOTE: this decision must agree with RcFileReader.nextBlock Slice sync = Slices.allocate(SIZE_OF_INT + SIZE_OF_LONG + SIZE_OF_LONG); sync.setInt(0, 0xFFFF_FFFF); sync.setLong(SIZE_OF_INT, syncFirst); sync.setLong(SIZE_OF_INT + SIZE_OF_LONG, syncSecond); // read 4 MB chunks at a time, but only skip ahead 4 MB - SYNC_SEQUENCE_LENGTH bytes // this causes a re-read of SYNC_SEQUENCE_LENGTH bytes each time, but is much simpler code byte[] buffer = new byte[toIntExact(min(1 << 22, length + (SYNC_SEQUENCE_LENGTH - 1)))]; Slice bufferSlice = Slices.wrappedBuffer(buffer); for (long position = 0; position < length; position += bufferSlice.length() - (SYNC_SEQUENCE_LENGTH - 1)) { // either fill the buffer entirely, or read enough to allow all bytes in offset + length to be a start sequence int bufferSize = toIntExact(min(buffer.length, length + (SYNC_SEQUENCE_LENGTH - 1) - position)); // don't read off the end of the file bufferSize = toIntExact(min(bufferSize, dataSource.getSize() - offset - position)); dataSource.readFully(offset + position, buffer, 0, bufferSize); // find the starting index position of the sync sequence int index = bufferSlice.indexOf(sync); if (index >= 0) { // If the starting position is before the end of the search region, return the // absolute start position of the sequence. if (position + index < length) { long startOfSyncSequence = offset + position + index; return startOfSyncSequence; } else { // Otherwise, this is not a match for this region // Note: this case isn't strictly needed as the loop will exit, but it is // simpler to explicitly call it out. return -1; } } } return -1; }
static long function(RcFileDataSource dataSource, long offset, long length, long syncFirst, long syncSecond) throws IOException { requireNonNull(dataSource, STR); checkArgument(offset >= 0, STR); checkArgument(length >= 1, STR); checkArgument(offset + length <= dataSource.getSize(), STR); Slice sync = Slices.allocate(SIZE_OF_INT + SIZE_OF_LONG + SIZE_OF_LONG); sync.setInt(0, 0xFFFF_FFFF); sync.setLong(SIZE_OF_INT, syncFirst); sync.setLong(SIZE_OF_INT + SIZE_OF_LONG, syncSecond); byte[] buffer = new byte[toIntExact(min(1 << 22, length + (SYNC_SEQUENCE_LENGTH - 1)))]; Slice bufferSlice = Slices.wrappedBuffer(buffer); for (long position = 0; position < length; position += bufferSlice.length() - (SYNC_SEQUENCE_LENGTH - 1)) { int bufferSize = toIntExact(min(buffer.length, length + (SYNC_SEQUENCE_LENGTH - 1) - position)); bufferSize = toIntExact(min(bufferSize, dataSource.getSize() - offset - position)); dataSource.readFully(offset + position, buffer, 0, bufferSize); int index = bufferSlice.indexOf(sync); if (index >= 0) { if (position + index < length) { long startOfSyncSequence = offset + position + index; return startOfSyncSequence; } else { return -1; } } } return -1; }
/** * Find the beginning of the first full sync sequence that starts within the specified range. */
Find the beginning of the first full sync sequence that starts within the specified range
findFirstSyncPosition
{ "repo_name": "jxiang/presto", "path": "presto-rcfile/src/main/java/com/facebook/presto/rcfile/RcFileDecoderUtils.java", "license": "apache-2.0", "size": 9230 }
[ "com.google.common.base.Preconditions", "io.airlift.slice.Slice", "io.airlift.slice.Slices", "java.io.IOException", "java.lang.Math", "java.util.Objects" ]
import com.google.common.base.Preconditions; import io.airlift.slice.Slice; import io.airlift.slice.Slices; import java.io.IOException; import java.lang.Math; import java.util.Objects;
import com.google.common.base.*; import io.airlift.slice.*; import java.io.*; import java.lang.*; import java.util.*;
[ "com.google.common", "io.airlift.slice", "java.io", "java.lang", "java.util" ]
com.google.common; io.airlift.slice; java.io; java.lang; java.util;
577,913
void setStream(PreparedStatement st, int fieldIndex, InputStream value, long length) throws SQLException;
void setStream(PreparedStatement st, int fieldIndex, InputStream value, long length) throws SQLException;
/** * Sets value as a stream * @param st prepared statement * @param fieldIndex field index * @param value value as a stream * @param length length of the stream * @throws SQLException if setting stream failed */
Sets value as a stream
setStream
{ "repo_name": "usgin/usgin-geoportal", "path": "src/com/esri/gpt/framework/sql/IClobMutator.java", "license": "apache-2.0", "size": 2546 }
[ "java.io.InputStream", "java.sql.PreparedStatement", "java.sql.SQLException" ]
import java.io.InputStream; import java.sql.PreparedStatement; import java.sql.SQLException;
import java.io.*; import java.sql.*;
[ "java.io", "java.sql" ]
java.io; java.sql;
1,427,909
Configuration setAcceptorConfigurations(Set<TransportConfiguration> infos);
Configuration setAcceptorConfigurations(Set<TransportConfiguration> infos);
/** * Sets the acceptors configured for this server. */
Sets the acceptors configured for this server
setAcceptorConfigurations
{ "repo_name": "tabish121/activemq-artemis", "path": "artemis-server/src/main/java/org/apache/activemq/artemis/core/config/Configuration.java", "license": "apache-2.0", "size": 46863 }
[ "java.util.Set", "org.apache.activemq.artemis.api.core.TransportConfiguration" ]
import java.util.Set; import org.apache.activemq.artemis.api.core.TransportConfiguration;
import java.util.*; import org.apache.activemq.artemis.api.core.*;
[ "java.util", "org.apache.activemq" ]
java.util; org.apache.activemq;
2,352,316
public static <SERVICE, RESPONSE extends PaxosResponse> PaxosResponsesWithRemote<SERVICE, RESPONSE> collectQuorumResponses( ImmutableList<SERVICE> remotes, Function<SERVICE, RESPONSE> request, int quorumSize, Map<? extends SERVICE, CheckedRejectionExecutorService> executors, Duration remoteRequestTimeout, boolean cancelRemainingCalls) { Preconditions.checkState( executors.keySet().equals(new HashSet<>(remotes)), "Each remote should have an executor."); return collectResponses( remotes, request, quorumSize, remoteRequestTimeout, quorumShortcutPredicate(quorumSize), cancelRemainingCalls, MultiplexingCompletionService.createFromCheckedExecutors(executors)); }
static <SERVICE, RESPONSE extends PaxosResponse> PaxosResponsesWithRemote<SERVICE, RESPONSE> function( ImmutableList<SERVICE> remotes, Function<SERVICE, RESPONSE> request, int quorumSize, Map<? extends SERVICE, CheckedRejectionExecutorService> executors, Duration remoteRequestTimeout, boolean cancelRemainingCalls) { Preconditions.checkState( executors.keySet().equals(new HashSet<>(remotes)), STR); return collectResponses( remotes, request, quorumSize, remoteRequestTimeout, quorumShortcutPredicate(quorumSize), cancelRemainingCalls, MultiplexingCompletionService.createFromCheckedExecutors(executors)); }
/** * Collects a list of responses from a quorum of remote services. * This method short-circuits if a quorum can no longer be obtained (if too many servers have sent nacks), and * cancels pending requests once a quorum has been obtained. * * @param remotes a list endpoints to make the remote call on * @param request the request to make on each of the remote endpoints * @param quorumSize number of acknowledge requests required to reach quorum * @param executors runs requests for a given remote on its own executor * @param remoteRequestTimeout timeout for the call * @param cancelRemainingCalls whether or not to cancel in progress calls after we've received enough responses * @return a list responses */
Collects a list of responses from a quorum of remote services. This method short-circuits if a quorum can no longer be obtained (if too many servers have sent nacks), and cancels pending requests once a quorum has been obtained
collectQuorumResponses
{ "repo_name": "palantir/atlasdb", "path": "leader-election-impl/src/main/java/com/palantir/paxos/PaxosQuorumChecker.java", "license": "apache-2.0", "size": 15082 }
[ "com.google.common.collect.ImmutableList", "com.palantir.common.concurrent.CheckedRejectionExecutorService", "com.palantir.common.concurrent.MultiplexingCompletionService", "com.palantir.logsafe.Preconditions", "java.time.Duration", "java.util.HashSet", "java.util.Map", "java.util.function.Function" ]
import com.google.common.collect.ImmutableList; import com.palantir.common.concurrent.CheckedRejectionExecutorService; import com.palantir.common.concurrent.MultiplexingCompletionService; import com.palantir.logsafe.Preconditions; import java.time.Duration; import java.util.HashSet; import java.util.Map; import java.util.function.Function;
import com.google.common.collect.*; import com.palantir.common.concurrent.*; import com.palantir.logsafe.*; import java.time.*; import java.util.*; import java.util.function.*;
[ "com.google.common", "com.palantir.common", "com.palantir.logsafe", "java.time", "java.util" ]
com.google.common; com.palantir.common; com.palantir.logsafe; java.time; java.util;
535,802
public GradebookUiSettings getUiSettings() { GradebookUiSettings settings = (GradebookUiSettings) Session.get().getAttribute("GBNG_UI_SETTINGS"); if(settings == null) { settings = new GradebookUiSettings(); settings.setCategoriesEnabled(businessService.categoriesAreEnabled()); } return settings; }
GradebookUiSettings function() { GradebookUiSettings settings = (GradebookUiSettings) Session.get().getAttribute(STR); if(settings == null) { settings = new GradebookUiSettings(); settings.setCategoriesEnabled(businessService.categoriesAreEnabled()); } return settings; }
/** * Getter for the GradebookUiSettings. Used to store a few UI related settings for the current session only. * * TODO move this to a helper */
Getter for the GradebookUiSettings. Used to store a few UI related settings for the current session only. TODO move this to a helper
getUiSettings
{ "repo_name": "steveswinsburg/gradebookNG", "path": "tool/src/java/org/sakaiproject/gradebookng/tool/pages/GradebookPage.java", "license": "apache-2.0", "size": 20946 }
[ "org.apache.wicket.Session", "org.sakaiproject.gradebookng.tool.model.GradebookUiSettings" ]
import org.apache.wicket.Session; import org.sakaiproject.gradebookng.tool.model.GradebookUiSettings;
import org.apache.wicket.*; import org.sakaiproject.gradebookng.tool.model.*;
[ "org.apache.wicket", "org.sakaiproject.gradebookng" ]
org.apache.wicket; org.sakaiproject.gradebookng;
1,233,671
public Path getCanonicalPath() throws RepositoryException;
Path function() throws RepositoryException;
/** * Returns the canonical path representation of this path. * <p> * If the path is relative or cannot be normalized a RepositoryException * is thrown. * * @return a canonical path representation of this path. * @throws RepositoryException if this path can not be canonicalized * (e.g. if it is relative). */
Returns the canonical path representation of this path. If the path is relative or cannot be normalized a RepositoryException is thrown
getCanonicalPath
{ "repo_name": "Kast0rTr0y/jackrabbit", "path": "jackrabbit-spi/src/main/java/org/apache/jackrabbit/spi/Path.java", "license": "apache-2.0", "size": 22910 }
[ "javax.jcr.RepositoryException" ]
import javax.jcr.RepositoryException;
import javax.jcr.*;
[ "javax.jcr" ]
javax.jcr;
1,031,901
private static boolean isSecurityException(IOException ioe) { return (ioe instanceof InvalidToken) || (ioe instanceof InvalidEncryptionKeyException) || (ioe instanceof InvalidBlockTokenException) || (ioe instanceof AccessControlException); }
static boolean function(IOException ioe) { return (ioe instanceof InvalidToken) || (ioe instanceof InvalidEncryptionKeyException) || (ioe instanceof InvalidBlockTokenException) || (ioe instanceof AccessControlException); }
/** * Determine if an exception is security-related. * * We need to handle these exceptions differently than other IOExceptions. * They don't indicate a communication problem. Instead, they mean that there * is some action the client needs to take, such as refetching block tokens, * renewing encryption keys, etc. * * @param ioe The exception * @return True only if the exception is security-related. */
Determine if an exception is security-related. We need to handle these exceptions differently than other IOExceptions. They don't indicate a communication problem. Instead, they mean that there is some action the client needs to take, such as refetching block tokens, renewing encryption keys, etc
isSecurityException
{ "repo_name": "Wajihulhassan/Hadoop-2.7.0", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderFactory.java", "license": "apache-2.0", "size": 30245 }
[ "java.io.IOException", "org.apache.hadoop.hdfs.protocol.datatransfer.InvalidEncryptionKeyException", "org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException", "org.apache.hadoop.security.AccessControlException", "org.apache.hadoop.security.token.SecretManager" ]
import java.io.IOException; import org.apache.hadoop.hdfs.protocol.datatransfer.InvalidEncryptionKeyException; import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.token.SecretManager;
import java.io.*; import org.apache.hadoop.hdfs.protocol.datatransfer.*; import org.apache.hadoop.hdfs.security.token.block.*; import org.apache.hadoop.security.*; import org.apache.hadoop.security.token.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
1,779,571
public static TransitiveOptionDetails forOptionsWithDefaults( Iterable<? extends OptionsBase> buildOptions, Map<String, Object> lateBoundDefaults) { ImmutableMap.Builder<String, OptionDetails> map = ImmutableMap.builder(); try { for (OptionsBase options : buildOptions) { for (Field field : options.getClass().getFields()) { if (field.isAnnotationPresent(Option.class)) { Option option = field.getAnnotation(Option.class); if (option.optionUsageRestrictions() == OptionUsageRestrictions.INTERNAL) { // ignore internal options continue; } Object value = field.get(options); if (value == null) { if (lateBoundDefaults.containsKey(option.name())) { value = lateBoundDefaults.get(option.name()); } else if (!option.defaultValue().equals("null")) { // See {@link Option#defaultValue} for an explanation of default "null" strings. value = option.defaultValue(); } } map.put(option.name(), new OptionDetails(options.getClass(), value, option.allowMultiple())); } } } } catch (IllegalAccessException e) { throw new IllegalStateException( "Unexpected illegal access trying to create this configuration's options map: ", e); } return new TransitiveOptionDetails(map.build()); } private static final class OptionDetails implements Serializable { private OptionDetails(Class<? extends OptionsBase> optionsClass, Object value, boolean allowsMultiple) { this.optionsClass = optionsClass; this.value = value; this.allowsMultiple = allowsMultiple; } private final Class<? extends OptionsBase> optionsClass; @Nullable private final Object value; private final boolean allowsMultiple; } private final ImmutableMap<String, OptionDetails> transitiveOptionsMap; private TransitiveOptionDetails(ImmutableMap<String, OptionDetails> transitiveOptionsMap) { this.transitiveOptionsMap = transitiveOptionsMap; }
static TransitiveOptionDetails function( Iterable<? extends OptionsBase> buildOptions, Map<String, Object> lateBoundDefaults) { ImmutableMap.Builder<String, OptionDetails> map = ImmutableMap.builder(); try { for (OptionsBase options : buildOptions) { for (Field field : options.getClass().getFields()) { if (field.isAnnotationPresent(Option.class)) { Option option = field.getAnnotation(Option.class); if (option.optionUsageRestrictions() == OptionUsageRestrictions.INTERNAL) { continue; } Object value = field.get(options); if (value == null) { if (lateBoundDefaults.containsKey(option.name())) { value = lateBoundDefaults.get(option.name()); } else if (!option.defaultValue().equals("null")) { value = option.defaultValue(); } } map.put(option.name(), new OptionDetails(options.getClass(), value, option.allowMultiple())); } } } } catch (IllegalAccessException e) { throw new IllegalStateException( STR, e); } return new TransitiveOptionDetails(map.build()); } private static final class OptionDetails implements Serializable { private OptionDetails(Class<? extends OptionsBase> optionsClass, Object value, boolean allowsMultiple) { this.optionsClass = optionsClass; this.value = value; this.allowsMultiple = allowsMultiple; } private final Class<? extends OptionsBase> optionsClass; @Nullable private final Object value; private final boolean allowsMultiple; } private final ImmutableMap<String, OptionDetails> transitiveOptionsMap; private TransitiveOptionDetails(ImmutableMap<String, OptionDetails> transitiveOptionsMap) { this.transitiveOptionsMap = transitiveOptionsMap; }
/** * Computes and returns the transitive optionName -> "option info" map for the given set of * options sets, using the given map as defaults for options which would otherwise be null. */
Computes and returns the transitive optionName -> "option info" map for the given set of options sets, using the given map as defaults for options which would otherwise be null
forOptionsWithDefaults
{ "repo_name": "juhalindfors/bazel-patches", "path": "src/main/java/com/google/devtools/build/lib/analysis/config/TransitiveOptionDetails.java", "license": "apache-2.0", "size": 5950 }
[ "com.google.common.collect.ImmutableMap", "com.google.devtools.common.options.Option", "com.google.devtools.common.options.OptionsBase", "com.google.devtools.common.options.OptionsParser", "java.io.Serializable", "java.lang.reflect.Field", "java.util.Map", "javax.annotation.Nullable" ]
import com.google.common.collect.ImmutableMap; import com.google.devtools.common.options.Option; import com.google.devtools.common.options.OptionsBase; import com.google.devtools.common.options.OptionsParser; import java.io.Serializable; import java.lang.reflect.Field; import java.util.Map; import javax.annotation.Nullable;
import com.google.common.collect.*; import com.google.devtools.common.options.*; import java.io.*; import java.lang.reflect.*; import java.util.*; import javax.annotation.*;
[ "com.google.common", "com.google.devtools", "java.io", "java.lang", "java.util", "javax.annotation" ]
com.google.common; com.google.devtools; java.io; java.lang; java.util; javax.annotation;
2,028,790
ServiceFuture<A> get200ModelA400ValidAsync(final ServiceCallback<A> serviceCallback);
ServiceFuture<A> get200ModelA400ValidAsync(final ServiceCallback<A> serviceCallback);
/** * Send a 200 response with payload {'statusCode': '400'}. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */
Send a 200 response with payload {'statusCode': '400'}
get200ModelA400ValidAsync
{ "repo_name": "lmazuel/autorest", "path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/http/MultipleResponses.java", "license": "mit", "size": 50475 }
[ "com.microsoft.rest.ServiceCallback", "com.microsoft.rest.ServiceFuture" ]
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
1,004,000
default Web3jEndpointProducerBuilder gasPrice(BigInteger gasPrice) { doSetProperty("gasPrice", gasPrice); return this; }
default Web3jEndpointProducerBuilder gasPrice(BigInteger gasPrice) { doSetProperty(STR, gasPrice); return this; }
/** * Gas price used for each paid gas. * * The option is a: <code>java.math.BigInteger</code> type. * * Group: producer */
Gas price used for each paid gas. The option is a: <code>java.math.BigInteger</code> type. Group: producer
gasPrice
{ "repo_name": "DariusX/camel", "path": "core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Web3jEndpointBuilderFactory.java", "license": "apache-2.0", "size": 51259 }
[ "java.math.BigInteger" ]
import java.math.BigInteger;
import java.math.*;
[ "java.math" ]
java.math;
2,234,878
@FxThread private @NotNull ColorPicker getColorPicker() { return notNull(colorPicker); }
@NotNull ColorPicker function() { return notNull(colorPicker); }
/** * Get the color picker. * * @return the color picker. */
Get the color picker
getColorPicker
{ "repo_name": "JavaSaBr/jME3-SpaceShift-Editor", "path": "src/main/java/com/ss/editor/ui/control/property/impl/ColorPropertyControl.java", "license": "apache-2.0", "size": 3037 }
[ "com.ss.rlib.common.util.ObjectUtils", "org.jetbrains.annotations.NotNull" ]
import com.ss.rlib.common.util.ObjectUtils; import org.jetbrains.annotations.NotNull;
import com.ss.rlib.common.util.*; import org.jetbrains.annotations.*;
[ "com.ss.rlib", "org.jetbrains.annotations" ]
com.ss.rlib; org.jetbrains.annotations;
1,356,599
public List<IpsecPolicy> vpnClientIpsecPolicies() { return this.vpnClientIpsecPolicies; }
List<IpsecPolicy> function() { return this.vpnClientIpsecPolicies; }
/** * Get vpnClientIpsecPolicies for virtual network gateway P2S client. * * @return the vpnClientIpsecPolicies value */
Get vpnClientIpsecPolicies for virtual network gateway P2S client
vpnClientIpsecPolicies
{ "repo_name": "navalev/azure-sdk-for-java", "path": "sdk/network/mgmt-v2019_07_01/src/main/java/com/microsoft/azure/management/network/v2019_07_01/VpnClientConfiguration.java", "license": "mit", "size": 9371 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,744,774
public void testSmallRandomStressTest() throws Exception { final int numTrials = 1; final int numArtifacts = 30; final int randomSeed = 42; StressTest test = new StressTest(numArtifacts, numTrials, randomSeed); test.runStressTest(); } private static enum BuildKind { Clean, Incremental, Nop } protected class StressTest { final int numArtifacts; final int numTrials; Random random; Artifact artifacts[]; public StressTest(int numArtifacts, int numTrials, int randomSeed) { this.numTrials = numTrials; this.numArtifacts = numArtifacts; this.random = new Random(randomSeed); }
void function() throws Exception { final int numTrials = 1; final int numArtifacts = 30; final int randomSeed = 42; StressTest test = new StressTest(numArtifacts, numTrials, randomSeed); test.runStressTest(); } private static enum BuildKind { Clean, Incremental, Nop } protected class StressTest { final int numArtifacts; final int numTrials; Random random; Artifact artifacts[]; public StressTest(int numArtifacts, int numTrials, int randomSeed) { this.numTrials = numTrials; this.numArtifacts = numArtifacts; this.random = new Random(randomSeed); }
/** * Test a randomly-generated complex dependency graph. */
Test a randomly-generated complex dependency graph
testSmallRandomStressTest
{ "repo_name": "kamalmarhubi/bazel", "path": "src/test/java/com/google/devtools/build/lib/skyframe/ParallelBuilderTest.java", "license": "apache-2.0", "size": 31396 }
[ "com.google.devtools.build.lib.actions.Artifact", "java.util.Random" ]
import com.google.devtools.build.lib.actions.Artifact; import java.util.Random;
import com.google.devtools.build.lib.actions.*; import java.util.*;
[ "com.google.devtools", "java.util" ]
com.google.devtools; java.util;
237,041
@Override ResultSet getTypeInfo() throws SQLException;
ResultSet getTypeInfo() throws SQLException;
/** * <strong>Drill</strong>: Currently, returns an empty (zero-row) result set. * (Note: Currently, result set might not have the expected columns.) */
Drill: Currently, returns an empty (zero-row) result set. (Note: Currently, result set might not have the expected columns.)
getTypeInfo
{ "repo_name": "cwestin/incubator-drill", "path": "exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillDatabaseMetaData.java", "license": "apache-2.0", "size": 15269 }
[ "java.sql.ResultSet", "java.sql.SQLException" ]
import java.sql.ResultSet; import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
344,890
public void sendError(int sc) throws IOException { doSessionUpdate(); super.sendError( sc ); }
void function(int sc) throws IOException { doSessionUpdate(); super.sendError( sc ); }
/** * Makes sure the session is updated before calling the * superclass <code>sendError()</code> */
Makes sure the session is updated before calling the superclass <code>sendError()</code>
sendError
{ "repo_name": "pentaho/pentaho-engineering-samples", "path": "Samples_for_Extending_Pentaho/Reference Implementations/Security/SAML 2.0/pentaho-saml/src/main/java/org/pentaho/platform/spring/security/saml/responsewrapper/SamlOnRedirectUpdateSessionResponseWrapper.java", "license": "apache-2.0", "size": 7514 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,754,833
public void setConnectivity(SimpleMatrix con){ this.triangles = con; this.numConnections = con.getRows(); }
void function(SimpleMatrix con){ this.triangles = con; this.numConnections = con.getRows(); }
/** * Sets the connectivity and number of connections. * @param con The connectivity. */
Sets the connectivity and number of connections
setConnectivity
{ "repo_name": "PhilippSchlieper/CONRAD", "path": "src/edu/stanford/rsl/conrad/geometry/shapes/mesh/Mesh.java", "license": "gpl-3.0", "size": 7448 }
[ "edu.stanford.rsl.conrad.numerics.SimpleMatrix" ]
import edu.stanford.rsl.conrad.numerics.SimpleMatrix;
import edu.stanford.rsl.conrad.numerics.*;
[ "edu.stanford.rsl" ]
edu.stanford.rsl;
2,338,987
public void rewind() throws IOException { seekTo(beginLocation); }
void function() throws IOException { seekTo(beginLocation); }
/** * Rewind to the first entry in the scanner. The entry returned by the * previous entry() call will be invalid. * * @throws IOException */
Rewind to the first entry in the scanner. The entry returned by the previous entry() call will be invalid
rewind
{ "repo_name": "koichi626/hadoop-gpu", "path": "hadoop-gpu-0.20.1/src/core/org/apache/hadoop/io/file/tfile/TFile.java", "license": "apache-2.0", "size": 74043 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
179,428
private String parseDestinationHeader(HttpServletRequest req, HttpServletResponse resp) throws IOException { String destinationPath = req.getHeader("Destination"); if (destinationPath == null) { resp.sendError(WebdavStatus.SC_BAD_REQUEST); return null; } // Remove url encoding from destination destinationPath = RequestUtil.URLDecode(destinationPath, "UTF8"); int protocolIndex = destinationPath.indexOf("://"); if (protocolIndex >= 0) { // if the Destination URL contains the protocol, we can safely // trim everything upto the first "/" character after "://" int firstSeparator = destinationPath.indexOf("/", protocolIndex + 4); if (firstSeparator < 0) { destinationPath = "/"; } else { destinationPath = destinationPath.substring(firstSeparator); } } else { String hostName = req.getServerName(); if ((hostName != null) && (destinationPath.startsWith(hostName))) { destinationPath = destinationPath.substring(hostName.length()); } int portIndex = destinationPath.indexOf(":"); if (portIndex >= 0) { destinationPath = destinationPath.substring(portIndex); } if (destinationPath.startsWith(":")) { int firstSeparator = destinationPath.indexOf("/"); if (firstSeparator < 0) { destinationPath = "/"; } else { destinationPath = destinationPath.substring(firstSeparator); } } } // Normalize destination path (remove '.' and' ..') destinationPath = normalize(destinationPath); String contextPath = req.getContextPath(); if ((contextPath != null) && (destinationPath.startsWith(contextPath))) { destinationPath = destinationPath.substring(contextPath.length()); } String pathInfo = req.getPathInfo(); if (pathInfo != null) { String servletPath = req.getServletPath(); if ((servletPath != null) && (destinationPath.startsWith(servletPath))) { destinationPath = destinationPath.substring(servletPath.length()); } } return destinationPath; }
String function(HttpServletRequest req, HttpServletResponse resp) throws IOException { String destinationPath = req.getHeader(STR); if (destinationPath == null) { resp.sendError(WebdavStatus.SC_BAD_REQUEST); return null; } destinationPath = RequestUtil.URLDecode(destinationPath, "UTF8"); int protocolIndex = destinationPath.indexOf("://"); if (protocolIndex >= 0) { int firstSeparator = destinationPath.indexOf("/", protocolIndex + 4); if (firstSeparator < 0) { destinationPath = "/"; } else { destinationPath = destinationPath.substring(firstSeparator); } } else { String hostName = req.getServerName(); if ((hostName != null) && (destinationPath.startsWith(hostName))) { destinationPath = destinationPath.substring(hostName.length()); } int portIndex = destinationPath.indexOf(":"); if (portIndex >= 0) { destinationPath = destinationPath.substring(portIndex); } if (destinationPath.startsWith(":")) { int firstSeparator = destinationPath.indexOf("/"); if (firstSeparator < 0) { destinationPath = "/"; } else { destinationPath = destinationPath.substring(firstSeparator); } } } destinationPath = normalize(destinationPath); String contextPath = req.getContextPath(); if ((contextPath != null) && (destinationPath.startsWith(contextPath))) { destinationPath = destinationPath.substring(contextPath.length()); } String pathInfo = req.getPathInfo(); if (pathInfo != null) { String servletPath = req.getServletPath(); if ((servletPath != null) && (destinationPath.startsWith(servletPath))) { destinationPath = destinationPath.substring(servletPath.length()); } } return destinationPath; }
/** * Parses and normalizes the destination header. * * @param req * Servlet request * @param resp * Servlet response * @return destinationPath * @throws IOException * if an error occurs while sending response */
Parses and normalizes the destination header
parseDestinationHeader
{ "repo_name": "Armatiek/xslweb", "path": "src/main/java/net/sf/webdav/methods/DoCopy.java", "license": "apache-2.0", "size": 15346 }
[ "java.io.IOException", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse", "net.sf.webdav.WebdavStatus", "net.sf.webdav.fromcatalina.RequestUtil" ]
import java.io.IOException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import net.sf.webdav.WebdavStatus; import net.sf.webdav.fromcatalina.RequestUtil;
import java.io.*; import javax.servlet.http.*; import net.sf.webdav.*; import net.sf.webdav.fromcatalina.*;
[ "java.io", "javax.servlet", "net.sf.webdav" ]
java.io; javax.servlet; net.sf.webdav;
2,800,810
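The destination-path handling in the record above is plain string manipulation; the following standalone sketch (hypothetical class and method names, plain JDK only, not the library's own API) illustrates just the scheme/authority-stripping step in isolation, using `protocolIndex + 3` rather than the original's `+ 4` offset.

```java
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;

public class DestinationPathSketch {
    // Strips an absolute-URI prefix ("scheme://host[:port]") so only the path component remains.
    static String stripSchemeAndAuthority(String destination) {
        int protocolIndex = destination.indexOf("://");
        if (protocolIndex < 0) {
            return destination; // already a relative path
        }
        // The first '/' after "://" marks the start of the path component.
        int firstSeparator = destination.indexOf('/', protocolIndex + 3);
        return firstSeparator < 0 ? "/" : destination.substring(firstSeparator);
    }

    public static void main(String[] args) throws Exception {
        String header = URLDecoder.decode("http://example.org:8080/dav/target%20file",
                StandardCharsets.UTF_8.name());
        System.out.println(stripSchemeAndAuthority(header)); // prints /dav/target file
    }
}
```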
@Override public void notifyChanged(Notification notification) { updateChildren(notification); super.notifyChanged(notification); }
void function(Notification notification) { updateChildren(notification); super.notifyChanged(notification); }
/** * This handles model notifications by calling {@link #updateChildren} to update any cached * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This handles model notifications by calling <code>#updateChildren</code> to update any cached children and by creating a viewer notification, which it passes to <code>#fireNotifyChanged</code>.
notifyChanged
{ "repo_name": "nwnpallewela/devstudio-tooling-esb", "path": "plugins/org.wso2.developerstudio.eclipse.gmf.esb.edit/src/org/wso2/developerstudio/eclipse/gmf/esb/provider/RouterMediatorInputConnectorItemProvider.java", "license": "apache-2.0", "size": 2899 }
[ "org.eclipse.emf.common.notify.Notification" ]
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
640,203
public void close() throws SQLException { if (triggerCallback != null) { triggerCallback.close(); } }
void function() throws SQLException { if (triggerCallback != null) { triggerCallback.close(); } }
/** * Close the trigger. */
Close the trigger
close
{ "repo_name": "titus08/frostwire-desktop", "path": "lib/jars-src/h2-1.3.164/org/h2/schema/TriggerObject.java", "license": "gpl-3.0", "size": 11490 }
[ "java.sql.SQLException" ]
import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
1,719,928
protected void closeMasters() { try { LOG.info("Closing all masters."); mRegistry.close(); LOG.info("Closed all masters."); } catch (IOException e) { throw new RuntimeException(e); } }
void function() { try { LOG.info(STR); mRegistry.close(); LOG.info(STR); } catch (IOException e) { throw new RuntimeException(e); } }
/** * Closes all masters, including block master, fileSystem master and additional masters. */
Closes all masters, including block master, fileSystem master and additional masters
closeMasters
{ "repo_name": "maobaolong/alluxio", "path": "core/server/master/src/main/java/alluxio/master/AlluxioMasterProcess.java", "license": "apache-2.0", "size": 15610 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
315,998
private void verifyCanConnect(DataSource ds) throws SQLException { for (int i = 0; i < USERS.length; i++) { Connection c = ds.getConnection(USERS[i][0], USERS[i][1]); c.close(); } }
void function(DataSource ds) throws SQLException { for (int i = 0; i < USERS.length; i++) { Connection c = ds.getConnection(USERS[i][0], USERS[i][1]); c.close(); } }
/** * Verify that all users specified in {@code USERS} can connect to the * database. * * @param ds a data source for connecting to the database * @throws SQLException if one of the users cannot connect to the database */
Verify that all users specified in USERS can connect to the database
verifyCanConnect
{ "repo_name": "apache/derby", "path": "java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/upgradeTests/Changes10_9.java", "license": "apache-2.0", "size": 40962 }
[ "java.sql.Connection", "java.sql.SQLException", "javax.sql.DataSource" ]
import java.sql.Connection; import java.sql.SQLException; import javax.sql.DataSource;
import java.sql.*; import javax.sql.*;
[ "java.sql", "javax.sql" ]
java.sql; javax.sql;
2,410,523
List<KeyValueScanner> getScanners( boolean cacheBlocks, boolean isGet, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, byte[] stopRow, long readPt ) throws IOException;
List<KeyValueScanner> getScanners( boolean cacheBlocks, boolean isGet, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, byte[] stopRow, long readPt ) throws IOException;
/** * Get all scanners with no filtering based on TTL (that happens further down * the line). * @param cacheBlocks * @param isGet * @param usePread * @param isCompaction * @param matcher * @param startRow * @param stopRow * @param readPt * @return all scanners for this store */
Get all scanners with no filtering based on TTL (that happens further down the line)
getScanners
{ "repo_name": "juwi/hbase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java", "license": "apache-2.0", "size": 13653 }
[ "java.io.IOException", "java.util.List" ]
import java.io.IOException; import java.util.List;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,697,647
@Override public void run() { try { DrawableFile drawableFile = DrawableFile.create(getFile(), true, false); getTaskDB().updateFile(drawableFile); } catch (NullPointerException ex) { // This is one of the places where we get many errors if the case is closed during processing. // We don't want to print out a ton of exceptions if this is the case. if (Case.isCaseOpen()) { Logger.getLogger(UpdateFileTask.class.getName()).log(Level.SEVERE, "Error in UpdateFile task"); //NON-NLS } } } } static private class RemoveFileTask extends FileTask { RemoveFileTask(AbstractFile f, DrawableDB taskDB) { super(f, taskDB); }
void function() { try { DrawableFile drawableFile = DrawableFile.create(getFile(), true, false); getTaskDB().updateFile(drawableFile); } catch (NullPointerException ex) { if (Case.isCaseOpen()) { Logger.getLogger(UpdateFileTask.class.getName()).log(Level.SEVERE, STR); } } } } static private class RemoveFileTask extends FileTask { RemoveFileTask(AbstractFile f, DrawableDB taskDB) { super(f, taskDB); }
/** * Update a file in the database */
Update a file in the database
run
{ "repo_name": "narfindustries/autopsy", "path": "ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java", "license": "apache-2.0", "size": 40596 }
[ "java.util.logging.Level", "org.sleuthkit.autopsy.casemodule.Case", "org.sleuthkit.autopsy.coreutils.Logger", "org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB", "org.sleuthkit.autopsy.imagegallery.datamodel.DrawableFile", "org.sleuthkit.datamodel.AbstractFile" ]
import java.util.logging.Level; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB; import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableFile; import org.sleuthkit.datamodel.AbstractFile;
import java.util.logging.*; import org.sleuthkit.autopsy.casemodule.*; import org.sleuthkit.autopsy.coreutils.*; import org.sleuthkit.autopsy.imagegallery.datamodel.*; import org.sleuthkit.datamodel.*;
[ "java.util", "org.sleuthkit.autopsy", "org.sleuthkit.datamodel" ]
java.util; org.sleuthkit.autopsy; org.sleuthkit.datamodel;
2,655,833
public void clearError() { stopped.set( false ); errors.set( 0 ); setFinished( false ); for ( StepMetaDataCombi combi : steps ) { StepInterface step = combi.step; for ( RowSet rowSet : step.getInputRowSets() ) { rowSet.clear(); } step.setStopped( false ); } }
void function() { stopped.set( false ); errors.set( 0 ); setFinished( false ); for ( StepMetaDataCombi combi : steps ) { StepInterface step = combi.step; for ( RowSet rowSet : step.getInputRowSets() ) { rowSet.clear(); } step.setStopped( false ); } }
/** * Clear the error in the transformation, clear all the rows from all the row sets, to make sure the transformation * can continue with other data. This is intended for use when running single threaded. */
Clear the error in the transformation, clear all the rows from all the row sets, to make sure the transformation can continue with other data. This is intended for use when running single threaded
clearError
{ "repo_name": "gretchiemoran/pentaho-kettle", "path": "engine/src/org/pentaho/di/trans/Trans.java", "license": "apache-2.0", "size": 194677 }
[ "org.pentaho.di.core.RowSet", "org.pentaho.di.trans.step.StepInterface", "org.pentaho.di.trans.step.StepMetaDataCombi" ]
import org.pentaho.di.core.RowSet; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMetaDataCombi;
import org.pentaho.di.core.*; import org.pentaho.di.trans.step.*;
[ "org.pentaho.di" ]
org.pentaho.di;
602,877
public Pair<Integer, Integer> getAlterStatus(final byte[] tableName) throws IOException { return getAlterStatus(TableName.valueOf(tableName)); }
Pair<Integer, Integer> function(final byte[] tableName) throws IOException { return getAlterStatus(TableName.valueOf(tableName)); }
/** * Get the status of alter command - indicates how many regions have received * the updated schema Asynchronous operation. * * @param tableName * name of the table to get the status of * @return Pair indicating the number of regions updated Pair.getFirst() is the * regions that are yet to be updated Pair.getSecond() is the total number * of regions of the table * @throws IOException * if a remote or network exception occurs */
Get the status of alter command - indicates how many regions have received the updated schema. Asynchronous operation
getAlterStatus
{ "repo_name": "cloud-software-foundation/c5", "path": "hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java", "license": "apache-2.0", "size": 128826 }
[ "java.io.IOException", "org.apache.hadoop.hbase.TableName", "org.apache.hadoop.hbase.util.Pair" ]
import java.io.IOException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.util.Pair;
import java.io.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.util.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
364,799
@Override public void removeContext(OwnIdentity ownIdentity, String context) { RemoveContextJob removeContextJob = new RemoveContextJob(ownIdentity, context); if (!updateJobs.contains(removeContextJob)) { logger.log(Level.FINER, "Adding Context Job: " + removeContextJob); try { updateJobs.put(removeContextJob); } catch (InterruptedException ie1) { } } }
void function(OwnIdentity ownIdentity, String context) { RemoveContextJob removeContextJob = new RemoveContextJob(ownIdentity, context); if (!updateJobs.contains(removeContextJob)) { logger.log(Level.FINER, STR + removeContextJob); try { updateJobs.put(removeContextJob); } catch (InterruptedException ie1) { } } }
/** * Removes the given context from the given own identity. * * @param ownIdentity * The own identity to remove the context from * @param context * The context to remove */
Removes the given context from the given own identity
removeContext
{ "repo_name": "ArneBab/Sone", "path": "src/main/java/net/pterodactylus/sone/core/WebOfTrustUpdaterImpl.java", "license": "gpl-3.0", "size": 16838 }
[ "java.util.logging.Level", "net.pterodactylus.sone.freenet.wot.OwnIdentity" ]
import java.util.logging.Level; import net.pterodactylus.sone.freenet.wot.OwnIdentity;
import java.util.logging.*; import net.pterodactylus.sone.freenet.wot.*;
[ "java.util", "net.pterodactylus.sone" ]
java.util; net.pterodactylus.sone;
1,753,385
public static Builder status(Status status) { return new Builder(status); } public static class Builder { private Status status; private Map<String, Object> details; public Builder() { this.status = Status.UNKNOWN; this.details = new LinkedHashMap<String, Object>(); } public Builder(Status status) { Assert.notNull(status, "Status must not be null"); this.status = status; this.details = new LinkedHashMap<String, Object>(); } public Builder(Status status, Map<String, ?> details) { Assert.notNull(status, "Status must not be null"); Assert.notNull(details, "Details must not be null"); this.status = status; this.details = new LinkedHashMap<String, Object>(details); }
static Builder function(Status status) { return new Builder(status); } static class Builder { private Status function; Map<String, Object> details; public Builder() { this.status = Status.UNKNOWN; this.details = new LinkedHashMap<String, Object>(); } public Builder(Status function) { Assert.notNull(status, STR); this.status = status; this.details = new LinkedHashMap<String, Object>(); } Builder(Status function, Map<String, ?> details) { Assert.notNull(status, STR); Assert.notNull(details, STR); this.status = status; this.details = new LinkedHashMap<String, Object>(details); }
/** * Create a new {@link Builder} instance with a specific {@link Status}. * @param status the status * @return a new {@link Builder} instance */
Create a new <code>Builder</code> instance with a specific <code>Status</code>
status
{ "repo_name": "izeye/spring-boot", "path": "spring-boot-actuator/src/main/java/org/springframework/boot/actuate/health/Health.java", "license": "apache-2.0", "size": 8055 }
[ "java.util.LinkedHashMap", "java.util.Map", "org.springframework.util.Assert" ]
import java.util.LinkedHashMap; import java.util.Map; import org.springframework.util.Assert;
import java.util.*; import org.springframework.util.*;
[ "java.util", "org.springframework.util" ]
java.util; org.springframework.util;
1,024,461
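As a minimal usage sketch for the builder record above: it assumes the `withDetail(String, Object)` and `build()` methods of the public Spring Boot actuator `Health.Builder` API, which are not shown in the record itself.

```java
import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.Status;

public class HealthBuilderSketch {
    public static void main(String[] args) {
        // Start a builder with an explicit Status, attach one detail, then build the Health value.
        Health health = Health.status(Status.UP)
                .withDetail("database", "reachable") // withDetail/build assumed from the public actuator API
                .build();
        System.out.println(health.getStatus()); // UP
    }
}
```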
protected void buildAxis() { infovis.Visualization vis = (infovis.Visualization) backingVis.getUnderlyingImpl(infovis.Visualization.class); this.scrollPanel = new JScrollPane(new VisualizationPanel(vis)); InteractorFactory.installInteractor(vis); if (vis.getRulerTable() != null) { DefaultAxisVisualization column = new DefaultAxisVisualization( vis, Orientable.ORIENTATION_NORTH); InteractorFactory.installInteractor(column); VisualLabel vl = VisualLabel.get(column); if (vl instanceof DefaultVisualLabel) { DefaultVisualLabel dvl = (DefaultVisualLabel) vl; dvl.setOrientation(Orientable.ORIENTATION_NORTH); dvl.setOutlined(false); } VisualizationPanel vp1 = new VisualizationPanel(column); vp1.setUsingGradient(false); scrollPanel.setColumnHeaderView(vp1); DefaultAxisVisualization row = new DefaultAxisVisualization( vis, Orientable.ORIENTATION_EAST); InteractorFactory.installInteractor(row); vl = VisualLabel.get(row); if (vl instanceof DefaultVisualLabel) { DefaultVisualLabel dvl = (DefaultVisualLabel) vl; dvl.setOrientation(Orientable.ORIENTATION_EAST); dvl.setOutlined(false); } VisualizationPanel vp2 = new VisualizationPanel(row); vp2.setUsingGradient(false); scrollPanel.setRowHeaderView(vp2); } }
void function() { infovis.Visualization vis = (infovis.Visualization) backingVis.getUnderlyingImpl(infovis.Visualization.class); this.scrollPanel = new JScrollPane(new VisualizationPanel(vis)); InteractorFactory.installInteractor(vis); if (vis.getRulerTable() != null) { DefaultAxisVisualization column = new DefaultAxisVisualization( vis, Orientable.ORIENTATION_NORTH); InteractorFactory.installInteractor(column); VisualLabel vl = VisualLabel.get(column); if (vl instanceof DefaultVisualLabel) { DefaultVisualLabel dvl = (DefaultVisualLabel) vl; dvl.setOrientation(Orientable.ORIENTATION_NORTH); dvl.setOutlined(false); } VisualizationPanel vp1 = new VisualizationPanel(column); vp1.setUsingGradient(false); scrollPanel.setColumnHeaderView(vp1); DefaultAxisVisualization row = new DefaultAxisVisualization( vis, Orientable.ORIENTATION_EAST); InteractorFactory.installInteractor(row); vl = VisualLabel.get(row); if (vl instanceof DefaultVisualLabel) { DefaultVisualLabel dvl = (DefaultVisualLabel) vl; dvl.setOrientation(Orientable.ORIENTATION_EAST); dvl.setOutlined(false); } VisualizationPanel vp2 = new VisualizationPanel(row); vp2.setUsingGradient(false); scrollPanel.setRowHeaderView(vp2); } }
/** * Build axis for the scatterplot. */
Build axis for the scatterplot
buildAxis
{ "repo_name": "jdfekete/obvious", "path": "obvious-ivtk/src/main/java/obvious/ivtk/view/util/IvtkScatterPlotView.java", "license": "bsd-3-clause", "size": 3225 }
[ "javax.swing.JScrollPane" ]
import javax.swing.JScrollPane;
import javax.swing.*;
[ "javax.swing" ]
javax.swing;
2,444,665
public Path getWorkPath() { if (_workPath != null) return _workPath; else return WorkDir.getLocalWorkDir(_loader); }
Path function() { if (_workPath != null) return _workPath; else return WorkDir.getLocalWorkDir(_loader); }
/** * Gets the work path. */
Gets the work path
getWorkPath
{ "repo_name": "christianchristensen/resin", "path": "modules/kernel/src/com/caucho/loader/enhancer/EnhancerManager.java", "license": "gpl-2.0", "size": 9443 }
[ "com.caucho.java.WorkDir", "com.caucho.vfs.Path" ]
import com.caucho.java.WorkDir; import com.caucho.vfs.Path;
import com.caucho.java.*; import com.caucho.vfs.*;
[ "com.caucho.java", "com.caucho.vfs" ]
com.caucho.java; com.caucho.vfs;
2,563,562
int deleteByExample(ActExample example);
int deleteByExample(ActExample example);
/** * This method was generated by MyBatis Generator. This method corresponds to the database table act * @mbggenerated Tue Apr 01 20:15:29 ICT 2014 */
This method was generated by MyBatis Generator. This method corresponds to the database table act
deleteByExample
{ "repo_name": "punyararj/his-interface-core", "path": "Server/src/com/healthcare/db/client/ActMapper.java", "license": "gpl-3.0", "size": 2358 }
[ "com.healthcare.db.model.ActExample" ]
import com.healthcare.db.model.ActExample;
import com.healthcare.db.model.*;
[ "com.healthcare.db" ]
com.healthcare.db;
73,043
public void renderPageImages(File pdfFile, File outputDir) throws Exception { this.imagifier.generateImages(pdfFile, outputDir); }
void function(File pdfFile, File outputDir) throws Exception { this.imagifier.generateImages(pdfFile, outputDir); }
/** * Render images of the pages of the PDF file * * @param pdfFile * @param outputDir * @throws Exception */
Render images of the pages of the PDF file
renderPageImages
{ "repo_name": "benmccann/lapdftext", "path": "src/main/java/edu/isi/bmkeg/lapdf/controller/LapdfEngine.java", "license": "gpl-3.0", "size": 27974 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
2,173,735
public TimerUpdate build() { return new TimerUpdate( key, ImmutableSet.copyOf(completedTimers), ImmutableSet.copyOf(setTimers), ImmutableSet.copyOf(deletedTimers)); } } private TimerUpdate( StructuralKey<?> key, Iterable<? extends TimerData> completedTimers, Iterable<? extends TimerData> setTimers, Iterable<? extends TimerData> deletedTimers) { this.key = key; this.completedTimers = completedTimers; this.setTimers = setTimers; this.deletedTimers = deletedTimers; }
TimerUpdate function() { return new TimerUpdate( key, ImmutableSet.copyOf(completedTimers), ImmutableSet.copyOf(setTimers), ImmutableSet.copyOf(deletedTimers)); } } private TimerUpdate( StructuralKey<?> key, Iterable<? extends TimerData> completedTimers, Iterable<? extends TimerData> setTimers, Iterable<? extends TimerData> deletedTimers) { this.key = key; this.completedTimers = completedTimers; this.setTimers = setTimers; this.deletedTimers = deletedTimers; }
/** * Returns a new {@link TimerUpdate} with the most recently set completedTimers, setTimers, * and deletedTimers. */
Returns a new <code>TimerUpdate</code> with the most recently set completedTimers, setTimers, and deletedTimers
build
{ "repo_name": "tweise/beam", "path": "runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkManager.java", "license": "apache-2.0", "size": 55302 }
[ "com.google.common.collect.ImmutableSet", "org.apache.beam.sdk.util.TimerInternals" ]
import com.google.common.collect.ImmutableSet; import org.apache.beam.sdk.util.TimerInternals;
import com.google.common.collect.*; import org.apache.beam.sdk.util.*;
[ "com.google.common", "org.apache.beam" ]
com.google.common; org.apache.beam;
2,283,712
public String getFirst(String key) { key = canonicalizeKey(key); List<String> l = mHeaders.get(key); if (l == null) { return null; } return l.get(0); }
String function(String key) { key = canonicalizeKey(key); List<String> l = mHeaders.get(key); if (l == null) { return null; } return l.get(0); }
/** * Convenience method for returning the first header value or null if no * mapping exists. */
Convenience method for returning the first header value or null if no mapping exists
getFirst
{ "repo_name": "kevinko/mahttp", "path": "src/main/com/faveset/mahttpd/Headers.java", "license": "bsd-3-clause", "size": 7105 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
373,354
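The record above follows a common "first value of a multi-valued map" pattern; this hypothetical JDK-only sketch shows the same idea, with a simple lower-casing stand-in for the `canonicalizeKey` helper that is not shown in the record.

```java
import java.util.List;
import java.util.Locale;
import java.util.Map;

public class FirstHeaderSketch {
    // Returns the first value mapped to the (case-insensitively keyed) header, or null if absent.
    static String getFirst(Map<String, List<String>> headers, String key) {
        List<String> values = headers.get(key.toLowerCase(Locale.ROOT)); // stand-in for canonicalizeKey
        return (values == null || values.isEmpty()) ? null : values.get(0);
    }
}
```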
@Override public boolean canPlaceBlockAt(World worldIn, BlockPos pos) { return super.canPlaceBlockAt(worldIn, pos) && this.canBlockStay(worldIn, pos, worldIn.getBlockState(pos)); }
boolean function(World worldIn, BlockPos pos) { return super.canPlaceBlockAt(worldIn, pos) && this.canBlockStay(worldIn, pos, worldIn.getBlockState(pos)); }
/** * Checks to see if its valid to put this block at the specified coordinates. Args: world, x, y, z */
Checks to see if it's valid to put this block at the specified coordinates. Args: world, x, y, z
canPlaceBlockAt
{ "repo_name": "Stormister/Rediscovered-Mod-1.8", "path": "src/main/java/com/stormister/rediscovered/BlockEmptyRoseBush.java", "license": "gpl-3.0", "size": 8556 }
[ "net.minecraft.util.BlockPos", "net.minecraft.world.World" ]
import net.minecraft.util.BlockPos; import net.minecraft.world.World;
import net.minecraft.util.*; import net.minecraft.world.*;
[ "net.minecraft.util", "net.minecraft.world" ]
net.minecraft.util; net.minecraft.world;
1,981,824
private String getAccountNameWithoutDomain(String fullAccountName) { StringTokenizer tokenizer = new StringTokenizer(fullAccountName, "."); return tokenizer.nextToken(); }
String function(String fullAccountName) { StringTokenizer tokenizer = new StringTokenizer(fullAccountName, "."); return tokenizer.nextToken(); }
/** * Helper method that returns the Storage account name without * the domain name suffix. * @param fullAccountName Storage account name with domain name suffix * @return String */
Helper method that returns the Storage account name without the domain name suffix
getAccountNameWithoutDomain
{ "repo_name": "dennishuo/hadoop", "path": "hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/LocalSASKeyGeneratorImpl.java", "license": "apache-2.0", "size": 11024 }
[ "java.util.StringTokenizer" ]
import java.util.StringTokenizer;
import java.util.*;
[ "java.util" ]
java.util;
1,936,899
public boolean createBond(BluetoothDevice device) { boolean result = device.createBond(); Timber.d("Creating bond with: %s/%s/%b", device.getName(), device.getAddress(), result); return result; }
boolean function(BluetoothDevice device) { boolean result = device.createBond(); Timber.d(STR, device.getName(), device.getAddress(), result); return result; }
/** * Pair with the specific device. */
Pair with the specific device
createBond
{ "repo_name": "zugaldia/android-robocar", "path": "robocar/libsoftware/src/main/java/com/zugaldia/robocar/software/controller/nes30/Nes30Connection.java", "license": "bsd-2-clause", "size": 4805 }
[ "android.bluetooth.BluetoothDevice" ]
import android.bluetooth.BluetoothDevice;
import android.bluetooth.*;
[ "android.bluetooth" ]
android.bluetooth;
904,080
public void setL1Entry(final ReadOnlyEntry e) { entry = e; }
void function(final ReadOnlyEntry e) { entry = e; }
/** * Sets the value of the entry field for this object. * * @param e The entry to set. */
Sets the value of the entry field for this object
setL1Entry
{ "repo_name": "UnboundID/ldapsdk", "path": "tests/unit/src/com/unboundid/ldap/sdk/persist/TestInheritanceL1.java", "license": "gpl-2.0", "size": 3673 }
[ "com.unboundid.ldap.sdk.ReadOnlyEntry" ]
import com.unboundid.ldap.sdk.ReadOnlyEntry;
import com.unboundid.ldap.sdk.*;
[ "com.unboundid.ldap" ]
com.unboundid.ldap;
2,854,125
private void click_select_card_player1(MouseEvent e) { if(model.get_game().get_current_player().equals(model.get_game().get_player1())) { int count = 0; List<List<PlayCard>> t1 = model.get_game().get_player1().get_player_table(); int index = get_key(images_table_player1, e.getSource()); System.out.println("PlayCard Index: " + index); for(int i = 0; i < t1.size(); i++) { for(int j = 0; j < t1.get(i).size(); j++) { if(count == index) { images_table_player1.get(count).setBorder(new LineBorder(Color.RED, 1)); model.set_selected_playcard(index + 1); model.set_selected_playcard_district(i + 1); } else { images_table_player1.get(count).setBorder(new LineBorder(new Color(214,217,223), 1)); } count++; } } } } // </editor-fold> // <editor-fold defaultstate="collapsed" desc="Player 2">
void function(MouseEvent e) { if(model.get_game().get_current_player().equals(model.get_game().get_player1())) { int count = 0; List<List<PlayCard>> t1 = model.get_game().get_player1().get_player_table(); int index = get_key(images_table_player1, e.getSource()); System.out.println(STR + index); for(int i = 0; i < t1.size(); i++) { for(int j = 0; j < t1.get(i).size(); j++) { if(count == index) { images_table_player1.get(count).setBorder(new LineBorder(Color.RED, 1)); model.set_selected_playcard(index + 1); model.set_selected_playcard_district(i + 1); } else { images_table_player1.get(count).setBorder(new LineBorder(new Color(214,217,223), 1)); } count++; } } } }
/** * This function is called when a play card is clicked. * When clicked it is set a variable in the model with the card clicked. */
This function is called when a play card is clicked. When clicked, it sets a variable in the model with the clicked card
click_select_card_player1
{ "repo_name": "Tenza/Decktet-Magnate", "path": "Decktet - Magnate/src/magnate/ui/graphics/Table.java", "license": "gpl-3.0", "size": 29302 }
[ "java.awt.Color", "java.awt.event.MouseEvent", "java.util.List", "javax.swing.border.LineBorder" ]
import java.awt.Color; import java.awt.event.MouseEvent; import java.util.List; import javax.swing.border.LineBorder;
import java.awt.*; import java.awt.event.*; import java.util.*; import javax.swing.border.*;
[ "java.awt", "java.util", "javax.swing" ]
java.awt; java.util; javax.swing;
1,509,238
public static Message constructMessage(byte[] buf, int offset) throws MalformedMessageException { // depending on the type of message, call a different constructor MessageType type = MessageType.getMessageType(TypeHelper.byteArrayToInt(buf, offset + Field.MSG_TYPE.getOffset())); switch (type) { case REGISTRATION_REQUEST: return new RegistrationRequestMessage(buf, offset); case REGISTRATION_REPLY: return new RegistrationReplyMessage(buf, offset); case DATA_REQUEST: return new DataRequestMessage(buf, offset); case DATA_REPLY: return new DataReplyMessage(buf, offset); case ERR_: return new ErrorMessage(buf, offset); case DEBUG_: return new DebugMessage(buf, offset); case HEARTBEAT_CLIENT: return new HeartbeatClientMessage(buf, offset); case HEARTBEAT_ROUTER: return new HeartbeatRouterMessage(buf, offset); case RELOAD_CONFIGURATION: return new ReloadConfigurationMessage(buf, offset); default: throw new MalformedMessageException("Unknown message type: " + type); } }
static Message function(byte[] buf, int offset) throws MalformedMessageException { MessageType type = MessageType.getMessageType(TypeHelper.byteArrayToInt(buf, offset + Field.MSG_TYPE.getOffset())); switch (type) { case REGISTRATION_REQUEST: return new RegistrationRequestMessage(buf, offset); case REGISTRATION_REPLY: return new RegistrationReplyMessage(buf, offset); case DATA_REQUEST: return new DataRequestMessage(buf, offset); case DATA_REPLY: return new DataReplyMessage(buf, offset); case ERR_: return new ErrorMessage(buf, offset); case DEBUG_: return new DebugMessage(buf, offset); case HEARTBEAT_CLIENT: return new HeartbeatClientMessage(buf, offset); case HEARTBEAT_ROUTER: return new HeartbeatRouterMessage(buf, offset); case RELOAD_CONFIGURATION: return new ReloadConfigurationMessage(buf, offset); default: throw new MalformedMessageException(STR + type); } }
/** Construct a message from a byte array * * @param buf * a buffer which contains a message * @param offset * the offset at which the message begins * @throws MalformedMessageException * If the buffer does not contain a valid message */
Construct a message from a byte array
constructMessage
{ "repo_name": "moliva/proactive", "path": "src/Extensions/org/objectweb/proactive/extensions/pamr/protocol/message/Message.java", "license": "agpl-3.0", "size": 17963 }
[ "org.objectweb.proactive.extensions.pamr.exceptions.MalformedMessageException", "org.objectweb.proactive.extensions.pamr.protocol.TypeHelper" ]
import org.objectweb.proactive.extensions.pamr.exceptions.MalformedMessageException; import org.objectweb.proactive.extensions.pamr.protocol.TypeHelper;
import org.objectweb.proactive.extensions.pamr.exceptions.*; import org.objectweb.proactive.extensions.pamr.protocol.*;
[ "org.objectweb.proactive" ]
org.objectweb.proactive;
2,014,074
super.setUp(); final PipedOutputStream outputStream = new PipedOutputStream(); final InputStream inputStream = new BigBufferPipedInputStream(outputStream); final FanOutStreamSender fanOutStreamSender = new FanOutStreamSender(3); fanOutStreamSender.add(outputStream); m_sender = fanOutStreamSender; m_receiver = new StreamReceiver(inputStream); }
super.setUp(); final PipedOutputStream outputStream = new PipedOutputStream(); final InputStream inputStream = new BigBufferPipedInputStream(outputStream); final FanOutStreamSender fanOutStreamSender = new FanOutStreamSender(3); fanOutStreamSender.add(outputStream); m_sender = fanOutStreamSender; m_receiver = new StreamReceiver(inputStream); }
/** * Sigh, JUnit treats setUp and tearDown as non-virtual methods - * must define in concrete test case class. */
Sigh, JUnit treats setUp and tearDown as non-virtual methods - must define in concrete test case class
setUp
{ "repo_name": "slantview/DrupalLoadTest", "path": "lib/grinder/grinder-core/src/test/java/net/grinder/communication/TestFanOutStreamSenderAndStreamReceiver.java", "license": "gpl-2.0", "size": 2248 }
[ "java.io.InputStream", "java.io.PipedOutputStream" ]
import java.io.InputStream; import java.io.PipedOutputStream;
import java.io.*;
[ "java.io" ]
java.io;
1,665,408
public static void highlight(WiQueryResourceManager manager) { manager.addJavaScriptResource(CoreEffectJavaScriptResourceReference.get()); manager.addJavaScriptResource(HighlightEffectJavaScriptResourceReference.get()); }
static void function(WiQueryResourceManager manager) { manager.addJavaScriptResource(CoreEffectJavaScriptResourceReference.get()); manager.addJavaScriptResource(HighlightEffectJavaScriptResourceReference.get()); }
/** * Method to load the highlight effect * @param manager */
Method to load the highlight effect
highlight
{ "repo_name": "magomi/wiquery", "path": "src/main/java/org/odlabs/wiquery/ui/effects/EffectsHelper.java", "license": "mit", "size": 4853 }
[ "org.odlabs.wiquery.core.commons.WiQueryResourceManager" ]
import org.odlabs.wiquery.core.commons.WiQueryResourceManager;
import org.odlabs.wiquery.core.commons.*;
[ "org.odlabs.wiquery" ]
org.odlabs.wiquery;
2,463,646
@Test public void testOvertakingCheckpointBarriers() throws Exception { try (StreamTaskMailboxTestHarness<String> testHarness = new StreamTaskMailboxTestHarnessBuilder<>( MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO) .addInput(BasicTypeInfo.STRING_TYPE_INFO, 2) .addInput(BasicTypeInfo.INT_TYPE_INFO, 2) .addInput(BasicTypeInfo.DOUBLE_TYPE_INFO, 2) .setupOutputForSingletonOperatorChain( new MapToStringMultipleInputOperatorFactory(3)) .build()) { ArrayDeque<Object> expectedOutput = new ArrayDeque<>(); long initialTime = 0L; testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0); // These elements should be forwarded, since we did not yet receive a checkpoint barrier // on that input, only add to same input, otherwise we would not know the ordering // of the output since the Task might read the inputs in any order testHarness.processElement(new StreamRecord<>("Witam-0-1", initialTime), 0, 1); testHarness.processElement(new StreamRecord<>(42, initialTime), 1, 1); testHarness.processElement(new StreamRecord<>(1.0d, initialTime), 2, 1); expectedOutput.add(new StreamRecord<>("Witam-0-1", initialTime)); expectedOutput.add(new StreamRecord<>("42", initialTime)); expectedOutput.add(new StreamRecord<>("1.0", initialTime)); // we should not yet see the barrier, only the two elements from non-blocked input assertThat(testHarness.getOutput(), contains(expectedOutput.toArray())); // Now give a later barrier to all inputs, this should unblock the first channel testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 2, 0); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 2, 1); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0); expectedOutput.add(new CancelCheckpointMarker(0)); expectedOutput.add( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation())); assertThat(testHarness.getOutput(), contains(expectedOutput.toArray())); // Then give the earlier barrier, these should be ignored testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1); testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0); testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1); testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 2, 0); testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 2, 1); testHarness.waitForTaskCompletion(); assertThat(testHarness.getOutput(), contains(expectedOutput.toArray())); } }
void function() throws Exception { try (StreamTaskMailboxTestHarness<String> testHarness = new StreamTaskMailboxTestHarnessBuilder<>( MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO) .addInput(BasicTypeInfo.STRING_TYPE_INFO, 2) .addInput(BasicTypeInfo.INT_TYPE_INFO, 2) .addInput(BasicTypeInfo.DOUBLE_TYPE_INFO, 2) .setupOutputForSingletonOperatorChain( new MapToStringMultipleInputOperatorFactory(3)) .build()) { ArrayDeque<Object> expectedOutput = new ArrayDeque<>(); long initialTime = 0L; testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0); testHarness.processElement(new StreamRecord<>(STR, initialTime), 0, 1); testHarness.processElement(new StreamRecord<>(42, initialTime), 1, 1); testHarness.processElement(new StreamRecord<>(1.0d, initialTime), 2, 1); expectedOutput.add(new StreamRecord<>(STR, initialTime)); expectedOutput.add(new StreamRecord<>("42", initialTime)); expectedOutput.add(new StreamRecord<>("1.0", initialTime)); assertThat(testHarness.getOutput(), contains(expectedOutput.toArray())); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 2, 0); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 2, 1); testHarness.processEvent( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0); expectedOutput.add(new CancelCheckpointMarker(0)); expectedOutput.add( new CheckpointBarrier( 1, 1, CheckpointOptions.forCheckpointWithDefaultLocation())); assertThat(testHarness.getOutput(), contains(expectedOutput.toArray())); testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1); testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0); testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1); testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 2, 0); testHarness.processEvent( new CheckpointBarrier( 0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 2, 1); testHarness.waitForTaskCompletion(); assertThat(testHarness.getOutput(), contains(expectedOutput.toArray())); } }
/** * This test verifies that checkpoint barriers and barrier buffers work correctly with * concurrent checkpoint barriers where one checkpoint is "overtaking" another checkpoint, i.e. * some inputs receive barriers from an earlier checkpoint, thereby blocking, then all inputs * receive barriers from a later checkpoint. */
This test verifies that checkpoint barriers and barrier buffers work correctly with concurrent checkpoint barriers where one checkpoint is "overtaking" another checkpoint, i.e. some inputs receive barriers from an earlier checkpoint, thereby blocking, then all inputs receive barriers from a later checkpoint
testOvertakingCheckpointBarriers
{ "repo_name": "tony810430/flink", "path": "flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/MultipleInputStreamTaskTest.java", "license": "apache-2.0", "size": 73814 }
[ "java.util.ArrayDeque", "org.apache.flink.api.common.typeinfo.BasicTypeInfo", "org.apache.flink.runtime.checkpoint.CheckpointOptions", "org.apache.flink.runtime.io.network.api.CancelCheckpointMarker", "org.apache.flink.runtime.io.network.api.CheckpointBarrier", "org.apache.flink.streaming.runtime.streamrecord.StreamRecord", "org.hamcrest.MatcherAssert", "org.hamcrest.Matchers" ]
import java.util.ArrayDeque; import org.apache.flink.api.common.typeinfo.BasicTypeInfo; import org.apache.flink.runtime.checkpoint.CheckpointOptions; import org.apache.flink.runtime.io.network.api.CancelCheckpointMarker; import org.apache.flink.runtime.io.network.api.CheckpointBarrier; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers;
import java.util.*; import org.apache.flink.api.common.typeinfo.*; import org.apache.flink.runtime.checkpoint.*; import org.apache.flink.runtime.io.network.api.*; import org.apache.flink.streaming.runtime.streamrecord.*; import org.hamcrest.*;
[ "java.util", "org.apache.flink", "org.hamcrest" ]
java.util; org.apache.flink; org.hamcrest;
952,285
@Override protected boolean isValidInput(Object element) { if (element instanceof IType) { IType type= (IType)element; return type.isBinary() || type.getDeclaringType() == null; } return false; }
boolean function(Object element) { if (element instanceof IType) { IType type= (IType)element; return type.isBinary() || type.getDeclaringType() == null; } return false; }
/** * Answers if the given <code>element</code> is a valid * input for this part. * * @param element the object to test * @return <true> if the given element is a valid input */
Answers if the given <code>element</code> is a valid input for this part
isValidInput
{ "repo_name": "elucash/eclipse-oxygen", "path": "org.eclipse.jdt.ui/src/org/eclipse/jdt/internal/ui/browsing/MembersView.java", "license": "epl-1.0", "size": 11568 }
[ "org.eclipse.jdt.core.IType" ]
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.core.*;
[ "org.eclipse.jdt" ]
org.eclipse.jdt;
2,220,902
public static List<RexNode> transformExprs(RexBuilder builder, List<RexNode> exprs, Map<Integer, Integer> corrMap) { List<RexNode> outputExprs = new ArrayList<>(); DrillRelOptUtil.RexFieldsTransformer transformer = new DrillRelOptUtil.RexFieldsTransformer(builder, corrMap); for (RexNode expr : exprs) { outputExprs.add(transformer.go(expr)); } return outputExprs; }
static List<RexNode> function(RexBuilder builder, List<RexNode> exprs, Map<Integer, Integer> corrMap) { List<RexNode> outputExprs = new ArrayList<>(); DrillRelOptUtil.RexFieldsTransformer transformer = new DrillRelOptUtil.RexFieldsTransformer(builder, corrMap); for (RexNode expr : exprs) { outputExprs.add(transformer.go(expr)); } return outputExprs; }
/** * Given a list of rexnodes it transforms the rexnodes by changing the expr to use new index mapped to the old index. * @param builder : RexBuilder from the planner. * @param exprs: RexNodes to be transformed. * @param corrMap: Mapping between old index to new index. * @return */
Given a list of RexNodes, it transforms them by changing each expr to use the new index mapped to the old index
transformExprs
{ "repo_name": "sohami/drill", "path": "exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillRelOptUtil.java", "license": "apache-2.0", "size": 14727 }
[ "java.util.ArrayList", "java.util.List", "java.util.Map", "org.apache.calcite.rex.RexBuilder", "org.apache.calcite.rex.RexNode" ]
import java.util.ArrayList; import java.util.List; import java.util.Map; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexNode;
import java.util.*; import org.apache.calcite.rex.*;
[ "java.util", "org.apache.calcite" ]
java.util; org.apache.calcite;
759,290
public MibType getElementType() { return base; }
MibType function() { return base; }
/** * Returns the sequence element type. This is the type of each * individual element in the sequence. * * @return the sequence element type * * @since 2.2 */
Returns the sequence element type. This is the type of each individual element in the sequence
getElementType
{ "repo_name": "tmoskun/JSNMPWalker", "path": "lib/mibble-2.9.3/src/java/net/percederberg/mibble/type/SequenceOfType.java", "license": "gpl-3.0", "size": 6746 }
[ "net.percederberg.mibble.MibType" ]
import net.percederberg.mibble.MibType;
import net.percederberg.mibble.*;
[ "net.percederberg.mibble" ]
net.percederberg.mibble;
1,613,689
public void addGlyphs(Glyph[] glyphs, boolean repaint) { for (Glyph glyph : glyphs) { glyph.initCams(cm.cameraList.length); visualEnts.add(glyph); } addGlyphsToDrawingList(glyphs); if (repaint) { VirtualSpaceManager.INSTANCE.repaint(); } }
void function(Glyph[] glyphs, boolean repaint) { for (Glyph glyph : glyphs) { glyph.initCams(cm.cameraList.length); visualEnts.add(glyph); } addGlyphsToDrawingList(glyphs); if (repaint) { VirtualSpaceManager.INSTANCE.repaint(); } }
/** * Add a list of glyphs to this virtual space. glyphs with lower indices in * the list will be added first in the virtual space and will consequently * be lower in the drawing stack. * * @param repaint pass false if views should not be repainted as a * consequence of this addition (default is true). */
Add a list of glyphs to this virtual space. glyphs with lower indices in the list will be added first in the virtual space and will consequently be lower in the drawing stack
addGlyphs
{ "repo_name": "sharwell/zgrnbviewer", "path": "org-tvl-netbeans-zgrviewer/src/fr/inria/zvtm/engine/VirtualSpace.java", "license": "lgpl-3.0", "size": 25742 }
[ "fr.inria.zvtm.glyphs.Glyph" ]
import fr.inria.zvtm.glyphs.Glyph;
import fr.inria.zvtm.glyphs.*;
[ "fr.inria.zvtm" ]
fr.inria.zvtm;
458,998
public File getBatchApplicationHome() { return getBatchApplicationHome(getEnvironmentVariables()); }
File function() { return getBatchApplicationHome(getEnvironmentVariables()); }
/** * Returns the batch application installation base path. * @return the batch application installation base path */
Returns the batch application installation base path
getBatchApplicationHome
{ "repo_name": "ashigeru/asakusafw-compiler", "path": "compiler-project/tester/src/main/java/com/asakusafw/lang/compiler/tester/TesterContext.java", "license": "apache-2.0", "size": 5031 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
439,703
public static String decode(String name) { try { return URLDecoder.decode(name, StandardCharsets.UTF_8.toString()); } catch (UnsupportedEncodingException e) { throw new RuntimeException("UTF-8 encoding is not present on the system!", e); } }
static String function(String name) { try { return URLDecoder.decode(name, StandardCharsets.UTF_8.toString()); } catch (UnsupportedEncodingException e) { throw new RuntimeException(STR, e); } }
/** * Decode name encoded with {@link #encode(String, boolean)}. * * @param name the name to decode * @return the decoded name */
Decode name encoded with <code>#encode(String, boolean)</code>
decode
{ "repo_name": "xwiki/xwiki-platform", "path": "xwiki-platform-core/xwiki-platform-store/xwiki-platform-store-filesystem/src/main/java/org/xwiki/store/internal/FileSystemStoreUtils.java", "license": "lgpl-2.1", "size": 5201 }
[ "java.io.UnsupportedEncodingException", "java.net.URLDecoder", "java.nio.charset.StandardCharsets" ]
import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.nio.charset.StandardCharsets;
import java.io.*; import java.net.*; import java.nio.charset.*;
[ "java.io", "java.net", "java.nio" ]
java.io; java.net; java.nio;
574,074
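To complement the decode record above, here is a hedged round-trip sketch using the JDK codec that `decode` relies on; the class's own `encode(String, boolean)` referenced in the javadoc is not shown here, so this only illustrates the underlying URLEncoder/URLDecoder pairing.

```java
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class UrlCodecRoundTrip {
    public static void main(String[] args) throws UnsupportedEncodingException {
        String name = "attachment name+with spaces";
        // Encode with the same UTF-8 charset the decode(...) helper uses.
        String encoded = URLEncoder.encode(name, StandardCharsets.UTF_8.toString());
        String decoded = URLDecoder.decode(encoded, StandardCharsets.UTF_8.toString());
        System.out.println(encoded + " -> " + decoded); // the round trip restores the original name
    }
}
```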
public static Object getOneReportById(Connection conn, Long encounterId, Long formId, Class clazz) throws IOException, ServletException, SQLException, ObjectNotFoundException { Object item; Map queries = QueryLoader.instance().load("/" + Constants.SQL_DEMO_PROPERTIES); String sqlName = "SQL_RETRIEVE_REPORT_ID" + formId; String sql = (String) queries.get(sqlName); ArrayList values = new ArrayList(); values.add(encounterId); try { item = DatabaseUtils.getZEPRSBean(conn, clazz, sql, values); } catch (ObjectNotFoundException e) { log.error("SQL: " + sql + " encounterId: " + encounterId); throw new ObjectNotFoundException(e); } return item; }
static Object function(Connection conn, Long encounterId, Long formId, Class clazz) throws IOException, ServletException, SQLException, ObjectNotFoundException { Object item; Map queries = QueryLoader.instance().load("/" + Constants.SQL_DEMO_PROPERTIES); String sqlName = STR + formId; String sql = (String) queries.get(sqlName); ArrayList values = new ArrayList(); values.add(encounterId); try { item = DatabaseUtils.getZEPRSBean(conn, clazz, sql, values); } catch (ObjectNotFoundException e) { log.error(STR + sql + STR + encounterId); throw new ObjectNotFoundException(e); } return item; }
/** * Used mostly for reports - can access fields by user-friendly starschemaname, rather than fieldX * @param conn * @param encounterId * @param formId * @param clazz * @return * @throws IOException * @throws ServletException * @throws SQLException * @throws ObjectNotFoundException */
Used mostly for reports - can access fields by user-friendly starschemaname, rather than fieldX
getOneReportById
{ "repo_name": "chrisekelley/zeprs", "path": "src/zeprs/org/cidrz/webapp/dynasite/dao/EncountersDAO.java", "license": "apache-2.0", "size": 77848 }
[ "java.io.IOException", "java.sql.Connection", "java.sql.SQLException", "java.util.ArrayList", "java.util.Map", "javax.servlet.ServletException", "org.apache.commons.dbutils.QueryLoader", "org.cidrz.webapp.dynasite.Constants", "org.cidrz.webapp.dynasite.exception.ObjectNotFoundException", "org.cidrz.webapp.dynasite.utils.DatabaseUtils" ]
import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; import java.util.ArrayList; import java.util.Map; import javax.servlet.ServletException; import org.apache.commons.dbutils.QueryLoader; import org.cidrz.webapp.dynasite.Constants; import org.cidrz.webapp.dynasite.exception.ObjectNotFoundException; import org.cidrz.webapp.dynasite.utils.DatabaseUtils;
import java.io.*; import java.sql.*; import java.util.*; import javax.servlet.*; import org.apache.commons.dbutils.*; import org.cidrz.webapp.dynasite.*; import org.cidrz.webapp.dynasite.exception.*; import org.cidrz.webapp.dynasite.utils.*;
[ "java.io", "java.sql", "java.util", "javax.servlet", "org.apache.commons", "org.cidrz.webapp" ]
java.io; java.sql; java.util; javax.servlet; org.apache.commons; org.cidrz.webapp;
2,115,739
public synchronized Http2HeadersStreamSinkChannel createStream(HeaderMap requestHeaders) throws IOException { if (!isClient()) { throw UndertowMessages.MESSAGES.headersStreamCanOnlyBeCreatedByClient(); } if (!isOpen()) { throw UndertowMessages.MESSAGES.channelIsClosed(); } int streamId = streamIdCounter; streamIdCounter += 2; Http2HeadersStreamSinkChannel spdySynStreamStreamSinkChannel = new Http2HeadersStreamSinkChannel(this, streamId, requestHeaders); outgoingStreams.put(streamId, spdySynStreamStreamSinkChannel); return spdySynStreamStreamSinkChannel; }
synchronized Http2HeadersStreamSinkChannel function(HeaderMap requestHeaders) throws IOException { if (!isClient()) { throw UndertowMessages.MESSAGES.headersStreamCanOnlyBeCreatedByClient(); } if (!isOpen()) { throw UndertowMessages.MESSAGES.channelIsClosed(); } int streamId = streamIdCounter; streamIdCounter += 2; Http2HeadersStreamSinkChannel spdySynStreamStreamSinkChannel = new Http2HeadersStreamSinkChannel(this, streamId, requestHeaders); outgoingStreams.put(streamId, spdySynStreamStreamSinkChannel); return spdySynStreamStreamSinkChannel; }
/** * Creates a strema using a HEADERS frame * * @param requestHeaders * @return * @throws IOException */
Creates a stream using a HEADERS frame
createStream
{ "repo_name": "wildfly-security-incubator/undertow", "path": "core/src/main/java/io/undertow/protocols/http2/Http2Channel.java", "license": "apache-2.0", "size": 35715 }
[ "io.undertow.UndertowMessages", "io.undertow.util.HeaderMap", "java.io.IOException" ]
import io.undertow.UndertowMessages; import io.undertow.util.HeaderMap; import java.io.IOException;
import io.undertow.*; import io.undertow.util.*; import java.io.*;
[ "io.undertow", "io.undertow.util", "java.io" ]
io.undertow; io.undertow.util; java.io;
2,530,837
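A hedged client-side sketch of driving createStream; the pseudo-header names and the availability of a connected client-mode Http2Channel are assumptions beyond what the record shows.

    // channel is assumed to be an open Http2Channel created in client mode.
    HeaderMap requestHeaders = new HeaderMap();
    requestHeaders.put(new HttpString(":method"), "GET");        // HTTP/2 pseudo-headers (assumed layout)
    requestHeaders.put(new HttpString(":path"), "/index.html");
    requestHeaders.put(new HttpString(":scheme"), "https");
    requestHeaders.put(new HttpString(":authority"), "example.com");
    try {
        Http2HeadersStreamSinkChannel stream = channel.createStream(requestHeaders);
        // write any request body to stream, then shut it down for writes
    } catch (IOException e) {
        // thrown when the channel is closed or this endpoint is not the client
    }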
protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html;charset=UTF-8"); PrintWriter out = response.getWriter(); try { String[] musicTypes = request.getParameterValues("musicType"); if (musicTypes != null) { Set<String> musicTypesSet = new HashSet<String>(Arrays.asList(musicTypes)); Connection connection = dataSource.getConnection(); String selectSQL = "select * from VOTES"; PreparedStatement selectStatement = connection.prepareStatement(selectSQL); ResultSet musicTypeRecords = selectStatement.executeQuery(); while (musicTypeRecords.next()) { if (musicTypesSet.contains(musicTypeRecords.getString("MUSICTYPE"))) { int numVotes = musicTypeRecords.getInt("NUMVOTES"); numVotes++; String updateVoteSQL = "update VOTES set NUMVOTES = ? where MUSICTYPE = ?"; PreparedStatement updateStatement = connection.prepareStatement(updateVoteSQL); updateStatement.setInt(1, numVotes); updateStatement.setString(2, musicTypeRecords.getString("MUSICTYPE")); updateStatement.executeUpdate(); updateStatement.close(); } } Integer curVotes = (Integer) request.getSession().getAttribute("sessionVotes"); curVotes = curVotes == null ? 1 : curVotes + 1; request.getSession().setAttribute("sessionVotes", curVotes); musicTypeRecords.close(); selectStatement.close(); connection.close(); } request.getRequestDispatcher("DisplayVotesServlet").forward(request, response); } catch (Exception e) { out.println("Had a problem getting the music types from the database " + e.getMessage()); } finally { out.close(); } }
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType(STR); PrintWriter out = response.getWriter(); try { String[] musicTypes = request.getParameterValues(STR); if (musicTypes != null) { Set<String> musicTypesSet = new HashSet<String>(Arrays.asList(musicTypes)); Connection connection = dataSource.getConnection(); String selectSQL = STR; PreparedStatement selectStatement = connection.prepareStatement(selectSQL); ResultSet musicTypeRecords = selectStatement.executeQuery(); while (musicTypeRecords.next()) { if (musicTypesSet.contains(musicTypeRecords.getString(STR))) { int numVotes = musicTypeRecords.getInt(STR); numVotes++; String updateVoteSQL = STR; PreparedStatement updateStatement = connection.prepareStatement(updateVoteSQL); updateStatement.setInt(1, numVotes); updateStatement.setString(2, musicTypeRecords.getString(STR)); updateStatement.executeUpdate(); updateStatement.close(); } } Integer curVotes = (Integer) request.getSession().getAttribute(STR); curVotes = curVotes == null ? 1 : curVotes + 1; request.getSession().setAttribute(STR, curVotes); musicTypeRecords.close(); selectStatement.close(); connection.close(); } request.getRequestDispatcher(STR).forward(request, response); } catch (Exception e) { out.println(STR + e.getMessage()); } finally { out.close(); } }
/** * Processes requests for both HTTP <code>GET</code> and <code>POST</code> * methods. * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */
Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods
processRequest
{ "repo_name": "ccsu-cs416F15/CS416ClassDemos", "path": "HW2Soln/src/java/edu/ccsu/hw2soln/VoteServlet.java", "license": "mit", "size": 4710 }
[ "java.io.IOException", "java.io.PrintWriter", "java.sql.Connection", "java.sql.PreparedStatement", "java.sql.ResultSet", "java.util.Arrays", "java.util.HashSet", "java.util.Set", "javax.servlet.ServletException", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse" ]
import java.io.IOException; import java.io.PrintWriter; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse;
import java.io.*; import java.sql.*; import java.util.*; import javax.servlet.*; import javax.servlet.http.*;
[ "java.io", "java.sql", "java.util", "javax.servlet" ]
java.io; java.sql; java.util; javax.servlet;
294,683
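Servlets written in this style normally expose processRequest through thin doGet/doPost overrides; a sketch of that conventional wiring (assumed, not shown in the record) follows.

    // Delegate both HTTP verbs to the shared handler above.
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }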
public void onEvent(ShareEvent e) { PhotoDB photoDB = (PhotoDB) e.getObject(); com.nasa.pic.utils.Utils.share(photoDB, e.getIntent()); }
void function(ShareEvent e) { PhotoDB photoDB = (PhotoDB) e.getObject(); com.nasa.pic.utils.Utils.share(photoDB, e.getIntent()); }
/** * Handler for {@link com.nasa.pic.events.ShareEvent}. * * @param e Event {@link com.nasa.pic.events.ShareEvent}. */
Handler for <code>com.nasa.pic.events.ShareEvent</code>
onEvent
{ "repo_name": "XinyueZ/nasapic", "path": "app/src/main/java/com/nasa/pic/app/activities/AppRestfulActivity.java", "license": "mit", "size": 2464 }
[ "com.nasa.pic.ds.PhotoDB", "com.nasa.pic.events.ShareEvent", "com.nasa.pic.utils.Utils" ]
import com.nasa.pic.ds.PhotoDB; import com.nasa.pic.events.ShareEvent; import com.nasa.pic.utils.Utils;
import com.nasa.pic.ds.*; import com.nasa.pic.events.*; import com.nasa.pic.utils.*;
[ "com.nasa.pic" ]
com.nasa.pic;
256,576
Node readVariable(String variableName) throws FaultException;
Node readVariable(String variableName) throws FaultException;
/** * Read the value of a BPEL variable. * * @param variableName * variable to read * @return the value of the variable, wrapped in a <code>Node</code> */
Read the value of a BPEL variable
readVariable
{ "repo_name": "TheRingbearer/HAWKS", "path": "ode/bpel-runtime/src/main/java/org/apache/ode/bpel/runtime/common/extension/ExtensionContext.java", "license": "apache-2.0", "size": 5739 }
[ "org.apache.ode.bpel.common.FaultException", "org.w3c.dom.Node" ]
import org.apache.ode.bpel.common.FaultException; import org.w3c.dom.Node;
import org.apache.ode.bpel.common.*; import org.w3c.dom.*;
[ "org.apache.ode", "org.w3c.dom" ]
org.apache.ode; org.w3c.dom;
1,981,246
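A minimal sketch of calling readVariable from inside an extension implementation; the variable name and the ctx reference are illustrative.

    // ctx is assumed to be the ExtensionContext handed to an extension operation.
    try {
        Node value = ctx.readVariable("orderId");              // "orderId" is an invented variable name
        String text = (value == null) ? null : value.getTextContent();
        // ... use the value ...
    } catch (FaultException fault) {
        // variable missing or uninitialized: convert to a BPEL fault or rethrow
    }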
public boolean filterVideoByLanguage() { Set<String> preferredLanguages = getPreferredLanguages(); // if the video's language is not defined (i.e. null) // OR if there is no linguistic content to the video (zxx) // OR if the language is undefined (und) // then we are NOT going to filter this video if (getLanguage() == null || getLanguage().equalsIgnoreCase("zxx") || getLanguage().equalsIgnoreCase("und")) return false; // if there are no preferred languages, then it means we must not filter this video if (preferredLanguages == null || preferredLanguages.isEmpty()) return false; // if this video's language is equal to the user's preferred one... then do NOT filter it out for (String prefLanguage : preferredLanguages) { if (getLanguage().matches(prefLanguage)) return false; } // this video is undesirable, hence we are going to filter it Log.i("FILTERING Video", getTitle() + "[" + getLanguage() + "]"); return true; }
boolean function() { Set<String> preferredLanguages = getPreferredLanguages(); if (getLanguage() == null getLanguage().equalsIgnoreCase("zxx") getLanguage().equalsIgnoreCase("und")) return false; if (preferredLanguages == null preferredLanguages.isEmpty()) return false; for (String prefLanguage : preferredLanguages) { if (getLanguage().matches(prefLanguage)) return false; } Log.i(STR, getTitle() + "[" + getLanguage() + "]"); return true; }
/** * Return true if this video does not meet the preferred language criteria; false otherwise. * Many YouTube videos do not set the language, hence this method will not be accurate. * * @return True to filter out the video; false otherwise. */
Return true if this video does not meet the preferred language criteria; false otherwise. Many YouTube videos do not set the language, hence this method will not be accurate
filterVideoByLanguage
{ "repo_name": "gysgogo/levetube", "path": "app/src/main/java/free/rm/skytube/businessobjects/YouTubeVideo.java", "license": "gpl-3.0", "size": 13094 }
[ "android.util.Log", "java.util.Set" ]
import android.util.Log; import java.util.Set;
import android.util.*; import java.util.*;
[ "android.util", "java.util" ]
android.util; java.util;
2,397,654
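A short sketch of applying the filter flag to a result list; the list names are illustrative.

    // Keep only videos that pass the language filter (false means "do not filter out").
    List<YouTubeVideo> visible = new ArrayList<>();
    for (YouTubeVideo video : searchResults) {
        if (!video.filterVideoByLanguage()) {
            visible.add(video);
        }
    }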
@Test public void testParseISODate() throws JsonParseException, java.text.ParseException { final SimpleDateFormat format = new SimpleDateFormat( "yyyy-MM-dd'T'HH:mm:ss.SSSZ"); Object doc = Json.parse("{ a : ISODate('2012-07-14T01:00:00.000') }"); assertEquals( BuilderFactory.start() .add("a", format.parse("2012-07-14T01:00:00.000UTC")) .build(), doc); doc = Json.parse("{ a : ISODate('2012-07-14') }"); assertEquals( BuilderFactory.start() .add("a", format.parse("2012-07-14T00:00:00.000UTC")) .build(), doc); }
void function() throws JsonParseException, java.text.ParseException { final SimpleDateFormat format = new SimpleDateFormat( STR); Object doc = Json.parse(STR); assertEquals( BuilderFactory.start() .add("a", format.parse(STR)) .build(), doc); doc = Json.parse(STR); assertEquals( BuilderFactory.start() .add("a", format.parse(STR)) .build(), doc); }
/** * Test Parsing a ISODate(..) element. * * @throws JsonParseException * On a test failure. * @throws java.text.ParseException * On a test failure. */
Test Parsing a ISODate(..) element
testParseISODate
{ "repo_name": "allanbank/mongodb-async-driver", "path": "src/test/java/com/allanbank/mongodb/bson/json/JsonTest.java", "license": "apache-2.0", "size": 22377 }
[ "com.allanbank.mongodb.bson.builder.BuilderFactory", "com.allanbank.mongodb.error.JsonParseException", "java.text.SimpleDateFormat", "org.junit.Assert" ]
import com.allanbank.mongodb.bson.builder.BuilderFactory; import com.allanbank.mongodb.error.JsonParseException; import java.text.SimpleDateFormat; import org.junit.Assert;
import com.allanbank.mongodb.bson.builder.*; import com.allanbank.mongodb.error.*; import java.text.*; import org.junit.*;
[ "com.allanbank.mongodb", "java.text", "org.junit" ]
com.allanbank.mongodb; java.text; org.junit;
2,226,477
public static HARegionQueue getHARegionQueueInstance(String regionName, Cache cache, final int haRgnQType, final boolean isDurable) throws IOException, ClassNotFoundException, CacheException, InterruptedException { Map container = null; if (haRgnQType == HARegionQueue.BLOCKING_HA_QUEUE) { container = new HAContainerMap(new HashMap()); } else { // Should actually be HAContainerRegion, but ok if only JUnits using this // method. container = new HashMap(); } return getHARegionQueueInstance(regionName, (GemFireCacheImpl)cache, HARegionQueueAttributes.DEFAULT_HARQ_ATTRIBUTES, haRgnQType, isDurable, container, null, HandShake.CONFLATION_DEFAULT, false, Boolean.FALSE); }
static HARegionQueue function(String regionName, Cache cache, final int haRgnQType, final boolean isDurable) throws IOException, ClassNotFoundException, CacheException, InterruptedException { Map container = null; if (haRgnQType == HARegionQueue.BLOCKING_HA_QUEUE) { container = new HAContainerMap(new HashMap()); } else { container = new HashMap(); } return getHARegionQueueInstance(regionName, (GemFireCacheImpl)cache, HARegionQueueAttributes.DEFAULT_HARQ_ATTRIBUTES, haRgnQType, isDurable, container, null, HandShake.CONFLATION_DEFAULT, false, Boolean.FALSE); }
/** * Creates a HARegionQueue object with default attributes * Used by tests * * @param regionName * uniquely identifies the HARegionQueue in the VM.For HARegionQueues * across the VM to communicate with each other , the name should be * identical * @param cache * Gemfire Cache instance * @param haRgnQType * int identifying whether the HARegionQueue is of type blocking or * non blocking * @return an instance of HARegionQueue * @throws IOException * @throws ClassNotFoundException * @throws InterruptedException * @throws CacheException */
Creates a HARegionQueue object with default attributes Used by tests
getHARegionQueueInstance
{ "repo_name": "SnappyDataInc/snappy-store", "path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueue.java", "license": "apache-2.0", "size": 145094 }
[ "com.gemstone.gemfire.cache.Cache", "com.gemstone.gemfire.cache.CacheException", "com.gemstone.gemfire.internal.cache.GemFireCacheImpl", "com.gemstone.gemfire.internal.cache.tier.sockets.HandShake", "java.io.IOException", "java.util.HashMap", "java.util.Map" ]
import com.gemstone.gemfire.cache.Cache; import com.gemstone.gemfire.cache.CacheException; import com.gemstone.gemfire.internal.cache.GemFireCacheImpl; import com.gemstone.gemfire.internal.cache.tier.sockets.HandShake; import java.io.IOException; import java.util.HashMap; import java.util.Map;
import com.gemstone.gemfire.cache.*; import com.gemstone.gemfire.internal.cache.*; import com.gemstone.gemfire.internal.cache.tier.sockets.*; import java.io.*; import java.util.*;
[ "com.gemstone.gemfire", "java.io", "java.util" ]
com.gemstone.gemfire; java.io; java.util;
2,732,057
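A test-style sketch of constructing a blocking, non-durable queue through the factory above; the region name is invented and the Cache is assumed to exist already.

    public HARegionQueue buildQueueForTest(Cache cache) throws Exception {
        return HARegionQueue.getHARegionQueueInstance(
                "testRegion",                        // must be identical across VMs sharing the queue
                cache,
                HARegionQueue.BLOCKING_HA_QUEUE,     // blocking variant, backed by a HAContainerMap
                false);                              // not durable
    }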
public void assertEventsReceivedInHandler() { int eventsOutsideHandler = mEventsReceivedOutsideHandler.get(); String message = String.format( "Events arrived outside the associated Looper. Expected=0, Found=%d", eventsOutsideHandler); Assert.assertEquals(message, 0 , eventsOutsideHandler); }
void function() { int eventsOutsideHandler = mEventsReceivedOutsideHandler.get(); String message = String.format( STR, eventsOutsideHandler); Assert.assertEquals(message, 0 , eventsOutsideHandler); }
/** * Asserts that sensor events arrived in the proper thread if a {@link Handler} was associated * with the current instance. * * If no events were received this assertion will be evaluated to {@code true}. */
Asserts that sensor events arrived in the proper thread if a <code>Handler</code> was associated with the current instance. If no events were received this assertion will be evaluated to true
assertEventsReceivedInHandler
{ "repo_name": "wiki2014/Learning-Summary", "path": "alps/cts/tests/tests/hardware/src/android/hardware/cts/helpers/TestSensorEventListener.java", "license": "gpl-3.0", "size": 14783 }
[ "junit.framework.Assert" ]
import junit.framework.Assert;
import junit.framework.*;
[ "junit.framework" ]
junit.framework;
2,136,031
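A small helper sketch showing when the assertion is meant to run; how the listener was constructed and registered is assumed to have happened earlier in the test.

    // Call after the test body has finished stimulating the sensor.
    void verifyDeliveryThread(TestSensorEventListener listener) {
        // If the listener was built with a Handler, every event must have arrived on that Handler's Looper.
        listener.assertEventsReceivedInHandler();
    }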
public static void requeueJobs(DataSource dataSource, List<Long> jobIds) throws MissingParamException, DatabaseException { MissingParam.checkMissing(dataSource, "dataSource"); MissingParam.checkMissing(jobIds, "jobIds"); Connection conn = null; try { conn = dataSource.getConnection(); requeueJobs(conn, jobIds); } catch (SQLException e) { throw new DatabaseException("An error occurred while requeuing jobs", e); } finally { DatabaseUtils.closeConnection(conn); } }
static void function(DataSource dataSource, List<Long> jobIds) throws MissingParamException, DatabaseException { MissingParam.checkMissing(dataSource, STR); MissingParam.checkMissing(jobIds, STR); Connection conn = null; try { conn = dataSource.getConnection(); requeueJobs(conn, jobIds); } catch (SQLException e) { throw new DatabaseException(STR, e); } finally { DatabaseUtils.closeConnection(conn); } }
/** * Place a set of jobs in the {@link Job#WAITING_STATUS} state so they can be * re-run. * * @param dataSource * A data source * @param jobIds * The job IDs * @throws MissingParamException * If any required parameters are missing * @throws DatabaseException * If a database error occurs */
Place a set of jobs in the <code>Job#WAITING_STATUS</code> state so they can be re-run
requeueJobs
{ "repo_name": "squaregoldfish/QuinCe", "path": "WebApp/src/uk/ac/exeter/QuinCe/jobs/JobManager.java", "license": "gpl-3.0", "size": 56040 }
[ "java.sql.Connection", "java.sql.SQLException", "java.util.List", "javax.sql.DataSource", "uk.ac.exeter.QuinCe" ]
import java.sql.Connection; import java.sql.SQLException; import java.util.List; import javax.sql.DataSource; import uk.ac.exeter.QuinCe;
import java.sql.*; import java.util.*; import javax.sql.*; import uk.ac.exeter.*;
[ "java.sql", "java.util", "javax.sql", "uk.ac.exeter" ]
java.sql; java.util; javax.sql; uk.ac.exeter;
2,041,125
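A small sketch of re-queuing a batch of jobs; the job ids are invented and dataSource is assumed to be an injected javax.sql.DataSource.

    List<Long> jobIds = Arrays.asList(17L, 18L);            // illustrative ids of jobs to re-run
    try {
        JobManager.requeueJobs(dataSource, jobIds);         // puts them back into WAITING_STATUS
    } catch (MissingParamException | DatabaseException e) {
        // null arguments or a failed database update
    }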
//--------------// // getTimeInter // //--------------// public AbstractTimeInter getTimeInter () { return timeInter; }
AbstractTimeInter function () { return timeInter; }
/** * Report the time sig instance, if any, for the staff. * * @return the timeInter or null */
Report the time sig instance, if any, for the staff
getTimeInter
{ "repo_name": "Audiveris/audiveris", "path": "src/main/org/audiveris/omr/sheet/time/TimeBuilder.java", "license": "agpl-3.0", "size": 16851 }
[ "org.audiveris.omr.sig.inter.AbstractTimeInter" ]
import org.audiveris.omr.sig.inter.AbstractTimeInter;
import org.audiveris.omr.sig.inter.*;
[ "org.audiveris.omr" ]
org.audiveris.omr;
1,427,596
@CreatedDate @Temporal(TemporalType.TIMESTAMP) public Date getCreateon() { return createon; }
@Temporal(TemporalType.TIMESTAMP) Date function() { return createon; }
/** * Gets the createon. * * @return the createon */
Gets the createon
getCreateon
{ "repo_name": "gleb619/hotel_shop", "path": "src/main/java/org/test/shop/model/domain/SObject.java", "license": "apache-2.0", "size": 8039 }
[ "java.util.Date", "javax.persistence.Temporal", "javax.persistence.TemporalType" ]
import java.util.Date; import javax.persistence.Temporal; import javax.persistence.TemporalType;
import java.util.*; import javax.persistence.*;
[ "java.util", "javax.persistence" ]
java.util; javax.persistence;
1,177,302
// Ignore people! String sMessage; if (token[0].charAt(0) == ':') { sMessage = token[0].substring(1); } else { sMessage = token[0]; } // We use sMessage to be the users host (first token in the line) try { if (parser.getIgnoreList().matches(sMessage) > -1) { return; } } catch (PatternSyntaxException pse) { final ParserError pe = new ParserError(ParserError.ERROR_WARNING + ParserError.ERROR_USER, "Error with ignore list regex: " + pse, parser.getLastLine()); pe.setException(pse); callErrorInfo(pe); } // Lines such as: // "nick!user@host PRIVMSG" // are invalid, stop processing. if (token.length < 3) { return; } // Is this actually a notice auth? if (token[0].indexOf('!') == -1 && "NOTICE".equalsIgnoreCase(token[1]) && "AUTH".equalsIgnoreCase(token[2])) { try { parser.getProcessingManager().process(date, "Notice Auth", token); } catch (ProcessorNotFoundException e) { } return; } // "nick!user@host PRIVMSG #Channel" should be processed as "nick!user@host PRIVMSG #Channel :" if (token.length < 4) { sMessage = ""; } else { sMessage = token[token.length - 1]; } String[] bits = sMessage.split(" ", 2); String sCTCP = ""; boolean isAction = false; boolean isCTCP = false; if (sMessage.length() > 1) { // Actions are special CTCPs // Bits is the message been split into 2 parts // the first word and the rest final Character char1 = (char) 1; if ("PRIVMSG".equalsIgnoreCase(sParam) && bits[0].equalsIgnoreCase(char1 + "ACTION") && Character.valueOf(sMessage.charAt(sMessage.length() - 1)).equals(char1)) { isAction = true; if (bits.length > 1) { sMessage = bits[1]; sMessage = sMessage.substring(0, sMessage.length() - 1); } else { sMessage = ""; } } // If the message is not an action, check if it is another type of CTCP // CTCPs have Character(1) at the start/end of the line if (!isAction && Character.valueOf(sMessage.charAt(0)).equals(char1) && Character.valueOf(sMessage.charAt(sMessage.length() - 1)).equals(char1)) { isCTCP = true; // Bits is the message been split into 2 parts, the first word and the rest // Some CTCPs have messages and some do not if (bits.length > 1) { sMessage = bits[1]; } else { sMessage = ""; } // Remove the leading char1 bits = bits[0].split(char1.toString(), 2); sCTCP = bits[1]; // remove the trailing char1 if (sMessage.isEmpty()) { sCTCP = sCTCP.split(char1.toString(), 2)[0]; } else { sMessage = sMessage.split(char1.toString(), 2)[0]; } callDebugInfo(IRCParser.DEBUG_INFO, "CTCP: \"%s\" \"%s\"", sCTCP, sMessage); } } // Remove the leading : from the host. final String firstToken; if (token[0].charAt(0) == ':' && token[0].length() > 1) { firstToken = token[0].substring(1); } else { firstToken = token[0]; } final IRCClientInfo iClient = getClientInfo(token[0]); // Facilitate DMDIRC Formatter if (IRCParser.ALWAYS_UPDATECLIENT && iClient != null && iClient.getHostname().isEmpty()) { iClient.setUserBits(firstToken, false); } // Fire the appropriate callbacks. // OnChannel* Callbacks are fired if the target was a channel // OnPrivate* Callbacks are fired if the target was us // OnUnknown* Callbacks are fired if the target was neither of the above // Actions and CTCPs are send as PRIVMSGS // CTCPReplies are sent as Notices // Check if we have a Mode Prefix for channel targets. // Non-Channel messages still use the whole token, even if the first char // is a prefix. // CTCP and CTCPReplies that are aimed at a channel with a prefix are // handled as if the prefix wasn't used. This can be changed in the future // if desired. 
final char modePrefix = token[2].charAt(0); final boolean hasModePrefix = prefixModeManager.isPrefix(modePrefix); final String targetName = hasModePrefix ? token[2].substring(1) : token[2]; if (isValidChannelName(targetName)) { final IRCChannelInfo iChannel = getChannel(targetName); if (iChannel == null) { // callErrorInfo(new ParserError(ParserError.ERROR_WARNING, "Got message for channel ("+targetName+") that I am not on.", parser.getLastLine())); return; } final IRCChannelClientInfo iChannelClient = iChannel.getChannelClient(token[0], true); if ("PRIVMSG".equalsIgnoreCase(sParam)) { if (isAction) { callChannelAction(date, iChannel, iChannelClient, sMessage, firstToken); } else { if (isCTCP) { callChannelCTCP(date, iChannel, iChannelClient, sCTCP, sMessage, firstToken); } else if (hasModePrefix) { callChannelModeMessage(date, modePrefix, iChannel, iChannelClient, sMessage, firstToken); } else { callChannelMessage(date, iChannel, iChannelClient, sMessage, firstToken); } } } else if ("NOTICE".equalsIgnoreCase(sParam)) { if (isCTCP) { callChannelCTCPReply(date, iChannel, iChannelClient, sCTCP, sMessage, firstToken); } else if (hasModePrefix) { callChannelModeNotice(date, modePrefix, iChannel, iChannelClient, sMessage, firstToken); } else { callChannelNotice(date, iChannel, iChannelClient, sMessage, firstToken); } } } else if (parser.getStringConverter().equalsIgnoreCase(token[2], parser.getMyNickname())) { if ("PRIVMSG".equalsIgnoreCase(sParam)) { if (isAction) { callPrivateAction(date, sMessage, firstToken); } else { if (isCTCP) { callPrivateCTCP(date, sCTCP, sMessage, firstToken); } else { callPrivateMessage(date, sMessage, firstToken); } } } else if ("NOTICE".equalsIgnoreCase(sParam)) { if (isCTCP) { callPrivateCTCPReply(date, sCTCP, sMessage, firstToken); } else { if (firstToken.indexOf('@') == -1) { callServerNotice(date, sMessage, firstToken); } else { callPrivateNotice(date, sMessage, firstToken); } } } } else { callDebugInfo(IRCParser.DEBUG_INFO, "Message for Other (" + token[2] + ')'); if ("PRIVMSG".equalsIgnoreCase(sParam)) { if (isAction) { callUnknownAction(date, sMessage, token[2], firstToken); } else { if (isCTCP) { callUnknownCTCP(date, sCTCP, sMessage, token[2], firstToken); } else { callUnknownMessage(date, sMessage, token[2], firstToken); } } } else if ("NOTICE".equalsIgnoreCase(sParam)) { if (isCTCP) { callUnknownCTCPReply(date, sCTCP, sMessage, token[2], firstToken); } else { if (firstToken.indexOf('@') == -1) { callUnknownServerNotice(date, sMessage, token[2], firstToken); } else { callUnknownNotice(date, sMessage, token[2], firstToken); } } } } }
String sMessage; if (token[0].charAt(0) == ':') { sMessage = token[0].substring(1); } else { sMessage = token[0]; } try { if (parser.getIgnoreList().matches(sMessage) > -1) { return; } } catch (PatternSyntaxException pse) { final ParserError pe = new ParserError(ParserError.ERROR_WARNING + ParserError.ERROR_USER, STR + pse, parser.getLastLine()); pe.setException(pse); callErrorInfo(pe); } if (token.length < 3) { return; } if (token[0].indexOf('!') == -1 && STR.equalsIgnoreCase(token[1]) && "AUTH".equalsIgnoreCase(token[2])) { try { parser.getProcessingManager().process(date, STR, token); } catch (ProcessorNotFoundException e) { } return; } if (token.length < 4) { sMessage = STR STRSTRPRIVMSGSTRACTIONSTRSTRSTRCTCP: \"%s\" \"%s\"STRPRIVMSG".equalsIgnoreCase(sParam)) { if (isAction) { callChannelAction(date, iChannel, iChannelClient, sMessage, firstToken); } else { if (isCTCP) { callChannelCTCP(date, iChannel, iChannelClient, sCTCP, sMessage, firstToken); } else if (hasModePrefix) { callChannelModeMessage(date, modePrefix, iChannel, iChannelClient, sMessage, firstToken); } else { callChannelMessage(date, iChannel, iChannelClient, sMessage, firstToken); } } } else if (STR.equalsIgnoreCase(sParam)) { if (isCTCP) { callChannelCTCPReply(date, iChannel, iChannelClient, sCTCP, sMessage, firstToken); } else if (hasModePrefix) { callChannelModeNotice(date, modePrefix, iChannel, iChannelClient, sMessage, firstToken); } else { callChannelNotice(date, iChannel, iChannelClient, sMessage, firstToken); } } } else if (parser.getStringConverter().equalsIgnoreCase(token[2], parser.getMyNickname())) { if ("PRIVMSG".equalsIgnoreCase(sParam)) { if (isAction) { callPrivateAction(date, sMessage, firstToken); } else { if (isCTCP) { callPrivateCTCP(date, sCTCP, sMessage, firstToken); } else { callPrivateMessage(date, sMessage, firstToken); } } } else if (STR.equalsIgnoreCase(sParam)) { if (isCTCP) { callPrivateCTCPReply(date, sCTCP, sMessage, firstToken); } else { if (firstToken.indexOf('@') == -1) { callServerNotice(date, sMessage, firstToken); } else { callPrivateNotice(date, sMessage, firstToken); } } } } else { callDebugInfo(IRCParser.DEBUG_INFO, "Message for Other (STRPRIVMSG".equalsIgnoreCase(sParam)) { if (isAction) { callUnknownAction(date, sMessage, token[2], firstToken); } else { if (isCTCP) { callUnknownCTCP(date, sCTCP, sMessage, token[2], firstToken); } else { callUnknownMessage(date, sMessage, token[2], firstToken); } } } else if (STR.equalsIgnoreCase(sParam)) { if (isCTCP) { callUnknownCTCPReply(date, sCTCP, sMessage, token[2], firstToken); } else { if (firstToken.indexOf('@') == -1) { callUnknownServerNotice(date, sMessage, token[2], firstToken); } else { callUnknownNotice(date, sMessage, token[2], firstToken); } } } } }
/** * Process PRIVMSGs and NOTICEs. * This horrible thing handles PRIVMSGs and NOTICES<br> * This includes CTCPs and CTCPReplies<br> * It handles all 3 targets (Channel, Private, Unknown)<br> * Actions are handled here as well, separately from CTCPs.<br> * Each type has 5 Calls, making 15 callbacks handled here. * * @param date The LocalDateTime that this event occurred at. * @param sParam Type of line to process ("NOTICE", "PRIVMSG") * @param token IRCTokenised line to process */
Process PRIVMSGs and NOTICEs. This horrible thing handles PRIVMSGs and NOTICES This includes CTCPs and CTCPReplies It handles all 3 targets (Channel, Private, Unknown) Actions are handled here as well, separately from CTCPs. Each type has 5 Calls, making 15 callbacks handled here
process
{ "repo_name": "csmith/DMDirc-Parser", "path": "irc/src/main/java/com/dmdirc/parser/irc/processors/ProcessMessage.java", "license": "mit", "size": 24713 }
[ "com.dmdirc.parser.common.ParserError", "com.dmdirc.parser.irc.IRCParser", "com.dmdirc.parser.irc.ProcessorNotFoundException", "java.util.regex.PatternSyntaxException" ]
import com.dmdirc.parser.common.ParserError; import com.dmdirc.parser.irc.IRCParser; import com.dmdirc.parser.irc.ProcessorNotFoundException; import java.util.regex.PatternSyntaxException;
import com.dmdirc.parser.common.*; import com.dmdirc.parser.irc.*; import java.util.regex.*;
[ "com.dmdirc.parser", "java.util" ]
com.dmdirc.parser; java.util;
1,754,361
@Generated @Selector("showsPointsOfInterest") public native boolean showsPointsOfInterest();
@Selector(STR) native boolean function();
/** * Affects MKMapTypeStandard and MKMapTypeHybrid */
Affects MKMapTypeStandard and MKMapTypeHybrid
showsPointsOfInterest
{ "repo_name": "multi-os-engine/moe-core", "path": "moe.apple/moe.platform.ios/src/main/java/apple/mapkit/MKMapView.java", "license": "apache-2.0", "size": 34537 }
[ "org.moe.natj.objc.ann.Selector" ]
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.ann.*;
[ "org.moe.natj" ]
org.moe.natj;
629,524
Lexicon addLexicon(URI uri, String language);
Lexicon addLexicon(URI uri, String language);
/** * Add a (new blank) lexicon */
Add a (new blank) lexicon
addLexicon
{ "repo_name": "monnetproject/lemon.api", "path": "main/src/main/java/eu/monnetproject/lemon/LemonModel.java", "license": "bsd-3-clause", "size": 5044 }
[ "eu.monnetproject.lemon.model.Lexicon" ]
import eu.monnetproject.lemon.model.Lexicon;
import eu.monnetproject.lemon.model.*;
[ "eu.monnetproject.lemon" ]
eu.monnetproject.lemon;
1,457,416
private boolean ignoreNetworkDueToVpn(Network network) { return mVpnInPlace != null && !mVpnInPlace.equals(network); }
boolean function(Network network) { return mVpnInPlace != null && !mVpnInPlace.equals(network); }
/** * Should changes to network {@code network} be ignored due to a VPN being in place * and blocking direct access to {@code network}? * @param network Network to possibly consider ignoring changes to. */
Should changes to network network be ignored due to a VPN being in place and blocking direct access to network
ignoreNetworkDueToVpn
{ "repo_name": "scheib/chromium", "path": "net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java", "license": "bsd-3-clause", "size": 60035 }
[ "android.net.Network" ]
import android.net.Network;
import android.net.*;
[ "android.net" ]
android.net;
269,671
public void assignButton(String mappingName, int buttonId){ if (buttonId < 0 || buttonId >= buttonCount) throw new IllegalArgumentException(); inputManager.addMapping(mappingName, new JoyButtonTrigger(joyId, buttonId)); }
void function(String mappingName, int buttonId){ if (buttonId < 0 buttonId >= buttonCount) throw new IllegalArgumentException(); inputManager.addMapping(mappingName, new JoyButtonTrigger(joyId, buttonId)); }
/** * Assign the mapping name to receive events from the given button index * on the joystick. * * @param mappingName The mapping to receive joystick button events. * @param buttonId The button index. * * @see Joystick#getButtonCount() */
Assign the mapping name to receive events from the given button index on the joystick
assignButton
{ "repo_name": "rex-xxx/mt6572_x201", "path": "external/jmonkeyengine/engine/src/core/com/jme3/input/Joystick.java", "license": "gpl-2.0", "size": 3984 }
[ "com.jme3.input.controls.JoyButtonTrigger" ]
import com.jme3.input.controls.JoyButtonTrigger;
import com.jme3.input.controls.*;
[ "com.jme3.input" ]
com.jme3.input;
1,149,557
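A hedged jME3-style pairing of the button assignment with an action listener; the mapping name and the addListener wiring are assumptions beyond the method shown.

    // Map physical button 0 of this joystick to a logical "Jump" action.
    joystick.assignButton("Jump", 0);
    // Standard jME3 listener registration (assumed, not part of the record).
    inputManager.addListener(new ActionListener() {
        @Override
        public void onAction(String name, boolean isPressed, float tpf) {
            if ("Jump".equals(name) && isPressed) {
                // react to the button press
            }
        }
    }, "Jump");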
public InputStream newInputStream(int index) throws IOException { synchronized (DiskLruCache.this) { if (entry.currentEditor != this) { throw new IllegalStateException(); } if (!entry.readable) { return null; } return new FileInputStream(entry.getCleanFile(index)); } }
InputStream function(int index) throws IOException { synchronized (DiskLruCache.this) { if (entry.currentEditor != this) { throw new IllegalStateException(); } if (!entry.readable) { return null; } return new FileInputStream(entry.getCleanFile(index)); } }
/** * Returns an unbuffered input stream to read the last committed value, * or null if no value has been committed. */
Returns an unbuffered input stream to read the last committed value, or null if no value has been committed
newInputStream
{ "repo_name": "lovethisshit/android-squeezer", "path": "Squeezer/src/main/java/uk/org/ngo/squeezer/util/DiskLruCache.java", "license": "apache-2.0", "size": 34037 }
[ "java.io.FileInputStream", "java.io.IOException", "java.io.InputStream" ]
import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream;
import java.io.*;
[ "java.io" ]
java.io;
688,255
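A hedged sketch of reading back the committed value during an edit; obtaining the editor through DiskLruCache.edit(key) is assumed from the usual shape of this class.

    DiskLruCache.Editor editor = cache.edit("someKey");     // may return null if the entry is being edited elsewhere
    if (editor != null) {
        InputStream in = editor.newInputStream(0);          // index 0 of the entry's values
        if (in != null) {                                   // null means nothing has been committed yet
            try {
                // ... consume the last committed bytes ...
            } finally {
                in.close();
            }
        }
    }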
public final VoltTable clone(int extraBytes) { assert(verifyTableInvariants()); // share the immutable metadata if it's present for tests final VoltTable cloned = new VoltTable(m_extraMetadata); cloned.m_colCount = m_colCount; cloned.m_rowCount = 0; cloned.m_rowStart = m_rowStart; final int pos = m_buffer.position(); m_buffer.position(0); m_buffer.limit(m_rowStart); // the 100 is for extra safety cloned.m_buffer = ByteBuffer.allocate(m_rowStart + extraBytes + 100); cloned.m_buffer.put(m_buffer); m_buffer.limit(m_buffer.capacity()); m_buffer.position(pos); cloned.m_buffer.putInt(0); assert(verifyTableInvariants()); assert(cloned.verifyTableInvariants()); return cloned; }
final VoltTable function(int extraBytes) { assert(verifyTableInvariants()); final VoltTable cloned = new VoltTable(m_extraMetadata); cloned.m_colCount = m_colCount; cloned.m_rowCount = 0; cloned.m_rowStart = m_rowStart; final int pos = m_buffer.position(); m_buffer.position(0); m_buffer.limit(m_rowStart); cloned.m_buffer = ByteBuffer.allocate(m_rowStart + extraBytes + 100); cloned.m_buffer.put(m_buffer); m_buffer.limit(m_buffer.capacity()); m_buffer.position(pos); cloned.m_buffer.putInt(0); assert(verifyTableInvariants()); assert(cloned.verifyTableInvariants()); return cloned; }
/** * <p>Generates a duplicate of a table including the column schema. Only works * on tables that have no rows, have columns defined, and will not have columns added/deleted/modified * later. Useful as a way of creating template tables that can be cloned and then populated with * {@link VoltTableRow rows} repeatedly.</p> * * @param extraBytes The number of extra bytes to leave for to-be-added rows beyond the header. * @return A <tt>VoltTable</tt> with the same column schema as the original and enough space * for the specified number of {@link VoltTableRow rows} and strings. */
Generates a duplicate of a table including the column schema. Only works on tables that have no rows, have columns defined, and will not have columns added/deleted/modified later. Useful as a way of creating template tables that can be cloned and then populated with <code>VoltTableRow rows</code> repeatedly
clone
{ "repo_name": "paulmartel/voltdb", "path": "src/frontend/org/voltdb/VoltTable.java", "license": "agpl-3.0", "size": 73432 }
[ "java.nio.ByteBuffer" ]
import java.nio.ByteBuffer;
import java.nio.*;
[ "java.nio" ]
java.nio;
2,437,031
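A short sketch of the template-and-clone pattern the doc describes; the column definitions and addRow call reflect common VoltTable usage and are not taken from this record.

    // Build an empty template once, then clone it per batch and fill the clone.
    VoltTable template = new VoltTable(
            new VoltTable.ColumnInfo("ID", VoltType.BIGINT),
            new VoltTable.ColumnInfo("NAME", VoltType.STRING));
    VoltTable batch = template.clone(1024);   // leave ~1 KB of headroom for rows beyond the header
    batch.addRow(1L, "first");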
@Override public void compute() { if (!Double.isNaN(getValue())) { // since the data cannot change, avoid re-doing the calculations return; } iniCompute(); Map<U, List<Pair<I, Double>>> data = processDataAsRankedTestRelevance(); userMAPAtCutoff = new HashMap<Integer, Map<U, Double>>(); int nUsers = 0; for (Entry<U, List<Pair<I, Double>>> e : data.entrySet()) { U user = e.getKey(); List<Pair<I, Double>> sortedList = e.getValue(); // number of relevant items for this user double uRel = getNumberOfRelevantItems(user); double uMAP = 0.0; double uPrecision = 0.0; int rank = 0; for (Pair<I, Double> pair : sortedList) { double rel = pair.getSecond(); rank++; double itemPrecision = computeBinaryPrecision(rel); uPrecision += itemPrecision; if (itemPrecision > 0) { uMAP += uPrecision / rank; } // compute at a particular cutoff for (int at : getCutoffs()) { if (rank == at) { Map<U, Double> m = userMAPAtCutoff.get(at); if (m == null) { m = new HashMap<U, Double>(); userMAPAtCutoff.put(at, m); } m.put(user, uMAP / uRel); } } } // normalize by number of relevant items uMAP /= uRel; // assign the MAP of the whole list to those cutoffs larger than the list's size for (int at : getCutoffs()) { if (rank <= at) { Map<U, Double> m = userMAPAtCutoff.get(at); if (m == null) { m = new HashMap<U, Double>(); userMAPAtCutoff.put(at, m); } m.put(user, uMAP); } } if (!Double.isNaN(uMAP)) { setValue(getValue() + uMAP); getMetricPerUser().put(user, uMAP); nUsers++; } } setValue(getValue() / nUsers); }
void function() { if (!Double.isNaN(getValue())) { return; } iniCompute(); Map<U, List<Pair<I, Double>>> data = processDataAsRankedTestRelevance(); userMAPAtCutoff = new HashMap<Integer, Map<U, Double>>(); int nUsers = 0; for (Entry<U, List<Pair<I, Double>>> e : data.entrySet()) { U user = e.getKey(); List<Pair<I, Double>> sortedList = e.getValue(); double uRel = getNumberOfRelevantItems(user); double uMAP = 0.0; double uPrecision = 0.0; int rank = 0; for (Pair<I, Double> pair : sortedList) { double rel = pair.getSecond(); rank++; double itemPrecision = computeBinaryPrecision(rel); uPrecision += itemPrecision; if (itemPrecision > 0) { uMAP += uPrecision / rank; } for (int at : getCutoffs()) { if (rank == at) { Map<U, Double> m = userMAPAtCutoff.get(at); if (m == null) { m = new HashMap<U, Double>(); userMAPAtCutoff.put(at, m); } m.put(user, uMAP / uRel); } } } uMAP /= uRel; for (int at : getCutoffs()) { if (rank <= at) { Map<U, Double> m = userMAPAtCutoff.get(at); if (m == null) { m = new HashMap<U, Double>(); userMAPAtCutoff.put(at, m); } m.put(user, uMAP); } } if (!Double.isNaN(uMAP)) { setValue(getValue() + uMAP); getMetricPerUser().put(user, uMAP); nUsers++; } } setValue(getValue() / nUsers); }
/** * Computes the global MAP by first summing the AP (average precision) for * each user and then averaging by the number of users. */
Computes the global MAP by first summing the AP (average precision) for each user and then averaging by the number of users
compute
{ "repo_name": "recommenders/rival", "path": "rival-evaluate/src/main/java/net/recommenders/rival/evaluation/metric/ranking/MAP.java", "license": "apache-2.0", "size": 6590 }
[ "java.util.HashMap", "java.util.List", "java.util.Map", "net.recommenders.rival.evaluation.Pair" ]
import java.util.HashMap; import java.util.List; import java.util.Map; import net.recommenders.rival.evaluation.Pair;
import java.util.*; import net.recommenders.rival.evaluation.*;
[ "java.util", "net.recommenders.rival" ]
java.util; net.recommenders.rival;
223,907
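The per-user average precision that the loop above accumulates can be shown with a self-contained sketch, independent of the RiVal classes; the relevance pattern in the comment is invented purely to illustrate the arithmetic.

    // AP for one user: sum precision@k over ranks k where a relevant item appears,
    // divided by the user's total number of relevant items.
    static double averagePrecision(boolean[] relevantAtRank, int numRelevantForUser) {
        double hits = 0.0;
        double ap = 0.0;
        for (int k = 0; k < relevantAtRank.length; k++) {
            if (relevantAtRank[k]) {
                hits++;
                ap += hits / (k + 1);   // precision at this rank
            }
        }
        return ap / numRelevantForUser;
    }
    // Example: pattern {true, false, true} with 2 relevant items gives (1/1 + 2/3) / 2 ≈ 0.83.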
default void checkCanDenyTablePrivilege(ConnectorSecurityContext context, Privilege privilege, SchemaTableName tableName, TrinoPrincipal grantee) { denyDenyTablePrivilege(privilege.toString(), tableName.toString()); }
default void checkCanDenyTablePrivilege(ConnectorSecurityContext context, Privilege privilege, SchemaTableName tableName, TrinoPrincipal grantee) { denyDenyTablePrivilege(privilege.toString(), tableName.toString()); }
/** * Check if identity is allowed to deny to any other user the specified privilege on the specified table. * * @throws io.trino.spi.security.AccessDeniedException if not allowed */
Check if identity is allowed to deny to any other user the specified privilege on the specified table
checkCanDenyTablePrivilege
{ "repo_name": "ebyhr/presto", "path": "core/trino-spi/src/main/java/io/trino/spi/connector/ConnectorAccessControl.java", "license": "apache-2.0", "size": 24835 }
[ "io.trino.spi.security.AccessDeniedException", "io.trino.spi.security.Privilege", "io.trino.spi.security.TrinoPrincipal" ]
import io.trino.spi.security.AccessDeniedException; import io.trino.spi.security.Privilege; import io.trino.spi.security.TrinoPrincipal;
import io.trino.spi.security.*;
[ "io.trino.spi" ]
io.trino.spi;
2,653,442
private void showGlobalContextActionBar() { ActionBar actionBar = getActionBar(); actionBar.setDisplayShowTitleEnabled(true); actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); actionBar.setTitle(R.string.app_name); }
void function() { ActionBar actionBar = getActionBar(); actionBar.setDisplayShowTitleEnabled(true); actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); actionBar.setTitle(R.string.app_name); }
/** * Per the navigation drawer design guidelines, updates the action bar to show the global app * 'context', rather than just what's in the current screen. */
Per the navigation drawer design guidelines, updates the action bar to show the global app 'context', rather than just what's in the current screen
showGlobalContextActionBar
{ "repo_name": "drdrej/android-drafts-ui", "path": "app/src/main/java/com/touchableheroes/drafts/ui/NavigationDrawerFragment.java", "license": "gpl-3.0", "size": 10713 }
[ "android.support.v7.app.ActionBar" ]
import android.support.v7.app.ActionBar;
import android.support.v7.app.*;
[ "android.support" ]
android.support;
1,134,765
public VolumeArgs build() { Preconditions.checkNotNull(adminName); Preconditions.checkNotNull(ownerName); Preconditions.checkNotNull(volume); return new VolumeArgs(adminName, ownerName, volume, quotaInBytes, extendedAttributes); } }
VolumeArgs function() { Preconditions.checkNotNull(adminName); Preconditions.checkNotNull(ownerName); Preconditions.checkNotNull(volume); return new VolumeArgs(adminName, ownerName, volume, quotaInBytes, extendedAttributes); } }
/** * Constructs a CreateVolumeArgument. * * @return CreateVolumeArgs. */
Constructs a CreateVolumeArgument
build
{ "repo_name": "ChetnaChaudhari/hadoop", "path": "hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/VolumeArgs.java", "license": "apache-2.0", "size": 3605 }
[ "com.google.common.base.Preconditions" ]
import com.google.common.base.Preconditions;
import com.google.common.base.*;
[ "com.google.common" ]
com.google.common;
528,930
protected void setSize(Control control, int width, int height) { GridData gd= new GridData(GridData.END); gd.widthHint= width ; gd.heightHint= height; control.setLayoutData(gd); }
void function(Control control, int width, int height) { GridData gd= new GridData(GridData.END); gd.widthHint= width ; gd.heightHint= height; control.setLayoutData(gd); }
/** * Sets the size of a control. * * @param control the control for which to set the size * @param width the new width of the control * @param height the new height of the control */
Sets the size of a control
setSize
{ "repo_name": "elucash/eclipse-oxygen", "path": "org.eclipse.jdt.ui/src/org/eclipse/jdt/internal/ui/jarpackager/JarManifestWizardPage.java", "license": "epl-1.0", "size": 39332 }
[ "org.eclipse.swt.layout.GridData", "org.eclipse.swt.widgets.Control" ]
import org.eclipse.swt.layout.GridData; import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.layout.*; import org.eclipse.swt.widgets.*;
[ "org.eclipse.swt" ]
org.eclipse.swt;
128,362
void deleteCareerLevelTodo(CareerLevelTodo careerLevelTodo);
void deleteCareerLevelTodo(CareerLevelTodo careerLevelTodo);
/** * Deletes instance of CareerLevelTodo entity. */
Deletes instance of CareerLevelTodo entity
deleteCareerLevelTodo
{ "repo_name": "nsirbu/CareerLevels", "path": "src/main/java/com/inthergroup/internship/services/TodoService.java", "license": "mit", "size": 5539 }
[ "com.inthergroup.internship.models.CareerLevelTodo" ]
import com.inthergroup.internship.models.CareerLevelTodo;
import com.inthergroup.internship.models.*;
[ "com.inthergroup.internship" ]
com.inthergroup.internship;
1,528,739
public void setGraphicLocations(double latitude, double longitude) { if (location instanceof OMRaster) { OMRaster ras = (OMRaster) location; ras.setLat(latitude); ras.setLon(longitude); label.setLat(latitude); label.setLon(longitude); setHorizontalLabelBuffer((((OMRaster) location).getWidth() / 2) + SPACING); } }
void function(double latitude, double longitude) { if (location instanceof OMRaster) { OMRaster ras = (OMRaster) location; ras.setLat(latitude); ras.setLon(longitude); label.setLat(latitude); label.setLon(longitude); setHorizontalLabelBuffer((((OMRaster) location).getWidth() / 2) + SPACING); } }
/** * Given a new latitude/longitude, reposition the graphic and label. */
Given a new latitude/longitude, reposition the graphic and label
setGraphicLocations
{ "repo_name": "d2fn/passage", "path": "src/main/java/com/bbn/openmap/layer/location/URLRasterLocation.java", "license": "mit", "size": 11326 }
[ "com.bbn.openmap.omGraphics.OMRaster" ]
import com.bbn.openmap.omGraphics.OMRaster;
import com.bbn.openmap.*;
[ "com.bbn.openmap" ]
com.bbn.openmap;
2,085,751
public void reloadSdk() { // reload SDK mSdkManager.reloadSdk(mSdkLog); // reload AVDs if (mAvdManager != null) { try { mAvdManager.reloadAvds(mSdkLog); } catch (AndroidLocationException e) { // FIXME } } mLocalSdkParser.clearPackages(); // notify listeners broadcastOnSdkReload(); }
void function() { mSdkManager.reloadSdk(mSdkLog); if (mAvdManager != null) { try { mAvdManager.reloadAvds(mSdkLog); } catch (AndroidLocationException e) { } } mLocalSdkParser.clearPackages(); broadcastOnSdkReload(); }
/** * Reloads the SDK content (targets). * <p/> * This also reloads the AVDs in case their status changed. * <p/> * This does not notify the listeners ({@link ISdkChangeListener}). */
Reloads the SDK content (targets). This also reloads the AVDs in case their status changed. This does not notify the listeners (<code>ISdkChangeListener</code>)
reloadSdk
{ "repo_name": "rex-xxx/mt6572_x201", "path": "sdk/sdkmanager/libs/sdkuilib/src/com/android/sdkuilib/internal/repository/UpdaterData.java", "license": "gpl-2.0", "size": 45714 }
[ "com.android.prefs.AndroidLocation" ]
import com.android.prefs.AndroidLocation;
import com.android.prefs.*;
[ "com.android.prefs" ]
com.android.prefs;
880,970
public LinkedList<Patch> patch_make(String text1, LinkedList<Diff> diffs) { LinkedList<Patch> patches = new LinkedList<Patch>(); if (diffs.isEmpty()) { return patches; // Get rid of the null case. } Patch patch = new Patch(); int char_count1 = 0; // Number of characters into the text1 string. int char_count2 = 0; // Number of characters into the text2 string. // Start with text1 (prepatch_text) and apply the diffs until we arrive at // text2 (postpatch_text). We recreate the patches one by one to determine // context info. String prepatch_text = text1; String postpatch_text = text1; for (Diff aDiff : diffs) { if (patch.diffs.isEmpty() && aDiff.operation != Operation.EQUAL) { // A new patch starts here. patch.start1 = char_count1; patch.start2 = char_count2; } switch (aDiff.operation) { case INSERT: patch.diffs.add(aDiff); patch.length2 += aDiff.text.length(); postpatch_text = postpatch_text.substring(0, char_count2) + aDiff.text + postpatch_text.substring(char_count2); break; case DELETE: patch.length1 += aDiff.text.length(); patch.diffs.add(aDiff); postpatch_text = postpatch_text.substring(0, char_count2) + postpatch_text.substring(char_count2 + aDiff.text.length()); break; case EQUAL: if (aDiff.text.length() <= 2 * Patch_Margin && !patch.diffs.isEmpty() && aDiff != diffs.getLast()) { // Small equality inside a patch. patch.diffs.add(aDiff); patch.length1 += aDiff.text.length(); patch.length2 += aDiff.text.length(); } if (aDiff.text.length() >= 2 * Patch_Margin) { // Time for a new patch. if (!patch.diffs.isEmpty()) { patch_addContext(patch, prepatch_text); patches.add(patch); patch = new Patch(); // Unlike Unidiff, our patch lists have a rolling context. // http://code.google.com/p/google-diff-match-patch/wiki/Unidiff // Update prepatch text & pos to reflect the application of the // just completed patch. prepatch_text = postpatch_text; char_count1 = char_count2; } } break; } // Update the current character count. if (aDiff.operation != Operation.INSERT) { char_count1 += aDiff.text.length(); } if (aDiff.operation != Operation.DELETE) { char_count2 += aDiff.text.length(); } } // Pick up the leftover patch if not empty. if (!patch.diffs.isEmpty()) { patch_addContext(patch, prepatch_text); patches.add(patch); } return patches; }
LinkedList<Patch> function(String text1, LinkedList<Diff> diffs) { LinkedList<Patch> patches = new LinkedList<Patch>(); if (diffs.isEmpty()) { return patches; } Patch patch = new Patch(); int char_count1 = 0; int char_count2 = 0; String prepatch_text = text1; String postpatch_text = text1; for (Diff aDiff : diffs) { if (patch.diffs.isEmpty() && aDiff.operation != Operation.EQUAL) { patch.start1 = char_count1; patch.start2 = char_count2; } switch (aDiff.operation) { case INSERT: patch.diffs.add(aDiff); patch.length2 += aDiff.text.length(); postpatch_text = postpatch_text.substring(0, char_count2) + aDiff.text + postpatch_text.substring(char_count2); break; case DELETE: patch.length1 += aDiff.text.length(); patch.diffs.add(aDiff); postpatch_text = postpatch_text.substring(0, char_count2) + postpatch_text.substring(char_count2 + aDiff.text.length()); break; case EQUAL: if (aDiff.text.length() <= 2 * Patch_Margin && !patch.diffs.isEmpty() && aDiff != diffs.getLast()) { patch.diffs.add(aDiff); patch.length1 += aDiff.text.length(); patch.length2 += aDiff.text.length(); } if (aDiff.text.length() >= 2 * Patch_Margin) { if (!patch.diffs.isEmpty()) { patch_addContext(patch, prepatch_text); patches.add(patch); patch = new Patch(); prepatch_text = postpatch_text; char_count1 = char_count2; } } break; } if (aDiff.operation != Operation.INSERT) { char_count1 += aDiff.text.length(); } if (aDiff.operation != Operation.DELETE) { char_count2 += aDiff.text.length(); } } if (!patch.diffs.isEmpty()) { patch_addContext(patch, prepatch_text); patches.add(patch); } return patches; }
/** * Compute a list of patches to turn text1 into text2. * text2 is not provided, diffs are the delta between text1 and text2. * @param text1 Old text. * @param diffs Array of diff tuples for text1 to text2. * @return LinkedList of Patch objects. */
Compute a list of patches to turn text1 into text2. text2 is not provided, diffs are the delta between text1 and text2
patch_make
{ "repo_name": "visik7/webfilesys", "path": "src/main/webapp/WEB-INF/source/name/fraser/neil/plaintext/diff_match_patch.java", "license": "gpl-3.0", "size": 84995 }
[ "java.util.LinkedList" ]
import java.util.LinkedList;
import java.util.*;
[ "java.util" ]
java.util;
224,655
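An end-to-end sketch of driving this overload; diff_main and patch_toText are the usual companions in the diff-match-patch API and are assumed to be available on the same object.

    diff_match_patch dmp = new diff_match_patch();
    String text1 = "The quick brown fox";
    String text2 = "The quick red fox";
    LinkedList<diff_match_patch.Diff> diffs = dmp.diff_main(text1, text2);
    LinkedList<diff_match_patch.Patch> patches = dmp.patch_make(text1, diffs);  // the overload documented above
    String serialized = dmp.patch_toText(patches);   // textual form, e.g. for storage or transmission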
public List<Range> all(double value);
List<Range> function(double value);
/** * Returns all the ranges which contain this value * * @param value * @return All Ranges which contain this value. An empty list if no such * ranges are available. */
Returns all the ranges which contain this value
all
{ "repo_name": "pbloem/kit", "path": "src/main/java/nl/peterbloem/kit/ranges/RangeSet.java", "license": "mit", "size": 963 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,951,704
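A minimal usage sketch; how the RangeSet instance is populated is an assumption since only this query method appears in the record.

    // Query every stored range that contains the point 3.5.
    List<Range> containing = rangeSet.all(3.5);
    if (containing.isEmpty()) {
        // no range covers the value
    } else {
        for (Range r : containing) {
            // inspect r (e.g. its bounds)
        }
    }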
public static GraphOperationException reservedLabel(Label label){ return new GraphOperationException(RESERVED_WORD.getMessage(label.getValue())); } /** * Thrown when trying to add a {@link Schema.VertexProperty} to a {@link Concept} which does not accept that type * of {@link Schema.VertexProperty}
static GraphOperationException function(Label label){ return new GraphOperationException(RESERVED_WORD.getMessage(label.getValue())); } /** * Thrown when trying to add a {@link Schema.VertexProperty} to a {@link Concept} which does not accept that type * of {@link Schema.VertexProperty}
/** * Thrown when trying to create something using a label reserved by the system */
Thrown when trying to create something using a label reserved by the system
reservedLabel
{ "repo_name": "sheldonkhall/grakn", "path": "grakn-core/src/main/java/ai/grakn/exception/GraphOperationException.java", "license": "gpl-3.0", "size": 11233 }
[ "ai.grakn.concept.Concept", "ai.grakn.concept.Label", "ai.grakn.util.Schema" ]
import ai.grakn.concept.Concept; import ai.grakn.concept.Label; import ai.grakn.util.Schema;
import ai.grakn.concept.*; import ai.grakn.util.*;
[ "ai.grakn.concept", "ai.grakn.util" ]
ai.grakn.concept; ai.grakn.util;
1,130,102
public static BundleContext getContext() { return context; }
static BundleContext function() { return context; }
/** * Returns the bundle context of this bundle * * @return the bundle context */
Returns the bundle context of this bundle
getContext
{ "repo_name": "computergeek1507/openhab", "path": "bundles/binding/org.openhab.binding.fritzaha/src/main/java/org/openhab/binding/fritzaha/internal/FritzahaActivator.java", "license": "epl-1.0", "size": 1542 }
[ "org.osgi.framework.BundleContext" ]
import org.osgi.framework.BundleContext;
import org.osgi.framework.*;
[ "org.osgi.framework" ]
org.osgi.framework;
2,153,447
private int loadMiniBlockToOutput(int remaining, WritableColumnVector c, int rowId, IntegerOutputWriter outputWriter) throws IOException { // new block; read the block header if (remainingInBlock == 0) { readBlockHeader(); } // new miniblock, unpack the miniblock if (remainingInMiniBlock == 0) { unpackMiniBlock(); } // read values from miniblock int valuesRead = 0; for (int i = miniBlockSizeInValues - remainingInMiniBlock; i < miniBlockSizeInValues && valuesRead < remaining; i++) { // calculate values from deltas unpacked for current block long outValue = lastValueRead + minDeltaInCurrentBlock + unpackedValuesBuffer[i]; lastValueRead = outValue; outputWriter.write(c, rowId + valuesRead, outValue); remainingInBlock--; remainingInMiniBlock--; valuesRead++; } return valuesRead; }
int function(int remaining, WritableColumnVector c, int rowId, IntegerOutputWriter outputWriter) throws IOException { if (remainingInBlock == 0) { readBlockHeader(); } if (remainingInMiniBlock == 0) { unpackMiniBlock(); } int valuesRead = 0; for (int i = miniBlockSizeInValues - remainingInMiniBlock; i < miniBlockSizeInValues && valuesRead < remaining; i++) { long outValue = lastValueRead + minDeltaInCurrentBlock + unpackedValuesBuffer[i]; lastValueRead = outValue; outputWriter.write(c, rowId + valuesRead, outValue); remainingInBlock--; remainingInMiniBlock--; valuesRead++; } return valuesRead; }
/** * Read from a mini block. Read at most 'remaining' values into output. * * @return the number of values read into output */
Read from a mini block. Read at most 'remaining' values into output
loadMiniBlockToOutput
{ "repo_name": "holdenk/spark", "path": "sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedDeltaBinaryPackedReader.java", "license": "apache-2.0", "size": 10842 }
[ "java.io.IOException", "org.apache.spark.sql.execution.vectorized.WritableColumnVector" ]
import java.io.IOException; import org.apache.spark.sql.execution.vectorized.WritableColumnVector;
import java.io.*; import org.apache.spark.sql.execution.vectorized.*;
[ "java.io", "org.apache.spark" ]
java.io; org.apache.spark;
852,734
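The delta reconstruction inside the loop (last value plus the block's minimum delta plus the unpacked adjustment) can be illustrated standalone; the numbers below are invented, not taken from a real Parquet page.

    // Rebuild the values of one mini block from its unpacked deltas.
    long lastValueRead = 100L;              // last value emitted before this mini block
    long minDeltaInCurrentBlock = -2L;      // minimum delta from the block header
    long[] unpackedDeltas = {0L, 5L, 3L};   // bit-unpacked, non-negative adjustments
    long[] out = new long[unpackedDeltas.length];
    for (int i = 0; i < unpackedDeltas.length; i++) {
        long value = lastValueRead + minDeltaInCurrentBlock + unpackedDeltas[i];
        out[i] = value;
        lastValueRead = value;
    }
    // out == {98, 101, 102}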