method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
list
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
list
libraries_info
stringlengths
6
661
id
int64
0
2.92M
public void addStopToDatabase(TimeoStop stop) throws IllegalArgumentException, SQLiteConstraintException { if(stop != null) { // when we want to add a stop, we add the line first, then the // direction addLineToDatabase(stop.getLine()); // then, open the database, and start enumerating when we'll need to // add SQLiteDatabase db = databaseOpenHelper.getWritableDatabase(); ContentValues values = new ContentValues(); values.put("stop_id", stop.getId()); values.put("line_id", stop.getLine().getId()); values.put("dir_id", stop.getLine().getDirection().getId()); values.put("stop_name", stop.getName()); values.put("stop_ref", stop.getReference()); values.put("network_code", stop.getLine().getNetworkCode()); try { // insert the stop with the specified columns db.insertOrThrow("twi_stop", null, values); } finally { // we want to close the database afterwards either way db.close(); } } else { throw new IllegalArgumentException(); } }
void function(TimeoStop stop) throws IllegalArgumentException, SQLiteConstraintException { if(stop != null) { addLineToDatabase(stop.getLine()); SQLiteDatabase db = databaseOpenHelper.getWritableDatabase(); ContentValues values = new ContentValues(); values.put(STR, stop.getId()); values.put(STR, stop.getLine().getId()); values.put(STR, stop.getLine().getDirection().getId()); values.put(STR, stop.getName()); values.put(STR, stop.getReference()); values.put(STR, stop.getLine().getNetworkCode()); try { db.insertOrThrow(STR, null, values); } finally { db.close(); } } else { throw new IllegalArgumentException(); } }
/** * Adds a bus stop to the database. * * @param stop the bus stop to add * @throws IllegalArgumentException if the stop is not valid * @throws SQLiteConstraintException if a constraint failed */
Adds a bus stop to the database
addStopToDatabase
{ "repo_name": "dpiquet/Twistoast-android", "path": "twistoast/src/main/java/fr/outadev/twistoast/Database.java", "license": "gpl-3.0", "size": 14985 }
[ "android.content.ContentValues", "android.database.sqlite.SQLiteConstraintException", "android.database.sqlite.SQLiteDatabase", "fr.outadev.android.timeo.TimeoStop" ]
import android.content.ContentValues; import android.database.sqlite.SQLiteConstraintException; import android.database.sqlite.SQLiteDatabase; import fr.outadev.android.timeo.TimeoStop;
import android.content.*; import android.database.sqlite.*; import fr.outadev.android.timeo.*;
[ "android.content", "android.database", "fr.outadev.android" ]
android.content; android.database; fr.outadev.android;
1,228,401
Collection<String> getScopes();
Collection<String> getScopes();
/** * Get requested scopes requested at the time of issuing this code. * * @return requested scopes. */
Get requested scopes requested at the time of issuing this code
getScopes
{ "repo_name": "rrenomeron/cas", "path": "support/cas-server-support-oauth-api/src/main/java/org/apereo/cas/ticket/OAuthToken.java", "license": "apache-2.0", "size": 807 }
[ "java.util.Collection" ]
import java.util.Collection;
import java.util.*;
[ "java.util" ]
java.util;
1,811,319
public void writeEntries(JarFile jarFile) throws IOException { writeEntries(jarFile, EntryTransformer.NONE, UnpackHandler.NEVER, (name) -> false); }
void function(JarFile jarFile) throws IOException { writeEntries(jarFile, EntryTransformer.NONE, UnpackHandler.NEVER, (name) -> false); }
/** * Write all entries from the specified jar file. * @param jarFile the source jar file * @throws IOException if the entries cannot be written */
Write all entries from the specified jar file
writeEntries
{ "repo_name": "jxblum/spring-boot", "path": "spring-boot-project/spring-boot-tools/spring-boot-loader-tools/src/main/java/org/springframework/boot/loader/tools/AbstractJarWriter.java", "license": "apache-2.0", "size": 13643 }
[ "java.io.IOException", "java.util.jar.JarFile" ]
import java.io.IOException; import java.util.jar.JarFile;
import java.io.*; import java.util.jar.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,665,546
public static String showInputDialog(final Window owner, final String key, final String text, final Object... keyArguments) { return showInputDialog(owner, key, text, null, keyArguments); }
static String function(final Window owner, final String key, final String text, final Object... keyArguments) { return showInputDialog(owner, key, text, null, keyArguments); }
/** * This method will present a dialog to enter a text. This text will be returned if the user * confirmed the edit. Otherwise {@code null} is returned. The key will be used for the * properties gui.dialog.input.-key-.title, gui.dialog.input.-key-.message and * gui.dialog.input.-key-.icon * * @since 7.5.0 */
This method will present a dialog to enter a text. This text will be returned if the user confirmed the edit. Otherwise null is returned. The key will be used for the properties gui.dialog.input.-key-.title, gui.dialog.input.-key-.message and gui.dialog.input.-key-.icon
showInputDialog
{ "repo_name": "rapidminer/rapidminer-studio", "path": "src/main/java/com/rapidminer/gui/tools/SwingTools.java", "license": "agpl-3.0", "size": 93902 }
[ "java.awt.Window" ]
import java.awt.Window;
import java.awt.*;
[ "java.awt" ]
java.awt;
1,249,749
static BlocksMapUpdateInfo delete( FSNamesystem fsn, String src, boolean recursive, boolean logRetryCache) throws IOException { FSDirectory fsd = fsn.getFSDirectory(); FSPermissionChecker pc = fsd.getPermissionChecker(); byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src); src = fsd.resolvePath(pc, src, pathComponents); final INodesInPath iip = fsd.getINodesInPath4Write(src, false); if (!recursive && fsd.isNonEmptyDirectory(iip)) { throw new PathIsNotEmptyDirectoryException(src + " is non empty"); } if (fsd.isPermissionEnabled()) { fsd.checkPermission(pc, iip, false, null, FsAction.WRITE, null, FsAction.ALL, true); } return deleteInternal(fsn, src, iip, logRetryCache); }
static BlocksMapUpdateInfo delete( FSNamesystem fsn, String src, boolean recursive, boolean logRetryCache) throws IOException { FSDirectory fsd = fsn.getFSDirectory(); FSPermissionChecker pc = fsd.getPermissionChecker(); byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src); src = fsd.resolvePath(pc, src, pathComponents); final INodesInPath iip = fsd.getINodesInPath4Write(src, false); if (!recursive && fsd.isNonEmptyDirectory(iip)) { throw new PathIsNotEmptyDirectoryException(src + STR); } if (fsd.isPermissionEnabled()) { fsd.checkPermission(pc, iip, false, null, FsAction.WRITE, null, FsAction.ALL, true); } return deleteInternal(fsn, src, iip, logRetryCache); }
/** * Remove a file/directory from the namespace. * <p> * For large directories, deletion is incremental. The blocks under * the directory are collected and deleted a small number at a time holding * the {@link FSNamesystem} lock. * <p> * For small directory or file the deletion is done in one shot. * */
Remove a file/directory from the namespace. For large directories, deletion is incremental. The blocks under the directory are collected and deleted a small number at a time holding the <code>FSNamesystem</code> lock. For small directory or file the deletion is done in one shot
delete
{ "repo_name": "bysslord/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirDeleteOp.java", "license": "apache-2.0", "size": 9227 }
[ "java.io.IOException", "org.apache.hadoop.fs.PathIsNotEmptyDirectoryException", "org.apache.hadoop.fs.permission.FsAction", "org.apache.hadoop.hdfs.server.namenode.INode" ]
import java.io.IOException; import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.hdfs.server.namenode.INode;
import java.io.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.permission.*; import org.apache.hadoop.hdfs.server.namenode.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
980,212
public void startElement(java.lang.String uri, java.lang.String localName, java.lang.String qName, Attributes atts) { if (mTask!=null && mTask.isInterrupted()) { throw new RuntimeException("Interrupted!"); } //System.out.println("START: " + qName); if (qName.equals("Strains")) { try { if (atts.getValue("HasQualityData").equalsIgnoreCase("true")) { refSeq.hasQualityData = true; } } catch (NullPointerException e) { // ignore if there is no HasQualityData param } try { // this sets up the progress bar to count strains hich is not ideal, but // to correct this would require a change to the XML // (TODO:4 add number of reads to Strains XML header) mTask.setLengthOfTask(Integer.parseInt(atts.getValue("Size"))); } catch (NullPointerException e) { // ignore if there is no HasQualityData param } } else if (refSeq!=null) { if (qName.equals("Strain")) { currentStrain = new Strain(); index++; if (mTask!=null && mTask.getLengthOfTask()>0) { mTask.setCurrent(index); } currentStrain.setId(index); try { if (atts.getValue("Open").equalsIgnoreCase("false")) { currentStrain.toggleOpen(); } } catch (NullPointerException e) { // ignore if there is no Open param } } else if (currentStrain!=null) { if (qName.equals("Read")) { currentRead = new Read(); currentReadId = new Integer(atts.getValue("Id")); currentRead.setId(currentReadId.intValue()); currentRead.setName(atts.getValue("Name")); currentRead.setLength(Integer.parseInt(atts.getValue("Length"))); currentRead.setBadClone(atts.getValue("IsBadClone").equals("1")); if (currentRead.isRecombinant()!=(atts.getValue("IsRecombinant").equals("1"))) { currentRead.toggleRecombinant(); } // record the mate pair id for this read currentMatePairId = new Integer(atts.getValue("MatePairId")); refSeq.reads.put(currentReadId,currentRead); // The following was moved to endElement because it // needs alignment info if there is a mate pair involved //currentStrain.putRead(readId,currentRead); } else if (currentRead!=null) { if 
(qName.equals("Alignment")) { int start = Integer.parseInt(atts.getValue("Start")); int end = Integer.parseInt(atts.getValue("End")); boolean dir = atts.getValue("Dir").equals("1"); currentDiffs=new ArrayList<Difference>(); // a.setDiffs(currentDiffs); Alignment a = new Alignment( new SequenceSegment(refSeq,start,end), new SequenceSegment(currentRead,1,currentRead.getLength()), dir, currentDiffs); a.score = Integer.parseInt(atts.getValue("Score")); currentRead.setAlignment(a); // System.out.println(currentRead); // System.out.println(currentRead.getAlignment()); } else if (currentDiffs!=null) { if (qName.equals("Diff")) { String qual = atts.getValue("Quality"); if (qual==null) { currentDiffs.add( new Difference( // for historical reasons, XML label for the reference sequence's base nuber and nucleotide at a polymorphic site is "EntryPos" and "EntryBase" Integer.parseInt(atts.getValue("EntryPos")), atts.getValue("EntryBase").charAt(0), Integer.parseInt(atts.getValue("QueryPos")), atts.getValue("QueryBase").charAt(0) )); } else { refSeq.hasQualityData=true; currentDiffs.add( new QualifiedDifference( // for historical reasons, XML label for the reference sequence's base nuber and nucleotide at a polymorphic site is "EntryPos" and "EntryBase" Integer.parseInt(atts.getValue("EntryPos")), atts.getValue("EntryBase").charAt(0), Integer.parseInt(atts.getValue("QueryPos")), atts.getValue("QueryBase").charAt(0), Short.parseShort(qual) )); } } } } } } }
void function(java.lang.String uri, java.lang.String localName, java.lang.String qName, Attributes atts) { if (mTask!=null && mTask.isInterrupted()) { throw new RuntimeException(STR); } if (qName.equals(STR)) { try { if (atts.getValue(STR).equalsIgnoreCase("true")) { refSeq.hasQualityData = true; } } catch (NullPointerException e) { } try { mTask.setLengthOfTask(Integer.parseInt(atts.getValue("Size"))); } catch (NullPointerException e) { } } else if (refSeq!=null) { if (qName.equals(STR)) { currentStrain = new Strain(); index++; if (mTask!=null && mTask.getLengthOfTask()>0) { mTask.setCurrent(index); } currentStrain.setId(index); try { if (atts.getValue("Open").equalsIgnoreCase("false")) { currentStrain.toggleOpen(); } } catch (NullPointerException e) { } } else if (currentStrain!=null) { if (qName.equals("Read")) { currentRead = new Read(); currentReadId = new Integer(atts.getValue("Id")); currentRead.setId(currentReadId.intValue()); currentRead.setName(atts.getValue("Name")); currentRead.setLength(Integer.parseInt(atts.getValue(STR))); currentRead.setBadClone(atts.getValue(STR).equals("1")); if (currentRead.isRecombinant()!=(atts.getValue(STR).equals("1"))) { currentRead.toggleRecombinant(); } currentMatePairId = new Integer(atts.getValue(STR)); refSeq.reads.put(currentReadId,currentRead); } else if (currentRead!=null) { if (qName.equals(STR)) { int start = Integer.parseInt(atts.getValue("Start")); int end = Integer.parseInt(atts.getValue("End")); boolean dir = atts.getValue("Dir").equals("1"); currentDiffs=new ArrayList<Difference>(); Alignment a = new Alignment( new SequenceSegment(refSeq,start,end), new SequenceSegment(currentRead,1,currentRead.getLength()), dir, currentDiffs); a.score = Integer.parseInt(atts.getValue("Score")); currentRead.setAlignment(a); } else if (currentDiffs!=null) { if (qName.equals("Diff")) { String qual = atts.getValue(STR); if (qual==null) { currentDiffs.add( new Difference( Integer.parseInt(atts.getValue(STR)), 
atts.getValue(STR).charAt(0), Integer.parseInt(atts.getValue(STR)), atts.getValue(STR).charAt(0) )); } else { refSeq.hasQualityData=true; currentDiffs.add( new QualifiedDifference( Integer.parseInt(atts.getValue(STR)), atts.getValue(STR).charAt(0), Integer.parseInt(atts.getValue(STR)), atts.getValue(STR).charAt(0), Short.parseShort(qual) )); } } } } } } }
/** * Receive notification of the beginning of an element. */
Receive notification of the beginning of an element
startElement
{ "repo_name": "jmeppley/strainer", "path": "src/amd/strainer/file/StrainXMLHandler3.java", "license": "lgpl-3.0", "size": 9090 }
[ "java.util.ArrayList", "org.xml.sax.Attributes" ]
import java.util.ArrayList; import org.xml.sax.Attributes;
import java.util.*; import org.xml.sax.*;
[ "java.util", "org.xml.sax" ]
java.util; org.xml.sax;
1,908,443
public void closeStatement( Statement statement, boolean commitOrRollback ) { try { if( autoCommit() ) { Connection connection = statement.getConnection(); if( commitOrRollback ) connection.commit(); else connection.rollback(); } statement.close(); // TODO Shouldn't the statement be closed before commit or rollback? } catch( SQLException e ) { throw new SystemException( e ); } }
void function( Statement statement, boolean commitOrRollback ) { try { if( autoCommit() ) { Connection connection = statement.getConnection(); if( commitOrRollback ) connection.commit(); else connection.rollback(); } statement.close(); } catch( SQLException e ) { throw new SystemException( e ); } }
/** * Closes the given statement and commits or rollbacks if the command processor is in auto commit mode. * * @param statement The statement to close. * @param commitOrRollback If the command processor is in auto commit mode, this boolean indicates if commit or * rollback should be called on the statement's connection. If the command processor is not in auto commit * mode, this boolean is ignored. */
Closes the given statement and commits or rollbacks if the command processor is in auto commit mode
closeStatement
{ "repo_name": "roberth/solidbase-patch", "path": "src/solidbase/core/CommandProcessor.java", "license": "apache-2.0", "size": 17989 }
[ "java.sql.Connection", "java.sql.SQLException", "java.sql.Statement" ]
import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement;
import java.sql.*;
[ "java.sql" ]
java.sql;
1,509,515
public PcepObjectHeader getSrpObjHeader() { return this.srpObjHeader; }
PcepObjectHeader function() { return this.srpObjHeader; }
/** * Returns SRP object header. * * @return srpObjHeader */
Returns SRP object header
getSrpObjHeader
{ "repo_name": "rvhub/onos", "path": "pcep/pcepio/src/main/java/org/onosproject/pcepio/protocol/ver1/PcepSrpObjectVer1.java", "license": "apache-2.0", "size": 12103 }
[ "org.onosproject.pcepio.types.PcepObjectHeader" ]
import org.onosproject.pcepio.types.PcepObjectHeader;
import org.onosproject.pcepio.types.*;
[ "org.onosproject.pcepio" ]
org.onosproject.pcepio;
609,183
@RequestMapping(method = RequestMethod.POST) protected ModelAndView onSubmit(@ModelAttribute(COMMAND_NAME) final RegisteredService service, final BindingResult result, final ModelMap model, final HttpServletRequest request) throws Exception { updateModelMap(model, request); this.validator.validate(service, result); if (result.hasErrors()) { model.addAttribute("validationErrors", result.getAllErrors()); return render(request, model); } RegisteredService svcToUse = service; if (service.getServiceId().startsWith("^") && service instanceof RegisteredServiceImpl) { LOGGER.debug("Detected regular expression starting with ^"); final RegexRegisteredService regexService = new RegexRegisteredService(); regexService.copyFrom(service); svcToUse = regexService; } else if (!service.getServiceId().startsWith("^") && service instanceof RegexRegisteredService) { LOGGER.debug("Detected ant expression {}", service.getServiceId()); final RegisteredServiceImpl regexService = new RegisteredServiceImpl(); regexService.copyFrom(service); svcToUse = regexService; } this.servicesManager.save(svcToUse); LOGGER.info("Saved changes to service {}", svcToUse.getId()); final ModelAndView modelAndView = new ModelAndView(new RedirectView( "manage.html#" + svcToUse.getId(), true)); modelAndView.addObject("action", "add"); modelAndView.addObject("id", svcToUse.getId()); return modelAndView; }
@RequestMapping(method = RequestMethod.POST) ModelAndView function(@ModelAttribute(COMMAND_NAME) final RegisteredService service, final BindingResult result, final ModelMap model, final HttpServletRequest request) throws Exception { updateModelMap(model, request); this.validator.validate(service, result); if (result.hasErrors()) { model.addAttribute(STR, result.getAllErrors()); return render(request, model); } RegisteredService svcToUse = service; if (service.getServiceId().startsWith("^") && service instanceof RegisteredServiceImpl) { LOGGER.debug(STR); final RegexRegisteredService regexService = new RegexRegisteredService(); regexService.copyFrom(service); svcToUse = regexService; } else if (!service.getServiceId().startsWith("^") && service instanceof RegexRegisteredService) { LOGGER.debug(STR, service.getServiceId()); final RegisteredServiceImpl regexService = new RegisteredServiceImpl(); regexService.copyFrom(service); svcToUse = regexService; } this.servicesManager.save(svcToUse); LOGGER.info(STR, svcToUse.getId()); final ModelAndView modelAndView = new ModelAndView(new RedirectView( STR + svcToUse.getId(), true)); modelAndView.addObject(STR, "add"); modelAndView.addObject("id", svcToUse.getId()); return modelAndView; }
/** * Adds the service to the ServiceRegistry via the ServiceRegistryManager. * * @param service the service * @param result the binding result * @param model the page model * @param request the http request * @return the model and view * @throws Exception the exception */
Adds the service to the ServiceRegistry via the ServiceRegistryManager
onSubmit
{ "repo_name": "0be1/cas", "path": "cas-management-webapp/src/main/java/org/jasig/cas/services/web/RegisteredServiceSimpleFormController.java", "license": "apache-2.0", "size": 9462 }
[ "javax.servlet.http.HttpServletRequest", "org.jasig.cas.services.RegexRegisteredService", "org.jasig.cas.services.RegisteredService", "org.jasig.cas.services.RegisteredServiceImpl", "org.springframework.ui.ModelMap", "org.springframework.validation.BindingResult", "org.springframework.web.bind.annotation.ModelAttribute", "org.springframework.web.bind.annotation.RequestMapping", "org.springframework.web.bind.annotation.RequestMethod", "org.springframework.web.servlet.ModelAndView", "org.springframework.web.servlet.view.RedirectView" ]
import javax.servlet.http.HttpServletRequest; import org.jasig.cas.services.RegexRegisteredService; import org.jasig.cas.services.RegisteredService; import org.jasig.cas.services.RegisteredServiceImpl; import org.springframework.ui.ModelMap; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.view.RedirectView;
import javax.servlet.http.*; import org.jasig.cas.services.*; import org.springframework.ui.*; import org.springframework.validation.*; import org.springframework.web.bind.annotation.*; import org.springframework.web.servlet.*; import org.springframework.web.servlet.view.*;
[ "javax.servlet", "org.jasig.cas", "org.springframework.ui", "org.springframework.validation", "org.springframework.web" ]
javax.servlet; org.jasig.cas; org.springframework.ui; org.springframework.validation; org.springframework.web;
1,541,069
public void handleException(String msg, Exception e) throws RSSDAOException { log.error(msg, e); throw new RSSDAOException(msg, e); }
void function(String msg, Exception e) throws RSSDAOException { log.error(msg, e); throw new RSSDAOException(msg, e); }
/** * Log and throw a rss manager data access exception * @param msg high level exception message * @param e error * @throws RSSDAOException throw RSS DAO exception */
Log and throw a rss manager data access exception
handleException
{ "repo_name": "wso2/carbon-storage-management", "path": "components/rss-manager/org.wso2.carbon.rssmanager.core/src/main/java/org/wso2/carbon/rssmanager/core/dao/impl/DatabaseDAOImpl.java", "license": "apache-2.0", "size": 18974 }
[ "org.wso2.carbon.rssmanager.core.dao.exception.RSSDAOException" ]
import org.wso2.carbon.rssmanager.core.dao.exception.RSSDAOException;
import org.wso2.carbon.rssmanager.core.dao.exception.*;
[ "org.wso2.carbon" ]
org.wso2.carbon;
818,766
private void setOperationType(YdtContextOperationType ydtoperation, YangSchemaRegistry schemaRegistry) { if (ydtoperation == null) { return; } Object builderObject = builderOrBuiltObject.getBuilderObject(); Class<?> defaultBuilderClass = builderOrBuiltObject.yangBuilderClass; Class<?>[] intfClass = builderOrBuiltObject.yangDefaultClass .getInterfaces(); String setterName = YANG + intfClass[0].getSimpleName() + OP_TYPE; // Setting the value into YANG node operation type from ydtContext // operation type. try { Class<?> interfaceClass; interfaceClass = getModuleInterface(yangSchemaNode, schemaRegistry); Object operationType; Class<?>[] innerClasses = interfaceClass.getClasses(); for (Class<?> innerEnumClass : innerClasses) { if (innerEnumClass.getSimpleName().equals(ONOS_YANG_OP_TYPE)) { Method valueOfMethod = innerEnumClass .getDeclaredMethod(VALUE_OF, String.class); operationType = valueOfMethod.invoke(null, ydtoperation. toString()); Field operationTypeField = defaultBuilderClass .getDeclaredField(setterName); operationTypeField.setAccessible(true); operationTypeField.set(builderObject, operationType); break; } } } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException | IllegalArgumentException e) { log.error(E_SET_OP_TYPE_FAIL); throw new YobException(E_SET_OP_TYPE_FAIL); } catch (NoSuchFieldException e) { log.error(E_SET_OP_TYPE_FAIL); } }
void function(YdtContextOperationType ydtoperation, YangSchemaRegistry schemaRegistry) { if (ydtoperation == null) { return; } Object builderObject = builderOrBuiltObject.getBuilderObject(); Class<?> defaultBuilderClass = builderOrBuiltObject.yangBuilderClass; Class<?>[] intfClass = builderOrBuiltObject.yangDefaultClass .getInterfaces(); String setterName = YANG + intfClass[0].getSimpleName() + OP_TYPE; try { Class<?> interfaceClass; interfaceClass = getModuleInterface(yangSchemaNode, schemaRegistry); Object operationType; Class<?>[] innerClasses = interfaceClass.getClasses(); for (Class<?> innerEnumClass : innerClasses) { if (innerEnumClass.getSimpleName().equals(ONOS_YANG_OP_TYPE)) { Method valueOfMethod = innerEnumClass .getDeclaredMethod(VALUE_OF, String.class); operationType = valueOfMethod.invoke(null, ydtoperation. toString()); Field operationTypeField = defaultBuilderClass .getDeclaredField(setterName); operationTypeField.setAccessible(true); operationTypeField.set(builderObject, operationType); break; } } } catch (NoSuchMethodException InvocationTargetException IllegalAccessException IllegalArgumentException e) { log.error(E_SET_OP_TYPE_FAIL); throw new YobException(E_SET_OP_TYPE_FAIL); } catch (NoSuchFieldException e) { log.error(E_SET_OP_TYPE_FAIL); } }
/** * Set the operation type in the built object from the YDT node. * <p> * It needs to be invoked only for the workbench corresponding to the * schema YDT nodes, non schema node without the YDT node should not * invoke this, as it is not applicable to it. * * @param ydtoperation schema data tree node * @param schemaRegistry YANG schema registry */
Set the operation type in the built object from the YDT node. It needs to be invoked only for the workbench corresponding to the schema YDT nodes, non schema node without the YDT node should not invoke this, as it is not applicable to it
setOperationType
{ "repo_name": "kuujo/onos", "path": "apps/yms/app/src/main/java/org/onosproject/yms/app/yob/YobWorkBench.java", "license": "apache-2.0", "size": 19427 }
[ "java.lang.reflect.Field", "java.lang.reflect.InvocationTargetException", "java.lang.reflect.Method", "org.onosproject.yms.app.yob.YobUtils", "org.onosproject.yms.app.yob.exception.YobException", "org.onosproject.yms.app.ysr.YangSchemaRegistry", "org.onosproject.yms.ydt.YdtContextOperationType" ]
import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import org.onosproject.yms.app.yob.YobUtils; import org.onosproject.yms.app.yob.exception.YobException; import org.onosproject.yms.app.ysr.YangSchemaRegistry; import org.onosproject.yms.ydt.YdtContextOperationType;
import java.lang.reflect.*; import org.onosproject.yms.app.yob.*; import org.onosproject.yms.app.yob.exception.*; import org.onosproject.yms.app.ysr.*; import org.onosproject.yms.ydt.*;
[ "java.lang", "org.onosproject.yms" ]
java.lang; org.onosproject.yms;
1,921,367
public static Provider remove(SSLEngine engine) { return objects.remove(engine); } public interface Provider { }
static Provider function(SSLEngine engine) { return objects.remove(engine); } public interface Provider { }
/** * <p>Unregisters the given SSLEngine.</p> * * @param engine the engine to unregister * @return the provider registered with the engine * @see #put(SSLEngine, Provider) */
Unregisters the given SSLEngine
remove
{ "repo_name": "jamiepg1/jetty.project", "path": "jetty-npn/src/main/java/org/eclipse/jetty/npn/NextProtoNego.java", "license": "apache-2.0", "size": 7930 }
[ "javax.net.ssl.SSLEngine" ]
import javax.net.ssl.SSLEngine;
import javax.net.ssl.*;
[ "javax.net" ]
javax.net;
327,445
public Iterable<GroundOverlay> getMapsContainingPoint(float longitude, float latitude) { List<GroundOverlay> result = new ArrayList<GroundOverlay>(); for (GroundOverlay map : allMaps) { if (map.contains(longitude, latitude)) { result.add(map); } } return result; }
Iterable<GroundOverlay> function(float longitude, float latitude) { List<GroundOverlay> result = new ArrayList<GroundOverlay>(); for (GroundOverlay map : allMaps) { if (map.contains(longitude, latitude)) { result.add(map); } } return result; }
/** * Finds all maps that contain a location. This is slightly faster than * calling groupMapsByDistance(longitude, latitude) followed by * getLocalMaps(). Result is the same though. * * @param longitude of the location * @param latitude of the location * @return Iterable<GroundOverlay> of the maps that contain the location */
Finds all maps that contain a location. This is slightly faster than calling groupMapsByDistance(longitude, latitude) followed by getLocalMaps(). Result is the same though
getMapsContainingPoint
{ "repo_name": "patricks/custom-maps", "path": "android/src/com/custommapsapp/android/MapCatalog.java", "license": "apache-2.0", "size": 8768 }
[ "com.custommapsapp.android.kml.GroundOverlay", "java.util.ArrayList", "java.util.List" ]
import com.custommapsapp.android.kml.GroundOverlay; import java.util.ArrayList; import java.util.List;
import com.custommapsapp.android.kml.*; import java.util.*;
[ "com.custommapsapp.android", "java.util" ]
com.custommapsapp.android; java.util;
537,614
public Map<Segment, List<CoarseGrainDataMap>> getDataMaps(List<Segment> segments, List<PartitionSpec> partitionSpecs) throws IOException { Map<Segment, List<CoarseGrainDataMap>> dataMaps = new HashMap<>(); for (Segment segment : segments) { dataMaps.put(segment, (List<CoarseGrainDataMap>) this.getDataMaps(segment, partitionSpecs)); } return dataMaps; }
Map<Segment, List<CoarseGrainDataMap>> function(List<Segment> segments, List<PartitionSpec> partitionSpecs) throws IOException { Map<Segment, List<CoarseGrainDataMap>> dataMaps = new HashMap<>(); for (Segment segment : segments) { dataMaps.put(segment, (List<CoarseGrainDataMap>) this.getDataMaps(segment, partitionSpecs)); } return dataMaps; }
/** * Get the datamap for all segments with matched partitions. Load datamaps to cache, only if it * matches the partition. */
Get the datamap for all segments with matched partitions. Load datamaps to cache, only if it matches the partition
getDataMaps
{ "repo_name": "jackylk/incubator-carbondata", "path": "core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapFactory.java", "license": "apache-2.0", "size": 7481 }
[ "java.io.IOException", "java.util.HashMap", "java.util.List", "java.util.Map", "org.apache.carbondata.core.datamap.Segment", "org.apache.carbondata.core.datamap.dev.cgdatamap.CoarseGrainDataMap", "org.apache.carbondata.core.indexstore.PartitionSpec" ]
import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.carbondata.core.datamap.Segment; import org.apache.carbondata.core.datamap.dev.cgdatamap.CoarseGrainDataMap; import org.apache.carbondata.core.indexstore.PartitionSpec;
import java.io.*; import java.util.*; import org.apache.carbondata.core.datamap.*; import org.apache.carbondata.core.datamap.dev.cgdatamap.*; import org.apache.carbondata.core.indexstore.*;
[ "java.io", "java.util", "org.apache.carbondata" ]
java.io; java.util; org.apache.carbondata;
680,676
public void testSearchEngineFindForwardImpl(boolean markAll) throws BadLocationException { textArea.setText(text); SearchContext context = new SearchContext(); context.setMarkAll(markAll); // Search for "chuck", ignoring case. context.setSearchFor("chuck"); int markedCount = markAll ? 4 : 0; boolean found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 26, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(26, 31), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 32, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(32, 37), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 48, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(48, 53), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 60, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(60, 65), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); // Search for "Chuck", matching case. context.setSearchFor("Chuck"); markedCount = markAll ? 1 : 0; context.setMatchCase(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("Chuck", 26, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(26, 31), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); // Search for "chuck", ignoring case, whole word context.setSearchFor("chuck"); markedCount = markAll ? 
2 : 0; context.setMatchCase(false); context.setWholeWord(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 32, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(32, 37), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 60, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(60, 65), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); // Search for "wood", matching case, whole word context.setSearchFor("wood"); markedCount = markAll ? 1 : 0; context.setMatchCase(true); context.setWholeWord(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("wood", 9, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(9, 13), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); // Search for ".ould", regex, ignoring case context.setSearchFor(".ould"); markedCount = markAll ? 2 : 0; context.setMatchCase(false); context.setWholeWord(false); context.setRegularExpression(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("wOuld", 14, true); assertResult(new SearchResult(new DocumentRange(14, 19), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("could", 54, true); assertResult(new SearchResult(new DocumentRange(54, 59), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); // Search for ".ould", regex, matching case context.setSearchFor(".ould"); markedCount = markAll ? 
1 : 0; context.setMatchCase(true); context.setWholeWord(false); context.setRegularExpression(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("could", 54, true); assertResult(new SearchResult(new DocumentRange(54, 59), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); // Search for "[cd]huck", regex, ignoring case, whole word context.setSearchFor("[cd]hUCk"); markedCount = markAll ? 2 : 0; context.setMatchCase(false); context.setWholeWord(true); context.setRegularExpression(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 32, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(32, 37), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 60, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(60, 65), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); // Search for "[cd]huck", regex, matching case, whole word context.setSearchFor("[cd]huck"); markedCount = markAll ? 1 : 0; context.setMatchCase(true); context.setWholeWord(true); context.setRegularExpression(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 60, true); assertResult(new SearchResult(new DocumentRange(60, 65), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); }
void function(boolean markAll) throws BadLocationException { textArea.setText(text); SearchContext context = new SearchContext(); context.setMarkAll(markAll); context.setSearchFor("chuck"); int markedCount = markAll ? 4 : 0; boolean found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 26, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(26, 31), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 32, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(32, 37), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 48, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(48, 53), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 60, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(60, 65), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); context.setSearchFor("Chuck"); markedCount = markAll ? 1 : 0; context.setMatchCase(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("Chuck", 26, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(26, 31), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); context.setSearchFor("chuck"); markedCount = markAll ? 
2 : 0; context.setMatchCase(false); context.setWholeWord(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 32, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(32, 37), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 60, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(60, 65), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); context.setSearchFor("wood"); markedCount = markAll ? 1 : 0; context.setMatchCase(true); context.setWholeWord(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("wood", 9, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(9, 13), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); context.setSearchFor(".ould"); markedCount = markAll ? 2 : 0; context.setMatchCase(false); context.setWholeWord(false); context.setRegularExpression(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("wOuld", 14, true); assertResult(new SearchResult(new DocumentRange(14, 19), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("could", 54, true); assertResult(new SearchResult(new DocumentRange(54, 59), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); context.setSearchFor(".ould"); markedCount = markAll ? 
1 : 0; context.setMatchCase(true); context.setWholeWord(false); context.setRegularExpression(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("could", 54, true); assertResult(new SearchResult(new DocumentRange(54, 59), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); context.setSearchFor(STR); markedCount = markAll ? 2 : 0; context.setMatchCase(false); context.setWholeWord(true); context.setRegularExpression(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 32, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(32, 37), 1, markedCount)); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 60, context.getMatchCase()); assertResult(new SearchResult(new DocumentRange(60, 65), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); context.setSearchFor(STR); markedCount = markAll ? 1 : 0; context.setMatchCase(true); context.setWholeWord(true); context.setRegularExpression(true); textArea.setCaretPosition(0); found = findImpl(context); assertEquals(true, found); assertSelected("chuck", 60, true); assertResult(new SearchResult(new DocumentRange(60, 65), 1, markedCount)); found = findImpl(context); assertEquals(false, found); assertResult(new SearchResult(null, 0, markedCount)); }
/** * Tests <code>SearchEngine.find()</code> when searching forward. * * @param markAll Whether "mark all" should be enabled during the test. */
Tests <code>SearchEngine.find()</code> when searching forward
testSearchEngineFindForwardImpl
{ "repo_name": "reqT/reqT-syntax", "path": "test/org/fife/ui/rtextarea/SearchEngineTest.java", "license": "bsd-3-clause", "size": 30306 }
[ "javax.swing.text.BadLocationException", "org.fife.ui.rsyntaxtextarea.DocumentRange" ]
import javax.swing.text.BadLocationException; import org.fife.ui.rsyntaxtextarea.DocumentRange;
import javax.swing.text.*; import org.fife.ui.rsyntaxtextarea.*;
[ "javax.swing", "org.fife.ui" ]
javax.swing; org.fife.ui;
2,852,839
/**
 * Retrieves every match of the pattern consistent with the supplied
 * parameter bindings.
 *
 * @param pSch the fixed value of pattern parameter Sch, or null if unbound
 * @return the matches represented as SchoolsMatch objects
 */
public Collection<SchoolsMatch> getAllMatches(final School pSch) {
    final Object[] bindings = new Object[]{pSch};
    return rawGetAllMatches(bindings);
}
Collection<SchoolsMatch> function(final School pSch) { return rawGetAllMatches(new Object[]{pSch}); }
/** * Returns the set of all matches of the pattern that conform to the given fixed values of some parameters. * @param pSch the fixed value of pattern parameter Sch, or null if not bound. * @return matches represented as a SchoolsMatch object. * */
Returns the set of all matches of the pattern that conform to the given fixed values of some parameters
getAllMatches
{ "repo_name": "FTSRG/mondo-collab-framework", "path": "archive/mondo-property-based-locking/org.mondo.collaboration.client/src-gen/org/mondo/collaboration/client/incquery/SchoolsMatcher.java", "license": "epl-1.0", "size": 9526 }
[ "java.util.Collection", "org.mondo.collaboration.client.incquery.SchoolsMatch" ]
import java.util.Collection; import org.mondo.collaboration.client.incquery.SchoolsMatch;
import java.util.*; import org.mondo.collaboration.client.incquery.*;
[ "java.util", "org.mondo.collaboration" ]
java.util; org.mondo.collaboration;
623,671
protected void setPropertyValue(String nameSpace, String propertyName, String[] values) { if (model != null && resource != null) { Property p = model.createProperty(nameSpace, propertyName); model.removeAll(this.resource, p, null); if (values != null) { for (int i = 0; i < values.length; i++) { if (values[i] != null) { String valueUri = PRE_DEFINED_VALUE_URI.get(values[i]); if (valueUri != null) { // this is a pre-defined "special" SPDX value Resource valueResource = this.model.createResource(valueUri); this.resource.addProperty(p, valueResource); } else { this.resource.addProperty(p, values[i]); } } } } } }
void function(String nameSpace, String propertyName, String[] values) { if (model != null && resource != null) { Property p = model.createProperty(nameSpace, propertyName); model.removeAll(this.resource, p, null); if (values != null) { for (int i = 0; i < values.length; i++) { if (values[i] != null) { String valueUri = PRE_DEFINED_VALUE_URI.get(values[i]); if (valueUri != null) { Resource valueResource = this.model.createResource(valueUri); this.resource.addProperty(p, valueResource); } else { this.resource.addProperty(p, values[i]); } } } } } }
/** * Set property values for this resource. Clears any existing values. * If a string matches one of the SPDX pre-defined string values, the URI * for that value is stored. Otherwise, it is stored as a literal value. * @param nameSpace RDF Namespace for the property * @param propertyName RDF Property Name (the RDF predicate local name) * @param values Values to associate to this resource */
Set a property values for this resource. Clears any existing resource. If the string matches one of the SPDX pre-defined string values, the URI for that value is stored. Otherwise, it is stored as a literal value
setPropertyValue
{ "repo_name": "romeara/spdx-tools", "path": "src/org/spdx/rdfparser/model/RdfModelObject.java", "license": "apache-2.0", "size": 37713 }
[ "com.hp.hpl.jena.rdf.model.Property", "com.hp.hpl.jena.rdf.model.Resource" ]
import com.hp.hpl.jena.rdf.model.Property; import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.*;
[ "com.hp.hpl" ]
com.hp.hpl;
1,271,794
/**
 * Returns {@code value} itself: since this user type is immutable, sharing
 * the same instance is a safe deep copy (per Hibernate's UserType contract).
 *
 * @param value the value to copy (may be null)
 * @return the argument, unchanged
 */
@MethodContract(post = @Expression("_value")) public final Object deepCopy(Object value) throws HibernateException { return value; }
@MethodContract(post = @Expression(STR)) final Object function(Object value) throws HibernateException { return value; }
/** * Returns {@code value}, because this is an immutable value type. */
Returns value, because this is an immutable value type
deepCopy
{ "repo_name": "jandppw/ppwcode-recovered-from-google-code", "path": "java/vernacular/value/trunk/src/main/java/org/ppwcode/vernacular/value_III/hibernate3/AbstractImmutableValueCompositeUserType.java", "license": "apache-2.0", "size": 1965 }
[ "org.hibernate.HibernateException", "org.toryt.annotations_I.Expression", "org.toryt.annotations_I.MethodContract" ]
import org.hibernate.HibernateException; import org.toryt.annotations_I.Expression; import org.toryt.annotations_I.MethodContract;
import org.hibernate.*; import org.toryt.*;
[ "org.hibernate", "org.toryt" ]
org.hibernate; org.toryt;
598,409
/**
 * Deserializes the given byte array back into an object of the expected type.
 *
 * @param <T>   the object's expected type
 * @param bytes the serialized form
 * @param t     the expected type's {@code Class}
 * @return the deserialized object
 * @throws NullPointerException if an argument is {@code null}
 * @throws DecodingException   if deserialization fails
 * @throws ClassCastException  if the result is not an instance of {@code t}
 */
public static <T extends Serializable> T decode(byte[] bytes, Class<T> t) {
    Parameters.checkNotNull(t);
    Parameters.checkNotNull(bytes);
    ByteArrayInputStream source = new ByteArrayInputStream(bytes);
    ObjectInputStream objectIn = null;
    try {
        objectIn = new ObjectInputStream(source);
        Object decoded = objectIn.readObject();
        return Parameters.checkType(decoded, t);
    } catch (IOException ex) {
        throw new DecodingException(ex);
    } catch (ClassNotFoundException ex) {
        throw new DecodingException(ex);
    } finally {
        // Close the stream whether decoding succeeded or not.
        IO.close(objectIn);
    }
}

/** Signals that serializing an object failed. */
public static final class EncodingException extends RuntimeException {
    private static final long serialVersionUID = 27396917846938764L;

    private EncodingException(Throwable cause) {
        super(cause);
    }
}

/** Signals that deserializing a byte stream failed. */
public static final class DecodingException extends RuntimeException {
    private static final long serialVersionUID = 39875873692107851L;

    private DecodingException(Throwable cause) {
        super(cause);
    }
}

/** Static utility holder - not instantiable. */
private ObjectCodec() {
}
static <T extends Serializable> T function(byte[] bytes, Class<T> t) { Parameters.checkNotNull(t); Parameters.checkNotNull(bytes); ObjectInputStream ois = null; ByteArrayInputStream in = new ByteArrayInputStream(bytes); try { ois = new ObjectInputStream(in); return Parameters.checkType(ois.readObject(), t); } catch (IOException ex) { throw new DecodingException(ex); } catch (ClassNotFoundException ex) { throw new DecodingException(ex); } finally { IO.close(ois); } } public static final class EncodingException extends RuntimeException { private static final long serialVersionUID = 27396917846938764L; private EncodingException(Throwable cause) { super(cause); } } public static final class DecodingException extends RuntimeException { private static final long serialVersionUID = 39875873692107851L; private DecodingException(Throwable cause) { super(cause); } } private ObjectCodec() { }
/** * Deserializes the given array of {@code byte}s into an object. * * @param <T> the object's expected type. * @param bytes the object's encoding. * @param t the object's expected type's {@code Class}. * * @return the deserialized object. * * @throws NullPointerException if one of the arguments is {@code null}. * @throws DecodingException if the deserialization fails. * @throws ClassCastException if the deserialized object is not an * instance of the expected class. */
Deserializes the given array of bytes into an object
decode
{ "repo_name": "kocakosm/pitaya", "path": "src/org/kocakosm/pitaya/io/ObjectCodec.java", "license": "lgpl-3.0", "size": 4275 }
[ "java.io.ByteArrayInputStream", "java.io.IOException", "java.io.ObjectInputStream", "java.io.Serializable", "org.kocakosm.pitaya.util.Parameters" ]
import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; import org.kocakosm.pitaya.util.Parameters;
import java.io.*; import org.kocakosm.pitaya.util.*;
[ "java.io", "org.kocakosm.pitaya" ]
java.io; org.kocakosm.pitaya;
1,557,321
/**
 * Executes CP210x_GetDeviceManufacturerString of the CP210xManufacturing
 * library and returns the manufacturer string from the device's String
 * Descriptor.
 *
 * @param handle handle of the device
 * @return manufacturer of the device
 * @throws SerialComException if the native call yields no result
 */
public String getDeviceManufacturerString(long handle) throws SerialComException {
    final String manufacturer = mSerialComCP210xManufacturingJNIBridge.getDeviceManufacturerString(handle);
    if (manufacturer != null) {
        return manufacturer;
    }
    throw new SerialComException("Could not get the manufacturer string. Please retry !");
}
String function(long handle) throws SerialComException { String ret = mSerialComCP210xManufacturingJNIBridge.getDeviceManufacturerString(handle); if(ret == null) { throw new SerialComException(STR); } return ret; }
/** * <p>Executes CP210x_GetDeviceManufacturerString function of CP210xManufacturing library.</p> * * <p>Returns the manufacturer string of the String Descriptor of a CP210x device.</p> * * @param handle of the device. * @return manufacturer of the device. * @throws SerialComException if an I/O error occurs. */
Executes CP210x_GetDeviceManufacturerString function of CP210xManufacturing library. Returns the manufacturer string of the String Descriptor of a CP210x device
getDeviceManufacturerString
{ "repo_name": "RishiGupta12/serial-communication-manager", "path": "modules/serial/src/com/serialpundit/serial/vendor/SerialComSLabsCP210xManufacturing.java", "license": "agpl-3.0", "size": 69410 }
[ "com.serialpundit.core.SerialComException" ]
import com.serialpundit.core.SerialComException;
import com.serialpundit.core.*;
[ "com.serialpundit.core" ]
com.serialpundit.core;
1,511,689
/**
 * Searches for abstract TOSCA types in the catalog.
 *
 * @param query   the text query
 * @param from    index of the first result (paging offset)
 * @param size    maximum number of results to return
 * @param filters facet filters, mapping field name to accepted values
 * @return a faceted search result providing both result data and facets
 */
@Overridable FacetedSearchResult search(String query, int from, int size, Map<String, String[]> filters);
FacetedSearchResult search(String query, int from, int size, Map<String, String[]> filters);
/** * Search for Abstract tosca types in the catalog. * * @param query The text query. * @param from from index. * @param size The size of the query. * @param filters Filters. * @return A faceted search result that provides both result data and facets. */
Search for Abstract tosca types in the catalog
search
{ "repo_name": "broly-git/alien4cloud", "path": "alien4cloud-core/src/main/java/org/alien4cloud/tosca/catalog/index/ICsarService.java", "license": "apache-2.0", "size": 5145 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
1,808,508
/**
 * Fourth test for the diff method: the change statistics must track
 * modifications, deletions and additions as the current list diverges
 * from the previous one.
 */
@Test
public void whenTheListChangesThenChangeStatisticsChanges() {
    List<Analize.User> before = new ArrayList<>();
    before.add(new Analize.User(1, "Ivan"));
    before.add(new Analize.User(2, "Ira"));
    before.add(new Analize.User(3, "Sergey"));
    List<Analize.User> after = new ArrayList<>(before);
    Analize analize = new Analize();

    // Identical lists: no changes at all.
    Analize.Info info = analize.diff(before, after);
    assertThat(info.getNewUsers(), is(0));
    assertThat(info.getModifiedUsers(), is(0));
    assertThat(info.getDeletedUsers(), is(0));

    // Same id, new name: counts as one modification.
    after.set(0, new Analize.User(1, "Sveta"));
    info = analize.diff(before, after);
    assertThat(info.getNewUsers(), is(0));
    assertThat(info.getModifiedUsers(), is(1));
    assertThat(info.getDeletedUsers(), is(0));

    // Dropping a user adds one deletion.
    after.remove(2);
    info = analize.diff(before, after);
    assertThat(info.getNewUsers(), is(0));
    assertThat(info.getModifiedUsers(), is(1));
    assertThat(info.getDeletedUsers(), is(1));

    // A brand-new user adds one addition.
    after.add(new Analize.User(4, "Gleb"));
    info = analize.diff(before, after);
    assertThat(info.getNewUsers(), is(1));
    assertThat(info.getModifiedUsers(), is(1));
    assertThat(info.getDeletedUsers(), is(1));
}
void function() { List<Analize.User> previous = new ArrayList<>(); previous.add(new Analize.User(1, "Ivan")); previous.add(new Analize.User(2, "Ira")); previous.add(new Analize.User(3, STR)); List<Analize.User> current = new ArrayList<>(previous); Analize analize = new Analize(); Analize.Info info = analize.diff(previous, current); assertThat(info.getNewUsers(), is(0)); assertThat(info.getModifiedUsers(), is(0)); assertThat(info.getDeletedUsers(), is(0)); current.set(0, new Analize.User(1, "Sveta")); info = analize.diff(previous, current); assertThat(info.getNewUsers(), is(0)); assertThat(info.getModifiedUsers(), is(1)); assertThat(info.getDeletedUsers(), is(0)); current.remove(2); info = analize.diff(previous, current); assertThat(info.getNewUsers(), is(0)); assertThat(info.getModifiedUsers(), is(1)); assertThat(info.getDeletedUsers(), is(1)); current.add(new Analize.User(4, "Gleb")); info = analize.diff(previous, current); assertThat(info.getNewUsers(), is(1)); assertThat(info.getModifiedUsers(), is(1)); assertThat(info.getDeletedUsers(), is(1)); }
/** * Fourth test for the diff method. */
Fourth test for the diff method
whenTheListChangesThenChangeStatisticsChanges
{ "repo_name": "IvanBelyaev/ibelyaev", "path": "chapter_004/src/test/java/ru/job4j/statistics/AnalizeTest.java", "license": "apache-2.0", "size": 3928 }
[ "java.util.ArrayList", "java.util.List", "org.hamcrest.Matchers", "org.junit.Assert" ]
import java.util.ArrayList; import java.util.List; import org.hamcrest.Matchers; import org.junit.Assert;
import java.util.*; import org.hamcrest.*; import org.junit.*;
[ "java.util", "org.hamcrest", "org.junit" ]
java.util; org.hamcrest; org.junit;
1,677,952
/**
 * Returns the child tree node whose primary key equals the given index,
 * by instantiating a fresh entity of this class and looking it up.
 * Returns null (after logging to System.err) if lookup or instantiation fails.
 */
public ICTreeNode getChildAtIndex(int childIndex) {
    try {
        GenericEntity child = (GenericEntity) this.getClass().newInstance();
        child.findByPrimaryKey(childIndex);
        return (TreeableEntity) child;
    } catch (Exception e) {
        System.err.println("There was an error in com.idega.data.TreeableEntityBMPBean.getChildAtIndex() " + e.getMessage());
        e.printStackTrace(System.err);
        return null;
    }
}
ICTreeNode function(int childIndex) { try { GenericEntity entity = (GenericEntity)this.getClass().newInstance(); entity.findByPrimaryKey(childIndex); return (TreeableEntity)entity; } catch (Exception e) { System.err.println(STR + e.getMessage()); e.printStackTrace(System.err); return null; } }
/** * Returns the child TreeNode at index childIndex. */
Returns the child TreeNode at index childIndex
getChildAtIndex
{ "repo_name": "idega/platform2", "path": "src/com/idega/data/TreeableEntityBMPBean.java", "license": "gpl-3.0", "size": 8810 }
[ "com.idega.core.data.ICTreeNode" ]
import com.idega.core.data.ICTreeNode;
import com.idega.core.data.*;
[ "com.idega.core" ]
com.idega.core;
234,140
/**
 * Determines the participant-set type from the copyTo attribute: the type is
 * that of the first participant set named by copyTo that yields one.
 *
 * @param dataObject the data object to resolve the type for
 * @return the type name, or null if it could not be determined
 */
private String getTypeFromCopyTo(ParticipantSetDataObject dataObject) {
    if (dataObject.getCopyTo() == null) {
        return null;
    }
    List<ParticipantSetDataObject> candidates =
            this.diagram.getParticipantSetsWithName(dataObject.getCopyTo());
    for (ParticipantSetDataObject candidate : candidates) {
        String type = getType(candidate);
        if (type != null) {
            return type;
        }
    }
    return null;
}
String function(ParticipantSetDataObject dataObject) { if (dataObject.getCopyTo() != null) { List<ParticipantSetDataObject> sets = this.diagram.getParticipantSetsWithName(dataObject.getCopyTo()); for (Iterator<ParticipantSetDataObject> it = sets.iterator(); it.hasNext();) { String type = getType(it.next()); if (type != null) { return type; } } } return null; }
/** * Determines the type of a participant set from * the copy to attribute. The type is represented by * the type of the participant set the copy to * attribute denotes. * * @param dataObject the data object to get the type for * * @return The name of the type or null, if the type could not be * determined. */
Determines the type of a participant set from the copy to attribute. The type is represented by the type of the participant set the copy to attribute denotes
getTypeFromCopyTo
{ "repo_name": "grasscrm/gdesigner", "path": "editor/server/src/de/hpi/bpel4chor/transformation/factories/ParticipantsFactory.java", "license": "apache-2.0", "size": 51114 }
[ "de.hpi.bpel4chor.model.artifacts.ParticipantSetDataObject", "java.util.Iterator", "java.util.List" ]
import de.hpi.bpel4chor.model.artifacts.ParticipantSetDataObject; import java.util.Iterator; import java.util.List;
import de.hpi.bpel4chor.model.artifacts.*; import java.util.*;
[ "de.hpi.bpel4chor", "java.util" ]
de.hpi.bpel4chor; java.util;
1,825,816
/**
 * Inspects the given scenario: if its class carries {@code Feature.Scenario},
 * traces the class and logs the generic signature of every declared method
 * annotated with {@code Feature.Given}. No step method is invoked here.
 *
 * NOTE(review): despite being named executeWhen, this scans for
 * {@code Feature.Given} annotations - confirm whether {@code Feature.When}
 * was intended.
 *
 * @param scenario the scenario to inspect
 * @return this runner, for call chaining
 */
private ScenarioRunner executeWhen(final AbstractScenario scenario) { final Class<?> candidateClass = scenario.getClass(); if (candidateClass.isAnnotationPresent(Feature.Scenario.class)) { this.log.trace(candidateClass.toString()); final Method[] declaredMethods = candidateClass.getDeclaredMethods(); for (final Method method : declaredMethods) { if (method.isAnnotationPresent(Feature.Given.class)) { this.log.debug(method.toGenericString()); } } } return this; }
ScenarioRunner function(final AbstractScenario scenario) { final Class<?> candidateClass = scenario.getClass(); if (candidateClass.isAnnotationPresent(Feature.Scenario.class)) { this.log.trace(candidateClass.toString()); final Method[] declaredMethods = candidateClass.getDeclaredMethods(); for (final Method method : declaredMethods) { if (method.isAnnotationPresent(Feature.Given.class)) { this.log.debug(method.toGenericString()); } } } return this; }
/** * Execute when. * * @param scenario the scenario * @return the scenario runner */
Execute when
executeWhen
{ "repo_name": "atf4j/atf4j", "path": "atf4j-fdd/src/main/java/net/atf4j/fdd/ScenarioRunner.java", "license": "gpl-3.0", "size": 4896 }
[ "java.lang.reflect.Method" ]
import java.lang.reflect.Method;
import java.lang.reflect.*;
[ "java.lang" ]
java.lang;
654,170
/**
 * Merges a list of dimension arrays into a sequential, row-major-indexed
 * list of n-dimensional coordinates; delegates to CoordinateAssembler.
 *
 * @param dimensions one array per dimension of an n-dimensional array
 * @return the n-dimensional coordinates in row-major order
 */
public static List<int[]> dimensionsToCoordinateList(List<int[]> dimensions) { return CoordinateAssembler.assemble(dimensions); }
static List<int[]> function(List<int[]> dimensions) { return CoordinateAssembler.assemble(dimensions); }
/** * Called to merge a list of dimension arrays into a sequential row-major indexed * list of coordinates. * * @param dimensions a list of dimension arrays, each array being a dimension * of an n-dimensional array. * @return a list of n-dimensional coordinates in row-major format. */
Called to merge a list of dimension arrays into a sequential row-major indexed list of coordinates
dimensionsToCoordinateList
{ "repo_name": "user405/test", "path": "src/main/java/org/numenta/nupic/util/ArrayUtils.java", "license": "agpl-3.0", "size": 57278 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,204,720
/**
 * Replaces the spoken languages (2-digit ISO 639-1 codes).
 * A null argument simply clears the current list.
 *
 * @param spokenLanguages the spoken languages to set
 */
public void setSpokenLanguages(List<String> spokenLanguages) {
    this.spokenLanguages.clear();
    if (spokenLanguages == null) {
        return;
    }
    this.spokenLanguages.addAll(spokenLanguages);
}
void function(List<String> spokenLanguages) { this.spokenLanguages.clear(); if (spokenLanguages != null) { this.spokenLanguages.addAll(spokenLanguages); } }
/** * Set the spoken languages (2 digit: ISO 639-1) * * @param spokenLanguages * the spoken languages to be set */
Set the spoken languages (2 digit: ISO 639-1)
setSpokenLanguages
{ "repo_name": "tinyMediaManager/api-scraper", "path": "src/main/java/org/tinymediamanager/scraper/MediaMetadata.java", "license": "apache-2.0", "size": 28830 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
613,453
/**
 * Builds and installs a new import processor from the deployment's import
 * configuration. Returns silently when no import element (or an empty one)
 * is configured. For each configured importer, resolves - and caches in
 * m_formatterFactories - the formatter factory named by its IMPORT_FORMATTER
 * property via the module manager; a failure to resolve or configure a
 * formatter, or any unexpected exception, crashes the local VoltDB node
 * rather than running with a partial import setup.
 *
 * @param myHostId       this host's id, handed to the ImportProcessor
 * @param catalogContext catalog context holding the deployment descriptor
 */
private synchronized void create(int myHostId, CatalogContext catalogContext) { try { ImportType importElement = catalogContext.getDeployment().getImport(); if (importElement == null || importElement.getConfiguration().isEmpty()) { return; } initializeChannelDistributer(); final String clusterTag = m_distributer.getClusterTag(); ImportDataProcessor newProcessor = new ImportProcessor( myHostId, m_distributer, m_moduleManager, m_statsCollector, clusterTag); m_processorConfig = CatalogUtil.getImportProcessorConfig(catalogContext.getDeployment().getImport()); m_formatterFactories.clear(); for (ImportConfiguration config : m_processorConfig.values()) { Properties prop = config.getformatterProperties(); String module = prop.getProperty(ImportDataProcessor.IMPORT_FORMATTER); try { AbstractFormatterFactory formatterFactory = m_formatterFactories.get(module); if (formatterFactory == null) { URI moduleURI = URI.create(module); formatterFactory = m_moduleManager.getService(moduleURI, AbstractFormatterFactory.class); if (formatterFactory == null) { VoltDB.crashLocalVoltDB("Failed to initialize formatter from: " + module); } m_formatterFactories.put(module, formatterFactory); } config.setFormatterFactory(formatterFactory); } catch(Throwable t) { VoltDB.crashLocalVoltDB("Failed to configure import handler for " + module); } } newProcessor.setProcessorConfig(catalogContext, m_processorConfig); m_processor.set(newProcessor); } catch (final Exception e) { VoltDB.crashLocalVoltDB("Error creating import processor", true, e); } }
synchronized void function(int myHostId, CatalogContext catalogContext) { try { ImportType importElement = catalogContext.getDeployment().getImport(); if (importElement == null importElement.getConfiguration().isEmpty()) { return; } initializeChannelDistributer(); final String clusterTag = m_distributer.getClusterTag(); ImportDataProcessor newProcessor = new ImportProcessor( myHostId, m_distributer, m_moduleManager, m_statsCollector, clusterTag); m_processorConfig = CatalogUtil.getImportProcessorConfig(catalogContext.getDeployment().getImport()); m_formatterFactories.clear(); for (ImportConfiguration config : m_processorConfig.values()) { Properties prop = config.getformatterProperties(); String module = prop.getProperty(ImportDataProcessor.IMPORT_FORMATTER); try { AbstractFormatterFactory formatterFactory = m_formatterFactories.get(module); if (formatterFactory == null) { URI moduleURI = URI.create(module); formatterFactory = m_moduleManager.getService(moduleURI, AbstractFormatterFactory.class); if (formatterFactory == null) { VoltDB.crashLocalVoltDB(STR + module); } m_formatterFactories.put(module, formatterFactory); } config.setFormatterFactory(formatterFactory); } catch(Throwable t) { VoltDB.crashLocalVoltDB(STR + module); } } newProcessor.setProcessorConfig(catalogContext, m_processorConfig); m_processor.set(newProcessor); } catch (final Exception e) { VoltDB.crashLocalVoltDB(STR, true, e); } }
/** * This creates a import connector from configuration provided. * @param catalogContext * @param partitions */
This creates a import connector from configuration provided
create
{ "repo_name": "migue/voltdb", "path": "src/frontend/org/voltdb/importer/ImportManager.java", "license": "agpl-3.0", "size": 9552 }
[ "java.net.URI", "java.util.Properties", "org.voltdb.CatalogContext", "org.voltdb.VoltDB", "org.voltdb.compiler.deploymentfile.ImportType", "org.voltdb.importer.formatter.AbstractFormatterFactory", "org.voltdb.utils.CatalogUtil" ]
import java.net.URI; import java.util.Properties; import org.voltdb.CatalogContext; import org.voltdb.VoltDB; import org.voltdb.compiler.deploymentfile.ImportType; import org.voltdb.importer.formatter.AbstractFormatterFactory; import org.voltdb.utils.CatalogUtil;
import java.net.*; import java.util.*; import org.voltdb.*; import org.voltdb.compiler.deploymentfile.*; import org.voltdb.importer.formatter.*; import org.voltdb.utils.*;
[ "java.net", "java.util", "org.voltdb", "org.voltdb.compiler", "org.voltdb.importer", "org.voltdb.utils" ]
java.net; java.util; org.voltdb; org.voltdb.compiler; org.voltdb.importer; org.voltdb.utils;
1,039,820
/**
 * Looks up the request URI (with its query string, if any) that the agent
 * stashed before the authentication round-trip, so the filter can redirect
 * the user back to the originally requested page.
 *
 * @param hreq the current HTTP request
 * @return the saved request URL held by the agent attribute
 */
private String getSavedRequestURL(HttpServletRequest hreq) {
    final String savedUri =
            _agent.getAttribute(hreq, WebAccessControlUtil.KEY_JOSSO_SAVED_REQUEST_URI);
    return savedUri;
}
String function(HttpServletRequest hreq) { return _agent.getAttribute(hreq, WebAccessControlUtil.KEY_JOSSO_SAVED_REQUEST_URI); }
/** * Return the request URI (with the corresponding query string, if any) * from the saved request so that we can redirect to it. * * @param hreq current http request */
Return the request URI (with the corresponding query string, if any) from the saved request so that we can redirect to it
getSavedRequestURL
{ "repo_name": "webbfontaine/josso1", "path": "agents/josso-weblogic81-agent/src/main/java/org/josso/servlet/agent/GenericServletSSOAgentFilter.java", "license": "lgpl-2.1", "size": 26565 }
[ "javax.servlet.http.HttpServletRequest", "org.josso.agent.http.WebAccessControlUtil" ]
import javax.servlet.http.HttpServletRequest; import org.josso.agent.http.WebAccessControlUtil;
import javax.servlet.http.*; import org.josso.agent.http.*;
[ "javax.servlet", "org.josso.agent" ]
javax.servlet; org.josso.agent;
1,973,825
/**
 * Registers a cluster valve. Cluster valves are only attached to the
 * container once the cluster has been started.
 *
 * @param valve the valve to register; ignored unless it is a
 *              {@link ClusterValve} that is not already registered
 */
@Override
public void addValve(Valve valve) {
    if (!(valve instanceof ClusterValve)) {
        return;  // only cluster valves are tracked here
    }
    if (!valves.contains(valve)) {
        valves.add(valve);
    }
}
void function(Valve valve) { if (valve instanceof ClusterValve && (!valves.contains(valve))) valves.add(valve); }
/** * Add cluster valve * Cluster Valves are only add to container when cluster is started! * @param valve The new cluster Valve. */
Add cluster valve Cluster Valves are only add to container when cluster is started
addValve
{ "repo_name": "Nickname0806/Test_Q4", "path": "java/org/apache/catalina/ha/tcp/SimpleTcpCluster.java", "license": "apache-2.0", "size": 28352 }
[ "org.apache.catalina.Valve", "org.apache.catalina.ha.ClusterValve" ]
import org.apache.catalina.Valve; import org.apache.catalina.ha.ClusterValve;
import org.apache.catalina.*; import org.apache.catalina.ha.*;
[ "org.apache.catalina" ]
org.apache.catalina;
642,507
/**
 * Decides whether the logged-in user may delete the given object.
 * Ownership is checked first; a non-owner falls back to the object's own
 * permission flag or, for time-set nodes, to the group's permission level.
 *
 * @param ho the object (or tree node) to test; may be a DataObject,
 *           an ExperimenterData/GroupData, or a TreeImageTimeSet node
 * @return {@code true} if deletion is allowed, {@code false} otherwise
 */
public boolean canDelete(Object ho)
{
    long id = model.getUserDetails().getId();
    boolean b = false;
    // First determine whether the current user owns the object.
    if (ho instanceof TreeImageTimeSet) {
        Browser browser = model.getSelectedBrowser();
        ExperimenterData exp = browser.getNodeOwner((TreeImageDisplay) ho);
        if (exp.getId() == id) b = true;
    } else b = EditorUtil.isUserOwner(ho, id);
    if (b) return b; // user is the owner: always allowed to delete.
    GroupData group = null;
    if (ho instanceof ExperimenterData || ho instanceof GroupData) {
        // users and groups should not be deleted at all
        return false;
    } else if (ho instanceof DataObject) {
        // Non-owned data object: defer to the server-supplied permission flag.
        DataObject data = (DataObject) ho;
        return data.canDelete();
    } else if (ho instanceof TreeImageTimeSet) {
        Browser browser = model.getSelectedBrowser();
        if (browser == null) return false;
        group = browser.getNodeGroup((TreeImageDisplay) ho);
    } else {
        return false;
    }
    // Do not have enough information about the group.
    if (group.getPermissions() == null) return false;
    // Read-write groups let any member delete; otherwise only group owners may.
    switch (group.getPermissions().getPermissionsLevel()) {
        case GroupData.PERMISSIONS_GROUP_READ_WRITE:
        case GroupData.PERMISSIONS_PUBLIC_READ_WRITE:
            return true;
    }
    return EditorUtil.isUserGroupOwner(group, id);
}
boolean function(Object ho) { long id = model.getUserDetails().getId(); boolean b = false; if (ho instanceof TreeImageTimeSet) { Browser browser = model.getSelectedBrowser(); ExperimenterData exp = browser.getNodeOwner((TreeImageDisplay) ho); if (exp.getId() == id) b = true; } else b = EditorUtil.isUserOwner(ho, id); if (b) return b; GroupData group = null; if (ho instanceof ExperimenterData ho instanceof GroupData) { return false; } else if (ho instanceof DataObject) { DataObject data = (DataObject) ho; return data.canDelete(); } else if (ho instanceof TreeImageTimeSet) { Browser browser = model.getSelectedBrowser(); if (browser == null) return false; group = browser.getNodeGroup((TreeImageDisplay) ho); } else { return false; } if (group.getPermissions() == null) return false; switch (group.getPermissions().getPermissionsLevel()) { case GroupData.PERMISSIONS_GROUP_READ_WRITE: case GroupData.PERMISSIONS_PUBLIC_READ_WRITE: return true; } return EditorUtil.isUserGroupOwner(group, id); }
/** * Implemented as specified by the {@link TreeViewer} interface. * @see TreeViewer#canDelete(Object) */
Implemented as specified by the <code>TreeViewer</code> interface
canDelete
{ "repo_name": "dominikl/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/agents/treeviewer/view/TreeViewerComponent.java", "license": "gpl-2.0", "size": 162310 }
[ "org.openmicroscopy.shoola.agents.treeviewer.browser.Browser", "org.openmicroscopy.shoola.agents.util.EditorUtil", "org.openmicroscopy.shoola.agents.util.browser.TreeImageDisplay", "org.openmicroscopy.shoola.agents.util.browser.TreeImageTimeSet" ]
import org.openmicroscopy.shoola.agents.treeviewer.browser.Browser; import org.openmicroscopy.shoola.agents.util.EditorUtil; import org.openmicroscopy.shoola.agents.util.browser.TreeImageDisplay; import org.openmicroscopy.shoola.agents.util.browser.TreeImageTimeSet;
import org.openmicroscopy.shoola.agents.treeviewer.browser.*; import org.openmicroscopy.shoola.agents.util.*; import org.openmicroscopy.shoola.agents.util.browser.*;
[ "org.openmicroscopy.shoola" ]
org.openmicroscopy.shoola;
310,683
/**
 * Gives access to the base entity revision this update is built upon.
 * When the builder was constructed without a base revision, the result
 * is {@code null}.
 *
 * @return the base entity revision being updated, possibly {@code null}
 */
EntityDocument getBaseRevision() {
    return this.baseRevision;
}
EntityDocument getBaseRevision() { return baseRevision; }
/** * Returns base entity revision, upon which this update is built. If no base * revision was provided when this builder was constructed, this method returns * {@code null}. * * @return base entity revision that is being updated */
Returns base entity revision, upon which this update is built. If no base revision was provided when this builder was constructed, this method returns null
getBaseRevision
{ "repo_name": "Wikidata/Wikidata-Toolkit", "path": "wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/EntityUpdateBuilder.java", "license": "apache-2.0", "size": 7453 }
[ "org.wikidata.wdtk.datamodel.interfaces.EntityDocument" ]
import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.datamodel.interfaces.*;
[ "org.wikidata.wdtk" ]
org.wikidata.wdtk;
2,806,057
private Collection<JMeterProperty> replaceValues(PropertyIterator iter, ValueTransformer transform) throws InvalidVariableException { List<JMeterProperty> props = new LinkedList<>(); while (iter.hasNext()) { JMeterProperty val = iter.next(); if (log.isDebugEnabled()) { log.debug("About to replace in property of type: {}: {}", val.getClass(), val); } if (val instanceof StringProperty) { // Must not convert TestElement.gui_class etc if (!val.getName().equals(TestElement.GUI_CLASS) && !val.getName().equals(TestElement.TEST_CLASS)) { val = transform.transformValue(val); log.debug("Replacement result: {}", val); } } else if (val instanceof NumberProperty) { val = transform.transformValue(val); log.debug("Replacement result: {}", val); } else if (val instanceof MultiProperty) { MultiProperty multiVal = (MultiProperty) val; Collection<JMeterProperty> newValues = replaceValues(multiVal.iterator(), transform); multiVal.clear(); for (JMeterProperty jmp : newValues) { multiVal.addProperty(jmp); } log.debug("Replacement result: {}", multiVal); } else { log.debug("Won't replace {}", val); } props.add(val); } return props; }
Collection<JMeterProperty> function(PropertyIterator iter, ValueTransformer transform) throws InvalidVariableException { List<JMeterProperty> props = new LinkedList<>(); while (iter.hasNext()) { JMeterProperty val = iter.next(); if (log.isDebugEnabled()) { log.debug(STR, val.getClass(), val); } if (val instanceof StringProperty) { if (!val.getName().equals(TestElement.GUI_CLASS) && !val.getName().equals(TestElement.TEST_CLASS)) { val = transform.transformValue(val); log.debug(STR, val); } } else if (val instanceof NumberProperty) { val = transform.transformValue(val); log.debug(STR, val); } else if (val instanceof MultiProperty) { MultiProperty multiVal = (MultiProperty) val; Collection<JMeterProperty> newValues = replaceValues(multiVal.iterator(), transform); multiVal.clear(); for (JMeterProperty jmp : newValues) { multiVal.addProperty(jmp); } log.debug(STR, multiVal); } else { log.debug(STR, val); } props.add(val); } return props; }
/** * Replaces a {@link StringProperty} containing functions with their Function properties equivalent. * <p>For example: * <code>${__time()}_${__threadNum()}_${__machineName()}</code> will become a * {@link org.apache.jmeter.testelement.property.FunctionProperty} of * a {@link CompoundVariable} containing three functions * @param iter the {@link PropertyIterator} over all properties, in which the values should be replaced * @param transform the {@link ValueTransformer}, that should do transformation * @return a new {@link Collection} with all the transformed {@link JMeterProperty}s * @throws InvalidVariableException when <code>transform</code> throws an {@link InvalidVariableException} while transforming a value */
Replaces a <code>StringProperty</code> containing functions with their Function properties equivalent. For example: <code>${__time()}_${__threadNum()}_${__machineName()}</code> will become a <code>org.apache.jmeter.testelement.property.FunctionProperty</code> of a <code>CompoundVariable</code> containing three functions
replaceValues
{ "repo_name": "benbenw/jmeter", "path": "src/core/src/main/java/org/apache/jmeter/engine/util/ValueReplacer.java", "license": "apache-2.0", "size": 8434 }
[ "java.util.Collection", "java.util.LinkedList", "java.util.List", "org.apache.jmeter.functions.InvalidVariableException", "org.apache.jmeter.testelement.TestElement", "org.apache.jmeter.testelement.property.JMeterProperty", "org.apache.jmeter.testelement.property.MultiProperty", "org.apache.jmeter.testelement.property.NumberProperty", "org.apache.jmeter.testelement.property.PropertyIterator", "org.apache.jmeter.testelement.property.StringProperty" ]
import java.util.Collection; import java.util.LinkedList; import java.util.List; import org.apache.jmeter.functions.InvalidVariableException; import org.apache.jmeter.testelement.TestElement; import org.apache.jmeter.testelement.property.JMeterProperty; import org.apache.jmeter.testelement.property.MultiProperty; import org.apache.jmeter.testelement.property.NumberProperty; import org.apache.jmeter.testelement.property.PropertyIterator; import org.apache.jmeter.testelement.property.StringProperty;
import java.util.*; import org.apache.jmeter.functions.*; import org.apache.jmeter.testelement.*; import org.apache.jmeter.testelement.property.*;
[ "java.util", "org.apache.jmeter" ]
java.util; org.apache.jmeter;
1,867,070
/**
 * Initializes the output handler for another set of RSS channels: reads the
 * handler's configuration section, verifies the configured script file,
 * allocates the scripting engine, wires up a per-script logger and the
 * shared {@code curn} script object, and loads the script into memory.
 *
 * @param config     the parsed <i>curn</i> configuration data
 * @param cfgHandler the ConfiguredOutputHandler wrapper containing this
 *                   object; supplies the configuration section name
 * @throws ConfigurationException configuration error (missing section,
 *                                missing or non-regular script file)
 * @throws CurnException          any other initialization error
 */
public final void initOutputHandler(CurnConfig config,
                                    ConfiguredOutputHandler cfgHandler)
    throws ConfigurationException,
           CurnException
{
    // Parse handler-specific configuration variables.
    String section = cfgHandler.getSectionName();
    try
    {
        if (section != null)
        {
            scriptPath = config.getConfigurationValue(section, "Script");
            language = config.getConfigurationValue(section, "Language");
            allowEmbeddedHTML = config.getOptionalBooleanValue
                (section,
                 CurnConfig.CFG_ALLOW_EMBEDDED_HTML,
                 false);
        }
    }

    catch (NoSuchSectionException ex)
    {
        throw new ConfigurationException (ex);
    }

    // Verify that the script exists and is a regular file.
    File scriptFile = CurnUtil.mapConfiguredPathName (scriptPath);
    if (! scriptFile.exists())
    {
        scriptPath = null;
        throw new ConfigurationException(section,
                                         "Script file \"" +
                                         scriptFile.getPath() +
                                         "\" does not exist.");
    }

    if (! scriptFile.isFile())
    {
        scriptPath = null;
        throw new ConfigurationException(section,
                                         "Script file \"" +
                                         scriptFile.getPath() +
                                         "\" is not a regular file.");
    }

    // Allocate the script engine manager.
    try
    {
        scriptManager = new ScriptEngineManager();
    }

    catch (Throwable ex)
    {
        throw new CurnException(ex);
    }

    // Next, get the scripting engine itself for the configured language.
    try
    {
        scriptEngine = scriptManager.getEngineByName(language);
    }

    catch (Throwable ex)
    {
        throw new CurnException("Unable to load scripting engine for \"" +
                                language + "\" language",
                                ex);
    }

    // Set up a logger for the script. The logger name can't have dots
    // in it, because the underlying logging API strips them out,
    // thinking they're class/package delimiters. That means we have to
    // strip the extension or change it to something else. Since the
    // extension conveys information (i.e., the language), we just
    // convert it to an underscore.
    StringBuilder scriptLoggerName = new StringBuilder(128);
    String scriptName = scriptFile.getName();
    scriptLoggerName.append(FileUtil.getFileNameNoExtension(scriptName));
    scriptLoggerName.append('_');
    scriptLoggerName.append(FileUtil.getFileNameExtension(scriptName));
    scriptLogger = new Logger(scriptLoggerName.toString());

    // Declare the script object. We'll fill it partially now; the rest
    // will be filled later. Also, for backward compatibility, register
    // individual BSF beans.
    this.scriptObjects = new CurnScriptObjects();

    try
    {
        scriptEngine.put("curn", scriptObjects);
    }

    catch (Throwable ex)
    {
        throw new CurnException ("Can't register script 'curn' object", ex);
    }

    scriptObjects.config = config;
    scriptObjects.configSection = section;
    scriptObjects.logger = scriptLogger;

    // Load the contents of the script into an in-memory buffer and reset
    // the per-run channel list.
    scriptString = loadScript(scriptFile);
    channels.clear();
}
final void function(CurnConfig config, ConfiguredOutputHandler cfgHandler) throws ConfigurationException, CurnException { String section = cfgHandler.getSectionName(); try { if (section != null) { scriptPath = config.getConfigurationValue(section, STR); language = config.getConfigurationValue(section, STR); allowEmbeddedHTML = config.getOptionalBooleanValue (section, CurnConfig.CFG_ALLOW_EMBEDDED_HTML, false); } } catch (NoSuchSectionException ex) { throw new ConfigurationException (ex); } File scriptFile = CurnUtil.mapConfiguredPathName (scriptPath); if (! scriptFile.exists()) { scriptPath = null; throw new ConfigurationException(section, STRSTR\STR); } if (! scriptFile.isFile()) { scriptPath = null; throw new ConfigurationException(section, STRSTR\STR); } try { scriptManager = new ScriptEngineManager(); } catch (Throwable ex) { throw new CurnException(ex); } try { scriptEngine = scriptManager.getEngineByName(language); } catch (Throwable ex) { throw new CurnException(STRSTR\STR, ex); } StringBuilder scriptLoggerName = new StringBuilder(128); String scriptName = scriptFile.getName(); scriptLoggerName.append(FileUtil.getFileNameNoExtension(scriptName)); scriptLoggerName.append('_'); scriptLoggerName.append(FileUtil.getFileNameExtension(scriptName)); scriptLogger = new Logger(scriptLoggerName.toString()); this.scriptObjects = new CurnScriptObjects(); try { scriptEngine.put("curn", scriptObjects); } catch (Throwable ex) { throw new CurnException (STR, ex); } scriptObjects.config = config; scriptObjects.configSection = section; scriptObjects.logger = scriptLogger; scriptString = loadScript(scriptFile); channels.clear(); }
/** * Initializes the output handler for another set of RSS channels. * * @param config the parsed <i>curn</i> configuration data * @param cfgHandler the <tt>ConfiguredOutputHandler</tt> wrapper * containing this object; the wrapper has some useful * metadata, such as the object's configuration section * name and extra variables. * * @throws ConfigurationException configuration error * @throws CurnException some other initialization error */
Initializes the output handler for another set of RSS channels
initOutputHandler
{ "repo_name": "bmc/curn", "path": "src/main/java/org/clapper/curn/output/script/ScriptOutputHandler.java", "license": "bsd-3-clause", "size": 23523 }
[ "java.io.File", "javax.script.ScriptEngineManager", "org.clapper.curn.ConfiguredOutputHandler", "org.clapper.curn.CurnConfig", "org.clapper.curn.CurnException", "org.clapper.curn.CurnUtil", "org.clapper.util.config.ConfigurationException", "org.clapper.util.config.NoSuchSectionException", "org.clapper.util.io.FileUtil", "org.clapper.util.logging.Logger" ]
import java.io.File; import javax.script.ScriptEngineManager; import org.clapper.curn.ConfiguredOutputHandler; import org.clapper.curn.CurnConfig; import org.clapper.curn.CurnException; import org.clapper.curn.CurnUtil; import org.clapper.util.config.ConfigurationException; import org.clapper.util.config.NoSuchSectionException; import org.clapper.util.io.FileUtil; import org.clapper.util.logging.Logger;
import java.io.*; import javax.script.*; import org.clapper.curn.*; import org.clapper.util.config.*; import org.clapper.util.io.*; import org.clapper.util.logging.*;
[ "java.io", "javax.script", "org.clapper.curn", "org.clapper.util" ]
java.io; javax.script; org.clapper.curn; org.clapper.util;
565,351
/**
 * Listener on the internal tag manager that keeps the API-side tag state in
 * sync with the underlying manager.
 * NOTE(review): this record holds only a fragment of the class — the class
 * body continues (and its closing brace lies) outside this snippet.
 */
private class InternalListener implements
    com.google.security.zynamics.binnavi.Tagging.ITagManagerListener {
  /**
   * Removes a tag node and, recursively, all of its children from the set
   * of known tags.
   *
   * @param tag the tag node to remove
   */
  private void removeTree(final ITreeNode<CTag> tag) {
    m_allTags.remove(tag);

    for (final ITreeNode<CTag> child : tag.getChildren()) {
      removeTree(child);
    }
  }
class InternalListener implements com.google.security.zynamics.binnavi.Tagging.ITagManagerListener { private void function(final ITreeNode<CTag> tag) { m_allTags.remove(tag); for (final ITreeNode<CTag> child : tag.getChildren()) { removeTree(child); } }
/** * Removes a tag and all of its children. * * @param tag The tag to remove. */
Removes a tag and all of its children
removeTree
{ "repo_name": "google/binnavi", "path": "src/main/java/com/google/security/zynamics/binnavi/API/disassembly/TagManager.java", "license": "apache-2.0", "size": 11371 }
[ "com.google.security.zynamics.binnavi.Tagging", "com.google.security.zynamics.zylib.types.trees.ITreeNode" ]
import com.google.security.zynamics.binnavi.Tagging; import com.google.security.zynamics.zylib.types.trees.ITreeNode;
import com.google.security.zynamics.binnavi.*; import com.google.security.zynamics.zylib.types.trees.*;
[ "com.google.security" ]
com.google.security;
2,114,081
/**
 * Reads the next pending harvesting job record: the oldest submitted job
 * whose referenced harvest-repository record still exists.
 *
 * @param con the database connection to query
 * @return the next pending job record, or {@code null} if none is pending
 * @throws SQLException if accessing the database fails
 */
private HjRecord readNext(Connection con) throws SQLException {

    // initialize
    PreparedStatement st = null;

    try {

        // start the SQL expression; columns are read back by position below
        StringBuilder sbSql = new StringBuilder();
        StringBuilder sbFrom = new StringBuilder();
        StringBuilder sbWhere = new StringBuilder();
        sbSql.append("SELECT A.UUID,A.HARVEST_ID,A.INPUT_DATE,A.HARVEST_DATE");
        sbSql.append(",A.JOB_STATUS,A.JOB_TYPE,A.CRITERIA,A.SERVICE_ID");

        // append from clause
        sbFrom.append(" FROM ");
        sbFrom.append(getHarvestingJobsPendingTableName()).append(" A");
        sbSql.append(sbFrom);

        // create where clause: only jobs still in the Submited state
        sbWhere.append("UPPER(A.JOB_STATUS)='").append(HjRecord.JobStatus.Submited.name().toUpperCase()).append(
                "'");

        // append the where clause expressions; oldest job first
        if (sbWhere.length() > 0) {
            sbSql.append(" WHERE ").append(sbWhere.toString());
            sbSql.append(" ORDER BY A.HARVEST_DATE ASC ");
        }

        // prepare the statements
        int n = 0;
        st = con.prepareStatement(sbSql.toString());

        // execute the query
        logExpression(sbSql.toString());
        ResultSet rs = st.executeQuery();

        while (rs.next()) {
            // column 2 is HARVEST_ID; skip rows whose ID is not a valid UUID
            String harvestUuid = Val.chkStr(rs.getString(2));
            if (UuidUtil.isUuid(harvestUuid)) {
                // the job is only returned if its repository record still exists
                HrSelectRequest harvestRequest = new HrSelectRequest(getRequestContext(), harvestUuid);
                harvestRequest.execute();
                HrRecords harvestRecords = harvestRequest.getQueryResult().getRecords();
                if (harvestRecords.size()>=1) {
                    HjRecord record = new HjRecord(harvestRecords.get(0));
                    // read the remaining columns positionally, starting at 1
                    n = 1;
                    record.setUuid(rs.getString(n++));
                    rs.getString(n++); // HARVEST_ID already consumed above; advance the index
                    record.setInputDate(rs.getTimestamp(n++));
                    record.setJobDate(rs.getTimestamp(n++));
                    record.setStatus(HjRecord.JobStatus.checkValueOf(rs.getString(n++)));
                    record.setType(HjRecord.JobType.checkValueOf(rs.getString(n++)));
                    record.setCriteria(CommonCriteria.parseXmlString(rs.getString(n++)));
                    record.setServiceId(rs.getString(n++));
                    return record;
                }
            }
        }
        return null;

    } finally {
        closeStatement(st);
    }
}
HjRecord function(Connection con) throws SQLException { PreparedStatement st = null; try { StringBuilder sbSql = new StringBuilder(); StringBuilder sbFrom = new StringBuilder(); StringBuilder sbWhere = new StringBuilder(); sbSql.append(STR); sbSql.append(STR); sbFrom.append(STR); sbFrom.append(getHarvestingJobsPendingTableName()).append(STR); sbSql.append(sbFrom); sbWhere.append(STR).append(HjRecord.JobStatus.Submited.name().toUpperCase()).append( "'"); if (sbWhere.length() > 0) { sbSql.append(STR).append(sbWhere.toString()); sbSql.append(STR); } int n = 0; st = con.prepareStatement(sbSql.toString()); logExpression(sbSql.toString()); ResultSet rs = st.executeQuery(); while (rs.next()) { String harvestUuid = Val.chkStr(rs.getString(2)); if (UuidUtil.isUuid(harvestUuid)) { HrSelectRequest harvestRequest = new HrSelectRequest(getRequestContext(), harvestUuid); harvestRequest.execute(); HrRecords harvestRecords = harvestRequest.getQueryResult().getRecords(); if (harvestRecords.size()>=1) { HjRecord record = new HjRecord(harvestRecords.get(0)); n = 1; record.setUuid(rs.getString(n++)); rs.getString(n++); record.setInputDate(rs.getTimestamp(n++)); record.setJobDate(rs.getTimestamp(n++)); record.setStatus(HjRecord.JobStatus.checkValueOf(rs.getString(n++))); record.setType(HjRecord.JobType.checkValueOf(rs.getString(n++))); record.setCriteria(CommonCriteria.parseXmlString(rs.getString(n++))); record.setServiceId(rs.getString(n++)); return record; } } } return null; } finally { closeStatement(st); } }
/** * Reads next pending job record. * @param con database connection * @return next pending job record or <code>null</code> if no more pending jobs * @throws java.sql.SQLException if accessing database failed */
Reads next pending job record
readNext
{ "repo_name": "usgin/usgin-geoportal", "path": "src/com/esri/gpt/catalog/harvest/jobs/HjGetNextRequest.java", "license": "apache-2.0", "size": 7250 }
[ "com.esri.gpt.catalog.harvest.repository.HrRecords", "com.esri.gpt.catalog.harvest.repository.HrSelectRequest", "com.esri.gpt.control.webharvest.common.CommonCriteria", "com.esri.gpt.framework.util.UuidUtil", "com.esri.gpt.framework.util.Val", "java.sql.Connection", "java.sql.PreparedStatement", "java.sql.ResultSet", "java.sql.SQLException" ]
import com.esri.gpt.catalog.harvest.repository.HrRecords; import com.esri.gpt.catalog.harvest.repository.HrSelectRequest; import com.esri.gpt.control.webharvest.common.CommonCriteria; import com.esri.gpt.framework.util.UuidUtil; import com.esri.gpt.framework.util.Val; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException;
import com.esri.gpt.catalog.harvest.repository.*; import com.esri.gpt.control.webharvest.common.*; import com.esri.gpt.framework.util.*; import java.sql.*;
[ "com.esri.gpt", "java.sql" ]
com.esri.gpt; java.sql;
332,034
/**
 * Exposes the prefix mappings declared directly on this element. Mappings
 * inherited from ancestor elements are not included, so callers that need
 * the full in-scope set must also walk the ancestor chain.
 *
 * @return the prefix mappings declared on this element only
 */
protected Hashtable getPrefixMapping() {
    return this._prefixMapping;
}
Hashtable function() { return _prefixMapping; }
/** * Returns a Hashtable containing the prefix mappings that were declared * for this element. This does not include all prefix mappings in scope, * so one may have to check ancestor elements to get all mappings that are * in scope. * @return Prefix mappings (for this element only). */
Returns a Hashtable containing the prefix mappings that were declared for this element. This does not include all prefix mappings in scope, so one may have to check ancestor elements to get all mappings that are in scope
getPrefixMapping
{ "repo_name": "haikuowuya/android_system_code", "path": "src/com/sun/org/apache/xalan/internal/xsltc/compiler/SyntaxTreeNode.java", "license": "apache-2.0", "size": 33289 }
[ "java.util.Hashtable" ]
import java.util.Hashtable;
import java.util.*;
[ "java.util" ]
java.util;
2,074,357
/**
 * Builds the tenant-pod resource URL for the GetOption operation.
 *
 * @param attributeFQN   fully qualified name of the attribute
 * @param productCode    merchant-created code that uniquely identifies the product
 * @param responseFields fields to include beyond the defaults
 * @return the formatted resource URL
 */
public static MozuUrl getOptionUrl(String attributeFQN, String productCode, String responseFields)
{
    final String template =
        "/api/commerce/catalog/admin/products/{productCode}/Options/{attributeFQN}?responseFields={responseFields}";
    UrlFormatter urlFormatter = new UrlFormatter(template);
    urlFormatter.formatUrl("attributeFQN", attributeFQN);
    urlFormatter.formatUrl("productCode", productCode);
    urlFormatter.formatUrl("responseFields", responseFields);
    return new MozuUrl(urlFormatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD);
}
static MozuUrl function(String attributeFQN, String productCode, String responseFields) { UrlFormatter formatter = new UrlFormatter(STR); formatter.formatUrl(STR, attributeFQN); formatter.formatUrl(STR, productCode); formatter.formatUrl(STR, responseFields); return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD) ; }
/** * Get Resource Url for GetOption * @param attributeFQN The fully qualified name of the attribute, which is a user defined attribute identifier. * @param productCode Merchant-created code that uniquely identifies the product such as a SKU or item number. Once created, the product code is read-only. * @param responseFields Use this field to include those fields which are not included by default. * @return String Resource Url */
Get Resource Url for GetOption
getOptionUrl
{ "repo_name": "lakshmi-nair/mozu-java", "path": "mozu-javaasync-core/src/main/java/com/mozu/api/urls/commerce/catalog/admin/products/ProductOptionUrl.java", "license": "mit", "size": 4327 }
[ "com.mozu.api.MozuUrl", "com.mozu.api.utils.UrlFormatter" ]
import com.mozu.api.MozuUrl; import com.mozu.api.utils.UrlFormatter;
import com.mozu.api.*; import com.mozu.api.utils.*;
[ "com.mozu.api" ]
com.mozu.api;
2,682,609
public StringReader convertToXHTML(InputStream input) throws IOException { StringWriter output = new StringWriter(); TagNode node = getHtmlCleaner().clean(input, "UTF-8"); //TagNode node = getHtmlCleaner().clean(result); node.serialize(new SimpleXmlSerializer(getHtmlCleaner().getProperties()), output); output.flush(); String xhtml = output.toString(); return new StringReader(xhtml); }
StringReader function(InputStream input) throws IOException { StringWriter output = new StringWriter(); TagNode node = getHtmlCleaner().clean(input, "UTF-8"); node.serialize(new SimpleXmlSerializer(getHtmlCleaner().getProperties()), output); output.flush(); String xhtml = output.toString(); return new StringReader(xhtml); }
/** * Convert the HTML input stream into DOM parsable XHTML. */
Convert the HTML input stream into DOM parsable XHTML
convertToXHTML
{ "repo_name": "BOTlibre/BOTlibre", "path": "micro-ai-engine/android/source/org/botlibre/sense/http/Http.java", "license": "epl-1.0", "size": 46787 }
[ "java.io.IOException", "java.io.InputStream", "java.io.StringReader", "java.io.StringWriter", "org.htmlcleaner.SimpleXmlSerializer", "org.htmlcleaner.TagNode" ]
import java.io.IOException; import java.io.InputStream; import java.io.StringReader; import java.io.StringWriter; import org.htmlcleaner.SimpleXmlSerializer; import org.htmlcleaner.TagNode;
import java.io.*; import org.htmlcleaner.*;
[ "java.io", "org.htmlcleaner" ]
java.io; org.htmlcleaner;
460,950
/**
 * Sets the subAccount attribute value.
 * NOTE(review): the accompanying Javadoc record marks this setter as
 * deprecated without naming a replacement — confirm against the class.
 *
 * @param subAccount the sub-account to set
 */
public void setSubAccount(SubAccount subAccount) {
    this.subAccount = subAccount;
}
void function(SubAccount subAccount) { this.subAccount = subAccount; }
/** * Sets the subAccount attribute value. * * @param subAccount The subAccount to set. * @deprecated */
Sets the subAccount attribute value
setSubAccount
{ "repo_name": "ua-eas/ua-kfs-5.3", "path": "work/src/org/kuali/kfs/module/bc/businessobject/CalculatedSalaryFoundationTrackerOverride.java", "license": "agpl-3.0", "size": 14378 }
[ "org.kuali.kfs.coa.businessobject.SubAccount" ]
import org.kuali.kfs.coa.businessobject.SubAccount;
import org.kuali.kfs.coa.businessobject.*;
[ "org.kuali.kfs" ]
org.kuali.kfs;
59,970
/**
 * Closes the transport-level HTTP conduit backing the given client. After
 * this call the resource can no longer be used; invoke the builder's
 * build() method again to obtain a fresh client.
 *
 * @param root the resource returned by this builder's build() method
 * @throws IllegalArgumentException if the client's transport is not HTTP
 */
public static void closeClient(ApiRootResource root) {
    final ClientConfiguration clientConfig = WebClient.getConfig(root);
    final HTTPConduit httpConduit = clientConfig.getHttpConduit();
    if (httpConduit != null) {
        httpConduit.close();
        return;
    }
    throw new IllegalArgumentException(
        "Client is not using the HTTP transport");
}
static void function(ApiRootResource root) { ClientConfiguration config = WebClient.getConfig(root); HTTPConduit conduit = config.getHttpConduit(); if (conduit == null) { throw new IllegalArgumentException( STR); } conduit.close(); }
/** * Closes the transport level conduit in the client. Reopening a new * connection, requires creating a new client object using the build() * method in this builder. * * @param root The resource returned by the build() method of this * builder class */
Closes the transport level conduit in the client. Reopening a new connection, requires creating a new client object using the build() method in this builder
closeClient
{ "repo_name": "kostin88/cm_api", "path": "java/src/main/java/com/cloudera/api/ClouderaManagerClientBuilder.java", "license": "apache-2.0", "size": 10422 }
[ "org.apache.cxf.jaxrs.client.ClientConfiguration", "org.apache.cxf.jaxrs.client.WebClient", "org.apache.cxf.transport.http.HTTPConduit" ]
import org.apache.cxf.jaxrs.client.ClientConfiguration; import org.apache.cxf.jaxrs.client.WebClient; import org.apache.cxf.transport.http.HTTPConduit;
import org.apache.cxf.jaxrs.client.*; import org.apache.cxf.transport.http.*;
[ "org.apache.cxf" ]
org.apache.cxf;
1,638,811
/**
 * &lt;Rm&gt;
 *
 * Operation:
 *   shifter_operand = Rm
 *   shifter_carry_out = C Flag
 *
 * Encodes a plain register operand: the operand string is the register
 * value itself and the carry-out is the current C flag.
 *
 * @param registerNodeValue the register operand string
 * @return pair of (shifter operand, shifter carry-out)
 */
private static Pair<String, String> register(final String registerNodeValue) {
    final String shifterCarryOut = "C";
    return new Pair<String, String>(registerNodeValue, shifterCarryOut);
}
static Pair<String, String> function(final String registerNodeValue) { return new Pair<String, String>(registerNodeValue, "C"); }
/** * <Rm> * * Operation: * * shifter_operand = Rm shifter_carry_out = C Flag */
Operation: shifter_operand = Rm shifter_carry_out = C Flag
register
{ "repo_name": "mayl8822/binnavi", "path": "src/main/java/com/google/security/zynamics/reil/translators/arm/AddressingModeOneGenerator.java", "license": "apache-2.0", "size": 48307 }
[ "com.google.security.zynamics.zylib.general.Pair" ]
import com.google.security.zynamics.zylib.general.Pair;
import com.google.security.zynamics.zylib.general.*;
[ "com.google.security" ]
com.google.security;
1,187,985
public DialogBox createEmptyTrashDialog(boolean showDialog) { // Create the UI elements of the DialogBox final DialogBox dialogBox = new DialogBox(true, false); //DialogBox(autohide, modal) dialogBox.setStylePrimaryName("ode-DialogBox"); dialogBox.setText(MESSAGES.createNoProjectsDialogText()); Grid mainGrid = new Grid(2, 2); mainGrid.getCellFormatter().setAlignment(0, 0, HasHorizontalAlignment.ALIGN_CENTER, HasVerticalAlignment.ALIGN_MIDDLE); mainGrid.getCellFormatter().setAlignment(0, 1, HasHorizontalAlignment.ALIGN_CENTER, HasVerticalAlignment.ALIGN_MIDDLE); mainGrid.getCellFormatter().setAlignment(1, 1, HasHorizontalAlignment.ALIGN_RIGHT, HasVerticalAlignment.ALIGN_MIDDLE); Image dialogImage = new Image(Ode.getImageBundle().codiVert()); Grid messageGrid = new Grid(2, 1); messageGrid.getCellFormatter().setAlignment(0, 0, HasHorizontalAlignment.ALIGN_JUSTIFY, HasVerticalAlignment.ALIGN_MIDDLE); messageGrid.getCellFormatter().setAlignment(1, 0, HasHorizontalAlignment.ALIGN_LEFT, HasVerticalAlignment.ALIGN_MIDDLE); Label messageChunk2 = new Label(MESSAGES.showEmptyTrashMessage()); messageGrid.setWidget(1, 0, messageChunk2); mainGrid.setWidget(0, 0, dialogImage); mainGrid.setWidget(0, 1, messageGrid); dialogBox.setWidget(mainGrid); dialogBox.center(); if (showDialog) { dialogBox.show(); } return dialogBox; }
DialogBox function(boolean showDialog) { final DialogBox dialogBox = new DialogBox(true, false); dialogBox.setStylePrimaryName(STR); dialogBox.setText(MESSAGES.createNoProjectsDialogText()); Grid mainGrid = new Grid(2, 2); mainGrid.getCellFormatter().setAlignment(0, 0, HasHorizontalAlignment.ALIGN_CENTER, HasVerticalAlignment.ALIGN_MIDDLE); mainGrid.getCellFormatter().setAlignment(0, 1, HasHorizontalAlignment.ALIGN_CENTER, HasVerticalAlignment.ALIGN_MIDDLE); mainGrid.getCellFormatter().setAlignment(1, 1, HasHorizontalAlignment.ALIGN_RIGHT, HasVerticalAlignment.ALIGN_MIDDLE); Image dialogImage = new Image(Ode.getImageBundle().codiVert()); Grid messageGrid = new Grid(2, 1); messageGrid.getCellFormatter().setAlignment(0, 0, HasHorizontalAlignment.ALIGN_JUSTIFY, HasVerticalAlignment.ALIGN_MIDDLE); messageGrid.getCellFormatter().setAlignment(1, 0, HasHorizontalAlignment.ALIGN_LEFT, HasVerticalAlignment.ALIGN_MIDDLE); Label messageChunk2 = new Label(MESSAGES.showEmptyTrashMessage()); messageGrid.setWidget(1, 0, messageChunk2); mainGrid.setWidget(0, 0, dialogImage); mainGrid.setWidget(0, 1, messageGrid); dialogBox.setWidget(mainGrid); dialogBox.center(); if (showDialog) { dialogBox.show(); } return dialogBox; }
/** * Creates a dialog box to show empty trash list message. * @param showDialog Convenience variable to show the created DialogBox. * @return The created and optionally displayed Dialog box. */
Creates a dialog box to show empty trash list message
createEmptyTrashDialog
{ "repo_name": "halatmit/appinventor-sources", "path": "appinventor/appengine/src/com/google/appinventor/client/Ode.java", "license": "apache-2.0", "size": 99070 }
[ "com.google.gwt.user.client.ui.DialogBox", "com.google.gwt.user.client.ui.Grid", "com.google.gwt.user.client.ui.HasHorizontalAlignment", "com.google.gwt.user.client.ui.HasVerticalAlignment", "com.google.gwt.user.client.ui.Image", "com.google.gwt.user.client.ui.Label" ]
import com.google.gwt.user.client.ui.DialogBox; import com.google.gwt.user.client.ui.Grid; import com.google.gwt.user.client.ui.HasHorizontalAlignment; import com.google.gwt.user.client.ui.HasVerticalAlignment; import com.google.gwt.user.client.ui.Image; import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.*;
[ "com.google.gwt" ]
com.google.gwt;
2,042,931
public synchronized void load() { try { // first try the old format File queueFile = getQueueFile(); if (queueFile.exists()) { BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(queueFile))); try { String line; while ((line = in.readLine()) != null) { AbstractProject j = Jenkins.getInstance().getItemByFullName(line, AbstractProject.class); if (j != null) j.scheduleBuild(); } } finally { in.close(); } // discard the queue file now that we are done queueFile.delete(); } else { queueFile = getXMLQueueFile(); if (queueFile.exists()) { List list = (List) new XmlFile(XSTREAM, queueFile).read(); int maxId = 0; for (Object o : list) { if (o instanceof Task) { // backward compatibility schedule((Task)o, 0); } else if (o instanceof Item) { Item item = (Item)o; if(item.task==null) continue; // botched persistence. throw this one away maxId = Math.max(maxId, item.id); if (item instanceof WaitingItem) { waitingList.add((WaitingItem) item); } else if (item instanceof BlockedItem) { blockedProjects.put(item.task, (BlockedItem) item); } else if (item instanceof BuildableItem) { buildables.add((BuildableItem) item); } else { throw new IllegalStateException("Unknown item type! " + item); } } // this conveniently ignores null } WaitingItem.COUNTER.set(maxId); // I just had an incident where all the executors are dead at AbstractProject._getRuns() // because runs is null. Debugger revealed that this is caused by a MatrixConfiguration // object that doesn't appear to be de-serialized properly. // I don't know how this problem happened, but to diagnose this problem better // when it happens again, save the old queue file for introspection. File bk = new File(queueFile.getPath() + ".bak"); bk.delete(); queueFile.renameTo(bk); queueFile.delete(); } } } catch (IOException e) { LOGGER.log(Level.WARNING, "Failed to load the queue file " + getXMLQueueFile(), e); } }
synchronized void function() { try { File queueFile = getQueueFile(); if (queueFile.exists()) { BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(queueFile))); try { String line; while ((line = in.readLine()) != null) { AbstractProject j = Jenkins.getInstance().getItemByFullName(line, AbstractProject.class); if (j != null) j.scheduleBuild(); } } finally { in.close(); } queueFile.delete(); } else { queueFile = getXMLQueueFile(); if (queueFile.exists()) { List list = (List) new XmlFile(XSTREAM, queueFile).read(); int maxId = 0; for (Object o : list) { if (o instanceof Task) { schedule((Task)o, 0); } else if (o instanceof Item) { Item item = (Item)o; if(item.task==null) continue; maxId = Math.max(maxId, item.id); if (item instanceof WaitingItem) { waitingList.add((WaitingItem) item); } else if (item instanceof BlockedItem) { blockedProjects.put(item.task, (BlockedItem) item); } else if (item instanceof BuildableItem) { buildables.add((BuildableItem) item); } else { throw new IllegalStateException(STR + item); } } } WaitingItem.COUNTER.set(maxId); File bk = new File(queueFile.getPath() + ".bak"); bk.delete(); queueFile.renameTo(bk); queueFile.delete(); } } } catch (IOException e) { LOGGER.log(Level.WARNING, STR + getXMLQueueFile(), e); } }
/** * Loads the queue contents that was {@link #save() saved}. */
Loads the queue contents that was <code>#save() saved</code>
load
{ "repo_name": "zll5267/jenkins", "path": "core/src/main/java/hudson/model/Queue.java", "license": "mit", "size": 59720 }
[ "java.io.BufferedReader", "java.io.File", "java.io.FileInputStream", "java.io.IOException", "java.io.InputStreamReader", "java.util.List", "java.util.logging.Level" ]
import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.util.List; import java.util.logging.Level;
import java.io.*; import java.util.*; import java.util.logging.*;
[ "java.io", "java.util" ]
java.io; java.util;
663,095
public void setInitialReplayIdMap(Map<String, Long> initialReplayIdMap) { this.initialReplayIdMap = initialReplayIdMap; }
void function(Map<String, Long> initialReplayIdMap) { this.initialReplayIdMap = initialReplayIdMap; }
/** * Replay IDs to start from per channel name. */
Replay IDs to start from per channel name
setInitialReplayIdMap
{ "repo_name": "objectiser/camel", "path": "components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/SalesforceEndpointConfig.java", "license": "apache-2.0", "size": 24729 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,085,307
interface WithLocation { WithCreate withLocation(String location); } interface WithCreate extends Creatable<Database>, DefinitionStages.WithLocation { } } interface Update extends Appliable<Database>, UpdateStages.WithLocation { }
interface WithLocation { WithCreate withLocation(String location); } interface WithCreate extends Creatable<Database>, DefinitionStages.WithLocation { } } interface Update extends Appliable<Database>, UpdateStages.WithLocation { }
/** * Specifies location. * @param location Resource location * @return the next definition stage */
Specifies location
withLocation
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/kusto/mgmt-v2019_11_09/src/main/java/com/microsoft/azure/management/kusto/v2019_11_09/Database.java", "license": "mit", "size": 3606 }
[ "com.microsoft.azure.arm.model.Appliable", "com.microsoft.azure.arm.model.Creatable" ]
import com.microsoft.azure.arm.model.Appliable; import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.model.*;
[ "com.microsoft.azure" ]
com.microsoft.azure;
1,798,330
public String getTooltipText() { final StringBuffer sb = new StringBuffer(); sb.append("<b>Table:</b> " + tablename + "<br>"); if(getPolygon().getId() != 0) { sb.append("<b>Id: </b> " + getPolygon().getId() + "<br>"); } for(Map.Entry<String, String> property : getPolygon().getProperties().entrySet()) { final String tooltipProperty = property.getKey(); final String tooltipPropertyUpper = tooltipProperty.substring(0, 1).toUpperCase() + tooltipProperty.substring(1); sb.append("<b>" + tooltipPropertyUpper + ":</b> " + property.getValue() + "<br>"); } return sb.toString(); }
String function() { final StringBuffer sb = new StringBuffer(); sb.append(STR + tablename + "<br>"); if(getPolygon().getId() != 0) { sb.append(STR + getPolygon().getId() + "<br>"); } for(Map.Entry<String, String> property : getPolygon().getProperties().entrySet()) { final String tooltipProperty = property.getKey(); final String tooltipPropertyUpper = tooltipProperty.substring(0, 1).toUpperCase() + tooltipProperty.substring(1); sb.append("<b>" + tooltipPropertyUpper + STR + property.getValue() + "<br>"); } return sb.toString(); }
/** * Get the tooltip text * @return */
Get the tooltip text
getTooltipText
{ "repo_name": "jnidzwetzki/bboxdb", "path": "bboxdb-tools/src/main/java/org/bboxdb/tools/gui/views/query/OverlayElement.java", "license": "apache-2.0", "size": 11704 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,334,835
KeyValueIterator<Bytes, byte[]> backwardFetch(final Bytes keyFrom, final Bytes keyTo, final long from, final long to); /** * Gets all the key-value pairs in the existing windows. * * @return an iterator over windowed key-value pairs {@code <Windowed<K>, value>}
KeyValueIterator<Bytes, byte[]> backwardFetch(final Bytes keyFrom, final Bytes keyTo, final long from, final long to); /** * Gets all the key-value pairs in the existing windows. * * @return an iterator over windowed key-value pairs {@code <Windowed<K>, value>}
/** * Fetch all records from the segmented store in the provided key range and time range * from all existing segments in backward order (from latest to earliest) * @param keyFrom The first key that could be in the range * @param keyTo The last key that could be in the range * @param from earliest time to match * @param to latest time to match * @return an iterator over key-value pairs */
Fetch all records from the segmented store in the provided key range and time range from all existing segments in backward order (from latest to earliest)
backwardFetch
{ "repo_name": "guozhangwang/kafka", "path": "streams/src/main/java/org/apache/kafka/streams/state/internals/SegmentedBytesStore.java", "license": "apache-2.0", "size": 9537 }
[ "org.apache.kafka.common.utils.Bytes", "org.apache.kafka.streams.state.KeyValueIterator" ]
import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.common.utils.*; import org.apache.kafka.streams.state.*;
[ "org.apache.kafka" ]
org.apache.kafka;
1,428,918
public void testPlayFromStringReader() throws Exception { String myCSV = "symbol, price, volume" + NEW_LINE + "IBM, 10.2, 10000"; StringReader reader = new StringReader(myCSV); trySource(new AdapterInputSource(reader)); }
void function() throws Exception { String myCSV = STR + NEW_LINE + STR; StringReader reader = new StringReader(myCSV); trySource(new AdapterInputSource(reader)); }
/** * Play a CSV file that is from memory. */
Play a CSV file that is from memory
testPlayFromStringReader
{ "repo_name": "b-cuts/esper", "path": "esperio-csv/src/test/java/com/espertech/esperio/regression/adapter/TestCSVAdapterUseCases.java", "license": "gpl-2.0", "size": 15761 }
[ "com.espertech.esperio.AdapterInputSource", "java.io.StringReader" ]
import com.espertech.esperio.AdapterInputSource; import java.io.StringReader;
import com.espertech.esperio.*; import java.io.*;
[ "com.espertech.esperio", "java.io" ]
com.espertech.esperio; java.io;
1,536,967
return sendAsync(HttpMethod.GET, null); }
return sendAsync(HttpMethod.GET, null); }
/** * Gets the MobileLobApp from the service * * @return a future with the result */
Gets the MobileLobApp from the service
getAsync
{ "repo_name": "microsoftgraph/msgraph-sdk-java", "path": "src/main/java/com/microsoft/graph/requests/MobileLobAppRequest.java", "license": "mit", "size": 6610 }
[ "com.microsoft.graph.http.HttpMethod" ]
import com.microsoft.graph.http.HttpMethod;
import com.microsoft.graph.http.*;
[ "com.microsoft.graph" ]
com.microsoft.graph;
563,392
public void setUpdated_at(Date updated_at) { this.updated_at = updated_at; }
void function(Date updated_at) { this.updated_at = updated_at; }
/** * This method was generated by MyBatis Generator. * This method sets the value of the database column flowdetails.updated_at * * @param updated_at the value for flowdetails.updated_at * * @mbggenerated Tue May 26 15:53:09 CST 2015 */
This method was generated by MyBatis Generator. This method sets the value of the database column flowdetails.updated_at
setUpdated_at
{ "repo_name": "wolabs/womano", "path": "main/java/com/culabs/unicomportal/model/db/DBFlowdetails.java", "license": "apache-2.0", "size": 6427 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
213,884
final ProceedingJoinPoint point = Mockito .mock(ProceedingJoinPoint.class); final MethodSignature signature = Mockito.mock(MethodSignature.class); Mockito.when(point.getSignature()).thenReturn(signature); Mockito.when(signature.getMethod()) .thenReturn(this.getClass().getMethods()[0]); Mockito.when(signature.getReturnType()) .thenReturn(Void.TYPE); new QuietExceptionsLogger().wrap(point); Mockito.verify(point).proceed(); }
final ProceedingJoinPoint point = Mockito .mock(ProceedingJoinPoint.class); final MethodSignature signature = Mockito.mock(MethodSignature.class); Mockito.when(point.getSignature()).thenReturn(signature); Mockito.when(signature.getMethod()) .thenReturn(this.getClass().getMethods()[0]); Mockito.when(signature.getReturnType()) .thenReturn(Void.TYPE); new QuietExceptionsLogger().wrap(point); Mockito.verify(point).proceed(); }
/** * Call method that doesn't throw exception. * @throws Throwable In case of error. */
Call method that doesn't throw exception
withoutException
{ "repo_name": "54uso/jcabi-aspects", "path": "src/test/java/com/jcabi/aspects/aj/QuietExceptionsLoggerTest.java", "license": "bsd-3-clause", "size": 4320 }
[ "org.aspectj.lang.ProceedingJoinPoint", "org.aspectj.lang.reflect.MethodSignature", "org.mockito.Mockito" ]
import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.reflect.MethodSignature; import org.mockito.Mockito;
import org.aspectj.lang.*; import org.aspectj.lang.reflect.*; import org.mockito.*;
[ "org.aspectj.lang", "org.mockito" ]
org.aspectj.lang; org.mockito;
1,314,744
public static void debug(String tag, String msg) { if (BuildConfig.DEBUG) { Log.d(tag, msg); } }
static void function(String tag, String msg) { if (BuildConfig.DEBUG) { Log.d(tag, msg); } }
/** * Send Log type debug only if the signature is debug type * * @param tag String Tag name * @param msg message to send */
Send Log type debug only if the signature is debug type
debug
{ "repo_name": "fvasquezjatar/fermat-unused", "path": "CBP/android/desktop/fermat-cbp-android-desktop-wallet-manager-bitdubai/src/main/java/com/bitdubai/desktop/wallet_manager/util/CommonLogger.java", "license": "mit", "size": 1477 }
[ "android.util.Log", "com.bitdubai.desktop.wallet_manager.BuildConfig" ]
import android.util.Log; import com.bitdubai.desktop.wallet_manager.BuildConfig;
import android.util.*; import com.bitdubai.desktop.wallet_manager.*;
[ "android.util", "com.bitdubai.desktop" ]
android.util; com.bitdubai.desktop;
1,594,550
@Nullable public RouteController onCreateRouteController(String routeId) { return null; } public static final class ProviderMetadata { private final ComponentName mComponentName; ProviderMetadata(ComponentName componentName) { if (componentName == null) { throw new IllegalArgumentException("componentName must not be null"); } mComponentName = componentName; }
RouteController function(String routeId) { return null; } public static final class ProviderMetadata { private final ComponentName mComponentName; ProviderMetadata(ComponentName componentName) { if (componentName == null) { throw new IllegalArgumentException(STR); } mComponentName = componentName; }
/** * Called by the media router to obtain a route controller for a particular route. * <p> * The media router will invoke the {@link com.commonsware.cwac.mediarouter.media.MediaRouteProvider.RouteController#onRelease} method of the route * controller when it is no longer needed to allow it to free its resources. * </p> * * @param routeId The unique id of the route. * @return The route controller. Returns null if there is no such route or if the route * cannot be controlled using the route controller interface. */
Called by the media router to obtain a route controller for a particular route. The media router will invoke the <code>com.commonsware.cwac.mediarouter.media.MediaRouteProvider.RouteController#onRelease</code> method of the route controller when it is no longer needed to allow it to free its resources.
onCreateRouteController
{ "repo_name": "commonsguy/cwac-mediarouter", "path": "mediarouter/src/com/commonsware/cwac/mediarouter/media/MediaRouteProvider.java", "license": "apache-2.0", "size": 14803 }
[ "android.content.ComponentName" ]
import android.content.ComponentName;
import android.content.*;
[ "android.content" ]
android.content;
1,377,653
public void setNegativeItemLabelPositionFallback( ItemLabelPosition position) { this.negativeItemLabelPositionFallback = position; fireChangeEvent(); }
void function( ItemLabelPosition position) { this.negativeItemLabelPositionFallback = position; fireChangeEvent(); }
/** * Sets the fallback position for negative item labels that don't fit * within a bar, and sends a {@link RendererChangeEvent} to all registered * listeners. * * @param position the position (<code>null</code> permitted). * * @see #getNegativeItemLabelPositionFallback() * @since 1.0.2 */
Sets the fallback position for negative item labels that don't fit within a bar, and sends a <code>RendererChangeEvent</code> to all registered listeners
setNegativeItemLabelPositionFallback
{ "repo_name": "akardapolov/ASH-Viewer", "path": "jfreechart-fse/src/main/java/org/jfree/chart/renderer/xy/XYBarRenderer.java", "license": "gpl-3.0", "size": 41965 }
[ "org.jfree.chart.labels.ItemLabelPosition" ]
import org.jfree.chart.labels.ItemLabelPosition;
import org.jfree.chart.labels.*;
[ "org.jfree.chart" ]
org.jfree.chart;
917,151
public void setCall(AbstractFunction fun) { _call = fun; }
void function(AbstractFunction fun) { _call = fun; }
/** * Sets the __call */
Sets the __call
setCall
{ "repo_name": "WelcomeHUME/svn-caucho-com-resin", "path": "modules/quercus/src/com/caucho/quercus/env/QuercusClass.java", "license": "gpl-2.0", "size": 70646 }
[ "com.caucho.quercus.function.AbstractFunction" ]
import com.caucho.quercus.function.AbstractFunction;
import com.caucho.quercus.function.*;
[ "com.caucho.quercus" ]
com.caucho.quercus;
1,775,142
@ServiceMethod(returns = ReturnType.SINGLE) CommunityGalleryInner get(String location, String publicGalleryName);
@ServiceMethod(returns = ReturnType.SINGLE) CommunityGalleryInner get(String location, String publicGalleryName);
/** * Get a community gallery by gallery public name. * * @param location Resource location. * @param publicGalleryName The public name of the community gallery. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a community gallery by gallery public name. */
Get a community gallery by gallery public name
get
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-compute/src/main/java/com/azure/resourcemanager/compute/fluent/CommunityGalleriesClient.java", "license": "mit", "size": 3663 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.resourcemanager.compute.fluent.models.CommunityGalleryInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.compute.fluent.models.CommunityGalleryInner;
import com.azure.core.annotation.*; import com.azure.resourcemanager.compute.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
1,317,636
public static boolean getRegionCachePrefetch(final Configuration conf, final byte[] tableName) throws IOException { return getRegionCachePrefetch(conf, TableName.valueOf(tableName)); }
static boolean function(final Configuration conf, final byte[] tableName) throws IOException { return getRegionCachePrefetch(conf, TableName.valueOf(tableName)); }
/** * Check whether region cache prefetch is enabled or not for the table. * @param conf The Configuration object to use. * @param tableName name of table to check * @return true if table's region cache prefecth is enabled. Otherwise * it is disabled. * @throws IOException */
Check whether region cache prefetch is enabled or not for the table
getRegionCachePrefetch
{ "repo_name": "alipayhuber/hack-hbase", "path": "hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java", "license": "apache-2.0", "size": 62596 }
[ "java.io.IOException", "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.hbase.TableName" ]
import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName;
import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
1,075,146
@JsonProperty("time_exec") public Double getExecutionTime() { return this.executionTime; }
@JsonProperty(STR) Double function() { return this.executionTime; }
/** * "time_exec": 0.019799947738647 */
"time_exec": 0.019799947738647
getExecutionTime
{ "repo_name": "dbadia/openhab", "path": "bundles/binding/org.openhab.binding.netatmo/src/main/java/org/openhab/binding/netatmo/internal/weather/GetStationsDataResponse.java", "license": "epl-1.0", "size": 23138 }
[ "org.codehaus.jackson.annotate.JsonProperty" ]
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.annotate.*;
[ "org.codehaus.jackson" ]
org.codehaus.jackson;
2,237,671
void enterTypeArgument(@NotNull JavaParser.TypeArgumentContext ctx); void exitTypeArgument(@NotNull JavaParser.TypeArgumentContext ctx);
void enterTypeArgument(@NotNull JavaParser.TypeArgumentContext ctx); void exitTypeArgument(@NotNull JavaParser.TypeArgumentContext ctx);
/** * Exit a parse tree produced by {@link JavaParser#typeArgument}. * @param ctx the parse tree */
Exit a parse tree produced by <code>JavaParser#typeArgument</code>
exitTypeArgument
{ "repo_name": "zmughal/oop-analysis", "path": "src/generated-sources/JavaListener.java", "license": "apache-2.0", "size": 38949 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
798,803
@Test void checkConnections() { EllipsesIntoClusters alg = new EllipsesIntoClusters(2.1,0.5,0.5); List<EllipseInfo> input = new ArrayList<>(); input.add(create(0 ,0,1,1,0)); input.add(create(2.0,0,1,1,0)); input.add(create(4.0,0,1,1,0)); input.add(create( 0,2,1,1,0)); input.add(create(2.0,2,1,1,0)); input.add(create(4.0,2,1,1,0)); List<List<EllipsesIntoClusters.Node>> output = new ArrayList<>(); alg.process(input,output); assertEquals( 1 , output.size()); List<EllipsesIntoClusters.Node> found = output.get(0); assertEquals( 6 , found.size()); int histogram[] = new int[5]; for( EllipsesIntoClusters.Node n : found ) { histogram[n.connections.size]++; } assertEquals(0, histogram[0]); assertEquals(0, histogram[1]); assertEquals(4, histogram[2]); assertEquals(2, histogram[3]); assertEquals(0, histogram[4]); }
@Test void checkConnections() { EllipsesIntoClusters alg = new EllipsesIntoClusters(2.1,0.5,0.5); List<EllipseInfo> input = new ArrayList<>(); input.add(create(0 ,0,1,1,0)); input.add(create(2.0,0,1,1,0)); input.add(create(4.0,0,1,1,0)); input.add(create( 0,2,1,1,0)); input.add(create(2.0,2,1,1,0)); input.add(create(4.0,2,1,1,0)); List<List<EllipsesIntoClusters.Node>> output = new ArrayList<>(); alg.process(input,output); assertEquals( 1 , output.size()); List<EllipsesIntoClusters.Node> found = output.get(0); assertEquals( 6 , found.size()); int histogram[] = new int[5]; for( EllipsesIntoClusters.Node n : found ) { histogram[n.connections.size]++; } assertEquals(0, histogram[0]); assertEquals(0, histogram[1]); assertEquals(4, histogram[2]); assertEquals(2, histogram[3]); assertEquals(0, histogram[4]); }
/** * Provide it a simple case to cluster and make sure everything is connected properly */
Provide it a simple case to cluster and make sure everything is connected properly
checkConnections
{ "repo_name": "lessthanoptimal/BoofCV", "path": "main/boofcv-recognition/src/test/java/boofcv/alg/fiducial/calib/circle/TestEllipsesIntoClusters.java", "license": "apache-2.0", "size": 8199 }
[ "java.util.ArrayList", "java.util.List", "org.junit.jupiter.api.Assertions", "org.junit.jupiter.api.Test" ]
import java.util.ArrayList; import java.util.List; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test;
import java.util.*; import org.junit.jupiter.api.*;
[ "java.util", "org.junit.jupiter" ]
java.util; org.junit.jupiter;
1,519,040
InputStream openResource(URI uri) throws MalformedURLException, IOException;
InputStream openResource(URI uri) throws MalformedURLException, IOException;
/** * Obtain an input stream to the resource at the given URI. * @param uri resource URI * @return input stream or <code>null</code> if the resource is not found * @throws MalformedURLException in case of invalid URI * @throws IOException in case of read error */
Obtain an input stream to the resource at the given URI
openResource
{ "repo_name": "Subasinghe/ode", "path": "bpel-compiler/src/main/java/org/apache/ode/bpel/compiler/ResourceFinder.java", "license": "apache-2.0", "size": 1595 }
[ "java.io.IOException", "java.io.InputStream", "java.net.MalformedURLException" ]
import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException;
import java.io.*; import java.net.*;
[ "java.io", "java.net" ]
java.io; java.net;
2,220,646
boolean isAirBlock(BlockPos pos);
boolean isAirBlock(BlockPos pos);
/** * Checks to see if an air block exists at the provided location. Note that this only checks to see if the blocks * material is set to air, meaning it is possible for non-vanilla blocks to still pass this check. * * @param pos The position of the block being checked. */
Checks to see if an air block exists at the provided location. Note that this only checks to see if the blocks material is set to air, meaning it is possible for non-vanilla blocks to still pass this check
isAirBlock
{ "repo_name": "trixmot/mod1", "path": "build/tmp/recompileMc/sources/net/minecraft/world/IBlockAccess.java", "license": "lgpl-2.1", "size": 1515 }
[ "net.minecraft.util.BlockPos" ]
import net.minecraft.util.BlockPos;
import net.minecraft.util.*;
[ "net.minecraft.util" ]
net.minecraft.util;
1,272,458
static Node newCallNode(Node callTarget, Node... parameters) { boolean isFreeCall = isName(callTarget); Node call = new Node(Token.CALL, callTarget); call.putBooleanProp(Node.FREE_CALL, isFreeCall); for (Node parameter : parameters) { call.addChildToBack(parameter); } return call; }
static Node newCallNode(Node callTarget, Node... parameters) { boolean isFreeCall = isName(callTarget); Node call = new Node(Token.CALL, callTarget); call.putBooleanProp(Node.FREE_CALL, isFreeCall); for (Node parameter : parameters) { call.addChildToBack(parameter); } return call; }
/** * A new CALL node with the "FREE_CALL" set based on call target. */
A new CALL node with the "FREE_CALL" set based on call target
newCallNode
{ "repo_name": "JonathanWalsh/Granule-Closure-Compiler", "path": "src/com/google/javascript/jscomp/NodeUtil.java", "license": "apache-2.0", "size": 88449 }
[ "com.google.javascript.rhino.Node", "com.google.javascript.rhino.Token" ]
import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.*;
[ "com.google.javascript" ]
com.google.javascript;
1,137,639
public InputStream getInputStream() throws IOException { return fileSystem.getInputStream(this); }
InputStream function() throws IOException { return fileSystem.getInputStream(this); }
/** * Opens the file denoted by this path, following symbolic links, for reading, * and returns an input stream to it. * * @throws IOException if the file was not found or could not be opened for * reading */
Opens the file denoted by this path, following symbolic links, for reading, and returns an input stream to it
getInputStream
{ "repo_name": "variac/bazel", "path": "src/main/java/com/google/devtools/build/lib/vfs/Path.java", "license": "apache-2.0", "size": 46086 }
[ "java.io.IOException", "java.io.InputStream" ]
import java.io.IOException; import java.io.InputStream;
import java.io.*;
[ "java.io" ]
java.io;
2,653,795
public CopyBehaviourCallback onCopyAlien(QName classRef, CopyDetails copyDetails) { return AlienAspectCopyBehaviourCallback.INSTANCE; } private static class TransferredAspectCopyBehaviourCallback extends DefaultCopyBehaviourCallback { private static final CopyBehaviourCallback INSTANCE = new TransferredAspectCopyBehaviourCallback();
CopyBehaviourCallback function(QName classRef, CopyDetails copyDetails) { return AlienAspectCopyBehaviourCallback.INSTANCE; } private static class TransferredAspectCopyBehaviourCallback extends DefaultCopyBehaviourCallback { private static final CopyBehaviourCallback INSTANCE = new TransferredAspectCopyBehaviourCallback();
/** * When an alien node is copied, don't copy the alien aspect. */
When an alien node is copied, don't copy the alien aspect
onCopyAlien
{ "repo_name": "fxcebx/community-edition", "path": "projects/repository/source/java/org/alfresco/repo/transfer/RepoTransferReceiverImpl.java", "license": "lgpl-3.0", "size": 60954 }
[ "org.alfresco.repo.copy.CopyBehaviourCallback", "org.alfresco.repo.copy.CopyDetails", "org.alfresco.repo.copy.DefaultCopyBehaviourCallback", "org.alfresco.service.namespace.QName" ]
import org.alfresco.repo.copy.CopyBehaviourCallback; import org.alfresco.repo.copy.CopyDetails; import org.alfresco.repo.copy.DefaultCopyBehaviourCallback; import org.alfresco.service.namespace.QName;
import org.alfresco.repo.copy.*; import org.alfresco.service.namespace.*;
[ "org.alfresco.repo", "org.alfresco.service" ]
org.alfresco.repo; org.alfresco.service;
222,297
public void testMemstoreEditsVisibilityWithSameKey() throws IOException { final byte[] row = Bytes.toBytes(1); final byte[] f = Bytes.toBytes("family"); final byte[] q1 = Bytes.toBytes("q1"); final byte[] q2 = Bytes.toBytes("q2"); final byte[] v1 = Bytes.toBytes("value1"); final byte[] v2 = Bytes.toBytes("value2"); // INSERT 1: Write both columns val1 MultiVersionConsistencyControl.WriteEntry w = mvcc.beginMemstoreInsertWithSeqNum(this.startSeqNum.incrementAndGet()); KeyValue kv11 = new KeyValue(row, f, q1, v1); kv11.setSequenceId(w.getWriteNumber()); memstore.add(kv11); KeyValue kv12 = new KeyValue(row, f, q2, v1); kv12.setSequenceId(w.getWriteNumber()); memstore.add(kv12); mvcc.completeMemstoreInsert(w); // BEFORE STARTING INSERT 2, SEE FIRST KVS KeyValueScanner s = this.memstore.getScanners(mvcc.memstoreReadPoint()).get(0); assertScannerResults(s, new KeyValue[]{kv11, kv12}); // START INSERT 2: Write both columns val2 w = mvcc.beginMemstoreInsertWithSeqNum(this.startSeqNum.incrementAndGet()); KeyValue kv21 = new KeyValue(row, f, q1, v2); kv21.setSequenceId(w.getWriteNumber()); memstore.add(kv21); KeyValue kv22 = new KeyValue(row, f, q2, v2); kv22.setSequenceId(w.getWriteNumber()); memstore.add(kv22); // BEFORE COMPLETING INSERT 2, SEE FIRST KVS s = this.memstore.getScanners(mvcc.memstoreReadPoint()).get(0); assertScannerResults(s, new KeyValue[]{kv11, kv12}); // COMPLETE INSERT 2 mvcc.completeMemstoreInsert(w); // NOW SHOULD SEE NEW KVS IN ADDITION TO OLD KVS. // See HBASE-1485 for discussion about what we should do with // the duplicate-TS inserts s = this.memstore.getScanners(mvcc.memstoreReadPoint()).get(0); assertScannerResults(s, new KeyValue[]{kv21, kv11, kv22, kv12}); }
void function() throws IOException { final byte[] row = Bytes.toBytes(1); final byte[] f = Bytes.toBytes(STR); final byte[] q1 = Bytes.toBytes("q1"); final byte[] q2 = Bytes.toBytes("q2"); final byte[] v1 = Bytes.toBytes(STR); final byte[] v2 = Bytes.toBytes(STR); MultiVersionConsistencyControl.WriteEntry w = mvcc.beginMemstoreInsertWithSeqNum(this.startSeqNum.incrementAndGet()); KeyValue kv11 = new KeyValue(row, f, q1, v1); kv11.setSequenceId(w.getWriteNumber()); memstore.add(kv11); KeyValue kv12 = new KeyValue(row, f, q2, v1); kv12.setSequenceId(w.getWriteNumber()); memstore.add(kv12); mvcc.completeMemstoreInsert(w); KeyValueScanner s = this.memstore.getScanners(mvcc.memstoreReadPoint()).get(0); assertScannerResults(s, new KeyValue[]{kv11, kv12}); w = mvcc.beginMemstoreInsertWithSeqNum(this.startSeqNum.incrementAndGet()); KeyValue kv21 = new KeyValue(row, f, q1, v2); kv21.setSequenceId(w.getWriteNumber()); memstore.add(kv21); KeyValue kv22 = new KeyValue(row, f, q2, v2); kv22.setSequenceId(w.getWriteNumber()); memstore.add(kv22); s = this.memstore.getScanners(mvcc.memstoreReadPoint()).get(0); assertScannerResults(s, new KeyValue[]{kv11, kv12}); mvcc.completeMemstoreInsert(w); s = this.memstore.getScanners(mvcc.memstoreReadPoint()).get(0); assertScannerResults(s, new KeyValue[]{kv21, kv11, kv22, kv12}); }
/** * Regression test for HBASE-2616, HBASE-2670. * When we insert a higher-memstoreTS version of a cell but with * the same timestamp, we still need to provide consistent reads * for the same scanner. */
Regression test for HBASE-2616, HBASE-2670. When we insert a higher-memstoreTS version of a cell but with the same timestamp, we still need to provide consistent reads for the same scanner
testMemstoreEditsVisibilityWithSameKey
{ "repo_name": "toshimasa-nasu/hbase", "path": "hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java", "license": "apache-2.0", "size": 37226 }
[ "java.io.IOException", "org.apache.hadoop.hbase.KeyValue", "org.apache.hadoop.hbase.util.Bytes" ]
import java.io.IOException; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.util.Bytes;
import java.io.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.util.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
108,999
@FIXVersion(introduced="4.4") public void setDiscretionInstructions() { throw new UnsupportedOperationException(getUnsupportedTagMessage()); }
@FIXVersion(introduced="4.4") void function() { throw new UnsupportedOperationException(getUnsupportedTagMessage()); }
/** * Sets the PegInstructions component if used in this message to the proper implementation * class. */
Sets the PegInstructions component if used in this message to the proper implementation class
setDiscretionInstructions
{ "repo_name": "marvisan/HadesFIX", "path": "Model/src/main/java/net/hades/fix/message/CrossOrderModificationRequestMsg.java", "license": "gpl-3.0", "size": 87836 }
[ "net.hades.fix.message.anno.FIXVersion" ]
import net.hades.fix.message.anno.FIXVersion;
import net.hades.fix.message.anno.*;
[ "net.hades.fix" ]
net.hades.fix;
1,869,176
public final StyleableProperty<String> createStyleableStringProperty( S styleable, String propertyName, String cssProperty, Function<S, StyleableProperty<String>> function, String initialValue) { return createStyleableStringProperty(styleable, propertyName, cssProperty, function, initialValue, false); }
final StyleableProperty<String> function( S styleable, String propertyName, String cssProperty, Function<S, StyleableProperty<String>> function, String initialValue) { return createStyleableStringProperty(styleable, propertyName, cssProperty, function, initialValue, false); }
/** * Create a StyleableProperty&lt;String&gt; with initial value. The inherit flag defaults to false. * @param styleable The <code>this</code> reference of the returned property. This is also the property bean. * @param propertyName The field name of the StyleableProperty&lt;String&gt; * @param cssProperty The CSS property name * @param function A function that returns the StyleableProperty&lt;String&gt; that was created by this method call. * @param initialValue The initial value of the property. CSS may reset the property to this value. * @return a StyleableProperty created with initial value and false inherit flag */
Create a StyleableProperty&lt;String&gt; with initial value. The inherit flag defaults to false
createStyleableStringProperty
{ "repo_name": "teamfx/openjfx-10-dev-rt", "path": "modules/javafx.graphics/src/main/java/javafx/css/StyleablePropertyFactory.java", "license": "gpl-2.0", "size": 113819 }
[ "java.util.function.Function" ]
import java.util.function.Function;
import java.util.function.*;
[ "java.util" ]
java.util;
2,398,597
@Override public boolean previous() throws SQLException { return relative(-1); }
boolean function() throws SQLException { return relative(-1); }
/** * Moves the cursor to the previous row in this ResultSet object. * * @return true if the cursor is on a valid row; false if it is off * the result set * @throws SQLException if a database access error occurs or ResultSet is * closed or the result set type is TYPE_FORWARD_ONLY */
Moves the cursor to the previous row in this ResultSet object
previous
{ "repo_name": "zyzyis/monetdb", "path": "java/src/main/java/nl/cwi/monetdb/jdbc/MonetResultSet.java", "license": "mpl-2.0", "size": 112988 }
[ "java.sql.SQLException" ]
import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
2,221,287
protected void addActionsPropertyDescriptor(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString("_UI_ActionGroup_actions_feature"), getString("_UI_PropertyDescriptor_description", "_UI_ActionGroup_actions_feature", "_UI_ActionGroup_type"), UIModelPackage.Literals.ACTION_GROUP__ACTIONS, true, false, true, null, null, null)); }
void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), UIModelPackage.Literals.ACTION_GROUP__ACTIONS, true, false, true, null, null, null)); }
/** * This adds a property descriptor for the Actions feature. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This adds a property descriptor for the Actions feature.
addActionsPropertyDescriptor
{ "repo_name": "debabratahazra/DS", "path": "designstudio/components/page/ui/com.odcgroup.page.edit/src/generated/java/com/odcgroup/page/uimodel/provider/ActionGroupItemProvider.java", "license": "epl-1.0", "size": 4284 }
[ "com.odcgroup.page.uimodel.UIModelPackage", "org.eclipse.emf.edit.provider.ComposeableAdapterFactory" ]
import com.odcgroup.page.uimodel.UIModelPackage; import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import com.odcgroup.page.uimodel.*; import org.eclipse.emf.edit.provider.*;
[ "com.odcgroup.page", "org.eclipse.emf" ]
com.odcgroup.page; org.eclipse.emf;
1,147,697
public void reAlloc() { final long newAllocationSize = allocationSizeInBytes * 2L; if (newAllocationSize > MAX_ALLOCATION_SIZE) { throw new OversizedAllocationException("Requested amount of memory is more than max allowed allocation size"); } final int curSize = (int)newAllocationSize; final DrillBuf newBuf = allocator.buffer(curSize); newBuf.setZero(0, newBuf.capacity()); newBuf.setBytes(0, data, 0, data.capacity()); data.release(); data = newBuf; allocationSizeInBytes = curSize; } /** * {@inheritDoc}
void function() { final long newAllocationSize = allocationSizeInBytes * 2L; if (newAllocationSize > MAX_ALLOCATION_SIZE) { throw new OversizedAllocationException(STR); } final int curSize = (int)newAllocationSize; final DrillBuf newBuf = allocator.buffer(curSize); newBuf.setZero(0, newBuf.capacity()); newBuf.setBytes(0, data, 0, data.capacity()); data.release(); data = newBuf; allocationSizeInBytes = curSize; } /** * {@inheritDoc}
/** * Allocate new buffer with double capacity, and copy data into the new buffer. Replace vector's buffer with new buffer, and release old one */
Allocate new buffer with double capacity, and copy data into the new buffer. Replace vector's buffer with new buffer, and release old one
reAlloc
{ "repo_name": "hnfgns/incubator-drill", "path": "exec/vector/src/main/java/org/apache/drill/exec/vector/BitVector.java", "license": "apache-2.0", "size": 13204 }
[ "io.netty.buffer.DrillBuf", "org.apache.drill.exec.exception.OversizedAllocationException" ]
import io.netty.buffer.DrillBuf; import org.apache.drill.exec.exception.OversizedAllocationException;
import io.netty.buffer.*; import org.apache.drill.exec.exception.*;
[ "io.netty.buffer", "org.apache.drill" ]
io.netty.buffer; org.apache.drill;
1,149,514
private synchronized SnapshotSentinel removeSentinelIfFinished( final Map<TableName, SnapshotSentinel> sentinels, final SnapshotDescription snapshot) { if (!snapshot.hasTable()) { return null; } TableName snapshotTable = TableName.valueOf(snapshot.getTable()); SnapshotSentinel h = sentinels.get(snapshotTable); if (h == null) { return null; } if (!h.getSnapshot().getName().equals(snapshot.getName())) { // specified snapshot is to the one currently running return null; } // Remove from the "in-progress" list once completed if (h.isFinished()) { sentinels.remove(snapshotTable); } return h; }
synchronized SnapshotSentinel function( final Map<TableName, SnapshotSentinel> sentinels, final SnapshotDescription snapshot) { if (!snapshot.hasTable()) { return null; } TableName snapshotTable = TableName.valueOf(snapshot.getTable()); SnapshotSentinel h = sentinels.get(snapshotTable); if (h == null) { return null; } if (!h.getSnapshot().getName().equals(snapshot.getName())) { return null; } if (h.isFinished()) { sentinels.remove(snapshotTable); } return h; }
/** * Return the handler if it is currently live and has the same snapshot target name. * The handler is removed from the sentinels map if completed. * @param sentinels live handlers * @param snapshot snapshot description * @return null if doesn't match, else a live handler. */
Return the handler if it is currently live and has the same snapshot target name. The handler is removed from the sentinels map if completed
removeSentinelIfFinished
{ "repo_name": "apurtell/hbase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java", "license": "apache-2.0", "size": 54021 }
[ "java.util.Map", "org.apache.hadoop.hbase.TableName", "org.apache.hadoop.hbase.master.SnapshotSentinel", "org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos" ]
import java.util.Map; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.master.SnapshotSentinel; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
import java.util.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.master.*; import org.apache.hadoop.hbase.shaded.protobuf.generated.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
699,743
public Entity newResource(Entity container, String id, Object[] others) { return new BaseMailArchiveMessageEdit((MessageChannel) container, id); }
Entity function(Entity container, String id, Object[] others) { return new BaseMailArchiveMessageEdit((MessageChannel) container, id); }
/** * Construct a new rsource given just an id. * * @param container * The Resource that is the container for the new resource (may be null). * @param id * The id for the new object. * @param others * (options) array of objects to load into the Resource's fields. * @return The new resource. */
Construct a new rsource given just an id
newResource
{ "repo_name": "OpenCollabZA/sakai", "path": "mailarchive/mailarchive-impl/impl/src/java/org/sakaiproject/mailarchive/impl/BaseMailArchiveService.java", "license": "apache-2.0", "size": 41581 }
[ "org.sakaiproject.entity.api.Entity", "org.sakaiproject.message.api.MessageChannel" ]
import org.sakaiproject.entity.api.Entity; import org.sakaiproject.message.api.MessageChannel;
import org.sakaiproject.entity.api.*; import org.sakaiproject.message.api.*;
[ "org.sakaiproject.entity", "org.sakaiproject.message" ]
org.sakaiproject.entity; org.sakaiproject.message;
214,615
public Graphics2D getGraphics() { this.verifyCanvas(false); Graphics2D g = this.canvas.createGraphics(); g.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON); g.setPaint(Color.BLACK); g.setFont(EnderX.getFont("Geneva", 12)); return g; }
Graphics2D function() { this.verifyCanvas(false); Graphics2D g = this.canvas.createGraphics(); g.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON); g.setPaint(Color.BLACK); g.setFont(EnderX.getFont(STR, 12)); return g; }
/** * Creates a graphics context for this Window. * @return a graphics context for this Window. */
Creates a graphics context for this Window
getGraphics
{ "repo_name": "Pugduddly/enderX", "path": "src/main/java/xyz/pugduddly/enderX/ui/AcceleratedWindow.java", "license": "gpl-2.0", "size": 28781 }
[ "java.awt.Color", "java.awt.Graphics2D", "java.awt.RenderingHints", "xyz.pugduddly.enderX.EnderX" ]
import java.awt.Color; import java.awt.Graphics2D; import java.awt.RenderingHints; import xyz.pugduddly.enderX.EnderX;
import java.awt.*; import xyz.pugduddly.*;
[ "java.awt", "xyz.pugduddly" ]
java.awt; xyz.pugduddly;
2,122,302
private boolean computeForUndirectedWeightedGraph() { final Iterable<IElement> nodesOfGraph = this.g.getNodes(); UndirectedNode node1, node2; // neighbors for node1, node2: HashMap<Node, Double> neighbors1, neighbors2; // indices for both for-loops to save some time with using matching(1,2) // = matching(2,1) int nodeIndex1 = 0, nodeIndex2; for (IElement iElement1 : nodesOfGraph) { node1 = (UndirectedNode) iElement1; neighbors1 = this.getNeighborNodesUndirectedWeighted(node1); nodeIndex2 = 0; for (IElement iElement2 : nodesOfGraph) { if (nodeIndex2 < nodeIndex1) { // matching is equal to equivalent calculated before // (matching(1,2) = matching(2,1)) nodeIndex2++; continue; } node2 = (UndirectedNode) iElement2; neighbors2 = this.getNeighborNodesUndirectedWeighted(node2); // intersection double sum = getMapValueSum(getMatching(neighbors1, neighbors2)); this.matching.put(node1, node2, sum); this.matchingD.incr(sum); nodeIndex2++; } nodeIndex1++; } return true; }
boolean function() { final Iterable<IElement> nodesOfGraph = this.g.getNodes(); UndirectedNode node1, node2; HashMap<Node, Double> neighbors1, neighbors2; int nodeIndex1 = 0, nodeIndex2; for (IElement iElement1 : nodesOfGraph) { node1 = (UndirectedNode) iElement1; neighbors1 = this.getNeighborNodesUndirectedWeighted(node1); nodeIndex2 = 0; for (IElement iElement2 : nodesOfGraph) { if (nodeIndex2 < nodeIndex1) { nodeIndex2++; continue; } node2 = (UndirectedNode) iElement2; neighbors2 = this.getNeighborNodesUndirectedWeighted(node2); double sum = getMapValueSum(getMatching(neighbors1, neighbors2)); this.matching.put(node1, node2, sum); this.matchingD.incr(sum); nodeIndex2++; } nodeIndex1++; } return true; }
/** * Computing for graphs with undirected weighted edges based only on current * snapshot. */
Computing for graphs with undirected weighted edges based only on current snapshot
computeForUndirectedWeightedGraph
{ "repo_name": "timgrube/DNA", "path": "src/dna/metrics/similarityMeasures/matching/Matching.java", "license": "gpl-3.0", "size": 18046 }
[ "dna.graph.IElement", "dna.graph.nodes.Node", "dna.graph.nodes.UndirectedNode", "java.util.HashMap" ]
import dna.graph.IElement; import dna.graph.nodes.Node; import dna.graph.nodes.UndirectedNode; import java.util.HashMap;
import dna.graph.*; import dna.graph.nodes.*; import java.util.*;
[ "dna.graph", "dna.graph.nodes", "java.util" ]
dna.graph; dna.graph.nodes; java.util;
781,122
public ImageBbox getRequestBbox() { Bbox correctedBbox= getCalculatedBbox(); Integer reqWidth= this.width; Integer reqHeight= this.height; if (this.angle !=null && this.angle > 0 && this.angle < 360){ Bbox reqBbox= getBboxWithRotation(correctedBbox,this.angle); //make the widht/height larger/smaller according the calculated bbox. Double floorWidth = Math.floor(reqBbox.getWidth()/correctedBbox.getWidth() * this.width); Double floorHeight = Math.floor(reqBbox.getHeight()/correctedBbox.getHeight() * this.height); reqWidth = floorWidth.intValue(); reqHeight = floorHeight.intValue(); correctedBbox=reqBbox; } return new ImageBbox(correctedBbox,reqWidth,reqHeight); } /** * @param settings a JSONObject in the following format: <pre> * { * requests: [ * { * protocol: "", * extent: "", //if extent is other then the given bbox. * url: "", * alpha: "", * body: "", * tileWidth: "", //default 256, for tiling * tileHeight: "", //default 256, for tiling * serverExtent: "" //server extent, for tiling * } * ], * geometries: [ * wktgeom: "", * color: "" * ], * bbox: "", * width: "", * height: "", * srid: "", * angle: "", * quality: "" * }
ImageBbox function() { Bbox correctedBbox= getCalculatedBbox(); Integer reqWidth= this.width; Integer reqHeight= this.height; if (this.angle !=null && this.angle > 0 && this.angle < 360){ Bbox reqBbox= getBboxWithRotation(correctedBbox,this.angle); Double floorWidth = Math.floor(reqBbox.getWidth()/correctedBbox.getWidth() * this.width); Double floorHeight = Math.floor(reqBbox.getHeight()/correctedBbox.getHeight() * this.height); reqWidth = floorWidth.intValue(); reqHeight = floorHeight.intValue(); correctedBbox=reqBbox; } return new ImageBbox(correctedBbox,reqWidth,reqHeight); } /** * @param settings a JSONObject in the following format: <pre> * { * requests: [ * { * protocol: STRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTRSTR" * }
/** * Get the request Bbox,height and width. * * @return the imagebox of the request */
Get the request Bbox,height and width
getRequestBbox
{ "repo_name": "B3Partners/flamingo", "path": "viewer/src/main/java/nl/b3p/viewer/image/CombineImageSettings.java", "license": "agpl-3.0", "size": 19784 }
[ "org.json.JSONObject" ]
import org.json.JSONObject;
import org.json.*;
[ "org.json" ]
org.json;
2,241,469
public final SessionFactory getSessionFactory() { return (this.hibernateTemplate != null ? this.hibernateTemplate.getSessionFactory() : null); }
final SessionFactory function() { return (this.hibernateTemplate != null ? this.hibernateTemplate.getSessionFactory() : null); }
/** * Return the Hibernate SessionFactory used by this DAO. */
Return the Hibernate SessionFactory used by this DAO
getSessionFactory
{ "repo_name": "leogoing/spring_jeesite", "path": "spring-orm-4.0/org/springframework/orm/hibernate4/support/HibernateDaoSupport.java", "license": "apache-2.0", "size": 5174 }
[ "org.hibernate.SessionFactory" ]
import org.hibernate.SessionFactory;
import org.hibernate.*;
[ "org.hibernate" ]
org.hibernate;
1,129,403
public Object lastKey() { if (size() == 0) { throw new NoSuchElementException("Map is empty"); } return insertOrder.get(size() - 1); }
Object function() { if (size() == 0) { throw new NoSuchElementException(STR); } return insertOrder.get(size() - 1); }
/** * Gets the last key in this map by insert order. * * @return the last key currently in this map * @throws NoSuchElementException if this map is empty */
Gets the last key in this map by insert order
lastKey
{ "repo_name": "ervandew/formic", "path": "src/java/org/apache/commons/collections/map/ListOrderedMap.java", "license": "lgpl-2.1", "size": 22086 }
[ "java.util.NoSuchElementException" ]
import java.util.NoSuchElementException;
import java.util.*;
[ "java.util" ]
java.util;
1,181,313
public URLStreamHandlerFactory getURLStreamHandlerFactory() { return new VfsStreamHandlerFactory(); }
URLStreamHandlerFactory function() { return new VfsStreamHandlerFactory(); }
/** * Get the URLStreamHandlerFactory. * @return The URLStreamHandlerFactory. */
Get the URLStreamHandlerFactory
getURLStreamHandlerFactory
{ "repo_name": "raviu/wso2-commons-vfs", "path": "core/src/main/java/org/apache/commons/vfs2/impl/DefaultFileSystemManager.java", "license": "apache-2.0", "size": 40745 }
[ "java.net.URLStreamHandlerFactory" ]
import java.net.URLStreamHandlerFactory;
import java.net.*;
[ "java.net" ]
java.net;
984,981
boolean accept(Type type, Class<?> clazz);
boolean accept(Type type, Class<?> clazz);
/** * Whether the build accept the type or class passed in. */
Whether the build accept the type or class passed in
accept
{ "repo_name": "qtvbwfn/dubbo", "path": "dubbo-metadata/dubbo-metadata-api/src/main/java/org/apache/dubbo/metadata/definition/builder/TypeBuilder.java", "license": "apache-2.0", "size": 1374 }
[ "java.lang.reflect.Type" ]
import java.lang.reflect.Type;
import java.lang.reflect.*;
[ "java.lang" ]
java.lang;
801,692
public Optional<DirectSignerResponse> findSignerIdentifiedBy(String identifier) { return getSigners().stream().filter(signer -> signer.hasIdentifier(identifier)).findFirst(); }
Optional<DirectSignerResponse> function(String identifier) { return getSigners().stream().filter(signer -> signer.hasIdentifier(identifier)).findFirst(); }
/** * Try to find the signer with the given identifier for this job. If you expect the signer to exist, * consider using {@link #getSignerIdentifiedBy(String)} instead. * * @param identifier the string identifying the signer, either a personal identification number, * or a custom identifier * @return the found {@link DirectSignerResponse signer}, or {@link Optional#empty() empty}. */
Try to find the signer with the given identifier for this job. If you expect the signer to exist, consider using <code>#getSignerIdentifiedBy(String)</code> instead
findSignerIdentifiedBy
{ "repo_name": "digipost/signature-api-client-java", "path": "src/main/java/no/digipost/signature/client/direct/DirectJobResponse.java", "license": "apache-2.0", "size": 3217 }
[ "java.util.Optional" ]
import java.util.Optional;
import java.util.*;
[ "java.util" ]
java.util;
1,959,042
private void validateStaticMods( List<StaticModification> staticModificationList, File kojakConfFile ) throws ProxlGenXMLDataException { Set<String> aminoAcidSet = new HashSet<>(); for ( StaticModification staticModification : staticModificationList ) { String aminoAcid = staticModification.getAminoAcid(); if ( aminoAcidSet.contains( aminoAcid )) { String msg = "ERROR: Duplicate aminoAcid entries for config key '" + FIXED_MODIFICATION_CONFIG_KEY + "' ,Kojak Conf file: " + kojakConfFile.getAbsolutePath(); log.error( msg ); throw new ProxlGenXMLDataException(msg); } aminoAcidSet.add( aminoAcid ); } }
void function( List<StaticModification> staticModificationList, File kojakConfFile ) throws ProxlGenXMLDataException { Set<String> aminoAcidSet = new HashSet<>(); for ( StaticModification staticModification : staticModificationList ) { String aminoAcid = staticModification.getAminoAcid(); if ( aminoAcidSet.contains( aminoAcid )) { String msg = STR + FIXED_MODIFICATION_CONFIG_KEY + STR + kojakConfFile.getAbsolutePath(); log.error( msg ); throw new ProxlGenXMLDataException(msg); } aminoAcidSet.add( aminoAcid ); } }
/** * Validate same residue is not in the list more than once. * @param kojakConfStaticMods * @throws ProxlGenXMLDataException */
Validate same residue is not in the list more than once
validateStaticMods
{ "repo_name": "yeastrc/proxl-import-kojak", "path": "src/org/yeastrc/proxl/proxl_gen_import_xml_kojak/common/kojak/KojakConfFileReader.java", "license": "apache-2.0", "size": 22787 }
[ "java.io.File", "java.util.HashSet", "java.util.List", "java.util.Set", "org.yeastrc.proxl.proxl_gen_import_xml_kojak.common.exceptions.ProxlGenXMLDataException", "org.yeastrc.proxl_import.api.xml_dto.StaticModification" ]
import java.io.File; import java.util.HashSet; import java.util.List; import java.util.Set; import org.yeastrc.proxl.proxl_gen_import_xml_kojak.common.exceptions.ProxlGenXMLDataException; import org.yeastrc.proxl_import.api.xml_dto.StaticModification;
import java.io.*; import java.util.*; import org.yeastrc.proxl.proxl_gen_import_xml_kojak.common.exceptions.*; import org.yeastrc.proxl_import.api.xml_dto.*;
[ "java.io", "java.util", "org.yeastrc.proxl", "org.yeastrc.proxl_import" ]
java.io; java.util; org.yeastrc.proxl; org.yeastrc.proxl_import;
1,893,357
void onLoadError(int sourceId, IOException e); } public static final class UnrecognizedInputFormatException extends ParserException { public UnrecognizedInputFormatException(Extractor[] extractors) { super("None of the available extractors (" + Util.getCommaDelimitedSimpleClassNames(extractors) + ") could read the stream."); } } public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT_ON_DEMAND = 3; public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT_LIVE = 6; private static final int MIN_RETRY_COUNT_DEFAULT_FOR_MEDIA = -1; private static final long NO_RESET_PENDING = Long.MIN_VALUE; private static final List<Class<? extends Extractor>> DEFAULT_EXTRACTOR_CLASSES; static { DEFAULT_EXTRACTOR_CLASSES = new ArrayList<>(); // Load extractors using reflection so that they can be deleted cleanly. // Class.forName(<class name>) appears for each extractor so that automated tools like proguard // can detect the use of reflection (see http://proguard.sourceforge.net/FAQ.html#forname). try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.extractor.webm.WebmExtractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.extractor.mp4.FragmentedMp4Extractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.extractor.mp4.Mp4Extractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.extractor.mp3.Mp3Extractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. 
} try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.extractor.ts.AdtsExtractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.extractor.ts.TsExtractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.extractor.flv.FlvExtractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.extractor.ogg.OggExtractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.extractor.ts.PsExtractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.extractor.wav.WavExtractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName("com.google.android.exoplayer.ext.flac.FlacExtractor") .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { // Extractor not found. 
} } private final ExtractorHolder extractorHolder; private final Allocator allocator; private final int requestedBufferSize; private final SparseArray<InternalTrackOutput> sampleQueues; private final int minLoadableRetryCount; private final Uri uri; private final DataSource dataSource; private final Handler eventHandler; private final EventListener eventListener; private final int eventSourceId; private volatile boolean tracksBuilt; private volatile SeekMap seekMap; private volatile DrmInitData drmInitData; private boolean prepared; private int enabledTrackCount; private MediaFormat[] mediaFormats; private long maxTrackDurationUs; private boolean[] pendingMediaFormat; private boolean[] pendingDiscontinuities; private boolean[] trackEnabledStates; private int remainingReleaseCount; private long downstreamPositionUs; private long lastSeekPositionUs; private long pendingResetPositionUs; private boolean havePendingNextSampleUs; private long pendingNextSampleUs; private long sampleTimeOffsetUs; private Loader loader; private ExtractingLoadable loadable; private IOException currentLoadableException; // TODO: Set this back to 0 in the correct place (some place indicative of making progress). private int currentLoadableExceptionCount; private long currentLoadableExceptionTimestamp; private boolean loadingFinished; private int extractedSampleCount; private int extractedSampleCountAtStartOfLoad; public ExtractorSampleSource(Uri uri, DataSource dataSource, Allocator allocator, int requestedBufferSize, Extractor... extractors) { this(uri, dataSource, allocator, requestedBufferSize, MIN_RETRY_COUNT_DEFAULT_FOR_MEDIA, extractors); } public ExtractorSampleSource(Uri uri, DataSource dataSource, Allocator allocator, int requestedBufferSize, Handler eventHandler, EventListener eventListener, int eventSourceId, Extractor... 
extractors) { this(uri, dataSource, allocator, requestedBufferSize, MIN_RETRY_COUNT_DEFAULT_FOR_MEDIA, eventHandler, eventListener, eventSourceId, extractors); } public ExtractorSampleSource(Uri uri, DataSource dataSource, Allocator allocator, int requestedBufferSize, int minLoadableRetryCount, Extractor... extractors) { this(uri, dataSource, allocator, requestedBufferSize, minLoadableRetryCount, null, null, 0, extractors); } public ExtractorSampleSource(Uri uri, DataSource dataSource, Allocator allocator, int requestedBufferSize, int minLoadableRetryCount, Handler eventHandler, EventListener eventListener, int eventSourceId, Extractor... extractors) { this.uri = uri; this.dataSource = dataSource; this.eventListener = eventListener; this.eventHandler = eventHandler; this.eventSourceId = eventSourceId; this.allocator = allocator; this.requestedBufferSize = requestedBufferSize; this.minLoadableRetryCount = minLoadableRetryCount; if (extractors == null || extractors.length == 0) { extractors = new Extractor[DEFAULT_EXTRACTOR_CLASSES.size()]; for (int i = 0; i < extractors.length; i++) { try { extractors[i] = DEFAULT_EXTRACTOR_CLASSES.get(i).newInstance(); } catch (InstantiationException e) { throw new IllegalStateException("Unexpected error creating default extractor", e); } catch (IllegalAccessException e) { throw new IllegalStateException("Unexpected error creating default extractor", e); } } } extractorHolder = new ExtractorHolder(extractors, this); sampleQueues = new SparseArray<>(); pendingResetPositionUs = NO_RESET_PENDING; }
void onLoadError(int sourceId, IOException e); } public static final class UnrecognizedInputFormatException extends ParserException { public UnrecognizedInputFormatException(Extractor[] extractors) { super(STR + Util.getCommaDelimitedSimpleClassNames(extractors) + STR); } } public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT_ON_DEMAND = 3; public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT_LIVE = 6; private static final int MIN_RETRY_COUNT_DEFAULT_FOR_MEDIA = -1; private static final long NO_RESET_PENDING = Long.MIN_VALUE; private static final List<Class<? extends Extractor>> DEFAULT_EXTRACTOR_CLASSES; static { DEFAULT_EXTRACTOR_CLASSES = new ArrayList<>(); try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) .asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } try { DEFAULT_EXTRACTOR_CLASSES.add( Class.forName(STR) 
.asSubclass(Extractor.class)); } catch (ClassNotFoundException e) { } } private final ExtractorHolder extractorHolder; private final Allocator allocator; private final int requestedBufferSize; private final SparseArray<InternalTrackOutput> sampleQueues; private final int minLoadableRetryCount; private final Uri uri; private final DataSource dataSource; private final Handler eventHandler; private final EventListener eventListener; private final int eventSourceId; private volatile boolean tracksBuilt; private volatile SeekMap seekMap; private volatile DrmInitData drmInitData; private boolean prepared; private int enabledTrackCount; private MediaFormat[] mediaFormats; private long maxTrackDurationUs; private boolean[] pendingMediaFormat; private boolean[] pendingDiscontinuities; private boolean[] trackEnabledStates; private int remainingReleaseCount; private long downstreamPositionUs; private long lastSeekPositionUs; private long pendingResetPositionUs; private boolean havePendingNextSampleUs; private long pendingNextSampleUs; private long sampleTimeOffsetUs; private Loader loader; private ExtractingLoadable loadable; private IOException currentLoadableException; private int currentLoadableExceptionCount; private long currentLoadableExceptionTimestamp; private boolean loadingFinished; private int extractedSampleCount; private int extractedSampleCountAtStartOfLoad; public ExtractorSampleSource(Uri uri, DataSource dataSource, Allocator allocator, int requestedBufferSize, Extractor... extractors) { this(uri, dataSource, allocator, requestedBufferSize, MIN_RETRY_COUNT_DEFAULT_FOR_MEDIA, extractors); } public ExtractorSampleSource(Uri uri, DataSource dataSource, Allocator allocator, int requestedBufferSize, Handler eventHandler, EventListener eventListener, int eventSourceId, Extractor... 
extractors) { this(uri, dataSource, allocator, requestedBufferSize, MIN_RETRY_COUNT_DEFAULT_FOR_MEDIA, eventHandler, eventListener, eventSourceId, extractors); } public ExtractorSampleSource(Uri uri, DataSource dataSource, Allocator allocator, int requestedBufferSize, int minLoadableRetryCount, Extractor... extractors) { this(uri, dataSource, allocator, requestedBufferSize, minLoadableRetryCount, null, null, 0, extractors); } public ExtractorSampleSource(Uri uri, DataSource dataSource, Allocator allocator, int requestedBufferSize, int minLoadableRetryCount, Handler eventHandler, EventListener eventListener, int eventSourceId, Extractor... extractors) { this.uri = uri; this.dataSource = dataSource; this.eventListener = eventListener; this.eventHandler = eventHandler; this.eventSourceId = eventSourceId; this.allocator = allocator; this.requestedBufferSize = requestedBufferSize; this.minLoadableRetryCount = minLoadableRetryCount; if (extractors == null extractors.length == 0) { extractors = new Extractor[DEFAULT_EXTRACTOR_CLASSES.size()]; for (int i = 0; i < extractors.length; i++) { try { extractors[i] = DEFAULT_EXTRACTOR_CLASSES.get(i).newInstance(); } catch (InstantiationException e) { throw new IllegalStateException(STR, e); } catch (IllegalAccessException e) { throw new IllegalStateException(STR, e); } } } extractorHolder = new ExtractorHolder(extractors, this); sampleQueues = new SparseArray<>(); pendingResetPositionUs = NO_RESET_PENDING; }
/** * Invoked when an error occurs loading media data. * * @param sourceId The id of the reporting {@link SampleSource}. * @param e The cause of the failure. */
Invoked when an error occurs loading media data
onLoadError
{ "repo_name": "kj2648/ExoplayerMultitrackTry", "path": "library/src/main/java/com/google/android/exoplayer/extractor/ExtractorSampleSource.java", "license": "apache-2.0", "size": 33338 }
[ "android.net.Uri", "android.os.Handler", "android.util.SparseArray", "com.google.android.exoplayer.MediaFormat", "com.google.android.exoplayer.ParserException", "com.google.android.exoplayer.drm.DrmInitData", "com.google.android.exoplayer.upstream.Allocator", "com.google.android.exoplayer.upstream.DataSource", "com.google.android.exoplayer.upstream.Loader", "com.google.android.exoplayer.util.Util", "java.io.IOException", "java.util.ArrayList", "java.util.List" ]
import android.net.Uri; import android.os.Handler; import android.util.SparseArray; import com.google.android.exoplayer.MediaFormat; import com.google.android.exoplayer.ParserException; import com.google.android.exoplayer.drm.DrmInitData; import com.google.android.exoplayer.upstream.Allocator; import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.upstream.Loader; import com.google.android.exoplayer.util.Util; import java.io.IOException; import java.util.ArrayList; import java.util.List;
import android.net.*; import android.os.*; import android.util.*; import com.google.android.exoplayer.*; import com.google.android.exoplayer.drm.*; import com.google.android.exoplayer.upstream.*; import com.google.android.exoplayer.util.*; import java.io.*; import java.util.*;
[ "android.net", "android.os", "android.util", "com.google.android", "java.io", "java.util" ]
android.net; android.os; android.util; com.google.android; java.io; java.util;
2,097,731
public Entity getEntity() { if (entity != null && entity.isValid() && !entity.isDead() && entity.getUniqueId().equals(id)) { //still cached return entity; } //try and load last chunk the entity was in if (entity != null) { entity.getLocation().getChunk().load(); } else { System.out.println("entity is null: " + name); } //cache has expired (new entity ID, etc) so grab entity for (World w : Bukkit.getWorlds()) for (Entity e : w.getEntities()) { if (e.getUniqueId().equals(id)) { entity = e; return e; } } //unable to find entity! return null; }
Entity function() { if (entity != null && entity.isValid() && !entity.isDead() && entity.getUniqueId().equals(id)) { return entity; } if (entity != null) { entity.getLocation().getChunk().load(); } else { System.out.println(STR + name); } for (World w : Bukkit.getWorlds()) for (Entity e : w.getEntities()) { if (e.getUniqueId().equals(id)) { entity = e; return e; } } return null; }
/** * Returns the entity this NPC is attached to. * This method attempts to save cycles by caching the last known entity to * represent our UUID'd creature. If the cache is no longer valid, an entire * sweep of worlds and entities is performed to lookup the entity. * @return The entity attached to our UUID, or NULL if none is found */
Returns the entity this NPC is attached to. This method attempts to save cycles by caching the last known entity to represent our UUID'd creature. If the cache is no longer valid, an entire sweep of worlds and entities is performed to lookup the entity
getEntity
{ "repo_name": "Dove-Bren/QuestManager", "path": "src/main/java/com/skyisland/questmanager/npc/NPC.java", "license": "gpl-3.0", "size": 6036 }
[ "org.bukkit.Bukkit", "org.bukkit.World", "org.bukkit.entity.Entity" ]
import org.bukkit.Bukkit; import org.bukkit.World; import org.bukkit.entity.Entity;
import org.bukkit.*; import org.bukkit.entity.*;
[ "org.bukkit", "org.bukkit.entity" ]
org.bukkit; org.bukkit.entity;
1,855,541
private Map<Integer, Integer> scanDatabaseToDetermineFrequencyOfSingleItems(String input) throws FileNotFoundException, IOException { // a map for storing the support of each item (key: item, value: support) Map<Integer, Integer> mapSupport = new HashMap<Integer, Integer>(); //Create object for reading the input file BufferedReader reader = new BufferedReader(new FileReader(input)); String line; // for each line (transaction) until the end of file while( ((line = reader.readLine())!= null)){ // if the line is a comment, is empty or is a // kind of metadata if (line.isEmpty() == true || line.charAt(0) == '#' || line.charAt(0) == '%' || line.charAt(0) == '@') { continue; } // split the line into items String[] lineSplited = line.split(" "); // for each item for(String itemString : lineSplited){ // increase the support count of the item Integer item = Integer.parseInt(itemString); // increase the support count of the item Integer count = mapSupport.get(item); if(count == null){ mapSupport.put(item, 1); }else{ mapSupport.put(item, ++count); } } // increase the transaction count transactionCount++; } // close the input file reader.close(); return mapSupport; }
Map<Integer, Integer> function(String input) throws FileNotFoundException, IOException { Map<Integer, Integer> mapSupport = new HashMap<Integer, Integer>(); BufferedReader reader = new BufferedReader(new FileReader(input)); String line; while( ((line = reader.readLine())!= null)){ if (line.isEmpty() == true line.charAt(0) == '#' line.charAt(0) == '%' line.charAt(0) == '@') { continue; } String[] lineSplited = line.split(" "); for(String itemString : lineSplited){ Integer item = Integer.parseInt(itemString); Integer count = mapSupport.get(item); if(count == null){ mapSupport.put(item, 1); }else{ mapSupport.put(item, ++count); } } transactionCount++; } reader.close(); return mapSupport; }
/** * This method scans the input database to calculate the support of single items * @param input the path of the input file * @throws IOException exception if error while writing the file * @return a map for storing the support of each item (key: item, value: support) */
This method scans the input database to calculate the support of single items
scanDatabaseToDetermineFrequencyOfSingleItems
{ "repo_name": "saurabhdas1/fyp", "path": "Final Year/src/pkgfinal/year/algorithms/frequentpatterns/fpgrowth/AlgoFPClose.java", "license": "gpl-3.0", "size": 20276 }
[ "java.io.BufferedReader", "java.io.FileNotFoundException", "java.io.FileReader", "java.io.IOException", "java.util.HashMap", "java.util.Map" ]
import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.HashMap; import java.util.Map;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
623,867
public static double parseDouble(ICommandSender par0ICommandSender, String par1Str) {
    // Parse first; a malformed string and a non-finite value both map to the
    // same translated error key.
    double value;
    try {
        value = Double.parseDouble(par1Str);
    } catch (NumberFormatException ex) {
        throw new NumberInvalidException("commands.generic.double.invalid", new Object[] {par1Str});
    }
    // Reject NaN and infinities so downstream math stays well-defined.
    if (!Doubles.isFinite(value)) {
        throw new NumberInvalidException("commands.generic.double.invalid", new Object[] {par1Str});
    }
    return value;
}
static double function(ICommandSender par0ICommandSender, String par1Str) { try { double var2 = Double.parseDouble(par1Str); if (!Doubles.isFinite(var2)) { throw new NumberInvalidException(STR, new Object[] {par1Str}); } else { return var2; } } catch (NumberFormatException var4) { throw new NumberInvalidException(STR, new Object[] {par1Str}); } }
/** * Parses a double from the given string or throws an exception if it's not a double. */
Parses a double from the given string or throws an exception if it's not a double
parseDouble
{ "repo_name": "herpingdo/Hakkit", "path": "net/minecraft/src/CommandBase.java", "license": "gpl-3.0", "size": 12999 }
[ "com.google.common.primitives.Doubles" ]
import com.google.common.primitives.Doubles;
import com.google.common.primitives.*;
[ "com.google.common" ]
com.google.common;
742,473
/**
 * Stores the NFS client details for this session.
 *
 * @param cInfo ClientInfo describing the connected NFS client
 */
public final void setNFSClientInformation(ClientInfo cInfo) {
    this.m_nfsClientInfo = cInfo;
}
final void function( ClientInfo cInfo) { m_nfsClientInfo = cInfo; }
/** * Set the NFS client information * * @param cInfo ClientInfo */
Set the NFS client information
setNFSClientInformation
{ "repo_name": "loftuxab/community-edition-old", "path": "projects/alfresco-jlan/source/java/org/alfresco/jlan/oncrpc/nfs/NFSSrvSession.java", "license": "lgpl-3.0", "size": 11452 }
[ "org.alfresco.jlan.server.auth.ClientInfo" ]
import org.alfresco.jlan.server.auth.ClientInfo;
import org.alfresco.jlan.server.auth.*;
[ "org.alfresco.jlan" ]
org.alfresco.jlan;
1,105,648
/**
 * Returns an Iterator over every attribute key currently stored.
 *
 * @return an Iterator of all keys
 */
public synchronized Iterator<String> getAttributeNames() {
    Iterator<String> keyIterator = attributes.keySet().iterator();
    return keyIterator;
}
synchronized Iterator<String> function() { return attributes.keySet().iterator(); }
/** * Returns an Iterator of all keys * * @return an Iterator */
Returns an Iterator of all keys
getAttributeNames
{ "repo_name": "rkapsi/daap", "path": "src/main/java/org/ardverk/daap/DaapSession.java", "license": "apache-2.0", "size": 5629 }
[ "java.util.Iterator" ]
import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
576,637
/**
 * Validates the return of a cancellation-by-substitution event batch.
 *
 * @param retorno the event-batch response to validate
 * @throws NfeException if the batch itself was rejected, or listing every
 *                      event whose status is not EVENTO_VINCULADO
 */
public static void validaCancelamentoSubstituicao(br.com.swconsultoria.nfe.schema.envEventoCancSubst.TRetEnvEvento retorno) throws NfeException {
    // Fail fast when the whole batch was rejected.
    if (!StatusEnum.LOTE_EVENTO_PROCESSADO.getCodigo().equals(retorno.getCStat())) {
        throw new NfeException(retorno.getCStat() + " - " + retorno.getXMotivo());
    }
    // StringBuilder replaces the single-element String[] hack with repeated
    // String concatenation inside the lambda (O(n^2) and harder to read).
    final StringBuilder erros = new StringBuilder();
    retorno.getRetEvento().forEach(retEvento -> {
        if (!StatusEnum.EVENTO_VINCULADO.getCodigo().equals(retEvento.getInfEvento().getCStat())) {
            erros.append(retEvento.getInfEvento().getChNFe())
                 .append(" - ")
                 .append(retEvento.getInfEvento().getCStat())
                 .append(" - ")
                 .append(retEvento.getInfEvento().getXMotivo())
                 .append(System.lineSeparator());
        }
    });
    // Same semantics as ObjetoUtil.verifica("")/isPresent(): only throw when
    // at least one rejected event was collected.
    if (erros.length() > 0) {
        throw new NfeException(erros.toString());
    }
}
static void function(br.com.swconsultoria.nfe.schema.envEventoCancSubst.TRetEnvEvento retorno) throws NfeException { if (!StatusEnum.LOTE_EVENTO_PROCESSADO.getCodigo().equals(retorno.getCStat())) { throw new NfeException(retorno.getCStat() + STR + retorno.getXMotivo()); } final String[] erro = {""}; retorno.getRetEvento().forEach( retEvento -> { if (!StatusEnum.EVENTO_VINCULADO.getCodigo().equals(retEvento.getInfEvento().getCStat())) { erro[0] += retEvento.getInfEvento().getChNFe() + STR +retEvento.getInfEvento().getCStat() + STR + retEvento.getInfEvento().getXMotivo() + System.lineSeparator(); } }); if(ObjetoUtil.verifica(erro[0]).isPresent()){ throw new NfeException(erro[0]); } }
/** * Valida o Retorno Do Cancelamento Substituicao * * @param retorno * @throws NfeException */
Valida o Retorno Do Cancelamento Substituicao
validaCancelamentoSubstituicao
{ "repo_name": "Samuel-Oliveira/Java_NFe", "path": "src/main/java/br/com/swconsultoria/nfe/util/RetornoUtil.java", "license": "mit", "size": 8675 }
[ "br.com.swconsultoria.nfe.dom.enuns.StatusEnum", "br.com.swconsultoria.nfe.exception.NfeException", "br.com.swconsultoria.nfe.schema.envEventoCancNFe.TRetEnvEvento" ]
import br.com.swconsultoria.nfe.dom.enuns.StatusEnum; import br.com.swconsultoria.nfe.exception.NfeException; import br.com.swconsultoria.nfe.schema.envEventoCancNFe.TRetEnvEvento;
import br.com.swconsultoria.nfe.dom.enuns.*; import br.com.swconsultoria.nfe.exception.*; import br.com.swconsultoria.nfe.schema.*;
[ "br.com.swconsultoria" ]
br.com.swconsultoria;
163,449
/**
 * Applies the configured IP filter to the given address.
 *
 * @param inetAddress the internet address to verify
 * @return true when the address is NOT allowed (i.e. should be filtered out),
 *         false otherwise
 */
private static boolean filterIp(InetAddress inetAddress) {
    boolean allowed = PMS.getConfiguration().getIpFiltering().allowed(inetAddress);
    return !allowed;
}
static boolean function(InetAddress inetAddress) { return !PMS.getConfiguration().getIpFiltering().allowed(inetAddress); }
/** * Applies the IP filter to the specified internet address. Returns true * if the address is not allowed and therefore should be filtered out, * false otherwise. * * @param inetAddress The internet address to verify. * @return True when not allowed, false otherwise. */
Applies the IP filter to the specified internet address. Returns true if the address is not allowed and therefore should be filtered out, false otherwise
filterIp
{ "repo_name": "UniversalMediaServer/UniversalMediaServer", "path": "src/main/java/net/pms/network/mediaserver/socketchannelserver/RequestHandler.java", "license": "gpl-2.0", "size": 15571 }
[ "java.net.InetAddress", "net.pms.PMS" ]
import java.net.InetAddress; import net.pms.PMS;
import java.net.*; import net.pms.*;
[ "java.net", "net.pms" ]
java.net; net.pms;
2,454,846
File image = discCache.get(imageUri); return image.exists() ? image : null; }
File image = discCache.get(imageUri); return image.exists() ? image : null; }
/** * Returns {@link File} of cached image or <b>null</b> if image was not * cached in disc cache */
Returns <code>File</code> of cached image or null if image was not cached in disc cache
findInCache
{ "repo_name": "hugoYe/Roid-Library", "path": "src/com/rincliu/library/common/persistence/image/core/assist/DiscCacheUtil.java", "license": "apache-2.0", "size": 1945 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
1,630,665
/**
 * Message field getter for the LegSecurityAltID FIX tag.
 *
 * @return the current field value; may be null if the field was never set
 */
@FIXVersion(introduced="4.4")
@TagNumRef(tagNum=TagNum.LegSecurityAltID, required=true, condRequired=true)
public String getLegSecurityAltID() {
    return legSecurityAltID;
}
@FIXVersion(introduced="4.4") @TagNumRef(tagNum=TagNum.LegSecurityAltID, required=true, condRequired=true) String function() { return legSecurityAltID; }
/** * Message field getter. * @return field value */
Message field getter
getLegSecurityAltID
{ "repo_name": "marvisan/HadesFIX", "path": "Model/src/main/java/net/hades/fix/message/group/LegSecurityAltIDGroup.java", "license": "gpl-3.0", "size": 7612 }
[ "net.hades.fix.message.anno.FIXVersion", "net.hades.fix.message.anno.TagNumRef", "net.hades.fix.message.type.TagNum" ]
import net.hades.fix.message.anno.FIXVersion; import net.hades.fix.message.anno.TagNumRef; import net.hades.fix.message.type.TagNum;
import net.hades.fix.message.anno.*; import net.hades.fix.message.type.*;
[ "net.hades.fix" ]
net.hades.fix;
535,099
/**
 * Reads a length-prefixed file body from {@code inputStream} into
 * {@code incomingFile}, returning the digest of everything written.
 *
 * The stream is a sequence of chunks, each preceded by a length header
 * (decoded by {@code readLength}); a header value of -1 marks end-of-data.
 *
 * @param inputStream  stream to read the chunked file content from
 * @param incomingFile destination file to write to
 * @param buf          reusable transfer buffer; a chunk larger than
 *                     BUFFER_SIZE is treated as a protocol error
 * @return the received file's content hash
 * @throws IOException if an I/O error occurs or a chunk exceeds BUFFER_SIZE
 */
private static byte[] readFileFully(
        final InputStream inputStream, final File incomingFile, final byte[] buf)
        throws IOException {
    MessageDigest md = BlobUtils.createMessageDigest();

    // try-with-resources closes the destination file on every path
    try (FileOutputStream fos = new FileOutputStream(incomingFile)) {
        while (true) {
            // next chunk's length header; -1 signals the end marker
            final int bytesExpected = readLength(inputStream);
            if (bytesExpected == -1) {
                // done
                break;
            }
            if (bytesExpected > BUFFER_SIZE) {
                throw new IOException(
                        "Unexpected number of incoming bytes: " + bytesExpected);
            }

            // read exactly bytesExpected bytes, then persist and hash them
            readFully(inputStream, buf, 0, bytesExpected, "buffer");
            fos.write(buf, 0, bytesExpected);
            md.update(buf, 0, bytesExpected);
        }

        return md.digest();
    }
}

// --------------------------------------------------------------------------------------------
//  Utilities
// --------------------------------------------------------------------------------------------
static byte[] function( final InputStream inputStream, final File incomingFile, final byte[] buf) throws IOException { MessageDigest md = BlobUtils.createMessageDigest(); try (FileOutputStream fos = new FileOutputStream(incomingFile)) { while (true) { final int bytesExpected = readLength(inputStream); if (bytesExpected == -1) { break; } if (bytesExpected > BUFFER_SIZE) { throw new IOException( STR + bytesExpected); } readFully(inputStream, buf, 0, bytesExpected, STR); fos.write(buf, 0, bytesExpected); md.update(buf, 0, bytesExpected); } return md.digest(); } }
/** * Reads a full file from <tt>inputStream</tt> into <tt>incomingFile</tt> returning its checksum. * * @param inputStream * stream to read from * @param incomingFile * file to write to * @param buf * An auxiliary buffer for data serialization/deserialization * * @return the received file's content hash * * @throws IOException * thrown if an I/O error occurs while reading/writing data from/to the respective streams */
Reads a full file from inputStream into incomingFile returning its checksum
readFileFully
{ "repo_name": "hequn8128/flink", "path": "flink-runtime/src/main/java/org/apache/flink/runtime/blob/BlobServerConnection.java", "license": "apache-2.0", "size": 14601 }
[ "java.io.File", "java.io.FileOutputStream", "java.io.IOException", "java.io.InputStream", "java.security.MessageDigest", "org.apache.flink.runtime.blob.BlobUtils" ]
import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.security.MessageDigest; import org.apache.flink.runtime.blob.BlobUtils;
import java.io.*; import java.security.*; import org.apache.flink.runtime.blob.*;
[ "java.io", "java.security", "org.apache.flink" ]
java.io; java.security; org.apache.flink;
400,167
/**
 * Gets the data for this section.
 *
 * @return map associating position (key) with the data value at that position
 */
public Map<Integer, Double> getSectionData();
Map<Integer, Double> function();
/** * Get the data * @return Map associating position with data value */
Get the data
getSectionData
{ "repo_name": "mgarber/scriptureV2", "path": "src/java/nextgen/core/readers/WigReader.java", "license": "lgpl-3.0", "size": 12579 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,032,966
/**
 * Adds the algebraic functions, each with an appropriate projection, to the
 * new foreach in the combiner.
 *
 * @param cfe the new POForEach in the combiner
 * @param op2newpos mapping of physical operator to its position in the input
 * @throws CloneNotSupportedException if an operator cannot be cloned
 * @throws PlanException if wiring the new inner plan fails
 */
private void addAlgebraicFuncToCombineFE(POForEach cfe, Map<PhysicalOperator, Integer> op2newpos)
        throws CloneNotSupportedException, PlanException {
    // An array first populated with physical operators in order of their
    // position in the input. Used while adding plans to the combine foreach so
    // that the combine foreach's output keeps the same positions as its input.
    // That way the same operator-to-position mapping can be used by the reduce
    // side as well.
    PhysicalOperator[] opsInOrder = new PhysicalOperator[op2newpos.size() + 1];
    for (Map.Entry<PhysicalOperator, Integer> op2pos : op2newpos.entrySet()) {
        opsInOrder[op2pos.getValue()] = op2pos.getKey();
    }
    // Position 0 is used by the group column and a plan has already been added
    // for it, so start at 1.
    for (int i = 1; i < opsInOrder.length; i++) {
        // Create a new inner plan for the foreach: a cloned copy of the given
        // physical operator plus a new project. Even if the udf in the query
        // takes multiple inputs, only one project needs to be added because
        // the input to this udf will be the INITIAL version of the udf
        // evaluated in the map.
        PhysicalPlan newPlan = new PhysicalPlan();
        PhysicalOperator newOp = opsInOrder[i].clone();
        newPlan.add(newOp);
        POProject proj = new POProject(
                createOperatorKey(cfe.getOperatorKey().getScope()),
                1, i
        );
        proj.setResultType(DataType.BAG);
        newPlan.add(proj);
        // NOTE: connection order matters — the project feeds the cloned op.
        newPlan.connect(proj, newOp);
        cfe.addInputPlan(newPlan, false);
    }
}
void function(POForEach cfe, Map<PhysicalOperator, Integer> op2newpos) throws CloneNotSupportedException, PlanException { PhysicalOperator[] opsInOrder = new PhysicalOperator[op2newpos.size() + 1]; for(Map.Entry<PhysicalOperator, Integer> op2pos : op2newpos.entrySet()){ opsInOrder[op2pos.getValue()] = op2pos.getKey(); } for(int i=1; i < opsInOrder.length; i++){ PhysicalPlan newPlan = new PhysicalPlan(); PhysicalOperator newOp = opsInOrder[i].clone(); newPlan.add(newOp); POProject proj = new POProject( createOperatorKey(cfe.getOperatorKey().getScope()), 1, i ); proj.setResultType(DataType.BAG); newPlan.add(proj); newPlan.connect(proj, newOp); cfe.addInputPlan(newPlan, false); } }
/** * add algebraic functions with appropriate projection to new foreach in combiner * @param cfe - the new foreach in combiner * @param op2newpos - mapping of physical operator to position in input * @throws CloneNotSupportedException * @throws PlanException */
add algebraic functions with appropriate projection to new foreach in combiner
addAlgebraicFuncToCombineFE
{ "repo_name": "hxquangnhat/PIG-ROLLUP-MRCUBE", "path": "src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/CombinerOptimizer.java", "license": "apache-2.0", "size": 53537 }
[ "java.util.Map", "org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator", "org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POProject", "org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan", "org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POForEach", "org.apache.pig.data.DataType", "org.apache.pig.impl.plan.PlanException" ]
import java.util.Map; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POProject; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POForEach; import org.apache.pig.data.DataType; import org.apache.pig.impl.plan.PlanException;
import java.util.*; import org.apache.pig.backend.hadoop.executionengine.*; import org.apache.pig.data.*; import org.apache.pig.impl.plan.*;
[ "java.util", "org.apache.pig" ]
java.util; org.apache.pig;
496,711
/**
 * Wraps the given widget in a spacer panel.
 *
 * @param w the widget to wrap
 * @return a SimplePanel styled as "loginSpacer" containing the widget
 */
private SimplePanel wrapInSpacer(Widget w) {
    final SimplePanel loginSpacer = new SimplePanel();
    loginSpacer.setStylePrimaryName("loginSpacer");
    loginSpacer.add(w);
    return loginSpacer;
}
SimplePanel function(Widget w) { SimplePanel spacer = new SimplePanel(); spacer.setStylePrimaryName(STR); spacer.add(w); return spacer; }
/** * Wrap in spacer. * * @param w * the w * @return the simple panel */
Wrap in spacer
wrapInSpacer
{ "repo_name": "MeasureAuthoringTool/MeasureAuthoringTool_LatestSprint", "path": "mat/src/main/java/mat/client/login/TempPwdView.java", "license": "cc0-1.0", "size": 9211 }
[ "com.google.gwt.user.client.ui.SimplePanel", "com.google.gwt.user.client.ui.Widget" ]
import com.google.gwt.user.client.ui.SimplePanel; import com.google.gwt.user.client.ui.Widget;
import com.google.gwt.user.client.ui.*;
[ "com.google.gwt" ]
com.google.gwt;
2,603,536
/**
 * This specifies how to implement {@link #getChildren} and is used to deduce
 * an appropriate feature for an AddCommand, RemoveCommand or MoveCommand in
 * {@link #createCommand}.
 * <!-- begin-user-doc --> <!-- end-user-doc -->
 * @generated
 */
@Override
public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
    // Lazily initialize the list on first call; later calls reuse the cache.
    if (childrenFeatures == null) {
        super.getChildrenFeatures(object);
        childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__SEQUENCE_INPUT_CONNECTOR);
        childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__SEQUENCE_OUTPUT_CONNECTOR);
        childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__ON_ERROR_SEQUENCE_INPUT_CONNECTOR);
        childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__ON_ERROR_SEQUENCE_OUTPUT_CONNECTOR);
        childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__CONTAINER);
        childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__SERVICE_PARAMETERS);
    }
    return childrenFeatures;
}
Collection<? extends EStructuralFeature> function(Object object) { if (childrenFeatures == null) { super.getChildrenFeatures(object); childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__SEQUENCE_INPUT_CONNECTOR); childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__SEQUENCE_OUTPUT_CONNECTOR); childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__ON_ERROR_SEQUENCE_INPUT_CONNECTOR); childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__ON_ERROR_SEQUENCE_OUTPUT_CONNECTOR); childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__CONTAINER); childrenFeatures.add(EsbPackage.Literals.INBOUND_ENDPOINT__SERVICE_PARAMETERS); } return childrenFeatures; }
/** * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}. * <!-- begin-user-doc --> <!-- end-user-doc --> * @generated */
This specifies how to implement <code>#getChildren</code> and is used to deduce an appropriate feature for an <code>org.eclipse.emf.edit.command.AddCommand</code>, <code>org.eclipse.emf.edit.command.RemoveCommand</code> or <code>org.eclipse.emf.edit.command.MoveCommand</code> in <code>#createCommand</code>.
getChildrenFeatures
{ "repo_name": "nwnpallewela/developer-studio", "path": "esb/plugins/org.wso2.developerstudio.eclipse.gmf.esb.edit/src/org/wso2/developerstudio/eclipse/gmf/esb/provider/InboundEndpointItemProvider.java", "license": "apache-2.0", "size": 156993 }
[ "java.util.Collection", "org.eclipse.emf.ecore.EStructuralFeature", "org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage" ]
import java.util.Collection; import org.eclipse.emf.ecore.EStructuralFeature; import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import java.util.*; import org.eclipse.emf.ecore.*; import org.wso2.developerstudio.eclipse.gmf.esb.*;
[ "java.util", "org.eclipse.emf", "org.wso2.developerstudio" ]
java.util; org.eclipse.emf; org.wso2.developerstudio;
7,314
/**
 * Returns a boolean indicating whether the MapReduce job was successful.
 *
 * @return true if the job completed successfully, false otherwise
 * @throws IOException if an I/O error occurs while determining the job status
 */
boolean isSuccessful() throws IOException;
boolean isSuccessful() throws IOException;
/** * Returns a boolean indicating whether the MapReduce job was successful * * @return a boolean indicating whether the MapReduce job was successful * @throws IOException */
Returns a boolean indicating whether the MapReduce job was successful
isSuccessful
{ "repo_name": "mkambol/pentaho-hadoop-shims", "path": "shim-api/src/main/java/org/pentaho/hadoop/shim/api/mapreduce/MapReduceJob.java", "license": "apache-2.0", "size": 2323 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,463,224
ConfigDatabase config = configDatabase(); List<String> remotes = config.getAllSubsections("remote"); List<Remote> allRemotes = new ArrayList<Remote>(); for (String remoteName : remotes) { String remoteSection = "remote." + remoteName; Optional<String> remoteFetchURL = config.get(remoteSection + ".url"); Optional<String> remoteFetch = config.get(remoteSection + ".fetch"); Optional<String> remoteMapped = config.get(remoteSection + ".mapped"); Optional<String> remoteMappedBranch = config.get(remoteSection + ".mappedBranch"); Optional<String> remoteUserName = config.get(remoteSection + ".username"); Optional<String> remotePassword = config.get(remoteSection + ".password"); if (remoteFetchURL.isPresent() && remoteFetch.isPresent()) { Optional<String> remotePushURL = config.get(remoteSection + ".pushurl"); allRemotes.add(new Remote(remoteName, remoteFetchURL.get(), remotePushURL.or(remoteFetchURL.get()), remoteFetch.get(), remoteMapped.or("false").equals("true"), remoteMappedBranch.orNull(), remoteUserName.orNull(), remotePassword.orNull())); } } return ImmutableList.copyOf(allRemotes); }
ConfigDatabase config = configDatabase(); List<String> remotes = config.getAllSubsections(STR); List<Remote> allRemotes = new ArrayList<Remote>(); for (String remoteName : remotes) { String remoteSection = STR + remoteName; Optional<String> remoteFetchURL = config.get(remoteSection + ".url"); Optional<String> remoteFetch = config.get(remoteSection + STR); Optional<String> remoteMapped = config.get(remoteSection + STR); Optional<String> remoteMappedBranch = config.get(remoteSection + STR); Optional<String> remoteUserName = config.get(remoteSection + STR); Optional<String> remotePassword = config.get(remoteSection + STR); if (remoteFetchURL.isPresent() && remoteFetch.isPresent()) { Optional<String> remotePushURL = config.get(remoteSection + STR); allRemotes.add(new Remote(remoteName, remoteFetchURL.get(), remotePushURL.or(remoteFetchURL.get()), remoteFetch.get(), remoteMapped.or("false").equals("true"), remoteMappedBranch.orNull(), remoteUserName.orNull(), remotePassword.orNull())); } } return ImmutableList.copyOf(allRemotes); }
/** * Executes the remote-list operation. * * @return {@code List<Remote>} of all remotes found in the config database, may be empty. */
Executes the remote-list operation
_call
{ "repo_name": "mtCarto/geogig", "path": "src/core/src/main/java/org/locationtech/geogig/porcelain/RemoteListOp.java", "license": "bsd-3-clause", "size": 2518 }
[ "com.google.common.base.Optional", "com.google.common.collect.ImmutableList", "java.util.ArrayList", "java.util.List", "org.locationtech.geogig.repository.Remote", "org.locationtech.geogig.storage.ConfigDatabase" ]
import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.List; import org.locationtech.geogig.repository.Remote; import org.locationtech.geogig.storage.ConfigDatabase;
import com.google.common.base.*; import com.google.common.collect.*; import java.util.*; import org.locationtech.geogig.repository.*; import org.locationtech.geogig.storage.*;
[ "com.google.common", "java.util", "org.locationtech.geogig" ]
com.google.common; java.util; org.locationtech.geogig;
2,051,556