method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
list
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
list
libraries_info
stringlengths
6
661
id
int64
0
2.92M
public static byte[] convertToXmlByteArray(RestoreObjectRequest restoreObjectRequest) throws SdkClientException { XmlWriter xml = new XmlWriter(); xml.start("RestoreRequest"); if (restoreObjectRequest.getExpirationInDays() != -1) { xml.start("Days").value(Integer.toString(restoreObjectRequest.getExpirationInDays())).end(); } final GlacierJobParameters glacierJobParameters = restoreObjectRequest.getGlacierJobParameters(); if (glacierJobParameters != null) { xml.start("GlacierJobParameters"); addIfNotNull(xml, "Tier", glacierJobParameters.getTier()); xml.end(); } addIfNotNull(xml, "Type", restoreObjectRequest.getType()); addIfNotNull(xml, "Tier", restoreObjectRequest.getTier()); addIfNotNull(xml, "Description", restoreObjectRequest.getDescription()); addSelectParametersIfNotNull(xml, restoreObjectRequest.getSelectParameters()); addOutputLocationIfNotNull(xml, restoreObjectRequest.getOutputLocation()); xml.end(); return xml.getBytes(); }
static byte[] function(RestoreObjectRequest restoreObjectRequest) throws SdkClientException { XmlWriter xml = new XmlWriter(); xml.start(STR); if (restoreObjectRequest.getExpirationInDays() != -1) { xml.start("Days").value(Integer.toString(restoreObjectRequest.getExpirationInDays())).end(); } final GlacierJobParameters glacierJobParameters = restoreObjectRequest.getGlacierJobParameters(); if (glacierJobParameters != null) { xml.start(STR); addIfNotNull(xml, "Tier", glacierJobParameters.getTier()); xml.end(); } addIfNotNull(xml, "Type", restoreObjectRequest.getType()); addIfNotNull(xml, "Tier", restoreObjectRequest.getTier()); addIfNotNull(xml, STR, restoreObjectRequest.getDescription()); addSelectParametersIfNotNull(xml, restoreObjectRequest.getSelectParameters()); addOutputLocationIfNotNull(xml, restoreObjectRequest.getOutputLocation()); xml.end(); return xml.getBytes(); }
/** * Converts the RestoreObjectRequest to an XML fragment that can be sent to * the RestoreObject operation of Amazon S3. * * @param restoreObjectRequest * The container which provides options for restoring an object, * which was transitioned to the Glacier from S3 when it was * expired, into S3 again. * * @return A byte array containing the data * * @throws SdkClientException */
Converts the RestoreObjectRequest to an XML fragment that can be sent to the RestoreObject operation of Amazon S3
convertToXmlByteArray
{ "repo_name": "jentfoo/aws-sdk-java", "path": "aws-java-sdk-s3/src/main/java/com/amazonaws/services/s3/model/transform/RequestXmlFactory.java", "license": "apache-2.0", "size": 12658 }
[ "com.amazonaws.SdkClientException", "com.amazonaws.services.s3.internal.XmlWriter", "com.amazonaws.services.s3.model.GlacierJobParameters", "com.amazonaws.services.s3.model.RestoreObjectRequest" ]
import com.amazonaws.SdkClientException; import com.amazonaws.services.s3.internal.XmlWriter; import com.amazonaws.services.s3.model.GlacierJobParameters; import com.amazonaws.services.s3.model.RestoreObjectRequest;
import com.amazonaws.*; import com.amazonaws.services.s3.internal.*; import com.amazonaws.services.s3.model.*;
[ "com.amazonaws", "com.amazonaws.services" ]
com.amazonaws; com.amazonaws.services;
2,658,254
public static void instanceStarted(BundleContext bundleContext, Collection<String> instances, Long timeout) throws Exception { CompletionService<Boolean> completionService = new ExecutorCompletionService<Boolean>(EXECUTOR); List<Future<Boolean>> waitForstarted = new LinkedList<Future<Boolean>>(); StringBuilder sb = new StringBuilder(); sb.append(" "); for (String instance : instances) { waitForstarted.add(completionService.submit(new WaitForInstanceStartedTask(bundleContext, instance, timeout))); sb.append(instance).append(" "); } System.out.println("Waiting for child instances: [" + sb.toString() + "] to get started."); for (String instance : instances) { Future<Boolean> f = completionService.poll(timeout, TimeUnit.MILLISECONDS); if ( f == null || !f.get()) { throw new Exception("Instance " + instance + " failed to start."); } } }
static void function(BundleContext bundleContext, Collection<String> instances, Long timeout) throws Exception { CompletionService<Boolean> completionService = new ExecutorCompletionService<Boolean>(EXECUTOR); List<Future<Boolean>> waitForstarted = new LinkedList<Future<Boolean>>(); StringBuilder sb = new StringBuilder(); sb.append(" "); for (String instance : instances) { waitForstarted.add(completionService.submit(new WaitForInstanceStartedTask(bundleContext, instance, timeout))); sb.append(instance).append(" "); } System.out.println(STR + sb.toString() + STR); for (String instance : instances) { Future<Boolean> f = completionService.poll(timeout, TimeUnit.MILLISECONDS); if ( f == null !f.get()) { throw new Exception(STR + instance + STR); } } }
/** * Wait for all containers to become registered. */
Wait for all containers to become registered
instanceStarted
{ "repo_name": "jludvice/fabric8", "path": "itests/paxexam/common/src/main/java/io/fabric8/itests/paxexam/support/Provision.java", "license": "apache-2.0", "size": 12432 }
[ "java.util.Collection", "java.util.LinkedList", "java.util.List", "java.util.concurrent.CompletionService", "java.util.concurrent.ExecutorCompletionService", "java.util.concurrent.Future", "java.util.concurrent.TimeUnit", "org.osgi.framework.BundleContext" ]
import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.concurrent.CompletionService; import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import org.osgi.framework.BundleContext;
import java.util.*; import java.util.concurrent.*; import org.osgi.framework.*;
[ "java.util", "org.osgi.framework" ]
java.util; org.osgi.framework;
47,805
public Builder addMethodImports(final Collection<Method> methods) { methodImports.addAll(methods); return this; }
Builder function(final Collection<Method> methods) { methodImports.addAll(methods); return this; }
/** * Overload to {@link #addMethodImports(Method...)}. */
Overload to <code>#addMethodImports(Method...)</code>
addMethodImports
{ "repo_name": "jorgebay/tinkerpop", "path": "gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/jsr223/DefaultImportCustomizer.java", "license": "apache-2.0", "size": 4494 }
[ "java.lang.reflect.Method", "java.util.Collection" ]
import java.lang.reflect.Method; import java.util.Collection;
import java.lang.reflect.*; import java.util.*;
[ "java.lang", "java.util" ]
java.lang; java.util;
354,366
@RequestMapping(value = "/transacts", method = RequestMethod.GET) public String setupForm(Model model) { return "listTransactionsView"; }
@RequestMapping(value = STR, method = RequestMethod.GET) String function(Model model) { return STR; }
/** * Method will be called in initial page load at GET /transacts */
Method will be called in initial page load at GET /transacts
setupForm
{ "repo_name": "victorangelo/exercise", "path": "Dashboard/src/main/java/com/transact/controller/TransactController.java", "license": "gpl-3.0", "size": 5569 }
[ "org.springframework.ui.Model", "org.springframework.web.bind.annotation.RequestMapping", "org.springframework.web.bind.annotation.RequestMethod" ]
import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.ui.*; import org.springframework.web.bind.annotation.*;
[ "org.springframework.ui", "org.springframework.web" ]
org.springframework.ui; org.springframework.web;
1,780,755
@Test public void testSLRemoteEnvEntry_String_NotExist() throws Exception { try { // The test case looks for a environment variable named "envStringNotExist". String tempStr = fejb1.getStringEnvVar("envStringNotExist"); fail("Get environment not exist should have failed, instead got string object = " + tempStr); } catch (NamingException ne) { svLogger.info("Caught expected " + ne.getClass().getName()); } }
void function() throws Exception { try { String tempStr = fejb1.getStringEnvVar(STR); fail(STR + tempStr); } catch (NamingException ne) { svLogger.info(STR + ne.getClass().getName()); } }
/** * (ive36) Test an env-entry of type String where there is no env-entry. */
(ive36) Test an env-entry of type String where there is no env-entry
testSLRemoteEnvEntry_String_NotExist
{ "repo_name": "kgibm/open-liberty", "path": "dev/com.ibm.ws.ejbcontainer.legacy_fat/test-applications/EJB2XRemoteSpecWeb.war/src/com/ibm/ejb2x/base/spec/slr/web/SLRemoteImplEnvEntryServlet.java", "license": "epl-1.0", "size": 39112 }
[ "javax.naming.NamingException", "org.junit.Assert" ]
import javax.naming.NamingException; import org.junit.Assert;
import javax.naming.*; import org.junit.*;
[ "javax.naming", "org.junit" ]
javax.naming; org.junit;
823,999
public static ColorStateList getNewTabTilePlusTintList(Context context, boolean isIncognito) { return AppCompatResources.getColorStateList(context, isIncognito ? R.color.new_tab_tile_plus_color_incognito : R.color.new_tab_tile_plus_color); }
static ColorStateList function(Context context, boolean isIncognito) { return AppCompatResources.getColorStateList(context, isIncognito ? R.color.new_tab_tile_plus_color_incognito : R.color.new_tab_tile_plus_color); }
/** * Returns the {@link ColorStateList} to use for the plus sign in new tab tile based on the * incognito mode. * * @param context {@link Context} used to retrieve color. * @param isIncognito Whether the color is used for incognito mode. * @return The {@link ColorStateList} for new tab tile plus sign color. */
Returns the <code>ColorStateList</code> to use for the plus sign in new tab tile based on the incognito mode
getNewTabTilePlusTintList
{ "repo_name": "scheib/chromium", "path": "chrome/android/features/tab_ui/java/src/org/chromium/chrome/browser/tasks/tab_management/TabUiThemeProvider.java", "license": "bsd-3-clause", "size": 30707 }
[ "android.content.Context", "android.content.res.ColorStateList", "androidx.appcompat.content.res.AppCompatResources" ]
import android.content.Context; import android.content.res.ColorStateList; import androidx.appcompat.content.res.AppCompatResources;
import android.content.*; import android.content.res.*; import androidx.appcompat.content.res.*;
[ "android.content", "androidx.appcompat" ]
android.content; androidx.appcompat;
1,977,433
public HashMap<UUID, Trade> getTrades() { HashMap<UUID, Trade> tr = new HashMap<>(); for (Trade t: trades){ tr.put(t.getTradeUUID(), t); } return tr; }
HashMap<UUID, Trade> function() { HashMap<UUID, Trade> tr = new HashMap<>(); for (Trade t: trades){ tr.put(t.getTradeUUID(), t); } return tr; }
/** * Get all trades the user is involved in. * * @return the trades, as a hash map */
Get all trades the user is involved in
getTrades
{ "repo_name": "CMPUT301F15T03/301p", "path": "T03/app/src/main/java/ca/ualberta/cmput301/t03/trading/TradeList.java", "license": "gpl-3.0", "size": 6427 }
[ "java.util.HashMap" ]
import java.util.HashMap;
import java.util.*;
[ "java.util" ]
java.util;
819,655
private void deleteBuddy(Buddy buddy) { // Remove buddy DeleteBuddyResponseEvent responseEvent = coreService.removeBuddy( new DeleteBuddyRequestEvent(buddy.toBuddyDetails()) ); // Failure if (!responseEvent.isSuccess()) { // Notify the admin about the error if (log.isWarnEnabled()) { log.warn(String.format("Remove user %s", responseEvent.getExceptionMessage())); } // Provide more detailed description of the issue by printing the exception if (log.isDebugEnabled()) { log.debug(responseEvent.getException()); } } }
void function(Buddy buddy) { DeleteBuddyResponseEvent responseEvent = coreService.removeBuddy( new DeleteBuddyRequestEvent(buddy.toBuddyDetails()) ); if (!responseEvent.isSuccess()) { if (log.isWarnEnabled()) { log.warn(String.format(STR, responseEvent.getExceptionMessage())); } if (log.isDebugEnabled()) { log.debug(responseEvent.getException()); } } }
/** * Removes buddy from the system * * @param buddy Buddy */
Removes buddy from the system
deleteBuddy
{ "repo_name": "marcelmika/lims", "path": "docroot/WEB-INF/src/com/marcelmika/lims/portal/hooks/UserListener.java", "license": "mit", "size": 3204 }
[ "com.marcelmika.lims.api.events.buddy.DeleteBuddyRequestEvent", "com.marcelmika.lims.api.events.buddy.DeleteBuddyResponseEvent", "com.marcelmika.lims.portal.domain.Buddy" ]
import com.marcelmika.lims.api.events.buddy.DeleteBuddyRequestEvent; import com.marcelmika.lims.api.events.buddy.DeleteBuddyResponseEvent; import com.marcelmika.lims.portal.domain.Buddy;
import com.marcelmika.lims.api.events.buddy.*; import com.marcelmika.lims.portal.domain.*;
[ "com.marcelmika.lims" ]
com.marcelmika.lims;
1,531,358
return new ComparatorCondition("=", this, new LiteralOperand( new LinkedHashSet<String>(Arrays.asList(values)))); }
return new ComparatorCondition("=", this, new LiteralOperand( new LinkedHashSet<String>(Arrays.asList(values)))); }
/** * Returns a <a href= * "http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.SpecifyingConditions.html#ConditionExpressionReference.Comparators" * >comparator condition</a> (that evaluates to true if the value of the * current attribute is equal to the set of specified values) for building condition * expression. */
Returns a comparator condition (that evaluates to true if the value of the current attribute is equal to the set of specified values) for building condition expression
eq
{ "repo_name": "dagnir/aws-sdk-java", "path": "aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/xspec/SS.java", "license": "apache-2.0", "size": 12373 }
[ "java.util.Arrays", "java.util.LinkedHashSet" ]
import java.util.Arrays; import java.util.LinkedHashSet;
import java.util.*;
[ "java.util" ]
java.util;
2,682,112
public static void print(Component comp, String uri, String cssuri) { String script = "zk.print('" + comp.getUuid() + "', '" + uri + "'"; if (cssuri != null) { if (uri.contains("zkau") && !cssuri.startsWith("/")) cssuri = "/" + cssuri; //absolute path if using default template.zul within jar file script += ", '" + cssuri + "');"; } else { script += ");"; } Clients.evalJavaScript(script); }
static void function(Component comp, String uri, String cssuri) { String script = STR + comp.getUuid() + STR + uri + "'"; if (cssuri != null) { if (uri.contains("zkau") && !cssuri.startsWith("/")) cssuri = "/" + cssuri; script += STR + cssuri + "');"; } else { script += ");"; } Clients.evalJavaScript(script); }
/** * The specific component to print with custom template ZUL page and print CSS style * @param comp any ZK component * @param uri the path to the template ZUL page * @param cssuri the path to the CSS file */
The specific component to print with custom template ZUL page and print CSS style
print
{ "repo_name": "VincentJian/print", "path": "src/main/java/org/zkoss/addon/print/PrintUtil.java", "license": "gpl-2.0", "size": 1333 }
[ "org.zkoss.zk.ui.Component", "org.zkoss.zk.ui.util.Clients" ]
import org.zkoss.zk.ui.Component; import org.zkoss.zk.ui.util.Clients;
import org.zkoss.zk.ui.*; import org.zkoss.zk.ui.util.*;
[ "org.zkoss.zk" ]
org.zkoss.zk;
210,704
@Test public void isInstitutionalRessourceManagerFor() { Identity owner1 = JunitTestHelper.createAndPersistIdentityAsUser("instit-" + UUID.randomUUID().toString()); Identity owner2 = JunitTestHelper.createAndPersistIdentityAsUser("instit-" + UUID.randomUUID().toString()); Identity part3 = JunitTestHelper.createAndPersistIdentityAsUser("instit-" + UUID.randomUUID().toString()); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); securityManager.addIdentityToSecurityGroup(owner1, re.getOwnerGroup()); securityManager.addIdentityToSecurityGroup(owner2, re.getOwnerGroup()); securityManager.addIdentityToSecurityGroup(part3, re.getParticipantGroup()); dbInstance.commit(); //set the institutions owner1.getUser().setProperty(UserConstants.INSTITUTIONALNAME, "volks"); owner2.getUser().setProperty(UserConstants.INSTITUTIONALNAME, "volks"); part3.getUser().setProperty(UserConstants.INSTITUTIONALNAME, "volks"); userManager.updateUserFromIdentity(owner1); userManager.updateUserFromIdentity(owner2); userManager.updateUserFromIdentity(part3); dbInstance.commit(); //promote owner1 to institution resource manager SecurityGroup institutionalResourceManagerGroup = securityManager.findSecurityGroupByName(Constants.GROUP_INST_ORES_MANAGER); securityManager.addIdentityToSecurityGroup(owner1, institutionalResourceManagerGroup); dbInstance.commitAndCloseSession(); //check boolean institutionMgr1 = repositoryManager.isInstitutionalRessourceManagerFor(re, owner1); boolean institutionMgr2 = repositoryManager.isInstitutionalRessourceManagerFor(re, owner2); boolean institutionMgr3 = repositoryManager.isInstitutionalRessourceManagerFor(re, part3); Assert.assertTrue(institutionMgr1); Assert.assertFalse(institutionMgr2); Assert.assertFalse(institutionMgr3); }
void function() { Identity owner1 = JunitTestHelper.createAndPersistIdentityAsUser(STR + UUID.randomUUID().toString()); Identity owner2 = JunitTestHelper.createAndPersistIdentityAsUser(STR + UUID.randomUUID().toString()); Identity part3 = JunitTestHelper.createAndPersistIdentityAsUser(STR + UUID.randomUUID().toString()); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); securityManager.addIdentityToSecurityGroup(owner1, re.getOwnerGroup()); securityManager.addIdentityToSecurityGroup(owner2, re.getOwnerGroup()); securityManager.addIdentityToSecurityGroup(part3, re.getParticipantGroup()); dbInstance.commit(); owner1.getUser().setProperty(UserConstants.INSTITUTIONALNAME, "volks"); owner2.getUser().setProperty(UserConstants.INSTITUTIONALNAME, "volks"); part3.getUser().setProperty(UserConstants.INSTITUTIONALNAME, "volks"); userManager.updateUserFromIdentity(owner1); userManager.updateUserFromIdentity(owner2); userManager.updateUserFromIdentity(part3); dbInstance.commit(); SecurityGroup institutionalResourceManagerGroup = securityManager.findSecurityGroupByName(Constants.GROUP_INST_ORES_MANAGER); securityManager.addIdentityToSecurityGroup(owner1, institutionalResourceManagerGroup); dbInstance.commitAndCloseSession(); boolean institutionMgr1 = repositoryManager.isInstitutionalRessourceManagerFor(re, owner1); boolean institutionMgr2 = repositoryManager.isInstitutionalRessourceManagerFor(re, owner2); boolean institutionMgr3 = repositoryManager.isInstitutionalRessourceManagerFor(re, part3); Assert.assertTrue(institutionMgr1); Assert.assertFalse(institutionMgr2); Assert.assertFalse(institutionMgr3); }
/** * How can be a resource manager if Constants.ORESOURCE_USERMANAGER is never used? */
How can be a resource manager if Constants.ORESOURCE_USERMANAGER is never used
isInstitutionalRessourceManagerFor
{ "repo_name": "stevenhva/InfoLearn_OpenOLAT", "path": "src/test/java/org/olat/repository/RepositoryManagerTest.java", "license": "apache-2.0", "size": 44418 }
[ "java.util.UUID", "junit.framework.Assert", "org.olat.basesecurity.Constants", "org.olat.basesecurity.SecurityGroup", "org.olat.core.id.Identity", "org.olat.core.id.UserConstants", "org.olat.test.JunitTestHelper" ]
import java.util.UUID; import junit.framework.Assert; import org.olat.basesecurity.Constants; import org.olat.basesecurity.SecurityGroup; import org.olat.core.id.Identity; import org.olat.core.id.UserConstants; import org.olat.test.JunitTestHelper;
import java.util.*; import junit.framework.*; import org.olat.basesecurity.*; import org.olat.core.id.*; import org.olat.test.*;
[ "java.util", "junit.framework", "org.olat.basesecurity", "org.olat.core", "org.olat.test" ]
java.util; junit.framework; org.olat.basesecurity; org.olat.core; org.olat.test;
564,236
public static boolean equipWithItem(final Player player, final String clazz, final String info) { ItemTestHelper.generateRPClasses(); final Item item = SingletonRepository.getEntityManager().getItem(clazz); item.setInfoString(info); return player.equipToInventoryOnly(item); }
static boolean function(final Player player, final String clazz, final String info) { ItemTestHelper.generateRPClasses(); final Item item = SingletonRepository.getEntityManager().getItem(clazz); item.setInfoString(info); return player.equipToInventoryOnly(item); }
/** * Equip the player with the given item and set the given item string. * * @param player * @param clazz * @param info * @return success flag */
Equip the player with the given item and set the given item string
equipWithItem
{ "repo_name": "AntumDeluge/arianne-stendhal", "path": "tests/utilities/PlayerTestHelper.java", "license": "gpl-2.0", "size": 10785 }
[ "games.stendhal.server.core.engine.SingletonRepository", "games.stendhal.server.entity.item.Item", "games.stendhal.server.entity.player.Player" ]
import games.stendhal.server.core.engine.SingletonRepository; import games.stendhal.server.entity.item.Item; import games.stendhal.server.entity.player.Player;
import games.stendhal.server.core.engine.*; import games.stendhal.server.entity.item.*; import games.stendhal.server.entity.player.*;
[ "games.stendhal.server" ]
games.stendhal.server;
1,954,932
@Test public void testFailuretoReadEdits() throws Exception { assertTrue(fs.mkdirs(new Path(TEST_DIR1))); HATestUtil.waitForStandbyToCatchUp(nn0, nn1); // If these two ops are applied twice, the first op will throw an // exception the second time its replayed. fs.setOwner(new Path(TEST_DIR1), "foo", "bar"); assertTrue(fs.delete(new Path(TEST_DIR1), true)); // This op should get applied just fine. assertTrue(fs.mkdirs(new Path(TEST_DIR2))); // This is the op the mocking will cause to fail to be read. assertTrue(fs.mkdirs(new Path(TEST_DIR3))); LimitedEditLogAnswer answer = causeFailureOnEditLogRead(); try { HATestUtil.waitForStandbyToCatchUp(nn0, nn1); fail("Standby fully caught up, but should not have been able to"); } catch (HATestUtil.CouldNotCatchUpException e) { // Expected. The NN did not exit. } // Null because it was deleted. assertNull(NameNodeAdapter.getFileInfo(nn1, TEST_DIR1, false, false, false)); // Should have been successfully created. assertTrue(NameNodeAdapter.getFileInfo(nn1, TEST_DIR2, false, false, false).isDirectory()); // Null because it hasn't been created yet. assertNull(NameNodeAdapter.getFileInfo(nn1, TEST_DIR3, false, false, false)); // Now let the standby read ALL the edits. answer.setThrowExceptionOnRead(false); HATestUtil.waitForStandbyToCatchUp(nn0, nn1); // Null because it was deleted. assertNull(NameNodeAdapter.getFileInfo(nn1, TEST_DIR1, false, false, false)); // Should have been successfully created. assertTrue(NameNodeAdapter.getFileInfo(nn1, TEST_DIR2, false, false, false).isDirectory()); // Should now have been successfully created. assertTrue(NameNodeAdapter.getFileInfo(nn1, TEST_DIR3, false, false, false).isDirectory()); }
void function() throws Exception { assertTrue(fs.mkdirs(new Path(TEST_DIR1))); HATestUtil.waitForStandbyToCatchUp(nn0, nn1); fs.setOwner(new Path(TEST_DIR1), "foo", "bar"); assertTrue(fs.delete(new Path(TEST_DIR1), true)); assertTrue(fs.mkdirs(new Path(TEST_DIR2))); assertTrue(fs.mkdirs(new Path(TEST_DIR3))); LimitedEditLogAnswer answer = causeFailureOnEditLogRead(); try { HATestUtil.waitForStandbyToCatchUp(nn0, nn1); fail(STR); } catch (HATestUtil.CouldNotCatchUpException e) { } assertNull(NameNodeAdapter.getFileInfo(nn1, TEST_DIR1, false, false, false)); assertTrue(NameNodeAdapter.getFileInfo(nn1, TEST_DIR2, false, false, false).isDirectory()); assertNull(NameNodeAdapter.getFileInfo(nn1, TEST_DIR3, false, false, false)); answer.setThrowExceptionOnRead(false); HATestUtil.waitForStandbyToCatchUp(nn0, nn1); assertNull(NameNodeAdapter.getFileInfo(nn1, TEST_DIR1, false, false, false)); assertTrue(NameNodeAdapter.getFileInfo(nn1, TEST_DIR2, false, false, false).isDirectory()); assertTrue(NameNodeAdapter.getFileInfo(nn1, TEST_DIR3, false, false, false).isDirectory()); }
/** * Test that the standby NN won't double-replay earlier edits if it encounters * a failure to read a later edit. */
Test that the standby NN won't double-replay earlier edits if it encounters a failure to read a later edit
testFailuretoReadEdits
{ "repo_name": "plusplusjiajia/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestFailureToReadEdits.java", "license": "apache-2.0", "size": 13653 }
[ "org.apache.hadoop.fs.Path", "org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter", "org.junit.Assert" ]
import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter; import org.junit.Assert;
import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.server.namenode.*; import org.junit.*;
[ "org.apache.hadoop", "org.junit" ]
org.apache.hadoop; org.junit;
2,634,866
public ExternalId getRateFutureNodeId(final LocalDate curveDate, final Tenor tenor, final Tenor rateTenor, final int numberFuturesFromTenor) { if (_rateFutureNodeIds == null) { throw new OpenGammaRuntimeException("Cannot get rate future node id provider for curve node id mapper called " + _name); } final CurveInstrumentProvider mapper = _rateFutureNodeIds.get(tenor); if (mapper != null) { return mapper.getInstrument(curveDate, tenor, rateTenor, numberFuturesFromTenor); } throw new OpenGammaRuntimeException("Can't get instrument mapper definition for rate future number " + numberFuturesFromTenor + " with time to start " + tenor + " and rate tenor " + rateTenor); }
ExternalId function(final LocalDate curveDate, final Tenor tenor, final Tenor rateTenor, final int numberFuturesFromTenor) { if (_rateFutureNodeIds == null) { throw new OpenGammaRuntimeException(STR + _name); } final CurveInstrumentProvider mapper = _rateFutureNodeIds.get(tenor); if (mapper != null) { return mapper.getInstrument(curveDate, tenor, rateTenor, numberFuturesFromTenor); } throw new OpenGammaRuntimeException(STR + numberFuturesFromTenor + STR + tenor + STR + rateTenor); }
/** * Gets the external id of the rate future node at a particular tenor that is valid for that curve date. * * @param curveDate * The curve date * @param tenor * The start tenor * @param rateTenor * The tenor of the future * @param numberFuturesFromTenor * The number of futures from the start tenor * @return The external id of the security * @throws OpenGammaRuntimeException * if the external id for this tenor and date could not be found. */
Gets the external id of the rate future node at a particular tenor that is valid for that curve date
getRateFutureNodeId
{ "repo_name": "McLeodMoores/starling", "path": "projects/financial/src/main/java/com/opengamma/financial/analytics/curve/CurveNodeIdMapper.java", "license": "apache-2.0", "size": 64900 }
[ "com.opengamma.OpenGammaRuntimeException", "com.opengamma.financial.analytics.ircurve.CurveInstrumentProvider", "com.opengamma.id.ExternalId", "com.opengamma.util.time.Tenor", "org.threeten.bp.LocalDate" ]
import com.opengamma.OpenGammaRuntimeException; import com.opengamma.financial.analytics.ircurve.CurveInstrumentProvider; import com.opengamma.id.ExternalId; import com.opengamma.util.time.Tenor; import org.threeten.bp.LocalDate;
import com.opengamma.*; import com.opengamma.financial.analytics.ircurve.*; import com.opengamma.id.*; import com.opengamma.util.time.*; import org.threeten.bp.*;
[ "com.opengamma", "com.opengamma.financial", "com.opengamma.id", "com.opengamma.util", "org.threeten.bp" ]
com.opengamma; com.opengamma.financial; com.opengamma.id; com.opengamma.util; org.threeten.bp;
1,720,513
public void assertNull(AssertionInfo info, Object actual) { if (actual == null) { return; } throw failures.failure(info, shouldBeEqual(actual, null, comparisonStrategy, info.representation())); }
void function(AssertionInfo info, Object actual) { if (actual == null) { return; } throw failures.failure(info, shouldBeEqual(actual, null, comparisonStrategy, info.representation())); }
/** * Asserts that the given object is {@code null}. * * @param info contains information about the assertion. * @param actual the given object. * @throws AssertionError if the given object is not {@code null}. */
Asserts that the given object is null
assertNull
{ "repo_name": "dorzey/assertj-core", "path": "src/main/java/org/assertj/core/internal/Objects.java", "license": "apache-2.0", "size": 38624 }
[ "org.assertj.core.api.AssertionInfo", "org.assertj.core.error.ShouldBeEqual" ]
import org.assertj.core.api.AssertionInfo; import org.assertj.core.error.ShouldBeEqual;
import org.assertj.core.api.*; import org.assertj.core.error.*;
[ "org.assertj.core" ]
org.assertj.core;
678,201
void deleteIndexStore(String reason, IndexMetaData metaData, ClusterState clusterState) throws IOException { if (nodeEnv.hasNodeFile()) { synchronized (this) { Index index = metaData.getIndex(); if (hasIndex(index)) { String localUUid = indexService(index).indexUUID(); throw new IllegalStateException("Can't delete index store for [" + index.getName() + "] - it's still part of the indices service [" + localUUid + "] [" + metaData.getIndexUUID() + "]"); } if (clusterState.metaData().hasIndex(index.getName()) && (clusterState.nodes().getLocalNode().isMasterNode() == true)) { // we do not delete the store if it is a master eligible node and the index is still in the cluster state // because we want to keep the meta data for indices around even if no shards are left here final IndexMetaData idxMeta = clusterState.metaData().index(index.getName()); throw new IllegalStateException("Can't delete index store for [" + index.getName() + "] - it's still part of the " + "cluster state [" + idxMeta.getIndexUUID() + "] [" + metaData.getIndexUUID() + "], " + "we are master eligible, so will keep the index metadata even if no shards are left."); } } final IndexSettings indexSettings = buildIndexSettings(metaData); deleteIndexStore(reason, indexSettings.getIndex(), indexSettings); } }
void deleteIndexStore(String reason, IndexMetaData metaData, ClusterState clusterState) throws IOException { if (nodeEnv.hasNodeFile()) { synchronized (this) { Index index = metaData.getIndex(); if (hasIndex(index)) { String localUUid = indexService(index).indexUUID(); throw new IllegalStateException(STR + index.getName() + STR + localUUid + STR + metaData.getIndexUUID() + "]"); } if (clusterState.metaData().hasIndex(index.getName()) && (clusterState.nodes().getLocalNode().isMasterNode() == true)) { final IndexMetaData idxMeta = clusterState.metaData().index(index.getName()); throw new IllegalStateException(STR + index.getName() + STR + STR + idxMeta.getIndexUUID() + STR + metaData.getIndexUUID() + STR + STR); } } final IndexSettings indexSettings = buildIndexSettings(metaData); deleteIndexStore(reason, indexSettings.getIndex(), indexSettings); } }
/** * Deletes the index store trying to acquire all shards locks for this index. * This method will delete the metadata for the index even if the actual shards can't be locked. * * Package private for testing */
Deletes the index store trying to acquire all shards locks for this index. This method will delete the metadata for the index even if the actual shards can't be locked. Package private for testing
deleteIndexStore
{ "repo_name": "strapdata/elassandra5-rc", "path": "core/src/main/java/org/elasticsearch/indices/IndicesService.java", "license": "apache-2.0", "size": 65029 }
[ "java.io.IOException", "org.elasticsearch.cluster.ClusterState", "org.elasticsearch.cluster.metadata.IndexMetaData", "org.elasticsearch.index.Index", "org.elasticsearch.index.IndexSettings" ]
import java.io.IOException; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings;
import java.io.*; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.metadata.*; import org.elasticsearch.index.*;
[ "java.io", "org.elasticsearch.cluster", "org.elasticsearch.index" ]
java.io; org.elasticsearch.cluster; org.elasticsearch.index;
389,192
/**
 * Looks up the distribution channel from the application manifest meta-data.
 *
 * @param context the context used to read the manifest meta-data
 * @return the value of the {@code "UMENG_CHANNEL"} meta-data entry
 */
public static String getChannel(Context context) {
    // Delegate directly; no need for an intermediate local.
    return getMetaChannel(context, "UMENG_CHANNEL");
}
static String function(Context context) { String channel = getMetaChannel(context, STR); return channel; }
/** * Gets channel. * * @param context the context * @return the channel */
Gets channel
getChannel
{ "repo_name": "Ryanst/AndroidDemo", "path": "app/src/main/java/com/ryanst/app/util/ChannelUtil.java", "license": "apache-2.0", "size": 1710 }
[ "android.content.Context" ]
import android.content.Context;
import android.content.*;
[ "android.content" ]
android.content;
789,974
/**
 * Enables or disables the widgets composing the login display and adjusts the
 * cursor accordingly: default cursor and default buttons when enabling, wait
 * cursor (and a force-disabled login button) while a login is in progress.
 *
 * @param b pass {@code true} to enable the controls, {@code false} otherwise
 */
public void setControlsEnabled(boolean b) {
    user.setEnabled(b);
    pass.setEnabled(b);
    login.setEnabled(b);
    enableControls();
    configButton.setEnabled(b);
    encryptedButton.setEnabled(b);
    // groupsBox only exists in some configurations — guard against null.
    if (groupsBox != null) groupsBox.setEnabled(b);
    if (b) {
        setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
        setButtonDefault(login);
        setButtonDefault(cancel);
    } else {
        // Busy state: show the wait cursor on the panel and on the login button itself.
        setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
        login.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
        // Redundant with the setEnabled(b) above when b is false, but kept for safety.
        login.setEnabled(false);
    }
}
void function(boolean b) { user.setEnabled(b); pass.setEnabled(b); login.setEnabled(b); enableControls(); configButton.setEnabled(b); encryptedButton.setEnabled(b); if (groupsBox != null) groupsBox.setEnabled(b); if (b) { setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); setButtonDefault(login); setButtonDefault(cancel); } else { setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); login.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); login.setEnabled(false); } }
/** * Sets whether or not the buttons composing the display are enabled. * * @param b Pass <code>true</code> if this component should be enabled, * <code>false</code> otherwise. */
Sets whether or not the buttons composing the display are enabled
setControlsEnabled
{ "repo_name": "dpwrussell/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/util/ui/login/ScreenLogin.java", "license": "gpl-2.0", "size": 41801 }
[ "java.awt.Cursor" ]
import java.awt.Cursor;
import java.awt.*;
[ "java.awt" ]
java.awt;
1,334,464
/**
 * Callback for {@code IPropertyChangeListener} events.
 * <p>
 * When any preference relevant to decoration changes, all decorations are
 * refreshed; theme-related changes additionally re-create the cached fonts
 * and colors before refreshing.
 *
 * @param event the property-change event carrying the changed preference key
 */
@Override
public void propertyChange(PropertyChangeEvent event) {
    final String prop = event.getProperty();
    // Team-wide or decorator settings: just refresh all labels.
    if (prop.equals(TeamUI.GLOBAL_IGNORES_CHANGED)
            || prop.equals(TeamUI.GLOBAL_FILE_TYPES_CHANGED)
            || prop.equals(Activator.DECORATORS_CHANGED)) {
        postLabelEvent();
    } else if (prop.equals(UIPreferences.THEME_UncommittedChangeBackgroundColor)
            || prop.equals(UIPreferences.THEME_UncommittedChangeFont)
            || prop.equals(UIPreferences.THEME_UncommittedChangeForegroundColor)
            || prop.equals(UIPreferences.THEME_IgnoredResourceFont)
            || prop.equals(UIPreferences.THEME_IgnoredResourceBackgroundColor)
            || prop.equals(UIPreferences.THEME_IgnoredResourceForegroundColor)) {
        // Theme change: rebuild font/color resources, then refresh.
        ensureFontAndColorsCreated(FONT_IDS, COLOR_IDS);
        postLabelEvent(); // TODO do I really need this?
    }
}
void function(PropertyChangeEvent event) { final String prop = event.getProperty(); if (prop.equals(TeamUI.GLOBAL_IGNORES_CHANGED) prop.equals(TeamUI.GLOBAL_FILE_TYPES_CHANGED) prop.equals(Activator.DECORATORS_CHANGED)) { postLabelEvent(); } else if (prop.equals(UIPreferences.THEME_UncommittedChangeBackgroundColor) prop.equals(UIPreferences.THEME_UncommittedChangeFont) prop.equals(UIPreferences.THEME_UncommittedChangeForegroundColor) prop.equals(UIPreferences.THEME_IgnoredResourceFont) prop.equals(UIPreferences.THEME_IgnoredResourceBackgroundColor) prop.equals(UIPreferences.THEME_IgnoredResourceForegroundColor)) { ensureFontAndColorsCreated(FONT_IDS, COLOR_IDS); postLabelEvent(); } }
/** * Callback for IPropertyChangeListener events * * If any of the relevant preferences has been changed we refresh all * decorations (all projects and their resources). * * @see org.eclipse.jface.util.IPropertyChangeListener#propertyChange(org.eclipse.jface.util.PropertyChangeEvent) */
Callback for IPropertyChangeListener events If any of the relevant preferences has been changed we refresh all decorations (all projects and their resources)
propertyChange
{ "repo_name": "collaborative-modeling/egit", "path": "org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/decorators/GitLightweightDecorator.java", "license": "epl-1.0", "size": 27493 }
[ "org.eclipse.egit.ui.Activator", "org.eclipse.egit.ui.UIPreferences", "org.eclipse.jface.util.PropertyChangeEvent", "org.eclipse.team.ui.TeamUI" ]
import org.eclipse.egit.ui.Activator; import org.eclipse.egit.ui.UIPreferences; import org.eclipse.jface.util.PropertyChangeEvent; import org.eclipse.team.ui.TeamUI;
import org.eclipse.egit.ui.*; import org.eclipse.jface.util.*; import org.eclipse.team.ui.*;
[ "org.eclipse.egit", "org.eclipse.jface", "org.eclipse.team" ]
org.eclipse.egit; org.eclipse.jface; org.eclipse.team;
2,780,275
/**
 * Verifies that every node whose cluster state version matches the master's
 * (and that agrees on who the master is) holds an identical cluster state:
 * same state UUID, same serialized size, and the same JSON representation
 * (ignoring array order).
 *
 * @throws IOException if converting a cluster state to a map fails
 */
protected void ensureClusterStateConsistency() throws IOException {
    if (cluster() != null && cluster().size() > 0) {
        final NamedWriteableRegistry namedWriteableRegistry = cluster().getNamedWriteableRegistry();
        ClusterState masterClusterState = client().admin().cluster().prepareState().all().get().getState();
        byte[] masterClusterStateBytes = ClusterState.Builder.toBytes(masterClusterState);
        // Round-trip through serialization to remove the local node reference.
        masterClusterState = ClusterState.Builder.fromBytes(masterClusterStateBytes, null, namedWriteableRegistry);
        Map<String, Object> masterStateMap = convertToMap(masterClusterState);
        int masterClusterStateSize = ClusterState.Builder.toBytes(masterClusterState).length;
        String masterId = masterClusterState.nodes().getMasterNodeId();
        for (Client client : cluster().getClients()) {
            ClusterState localClusterState = client.admin().cluster().prepareState().all().setLocal(true).get().getState();
            byte[] localClusterStateBytes = ClusterState.Builder.toBytes(localClusterState);
            // Round-trip through serialization to remove the local node reference.
            localClusterState = ClusterState.Builder.fromBytes(localClusterStateBytes, null, namedWriteableRegistry);
            final Map<String, Object> localStateMap = convertToMap(localClusterState);
            final int localClusterStateSize = ClusterState.Builder.toBytes(localClusterState).length;
            // Check that the non-master node has the same version of the cluster state as the master and
            // that the master node matches the master (otherwise there is no requirement for the cluster state to match)
            if (masterClusterState.version() == localClusterState.version()
                    && masterId.equals(localClusterState.nodes().getMasterNodeId())) {
                try {
                    assertEquals("clusterstate UUID does not match", masterClusterState.stateUUID(), localClusterState.stateUUID());
                    // We cannot compare serialization bytes since serialization order of maps is not guaranteed
                    // but we can compare serialization sizes - they should be the same
                    assertEquals("clusterstate size does not match", masterClusterStateSize, localClusterStateSize);
                    // Compare JSON serialization
                    assertNull("clusterstate JSON serialization does not match",
                            differenceBetweenMapsIgnoringArrayOrder(masterStateMap, localStateMap));
                } catch (AssertionError error) {
                    // Dump both states to the log before rethrowing so the mismatch is diagnosable.
                    logger.error("Cluster state from master:\n{}\nLocal cluster state:\n{}",
                            masterClusterState.toString(), localClusterState.toString());
                    throw error;
                }
            }
        }
    }
}
void function() throws IOException { if (cluster() != null && cluster().size() > 0) { final NamedWriteableRegistry namedWriteableRegistry = cluster().getNamedWriteableRegistry(); ClusterState masterClusterState = client().admin().cluster().prepareState().all().get().getState(); byte[] masterClusterStateBytes = ClusterState.Builder.toBytes(masterClusterState); masterClusterState = ClusterState.Builder.fromBytes(masterClusterStateBytes, null, namedWriteableRegistry); Map<String, Object> masterStateMap = convertToMap(masterClusterState); int masterClusterStateSize = ClusterState.Builder.toBytes(masterClusterState).length; String masterId = masterClusterState.nodes().getMasterNodeId(); for (Client client : cluster().getClients()) { ClusterState localClusterState = client.admin().cluster().prepareState().all().setLocal(true).get().getState(); byte[] localClusterStateBytes = ClusterState.Builder.toBytes(localClusterState); localClusterState = ClusterState.Builder.fromBytes(localClusterStateBytes, null, namedWriteableRegistry); final Map<String, Object> localStateMap = convertToMap(localClusterState); final int localClusterStateSize = ClusterState.Builder.toBytes(localClusterState).length; if (masterClusterState.version() == localClusterState.version() && masterId.equals(localClusterState.nodes().getMasterNodeId())) { try { assertEquals(STR, masterClusterState.stateUUID(), localClusterState.stateUUID()); assertEquals(STR, masterClusterStateSize, localClusterStateSize); assertNull(STR, differenceBetweenMapsIgnoringArrayOrder(masterStateMap, localStateMap)); } catch (AssertionError error) { logger.error(STR, masterClusterState.toString(), localClusterState.toString()); throw error; } } } } }
/** * Verifies that all nodes that have the same version of the cluster state as master have same cluster state */
Verifies that all nodes that have the same version of the cluster state as master have same cluster state
ensureClusterStateConsistency
{ "repo_name": "s1monw/elasticsearch", "path": "test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java", "license": "apache-2.0", "size": 107351 }
[ "java.io.IOException", "java.util.Map", "org.elasticsearch.client.Client", "org.elasticsearch.cluster.ClusterState", "org.elasticsearch.common.io.stream.NamedWriteableRegistry", "org.elasticsearch.test.XContentTestUtils" ]
import java.io.IOException; import java.util.Map; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.test.XContentTestUtils;
import java.io.*; import java.util.*; import org.elasticsearch.client.*; import org.elasticsearch.cluster.*; import org.elasticsearch.common.io.stream.*; import org.elasticsearch.test.*;
[ "java.io", "java.util", "org.elasticsearch.client", "org.elasticsearch.cluster", "org.elasticsearch.common", "org.elasticsearch.test" ]
java.io; java.util; org.elasticsearch.client; org.elasticsearch.cluster; org.elasticsearch.common; org.elasticsearch.test;
363,099
/**
 * Returns the validation warnings collected during schema validation.
 *
 * @return the list of SAX parse warnings
 */
public List<SAXParseException> getWarnings() {
    return this.warnings;
}
List<SAXParseException> function() { return warnings; }
/** * Returns the validation warnings */
Returns the validation warnings
getWarnings
{ "repo_name": "kingargyle/turmeric-bot", "path": "camel-core/src/main/java/org/apache/camel/processor/validation/SchemaValidationException.java", "license": "apache-2.0", "size": 2961 }
[ "java.util.List", "org.xml.sax.SAXParseException" ]
import java.util.List; import org.xml.sax.SAXParseException;
import java.util.*; import org.xml.sax.*;
[ "java.util", "org.xml.sax" ]
java.util; org.xml.sax;
2,631,913
/**
 * Tests the get() method if background processing causes a checked exception:
 * the exception set on the initializer must surface as the cause of the
 * ConcurrentException thrown by get().
 */
@Test
public void testGetCheckedException() {
    final BackgroundInitializerTestImpl init = new BackgroundInitializerTestImpl();
    final Exception ex = new Exception();
    // Arrange the test implementation to throw this exception during initialize().
    init.ex = ex;
    init.start();
    try {
        init.get();
        // get() must not succeed when initialization failed.
        fail("Exception not thrown!");
    } catch (final ConcurrentException cex) {
        // The original checked exception must be preserved as the cause.
        assertEquals("Exception not thrown", ex, cex.getCause());
    }
}
void function() { final BackgroundInitializerTestImpl init = new BackgroundInitializerTestImpl(); final Exception ex = new Exception(); init.ex = ex; init.start(); try { init.get(); fail(STR); } catch (final ConcurrentException cex) { assertEquals(STR, ex, cex.getCause()); } }
/** * Tests the get() method if background processing causes a checked * exception. */
Tests the get() method if background processing causes a checked exception
testGetCheckedException
{ "repo_name": "ManfredTremmel/gwt-commons-lang3", "path": "src/test/java/org/apache/commons/lang3/concurrent/BackgroundInitializerTest.java", "license": "apache-2.0", "size": 11320 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
2,174,473
/**
 * Method to set a PlainDocument as the Document to print.
 *
 * @param plainDocument the PlainDocument to use
 */
public void setDocument(PlainDocument plainDocument) {
    // Note: "JEditorPane" here is a field whose name shadows the Swing class name.
    JEditorPane = new JEditorPane();
    // Delegate to the MIME-typed overload as plain text.
    setDocument("text/plain", plainDocument);
}
void function(PlainDocument plainDocument) { JEditorPane = new JEditorPane(); setDocument(STR, plainDocument); }
/** * Method to set a PlainDocument as the Document to print. * * @param plainDocument the PlainDocument to use. */
Method to set a PlainDocument as the Document to print
setDocument
{ "repo_name": "vipinraj/Spark", "path": "core/src/main/java/org/jivesoftware/spark/ui/ChatPrinter.java", "license": "apache-2.0", "size": 14200 }
[ "javax.swing.JEditorPane", "javax.swing.text.PlainDocument" ]
import javax.swing.JEditorPane; import javax.swing.text.PlainDocument;
import javax.swing.*; import javax.swing.text.*;
[ "javax.swing" ]
javax.swing;
2,426,718
/**
 * An empty string must be reported as blank.
 */
@Test
public void testIsBlankEmpty() {
    final String empty = "";
    Assert.assertTrue(Strings.isBlank(empty));
}
void function() { Assert.assertTrue(Strings.isBlank("")); }
/** * Verify isBlank works with an empty string. */
Verify isBlank works with an empty string
testIsBlankEmpty
{ "repo_name": "TroyHisted/relib", "path": "src/test/java/org/relib/util/StringsTest.java", "license": "apache-2.0", "size": 7086 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
1,488,688
/**
 * Scans the meta table and calls a visitor on each RowResult, starting from the
 * meta region containing the given user-table row and processing at most
 * {@code rowLimit} rows. Delegates to the overload taking an explicit meta
 * table name, passing {@code HConstants.META_TABLE_NAME}.
 *
 * @param configuration HBase configuration.
 * @param visitor visitor invoked for each scanned row.
 * @param userTableName user table to start the scan at; pass null if not
 *        interested in a particular table.
 * @param row row in the user table; the scan starts at the meta region row
 *        where this row resides.
 * @param rowLimit maximum number of rows to process; if negative, the
 *        delegate treats it as {@code Integer.MAX_VALUE}.
 * @throws IOException e
 */
public static void metaScan(Configuration configuration, MetaScannerVisitor visitor, byte [] userTableName, byte[] row, int rowLimit) throws IOException {
    metaScan(configuration, null, visitor, userTableName, row, rowLimit, HConstants.META_TABLE_NAME);
}
static void function(Configuration configuration, MetaScannerVisitor visitor, byte [] userTableName, byte[] row, int rowLimit) throws IOException { metaScan(configuration, null, visitor, userTableName, row, rowLimit, HConstants.META_TABLE_NAME); }
/** * Scans the meta table and calls a visitor on each RowResult. Uses a table * name and a row name to locate meta regions. And it only scans at most * <code>rowLimit</code> of rows. * * @param configuration HBase configuration. * @param visitor Visitor object. * @param userTableName User table name in meta table to start scan at. Pass * null if not interested in a particular table. * @param row Name of the row at the user table. The scan will start from * the region row where the row resides. * @param rowLimit Max of processed rows. If it is less than 0, it * will be set to default value <code>Integer.MAX_VALUE</code>. * @throws IOException e */
Scans the meta table and calls a visitor on each RowResult. Uses a table name and a row name to locate meta regions. And it only scans at most <code>rowLimit</code> of rows
metaScan
{ "repo_name": "wanhao/IRIndex", "path": "src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java", "license": "apache-2.0", "size": 20879 }
[ "java.io.IOException", "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.hbase.HConstants" ]
import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants;
import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
1,338,255
private static void setExportInfo(Catalog catalog, ExportType exportType) { Database db = catalog.getClusters().get("cluster").getDatabases().get("database"); if (db.getIsactiveactivedred()) { // add default export configuration to DR conflict table exportType = addExportConfigToDRConflictsTable(catalog, exportType); } if (exportType == null) { return; } List<String> streamList = new ArrayList<String>(); boolean noEmptyTarget = (exportType.getConfiguration().size() != 1); for (ExportConfigurationType exportConfiguration : exportType.getConfiguration()) { boolean connectorEnabled = exportConfiguration.isEnabled(); // Get the stream name from the xml attribute "stream" // Should default to Constants.DEFAULT_EXPORT_CONNECTOR_NAME if not specified String streamName = exportConfiguration.getStream(); if (streamName == null || streamName.trim().isEmpty()) { throw new RuntimeException("stream must be specified along with type in export configuration."); } if (connectorEnabled) { if (streamList.contains(streamName)) { throw new RuntimeException("Multiple connectors can not be assigned to single export stream: " + streamName + "."); } else { streamList.add(streamName); } } boolean defaultConnector = streamName.equals(Constants.DEFAULT_EXPORT_CONNECTOR_NAME); org.voltdb.catalog.Connector catconn = db.getConnectors().get(streamName); if (catconn == null) { if (connectorEnabled) { if (defaultConnector) { hostLog.info("Export configuration enabled and provided for the default export " + "stream in deployment file, however, no export " + "tables are assigned to the default stream. " + "Export stream will be disabled."); } else { hostLog.info("Export configuration enabled and provided for export stream " + streamName + " in deployment file however no export " + "tables are assigned to the this stream. 
" + "Export stream " + streamName + " will be disabled."); } } continue; } Properties processorProperties = checkExportProcessorConfiguration(exportConfiguration); for (String name: processorProperties.stringPropertyNames()) { ConnectorProperty prop = catconn.getConfig().add(name); prop.setName(name); prop.setValue(processorProperties.getProperty(name)); } // on-server export always uses the guest processor catconn.setLoaderclass(ExportManager.PROCESSOR_CLASS); catconn.setEnabled(connectorEnabled); if (!connectorEnabled) { if (defaultConnector) { hostLog.info("Export configuration for the default export stream is present and is " + "configured to be disabled. The default export stream will be disabled."); } else { hostLog.info("Export configuration for export stream " + streamName + " is present and is " + "configured to be disabled. Export stream " + streamName + " will be disabled."); } } else { if (defaultConnector) { hostLog.info("Default export stream is configured and enabled with type=" + exportConfiguration.getType()); } else { hostLog.info("Export stream " + streamName + " is configured and enabled with type=" + exportConfiguration.getType()); } if (exportConfiguration.getProperty() != null) { if (defaultConnector) { hostLog.info("Default export stream configuration properties are: "); } else { hostLog.info("Export stream " + streamName + " configuration properties are: "); } for (PropertyType configProp : exportConfiguration.getProperty()) { if (!configProp.getName().toLowerCase().contains("password")) { hostLog.info("Export Configuration Property NAME=" + configProp.getName() + " VALUE=" + configProp.getValue()); } } } } } }
static void function(Catalog catalog, ExportType exportType) { Database db = catalog.getClusters().get(STR).getDatabases().get(STR); if (db.getIsactiveactivedred()) { exportType = addExportConfigToDRConflictsTable(catalog, exportType); } if (exportType == null) { return; } List<String> streamList = new ArrayList<String>(); boolean noEmptyTarget = (exportType.getConfiguration().size() != 1); for (ExportConfigurationType exportConfiguration : exportType.getConfiguration()) { boolean connectorEnabled = exportConfiguration.isEnabled(); String streamName = exportConfiguration.getStream(); if (streamName == null streamName.trim().isEmpty()) { throw new RuntimeException(STR); } if (connectorEnabled) { if (streamList.contains(streamName)) { throw new RuntimeException(STR + streamName + "."); } else { streamList.add(streamName); } } boolean defaultConnector = streamName.equals(Constants.DEFAULT_EXPORT_CONNECTOR_NAME); org.voltdb.catalog.Connector catconn = db.getConnectors().get(streamName); if (catconn == null) { if (connectorEnabled) { if (defaultConnector) { hostLog.info(STR + STR + STR + STR); } else { hostLog.info(STR + streamName + STR + STR + STR + streamName + STR); } } continue; } Properties processorProperties = checkExportProcessorConfiguration(exportConfiguration); for (String name: processorProperties.stringPropertyNames()) { ConnectorProperty prop = catconn.getConfig().add(name); prop.setName(name); prop.setValue(processorProperties.getProperty(name)); } catconn.setLoaderclass(ExportManager.PROCESSOR_CLASS); catconn.setEnabled(connectorEnabled); if (!connectorEnabled) { if (defaultConnector) { hostLog.info(STR + STR); } else { hostLog.info(STR + streamName + STR + STR + streamName + STR); } } else { if (defaultConnector) { hostLog.info(STR + exportConfiguration.getType()); } else { hostLog.info(STR + streamName + STR + exportConfiguration.getType()); } if (exportConfiguration.getProperty() != null) { if (defaultConnector) { hostLog.info(STR); } else { 
hostLog.info(STR + streamName + STR); } for (PropertyType configProp : exportConfiguration.getProperty()) { if (!configProp.getName().toLowerCase().contains(STR)) { hostLog.info(STR + configProp.getName() + STR + configProp.getValue()); } } } } } }
/** * Set deployment time settings for export * @param catalog The catalog to be updated. * @param exportsType A reference to the <exports> element of the deployment.xml file. */
Set deployment time settings for export
setExportInfo
{ "repo_name": "creative-quant/voltdb", "path": "src/frontend/org/voltdb/utils/CatalogUtil.java", "license": "agpl-3.0", "size": 92628 }
[ "java.util.ArrayList", "java.util.List", "java.util.Properties", "org.voltdb.catalog.Catalog", "org.voltdb.catalog.Connector", "org.voltdb.catalog.ConnectorProperty", "org.voltdb.catalog.Database", "org.voltdb.common.Constants", "org.voltdb.compiler.deploymentfile.ExportConfigurationType", "org.voltdb.compiler.deploymentfile.ExportType", "org.voltdb.compiler.deploymentfile.PropertyType", "org.voltdb.export.ExportManager" ]
import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.voltdb.catalog.Catalog; import org.voltdb.catalog.Connector; import org.voltdb.catalog.ConnectorProperty; import org.voltdb.catalog.Database; import org.voltdb.common.Constants; import org.voltdb.compiler.deploymentfile.ExportConfigurationType; import org.voltdb.compiler.deploymentfile.ExportType; import org.voltdb.compiler.deploymentfile.PropertyType; import org.voltdb.export.ExportManager;
import java.util.*; import org.voltdb.catalog.*; import org.voltdb.common.*; import org.voltdb.compiler.deploymentfile.*; import org.voltdb.export.*;
[ "java.util", "org.voltdb.catalog", "org.voltdb.common", "org.voltdb.compiler", "org.voltdb.export" ]
java.util; org.voltdb.catalog; org.voltdb.common; org.voltdb.compiler; org.voltdb.export;
1,199,381
/**
 * Removes the "Ctrl+H maps to Backspace" behavior Java installs by default:
 * strips the typed-\010 binding from the whole InputMap parent chain, then
 * rebinds BACK_SPACE to the delete-previous-character action.
 */
private void fixCtrlH() {
    final InputMap bindings = getInputMap();
    final KeyStroke ctrlH = KeyStroke.getKeyStroke("typed \010");
    // Walk the full parent chain so inherited bindings are removed too.
    for (InputMap map = bindings; map != null; map = map.getParent()) {
        map.remove(ctrlH);
    }
    bindings.put(KeyStroke.getKeyStroke("BACK_SPACE"), DefaultEditorKit.deletePrevCharAction);
}
void function() { InputMap inputMap = getInputMap(); KeyStroke char010 = KeyStroke.getKeyStroke(STR); InputMap parent = inputMap; while (parent != null) { parent.remove(char010); parent = parent.getParent(); } KeyStroke backspace = KeyStroke.getKeyStroke(STR); inputMap.put(backspace, DefaultEditorKit.deletePrevCharAction); }
/** * Removes the "Ctrl+H <=> Backspace" behavior that Java shows, for some odd reason... */
Removes the "Ctrl+H Backspace" behavior that Java shows, for some odd reason..
fixCtrlH
{ "repo_name": "kevinmcgoldrick/Tank", "path": "tools/agent_debugger/src/main/java/org/fife/ui/rtextarea/RTextArea.java", "license": "epl-1.0", "size": 54300 }
[ "javax.swing.InputMap", "javax.swing.KeyStroke", "javax.swing.text.DefaultEditorKit" ]
import javax.swing.InputMap; import javax.swing.KeyStroke; import javax.swing.text.DefaultEditorKit;
import javax.swing.*; import javax.swing.text.*;
[ "javax.swing" ]
javax.swing;
2,121,240
/**
 * Parses the given file and returns a list of dependency information that it
 * contained. It uses the passed-in fileContents instead of reading the file.
 *
 * @param filePath path to the file to parse (used for reporting; the content
 *        itself comes from fileContents).
 * @param fileContents the contents to parse.
 * @return a list of DependencyInfo objects.
 */
public List<DependencyInfo> parseFile(String filePath, String fileContents) {
    // Wrap the in-memory contents in a Reader and delegate to the reader-based parser.
    return parseFileReader(filePath, new StringReader(fileContents));
}
List<DependencyInfo> function(String filePath, String fileContents) { return parseFileReader(filePath, new StringReader(fileContents)); }
/** * Parses the given file and returns a list of dependency information that it * contained. * It uses the passed in fileContents instead of reading the file. * * @param filePath Path to the file to parse. * @param fileContents The contents to parse. * @return A list of DependencyInfo objects. */
Parses the given file and returns a list of dependency information that it contained. It uses the passed in fileContents instead of reading the file
parseFile
{ "repo_name": "GoogleChromeLabs/chromeos_smart_card_connector", "path": "third_party/closure-compiler/src/src/com/google/javascript/jscomp/deps/DepsFileRegexParser.java", "license": "apache-2.0", "size": 7269 }
[ "java.io.StringReader", "java.util.List" ]
import java.io.StringReader; import java.util.List;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,399,260
/**
 * Returns a path fragment qualified by the rule name and a unique fragment,
 * disambiguating artifacts produced from a source file that appears in
 * multiple rules (e.g. "//pkg:target" becomes "pkg/&lt;fragment&gt;/target").
 *
 * @param label    the rule label supplying the package source root and name
 * @param fragment the unique fragment inserted between root and rule name
 * @return the disambiguated path fragment
 */
public static PathFragment getUniqueDirectory(Label label, PathFragment fragment) {
    PathFragment sourceRoot = label.getPackageIdentifier().getSourceRoot();
    return sourceRoot.getRelative(fragment).getRelative(label.getName());
}
static PathFragment function(Label label, PathFragment fragment) { return label.getPackageIdentifier().getSourceRoot().getRelative(fragment) .getRelative(label.getName()); }
/** * Returns a path fragment qualified by the rule name and unique fragment to * disambiguate artifacts produced from the source file appearing in * multiple rules. * * <p>For example "//pkg:target" -> "pkg/&lt;fragment&gt;/target. */
Returns a path fragment qualified by the rule name and unique fragment to disambiguate artifacts produced from the source file appearing in multiple rules. For example "//pkg:target" -> "pkg/&lt;fragment&gt;/target
getUniqueDirectory
{ "repo_name": "iamthearm/bazel", "path": "src/main/java/com/google/devtools/build/lib/analysis/AnalysisUtils.java", "license": "apache-2.0", "size": 5856 }
[ "com.google.devtools.build.lib.cmdline.Label", "com.google.devtools.build.lib.vfs.PathFragment" ]
import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.cmdline.*; import com.google.devtools.build.lib.vfs.*;
[ "com.google.devtools" ]
com.google.devtools;
658,076
/**
 * Construct server from command line arguments: either start a fresh server
 * on the given [addr:]port serving files from the current directory, or
 * deserialize and start a previously saved server instance.
 *
 * @param args one argument: "[&lt;addr&gt;:]&lt;port&gt;"; or two: "-r" and a save file
 */
public static void main(String[] args)
{
    if (args.length==0 || args.length>2)
    {
        System.err.println
            ("\nUsage - java org.openqa.jetty.http.HttpServer [<addr>:]<port>");
        System.err.println
            ("\nUsage - java org.openqa.jetty.http.HttpServer -r [savefile]");
        System.err.println
            ("  Serves files from '.' directory");
        System.err.println
            ("  Dump handler for not found requests");
        System.err.println
            ("  Default port is 8080");
        System.exit(1);
    }

    try{
        if (args.length==1)
        {
            // Create the server
            HttpServer server = new HttpServer();

            // Default is no virtual host
            String host=null;
            HttpContext context = server.getContext(host,"/");
            context.setResourceBase(".");
            // Handler chain: serve files, then dump, then 404.
            context.addHandler(new ResourceHandler());
            context.addHandler(new DumpHandler());
            context.addHandler(new NotFoundHandler());

            InetAddrPort address = new InetAddrPort(args[0]);
            server.addListener(address);

            server.start();
        }
        else
        {
            // Restore a serialized server from the save file and start it.
            Resource resource = Resource.newResource(args[1]);
            ObjectInputStream in = new ObjectInputStream(resource.getInputStream());
            HttpServer server = (HttpServer)in.readObject();
            in.close();
            server.start();
        }

    }
    catch (Exception e)
    {
        log.warn(LogSupport.EXCEPTION,e);
    }
}
static void function(String[] args) { if (args.length==0 args.length>2) { System.err.println (STR); System.err.println (STR); System.err.println (STR); System.err.println (STR); System.err.println (STR); System.exit(1); } try{ if (args.length==1) { HttpServer server = new HttpServer(); String host=null; HttpContext context = server.getContext(host,"/"); context.setResourceBase("."); context.addHandler(new ResourceHandler()); context.addHandler(new DumpHandler()); context.addHandler(new NotFoundHandler()); InetAddrPort address = new InetAddrPort(args[0]); server.addListener(address); server.start(); } else { Resource resource = Resource.newResource(args[1]); ObjectInputStream in = new ObjectInputStream(resource.getInputStream()); HttpServer server = (HttpServer)in.readObject(); in.close(); server.start(); } } catch (Exception e) { log.warn(LogSupport.EXCEPTION,e); } }
/** Construct server from command line arguments. * @param args */
Construct server from command line arguments
main
{ "repo_name": "krosenvold/selenium-git-release-candidate", "path": "java/server/src/org/openqa/jetty/http/HttpServer.java", "license": "apache-2.0", "size": 50160 }
[ "java.io.ObjectInputStream", "org.openqa.jetty.http.handler.DumpHandler", "org.openqa.jetty.http.handler.NotFoundHandler", "org.openqa.jetty.http.handler.ResourceHandler", "org.openqa.jetty.util.InetAddrPort", "org.openqa.jetty.util.LogSupport", "org.openqa.jetty.util.Resource" ]
import java.io.ObjectInputStream; import org.openqa.jetty.http.handler.DumpHandler; import org.openqa.jetty.http.handler.NotFoundHandler; import org.openqa.jetty.http.handler.ResourceHandler; import org.openqa.jetty.util.InetAddrPort; import org.openqa.jetty.util.LogSupport; import org.openqa.jetty.util.Resource;
import java.io.*; import org.openqa.jetty.http.handler.*; import org.openqa.jetty.util.*;
[ "java.io", "org.openqa.jetty" ]
java.io; org.openqa.jetty;
479,960
/**
 * Changes the permissions of this image.
 *
 * @param octet Permissions octet, e.g. "640"
 * @return If an error occurs the error message contains the reason.
 */
public OneResponse chmod(String octet) {
    // Delegate to the static variant using this object's client and id.
    return chmod(client, id, octet);
}
OneResponse function(String octet) { return chmod(client, id, octet); }
/** * Changes the permissions * * @param octet Permissions octed , e.g. 640 * @return If an error occurs the error message contains the reason. */
Changes the permissions
chmod
{ "repo_name": "mattthias/one", "path": "src/oca/java/src/org/opennebula/client/image/Image.java", "license": "apache-2.0", "size": 18831 }
[ "org.opennebula.client.OneResponse" ]
import org.opennebula.client.OneResponse;
import org.opennebula.client.*;
[ "org.opennebula.client" ]
org.opennebula.client;
1,178,000
/**
 * Updates fields in the document referred to by this DocumentReference. If the
 * document doesn't exist yet, the update will fail.
 *
 * @param fields a map containing the fields and values with which to update the document
 * @param options preconditions to enforce on this update
 * @return an ApiFuture that will be resolved when the write finishes
 */
@Nonnull
public ApiFuture<WriteResult> update(@Nonnull Map<String, Object> fields, Precondition options) {
    // Route the single update through a one-element WriteBatch and unwrap its sole result.
    WriteBatch writeBatch = rpcContext.getFirestore().batch();
    return extractFirst(writeBatch.update(this, fields, options).commit());
}
ApiFuture<WriteResult> function(@Nonnull Map<String, Object> fields, Precondition options) { WriteBatch writeBatch = rpcContext.getFirestore().batch(); return extractFirst(writeBatch.update(this, fields, options).commit()); }
/** * Updates fields in the document referred to by this DocumentReference. If the document doesn't * exist yet, the update will fail. * * @param fields A map containing the fields and values with which to update the document. * @param options Preconditions to enforce on this update. * @return An ApiFuture that will be resolved when the write finishes. */
Updates fields in the document referred to by this DocumentReference. If the document doesn't exist yet, the update will fail
update
{ "repo_name": "googleapis/java-firestore", "path": "google-cloud-firestore/src/main/java/com/google/cloud/firestore/DocumentReference.java", "license": "apache-2.0", "size": 18076 }
[ "com.google.api.core.ApiFuture", "java.util.Map", "javax.annotation.Nonnull" ]
import com.google.api.core.ApiFuture; import java.util.Map; import javax.annotation.Nonnull;
import com.google.api.core.*; import java.util.*; import javax.annotation.*;
[ "com.google.api", "java.util", "javax.annotation" ]
com.google.api; java.util; javax.annotation;
533,276
public static void close(final URLConnection connection) throws IOException { if (connection == null) { return; } String protocol = connection.getURL().getProtocol(); if ("file".equals(protocol)) // XXX what about JarUrlConnection ?! { // Close FileUrlConnection's input stream because // otherwise it leaks open file handles. See WICKET-4359. // Most other connection types should not call getInputStream() here, // especially remote connections. connection.getInputStream().close(); } if (connection instanceof HttpURLConnection) { ((HttpURLConnection)connection).disconnect(); } }
static void function(final URLConnection connection) throws IOException { if (connection == null) { return; } String protocol = connection.getURL().getProtocol(); if ("file".equals(protocol)) { connection.getInputStream().close(); } if (connection instanceof HttpURLConnection) { ((HttpURLConnection)connection).disconnect(); } }
/** * Closes a connection * * @param connection * @throws IOException */
Closes a connection
close
{ "repo_name": "afiantara/apache-wicket-1.5.7", "path": "src/wicket-util/src/main/java/org/apache/wicket/util/io/Connections.java", "license": "apache-2.0", "size": 3523 }
[ "java.io.IOException", "java.net.HttpURLConnection", "java.net.URLConnection" ]
import java.io.IOException; import java.net.HttpURLConnection; import java.net.URLConnection;
import java.io.*; import java.net.*;
[ "java.io", "java.net" ]
java.io; java.net;
757,482
/**
 * Returns the controller in this list of controllers at index i.
 *
 * NOTE(review): if no controller was ever attached, this lazily assigns an
 * empty list and the {@code get(i)} below will then throw
 * IndexOutOfBoundsException for every index — presumably callers rely on that
 * exception (and on the field being initialized as a side effect); confirm
 * before changing.
 *
 * @param i the index to get a controller from
 * @return the controller at index i
 */
public Controller getController(int i) {
    if (geometricalControllers == null) {
        // Lazy initialization shared with the attach/detach methods.
        geometricalControllers = new ArrayList<Controller>(1);
    }
    return geometricalControllers.get(i);
}
Controller function(int i) { if (geometricalControllers == null) { geometricalControllers = new ArrayList<Controller>(1); } return geometricalControllers.get(i); }
/** * Returns the controller in this list of controllers at index i. * * @param i * The index to get a controller from. * @return The controller at index i. * @see com.jme.scene.Controller */
Returns the controller in this list of controllers at index i
getController
{ "repo_name": "accelazh/ThreeBodyProblem", "path": "lib/jME2_0_1-Stable/src/com/jme/scene/Spatial.java", "license": "mit", "size": 62639 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
206,039
/**
 * Returns the EMF meta object for the containment reference
 * 'Description' of MediatorSequence.
 *
 * @return the meta object for the 'Description' containment reference
 */
EReference getMediatorSequence_Description();
EReference getMediatorSequence_Description();
/** * Returns the meta object for the containment reference '{@link org.wso2.developerstudio.eclipse.esb.MediatorSequence#getDescription <em>Description</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the containment reference '<em>Description</em>'. * @see org.wso2.developerstudio.eclipse.esb.MediatorSequence#getDescription() * @see #getMediatorSequence() * @generated */
Returns the meta object for the containment reference '<code>org.wso2.developerstudio.eclipse.esb.MediatorSequence#getDescription Description</code>'.
getMediatorSequence_Description
{ "repo_name": "chanakaudaya/developer-studio", "path": "esb/org.wso2.developerstudio.eclipse.esb/src/org/wso2/developerstudio/eclipse/esb/EsbPackage.java", "license": "apache-2.0", "size": 373548 }
[ "org.eclipse.emf.ecore.EReference" ]
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,235,288
/**
 * Tests what happens if you update a tuple which is deleted, purged and then
 * reinserted (by another connection) with the exact same key and values while
 * a scrollable updatable ResultSet is still positioned over the old snapshot.
 *
 * @throws SQLException if any database operation fails
 */
public void testUpdatePurgedTuple4() throws SQLException {
    getConnection().setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
    // Scroll-insensitive + updatable so we can revisit and update the first row later.
    Statement s = createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                                  ResultSet.CONCUR_UPDATABLE);
    ResultSet rs = s.executeQuery("select * from t1");
    rs.next(); // Point to first tuple
    int firstKey = rs.getInt(1);
    int valA = rs.getInt(2);
    int valB = rs.getInt(3);
    println("T1: Read next Tuple:(" + rs.getInt(1) + "," +
            rs.getInt(2) + "," +
            rs.getInt(3) + ")");
    rs.next(); // Go to next
    println("T1: Read next Tuple:(" + rs.getInt(1) + "," +
            rs.getInt(2) + "," +
            rs.getInt(3) + ")");

    // Second connection deletes, purges, then reinserts the first row.
    Connection con2 = openDefaultConnection();
    con2.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
    try {
        PreparedStatement ps2 = con2.prepareStatement
            ("delete from t1 where id=?");
        ps2.setInt(1, firstKey);
        assertEquals("Expected one record to be deleted", 1, ps2.executeUpdate());
        println("T2: Deleted record with id=" + firstKey);
        con2.commit();
        println("T2: commit");

        // Now purge the table so the deleted row's slot is reclaimed.
        ps2 = con2.prepareStatement
            ("call SYSCS_UTIL.SYSCS_INPLACE_COMPRESS_TABLE(?,?,?,?,?)");
        ps2.setString(1, "SPLICE"); // schema
        ps2.setString(2, "T1");     // table name
        ps2.setInt(3, 1);           // purge
        ps2.setInt(4, 0);           // defragment rows
        ps2.setInt(5, 0);           // truncate end
        println("T3: call SYSCS_UTIL.SYSCS_INPLACE_COMPRESS_TABLE");
        println("T3: purges deleted records");
        ps2.executeUpdate();
        con2.commit();
        println("T3: commit");

        // Reinsert a row with the same key and values as the purged one.
        ps2 = con2.prepareStatement("insert into t1 values(?,?,?,?)");
        ps2.setInt(1, firstKey);
        ps2.setInt(2, valA);
        ps2.setInt(3, valB);
        ps2.setString(4, "UPDATE TUPLE " + firstKey);
        assertEquals("Expected one record to be inserted", 1, ps2.executeUpdate());
        println("T4: Inserted record (" + firstKey + "," + valA + "," + valB + ")" );
        con2.commit();
        println("T4: commit");
    } catch (SQLException e) {
        con2.rollback();
        throw e;
    }

    // Back on the first connection: revisit the (now-reinserted) first tuple
    // and update it through the scrollable ResultSet.
    rs.previous(); // Go back to first tuple
    println("T1: Read previous Tuple:(" + rs.getInt(1) + "," +
            rs.getInt(2) + "," +
            rs.getInt(3) + ")");
    println("T1: id=" + rs.getInt(1));
    rs.updateInt(2, 3);
    rs.updateRow();
    println("T1: updated column 2, to value=3");
    println("T1: commit");
    commit();

    // Dump the final table contents for diagnostics.
    rs = s.executeQuery("select * from t1");
    while (rs.next()) {
        println("T4: Read next Tuple:(" + rs.getInt(1) + "," +
                rs.getInt(2) + "," +
                rs.getInt(3) + ")");
    }
    con2.close();
}
void function() throws SQLException { getConnection().setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); Statement s = createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE); ResultSet rs = s.executeQuery(STR); rs.next(); int firstKey = rs.getInt(1); int valA = rs.getInt(2); int valB = rs.getInt(3); println(STR + rs.getInt(1) + "," + rs.getInt(2) + "," + rs.getInt(3) + ")"); rs.next(); println(STR + rs.getInt(1) + "," + rs.getInt(2) + "," + rs.getInt(3) + ")"); Connection con2 = openDefaultConnection(); con2.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); try { PreparedStatement ps2 = con2.prepareStatement (STR); ps2.setInt(1, firstKey); assertEquals(STR, 1, ps2.executeUpdate()); println(STR + firstKey); con2.commit(); println(STR); ps2 = con2.prepareStatement (STR); ps2.setString(1, STR); ps2.setString(2, "T1"); ps2.setInt(3, 1); ps2.setInt(4, 0); ps2.setInt(5, 0); println(STR); println(STR); ps2.executeUpdate(); con2.commit(); println(STR); ps2 = con2.prepareStatement(STR); ps2.setInt(1, firstKey); ps2.setInt(2, valA); ps2.setInt(3, valB); ps2.setString(4, STR + firstKey); assertEquals(STR, 1, ps2.executeUpdate()); println(STR + firstKey + "," + valA + "," + valB + ")" ); con2.commit(); println(STR); } catch (SQLException e) { con2.rollback(); throw e; } rs.previous(); println(STR + rs.getInt(1) + "," + rs.getInt(2) + "," + rs.getInt(3) + ")"); println(STR + rs.getInt(1)); rs.updateInt(2, 3); rs.updateRow(); println(STR); println(STR); commit(); rs = s.executeQuery(STR); while (rs.next()) { println(STR + rs.getInt(1) + "," + rs.getInt(2) + "," + rs.getInt(3) + ")"); } con2.close(); }
/** * Test what happens if you update a tuple which is deleted, purged and * then reinserted with the exact same values **/
Test what happens if you update a tuple which is deleted, purged and then reinserted with the exact same values
testUpdatePurgedTuple4
{ "repo_name": "splicemachine/spliceengine", "path": "db-testing/src/test/java/com/splicemachine/dbTesting/functionTests/tests/jdbcapi/ConcurrencyTest.java", "license": "agpl-3.0", "size": 35162 }
[ "java.sql.Connection", "java.sql.PreparedStatement", "java.sql.ResultSet", "java.sql.SQLException", "java.sql.Statement" ]
import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement;
import java.sql.*;
[ "java.sql" ]
java.sql;
1,851,194
@Test public void testTokenCache() throws IOException { System.out.println("running dist job"); // make sure JT starts jConf = mrCluster.createJobConf(); // provide namenodes names for the job to get the delegation tokens for String nnUri = dfsCluster.getURI().toString(); jConf.set(MRJobConfig.JOB_NAMENODES, nnUri + "," + nnUri); // job tracker principla id.. jConf.set(JTConfig.JT_USER_NAME, "jt_id"); // using argument to pass the file name String[] args = { "-tokenCacheFile", tokenFileName.toString(), "-m", "1", "-r", "1", "-mt", "1", "-rt", "1" }; int res = -1; try { res = ToolRunner.run(jConf, new MySleepJob(), args); } catch (Exception e) { System.out.println("Job failed with" + e.getLocalizedMessage()); e.printStackTrace(System.out); fail("Job failed"); } assertEquals("dist job res is not 0", res, 0); }
void function() throws IOException { System.out.println(STR); jConf = mrCluster.createJobConf(); String nnUri = dfsCluster.getURI().toString(); jConf.set(MRJobConfig.JOB_NAMENODES, nnUri + "," + nnUri); jConf.set(JTConfig.JT_USER_NAME, "jt_id"); String[] args = { STR, tokenFileName.toString(), "-m", "1", "-r", "1", "-mt", "1", "-rt", "1" }; int res = -1; try { res = ToolRunner.run(jConf, new MySleepJob(), args); } catch (Exception e) { System.out.println(STR + e.getLocalizedMessage()); e.printStackTrace(System.out); fail(STR); } assertEquals(STR, res, 0); }
/** * run a distributed job and verify that TokenCache is available * @throws IOException */
run a distributed job and verify that TokenCache is available
testTokenCache
{ "repo_name": "rekhajoshm/mapreduce-fork", "path": "src/test/mapred/org/apache/hadoop/mapreduce/security/TestTokenCache.java", "license": "apache-2.0", "size": 12972 }
[ "java.io.IOException", "org.apache.hadoop.mapreduce.MRJobConfig", "org.apache.hadoop.mapreduce.server.jobtracker.JTConfig", "org.apache.hadoop.util.ToolRunner", "org.junit.Assert" ]
import java.io.IOException; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; import org.apache.hadoop.util.ToolRunner; import org.junit.Assert;
import java.io.*; import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.mapreduce.server.jobtracker.*; import org.apache.hadoop.util.*; import org.junit.*;
[ "java.io", "org.apache.hadoop", "org.junit" ]
java.io; org.apache.hadoop; org.junit;
1,553,388
/**
 * Sends a packet to every connected player on the server.
 *
 * @param pkt the packet to broadcast
 */
public void sendToAll(FMLProxyPacket pkt)
{
    // Tag the outbound message target as ALL, then flush the packet through the
    // server-side channel; write failures are surfaced as channel exceptions.
    channels.get(Side.SERVER).attr(FMLOutboundHandler.FML_MESSAGETARGET).set(FMLOutboundHandler.OutboundTarget.ALL);
    channels.get(Side.SERVER).writeAndFlush(pkt).addListener(ChannelFutureListener.FIRE_EXCEPTION_ON_FAILURE);
}
void function(FMLProxyPacket pkt) { channels.get(Side.SERVER).attr(FMLOutboundHandler.FML_MESSAGETARGET).set(FMLOutboundHandler.OutboundTarget.ALL); channels.get(Side.SERVER).writeAndFlush(pkt).addListener(ChannelFutureListener.FIRE_EXCEPTION_ON_FAILURE); }
/** * Send a packet to all on the server * * @param pkt */
Send a packet to all on the server
sendToAll
{ "repo_name": "Scrik/Cauldron-1", "path": "eclipse/cauldron/src/main/java/cpw/mods/fml/common/network/FMLEventChannel.java", "license": "gpl-3.0", "size": 6713 }
[ "io.netty.channel.ChannelFutureListener" ]
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.*;
[ "io.netty.channel" ]
io.netty.channel;
2,749,595
/**
 * Implements JMeterGUIComponent.createTestElement(): builds a fresh
 * CriticalSectionController populated from the current GUI state.
 *
 * @return the newly created and configured test element
 */
@Override
public TestElement createTestElement() {
    CriticalSectionController criticalSection = new CriticalSectionController();
    // Copy the current GUI field values into the new element.
    modifyTestElement(criticalSection);
    return criticalSection;
}
TestElement function() { CriticalSectionController controller = new CriticalSectionController(); modifyTestElement(controller); return controller; }
/** * Implements JMeterGUIComponent.createTestElement() */
Implements JMeterGUIComponent.createTestElement()
createTestElement
{ "repo_name": "tuanhq/jmeter", "path": "src/components/org/apache/jmeter/control/gui/CriticalSectionControllerGui.java", "license": "apache-2.0", "size": 5950 }
[ "org.apache.jmeter.control.CriticalSectionController", "org.apache.jmeter.testelement.TestElement" ]
import org.apache.jmeter.control.CriticalSectionController; import org.apache.jmeter.testelement.TestElement;
import org.apache.jmeter.control.*; import org.apache.jmeter.testelement.*;
[ "org.apache.jmeter" ]
org.apache.jmeter;
2,691,935
/**
 * Returns an immutable list of child namespaces.
 *
 * @return list of child namespaces
 */
public List<SqlValidatorNamespace> getChildren() {
  // Project each scope child onto its namespace; Lists.transform yields a
  // lazily transformed view rather than a copy.
  return Lists.transform(children, child -> child.namespace);
}
List<SqlValidatorNamespace> function() { return Lists.transform(children, scopeChild -> scopeChild.namespace); }
/** * Returns an immutable list of child namespaces. * * @return list of child namespaces */
Returns an immutable list of child namespaces
getChildren
{ "repo_name": "vlsi/incubator-calcite", "path": "core/src/main/java/org/apache/calcite/sql/validate/ListScope.java", "license": "apache-2.0", "size": 7736 }
[ "com.google.common.collect.Lists", "java.util.List" ]
import com.google.common.collect.Lists; import java.util.List;
import com.google.common.collect.*; import java.util.*;
[ "com.google.common", "java.util" ]
com.google.common; java.util;
2,331,071
/**
 * Adds a Column entity whose column name is "external_ids" to the Row entity
 * of attributes.
 *
 * @param externalIds the column data for the "external_ids" column
 */
public void setExternalIds(Map<String, String> externalIds) {
    ColumnDescription description = new ColumnDescription(
            BridgeColumn.EXTERNALIDS.columnName(),
            "setExternalIds",
            VersionNum.VERSION100);
    super.setDataHandler(description, externalIds);
}
void function(Map<String, String> externalIds) { ColumnDescription columndesc = new ColumnDescription( BridgeColumn.EXTERNALIDS .columnName(), STR, VersionNum.VERSION100); super.setDataHandler(columndesc, externalIds); }
/** * Add a Column entity which column name is "external_ids" to the Row entity * of attributes. * @param externalIds the column data which column name is "external_ids" */
Add a Column entity which column name is "external_ids" to the Row entity of attributes
setExternalIds
{ "repo_name": "sonu283304/onos", "path": "protocols/ovsdb/rfc/src/main/java/org/onosproject/ovsdb/rfc/table/Bridge.java", "license": "apache-2.0", "size": 26424 }
[ "java.util.Map", "org.onosproject.ovsdb.rfc.tableservice.ColumnDescription" ]
import java.util.Map; import org.onosproject.ovsdb.rfc.tableservice.ColumnDescription;
import java.util.*; import org.onosproject.ovsdb.rfc.tableservice.*;
[ "java.util", "org.onosproject.ovsdb" ]
java.util; org.onosproject.ovsdb;
1,395,695
/**
 * Adds all AsyncHandler objects to this subresource.
 *
 * @param value list of AsyncHandler objects to append
 * @return this, for fluent chaining
 */
@SuppressWarnings("unchecked")
public T asyncHandlers(List<AsyncHandler> value) {
    // Append each handler in order.
    for (AsyncHandler handler : value) {
        this.subresources.asyncHandlers.add(handler);
    }
    return (T) this;
}
@SuppressWarnings(STR) T function(List<AsyncHandler> value) { this.subresources.asyncHandlers.addAll(value); return (T) this; }
/** * Add all AsyncHandler objects to this subresource * @return this * @param value List of AsyncHandler objects. */
Add all AsyncHandler objects to this subresource
asyncHandlers
{ "repo_name": "wildfly-swarm/wildfly-config-api", "path": "generator/src/test/java/org/wildfly/apigen/test/invocation/logging/Logging.java", "license": "apache-2.0", "size": 17694 }
[ "java.util.List", "org.wildfly.apigen.test.invocation.logging.subsystem.asyncHandler.AsyncHandler" ]
import java.util.List; import org.wildfly.apigen.test.invocation.logging.subsystem.asyncHandler.AsyncHandler;
import java.util.*; import org.wildfly.apigen.test.invocation.logging.subsystem.*;
[ "java.util", "org.wildfly.apigen" ]
java.util; org.wildfly.apigen;
1,316,503
/**
 * Retrieves the value associated with the given name, or null if there is no
 * current association.
 *
 * @param name the parameter name
 * @return the parameter's value, or null
 */
public String get(String name) {
    // Parameter names are matched case-insensitively and without surrounding
    // whitespace; normalize before the lookup.
    String key = name.trim().toLowerCase(Locale.ENGLISH);
    return (String) parameters.get(key);
}
String function(String name) { return (String)parameters.get(name.trim().toLowerCase(Locale.ENGLISH)); }
/** * Retrieve the value associated with the given name, or null if there * is no current association. * * @param name the parameter name * @return the parameter's value */
Retrieve the value associated with the given name, or null if there is no current association
get
{ "repo_name": "YouDiSN/OpenJDK-Research", "path": "jdk9/jaxws/src/java.activation/share/classes/javax/activation/MimeTypeParameterList.java", "license": "gpl-2.0", "size": 10957 }
[ "java.util.Locale" ]
import java.util.Locale;
import java.util.*;
[ "java.util" ]
java.util;
2,310,200
/**
 * Exit compositor base element.
 *
 * Delegates to the AnnotatedBase handler; compositors add no extra exit
 * processing of their own here.
 *
 * @param node element being exited
 */
public void exit(CommonCompositorBase node) {
    exit((AnnotatedBase)node);
}
void function(CommonCompositorBase node) { exit((AnnotatedBase)node); }
/** * Exit compositor base element. * * @param node element being exited */
Exit compositor base element
exit
{ "repo_name": "vkorbut/jibx", "path": "jibx/build/src/org/jibx/schema/SchemaVisitor.java", "license": "bsd-3-clause", "size": 28814 }
[ "org.jibx.schema.elements.AnnotatedBase", "org.jibx.schema.elements.CommonCompositorBase" ]
import org.jibx.schema.elements.AnnotatedBase; import org.jibx.schema.elements.CommonCompositorBase;
import org.jibx.schema.elements.*;
[ "org.jibx.schema" ]
org.jibx.schema;
2,826,020
if (contact != null) return contact; assertID(); try { @Cleanup Cursor cursor = addressBook.provider.query(addressBook.syncAdapterURI(ContentUris.withAppendedId(Groups.CONTENT_URI, id)), new String[] { COLUMN_UID, Groups.TITLE, Groups.NOTES }, null, null, null); if (cursor == null || !cursor.moveToNext()) throw new FileNotFoundException("Contact group not found"); contact = new Contact(); contact.uid = cursor.getString(0); contact.group = true; contact.displayName = cursor.getString(1); contact.note = cursor.getString(2); // query UIDs of all contacts which are member of the group @Cleanup Cursor c = addressBook.provider.query(addressBook.syncAdapterURI(ContactsContract.Data.CONTENT_URI), new String[] { Data.RAW_CONTACT_ID }, GroupMembership.MIMETYPE + "=? AND " + GroupMembership.GROUP_ROW_ID + "=?", new String[] { GroupMembership.CONTENT_ITEM_TYPE, String.valueOf(id) }, null); while (c != null && c.moveToNext()) { long contactID = c.getLong(0); Constants.log.fine("Member ID: " + contactID); @Cleanup Cursor c2 = addressBook.provider.query( addressBook.syncAdapterURI(ContentUris.withAppendedId(RawContacts.CONTENT_URI, contactID)), new String[] { AndroidContact.COLUMN_UID }, null, null, null ); if (c2 != null && c2.moveToNext()) { String uid = c2.getString(0); if (!StringUtils.isEmpty(uid)) { Constants.log.fine("Found member of group: " + uid); contact.members.add(uid); } } } return contact; } catch (RemoteException e) { throw new ContactsStorageException("Couldn't read contact group", e); } }
if (contact != null) return contact; assertID(); try { @Cleanup Cursor cursor = addressBook.provider.query(addressBook.syncAdapterURI(ContentUris.withAppendedId(Groups.CONTENT_URI, id)), new String[] { COLUMN_UID, Groups.TITLE, Groups.NOTES }, null, null, null); if (cursor == null !cursor.moveToNext()) throw new FileNotFoundException(STR); contact = new Contact(); contact.uid = cursor.getString(0); contact.group = true; contact.displayName = cursor.getString(1); contact.note = cursor.getString(2); @Cleanup Cursor c = addressBook.provider.query(addressBook.syncAdapterURI(ContactsContract.Data.CONTENT_URI), new String[] { Data.RAW_CONTACT_ID }, GroupMembership.MIMETYPE + STR + GroupMembership.GROUP_ROW_ID + "=?", new String[] { GroupMembership.CONTENT_ITEM_TYPE, String.valueOf(id) }, null); while (c != null && c.moveToNext()) { long contactID = c.getLong(0); Constants.log.fine(STR + contactID); @Cleanup Cursor c2 = addressBook.provider.query( addressBook.syncAdapterURI(ContentUris.withAppendedId(RawContacts.CONTENT_URI, contactID)), new String[] { AndroidContact.COLUMN_UID }, null, null, null ); if (c2 != null && c2.moveToNext()) { String uid = c2.getString(0); if (!StringUtils.isEmpty(uid)) { Constants.log.fine(STR + uid); contact.members.add(uid); } } } return contact; } catch (RemoteException e) { throw new ContactsStorageException(STR, e); } }
/** * Creates a {@link Contact} (representation of a VCard) from the group. * @throws IllegalArgumentException if group is not persistent yet ({@link #id} is null) */
Creates a <code>Contact</code> (representation of a VCard) from the group
getContact
{ "repo_name": "ElysionT/ContactsDAV", "path": "vcard4android/src/main/java/at/bitfire/vcard4android/AndroidGroup.java", "license": "gpl-3.0", "size": 6764 }
[ "android.content.ContentUris", "android.database.Cursor", "android.os.RemoteException", "android.provider.ContactsContract", "java.io.FileNotFoundException", "org.apache.commons.lang3.StringUtils" ]
import android.content.ContentUris; import android.database.Cursor; import android.os.RemoteException; import android.provider.ContactsContract; import java.io.FileNotFoundException; import org.apache.commons.lang3.StringUtils;
import android.content.*; import android.database.*; import android.os.*; import android.provider.*; import java.io.*; import org.apache.commons.lang3.*;
[ "android.content", "android.database", "android.os", "android.provider", "java.io", "org.apache.commons" ]
android.content; android.database; android.os; android.provider; java.io; org.apache.commons;
1,247,158
/**
 * Creates a new {@link Queue} which is safe to use for a single producer
 * (one thread!) and a single consumer (one thread!).
 *
 * @param <T> the element type
 * @return a new SPSC queue; the Unsafe-based variant when available, otherwise
 *         the atomic fallback
 */
public static <T> Queue<T> newSpscQueue() {
    if (hasUnsafe()) {
        return new SpscLinkedQueue<T>();
    }
    return new SpscLinkedAtomicQueue<T>();
}
static <T> Queue<T> function() { return hasUnsafe() ? new SpscLinkedQueue<T>() : new SpscLinkedAtomicQueue<T>(); }
/** * Create a new {@link Queue} which is safe to use for single producer (one thread!) and a single * consumer (one thread!). */
Create a new <code>Queue</code> which is safe to use for single producer (one thread!) and a single consumer (one thread!)
newSpscQueue
{ "repo_name": "fengjiachun/netty", "path": "common/src/main/java/io/netty/util/internal/PlatformDependent.java", "license": "apache-2.0", "size": 56602 }
[ "java.util.Queue", "org.jctools.queues.SpscLinkedQueue", "org.jctools.queues.atomic.SpscLinkedAtomicQueue" ]
import java.util.Queue; import org.jctools.queues.SpscLinkedQueue; import org.jctools.queues.atomic.SpscLinkedAtomicQueue;
import java.util.*; import org.jctools.queues.*; import org.jctools.queues.atomic.*;
[ "java.util", "org.jctools.queues" ]
java.util; org.jctools.queues;
144,242
/**
 * Returns {@code true} if a value is contained in the timeline.
 *
 * @param date date to verify
 * @return {@code true} if the value is contained in the timeline
 */
public boolean containsDomainValue(Date date) {
    // Convert to the timeline's long representation and delegate to the
    // long-based overload.
    long domainValue = getTime(date);
    return containsDomainValue(domainValue);
}
boolean function(Date date) { return containsDomainValue(getTime(date)); }
/** * Returns <code>true</code> if a value is contained in the timeline. * * @param date date to verify * * @return <code>true</code> if value is contained in the timeline */
Returns <code>true</code> if a value is contained in the timeline
containsDomainValue
{ "repo_name": "Epsilon2/Memetic-Algorithm-for-TSP", "path": "jfreechart-1.0.16/source/org/jfree/chart/axis/SegmentedTimeline.java", "license": "mit", "size": 64025 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
1,034,558
/**
 * Creates a compound name whose components consist of a prefix of the
 * components in this compound name. The result shares this name's syntax;
 * subsequent changes to either name do not affect the other.
 *
 * @param posn the 0-based index of the component at which to stop;
 *             must be in the range [0,size()]
 * @return a compound name consisting of the components at indexes [0,posn)
 * @throws ArrayIndexOutOfBoundsException if posn is outside the valid range
 */
public Name getPrefix(int posn) {
    Enumeration prefixComps = impl.getPrefix(posn);
    return new CompoundName(prefixComps, mySyntax);
}
Name function(int posn) { Enumeration comps = impl.getPrefix(posn); return (new CompoundName(comps, mySyntax)); }
/** * Creates a compound name whose components consist of a prefix of the * components in this compound name. * The result and this compound name share the same syntax. * Subsequent changes to * this compound name does not affect the name that is returned and * vice versa. * * @param posn The 0-based index of the component at which to stop. * Must be in the range [0,size()]. * @return A compound name consisting of the components at indexes in * the range [0,posn). * @exception ArrayIndexOutOfBoundsException * If posn is outside the specified range. */
Creates a compound name whose components consist of a prefix of the components in this compound name. The result and this compound name share the same syntax. Subsequent changes to this compound name does not affect the name that is returned and vice versa
getPrefix
{ "repo_name": "rokn/Count_Words_2015", "path": "testing/openjdk/jdk/src/share/classes/javax/naming/CompoundName.java", "license": "mit", "size": 25642 }
[ "java.util.Enumeration" ]
import java.util.Enumeration;
import java.util.*;
[ "java.util" ]
java.util;
1,471,986
/**
 * Gets the fareSaverCode attribute (persisted in column FARE_SVR_CD).
 *
 * @return the fareSaverCode
 */
@Column(name = "FARE_SVR_CD", length = 1, nullable = true)
public String getFareSaverCode() {
    return fareSaverCode;
}
@Column(name = STR, length = 1, nullable = true) String function() { return fareSaverCode; }
/** * Gets the fareSaverCode attribute. * @return Returns the fareSaverCode. */
Gets the fareSaverCode attribute
getFareSaverCode
{ "repo_name": "bhutchinson/kfs", "path": "kfs-tem/src/main/java/org/kuali/kfs/module/tem/businessobject/AgencyStagingData.java", "license": "agpl-3.0", "size": 52782 }
[ "javax.persistence.Column" ]
import javax.persistence.Column;
import javax.persistence.*;
[ "javax.persistence" ]
javax.persistence;
2,056,223
/**
 * Exercises Hibernate's configuration by attempting a write to the 'database'.
 * With recent Hibernates, the configuration isn't really built until the
 * session factory is built, and even then some of the data doesn't exist until
 * a write occurs.
 *
 * @param config the configuration under test
 */
private void attemptWrite(ConstructableConfiguration config) {
    Pojo pojo = new Pojo();
    pojo.setFoo("Foo!");
    SessionFactory sessionFactory = config.buildSessionFactory();
    Session session = null;
    try {
        // Write the entity and capture its generated id.
        session = sessionFactory.openSession();
        Integer result = (Integer) session.save(pojo);
        assertNotNull(result);
        session.close();

        // Read it back in a fresh session and compare round-tripped state.
        session = sessionFactory.openSession();
        Pojo pojo2 = (Pojo) session.load(Pojo.class, result);
        // BUGFIX: the original asserted on the locally-created 'pojo', which
        // can never be null here; the reloaded 'pojo2' is what must be checked.
        assertNotNull(pojo2);
        assertEquals(pojo.getId(), pojo2.getId());
        assertEquals(pojo.getFoo(), pojo2.getFoo());
    } finally {
        if (session != null && session.isOpen()) {
            session.close();
        }
        sessionFactory.close();
    }
}
void function(ConstructableConfiguration config) { Pojo pojo = new Pojo(); pojo.setFoo("Foo!"); SessionFactory sessionFactory = config.buildSessionFactory(); Session session = null; try { session = sessionFactory.openSession(); Integer result = (Integer) session.save(pojo); assertNotNull(result); session.close(); session = sessionFactory.openSession(); Pojo pojo2 = (Pojo) session.load(Pojo.class, result); assertNotNull(pojo); assertEquals(pojo.getId(), pojo2.getId()); assertEquals(pojo.getFoo(), pojo2.getFoo()); } finally { if (session != null && session.isOpen()) { session.close(); } sessionFactory.close(); } }
/** * Works Hibernate's configuration by attempting a write to the 'database'. With the latest * hiberanates, the configuration isn't really built until the session factory is built, and even * then, some of the data doesn't exist until a write occurs. * @param config */
Works Hibernate's configuration by attempting a write to the 'database'. With the latest hiberanates, the configuration isn't really built until the session factory is built, and even then, some of the data doesn't exist until a write occurs
attemptWrite
{ "repo_name": "picocontainer/NanoContainer-persistence", "path": "persistence-hibernate-annotations/src/test/org/nanocontainer/persistence/hibernate/annotations/ConstructableConfigurationTestCase.java", "license": "bsd-3-clause", "size": 2889 }
[ "org.hibernate.Session", "org.hibernate.SessionFactory" ]
import org.hibernate.Session; import org.hibernate.SessionFactory;
import org.hibernate.*;
[ "org.hibernate" ]
org.hibernate;
756,395
/**
 * Performs the test for the given matrix column (T1RV6D3) and row (T1LV15D3).
 */
@Test
public void testT1RV6D3_T1LV15D3() {
    test_id = getTestId("T1RV6D3", "T1LV15D3", "159");
    // Select the source and destination values for this matrix cell.
    String src = selectTRVD("T1RV6D3");
    String dest = selectTLVD("T1LV15D3");
    String result = ".";
    try {
        result = TRVD_TLVD_Action(src, dest);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    // This source/destination combination is expected to succeed.
    assertTrue(Success, checkResult_Success(src, dest, result));
    GraphicalEditor editor = getActiveEditor();
    if (editor != null) {
        // Either validate against stored expected results or regenerate them,
        // depending on the generateResults flag.
        validateOrGenerateResults(editor, generateResults);
    }
}
void function() { test_id = getTestId(STR, STR, "159"); String src = selectTRVD(STR); String dest = selectTLVD(STR); String result = "."; try { result = TRVD_TLVD_Action(src, dest); } catch (RecognitionException e) { e.printStackTrace(); } catch (TokenStreamException e) { e.printStackTrace(); } assertTrue(Success, checkResult_Success(src, dest, result)); GraphicalEditor editor = getActiveEditor(); if (editor != null) { validateOrGenerateResults(editor, generateResults); } }
/** * Perform the test for the given matrix column (T1RV6D3) and row * (T1LV15D3). * */
Perform the test for the given matrix column (T1RV6D3) and row (T1LV15D3)
testT1RV6D3_T1LV15D3
{ "repo_name": "jason-rhodes/bridgepoint", "path": "src/org.xtuml.bp.als.oal.test/src/org/xtuml/bp/als/oal/test/SingleDimensionFixedArrayAssigmentTest_12_Generics.java", "license": "apache-2.0", "size": 155634 }
[ "org.xtuml.bp.ui.graphics.editor.GraphicalEditor" ]
import org.xtuml.bp.ui.graphics.editor.GraphicalEditor;
import org.xtuml.bp.ui.graphics.editor.*;
[ "org.xtuml.bp" ]
org.xtuml.bp;
1,489,456
/**
 * Returns an instance with all unavailable and available ads marked as
 * skipped. If the ad count hasn't been set, it will be set to zero.
 */
@CheckResult
public AdGroup withAllAdsSkipped() {
    if (count == C.LENGTH_UNSET) {
        // Count unset: per contract, return a group with zero ads.
        return new AdGroup(0, new int[0], new Uri[0], new long[0]);
    }
    // FIX(readability): the original locals were named 'count' and 'states',
    // shadowing the same-named fields; renamed for clarity — behavior unchanged.
    int adCount = this.states.length;
    @AdState int[] skippedStates = Arrays.copyOf(this.states, adCount);
    for (int i = 0; i < adCount; i++) {
        if (skippedStates[i] == AD_STATE_AVAILABLE
            || skippedStates[i] == AD_STATE_UNAVAILABLE) {
            skippedStates[i] = AD_STATE_SKIPPED;
        }
    }
    return new AdGroup(adCount, skippedStates, uris, durationsUs);
}
AdGroup function() { if (count == C.LENGTH_UNSET) { return new AdGroup( 0, new int[0], new Uri[0], new long[0]); } int count = this.states.length; @AdState int[] states = Arrays.copyOf(this.states, count); for (int i = 0; i < count; i++) { if (states[i] == AD_STATE_AVAILABLE states[i] == AD_STATE_UNAVAILABLE) { states[i] = AD_STATE_SKIPPED; } } return new AdGroup(count, states, uris, durationsUs); }
/** * Returns an instance with all unavailable and available ads marked as skipped. If the ad count * hasn't been set, it will be set to zero. */
Returns an instance with all unavailable and available ads marked as skipped. If the ad count hasn't been set, it will be set to zero
withAllAdsSkipped
{ "repo_name": "tntcrowd/ExoPlayer", "path": "library/core/src/main/java/com/google/android/exoplayer2/source/ads/AdPlaybackState.java", "license": "apache-2.0", "size": 18060 }
[ "android.net.Uri", "java.util.Arrays" ]
import android.net.Uri; import java.util.Arrays;
import android.net.*; import java.util.*;
[ "android.net", "java.util" ]
android.net; java.util;
253,487
/**
 * Returns the port type editor link persistence.
 *
 * @return the port type editor link persistence
 */
public PortTypeEditorLinkPersistence getPortTypeEditorLinkPersistence() {
    return portTypeEditorLinkPersistence;
}
PortTypeEditorLinkPersistence function() { return portTypeEditorLinkPersistence; }
/** * Returns the port type editor link persistence. * * @return the port type editor link persistence */
Returns the port type editor link persistence
getPortTypeEditorLinkPersistence
{ "repo_name": "queza85/edison", "path": "edison-portal-framework/edison-appstore-2016-portlet/docroot/WEB-INF/src/org/kisti/edison/science/service/base/ScienceAppManagerServiceBaseImpl.java", "license": "gpl-3.0", "size": 52555 }
[ "org.kisti.edison.science.service.persistence.PortTypeEditorLinkPersistence" ]
import org.kisti.edison.science.service.persistence.PortTypeEditorLinkPersistence;
import org.kisti.edison.science.service.persistence.*;
[ "org.kisti.edison" ]
org.kisti.edison;
991,947
/**
 * Data to test serialFilter call counts. Each row holds:
 * a display name, the object to bind, and the expected count of calls to
 * checkInput.
 *
 * @return array of test data
 */
@DataProvider(name = "bindData")
static Object[][] bindObjects() {
    return new Object[][] {
        {"SimpleString", "SimpleString", 0},
        {"String", new XX("now is the time"), 1},
        {"String[]", new XX(new String[3]), 3},
        {"Long[4]", new XX(new Long[4]), 3},
        {"RejectME", new XX(new RejectME()), -1},
    };
}
@DataProvider(name = STR) static Object[][] bindObjects() { Object[][] data = { {STR, STR, 0}, {STR, new XX(STR), 1}, {STR, new XX(new String[3]), 3}, {STR, new XX(new Long[4]), 3}, {STR, new XX(new RejectME()), -1}, }; return data; }
/** * Data to test serialFilter call counts. * - name * - Object * - expected count of calls to checkInput. * * @return array of test data */
Data to test serialFilter call counts. - name - Object - expected count of calls to checkInput
bindObjects
{ "repo_name": "JetBrains/jdk8u_jdk", "path": "test/sun/rmi/server/UnicastServerRef/FilterUSRTest.java", "license": "gpl-2.0", "size": 7140 }
[ "org.testng.annotations.DataProvider" ]
import org.testng.annotations.DataProvider;
import org.testng.annotations.*;
[ "org.testng.annotations" ]
org.testng.annotations;
22,809
// Conversion //----------------------------------------------------------------------- public Chronology withUTC() { return INSTANCE_UTC; }
Chronology function() { return INSTANCE_UTC; }
/** * Gets the Chronology in the UTC time zone. * * @return the chronology in UTC */
Gets the Chronology in the UTC time zone
withUTC
{ "repo_name": "aleatorio12/ProVentasConnector", "path": "lib/joda-time-2.4-dist/joda-time-2.4/src/main/java/org/joda/time/chrono/IslamicChronology.java", "license": "gpl-3.0", "size": 21916 }
[ "org.joda.time.Chronology" ]
import org.joda.time.Chronology;
import org.joda.time.*;
[ "org.joda.time" ]
org.joda.time;
1,758,429
public void setInventorySlotContents(int slot, ItemStack itemStack, boolean changed) { hasChanged = changed; if (slot >= OFFSET) { extraItems[slot - OFFSET] = itemStack; } else { super.setInventorySlotContents(slot, itemStack); } }
void function(int slot, ItemStack itemStack, boolean changed) { hasChanged = changed; if (slot >= OFFSET) { extraItems[slot - OFFSET] = itemStack; } else { super.setInventorySlotContents(slot, itemStack); } }
/** * Sets the given item stack to the specified slot in the inventory, mark as dirty according to the boolean argument * @param slot whose content will change * @param itemStack to put in the slot * @param changed if the inventory packet should be sent next tick */
Sets the given item stack to the specified slot in the inventory, mark as dirty according to the boolean argument
setInventorySlotContents
{ "repo_name": "nargacu83/ClassesOfWarcraft", "path": "battlegear mod src/minecraft/mods/battlegear2/api/core/InventoryPlayerBattle.java", "license": "gpl-3.0", "size": 17124 }
[ "net.minecraft.item.ItemStack" ]
import net.minecraft.item.ItemStack;
import net.minecraft.item.*;
[ "net.minecraft.item" ]
net.minecraft.item;
1,027,294
@ServiceMethod(returns = ReturnType.SINGLE) Mono<RoleAssignmentMetricsResultInner> getMetricsForSubscriptionAsync();
@ServiceMethod(returns = ReturnType.SINGLE) Mono<RoleAssignmentMetricsResultInner> getMetricsForSubscriptionAsync();
/** * Get role assignment usage metrics for a subscription. * * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return role assignment usage metrics for a subscription. */
Get role assignment usage metrics for a subscription
getMetricsForSubscriptionAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/fluent/RoleAssignmentMetricsClient.java", "license": "mit", "size": 2835 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.resourcemanager.authorization.fluent.models.RoleAssignmentMetricsResultInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.authorization.fluent.models.RoleAssignmentMetricsResultInner;
import com.azure.core.annotation.*; import com.azure.resourcemanager.authorization.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
1,385,499
@FIXVersion(introduced = "4.3") @TagNumRef(tagNum = TagNum.Account) public void setAccount(String account) { this.account = account; }
@FIXVersion(introduced = "4.3") @TagNumRef(tagNum = TagNum.Account) void function(String account) { this.account = account; }
/** * Message field setter. * @param account field value */
Message field setter
setAccount
{ "repo_name": "marvisan/HadesFIX", "path": "Model/src/main/java/net/hades/fix/message/MassQuoteAckMsg.java", "license": "gpl-3.0", "size": 29333 }
[ "net.hades.fix.message.anno.FIXVersion", "net.hades.fix.message.anno.TagNumRef", "net.hades.fix.message.type.TagNum" ]
import net.hades.fix.message.anno.FIXVersion; import net.hades.fix.message.anno.TagNumRef; import net.hades.fix.message.type.TagNum;
import net.hades.fix.message.anno.*; import net.hades.fix.message.type.*;
[ "net.hades.fix" ]
net.hades.fix;
2,446,240
public DateTime getResponseDateHeader(String headerName);
DateTime function(String headerName);
/** * Returns the value of a header containing a header or {@code null} if no * response header of this type exists or it could not be parsed as a valid * date. * * @param headerName name of header * @return header value. */
Returns the value of a header containing a header or null if no response header of this type exists or it could not be parsed as a valid date
getResponseDateHeader
{ "repo_name": "simonrrr/gdata-java-client", "path": "java/src/com/google/gdata/client/Service.java", "license": "apache-2.0", "size": 83027 }
[ "com.google.gdata.data.DateTime" ]
import com.google.gdata.data.DateTime;
import com.google.gdata.data.*;
[ "com.google.gdata" ]
com.google.gdata;
491,264
private void updateMetaData(CarbonDictionaryWriter dictionaryWriter) throws IOException { if (null != dictionaryWriter) { dictionaryWriter.commit(); } }
void function(CarbonDictionaryWriter dictionaryWriter) throws IOException { if (null != dictionaryWriter) { dictionaryWriter.commit(); } }
/** * update dictionary metadata * * @param dictionaryWriter * @throws IOException */
update dictionary metadata
updateMetaData
{ "repo_name": "sgururajshetty/carbondata", "path": "core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java", "license": "apache-2.0", "size": 10058 }
[ "java.io.IOException", "org.apache.carbondata.core.writer.CarbonDictionaryWriter" ]
import java.io.IOException; import org.apache.carbondata.core.writer.CarbonDictionaryWriter;
import java.io.*; import org.apache.carbondata.core.writer.*;
[ "java.io", "org.apache.carbondata" ]
java.io; org.apache.carbondata;
1,338,005
public String createContact(ContactDetail contactDetail) { SilverTrace.info("yellowpages", "YellowpagesBmEJB.createContact()", "root.MSG_GEN_ENTER_METHOD"); SilverTrace.info("yellowpages", "YellowpagesBmEJB.createContact()", "root.MSG_GEN_PARAM_VALUE", "contactDetail = " + contactDetail.toString()); ContactPK contactPK = null; contactDetail.getPK().setSpace(this.space); contactDetail.getPK().setComponentName(this.componentId); contactDetail.setCreationDate(new Date()); contactDetail.setCreatorId(currentUser.getId()); try { // create the contact contactPK = getContactBm().createContact(contactDetail); contactDetail.getPK().setId(contactPK.getId()); // add this contact to the current topic addContactToTopic(contactPK.getId(), currentTopic.getNodePK().getId()); } catch (Exception re) { throw new YellowpagesRuntimeException("YellowpagesBmEJB.createContact()", SilverpeasRuntimeException.ERROR, "yellowpages.EX_CREATE_CONTACT_FAILED", re); } SilverTrace.info("yellowpages", "YellowpagesBmEJB.createContact()", "root.MSG_GEN_RETURN_VALUE", "id = " + contactPK.getId()); SilverTrace.info("yellowpages", "YellowpagesBmEJB.createContact()", "root.MSG_GEN_EXIT_METHOD"); return contactPK.getId(); }
String function(ContactDetail contactDetail) { SilverTrace.info(STR, STR, STR); SilverTrace.info(STR, STR, STR, STR + contactDetail.toString()); ContactPK contactPK = null; contactDetail.getPK().setSpace(this.space); contactDetail.getPK().setComponentName(this.componentId); contactDetail.setCreationDate(new Date()); contactDetail.setCreatorId(currentUser.getId()); try { contactPK = getContactBm().createContact(contactDetail); contactDetail.getPK().setId(contactPK.getId()); addContactToTopic(contactPK.getId(), currentTopic.getNodePK().getId()); } catch (Exception re) { throw new YellowpagesRuntimeException(STR, SilverpeasRuntimeException.ERROR, STR, re); } SilverTrace.info(STR, STR, STR, STR + contactPK.getId()); SilverTrace.info(STR, STR, STR); return contactPK.getId(); }
/** * Create a new Contact (only the header - parameters) to the current Topic * @param contactDetail a ContactDetail * @return the id of the new contact * @see com.stratelia.webactiv.util.contact.model.ContactDetail * @exception javax.ejb.FinderException * @exception javax.ejb.CreateException * @exception javax.ejb.NamingException * @exception java.sql.SQLException * @since 1.0 */
Create a new Contact (only the header - parameters) to the current Topic
createContact
{ "repo_name": "stephaneperry/Silverpeas-Components", "path": "yellowpages/yellowpages-ejb/src/main/java/com/stratelia/webactiv/yellowpages/control/ejb/YellowpagesBmEJB.java", "license": "agpl-3.0", "size": 66673 }
[ "com.stratelia.silverpeas.silvertrace.SilverTrace", "com.stratelia.webactiv.util.contact.model.ContactDetail", "com.stratelia.webactiv.util.contact.model.ContactPK", "com.stratelia.webactiv.util.exception.SilverpeasRuntimeException", "com.stratelia.webactiv.yellowpages.model.YellowpagesRuntimeException", "java.util.Date" ]
import com.stratelia.silverpeas.silvertrace.SilverTrace; import com.stratelia.webactiv.util.contact.model.ContactDetail; import com.stratelia.webactiv.util.contact.model.ContactPK; import com.stratelia.webactiv.util.exception.SilverpeasRuntimeException; import com.stratelia.webactiv.yellowpages.model.YellowpagesRuntimeException; import java.util.Date;
import com.stratelia.silverpeas.silvertrace.*; import com.stratelia.webactiv.util.contact.model.*; import com.stratelia.webactiv.util.exception.*; import com.stratelia.webactiv.yellowpages.model.*; import java.util.*;
[ "com.stratelia.silverpeas", "com.stratelia.webactiv", "java.util" ]
com.stratelia.silverpeas; com.stratelia.webactiv; java.util;
53,416
public AiTile getNearestSafeTiles2() { ai.checkInterruption(); int tmpDistance = 100; AiTile result = null; for (AiTile aiTile : ai.getSecuretiles) { ai.checkInterruption(); int myDistance = ai.getCG().nonCyclicTileDistance(ai.getZone().getOwnHero().getTile(), aiTile); if ( tmpDistance > myDistance ) { tmpDistance = myDistance; result = aiTile; } } return result; }
AiTile function() { ai.checkInterruption(); int tmpDistance = 100; AiTile result = null; for (AiTile aiTile : ai.getSecuretiles) { ai.checkInterruption(); int myDistance = ai.getCG().nonCyclicTileDistance(ai.getZone().getOwnHero().getTile(), aiTile); if ( tmpDistance > myDistance ) { tmpDistance = myDistance; result = aiTile; } } return result; }
/** * Calcule la case hors de danger la plus proche de notre agent parmis ses cases voisines. * * @return result la case hors de danger la plus proche */
Calcule la case hors de danger la plus proche de notre agent parmis ses cases voisines
getNearestSafeTiles2
{ "repo_name": "vlabatut/totalboumboum", "path": "resources/ai/org/totalboumboum/ai/v201314/ais/donmezlabatcamy/v4/TileHandler.java", "license": "gpl-2.0", "size": 20297 }
[ "org.totalboumboum.ai.v201314.adapter.data.AiTile" ]
import org.totalboumboum.ai.v201314.adapter.data.AiTile;
import org.totalboumboum.ai.v201314.adapter.data.*;
[ "org.totalboumboum.ai" ]
org.totalboumboum.ai;
41,698
public static MultipleCurrencyCurveSensitivityMarket of(final Currency ccy, final CurveSensitivityMarket sensitivity) { ArgumentChecker.notNull(ccy, "Currency"); ArgumentChecker.notNull(sensitivity, "Sensitivity"); final TreeMap<Currency, CurveSensitivityMarket> map = new TreeMap<Currency, CurveSensitivityMarket>(); map.put(ccy, sensitivity); return new MultipleCurrencyCurveSensitivityMarket(map); }
static MultipleCurrencyCurveSensitivityMarket function(final Currency ccy, final CurveSensitivityMarket sensitivity) { ArgumentChecker.notNull(ccy, STR); ArgumentChecker.notNull(sensitivity, STR); final TreeMap<Currency, CurveSensitivityMarket> map = new TreeMap<Currency, CurveSensitivityMarket>(); map.put(ccy, sensitivity); return new MultipleCurrencyCurveSensitivityMarket(map); }
/** * Create a new multiple currency sensitivity with one currency. * @param ccy The currency. Not null. * @param sensitivity The sensitivity associated to the currency. Not null. * @return The multiple currency sensitivity. */
Create a new multiple currency sensitivity with one currency
of
{ "repo_name": "charles-cooper/idylfin", "path": "src/com/opengamma/analytics/financial/interestrate/market/description/MultipleCurrencyCurveSensitivityMarket.java", "license": "apache-2.0", "size": 7869 }
[ "com.opengamma.util.ArgumentChecker", "com.opengamma.util.money.Currency", "java.util.TreeMap" ]
import com.opengamma.util.ArgumentChecker; import com.opengamma.util.money.Currency; import java.util.TreeMap;
import com.opengamma.util.*; import com.opengamma.util.money.*; import java.util.*;
[ "com.opengamma.util", "java.util" ]
com.opengamma.util; java.util;
307,505
public static void printAvailableCommands(Map<String, Command> commands, PrintStream printer) { if (commands.isEmpty()) { return; } printer.println("Commands:"); printer.println(); int longestCommandName = commands .keySet() .stream() .mapToInt(name -> name.length()) .max() .orElse(0); for (Map.Entry<String, Command> commandEntry : commands.entrySet()) { if ("help".equals(commandEntry.getKey())) { // don't print help message along with available other commands continue; } printCommand(printer, commandEntry.getKey(), commandEntry.getValue(), longestCommandName); } Command helpCmd = commands.get("help"); if (null != helpCmd) { printer.println(); printCommand(printer, "help", helpCmd, longestCommandName); } printer.println(); }
static void function(Map<String, Command> commands, PrintStream printer) { if (commands.isEmpty()) { return; } printer.println(STR); printer.println(); int longestCommandName = commands .keySet() .stream() .mapToInt(name -> name.length()) .max() .orElse(0); for (Map.Entry<String, Command> commandEntry : commands.entrySet()) { if ("help".equals(commandEntry.getKey())) { continue; } printCommand(printer, commandEntry.getKey(), commandEntry.getValue(), longestCommandName); } Command helpCmd = commands.get("help"); if (null != helpCmd) { printer.println(); printCommand(printer, "help", helpCmd, longestCommandName); } printer.println(); }
/** * Print the available commands in <tt>commander</tt>. * * @param commands commands * @param printer printer */
Print the available commands in commander
printAvailableCommands
{ "repo_name": "ivankelly/bookkeeper", "path": "tools/framework/src/main/java/org/apache/bookkeeper/tools/framework/CommandUtils.java", "license": "apache-2.0", "size": 7114 }
[ "java.io.PrintStream", "java.util.Map" ]
import java.io.PrintStream; import java.util.Map;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
52,097
private void rowToCSV(Row row) { Cell cell = null; int lastCellNum = 0; ArrayList<String> csvLine = new ArrayList<String>(); // Check to ensure that a row was recovered from the sheet as it is // possible that one or more rows between other populated rows could be // missing - blank. If the row does contain cells then... if(row != null) { // Get the index for the right most cell on the row and then // step along the row from left to right recovering the contents // of each cell, converting that into a formatted String and // then storing the String into the csvLine ArrayList. lastCellNum = row.getLastCellNum(); for(int i = 0; i <= lastCellNum; i++) { cell = row.getCell(i); if(cell == null) { csvLine.add(""); } else { if(cell.getCellType() != Cell.CELL_TYPE_FORMULA) { csvLine.add(this.formatter.formatCellValue(cell)); } else { csvLine.add(this.formatter.formatCellValue(cell, this.evaluator)); } } } // Make a note of the index number of the right most cell. This value // will later be used to ensure that the matrix of data in the CSV file // is square. if(lastCellNum > this.maxRowWidth) { this.maxRowWidth = lastCellNum; } } this.csvData.add(csvLine); }
void function(Row row) { Cell cell = null; int lastCellNum = 0; ArrayList<String> csvLine = new ArrayList<String>(); if(row != null) { lastCellNum = row.getLastCellNum(); for(int i = 0; i <= lastCellNum; i++) { cell = row.getCell(i); if(cell == null) { csvLine.add(""); } else { if(cell.getCellType() != Cell.CELL_TYPE_FORMULA) { csvLine.add(this.formatter.formatCellValue(cell)); } else { csvLine.add(this.formatter.formatCellValue(cell, this.evaluator)); } } } if(lastCellNum > this.maxRowWidth) { this.maxRowWidth = lastCellNum; } } this.csvData.add(csvLine); }
/** * Called to convert a row of cells into a line of data that can later be * output to the CSV file. * * @param row An instance of either the HSSFRow or XSSFRow classes that * encapsulates information about a row of cells recovered from * an Excel workbook. */
Called to convert a row of cells into a line of data that can later be output to the CSV file
rowToCSV
{ "repo_name": "luanx/stat", "path": "stat-util/src/main/java/com/wantdo/stat/excel/poi_src/ToCSV.java", "license": "apache-2.0", "size": 38493 }
[ "java.util.ArrayList", "org.apache.poi.ss.usermodel.Cell", "org.apache.poi.ss.usermodel.Row" ]
import java.util.ArrayList; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.ss.usermodel.Row;
import java.util.*; import org.apache.poi.ss.usermodel.*;
[ "java.util", "org.apache.poi" ]
java.util; org.apache.poi;
2,678,179
public static Type getReturnType(final Method method) { return getType(method.getReturnType()); }
static Type function(final Method method) { return getType(method.getReturnType()); }
/** * Returns the Java type corresponding to the return type of the given * method. * * @param method * a method. * @return the Java type corresponding to the return type of the given * method. */
Returns the Java type corresponding to the return type of the given method
getReturnType
{ "repo_name": "AlterRS/Deobfuscator", "path": "deps/alterrs/asm/Type.java", "license": "mit", "size": 22022 }
[ "java.lang.reflect.Method" ]
import java.lang.reflect.Method;
import java.lang.reflect.*;
[ "java.lang" ]
java.lang;
2,189,193
@Test public void includeInitialWindowSizeInFirstSettings_largeWindowSize() throws Exception { int initialWindowSize = 75535; // 65535 + 10000 startTransport( DEFAULT_START_STREAM_ID, null, true, DEFAULT_MAX_MESSAGE_SIZE, initialWindowSize, null); clientTransport.sendConnectionPrefaceAndSettings(); ArgumentCaptor<Settings> settings = ArgumentCaptor.forClass(Settings.class); verify(frameWriter, timeout(TIME_OUT_MS)).settings(settings.capture()); assertEquals(75535, settings.getValue().get(7)); verify(frameWriter, timeout(TIME_OUT_MS)).windowUpdate(0, 10000); }
void function() throws Exception { int initialWindowSize = 75535; startTransport( DEFAULT_START_STREAM_ID, null, true, DEFAULT_MAX_MESSAGE_SIZE, initialWindowSize, null); clientTransport.sendConnectionPrefaceAndSettings(); ArgumentCaptor<Settings> settings = ArgumentCaptor.forClass(Settings.class); verify(frameWriter, timeout(TIME_OUT_MS)).settings(settings.capture()); assertEquals(75535, settings.getValue().get(7)); verify(frameWriter, timeout(TIME_OUT_MS)).windowUpdate(0, 10000); }
/** * A "large" window size is anything over 65535 (the starting size for any connection-level * flow control value). */
A "large" window size is anything over 65535 (the starting size for any connection-level flow control value)
includeInitialWindowSizeInFirstSettings_largeWindowSize
{ "repo_name": "grpc/grpc-java", "path": "okhttp/src/test/java/io/grpc/okhttp/OkHttpClientTransportTest.java", "license": "apache-2.0", "size": 96916 }
[ "io.grpc.okhttp.internal.framed.Settings", "org.junit.Assert", "org.mockito.ArgumentCaptor", "org.mockito.Mockito" ]
import io.grpc.okhttp.internal.framed.Settings; import org.junit.Assert; import org.mockito.ArgumentCaptor; import org.mockito.Mockito;
import io.grpc.okhttp.internal.framed.*; import org.junit.*; import org.mockito.*;
[ "io.grpc.okhttp", "org.junit", "org.mockito" ]
io.grpc.okhttp; org.junit; org.mockito;
1,009,560
private String getManagerEmail(User manager, IWContext iwc) { try { Email email = getUserBusiness(iwc).getUserMail(manager); if (email != null) { String sEmail = email.getEmailAddress(); if (sEmail != null && !sEmail.equals("")) { return sEmail; } } return "-"; } catch (Exception e) { e.printStackTrace(); return "-"; } }
String function(User manager, IWContext iwc) { try { Email email = getUserBusiness(iwc).getUserMail(manager); if (email != null) { String sEmail = email.getEmailAddress(); if (sEmail != null && !sEmail.equals(STR-STR-"; } }
/** * Method getManagerEmail. * * @param manager * @return String */
Method getManagerEmail
getManagerEmail
{ "repo_name": "idega/platform2", "path": "src/se/idega/idegaweb/commune/presentation/ManagerView.java", "license": "gpl-3.0", "size": 8933 }
[ "com.idega.core.contact.data.Email", "com.idega.presentation.IWContext", "com.idega.user.data.User" ]
import com.idega.core.contact.data.Email; import com.idega.presentation.IWContext; import com.idega.user.data.User;
import com.idega.core.contact.data.*; import com.idega.presentation.*; import com.idega.user.data.*;
[ "com.idega.core", "com.idega.presentation", "com.idega.user" ]
com.idega.core; com.idega.presentation; com.idega.user;
1,274,677
VirtualFile getPresentableFile(CompileContext context, Module module, VirtualFile outputRoot, VirtualFile generatedFile);
VirtualFile getPresentableFile(CompileContext context, Module module, VirtualFile outputRoot, VirtualFile generatedFile);
/** * Used by make subsystem to obtain the file that should be opened in the editor instead of generated file if there were errors found * while compiling the generated file * * * @param context current compile context * @param module the module to which the generated file was attributed * @param outputRoot the compiler output root * @param generatedFile - the file that was generated by this compiler * @return substituting file that should be used for navigation in UI or null if no such substitutor is available * */
Used by make subsystem to obtain the file that should be opened in the editor instead of generated file if there were errors found while compiling the generated file
getPresentableFile
{ "repo_name": "leafclick/intellij-community", "path": "java/compiler/openapi/src/com/intellij/openapi/compiler/SourceGeneratingCompiler.java", "license": "apache-2.0", "size": 1963 }
[ "com.intellij.openapi.module.Module", "com.intellij.openapi.vfs.VirtualFile" ]
import com.intellij.openapi.module.Module; import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.module.*; import com.intellij.openapi.vfs.*;
[ "com.intellij.openapi" ]
com.intellij.openapi;
2,331,357
synchronized List<TaskStatus> getNonRunningTasks() { List<TaskStatus> result = new ArrayList<TaskStatus>(tasks.size()); for(Map.Entry<TaskAttemptID, TaskInProgress> task: tasks.entrySet()) { if (!runningTasks.containsKey(task.getKey())) { result.add(task.getValue().getStatus()); } } return result; }
synchronized List<TaskStatus> getNonRunningTasks() { List<TaskStatus> result = new ArrayList<TaskStatus>(tasks.size()); for(Map.Entry<TaskAttemptID, TaskInProgress> task: tasks.entrySet()) { if (!runningTasks.containsKey(task.getKey())) { result.add(task.getValue().getStatus()); } } return result; }
/** * Get the list of stored tasks on this task tracker. * @return */
Get the list of stored tasks on this task tracker
getNonRunningTasks
{ "repo_name": "apache/hadoop-mapreduce", "path": "src/java/org/apache/hadoop/mapred/TaskTracker.java", "license": "apache-2.0", "size": 120136 }
[ "java.util.ArrayList", "java.util.List", "java.util.Map" ]
import java.util.ArrayList; import java.util.List; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,402,387
if (couplingType == ResourceRefInfo.BRANCH_COUPLING_TIGHT) return 0x8000; // value of SQLServerXAResource.SSTRANSTIGHTLYCPLD (32768) // Loose branch coupling is default for Microsoft SQL Server return XAResource.TMNOFLAGS; }
if (couplingType == ResourceRefInfo.BRANCH_COUPLING_TIGHT) return 0x8000; return XAResource.TMNOFLAGS; }
/** * Returns the XA start flag for loose or tight branch coupling * * @param couplingType branch coupling type * @return XA start flag value for the specified coupling type */
Returns the XA start flag for loose or tight branch coupling
branchCouplingSupported
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.ws.jdbc/src/com/ibm/ws/rsadapter/impl/MicrosoftSQLServerHelper.java", "license": "epl-1.0", "size": 12692 }
[ "com.ibm.ws.resource.ResourceRefInfo", "javax.transaction.xa.XAResource" ]
import com.ibm.ws.resource.ResourceRefInfo; import javax.transaction.xa.XAResource;
import com.ibm.ws.resource.*; import javax.transaction.xa.*;
[ "com.ibm.ws", "javax.transaction" ]
com.ibm.ws; javax.transaction;
539,308
public void testToArray() { SortedLongSet set = perpare(testArray); long[] sortedArray = new long[testArray.length]; System.arraycopy(testArray, 0, sortedArray, 0, testArray.length); Arrays.sort(sortedArray); assertTrue(Arrays.equals(sortedArray, set.toArray())); SortedLongSet set0 = new SortedLongSet(); assertTrue(Arrays.equals(new long[]{}, set0.toArray())); }
void function() { SortedLongSet set = perpare(testArray); long[] sortedArray = new long[testArray.length]; System.arraycopy(testArray, 0, sortedArray, 0, testArray.length); Arrays.sort(sortedArray); assertTrue(Arrays.equals(sortedArray, set.toArray())); SortedLongSet set0 = new SortedLongSet(); assertTrue(Arrays.equals(new long[]{}, set0.toArray())); }
/** * Test method for {@link freenet.support.SortedLongSet#toArray()}. */
Test method for <code>freenet.support.SortedLongSet#toArray()</code>
testToArray
{ "repo_name": "saces/fred", "path": "test/freenet/support/SortedLongSetTest.java", "license": "gpl-2.0", "size": 4241 }
[ "java.util.Arrays" ]
import java.util.Arrays;
import java.util.*;
[ "java.util" ]
java.util;
2,479,731
public void addPropertyChangeListener( PropertyChangeListener listener) { listeners.addPropertyChangeListener( listener); }
void function( PropertyChangeListener listener) { listeners.addPropertyChangeListener( listener); }
/** * Adds a listener which will be notified of changes to the document title. */
Adds a listener which will be notified of changes to the document title
addPropertyChangeListener
{ "repo_name": "tensberg/jgloss-mirror", "path": "jgloss/src/main/java/jgloss/ui/html/JGlossHTMLDoc.java", "license": "gpl-2.0", "size": 27647 }
[ "java.beans.PropertyChangeListener" ]
import java.beans.PropertyChangeListener;
import java.beans.*;
[ "java.beans" ]
java.beans;
1,383,653
public static IoFuture<StreamConnection> performUpgrade(final XnioWorker worker, InetSocketAddress bindAddress, URI uri, final Map<String, List<String>> headers, ChannelListener<? super StreamConnection> openListener, ChannelListener<? super BoundChannel> bindListener, OptionMap optionMap, ExtendedHandshakeChecker handshakeChecker) { return new HttpUpgradeState<StreamConnection>(worker, null, bindAddress, uri, headers, openListener, bindListener, optionMap, handshakeChecker).doUpgrade(); }
static IoFuture<StreamConnection> function(final XnioWorker worker, InetSocketAddress bindAddress, URI uri, final Map<String, List<String>> headers, ChannelListener<? super StreamConnection> openListener, ChannelListener<? super BoundChannel> bindListener, OptionMap optionMap, ExtendedHandshakeChecker handshakeChecker) { return new HttpUpgradeState<StreamConnection>(worker, null, bindAddress, uri, headers, openListener, bindListener, optionMap, handshakeChecker).doUpgrade(); }
/** * Connects to the target server using HTTP upgrade. * * @param worker The worker * @param bindAddress The bind address * @param uri The URI to connect to * @param headers Any additional headers to include in the upgrade request. This must include an <code>Upgrade</code> header that specifies the type of upgrade being performed * @param openListener The open listener that is invoked once the HTTP upgrade is done * @param bindListener The bind listener that is invoked when the socket is bound * @param optionMap The option map for the connection * @param handshakeChecker A handshake checker that can be supplied to verify that the server returned a valid response to the upgrade request * @return An IoFuture of the connection */
Connects to the target server using HTTP upgrade
performUpgrade
{ "repo_name": "xnio/xnio", "path": "api/src/main/java/org/xnio/http/HttpUpgrade.java", "license": "apache-2.0", "size": 25849 }
[ "java.net.InetSocketAddress", "java.util.List", "java.util.Map", "org.xnio.ChannelListener", "org.xnio.IoFuture", "org.xnio.OptionMap", "org.xnio.StreamConnection", "org.xnio.XnioWorker", "org.xnio.channels.BoundChannel" ]
import java.net.InetSocketAddress; import java.util.List; import java.util.Map; import org.xnio.ChannelListener; import org.xnio.IoFuture; import org.xnio.OptionMap; import org.xnio.StreamConnection; import org.xnio.XnioWorker; import org.xnio.channels.BoundChannel;
import java.net.*; import java.util.*; import org.xnio.*; import org.xnio.channels.*;
[ "java.net", "java.util", "org.xnio", "org.xnio.channels" ]
java.net; java.util; org.xnio; org.xnio.channels;
1,346,959
public static Object call1(Callable fun, Scriptable thisObj, Object arg0, Context cx, Scriptable scope) { return fun.call(cx, scope, thisObj, new Object[] { arg0 } ); }
static Object function(Callable fun, Scriptable thisObj, Object arg0, Context cx, Scriptable scope) { return fun.call(cx, scope, thisObj, new Object[] { arg0 } ); }
/** * Implement ....(arg) call shrinking optimizer code. * * @param fun a {@link org.mozilla.javascript.Callable} object. * @param thisObj a {@link org.mozilla.javascript.Scriptable} object. * @param arg0 a {@link java.lang.Object} object. * @param cx a {@link org.mozilla.javascript.Context} object. * @param scope a {@link org.mozilla.javascript.Scriptable} object. * @return a {@link java.lang.Object} object. */
Implement ....(arg) call shrinking optimizer code
call1
{ "repo_name": "oswetto/LoboEvolution", "path": "LoboParser/src/main/java/org/mozilla/javascript/optimizer/OptRuntime.java", "license": "gpl-3.0", "size": 18041 }
[ "org.mozilla.javascript.Callable", "org.mozilla.javascript.Context", "org.mozilla.javascript.Scriptable" ]
import org.mozilla.javascript.Callable; import org.mozilla.javascript.Context; import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.*;
[ "org.mozilla.javascript" ]
org.mozilla.javascript;
1,284,369
public GenericSearchBuilder<T, K> op(String name, Object field, Op op) { return left(field, op, name); }
GenericSearchBuilder<T, K> function(String name, Object field, Op op) { return left(field, op, name); }
/** * Adds an condition that starts with open parenthesis. Use cp() to close * the parenthesis. * * @param name parameter name used to set the parameter value later. * @param field field of the entity object * @param op operator * @return this */
Adds an condition that starts with open parenthesis. Use cp() to close the parenthesis
op
{ "repo_name": "GabrielBrascher/cloudstack", "path": "framework/db/src/main/java/com/cloud/utils/db/GenericSearchBuilder.java", "license": "apache-2.0", "size": 9358 }
[ "com.cloud.utils.db.SearchCriteria" ]
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.*;
[ "com.cloud.utils" ]
com.cloud.utils;
497,635
public String getItemMetaDataByLabel(String label) { if (this.itemMetaDataSet != null) { for (Iterator i = this.itemMetaDataSet.iterator(); i.hasNext(); ) { ItemMetaDataIfc itemMetaData = (ItemMetaDataIfc) i.next(); if (itemMetaData.getLabel().equals(label)) { return itemMetaData.getEntry(); } } } return null; }
String function(String label) { if (this.itemMetaDataSet != null) { for (Iterator i = this.itemMetaDataSet.iterator(); i.hasNext(); ) { ItemMetaDataIfc itemMetaData = (ItemMetaDataIfc) i.next(); if (itemMetaData.getLabel().equals(label)) { return itemMetaData.getEntry(); } } } return null; }
/** * Get meta data by label * @param label * @return */
Get meta data by label
getItemMetaDataByLabel
{ "repo_name": "eemirtekin/Sakai-10.6-TR", "path": "samigo/samigo-services/src/java/org/sakaiproject/tool/assessment/facade/ItemFacade.java", "license": "apache-2.0", "size": 32871 }
[ "java.util.Iterator", "org.sakaiproject.tool.assessment.data.ifc.assessment.ItemMetaDataIfc" ]
import java.util.Iterator; import org.sakaiproject.tool.assessment.data.ifc.assessment.ItemMetaDataIfc;
import java.util.*; import org.sakaiproject.tool.assessment.data.ifc.assessment.*;
[ "java.util", "org.sakaiproject.tool" ]
java.util; org.sakaiproject.tool;
827,520
public synchronized String modifyScenario(final LGWorkloadConfig lrConfig) throws IOException { this.lrConfig = lrConfig; String newScenarioPath = null; if (lrConfig.getScenarioPath().endsWith(".lrs")) { newScenarioPath = lrConfig.getScenarioPath().replace(SCENARIO_FILE_SUFFIX, "_new" + SCENARIO_FILE_SUFFIX); } else { throw new IllegalArgumentException("Workload error. Scenario path must end with" + SCENARIO_FILE_SUFFIX + " !"); } // Read scenario file and save modified version in buffer String scriptName = null; StringBuilder writeBuffer = new StringBuilder(); BufferedReader reader = new BufferedReader(new InputStreamReader( new FileInputStream(lrConfig.getScenarioPath()), "UTF8")); String line = null; LinkedList<String> currentTags = new LinkedList<String>(); int numGroupUsers = lrConfig.getNumUsers(); int allUsersIndex = 0; while ((line = reader.readLine()) != null) { updateTagChange(currentTags, line); if (line.trim().startsWith(SCENARIO_PATH_KEY) && !currentTags.isEmpty() && currentTags.peek().equals(SCENARIO_PRIVATE_CONFIG_TAG)) { writeBuffer.append(SCENARIO_PATH_KEY); writeBuffer.append(newScenarioPath); } else if (line.trim().startsWith(V_USERS_KEY) && !currentTags.isEmpty() && currentTags.peek().equals(SCENARIO_PRIVATE_CONFIG_TAG)) { writeBuffer.append(V_USERS_KEY); allUsersIndex = writeBuffer.length(); } else if (currentTags.size() == 2 && currentTags.indexOf(TEST_CHIEF_TAG) == 1 && scriptName == null) { writeBuffer.append(line); scriptName = currentTags.peek(); } else if (!currentTags.isEmpty() && currentTags.peek().equals(GROUP_CHIEF_TAG)) { writeBuffer.append(line); writeBuffer.append(NEWLINE); generateWorkloadGroups(currentTags, reader, writeBuffer); writeBuffer.append(SCENARIO_TAG_CLOSE); } else if (!currentTags.isEmpty() && currentTags.peek().equals(SCHEDULER_CONFIG_TAG) && line.contains(SCHEDULING_PATTERN)) { writeBuffer.append(line); writeBuffer.append(NEWLINE); modifySchedule(currentTags, reader, writeBuffer, lrConfig.getNumUsers()); } else if 
(!currentTags.isEmpty() && currentTags.peek().equals(SCHEDULER_CONFIG_TAG) && line.contains(SCHEDULING_GROUPNAME_PATTERN)) { // skip old configuration while (line != null && !line.contains(SCHEDULING_GROUPNAME_PATTERN_END)) { line = reader.readLine(); } writeBuffer.append(SCHEDULING_GROUPNAME_PATTERN); writeBuffer.append(groupNames.getFirst().toLowerCase()); writeBuffer.append(SCHEDULING_GROUPNAME_PATTERN_END); numGroupUsers = groupUserNums.get(groupNames.pop()); } else { writeBuffer.append(line); } writeBuffer.append(NEWLINE); } reader.close(); // Insert number of vusers (cannot be appended when passing the // vusers-key, since the number may be affected when counting the // groups) writeBuffer.insert(allUsersIndex, lrConfig.getNumUsers()); // Write buffer to file BufferedWriter writer = new BufferedWriter(new FileWriter(newScenarioPath, false)); writer.write(writeBuffer.toString()); writer.flush(); writer.close(); return newScenarioPath; }
synchronized String function(final LGWorkloadConfig lrConfig) throws IOException { this.lrConfig = lrConfig; String newScenarioPath = null; if (lrConfig.getScenarioPath().endsWith(".lrs")) { newScenarioPath = lrConfig.getScenarioPath().replace(SCENARIO_FILE_SUFFIX, "_new" + SCENARIO_FILE_SUFFIX); } else { throw new IllegalArgumentException(STR + SCENARIO_FILE_SUFFIX + STR); } String scriptName = null; StringBuilder writeBuffer = new StringBuilder(); BufferedReader reader = new BufferedReader(new InputStreamReader( new FileInputStream(lrConfig.getScenarioPath()), "UTF8")); String line = null; LinkedList<String> currentTags = new LinkedList<String>(); int numGroupUsers = lrConfig.getNumUsers(); int allUsersIndex = 0; while ((line = reader.readLine()) != null) { updateTagChange(currentTags, line); if (line.trim().startsWith(SCENARIO_PATH_KEY) && !currentTags.isEmpty() && currentTags.peek().equals(SCENARIO_PRIVATE_CONFIG_TAG)) { writeBuffer.append(SCENARIO_PATH_KEY); writeBuffer.append(newScenarioPath); } else if (line.trim().startsWith(V_USERS_KEY) && !currentTags.isEmpty() && currentTags.peek().equals(SCENARIO_PRIVATE_CONFIG_TAG)) { writeBuffer.append(V_USERS_KEY); allUsersIndex = writeBuffer.length(); } else if (currentTags.size() == 2 && currentTags.indexOf(TEST_CHIEF_TAG) == 1 && scriptName == null) { writeBuffer.append(line); scriptName = currentTags.peek(); } else if (!currentTags.isEmpty() && currentTags.peek().equals(GROUP_CHIEF_TAG)) { writeBuffer.append(line); writeBuffer.append(NEWLINE); generateWorkloadGroups(currentTags, reader, writeBuffer); writeBuffer.append(SCENARIO_TAG_CLOSE); } else if (!currentTags.isEmpty() && currentTags.peek().equals(SCHEDULER_CONFIG_TAG) && line.contains(SCHEDULING_PATTERN)) { writeBuffer.append(line); writeBuffer.append(NEWLINE); modifySchedule(currentTags, reader, writeBuffer, lrConfig.getNumUsers()); } else if (!currentTags.isEmpty() && currentTags.peek().equals(SCHEDULER_CONFIG_TAG) && 
line.contains(SCHEDULING_GROUPNAME_PATTERN)) { while (line != null && !line.contains(SCHEDULING_GROUPNAME_PATTERN_END)) { line = reader.readLine(); } writeBuffer.append(SCHEDULING_GROUPNAME_PATTERN); writeBuffer.append(groupNames.getFirst().toLowerCase()); writeBuffer.append(SCHEDULING_GROUPNAME_PATTERN_END); numGroupUsers = groupUserNums.get(groupNames.pop()); } else { writeBuffer.append(line); } writeBuffer.append(NEWLINE); } reader.close(); writeBuffer.insert(allUsersIndex, lrConfig.getNumUsers()); BufferedWriter writer = new BufferedWriter(new FileWriter(newScenarioPath, false)); writer.write(writeBuffer.toString()); writer.flush(); writer.close(); return newScenarioPath; }
/** * Modifies the scenario file according to the passed workload * configuration. * * @param lrConfig * describes the modification on the scenario * @return the path to the new scenario file * @throws IOException * if scenario modification fails */
Modifies the scenario file according to the passed workload configuration
modifyScenario
{ "repo_name": "sopeco/LPE-Common", "path": "org.lpe.common.loadgenerator/src/org/lpe/common/loadgenerator/scenario/ScenarioModifier.java", "license": "apache-2.0", "size": 18030 }
[ "java.io.BufferedReader", "java.io.BufferedWriter", "java.io.FileInputStream", "java.io.FileWriter", "java.io.IOException", "java.io.InputStreamReader", "java.util.LinkedList", "org.lpe.common.loadgenerator.config.LGWorkloadConfig" ]
import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.FileInputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.util.LinkedList; import org.lpe.common.loadgenerator.config.LGWorkloadConfig;
import java.io.*; import java.util.*; import org.lpe.common.loadgenerator.config.*;
[ "java.io", "java.util", "org.lpe.common" ]
java.io; java.util; org.lpe.common;
1,400,316
/**
 * Supplies the thread-context stack captured by the wrapped event.
 *
 * @return the context stack of the underlying event
 */
@Override
public ThreadContext.ContextStack getContextStack() {
    final ThreadContext.ContextStack stack = event.getContextStack();
    return stack;
}
ThreadContext.ContextStack function() { return event.getContextStack(); }
/** * Returns a copy of the context stack. * @return a copy of the context stack. */
Returns a copy of the context stack
getContextStack
{ "repo_name": "xnslong/logging-log4j2", "path": "log4j-flume-ng/src/main/java/org/apache/logging/log4j/flume/appender/FlumeEvent.java", "license": "apache-2.0", "size": 11916 }
[ "org.apache.logging.log4j.ThreadContext" ]
import org.apache.logging.log4j.ThreadContext;
import org.apache.logging.log4j.*;
[ "org.apache.logging" ]
org.apache.logging;
913,056
/**
 * Returns the Java element this compilation unit was created from,
 * or null if it was not created from a Java element.
 *
 * @return the type root backing this compilation unit, possibly null
 */
public IJavaElement getJavaElement() {
    return typeRoot;
}
IJavaElement function() { return this.typeRoot; }
/** * The Java element (an <code>org.eclipse.jdt.core.ICompilationUnit</code> or an <code>org.eclipse.jdt.core.IClassFile</code>) * this compilation unit was created from, or <code>null</code> if it was not created from a Java element. * * @return the Java element this compilation unit was created from, or <code>null</code> if none * @since 3.1 * @see #getTypeRoot() */
The Java element (an <code>org.eclipse.jdt.core.ICompilationUnit</code> or an <code>org.eclipse.jdt.core.IClassFile</code>) this compilation unit was created from, or <code>null</code> if it was not created from a Java element
getJavaElement
{ "repo_name": "Niky4000/UsefulUtils", "path": "projects/others/eclipse-platform-parent/eclipse.jdt.core-master/org.eclipse.jdt.core/dom/org/eclipse/jdt/core/dom/CompilationUnit.java", "license": "gpl-3.0", "size": 41878 }
[ "org.eclipse.jdt.core.IJavaElement" ]
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.*;
[ "org.eclipse.jdt" ]
org.eclipse.jdt;
1,649,650
/**
 * Entry point: processes the whole Wikidata dump with a
 * {@link LifeExpectancyProcessor} and writes the final results to a file.
 * Which dump file to use and offline mode are configured in ExampleHelpers.
 *
 * @param args unused
 * @throws IOException if dump processing or result writing fails
 */
public static void main(String[] args) throws IOException {
    ExampleHelpers.configureLogging();
    LifeExpectancyProcessor.printDocumentation();
    final LifeExpectancyProcessor dumpProcessor = new LifeExpectancyProcessor();
    ExampleHelpers.processEntitiesFromWikidataDump(dumpProcessor);
    dumpProcessor.writeFinalResults();
}
static void function(String[] args) throws IOException { ExampleHelpers.configureLogging(); LifeExpectancyProcessor.printDocumentation(); LifeExpectancyProcessor processor = new LifeExpectancyProcessor(); ExampleHelpers.processEntitiesFromWikidataDump(processor); processor.writeFinalResults(); }
/** * Main method. Processes the whole dump using this processor and writes the * results to a file. To change which dump file to use and whether to run in * offline mode, modify the settings in {@link ExampleHelpers}. * * @param args * @throws IOException */
Main method. Processes the whole dump using this processor and writes the results to a file. To change which dump file to use and whether to run in offline mode, modify the settings in <code>ExampleHelpers</code>
main
{ "repo_name": "notconfusing/Wikidata-Toolkit", "path": "wdtk-examples/src/main/java/org/wikidata/wdtk/examples/LifeExpectancyProcessor.java", "license": "apache-2.0", "size": 5620 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
245,955
/**
 * Hook invoked just before the eldest (least-recently-used) entry is evicted.
 * Intentionally a no-op here; subclasses override it to observe or react to
 * evictions.
 *
 * @param eldest the map entry about to be removed
 */
protected void onEldestEntryRemove(Map.Entry<K, V> eldest) { }
void function(Map.Entry<K, V> eldest) { }
/** * Invoked when eldest entry is about to be removed. * * @param eldest eldest entry. */
Invoked when eldest entry is about to be removed
onEldestEntryRemove
{ "repo_name": "scriptella/scriptella-etl", "path": "core/src/java/scriptella/util/LRUMap.java", "license": "apache-2.0", "size": 1584 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,264,635
/**
 * REST endpoint that executes a registered GemFire function, targeting a
 * region, a set of members, a set of groups, or (by default) all members.
 * Function arguments arrive as a JSON document in the request body; the
 * caller is authorized against the function's required permissions before
 * execution. Results are returned as a JSON array; a NoResult collector
 * yields an empty 200 response.
 *
 * NOTE(review): fe.printStackTrace() in the final catch block is a logging
 * anti-pattern -- the cause is already preserved in the rethrown
 * GemfireRestException, so the stack-trace print should be removed or routed
 * through the class logger.
 */
@RequestMapping(method = RequestMethod.POST, value = "/{functionId:.+}", produces = {APPLICATION_JSON_UTF8_VALUE}) @ApiOperation(value = "execute function", notes = "Execute function with arguments on regions, members, or group(s). By default function will be executed on all nodes if none of (onRegion, onMembers, onGroups) specified") @ApiResponses({@ApiResponse(code = 200, message = "OK."), @ApiResponse(code = 401, message = "Invalid Username or Password."), @ApiResponse(code = 403, message = "Insufficient privileges for operation."), @ApiResponse(code = 500, message = "if GemFire throws an error or exception"), @ApiResponse(code = 400, message = "if Function arguments specified as JSON document in the request body is invalid")}) @ResponseBody @ResponseStatus(HttpStatus.OK) public ResponseEntity<String> execute(@PathVariable("functionId") String functionId, @RequestParam(value = "onRegion", required = false) String region, @RequestParam(value = "onMembers", required = false) final String[] members, @RequestParam(value = "onGroups", required = false) final String[] groups, @RequestParam(value = "filter", required = false) final String[] filter, @RequestBody(required = false) final String argsInBody) { Function<?> function = FunctionService.getFunction(functionId); // this exception will be handled by BaseControllerAdvice to eventually return a 404 if (function == null) { throw new EntityNotFoundException( String.format("The function %s is not registered.", functionId)); } Object[] args = null; if (argsInBody != null) { args = jsonToObjectArray(argsInBody); } // check for required permissions of the function Collection<ResourcePermission> requiredPermissions = function.getRequiredPermissions(region, args); for (ResourcePermission requiredPermission : requiredPermissions) { securityService.authorize(requiredPermission); } Execution<Object, ?, ?> execution; functionId = decode(functionId); if (StringUtils.hasText(region)) { execution = executeOnRegion(functionId, 
region, argsInBody); } else if (ArrayUtils.isNotEmpty(members)) { execution = executeOnMembers(functionId, members, argsInBody); } else if (ArrayUtils.isNotEmpty(groups)) { execution = executeOnGroups(functionId, groups, argsInBody); } else { execution = executeOnAllMembers(functionId, argsInBody); } if (!ArrayUtils.isEmpty(filter)) { logger.debug("Executing Function ({}) with filter ({})", functionId, ArrayUtils.toString(filter)); Set<String> filter1 = ArrayUtils.asSet(filter); execution = execution.withFilter(filter1); } final ResultCollector<?, ?> results; try { if (args != null) { // execute function with specified arguments if (args.length == 1) { results = execution.setArguments(args[0]).execute(functionId); } else { results = execution.setArguments(args).execute(functionId); } } else { // execute function with no args results = execution.execute(functionId); } } catch (ClassCastException cce) { throw new GemfireRestException("Key is of an inappropriate type for this region!", cce); } catch (NullPointerException npe) { throw new GemfireRestException( "Specified key is null and this region does not permit null keys!", npe); } catch (LowMemoryException lme) { throw new GemfireRestException("Server has encountered low memory condition!", lme); } catch (IllegalArgumentException ie) { throw new GemfireRestException("Input parameter is null! 
", ie); } catch (FunctionException fe) { throw new GemfireRestException("Server has encountered error while executing the function!", fe); } try { final HttpHeaders headers = new HttpHeaders(); headers.setLocation(toUri("functions", functionId)); Object functionResult; if (results instanceof NoResult) { return new ResponseEntity<>("", headers, HttpStatus.OK); } functionResult = results.getResult(); if (functionResult instanceof List<?>) { @SuppressWarnings("unchecked") String functionResultAsJson = JSONUtils.convertCollectionToJson((ArrayList<Object>) functionResult); return new ResponseEntity<>(functionResultAsJson, headers, HttpStatus.OK); } else { throw new GemfireRestException( "Function has returned results that could not be converted into Restful (JSON) format!"); } } catch (FunctionException fe) { fe.printStackTrace(); throw new GemfireRestException( "Server has encountered an error while processing function execution!", fe); } }
@RequestMapping(method = RequestMethod.POST, value = STR, produces = {APPLICATION_JSON_UTF8_VALUE}) @ApiOperation(value = STR, notes = STR) @ApiResponses({@ApiResponse(code = 200, message = "OK."), @ApiResponse(code = 401, message = STR), @ApiResponse(code = 403, message = STR), @ApiResponse(code = 500, message = STR), @ApiResponse(code = 400, message = STR)}) @ResponseStatus(HttpStatus.OK) ResponseEntity<String> function(@PathVariable(STR) String functionId, @RequestParam(value = STR, required = false) String region, @RequestParam(value = STR, required = false) final String[] members, @RequestParam(value = STR, required = false) final String[] groups, @RequestParam(value = STR, required = false) final String[] filter, @RequestBody(required = false) final String argsInBody) { Function<?> function = FunctionService.getFunction(functionId); if (function == null) { throw new EntityNotFoundException( String.format(STR, functionId)); } Object[] args = null; if (argsInBody != null) { args = jsonToObjectArray(argsInBody); } Collection<ResourcePermission> requiredPermissions = function.getRequiredPermissions(region, args); for (ResourcePermission requiredPermission : requiredPermissions) { securityService.authorize(requiredPermission); } Execution<Object, ?, ?> execution; functionId = decode(functionId); if (StringUtils.hasText(region)) { execution = executeOnRegion(functionId, region, argsInBody); } else if (ArrayUtils.isNotEmpty(members)) { execution = executeOnMembers(functionId, members, argsInBody); } else if (ArrayUtils.isNotEmpty(groups)) { execution = executeOnGroups(functionId, groups, argsInBody); } else { execution = executeOnAllMembers(functionId, argsInBody); } if (!ArrayUtils.isEmpty(filter)) { logger.debug(STR, functionId, ArrayUtils.toString(filter)); Set<String> filter1 = ArrayUtils.asSet(filter); execution = execution.withFilter(filter1); } final ResultCollector<?, ?> results; try { if (args != null) { if (args.length == 1) { results = 
execution.setArguments(args[0]).execute(functionId); } else { results = execution.setArguments(args).execute(functionId); } } else { results = execution.execute(functionId); } } catch (ClassCastException cce) { throw new GemfireRestException(STR, cce); } catch (NullPointerException npe) { throw new GemfireRestException( STR, npe); } catch (LowMemoryException lme) { throw new GemfireRestException(STR, lme); } catch (IllegalArgumentException ie) { throw new GemfireRestException(STR, ie); } catch (FunctionException fe) { throw new GemfireRestException(STR, fe); } try { final HttpHeaders headers = new HttpHeaders(); headers.setLocation(toUri(STR, functionId)); Object functionResult; if (results instanceof NoResult) { return new ResponseEntity<>(STRuncheckedSTRFunction has returned results that could not be converted into Restful (JSON) format!STRServer has encountered an error while processing function execution!", fe); } }
/** * Execute a function on Gemfire data node using REST API call. Arguments to the function are * passed as JSON string in the request body. * * @param functionId represents function to be executed * @param region list of regions on which function to be executed. * @param members list of nodes on which function to be executed. * @param groups list of groups on which function to be executed. * @param filter list of keys which the function will use to determine on which node to execute * the function. * @param argsInBody function argument as a JSON document * @return result as a JSON document */
Execute a function on Gemfire data node using REST API call. Arguments to the function are passed as JSON string in the request body
execute
{ "repo_name": "masaki-yamakawa/geode", "path": "geode-web-api/src/main/java/org/apache/geode/rest/internal/web/controllers/FunctionAccessController.java", "license": "apache-2.0", "size": 12867 }
[ "io.swagger.annotations.ApiOperation", "io.swagger.annotations.ApiResponse", "io.swagger.annotations.ApiResponses", "java.util.Collection", "java.util.Set", "org.apache.geode.cache.LowMemoryException", "org.apache.geode.cache.execute.Execution", "org.apache.geode.cache.execute.Function", "org.apache.geode.cache.execute.FunctionException", "org.apache.geode.cache.execute.FunctionService", "org.apache.geode.cache.execute.ResultCollector", "org.apache.geode.internal.cache.execute.NoResult", "org.apache.geode.management.internal.exceptions.EntityNotFoundException", "org.apache.geode.rest.internal.web.exception.GemfireRestException", "org.apache.geode.rest.internal.web.util.ArrayUtils", "org.apache.geode.security.ResourcePermission", "org.springframework.http.HttpHeaders", "org.springframework.http.HttpStatus", "org.springframework.http.ResponseEntity", "org.springframework.util.StringUtils", "org.springframework.web.bind.annotation.PathVariable", "org.springframework.web.bind.annotation.RequestBody", "org.springframework.web.bind.annotation.RequestMapping", "org.springframework.web.bind.annotation.RequestMethod", "org.springframework.web.bind.annotation.RequestParam", "org.springframework.web.bind.annotation.ResponseStatus" ]
import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; import java.util.Collection; import java.util.Set; import org.apache.geode.cache.LowMemoryException; import org.apache.geode.cache.execute.Execution; import org.apache.geode.cache.execute.Function; import org.apache.geode.cache.execute.FunctionException; import org.apache.geode.cache.execute.FunctionService; import org.apache.geode.cache.execute.ResultCollector; import org.apache.geode.internal.cache.execute.NoResult; import org.apache.geode.management.internal.exceptions.EntityNotFoundException; import org.apache.geode.rest.internal.web.exception.GemfireRestException; import org.apache.geode.rest.internal.web.util.ArrayUtils; import org.apache.geode.security.ResourcePermission; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus;
import io.swagger.annotations.*; import java.util.*; import org.apache.geode.cache.*; import org.apache.geode.cache.execute.*; import org.apache.geode.internal.cache.execute.*; import org.apache.geode.management.internal.exceptions.*; import org.apache.geode.rest.internal.web.exception.*; import org.apache.geode.rest.internal.web.util.*; import org.apache.geode.security.*; import org.springframework.http.*; import org.springframework.util.*; import org.springframework.web.bind.annotation.*;
[ "io.swagger.annotations", "java.util", "org.apache.geode", "org.springframework.http", "org.springframework.util", "org.springframework.web" ]
io.swagger.annotations; java.util; org.apache.geode; org.springframework.http; org.springframework.util; org.springframework.web;
134,804
/**
 * Creates a single new group in a Moodle course by delegating to the
 * batch {@code createGroups} web-service call.
 *
 * @param group the group to create
 * @return the created MoodleGroup returned by the batch call
 * @throws MoodleRestGroupException
 * @throws UnsupportedEncodingException
 * @throws MoodleRestException
 */
public static MoodleGroup createGroup(MoodleGroup group) throws MoodleRestGroupException, UnsupportedEncodingException, MoodleRestException {
    MoodleGroup[] created = createGroups(new MoodleGroup[] { group });
    return created[0];
}
static MoodleGroup function(MoodleGroup group) throws MoodleRestGroupException, UnsupportedEncodingException, MoodleRestException { MoodleGroup[] a=new MoodleGroup[1]; a[0]=group; MoodleGroup[] gps=createGroups(a); return gps[0]; }
/** * <p>Method to create a new group in a Moodle course.</p> * * @param group MoodleGroup * @return group MoodleGroup object * @throws MoodleRestGroupException * @throws UnsupportedEncodingException * @throws MoodleRestException */
Method to create a new group in a Moodle course
createGroup
{ "repo_name": "bantonia/MoodleRest", "path": "src/net/beaconhillcott/moodlerest/MoodleRestGroup.java", "license": "gpl-2.0", "size": 65345 }
[ "java.io.UnsupportedEncodingException" ]
import java.io.UnsupportedEncodingException;
import java.io.*;
[ "java.io" ]
java.io;
2,315,587
/**
 * Rename this table.
 *
 * @param name the new table name
 * @return a new JClusters built with the given name (the second constructor
 *         argument is reset to null)
 */
@Override public JClusters rename(Name name) { return new JClusters(name, null); } // ------------------------------------------------------------------------- // Row5 type methods // -------------------------------------------------------------------------
JClusters function(Name name) { return new JClusters(name, null); }
/** * Rename this table */
Rename this table
rename
{ "repo_name": "reportportal/commons-dao", "path": "src/main/java/com/epam/ta/reportportal/jooq/tables/JClusters.java", "license": "apache-2.0", "size": 5114 }
[ "org.jooq.Name" ]
import org.jooq.Name;
import org.jooq.*;
[ "org.jooq" ]
org.jooq;
2,675,743
/** Computes the CRC32 checksum of the payload and returns it as a byte array. */
byte[] getDigest() {
    return Hashing.crc32().hashBytes(this.payload).asBytes();
}
byte[] getDigest() { return Hashing.crc32().hashBytes(payload).asBytes(); }
/** * crc32 on payload */
crc32 on payload
getDigest
{ "repo_name": "hdsdi3g/MyDMAM", "path": "app/hd3gtv/mydmam/embddb/store/StoreItem.java", "license": "lgpl-3.0", "size": 5809 }
[ "com.google.common.hash.Hashing" ]
import com.google.common.hash.Hashing;
import com.google.common.hash.*;
[ "com.google.common" ]
com.google.common;
2,233,545
/**
 * onClick handler (wired in activity_main.xml) that fires MyIntentService
 * with ACTION_NO1 and a parameter naming the calling thread.
 *
 * @param view the clicked button (unused)
 */
public void clickButtonFireIntentService(View view) {
    final Intent request = new Intent(this, MyIntentService.class);
    request.setAction(MyIntentService.ACTION_NO1);
    final String param = "Paul on Thread #" + Thread.currentThread().getId();
    request.putExtra(MyIntentService.EXTRA_PARAM1, param);
    startService(request);
}
void function(View view) { Intent serviceRequest = new Intent(this, MyIntentService.class); serviceRequest.setAction(MyIntentService.ACTION_NO1); serviceRequest.putExtra(MyIntentService.EXTRA_PARAM1, STR + Thread.currentThread().getId()); startService(serviceRequest); }
/** * onClick listener for button, defined in activity_main.xml */
onClick listener for button, defined in activity_main.xml
clickButtonFireIntentService
{ "repo_name": "matboehmer/moco2016-ExamplesAndDemos", "path": "02-BackgroundOperationsApp/src/main/java/io/moxd/moco2016/backgroundoperations/MainActivity.java", "license": "mit", "size": 5835 }
[ "android.content.Intent", "android.view.View" ]
import android.content.Intent; import android.view.View;
import android.content.*; import android.view.*;
[ "android.content", "android.view" ]
android.content; android.view;
2,441,186
/**
 * Returns the renderer's support level for an individual track: the stored
 * per-track flags masked with RendererCapabilities.FORMAT_SUPPORT_MASK.
 *
 * @param rendererIndex The renderer index.
 * @param groupIndex The index of the track group to which the track belongs.
 * @param trackIndex The index of the track within the track group.
 * @return the format-support constant for the track
 */
public int getTrackSupport(int rendererIndex, int groupIndex, int trackIndex) {
    final int formatSupport = rendererFormatSupports[rendererIndex][groupIndex][trackIndex];
    return formatSupport & RendererCapabilities.FORMAT_SUPPORT_MASK;
}
int function(int rendererIndex, int groupIndex, int trackIndex) { return rendererFormatSupports[rendererIndex][groupIndex][trackIndex] & RendererCapabilities.FORMAT_SUPPORT_MASK; }
/** * Returns the extent to which an individual track is supported by the renderer. * * @param rendererIndex The renderer index. * @param groupIndex The index of the track group to which the track belongs. * @param trackIndex The index of the track within the track group. * @return One of {@link RendererCapabilities#FORMAT_HANDLED}, {@link * RendererCapabilities#FORMAT_EXCEEDS_CAPABILITIES}, {@link * RendererCapabilities#FORMAT_UNSUPPORTED_DRM}, {@link * RendererCapabilities#FORMAT_UNSUPPORTED_SUBTYPE} and {@link * RendererCapabilities#FORMAT_UNSUPPORTED_TYPE}. */
Returns the extent to which an individual track is supported by the renderer
getTrackSupport
{ "repo_name": "MaTriXy/ExoPlayer", "path": "library/core/src/main/java/com/google/android/exoplayer2/trackselection/MappingTrackSelector.java", "license": "apache-2.0", "size": 23182 }
[ "com.google.android.exoplayer2.RendererCapabilities" ]
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.*;
[ "com.google.android" ]
com.google.android;
2,796,169
@Override protected synchronized void stopInternal() throws LifecycleException { setState(LifecycleState.STOPPING); // Stop the Valves in our pipeline (including the basic), if any Valve current = first; if (current == null) { current = basic; } while (current != null) { if (current instanceof Lifecycle) ((Lifecycle) current).stop(); current = current.getNext(); } }
synchronized void function() throws LifecycleException { setState(LifecycleState.STOPPING); Valve current = first; if (current == null) { current = basic; } while (current != null) { if (current instanceof Lifecycle) ((Lifecycle) current).stop(); current = current.getNext(); } }
/** * Stop {@link Valve}s) in this pipeline and implement the requirements * of {@link LifecycleBase#stopInternal()}. * * @exception LifecycleException if this component detects a fatal error * that prevents this component from being used */
Stop <code>Valve</code>s) in this pipeline and implement the requirements of <code>LifecycleBase#stopInternal()</code>
stopInternal
{ "repo_name": "pistolove/sourcecode4junit", "path": "Source4Tomcat/src/org/apache/catalina/core/StandardPipeline.java", "license": "apache-2.0", "size": 14412 }
[ "org.apache.catalina.Lifecycle", "org.apache.catalina.LifecycleException", "org.apache.catalina.LifecycleState", "org.apache.catalina.Valve" ]
import org.apache.catalina.Lifecycle; import org.apache.catalina.LifecycleException; import org.apache.catalina.LifecycleState; import org.apache.catalina.Valve;
import org.apache.catalina.*;
[ "org.apache.catalina" ]
org.apache.catalina;
2,077,044
/**
 * Obtains the schema of a fixed value. The default representation casts to
 * GenericContainer and asks it for its schema; may be overridden for
 * alternate fixed representations.
 */
protected Schema getFixedSchema(Object fixed) {
    final GenericContainer container = (GenericContainer) fixed;
    return container.getSchema();
}
Schema function(Object fixed) { return ((GenericContainer)fixed).getSchema(); }
/** Called to obtain the schema of a fixed. By default calls * {@link GenericContainer#getSchema()}. May be overridden for alternate fixed * representations. */
Called to obtain the schema of a fixed. By default calls {@link GenericContainer#getSchema()}. May be overridden for alternate fixed representations
getFixedSchema
{ "repo_name": "relateiq/avro", "path": "lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java", "license": "apache-2.0", "size": 40400 }
[ "org.apache.avro.Schema" ]
import org.apache.avro.Schema;
import org.apache.avro.*;
[ "org.apache.avro" ]
org.apache.avro;
349,465
/**
 * Sends a mute command to the AVR, derived from the given openHAB command.
 *
 * @param command the openHAB command to translate into an AVR mute request
 * @param zone the AVR zone the command targets
 * @return presumably whether the command was sent successfully -- TODO confirm
 *         against the implementing classes
 * @throws CommandTypeNotSupportedException if the command type cannot be
 *         mapped to a mute request
 */
public boolean sendMuteCommand(Command command, int zone) throws CommandTypeNotSupportedException;
boolean function(Command command, int zone) throws CommandTypeNotSupportedException;
/** * Send a mute command to the AVR based on the openHAB command * * @param command * @param zone * @return */
Send a mute command to the AVR based on the openHAB command
sendMuteCommand
{ "repo_name": "sebmarchand/openhab2-addons", "path": "addons/binding/org.openhab.binding.pioneeravr/src/main/java/org/openhab/binding/pioneeravr/protocol/AvrConnection.java", "license": "epl-1.0", "size": 3204 }
[ "org.eclipse.smarthome.core.types.Command" ]
import org.eclipse.smarthome.core.types.Command;
import org.eclipse.smarthome.core.types.*;
[ "org.eclipse.smarthome" ]
org.eclipse.smarthome;
10,602
/**
 * Base-class stub for the GetIamPolicy RPC of the CloudTasks service:
 * responds UNIMPLEMENTED to the caller until a concrete service
 * implementation overrides it. (Generated gRPC code -- do not edit by hand.)
 */
public void getIamPolicy( com.google.iam.v1.GetIamPolicyRequest request, io.grpc.stub.StreamObserver<com.google.iam.v1.Policy> responseObserver) { asyncUnimplementedUnaryCall(getGetIamPolicyMethodHelper(), responseObserver); }
void function( com.google.iam.v1.GetIamPolicyRequest request, io.grpc.stub.StreamObserver<com.google.iam.v1.Policy> responseObserver) { asyncUnimplementedUnaryCall(getGetIamPolicyMethodHelper(), responseObserver); }
/** * * * <pre> * Gets the access control policy for a * [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty policy if the * resource exists and does not have a policy set. * Authorization requires the following * [Google IAM](https://cloud.google.com/iam) permission on the specified * resource parent: * * `cloudtasks.queues.getIamPolicy` * </pre> */
<code> Gets the access control policy for a [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty policy if the resource exists and does not have a policy set. Authorization requires the following [Google IAM](HREF) permission on the specified resource parent: `cloudtasks.queues.getIamPolicy` </code>
getIamPolicy
{ "repo_name": "vam-google/google-cloud-java", "path": "google-api-grpc/grpc-google-cloud-tasks-v2beta2/src/main/java/com/google/cloud/tasks/v2beta2/CloudTasksGrpc.java", "license": "apache-2.0", "size": 139467 }
[ "io.grpc.stub.ServerCalls" ]
import io.grpc.stub.ServerCalls;
import io.grpc.stub.*;
[ "io.grpc.stub" ]
io.grpc.stub;
896,287
JSONArray getPlatformPluginsForUpdate(VersionNumber from, VersionNumber to) { Jenkins jenkins = Jenkins.get(); JSONArray pluginCategories = JSONArray.fromObject(getPlatformPluginList().toString()); for (Iterator<?> categoryIterator = pluginCategories.iterator(); categoryIterator.hasNext();) { Object category = categoryIterator.next(); if (category instanceof JSONObject) { JSONObject cat = (JSONObject)category; JSONArray plugins = cat.getJSONArray("plugins"); nextPlugin: for (Iterator<?> pluginIterator = plugins.iterator(); pluginIterator.hasNext();) { Object pluginData = pluginIterator.next(); if (pluginData instanceof JSONObject) { JSONObject plugin = (JSONObject)pluginData; if (plugin.has("added")) { String sinceVersion = plugin.getString("added"); if (sinceVersion != null) { VersionNumber v = new VersionNumber(sinceVersion); if(v.compareTo(to) <= 0 && v.compareTo(from) > 0) { // This plugin is valid, we'll leave "suggested" state // to match the experience during install // but only add it if it's currently uninstalled String pluginName = plugin.getString("name"); if (null == jenkins.getPluginManager().getPlugin(pluginName)) { // Also check that a compatible version exists in an update site boolean foundCompatibleVersion = false; for (UpdateSite site : jenkins.getUpdateCenter().getSiteList()) { UpdateSite.Plugin sitePlug = site.getPlugin(pluginName); if (sitePlug != null && !sitePlug.isForNewerHudson() && !sitePlug.isForNewerJava() && !sitePlug.isNeededDependenciesForNewerJenkins()) { foundCompatibleVersion = true; break; } } if (foundCompatibleVersion) { continue nextPlugin; } } } } } } pluginIterator.remove(); } if (plugins.isEmpty()) { categoryIterator.remove(); } } } return pluginCategories; }
JSONArray getPlatformPluginsForUpdate(VersionNumber from, VersionNumber to) { Jenkins jenkins = Jenkins.get(); JSONArray pluginCategories = JSONArray.fromObject(getPlatformPluginList().toString()); for (Iterator<?> categoryIterator = pluginCategories.iterator(); categoryIterator.hasNext();) { Object category = categoryIterator.next(); if (category instanceof JSONObject) { JSONObject cat = (JSONObject)category; JSONArray plugins = cat.getJSONArray(STR); nextPlugin: for (Iterator<?> pluginIterator = plugins.iterator(); pluginIterator.hasNext();) { Object pluginData = pluginIterator.next(); if (pluginData instanceof JSONObject) { JSONObject plugin = (JSONObject)pluginData; if (plugin.has("added")) { String sinceVersion = plugin.getString("added"); if (sinceVersion != null) { VersionNumber v = new VersionNumber(sinceVersion); if(v.compareTo(to) <= 0 && v.compareTo(from) > 0) { String pluginName = plugin.getString("name"); if (null == jenkins.getPluginManager().getPlugin(pluginName)) { boolean foundCompatibleVersion = false; for (UpdateSite site : jenkins.getUpdateCenter().getSiteList()) { UpdateSite.Plugin sitePlug = site.getPlugin(pluginName); if (sitePlug != null && !sitePlug.isForNewerHudson() && !sitePlug.isForNewerJava() && !sitePlug.isNeededDependenciesForNewerJenkins()) { foundCompatibleVersion = true; break; } } if (foundCompatibleVersion) { continue nextPlugin; } } } } } } pluginIterator.remove(); } if (plugins.isEmpty()) { categoryIterator.remove(); } } } return pluginCategories; }
/** * Get the platform plugins added in the version range */
Get the platform plugins added in the version range
getPlatformPluginsForUpdate
{ "repo_name": "damianszczepanik/jenkins", "path": "core/src/main/java/jenkins/install/SetupWizard.java", "license": "mit", "size": 33878 }
[ "hudson.model.UpdateSite", "hudson.util.VersionNumber", "java.util.Iterator", "net.sf.json.JSONArray", "net.sf.json.JSONObject" ]
import hudson.model.UpdateSite; import hudson.util.VersionNumber; import java.util.Iterator; import net.sf.json.JSONArray; import net.sf.json.JSONObject;
import hudson.model.*; import hudson.util.*; import java.util.*; import net.sf.json.*;
[ "hudson.model", "hudson.util", "java.util", "net.sf.json" ]
hudson.model; hudson.util; java.util; net.sf.json;
1,444,710
public void eraseInArea(int type) throws IOException;
void function(int type) throws IOException;
/** * Erases the text an attributes within the current area * * @param type the type of erasure (0=erase to beginning of screen, 1=erase to end of screen, 2=erase all) */
Erases the text an attributes within the current area
eraseInArea
{ "repo_name": "appnativa/rare", "path": "source/tenletd/com/appnativa/rare/terminal/iDisplay.java", "license": "gpl-3.0", "size": 15301 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,282,668
public static List<Job> closedOrUnallocatedJobs(ClusterState clusterState) { PersistentTasksCustomMetadata persistentTasks = clusterState.metadata().custom(PersistentTasksCustomMetadata.TYPE); Set<String> openJobIds = MlTasks.openJobIds(persistentTasks); openJobIds.removeAll(MlTasks.unassignedJobIds(persistentTasks, clusterState.nodes())); MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterState); return mlMetadata.getJobs().values().stream() .filter(job -> openJobIds.contains(job.getId()) == false) .collect(Collectors.toList()); }
static List<Job> function(ClusterState clusterState) { PersistentTasksCustomMetadata persistentTasks = clusterState.metadata().custom(PersistentTasksCustomMetadata.TYPE); Set<String> openJobIds = MlTasks.openJobIds(persistentTasks); openJobIds.removeAll(MlTasks.unassignedJobIds(persistentTasks, clusterState.nodes())); MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterState); return mlMetadata.getJobs().values().stream() .filter(job -> openJobIds.contains(job.getId()) == false) .collect(Collectors.toList()); }
/** * Find the configurations for all closed jobs and the jobs that * do not have an allocation in the cluster state. * Closed jobs are those that do not have an associated persistent task, * unallocated jobs have a task but no executing node * * @param clusterState The cluster state * @return The closed job configurations */
Find the configurations for all closed jobs and the jobs that do not have an allocation in the cluster state. Closed jobs are those that do not have an associated persistent task, unallocated jobs have a task but no executing node
closedOrUnallocatedJobs
{ "repo_name": "HonzaKral/elasticsearch", "path": "x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java", "license": "apache-2.0", "size": 32521 }
[ "java.util.List", "java.util.Set", "java.util.stream.Collectors", "org.elasticsearch.cluster.ClusterState", "org.elasticsearch.persistent.PersistentTasksCustomMetadata", "org.elasticsearch.xpack.core.ml.MlMetadata", "org.elasticsearch.xpack.core.ml.MlTasks", "org.elasticsearch.xpack.core.ml.job.config.Job" ]
import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.util.*; import java.util.stream.*; import org.elasticsearch.cluster.*; import org.elasticsearch.persistent.*; import org.elasticsearch.xpack.core.ml.*; import org.elasticsearch.xpack.core.ml.job.config.*;
[ "java.util", "org.elasticsearch.cluster", "org.elasticsearch.persistent", "org.elasticsearch.xpack" ]
java.util; org.elasticsearch.cluster; org.elasticsearch.persistent; org.elasticsearch.xpack;
1,622,217
private static MethodGenerator compileInit(NodeSortRecordGenerator sortRecord, ConstantPoolGen cpg, String className) { final InstructionList il = new InstructionList(); final MethodGenerator init = new MethodGenerator(ACC_PUBLIC, com.sun.org.apache.bcel.internal.generic.Type.VOID, null, null, "<init>", className, il, cpg); // Call the constructor in the NodeSortRecord superclass il.append(ALOAD_0); il.append(new INVOKESPECIAL(cpg.addMethodref(NODE_SORT_RECORD, "<init>", "()V"))); il.append(RETURN); return init; }
static MethodGenerator function(NodeSortRecordGenerator sortRecord, ConstantPoolGen cpg, String className) { final InstructionList il = new InstructionList(); final MethodGenerator init = new MethodGenerator(ACC_PUBLIC, com.sun.org.apache.bcel.internal.generic.Type.VOID, null, null, STR, className, il, cpg); il.append(ALOAD_0); il.append(new INVOKESPECIAL(cpg.addMethodref(NODE_SORT_RECORD, STR, "()V"))); il.append(RETURN); return init; }
/** * Create a constructor for the new class. Updates the reference to the * collator in the super calls only when the stylesheet specifies a new * language in xsl:sort. */
Create a constructor for the new class. Updates the reference to the collator in the super calls only when the stylesheet specifies a new language in xsl:sort
compileInit
{ "repo_name": "wangsongpeng/jdk-src", "path": "src/main/java/com/sun/org/apache/xalan/internal/xsltc/compiler/Sort.java", "license": "apache-2.0", "size": 30304 }
[ "com.sun.org.apache.bcel.internal.generic.ConstantPoolGen", "com.sun.org.apache.bcel.internal.generic.InstructionList", "com.sun.org.apache.xalan.internal.xsltc.compiler.util.MethodGenerator", "com.sun.org.apache.xalan.internal.xsltc.compiler.util.NodeSortRecordGenerator", "com.sun.org.apache.xalan.internal.xsltc.compiler.util.Type" ]
import com.sun.org.apache.bcel.internal.generic.ConstantPoolGen; import com.sun.org.apache.bcel.internal.generic.InstructionList; import com.sun.org.apache.xalan.internal.xsltc.compiler.util.MethodGenerator; import com.sun.org.apache.xalan.internal.xsltc.compiler.util.NodeSortRecordGenerator; import com.sun.org.apache.xalan.internal.xsltc.compiler.util.Type;
import com.sun.org.apache.bcel.internal.generic.*; import com.sun.org.apache.xalan.internal.xsltc.compiler.util.*;
[ "com.sun.org" ]
com.sun.org;
2,032,135
@Deprecated public ResourceApk packWithResources( Artifact resourceApk, RuleContext ruleContext, NestedSet<ResourceContainer> resourceContainers, boolean createSource, Artifact proguardCfg) { TransitiveInfoCollection resourcesPrerequisite = ruleContext.getPrerequisite("resources", Mode.TARGET); ResourceContainer resourceContainer = Iterables.getOnlyElement( resourcesPrerequisite.getProvider(AndroidResourcesProvider.class) .getTransitiveAndroidResources()); // Dealing with Android library projects if (Iterables.size(resourceContainers) > 1) { if (resourceContainer.getConstantsInlined() && !resourceContainer.getArtifacts(ResourceType.RESOURCES).isEmpty()) { ruleContext.ruleError("This android_binary depends on an android_library, so the" + " resources '" + AndroidCommon.getAndroidResources(ruleContext).getLabel() + "' should have the attribute inline_constants set to 0"); throw new RuleConfigurationException(); } } // This binary depends on a library project, so we need to regenerate the // resources. The resulting sources and apk will combine all the resources // contained in the transitive closure of the binary. 
AndroidAaptActionHelper aaptActionHelper = new AndroidAaptActionHelper(ruleContext, getManifest(), Lists.newArrayList(resourceContainers)); List<String> resourceConfigurationFilters = ruleContext.getTokenizedStringListAttr("resource_configuration_filters"); List<String> uncompressedExtensions = ruleContext.getTokenizedStringListAttr("nocompress_extensions"); ImmutableList.Builder<String> additionalAaptOpts = ImmutableList.<String>builder(); for (String extension : uncompressedExtensions) { additionalAaptOpts.add("-0").add(extension); } if (!resourceConfigurationFilters.isEmpty()) { additionalAaptOpts.add("-c").add(Joiner.on(",").join(resourceConfigurationFilters)); } Artifact javaSourcesJar = null; if (createSource) { javaSourcesJar = ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.ANDROID_JAVA_SOURCE_JAR); aaptActionHelper.createGenerateResourceSymbolsAction( javaSourcesJar, null, resourceContainer.getJavaPackage(), true); } List<String> densities = ruleContext.getTokenizedStringListAttr("densities"); aaptActionHelper.createGenerateApkAction(resourceApk, resourceContainer.getRenameManifestPackage(), additionalAaptOpts.build(), densities); ResourceContainer updatedResources = new ResourceContainer( ruleContext.getLabel(), resourceContainer.getJavaPackage(), resourceContainer.getRenameManifestPackage(), resourceContainer.getConstantsInlined(), resourceApk, getManifest(), javaSourcesJar, resourceContainer.getArtifacts(ResourceType.ASSETS), resourceContainer.getArtifacts(ResourceType.RESOURCES), resourceContainer.getRoots(ResourceType.ASSETS), resourceContainer.getRoots(ResourceType.RESOURCES), resourceContainer.isManifestExported(), resourceContainer.getRTxt(), null); aaptActionHelper.createGenerateProguardAction(proguardCfg); return new ResourceApk(resourceApk, updatedResources.getJavaSourceJar(), resourceContainers, updatedResources, manifest, proguardCfg, true); }
ResourceApk function( Artifact resourceApk, RuleContext ruleContext, NestedSet<ResourceContainer> resourceContainers, boolean createSource, Artifact proguardCfg) { TransitiveInfoCollection resourcesPrerequisite = ruleContext.getPrerequisite(STR, Mode.TARGET); ResourceContainer resourceContainer = Iterables.getOnlyElement( resourcesPrerequisite.getProvider(AndroidResourcesProvider.class) .getTransitiveAndroidResources()); if (Iterables.size(resourceContainers) > 1) { if (resourceContainer.getConstantsInlined() && !resourceContainer.getArtifacts(ResourceType.RESOURCES).isEmpty()) { ruleContext.ruleError(STR + STR + AndroidCommon.getAndroidResources(ruleContext).getLabel() + STR); throw new RuleConfigurationException(); } } AndroidAaptActionHelper aaptActionHelper = new AndroidAaptActionHelper(ruleContext, getManifest(), Lists.newArrayList(resourceContainers)); List<String> resourceConfigurationFilters = ruleContext.getTokenizedStringListAttr(STR); List<String> uncompressedExtensions = ruleContext.getTokenizedStringListAttr(STR); ImmutableList.Builder<String> additionalAaptOpts = ImmutableList.<String>builder(); for (String extension : uncompressedExtensions) { additionalAaptOpts.add("-0").add(extension); } if (!resourceConfigurationFilters.isEmpty()) { additionalAaptOpts.add("-c").add(Joiner.on(",").join(resourceConfigurationFilters)); } Artifact javaSourcesJar = null; if (createSource) { javaSourcesJar = ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.ANDROID_JAVA_SOURCE_JAR); aaptActionHelper.createGenerateResourceSymbolsAction( javaSourcesJar, null, resourceContainer.getJavaPackage(), true); } List<String> densities = ruleContext.getTokenizedStringListAttr(STR); aaptActionHelper.createGenerateApkAction(resourceApk, resourceContainer.getRenameManifestPackage(), additionalAaptOpts.build(), densities); ResourceContainer updatedResources = new ResourceContainer( ruleContext.getLabel(), resourceContainer.getJavaPackage(), 
resourceContainer.getRenameManifestPackage(), resourceContainer.getConstantsInlined(), resourceApk, getManifest(), javaSourcesJar, resourceContainer.getArtifacts(ResourceType.ASSETS), resourceContainer.getArtifacts(ResourceType.RESOURCES), resourceContainer.getRoots(ResourceType.ASSETS), resourceContainer.getRoots(ResourceType.RESOURCES), resourceContainer.isManifestExported(), resourceContainer.getRTxt(), null); aaptActionHelper.createGenerateProguardAction(proguardCfg); return new ResourceApk(resourceApk, updatedResources.getJavaSourceJar(), resourceContainers, updatedResources, manifest, proguardCfg, true); }
/** * Packages up the manifest with resources, and generates the R.java. * * @deprecated in favor of {@link ApplicationManifest#packWithDataAndResources}. */
Packages up the manifest with resources, and generates the R.java
packWithResources
{ "repo_name": "Krasnyanskiy/bazel", "path": "src/main/java/com/google/devtools/build/lib/rules/android/ApplicationManifest.java", "license": "apache-2.0", "size": 20232 }
[ "com.google.common.base.Joiner", "com.google.common.collect.ImmutableList", "com.google.common.collect.Iterables", "com.google.common.collect.Lists", "com.google.devtools.build.lib.actions.Artifact", "com.google.devtools.build.lib.analysis.RuleConfiguredTarget", "com.google.devtools.build.lib.analysis.RuleContext", "com.google.devtools.build.lib.analysis.TransitiveInfoCollection", "com.google.devtools.build.lib.collect.nestedset.NestedSet", "com.google.devtools.build.lib.rules.android.AndroidResourcesProvider", "java.util.List" ]
import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.RuleConfiguredTarget; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.TransitiveInfoCollection; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.rules.android.AndroidResourcesProvider; import java.util.List;
import com.google.common.base.*; import com.google.common.collect.*; import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.analysis.*; import com.google.devtools.build.lib.collect.nestedset.*; import com.google.devtools.build.lib.rules.android.*; import java.util.*;
[ "com.google.common", "com.google.devtools", "java.util" ]
com.google.common; com.google.devtools; java.util;
1,605,524
List<OFMessage> getMessagesFromCapture() { List<OFMessage> msgs = new ArrayList<OFMessage>(); assertTrue("No write on channel was captured", writeCapture.hasCaptured()); List<List<OFMessage>> capturedVals = writeCapture.getValues(); for (List<OFMessage> oneWriteList: capturedVals) msgs.addAll(oneWriteList); writeCapture.reset(); return msgs; }
List<OFMessage> getMessagesFromCapture() { List<OFMessage> msgs = new ArrayList<OFMessage>(); assertTrue(STR, writeCapture.hasCaptured()); List<List<OFMessage>> capturedVals = writeCapture.getValues(); for (List<OFMessage> oneWriteList: capturedVals) msgs.addAll(oneWriteList); writeCapture.reset(); return msgs; }
/** * Extract the list of OFMessages that was captured by the Channel.write() * capture. Will check that something was actually captured first. We'll * collapse the messages from multiple writes into a single list of * OFMessages. * Resets the channelWriteCapture. */
Extract the list of OFMessages that was captured by the Channel.write() capture. Will check that something was actually captured first. We'll collapse the messages from multiple writes into a single list of OFMessages. Resets the channelWriteCapture
getMessagesFromCapture
{ "repo_name": "chinmaymhatre91/floodlight", "path": "src/test/java/net/floodlightcontroller/core/internal/OFChannelHandlerVer13Test.java", "license": "apache-2.0", "size": 18559 }
[ "java.util.ArrayList", "java.util.List", "org.easymock.EasyMock", "org.junit.Assert", "org.projectfloodlight.openflow.protocol.OFMessage" ]
import java.util.ArrayList; import java.util.List; import org.easymock.EasyMock; import org.junit.Assert; import org.projectfloodlight.openflow.protocol.OFMessage;
import java.util.*; import org.easymock.*; import org.junit.*; import org.projectfloodlight.openflow.protocol.*;
[ "java.util", "org.easymock", "org.junit", "org.projectfloodlight.openflow" ]
java.util; org.easymock; org.junit; org.projectfloodlight.openflow;
941,725
public void initialize(RandomAccessInputStream stream) throws IOException;
void function(RandomAccessInputStream stream) throws IOException;
/** * Construct a new POI filesystem around the given stream. * * @throws IOException if an error occurred when reading from the stream. */
Construct a new POI filesystem around the given stream
initialize
{ "repo_name": "ximenesuk/bioformats", "path": "components/bio-formats/src/loci/formats/services/POIService.java", "license": "gpl-2.0", "size": 4210 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,969,338
@Override public String getStudyInstanceUID() { return isDICOMDIR ? getElementValueFromSQ(directoryRecordSeq, Tags.StudyInstanceUID, null) : getElementValue(Tags.StudyInstanceUID, null); }
String function() { return isDICOMDIR ? getElementValueFromSQ(directoryRecordSeq, Tags.StudyInstanceUID, null) : getElementValue(Tags.StudyInstanceUID, null); }
/** * Convenience method to get the contents of the StudyInstanceUID element. * If the DicomObject is a DICOMDIR, the DirectoryRecordSeq element * is searched for the first StudyInstanceUID element. * @return the text of the element or null if the element does not exist. */
Convenience method to get the contents of the StudyInstanceUID element. If the DicomObject is a DICOMDIR, the DirectoryRecordSeq element is searched for the first StudyInstanceUID element
getStudyInstanceUID
{ "repo_name": "blezek/Notion", "path": "src/main/java/org/rsna/ctp/objects/DicomObject.java", "license": "bsd-3-clause", "size": 75827 }
[ "org.dcm4che.dict.Tags" ]
import org.dcm4che.dict.Tags;
import org.dcm4che.dict.*;
[ "org.dcm4che.dict" ]
org.dcm4che.dict;
1,104,245