column                 type     min         max
method                 string   13 chars    441k chars
clean_method           string   7 chars     313k chars
doc                    string   17 chars    17.3k chars
comment                string   3 chars     1.42k chars
method_name            string   1 char      273 chars
extra                  dict     -           -
imports                list     -           -
imports_info           string   19 chars    34.8k chars
cluster_imports_info   string   15 chars    3.66k chars
libraries              list     -           -
libraries_info         string   6 chars     661 chars
id                     int64    0           2.92M
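The column listing above describes one record per mined Java method. As a quick sanity check, here is a minimal sketch of reading the first record and printing a few fields. It assumes the dump is stored as JSON Lines with exactly these field names, that org.json is on the classpath, and that the file is called methods.jsonl; all three are assumptions, not stated anywhere in this dump.

import org.json.JSONObject;

import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.stream.Stream;

public class InspectRecord {
    public static void main(String[] args) throws Exception {
        // Hypothetical file name; the actual distribution format of the dump is not given above.
        try (Stream<String> lines = Files.lines(Paths.get("methods.jsonl"))) {
            String firstLine = lines.findFirst()
                    .orElseThrow(() -> new IllegalStateException("empty dump"));
            JSONObject record = new JSONObject(firstLine);

            // Field names follow the column table above.
            System.out.println("id:          " + record.getLong("id"));
            System.out.println("method_name: " + record.getString("method_name"));
            System.out.println("comment:     " + record.getString("comment"));
            System.out.println("libraries:   " + record.getJSONArray("libraries"));
            System.out.println("repo:        " + record.getJSONObject("extra").getString("repo_name"));
        }
    }
}

The raw rows of the dump follow below, one field per line in the order of the table above.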
protected void connectionFailed(Channel failedChannel, IOException cause) { failureCause = IOExceptionSupport.create(cause); channel = failedChannel; connected.set(false); connectLatch.countDown(); }
void function(Channel failedChannel, IOException cause) { failureCause = IOExceptionSupport.create(cause); channel = failedChannel; connected.set(false); connectLatch.countDown(); }
/** * Called when the transport connection failed and an error should be returned. * * @param failedChannel The Channel instance that failed. * @param cause An IOException that describes the cause of the failed connection. */
Called when the transport connection failed and an error should be returned
connectionFailed
{ "repo_name": "michaelandrepearce/activemq-artemis", "path": "tests/artemis-test-support/src/main/java/org/apache/activemq/transport/amqp/client/transport/NettyTcpTransport.java", "license": "apache-2.0", "size": 13435 }
[ "io.netty.channel.Channel", "java.io.IOException", "org.apache.activemq.transport.amqp.client.util.IOExceptionSupport" ]
import io.netty.channel.Channel; import java.io.IOException; import org.apache.activemq.transport.amqp.client.util.IOExceptionSupport;
import io.netty.channel.*; import java.io.*; import org.apache.activemq.transport.amqp.client.util.*;
[ "io.netty.channel", "java.io", "org.apache.activemq" ]
io.netty.channel; java.io; org.apache.activemq;
1,246,468
@POST @Produces("application/json") @Consumes("application/json") @Path("AddRecommendationException") public List<String> AddRecommendationException(RecommendationException objRecommendationException) { List<String> response = new ArrayList<String>(); try { DataAccessInterface objDAInterface = new RecommendationExceptionAdapter(); AbstractDataBridge objADBridge = new DatabaseStorage(objDAInterface); response = objADBridge.SaveRecommendationException(objRecommendationException); logger.info("RecommendationException saved successfully, RecommendationException Details="+objRecommendationException); } catch(Exception ex) { logger.info("Error in adding RecommendationException"); response.add("Error in adding RecommendationException"); } return response; }
@Produces(STR) @Consumes(STR) @Path(STR) List<String> function(RecommendationException objRecommendationException) { List<String> response = new ArrayList<String>(); try { DataAccessInterface objDAInterface = new RecommendationExceptionAdapter(); AbstractDataBridge objADBridge = new DatabaseStorage(objDAInterface); response = objADBridge.SaveRecommendationException(objRecommendationException); logger.info(STR+objRecommendationException); } catch(Exception ex) { logger.info(STR); response.add(STR); } return response; }
/** * This function is using to Add RecommendationException * @param objRecommendationException * @return a list of object string with "Error", "No Error" and new added ID */
This function is using to Add RecommendationException
AddRecommendationException
{ "repo_name": "ubiquitous-computing-lab/Mining-Minds", "path": "data-curation-layer/lifelog-mapping-and-representation/MMDataCurationRestfulService/src/main/java/org/uclab/mm/rs/servicecuration/ServiceCurationResource.java", "license": "apache-2.0", "size": 55302 }
[ "java.util.ArrayList", "java.util.List", "javax.ws.rs.Consumes", "javax.ws.rs.Path", "javax.ws.rs.Produces", "org.uclab.mm.datamodel.AbstractDataBridge", "org.uclab.mm.datamodel.DataAccessInterface", "org.uclab.mm.datamodel.DatabaseStorage", "org.uclab.mm.datamodel.sc.RecommendationException", "org.uclab.mm.datamodel.sc.dataadapter.RecommendationExceptionAdapter" ]
import java.util.ArrayList; import java.util.List; import javax.ws.rs.Consumes; import javax.ws.rs.Path; import javax.ws.rs.Produces; import org.uclab.mm.datamodel.AbstractDataBridge; import org.uclab.mm.datamodel.DataAccessInterface; import org.uclab.mm.datamodel.DatabaseStorage; import org.uclab.mm.datamodel.sc.RecommendationException; import org.uclab.mm.datamodel.sc.dataadapter.RecommendationExceptionAdapter;
import java.util.*; import javax.ws.rs.*; import org.uclab.mm.datamodel.*; import org.uclab.mm.datamodel.sc.*; import org.uclab.mm.datamodel.sc.dataadapter.*;
[ "java.util", "javax.ws", "org.uclab.mm" ]
java.util; javax.ws; org.uclab.mm;
970,186
public static ArrayList<String> getDirListingSu(String str) throws RootNotPermittedException { ArrayList<String> arrayLis=RootHelper.runShellCommand(LS.replace("%", str)); return arrayLis; }
static ArrayList<String> function(String str) throws RootNotPermittedException { ArrayList<String> arrayLis=RootHelper.runShellCommand(LS.replace("%", str)); return arrayLis; }
/** * Get a shell based listing * Context is superuser level shell * @param str * @return */
Get a shell based listing Context is superuser level shell
getDirListingSu
{ "repo_name": "martincz/AmazeFileManager", "path": "src/main/java/com/amaze/filemanager/utils/RootUtils.java", "license": "gpl-3.0", "size": 9824 }
[ "com.amaze.filemanager.exceptions.RootNotPermittedException", "com.amaze.filemanager.filesystem.RootHelper", "java.util.ArrayList" ]
import com.amaze.filemanager.exceptions.RootNotPermittedException; import com.amaze.filemanager.filesystem.RootHelper; import java.util.ArrayList;
import com.amaze.filemanager.exceptions.*; import com.amaze.filemanager.filesystem.*; import java.util.*;
[ "com.amaze.filemanager", "java.util" ]
com.amaze.filemanager; java.util;
2,241,940
protected int readGenerationNumber() throws IOException { int retval = readInt(); if(retval < 0 || retval > GENERATION_NUMBER_THRESHOLD) { throw new IOException("Generation Number '" + retval + "' has more than 5 digits"); } return retval; }
int function() throws IOException { int retval = readInt(); if(retval < 0 || retval > GENERATION_NUMBER_THRESHOLD) { throw new IOException(STR + retval + STR); } return retval; }
/** * This will read a integer from the Stream and throw an {@link IllegalArgumentException} if the integer value * has more than the maximum object revision (i.e. : bigger than {@link #GENERATION_NUMBER_THRESHOLD}) * @return the generation number being read. * @throws IOException if an I/O error occurs */
This will read a integer from the Stream and throw an <code>IllegalArgumentException</code> if the integer value has more than the maximum object revision (i.e. : bigger than <code>#GENERATION_NUMBER_THRESHOLD</code>)
readGenerationNumber
{ "repo_name": "kalaspuffar/pdfbox", "path": "pdfbox/src/main/java/org/apache/pdfbox/pdfparser/BaseParser.java", "license": "apache-2.0", "size": 42779 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,469,114
@NotNull TextRange getDeclarationRange(@NotNull T container);
TextRange getDeclarationRange(@NotNull T container);
/** * Returns the declaration range for the specified container. * @param container the container * @return the declaration range for it. */
Returns the declaration range for the specified container
getDeclarationRange
{ "repo_name": "android-ia/platform_tools_idea", "path": "platform/lang-api/src/com/intellij/codeInsight/hint/DeclarationRangeHandler.java", "license": "apache-2.0", "size": 1503 }
[ "com.intellij.openapi.util.TextRange", "org.jetbrains.annotations.NotNull" ]
import com.intellij.openapi.util.TextRange; import org.jetbrains.annotations.NotNull;
import com.intellij.openapi.util.*; import org.jetbrains.annotations.*;
[ "com.intellij.openapi", "org.jetbrains.annotations" ]
com.intellij.openapi; org.jetbrains.annotations;
301,330
private boolean updateComposingText(@NonNull BrailleTranslator translator, @NonNull InputConnection ic) { if (mComposingBraille.position() == 0) { return ic.commitText("", 1); } String text = translator.backTranslate(getComposingBrailleArray()); if (TextUtils.isEmpty(text)) { return ic.setComposingText("\u00A0", 1); } else { return ic.setComposingText(text, 1); } }
boolean function(@NonNull BrailleTranslator translator, @NonNull InputConnection ic) { if (mComposingBraille.position() == 0) { return ic.commitText("", 1); } String text = translator.backTranslate(getComposingBrailleArray()); if (TextUtils.isEmpty(text)) { return ic.setComposingText("\u00A0", 1); } else { return ic.setComposingText(text, 1); } }
/** * Updates the composing text based on the braille dots composed thus far, * and maintains the composing state of the editor. * Returns {@code true} if the current string of braille dots could be * translated into text, otherwise {@code false}. */
Updates the composing text based on the braille dots composed thus far, and maintains the composing state of the editor. Returns true if the current string of braille dots could be translated into text, otherwise false
updateComposingText
{ "repo_name": "google/brailleback", "path": "braille/brailleback/src/com/googlecode/eyesfree/brailleback/BrailleIME.java", "license": "apache-2.0", "size": 29235 }
[ "android.support.annotation.NonNull", "android.view.inputmethod.InputConnection", "com.googlecode.eyesfree.braille.translate.BrailleTranslator" ]
import android.support.annotation.NonNull; import android.view.inputmethod.InputConnection; import com.googlecode.eyesfree.braille.translate.BrailleTranslator;
import android.support.annotation.*; import android.view.inputmethod.*; import com.googlecode.eyesfree.braille.translate.*;
[ "android.support", "android.view", "com.googlecode.eyesfree" ]
android.support; android.view; com.googlecode.eyesfree;
241,765
public static ArrayList<ClusterSet> splitHypotesis(ClusterSet fullClusterSet) { ArrayList<ClusterSet> listOfClusterSet = new ArrayList<ClusterSet>(); for (String showName : fullClusterSet.getShowNames()) { // logger.finer("showName=" + showName); ClusterSet clusterSet = new ClusterSet(); for (Segment segment : fullClusterSet.getSegments()) { if (segment.getShowName().equals(showName) == true) { Cluster cluster = clusterSet.getOrCreateANewCluster(segment.getClusterName()); cluster.addSegment(segment); } } listOfClusterSet.add(clusterSet); } // Collections.sort(listOfClusterSet, new comparClusterSet()); return listOfClusterSet; } private static float maxMemoryUsed = 0.0f; private static int numMemoryStats = 0; private static float avgMemoryUsed = 0.0f; private static DecimalFormat memFormat = new DecimalFormat("0.00 Mb");
static ArrayList<ClusterSet> function(ClusterSet fullClusterSet) { ArrayList<ClusterSet> listOfClusterSet = new ArrayList<ClusterSet>(); for (String showName : fullClusterSet.getShowNames()) { ClusterSet clusterSet = new ClusterSet(); for (Segment segment : fullClusterSet.getSegments()) { if (segment.getShowName().equals(showName) == true) { Cluster cluster = clusterSet.getOrCreateANewCluster(segment.getClusterName()); cluster.addSegment(segment); } } listOfClusterSet.add(clusterSet); } return listOfClusterSet; } private static float maxMemoryUsed = 0.0f; private static int numMemoryStats = 0; private static float avgMemoryUsed = 0.0f; private static DecimalFormat memFormat = new DecimalFormat(STR);
/** * Split hypotesis. * * @param fullClusterSet the full cluster set * @return the array list */
Split hypotesis
splitHypotesis
{ "repo_name": "Adirockzz95/GenderDetect", "path": "src/src/fr/lium/spkDiarization/lib/MainTools.java", "license": "gpl-3.0", "size": 23637 }
[ "fr.lium.spkDiarization.libClusteringData.Cluster", "fr.lium.spkDiarization.libClusteringData.ClusterSet", "fr.lium.spkDiarization.libClusteringData.Segment", "java.text.DecimalFormat", "java.util.ArrayList" ]
import fr.lium.spkDiarization.libClusteringData.Cluster; import fr.lium.spkDiarization.libClusteringData.ClusterSet; import fr.lium.spkDiarization.libClusteringData.Segment; import java.text.DecimalFormat; import java.util.ArrayList;
import fr.lium.*; import java.text.*; import java.util.*;
[ "fr.lium", "java.text", "java.util" ]
fr.lium; java.text; java.util;
342,980
void event(ServiceEvent event, Collection<BundleContext> contexts);
void event(ServiceEvent event, Collection<BundleContext> contexts);
/** * Event hook method. This method is called prior to service event delivery * when a publishing bundle registers, modifies or unregisters a service. * This method can filter the bundles which receive the event. * * @param event The service event to be delivered. * @param contexts A collection of Bundle Contexts for bundles which have * listeners to which the specified event will be delivered. The * implementation of this method may remove bundle contexts from the * collection to prevent the event from being delivered to the * associated bundles. The collection supports all the optional * {@code Collection} operations except {@code add} and * {@code addAll}. Attempting to add to the collection will result in * an {@code UnsupportedOperationException}. The collection is not * synchronized. */
Event hook method. This method is called prior to service event delivery when a publishing bundle registers, modifies or unregisters a service. This method can filter the bundles which receive the event
event
{ "repo_name": "aosgi/org.apache.felix.framework", "path": "src/main/java/org/osgi/framework/hooks/service/EventHook.java", "license": "apache-2.0", "size": 2189 }
[ "java.util.Collection", "org.osgi.framework.BundleContext", "org.osgi.framework.ServiceEvent" ]
import java.util.Collection; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceEvent;
import java.util.*; import org.osgi.framework.*;
[ "java.util", "org.osgi.framework" ]
java.util; org.osgi.framework;
882,671
@Test public void testInternalIteratorSingleBufFull() throws Exception { final Logger log = Logger.getLogger("TestDbusEventBufferIterator.testInternalIteratorSingleBufFull"); //log.setLevel(Level.DEBUG); log.info("starting"); final DbusEventBuffer dbusBuf = new DbusEventBuffer(TestDbusEventBuffer.getConfig( 845, 100000, 256, 500, AllocationPolicy.HEAP_MEMORY, QueuePolicy.BLOCK_ON_WRITE, AssertLevel.NONE)); dbusBuf.start(0); log.info("append a full buffer"); DbusEventGenerator generator = new DbusEventGenerator(10); final Vector<DbusEvent> events = new Vector<DbusEvent>(); generator.generateEvents(6, 2, 120, 38, events); log.debug(dbusBuf.toShortString()); dbusBuf.assertBuffersLimits(); DbusEventAppender appender = new DbusEventAppender(events, dbusBuf, null, 1.0, false, -1); appender.run(); log.info("verify new iterator"); DbusEventIterator iter1 = dbusBuf.acquireIterator("testInternalIteratorSingleBufFull"); log.debug("it1=" + iter1); Assert.assertEquals(iter1.getCurrentPosition(), dbusBuf.getHead()); Assert.assertEquals(iter1._iteratorTail.getPosition(), dbusBuf.getTail()); Assert.assertEquals(dbusBuf._busyIteratorPool.size(), 1); Assert.assertTrue(iter1.hasNext()); DbusEvent e = iter1.next(); Assert.assertTrue(e.isEndOfPeriodMarker()); Assert.assertTrue(iter1.hasNext()); dbusBuf.assertBuffersLimits(); log.info("make sure we can read some events"); readAndCompareIteratorEvents(iter1, events, 0, 6, true, true, true); log.debug("after read: " + dbusBuf.toShortString()); log.debug(iter1); log.info("append more windows"); final Vector<DbusEvent> events2 = new Vector<DbusEvent>(); generator = new DbusEventGenerator(200); generator.generateEvents(2, 1, 120, 39, events2); appender = new DbusEventAppender(events2, dbusBuf, null, 1.0, false, -1); appender.run(); log.debug("after 2 more events added: " + dbusBuf.toShortString()); log.debug(iter1); readAndCompareIteratorEvents(iter1, events2, 0, 2, true, false, true); log.debug("after 2 more events read: " + dbusBuf.toShortString()); log.debug(iter1); dbusBuf.assertBuffersLimits(); // create another iterator - make sure it can read too DbusEventIterator iter2 = dbusBuf.acquireIterator("testInternalIteratorSingleBufFull2"); long iCWP = iter2.getCurrentPosition(); long head = dbusBuf.getBufferPositionParser().sanitize(dbusBuf.getHead(), dbusBuf.getBuffer()); Assert.assertEquals(iCWP, head); Assert.assertEquals(iter2._iteratorTail.getPosition(), dbusBuf.getTail()); Assert.assertEquals(dbusBuf._busyIteratorPool.size(), 2); Assert.assertTrue(iter2.hasNext()); log.debug("iter2=" + iter2); readAndCompareIteratorEvents(iter2, events2, 0, 2, true, false, true); // read same events and don't remove dbusBuf.releaseIterator(iter2); dbusBuf.assertBuffersLimits(); log.debug("iter1=" + iter1); iter1.remove(); log.debug("buf (after read)=" + dbusBuf); generator = new DbusEventGenerator(300); final Vector<DbusEvent> events3 = new Vector<DbusEvent>(); generator.generateEvents(4, 2, 120, 39, events3); appender = new DbusEventAppender(events3, dbusBuf, null, 1.0, false, -1); appender.run(); dbusBuf.assertBuffersLimits(); log.info("make sure we can read remainder of events"); readAndCompareIteratorEvents(iter1, events3, 0, 4, false, true, true); dbusBuf.assertBuffersLimits(); Assert.assertTrue(dbusBuf.empty()); dbusBuf.releaseIterator(iter1); log.info("done"); } private static HashSet<String> _idSet = new HashSet<String>(); private final AtomicInteger thrCounter = new AtomicInteger();
void function() throws Exception { final Logger log = Logger.getLogger(STR); log.info(STR); final DbusEventBuffer dbusBuf = new DbusEventBuffer(TestDbusEventBuffer.getConfig( 845, 100000, 256, 500, AllocationPolicy.HEAP_MEMORY, QueuePolicy.BLOCK_ON_WRITE, AssertLevel.NONE)); dbusBuf.start(0); log.info(STR); DbusEventGenerator generator = new DbusEventGenerator(10); final Vector<DbusEvent> events = new Vector<DbusEvent>(); generator.generateEvents(6, 2, 120, 38, events); log.debug(dbusBuf.toShortString()); dbusBuf.assertBuffersLimits(); DbusEventAppender appender = new DbusEventAppender(events, dbusBuf, null, 1.0, false, -1); appender.run(); log.info(STR); DbusEventIterator iter1 = dbusBuf.acquireIterator(STR); log.debug("it1=" + iter1); Assert.assertEquals(iter1.getCurrentPosition(), dbusBuf.getHead()); Assert.assertEquals(iter1._iteratorTail.getPosition(), dbusBuf.getTail()); Assert.assertEquals(dbusBuf._busyIteratorPool.size(), 1); Assert.assertTrue(iter1.hasNext()); DbusEvent e = iter1.next(); Assert.assertTrue(e.isEndOfPeriodMarker()); Assert.assertTrue(iter1.hasNext()); dbusBuf.assertBuffersLimits(); log.info(STR); readAndCompareIteratorEvents(iter1, events, 0, 6, true, true, true); log.debug(STR + dbusBuf.toShortString()); log.debug(iter1); log.info(STR); final Vector<DbusEvent> events2 = new Vector<DbusEvent>(); generator = new DbusEventGenerator(200); generator.generateEvents(2, 1, 120, 39, events2); appender = new DbusEventAppender(events2, dbusBuf, null, 1.0, false, -1); appender.run(); log.debug(STR + dbusBuf.toShortString()); log.debug(iter1); readAndCompareIteratorEvents(iter1, events2, 0, 2, true, false, true); log.debug(STR + dbusBuf.toShortString()); log.debug(iter1); dbusBuf.assertBuffersLimits(); DbusEventIterator iter2 = dbusBuf.acquireIterator(STR); long iCWP = iter2.getCurrentPosition(); long head = dbusBuf.getBufferPositionParser().sanitize(dbusBuf.getHead(), dbusBuf.getBuffer()); Assert.assertEquals(iCWP, head); Assert.assertEquals(iter2._iteratorTail.getPosition(), dbusBuf.getTail()); Assert.assertEquals(dbusBuf._busyIteratorPool.size(), 2); Assert.assertTrue(iter2.hasNext()); log.debug(STR + iter2); readAndCompareIteratorEvents(iter2, events2, 0, 2, true, false, true); dbusBuf.releaseIterator(iter2); dbusBuf.assertBuffersLimits(); log.debug(STR + iter1); iter1.remove(); log.debug(STR + dbusBuf); generator = new DbusEventGenerator(300); final Vector<DbusEvent> events3 = new Vector<DbusEvent>(); generator.generateEvents(4, 2, 120, 39, events3); appender = new DbusEventAppender(events3, dbusBuf, null, 1.0, false, -1); appender.run(); dbusBuf.assertBuffersLimits(); log.info(STR); readAndCompareIteratorEvents(iter1, events3, 0, 4, false, true, true); dbusBuf.assertBuffersLimits(); Assert.assertTrue(dbusBuf.empty()); dbusBuf.releaseIterator(iter1); log.info("done"); } private static HashSet<String> _idSet = new HashSet<String>(); private final AtomicInteger thrCounter = new AtomicInteger();
/** * Verify that Iterator's CP never gets ahead of Iterator's tail, even at * the end of the buffer (client, NO-OVERWITE policy). */
Verify that Iterator's CP never gets ahead of Iterator's tail, even at the end of the buffer (client, NO-OVERWITE policy)
testInternalIteratorSingleBufFull
{ "repo_name": "rahuljoshi123/databus", "path": "databus-core/databus-core-impl/src/test/java/com/linkedin/databus/core/TestDbusEventBuffer.java", "license": "apache-2.0", "size": 192220 }
[ "com.linkedin.databus.core.DbusEventBuffer", "com.linkedin.databus.core.test.DbusEventAppender", "com.linkedin.databus.core.test.DbusEventGenerator", "com.linkedin.databus2.core.AssertLevel", "java.util.HashSet", "java.util.Vector", "java.util.concurrent.atomic.AtomicInteger", "junit.framework.Assert", "org.apache.log4j.Logger", "org.testng.AssertJUnit" ]
import com.linkedin.databus.core.DbusEventBuffer; import com.linkedin.databus.core.test.DbusEventAppender; import com.linkedin.databus.core.test.DbusEventGenerator; import com.linkedin.databus2.core.AssertLevel; import java.util.HashSet; import java.util.Vector; import java.util.concurrent.atomic.AtomicInteger; import junit.framework.Assert; import org.apache.log4j.Logger; import org.testng.AssertJUnit;
import com.linkedin.databus.core.*; import com.linkedin.databus.core.test.*; import com.linkedin.databus2.core.*; import java.util.*; import java.util.concurrent.atomic.*; import junit.framework.*; import org.apache.log4j.*; import org.testng.*;
[ "com.linkedin.databus", "com.linkedin.databus2", "java.util", "junit.framework", "org.apache.log4j", "org.testng" ]
com.linkedin.databus; com.linkedin.databus2; java.util; junit.framework; org.apache.log4j; org.testng;
344,783
@Endpoint( describeByClass = true ) public static <U extends TNumber> StatelessRandomUniformV2<U> create(Scope scope, Operand<? extends TNumber> shape, Operand<? extends TType> key, Operand<? extends TType> counter, Operand<TInt32> alg, Class<U> dtype) { OperationBuilder opBuilder = scope.opBuilder(OP_NAME, "StatelessRandomUniformV2"); opBuilder.addInput(shape.asOutput()); opBuilder.addInput(key.asOutput()); opBuilder.addInput(counter.asOutput()); opBuilder.addInput(alg.asOutput()); opBuilder.setAttr("dtype", Operands.toDataType(dtype)); return new StatelessRandomUniformV2<>(opBuilder.build()); }
@Endpoint( describeByClass = true ) static <U extends TNumber> StatelessRandomUniformV2<U> function(Scope scope, Operand<? extends TNumber> shape, Operand<? extends TType> key, Operand<? extends TType> counter, Operand<TInt32> alg, Class<U> dtype) { OperationBuilder opBuilder = scope.opBuilder(OP_NAME, STR); opBuilder.addInput(shape.asOutput()); opBuilder.addInput(key.asOutput()); opBuilder.addInput(counter.asOutput()); opBuilder.addInput(alg.asOutput()); opBuilder.setAttr("dtype", Operands.toDataType(dtype)); return new StatelessRandomUniformV2<>(opBuilder.build()); }
/** * Factory method to create a class wrapping a new StatelessRandomUniformV2 operation. * * @param scope current scope * @param shape The shape of the output tensor. * @param key Key for the counter-based RNG algorithm (shape uint64[1]). * @param counter Initial counter for the counter-based RNG algorithm (shape uint64[2] or uint64[1] depending on the algorithm). If a larger vector is given, only the needed portion on the left (i.e. [:N]) will be used. * @param alg The RNG algorithm (shape int32[]). * @param dtype The type of the output. * @param <U> data type for {@code StatelessRandomUniformV2} output and operands * @return a new instance of StatelessRandomUniformV2 */
Factory method to create a class wrapping a new StatelessRandomUniformV2 operation
create
{ "repo_name": "tensorflow/java", "path": "tensorflow-core/tensorflow-core-api/src/gen/java/org/tensorflow/op/random/StatelessRandomUniformV2.java", "license": "apache-2.0", "size": 6418 }
[ "org.tensorflow.Operand", "org.tensorflow.OperationBuilder", "org.tensorflow.op.Operands", "org.tensorflow.op.Scope", "org.tensorflow.op.annotation.Endpoint", "org.tensorflow.types.TInt32", "org.tensorflow.types.family.TNumber", "org.tensorflow.types.family.TType" ]
import org.tensorflow.Operand; import org.tensorflow.OperationBuilder; import org.tensorflow.op.Operands; import org.tensorflow.op.Scope; import org.tensorflow.op.annotation.Endpoint; import org.tensorflow.types.TInt32; import org.tensorflow.types.family.TNumber; import org.tensorflow.types.family.TType;
import org.tensorflow.*; import org.tensorflow.op.*; import org.tensorflow.op.annotation.*; import org.tensorflow.types.*; import org.tensorflow.types.family.*;
[ "org.tensorflow", "org.tensorflow.op", "org.tensorflow.types" ]
org.tensorflow; org.tensorflow.op; org.tensorflow.types;
2,311,213
public String getProducer() { return info.getString( COSName.PRODUCER ); }
String function() { return info.getString( COSName.PRODUCER ); }
/** * This will get the producer of the document. This will return null if no producer exists. * * @return The producer of the document. */
This will get the producer of the document. This will return null if no producer exists
getProducer
{ "repo_name": "myrridin/qz-print", "path": "pdfbox_1.8.4_qz/src/org/apache/pdfbox/pdmodel/PDDocumentInformation.java", "license": "lgpl-2.1", "size": 8591 }
[ "org.apache.pdfbox.cos.COSName" ]
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.*;
[ "org.apache.pdfbox" ]
org.apache.pdfbox;
2,762,263
public GridGeometry2D reduce(GridGeometry2D gg) { if (gg.getEnvelope().getMaximum(1) > validArea.getMaximum(1)) { gg = reduceGridGeometrySide(gg, Side.TOP); } if (gg.getEnvelope().getMaximum(0) > validArea.getMaximum(0)) { gg = reduceGridGeometrySide(gg, Side.RIGHT); } if (gg.getEnvelope().getMinimum(1) < validArea.getMinimum(1)) { gg = reduceGridGeometrySide(gg, Side.BOTTOM); } if (gg.getEnvelope().getMinimum(0) < validArea.getMinimum(0)) { gg = reduceGridGeometrySide(gg, Side.LEFT); } return gg; }
GridGeometry2D function(GridGeometry2D gg) { if (gg.getEnvelope().getMaximum(1) > validArea.getMaximum(1)) { gg = reduceGridGeometrySide(gg, Side.TOP); } if (gg.getEnvelope().getMaximum(0) > validArea.getMaximum(0)) { gg = reduceGridGeometrySide(gg, Side.RIGHT); } if (gg.getEnvelope().getMinimum(1) < validArea.getMinimum(1)) { gg = reduceGridGeometrySide(gg, Side.BOTTOM); } if (gg.getEnvelope().getMinimum(0) < validArea.getMinimum(0)) { gg = reduceGridGeometrySide(gg, Side.LEFT); } return gg; }
/** * Reduces the given grid geometry by at most one pixel on each side, in an attempt to make it * fit the */
Reduces the given grid geometry by at most one pixel on each side, in an attempt to make it fit the
reduce
{ "repo_name": "geotools/geotools", "path": "modules/library/render/src/main/java/org/geotools/renderer/lite/gridcoverage2d/GridGeometryReducer.java", "license": "lgpl-2.1", "size": 7288 }
[ "org.geotools.coverage.grid.GridGeometry2D" ]
import org.geotools.coverage.grid.GridGeometry2D;
import org.geotools.coverage.grid.*;
[ "org.geotools.coverage" ]
org.geotools.coverage;
1,670,820
public Set<String> getMappedTypes(IComponent component) { return typeToComponentMapper.getMappedTypes(component); }
Set<String> function(IComponent component) { return typeToComponentMapper.getMappedTypes(component); }
/** * Retrieves the types mapped to the given component. If the component does * not exist, an empty set is returned. */
Retrieves the types mapped to the given component. If the component does not exist, an empty set is returned
getMappedTypes
{ "repo_name": "vimaier/conqat", "path": "org.conqat.engine.architecture/src/org/conqat/engine/architecture/assessment/shared/ArchitectureAssessor.java", "license": "apache-2.0", "size": 13791 }
[ "java.util.Set" ]
import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
2,757,583
private static void putPositionOfRecord(ByteBuffer buf, WALRecord rec) { FileWALPointer p = (FileWALPointer)rec.position(); buf.putLong(p.index()); buf.putInt(p.fileOffset()); buf.putInt(rec.size()); }
static void function(ByteBuffer buf, WALRecord rec) { FileWALPointer p = (FileWALPointer)rec.position(); buf.putLong(p.index()); buf.putInt(p.fileOffset()); buf.putInt(rec.size()); }
/** * Writes rec file position to given {@code buf}. * * @param buf Buffer to write rec file position. * @param rec WAL rec. */
Writes rec file position to given buf
putPositionOfRecord
{ "repo_name": "SomeFire/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/wal/serializer/RecordV2Serializer.java", "license": "apache-2.0", "size": 10919 }
[ "java.nio.ByteBuffer", "org.apache.ignite.internal.pagemem.wal.record.WALRecord", "org.apache.ignite.internal.processors.cache.persistence.wal.FileWALPointer" ]
import java.nio.ByteBuffer; import org.apache.ignite.internal.pagemem.wal.record.WALRecord; import org.apache.ignite.internal.processors.cache.persistence.wal.FileWALPointer;
import java.nio.*; import org.apache.ignite.internal.pagemem.wal.record.*; import org.apache.ignite.internal.processors.cache.persistence.wal.*;
[ "java.nio", "org.apache.ignite" ]
java.nio; org.apache.ignite;
523,307
@SuppressWarnings("nls") private void handleMessageUserList(final UserMessageProtocol.UserList response) { if (response.getUsers().isEmpty()) { terminal.getTerminal().println("No user in room: " + response.getRoom()); terminal.getTerminal().println(); } else { terminal.getTerminal().println(String.format("%1d existing user(s) in room: %2s...", Integer.valueOf(response.getUsers().size()), response.getRoom())); for (String user : response.getUsers()) { terminal.getTerminal().println(String.format("| %1s", user)); } terminal.getTerminal().println(); } terminal.resume(); } // private final void executeAndStart() // { // // Do we have some submitted commands to execute first? // if (terminal.hasCommandToSubmit()) // { // terminal.doSubmitCommand(); // } // // // User can start entering some commands. // terminal.start(); // output.start(); // // }
@SuppressWarnings("nls") void function(final UserMessageProtocol.UserList response) { if (response.getUsers().isEmpty()) { terminal.getTerminal().println(STR + response.getRoom()); terminal.getTerminal().println(); } else { terminal.getTerminal().println(String.format(STR, Integer.valueOf(response.getUsers().size()), response.getRoom())); for (String user : response.getUsers()) { terminal.getTerminal().println(String.format(STR, user)); } terminal.getTerminal().println(); } terminal.resume(); }
/** * Handles a {@link org.heliosphere.thot.akka.chat.server.user.UserMessageProtocol.UserList} message. * <hr> * @param response Message to handle. */
Handles a <code>org.heliosphere.thot.akka.chat.server.user.UserMessageProtocol.UserList</code> message.
handleMessageUserList
{ "repo_name": "ressec/thot", "path": "thot-akka/src/main/java/org/heliosphere/thot/akka/chat/client/TerminalActor.java", "license": "apache-2.0", "size": 31333 }
[ "org.heliosphere.thot.akka.chat.server.user.UserMessageProtocol" ]
import org.heliosphere.thot.akka.chat.server.user.UserMessageProtocol;
import org.heliosphere.thot.akka.chat.server.user.*;
[ "org.heliosphere.thot" ]
org.heliosphere.thot;
2,612,799
@Test public void testLabelBasedCoincidence() { String description = "SaMvThus - creating a coincidence group with labels"; TestHelper.testStart(description); Runner r = ArtificialDomain.runnerArtificialAutobiography(); r.exec("$CreateScene #Troy CloseOthers With Instances w_c_bai20 'Hector', w_c_bai20 'Achilles'"); @SuppressWarnings("unused") VerbInstance virel = r.exac("'Hector' / wcr_vr_rel1 / 'Achilles'."); VerbInstance vi1 = r.exac("'Hector' / #A wa_v_av1 / a w_c_bai21."); VerbInstance viskip = r.exac("'Hector' / #B wa_v_av10 / 'Achilles'."); VerbInstance vi2 = r.exac("'Achilles' / #A thus wa_v_av2 / the w_c_bai21."); // r.exec("$DebugHere"); // test the link mesh r.ah.linkedBy(Hardwired.LINK_COINCIDENCE, vi1, vi2); r.ah.linkedBy(Hardwired.LINK_COINCIDENCE, vi2, vi1); r.ah.notLinkedBy(Hardwired.LINK_COINCIDENCE, vi1, viskip); r.ah.linkedBy(Hardwired.LINK_SUCCESSOR, vi1, viskip); r.ah.linkedBy(Hardwired.LINK_SUCCESSOR, vi2, viskip); // r.ah.linkedBy(ViLinkDB.IR_CONTEXT, vi2, virel); TestHelper.testDone(); }
void function() { String description = STR; TestHelper.testStart(description); Runner r = ArtificialDomain.runnerArtificialAutobiography(); r.exec(STR); @SuppressWarnings(STR) VerbInstance virel = r.exac(STR); VerbInstance vi1 = r.exac(STR); VerbInstance viskip = r.exac(STR); VerbInstance vi2 = r.exac(STR); r.ah.linkedBy(Hardwired.LINK_COINCIDENCE, vi1, vi2); r.ah.linkedBy(Hardwired.LINK_COINCIDENCE, vi2, vi1); r.ah.notLinkedBy(Hardwired.LINK_COINCIDENCE, vi1, viskip); r.ah.linkedBy(Hardwired.LINK_SUCCESSOR, vi1, viskip); r.ah.linkedBy(Hardwired.LINK_SUCCESSOR, vi2, viskip); TestHelper.testDone(); }
/** * Tests that the coincidence verb is connected to the leader with * coincidence links and the other links are in place The leader is * determined with labels */
Tests that the coincidence verb is connected to the leader with coincidence links and the other links are in place The leader is determined with labels
testLabelBasedCoincidence
{ "repo_name": "Xapagy/Xapagy", "path": "src/test/java/org/xapagy/metaverbs/testSaMvThus.java", "license": "agpl-3.0", "size": 4802 }
[ "org.xapagy.ArtificialDomain", "org.xapagy.TestHelper", "org.xapagy.concepts.Hardwired", "org.xapagy.debug.Runner", "org.xapagy.instances.VerbInstance" ]
import org.xapagy.ArtificialDomain; import org.xapagy.TestHelper; import org.xapagy.concepts.Hardwired; import org.xapagy.debug.Runner; import org.xapagy.instances.VerbInstance;
import org.xapagy.*; import org.xapagy.concepts.*; import org.xapagy.debug.*; import org.xapagy.instances.*;
[ "org.xapagy", "org.xapagy.concepts", "org.xapagy.debug", "org.xapagy.instances" ]
org.xapagy; org.xapagy.concepts; org.xapagy.debug; org.xapagy.instances;
2,710,488
public Expression getFilterExpresion() { return exp; }
Expression function() { return exp; }
/** * Method will the read filter expression corresponding to the resolver. * This method is required in row level executer inorder to evaluate the filter * expression against spark, as mentioned above row level is a special type * filter resolver. * * @return Expression */
Method will the read filter expression corresponding to the resolver. This method is required in row level executer inorder to evaluate the filter expression against spark, as mentioned above row level is a special type filter resolver
getFilterExpresion
{ "repo_name": "foryou2030/incubator-carbondata", "path": "core/src/main/java/org/apache/carbondata/scan/filter/resolver/RowLevelFilterResolverImpl.java", "license": "apache-2.0", "size": 5961 }
[ "org.apache.carbondata.scan.expression.Expression" ]
import org.apache.carbondata.scan.expression.Expression;
import org.apache.carbondata.scan.expression.*;
[ "org.apache.carbondata" ]
org.apache.carbondata;
2,137,677
@CalledByNative private void onBytesRead(ByteBuffer buffer) { try { if (mContentLengthOverLimit) { return; } int size = buffer.remaining(); mSize += size; if (mSkippingToOffset) { if (mSize <= mOffset) { return; } else { mSkippingToOffset = false; buffer.position((int) (mOffset - (mSize - size))); } } boolean contentLengthOverLimit = (mContentLengthLimit != 0 && mSize > mContentLengthLimit); if (contentLengthOverLimit) { buffer.limit(size - (int) (mSize - mContentLengthLimit)); } while (buffer.hasRemaining()) { mSink.write(buffer); } if (contentLengthOverLimit) { onContentLengthOverLimit(); } } catch (Exception e) { onCalledByNativeException(e); } }
void function(ByteBuffer buffer) { try { if (mContentLengthOverLimit) { return; } int size = buffer.remaining(); mSize += size; if (mSkippingToOffset) { if (mSize <= mOffset) { return; } else { mSkippingToOffset = false; buffer.position((int) (mOffset - (mSize - size))); } } boolean contentLengthOverLimit = (mContentLengthLimit != 0 && mSize > mContentLengthLimit); if (contentLengthOverLimit) { buffer.limit(size - (int) (mSize - mContentLengthLimit)); } while (buffer.hasRemaining()) { mSink.write(buffer); } if (contentLengthOverLimit) { onContentLengthOverLimit(); } } catch (Exception e) { onCalledByNativeException(e); } }
/** * Consumes a portion of the response. * * @param byteBuffer The ByteBuffer to append. Must be a direct buffer, and * no references to it may be retained after the method ends, as * it wraps code managed on the native heap. */
Consumes a portion of the response
onBytesRead
{ "repo_name": "axinging/chromium-crosswalk", "path": "components/cronet/android/java/src/org/chromium/net/ChromiumUrlRequest.java", "license": "bsd-3-clause", "size": 26226 }
[ "java.nio.ByteBuffer" ]
import java.nio.ByteBuffer;
import java.nio.*;
[ "java.nio" ]
java.nio;
1,524,103
private boolean isWildCardSearch(JSONArray fields) { // Only do a wildcard search if we are passed ["*"] if (fields.length() == 1) { try { if ("*".equals(fields.getString(0))) { return true; } } catch (JSONException e) { return false; } } return false; }
boolean function(JSONArray fields) { if (fields.length() == 1) { try { if ("*".equals(fields.getString(0))) { return true; } } catch (JSONException e) { return false; } } return false; }
/** * If the user passes in the '*' wildcard character for search then they want all fields for each contact * * @param fields * @return true if wildcard search requested, false otherwise */
If the user passes in the '*' wildcard character for search then they want all fields for each contact
isWildCardSearch
{ "repo_name": "evernym/cordova-plugin-contacts", "path": "src/android/ContactAccessorSdk5.java", "license": "apache-2.0", "size": 109550 }
[ "org.json.JSONArray", "org.json.JSONException" ]
import org.json.JSONArray; import org.json.JSONException;
import org.json.*;
[ "org.json" ]
org.json;
487,716
public G2 scalarBaseMul(BigInteger k) { this.p.mul(TwistPoint.twistGen, k); return this; }
G2 function(BigInteger k) { this.p.mul(TwistPoint.twistGen, k); return this; }
/** * Perform a scalar multiplication with the generator point. * * @param k is the scalar. * @return the result which is also this object. */
Perform a scalar multiplication with the generator point
scalarBaseMul
{ "repo_name": "DeDiS/cothority", "path": "external/java/src/main/java/ch/epfl/dedis/lib/crypto/bn256/BN.java", "license": "gpl-2.0", "size": 19030 }
[ "java.math.BigInteger" ]
import java.math.BigInteger;
import java.math.*;
[ "java.math" ]
java.math;
1,359,365
public List<InetAddress> getNaturalEndpoints(String keyspaceName, String cf, String key) { KSMetaData ksMetaData = Schema.instance.getKSMetaData(keyspaceName); if (ksMetaData == null) throw new IllegalArgumentException("Unknown keyspace '" + keyspaceName + "'"); CFMetaData cfMetaData = ksMetaData.cfMetaData().get(cf); if (cfMetaData == null) throw new IllegalArgumentException("Unknown table '" + cf + "' in keyspace '" + keyspaceName + "'"); return getNaturalEndpoints(keyspaceName, getPartitioner().getToken(cfMetaData.getKeyValidator().fromString(key))); }
List<InetAddress> function(String keyspaceName, String cf, String key) { KSMetaData ksMetaData = Schema.instance.getKSMetaData(keyspaceName); if (ksMetaData == null) throw new IllegalArgumentException(STR + keyspaceName + "'"); CFMetaData cfMetaData = ksMetaData.cfMetaData().get(cf); if (cfMetaData == null) throw new IllegalArgumentException(STR + cf + STR + keyspaceName + "'"); return getNaturalEndpoints(keyspaceName, getPartitioner().getToken(cfMetaData.getKeyValidator().fromString(key))); }
/** * This method returns the N endpoints that are responsible for storing the * specified key i.e for replication. * * @param keyspaceName keyspace name also known as keyspace * @param cf Column family name * @param key key for which we need to find the endpoint * @return the endpoint responsible for this key */
This method returns the N endpoints that are responsible for storing the specified key i.e for replication
getNaturalEndpoints
{ "repo_name": "hobinyoon/apache-cassandra-2.2.3-src", "path": "src/java/org/apache/cassandra/service/StorageService.java", "license": "apache-2.0", "size": 183550 }
[ "java.net.InetAddress", "java.util.List", "org.apache.cassandra.config.CFMetaData", "org.apache.cassandra.config.KSMetaData", "org.apache.cassandra.config.Schema" ]
import java.net.InetAddress; import java.util.List; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.config.KSMetaData; import org.apache.cassandra.config.Schema;
import java.net.*; import java.util.*; import org.apache.cassandra.config.*;
[ "java.net", "java.util", "org.apache.cassandra" ]
java.net; java.util; org.apache.cassandra;
19,464
public StreamPartitioner<T> getPartitioner() { return partitioner; }
StreamPartitioner<T> function() { return partitioner; }
/** * Returns the {@code StreamPartitioner} that must be used for partitioning the elements * of the input {@code StreamTransformation}. */
Returns the StreamPartitioner that must be used for partitioning the elements of the input StreamTransformation
getPartitioner
{ "repo_name": "DieBauer/flink", "path": "flink-streaming-java/src/main/java/org/apache/flink/streaming/api/transformations/PartitionTransformation.java", "license": "apache-2.0", "size": 2911 }
[ "org.apache.flink.streaming.runtime.partitioner.StreamPartitioner" ]
import org.apache.flink.streaming.runtime.partitioner.StreamPartitioner;
import org.apache.flink.streaming.runtime.partitioner.*;
[ "org.apache.flink" ]
org.apache.flink;
509,922
//------------------------------------------------------------------------- public ImmutableRatesProviderBuilder timeSeries(Index index, LocalDateDoubleTimeSeries timeSeries) { ArgChecker.notNull(index, "index"); ArgChecker.notNull(timeSeries, "timeSeries"); this.timeSeries.put(index, timeSeries); return this; }
ImmutableRatesProviderBuilder function(Index index, LocalDateDoubleTimeSeries timeSeries) { ArgChecker.notNull(index, "index"); ArgChecker.notNull(timeSeries, STR); this.timeSeries.put(index, timeSeries); return this; }
/** * Adds a time-series to the provider. * <p> * This adds the specified time-series to the provider. * This operates using {@link Map#put(Object, Object)} semantics using the index as the key. * * @param index the FX index * @param timeSeries the FX index time-series * @return this, for chaining */
Adds a time-series to the provider. This adds the specified time-series to the provider. This operates using <code>Map#put(Object, Object)</code> semantics using the index as the key
timeSeries
{ "repo_name": "ChinaQuants/Strata", "path": "modules/pricer/src/main/java/com/opengamma/strata/pricer/rate/ImmutableRatesProviderBuilder.java", "license": "apache-2.0", "size": 14022 }
[ "com.opengamma.strata.basics.index.Index", "com.opengamma.strata.collect.ArgChecker", "com.opengamma.strata.collect.timeseries.LocalDateDoubleTimeSeries" ]
import com.opengamma.strata.basics.index.Index; import com.opengamma.strata.collect.ArgChecker; import com.opengamma.strata.collect.timeseries.LocalDateDoubleTimeSeries;
import com.opengamma.strata.basics.index.*; import com.opengamma.strata.collect.*; import com.opengamma.strata.collect.timeseries.*;
[ "com.opengamma.strata" ]
com.opengamma.strata;
1,795,489
public void setObjectId(ObjectId id) { this.id = id; }
void function(ObjectId id) { this.id = id; }
/** * Sets the object id. * * @param id the new object id */
Sets the object id
setObjectId
{ "repo_name": "jjeb/kettle-trunk", "path": "engine/src/org/pentaho/di/job/entry/JobEntryBase.java", "license": "apache-2.0", "size": 34649 }
[ "org.pentaho.di.repository.ObjectId" ]
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.*;
[ "org.pentaho.di" ]
org.pentaho.di;
2,697,566
public List<Tuple2<String, DataSet<?>>> getGatherBcastVars() { return this.bcVarsGather; }
List<Tuple2<String, DataSet<?>>> function() { return this.bcVarsGather; }
/** * Get the broadcast variables of the GatherFunction. * * @return a List of Tuple2, where the first field is the broadcast variable name * and the second field is the broadcast DataSet. */
Get the broadcast variables of the GatherFunction
getGatherBcastVars
{ "repo_name": "haohui/flink", "path": "flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GSAConfiguration.java", "license": "apache-2.0", "size": 4678 }
[ "java.util.List", "org.apache.flink.api.java.DataSet", "org.apache.flink.api.java.tuple.Tuple2" ]
import java.util.List; import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.tuple.Tuple2;
import java.util.*; import org.apache.flink.api.java.*; import org.apache.flink.api.java.tuple.*;
[ "java.util", "org.apache.flink" ]
java.util; org.apache.flink;
929,508
@Override public Object nextItemId(Object itemId) { return ((Container.Ordered) items).nextItemId(itemId); }
Object function(Object itemId) { return ((Container.Ordered) items).nextItemId(itemId); }
/** * Gets the ID of the Item following the Item that corresponds to itemId. * * @see com.vaadin.data.Container.Ordered#nextItemId(java.lang.Object) */
Gets the ID of the Item following the Item that corresponds to itemId
nextItemId
{ "repo_name": "jdahlstrom/vaadin.react", "path": "server/src/main/java/com/vaadin/ui/Table.java", "license": "apache-2.0", "size": 223299 }
[ "com.vaadin.data.Container" ]
import com.vaadin.data.Container;
import com.vaadin.data.*;
[ "com.vaadin.data" ]
com.vaadin.data;
755,795
protected TimeZone getTimeZone() { if (timeZone != null) { return timeZone; } return ThreadLocalUserContext.getTimeZone(); }
TimeZone function() { if (timeZone != null) { return timeZone; } return ThreadLocalUserContext.getTimeZone(); }
/** * The time zone of this object (if given) or the time zone of the user if found in the ThreadLocalUserContext, otherwise {@link TimeZone#getDefault()}. * @see ThreadLocalUserContext#getUser() * @see ThreadLocalUserContext#getTimeZone() */
The time zone of this object (if given) or the time zone of the user if found in the ThreadLocalUserContext, otherwise <code>TimeZone#getDefault()</code>
getTimeZone
{ "repo_name": "FlowsenAusMonotown/projectforge", "path": "projectforge-business/src/main/java/org/projectforge/framework/xstream/converter/DateConverter.java", "license": "gpl-3.0", "size": 4007 }
[ "java.util.TimeZone", "org.projectforge.framework.persistence.user.api.ThreadLocalUserContext" ]
import java.util.TimeZone; import org.projectforge.framework.persistence.user.api.ThreadLocalUserContext;
import java.util.*; import org.projectforge.framework.persistence.user.api.*;
[ "java.util", "org.projectforge.framework" ]
java.util; org.projectforge.framework;
661,200
public static SamlRegisteredServiceServiceProviderMetadataFacade get(final SamlRegisteredServiceCachingMetadataResolver resolver, final SamlRegisteredService registeredService, final RequestAbstractType request) { return get(resolver, registeredService, request.getIssuer().getValue()); }
static SamlRegisteredServiceServiceProviderMetadataFacade function(final SamlRegisteredServiceCachingMetadataResolver resolver, final SamlRegisteredService registeredService, final RequestAbstractType request) { return get(resolver, registeredService, request.getIssuer().getValue()); }
/** * Adapt saml metadata and parse. Acts as a facade. * * @param resolver the resolver * @param registeredService the service * @param request the request * @return the saml metadata adaptor */
Adapt saml metadata and parse. Acts as a facade
get
{ "repo_name": "joansmith/cas", "path": "cas-server-support-saml-idp/src/main/java/org/jasig/cas/support/saml/services/idp/metadata/SamlRegisteredServiceServiceProviderMetadataFacade.java", "license": "apache-2.0", "size": 10995 }
[ "org.jasig.cas.support.saml.services.SamlRegisteredService", "org.jasig.cas.support.saml.services.idp.metadata.cache.SamlRegisteredServiceCachingMetadataResolver", "org.opensaml.saml.saml2.core.RequestAbstractType" ]
import org.jasig.cas.support.saml.services.SamlRegisteredService; import org.jasig.cas.support.saml.services.idp.metadata.cache.SamlRegisteredServiceCachingMetadataResolver; import org.opensaml.saml.saml2.core.RequestAbstractType;
import org.jasig.cas.support.saml.services.*; import org.jasig.cas.support.saml.services.idp.metadata.cache.*; import org.opensaml.saml.saml2.core.*;
[ "org.jasig.cas", "org.opensaml.saml" ]
org.jasig.cas; org.opensaml.saml;
440,082
void setReducerResources(Resources resources);
void setReducerResources(Resources resources);
/** * Override the resources, such as memory and virtual cores, to use for each reducer of this MapReduce job. * * @param resources Resources that each reducer should use. */
Override the resources, such as memory and virtual cores, to use for each reducer of this MapReduce job
setReducerResources
{ "repo_name": "caskdata/cdap", "path": "cdap-api/src/main/java/co/cask/cdap/api/mapreduce/MapReduceContext.java", "license": "apache-2.0", "size": 8029 }
[ "co.cask.cdap.api.Resources" ]
import co.cask.cdap.api.Resources;
import co.cask.cdap.api.*;
[ "co.cask.cdap" ]
co.cask.cdap;
577,073
private void connectChildFragment(PlanNode node, int childIdx, PlanFragment childFragment) throws InternalException { ExchangeNode exchangeNode = new ExchangeNode(ctx_.getNextNodeId()); exchangeNode.addChild(childFragment.getPlanRoot(), false); exchangeNode.init(ctx_.getRootAnalyzer()); node.setChild(childIdx, exchangeNode); childFragment.setDestination(exchangeNode); }
void function(PlanNode node, int childIdx, PlanFragment childFragment) throws InternalException { ExchangeNode exchangeNode = new ExchangeNode(ctx_.getNextNodeId()); exchangeNode.addChild(childFragment.getPlanRoot(), false); exchangeNode.init(ctx_.getRootAnalyzer()); node.setChild(childIdx, exchangeNode); childFragment.setDestination(exchangeNode); }
/** * Replace node's child at index childIdx with an ExchangeNode that receives its * input from childFragment. */
Replace node's child at index childIdx with an ExchangeNode that receives its input from childFragment
connectChildFragment
{ "repo_name": "cgvarela/Impala", "path": "fe/src/main/java/com/cloudera/impala/planner/DistributedPlanner.java", "license": "apache-2.0", "size": 43211 }
[ "com.cloudera.impala.common.InternalException" ]
import com.cloudera.impala.common.InternalException;
import com.cloudera.impala.common.*;
[ "com.cloudera.impala" ]
com.cloudera.impala;
904,403
public void merge(CoverageData coverageData) { ClassData classData = (ClassData)coverageData; // If objects contain data for different classes then don't merge if (!this.getName().equals(classData.getName())) return; getBothLocks(classData); try { super.merge(coverageData); // We can't just call this.branches.putAll(classData.branches); // Why not? If we did a putAll, then the LineData objects from // the coverageData class would overwrite the LineData objects // that are already in "this.branches" And we don't need to // update the LineData objects that are already in this.branches // because they are shared between this.branches and this.children, // so the object hit counts will be moved when we called // super.merge() above. for (Iterator<Integer> iter = classData.branches.keySet().iterator(); iter.hasNext();) { Integer key = iter.next(); if (!this.branches.containsKey(key)) { this.branches.put(key, classData.branches.get(key)); } } this.containsInstrumentationInfo |= classData.containsInstrumentationInfo; this.methodNamesAndDescriptors.addAll(classData .getMethodNamesAndDescriptors()); if (classData.sourceFileName != null) this.sourceFileName = classData.sourceFileName; } finally { lock.unlock(); classData.lock.unlock(); } }
void function(CoverageData coverageData) { ClassData classData = (ClassData)coverageData; if (!this.getName().equals(classData.getName())) return; getBothLocks(classData); try { super.merge(coverageData); for (Iterator<Integer> iter = classData.branches.keySet().iterator(); iter.hasNext();) { Integer key = iter.next(); if (!this.branches.containsKey(key)) { this.branches.put(key, classData.branches.get(key)); } } this.containsInstrumentationInfo |= classData.containsInstrumentationInfo; this.methodNamesAndDescriptors.addAll(classData .getMethodNamesAndDescriptors()); if (classData.sourceFileName != null) this.sourceFileName = classData.sourceFileName; } finally { lock.unlock(); classData.lock.unlock(); } }
/** * Merge some existing instrumentation with this instrumentation. * * @param coverageData Some existing coverage data. */
Merge some existing instrumentation with this instrumentation
merge
{ "repo_name": "simplegeo/cobertura", "path": "src/net/sourceforge/cobertura/coveragedata/ClassData.java", "license": "gpl-2.0", "size": 15642 }
[ "java.util.Iterator" ]
import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
2,361,035
static byte[] getNTLM2SessionResponse(String password, byte[] challenge, byte[] clientChallenge) throws NTLMEngineException { try { byte[] ntlmHash = ntlmHash(password); // Look up MD5 algorithm (was necessary on jdk 1.4.2) // This used to be needed, but java 1.5.0_07 includes the MD5 // algorithm (finally) // Class x = Class.forName("gnu.crypto.hash.MD5"); // Method updateMethod = x.getMethod("update",new // Class[]{byte[].class}); // Method digestMethod = x.getMethod("digest",new Class[0]); // Object mdInstance = x.newInstance(); // updateMethod.invoke(mdInstance,new Object[]{challenge}); // updateMethod.invoke(mdInstance,new Object[]{clientChallenge}); // byte[] digest = (byte[])digestMethod.invoke(mdInstance,new // Object[0]); MessageDigest md5 = MessageDigest.getInstance("MD5"); md5.update(challenge); md5.update(clientChallenge); byte[] digest = md5.digest(); byte[] sessionHash = new byte[8]; System.arraycopy(digest, 0, sessionHash, 0, 8); return lmResponse(ntlmHash, sessionHash); } catch (Exception e) { if (e instanceof NTLMEngineException) throw (NTLMEngineException) e; throw new NTLMEngineException(e.getMessage(), e); } }
static byte[] getNTLM2SessionResponse(String password, byte[] challenge, byte[] clientChallenge) throws NTLMEngineException { try { byte[] ntlmHash = ntlmHash(password); MessageDigest md5 = MessageDigest.getInstance("MD5"); md5.update(challenge); md5.update(clientChallenge); byte[] digest = md5.digest(); byte[] sessionHash = new byte[8]; System.arraycopy(digest, 0, sessionHash, 0, 8); return lmResponse(ntlmHash, sessionHash); } catch (Exception e) { if (e instanceof NTLMEngineException) throw (NTLMEngineException) e; throw new NTLMEngineException(e.getMessage(), e); } }
/** * Calculates the NTLM2 Session Response for the given challenge, using the * specified password and client challenge. * * @param password * The user's password. * @param challenge * The Type 2 challenge from the server. * @param clientChallenge * The random 8-byte client challenge. * * @return The NTLM2 Session Response. This is placed in the NTLM response * field of the Type 3 message; the LM response field contains the * client challenge, null-padded to 24 bytes. */
Calculates the NTLM2 Session Response for the given challenge, using the specified password and client challenge
getNTLM2SessionResponse
{ "repo_name": "0x90sled/droidtowers", "path": "main/source/org/apach3/http/impl/auth/NTLMEngineImpl.java", "license": "mit", "size": 51410 }
[ "java.security.MessageDigest" ]
import java.security.MessageDigest;
import java.security.*;
[ "java.security" ]
java.security;
1,725,301
protected boolean isValidAccount(Account account, AccountCreationStatusDTO accountCreationStatus) { boolean isValid = true; String errorMessage = ""; String strSize = ""; if (account == null) { //account was not created setFailStatus(accountCreationStatus, KcConstants.AccountCreationService.ERROR_KC_DOCUMENT_ACCOUNT_GENERATION_PROBLEM); return false; } if (StringUtils.isBlank(account.getChartOfAccountsCode()) || StringUtils.isBlank(account.getAccountNumber())){ //chart of accounts or account number blank setFailStatus(accountCreationStatus, KcConstants.AccountCreationService.ERROR_KC_DOCUMENT_ACCOUNT_MISSING_CHART_OR_ACCT_NBR); return false; } if (!isValidChartCode(account.getChartOfAccountsCode())) { //the chart of accounts code is not valid setFailStatus( accountCreationStatus, KcConstants.AccountCreationService.AUTOMATCICG_ACCOUNT_MAINTENANCE_CHART_NOT_DEFINED); return false; } if (!isValidAccountNumberLength(account.getAccountNumber(), accountCreationStatus)){ //the account number is an inappropriate length //error set in method return false; } if (!checkUniqueAccountNumber(account.getAccountNumber())){ //account is not unique setFailStatus( accountCreationStatus, KcUtils.getErrorMessage(KFSKeyConstants.ERROR_DOCUMENT_ACCMAINT_ACCT_NMBR_NOT_UNIQUE, new String[]{account.getAccountNumber()})); return false; } if (isValidChartAccount(account.getChartOfAccountsCode(), account.getAccountNumber())) { //the chart and account already exist setFailStatus( accountCreationStatus, KcConstants.AccountCreationService.AUTOMATCICG_ACCOUNT_MAINTENANCE_ACCT_ALREADY_DEFINED); return false; } if (!checkAccountNumberPrefix(account.getAccountNumber(), accountCreationStatus)){ //account begins with invalid prefix //error set in method return false; } return isValid; }
boolean function(Account account, AccountCreationStatusDTO accountCreationStatus) { boolean isValid = true; String errorMessage = STR"; if (account == null) { setFailStatus(accountCreationStatus, KcConstants.AccountCreationService.ERROR_KC_DOCUMENT_ACCOUNT_GENERATION_PROBLEM); return false; } if (StringUtils.isBlank(account.getChartOfAccountsCode()) StringUtils.isBlank(account.getAccountNumber())){ setFailStatus(accountCreationStatus, KcConstants.AccountCreationService.ERROR_KC_DOCUMENT_ACCOUNT_MISSING_CHART_OR_ACCT_NBR); return false; } if (!isValidChartCode(account.getChartOfAccountsCode())) { setFailStatus( accountCreationStatus, KcConstants.AccountCreationService.AUTOMATCICG_ACCOUNT_MAINTENANCE_CHART_NOT_DEFINED); return false; } if (!isValidAccountNumberLength(account.getAccountNumber(), accountCreationStatus)){ return false; } if (!checkUniqueAccountNumber(account.getAccountNumber())){ setFailStatus( accountCreationStatus, KcUtils.getErrorMessage(KFSKeyConstants.ERROR_DOCUMENT_ACCMAINT_ACCT_NMBR_NOT_UNIQUE, new String[]{account.getAccountNumber()})); return false; } if (isValidChartAccount(account.getChartOfAccountsCode(), account.getAccountNumber())) { setFailStatus( accountCreationStatus, KcConstants.AccountCreationService.AUTOMATCICG_ACCOUNT_MAINTENANCE_ACCT_ALREADY_DEFINED); return false; } if (!checkAccountNumberPrefix(account.getAccountNumber(), accountCreationStatus)){ return false; } return isValid; }
/** * Check to see if the main link between KFS and KC is valid, namely the chart and account number. * If these two values have some kind of error, then we don't want to generate an Account document * and we'll want to return a failure to KC. * * * @param account * @param accountCreationStatus * @return */
Check to see if the main link between KFS and KC is valid, namely the chart and account number. If these two values have some kind of error, then we don't want to generate an Account document and we'll want to return a failure to KC
isValidAccount
{ "repo_name": "ua-eas/kfs-devops-automation-fork", "path": "kfs-kc/src/main/java/org/kuali/kfs/module/external/kc/service/impl/AccountCreationServiceImpl.java", "license": "agpl-3.0", "size": 39076 }
[ "org.apache.commons.lang.StringUtils", "org.kuali.kfs.coa.businessobject.Account", "org.kuali.kfs.integration.cg.dto.AccountCreationStatusDTO", "org.kuali.kfs.module.external.kc.KcConstants", "org.kuali.kfs.module.external.kc.service.AccountCreationService", "org.kuali.kfs.module.external.kc.util.KcUtils", "org.kuali.kfs.sys.KFSKeyConstants" ]
import org.apache.commons.lang.StringUtils; import org.kuali.kfs.coa.businessobject.Account; import org.kuali.kfs.integration.cg.dto.AccountCreationStatusDTO; import org.kuali.kfs.module.external.kc.KcConstants; import org.kuali.kfs.module.external.kc.service.AccountCreationService; import org.kuali.kfs.module.external.kc.util.KcUtils; import org.kuali.kfs.sys.KFSKeyConstants;
import org.apache.commons.lang.*; import org.kuali.kfs.coa.businessobject.*; import org.kuali.kfs.integration.cg.dto.*; import org.kuali.kfs.module.external.kc.*; import org.kuali.kfs.module.external.kc.service.*; import org.kuali.kfs.module.external.kc.util.*; import org.kuali.kfs.sys.*;
[ "org.apache.commons", "org.kuali.kfs" ]
org.apache.commons; org.kuali.kfs;
227,067
private void startOpenmrs(ServletContext servletContext) throws Exception { // start spring // after this point, all errors need to also call: contextLoader.closeWebApplicationContext(event.getServletContext()) // logic copied from org.springframework.web.context.ContextLoaderListener ContextLoader contextLoader = new ContextLoader(); contextLoader.initWebApplicationContext(servletContext); try { WebDaemon.startOpenmrs(servletContext); } catch (Exception exception) { contextLoader.closeWebApplicationContext(servletContext); throw exception; } }
void function(ServletContext servletContext) throws Exception { ContextLoader contextLoader = new ContextLoader(); contextLoader.initWebApplicationContext(servletContext); try { WebDaemon.startOpenmrs(servletContext); } catch (Exception exception) { contextLoader.closeWebApplicationContext(servletContext); throw exception; } }
/** * Do everything to get openmrs going. * * @param servletContext the servletContext from the filterconfig * @see Listener#startOpenmrs(ServletContext) */
Do everything to get openmrs going
startOpenmrs
{ "repo_name": "nilusi/Legacy-UI", "path": "web/src/main/java/org/openmrs/web/filter/update/UpdateFilter.java", "license": "mpl-2.0", "size": 25719 }
[ "javax.servlet.ServletContext", "org.openmrs.web.WebDaemon", "org.springframework.web.context.ContextLoader" ]
import javax.servlet.ServletContext; import org.openmrs.web.WebDaemon; import org.springframework.web.context.ContextLoader;
import javax.servlet.*; import org.openmrs.web.*; import org.springframework.web.context.*;
[ "javax.servlet", "org.openmrs.web", "org.springframework.web" ]
javax.servlet; org.openmrs.web; org.springframework.web;
2,374,722
Trace getTrace(String tenantId, String id);
Trace getTrace(String tenantId, String id);
/** * This method returns the end to end trace associated with the * supplied id. * * @param tenantId The tenant * @param id The id * @return The end to end trace, or null if not found */
This method returns the end to end trace associated with the supplied id
getTrace
{ "repo_name": "hawkular/hawkular-btm", "path": "api/src/main/java/org/hawkular/apm/api/services/TraceService.java", "license": "apache-2.0", "size": 2268 }
[ "org.hawkular.apm.api.model.trace.Trace" ]
import org.hawkular.apm.api.model.trace.Trace;
import org.hawkular.apm.api.model.trace.*;
[ "org.hawkular.apm" ]
org.hawkular.apm;
1,914,120
protected final IndexSearcher searcher() { return searcher; } /** * Would using index metadata like {@link IndexReader#docFreq}
final IndexSearcher function() { return searcher; } /** * Would using index metadata like {@link IndexReader#docFreq}
/** * Searcher that this filter is targeting. */
Searcher that this filter is targeting
searcher
{ "repo_name": "ern/elasticsearch", "path": "server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java", "license": "apache-2.0", "size": 10380 }
[ "org.apache.lucene.index.IndexReader", "org.apache.lucene.search.IndexSearcher" ]
import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.index.*; import org.apache.lucene.search.*;
[ "org.apache.lucene" ]
org.apache.lucene;
2,248,904
public Component getTableCellRendererComponent(final JTable table, final Object value, final boolean isSelected, final boolean hasFocus, final int row, final int column) { setFont(null); final NumberFormat nf = NumberFormat.getNumberInstance(); if (value != null) { setText(nf.format(value)); } else { setText(""); } if (isSelected) { setBackground(table.getSelectionBackground()); } else { setBackground(null); } return this; }
Component function(final JTable table, final Object value, final boolean isSelected, final boolean hasFocus, final int row, final int column) { setFont(null); final NumberFormat nf = NumberFormat.getNumberInstance(); if (value != null) { setText(nf.format(value)); } else { setText(""); } if (isSelected) { setBackground(table.getSelectionBackground()); } else { setBackground(null); } return this; }
/** * Returns itself as the renderer. Supports the TableCellRenderer interface. * * @param table the table. * @param value the data to be rendered. * @param isSelected a boolean that indicates whether or not the cell is * selected. * @param hasFocus a boolean that indicates whether or not the cell has * the focus. * @param row the (zero-based) row index. * @param column the (zero-based) column index. * * @return the component that can render the contents of the cell. */
Returns itself as the renderer. Supports the TableCellRenderer interface
getTableCellRendererComponent
{ "repo_name": "jfree/jcommon", "path": "src/main/java/org/jfree/ui/NumberCellRenderer.java", "license": "lgpl-2.1", "size": 3633 }
[ "java.awt.Component", "java.text.NumberFormat", "javax.swing.JTable" ]
import java.awt.Component; import java.text.NumberFormat; import javax.swing.JTable;
import java.awt.*; import java.text.*; import javax.swing.*;
[ "java.awt", "java.text", "javax.swing" ]
java.awt; java.text; javax.swing;
2,742,450
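A hedged usage sketch for a renderer like the one above: the renderer is installed as the JTable default for Number cells. The class below is a self-contained stand-in rather than the org.jfree.ui implementation, and assumes only the standard Swing DefaultTableCellRenderer and NumberFormat APIs.

import java.awt.Component;
import java.text.NumberFormat;
import javax.swing.JTable;
import javax.swing.table.DefaultTableCellRenderer;

class LocaleNumberRenderer extends DefaultTableCellRenderer {
    @Override
    public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected,
                                                   boolean hasFocus, int row, int column) {
        // Let the superclass handle selection background and focus border, then format the number.
        super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
        setText(value == null ? "" : NumberFormat.getNumberInstance().format(value));
        return this;
    }
}

class NumberRendererUsage {
    static void install(JTable table) {
        // Register the renderer for every column whose column class is Number (hypothetical usage).
        table.setDefaultRenderer(Number.class, new LocaleNumberRenderer());
    }
}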
public CellReference<?> getTargetCell() { return targetCell; }
CellReference<?> function() { return targetCell; }
/** * Gets the reference of target cell for this event. * * @return target cell */
Gets the reference of target cell for this event
getTargetCell
{ "repo_name": "travisfw/vaadin", "path": "client/src/com/vaadin/client/widgets/Grid.java", "license": "apache-2.0", "size": 285859 }
[ "com.vaadin.client.widget.grid.CellReference" ]
import com.vaadin.client.widget.grid.CellReference;
import com.vaadin.client.widget.grid.*;
[ "com.vaadin.client" ]
com.vaadin.client;
2,033,110
private boolean compiledByAjc(Class<?> clazz) { // The AJTypeSystem goes to great lengths to provide a uniform appearance between code-style and // annotation-style aspects. Therefore there is no 'clean' way to tell them apart. Here we rely on // an implementation detail of the AspectJ compiler. for (Field field : clazz.getDeclaredFields()) { if (field.getName().startsWith(AJC_MAGIC)) { return true; } } return false; }
boolean function(Class<?> clazz) { for (Field field : clazz.getDeclaredFields()) { if (field.getName().startsWith(AJC_MAGIC)) { return true; } } return false; }
/** * We need to detect this as "code-style" AspectJ aspects should not be * interpreted by Spring AOP. */
We need to detect this as "code-style" AspectJ aspects should not be interpreted by Spring AOP
compiledByAjc
{ "repo_name": "deathspeeder/class-guard", "path": "spring-framework-3.2.x/spring-aop/src/main/java/org/springframework/aop/aspectj/annotation/AbstractAspectJAdvisorFactory.java", "license": "gpl-2.0", "size": 11923 }
[ "java.lang.reflect.Field" ]
import java.lang.reflect.Field;
import java.lang.reflect.*;
[ "java.lang" ]
java.lang;
863,844
public void translationChanged(Translation translation, Action action) throws Exception;
void function(Translation translation, Action action) throws Exception;
/** * Translation changed. * * @param translation the translation * @param action the action * @throws Exception the exception */
Translation changed
translationChanged
{ "repo_name": "WestCoastInformatics/ihtsdo-refset-tool", "path": "services/src/main/java/org/ihtsdo/otf/refset/services/handlers/WorkflowListener.java", "license": "apache-2.0", "size": 2902 }
[ "org.ihtsdo.otf.refset.Translation" ]
import org.ihtsdo.otf.refset.Translation;
import org.ihtsdo.otf.refset.*;
[ "org.ihtsdo.otf" ]
org.ihtsdo.otf;
794,279
public static void recoverSegmentIfRequired(String segmentDir) throws IOException { if (FileFactory.isFileExist(segmentDir)) { String indexName = CarbonTablePath.getCarbonStreamIndexFileName(); String indexPath = segmentDir + File.separator + indexName; CarbonFile index = FileFactory.getCarbonFile(indexPath); CarbonFile[] files = listDataFiles(segmentDir); // TODO better to check backup index at first // index file exists if (index.exists()) { // data file exists if (files.length > 0) { CarbonIndexFileReader indexReader = new CarbonIndexFileReader(); try { // map block index indexReader.openThriftReader(indexPath); Map<String, Long> tableSizeMap = new HashMap<>(); while (indexReader.hasNext()) { BlockIndex blockIndex = indexReader.readBlockIndexInfo(); tableSizeMap.put(blockIndex.getFile_name(), blockIndex.getFile_size()); } // recover each file for (CarbonFile file : files) { Long size = tableSizeMap.get(file.getName()); if (null == size || size == 0) { file.delete(); } else if (size < file.getSize()) { FileFactory.truncateFile(file.getCanonicalPath(), size); } } } finally { indexReader.closeThriftReader(); } } } else { if (files.length > 0) { for (CarbonFile file : files) { file.delete(); } } } } }
static void function(String segmentDir) throws IOException { if (FileFactory.isFileExist(segmentDir)) { String indexName = CarbonTablePath.getCarbonStreamIndexFileName(); String indexPath = segmentDir + File.separator + indexName; CarbonFile index = FileFactory.getCarbonFile(indexPath); CarbonFile[] files = listDataFiles(segmentDir); if (index.exists()) { if (files.length > 0) { CarbonIndexFileReader indexReader = new CarbonIndexFileReader(); try { indexReader.openThriftReader(indexPath); Map<String, Long> tableSizeMap = new HashMap<>(); while (indexReader.hasNext()) { BlockIndex blockIndex = indexReader.readBlockIndexInfo(); tableSizeMap.put(blockIndex.getFile_name(), blockIndex.getFile_size()); } for (CarbonFile file : files) { Long size = tableSizeMap.get(file.getName()); if (null == size size == 0) { file.delete(); } else if (size < file.getSize()) { FileFactory.truncateFile(file.getCanonicalPath(), size); } } } finally { indexReader.closeThriftReader(); } } } else { if (files.length > 0) { for (CarbonFile file : files) { file.delete(); } } } } }
/** * Check the health of the stream segment and try to recover the segment from a job fault. * This method will be invoked in the following scenarios: * 1. at the beginning of streaming (StreamSinkFactory.getStreamSegmentId) * 2. after a job failure (CarbonAppendableStreamSink.writeDataFileJob) */
Check the health of the stream segment and try to recover the segment from a job fault. This method will be invoked in the following scenarios: 1. at the beginning of streaming (StreamSinkFactory.getStreamSegmentId) 2. after a job failure (CarbonAppendableStreamSink.writeDataFileJob)
recoverSegmentIfRequired
{ "repo_name": "jackylk/incubator-carbondata", "path": "streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java", "license": "apache-2.0", "size": 27844 }
[ "java.io.File", "java.io.IOException", "java.util.HashMap", "java.util.Map", "org.apache.carbondata.core.datastore.filesystem.CarbonFile", "org.apache.carbondata.core.datastore.impl.FileFactory", "org.apache.carbondata.core.reader.CarbonIndexFileReader", "org.apache.carbondata.core.util.path.CarbonTablePath", "org.apache.carbondata.format.BlockIndex" ]
import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.Map; import org.apache.carbondata.core.datastore.filesystem.CarbonFile; import org.apache.carbondata.core.datastore.impl.FileFactory; import org.apache.carbondata.core.reader.CarbonIndexFileReader; import org.apache.carbondata.core.util.path.CarbonTablePath; import org.apache.carbondata.format.BlockIndex;
import java.io.*; import java.util.*; import org.apache.carbondata.core.datastore.filesystem.*; import org.apache.carbondata.core.datastore.impl.*; import org.apache.carbondata.core.reader.*; import org.apache.carbondata.core.util.path.*; import org.apache.carbondata.format.*;
[ "java.io", "java.util", "org.apache.carbondata" ]
java.io; java.util; org.apache.carbondata;
2,243,373
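The recovery rule in the record above (delete a data file the index does not account for, truncate one that grew past its indexed size) can be sketched independently of CarbonData's FileFactory. The version below is an illustrative approximation using plain java.io, not the project's API.

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Map;

class StreamSegmentRecoverySketch {
    // Reconcile one data file against the sizes recorded in the segment's index.
    static void reconcile(File dataFile, Map<String, Long> indexedSizes) throws IOException {
        Long size = indexedSizes.get(dataFile.getName());
        if (size == null || size == 0L) {
            // The index never committed this file: drop it.
            dataFile.delete();
        } else if (size < dataFile.length()) {
            // The file has uncommitted bytes at the end: truncate back to the indexed size.
            try (RandomAccessFile raf = new RandomAccessFile(dataFile, "rw")) {
                raf.setLength(size);
            }
        }
    }
}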
public void setRepositoryResolver(RepositoryResolver<DaemonClient> resolver) { repositoryResolver = resolver; }
void function(RepositoryResolver<DaemonClient> resolver) { repositoryResolver = resolver; }
/** * Sets the resolver that locates repositories by name. * @param resolver the resolver instance. */
Sets the resolver that locates repositories by name
setRepositoryResolver
{ "repo_name": "mbiarnes/uberfire", "path": "uberfire-nio2-backport/uberfire-nio2-impls/uberfire-nio2-jgit/src/main/java/org/uberfire/java/nio/fs/jgit/daemon/git/Daemon.java", "license": "apache-2.0", "size": 12624 }
[ "org.eclipse.jgit.transport.resolver.RepositoryResolver" ]
import org.eclipse.jgit.transport.resolver.RepositoryResolver;
import org.eclipse.jgit.transport.resolver.*;
[ "org.eclipse.jgit" ]
org.eclipse.jgit;
2,250,400
@ServiceMethod(returns = ReturnType.SINGLE) public List<AvailableServiceTierInner> listByWorkspace(String resourceGroupName, String workspaceName) { return listByWorkspaceAsync(resourceGroupName, workspaceName).block(); }
@ServiceMethod(returns = ReturnType.SINGLE) List<AvailableServiceTierInner> function(String resourceGroupName, String workspaceName) { return listByWorkspaceAsync(resourceGroupName, workspaceName).block(); }
/** * Gets the available service tiers for the workspace. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the available service tiers for the workspace. */
Gets the available service tiers for the workspace
listByWorkspace
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/loganalytics/azure-resourcemanager-loganalytics/src/main/java/com/azure/resourcemanager/loganalytics/implementation/AvailableServiceTiersClientImpl.java", "license": "mit", "size": 10728 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.resourcemanager.loganalytics.fluent.models.AvailableServiceTierInner", "java.util.List" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.loganalytics.fluent.models.AvailableServiceTierInner; import java.util.List;
import com.azure.core.annotation.*; import com.azure.resourcemanager.loganalytics.fluent.models.*; import java.util.*;
[ "com.azure.core", "com.azure.resourcemanager", "java.util" ]
com.azure.core; com.azure.resourcemanager; java.util;
1,227,909
@DoesServiceRequest public boolean exists() throws StorageException { return this.exists(null , null , null ); }
boolean function() throws StorageException { return this.exists(null , null , null ); }
/** * Returns a value that indicates whether the container exists. * * @return <code>true</code> if the container exists, otherwise <code>false</code>. * * @throws StorageException * If a storage service error occurred. */
Returns a value that indicates whether the container exists
exists
{ "repo_name": "emgerner-msft/azure-storage-android", "path": "microsoft-azure-storage/src/com/microsoft/azure/storage/blob/CloudBlobContainer.java", "license": "apache-2.0", "size": 103657 }
[ "com.microsoft.azure.storage.StorageException" ]
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.*;
[ "com.microsoft.azure" ]
com.microsoft.azure;
1,661,750
final String MD5 = "MD5"; try { // Create MD5 Hash MessageDigest digest = java.security.MessageDigest .getInstance(MD5); digest.update(inputString.getBytes()); byte messageDigest[] = digest.digest(); // Create Hex String StringBuilder hexString = new StringBuilder(); for (byte aMessageDigest : messageDigest) { String h = Integer.toHexString(0xFF & aMessageDigest); while (h.length() < 2) h = "0" + h; hexString.append(h); } return hexString.toString(); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } return ""; }
final String MD5 = "MD5"; try { MessageDigest digest = java.security.MessageDigest .getInstance(MD5); digest.update(inputString.getBytes()); byte messageDigest[] = digest.digest(); StringBuilder hexString = new StringBuilder(); for (byte aMessageDigest : messageDigest) { String h = Integer.toHexString(0xFF & aMessageDigest); while (h.length() < 2) h = "0" + h; hexString.append(h); } return hexString.toString(); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } return ""; }
/** * Note: This function is no longer used. * This function generates md5 hash of the input string * @param inputString * @return md5 hash of the input string */
Note: This function is no longer used. This function generates md5 hash of the input string
md5
{ "repo_name": "invalidred/Cross-platform-AES-encryption", "path": "Android/CryptLib.java", "license": "apache-2.0", "size": 8959 }
[ "java.security.MessageDigest", "java.security.NoSuchAlgorithmException" ]
import java.security.MessageDigest; import java.security.NoSuchAlgorithmException;
import java.security.*;
[ "java.security" ]
java.security;
2,732,883
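A hedged, self-contained variant of the hex-digest idea above: String.format("%02x", b) replaces the manual zero-padding loop. This is a sketch using only the JDK, not a drop-in replacement for the CryptLib class.

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

class Md5HexSketch {
    // Returns the MD5 digest of the input as a lowercase hex string.
    static String md5Hex(String input) throws NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("MD5");
        StringBuilder hex = new StringBuilder();
        for (byte b : digest.digest(input.getBytes())) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }
}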
public Number getX(int series, int item) { List bins = getBins(series); HistogramBin bin = (HistogramBin) bins.get(item); double x = (bin.getStartBoundary() + bin.getEndBoundary()) / 2.; return new Double(x); }
Number function(int series, int item) { List bins = getBins(series); HistogramBin bin = (HistogramBin) bins.get(item); double x = (bin.getStartBoundary() + bin.getEndBoundary()) / 2.; return new Double(x); }
/** * Returns the X value for a bin. This value won't be used for plotting * histograms, since the renderer will ignore it. But other renderers can * use it (for example, you could use the dataset to create a line * chart). * * @param series the series index (in the range <code>0</code> to * <code>getSeriesCount() - 1</code>). * @param item the item index (zero based). * * @return The start value. * * @throws IndexOutOfBoundsException if <code>series</code> is outside the * specified range. */
Returns the X value for a bin. This value won't be used for plotting histograms, since the renderer will ignore it. But other renderers can use it (for example, you could use the dataset to create a line chart)
getX
{ "repo_name": "fluidware/Eastwood-Charts", "path": "source/org/jfree/data/statistics/HistogramDataset.java", "license": "lgpl-2.1", "size": 17761 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
679,186
void replaceChild(@NotNull ASTNode oldChild, @NotNull ASTNode newChild);
void replaceChild(@NotNull ASTNode oldChild, @NotNull ASTNode newChild);
/** * Replaces the specified child node with another node. * * @param oldChild the child node to replace. * @param newChild the node to replace with. */
Replaces the specified child node with another node
replaceChild
{ "repo_name": "ernestp/consulo", "path": "platform/core-api/src/com/intellij/lang/ASTNode.java", "license": "apache-2.0", "size": 9123 }
[ "org.jetbrains.annotations.NotNull" ]
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.*;
[ "org.jetbrains.annotations" ]
org.jetbrains.annotations;
627,113
@Override AssociationType getType();
AssociationType getType();
/** * Type of association represented. * * <p>Override of {@link Property#getType()} which type narrows to {@link AssociationType}. * * @see Property#getType() */
Type of association represented. Override of <code>Property#getType()</code> which type narrows to <code>AssociationType</code>
getType
{ "repo_name": "geotools/geotools", "path": "modules/library/opengis/src/main/java/org/opengis/feature/Association.java", "license": "lgpl-2.1", "size": 4265 }
[ "org.opengis.feature.type.AssociationType" ]
import org.opengis.feature.type.AssociationType;
import org.opengis.feature.type.*;
[ "org.opengis.feature" ]
org.opengis.feature;
631,256
PrintWriter error(String msg);
PrintWriter error(String msg);
/** * An error in the build. * * @return A writer to receive details of the error. Not null. */
An error in the build
error
{ "repo_name": "eclipse/hudson.core", "path": "hudson-core/src/main/java/hudson/model/TaskListener.java", "license": "apache-2.0", "size": 3312 }
[ "java.io.PrintWriter" ]
import java.io.PrintWriter;
import java.io.*;
[ "java.io" ]
java.io;
1,716,569
@Override public Traits identifyUser(String userID, String email, String username) { Traits traits = new Traits(); traits.putEmail(email); traits.putUsername(username); tracker.identify(userID, traits, new Options()); return traits; }
Traits function(String userID, String email, String username) { Traits traits = new Traits(); traits.putEmail(email); traits.putUsername(username); tracker.identify(userID, traits, new Options()); return traits; }
/** * This function is set to identify the user for subsequent calls * * @param userID - User Id from the server * @param email - email of the user * @param username - Username/email that the user uses for signing in * @return A {@link Traits} object populated with provided user info */
This function is set to identify the user for subsequent calls
identifyUser
{ "repo_name": "FDoubleman/wd-edx-android", "path": "VideoLocker/src/main/java/org/edx/mobile/module/analytics/ISegmentImpl.java", "license": "apache-2.0", "size": 31576 }
[ "com.segment.analytics.Options", "com.segment.analytics.Traits" ]
import com.segment.analytics.Options; import com.segment.analytics.Traits;
import com.segment.analytics.*;
[ "com.segment.analytics" ]
com.segment.analytics;
1,106,260
public void addNotificationListener(NotificationListener listener, NotificationFilter filter, Object handback) throws java.lang.IllegalArgumentException { if (MODELMBEAN_LOGGER.isLoggable(Level.TRACE)) { MODELMBEAN_LOGGER.log(Level.TRACE, "Entry"); } if (listener == null) throw new IllegalArgumentException( "notification listener must not be null"); if (generalBroadcaster == null) generalBroadcaster = new NotificationBroadcasterSupport(); generalBroadcaster.addNotificationListener(listener, filter, handback); if (MODELMBEAN_LOGGER.isLoggable(Level.TRACE)) { MODELMBEAN_LOGGER.log(Level.TRACE, "NotificationListener added"); MODELMBEAN_LOGGER.log(Level.TRACE, "Exit"); } }
void function(NotificationListener listener, NotificationFilter filter, Object handback) throws java.lang.IllegalArgumentException { if (MODELMBEAN_LOGGER.isLoggable(Level.TRACE)) { MODELMBEAN_LOGGER.log(Level.TRACE, "Entry"); } if (listener == null) throw new IllegalArgumentException( STR); if (generalBroadcaster == null) generalBroadcaster = new NotificationBroadcasterSupport(); generalBroadcaster.addNotificationListener(listener, filter, handback); if (MODELMBEAN_LOGGER.isLoggable(Level.TRACE)) { MODELMBEAN_LOGGER.log(Level.TRACE, STR); MODELMBEAN_LOGGER.log(Level.TRACE, "Exit"); } }
/** * Registers an object which implements the NotificationListener * interface as a listener. This * object's 'handleNotification()' method will be invoked when any * notification is issued through or by the ModelMBean. This does * not include attributeChangeNotifications. They must be registered * for independently. * * @param listener The listener object which will handles * notifications emitted by the registered MBean. * @param filter The filter object. If null, no filtering will be * performed before handling notifications. * @param handback The context to be sent to the listener with * the notification when a notification is emitted. * * @exception IllegalArgumentException The listener cannot be null. * * @see #removeNotificationListener */
Registers an object which implements the NotificationListener interface as a listener. This object's 'handleNotification()' method will be invoked when any notification is issued through or by the ModelMBean. This does not include attributeChangeNotifications. They must be registered for independently
addNotificationListener
{ "repo_name": "md-5/jdk10", "path": "src/java.management/share/classes/javax/management/modelmbean/RequiredModelMBean.java", "license": "gpl-2.0", "size": 117626 }
[ "java.lang.System", "javax.management.NotificationBroadcasterSupport", "javax.management.NotificationFilter", "javax.management.NotificationListener" ]
import java.lang.System; import javax.management.NotificationBroadcasterSupport; import javax.management.NotificationFilter; import javax.management.NotificationListener;
import java.lang.*; import javax.management.*;
[ "java.lang", "javax.management" ]
java.lang; javax.management;
2,408,076
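A usage sketch for the registration method above, assuming an already constructed RequiredModelMBean. The listener simply prints each notification; the null filter and handback arguments mean "no filtering, no context", which the record permits.

import javax.management.Notification;
import javax.management.NotificationListener;
import javax.management.modelmbean.RequiredModelMBean;

class NotificationRegistrationSketch {
    static void register(RequiredModelMBean mbean) throws Exception {
        NotificationListener listener = new NotificationListener() {
            @Override
            public void handleNotification(Notification notification, Object handback) {
                // Attribute-change notifications require a separate registration, as the javadoc notes.
                System.out.println(notification.getType() + ": " + notification.getMessage());
            }
        };
        mbean.addNotificationListener(listener, null, null);
    }
}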
@Internal public KeySelector<T, KEY> getKeySelector() { return this.keySelector; }
KeySelector<T, KEY> function() { return this.keySelector; }
/** * Gets the key selector that can get the key by which the stream is partitioned from the elements. * @return The key selector for the key. */
Gets the key selector that can get the key by which the stream is partitioned from the elements
getKeySelector
{ "repo_name": "zohar-mizrahi/flink", "path": "flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/KeyedStream.java", "license": "apache-2.0", "size": 34115 }
[ "org.apache.flink.api.java.functions.KeySelector" ]
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.functions.*;
[ "org.apache.flink" ]
org.apache.flink;
1,019,787
public void setAccessSecurity(Tool.AccessSecurity access) { m_accessSecurity = access; }
void function(Tool.AccessSecurity access) { m_accessSecurity = access; }
/** * Set the access security. * * @param access * The new access security setting. */
Set the access security
setAccessSecurity
{ "repo_name": "eemirtekin/Sakai-10.6-TR", "path": "kernel/kernel-impl/src/main/java/org/sakaiproject/tool/impl/ToolImpl.java", "license": "apache-2.0", "size": 7802 }
[ "org.sakaiproject.tool.api.Tool" ]
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.tool.api.*;
[ "org.sakaiproject.tool" ]
org.sakaiproject.tool;
1,432,329
public List<Spawnpoint> getSpectatorSpawns() { return _arena.getSpawns().getAll(ArenaContext.SPECTATOR); }
List<Spawnpoint> function() { return _arena.getSpawns().getAll(ArenaContext.SPECTATOR); }
/** * Get all spectator spawn points. */
Get all spectator spawn points
getSpectatorSpawns
{ "repo_name": "JCThePants/PV-Star", "path": "src/com/jcwhatever/pvs/scripting/ArenaSpawnsApiObject.java", "license": "mit", "size": 8601 }
[ "com.jcwhatever.pvs.api.arena.options.ArenaContext", "com.jcwhatever.pvs.api.spawns.Spawnpoint", "java.util.List" ]
import com.jcwhatever.pvs.api.arena.options.ArenaContext; import com.jcwhatever.pvs.api.spawns.Spawnpoint; import java.util.List;
import com.jcwhatever.pvs.api.arena.options.*; import com.jcwhatever.pvs.api.spawns.*; import java.util.*;
[ "com.jcwhatever.pvs", "java.util" ]
com.jcwhatever.pvs; java.util;
563,409
// app id is mocked out Map<String, String> extraParams = Maps.newHashMap(); extraParams.put("fields", "count"); String resp = getResponse("/appdata/john.doe/@friends/app", "GET", extraParams, null, "application/json"); JSONObject data = getJson(resp).getJSONObject("entry"); assertEquals(3, data.length()); JSONObject janesEntries = data.getJSONObject("jane.doe"); assertEquals(1, janesEntries.length()); assertEquals("7", janesEntries.getString("count")); JSONObject georgesEntries = data.getJSONObject("george.doe"); assertEquals(1, georgesEntries.length()); assertEquals("2", georgesEntries.getString("count")); } /** * Expected response for app data in json: * * { * "entry" : { * "john.doe" : {"count" : "0"}, * } * }
Map<String, String> extraParams = Maps.newHashMap(); extraParams.put(STR, "count"); String resp = getResponse(STR, "GET", extraParams, null, STR); JSONObject data = getJson(resp).getJSONObject("entry"); assertEquals(3, data.length()); JSONObject janesEntries = data.getJSONObject(STR); assertEquals(1, janesEntries.length()); assertEquals("7", janesEntries.getString("count")); JSONObject georgesEntries = data.getJSONObject(STR); assertEquals(1, georgesEntries.length()); assertEquals("2", georgesEntries.getString("count")); } /** * Expected response for app data in json: * * { * "entry" : { * STR : {"count" : "0"}, * } * }
/** * Expected response for app data in json: * * { * "entry" : { * "jane.doe" : {"count" : "7"}, * "george.doe" : {"count" : "2"}, * "maija.m" : {}, // TODO: Should this entry really be included if she doesn't have any data? * } * } * * @throws Exception if test encounters an error */
Expected response for app data in json: { "entry" : { "jane.doe" : {"count" : "7"}, "george.doe" : {"count" : "2"}, } }
testGetFriendsAppDataJson
{ "repo_name": "hgschmie/shindig", "path": "java/social-api/src/test/java/org/apache/shindig/social/dataservice/integration/RestfulJsonDataTest.java", "license": "apache-2.0", "size": 5887 }
[ "com.google.common.collect.Maps", "java.util.Map", "org.json.JSONObject" ]
import com.google.common.collect.Maps; import java.util.Map; import org.json.JSONObject;
import com.google.common.collect.*; import java.util.*; import org.json.*;
[ "com.google.common", "java.util", "org.json" ]
com.google.common; java.util; org.json;
149,436
protected void sequence_SelectedSignalAssignmentStatement(EObject context, SelectedSignalAssignmentStatement semanticObject) { genericSequencer.createSequence(context, semanticObject); }
void function(EObject context, SelectedSignalAssignmentStatement semanticObject) { genericSequencer.createSequence(context, semanticObject); }
/** * Constraint: * ( * label=Label? * postponed?='postponed'? * selected=Expression * target=Target * guarded?='guarded'? * delay=DelayMechanism? * waveform+=ConditionalWaveform * waveform+=ConditionalWaveform* * ) */
Constraint: ( label=Label? postponed?='postponed'? selected=Expression target=Target guarded?='guarded'? delay=DelayMechanism? waveform+=ConditionalWaveform waveform+=ConditionalWaveform )
sequence_SelectedSignalAssignmentStatement
{ "repo_name": "mlanoe/x-vhdl", "path": "plugins/net.mlanoe.language.vhdl.xtext/src-gen/net/mlanoe/language/vhdl/xtext/serializer/AbstractVhdlSemanticSequencer.java", "license": "gpl-3.0", "size": 147569 }
[ "net.mlanoe.language.vhdl.statement.SelectedSignalAssignmentStatement", "org.eclipse.emf.ecore.EObject" ]
import net.mlanoe.language.vhdl.statement.SelectedSignalAssignmentStatement; import org.eclipse.emf.ecore.EObject;
import net.mlanoe.language.vhdl.statement.*; import org.eclipse.emf.ecore.*;
[ "net.mlanoe.language", "org.eclipse.emf" ]
net.mlanoe.language; org.eclipse.emf;
496,243
public Set<Contact> searchContacts(String search) { Set<Contact> contacts = null; try { String request = "from Contact where nom = ? or prenom = ?"; Object[] paramNames = { search, search }; // Execute query @SuppressWarnings("unchecked") List<Contact> list = (List<Contact>) getHibernateTemplate().find(request, paramNames); contacts = new HashSet<>(list); } catch (HibernateException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } return contacts; } // ****************************** Update ********************************
Set<Contact> function(String search) { Set<Contact> contacts = null; try { String request = STR; Object[] paramNames = { search, search }; @SuppressWarnings(STR) List<Contact> list = (List<Contact>) getHibernateTemplate().find(request, paramNames); contacts = new HashSet<>(list); } catch (HibernateException e) { e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } return contacts; }
/** * Search a Contact by : firstname, lastname, country, group name * * @param keywords * @return contacts : a set of Contact */
Search a Contact by : firstname, lastname, country, group name
searchContacts
{ "repo_name": "ooussem/Gestionnaire-de-contacts", "path": "src/DAO/ContactDaoImpl.java", "license": "apache-2.0", "size": 12640 }
[ "java.util.HashSet", "java.util.List", "java.util.Set", "org.hibernate.HibernateException" ]
import java.util.HashSet; import java.util.List; import java.util.Set; import org.hibernate.HibernateException;
import java.util.*; import org.hibernate.*;
[ "java.util", "org.hibernate" ]
java.util; org.hibernate;
1,182,279
@Override public void message(String to, Serializable payload) { getBroker().message(to, getAddress(), payload); } // // query handling //
void function(String to, Serializable payload) { getBroker().message(to, getAddress(), payload); } //
/** * Sends a unidirectional message to an {@link com.caucho.bam.actor.ActorHolder}, * addressed by the Actor's address. * * @param to the target actor's address * @param payload the message payload */
Sends a unidirectional message to an <code>com.caucho.bam.actor.ActorHolder</code>, addressed by the Actor's address
message
{ "repo_name": "dlitz/resin", "path": "modules/resin/src/com/caucho/bam/actor/AbstractActorSender.java", "license": "gpl-2.0", "size": 6443 }
[ "java.io.Serializable" ]
import java.io.Serializable;
import java.io.*;
[ "java.io" ]
java.io;
742,394
Commands.runInNewThread(command); }
Commands.runInNewThread(command); }
/** * Runs the command in a new thread. */
Runs the command in a new thread
run
{ "repo_name": "Team4334/atalibj", "path": "src/edu/first/commands/ThreadedCommand.java", "license": "gpl-3.0", "size": 666 }
[ "edu.first.command.Commands" ]
import edu.first.command.Commands;
import edu.first.command.*;
[ "edu.first.command" ]
edu.first.command;
1,316,775
List<String> getPostProcessorTypeIdentifiers();
List<String> getPostProcessorTypeIdentifiers();
/** * Returns the type identifiers of all post processors. * @return All type identifiers of post processors. */
Returns the type identifiers of all post processors
getPostProcessorTypeIdentifiers
{ "repo_name": "langmo/youscope", "path": "core/api/src/main/java/org/youscope/clientinterfaces/ClientAddonProvider.java", "license": "gpl-2.0", "size": 10029 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,561,488
@Test public void testToString() { System.out.println("toString"); assertEquals(parameterCollector.toString(), "{answer=42&1111111111111111111111111111111111111111111111111111111111111111;F=1029&10000011111;sheldon=73&1111111111111111111111111111111111111111111111111111111111111111}"); }
void function() { System.out.println(STR); assertEquals(parameterCollector.toString(), STR); }
/** * Test of toString method, of class ParameterCollector. */
Test of toString method, of class ParameterCollector
testToString
{ "repo_name": "bengtmartensson/IrpTransmogrifier", "path": "src/test/java/org/harctoolbox/irp/ParameterCollectorNGTest.java", "license": "gpl-3.0", "size": 4441 }
[ "org.testng.Assert" ]
import org.testng.Assert;
import org.testng.*;
[ "org.testng" ]
org.testng;
157,271
static void resizeIconDrawable(Drawable icon) { icon.setBounds(0, 0, sIconTextureWidth, sIconTextureHeight); }
static void resizeIconDrawable(Drawable icon) { icon.setBounds(0, 0, sIconTextureWidth, sIconTextureHeight); }
/** * Resizes an icon drawable to the correct icon size. */
Resizes an icon drawable to the correct icon size
resizeIconDrawable
{ "repo_name": "n2hsu/Nii-Launcher", "path": "src/com/n2hsu/launcher/Utilities.java", "license": "apache-2.0", "size": 11607 }
[ "android.graphics.drawable.Drawable" ]
import android.graphics.drawable.Drawable;
import android.graphics.drawable.*;
[ "android.graphics" ]
android.graphics;
2,012,834
private void initShortestPathsFromRoot() { Integer root = instance.getRoot(); // Create a instance of the problem searching for all the shortest path // from a single node ArcShortestPathOneSourceInstance ash = new ArcShortestPathOneSourceInstance( instance.getGraph()); ash.setCosts(instance.getCosts(true)); ash.setSource(root); // Use the dijkstra algorithm to solve that instance ArcDijkstraOneSourceAlgorithm adij = new ArcDijkstraOneSourceAlgorithm(); adij.setInstance(ash); adij.setComputeOnlyCosts(false); adij.compute(); HashMap<Integer, Integer> aDijCosts = adij.getCosts(); HashMap<Integer, List<Arc>> aDijPaths = adij.getShortestPaths(); // Register all the shortest paths and their cost. Iterator<Integer> it = instance.getGraph().getVerticesIterator(); while (it.hasNext()) { Integer v = it.next(); Arc a = new Arc(root, v, true); costs.put(a, aDijCosts.get(v)); shortestPaths.put(a, aDijPaths.get(v)); } }
void function() { Integer root = instance.getRoot(); ArcShortestPathOneSourceInstance ash = new ArcShortestPathOneSourceInstance( instance.getGraph()); ash.setCosts(instance.getCosts(true)); ash.setSource(root); ArcDijkstraOneSourceAlgorithm adij = new ArcDijkstraOneSourceAlgorithm(); adij.setInstance(ash); adij.setComputeOnlyCosts(false); adij.compute(); HashMap<Integer, Integer> aDijCosts = adij.getCosts(); HashMap<Integer, List<Arc>> aDijPaths = adij.getShortestPaths(); Iterator<Integer> it = instance.getGraph().getVerticesIterator(); while (it.hasNext()) { Integer v = it.next(); Arc a = new Arc(root, v, true); costs.put(a, aDijCosts.get(v)); shortestPaths.put(a, aDijPaths.get(v)); } }
/** * Compute all the shortest paths from the root and register them in the * {@link #shortestPaths} map. Also register their cost in the * {@link #costs} map. */
Compute all the shortest paths from the root and register them in the <code>#shortestPaths</code> map. Also register their cost in the <code>#costs</code> map
initShortestPathsFromRoot
{ "repo_name": "mouton5000/DSTAlgoEvaluation", "path": "src/graphTheory/algorithms/steinerProblems/steinerArborescenceApproximation/RoosAlgorithm.java", "license": "mit", "size": 11847 }
[ "java.util.HashMap", "java.util.Iterator", "java.util.List" ]
import java.util.HashMap; import java.util.Iterator; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,478,847
private void applyDemoOptionsToClusterkrafOptions(com.twotoasters.clusterkraf.Options options) { options.setTransitionDuration(this.options.transitionDuration); Interpolator interpolator = null; try { interpolator = (Interpolator)Class.forName(this.options.transitionInterpolator).newInstance(); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } catch (ClassNotFoundException e) { e.printStackTrace(); } options.setTransitionInterpolator(interpolator); options.setPixelDistanceToJoinCluster(getPixelDistanceToJoinCluster()); options.setZoomToBoundsAnimationDuration(this.options.zoomToBoundsAnimationDuration); options.setShowInfoWindowAnimationDuration(this.options.showInfoWindowAnimationDuration); options.setExpandBoundsFactor(this.options.expandBoundsFactor); options.setSinglePointClickBehavior(this.options.singlePointClickBehavior); options.setClusterClickBehavior(this.options.clusterClickBehavior); options.setClusterInfoWindowClickBehavior(this.options.clusterInfoWindowClickBehavior); options.setZoomToBoundsPadding(getResources().getDrawable(R.drawable.ic_map_pin_cluster).getIntrinsicHeight()); options.setMarkerOptionsChooser(new ToastedMarkerOptionsChooser(this, inputPoints.get(0))); options.setOnMarkerClickDownstreamListener(new ToastedOnMarkerClickDownstreamListener(this)); options.setProcessingListener(this); }
void function(com.twotoasters.clusterkraf.Options options) { options.setTransitionDuration(this.options.transitionDuration); Interpolator interpolator = null; try { interpolator = (Interpolator)Class.forName(this.options.transitionInterpolator).newInstance(); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } catch (ClassNotFoundException e) { e.printStackTrace(); } options.setTransitionInterpolator(interpolator); options.setPixelDistanceToJoinCluster(getPixelDistanceToJoinCluster()); options.setZoomToBoundsAnimationDuration(this.options.zoomToBoundsAnimationDuration); options.setShowInfoWindowAnimationDuration(this.options.showInfoWindowAnimationDuration); options.setExpandBoundsFactor(this.options.expandBoundsFactor); options.setSinglePointClickBehavior(this.options.singlePointClickBehavior); options.setClusterClickBehavior(this.options.clusterClickBehavior); options.setClusterInfoWindowClickBehavior(this.options.clusterInfoWindowClickBehavior); options.setZoomToBoundsPadding(getResources().getDrawable(R.drawable.ic_map_pin_cluster).getIntrinsicHeight()); options.setMarkerOptionsChooser(new ToastedMarkerOptionsChooser(this, inputPoints.get(0))); options.setOnMarkerClickDownstreamListener(new ToastedOnMarkerClickDownstreamListener(this)); options.setProcessingListener(this); }
/** * Applies the sample.SampleActivity.Options chosen in Normal or Advanced * mode menus to the clusterkraf.Options which will be used to construct our * Clusterkraf instance * * @param options */
Applies the sample.SampleActivity.Options chosen in Normal or Advanced mode menus to the clusterkraf.Options which will be used to construct our Clusterkraf instance
applyDemoOptionsToClusterkrafOptions
{ "repo_name": "twotoasters/clusterkraf", "path": "sample/src/com/twotoasters/clusterkraf/sample/SampleActivity.java", "license": "apache-2.0", "size": 10604 }
[ "android.view.animation.Interpolator" ]
import android.view.animation.Interpolator;
import android.view.animation.*;
[ "android.view" ]
android.view;
1,379,217
public boolean isGuestUser() { return OpenCms.getDefaultUsers().isUserGuest(getName()); }
boolean function() { return OpenCms.getDefaultUsers().isUserGuest(getName()); }
/** * Checks if this user is the default guest user.<p> * * @return <code>true</code> if this user is the default guest user */
Checks if this user is the default guest user
isGuestUser
{ "repo_name": "sbonoc/opencms-core", "path": "src/org/opencms/file/CmsUser.java", "license": "lgpl-2.1", "size": 20594 }
[ "org.opencms.main.OpenCms" ]
import org.opencms.main.OpenCms;
import org.opencms.main.*;
[ "org.opencms.main" ]
org.opencms.main;
72,196
public boolean isPrivate() { return (mask & Opcodes.ACC_PRIVATE) != 0; }
boolean function() { return (mask & Opcodes.ACC_PRIVATE) != 0; }
/** * Returns {@code true} if this instance describes {@code private} visibility. * * @return {@code true} if this instance describes {@code private} visibility. */
Returns true if this instance describes private visibility
isPrivate
{ "repo_name": "RobAustin/byte-buddy", "path": "byte-buddy-dep/src/main/java/net/bytebuddy/modifier/Visibility.java", "license": "apache-2.0", "size": 2380 }
[ "org.objectweb.asm.Opcodes" ]
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.*;
[ "org.objectweb.asm" ]
org.objectweb.asm;
1,881,164
@Override public void process(Batch batch, BatchMaker batchMaker) throws StageException { Iterator<Record> it = batch.getRecords(); if (it.hasNext()) { while (it.hasNext()) { Record record = it.next(); try { process(record, batchMaker); } catch (OnRecordErrorException ex) { switch (getContext().getOnErrorRecord()) { case DISCARD: break; case TO_ERROR: getContext().toError(record, ex); break; case STOP_PIPELINE: throw ex; default: throw new IllegalStateException(Utils.format("It should never happen. OnError '{}'", getContext().getOnErrorRecord(), ex)); } } } } else { emptyBatch(batchMaker); } }
void function(Batch batch, BatchMaker batchMaker) throws StageException { Iterator<Record> it = batch.getRecords(); if (it.hasNext()) { while (it.hasNext()) { Record record = it.next(); try { process(record, batchMaker); } catch (OnRecordErrorException ex) { switch (getContext().getOnErrorRecord()) { case DISCARD: break; case TO_ERROR: getContext().toError(record, ex); break; case STOP_PIPELINE: throw ex; default: throw new IllegalStateException(Utils.format(STR, getContext().getOnErrorRecord(), ex)); } } } } else { emptyBatch(batchMaker); } }
/** * Processes the batch by calling the {@link #process(Record, BatchMaker)} method for each record in the batch. * <p/> * If the calls to the {@link #process(Record, BatchMaker)} throws an {@link OnRecordErrorException}, the error * handling is done based on the stage 'on record error' configuration, discarded, sent to error, or stopping the * pipeline. * * @param batch the batch of records to process. * @param batchMaker records created by the <code>Processor</code> stage must be added to the <code>BatchMaker</code> * for them to be available to the rest of the pipeline. * @throws StageException if the <code>Processor</code> had an error while processing records. */
Processes the batch by calling the <code>#process(Record, BatchMaker)</code> method for each record in the batch. If the calls to the <code>#process(Record, BatchMaker)</code> throws an <code>OnRecordErrorException</code>, the error handling is done based on the stage 'on record error' configuration, discarded, sent to error, or stopping the pipeline
process
{ "repo_name": "streamsets/datacollector-api", "path": "src/main/java/com/streamsets/pipeline/api/base/RecordProcessor.java", "license": "apache-2.0", "size": 3956 }
[ "com.streamsets.pipeline.api.Batch", "com.streamsets.pipeline.api.BatchMaker", "com.streamsets.pipeline.api.Record", "com.streamsets.pipeline.api.StageException", "com.streamsets.pipeline.api.impl.Utils", "java.util.Iterator" ]
import com.streamsets.pipeline.api.Batch; import com.streamsets.pipeline.api.BatchMaker; import com.streamsets.pipeline.api.Record; import com.streamsets.pipeline.api.StageException; import com.streamsets.pipeline.api.impl.Utils; import java.util.Iterator;
import com.streamsets.pipeline.api.*; import com.streamsets.pipeline.api.impl.*; import java.util.*;
[ "com.streamsets.pipeline", "java.util" ]
com.streamsets.pipeline; java.util;
852,730
public Set<String> getKnownMessageEncodings() { return decompressors.keySet(); }
Set<String> function() { return decompressors.keySet(); }
/** * Provides a list of all message encodings that have decompressors available. */
Provides a list of all message encodings that have decompressors available
getKnownMessageEncodings
{ "repo_name": "ejona86/grpc-java", "path": "api/src/main/java/io/grpc/DecompressorRegistry.java", "license": "apache-2.0", "size": 5276 }
[ "java.util.Set" ]
import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
560,567
@Override public Collection<? extends PValue> expand() { List<PCollection<?>> retval = new ArrayList<>(); for (TaggedKeyedPCollection<K, ?> taggedPCollection : keyedCollections) { retval.add(taggedPCollection.pCollection); } return retval; }
Collection<? extends PValue> function() { List<PCollection<?>> retval = new ArrayList<>(); for (TaggedKeyedPCollection<K, ?> taggedPCollection : keyedCollections) { retval.add(taggedPCollection.pCollection); } return retval; }
/** * Expands the component PCollections, stripping off any tag-specific * information. */
Expands the component PCollections, stripping off any tag-specific information
expand
{ "repo_name": "haonaturel/DataflowJavaSDK", "path": "sdk/src/main/java/com/google/cloud/dataflow/sdk/transforms/join/KeyedPCollectionTuple.java", "license": "apache-2.0", "size": 6980 }
[ "com.google.cloud.dataflow.sdk.values.PCollection", "com.google.cloud.dataflow.sdk.values.PValue", "java.util.ArrayList", "java.util.Collection", "java.util.List" ]
import com.google.cloud.dataflow.sdk.values.PCollection; import com.google.cloud.dataflow.sdk.values.PValue; import java.util.ArrayList; import java.util.Collection; import java.util.List;
import com.google.cloud.dataflow.sdk.values.*; import java.util.*;
[ "com.google.cloud", "java.util" ]
com.google.cloud; java.util;
580,098
public ASHandler getFirstChainedAs(){ return firstAsHandler; }
ASHandler function(){ return firstAsHandler; }
/** * It returns the first AS Handler (not IM-SCF!) in the chain. */
It returns the first AS Handler (not IM-SCF!) in the chain
getFirstChainedAs
{ "repo_name": "OvooOpenSourceCommunity/sip-service-broker", "path": "ra/src/main/java/pl/ovoo/slee/resource/sip/broker/service/sessionfsm/SessionContext.java", "license": "agpl-3.0", "size": 10349 }
[ "pl.ovoo.slee.resource.sip.broker.service.ASHandler" ]
import pl.ovoo.slee.resource.sip.broker.service.ASHandler;
import pl.ovoo.slee.resource.sip.broker.service.*;
[ "pl.ovoo.slee" ]
pl.ovoo.slee;
2,335,671
@Override public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) { parseIssues.add(new ParseIssue(line, charPositionInLine, msg, currentFileName, ParseIssueType.SYNTAX_ERROR)); try { setAtLeastOneError(true); // Print error messages with file name if (currentFileName == null) log.error("line " + line + ":" + charPositionInLine + " " + msg); else { String fileName = currentFileName; log.error(fileName + " line " + line + ":" + charPositionInLine + " " + msg); } } catch (Exception e1) { log.error("ERROR: while customizing error message:" + e1); } }
void function(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) { parseIssues.add(new ParseIssue(line, charPositionInLine, msg, currentFileName, ParseIssueType.SYNTAX_ERROR)); try { setAtLeastOneError(true); if (currentFileName == null) log.error(STR + line + ":" + charPositionInLine + " " + msg); else { String fileName = currentFileName; log.error(fileName + STR + line + ":" + charPositionInLine + " " + msg); } } catch (Exception e1) { log.error(STR + e1); } }
/** * Syntax error occurred. Add the error to the list of parse issues. */
Syntax error occurred. Add the error to the list of parse issues
syntaxError
{ "repo_name": "iyounus/incubator-systemml", "path": "src/main/java/org/apache/sysml/parser/common/CustomErrorListener.java", "license": "apache-2.0", "size": 10650 }
[ "org.antlr.v4.runtime.RecognitionException", "org.antlr.v4.runtime.Recognizer" ]
import org.antlr.v4.runtime.RecognitionException; import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.*;
[ "org.antlr.v4" ]
org.antlr.v4;
902,057
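A minimal ANTLR 4 listener in the spirit of the record above; it extends BaseErrorListener (a standard ANTLR base class) and only counts and prints errors, omitting the SystemML-specific ParseIssue bookkeeping. Attaching it with parser.removeErrorListeners() followed by parser.addErrorListener(...) is the usual pattern.

import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;

class CountingErrorListener extends BaseErrorListener {
    private int errorCount;

    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
                            int charPositionInLine, String msg, RecognitionException e) {
        // Record that at least one syntax error occurred and report where it happened.
        errorCount++;
        System.err.println("line " + line + ":" + charPositionInLine + " " + msg);
    }

    int getErrorCount() {
        return errorCount;
    }
}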
@Override() public java.lang.Class getJavaClass( ) { return org.chocolate_milk.model.SignonMsgsRs.class; }
@Override() java.lang.Class function( ) { return org.chocolate_milk.model.SignonMsgsRs.class; }
/** * Method getJavaClass. * * @return the Java class represented by this descriptor. */
Method getJavaClass
getJavaClass
{ "repo_name": "galleon1/chocolate-milk", "path": "src/org/chocolate_milk/model/descriptors/SignonMsgsRsDescriptor.java", "license": "lgpl-3.0", "size": 6061 }
[ "org.chocolate_milk.model.SignonMsgsRs" ]
import org.chocolate_milk.model.SignonMsgsRs;
import org.chocolate_milk.model.*;
[ "org.chocolate_milk.model" ]
org.chocolate_milk.model;
1,184,402
@ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<SubscriptionFeatureRegistrationInner> listAsync() { return new PagedFlux<>( () -> listSinglePageAsync(), nextLink -> listAllBySubscriptionNextSinglePageAsync(nextLink)); }
@ServiceMethod(returns = ReturnType.COLLECTION) PagedFlux<SubscriptionFeatureRegistrationInner> function() { return new PagedFlux<>( () -> listSinglePageAsync(), nextLink -> listAllBySubscriptionNextSinglePageAsync(nextLink)); }
/** * Returns subscription feature registrations for given subscription. * * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the list of subscription feature registrations as paginated response with {@link PagedFlux}. */
Returns subscription feature registrations for given subscription
listAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-resources/src/main/java/com/azure/resourcemanager/resources/implementation/SubscriptionFeatureRegistrationsClientImpl.java", "license": "mit", "size": 53711 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.PagedFlux", "com.azure.resourcemanager.resources.fluent.models.SubscriptionFeatureRegistrationInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedFlux; import com.azure.resourcemanager.resources.fluent.models.SubscriptionFeatureRegistrationInner;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.resources.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
156,845
public void setMaxCacheItemCount(int maxCacheItemCount) { if (maxCacheItemCount > 0) { ClientConfig.CACHEITEM_COUNT = maxCacheItemCount; } }
void function(int maxCacheItemCount) { if (maxCacheItemCount > 0) { ClientConfig.CACHEITEM_COUNT = maxCacheItemCount; } }
/** * max amount of blockid==>serverlist cache items * * @param maxCacheItemCount */
max amount of blockid==>serverlist cache items
setMaxCacheItemCount
{ "repo_name": "tangwan/study_code", "path": "taobao-tfs-test/src/main/java/com/taobao/common/tfs/DefaultTfsManager.java", "license": "apache-2.0", "size": 34208 }
[ "com.taobao.common.tfs.impl.ClientConfig" ]
import com.taobao.common.tfs.impl.ClientConfig;
import com.taobao.common.tfs.impl.*;
[ "com.taobao.common" ]
com.taobao.common;
1,923,175
public void setLocationDAO(LocationDAO dao);
void function(LocationDAO dao);
/** * Set the data access object that the service will use to interact with the database. This is * set by spring in the applicationContext-service.xml file * * @param dao */
Set the data access object that the service will use to interact with the database. This is set by spring in the applicationContext-service.xml file
setLocationDAO
{ "repo_name": "nilusi/Legacy-UI", "path": "api/src/main/java/org/openmrs/api/LocationService.java", "license": "mpl-2.0", "size": 20370 }
[ "org.openmrs.api.db.LocationDAO" ]
import org.openmrs.api.db.LocationDAO;
import org.openmrs.api.db.*;
[ "org.openmrs.api" ]
org.openmrs.api;
84,309
public void onFrameworkResponse(boolean isEventHandled); } public KeyEventChannel(@NonNull BinaryMessenger binaryMessenger) { this.channel = new BasicMessageChannel<>(binaryMessenger, "flutter/keyevent", JSONMessageCodec.INSTANCE); } @NonNull public final BasicMessageChannel<Object> channel;
void function(boolean isEventHandled); } public KeyEventChannel(@NonNull BinaryMessenger binaryMessenger) { this.channel = new BasicMessageChannel<>(binaryMessenger, STR, JSONMessageCodec.INSTANCE); } @NonNull public final BasicMessageChannel<Object> channel;
/** * Called whenever the framework responds that a given key event was handled or not handled by * the framework. * * @param isEventHandled whether the framework decides to handle the event. */
Called whenever the framework responds that a given key event was handled or not handled by the framework
onFrameworkResponse
{ "repo_name": "rmacnak-google/engine", "path": "shell/platform/android/io/flutter/embedding/engine/systemchannels/KeyEventChannel.java", "license": "bsd-3-clause", "size": 4894 }
[ "androidx.annotation.NonNull", "io.flutter.plugin.common.BasicMessageChannel", "io.flutter.plugin.common.BinaryMessenger", "io.flutter.plugin.common.JSONMessageCodec" ]
import androidx.annotation.NonNull; import io.flutter.plugin.common.BasicMessageChannel; import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.JSONMessageCodec;
import androidx.annotation.*; import io.flutter.plugin.common.*;
[ "androidx.annotation", "io.flutter.plugin" ]
androidx.annotation; io.flutter.plugin;
2,142,297
@Override protected Dialog onCreateDialog(int id) { return DialogFactory.getInstance().getPendingDialog(id); } // // Helpers. //
Dialog function(int id) { return DialogFactory.getInstance().getPendingDialog(id); } //
/** * onCreateDialog event - forward requests to the factory. */
onCreateDialog event - forward requests to the factory
onCreateDialog
{ "repo_name": "xiaosea/Sokoban", "path": "src/main/java/com/dio/sokoban/SokoGameActivity.java", "license": "gpl-2.0", "size": 23694 }
[ "android.app.Dialog" ]
import android.app.Dialog;
import android.app.*;
[ "android.app" ]
android.app;
2,303,922
public long getRemainingValidTime(CacheKey key) { long expiryTime = getExpiryTimeInMillis(key); long remainingTime = expiryTime - System.currentTimeMillis(); if (remainingTime > 0) { return remainingTime; } return 0; }
long function(CacheKey key) { long expiryTime = getExpiryTimeInMillis(key); long remainingTime = expiryTime - System.currentTimeMillis(); if (remainingTime > 0) { return remainingTime; } return 0; }
/** * INTERNAL: * Return the remaining life of this object */
Return the remaining life of this object
getRemainingValidTime
{ "repo_name": "bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs", "path": "foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/descriptors/invalidation/CacheInvalidationPolicy.java", "license": "epl-1.0", "size": 7274 }
[ "org.eclipse.persistence.internal.identitymaps.CacheKey" ]
import org.eclipse.persistence.internal.identitymaps.CacheKey;
import org.eclipse.persistence.internal.identitymaps.*;
[ "org.eclipse.persistence" ]
org.eclipse.persistence;
1,419,316
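A short sketch of the expiry arithmetic above, assuming a concrete CacheInvalidationPolicy subclass and a CacheKey are supplied by the caller:
boolean isStale(CacheInvalidationPolicy policy, CacheKey key) {
    // getRemainingValidTime() clamps to 0 once System.currentTimeMillis() passes the expiry time
    return policy.getRemainingValidTime(key) == 0;
}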
void sourceOnlyMode() throws RemoteException;
void sourceOnlyMode() throws RemoteException;
/** * Switches the engine to run in source only mode (no decompilation). */
Switches the engine to run in source only mode (no decompilation)
sourceOnlyMode
{ "repo_name": "OndraZizka/windup", "path": "tooling/api/src/main/java/org/jboss/windup/tooling/ExecutionBuilder.java", "license": "epl-1.0", "size": 3921 }
[ "java.rmi.RemoteException" ]
import java.rmi.RemoteException;
import java.rmi.*;
[ "java.rmi" ]
java.rmi;
2,743,507
public static OriginatorInfo getInstance( Object obj) { if (obj instanceof OriginatorInfo) { return (OriginatorInfo)obj; } else if (obj != null) { return new OriginatorInfo(ASN1Sequence.getInstance(obj)); } return null; }
static OriginatorInfo function( Object obj) { if (obj instanceof OriginatorInfo) { return (OriginatorInfo)obj; } else if (obj != null) { return new OriginatorInfo(ASN1Sequence.getInstance(obj)); } return null; }
/** * Return an OriginatorInfo object from the given object. * <p> * Accepted inputs: * <ul> * <li> null &rarr; null * <li> {@link OriginatorInfo} object * <li> {@link org.spongycastle.asn1.ASN1Sequence#getInstance(java.lang.Object) ASN1Sequence} input formats with OriginatorInfo structure inside * </ul> * * @param obj the object we want converted. * @exception IllegalArgumentException if the object cannot be converted. */
Return an OriginatorInfo object from the given object. Accepted inputs: null → null <code>OriginatorInfo</code> object <code>org.spongycastle.asn1.ASN1Sequence#getInstance(java.lang.Object) ASN1Sequence</code> input formats with OriginatorInfo structure inside
getInstance
{ "repo_name": "Skywalker-11/spongycastle", "path": "core/src/main/java/org/spongycastle/asn1/cms/OriginatorInfo.java", "license": "mit", "size": 4571 }
[ "org.spongycastle.asn1.ASN1Sequence" ]
import org.spongycastle.asn1.ASN1Sequence;
import org.spongycastle.asn1.*;
[ "org.spongycastle.asn1" ]
org.spongycastle.asn1;
1,911,903
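A minimal sketch of the standard ASN.1 getInstance pattern shown above; the encoded input object is a placeholder:
OriginatorInfo parseOriginatorInfo(Object encoded) {
    // null stays null; an ASN1Sequence (or equivalent encodable) is wrapped; anything else fails with IllegalArgumentException
    return OriginatorInfo.getInstance(encoded);
}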
static <T, U, R> R privilegedExecution(BiFunction<T, U, R> function, T t, U u) { return privilegedExecution().execute(function, t, u); }
static <T, U, R> R privilegedExecution(BiFunction<T, U, R> function, T t, U u) { return privilegedExecution().execute(function, t, u); }
/** * Execute the given function, in a privileged block if a security manager is checking. * @param function the function * @param t the first argument to the function * @param u the second argument to the function * @param <T> the type of the first argument to the function * @param <U> the type of the second argument to the function * @param <R> the type of the function return value * @return the return value of the function */
Execute the given function, in a privileged block if a security manager is checking
privilegedExecution
{ "repo_name": "luck3y/wildfly-core", "path": "host-controller/src/main/java/org/jboss/as/host/controller/SecurityActions.java", "license": "lgpl-2.1", "size": 4592 }
[ "java.util.function.BiFunction" ]
import java.util.function.BiFunction;
import java.util.function.*;
[ "java.util" ]
java.util;
2,041,856
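A minimal sketch of calling the helper above, assuming it is invoked from within the same class or package; the concrete BiFunction is illustrative only:
String joinWithSeparator(String left, String right) {
    BiFunction<String, String, String> joiner = (a, b) -> a + "|" + b;
    // runs inside a privileged block only when a SecurityManager is installed
    return privilegedExecution(joiner, left, right);
}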
void enterAtomicExpression(@NotNull EsperEPL2GrammarParser.AtomicExpressionContext ctx); void exitAtomicExpression(@NotNull EsperEPL2GrammarParser.AtomicExpressionContext ctx);
void enterAtomicExpression(@NotNull EsperEPL2GrammarParser.AtomicExpressionContext ctx); void exitAtomicExpression(@NotNull EsperEPL2GrammarParser.AtomicExpressionContext ctx);
/** * Exit a parse tree produced by {@link EsperEPL2GrammarParser#atomicExpression}. * @param ctx the parse tree */
Exit a parse tree produced by <code>EsperEPL2GrammarParser#atomicExpression</code>
exitAtomicExpression
{ "repo_name": "georgenicoll/esper", "path": "esper/src/main/java/com/espertech/esper/epl/generated/EsperEPL2GrammarListener.java", "license": "gpl-2.0", "size": 114105 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
2,637,076
public void actualizar() { try{ calcModelo.addNumero(pnlNumero.getNumero()); pnlResultado.mostrar(calcModelo.getCantidad(), calcModelo.getMedia(), calcModelo.getMinimo(), calcModelo.getMaximo()); } catch(NumberFormatException e) { JOptionPane.showMessageDialog(this,"Introduzca valor correcto"); } pnlNumero.cogerFoco(); }
void function() { try{ calcModelo.addNumero(pnlNumero.getNumero()); pnlResultado.mostrar(calcModelo.getCantidad(), calcModelo.getMedia(), calcModelo.getMinimo(), calcModelo.getMaximo()); } catch(NumberFormatException e) { JOptionPane.showMessageDialog(this,STR); } pnlNumero.cogerFoco(); }
/** * Updates the calculator and displays the results in the results panel */
Updates the calculator and displays the results in the results panel
actualizar
{ "repo_name": "djgonza/Programacion", "path": "UT9/AD 11 Calculadora AL/GuiCalculadora.java", "license": "mit", "size": 3654 }
[ "javax.swing.JOptionPane" ]
import javax.swing.JOptionPane;
import javax.swing.*;
[ "javax.swing" ]
javax.swing;
236,302
private LinkedHashMap<String, String> getAvailableLocales(CmsResource resource) { LinkedHashMap<String, String> result = null; List<Locale> locales = null; try { if (CmsResourceTypeXmlPage.isXmlPage(resource)) { locales = CmsXmlPageFactory.unmarshal(getCmsObject(), resource, getRequest()).getLocales(); } else if (CmsResourceTypeXmlContent.isXmlContent(resource)) { locales = CmsXmlContentFactory.unmarshal(getCmsObject(), resource, getRequest()).getLocales(); } else if (CmsResourceTypeXmlContainerPage.isContainerPage(resource)) { locales = CmsXmlContainerPageFactory.unmarshal(getCmsObject(), resource).getLocales(); } } catch (CmsException e) { LOG.warn(e.getLocalizedMessage(), e); } if (locales != null) { Locale wpLocale = OpenCms.getWorkplaceManager().getWorkplaceLocale(getCmsObject()); result = new LinkedHashMap<String, String>(); for (Locale locale : locales) { result.put(locale.toString(), locale.getDisplayName(wpLocale)); } } return result; }
LinkedHashMap<String, String> function(CmsResource resource) { LinkedHashMap<String, String> result = null; List<Locale> locales = null; try { if (CmsResourceTypeXmlPage.isXmlPage(resource)) { locales = CmsXmlPageFactory.unmarshal(getCmsObject(), resource, getRequest()).getLocales(); } else if (CmsResourceTypeXmlContent.isXmlContent(resource)) { locales = CmsXmlContentFactory.unmarshal(getCmsObject(), resource, getRequest()).getLocales(); } else if (CmsResourceTypeXmlContainerPage.isContainerPage(resource)) { locales = CmsXmlContainerPageFactory.unmarshal(getCmsObject(), resource).getLocales(); } } catch (CmsException e) { LOG.warn(e.getLocalizedMessage(), e); } if (locales != null) { Locale wpLocale = OpenCms.getWorkplaceManager().getWorkplaceLocale(getCmsObject()); result = new LinkedHashMap<String, String>(); for (Locale locale : locales) { result.put(locale.toString(), locale.getDisplayName(wpLocale)); } } return result; }
/** * Returns the available locales mapped to their display name for the given resource * or <code>null</code> in case of non xml-content/xml-page resources.<p> * * @param resource the resource * * @return the available locales */
Returns the available locales mapped to their display name for the given resource or <code>null</code> in case of non xml-content/xml-page resources
getAvailableLocales
{ "repo_name": "victos/opencms-core", "path": "src/org/opencms/gwt/CmsVfsService.java", "license": "lgpl-2.1", "size": 81131 }
[ "java.util.LinkedHashMap", "java.util.List", "java.util.Locale", "org.opencms.file.CmsResource", "org.opencms.file.types.CmsResourceTypeXmlContainerPage", "org.opencms.file.types.CmsResourceTypeXmlContent", "org.opencms.file.types.CmsResourceTypeXmlPage", "org.opencms.main.CmsException", "org.opencms.main.OpenCms", "org.opencms.xml.containerpage.CmsXmlContainerPageFactory", "org.opencms.xml.content.CmsXmlContentFactory", "org.opencms.xml.page.CmsXmlPageFactory" ]
import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import org.opencms.file.CmsResource; import org.opencms.file.types.CmsResourceTypeXmlContainerPage; import org.opencms.file.types.CmsResourceTypeXmlContent; import org.opencms.file.types.CmsResourceTypeXmlPage; import org.opencms.main.CmsException; import org.opencms.main.OpenCms; import org.opencms.xml.containerpage.CmsXmlContainerPageFactory; import org.opencms.xml.content.CmsXmlContentFactory; import org.opencms.xml.page.CmsXmlPageFactory;
import java.util.*; import org.opencms.file.*; import org.opencms.file.types.*; import org.opencms.main.*; import org.opencms.xml.containerpage.*; import org.opencms.xml.content.*; import org.opencms.xml.page.*;
[ "java.util", "org.opencms.file", "org.opencms.main", "org.opencms.xml" ]
java.util; org.opencms.file; org.opencms.main; org.opencms.xml;
1,147,117
public List<ExpressRoutePortsLocationBandwidths> availableBandwidths() { return this.availableBandwidths; }
List<ExpressRoutePortsLocationBandwidths> function() { return this.availableBandwidths; }
/** * Get the inventory of available ExpressRoutePort bandwidths. * * @return the availableBandwidths value */
Get the inventory of available ExpressRoutePort bandwidths
availableBandwidths
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/network/mgmt-v2020_04_01/src/main/java/com/microsoft/azure/management/network/v2020_04_01/implementation/ExpressRoutePortsLocationInner.java", "license": "mit", "size": 3621 }
[ "com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePortsLocationBandwidths", "java.util.List" ]
import com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePortsLocationBandwidths; import java.util.List;
import com.microsoft.azure.management.network.v2020_04_01.*; import java.util.*;
[ "com.microsoft.azure", "java.util" ]
com.microsoft.azure; java.util;
2,775,537
void processOrderPaymentRequest(@Nonnull final Long orderPaymentRequest);
void processOrderPaymentRequest(@Nonnull final Long orderPaymentRequest);
/** * Process order payment request * * @param orderPaymentRequest */
Process order payment request
processOrderPaymentRequest
{ "repo_name": "sflpro/ms_payment", "path": "queue/queue_consumer/queue_consumer_core/src/main/java/com/sfl/pms/queue/consumer/payment/common/order/OrderPaymentRequestProcessingConsumerService.java", "license": "apache-2.0", "size": 416 }
[ "javax.annotation.Nonnull" ]
import javax.annotation.Nonnull;
import javax.annotation.*;
[ "javax.annotation" ]
javax.annotation;
124,168
public static DataField getDataField(String propertyName, String label) { DataField field = (DataField) getNewComponentInstance(DATA_FIELD); field.setPropertyName(propertyName); field.setLabel(label); return field; }
static DataField function(String propertyName, String label) { DataField field = (DataField) getNewComponentInstance(DATA_FIELD); field.setPropertyName(propertyName); field.setLabel(label); return field; }
/** * Retrieves a new data field instance from Spring (initialized by the bean definition * with the given id) and sets the property name and label to the given parameters * * @param propertyName name of the property the data field should bind to * @param label label for the field * @return DataField */
Retrieves a new data field instance from Spring (initialized by the bean definition with the given id) and sets the property name and label to the given parameters
getDataField
{ "repo_name": "ua-eas/ksd-kc5.2.1-rice2.3.6-ua", "path": "rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/uif/util/ComponentFactory.java", "license": "apache-2.0", "size": 43761 }
[ "org.kuali.rice.krad.uif.field.DataField" ]
import org.kuali.rice.krad.uif.field.DataField;
import org.kuali.rice.krad.uif.field.*;
[ "org.kuali.rice" ]
org.kuali.rice;
329,404
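A minimal sketch of the factory call above; the property name and label are placeholders:
DataField buildTitleField() {
    // returns a DataField bean initialized from the DATA_FIELD bean definition, then bound to the given property
    return ComponentFactory.getDataField("bookTitle", "Title");
}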
private static HeaderAndEntry readHeaderAndEntry(ByteBuffer bytesFromLog, EnvironmentImpl envImpl, boolean readFullItem) throws ChecksumException { HeaderAndEntry ret = new HeaderAndEntry(); ret.header = new LogEntryHeader(bytesFromLog, LogEntryType.LOG_VERSION); ret.header.readVariablePortion(bytesFromLog); ret.entry = LogEntryType.findType(ret.header.getType()).getNewLogEntry(); ret.entry.readEntry(ret.header, bytesFromLog, readFullItem); return ret; } private static class HeaderAndEntry { public LogEntryHeader header; public LogEntry entry; private HeaderAndEntry() { }
static HeaderAndEntry function(ByteBuffer bytesFromLog, EnvironmentImpl envImpl, boolean readFullItem) throws ChecksumException { HeaderAndEntry ret = new HeaderAndEntry(); ret.header = new LogEntryHeader(bytesFromLog, LogEntryType.LOG_VERSION); ret.header.readVariablePortion(bytesFromLog); ret.entry = LogEntryType.findType(ret.header.getType()).getNewLogEntry(); ret.entry.readEntry(ret.header, bytesFromLog, readFullItem); return ret; } private static class HeaderAndEntry { public LogEntryHeader header; public LogEntry entry; private HeaderAndEntry() { }
/** * Convenience method for marshalling a header and log entry * out of a byte buffer read directly out of the log. * @throws DatabaseException */
Convenience method for marshalling a header and log entry out of a byte buffer read directly out of the log
readHeaderAndEntry
{ "repo_name": "bjorndm/prebake", "path": "code/third_party/bdb/test/com/sleepycat/je/log/LogManagerTest.java", "license": "apache-2.0", "size": 27341 }
[ "com.sleepycat.je.dbi.EnvironmentImpl", "com.sleepycat.je.log.entry.LogEntry", "java.nio.ByteBuffer" ]
import com.sleepycat.je.dbi.EnvironmentImpl; import com.sleepycat.je.log.entry.LogEntry; import java.nio.ByteBuffer;
import com.sleepycat.je.dbi.*; import com.sleepycat.je.log.entry.*; import java.nio.*;
[ "com.sleepycat.je", "java.nio" ]
com.sleepycat.je; java.nio;
496,901
public Map<String, Object> metadata() { return metadata; }
Map<String, Object> function() { return metadata; }
/** * Gets the metadata. * * <p>Any metadata related to the entity value. * * @return the metadata */
Gets the metadata. Any metadata related to the entity value
metadata
{ "repo_name": "watson-developer-cloud/java-sdk", "path": "assistant/src/main/java/com/ibm/watson/assistant/v1/model/CreateValueOptions.java", "license": "apache-2.0", "size": 8511 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
234,701
public Date getPayPeriodEndDate() { return payPeriodEndDate; }
Date function() { return payPeriodEndDate; }
/** * Gets the payPeriodEndDate. * * @return Returns the payPeriodEndDate. */
Gets the payPeriodEndDate
getPayPeriodEndDate
{ "repo_name": "Ariah-Group/Finance", "path": "af_webapp/src/main/java/org/kuali/kfs/module/ld/businessobject/LaborJournalVoucherDetail.java", "license": "apache-2.0", "size": 12809 }
[ "java.sql.Date" ]
import java.sql.Date;
import java.sql.*;
[ "java.sql" ]
java.sql;
2,808,060
@Override public TempJavaCodeFragmentFiles getTempJavaCodeFragmentFiles() { return tempFileHandle; }
TempJavaCodeFragmentFiles function() { return tempFileHandle; }
/** * Returns the temporary file handle. * * @return temporary file handle */
Returns the temporary file handle
getTempJavaCodeFragmentFiles
{ "repo_name": "VinodKumarS-Huawei/ietf96yang", "path": "utils/yangutils/plugin/src/main/java/org/onosproject/yangutils/translator/tojava/javamodel/YangJavaEnumeration.java", "license": "apache-2.0", "size": 4338 }
[ "org.onosproject.yangutils.translator.tojava.TempJavaCodeFragmentFiles" ]
import org.onosproject.yangutils.translator.tojava.TempJavaCodeFragmentFiles;
import org.onosproject.yangutils.translator.tojava.*;
[ "org.onosproject.yangutils" ]
org.onosproject.yangutils;
173,764
public void setCompileDir(File compileDir) { this.compileDir = compileDir; }
void function(File compileDir) { this.compileDir = compileDir; }
/** * The compile directory. * * @param compileDir The compile directory. */
The compile directory
setCompileDir
{ "repo_name": "garyhodgson/enunciate", "path": "core/src/main/java/org/codehaus/enunciate/main/EnunciateTask.java", "license": "apache-2.0", "size": 12942 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
1,293,434
protected RelatedOptions getIncrementalOptions() { RelatedOptions incrementalOpts = new RelatedOptions("Incremental import arguments"); incrementalOpts.addOption(OptionBuilder.withArgName("import-type") .hasArg() .withDescription( "Define an incremental import of type 'append' or 'lastmodified'") .withLongOpt(INCREMENT_TYPE_ARG) .create()); incrementalOpts.addOption(OptionBuilder.withArgName("column") .hasArg() .withDescription("Source column to check for incremental change") .withLongOpt(INCREMENT_COL_ARG) .create()); incrementalOpts.addOption(OptionBuilder.withArgName("value") .hasArg() .withDescription("Last imported value in the incremental check column") .withLongOpt(INCREMENT_LAST_VAL_ARG) .create()); return incrementalOpts; }
RelatedOptions function() { RelatedOptions incrementalOpts = new RelatedOptions(STR); incrementalOpts.addOption(OptionBuilder.withArgName(STR) .hasArg() .withDescription( STR) .withLongOpt(INCREMENT_TYPE_ARG) .create()); incrementalOpts.addOption(OptionBuilder.withArgName(STR) .hasArg() .withDescription(STR) .withLongOpt(INCREMENT_COL_ARG) .create()); incrementalOpts.addOption(OptionBuilder.withArgName("value") .hasArg() .withDescription(STR) .withLongOpt(INCREMENT_LAST_VAL_ARG) .create()); return incrementalOpts; }
/** * Return options for incremental import. */
Return options for incremental import
getIncrementalOptions
{ "repo_name": "sahilsehgal81/Sqoop", "path": "src/java/org/apache/sqoop/tool/ImportTool.java", "license": "apache-2.0", "size": 39652 }
[ "com.cloudera.sqoop.cli.RelatedOptions", "org.apache.commons.cli.OptionBuilder" ]
import com.cloudera.sqoop.cli.RelatedOptions; import org.apache.commons.cli.OptionBuilder;
import com.cloudera.sqoop.cli.*; import org.apache.commons.cli.*;
[ "com.cloudera.sqoop", "org.apache.commons" ]
com.cloudera.sqoop; org.apache.commons;
1,532,479
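The same Commons CLI builder pattern in isolation — a sketch with illustrative option names, not Sqoop's real arguments:
RelatedOptions buildExampleOptions() {
    RelatedOptions opts = new RelatedOptions("Example incremental arguments");
    opts.addOption(OptionBuilder.withArgName("column")
        .hasArg()
        .withDescription("Source column to check for changes")
        .withLongOpt("example-check-column")
        .create());
    return opts;
}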
public IndexRequest ttl(TimeValue ttl) { this.ttl = ttl; return this; }
IndexRequest function(TimeValue ttl) { this.ttl = ttl; return this; }
/** * Sets the ttl as a {@link TimeValue} instance. */
Sets the ttl as a <code>TimeValue</code> instance
ttl
{ "repo_name": "camilojd/elasticsearch", "path": "core/src/main/java/org/elasticsearch/action/index/IndexRequest.java", "license": "apache-2.0", "size": 22707 }
[ "org.elasticsearch.common.unit.TimeValue" ]
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.unit.*;
[ "org.elasticsearch.common" ]
org.elasticsearch.common;
1,025,534
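A builder-style usage sketch for the setter above; the index, type, and id values are placeholders, and per-document expiry also depends on the index mapping having _ttl enabled:
IndexRequest buildExpiringRequest() {
    return new IndexRequest("logs", "event", "1")
        .ttl(TimeValue.timeValueHours(1));  // document becomes eligible for expiry one hour after indexing
}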
@Override public int read(byte[] bts) throws IOException { try { beforeRead(bts.length); int n = in.read(bts); afterRead(n); return n; } catch (IOException e) { handleIOException(e); return -1; } }
int function(byte[] bts) throws IOException { try { beforeRead(bts.length); int n = in.read(bts); afterRead(n); return n; } catch (IOException e) { handleIOException(e); return -1; } }
/** * Invokes the delegate's <code>read(byte[])</code> method. * @param bts the buffer to read the bytes into * @return the number of bytes read or -1 if the end of stream * @throws IOException if an I/O error occurs */
Invokes the delegate's <code>read(byte[])</code> method
read
{ "repo_name": "berisd/VirtualFile", "path": "src/main/java/at/beris/virtualfile/io/ProxyInputStream.java", "license": "lgpl-3.0", "size": 7073 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,552,198
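A typical read loop over the proxied stream; the concrete ProxyInputStream subclass and the output destination are assumptions:
long copy(InputStream in, OutputStream out) throws IOException {
    byte[] buffer = new byte[8192];
    long total = 0;
    int n;
    while ((n = in.read(buffer)) != -1) {  // each call goes through the delegating read(byte[]) shown above
        out.write(buffer, 0, n);
        total += n;
    }
    return total;
}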
private void checkOneCell(KeyValue kv, byte[] cf, int rowIdx, int colIdx, long ts) { String ctx = "rowIdx=" + rowIdx + "; colIdx=" + colIdx + "; ts=" + ts; assertEquals("Row mismatch which checking: " + ctx, "row:"+ rowIdx, Bytes.toString(kv.getRow())); assertEquals("ColumnFamily mismatch while checking: " + ctx, Bytes.toString(cf), Bytes.toString(kv.getFamily())); assertEquals("Column qualifier mismatch while checking: " + ctx, "column:" + colIdx, Bytes.toString(kv.getQualifier())); assertEquals("Timestamp mismatch while checking: " + ctx, ts, kv.getTimestamp()); assertEquals("Value mismatch while checking: " + ctx, "value-version-" + ts, Bytes.toString(kv.getValue())); } @org.junit.Rule public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu = new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
void function(KeyValue kv, byte[] cf, int rowIdx, int colIdx, long ts) { String ctx = STR + rowIdx + STR + colIdx + STR + ts; assertEquals(STR + ctx, "row:"+ rowIdx, Bytes.toString(kv.getRow())); assertEquals(STR + ctx, Bytes.toString(cf), Bytes.toString(kv.getFamily())); assertEquals(STR + ctx, STR + colIdx, Bytes.toString(kv.getQualifier())); assertEquals(STR + ctx, ts, kv.getTimestamp()); assertEquals(STR + ctx, STR + ts, Bytes.toString(kv.getValue())); } @org.junit.Rule public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu = new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
/** * Assert that the passed in KeyValue has expected contents for the * specified row, column & timestamp. */
Assert that the passed in KeyValue has expected contents for the specified row, column & timestamp
checkOneCell
{ "repo_name": "zqxjjj/NobidaBase", "path": "target/hbase-0.94.9/hbase-0.94.9/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java", "license": "apache-2.0", "size": 147002 }
[ "org.apache.hadoop.hbase.KeyValue", "org.apache.hadoop.hbase.util.Bytes" ]
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.util.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
436,376
@Deprecated public UnaryCallSettings.Builder<DisableServiceRequest, Operation> disableServiceSettings() { return getStubSettingsBuilder().disableServiceSettings(); }
UnaryCallSettings.Builder<DisableServiceRequest, Operation> function() { return getStubSettingsBuilder().disableServiceSettings(); }
/** * Returns the builder for the settings used for calls to disableService. * * @deprecated This method is deprecated and will be removed in the next major version update. */
Returns the builder for the settings used for calls to disableService
disableServiceSettings
{ "repo_name": "googleapis/java-service-usage", "path": "google-cloud-service-usage/src/main/java/com/google/api/serviceusage/v1beta1/ServiceUsageSettings.java", "license": "apache-2.0", "size": 28328 }
[ "com.google.api.gax.rpc.UnaryCallSettings", "com.google.longrunning.Operation" ]
import com.google.api.gax.rpc.UnaryCallSettings; import com.google.longrunning.Operation;
import com.google.api.gax.rpc.*; import com.google.longrunning.*;
[ "com.google.api", "com.google.longrunning" ]
com.google.api; com.google.longrunning;
1,976,595
void emitWatermark(Watermark mark);
void emitWatermark(Watermark mark);
/** * Emits a {@link Watermark} from an operator. This watermark is broadcast to all downstream * operators. * * <p>A watermark specifies that no element with a timestamp lower or equal to the watermark * timestamp will be emitted in the future. */
Emits a <code>Watermark</code> from an operator. This watermark is broadcast to all downstream operators. A watermark specifies that no element with a timestamp lower or equal to the watermark timestamp will be emitted in the future
emitWatermark
{ "repo_name": "tillrohrmann/flink", "path": "flink-streaming-java/src/main/java/org/apache/flink/streaming/api/operators/Output.java", "license": "apache-2.0", "size": 2280 }
[ "org.apache.flink.streaming.api.watermark.Watermark" ]
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.watermark.*;
[ "org.apache.flink" ]
org.apache.flink;
1,623,195
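A minimal sketch of forwarding a watermark through the operator output; the Output instance is assumed to be the one provided to a stream operator at runtime:
void forwardWatermark(Output<StreamRecord<String>> output, long timestamp) {
    // broadcast to all downstream operators; no element with a timestamp lower or equal to this may follow
    output.emitWatermark(new Watermark(timestamp));
}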
@Override public java.util.Date getUpdateDate() { return _orgImgContent.getUpdateDate(); }
java.util.Date function() { return _orgImgContent.getUpdateDate(); }
/** * Returns the update date of this org img content. * * @return the update date of this org img content */
Returns the update date of this org img content
getUpdateDate
{ "repo_name": "queza85/edison", "path": "edison-portal-framework/edison-content-2016-portlet/docroot/WEB-INF/service/org/kisti/edison/content/model/OrgImgContentWrapper.java", "license": "gpl-3.0", "size": 11009 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
2,448,481
public void logWarn(String message) throws Exception { addLogEntry(getLastModifiedBy(), 0l, 0l, null, null, "WARNING: " + message); //fireWarningEvent(message); Logger.getLogger(getClass()).warn(message); }
void function(String message) throws Exception { addLogEntry(getLastModifiedBy(), 0l, 0l, null, null, STR + message); Logger.getLogger(getClass()).warn(message); }
/** * Log warning to console and the database. * * @param message the message * @throws Exception the exception */
Log warning to console and the database
logWarn
{ "repo_name": "IHTSDO/OTF-Mapping-Service", "path": "jpa-services/src/main/java/org/ihtsdo/otf/mapping/jpa/algo/AbstractAlgorithm.java", "license": "apache-2.0", "size": 3584 }
[ "org.apache.log4j.Logger" ]
import org.apache.log4j.Logger;
import org.apache.log4j.*;
[ "org.apache.log4j" ]
org.apache.log4j;
926,431