method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
list | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
list | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
|---|---|---|---|---|---|---|---|---|---|---|---|
byte[] hash = Hashing.murmur3_128(0).newHasher().putString(uid, UTF_8).hash().asBytes();
return new OperatorID(hash);
}
|
byte[] hash = Hashing.murmur3_128(0).newHasher().putString(uid, UTF_8).hash().asBytes(); return new OperatorID(hash); }
|
/**
* Generate {@link OperatorID}'s from {@code uid}'s.
*
* <p>{@link
* org.apache.flink.streaming.api.graph.StreamGraphHasherV2#traverseStreamGraphAndGenerateHashes(StreamGraph)})}
*
* @param uid {@code DataStream} operator uid.
* @return corresponding {@link OperatorID}
*/
|
Generate <code>OperatorID</code>'s from uid's. <code>org.apache.flink.streaming.api.graph.StreamGraphHasherV2#traverseStreamGraphAndGenerateHashes(StreamGraph)</code>)}
|
fromUid
|
{
"repo_name": "apache/flink",
"path": "flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/runtime/OperatorIDGenerator.java",
"license": "apache-2.0",
"size": 1800
}
|
[
"org.apache.flink.runtime.jobgraph.OperatorID",
"org.apache.flink.shaded.guava30.com.google.common.hash.Hashing"
] |
import org.apache.flink.runtime.jobgraph.OperatorID; import org.apache.flink.shaded.guava30.com.google.common.hash.Hashing;
|
import org.apache.flink.runtime.jobgraph.*; import org.apache.flink.shaded.guava30.com.google.common.hash.*;
|
[
"org.apache.flink"
] |
org.apache.flink;
| 2,197,826
|
protected boolean allSubsetsOfSizeK_1AreFrequent(int[] candidate, List<Itemset> levelK_1) {
// generate all subsets by always each item from the candidate, one by one
for(int posRemoved=0; posRemoved< candidate.length; posRemoved++){
// perform a binary search to check if the subset appears in level k-1.
int first = 0;
int last = levelK_1.size() - 1;
// variable to remember if we found the subset
boolean found = false;
// the binary search
while( first <= last )
{
int middle = ( first + last ) / 2;
if(sameAs(levelK_1.get(middle), candidate, posRemoved) < 0 ){
first = middle + 1; // the itemset compared is larger than the subset according to the lexical order
}
else if(sameAs(levelK_1.get(middle), candidate, posRemoved) > 0 ){
last = middle - 1; // the itemset compared is smaller than the subset is smaller according to the lexical order
}
else{
found = true; // we have found it so we stop
break;
}
}
if(found == false){ // if we did not find it, that means that candidate is not a frequent itemset because
// at least one of its subsets does not appear in level k-1.
return false;
}
}
return true;
}
|
boolean function(int[] candidate, List<Itemset> levelK_1) { for(int posRemoved=0; posRemoved< candidate.length; posRemoved++){ int first = 0; int last = levelK_1.size() - 1; boolean found = false; while( first <= last ) { int middle = ( first + last ) / 2; if(sameAs(levelK_1.get(middle), candidate, posRemoved) < 0 ){ first = middle + 1; } else if(sameAs(levelK_1.get(middle), candidate, posRemoved) > 0 ){ last = middle - 1; } else{ found = true; break; } } if(found == false){ return false; } } return true; }
|
/**
* Method to check if all the subsets of size k-1 of a candidate of size k are freuqnet
* @param candidate a candidate itemset of size k
* @param levelK_1 the frequent itemsets of size k-1
* @return true if all the subsets are frequet
*/
|
Method to check if all the subsets of size k-1 of a candidate of size k are freuqnet
|
allSubsetsOfSizeK_1AreFrequent
|
{
"repo_name": "YinYanfei/CadalWorkspace",
"path": "ca/pfv/spmf/algorithms/frequentpatterns/apriori_close/AlgoAprioriClose.java",
"license": "gpl-3.0",
"size": 19262
}
|
[
"ca.pfv.spmf.patterns.itemset_array_integers_with_count.Itemset",
"java.util.List"
] |
import ca.pfv.spmf.patterns.itemset_array_integers_with_count.Itemset; import java.util.List;
|
import ca.pfv.spmf.patterns.itemset_array_integers_with_count.*; import java.util.*;
|
[
"ca.pfv.spmf",
"java.util"
] |
ca.pfv.spmf; java.util;
| 2,274,369
|
public void testAckedMessageAreConsumed() throws JMSException {
connection.start();
Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
Topic queue = session.createTopic(getQueueName());
MessageProducer producer = session.createProducer(queue);
MessageConsumer consumer = session.createDurableSubscriber(queue, "subscriber-id1");
producer.send(session.createTextMessage("Hello"));
// Consume the message...
Message msg = consumer.receive(1000);
assertNotNull(msg);
msg.acknowledge();
// Reset the session.
session.close();
session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
// Attempt to Consume the message...
consumer = session.createDurableSubscriber(queue, "subscriber-id1");
msg = consumer.receive(1000);
assertNull(msg);
session.close();
}
// This test cant, unfortunately, pass
//- in hedwig, acknowledge is a ACKNOWLEDGE UNTIL. So the last ack will ack all messages until then ...
/*
public void testLastMessageAcked() throws JMSException {
connection.start();
Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
Topic queue = session.createTopic(getQueueName());
MessageProducer producer = session.createProducer(queue);
MessageConsumer consumer = session.createDurableSubscriber(queue, "subscriber-id2");
TextMessage msg1 = session.createTextMessage("msg1");
TextMessage msg2 = session.createTextMessage("msg2");
TextMessage msg3 = session.createTextMessage("msg3");
producer.send(msg1);
producer.send(msg2);
producer.send(msg3);
// Consume the message...
Message msg = consumer.receive(1000);
assertNotNull(msg);
msg = consumer.receive(1000);
assertNotNull(msg);
msg = consumer.receive(1000);
assertNotNull(msg);
msg.acknowledge();
// Reset the session.
session.close();
session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
// Attempt to Consume the message...
consumer = session.createDurableSubscriber(queue, "subscriber-id2");
msg = consumer.receive(1000);
assertNotNull(msg);
assertEquals(msg1,msg);
msg = consumer.receive(1000);
assertNotNull(msg);
assertEquals(msg2,msg);
msg = consumer.receive(1000);
assertNull(msg);
session.close();
}
|
void function() throws JMSException { connection.start(); Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE); Topic queue = session.createTopic(getQueueName()); MessageProducer producer = session.createProducer(queue); MessageConsumer consumer = session.createDurableSubscriber(queue, STR); producer.send(session.createTextMessage("Hello")); Message msg = consumer.receive(1000); assertNotNull(msg); msg.acknowledge(); session.close(); session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE); consumer = session.createDurableSubscriber(queue, STR); msg = consumer.receive(1000); assertNull(msg); session.close(); } /* public void testLastMessageAcked() throws JMSException { connection.start(); Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE); Topic queue = session.createTopic(getQueueName()); MessageProducer producer = session.createProducer(queue); MessageConsumer consumer = session.createDurableSubscriber(queue, STR); TextMessage msg1 = session.createTextMessage("msg1"); TextMessage msg2 = session.createTextMessage("msg2"); TextMessage msg3 = session.createTextMessage("msg3"); producer.send(msg1); producer.send(msg2); producer.send(msg3); Message msg = consumer.receive(1000); assertNotNull(msg); msg = consumer.receive(1000); assertNotNull(msg); msg = consumer.receive(1000); assertNotNull(msg); msg.acknowledge(); session.close(); session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE); consumer = session.createDurableSubscriber(queue, STR); msg = consumer.receive(1000); assertNotNull(msg); assertEquals(msg1,msg); msg = consumer.receive(1000); assertNotNull(msg); assertEquals(msg2,msg); msg = consumer.receive(1000); assertNull(msg); session.close(); }
|
/**
* Tests if acknowledged messages are being consumed.
*
* @throws JMSException
*/
|
Tests if acknowledged messages are being consumed
|
testAckedMessageAreConsumed
|
{
"repo_name": "mocc/bookkeeper-lab",
"path": "hedwig-client-jms/src/test/java/org/apache/activemq/JMSIndividualAckTest.java",
"license": "apache-2.0",
"size": 5683
}
|
[
"javax.jms.JMSException",
"javax.jms.Message",
"javax.jms.MessageConsumer",
"javax.jms.MessageProducer",
"javax.jms.Session",
"javax.jms.TextMessage",
"javax.jms.Topic"
] |
import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Session; import javax.jms.TextMessage; import javax.jms.Topic;
|
import javax.jms.*;
|
[
"javax.jms"
] |
javax.jms;
| 2,628,129
|
KeyStore getSslKeyStore();
|
KeyStore getSslKeyStore();
|
/**
* Get the SSL keystore, used for HTTPS support.
*
* @return sslKeystore
*/
|
Get the SSL keystore, used for HTTPS support
|
getSslKeyStore
|
{
"repo_name": "teknux-org/jetty-bootstrap",
"path": "jetty-bootstrap/src/main/java/org/teknux/jettybootstrap/configuration/IJettyConfiguration.java",
"license": "apache-2.0",
"size": 13453
}
|
[
"java.security.KeyStore"
] |
import java.security.KeyStore;
|
import java.security.*;
|
[
"java.security"
] |
java.security;
| 2,086,798
|
public static IDataTable retrieveOQLDataTable(IContext context, String oqlQuery, int amount, int offset) throws CoreException
{
return component.core().retrieveOQLDataTable(context, oqlQuery, amount, offset);
}
|
static IDataTable function(IContext context, String oqlQuery, int amount, int offset) throws CoreException { return component.core().retrieveOQLDataTable(context, oqlQuery, amount, offset); }
|
/**
* Retrieve raw data (IDataTable) using an OQL query (asynchronously).
* @param context the context.
* @param oqlQuery the OQL query to execute.
* @param amount maximum number of objects to retrieve.
* @param offset index of first object to retrieve.
* @return the data table containing the raw data.
*/
|
Retrieve raw data (IDataTable) using an OQL query (asynchronously)
|
retrieveOQLDataTable
|
{
"repo_name": "mrgroen/reCAPTCHA",
"path": "test/javasource/com/mendix/core/Core.java",
"license": "apache-2.0",
"size": 76718
}
|
[
"com.mendix.systemwideinterfaces.connectionbus.data.IDataTable",
"com.mendix.systemwideinterfaces.core.IContext"
] |
import com.mendix.systemwideinterfaces.connectionbus.data.IDataTable; import com.mendix.systemwideinterfaces.core.IContext;
|
import com.mendix.systemwideinterfaces.connectionbus.data.*; import com.mendix.systemwideinterfaces.core.*;
|
[
"com.mendix.systemwideinterfaces"
] |
com.mendix.systemwideinterfaces;
| 86,661
|
public void setDescriptions(List<String> descriptions) {
this.descriptions = descriptions;
}
|
void function(List<String> descriptions) { this.descriptions = descriptions; }
|
/**
* <p>Setter for the field <code>descriptions</code>.</p>
*
* @param descriptions a {@link java.util.List} object.
*/
|
Setter for the field <code>descriptions</code>
|
setDescriptions
|
{
"repo_name": "NotFound403/WePay",
"path": "src/main/java/cn/felord/wepay/ali/sdk/api/domain/VoucherTermInfo.java",
"license": "apache-2.0",
"size": 1396
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,845,131
|
protected void value(@Nullable CacheObject val) {
assert Thread.holdsLock(this);
this.val = val;
}
|
void function(@Nullable CacheObject val) { assert Thread.holdsLock(this); this.val = val; }
|
/**
* Sets entry value. If off-heap value storage is enabled, will serialize value to off-heap.
*
* @param val Value to store.
*/
|
Sets entry value. If off-heap value storage is enabled, will serialize value to off-heap
|
value
|
{
"repo_name": "pperalta/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheMapEntry.java",
"license": "apache-2.0",
"size": 154311
}
|
[
"org.jetbrains.annotations.Nullable"
] |
import org.jetbrains.annotations.Nullable;
|
import org.jetbrains.annotations.*;
|
[
"org.jetbrains.annotations"
] |
org.jetbrains.annotations;
| 1,413,115
|
private static CacheableResultTransformer create(boolean[] includeInTuple) {
return new CacheableResultTransformer( includeInTuple, null );
}
private CacheableResultTransformer(boolean[] includeInTuple, boolean[] includeInTransform) {
if ( includeInTuple == null ) {
throw new IllegalArgumentException( "includeInTuple cannot be null" );
}
this.includeInTuple = includeInTuple;
tupleLength = ArrayHelper.countTrue( includeInTuple );
tupleSubsetLength = (
includeInTransform == null ?
tupleLength :
ArrayHelper.countTrue( includeInTransform )
);
if ( tupleSubsetLength == tupleLength ) {
includeInTransformIndex = null;
}
else {
includeInTransformIndex = new int[tupleSubsetLength];
for ( int i = 0, j = 0 ; i < includeInTransform.length ; i++ ) {
if ( includeInTransform[ i ] ) {
includeInTransformIndex[ j ] = i;
j++;
}
}
}
}
|
static CacheableResultTransformer function(boolean[] includeInTuple) { return new CacheableResultTransformer( includeInTuple, null ); } private CacheableResultTransformer(boolean[] includeInTuple, boolean[] includeInTransform) { if ( includeInTuple == null ) { throw new IllegalArgumentException( STR ); } this.includeInTuple = includeInTuple; tupleLength = ArrayHelper.countTrue( includeInTuple ); tupleSubsetLength = ( includeInTransform == null ? tupleLength : ArrayHelper.countTrue( includeInTransform ) ); if ( tupleSubsetLength == tupleLength ) { includeInTransformIndex = null; } else { includeInTransformIndex = new int[tupleSubsetLength]; for ( int i = 0, j = 0 ; i < includeInTransform.length ; i++ ) { if ( includeInTransform[ i ] ) { includeInTransformIndex[ j ] = i; j++; } } } }
|
/**
* Returns a CacheableResultTransformer that is used to transform
* tuples to a value(s) that can be cached.
*
* @param includeInTuple - array with the i-th element indicating
* whether the i-th expression returned by a query is
* included in the tuple; the number of true values equals
* the length of the tuple that will be transformed;
* must be non-null
*
* @return a CacheableResultTransformer that is used to transform
* tuples to a value(s) that can be cached.
*/
|
Returns a CacheableResultTransformer that is used to transform tuples to a value(s) that can be cached
|
create
|
{
"repo_name": "1fechner/FeatureExtractor",
"path": "sources/FeatureExtractor/lib/hibernate-release-5.1.0.Final/project/hibernate-core/src/main/java/org/hibernate/transform/CacheableResultTransformer.java",
"license": "lgpl-2.1",
"size": 11511
}
|
[
"org.hibernate.internal.util.collections.ArrayHelper"
] |
import org.hibernate.internal.util.collections.ArrayHelper;
|
import org.hibernate.internal.util.collections.*;
|
[
"org.hibernate.internal"
] |
org.hibernate.internal;
| 1,432,667
|
DataSet vectorize(InputStream is, String label);
|
DataSet vectorize(InputStream is, String label);
|
/**
* Text coming from an input stream considered as one document
* @param is the input stream to read from
* @param label the label to assign
* @return a dataset with a applyTransformToDestination of weights(relative to impl; could be word counts or tfidf scores)
*/
|
Text coming from an input stream considered as one document
|
vectorize
|
{
"repo_name": "deeplearning4j/deeplearning4j",
"path": "deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/TextVectorizer.java",
"license": "apache-2.0",
"size": 3408
}
|
[
"java.io.InputStream",
"org.nd4j.linalg.dataset.DataSet"
] |
import java.io.InputStream; import org.nd4j.linalg.dataset.DataSet;
|
import java.io.*; import org.nd4j.linalg.dataset.*;
|
[
"java.io",
"org.nd4j.linalg"
] |
java.io; org.nd4j.linalg;
| 1,931,545
|
List<HaWorkVO> findPreviousHA(long instanceId);
|
List<HaWorkVO> findPreviousHA(long instanceId);
|
/**
* Finds all the work items related to this instance.
*
* @param instanceId
* @return list of WorkVO or empty list.
*/
|
Finds all the work items related to this instance
|
findPreviousHA
|
{
"repo_name": "remibergsma/cosmic",
"path": "cosmic-core/server/src/main/java/com/cloud/ha/dao/HighAvailabilityDao.java",
"license": "apache-2.0",
"size": 2057
}
|
[
"com.cloud.ha.HaWorkVO",
"java.util.List"
] |
import com.cloud.ha.HaWorkVO; import java.util.List;
|
import com.cloud.ha.*; import java.util.*;
|
[
"com.cloud.ha",
"java.util"
] |
com.cloud.ha; java.util;
| 537,425
|
HlsMediaChunkExtractor createExtractor(
Uri uri,
Format format,
@Nullable List<Format> muxedCaptionFormats,
TimestampAdjuster timestampAdjuster,
Map<String, List<String>> responseHeaders,
ExtractorInput sniffingExtractorInput,
PlayerId playerId)
throws IOException;
|
HlsMediaChunkExtractor createExtractor( Uri uri, Format format, @Nullable List<Format> muxedCaptionFormats, TimestampAdjuster timestampAdjuster, Map<String, List<String>> responseHeaders, ExtractorInput sniffingExtractorInput, PlayerId playerId) throws IOException;
|
/**
* Creates an {@link Extractor} for extracting HLS media chunks.
*
* @param uri The URI of the media chunk.
* @param format A {@link Format} associated with the chunk to extract.
* @param muxedCaptionFormats List of muxed caption {@link Format}s. Null if no closed caption
* information is available in the multivariant playlist.
* @param timestampAdjuster Adjuster corresponding to the provided discontinuity sequence number.
* @param responseHeaders The HTTP response headers associated with the media segment or
* initialization section to extract.
* @param sniffingExtractorInput The first extractor input that will be passed to the returned
* extractor's {@link Extractor#read(ExtractorInput, PositionHolder)}. Must only be used to
* call {@link Extractor#sniff(ExtractorInput)}.
* @param playerId The {@link PlayerId} of the player using this extractors factory.
* @return An {@link HlsMediaChunkExtractor}.
* @throws IOException If an I/O error is encountered while sniffing.
*/
|
Creates an <code>Extractor</code> for extracting HLS media chunks
|
createExtractor
|
{
"repo_name": "google/ExoPlayer",
"path": "library/hls/src/main/java/com/google/android/exoplayer2/source/hls/HlsExtractorFactory.java",
"license": "apache-2.0",
"size": 2680
}
|
[
"android.net.Uri",
"androidx.annotation.Nullable",
"com.google.android.exoplayer2.Format",
"com.google.android.exoplayer2.analytics.PlayerId",
"com.google.android.exoplayer2.extractor.ExtractorInput",
"com.google.android.exoplayer2.util.TimestampAdjuster",
"java.io.IOException",
"java.util.List",
"java.util.Map"
] |
import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.util.TimestampAdjuster; import java.io.IOException; import java.util.List; import java.util.Map;
|
import android.net.*; import androidx.annotation.*; import com.google.android.exoplayer2.*; import com.google.android.exoplayer2.analytics.*; import com.google.android.exoplayer2.extractor.*; import com.google.android.exoplayer2.util.*; import java.io.*; import java.util.*;
|
[
"android.net",
"androidx.annotation",
"com.google.android",
"java.io",
"java.util"
] |
android.net; androidx.annotation; com.google.android; java.io; java.util;
| 1,391,140
|
@Override
public void handle(HttpChannel<?> channel) throws IOException,
ServletException {
super.handle(channel);
}
|
void function(HttpChannel<?> channel) throws IOException, ServletException { super.handle(channel); }
|
/**
* Handler method converting a Jetty HttpChannel into a Restlet Call.
*
* @param channel
* The channel to handle.
*/
|
Handler method converting a Jetty HttpChannel into a Restlet Call
|
handle
|
{
"repo_name": "evandor/skysail",
"path": "skysail.server.http.jetty/src/io/skysail/server/http/jetty/internal/WrappedServer.java",
"license": "apache-2.0",
"size": 1648
}
|
[
"java.io.IOException",
"javax.servlet.ServletException",
"org.eclipse.jetty.server.HttpChannel"
] |
import java.io.IOException; import javax.servlet.ServletException; import org.eclipse.jetty.server.HttpChannel;
|
import java.io.*; import javax.servlet.*; import org.eclipse.jetty.server.*;
|
[
"java.io",
"javax.servlet",
"org.eclipse.jetty"
] |
java.io; javax.servlet; org.eclipse.jetty;
| 733,917
|
protected List<Sequence> loadSequences(Session session) {
Map<Class, ClassDescriptor> descriptors = session.getDescriptors();
List<PortableSequenceGenerator> sequenceGenerators = new ArrayList<PortableSequenceGenerator>();
for (Class<?> entityClass : descriptors.keySet()) {
PortableSequenceGenerator sequenceGenerator = AnnotationUtils.findAnnotation(entityClass,
PortableSequenceGenerator.class);
if (sequenceGenerator != null) {
sequenceGenerators.add(sequenceGenerator);
}
loadFieldSequences(entityClass, sequenceGenerators);
for (Method method : entityClass.getMethods()) {
PortableSequenceGenerator methodSequenceGenerator = method.getAnnotation(
PortableSequenceGenerator.class);
if (methodSequenceGenerator != null) {
sequenceGenerators.add(methodSequenceGenerator);
}
}
}
List<Sequence> sequences = new ArrayList<Sequence>();
for (PortableSequenceGenerator sequenceGenerator : sequenceGenerators) {
Sequence sequence = new MaxValueIncrementerSequenceWrapper(sequenceGenerator);
sequences.add(sequence);
}
return sequences;
}
|
List<Sequence> function(Session session) { Map<Class, ClassDescriptor> descriptors = session.getDescriptors(); List<PortableSequenceGenerator> sequenceGenerators = new ArrayList<PortableSequenceGenerator>(); for (Class<?> entityClass : descriptors.keySet()) { PortableSequenceGenerator sequenceGenerator = AnnotationUtils.findAnnotation(entityClass, PortableSequenceGenerator.class); if (sequenceGenerator != null) { sequenceGenerators.add(sequenceGenerator); } loadFieldSequences(entityClass, sequenceGenerators); for (Method method : entityClass.getMethods()) { PortableSequenceGenerator methodSequenceGenerator = method.getAnnotation( PortableSequenceGenerator.class); if (methodSequenceGenerator != null) { sequenceGenerators.add(methodSequenceGenerator); } } } List<Sequence> sequences = new ArrayList<Sequence>(); for (PortableSequenceGenerator sequenceGenerator : sequenceGenerators) { Sequence sequence = new MaxValueIncrementerSequenceWrapper(sequenceGenerator); sequences.add(sequence); } return sequences; }
|
/**
* Gets any {@link Sequence} from the session.
*
* @param session the current session.
* @return a list of {@link Sequence}s.
*/
|
Gets any <code>Sequence</code> from the session
|
loadSequences
|
{
"repo_name": "mztaylor/rice-git",
"path": "rice-framework/krad-data/src/main/java/org/kuali/rice/krad/data/jpa/eclipselink/KradEclipseLinkCustomizer.java",
"license": "apache-2.0",
"size": 16180
}
|
[
"java.lang.reflect.Method",
"java.util.ArrayList",
"java.util.List",
"java.util.Map",
"org.eclipse.persistence.descriptors.ClassDescriptor",
"org.eclipse.persistence.sequencing.Sequence",
"org.eclipse.persistence.sessions.Session",
"org.kuali.rice.krad.data.jpa.PortableSequenceGenerator",
"org.springframework.core.annotation.AnnotationUtils"
] |
import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.sequencing.Sequence; import org.eclipse.persistence.sessions.Session; import org.kuali.rice.krad.data.jpa.PortableSequenceGenerator; import org.springframework.core.annotation.AnnotationUtils;
|
import java.lang.reflect.*; import java.util.*; import org.eclipse.persistence.descriptors.*; import org.eclipse.persistence.sequencing.*; import org.eclipse.persistence.sessions.*; import org.kuali.rice.krad.data.jpa.*; import org.springframework.core.annotation.*;
|
[
"java.lang",
"java.util",
"org.eclipse.persistence",
"org.kuali.rice",
"org.springframework.core"
] |
java.lang; java.util; org.eclipse.persistence; org.kuali.rice; org.springframework.core;
| 1,234,644
|
public static java.util.Set extractMedicNotesSet(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.MedicNotesVoCollection voCollection)
{
return extractMedicNotesSet(domainFactory, voCollection, null, new HashMap());
}
|
static java.util.Set function(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.MedicNotesVoCollection voCollection) { return extractMedicNotesSet(domainFactory, voCollection, null, new HashMap()); }
|
/**
* Create the ims.emergency.domain.objects.MedicNotes set from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/
|
Create the ims.emergency.domain.objects.MedicNotes set from the value object collection
|
extractMedicNotesSet
|
{
"repo_name": "IMS-MAXIMS/openMAXIMS",
"path": "Source Library/openmaxims_workspace/ValueObjects/src/ims/emergency/vo/domain/MedicNotesVoAssembler.java",
"license": "agpl-3.0",
"size": 21079
}
|
[
"java.util.HashMap"
] |
import java.util.HashMap;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,090,108
|
public final void updateSubchannelAddresses(
Subchannel subchannel, EquivalentAddressGroup addrs) {
Preconditions.checkNotNull(addrs, "addrs");
updateSubchannelAddresses(subchannel, Collections.singletonList(addrs));
}
|
final void function( Subchannel subchannel, EquivalentAddressGroup addrs) { Preconditions.checkNotNull(addrs, "addrs"); updateSubchannelAddresses(subchannel, Collections.singletonList(addrs)); }
|
/**
* Equivalent to {@link #updateSubchannelAddresses(io.grpc.LoadBalancer.Subchannel, List)} with
* the given single {@code EquivalentAddressGroup}.
*
* @since 1.4.0
*/
|
Equivalent to <code>#updateSubchannelAddresses(io.grpc.LoadBalancer.Subchannel, List)</code> with the given single EquivalentAddressGroup
|
updateSubchannelAddresses
|
{
"repo_name": "zhangkun83/grpc-java",
"path": "core/src/main/java/io/grpc/LoadBalancer.java",
"license": "apache-2.0",
"size": 29665
}
|
[
"com.google.common.base.Preconditions",
"java.util.Collections"
] |
import com.google.common.base.Preconditions; import java.util.Collections;
|
import com.google.common.base.*; import java.util.*;
|
[
"com.google.common",
"java.util"
] |
com.google.common; java.util;
| 1,822,913
|
void checkValidYearOfEra(int yearOfEra) {
if (yearOfEra < MIN_YEAR_OF_ERA ||
yearOfEra > MAX_YEAR_OF_ERA) {
throw new DateTimeException("Invalid year of Hijrah Era");
}
}
|
void checkValidYearOfEra(int yearOfEra) { if (yearOfEra < MIN_YEAR_OF_ERA yearOfEra > MAX_YEAR_OF_ERA) { throw new DateTimeException(STR); } }
|
/**
* Check the validity of a yearOfEra.
* @param yearOfEra the year to check
*/
|
Check the validity of a yearOfEra
|
checkValidYearOfEra
|
{
"repo_name": "karianna/jdk8_tl",
"path": "jdk/src/share/classes/java/time/chrono/HijrahChronology.java",
"license": "gpl-2.0",
"size": 44237
}
|
[
"java.time.DateTimeException"
] |
import java.time.DateTimeException;
|
import java.time.*;
|
[
"java.time"
] |
java.time;
| 183,592
|
public static int getHour() {
Calendar calendar = Calendar.getInstance(Locale.getDefault());
return calendar.get(Calendar.HOUR_OF_DAY);
}
|
static int function() { Calendar calendar = Calendar.getInstance(Locale.getDefault()); return calendar.get(Calendar.HOUR_OF_DAY); }
|
/**
* Get system time as hour.
* @return
*/
|
Get system time as hour
|
getHour
|
{
"repo_name": "yinglovezhuzhu/PullView_eclipse",
"path": "PullView/src/com/opensource/pullview/utils/DateUtil.java",
"license": "apache-2.0",
"size": 4545
}
|
[
"java.util.Calendar",
"java.util.Locale"
] |
import java.util.Calendar; import java.util.Locale;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,384,234
|
public static File getAppDir()
{
return getContext().getFilesDir();
}
|
static File function() { return getContext().getFilesDir(); }
|
/**
* Get application folder.
* @return application folder.
*/
|
Get application folder
|
getAppDir
|
{
"repo_name": "yangjun2/android",
"path": "androidhap/InfinitiInTouch/src/com/airbiquity/hap/A.java",
"license": "unlicense",
"size": 20026
}
|
[
"java.io.File"
] |
import java.io.File;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,025,180
|
protected Map<String, String> getParams() throws AuthFailureError {
return null;
}
|
Map<String, String> function() throws AuthFailureError { return null; }
|
/**
* Returns a Map of parameters to be used for a POST or PUT request. Can throw
* {@link AuthFailureError} as authentication may be required to provide these values.
*
* <p>Note that you can directly override {@link #getBody()} for custom data.</p>
*
* @throws AuthFailureError in the event of auth failure
*/
|
Returns a Map of parameters to be used for a POST or PUT request. Can throw <code>AuthFailureError</code> as authentication may be required to provide these values. Note that you can directly override <code>#getBody()</code> for custom data
|
getParams
|
{
"repo_name": "YamatoMiura/kuruchan",
"path": "app/libs/volley/src/main/java/com/android/volley/Request.java",
"license": "gpl-2.0",
"size": 19896
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 342,859
|
public Rectangle squaredBoundingBox(Set<Part> parts) {
int minX = Integer.MAX_VALUE;
int minY = Integer.MAX_VALUE;
int maxX = Integer.MIN_VALUE;
int maxY = Integer.MIN_VALUE;
// Compute a rectangle (minX, minY, maxX, maxY) surrounding the body
for (Part p : parts) {
minX = Math.min(minX, p.getNormalizedX());
minY = Math.min(minY, p.getNormalizedY());
maxX = Math.max(maxX, p.getNormalizedX());
maxY = Math.max(maxY, p.getNormalizedY());
}
// Expand the rectangle to a squared bounding box with size the max(height, width).
int bodyWidth = maxX - minX;
int bodyHeight = maxY - minY;
int bboxSize = Math.max(bodyWidth, bodyHeight);
// Compute the top left corner of the squared BBox
final int bboxTopLeftX = minX - (bboxSize - bodyWidth) / 2;
final int bboxTopLeftY = minY - (bboxSize - bodyHeight) / 2;
// Squared Bounding Box surrounding the body
return new Rectangle(bboxTopLeftX, bboxTopLeftY, bboxSize, bboxSize);
}
|
Rectangle function(Set<Part> parts) { int minX = Integer.MAX_VALUE; int minY = Integer.MAX_VALUE; int maxX = Integer.MIN_VALUE; int maxY = Integer.MIN_VALUE; for (Part p : parts) { minX = Math.min(minX, p.getNormalizedX()); minY = Math.min(minY, p.getNormalizedY()); maxX = Math.max(maxX, p.getNormalizedX()); maxY = Math.max(maxY, p.getNormalizedY()); } int bodyWidth = maxX - minX; int bodyHeight = maxY - minY; int bboxSize = Math.max(bodyWidth, bodyHeight); final int bboxTopLeftX = minX - (bboxSize - bodyWidth) / 2; final int bboxTopLeftY = minY - (bboxSize - bodyHeight) / 2; return new Rectangle(bboxTopLeftX, bboxTopLeftY, bboxSize, bboxSize); }
|
/**
* Compute a square bounding box that surrounds the body (defined by its parts). First finds the the exact bounding
* rectangle and then use the maximum from the height and width as a size of the surrounding box.
* @param parts of the body to compute the bounding box fof.
* @return Square bounding box that surrounds the body parts coordinates.
*/
|
Compute a square bounding box that surrounds the body (defined by its parts). First finds the the exact bounding rectangle and then use the maximum from the height and width as a size of the surrounding box
|
squaredBoundingBox
|
{
"repo_name": "spring-cloud-stream-app-starters/tensorflow",
"path": "spring-cloud-starter-stream-processor-pose-estimation/src/main/java/org/springframework/cloud/stream/app/pose/estimation/model/PoseMatcher.java",
"license": "apache-2.0",
"size": 5790
}
|
[
"java.awt.Rectangle",
"java.util.Set"
] |
import java.awt.Rectangle; import java.util.Set;
|
import java.awt.*; import java.util.*;
|
[
"java.awt",
"java.util"
] |
java.awt; java.util;
| 2,471,794
|
/**
 * Writes a key/value pair to an output rooted at {@code baseOutputPath}.
 *
 * <p>The record writer is obtained from the job's output format, which is
 * expected to be a FileOutputFormat; the framework generates a unique file
 * name under the base path.
 *
 * @param key the record key
 * @param value the record value
 * @param baseOutputPath base-output path to write the record to
 * @throws IOException if the underlying record writer fails
 * @throws InterruptedException if the write is interrupted
 */
@SuppressWarnings("unchecked")
public void write(Object key, Object value, String baseOutputPath)
      throws IOException, InterruptedException {
    // Validate the caller-supplied base path before using it.
    checkBaseOutputPath(baseOutputPath);
    // Derive a task-attempt context from the enclosing job context so the
    // record writer is associated with the current task attempt.
    TaskAttemptContext taskContext = new TaskAttemptContext(
      context.getConfiguration(), context.getTaskAttemptID());
    getRecordWriter(taskContext, baseOutputPath).write(key, value);
  }
|
@SuppressWarnings(STR) void function(Object key, Object value, String baseOutputPath) throws IOException, InterruptedException { checkBaseOutputPath(baseOutputPath); TaskAttemptContext taskContext = new TaskAttemptContext( context.getConfiguration(), context.getTaskAttemptID()); getRecordWriter(taskContext, baseOutputPath).write(key, value); }
|
/**
* Write key value to an output file name.
*
* Gets the record writer from job's output format.
* Job's output format should be a FileOutputFormat.
*
* @param key the key
* @param value the value
* @param baseOutputPath base-output path to write the record to.
* Note: Framework will generate unique filename for the baseOutputPath
*/
|
Write key value to an output file name. Gets the record writer from job's output format. Job's output format should be a FileOutputFormat
|
write
|
{
"repo_name": "julianpeeters/avro",
"path": "lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java",
"license": "apache-2.0",
"size": 17550
}
|
[
"java.io.IOException",
"org.apache.hadoop.mapreduce.TaskAttemptContext"
] |
import java.io.IOException; import org.apache.hadoop.mapreduce.TaskAttemptContext;
|
import java.io.*; import org.apache.hadoop.mapreduce.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 1,627,691
|
/**
 * Draws the icon at the specified location, honoring the configured rotation.
 *
 * <p>For ROTATE_NONE the characters are stacked vertically, one per line,
 * with per-character positioning tweaks for Japanese half-height glyphs; for
 * ROTATE_LEFT / ROTATE_RIGHT the whole label is drawn rotated 90 degrees via
 * the Graphics2D transform (which is restored before returning). Antialiasing
 * is enabled, and the component's foreground color and font are used when a
 * component is supplied.
 *
 * @param c component supplying paint properties (may be null)
 * @param g graphics context to draw into (must be a Graphics2D)
 * @param x left edge of the icon
 * @param y top edge of the icon
 */
public void paintIcon(Component c, Graphics g, int x, int y) {
        // We don't insist that it be on the same Component
        Graphics2D g2 = (Graphics2D) g;
        g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON);
        if (c != null) {
            g.setColor(c.getForeground());
            g.setFont(c.getFont());
        }
        if (fRotation == ROTATE_NONE) {
            int yPos = y + fCharHeight;
            for (int i = 0; i < fCharStrings.length; i++) {
                // Special rules for Japanese - "half-height" characters (like ya, yu, yo in combinations)
                // should draw in the top-right quadrant when drawn vertically
                // - they draw in the bottom-left normally
                int tweak;
                switch (fPosition[i]) {
                case POSITION_NORMAL:
                    // Roman fonts should be centered. Japanese fonts are always monospaced.
                    g.drawString(fCharStrings[i], x+((fWidth-fCharWidths[i])/2), yPos);
                    break;
                case POSITION_TOP_RIGHT:
                    tweak = fCharHeight/3; // Should be 2, but they aren't actually half-height
                    g.drawString(fCharStrings[i], x+(tweak/2), yPos-tweak);
                    break;
                case POSITION_FAR_TOP_RIGHT:
                    tweak = fCharHeight - fCharHeight/3;
                    g.drawString(fCharStrings[i], x+(tweak/2), yPos-tweak);
                    break;
                }
                yPos += fCharHeight;
            }
        }
        else if (fRotation == ROTATE_LEFT) {
            // Rotate -90 degrees about the bottom-right corner, draw the
            // label, then undo the transform so the Graphics is unchanged.
            g.translate(x+fWidth,y+fHeight);
            ((Graphics2D)g).rotate(-NINETY_DEGREES);
            g.drawString(fLabel, kBufferSpace, -fDescent);
            ((Graphics2D)g).rotate(NINETY_DEGREES);
            g.translate(-(x+fWidth),-(y+fHeight));
        }
        else if (fRotation == ROTATE_RIGHT) {
            g.translate(x,y);
            ((Graphics2D)g).rotate(NINETY_DEGREES);
            g.drawString(fLabel, kBufferSpace, -fDescent);
            ((Graphics2D)g).rotate(-NINETY_DEGREES);
            g.translate(-x,-y);
        }
    }
|
void function(Component c, Graphics g, int x, int y) { Graphics2D g2 = (Graphics2D) g; g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); if (c != null) { g.setColor(c.getForeground()); g.setFont(c.getFont()); } if (fRotation == ROTATE_NONE) { int yPos = y + fCharHeight; for (int i = 0; i < fCharStrings.length; i++) { int tweak; switch (fPosition[i]) { case POSITION_NORMAL: g.drawString(fCharStrings[i], x+((fWidth-fCharWidths[i])/2), yPos); break; case POSITION_TOP_RIGHT: tweak = fCharHeight/3; g.drawString(fCharStrings[i], x+(tweak/2), yPos-tweak); break; case POSITION_FAR_TOP_RIGHT: tweak = fCharHeight - fCharHeight/3; g.drawString(fCharStrings[i], x+(tweak/2), yPos-tweak); break; } yPos += fCharHeight; } } else if (fRotation == ROTATE_LEFT) { g.translate(x+fWidth,y+fHeight); ((Graphics2D)g).rotate(-NINETY_DEGREES); g.drawString(fLabel, kBufferSpace, -fDescent); ((Graphics2D)g).rotate(NINETY_DEGREES); g.translate(-(x+fWidth),-(y+fHeight)); } else if (fRotation == ROTATE_RIGHT) { g.translate(x,y); ((Graphics2D)g).rotate(NINETY_DEGREES); g.drawString(fLabel, kBufferSpace, -fDescent); ((Graphics2D)g).rotate(-NINETY_DEGREES); g.translate(-x,-y); } }
|
/**
* Draw the icon at the specified location. Icon implementations
* may use the Component argument to get properties useful for
* painting, e.g. the foreground or background color.
*/
|
Draw the icon at the specified location. Icon implementations may use the Component argument to get properties useful for painting, e.g. the foreground or background color
|
paintIcon
|
{
"repo_name": "tectronics/xenogeddon",
"path": "src/jmetest/input/VTextIcon.java",
"license": "gpl-2.0",
"size": 14366
}
|
[
"java.awt.Component",
"java.awt.Graphics",
"java.awt.Graphics2D",
"java.awt.RenderingHints"
] |
import java.awt.Component; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.RenderingHints;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 2,012,134
|
/**
 * Inserts a list of new items at the given adapter position.
 *
 * @param position position to insert at; must be in [0, current size)
 * @param data the items to insert
 * @throws ArrayIndexOutOfBoundsException if position is outside the valid range
 */
public void addData(int position, List<T> data) {
    if (0 <= position && position < mData.size()) {
        mData.addAll(position, data);
        // Notify about the entire inserted range: the previous
        // notifyItemInserted(position) announced only ONE new item, which
        // desyncs the RecyclerView whenever data.size() > 1.
        notifyItemRangeInserted(position, data.size());
        // Rebind the items shifted below the inserted block.
        notifyItemRangeChanged(position, mData.size() - position - data.size());
    } else {
        throw new ArrayIndexOutOfBoundsException("inserted position must be greater than or equal to 0 and less than data size");
    }
}
|
void function(int position, List<T> data) { if (0 <= position && position < mData.size()) { mData.addAll(position, data); notifyItemInserted(position); notifyItemRangeChanged(position, mData.size() - position - data.size()); } else { throw new ArrayIndexOutOfBoundsException(STR); } }
|
/**
* add new data in to certain location
*
* @param position
*/
|
add new data in to certain location
|
addData
|
{
"repo_name": "weiwenqiang/GitHub",
"path": "MVP/MVP-master/core/src/main/java/com/hpw/mvpframe/widget/recyclerview/BaseQuickAdapter.java",
"license": "apache-2.0",
"size": 41827
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,820,218
|
/**
 * Indicates if this <code>JavaConstructor</code> is <code>private</code>.
 *
 * @return <code>true</code> if the constructor's modifier bits include
 *         <code>private</code>, otherwise <code>false</code>.
 */
public boolean isPrivate() {
        final int modifierFlags = getModifiers();
        return Modifier.isPrivate(modifierFlags);
    }
|
boolean function() { return Modifier.isPrivate(getModifiers()); }
|
/**
* Indicates if this <code>JavaConstructor</code> is <code>private</code>.
*
* @return <code>true</code> if this <code>JavaConstructor</code> is <code>private</code>, otherwise <code>false</code>.
*/
|
Indicates if this <code>JavaConstructor</code> is <code>private</code>
|
isPrivate
|
{
"repo_name": "RallySoftware/eclipselink.runtime",
"path": "moxy/org.eclipse.persistence.moxy.dynamicxjc/src/org/eclipse/persistence/jaxb/javamodel/xjc/XJCJavaConstructorImpl.java",
"license": "epl-1.0",
"size": 6613
}
|
[
"java.lang.reflect.Modifier"
] |
import java.lang.reflect.Modifier;
|
import java.lang.reflect.*;
|
[
"java.lang"
] |
java.lang;
| 2,109,746
|
/**
 * Returns the taxa that are supported by Gemma and have genes held by NCBI.
 *
 * @return collection of supported taxa with NCBI genes
 */
public Collection<Taxon> getSupportedTaxaWithNCBIGenes() {
        return this.supportedTaxaWithNCBIGenes;
    }
|
Collection<Taxon> function() { return supportedTaxaWithNCBIGenes; }
|
/**
* Those taxa that are supported by GEMMA and have genes in NCBI.
*
* @return Collection of taxa that are supported by the GEMMA and have genes held by NCBI.
*/
|
Those taxa that are supported by GEMMA and have genes in NCBI
|
getSupportedTaxaWithNCBIGenes
|
{
"repo_name": "ppavlidis/Gemma",
"path": "gemma-core/src/main/java/ubic/gemma/core/loader/genome/gene/ncbi/NcbiGeneDomainObjectGenerator.java",
"license": "apache-2.0",
"size": 11078
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,905,170
|
/**
 * Draws the container screen and all of its components.
 *
 * <p>Renders, in order: the default background, the container background
 * layer, the superclass widgets, every inventory slot (highlighting the
 * hovered slot and remembering it in {@code theSlot}), the foreground layer,
 * the stack carried by the cursor (drag or held stack), the "returning
 * stack" fly-back animation, and finally the tooltip for the hovered slot.
 *
 * @param mouseX current mouse x position
 * @param mouseY current mouse y position
 * @param partialTicks partial-tick time used for render interpolation
 */
public void drawScreen(int mouseX, int mouseY, float partialTicks)
    {
        this.drawDefaultBackground();
        int k = this.guiLeft;
        int l = this.guiTop;
        this.drawGuiContainerBackgroundLayer(partialTicks, mouseX, mouseY);
        GlStateManager.disableRescaleNormal();
        RenderHelper.disableStandardItemLighting();
        GlStateManager.disableLighting();
        GlStateManager.disableDepth();
        super.drawScreen(mouseX, mouseY, partialTicks);
        RenderHelper.enableGUIStandardItemLighting();
        GlStateManager.pushMatrix();
        // Move the origin to the GUI's top-left corner for slot drawing.
        GlStateManager.translate((float)k, (float)l, 0.0F);
        GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
        GlStateManager.enableRescaleNormal();
        this.theSlot = null;
        short short1 = 240;
        short short2 = 240;
        OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, (float)short1 / 1.0F, (float)short2 / 1.0F);
        GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
        int k1;
        for (int i1 = 0; i1 < this.inventorySlots.inventorySlots.size(); ++i1)
        {
            Slot slot = (Slot)this.inventorySlots.inventorySlots.get(i1);
            this.drawSlot(slot);
            if (this.isMouseOverSlot(slot, mouseX, mouseY) && slot.canBeHovered())
            {
                // Remember the hovered slot and draw a translucent white
                // (0x80FFFFFF) highlight rectangle over it.
                this.theSlot = slot;
                GlStateManager.disableLighting();
                GlStateManager.disableDepth();
                int j1 = slot.xDisplayPosition;
                k1 = slot.yDisplayPosition;
                GlStateManager.colorMask(true, true, true, false);
                this.drawGradientRect(j1, k1, j1 + 16, k1 + 16, -2130706433, -2130706433);
                GlStateManager.colorMask(true, true, true, true);
                GlStateManager.enableLighting();
                GlStateManager.enableDepth();
            }
        }
        RenderHelper.disableStandardItemLighting();
        this.drawGuiContainerForegroundLayer(mouseX, mouseY);
        RenderHelper.enableGUIStandardItemLighting();
        InventoryPlayer inventoryplayer = this.mc.thePlayer.inventory;
        // Stack rendered at the cursor: the in-progress drag if any,
        // otherwise the stack the player is holding.
        ItemStack itemstack = this.draggedStack == null ? inventoryplayer.getItemStack() : this.draggedStack;
        if (itemstack != null)
        {
            byte b0 = 8;
            k1 = this.draggedStack == null ? 8 : 16;
            String s = null;
            if (this.draggedStack != null && this.isRightMouseClick)
            {
                // Right-click drag renders half the stack size (rounded up).
                itemstack = itemstack.copy();
                itemstack.stackSize = MathHelper.ceiling_float_int((float)itemstack.stackSize / 2.0F);
            }
            else if (this.dragSplitting && this.dragSplittingSlots.size() > 1)
            {
                itemstack = itemstack.copy();
                itemstack.stackSize = this.dragSplittingRemnant;
                if (itemstack.stackSize == 0)
                {
                    // Show a yellow "0" when drag-splitting leaves nothing.
                    s = "" + EnumChatFormatting.YELLOW + "0";
                }
            }
            this.drawItemStack(itemstack, mouseX - k - b0, mouseY - l - k1, s);
        }
        if (this.returningStack != null)
        {
            // Animate the stack flying back to its slot; the division by
            // 100.0F makes the animation complete after 100 ms.
            float f1 = (float)(Minecraft.getSystemTime() - this.returningStackTime) / 100.0F;
            if (f1 >= 1.0F)
            {
                f1 = 1.0F;
                this.returningStack = null;
            }
            k1 = this.returningStackDestSlot.xDisplayPosition - this.touchUpX;
            int j2 = this.returningStackDestSlot.yDisplayPosition - this.touchUpY;
            int l1 = this.touchUpX + (int)((float)k1 * f1);
            int i2 = this.touchUpY + (int)((float)j2 * f1);
            this.drawItemStack(this.returningStack, l1, i2, (String)null);
        }
        GlStateManager.popMatrix();
        if (inventoryplayer.getItemStack() == null && this.theSlot != null && this.theSlot.getHasStack())
        {
            ItemStack itemstack1 = this.theSlot.getStack();
            this.renderToolTip(itemstack1, mouseX, mouseY);
        }
        GlStateManager.enableLighting();
        GlStateManager.enableDepth();
        RenderHelper.enableStandardItemLighting();
    }
|
void function(int mouseX, int mouseY, float partialTicks) { this.drawDefaultBackground(); int k = this.guiLeft; int l = this.guiTop; this.drawGuiContainerBackgroundLayer(partialTicks, mouseX, mouseY); GlStateManager.disableRescaleNormal(); RenderHelper.disableStandardItemLighting(); GlStateManager.disableLighting(); GlStateManager.disableDepth(); super.drawScreen(mouseX, mouseY, partialTicks); RenderHelper.enableGUIStandardItemLighting(); GlStateManager.pushMatrix(); GlStateManager.translate((float)k, (float)l, 0.0F); GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F); GlStateManager.enableRescaleNormal(); this.theSlot = null; short short1 = 240; short short2 = 240; OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, (float)short1 / 1.0F, (float)short2 / 1.0F); GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F); int k1; for (int i1 = 0; i1 < this.inventorySlots.inventorySlots.size(); ++i1) { Slot slot = (Slot)this.inventorySlots.inventorySlots.get(i1); this.drawSlot(slot); if (this.isMouseOverSlot(slot, mouseX, mouseY) && slot.canBeHovered()) { this.theSlot = slot; GlStateManager.disableLighting(); GlStateManager.disableDepth(); int j1 = slot.xDisplayPosition; k1 = slot.yDisplayPosition; GlStateManager.colorMask(true, true, true, false); this.drawGradientRect(j1, k1, j1 + 16, k1 + 16, -2130706433, -2130706433); GlStateManager.colorMask(true, true, true, true); GlStateManager.enableLighting(); GlStateManager.enableDepth(); } } RenderHelper.disableStandardItemLighting(); this.drawGuiContainerForegroundLayer(mouseX, mouseY); RenderHelper.enableGUIStandardItemLighting(); InventoryPlayer inventoryplayer = this.mc.thePlayer.inventory; ItemStack itemstack = this.draggedStack == null ? inventoryplayer.getItemStack() : this.draggedStack; if (itemstack != null) { byte b0 = 8; k1 = this.draggedStack == null ? 
8 : 16; String s = null; if (this.draggedStack != null && this.isRightMouseClick) { itemstack = itemstack.copy(); itemstack.stackSize = MathHelper.ceiling_float_int((float)itemstack.stackSize / 2.0F); } else if (this.dragSplitting && this.dragSplittingSlots.size() > 1) { itemstack = itemstack.copy(); itemstack.stackSize = this.dragSplittingRemnant; if (itemstack.stackSize == 0) { s = STR0"; } } this.drawItemStack(itemstack, mouseX - k - b0, mouseY - l - k1, s); } if (this.returningStack != null) { float f1 = (float)(Minecraft.getSystemTime() - this.returningStackTime) / 100.0F; if (f1 >= 1.0F) { f1 = 1.0F; this.returningStack = null; } k1 = this.returningStackDestSlot.xDisplayPosition - this.touchUpX; int j2 = this.returningStackDestSlot.yDisplayPosition - this.touchUpY; int l1 = this.touchUpX + (int)((float)k1 * f1); int i2 = this.touchUpY + (int)((float)j2 * f1); this.drawItemStack(this.returningStack, l1, i2, (String)null); } GlStateManager.popMatrix(); if (inventoryplayer.getItemStack() == null && this.theSlot != null && this.theSlot.getHasStack()) { ItemStack itemstack1 = this.theSlot.getStack(); this.renderToolTip(itemstack1, mouseX, mouseY); } GlStateManager.enableLighting(); GlStateManager.enableDepth(); RenderHelper.enableStandardItemLighting(); }
|
/**
* Draws the screen and all the components in it. Args : mouseX, mouseY, renderPartialTicks
*/
|
Draws the screen and all the components in it. Args : mouseX, mouseY, renderPartialTicks
|
drawScreen
|
{
"repo_name": "kelthalorn/ConquestCraft",
"path": "build/tmp/recompSrc/net/minecraft/client/gui/inventory/GuiContainer.java",
"license": "lgpl-2.1",
"size": 28781
}
|
[
"net.minecraft.client.Minecraft",
"net.minecraft.client.renderer.GlStateManager",
"net.minecraft.client.renderer.OpenGlHelper",
"net.minecraft.client.renderer.RenderHelper",
"net.minecraft.entity.player.InventoryPlayer",
"net.minecraft.inventory.Slot",
"net.minecraft.item.ItemStack",
"net.minecraft.util.MathHelper"
] |
import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.GlStateManager; import net.minecraft.client.renderer.OpenGlHelper; import net.minecraft.client.renderer.RenderHelper; import net.minecraft.entity.player.InventoryPlayer; import net.minecraft.inventory.Slot; import net.minecraft.item.ItemStack; import net.minecraft.util.MathHelper;
|
import net.minecraft.client.*; import net.minecraft.client.renderer.*; import net.minecraft.entity.player.*; import net.minecraft.inventory.*; import net.minecraft.item.*; import net.minecraft.util.*;
|
[
"net.minecraft.client",
"net.minecraft.entity",
"net.minecraft.inventory",
"net.minecraft.item",
"net.minecraft.util"
] |
net.minecraft.client; net.minecraft.entity; net.minecraft.inventory; net.minecraft.item; net.minecraft.util;
| 2,226,244
|
/**
 * Reads one line of text from the serial input stream.
 *
 * @return the next line received on the serial connection
 * @throws IOException if reading from the stream fails
 */
private String readLine() throws IOException {
        return this.serialInput.readLine();
    }
/**
* {@inheritDoc}
|
String function() throws IOException { return serialInput.readLine(); } /** * {@inheritDoc}
|
/**
* Read a line from the Input Stream.
*
* @return
* @throws IOException
*/
|
Read a line from the Input Stream
|
readLine
|
{
"repo_name": "georgeerhan/openhab2-addons",
"path": "addons/binding/org.openhab.binding.dscalarm/src/main/java/org/openhab/binding/dscalarm/handler/IT100BridgeHandler.java",
"license": "epl-1.0",
"size": 8464
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 670,145
|
/**
 * Updates a registered instance of a service.
 *
 * <p>Reads the namespace (defaulting when absent) and the required service
 * name from the request, validates the service name format, builds the
 * instance from the request parameters, and delegates to the instance
 * operator selected by {@code ver}.
 *
 * @param ver naming data version selecting the instance operator ("v2" by default)
 * @param request http request carrying the instance parameters
 * @return 'ok' if the update succeeds
 * @throws Exception any error during the update
 */
@CanDistro
@PutMapping("/instance")
@Secured(action = ActionTypes.WRITE)
public String updateInstance(@RequestParam(defaultValue = "v2", required = false) String ver,
        HttpServletRequest request) throws Exception {
        String namespaceId = WebUtils.optional(request, CommonParams.NAMESPACE_ID, Constants.DEFAULT_NAMESPACE_ID);
        String serviceName = WebUtils.required(request, CommonParams.SERVICE_NAME);
        NamingUtils.checkServiceNameFormat(serviceName);
        // The ephemeral flag falls back to the global switch when the
        // request does not specify one.
        Instance instance = HttpRequestInstanceBuilder.newBuilder()
                .setDefaultInstanceEphemeral(switchDomain.isDefaultInstanceEphemeral()).setRequest(request).build();
        getInstanceOperator(ver).updateInstance(namespaceId, serviceName, instance);
        return "ok";
    }
|
@PutMapping(STR) @Secured(action = ActionTypes.WRITE) String function(@RequestParam(defaultValue = "v2", required = false) String ver, HttpServletRequest request) throws Exception { String namespaceId = WebUtils.optional(request, CommonParams.NAMESPACE_ID, Constants.DEFAULT_NAMESPACE_ID); String serviceName = WebUtils.required(request, CommonParams.SERVICE_NAME); NamingUtils.checkServiceNameFormat(serviceName); Instance instance = HttpRequestInstanceBuilder.newBuilder() .setDefaultInstanceEphemeral(switchDomain.isDefaultInstanceEphemeral()).setRequest(request).build(); getInstanceOperator(ver).updateInstance(namespaceId, serviceName, instance); return "ok"; }
|
/**
* Update instance.
*
* @param request http request
* @return 'ok' if success
* @throws Exception any error during update
*/
|
Update instance
|
updateInstance
|
{
"repo_name": "alibaba/nacos",
"path": "naming/src/main/java/com/alibaba/nacos/naming/controllers/UpgradeOpsController.java",
"license": "apache-2.0",
"size": 25233
}
|
[
"com.alibaba.nacos.api.common.Constants",
"com.alibaba.nacos.api.naming.CommonParams",
"com.alibaba.nacos.api.naming.pojo.Instance",
"com.alibaba.nacos.api.naming.utils.NamingUtils",
"com.alibaba.nacos.auth.annotation.Secured",
"com.alibaba.nacos.core.utils.WebUtils",
"com.alibaba.nacos.naming.pojo.instance.HttpRequestInstanceBuilder",
"com.alibaba.nacos.plugin.auth.constant.ActionTypes",
"javax.servlet.http.HttpServletRequest",
"org.springframework.web.bind.annotation.PutMapping",
"org.springframework.web.bind.annotation.RequestParam"
] |
import com.alibaba.nacos.api.common.Constants; import com.alibaba.nacos.api.naming.CommonParams; import com.alibaba.nacos.api.naming.pojo.Instance; import com.alibaba.nacos.api.naming.utils.NamingUtils; import com.alibaba.nacos.auth.annotation.Secured; import com.alibaba.nacos.core.utils.WebUtils; import com.alibaba.nacos.naming.pojo.instance.HttpRequestInstanceBuilder; import com.alibaba.nacos.plugin.auth.constant.ActionTypes; import javax.servlet.http.HttpServletRequest; import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestParam;
|
import com.alibaba.nacos.api.common.*; import com.alibaba.nacos.api.naming.*; import com.alibaba.nacos.api.naming.pojo.*; import com.alibaba.nacos.api.naming.utils.*; import com.alibaba.nacos.auth.annotation.*; import com.alibaba.nacos.core.utils.*; import com.alibaba.nacos.naming.pojo.instance.*; import com.alibaba.nacos.plugin.auth.constant.*; import javax.servlet.http.*; import org.springframework.web.bind.annotation.*;
|
[
"com.alibaba.nacos",
"javax.servlet",
"org.springframework.web"
] |
com.alibaba.nacos; javax.servlet; org.springframework.web;
| 1,085,036
|
List<NavalBattleReport> list();
|
List<NavalBattleReport> list();
|
/**
* Listing all NavalBattleReport from the database.
*
 * @return a list of all the NavalBattleReport entries that exist inside the corresponding database table.
*/
|
Listing all NavalBattleReport from the database
|
list
|
{
"repo_name": "EaW1805/data",
"path": "src/main/java/com/eaw1805/data/managers/beans/NavalBattleReportManagerBean.java",
"license": "mit",
"size": 2519
}
|
[
"com.eaw1805.data.model.battles.NavalBattleReport",
"java.util.List"
] |
import com.eaw1805.data.model.battles.NavalBattleReport; import java.util.List;
|
import com.eaw1805.data.model.battles.*; import java.util.*;
|
[
"com.eaw1805.data",
"java.util"
] |
com.eaw1805.data; java.util;
| 2,835,851
|
/**
 * Returns multiple {@link CharSequence} items joined into a single
 * {@link CharSequence}, separated by the localized list delimiter resource
 * (such as ", ").
 *
 * @hide
 */
public static CharSequence join(Iterable<CharSequence> list) {
        // Delegate to the two-argument overload using the system's
        // locale-specific delimiter text.
        return join(Resources.getSystem().getText(R.string.list_delimeter), list);
    }
|
static CharSequence function(Iterable<CharSequence> list) { final CharSequence delimiter = Resources.getSystem().getText(R.string.list_delimeter); return join(delimiter, list); }
|
/**
* Returns list of multiple {@link CharSequence} joined into a single
* {@link CharSequence} separated by localized delimiter such as ", ".
*
* @hide
*/
|
Returns list of multiple <code>CharSequence</code> joined into a single <code>CharSequence</code> separated by localized delimiter such as ", "
|
join
|
{
"repo_name": "s20121035/rk3288_android5.1_repo",
"path": "frameworks/base/core/java/android/text/TextUtils.java",
"license": "gpl-3.0",
"size": 60007
}
|
[
"android.content.res.Resources"
] |
import android.content.res.Resources;
|
import android.content.res.*;
|
[
"android.content"
] |
android.content;
| 1,063,443
|
/**
 * Sends a volume command to a group (music zone) and publishes the
 * resulting volume to the item.
 *
 * <p>Writes the hex-encoded command to the amplifier, reads the fixed
 * 50-character response, and extracts the volume with {@code volumePattern};
 * on a match the value is posted as a DecimalType update, otherwise the raw
 * response is logged as an error.
 *
 * @param itemName item name to post the volume update to
 * @param command Sonance IP command code, as a hex string
 * @param outToServer data output stream connected to the amplifier
 * @param i buffered reader carrying the amplifier's response
 * @throws IOException when the amplifier cannot be reached
 */
private void sendVolumeCommand(String itemName, String command, DataOutputStream outToServer, BufferedReader i)
        throws IOException {
        char[] cbuf = new char[50]; // Response is always 50 characters
        logger.debug("Sending volume command {}", command);
        outToServer.write(hexStringToByteArray(command));
        i.read(cbuf, 0, 50);
        Matcher m = volumePattern.matcher(new String(cbuf));
        if (m.find()) {
            // The first capture group holds the reported volume level.
            String volume = m.group(1);
            eventPublisher.postUpdate(itemName, new DecimalType(volume));
            logger.debug("Setting volume for item {} on {}", itemName, volume);
        } else {
            logger.error("Error sending regular volume command {}, received this: {}", command, new String(cbuf));
        }
    }
|
void function(String itemName, String command, DataOutputStream outToServer, BufferedReader i) throws IOException { char[] cbuf = new char[50]; logger.debug(STR, command); outToServer.write(hexStringToByteArray(command)); i.read(cbuf, 0, 50); Matcher m = volumePattern.matcher(new String(cbuf)); if (m.find()) { String volume = m.group(1); eventPublisher.postUpdate(itemName, new DecimalType(volume)); logger.debug(STR, itemName, volume); } else { logger.error(STR, command, new String(cbuf)); } }
|
/**
* Send volume commands to groups (music zones)
*
* @param itemName
* item name to send update to
* @param command
* Sonance IP code to execute
* @param outToServer
 *            data output stream we can write to
* @param i
 *            buffered reader where we can read from
* @throws IOException
* throws an exception when we can't reach to amplifier
*/
|
Send volume commands to groups (music zones)
|
sendVolumeCommand
|
{
"repo_name": "paolodenti/openhab",
"path": "bundles/binding/org.openhab.binding.sonance/src/main/java/org/openhab/binding/sonance/internal/SonanceBinding.java",
"license": "epl-1.0",
"size": 24123
}
|
[
"java.io.BufferedReader",
"java.io.DataOutputStream",
"java.io.IOException",
"java.util.regex.Matcher",
"org.openhab.core.library.types.DecimalType"
] |
import java.io.BufferedReader; import java.io.DataOutputStream; import java.io.IOException; import java.util.regex.Matcher; import org.openhab.core.library.types.DecimalType;
|
import java.io.*; import java.util.regex.*; import org.openhab.core.library.types.*;
|
[
"java.io",
"java.util",
"org.openhab.core"
] |
java.io; java.util; org.openhab.core;
| 1,433,893
|
/**
 * Custom serialization hook: resets the board (clearing piece tiles and
 * restoring the tiles they covered) before the default field serialization
 * runs, so the save logic never has to care whether pieces are on the board.
 *
 * @param stream the output stream performing the serialization
 * @throws IOException if the default serialization fails
 */
private void writeObject(java.io.ObjectOutputStream stream) throws IOException{
    resetBoard();
    stream.defaultWriteObject();
}
|
void function(java.io.ObjectOutputStream stream) throws IOException{ resetBoard(); stream.defaultWriteObject(); }
|
/**
* Custom serialization clears the boards of piece tiles and replaces those tiles with what they were covering
* Allows system to not care if the board has pieces on it when saving
* @param stream - java.io.ObjectOutputStream
* @throws IOException
*/
|
Custom serialization clears the boards of piece tiles and replaces those tiles with what they were covering. Allows the system to not care if the board has pieces on it when saving
|
writeObject
|
{
"repo_name": "Betta-Testers/Imbrius-Kabasuji",
"path": "src/model/Board.java",
"license": "mit",
"size": 13710
}
|
[
"java.io.IOException"
] |
import java.io.IOException;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 1,271,035
|
/**
 * Step 11 test without enough bytes: when the input ends before step 11 can
 * complete, parseFile must return PARTIAL and the parser must remain on
 * step 11.
 */
@Test
public void testStep11Partial() {
      ParserStatus expectedStatus = ParserStatus.PARTIAL;
      int expectedStep = 11;
      // Truncated log record: enough bytes to reach step 11, but not enough
      // for that step to finish.
      BufferedInputStream testInputStream = new BufferedInputStream(new ByteArrayInputStream(new byte[] { 5, 4, 3, 2, 2,
            3, 4, 5, 6, 7, 8, 9, 0xB, 0xD, 0xA, 0xC, 1, 2, 3, 4, 0xA, 1, 1, 2, 0, 0, 0, 5, 0, 1, 0xD, 5, 0xA }));
      try {
         assertEquals(expectedStatus, testRxMsgFileParser.parseFile(testInputStream, "testLogFile.bin"));
         assertEquals(expectedStep, testRxMsgFileParser.getStep());
      } catch (FileParserException e) {
         fail("Unexpected exception: " + e);
      }
   }
|
void function() { ParserStatus expectedStatus = ParserStatus.PARTIAL; int expectedStep = 11; BufferedInputStream testInputStream = new BufferedInputStream(new ByteArrayInputStream(new byte[] { 5, 4, 3, 2, 2, 3, 4, 5, 6, 7, 8, 9, 0xB, 0xD, 0xA, 0xC, 1, 2, 3, 4, 0xA, 1, 1, 2, 0, 0, 0, 5, 0, 1, 0xD, 5, 0xA })); try { assertEquals(expectedStatus, testRxMsgFileParser.parseFile(testInputStream, STR)); assertEquals(expectedStep, testRxMsgFileParser.getStep()); } catch (FileParserException e) { fail(STR + e); } }
|
/**
* Step 11 test without enough bytes. Should return PARTIAL.
*/
|
Step 11 test without enough bytes. Should return PARTIAL
|
testStep11Partial
|
{
"repo_name": "hmusavi/jpo-ode",
"path": "jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/importer/parser/RxMsgFileParserTest.java",
"license": "apache-2.0",
"size": 19633
}
|
[
"java.io.BufferedInputStream",
"java.io.ByteArrayInputStream",
"org.junit.Assert",
"us.dot.its.jpo.ode.importer.parser.FileParser"
] |
import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import org.junit.Assert; import us.dot.its.jpo.ode.importer.parser.FileParser;
|
import java.io.*; import org.junit.*; import us.dot.its.jpo.ode.importer.parser.*;
|
[
"java.io",
"org.junit",
"us.dot.its"
] |
java.io; org.junit; us.dot.its;
| 1,351,704
|
public DataNode setFocus_parametersScalar(double focus_parameters);
|
DataNode function(double focus_parameters);
|
/**
* list of polynomial coefficients describing the focal length of the zone plate, in increasing powers of photon energy,
* that describes the focal length of the zone plate (in microns) at an X-ray photon energy (in electron volts).
* <p>
* <b>Type:</b> NX_FLOAT
* <b>Dimensions:</b>
* </p>
*
* @param focus_parameters the focus_parameters
*/
|
list of polynomial coefficients describing the focal length of the zone plate, in increasing powers of photon energy, that describes the focal length of the zone plate (in microns) at an X-ray photon energy (in electron volts). Type: NX_FLOAT Dimensions:
|
setFocus_parametersScalar
|
{
"repo_name": "colinpalmer/dawnsci",
"path": "org.eclipse.dawnsci.nexus/autogen/org/eclipse/dawnsci/nexus/NXfresnel_zone_plate.java",
"license": "epl-1.0",
"size": 14652
}
|
[
"org.eclipse.dawnsci.analysis.api.tree.DataNode"
] |
import org.eclipse.dawnsci.analysis.api.tree.DataNode;
|
import org.eclipse.dawnsci.analysis.api.tree.*;
|
[
"org.eclipse.dawnsci"
] |
org.eclipse.dawnsci;
| 1,533,611
|
/**
 * BasicTypeExpression := '*' | 'null' | 'undefined' | TypeName
 *     | FunctionType | UnionType | RecordType
 *
 * <p>Dispatches on the current token: '*' becomes a STAR node, '{' starts a
 * record type, '(' starts a union type, and a string token is either the
 * 'function' keyword, a 'null'/'undefined' literal, or a plain type name.
 * Any other token is pushed back into the lookahead and a generic type
 * syntax warning is reported.
 *
 * @param token the current JsDoc token
 * @return the parsed type node, or the node produced by reporting the
 *     syntax warning
 */
private Node parseBasicTypeExpression(JsDocToken token) {
    if (token == JsDocToken.STAR) {
      return newNode(Token.STAR);
    } else if (token == JsDocToken.LEFT_CURLY) {
      skipEOLs();
      return parseRecordType(next());
    } else if (token == JsDocToken.LEFT_PAREN) {
      skipEOLs();
      return parseUnionType(next());
    } else if (token == JsDocToken.STRING) {
      String string = stream.getString();
      switch (string) {
        case "function":
          skipEOLs();
          return parseFunctionType(next());
        case "null":
        case "undefined":
          return newStringNode(string);
        default:
          return parseTypeName(token);
      }
    }
    restoreLookAhead(token);
    return reportGenericTypeSyntaxWarning();
  }
|
Node function(JsDocToken token) { if (token == JsDocToken.STAR) { return newNode(Token.STAR); } else if (token == JsDocToken.LEFT_CURLY) { skipEOLs(); return parseRecordType(next()); } else if (token == JsDocToken.LEFT_PAREN) { skipEOLs(); return parseUnionType(next()); } else if (token == JsDocToken.STRING) { String string = stream.getString(); switch (string) { case STR: skipEOLs(); return parseFunctionType(next()); case "null": case STR: return newStringNode(string); default: return parseTypeName(token); } } restoreLookAhead(token); return reportGenericTypeSyntaxWarning(); }
|
/**
* BasicTypeExpression := '*' | 'null' | 'undefined' | TypeName
* | FunctionType | UnionType | RecordType
*/
|
BasicTypeExpression := '*' | 'null' | 'undefined' | TypeName | FunctionType | UnionType | RecordType
|
parseBasicTypeExpression
|
{
"repo_name": "rintaro/closure-compiler",
"path": "src/com/google/javascript/jscomp/parsing/JsDocInfoParser.java",
"license": "apache-2.0",
"size": 83340
}
|
[
"com.google.javascript.rhino.Node",
"com.google.javascript.rhino.Token"
] |
import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token;
|
import com.google.javascript.rhino.*;
|
[
"com.google.javascript"
] |
com.google.javascript;
| 1,652,405
|
/**
 * Parses one algebraic number by consuming this.getOrder() tokens from the
 * tokenizer, one rational coefficient per token.
 *
 * @param tokens tokenizer positioned at the first coefficient
 * @return the algebraic number built over this field from the parsed coefficients
 */
private AlgebraicNumber parseNumber( StringTokenizer tokens )
{
    final int order = this .getOrder();
    BigRational[] coefficients = new BigRational[ order ];
    for ( int index = 0; index < order; index++ ) {
        coefficients[ index ] = new BigRational( tokens .nextToken() );
    }
    return new AlgebraicNumber( this, coefficients );
}
|
AlgebraicNumber function( StringTokenizer tokens ) { BigRational[] rats = new BigRational[ this .getOrder() ]; for ( int i = 0; i < rats.length; i++ ) { rats[ i ] = new BigRational( tokens .nextToken() ); } return new AlgebraicNumber( this, rats ); }
|
/**
* Consumes this.getOrder() tokens from the tokenizer
* @param tokens
* @return
*/
|
Consumes this.getOrder() tokens from the tokenizer
|
parseNumber
|
{
"repo_name": "vorth/vzome-core",
"path": "src/main/java/com/vzome/core/algebra/AlgebraicField.java",
"license": "apache-2.0",
"size": 14924
}
|
[
"java.util.StringTokenizer"
] |
import java.util.StringTokenizer;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,828,107
|
/**
 * Converts legacy XMLTEMPLATE content into a {@link CmsXmlPage}.
 *
 * <p>The root element must be named XMLTEMPLATE. Bodies are taken from the
 * "edittemplate" child elements when present, otherwise from "TEMPLATE"
 * elements, whose CDATA and LINK children are re-assembled with LINK targets
 * prefixed by the OpenCms context. Each non-empty body is added to the page
 * under its "name" attribute (defaulting to "body"), after substituting the
 * OpenCms context macro.
 *
 * @param cms the current cms context
 * @param content the raw XMLTEMPLATE content bytes
 * @param locale the locale to store the bodies under
 * @param encoding the encoding of the created page
 * @return the converted XML page
 * @throws CmsImportExportException if the root element or a body content is missing
 * @throws CmsXmlException if the content cannot be unmarshalled
 */
@SuppressWarnings("unchecked")
public static CmsXmlPage convertToXmlPage(CmsObject cms, byte[] content, Locale locale, String encoding)
    throws CmsImportExportException, CmsXmlException {
        CmsXmlPage xmlPage = null;
        Document page = CmsXmlUtils.unmarshalHelper(content, null);
        Element xmltemplate = page.getRootElement();
        if ((xmltemplate == null) || !"XMLTEMPLATE".equals(xmltemplate.getName())) {
            throw new CmsImportExportException(Messages.get().container(Messages.ERR_NOT_FOUND_ELEM_XMLTEMPLATE_0));
        }
        // get all edittemplate nodes
        Iterator<Element> i = xmltemplate.elementIterator("edittemplate");
        boolean useEditTemplates = true;
        if (!i.hasNext()) {
            // no edittemplate nodes found, get the template nodes
            i = xmltemplate.elementIterator("TEMPLATE");
            useEditTemplates = false;
        }
        // now create the XML page
        xmlPage = new CmsXmlPage(locale, encoding);
        while (i.hasNext()) {
            Element currentTemplate = i.next();
            String bodyName = currentTemplate.attributeValue("name");
            if (CmsStringUtil.isEmpty(bodyName)) {
                // no template name found, use the parameter body name
                bodyName = "body";
            }
            String bodyContent = null;
            if (useEditTemplates) {
                // no content manipulation needed for edittemplates
                bodyContent = currentTemplate.getText();
            } else {
                // parse content for TEMPLATEs
                StringBuffer contentBuffer = new StringBuffer();
                for (Iterator<Node> k = currentTemplate.nodeIterator(); k.hasNext();) {
                    Node n = k.next();
                    if (n.getNodeType() == Node.CDATA_SECTION_NODE) {
                        contentBuffer.append(n.getText());
                        continue;
                    } else if (n.getNodeType() == Node.ELEMENT_NODE) {
                        if ("LINK".equals(n.getName())) {
                            // Prefix internal links with the OpenCms context path.
                            contentBuffer.append(OpenCms.getSystemInfo().getOpenCmsContext());
                            contentBuffer.append(n.getText());
                            continue;
                        }
                    }
                }
                bodyContent = contentBuffer.toString();
            }
            if (bodyContent == null) {
                throw new CmsImportExportException(Messages.get().container(Messages.ERR_BODY_CONTENT_NOT_FOUND_0));
            }
            // Replace the OpenCms context macro with the real context path.
            bodyContent = CmsStringUtil.substitute(
                bodyContent,
                CmsStringUtil.MACRO_OPENCMS_CONTEXT,
                OpenCms.getSystemInfo().getOpenCmsContext());
            if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(bodyContent)) {
                xmlPage.addValue(bodyName, locale);
                xmlPage.setStringValue(cms, bodyName, locale, bodyContent);
            }
        }
        return xmlPage;
    }
|
@SuppressWarnings(STR) static CmsXmlPage function(CmsObject cms, byte[] content, Locale locale, String encoding) throws CmsImportExportException, CmsXmlException { CmsXmlPage xmlPage = null; Document page = CmsXmlUtils.unmarshalHelper(content, null); Element xmltemplate = page.getRootElement(); if ((xmltemplate == null) !STR.equals(xmltemplate.getName())) { throw new CmsImportExportException(Messages.get().container(Messages.ERR_NOT_FOUND_ELEM_XMLTEMPLATE_0)); } Iterator<Element> i = xmltemplate.elementIterator(STR); boolean useEditTemplates = true; if (!i.hasNext()) { i = xmltemplate.elementIterator(STR); useEditTemplates = false; } xmlPage = new CmsXmlPage(locale, encoding); while (i.hasNext()) { Element currentTemplate = i.next(); String bodyName = currentTemplate.attributeValue("name"); if (CmsStringUtil.isEmpty(bodyName)) { bodyName = "body"; } String bodyContent = null; if (useEditTemplates) { bodyContent = currentTemplate.getText(); } else { StringBuffer contentBuffer = new StringBuffer(); for (Iterator<Node> k = currentTemplate.nodeIterator(); k.hasNext();) { Node n = k.next(); if (n.getNodeType() == Node.CDATA_SECTION_NODE) { contentBuffer.append(n.getText()); continue; } else if (n.getNodeType() == Node.ELEMENT_NODE) { if ("LINK".equals(n.getName())) { contentBuffer.append(OpenCms.getSystemInfo().getOpenCmsContext()); contentBuffer.append(n.getText()); continue; } } } bodyContent = contentBuffer.toString(); } if (bodyContent == null) { throw new CmsImportExportException(Messages.get().container(Messages.ERR_BODY_CONTENT_NOT_FOUND_0)); } bodyContent = CmsStringUtil.substitute( bodyContent, CmsStringUtil.MACRO_OPENCMS_CONTEXT, OpenCms.getSystemInfo().getOpenCmsContext()); if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(bodyContent)) { xmlPage.addValue(bodyName, locale); xmlPage.setStringValue(cms, bodyName, locale, bodyContent); } } return xmlPage; }
|
/**
* Converts the contents of a page into an xml page.<p>
*
* @param cms the cms object
* @param content the content used with xml templates
* @param locale the locale of the body element(s)
* @param encoding the encoding to the xml page
* @return the xml page content or null if conversion failed
* @throws CmsImportExportException if the body content or the XMLTEMPLATE element were not found
* @throws CmsXmlException if there is an error reading xml contents from the byte array into a document
*/
|
Converts the contents of a page into an xml page
|
convertToXmlPage
|
{
"repo_name": "victos/opencms-core",
"path": "src/org/opencms/importexport/CmsXmlPageConverter.java",
"license": "lgpl-2.1",
"size": 5441
}
|
[
"java.util.Iterator",
"java.util.Locale",
"org.dom4j.Document",
"org.dom4j.Element",
"org.dom4j.Node",
"org.opencms.file.CmsObject",
"org.opencms.main.OpenCms",
"org.opencms.util.CmsStringUtil",
"org.opencms.xml.CmsXmlException",
"org.opencms.xml.CmsXmlUtils",
"org.opencms.xml.page.CmsXmlPage"
] |
import java.util.Iterator; import java.util.Locale; import org.dom4j.Document; import org.dom4j.Element; import org.dom4j.Node; import org.opencms.file.CmsObject; import org.opencms.main.OpenCms; import org.opencms.util.CmsStringUtil; import org.opencms.xml.CmsXmlException; import org.opencms.xml.CmsXmlUtils; import org.opencms.xml.page.CmsXmlPage;
|
import java.util.*; import org.dom4j.*; import org.opencms.file.*; import org.opencms.main.*; import org.opencms.util.*; import org.opencms.xml.*; import org.opencms.xml.page.*;
|
[
"java.util",
"org.dom4j",
"org.opencms.file",
"org.opencms.main",
"org.opencms.util",
"org.opencms.xml"
] |
java.util; org.dom4j; org.opencms.file; org.opencms.main; org.opencms.util; org.opencms.xml;
| 2,794,277
|
public final int indexOf(X value) {
int index = 0;
for (X key : options) {
if (Utils.equals(key, value))
return index;
index++;
}
return -1;
}
|
final int function(X value) { int index = 0; for (X key : options) { if (Utils.equals(key, value)) return index; index++; } return -1; }
|
/**
* Retrieve the index associated with a selectable value or -1 if not
* present. This index is only meaningful if the options were ordered i.e.
* if the Map or Collection used to construct the object was itself ordered.
*
* @param value
* @return
*/
|
Retrieve the index associated with a selectable value or -1 if not present. This index is only meaningful if the options were ordered i.e. if the Map or Collection used to construct the object was itself ordered
|
indexOf
|
{
"repo_name": "sodash/open-code",
"path": "winterwell.web/src/com/winterwell/web/fields/SelectField.java",
"license": "mit",
"size": 5659
}
|
[
"com.winterwell.utils.Utils"
] |
import com.winterwell.utils.Utils;
|
import com.winterwell.utils.*;
|
[
"com.winterwell.utils"
] |
com.winterwell.utils;
| 2,856,171
|
public void endDocument() throws SAXException
{
if (m_firstTagNotEmitted)
{
flush();
}
m_handler.endDocument();
}
|
void function() throws SAXException { if (m_firstTagNotEmitted) { flush(); } m_handler.endDocument(); }
|
/**
* Pass the call on to the underlying handler
* @see org.xml.sax.ContentHandler#endDocument()
*/
|
Pass the call on to the underlying handler
|
endDocument
|
{
"repo_name": "itgeeker/jdk",
"path": "src/com/sun/org/apache/xml/internal/serializer/ToUnknownStream.java",
"license": "apache-2.0",
"size": 38953
}
|
[
"org.xml.sax.SAXException"
] |
import org.xml.sax.SAXException;
|
import org.xml.sax.*;
|
[
"org.xml.sax"
] |
org.xml.sax;
| 923,977
|
public List<Article> getSubmittedArticles(String instance);
|
List<Article> function(String instance);
|
/**
* Gets the list of articles that are currently submitted.
*
* @param userId
* @return
*/
|
Gets the list of articles that are currently submitted
|
getSubmittedArticles
|
{
"repo_name": "tamerman/mobile-starting-framework",
"path": "writer/api/src/main/java/org/kuali/mobility/writer/dao/ArticleDao.java",
"license": "mit",
"size": 3769
}
|
[
"java.util.List",
"org.kuali.mobility.writer.entity.Article"
] |
import java.util.List; import org.kuali.mobility.writer.entity.Article;
|
import java.util.*; import org.kuali.mobility.writer.entity.*;
|
[
"java.util",
"org.kuali.mobility"
] |
java.util; org.kuali.mobility;
| 2,298,911
|
public static void overwriteCrosstoolWithSimpleCompleteToolchain(Path workspace)
throws IOException {
overwriteCrosstoolFile(workspace, TextFormat.printToString(simpleCompleteToolchainProto()));
}
|
static void function(Path workspace) throws IOException { overwriteCrosstoolFile(workspace, TextFormat.printToString(simpleCompleteToolchainProto())); }
|
/**
* Overwrites the default CROSSTOOL file with a reasonable toolchain.
*/
|
Overwrites the default CROSSTOOL file with a reasonable toolchain
|
overwriteCrosstoolWithSimpleCompleteToolchain
|
{
"repo_name": "anupcshan/bazel",
"path": "src/test/java/com/google/devtools/build/lib/rules/cpp/CrosstoolConfigurationHelper.java",
"license": "apache-2.0",
"size": 4922
}
|
[
"com.google.devtools.build.lib.vfs.Path",
"com.google.protobuf.TextFormat",
"java.io.IOException"
] |
import com.google.devtools.build.lib.vfs.Path; import com.google.protobuf.TextFormat; import java.io.IOException;
|
import com.google.devtools.build.lib.vfs.*; import com.google.protobuf.*; import java.io.*;
|
[
"com.google.devtools",
"com.google.protobuf",
"java.io"
] |
com.google.devtools; com.google.protobuf; java.io;
| 2,899,168
|
public List<Protocol> getServerProtocols() {
return this.serverProtocols;
}
|
List<Protocol> function() { return this.serverProtocols; }
|
/**
* Returns the modifiable list of required server protocols. An empty list
* means that all protocols are potentially supported (default case). You
* should update this list to restrict the actual protocols supported by
* your application.
*
* @return The list of required server protocols.
*/
|
Returns the modifiable list of required server protocols. An empty list means that all protocols are potentially supported (default case). You should update this list to restrict the actual protocols supported by your application
|
getServerProtocols
|
{
"repo_name": "zhangjunfang/eclipse-dir",
"path": "restlet/src/org/restlet/service/ConnectorService.java",
"license": "bsd-2-clause",
"size": 5400
}
|
[
"java.util.List",
"org.restlet.data.Protocol"
] |
import java.util.List; import org.restlet.data.Protocol;
|
import java.util.*; import org.restlet.data.*;
|
[
"java.util",
"org.restlet.data"
] |
java.util; org.restlet.data;
| 267,005
|
@Test void compareToNaive() {
GrayF32 inten = new GrayF32(width, height);
QueueCorner naiveMin = new QueueCorner(inten.getWidth() * inten.getHeight());
QueueCorner naiveMax = new QueueCorner(inten.getWidth() * inten.getHeight());
for (int useSubImage = 0; useSubImage <= 1; useSubImage++) {
// make sure it handles sub images correctly
if (useSubImage == 1) {
GrayF32 larger = new GrayF32(inten.width + 10, inten.height + 8);
inten = larger.subimage(5, 5, inten.width + 5, inten.height + 5, null);
}
for (int nonMaxWidth = 3; nonMaxWidth <= 9; nonMaxWidth += 2) {
int radius = nonMaxWidth / 2;
NonMaxExtractorNaive reg = new NonMaxExtractorNaive(strict);
reg.setSearchRadius(radius);
reg.setThreshold(0.6f);
for (int i = 0; i < 10; i++) {
ImageMiscOps.fillGaussian(inten, rand, 0, 3, -100, 100);
// detect the corners
findLocalPeaks(inten, 0.6f, radius, 0);
naiveMin.reset();naiveMax.reset();
reg.process(inten, naiveMax);
PixelMath.negative(inten, inten);
reg.process(inten, naiveMin);
// check the number of corners
if( canDetectMin ) {
assertTrue(foundMinimum.size() > 0);
assertEquals(naiveMin.size(), foundMinimum.size());
checkSamePoints(naiveMin,foundMinimum);
}
if( canDetectMax ) {
assertTrue(foundMaximum.size() > 0);
assertEquals(naiveMax.size(), foundMaximum.size());
checkSamePoints(naiveMax,foundMaximum);
}
}
}
}
}
|
@Test void compareToNaive() { GrayF32 inten = new GrayF32(width, height); QueueCorner naiveMin = new QueueCorner(inten.getWidth() * inten.getHeight()); QueueCorner naiveMax = new QueueCorner(inten.getWidth() * inten.getHeight()); for (int useSubImage = 0; useSubImage <= 1; useSubImage++) { if (useSubImage == 1) { GrayF32 larger = new GrayF32(inten.width + 10, inten.height + 8); inten = larger.subimage(5, 5, inten.width + 5, inten.height + 5, null); } for (int nonMaxWidth = 3; nonMaxWidth <= 9; nonMaxWidth += 2) { int radius = nonMaxWidth / 2; NonMaxExtractorNaive reg = new NonMaxExtractorNaive(strict); reg.setSearchRadius(radius); reg.setThreshold(0.6f); for (int i = 0; i < 10; i++) { ImageMiscOps.fillGaussian(inten, rand, 0, 3, -100, 100); findLocalPeaks(inten, 0.6f, radius, 0); naiveMin.reset();naiveMax.reset(); reg.process(inten, naiveMax); PixelMath.negative(inten, inten); reg.process(inten, naiveMin); if( canDetectMin ) { assertTrue(foundMinimum.size() > 0); assertEquals(naiveMin.size(), foundMinimum.size()); checkSamePoints(naiveMin,foundMinimum); } if( canDetectMax ) { assertTrue(foundMaximum.size() > 0); assertEquals(naiveMax.size(), foundMaximum.size()); checkSamePoints(naiveMax,foundMaximum); } } } } }
|
/**
* Compares output against naive algorithm. Checks for compliance with sub-images
*/
|
Compares output against naive algorithm. Checks for compliance with sub-images
|
compareToNaive
|
{
"repo_name": "lessthanoptimal/BoofCV",
"path": "main/boofcv-feature/src/test/java/boofcv/alg/feature/detect/extract/GenericNonMaxTests.java",
"license": "apache-2.0",
"size": 7138
}
|
[
"org.junit.jupiter.api.Assertions",
"org.junit.jupiter.api.Test"
] |
import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.*;
|
[
"org.junit.jupiter"
] |
org.junit.jupiter;
| 1,420,594
|
return INSTANCE;
}
private PresentValueCurveSensitivityBlackSTIRFutureOptionCalculator() {
}
private static final FuturesTransactionBlackSTIRFuturesMethod METHOD_STRIRFUT_MARGIN = new FuturesTransactionBlackSTIRFuturesMethod();
// ----- Futures ------
|
return INSTANCE; } private PresentValueCurveSensitivityBlackSTIRFutureOptionCalculator() { } private static final FuturesTransactionBlackSTIRFuturesMethod METHOD_STRIRFUT_MARGIN = new FuturesTransactionBlackSTIRFuturesMethod();
|
/**
* Gets the calculator instance.
* @return The calculator.
*/
|
Gets the calculator instance
|
getInstance
|
{
"repo_name": "DevStreet/FinanceAnalytics",
"path": "projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/provider/calculator/blackstirfutures/PresentValueCurveSensitivityBlackSTIRFutureOptionCalculator.java",
"license": "apache-2.0",
"size": 2069
}
|
[
"com.opengamma.analytics.financial.interestrate.future.provider.FuturesTransactionBlackSTIRFuturesMethod"
] |
import com.opengamma.analytics.financial.interestrate.future.provider.FuturesTransactionBlackSTIRFuturesMethod;
|
import com.opengamma.analytics.financial.interestrate.future.provider.*;
|
[
"com.opengamma.analytics"
] |
com.opengamma.analytics;
| 2,258,257
|
@Test(expected = RegistryException.class)
public void getUniqueGroupIdsForUserRegistryException() throws Exception {
servlet.setExpectedMethodName("getUniqueGroupIdsForUser");
RegistryException expected = new RegistryException("expected");
servlet.setFakeResponse(expected.toString());
assertEquals("user1", servlet.getUniqueGroupIdsForUser("user1"));
}
|
@Test(expected = RegistryException.class) void function() throws Exception { servlet.setExpectedMethodName(STR); RegistryException expected = new RegistryException(STR); servlet.setFakeResponse(expected.toString()); assertEquals("user1", servlet.getUniqueGroupIdsForUser("user1")); }
|
/**
* Test method for {@link com.ibm.ws.security.registry.test.UserRegistryServletConnection#getUniqueGroupIdsForUser(java.lang.String)}.
*/
|
Test method for <code>com.ibm.ws.security.registry.test.UserRegistryServletConnection#getUniqueGroupIdsForUser(java.lang.String)</code>
|
getUniqueGroupIdsForUserRegistryException
|
{
"repo_name": "OpenLiberty/open-liberty",
"path": "dev/com.ibm.ws.security.registry_test.servlet/test/com/ibm/ws/security/registry/test/UserRegistryServletConnectionTest.java",
"license": "epl-1.0",
"size": 29746
}
|
[
"com.ibm.ws.security.registry.RegistryException",
"org.junit.Assert",
"org.junit.Test"
] |
import com.ibm.ws.security.registry.RegistryException; import org.junit.Assert; import org.junit.Test;
|
import com.ibm.ws.security.registry.*; import org.junit.*;
|
[
"com.ibm.ws",
"org.junit"
] |
com.ibm.ws; org.junit;
| 2,253,036
|
public static CoreService getCoreService(String accessToken, MessageLogger messageLogger) {
return getCoreService(null, accessToken, messageLogger);
}
|
static CoreService function(String accessToken, MessageLogger messageLogger) { return getCoreService(null, accessToken, messageLogger); }
|
/**
* Get a Cloud Core client and configure it to use the specified access token and message logger
*
* @param accessToken The OAuth access token to use for authorization
* @param messageLogger The message logger implementation to use for logging messages
* @return A client for Cloud Core service
*/
|
Get a Cloud Core client and configure it to use the specified access token and message logger
|
getCoreService
|
{
"repo_name": "bluegaspode/ickstream-java-common",
"path": "ickprotocol/core/src/main/java/com/ickstream/protocol/service/core/CoreServiceFactory.java",
"license": "bsd-3-clause",
"size": 6045
}
|
[
"com.ickstream.common.jsonrpc.MessageLogger"
] |
import com.ickstream.common.jsonrpc.MessageLogger;
|
import com.ickstream.common.jsonrpc.*;
|
[
"com.ickstream.common"
] |
com.ickstream.common;
| 493,176
|
private PlanTable evaluateFilter(PlanTable currentTable) {
PlanTable newTable = new PlanTable();
for (PlanTableEntry entry : currentTable) {
Set<String> variables = Sets.newHashSet(entry.getProcessedVariables());
CNF predicates = entry.getPredicates();
CNF subCNF = predicates.removeSubCNF(variables);
if (subCNF.size() > 0) {
FilterEmbeddingsNode node = new FilterEmbeddingsNode(entry.getQueryPlan().getRoot(),
subCNF);
newTable.add(new PlanTableEntry(GRAPH, Sets.newHashSet(entry.getProcessedVariables()),
predicates, new QueryPlanEstimator(new QueryPlan(node), queryHandler, graphStatistics)));
} else {
newTable.add(entry);
}
}
return newTable;
}
//------------------------------------------------------------------------------------------------
// Filter embedding evaluation
//------------------------------------------------------------------------------------------------
|
PlanTable function(PlanTable currentTable) { PlanTable newTable = new PlanTable(); for (PlanTableEntry entry : currentTable) { Set<String> variables = Sets.newHashSet(entry.getProcessedVariables()); CNF predicates = entry.getPredicates(); CNF subCNF = predicates.removeSubCNF(variables); if (subCNF.size() > 0) { FilterEmbeddingsNode node = new FilterEmbeddingsNode(entry.getQueryPlan().getRoot(), subCNF); newTable.add(new PlanTableEntry(GRAPH, Sets.newHashSet(entry.getProcessedVariables()), predicates, new QueryPlanEstimator(new QueryPlan(node), queryHandler, graphStatistics))); } else { newTable.add(entry); } } return newTable; }
|
/**
* The method checks if a filter can be applied on any of the entries in the specified table. If
* this is the case, a {@link FilterEmbeddingsNode} is added to the query plan represented by the
* affected entries.
*
* @param currentTable query plan table
* @return input table with possibly updated entries
*/
|
The method checks if a filter can be applied on any of the entries in the specified table. If this is the case, a <code>FilterEmbeddingsNode</code> is added to the query plan represented by the affected entries
|
evaluateFilter
|
{
"repo_name": "smee/gradoop",
"path": "gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/matching/single/cypher/planning/planner/greedy/GreedyPlanner.java",
"license": "apache-2.0",
"size": 24467
}
|
[
"com.google.common.collect.Sets",
"java.util.Set",
"org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.estimation.QueryPlanEstimator",
"org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.plantable.PlanTable",
"org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.plantable.PlanTableEntry",
"org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.queryplan.QueryPlan",
"org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.queryplan.unary.FilterEmbeddingsNode"
] |
import com.google.common.collect.Sets; import java.util.Set; import org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.estimation.QueryPlanEstimator; import org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.plantable.PlanTable; import org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.plantable.PlanTableEntry; import org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.queryplan.QueryPlan; import org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.queryplan.unary.FilterEmbeddingsNode;
|
import com.google.common.collect.*; import java.util.*; import org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.estimation.*; import org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.plantable.*; import org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.queryplan.*; import org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.queryplan.unary.*;
|
[
"com.google.common",
"java.util",
"org.gradoop.flink"
] |
com.google.common; java.util; org.gradoop.flink;
| 972,569
|
public CMSSignedData generate(
// FIXME Avoid accessing more than once to support CMSProcessableInputStream
CMSTypedData content,
boolean encapsulate)
throws CMSException
{
if (!signerInfs.isEmpty())
{
throw new IllegalStateException("this method can only be used with SignerInfoGenerator");
}
// TODO
// if (signerInfs.isEmpty())
// {
//
// if (encapsulate)
// {
// throw new IllegalArgumentException("no signers, encapsulate must be false");
// }
// if (!DATA.equals(eContentType))
// {
// throw new IllegalArgumentException("no signers, eContentType must be id-data");
// }
// }
//
// if (!DATA.equals(eContentType))
// {
//
// // TODO signedAttrs must be present for all signers
// }
ASN1EncodableVector digestAlgs = new ASN1EncodableVector();
ASN1EncodableVector signerInfos = new ASN1EncodableVector();
digests.clear(); // clear the current preserved digest state
//
// add the precalculated SignerInfo objects.
//
for (Iterator it = _signers.iterator(); it.hasNext();)
{
SignerInformation signer = (SignerInformation)it.next();
digestAlgs.add(CMSSignedHelper.INSTANCE.fixAlgID(signer.getDigestAlgorithmID()));
// TODO Verify the content type and calculated digest match the precalculated SignerInfo
signerInfos.add(signer.toASN1Structure());
}
//
// add the SignerInfo objects
//
ASN1ObjectIdentifier contentTypeOID = content.getContentType();
ASN1OctetString octs = null;
if (content != null)
{
ByteArrayOutputStream bOut = null;
if (encapsulate)
{
bOut = new ByteArrayOutputStream();
}
OutputStream cOut = CMSUtils.attachSignersToOutputStream(signerGens, bOut);
// Just in case it's unencapsulated and there are no signers!
cOut = CMSUtils.getSafeOutputStream(cOut);
try
{
content.write(cOut);
cOut.close();
}
catch (IOException e)
{
throw new CMSException("data processing exception: " + e.getMessage(), e);
}
if (encapsulate)
{
octs = new BEROctetString(bOut.toByteArray());
}
}
for (Iterator it = signerGens.iterator(); it.hasNext();)
{
SignerInfoGenerator sGen = (SignerInfoGenerator)it.next();
SignerInfo inf = sGen.generate(contentTypeOID);
digestAlgs.add(inf.getDigestAlgorithm());
signerInfos.add(inf);
byte[] calcDigest = sGen.getCalculatedDigest();
if (calcDigest != null)
{
digests.put(inf.getDigestAlgorithm().getAlgorithm().getId(), calcDigest);
}
}
ASN1Set certificates = null;
if (certs.size() != 0)
{
certificates = CMSUtils.createBerSetFromList(certs);
}
ASN1Set certrevlist = null;
if (crls.size() != 0)
{
certrevlist = CMSUtils.createBerSetFromList(crls);
}
ContentInfo encInfo = new ContentInfo(contentTypeOID, octs);
SignedData sd = new SignedData(
new DERSet(digestAlgs),
encInfo,
certificates,
certrevlist,
new DERSet(signerInfos));
ContentInfo contentInfo = new ContentInfo(
CMSObjectIdentifiers.signedData, sd);
return new CMSSignedData(content, contentInfo);
}
|
CMSSignedData function( CMSTypedData content, boolean encapsulate) throws CMSException { if (!signerInfs.isEmpty()) { throw new IllegalStateException(STR); } ASN1EncodableVector digestAlgs = new ASN1EncodableVector(); ASN1EncodableVector signerInfos = new ASN1EncodableVector(); digests.clear(); { SignerInformation signer = (SignerInformation)it.next(); digestAlgs.add(CMSSignedHelper.INSTANCE.fixAlgID(signer.getDigestAlgorithmID())); signerInfos.add(signer.toASN1Structure()); } ASN1OctetString octs = null; if (content != null) { ByteArrayOutputStream bOut = null; if (encapsulate) { bOut = new ByteArrayOutputStream(); } OutputStream cOut = CMSUtils.attachSignersToOutputStream(signerGens, bOut); cOut = CMSUtils.getSafeOutputStream(cOut); try { content.write(cOut); cOut.close(); } catch (IOException e) { throw new CMSException(STR + e.getMessage(), e); } if (encapsulate) { octs = new BEROctetString(bOut.toByteArray()); } } for (Iterator it = signerGens.iterator(); it.hasNext();) { SignerInfoGenerator sGen = (SignerInfoGenerator)it.next(); SignerInfo inf = sGen.generate(contentTypeOID); digestAlgs.add(inf.getDigestAlgorithm()); signerInfos.add(inf); byte[] calcDigest = sGen.getCalculatedDigest(); if (calcDigest != null) { digests.put(inf.getDigestAlgorithm().getAlgorithm().getId(), calcDigest); } } ASN1Set certificates = null; if (certs.size() != 0) { certificates = CMSUtils.createBerSetFromList(certs); } ASN1Set certrevlist = null; if (crls.size() != 0) { certrevlist = CMSUtils.createBerSetFromList(crls); } ContentInfo encInfo = new ContentInfo(contentTypeOID, octs); SignedData sd = new SignedData( new DERSet(digestAlgs), encInfo, certificates, certrevlist, new DERSet(signerInfos)); ContentInfo contentInfo = new ContentInfo( CMSObjectIdentifiers.signedData, sd); return new CMSSignedData(content, contentInfo); }
|
/**
* Generate a CMS Signed Data object which can be carrying a detached CMS signature, or have encapsulated data, depending on the value
* of the encapsulated parameter.
*
* @param content the content to be signed.
* @param encapsulate true if the content should be encapsulated in the signature, false otherwise.
*/
|
Generate a CMS Signed Data object which can be carrying a detached CMS signature, or have encapsulated data, depending on the value of the encapsulated parameter
|
generate
|
{
"repo_name": "GaloisInc/hacrypto",
"path": "src/Java/BouncyCastle/BouncyCastle-1.50/pkix/src/main/java/org/bouncycastle/cms/CMSSignedDataGenerator.java",
"license": "bsd-3-clause",
"size": 7611
}
|
[
"java.io.ByteArrayOutputStream",
"java.io.IOException",
"java.io.OutputStream",
"java.util.Iterator",
"org.bouncycastle.asn1.ASN1EncodableVector",
"org.bouncycastle.asn1.ASN1OctetString",
"org.bouncycastle.asn1.ASN1Set",
"org.bouncycastle.asn1.BEROctetString",
"org.bouncycastle.asn1.DERSet",
"org.bouncycastle.asn1.cms.CMSObjectIdentifiers",
"org.bouncycastle.asn1.cms.ContentInfo",
"org.bouncycastle.asn1.cms.SignedData",
"org.bouncycastle.asn1.cms.SignerInfo"
] |
import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.Iterator; import org.bouncycastle.asn1.ASN1EncodableVector; import org.bouncycastle.asn1.ASN1OctetString; import org.bouncycastle.asn1.ASN1Set; import org.bouncycastle.asn1.BEROctetString; import org.bouncycastle.asn1.DERSet; import org.bouncycastle.asn1.cms.CMSObjectIdentifiers; import org.bouncycastle.asn1.cms.ContentInfo; import org.bouncycastle.asn1.cms.SignedData; import org.bouncycastle.asn1.cms.SignerInfo;
|
import java.io.*; import java.util.*; import org.bouncycastle.asn1.*; import org.bouncycastle.asn1.cms.*;
|
[
"java.io",
"java.util",
"org.bouncycastle.asn1"
] |
java.io; java.util; org.bouncycastle.asn1;
| 1,957,212
|
@Test
public void testMemoryCacheMultipleContext() throws AuthenticationException {
String file = FILE_DEFAULT_NAME + "testGetItem";
setupCache(file);
ITokenCacheStore tokenCacheA = new FileTokenCacheStore(mTargetContex, file);
AuthenticationContext contextA = new AuthenticationContext(getInstrumentation()
.getContext(), VALID_AUTHORITY, false, tokenCacheA);
AuthenticationContext contextB = new AuthenticationContext(getInstrumentation()
.getContext(), VALID_AUTHORITY, false, tokenCacheA);
// Verify the cache
TokenCacheItem item = contextA.getCache().getItem(CacheKey.createCacheKey(mCacheItem));
assertNotNull("Token cache item is expected to be NOT null", item);
item = contextA.getCache().getItem(CacheKey.createCacheKey(mTestItem2));
assertNotNull("Token cache item is expected to be NOT null", item);
item = contextB.getCache().getItem(CacheKey.createCacheKey(mTestItem2));
assertNotNull("Token cache item is expected to be NOT null", item);
// do remove operation
contextA.getCache().removeItem(CacheKey.createCacheKey(mCacheItem));
item = contextA.getCache().getItem(CacheKey.createCacheKey(mCacheItem));
assertNull("Token cache item is expected to be null", item);
item = contextB.getCache().getItem(CacheKey.createCacheKey(mCacheItem));
assertNull("Token cache item is expected to be null", item);
}
private class CustomLogger implements ILogger {
private String mLogMessage;
private ADALError mLogErrorCode;
|
void function() throws AuthenticationException { String file = FILE_DEFAULT_NAME + STR; setupCache(file); ITokenCacheStore tokenCacheA = new FileTokenCacheStore(mTargetContex, file); AuthenticationContext contextA = new AuthenticationContext(getInstrumentation() .getContext(), VALID_AUTHORITY, false, tokenCacheA); AuthenticationContext contextB = new AuthenticationContext(getInstrumentation() .getContext(), VALID_AUTHORITY, false, tokenCacheA); TokenCacheItem item = contextA.getCache().getItem(CacheKey.createCacheKey(mCacheItem)); assertNotNull(STR, item); item = contextA.getCache().getItem(CacheKey.createCacheKey(mTestItem2)); assertNotNull(STR, item); item = contextB.getCache().getItem(CacheKey.createCacheKey(mTestItem2)); assertNotNull(STR, item); contextA.getCache().removeItem(CacheKey.createCacheKey(mCacheItem)); item = contextA.getCache().getItem(CacheKey.createCacheKey(mCacheItem)); assertNull(STR, item); item = contextB.getCache().getItem(CacheKey.createCacheKey(mCacheItem)); assertNull(STR, item); } private class CustomLogger implements ILogger { private String mLogMessage; private ADALError mLogErrorCode;
|
/**
* memory cache is shared between context
* @throws AuthenticationException
*/
|
memory cache is shared between context
|
testMemoryCacheMultipleContext
|
{
"repo_name": "iambmelt/azure-activedirectory-library-for-android",
"path": "adal/src/androidTest/java/com/microsoft/aad/adal/FileTokenCacheStoreTests.java",
"license": "apache-2.0",
"size": 13957
}
|
[
"com.microsoft.aad.adal.Logger",
"org.junit.Assert"
] |
import com.microsoft.aad.adal.Logger; import org.junit.Assert;
|
import com.microsoft.aad.adal.*; import org.junit.*;
|
[
"com.microsoft.aad",
"org.junit"
] |
com.microsoft.aad; org.junit;
| 1,243,266
|
final List<OCRInfo> ocrInfos = new ArrayList<>();
ocrInfos.add(new OCRInfo(AccessibilityNodeInfoCompat.obtain(node)));
ocrController.recognizeTextForNodes(
screenCapture, ocrInfos, null, new Filter.NodeCompat(node -> true));
runTimeoutRunnable();
}
@Override
public void onOCRStarted() {}
|
final List<OCRInfo> ocrInfos = new ArrayList<>(); ocrInfos.add(new OCRInfo(AccessibilityNodeInfoCompat.obtain(node))); ocrController.recognizeTextForNodes( screenCapture, ocrInfos, null, new Filter.NodeCompat(node -> true)); runTimeoutRunnable(); } public void onOCRStarted() {}
|
/**
* Captures screen and performs ocr(optical character recognition) to recognize text for the given
* node.
*/
|
Captures screen and performs ocr(optical character recognition) to recognize text for the given node
|
perform
|
{
"repo_name": "google/talkback",
"path": "talkback/src/main/java/com/google/android/accessibility/talkback/imagecaption/CharacterCaptionRequest.java",
"license": "apache-2.0",
"size": 3057
}
|
[
"androidx.core.view.accessibility.AccessibilityNodeInfoCompat",
"com.google.android.accessibility.utils.Filter",
"com.google.android.accessibility.utils.ocr.OCRInfo",
"java.util.ArrayList",
"java.util.List"
] |
import androidx.core.view.accessibility.AccessibilityNodeInfoCompat; import com.google.android.accessibility.utils.Filter; import com.google.android.accessibility.utils.ocr.OCRInfo; import java.util.ArrayList; import java.util.List;
|
import androidx.core.view.accessibility.*; import com.google.android.accessibility.utils.*; import com.google.android.accessibility.utils.ocr.*; import java.util.*;
|
[
"androidx.core",
"com.google.android",
"java.util"
] |
androidx.core; com.google.android; java.util;
| 2,262,098
|
private String uploadFileToS3(File toUpload, MigrationType type) {
String s3Key = type.name() +"-"+ toUpload.getName();
log.info("Atempting to upload: "+getS3URL(awsBucket, s3Key));
PutObjectResult results = this.awsClient.putObject(awsBucket, s3Key, toUpload);
log.info(results);
this.backupFileName = s3Key;
return getS3URL(this.awsBucket, this.backupFileName);
}
|
String function(File toUpload, MigrationType type) { String s3Key = type.name() +"-"+ toUpload.getName(); log.info(STR+getS3URL(awsBucket, s3Key)); PutObjectResult results = this.awsClient.putObject(awsBucket, s3Key, toUpload); log.info(results); this.backupFileName = s3Key; return getS3URL(this.awsBucket, this.backupFileName); }
|
/**
* Upload the file to S3
* @param toUpload
* @param id
*/
|
Upload the file to S3
|
uploadFileToS3
|
{
"repo_name": "hhu94/Synapse-Repository-Services",
"path": "services/repository-managers/src/main/java/org/sagebionetworks/repo/manager/backup/daemon/BackupRestoreDaemon.java",
"license": "apache-2.0",
"size": 12819
}
|
[
"com.amazonaws.services.s3.model.PutObjectResult",
"java.io.File",
"org.sagebionetworks.repo.model.migration.MigrationType"
] |
import com.amazonaws.services.s3.model.PutObjectResult; import java.io.File; import org.sagebionetworks.repo.model.migration.MigrationType;
|
import com.amazonaws.services.s3.model.*; import java.io.*; import org.sagebionetworks.repo.model.migration.*;
|
[
"com.amazonaws.services",
"java.io",
"org.sagebionetworks.repo"
] |
com.amazonaws.services; java.io; org.sagebionetworks.repo;
| 1,221,602
|
@Override
protected boolean checkEntryNotValid(RegionEntry mapEntry) {
return (super.checkEntryNotValid(mapEntry) || mapEntry.getKey() instanceof ThreadIdentifier);
}
|
boolean function(RegionEntry mapEntry) { return (super.checkEntryNotValid(mapEntry) mapEntry.getKey() instanceof ThreadIdentifier); }
|
/**
* This method is over-ridden since we do not want GII of ThreadIdentifier objects to happen
*/
|
This method is over-ridden since we do not want GII of ThreadIdentifier objects to happen
|
checkEntryNotValid
|
{
"repo_name": "davinash/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/cache/HARegion.java",
"license": "apache-2.0",
"size": 19981
}
|
[
"org.apache.geode.internal.cache.ha.ThreadIdentifier"
] |
import org.apache.geode.internal.cache.ha.ThreadIdentifier;
|
import org.apache.geode.internal.cache.ha.*;
|
[
"org.apache.geode"
] |
org.apache.geode;
| 149,220
|
private int getStreamId(HttpHeaders httpHeaders) throws Exception {
return httpHeaders.getInt(HttpConversionUtil.ExtensionHeaderNames.STREAM_ID.text(),
connection().local().incrementAndGetNextStreamId());
}
|
int function(HttpHeaders httpHeaders) throws Exception { return httpHeaders.getInt(HttpConversionUtil.ExtensionHeaderNames.STREAM_ID.text(), connection().local().incrementAndGetNextStreamId()); }
|
/**
* Get the next stream id either from the {@link HttpHeaders} object or HTTP/2 codec
*
* @param httpHeaders The HTTP/1.x headers object to look for the stream id
* @return The stream id to use with this {@link HttpHeaders} object
* @throws Exception If the {@code httpHeaders} object specifies an invalid stream id
*/
|
Get the next stream id either from the <code>HttpHeaders</code> object or HTTP/2 codec
|
getStreamId
|
{
"repo_name": "bryce-anderson/netty",
"path": "codec-http2/src/main/java/io/netty/handler/codec/http2/HttpToHttp2ConnectionHandler.java",
"license": "apache-2.0",
"size": 6515
}
|
[
"io.netty.handler.codec.http.HttpHeaders"
] |
import io.netty.handler.codec.http.HttpHeaders;
|
import io.netty.handler.codec.http.*;
|
[
"io.netty.handler"
] |
io.netty.handler;
| 999,614
|
String contentId = message.getStringProperty(ContentCommunicationConstants.CONTENT_ID);
String contentPurpose = message.getStringProperty(ContentCommunicationConstants.PURPOSE);
ContentInfo content = instanceContentService.getContent(contentId, contentPurpose);
Optional<InstanceReference> instanceReference = instanceTypeResolver.resolveReference(content.getInstanceId());
if (!instanceReference.isPresent() || !isTypeSupported(instanceReference.get())) {
LOGGER.trace("Content {} not supported for ocr!", content.getName());
return;
}
LOGGER.info("Content {} fetched from content topic for OCR conversion.", content.getName());
if (!content.exists()) {
throw new RollbackedRuntimeException("Content " + contentId + " is missing, will try again!");
}
Instance instance = instanceReference.get().toInstance();
// Create an empty ocr content bound to the instance, so later on when the content is
// actually ocred, it can be updated to this content id instead of the instance
// directly. This is done to avoid race conditions with multiple ocr services and
// multiple incoming ocred contents to the same instance.
String ocredContentId = createEmptyContent(content, instance.getId());
Pair<String, String> nameAndExtension = FileUtil.splitNameAndExtension(content.getName());
Serializable instanceId = (Serializable) message.getObjectProperty(InstanceCommunicationConstants.INSTANCE_ID);
try (BufferedInputStream contentStream = new BufferedInputStream(content.getInputStream())) {
senderService.send(CONTENT_OCR_QUEUE, contentStream, SendOptions.create()
.withWriter(BytesMessageWriter.instance())
.withProperty(InstanceCommunicationConstants.MIMETYPE, content.getMimeType())
.withProperty(InstanceCommunicationConstants.INSTANCE_ID, instanceId)
.withProperty(ContentCommunicationConstants.FILE_NAME, nameAndExtension.getFirst())
.withProperty(ContentCommunicationConstants.FILE_EXTENSION, "." + nameAndExtension.getSecond())
.withProperty(OCRContentMessageAttributes.OCRED_CONTENT_ID, ocredContentId)
.withProperty(OCRContentMessageAttributes.OCR_LANGUAGE, getOcrLanguage(instance)));
} catch (IOException e) {
throw new RollbackedRuntimeException("Cannot consume the content stream!", e);
}
}
|
String contentId = message.getStringProperty(ContentCommunicationConstants.CONTENT_ID); String contentPurpose = message.getStringProperty(ContentCommunicationConstants.PURPOSE); ContentInfo content = instanceContentService.getContent(contentId, contentPurpose); Optional<InstanceReference> instanceReference = instanceTypeResolver.resolveReference(content.getInstanceId()); if (!instanceReference.isPresent() !isTypeSupported(instanceReference.get())) { LOGGER.trace(STR, content.getName()); return; } LOGGER.info(STR, content.getName()); if (!content.exists()) { throw new RollbackedRuntimeException(STR + contentId + STR); } Instance instance = instanceReference.get().toInstance(); String ocredContentId = createEmptyContent(content, instance.getId()); Pair<String, String> nameAndExtension = FileUtil.splitNameAndExtension(content.getName()); Serializable instanceId = (Serializable) message.getObjectProperty(InstanceCommunicationConstants.INSTANCE_ID); try (BufferedInputStream contentStream = new BufferedInputStream(content.getInputStream())) { senderService.send(CONTENT_OCR_QUEUE, contentStream, SendOptions.create() .withWriter(BytesMessageWriter.instance()) .withProperty(InstanceCommunicationConstants.MIMETYPE, content.getMimeType()) .withProperty(InstanceCommunicationConstants.INSTANCE_ID, instanceId) .withProperty(ContentCommunicationConstants.FILE_NAME, nameAndExtension.getFirst()) .withProperty(ContentCommunicationConstants.FILE_EXTENSION, "." + nameAndExtension.getSecond()) .withProperty(OCRContentMessageAttributes.OCRED_CONTENT_ID, ocredContentId) .withProperty(OCRContentMessageAttributes.OCR_LANGUAGE, getOcrLanguage(instance))); } catch (IOException e) { throw new RollbackedRuntimeException(STR, e); } }
|
/**
* Listen on the Content topic for any OCR-able contents and put them in the ocr queue.
*
* @param message the message
* @throws JMSException if the JMS provider fails to get some property value from the incoming message
* due to some internal error.
*/
|
Listen on the Content topic for any OCR-able contents and put them in the ocr queue
|
onContentAdded
|
{
"repo_name": "SirmaITT/conservation-space-1.7.0",
"path": "docker/sirma-platform/platform/seip-parent/extensions/ocr-integration/src/main/java/com/sirma/sep/ocr/jms/ContentOCRQueue.java",
"license": "lgpl-3.0",
"size": 7444
}
|
[
"com.sirma.itt.seip.Pair",
"com.sirma.itt.seip.domain.instance.Instance",
"com.sirma.itt.seip.domain.instance.InstanceReference",
"com.sirma.itt.seip.exception.RollbackedRuntimeException",
"com.sirma.itt.seip.instance.messaging.InstanceCommunicationConstants",
"com.sirma.itt.seip.util.file.FileUtil",
"com.sirma.sep.content.ContentInfo",
"com.sirma.sep.content.jms.ContentCommunicationConstants",
"com.sirmaenterprise.sep.jms.api.SendOptions",
"com.sirmaenterprise.sep.jms.convert.BytesMessageWriter",
"java.io.BufferedInputStream",
"java.io.IOException",
"java.io.Serializable",
"java.util.Optional"
] |
import com.sirma.itt.seip.Pair; import com.sirma.itt.seip.domain.instance.Instance; import com.sirma.itt.seip.domain.instance.InstanceReference; import com.sirma.itt.seip.exception.RollbackedRuntimeException; import com.sirma.itt.seip.instance.messaging.InstanceCommunicationConstants; import com.sirma.itt.seip.util.file.FileUtil; import com.sirma.sep.content.ContentInfo; import com.sirma.sep.content.jms.ContentCommunicationConstants; import com.sirmaenterprise.sep.jms.api.SendOptions; import com.sirmaenterprise.sep.jms.convert.BytesMessageWriter; import java.io.BufferedInputStream; import java.io.IOException; import java.io.Serializable; import java.util.Optional;
|
import com.sirma.itt.seip.*; import com.sirma.itt.seip.domain.instance.*; import com.sirma.itt.seip.exception.*; import com.sirma.itt.seip.instance.messaging.*; import com.sirma.itt.seip.util.file.*; import com.sirma.sep.content.*; import com.sirma.sep.content.jms.*; import com.sirmaenterprise.sep.jms.api.*; import com.sirmaenterprise.sep.jms.convert.*; import java.io.*; import java.util.*;
|
[
"com.sirma.itt",
"com.sirma.sep",
"com.sirmaenterprise.sep",
"java.io",
"java.util"
] |
com.sirma.itt; com.sirma.sep; com.sirmaenterprise.sep; java.io; java.util;
| 1,035,262
|
public static double estimateSwapCosts(CostFunctions from,
OperationCount oc, CostFunctions to) {
double cost = 0;
cost += oc.listCount * to.INIT.getCost(oc.listSize);
cost += oc.listCount * from.ITERATE.getCost(oc.listSize);
for (int i = 0; i < oc.listSize; i++) {
cost += oc.listCount * to.ADD_SUCCESS.getCost(i);
}
return cost;
}
|
static double function(CostFunctions from, OperationCount oc, CostFunctions to) { double cost = 0; cost += oc.listCount * to.INIT.getCost(oc.listSize); cost += oc.listCount * from.ITERATE.getCost(oc.listSize); for (int i = 0; i < oc.listSize; i++) { cost += oc.listCount * to.ADD_SUCCESS.getCost(i); } return cost; }
|
/**
*
* estimation is computed as the cost of initializing oc.listCount many
* lists of size oc.listSize, iterating over oc.listSize many lists of size
* oc.listSize and adding oc.listSize single elements
*
* @param from
* datastructure to swap from
* @param oc
* operation count of the considered list type, only required for
* list size and count
* @param to
* datastructure to swap to
* @return estimation of the runtime costs for exchanging datastructure for
* operation count oc from datastructure from to datastructure to
*/
|
estimation is computed as the cost of initializing oc.listCount many lists of size oc.listSize, iterating over oc.listSize many lists of size oc.listSize and adding oc.listSize single elements
|
estimateSwapCosts
|
{
"repo_name": "BenjaminSchiller/DNA",
"path": "src/dna/graph/datastructures/cost/CostEstimation.java",
"license": "gpl-3.0",
"size": 6372
}
|
[
"dna.graph.datastructures.count.OperationCount"
] |
import dna.graph.datastructures.count.OperationCount;
|
import dna.graph.datastructures.count.*;
|
[
"dna.graph.datastructures"
] |
dna.graph.datastructures;
| 2,652,252
|
public static INDArrayIndex[] resolve(INDArrayIndex[] allIndex, INDArrayIndex...intendedIndexes) {
int numNewAxes = numNewAxis(intendedIndexes);
INDArrayIndex[] all = new INDArrayIndex[allIndex.length + numNewAxes];
Arrays.fill(all,NDArrayIndex.all());
for(int i = 0; i < allIndex.length; i++) {
//collapse single length indexes in to point indexes
if (i >= intendedIndexes.length) break;
if (intendedIndexes[i] instanceof NDArrayIndex) {
NDArrayIndex idx = (NDArrayIndex) intendedIndexes[i];
if (idx.indices.length == 1)
intendedIndexes[i] = new PointIndex(idx.indices[0]);
}
all[i] = intendedIndexes[i];
}
return all;
}
|
static INDArrayIndex[] function(INDArrayIndex[] allIndex, INDArrayIndex...intendedIndexes) { int numNewAxes = numNewAxis(intendedIndexes); INDArrayIndex[] all = new INDArrayIndex[allIndex.length + numNewAxes]; Arrays.fill(all,NDArrayIndex.all()); for(int i = 0; i < allIndex.length; i++) { if (i >= intendedIndexes.length) break; if (intendedIndexes[i] instanceof NDArrayIndex) { NDArrayIndex idx = (NDArrayIndex) intendedIndexes[i]; if (idx.indices.length == 1) intendedIndexes[i] = new PointIndex(idx.indices[0]); } all[i] = intendedIndexes[i]; } return all; }
|
/**
* Given an all index and
* the intended indexes, return an
* index array containing a combination of all elements
* for slicing and overriding particular indexes where necessary
* @param allIndex the index containing all elements
* @param intendedIndexes the indexes specified by the user
* @return the resolved indexes (containing all where nothing is specified, and the intended index
* for a particular dimension otherwise)
*/
|
Given an all index and the intended indexes, return an index array containing a combination of all elements for slicing and overriding particular indexes where necessary
|
resolve
|
{
"repo_name": "EronWright/nd4j",
"path": "nd4j-api/src/main/java/org/nd4j/linalg/indexing/NDArrayIndex.java",
"license": "apache-2.0",
"size": 18001
}
|
[
"java.util.Arrays"
] |
import java.util.Arrays;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,361,274
|
public static NamingContextExt unchecked_narrow(org.omg.CORBA.Object obj)
{
if (obj == null)
return null;
else if (obj instanceof NamingContextExt)
return (NamingContextExt) obj;
else
{
// Do not call the _is_a(..).
Delegate delegate = ((ObjectImpl) obj)._get_delegate();
return new _NamingContextExtStub(delegate);
}
}
|
static NamingContextExt function(org.omg.CORBA.Object obj) { if (obj == null) return null; else if (obj instanceof NamingContextExt) return (NamingContextExt) obj; else { Delegate delegate = ((ObjectImpl) obj)._get_delegate(); return new _NamingContextExtStub(delegate); } }
|
/**
* Narrow the given object to the NamingContextExt. No type-checking is
* performed to verify that the object actually supports the requested type.
* The {@link BAD_OPERATION} will be thrown if unsupported operations are
* invoked on the new returned reference, but no failure is expected at the
* time of the unchecked_narrow.
*
* @param obj the object to cast.
*
* @return the casted NamingContextExt
*
* @since 1.5
*
* @see OMG issue 4158.
*/
|
Narrow the given object to the NamingContextExt. No type-checking is performed to verify that the object actually supports the requested type. The <code>BAD_OPERATION</code> will be thrown if unsupported operations are invoked on the new returned reference, but no failure is expected at the time of the unchecked_narrow
|
unchecked_narrow
|
{
"repo_name": "shaotuanchen/sunflower_exp",
"path": "tools/source/gcc-4.2.4/libjava/classpath/org/omg/CosNaming/NamingContextExtHelper.java",
"license": "bsd-3-clause",
"size": 5557
}
|
[
"org.omg.CORBA"
] |
import org.omg.CORBA;
|
import org.omg.*;
|
[
"org.omg"
] |
org.omg;
| 835,153
|
public List<SigningOutbox> getSigningOutboxes() {
return signingOutboxes;
}
|
List<SigningOutbox> function() { return signingOutboxes; }
|
/**
* Gets all signining Outboxes which belongs to this Outbox. <p /> This is part of the new MH 3.0 functionality for
* signing PDFs.
*
* @return The list of signing outboxes.
*/
|
Gets all signining Outboxes which belongs to this Outbox. This is part of the new MH 3.0 functionality for signing PDFs
|
getSigningOutboxes
|
{
"repo_name": "republique-et-canton-de-geneve/sedex-Message-Handler",
"path": "src/main/java/ch/admin/suis/msghandler/config/Outbox.java",
"license": "agpl-3.0",
"size": 5096
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,971,045
|
protected void checkTankFields(Tank t) {
if(t.battleTierMax < 1 || t.battleTierMax > 12) {
report(t, "battleTierMax: " + t.battleTierMax);
}
if(t.battleTierMin < 1 || t.battleTierMin > 12) {
report(t, "battleTierMin: " + t.battleTierMin);
}
if(t.tier > 1 && t.cost < 1)
report(t, "cost: " + t.cost);
if(t.crewMembers < 1 || t.crewMembers > 10) {
report(t, "crewMembers: " + t.crewMembers);
}
if(t.currency == null) {
report(t, "currency is null");
}
if(t.gunArcRight < 1 || t.gunArcRight > 360) {
report(t, "gunArcHigh: " + t.gunArcRight);
}
if(t.gunArcLeft < -360 || t.gunArcLeft > 0) {
report(t, "gunArcLow: " + t.gunArcLeft);
}
if(t.hullFront < 5) {
report(t, "hullFront: " + t.hullFront);
}
if(t.hullSide < 5) {
report(t, "hullSide: " + t.hullSide);
}
if(t.hullRear < 5) {
report(t, "hullRear: " + t.hullRear);
}
if(t.id.length() < 2) {
report(t, "id: " + t.id);
}
if(t.name.length() < 2) {
report(t, "name: " + t.name);
}
if(t.nation == null) {
report(t, "nation is null");
}
if(t.tier < 1 || t.tier > 12) {
report(t, "tier: " + t.tier);
}
if(t.speed < 5 || t.speed > 120) {
report(t, "topSpeed: " + t.speed);
}
if(t.type == null) {
report(t, "type is null");
}
checkTankEquipment(t, t.equipmentStock);
checkTankEquipment(t, t.equipmentTop);
}
|
void function(Tank t) { if(t.battleTierMax < 1 t.battleTierMax > 12) { report(t, STR + t.battleTierMax); } if(t.battleTierMin < 1 t.battleTierMin > 12) { report(t, STR + t.battleTierMin); } if(t.tier > 1 && t.cost < 1) report(t, STR + t.cost); if(t.crewMembers < 1 t.crewMembers > 10) { report(t, STR + t.crewMembers); } if(t.currency == null) { report(t, STR); } if(t.gunArcRight < 1 t.gunArcRight > 360) { report(t, STR + t.gunArcRight); } if(t.gunArcLeft < -360 t.gunArcLeft > 0) { report(t, STR + t.gunArcLeft); } if(t.hullFront < 5) { report(t, STR + t.hullFront); } if(t.hullSide < 5) { report(t, STR + t.hullSide); } if(t.hullRear < 5) { report(t, STR + t.hullRear); } if(t.id.length() < 2) { report(t, STR + t.id); } if(t.name.length() < 2) { report(t, STR + t.name); } if(t.nation == null) { report(t, STR); } if(t.tier < 1 t.tier > 12) { report(t, STR + t.tier); } if(t.speed < 5 t.speed > 120) { report(t, STR + t.speed); } if(t.type == null) { report(t, STR); } checkTankEquipment(t, t.equipmentStock); checkTankEquipment(t, t.equipmentTop); }
|
/**
* Checks all fields of a single tank
* @param t the tank to check
*/
|
Checks all fields of a single tank
|
checkTankFields
|
{
"repo_name": "Klamann/WotCrawler",
"path": "src/main/java/de/nx42/wotcrawler/ext/Evaluator.java",
"license": "gpl-3.0",
"size": 19554
}
|
[
"de.nx42.wotcrawler.db.tank.Tank"
] |
import de.nx42.wotcrawler.db.tank.Tank;
|
import de.nx42.wotcrawler.db.tank.*;
|
[
"de.nx42.wotcrawler"
] |
de.nx42.wotcrawler;
| 623,875
|
public static java.util.List extractRoleDisciplineSecurityLevelList(ims.domain.ILightweightDomainFactory domainFactory, ims.ocrr.vo.RoleDisciplineSecurityLevelLiteVoCollection voCollection)
{
return extractRoleDisciplineSecurityLevelList(domainFactory, voCollection, null, new HashMap());
}
|
static java.util.List function(ims.domain.ILightweightDomainFactory domainFactory, ims.ocrr.vo.RoleDisciplineSecurityLevelLiteVoCollection voCollection) { return extractRoleDisciplineSecurityLevelList(domainFactory, voCollection, null, new HashMap()); }
|
/**
* Create the ims.ocrr.configuration.domain.objects.RoleDisciplineSecurityLevel list from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/
|
Create the ims.ocrr.configuration.domain.objects.RoleDisciplineSecurityLevel list from the value object collection
|
extractRoleDisciplineSecurityLevelList
|
{
"repo_name": "open-health-hub/openMAXIMS",
"path": "openmaxims_workspace/ValueObjects/src/ims/ocrr/vo/domain/RoleDisciplineSecurityLevelLiteVoAssembler.java",
"license": "agpl-3.0",
"size": 21527
}
|
[
"java.util.HashMap"
] |
import java.util.HashMap;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,491,123
|
public static Location createLocation(Source code) {
return new SourceLocation(
code,
null ,
null
);
}
|
static Location function(Source code) { return new SourceLocation( code, null , null ); }
|
/**
* Creates a new location for the given file
*
* @param code the the {@link Source} object to create a location for
* @return a new location
*/
|
Creates a new location for the given file
|
createLocation
|
{
"repo_name": "vesperin/mix",
"path": "src/main/java/com/vesperin/base/locations/Locations.java",
"license": "apache-2.0",
"size": 10060
}
|
[
"com.vesperin.base.Source"
] |
import com.vesperin.base.Source;
|
import com.vesperin.base.*;
|
[
"com.vesperin.base"
] |
com.vesperin.base;
| 704,393
|
public ServiceFuture<RoleAssignmentInner> createByIdAsync(String roleAssignmentId, final ServiceCallback<RoleAssignmentInner> serviceCallback) {
return ServiceFuture.fromResponse(createByIdWithServiceResponseAsync(roleAssignmentId), serviceCallback);
}
|
ServiceFuture<RoleAssignmentInner> function(String roleAssignmentId, final ServiceCallback<RoleAssignmentInner> serviceCallback) { return ServiceFuture.fromResponse(createByIdWithServiceResponseAsync(roleAssignmentId), serviceCallback); }
|
/**
* Creates a role assignment by ID.
*
* @param roleAssignmentId The ID of the role assignment to create.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
|
Creates a role assignment by ID
|
createByIdAsync
|
{
"repo_name": "ljhljh235/azure-sdk-for-java",
"path": "azure-mgmt-graph-rbac/src/main/java/com/microsoft/azure/management/graphrbac/implementation/RoleAssignmentsInner.java",
"license": "apache-2.0",
"size": 130184
}
|
[
"com.microsoft.rest.ServiceCallback",
"com.microsoft.rest.ServiceFuture"
] |
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture;
|
import com.microsoft.rest.*;
|
[
"com.microsoft.rest"
] |
com.microsoft.rest;
| 153,977
|
@Test
public void test_setInitialBillsProcessed() {
Integer value = 1;
instance.setInitialBillsProcessed(value);
assertEquals("'setInitialBillsProcessed' should be correct.",
value, TestsHelper.getField(instance, "initialBillsProcessed"));
}
|
void function() { Integer value = 1; instance.setInitialBillsProcessed(value); assertEquals(STR, value, TestsHelper.getField(instance, STR)); }
|
/**
* <p>
* Accuracy test for the method <code>setInitialBillsProcessed(Integer initialBillsProcessed)</code>.<br>
* The value should be properly set.
* </p>
*/
|
Accuracy test for the method <code>setInitialBillsProcessed(Integer initialBillsProcessed)</code>. The value should be properly set.
|
test_setInitialBillsProcessed
|
{
"repo_name": "NASA-Tournament-Lab/CoECI-OPM-Service-Credit-Redeposit-Deposit-Application",
"path": "Code/SCRD_BRE/src/java/tests/gov/opm/scrd/entities/application/AuditBatchUnitTests.java",
"license": "apache-2.0",
"size": 26926
}
|
[
"gov.opm.scrd.TestsHelper",
"org.junit.Assert"
] |
import gov.opm.scrd.TestsHelper; import org.junit.Assert;
|
import gov.opm.scrd.*; import org.junit.*;
|
[
"gov.opm.scrd",
"org.junit"
] |
gov.opm.scrd; org.junit;
| 1,550,706
|
public static <T> void run(Sorter sorter, ArrayModel<T> target,
Comparator<T> comparator, ArrayModelFactory arrayFactory,
NodeModelFactory nodeFactory, Registry recursions,
Stopwatch stopwatch) {
sorter.prepare(target.getSize());
stopwatch.start();
sorter.sort(target, comparator, arrayFactory, nodeFactory,
recursions, stopwatch);
stopwatch.stop();
}
private SorterSandbox() {
}
|
static <T> void function(Sorter sorter, ArrayModel<T> target, Comparator<T> comparator, ArrayModelFactory arrayFactory, NodeModelFactory nodeFactory, Registry recursions, Stopwatch stopwatch) { sorter.prepare(target.getSize()); stopwatch.start(); sorter.sort(target, comparator, arrayFactory, nodeFactory, recursions, stopwatch); stopwatch.stop(); } private SorterSandbox() { }
|
/**
* Runs specified sorter on the target.
*
* @param sorter sorting algorithm implementation.
* @param target target array to be sorted.
* @param comparator an item comparator.
* @param arrayFactory item arrays allocator.
* @param nodeFactory item nodes provider.
* @param recursions registry of recursive calls
* @param stopwatch time consumption registry.
*
* @see Comparator
* @see ItemArray
* @see ItemArrayFactory
* @see ItemNodeFactory
* @see Registry
* @see Sorter
* @see Stopwatch
*/
|
Runs specified sorter on the target
|
run
|
{
"repo_name": "ilyagubarev/Algorithms",
"path": "Java/Algorithms/src/main/java/com/ilyagubarev/algorithms/sorting/SorterSandbox.java",
"license": "apache-2.0",
"size": 2266
}
|
[
"com.ilyagubarev.algorithms.adt.arrays.ArrayModel",
"com.ilyagubarev.algorithms.adt.arrays.ArrayModelFactory",
"com.ilyagubarev.algorithms.adt.nodes.NodeModelFactory",
"com.ilyagubarev.algorithms.adt.utils.Registry",
"com.ilyagubarev.algorithms.adt.utils.Stopwatch",
"com.ilyagubarev.algorithms.sorting.methods.Sorter",
"java.util.Comparator"
] |
import com.ilyagubarev.algorithms.adt.arrays.ArrayModel; import com.ilyagubarev.algorithms.adt.arrays.ArrayModelFactory; import com.ilyagubarev.algorithms.adt.nodes.NodeModelFactory; import com.ilyagubarev.algorithms.adt.utils.Registry; import com.ilyagubarev.algorithms.adt.utils.Stopwatch; import com.ilyagubarev.algorithms.sorting.methods.Sorter; import java.util.Comparator;
|
import com.ilyagubarev.algorithms.adt.arrays.*; import com.ilyagubarev.algorithms.adt.nodes.*; import com.ilyagubarev.algorithms.adt.utils.*; import com.ilyagubarev.algorithms.sorting.methods.*; import java.util.*;
|
[
"com.ilyagubarev.algorithms",
"java.util"
] |
com.ilyagubarev.algorithms; java.util;
| 2,130,305
|
private void resolveReferences(Type base, Events events) {
// remove unmeaningful references (for safety), as we are using this reference within the
// structure directly and do not copy it
base.cleanUpBackReferences();
addAnnotations(base, base.getAnnotations(), events);
if (base instanceof TypeWithMethods) {
TypeWithMethods typeWithMethods = (TypeWithMethods) base;
resolveMethodReferences(typeWithMethods.getMethods(), events);
}
if (base instanceof ClassType) {
ClassType classType = (ClassType) base;
addSuperclass(classType, classType.getSuperClasses(), events);
addInterface(classType, classType.getRealizedInterfaces(), events);
}
if (base instanceof InterfaceType) {
InterfaceType interfaceType = (InterfaceType) base;
addSuperinterface(interfaceType, interfaceType.getSuperInterfaces(), events);
}
// nothing to do for AnnotationTypes - everything handled already at TypeWithAnnotations.
}
|
void function(Type base, Events events) { base.cleanUpBackReferences(); addAnnotations(base, base.getAnnotations(), events); if (base instanceof TypeWithMethods) { TypeWithMethods typeWithMethods = (TypeWithMethods) base; resolveMethodReferences(typeWithMethods.getMethods(), events); } if (base instanceof ClassType) { ClassType classType = (ClassType) base; addSuperclass(classType, classType.getSuperClasses(), events); addInterface(classType, classType.getRealizedInterfaces(), events); } if (base instanceof InterfaceType) { InterfaceType interfaceType = (InterfaceType) base; addSuperinterface(interfaceType, interfaceType.getSuperInterfaces(), events); } }
|
/**
* Resolves the references of the given type and ensures that references to already existing
* entities in the structure are used and new entities - unknown to the class cache - are added.
*
* @param base
* the type.
* @param events
* write notifications here.
*/
|
Resolves the references of the given type and ensures that references to already existing entities in the structure are used and new entities - unknown to the class cache - are added
|
resolveReferences
|
{
"repo_name": "inspectIT/inspectIT",
"path": "inspectit.server/src/main/java/rocks/inspectit/server/instrumentation/classcache/ClassCacheModification.java",
"license": "agpl-3.0",
"size": 20015
}
|
[
"rocks.inspectit.server.instrumentation.classcache.events.Events",
"rocks.inspectit.shared.all.instrumentation.classcache.ClassType",
"rocks.inspectit.shared.all.instrumentation.classcache.InterfaceType",
"rocks.inspectit.shared.all.instrumentation.classcache.Type",
"rocks.inspectit.shared.all.instrumentation.classcache.TypeWithMethods"
] |
import rocks.inspectit.server.instrumentation.classcache.events.Events; import rocks.inspectit.shared.all.instrumentation.classcache.ClassType; import rocks.inspectit.shared.all.instrumentation.classcache.InterfaceType; import rocks.inspectit.shared.all.instrumentation.classcache.Type; import rocks.inspectit.shared.all.instrumentation.classcache.TypeWithMethods;
|
import rocks.inspectit.server.instrumentation.classcache.events.*; import rocks.inspectit.shared.all.instrumentation.classcache.*;
|
[
"rocks.inspectit.server",
"rocks.inspectit.shared"
] |
rocks.inspectit.server; rocks.inspectit.shared;
| 772,533
|
public Builder addAddendum(String xmlTextNodeContent)
throws MisconfigurationException {
return addAddendum(HumanReadableText.fromXmlTextNode(xmlTextNodeContent));
}
|
Builder function(String xmlTextNodeContent) throws MisconfigurationException { return addAddendum(HumanReadableText.fromXmlTextNode(xmlTextNodeContent)); }
|
/**
* Adds an addendum parsed from a plexus {@code <configuration />}.
* <p>
* This may be called multiple times, so
* <pre>
* <addendum>line 1</addendum>
* <addendum>line 2</addendum>
* </pre>
* is equivalent to
* <pre>
* <addendum>
* line 1
* line 2
* </addendum>
* </pre>
*/
|
Adds an addendum parsed from a plexus . This may be called multiple times, so <code> <addendum>line 1</addendum> <addendum>line 2</addendum> </code> is equivalent to <code> <addendum> line 1 line 2 </addendum> </code>
|
addAddendum
|
{
"repo_name": "mikesamuel/fences-maven-enforcer-rule",
"path": "common/src/main/java/com/google/security/fences/config/Rationale.java",
"license": "apache-2.0",
"size": 7439
}
|
[
"com.google.security.fences.util.MisconfigurationException"
] |
import com.google.security.fences.util.MisconfigurationException;
|
import com.google.security.fences.util.*;
|
[
"com.google.security"
] |
com.google.security;
| 853,184
|
public NEATGenome crossover(final NEATGenome mom, final NEATGenome dad) {
NEATParent best = favorParent(mom,dad);
final Chromosome babyNeurons = new Chromosome();
final Chromosome babyGenes = new Chromosome();
final List<Long> vecNeurons = new ArrayList<Long>();
int curMom = 0; // current gene index from mom
int curDad = 0; // current gene index from dad
NEATLinkGene selectedGene = null;
while ((curMom < mom.getNumGenes()) || (curDad < dad.getNumGenes())) {
NEATLinkGene momGene = null; // the mom gene object
NEATLinkGene dadGene = null; // the dad gene object
// grab the actual objects from mom and dad for the specified indexes
// if there are none, then null
if (curMom < mom.getNumGenes()) {
momGene = (NEATLinkGene) mom.getLinks().get(curMom);
}
if (curDad < dad.getNumGenes()) {
dadGene = (NEATLinkGene) dad.getLinks().get(curDad);
}
// now select a gene for mom or dad. This gene is for the baby
if ((momGene == null) && (dadGene != null)) {
if (best == NEATParent.Dad) {
selectedGene = dadGene;
}
curDad++;
} else if ((dadGene == null) && (momGene != null)) {
if (best == NEATParent.Mom) {
selectedGene = momGene;
}
curMom++;
} else if (momGene.getInnovationId() < dadGene.getInnovationId()) {
if (best == NEATParent.Mom) {
selectedGene = momGene;
}
curMom++;
} else if (dadGene.getInnovationId() < momGene.getInnovationId()) {
if (best == NEATParent.Dad) {
selectedGene = dadGene;
}
curDad++;
} else if (dadGene.getInnovationId() == momGene.getInnovationId()) {
if (Math.random() < 0.5f) {
selectedGene = momGene;
}
else {
selectedGene = dadGene;
}
curMom++;
curDad++;
}
if (babyGenes.size() == 0) {
babyGenes.add(selectedGene);
} else {
if (((NEATLinkGene) babyGenes.get(babyGenes.size() - 1))
.getInnovationId() != selectedGene.getInnovationId()) {
babyGenes.add(selectedGene);
}
}
// Check if we already have the nodes referred to in SelectedGene.
// If not, they need to be added.
addNeuronID(selectedGene.getFromNeuronID(), vecNeurons);
addNeuronID(selectedGene.getToNeuronID(), vecNeurons);
}// end while
// now create the required nodes. First sort them into order
Collections.sort(vecNeurons);
for (int i = 0; i < vecNeurons.size(); i++) {
babyNeurons.add(getInnovations().createNeuronFromID(
vecNeurons.get(i)));
}
// finally, create the genome
final NEATGenome babyGenome = new NEATGenome(getPopulation()
.assignGenomeID(), babyNeurons, babyGenes, mom.getInputCount(),
mom.getOutputCount());
babyGenome.setGeneticAlgorithm(this);
babyGenome.setPopulation(getPopulation());
babyGenome.validate();
return babyGenome;
}
|
NEATGenome function(final NEATGenome mom, final NEATGenome dad) { NEATParent best = favorParent(mom,dad); final Chromosome babyNeurons = new Chromosome(); final Chromosome babyGenes = new Chromosome(); final List<Long> vecNeurons = new ArrayList<Long>(); int curMom = 0; int curDad = 0; NEATLinkGene selectedGene = null; while ((curMom < mom.getNumGenes()) (curDad < dad.getNumGenes())) { NEATLinkGene momGene = null; NEATLinkGene dadGene = null; if (curMom < mom.getNumGenes()) { momGene = (NEATLinkGene) mom.getLinks().get(curMom); } if (curDad < dad.getNumGenes()) { dadGene = (NEATLinkGene) dad.getLinks().get(curDad); } if ((momGene == null) && (dadGene != null)) { if (best == NEATParent.Dad) { selectedGene = dadGene; } curDad++; } else if ((dadGene == null) && (momGene != null)) { if (best == NEATParent.Mom) { selectedGene = momGene; } curMom++; } else if (momGene.getInnovationId() < dadGene.getInnovationId()) { if (best == NEATParent.Mom) { selectedGene = momGene; } curMom++; } else if (dadGene.getInnovationId() < momGene.getInnovationId()) { if (best == NEATParent.Dad) { selectedGene = dadGene; } curDad++; } else if (dadGene.getInnovationId() == momGene.getInnovationId()) { if (Math.random() < 0.5f) { selectedGene = momGene; } else { selectedGene = dadGene; } curMom++; curDad++; } if (babyGenes.size() == 0) { babyGenes.add(selectedGene); } else { if (((NEATLinkGene) babyGenes.get(babyGenes.size() - 1)) .getInnovationId() != selectedGene.getInnovationId()) { babyGenes.add(selectedGene); } } addNeuronID(selectedGene.getFromNeuronID(), vecNeurons); addNeuronID(selectedGene.getToNeuronID(), vecNeurons); } Collections.sort(vecNeurons); for (int i = 0; i < vecNeurons.size(); i++) { babyNeurons.add(getInnovations().createNeuronFromID( vecNeurons.get(i))); } final NEATGenome babyGenome = new NEATGenome(getPopulation() .assignGenomeID(), babyNeurons, babyGenes, mom.getInputCount(), mom.getOutputCount()); babyGenome.setGeneticAlgorithm(this); 
babyGenome.setPopulation(getPopulation()); babyGenome.validate(); return babyGenome; }
|
/**
* Perform the crossover.
*
* @param mom
* The mother.
* @param dad
* The father.
* @return The child.
*/
|
Perform the crossover
|
crossover
|
{
"repo_name": "larhoy/SentimentProjectV2",
"path": "SentimentAnalysisV2/encog-core-3.1.0/src/main/java/org/encog/neural/neat/training/NEATTraining.java",
"license": "mit",
"size": 20588
}
|
[
"java.util.ArrayList",
"java.util.Collections",
"java.util.List",
"org.encog.ml.genetic.genome.Chromosome"
] |
import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.encog.ml.genetic.genome.Chromosome;
|
import java.util.*; import org.encog.ml.genetic.genome.*;
|
[
"java.util",
"org.encog.ml"
] |
java.util; org.encog.ml;
| 2,382,772
|
public void line (float x, float y, float z, float x2, float y2, float z2, Color c1, Color c2) {
if (currType != ShapeType.Line) throw new GdxRuntimeException("Must call begin(ShapeType.Line)");
checkDirty();
checkFlush(2);
renderer.color(c1.r, c1.g, c1.b, c1.a);
renderer.vertex(x, y, z);
renderer.color(c2.r, c2.g, c2.b, c2.a);
renderer.vertex(x2, y2, z2);
}
|
void function (float x, float y, float z, float x2, float y2, float z2, Color c1, Color c2) { if (currType != ShapeType.Line) throw new GdxRuntimeException(STR); checkDirty(); checkFlush(2); renderer.color(c1.r, c1.g, c1.b, c1.a); renderer.vertex(x, y, z); renderer.color(c2.r, c2.g, c2.b, c2.a); renderer.vertex(x2, y2, z2); }
|
/** Draws a line. The {@link ShapeType} passed to begin has to be {@link ShapeType#Line}. The line is drawn with 2 colors
* interpolated between start & end point.
* @param c1 Color at start of the line
* @param c2 Color at end of the line */
|
Draws a line. The <code>ShapeType</code> passed to begin has to be <code>ShapeType#Line</code>. The line is drawn with 2 colors interpolated between start & end point
|
line
|
{
"repo_name": "0359xiaodong/libgdx",
"path": "gdx/src/com/badlogic/gdx/graphics/glutils/ShapeRenderer.java",
"license": "apache-2.0",
"size": 38957
}
|
[
"com.badlogic.gdx.graphics.Color",
"com.badlogic.gdx.utils.GdxRuntimeException"
] |
import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.utils.GdxRuntimeException;
|
import com.badlogic.gdx.graphics.*; import com.badlogic.gdx.utils.*;
|
[
"com.badlogic.gdx"
] |
com.badlogic.gdx;
| 2,829,751
|
List<ICookie> getCookieJarContents();
|
List<ICookie> getCookieJarContents();
|
/**
* This method is used to retrieve the contents of Burp's session handling
* cookie jar. Extensions that provide an
* <code>ISessionHandlingAction</code> can query and update the cookie jar
* in order to handle unusual session handling mechanisms.
*
* @return A list of <code>ICookie</code> objects representing the contents
* of Burp's session handling cookie jar.
*/
|
This method is used to retrieve the contents of Burp's session handling cookie jar. Extensions that provide an <code>ISessionHandlingAction</code> can query and update the cookie jar in order to handle unusual session handling mechanisms
|
getCookieJarContents
|
{
"repo_name": "tomsteele/burpbuddy",
"path": "src/main/java/burp/IBurpExtenderCallbacks.java",
"license": "mit",
"size": 41643
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 869,455
|
public Map<String, String> doClassify(String text) {
if (trace) {
System.out.println("RoteListClassifier(" + getName() + ").classify(" + text + ")...");
}
final Map<String, String> result = termsAndStopwords.doClassify(text);
if (trace) {
System.out.println("RoteListClassifier(" + getName() + ").classify(" + text + ").result=" + result);
}
return result;
}
|
Map<String, String> function(String text) { if (trace) { System.out.println(STR + getName() + STR + text + ")..."); } final Map<String, String> result = termsAndStopwords.doClassify(text); if (trace) { System.out.println(STR + getName() + STR + text + STR + result); } return result; }
|
/**
* Determine whether the text is a valid term, and not a stopword.
*/
|
Determine whether the text is a valid term, and not a stopword
|
doClassify
|
{
"repo_name": "KoehlerSB747/sd-tools",
"path": "src/main/java/org/sd/atn/RoteListClassifier.java",
"license": "apache-2.0",
"size": 56734
}
|
[
"java.util.Map"
] |
import java.util.Map;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,650,558
|
public static File createTempFile(final String name)
{
try
{
// create file
final File file = File.createTempFile(name, ".txt");
// create writer
try (FileOutputStream fos = new FileOutputStream(file);
OutputStreamWriter writer = new OutputStreamWriter(fos, Charset.forName("UTF-8").newEncoder()))
{
// place content in file
writer.write("this is a sample test upload file");
}
return file;
}
catch (Exception exception)
{
throw new RuntimeException("Unable to create test file.", exception);
}
}
|
static File function(final String name) { try { final File file = File.createTempFile(name, ".txt"); try (FileOutputStream fos = new FileOutputStream(file); OutputStreamWriter writer = new OutputStreamWriter(fos, Charset.forName("UTF-8").newEncoder())) { writer.write(STR); } return file; } catch (Exception exception) { throw new RuntimeException(STR, exception); } }
|
/**
* Create temp file TODO .. support multiple mimetypes .. build files with
* real size content
*
* @param name file name
* @return {@link File} file
*/
|
Create temp file TODO .. support multiple mimetypes .. build files with real size content
|
createTempFile
|
{
"repo_name": "dnacreative/records-management",
"path": "rm-automation/src/main/java/org/alfresco/po/common/util/Utils.java",
"license": "lgpl-3.0",
"size": 15519
}
|
[
"java.io.File",
"java.io.FileOutputStream",
"java.io.OutputStreamWriter",
"java.nio.charset.Charset"
] |
import java.io.File; import java.io.FileOutputStream; import java.io.OutputStreamWriter; import java.nio.charset.Charset;
|
import java.io.*; import java.nio.charset.*;
|
[
"java.io",
"java.nio"
] |
java.io; java.nio;
| 601,456
|
protected void restoreInputsInSymbolTable() {
Map<String, Object> inputs = script.getInputs();
Map<String, Metadata> inputMetadata = script.getInputMetadata();
LocalVariableMap symbolTable = script.getSymbolTable();
Set<String> inputVariables = script.getInputVariables();
for (String inputVariable : inputVariables) {
if (symbolTable.get(inputVariable) == null) {
// retrieve optional metadata if it exists
Metadata m = inputMetadata.get(inputVariable);
script.in(inputVariable, inputs.get(inputVariable), m);
}
}
}
|
void function() { Map<String, Object> inputs = script.getInputs(); Map<String, Metadata> inputMetadata = script.getInputMetadata(); LocalVariableMap symbolTable = script.getSymbolTable(); Set<String> inputVariables = script.getInputVariables(); for (String inputVariable : inputVariables) { if (symbolTable.get(inputVariable) == null) { Metadata m = inputMetadata.get(inputVariable); script.in(inputVariable, inputs.get(inputVariable), m); } } }
|
/**
* Restore the input variables in the symbol table after script execution.
*/
|
Restore the input variables in the symbol table after script execution
|
restoreInputsInSymbolTable
|
{
"repo_name": "asurve/arvind-sysml",
"path": "src/main/java/org/apache/sysml/api/mlcontext/ScriptExecutor.java",
"license": "apache-2.0",
"size": 22312
}
|
[
"java.util.Map",
"java.util.Set",
"org.apache.sysml.runtime.controlprogram.LocalVariableMap"
] |
import java.util.Map; import java.util.Set; import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
|
import java.util.*; import org.apache.sysml.runtime.controlprogram.*;
|
[
"java.util",
"org.apache.sysml"
] |
java.util; org.apache.sysml;
| 289,106
|
public Builder transform(Transformation transformation) {
if (transformation == null) {
throw new IllegalArgumentException("Transformation must not be null.");
}
if (transformations == null) {
transformations = new ArrayList<Transformation>(2);
}
transformations.add(transformation);
return this;
}
|
Builder function(Transformation transformation) { if (transformation == null) { throw new IllegalArgumentException(STR); } if (transformations == null) { transformations = new ArrayList<Transformation>(2); } transformations.add(transformation); return this; }
|
/**
* Add a custom transformation to be applied to the image.
* <p>
* Custom transformations will always be run after the built-in transformations.
*/
|
Add a custom transformation to be applied to the image. Custom transformations will always be run after the built-in transformations
|
transform
|
{
"repo_name": "yongjiliu/MusicPlayer",
"path": "picasso/src/main/java/com/squareup/picasso/Request.java",
"license": "gpl-2.0",
"size": 11596
}
|
[
"java.util.ArrayList"
] |
import java.util.ArrayList;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 263,117
|
public static PercentType toPercentType(int val) {
int scaledValue = (int) Math.ceil(val / BRIGHTNESS_FACTOR);
return new PercentType(
Util.constrainToRange(scaledValue, PercentType.ZERO.intValue(), PercentType.HUNDRED.intValue()));
}
|
static PercentType function(int val) { int scaledValue = (int) Math.ceil(val / BRIGHTNESS_FACTOR); return new PercentType( Util.constrainToRange(scaledValue, PercentType.ZERO.intValue(), PercentType.HUNDRED.intValue())); }
|
/**
* convert a brightness value from int to PercentType
*
* @param val the value
* @return the corresponding PercentType value
*/
|
convert a brightness value from int to PercentType
|
toPercentType
|
{
"repo_name": "openhab/openhab2",
"path": "bundles/org.openhab.binding.deconz/src/main/java/org/openhab/binding/deconz/internal/Util.java",
"license": "epl-1.0",
"size": 3210
}
|
[
"org.openhab.core.library.types.PercentType"
] |
import org.openhab.core.library.types.PercentType;
|
import org.openhab.core.library.types.*;
|
[
"org.openhab.core"
] |
org.openhab.core;
| 810,727
|
public static <T> Collection<T> assertCollectionSize(String message, Collection<T> list, int size) {
assertEquals(size, list.size(), message + " should be of size: " + size + " but is: " + list);
return list;
}
|
static <T> Collection<T> function(String message, Collection<T> list, int size) { assertEquals(size, list.size(), message + STR + size + STR + list); return list; }
|
/**
* Asserts that a list is of the given size
*/
|
Asserts that a list is of the given size
|
assertCollectionSize
|
{
"repo_name": "nikhilvibhav/camel",
"path": "core/camel-core/src/test/java/org/apache/camel/TestSupport.java",
"license": "apache-2.0",
"size": 24554
}
|
[
"java.util.Collection",
"org.junit.jupiter.api.Assertions"
] |
import java.util.Collection; import org.junit.jupiter.api.Assertions;
|
import java.util.*; import org.junit.jupiter.api.*;
|
[
"java.util",
"org.junit.jupiter"
] |
java.util; org.junit.jupiter;
| 1,168,960
|
public String waitOnMaster(JVMClusterUtil.MasterThread masterThread) {
while (masterThread.isAlive()) {
try {
LOG.info("Waiting on " + masterThread.getMaster().getServerName().toString());
masterThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
masterThreads.remove(masterThread);
return masterThread.getName();
}
|
String function(JVMClusterUtil.MasterThread masterThread) { while (masterThread.isAlive()) { try { LOG.info(STR + masterThread.getMaster().getServerName().toString()); masterThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } } masterThreads.remove(masterThread); return masterThread.getName(); }
|
/**
* Wait for the specified master to stop. Removes this thread from list of running threads.
* @return Name of master that just went down.
*/
|
Wait for the specified master to stop. Removes this thread from list of running threads
|
waitOnMaster
|
{
"repo_name": "JingchengDu/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java",
"license": "apache-2.0",
"size": 15340
}
|
[
"org.apache.hadoop.hbase.util.JVMClusterUtil"
] |
import org.apache.hadoop.hbase.util.JVMClusterUtil;
|
import org.apache.hadoop.hbase.util.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 1,968,825
|
@Override
public boolean addAll(Collection<? extends T> c) {
throw new UnsupportedOperationException();
}
|
boolean function(Collection<? extends T> c) { throw new UnsupportedOperationException(); }
|
/**
* Always throws UnsupportedOperationException.
*/
|
Always throws UnsupportedOperationException
|
addAll
|
{
"repo_name": "pfirmstone/river-internet",
"path": "JGDMS/jgdms-platform/src/main/java/net/jini/core/constraint/ArraySet.java",
"license": "apache-2.0",
"size": 5149
}
|
[
"java.util.Collection"
] |
import java.util.Collection;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 1,113,541
|
public Function getFunctionForAstNode(Node functionNode) {
Preconditions.checkArgument(functionNode.isFunction());
return functionsByNode.get(functionNode);
}
|
Function function(Node functionNode) { Preconditions.checkArgument(functionNode.isFunction()); return functionsByNode.get(functionNode); }
|
/**
* Returns the call graph Function object corresponding to the provided
* AST Token.FUNCTION node, or null if no such object exists.
*/
|
Returns the call graph Function object corresponding to the provided AST Token.FUNCTION node, or null if no such object exists
|
getFunctionForAstNode
|
{
"repo_name": "zombiezen/cardcpx",
"path": "third_party/closure-compiler/src/com/google/javascript/jscomp/CallGraph.java",
"license": "apache-2.0",
"size": 23961
}
|
[
"com.google.common.base.Preconditions",
"com.google.javascript.rhino.Node"
] |
import com.google.common.base.Preconditions; import com.google.javascript.rhino.Node;
|
import com.google.common.base.*; import com.google.javascript.rhino.*;
|
[
"com.google.common",
"com.google.javascript"
] |
com.google.common; com.google.javascript;
| 2,665,777
|
public void setSupportProgress(int progress) {
mImpl.setSupportProgress(progress);
}
/**
* Support version of {@link #onCreateNavigateUpTaskStack(android.app.TaskStackBuilder)}.
* This method will be called on all platform versions.
*
* Define the synthetic task stack that will be generated during Up navigation from
* a different task.
*
* <p>The default implementation of this method adds the parent chain of this activity
* as specified in the manifest to the supplied {@link TaskStackBuilder}. Applications
* may choose to override this method to construct the desired task stack in a different
* way.</p>
*
* <p>This method will be invoked by the default implementation of {@link #onNavigateUp()}
|
void function(int progress) { mImpl.setSupportProgress(progress); } /** * Support version of {@link #onCreateNavigateUpTaskStack(android.app.TaskStackBuilder)}. * This method will be called on all platform versions. * * Define the synthetic task stack that will be generated during Up navigation from * a different task. * * <p>The default implementation of this method adds the parent chain of this activity * as specified in the manifest to the supplied {@link TaskStackBuilder}. Applications * may choose to override this method to construct the desired task stack in a different * way.</p> * * <p>This method will be invoked by the default implementation of {@link #onNavigateUp()}
|
/**
* Support library version of {@link Activity#setProgress(int)}.
* <p>
* Sets the progress for the progress bars in the title.
* <p>
* In order for the progress bar to be shown, the feature must be requested
* via {@link #supportRequestWindowFeature(int)}.
*
* @param progress The progress for the progress bar. Valid ranges are from
* 0 to 10000 (both inclusive). If 10000 is given, the progress
* bar will be completely filled and will fade out.
*/
|
Support library version of <code>Activity#setProgress(int)</code>. Sets the progress for the progress bars in the title. In order for the progress bar to be shown, the feature must be requested via <code>#supportRequestWindowFeature(int)</code>
|
setSupportProgress
|
{
"repo_name": "0359xiaodong/HoloEverywhere",
"path": "library/src/android/support/v7/app/ActionBarActivity.java",
"license": "mit",
"size": 16485
}
|
[
"android.support.v4.app.TaskStackBuilder"
] |
import android.support.v4.app.TaskStackBuilder;
|
import android.support.v4.app.*;
|
[
"android.support"
] |
android.support;
| 2,904,269
|
void removeFinalCertFromPath(LinkedList<X509Certificate> certPathList) {
certPathList.removeLast();
}
|
void removeFinalCertFromPath(LinkedList<X509Certificate> certPathList) { certPathList.removeLast(); }
|
/** Removes final certificate from the certPathList
*
* @param certPathList the certification path list
*/
|
Removes final certificate from the certPathList
|
removeFinalCertFromPath
|
{
"repo_name": "rokn/Count_Words_2015",
"path": "testing/openjdk/jdk/src/share/classes/sun/security/provider/certpath/ReverseBuilder.java",
"license": "mit",
"size": 19883
}
|
[
"java.security.cert.X509Certificate",
"java.util.LinkedList"
] |
import java.security.cert.X509Certificate; import java.util.LinkedList;
|
import java.security.cert.*; import java.util.*;
|
[
"java.security",
"java.util"
] |
java.security; java.util;
| 1,508,700
|
public MapReduceDriver<K1, V1, K2, V2, K3, V3> withKeyOrderComparator(
RawComparator<K2> orderComparator) {
setKeyOrderComparator(orderComparator);
return this;
}
|
MapReduceDriver<K1, V1, K2, V2, K3, V3> function( RawComparator<K2> orderComparator) { setKeyOrderComparator(orderComparator); return this; }
|
/**
* Identical to {@link #setKeyOrderComparator(RawComparator)}, but with a
* fluent programming style
* @param orderComparator Comparator to use in the shuffle stage for key
* value ordering
* @return this
*/
|
Identical to <code>#setKeyOrderComparator(RawComparator)</code>, but with a fluent programming style
|
withKeyOrderComparator
|
{
"repo_name": "apache/mrunit",
"path": "src/main/java/org/apache/hadoop/mrunit/MapReduceDriver.java",
"license": "apache-2.0",
"size": 9890
}
|
[
"org.apache.hadoop.io.RawComparator"
] |
import org.apache.hadoop.io.RawComparator;
|
import org.apache.hadoop.io.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 892,525
|
private Package getAndVerifyPackage(String pn, Manifest man, URL url) {
Package pkg = getDefinedPackage(pn);
if (pkg != null) {
if (pkg.isSealed()) {
if (!pkg.isSealed(url)) {
throw new SecurityException(
"sealing violation: package " + pn + " is sealed");
}
} else {
// can't seal package if already defined without sealing
if ((man != null) && isSealed(pn, man)) {
throw new SecurityException(
"sealing violation: can't seal package " + pn +
": already defined");
}
}
}
return pkg;
}
|
Package function(String pn, Manifest man, URL url) { Package pkg = getDefinedPackage(pn); if (pkg != null) { if (pkg.isSealed()) { if (!pkg.isSealed(url)) { throw new SecurityException( STR + pn + STR); } } else { if ((man != null) && isSealed(pn, man)) { throw new SecurityException( STR + pn + STR); } } } return pkg; }
|
/**
* Get the Package with the specified package name. If defined
* then verify that it against the manifest and code source.
*
* @throws SecurityException if there is a sealing violation (JAR spec)
*/
|
Get the Package with the specified package name. If defined then verify that it against the manifest and code source
|
getAndVerifyPackage
|
{
"repo_name": "dmlloyd/openjdk-modules",
"path": "jdk/src/java.base/share/classes/jdk/internal/loader/BuiltinClassLoader.java",
"license": "gpl-2.0",
"size": 35311
}
|
[
"java.util.jar.Manifest"
] |
import java.util.jar.Manifest;
|
import java.util.jar.*;
|
[
"java.util"
] |
java.util;
| 1,859,778
|
@ScalarFunction
public static long fromEpochSecondsBucket(long seconds, long bucket) {
return TimeUnit.SECONDS.toMillis(seconds * bucket);
}
|
static long function(long seconds, long bucket) { return TimeUnit.SECONDS.toMillis(seconds * bucket); }
|
/**
* Converts nSecondsSinceEpoch (seconds that have been divided by a bucket), to epoch millis
*/
|
Converts nSecondsSinceEpoch (seconds that have been divided by a bucket), to epoch millis
|
fromEpochSecondsBucket
|
{
"repo_name": "linkedin/pinot",
"path": "pinot-common/src/main/java/org/apache/pinot/common/function/scalar/DateTimeFunctions.java",
"license": "apache-2.0",
"size": 18941
}
|
[
"java.util.concurrent.TimeUnit"
] |
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.*;
|
[
"java.util"
] |
java.util;
| 661,766
|
public static Object unwrap(final RType value, final Map<RType, Object> cache) {
if (cache.containsKey(value)) {
return cache.get(value);
} else if (value instanceof RArray) {
List<RType> rtypes = ((RArray) value).getValue();
List<Object> rv = new ArrayList<Object>(rtypes.size());
cache.put(value, rv);
unwrapCollection(rtypes, rv, cache);
return rv;
} else if (value instanceof RList) {
List<RType> rtypes = ((RList) value).getValue();
List<Object> rv = new ArrayList<Object>(rtypes.size());
cache.put(value, rv);
unwrapCollection(rtypes, rv, cache);
return rv;
} else if (value instanceof RSet) {
List<RType> rtypes = ((RSet) value).getValue();
Set<Object> rv = new HashSet<Object>(rtypes.size());
cache.put(value, rv);
unwrapCollection(rtypes, rv, cache);
return rv;
} else if (value instanceof RMap) {
Map<String, RType> map = ((RMap) value).getValue();
Map<String, Object> rv = new HashMap<String, Object>();
cache.put(value, rv);
for (Map.Entry<String, RType> entry : map.entrySet()) {
rv.put(entry.getKey(), unwrap(entry.getValue(), cache));
}
return rv;
} else {
Field f = ReflectionUtils.findField(value.getClass(), "val");
f.setAccessible(true);
Object rv = ReflectionUtils.getField(f, value);
cache.put(value, rv);
return rv;
}
}
|
static Object function(final RType value, final Map<RType, Object> cache) { if (cache.containsKey(value)) { return cache.get(value); } else if (value instanceof RArray) { List<RType> rtypes = ((RArray) value).getValue(); List<Object> rv = new ArrayList<Object>(rtypes.size()); cache.put(value, rv); unwrapCollection(rtypes, rv, cache); return rv; } else if (value instanceof RList) { List<RType> rtypes = ((RList) value).getValue(); List<Object> rv = new ArrayList<Object>(rtypes.size()); cache.put(value, rv); unwrapCollection(rtypes, rv, cache); return rv; } else if (value instanceof RSet) { List<RType> rtypes = ((RSet) value).getValue(); Set<Object> rv = new HashSet<Object>(rtypes.size()); cache.put(value, rv); unwrapCollection(rtypes, rv, cache); return rv; } else if (value instanceof RMap) { Map<String, RType> map = ((RMap) value).getValue(); Map<String, Object> rv = new HashMap<String, Object>(); cache.put(value, rv); for (Map.Entry<String, RType> entry : map.entrySet()) { rv.put(entry.getKey(), unwrap(entry.getValue(), cache)); } return rv; } else { Field f = ReflectionUtils.findField(value.getClass(), "val"); f.setAccessible(true); Object rv = ReflectionUtils.getField(f, value); cache.put(value, rv); return rv; } }
|
/**
* Descends into data structures wrapping all elements as it goes.
* Limitation: RArrays are turned into {@link List} instances!
*
* The cache argument is used to prevent cycles.
* @param value
*/
|
Descends into data structures wrapping all elements as it goes. Limitation: RArrays are turned into <code>List</code> instances! The cache argument is used to prevent cycles
|
unwrap
|
{
"repo_name": "emilroz/openmicroscopy",
"path": "components/blitz/src/omero/rtypes.java",
"license": "gpl-2.0",
"size": 35948
}
|
[
"java.lang.reflect.Field",
"java.util.ArrayList",
"java.util.HashMap",
"java.util.HashSet",
"java.util.List",
"java.util.Map",
"java.util.Set",
"org.springframework.util.ReflectionUtils"
] |
import java.lang.reflect.Field; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.springframework.util.ReflectionUtils;
|
import java.lang.reflect.*; import java.util.*; import org.springframework.util.*;
|
[
"java.lang",
"java.util",
"org.springframework.util"
] |
java.lang; java.util; org.springframework.util;
| 2,639,235
|
public Decompressor createDecompressor() {
return new BZip2DummyDecompressor();
}
|
Decompressor function() { return new BZip2DummyDecompressor(); }
|
/**
* This functionality is currently not supported.
*
* @return Decompressor
*/
|
This functionality is currently not supported
|
createDecompressor
|
{
"repo_name": "Shmuma/hadoop",
"path": "src/core/org/apache/hadoop/io/compress/BZip2Codec.java",
"license": "apache-2.0",
"size": 16292
}
|
[
"org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor"
] |
import org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor;
|
import org.apache.hadoop.io.compress.bzip2.*;
|
[
"org.apache.hadoop"
] |
org.apache.hadoop;
| 7,499
|
public FileDataResult transferAgentPlugin(AuthzSubject subject, AppdefEntityID aid, String plugin)
throws PermissionException, AgentConnectionException, AgentNotFoundException, AgentRemoteException,
FileNotFoundException, IOException, ConfigPropertyException;
|
FileDataResult function(AuthzSubject subject, AppdefEntityID aid, String plugin) throws PermissionException, AgentConnectionException, AgentNotFoundException, AgentRemoteException, FileNotFoundException, IOException, ConfigPropertyException;
|
/**
* Transfer an agent plugin residing on the HQ server to an agent.
* @param subject The subject issuing the request.
* @param aid The agent id.
* @param plugin The plugin name.
* @return {@link FileDataResult} if sizes are 0 then file was not transferred
* @throws PermissionException if the subject does not have proper
* permissions to issue an agent plugin transfer.
* @throws FileNotFoundException if the plugin is not found on the HQ
* server.
* @throws IOException if an I/O error occurs, such as failing to calculate
* the file MD5 checksum.
* @throws AgentRemoteException if an exception occurs on the remote agent
* side.
* @throws AgentConnectionException if the connection to the agent fails.
* @throws AgentNotFoundException if no agent exists with the given agent
* id.
*/
|
Transfer an agent plugin residing on the HQ server to an agent
|
transferAgentPlugin
|
{
"repo_name": "cc14514/hq6",
"path": "hq-server/src/main/java/org/hyperic/hq/appdef/shared/AgentManager.java",
"license": "unlicense",
"size": 24936
}
|
[
"java.io.FileNotFoundException",
"java.io.IOException",
"org.hyperic.hq.agent.AgentConnectionException",
"org.hyperic.hq.agent.AgentRemoteException",
"org.hyperic.hq.agent.FileDataResult",
"org.hyperic.hq.authz.server.session.AuthzSubject",
"org.hyperic.hq.authz.shared.PermissionException",
"org.hyperic.util.ConfigPropertyException"
] |
import java.io.FileNotFoundException; import java.io.IOException; import org.hyperic.hq.agent.AgentConnectionException; import org.hyperic.hq.agent.AgentRemoteException; import org.hyperic.hq.agent.FileDataResult; import org.hyperic.hq.authz.server.session.AuthzSubject; import org.hyperic.hq.authz.shared.PermissionException; import org.hyperic.util.ConfigPropertyException;
|
import java.io.*; import org.hyperic.hq.agent.*; import org.hyperic.hq.authz.server.session.*; import org.hyperic.hq.authz.shared.*; import org.hyperic.util.*;
|
[
"java.io",
"org.hyperic.hq",
"org.hyperic.util"
] |
java.io; org.hyperic.hq; org.hyperic.util;
| 747,105
|
"code != null",
"that != null"
})
@Ensures("result != null")
static String rebaseLocalCalls(String code, String that,
Set<String> whitelist) {
StringBuilder buffer = new StringBuilder();
PushbackTokenizer tokenizer = new PushbackTokenizer(new StringReader(code));
boolean qualified = false;
while (tokenizer.hasNext()) {
Token token = tokenizer.next();
if (!qualified && token.kind == TokenKind.WORD
&& (whitelist == null || !whitelist.contains(token.text))) {
if (token.text.equals("this")) {
buffer.append("( ");
buffer.append(JavaUtils.BEGIN_GENERATED_CODE);
buffer.append(that);
buffer.append(JavaUtils.END_GENERATED_CODE);
buffer.append(" )");
} else {
if (JavaUtils.lookingAt(tokenizer, "(")) {
buffer.append(JavaUtils.BEGIN_GENERATED_CODE);
buffer.append(that);
buffer.append(".");
buffer.append(JavaUtils.END_GENERATED_CODE);
buffer.append(token.text);
} else {
buffer.append(token.text);
}
}
} else {
buffer.append(token.text);
}
qualified = token.text.equals(".");
}
return buffer.toString();
}
|
STR, STR }) @Ensures(STR) static String rebaseLocalCalls(String code, String that, Set<String> whitelist) { StringBuilder buffer = new StringBuilder(); PushbackTokenizer tokenizer = new PushbackTokenizer(new StringReader(code)); boolean qualified = false; while (tokenizer.hasNext()) { Token token = tokenizer.next(); if (!qualified && token.kind == TokenKind.WORD && (whitelist == null !whitelist.contains(token.text))) { if (token.text.equals("this")) { buffer.append(STR); buffer.append(JavaUtils.BEGIN_GENERATED_CODE); buffer.append(that); buffer.append(JavaUtils.END_GENERATED_CODE); buffer.append(STR); } else { if (JavaUtils.lookingAt(tokenizer, "(")) { buffer.append(JavaUtils.BEGIN_GENERATED_CODE); buffer.append(that); buffer.append("."); buffer.append(JavaUtils.END_GENERATED_CODE); buffer.append(token.text); } else { buffer.append(token.text); } } } else { buffer.append(token.text); } qualified = token.text.equals("."); } return buffer.toString(); }
|
/**
* Returns {@code code} with all unqualified or this-qualified
* identifiers followed by an open parenthesis rebased to
* {@code that}. Unqualified identifiers in {@code whitelist}
* are not subject to this change.
*/
|
Returns code with all unqualified or this-qualified identifiers followed by an open parenthesis rebased to that. Unqualified identifiers in whitelist are not subject to this change
|
rebaseLocalCalls
|
{
"repo_name": "konvergeio/cofoja",
"path": "src/main/java/com/google/java/contract/core/apt/ContractCreation.java",
"license": "lgpl-3.0",
"size": 18054
}
|
[
"com.google.java.contract.Ensures",
"com.google.java.contract.core.util.JavaTokenizer",
"com.google.java.contract.core.util.JavaUtils",
"com.google.java.contract.core.util.PushbackTokenizer",
"java.io.StringReader",
"java.util.Set"
] |
import com.google.java.contract.Ensures; import com.google.java.contract.core.util.JavaTokenizer; import com.google.java.contract.core.util.JavaUtils; import com.google.java.contract.core.util.PushbackTokenizer; import java.io.StringReader; import java.util.Set;
|
import com.google.java.contract.*; import com.google.java.contract.core.util.*; import java.io.*; import java.util.*;
|
[
"com.google.java",
"java.io",
"java.util"
] |
com.google.java; java.io; java.util;
| 1,058,769
|
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Void> breakPairingAsync(String resourceGroupName, String namespaceName, String alias);
|
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> breakPairingAsync(String resourceGroupName, String namespaceName, String alias);
|
/**
* This operation disables the Disaster Recovery and stops replicating changes from primary to secondary namespaces.
*
* @param resourceGroupName Name of the Resource group within the Azure subscription.
* @param namespaceName The namespace name.
* @param alias The Disaster Recovery configuration name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
|
This operation disables the Disaster Recovery and stops replicating changes from primary to secondary namespaces
|
breakPairingAsync
|
{
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-servicebus/src/main/java/com/azure/resourcemanager/servicebus/fluent/DisasterRecoveryConfigsClient.java",
"license": "mit",
"size": 35020
}
|
[
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod"
] |
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod;
|
import com.azure.core.annotation.*;
|
[
"com.azure.core"
] |
com.azure.core;
| 2,117,527
|
public void buildXML(StringBuilder sb) {
Log.i(TAG, "ProcedurePage.toXML()");
sb.append("<Page>\n");
for (ProcedureElement e : elements) {
e.buildXML(sb);
}
sb.append("</Page>\n");
}
|
void function(StringBuilder sb) { Log.i(TAG, STR); sb.append(STR); for (ProcedureElement e : elements) { e.buildXML(sb); } sb.append(STR); }
|
/**
* Writes a string representation of this object to a StringBuilder
* @param sb the builder to write to.
*/
|
Writes a string representation of this object to a StringBuilder
|
buildXML
|
{
"repo_name": "Disha10/SANA",
"path": "app/src/main/java/org/sana/android/procedure/ProcedurePage.java",
"license": "bsd-3-clause",
"size": 14486
}
|
[
"android.util.Log"
] |
import android.util.Log;
|
import android.util.*;
|
[
"android.util"
] |
android.util;
| 1,239,095
|
public boolean isOpaqueCube(IBlockState state)
{
return false;
}
|
boolean function(IBlockState state) { return false; }
|
/**
* Used to determine ambient occlusion and culling when rebuilding chunks for render
*/
|
Used to determine ambient occlusion and culling when rebuilding chunks for render
|
isOpaqueCube
|
{
"repo_name": "Severed-Infinity/technium",
"path": "build/tmp/recompileMc/sources/net/minecraft/block/BlockCake.java",
"license": "gpl-3.0",
"size": 6580
}
|
[
"net.minecraft.block.state.IBlockState"
] |
import net.minecraft.block.state.IBlockState;
|
import net.minecraft.block.state.*;
|
[
"net.minecraft.block"
] |
net.minecraft.block;
| 2,860,630
|
protected static <UJO extends Ujo,VALUE> Property<UJO,VALUE> newKeyDefault(VALUE defaultValue) {
return new OrmProperty(UNDEFINED_INDEX, null, defaultValue, null);
}
// --------- STATIC METHODS -------------------
|
static <UJO extends Ujo,VALUE> Property<UJO,VALUE> function(VALUE defaultValue) { return new OrmProperty(UNDEFINED_INDEX, null, defaultValue, null); }
|
/** A Property Factory creates new key and assigns a next key index.
* @hidden
*/
|
A Property Factory creates new key and assigns a next key index
|
newKeyDefault
|
{
"repo_name": "pponec/ujorm",
"path": "project-m2/ujo-orm/src/main/java/org/ujorm/implementation/orm/OrmTableSynchronized.java",
"license": "apache-2.0",
"size": 14664
}
|
[
"org.ujorm.Ujo",
"org.ujorm.extensions.Property"
] |
import org.ujorm.Ujo; import org.ujorm.extensions.Property;
|
import org.ujorm.*; import org.ujorm.extensions.*;
|
[
"org.ujorm",
"org.ujorm.extensions"
] |
org.ujorm; org.ujorm.extensions;
| 2,336,590
|
static void dumpConfiguration(Writer writer) throws IOException {
Configuration conf = new Configuration(false);
conf.addResource(QUEUE_ACLS_FILE_NAME);
Configuration.dumpConfiguration(conf, writer);
}
|
static void dumpConfiguration(Writer writer) throws IOException { Configuration conf = new Configuration(false); conf.addResource(QUEUE_ACLS_FILE_NAME); Configuration.dumpConfiguration(conf, writer); }
|
/**
* prints the configuration of QueueManager in Json format.
* The method should be modified accordingly whenever
* QueueManager(Configuration) constructor is modified.
* @param writer {@link}Writer object to which the configuration properties
* are printed in json format
* @throws IOException
*/
|
prints the configuration of QueueManager in Json format. The method should be modified accordingly whenever QueueManager(Configuration) constructor is modified
|
dumpConfiguration
|
{
"repo_name": "leonhong/hadoop-common",
"path": "src/mapred/org/apache/hadoop/mapred/QueueManager.java",
"license": "apache-2.0",
"size": 12310
}
|
[
"java.io.IOException",
"java.io.Writer",
"org.apache.hadoop.conf.Configuration"
] |
import java.io.IOException; import java.io.Writer; import org.apache.hadoop.conf.Configuration;
|
import java.io.*; import org.apache.hadoop.conf.*;
|
[
"java.io",
"org.apache.hadoop"
] |
java.io; org.apache.hadoop;
| 2,277,328
|
public static TextBlock createTextBlock(String text, Font font, Paint paint,
float maxWidth, TextMeasurer measurer) {
return createTextBlock(text, font, paint, maxWidth, Integer.MAX_VALUE,
measurer);
}
|
static TextBlock function(String text, Font font, Paint paint, float maxWidth, TextMeasurer measurer) { return createTextBlock(text, font, paint, maxWidth, Integer.MAX_VALUE, measurer); }
|
/**
* Creates a new text block from the given string, breaking the
* text into lines so that the {@code maxWidth} value is respected.
*
* @param text the text.
* @param font the font.
* @param paint the paint.
* @param maxWidth the maximum width for each line.
* @param measurer the text measurer.
*
* @return A text block.
*/
|
Creates a new text block from the given string, breaking the text into lines so that the maxWidth value is respected
|
createTextBlock
|
{
"repo_name": "jfree/jfreechart-fse",
"path": "src/main/java/org/jfree/chart/text/TextUtilities.java",
"license": "lgpl-2.1",
"size": 35433
}
|
[
"java.awt.Font",
"java.awt.Paint"
] |
import java.awt.Font; import java.awt.Paint;
|
import java.awt.*;
|
[
"java.awt"
] |
java.awt;
| 2,270,841
|
Custom readFrom(StreamInput in) throws IOException;
|
Custom readFrom(StreamInput in) throws IOException;
|
/**
* Reads an object of this type from the provided {@linkplain StreamInput}. The receiving instance remains unchanged.
*/
|
Reads an object of this type from the provided StreamInput. The receiving instance remains unchanged
|
readFrom
|
{
"repo_name": "alexshadow007/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java",
"license": "apache-2.0",
"size": 59687
}
|
[
"java.io.IOException",
"org.elasticsearch.common.io.stream.StreamInput"
] |
import java.io.IOException; import org.elasticsearch.common.io.stream.StreamInput;
|
import java.io.*; import org.elasticsearch.common.io.stream.*;
|
[
"java.io",
"org.elasticsearch.common"
] |
java.io; org.elasticsearch.common;
| 2,297,037
|
private JPanel getContentPanel() {
if (contentPanel == null) {
GridBagConstraints gridBagConstraints4 = new GridBagConstraints();
contentPanel = new JPanel();
contentPanel.setLayout(new GridBagLayout());
contentPanel
.setBorder(javax.swing.BorderFactory
.createTitledBorder(
null,
"Host Certificates",
javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
javax.swing.border.TitledBorder.DEFAULT_POSITION,
null, LookAndFeel.getPanelLabelColor()));
gridBagConstraints4.weightx = 1.0;
gridBagConstraints4.gridy = 0;
gridBagConstraints4.gridx = 0;
gridBagConstraints4.weighty = 1.0;
gridBagConstraints4.fill = java.awt.GridBagConstraints.BOTH;
contentPanel.add(getJScrollPane(), gridBagConstraints4);
}
return contentPanel;
}
|
JPanel function() { if (contentPanel == null) { GridBagConstraints gridBagConstraints4 = new GridBagConstraints(); contentPanel = new JPanel(); contentPanel.setLayout(new GridBagLayout()); contentPanel .setBorder(javax.swing.BorderFactory .createTitledBorder( null, STR, javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, LookAndFeel.getPanelLabelColor())); gridBagConstraints4.weightx = 1.0; gridBagConstraints4.gridy = 0; gridBagConstraints4.gridx = 0; gridBagConstraints4.weighty = 1.0; gridBagConstraints4.fill = java.awt.GridBagConstraints.BOTH; contentPanel.add(getJScrollPane(), gridBagConstraints4); } return contentPanel; }
|
/**
* This method initializes jPanel
*
* @return javax.swing.JPanel
*/
|
This method initializes jPanel
|
getContentPanel
|
{
"repo_name": "NCIP/cagrid",
"path": "cagrid/Software/core/caGrid/projects/gaards-ui/src/org/cagrid/gaards/ui/dorian/federation/HostCertificatesWindow.java",
"license": "bsd-3-clause",
"size": 21756
}
|
[
"java.awt.GridBagConstraints",
"java.awt.GridBagLayout",
"javax.swing.JPanel",
"org.cagrid.grape.LookAndFeel"
] |
import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import javax.swing.JPanel; import org.cagrid.grape.LookAndFeel;
|
import java.awt.*; import javax.swing.*; import org.cagrid.grape.*;
|
[
"java.awt",
"javax.swing",
"org.cagrid.grape"
] |
java.awt; javax.swing; org.cagrid.grape;
| 2,102,036
|
List<T> findAllPaginatedAndSorted(final int page, final int size, final String sortBy, final String sortOrder);
// create
|
List<T> findAllPaginatedAndSorted(final int page, final int size, final String sortBy, final String sortOrder);
|
/**
* - contract: if nothing is found, an empty list will be returned to the calling client <br>
*/
|
- contract: if nothing is found, an empty list will be returned to the calling client
|
findAllPaginatedAndSorted
|
{
"repo_name": "KienKede/bonsai-manager-REST",
"path": "src/main/java/com/bonsaimanager/common/interfaces/IOperations.java",
"license": "apache-2.0",
"size": 1142
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,562,219
|
public StreamingEndpointInner withCustomHostNames(List<String> customHostNames) {
this.customHostNames = customHostNames;
return this;
}
|
StreamingEndpointInner function(List<String> customHostNames) { this.customHostNames = customHostNames; return this; }
|
/**
* Set the custom host names of the StreamingEndpoint.
*
* @param customHostNames the customHostNames value to set
* @return the StreamingEndpointInner object itself.
*/
|
Set the custom host names of the StreamingEndpoint
|
withCustomHostNames
|
{
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/mediaservices/mgmt-v2019_05_01_preview/src/main/java/com/microsoft/azure/management/mediaservices/v2019_05_01_preview/implementation/StreamingEndpointInner.java",
"license": "mit",
"size": 10825
}
|
[
"java.util.List"
] |
import java.util.List;
|
import java.util.*;
|
[
"java.util"
] |
java.util;
| 2,767,676
|
public void addResource(InputStream in) {
addResourceObject(new Resource(in));
}
|
void function(InputStream in) { addResourceObject(new Resource(in)); }
|
/**
* Add a configuration resource.
*
* The properties of this resource will override properties of previously
* added resources, unless they were marked <a href="#Final">final</a>.
*
* WARNING: The contents of the InputStream will be cached, by this method.
* So use this sparingly because it does increase the memory consumption.
*
* @param in InputStream to deserialize the object from. In will be read from
* when a get or set is called next. After it is read the stream will be
* closed.
*/
|
Add a configuration resource. The properties of this resource will override properties of previously added resources, unless they were marked final. So use this sparingly because it does increase the memory consumption
|
addResource
|
{
"repo_name": "simbadzina/hadoop-fcfs",
"path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java",
"license": "apache-2.0",
"size": 103404
}
|
[
"java.io.InputStream"
] |
import java.io.InputStream;
|
import java.io.*;
|
[
"java.io"
] |
java.io;
| 2,013,937
|
@GuardedBy("this")
void setValue(ReferenceEntry<K, V> entry, K key, V value, long now) {
ValueReference<K, V> previous = entry.getValueReference();
int weight = map.weigher.weigh(key, value);
checkState(weight >= 0, "Weights must be non-negative");
ValueReference<K, V> valueReference =
map.valueStrength.referenceValue(this, entry, value, weight);
entry.setValueReference(valueReference);
recordWrite(entry, weight, now);
previous.notifyNewValue(value);
}
// loading
|
@GuardedBy("this") void setValue(ReferenceEntry<K, V> entry, K key, V value, long now) { ValueReference<K, V> previous = entry.getValueReference(); int weight = map.weigher.weigh(key, value); checkState(weight >= 0, STR); ValueReference<K, V> valueReference = map.valueStrength.referenceValue(this, entry, value, weight); entry.setValueReference(valueReference); recordWrite(entry, weight, now); previous.notifyNewValue(value); }
|
/**
* Sets a new value of an entry. Adds newly created entries at the end of the access queue.
*/
|
Sets a new value of an entry. Adds newly created entries at the end of the access queue
|
setValue
|
{
"repo_name": "uschindler/guava",
"path": "guava/src/com/google/common/cache/LocalCache.java",
"license": "apache-2.0",
"size": 144800
}
|
[
"com.google.common.base.Preconditions",
"javax.annotation.concurrent.GuardedBy"
] |
import com.google.common.base.Preconditions; import javax.annotation.concurrent.GuardedBy;
|
import com.google.common.base.*; import javax.annotation.concurrent.*;
|
[
"com.google.common",
"javax.annotation"
] |
com.google.common; javax.annotation;
| 40,829
|
public void setExifData(ExifData exifData) { this.exifData = exifData; }
|
public void setExifData(ExifData exifData) { this.exifData = exifData; }
|
/**
* Exif data of this picture. {@code null} if there are no exif data available.
*/
|
Exif data of this picture. null if there are no exif data available
|
getExifData
|
{
"repo_name": "shred/cilla",
"path": "cilla-core/src/main/java/org/shredzone/cilla/core/model/Picture.java",
"license": "agpl-3.0",
"size": 6776
}
|
[
"org.shredzone.cilla.core.model.embed.ExifData"
] |
import org.shredzone.cilla.core.model.embed.ExifData;
|
import org.shredzone.cilla.core.model.embed.*;
|
[
"org.shredzone.cilla"
] |
org.shredzone.cilla;
| 1,328,055
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.